Skip to content

Commit

Permalink
feat: update models to conform to Codable protocol
Browse files Browse the repository at this point in the history
chore: update .gitignore to exclude index-build
  • Loading branch information
ajason committed Jan 8, 2025
1 parent 8cfa454 commit 8d03281
Show file tree
Hide file tree
Showing 7 changed files with 993 additions and 864 deletions.
2 changes: 2 additions & 0 deletions .gitignore
Original file line number Diff line number Diff line change
Expand Up @@ -6,3 +6,5 @@ DerivedData/
.swiftpm/configuration/registries.json
.swiftpm/xcode/package.xcworkspace/contents.xcworkspacedata
.netrc
/.index-build
.vscode/settings.json
1,095 changes: 601 additions & 494 deletions Sources/OpenAI/Public/Parameters/Chat/ChatCompletionParameters.swift

Large diffs are not rendered by default.

214 changes: 107 additions & 107 deletions Sources/OpenAI/Public/ResponseModels/Chat/ChatCompletionChunkObject.swift
Original file line number Diff line number Diff line change
Expand Up @@ -8,114 +8,114 @@
import Foundation

/// Represents a [streamed](https://platform.openai.com/docs/api-reference/chat/streaming) chunk of a chat completion response returned by model, based on the provided input.
public struct ChatCompletionChunkObject: Decodable {

/// A unique identifier for the chat completion chunk.
public let id: String?
/// A list of chat completion choices. Can be more than one if n is greater than 1.
public let choices: [ChatChoice]
/// The Unix timestamp (in seconds) of when the chat completion chunk was created.
public let created: Int
/// The model to generate the completion.
public let model: String
/// The service tier used for processing the request. This field is only included if the service_tier parameter is specified in the request.
public let serviceTier: String?
/// This fingerprint represents the backend configuration that the model runs with.
/// Can be used in conjunction with the seed request parameter to understand when backend changes have been made that might impact determinism.
public let systemFingerprint: String?
/// The object type, which is always chat.completion.chunk.
public let object: String
/// An optional field that will only be present when you set stream_options: {"include_usage": true} in your request. When present, it contains a null value except for the last chunk which contains the token usage statistics for the entire request.
public let usage: ChatUsage?

public struct ChatChoice: Decodable {

/// A chat completion delta generated by streamed model responses.
public let delta: Delta
/// The reason the model stopped generating tokens. This will be stop if the model hit a natural stop point or a provided stop sequence, length if the maximum number of tokens specified in the request was reached, content_filter if content was omitted due to a flag from our content filters, tool_calls if the model called a tool, or function_call (deprecated) if the model called a function.
public let finishReason: IntOrStringValue?
/// The index of the choice in the list of choices.
public let index: Int
/// Provided by the Vision API.
public let finishDetails: FinishDetails?
/// Log probability information for the choice.
public let logprobs: LogProb?

public struct Delta: Decodable {

/// The contents of the chunk message.
public let content: String?
/// The tool calls generated by the model, such as function calls.
public let toolCalls: [ToolCall]?
/// The name and arguments of a function that should be called, as generated by the model.
@available(*, deprecated, message: "Deprecated and replaced by `tool_calls`")
public let functionCall: FunctionCall?
/// The role of the author of this message.
public let role: String?
/// The refusal message generated by the model.
public let refusal: String?

enum CodingKeys: String, CodingKey {
case content
case toolCalls = "tool_calls"
case functionCall = "function_call"
case role
case refusal
}
}

public struct LogProb: Decodable {
/// A list of message content tokens with log probability information.
let content: [TokenDetail]
}

public struct TokenDetail: Decodable {
/// The token.
let token: String
/// The log probability of this token.
let logprob: Double
/// A list of integers representing the UTF-8 bytes representation of the token. Useful in instances where characters are represented by multiple tokens and their byte representations must be combined to generate the correct text representation. Can be null if there is no bytes representation for the token.
let bytes: [Int]?
/// List of the most likely tokens and their log probability, at this token position. In rare cases, there may be fewer than the number of requested top_logprobs returned.
let topLogprobs: [TopLogProb]

enum CodingKeys: String, CodingKey {
case token, logprob, bytes
case topLogprobs = "top_logprobs"
}

struct TopLogProb: Decodable {
/// The token.
let token: String
/// The log probability of this token.
let logprob: Double
/// A list of integers representing the UTF-8 bytes representation of the token. Useful in instances where characters are represented by multiple tokens and their byte representations must be combined to generate the correct text representation. Can be null if there is no bytes representation for the token.
let bytes: [Int]?
}
}

/// Provided by the Vision API.
public struct FinishDetails: Decodable {
let type: String
/// Represents a [streamed](https://platform.openai.com/docs/api-reference/chat/streaming) chunk of a
/// chat completion response returned by the model, based on the provided input.
public struct ChatCompletionChunkObject: Codable {

   /// A unique identifier for the chat completion chunk.
   public let id: String?
   /// A list of chat completion choices. Can be more than one if `n` is greater than 1.
   public let choices: [ChatChoice]
   /// The Unix timestamp (in seconds) of when the chat completion chunk was created.
   public let created: Int
   /// The model to generate the completion.
   public let model: String
   /// The service tier used for processing the request. This field is only included if the
   /// `service_tier` parameter is specified in the request.
   public let serviceTier: String?
   /// This fingerprint represents the backend configuration that the model runs with.
   /// Can be used in conjunction with the seed request parameter to understand when backend
   /// changes have been made that might impact determinism.
   public let systemFingerprint: String?
   /// The object type, which is always `chat.completion.chunk`.
   public let object: String
   /// An optional field that will only be present when you set `stream_options: {"include_usage": true}`
   /// in your request. When present, it contains a null value except for the last chunk, which contains
   /// the token usage statistics for the entire request.
   public let usage: ChatUsage?

   public struct ChatChoice: Codable {

      /// A chat completion delta generated by streamed model responses.
      public let delta: Delta
      /// The reason the model stopped generating tokens. This will be `stop` if the model hit a natural
      /// stop point or a provided stop sequence, `length` if the maximum number of tokens specified in
      /// the request was reached, `content_filter` if content was omitted due to a flag from our content
      /// filters, `tool_calls` if the model called a tool, or `function_call` (deprecated) if the model
      /// called a function.
      public let finishReason: IntOrStringValue?
      /// The index of the choice in the list of choices.
      public let index: Int
      /// Provided by the Vision API.
      public let finishDetails: FinishDetails?
      /// Log probability information for the choice.
      public let logprobs: LogProb?

      public struct Delta: Codable {

         /// The contents of the chunk message.
         public let content: String?
         /// The tool calls generated by the model, such as function calls.
         public let toolCalls: [ToolCall]?
         /// The name and arguments of a function that should be called, as generated by the model.
         @available(*, deprecated, message: "Deprecated and replaced by `tool_calls`")
         public let functionCall: FunctionCall?
         /// The role of the author of this message.
         public let role: String?
         /// The refusal message generated by the model.
         public let refusal: String?

         enum CodingKeys: String, CodingKey {
            case content
            case toolCalls = "tool_calls"
            case functionCall = "function_call"
            case role
            case refusal
         }
      }

      public struct LogProb: Codable {
         /// A list of message content tokens with log probability information.
         let content: [TokenDetail]
      }

      public struct TokenDetail: Codable {
         /// The token.
         let token: String
         /// The log probability of this token.
         let logprob: Double
         /// A list of integers representing the UTF-8 bytes representation of the token. Useful in
         /// instances where characters are represented by multiple tokens and their byte representations
         /// must be combined to generate the correct text representation. Can be null if there is no
         /// bytes representation for the token.
         let bytes: [Int]?
         /// List of the most likely tokens and their log probability, at this token position. In rare
         /// cases, there may be fewer than the number of requested `top_logprobs` returned.
         let topLogprobs: [TopLogProb]

         // NOTE(review): the scraped diff had ChatChoice's keys (`delta`, `finish_reason`, …) and a
         // duplicate of the outer struct's CodingKeys merged into this enum, which breaks Codable
         // synthesis. Only TokenDetail's own properties belong here.
         enum CodingKeys: String, CodingKey {
            case token, logprob, bytes
            case topLogprobs = "top_logprobs"
         }

         struct TopLogProb: Codable {
            /// The token.
            let token: String
            /// The log probability of this token.
            let logprob: Double
            /// A list of integers representing the UTF-8 bytes representation of the token. Useful in
            /// instances where characters are represented by multiple tokens and their byte
            /// representations must be combined to generate the correct text representation. Can be
            /// null if there is no bytes representation for the token.
            let bytes: [Int]?
         }
      }

      /// Provided by the Vision API.
      public struct FinishDetails: Codable {
         let type: String
      }

      enum CodingKeys: String, CodingKey {
         case delta
         case finishReason = "finish_reason"
         case index
         case finishDetails = "finish_details"
         case logprobs
      }
   }

   enum CodingKeys: String, CodingKey {
      case id
      case choices
      case created
      case model
      case serviceTier = "service_tier"
      case systemFingerprint = "system_fingerprint"
      case object
      case usage
   }
}
Loading

0 comments on commit 8d03281

Please sign in to comment.