diff --git a/.gitignore b/.gitignore index e838b643..bf30bfd4 100644 --- a/.gitignore +++ b/.gitignore @@ -1,3 +1,6 @@ +# hooks + + # Xcode # # gitignore contributors: remember to update Global/Xcode.gitignore, Objective-C.gitignore & Swift.gitignore diff --git a/Sources/OpenAI/Public/Models/ChatQuery.swift b/Sources/OpenAI/Public/Models/ChatQuery.swift index c7a88649..8f1f521a 100644 --- a/Sources/OpenAI/Public/Models/ChatQuery.swift +++ b/Sources/OpenAI/Public/Models/ChatQuery.swift @@ -9,6 +9,7 @@ import Foundation /// Creates a model response for the given chat conversation /// https://platform.openai.com/docs/guides/text-generation +/// https://platform.openai.com/docs/api-reference/chat/create public struct ChatQuery: Equatable, Codable, Streamable { /// A list of messages comprising the conversation so far diff --git a/Sources/OpenAI/Public/Models/ChatResult.swift b/Sources/OpenAI/Public/Models/ChatResult.swift index c2f7c12d..7cfbbff6 100644 --- a/Sources/OpenAI/Public/Models/ChatResult.swift +++ b/Sources/OpenAI/Public/Models/ChatResult.swift @@ -6,9 +6,43 @@ // import Foundation - +/// https://platform.openai.com/docs/api-reference/chat/object +/// Example Completion object print +/// ``` +/// { +/// "id": "chatcmpl-123456", +/// "object": "chat.completion", +/// "created": 1728933352, +/// "model": "gpt-4o-2024-08-06", +/// "choices": [ +/// { +/// "index": 0, +/// "message": { +/// "role": "assistant", +/// "content": "Hi there! 
How can I assist you today?", +/// "refusal": null +/// }, +/// "logprobs": null, +/// "finish_reason": "stop" +/// } +/// ], +/// "usage": { +/// "prompt_tokens": 19, +/// "completion_tokens": 10, +/// "total_tokens": 29, +/// "prompt_tokens_details": { +/// "cached_tokens": 0 +/// }, +/// "completion_tokens_details": { +/// "reasoning_tokens": 0 +/// } +/// }, +/// "system_fingerprint": "fp_6b68a8204b" +/// } +/// ``` public struct ChatResult: Codable, Equatable { + /// mimic the choices array in the chat completion object public struct Choice: Codable, Equatable { public typealias ChatCompletionMessage = ChatQuery.ChatCompletionMessageParam diff --git a/docs/Classes.html b/docs/Classes.html new file mode 100644 index 00000000..2831f986 --- /dev/null +++ b/docs/Classes.html @@ -0,0 +1,325 @@ + + + + Classes Reference + + + + + + + + + + + + + +
+
+

Docs (100% documented)

+
+
+ +
+
+
+
+
+ +
+
+ +
+
+
+

Classes

+

The following classes are available globally.

+ +
+
+
+ +
+
+
+ +
+
+ + diff --git a/docs/Classes/OpenAI.html b/docs/Classes/OpenAI.html new file mode 100644 index 00000000..5404d16c --- /dev/null +++ b/docs/Classes/OpenAI.html @@ -0,0 +1,828 @@ + + + + OpenAI Class Reference + + + + + + + + + + + + + +
+
+

Docs (100% documented)

+
+
+ +
+
+
+
+
+ +
+
+ +
+
+
+

OpenAI

+
+
+ +
final public class OpenAI : OpenAIProtocol
+ +
+
+ +
+
+
+ +
+
+
+ +
+
+ + diff --git a/docs/Classes/OpenAI/Configuration.html b/docs/Classes/OpenAI/Configuration.html new file mode 100644 index 00000000..e07e9912 --- /dev/null +++ b/docs/Classes/OpenAI/Configuration.html @@ -0,0 +1,494 @@ + + + + Configuration Structure Reference + + + + + + + + + + + + + +
+
+

Docs (100% documented)

+
+
+ +
+
+
+
+
+ +
+
+ +
+
+
+

Configuration

+
+
+ +
public struct Configuration
+ +
+
+ +
+
+
+
    +
  • +
    + + + + token + +
    +
    +
    +
    +
    + +
    +

    Declaration

    +
    +

    Swift

    +
    public let token: String
    + +
    +
    +
    +
    +
  • +
  • + +
    +
    +
    +
    +
    +

    Optional OpenAI organization identifier. See https://platform.openai.com/docs/api-reference/authentication

    + +
    +
    +

    Declaration

    +
    +

    Swift

    +
    public let organizationIdentifier: String?
    + +
    +
    +
    +
    +
  • +
  • +
    + + + + host + +
    +
    +
    +
    +
    +
    +

    API host. Set this property if you use some kind of proxy or your own server. Default is api.openai.com

    + +
    +
    +

    Declaration

    +
    +

    Swift

    +
    public let host: String
    + +
    +
    +
    +
    +
  • +
  • +
    + + + + port + +
    +
    +
    +
    +
    +
    + +
    +
    +

    Declaration

    +
    +

    Swift

    +
    public let port: Int
    + +
    +
    +
    +
    +
  • +
  • +
    + + + + scheme + +
    +
    +
    +
    +
    +
    + +
    +
    +

    Declaration

    +
    +

    Swift

    +
    public let scheme: String
    + +
    +
    +
    +
    +
  • +
  • +
    + + + + timeoutInterval + +
    +
    +
    +
    +
    +
    +

    Default request timeout

    + +
    +
    +

    Declaration

    +
    +

    Swift

    +
    public let timeoutInterval: TimeInterval
    + +
    +
    +
    +
    +
  • +
  • + +
    +
    +
    +
    +
    + +
    +
    +

    Declaration

    +
    +

    Swift

    +
    public init(token: String, organizationIdentifier: String? = nil, host: String = "api.openai.com", port: Int = 443, scheme: String = "https", timeoutInterval: TimeInterval = 60.0)
    + +
    +
    +
    +
    +
  • +
+
+
+
+ +
+
+ + diff --git a/docs/Enums.html b/docs/Enums.html new file mode 100644 index 00000000..4aef1836 --- /dev/null +++ b/docs/Enums.html @@ -0,0 +1,325 @@ + + + + Enumerations Reference + + + + + + + + + + + + + +
+
+

Docs (100% documented)

+
+
+ +
+
+
+
+
+ +
+
+ +
+
+
+

Enumerations

+

The following enumerations are available globally.

+ +
+
+
+
    +
  • +
    + + + + OpenAIError + +
    +
    +
    +
    +
    +
    + + See more +
    +
    +

    Declaration

    +
    +

    Swift

    +
    public enum OpenAIError : Error
    + +
    +
    +
    +
    +
  • +
+
+
+
+ +
+
+ + diff --git a/docs/Enums/OpenAIError.html b/docs/Enums/OpenAIError.html new file mode 100644 index 00000000..682f456c --- /dev/null +++ b/docs/Enums/OpenAIError.html @@ -0,0 +1,332 @@ + + + + OpenAIError Enumeration Reference + + + + + + + + + + + + + +
+
+

Docs (100% documented)

+
+
+ +
+
+
+
+
+ +
+
+ +
+
+
+

OpenAIError

+
+
+ +
public enum OpenAIError : Error
+ +
+
+ +
+
+
+
    +
  • +
    + + + + emptyData + +
    +
    +
    +
    +
    +
    + +
    +
    +

    Declaration

    +
    +

    Swift

    +
    case emptyData
    + +
    +
    +
    +
    +
  • +
+
+
+
+ +
+
+ + diff --git a/docs/Extensions.html b/docs/Extensions.html new file mode 100644 index 00000000..56666ea1 --- /dev/null +++ b/docs/Extensions.html @@ -0,0 +1,325 @@ + + + + Extensions Reference + + + + + + + + + + + + + +
+
+

Docs (100% documented)

+
+
+ +
+
+
+
+
+ +
+
+ +
+
+
+

Extensions

+

The following extensions are available globally.

+ +
+
+
+
    +
  • +
    + + + + Model + +
    +
    +
    +
    +
    +
    + + See more +
    +
    +

    Declaration

    +
    +

    Swift

    +
    public extension Model
    + +
    +
    +
    +
    +
  • +
+
+
+
+ +
+
+ + diff --git a/docs/Extensions/Model.html b/docs/Extensions/Model.html new file mode 100644 index 00000000..e1d188ed --- /dev/null +++ b/docs/Extensions/Model.html @@ -0,0 +1,1516 @@ + + + + Model Extension Reference + + + + + + + + + + + + + +
+
+

Docs (100% documented)

+
+
+ +
+
+
+
+
+ +
+
+ +
+
+
+

Model

+
+
+ +
public extension Model
+ +
+
+ +
+
+
+
    +
  • +
    + + + + gpt4_o + +
    +
    +
    +
    +
    +
    +

    gpt-4o, currently the most advanced, multimodal flagship model that’s cheaper and faster than GPT-4 Turbo.

    + +
    +
    +

    Declaration

    +
    +

    Swift

    +
    static let gpt4_o: String
    + +
    +
    +
    +
    +
  • +
  • +
    + + + + gpt4_o_mini + +
    +
    +
    +
    +
    +
    +

    gpt-4o-mini, currently the most affordable and intelligent model for fast and lightweight requests.

    + +
    +
    +

    Declaration

    +
    +

    Swift

    +
    static let gpt4_o_mini: String
    + +
    +
    +
    +
    +
  • +
  • +
    + + + + gpt4_turbo + +
    +
    +
    +
    +
    +
    +

    gpt-4-turbo, The latest GPT-4 Turbo model with vision capabilities. Vision requests can now use JSON mode and function calling and more. Context window: 128,000 tokens

    + +
    +
    +

    Declaration

    +
    +

    Swift

    +
    static let gpt4_turbo: String
    + +
    +
    +
    +
    +
  • +
  • +
    + + + + gpt4_turbo_preview + +
    +
    +
    +
    +
    +
    +

    gpt-4-turbo, gpt-4 model with improved instruction following, JSON mode, reproducible outputs, parallel function calling and more. Maximum of 4096 output tokens

    + +
    +
    +

    Declaration

    +
    +

    Swift

    +
    @available(*, deprecated, message: "Please upgrade to the newer model")
    +static let gpt4_turbo_preview: String
    + +
    +
    +
    +
    +
  • +
  • +
    + + + + gpt4_vision_preview + +
    +
    +
    +
    +
    +
    +

    gpt-4-vision-preview, able to understand images, in addition to all other GPT-4 Turbo capabilities.

    + +
    +
    +

    Declaration

    +
    +

    Swift

    +
    static let gpt4_vision_preview: String
    + +
    +
    +
    +
    +
  • +
  • +
    + + + + gpt4_0125_preview + +
    +
    +
    +
    +
    +
    +

    Snapshot of gpt-4-turbo-preview from January 25th 2024. This model reduces cases of “laziness” where the model doesn’t complete a task. Also fixes the bug impacting non-English UTF-8 generations. Maximum of 4096 output tokens

    + +
    +
    +

    Declaration

    +
    +

    Swift

    +
    static let gpt4_0125_preview: String
    + +
    +
    +
    +
    +
  • +
  • +
    + + + + gpt4_1106_preview + +
    +
    +
    +
    +
    +
    +

    Snapshot of gpt-4-turbo-preview from November 6th 2023. Improved instruction following, JSON mode, reproducible outputs, parallel function calling and more. Maximum of 4096 output tokens

    + +
    +
    +

    Declaration

    +
    +

    Swift

    +
    @available(*, deprecated, message: "Please upgrade to the newer model")
    +static let gpt4_1106_preview: String
    + +
    +
    +
    +
    +
  • +
  • +
    + + + + gpt4 + +
    +
    +
    +
    +
    +
    +

    Most capable gpt-4 model, outperforms any GPT-3.5 model, able to do more complex tasks, and optimized for chat.

    + +
    +
    +

    Declaration

    +
    +

    Swift

    +
    static let gpt4: String
    + +
    +
    +
    +
    +
  • +
  • +
    + + + + gpt4_0613 + +
    +
    +
    +
    +
    +
    +

    Snapshot of gpt-4 from June 13th 2023 with function calling data. Unlike gpt-4, this model will not receive updates, and will be deprecated 3 months after a new version is released.

    + +
    +
    +

    Declaration

    +
    +

    Swift

    +
    static let gpt4_0613: String
    + +
    +
    +
    +
    +
  • +
  • +
    + + + + gpt4_0314 + +
    +
    +
    +
    +
    +
    +

    Snapshot of gpt-4 from March 14th 2023. Unlike gpt-4, this model will not receive updates, and will only be supported for a three month period ending on June 14th 2023.

    + +
    +
    +

    Declaration

    +
    +

    Swift

    +
    @available(*, deprecated, message: "Please upgrade to the newer model")
    +static let gpt4_0314: String
    + +
    +
    +
    +
    +
  • +
  • +
    + + + + gpt4_32k + +
    +
    +
    +
    +
    +
    +

    Same capabilities as the base gpt-4 model but with 4x the context length. Will be updated with our latest model iteration.

    + +
    +
    +

    Declaration

    +
    +

    Swift

    +
    static let gpt4_32k: String
    + +
    +
    +
    +
    +
  • +
  • +
    + + + + gpt4_32k_0613 + +
    +
    +
    +
    +
    +
    +

    Snapshot of gpt-4-32k from June 13th 2023. Unlike gpt-4-32k, this model will not receive updates, and will be deprecated 3 months after a new version is released.

    + +
    +
    +

    Declaration

    +
    +

    Swift

    +
    static let gpt4_32k_0613: String
    + +
    +
    +
    +
    +
  • +
  • +
    + + + + gpt4_32k_0314 + +
    +
    +
    +
    +
    +
    +

    Snapshot of gpt-4-32k from March 14th 2023. Unlike gpt-4-32k, this model will not receive updates, and will only be supported for a three month period ending on June 14th 2023.

    + +
    +
    +

    Declaration

    +
    +

    Swift

    +
    @available(*, deprecated, message: "Please upgrade to the newer model")
    +static let gpt4_32k_0314: String
    + +
    +
    +
    +
    +
  • +
  • +
    + + + + gpt3_5Turbo + +
    +
    +
    +
    +
    +
    +

    Most capable gpt-3.5-turbo model and optimized for chat. Will be updated with our latest model iteration.

    + +
    +
    +

    Declaration

    +
    +

    Swift

    +
    static let gpt3_5Turbo: String
    + +
    +
    +
    +
    +
  • +
  • +
    + + + + gpt3_5Turbo_0125 + +
    +
    +
    +
    +
    +
    +

    Snapshot of gpt-3.5-turbo from January 25th 2024. Decreased prices by 50%. Various improvements including higher accuracy at responding in requested formats and a fix for a bug which caused a text encoding issue for non-English language function calls.

    + +
    +
    +

    Declaration

    +
    +

    Swift

    +
    static let gpt3_5Turbo_0125: String
    + +
    +
    +
    +
    +
  • +
  • +
    + + + + gpt3_5Turbo_1106 + +
    +
    +
    +
    +
    +
    +

    Snapshot of gpt-3.5-turbo from November 6th 2023. The latest gpt-3.5-turbo model with improved instruction following, JSON mode, reproducible outputs, parallel function calling and more.

    + +
    +
    +

    Declaration

    +
    +

    Swift

    +
    @available(*, deprecated, message: "Please upgrade to the newer model")
    +static let gpt3_5Turbo_1106: String
    + +
    +
    +
    +
    +
  • +
  • +
    + + + + gpt3_5Turbo_0613 + +
    +
    +
    +
    +
    +
    +

    Snapshot of gpt-3.5-turbo from June 13th 2023 with function calling data. Unlike gpt-3.5-turbo, this model will not receive updates, and will be deprecated 3 months after a new version is released.

    + +
    +
    +

    Declaration

    +
    +

    Swift

    +
    @available(*, deprecated, message: "Please upgrade to the newer model")
    +static let gpt3_5Turbo_0613: String
    + +
    +
    +
    +
    +
  • +
  • +
    + + + + gpt3_5Turbo_0301 + +
    +
    +
    +
    +
    +
    +

    Snapshot of gpt-3.5-turbo from March 1st 2023. Unlike gpt-3.5-turbo, this model will not receive updates, and will only be supported for a three month period ending on June 1st 2023.

    + +
    +
    +

    Declaration

    +
    +

    Swift

    +
    @available(*, deprecated, message: "Please upgrade to the newer model")
    +static let gpt3_5Turbo_0301: String
    + +
    +
    +
    +
    +
  • +
  • +
    + + + + gpt3_5Turbo_16k + +
    +
    +
    +
    +
    +
    +

    Same capabilities as the standard gpt-3.5-turbo model but with 4 times the context.

    + +
    +
    +

    Declaration

    +
    +

    Swift

    +
    static let gpt3_5Turbo_16k: String
    + +
    +
    +
    +
    +
  • +
  • +
    + + + + gpt3_5Turbo_16k_0613 + +
    +
    +
    +
    +
    +
    +

    Snapshot of gpt-3.5-turbo-16k from June 13th 2023. Unlike gpt-3.5-turbo-16k, this model will not receive updates, and will be deprecated 3 months after a new version is released.

    + +
    +
    +

    Declaration

    +
    +

    Swift

    +
    static let gpt3_5Turbo_16k_0613: String
    + +
    +
    +
    +
    +
  • +
  • +
    + + + + textDavinci_003 + +
    +
    +
    +
    +
    +
    +

    Can do any language task with better quality, longer output, and consistent instruction-following than the curie, babbage, or ada models. Also supports inserting completions within text.

    + +
    +
    +

    Declaration

    +
    +

    Swift

    +
    static let textDavinci_003: String
    + +
    +
    +
    +
    +
  • +
  • +
    + + + + textDavinci_002 + +
    +
    +
    +
    +
    +
    +

    Similar capabilities to text-davinci-003 but trained with supervised fine-tuning instead of reinforcement learning.

    + +
    +
    +

    Declaration

    +
    +

    Swift

    +
    static let textDavinci_002: String
    + +
    +
    +
    +
    +
  • +
  • +
    + + + + textCurie + +
    +
    +
    +
    +
    +
    +

    Very capable, faster and lower cost than Davinci.

    + +
    +
    +

    Declaration

    +
    +

    Swift

    +
    static let textCurie: String
    + +
    +
    +
    +
    +
  • +
  • +
    + + + + textBabbage + +
    +
    +
    +
    +
    +
    +

    Capable of straightforward tasks, very fast, and lower cost.

    + +
    +
    +

    Declaration

    +
    +

    Swift

    +
    static let textBabbage: String
    + +
    +
    +
    +
    +
  • +
  • +
    + + + + textAda + +
    +
    +
    +
    +
    +
    +

    Capable of very simple tasks, usually the fastest model in the GPT-3 series, and lowest cost.

    + +
    +
    +

    Declaration

    +
    +

    Swift

    +
    static let textAda: String
    + +
    +
    +
    +
    +
  • +
  • +
    + + + + textDavinci_001 + +
    +
    +
    +
    +
    +
    + +
    +
    +

    Declaration

    +
    +

    Swift

    +
    static let textDavinci_001: String
    + +
    +
    +
    +
    +
  • +
  • +
    + + + + codeDavinciEdit_001 + +
    +
    +
    +
    +
    +
    + +
    +
    +

    Declaration

    +
    +

    Swift

    +
    static let codeDavinciEdit_001: String
    + +
    +
    +
    +
    +
  • +
  • +
    + + + + tts_1 + +
    +
    +
    +
    +
    +
    +

    The latest text to speech model, optimized for speed.

    + +
    +
    +

    Declaration

    +
    +

    Swift

    +
    static let tts_1: String
    + +
    +
    +
    +
    +
  • +
  • +
    + + + + tts_1_hd + +
    +
    +
    +
    +
    +
    +

    The latest text to speech model, optimized for quality.

    + +
    +
    +

    Declaration

    +
    +

    Swift

    +
    static let tts_1_hd: String
    + +
    +
    +
    +
    +
  • +
  • +
    + + + + whisper_1 + +
    +
    +
    +
    +
    +
    + +
    +
    +

    Declaration

    +
    +

    Swift

    +
    static let whisper_1: String
    + +
    +
    +
    +
    +
  • +
  • +
    + + + + dall_e_2 + +
    +
    +
    +
    +
    +
    + +
    +
    +

    Declaration

    +
    +

    Swift

    +
    static let dall_e_2: String
    + +
    +
    +
    +
    +
  • +
  • +
    + + + + dall_e_3 + +
    +
    +
    +
    +
    +
    + +
    +
    +

    Declaration

    +
    +

    Swift

    +
    static let dall_e_3: String
    + +
    +
    +
    +
    +
  • +
  • +
    + + + + davinci + +
    +
    +
    +
    +
    +
    +

    Most capable GPT-3 model. Can do any task the other models can do, often with higher quality.

    + +
    +
    +

    Declaration

    +
    +

    Swift

    +
    static let davinci: String
    + +
    +
    +
    +
    +
  • +
  • +
    + + + + curie + +
    +
    +
    +
    +
    +
    +

    Very capable, but faster and lower cost than Davinci.

    + +
    +
    +

    Declaration

    +
    +

    Swift

    +
    static let curie: String
    + +
    +
    +
    +
    +
  • +
  • +
    + + + + babbage + +
    +
    +
    +
    +
    +
    +

    Capable of straightforward tasks, very fast, and lower cost.

    + +
    +
    +

    Declaration

    +
    +

    Swift

    +
    static let babbage: String
    + +
    +
    +
    +
    +
  • +
  • +
    + + + + ada + +
    +
    +
    +
    +
    +
    +

    Capable of very simple tasks, usually the fastest model in the GPT-3 series, and lowest cost.

    + +
    +
    +

    Declaration

    +
    +

    Swift

    +
    static let ada: String
    + +
    +
    +
    +
    +
  • +
  • +
    + + + + textEmbeddingAda + +
    +
    +
    +
    +
    +
    + +
    +
    +

    Declaration

    +
    +

    Swift

    +
    static let textEmbeddingAda: String
    + +
    +
    +
    +
    +
  • +
  • +
    + + + + textSearchAda + +
    +
    +
    +
    +
    +
    + +
    +
    +

    Declaration

    +
    +

    Swift

    +
    static let textSearchAda: String
    + +
    +
    +
    +
    +
  • +
  • +
    + + + + textSearchBabbageDoc + +
    +
    +
    +
    +
    +
    + +
    +
    +

    Declaration

    +
    +

    Swift

    +
    static let textSearchBabbageDoc: String
    + +
    +
    +
    +
    +
  • +
  • + +
    +
    +
    +
    +
    + +
    +
    +

    Declaration

    +
    +

    Swift

    +
    static let textSearchBabbageQuery001: String
    + +
    +
    +
    +
    +
  • +
  • +
    + + + + textEmbedding3 + +
    +
    +
    +
    +
    +
    + +
    +
    +

    Declaration

    +
    +

    Swift

    +
    static let textEmbedding3: String
    + +
    +
    +
    +
    +
  • +
  • +
    + + + + textEmbedding3Large + +
    +
    +
    +
    +
    +
    + +
    +
    +

    Declaration

    +
    +

    Swift

    +
    static let textEmbedding3Large: String
    + +
    +
    +
    +
    +
  • +
  • +
    + + + + textModerationStable + +
    +
    +
    +
    +
    +
    +

    Almost as capable as the latest model, but slightly older.

    + +
    +
    +

    Declaration

    +
    +

    Swift

    +
    static let textModerationStable: String
    + +
    +
    +
    +
    +
  • +
  • +
    + + + + textModerationLatest + +
    +
    +
    +
    +
    +
    +

    Most capable moderation model. Accuracy will be slightly higher than the stable model.

    + +
    +
    +

    Declaration

    +
    +

    Swift

    +
    static let textModerationLatest: String
    + +
    +
    +
    +
    +
  • +
  • +
    + + + + moderation + +
    +
    +
    +
    +
    +
    + +
    +
    +

    Declaration

    +
    +

    Swift

    +
    static let moderation: String
    + +
    +
    +
    +
    +
  • +
+
+
+
+ +
+
+ + diff --git a/docs/Protocols.html b/docs/Protocols.html new file mode 100644 index 00000000..4cfb4346 --- /dev/null +++ b/docs/Protocols.html @@ -0,0 +1,325 @@ + + + + Protocols Reference + + + + + + + + + + + + + +
+
+

Docs (100% documented)

+
+
+ +
+
+
+
+
+ +
+
+ +
+
+
+

Protocols

+

The following protocols are available globally.

+ +
+
+
+
    +
  • +
    + + + + OpenAIProtocol + +
    +
    +
    +
    +
    +
    + + See more +
    +
    +

    Declaration

    +
    +

    Swift

    +
    public protocol OpenAIProtocol
    + +
    +
    +
    +
    +
  • +
+
+
+
+ +
+
+ + diff --git a/docs/Protocols/OpenAIProtocol.html b/docs/Protocols/OpenAIProtocol.html new file mode 100644 index 00000000..eeb3620a --- /dev/null +++ b/docs/Protocols/OpenAIProtocol.html @@ -0,0 +1,2181 @@ + + + + OpenAIProtocol Protocol Reference + + + + + + + + + + + + + +
+
+

Docs (100% documented)

+
+
+ +
+
+
+
+
+ +
+
+ +
+
+
+

OpenAIProtocol

+
+
+ +
public protocol OpenAIProtocol
+ +
+
+ +
+
+
+
    +
  • + +
    +
    +
    +
    +
    +

    This function sends a completions query to the OpenAI API and retrieves generated completions in response. The Completions API enables you to build applications using OpenAI’s language models, like the powerful GPT-3.

    + +

    Example:

    +
    let query = CompletionsQuery(model: .textDavinci_003, prompt: "What is 42?")
    +openAI.completions(query: query) { result in
    +  //Handle result here
    +}
    +
    + +
    +
    +

    Declaration

    +
    +

    Swift

    +
    func completions(query: CompletionsQuery, completion: @escaping (Result<CompletionsResult, Error>) -> Void)
    + +
    +
    +
    +

    Parameters

    + + + + + + + + + + + +
    + + query + + +
    +

    A CompletionsQuery object containing the input parameters for the API request. This includes the prompt, model, temperature, max tokens, and other settings.

    +
    +
    + + completion + + +
    +

    A closure which receives the result when the API request finishes. The closure’s parameter, Result<CompletionsResult, Error>, will contain either the CompletionsResult object with the generated completions, or an error if the request failed.

    +
    +
    +
    +
    +
    +
  • +
  • + +
    +
    +
    +
    +
    +

    This function sends a completions query to the OpenAI API and retrieves generated completions in response. The Completions API enables you to build applications using OpenAI’s language models, like the powerful GPT-3. The result is returned by chunks.

    + +

    Example:

    +
    let query = CompletionsQuery(model: .textDavinci_003, prompt: "What is 42?")
    +openAI.completions(query: query) { result in
    +  //Handle result here
    +}
    +
    + +
    +
    +

    Declaration

    +
    +

    Swift

    +
    func completionsStream(query: CompletionsQuery, onResult: @escaping (Result<CompletionsResult, Error>) -> Void, completion: ((Error?) -> Void)?)
    + +
    +
    +
    +

    Parameters

    + + + + + + + + + + + + + + + +
    + + query + + +
    +

    A CompletionsQuery object containing the input parameters for the API request. This includes the prompt, model, temperature, max tokens, and other settings.

    +
    +
    + + onResult + + +
    +

    A closure which receives the result when the API request finishes. The closure’s parameter, Result<CompletionsResult, Error>, will contain either the CompletionsResult object with the generated completions, or an error if the request failed.

    +
    +
    + + completion + + +
    +

    A closure that is called when all chunks have been delivered or an unrecoverable error occurred

    +
    +
    +
    +
    +
    +
  • +
  • + +
    +
    +
    +
    +
    +

    This function sends an images query to the OpenAI API and retrieves generated images in response. The Images Generation API enables you to create various images or graphics using OpenAI’s powerful deep learning models.

    + +

    Example:

    +
    let query = ImagesQuery(prompt: "White cat with heterochromia sitting on the kitchen table", n: 1, size: ImagesQuery.Size._1024)
    +openAI.images(query: query) { result in
    +  //Handle result here
    +}
    +
    + +
    +
    +

    Declaration

    +
    +

    Swift

    +
    func images(query: ImagesQuery, completion: @escaping (Result<ImagesResult, Error>) -> Void)
    + +
    +
    +
    +

    Parameters

    + + + + + + + + + + + +
    + + query + + +
    +

    An ImagesQuery object containing the input parameters for the API request. This includes the query parameters such as the text prompt, image size, and other settings.

    +
    +
    + + completion + + +
    +

    A closure which receives the result when the API request finishes. The closure’s parameter, Result<ImagesResult, Error>, will contain either the ImagesResult object with the generated images, or an error if the request failed.

    +
    +
    +
    +
    +
    +
  • +
  • + +
    +
    +
    +
    +
    +

    This function sends an image edit query to the OpenAI API and retrieves generated images in response. The Images Edit API enables you to edit images or graphics using OpenAI’s powerful deep learning models.

    + +

    Example:

    +
    let query = ImagesEditQuery(image: "@whitecat.png", prompt: "White cat with heterochromia sitting on the kitchen table with a bowl of food", n: 1, size: ImagesQuery.Size._1024)
    +openAI.imageEdits(query: query) { result in
    +  //Handle result here
    +}
    +
    + +
    +
    +

    Declaration

    +
    +

    Swift

    +
    func imageEdits(query: ImageEditsQuery, completion: @escaping (Result<ImagesResult, Error>) -> Void)
    + +
    +
    +
    +

    Parameters

    + + + + + + + + + + + +
    + + query + + +
    +

    An ImagesEditQuery object containing the input parameters for the API request. This includes the query parameters such as the image to be edited, an image to be used a mask if applicable, text prompt, image size, and other settings.

    +
    +
    + + completion + + +
    +

    A closure which receives the result when the API request finishes. The closure’s parameter, Result<ImagesResult, Error>, will contain either the ImagesResult object with the generated images, or an error if the request failed.

    +
    +
    +
    +
    +
    +
  • +
  • + +
    +
    +
    +
    +
    +

    This function sends an image variation query to the OpenAI API and retrieves generated images in response. The Images Variations API enables you to create a variation of a given image using OpenAI’s powerful deep learning models.

    + +

    Example:

    +
    let query = ImagesVariationQuery(image: "@whitecat.png", n: 1, size: ImagesQuery.Size._1024)
    +openAI.imageVariations(query: query) { result in
    +  //Handle result here
    +}
    +
    + +
    +
    +

    Declaration

    +
    +

    Swift

    +
    func imageVariations(query: ImageVariationsQuery, completion: @escaping (Result<ImagesResult, Error>) -> Void)
    + +
    +
    +
    +

    Parameters

    + + + + + + + + + + + +
    + + query + + +
    +

    An ImagesVariationQuery object containing the input parameters for the API request. This includes the query parameters such as the image to use as a basis for the variation(s), image size, and other settings.

    +
    +
    + + completion + + +
    +

    A closure which receives the result when the API request finishes. The closure’s parameter, Result<ImagesResult, Error>, will contain either the ImagesResult object with the generated images, or an error if the request failed.

    +
    +
    +
    +
    +
    +
  • +
  • + +
    +
    +
    +
    +
    +

    This function sends an embeddings query to the OpenAI API and retrieves embeddings in response. The Embeddings API enables you to generate high-dimensional vector representations of texts, which can be used for various natural language processing tasks such as semantic similarity, clustering, and classification.

    + +

    Example:

    +
    let query = EmbeddingsQuery(model: .textSearchBabbageDoc, input: "The food was delicious and the waiter...")
    +openAI.embeddings(query: query) { result in
    +  //Handle response here
    +}
    +
    + +
    +
    +

    Declaration

    +
    +

    Swift

    +
    func embeddings(query: EmbeddingsQuery, completion: @escaping (Result<EmbeddingsResult, Error>) -> Void)
    + +
    +
    +
    +

    Parameters

    + + + + + + + + + + + +
    + + query + + +
    +

    An EmbeddingsQuery object containing the input parameters for the API request. This includes the list of text prompts to be converted into embeddings, the model to be used, and other settings.

    +
    +
    + + completion + + +
    +

    A closure which receives the result when the API request finishes. The closure’s parameter, Result<EmbeddingsResult, Error>, will contain either the EmbeddingsResult object with the generated embeddings, or an error if the request failed.

    +
    +
    +
    +
    +
    +
  • +
  • + +
    +
    +
    +
    +
    +

    This function sends a chat query to the OpenAI API and retrieves chat conversation responses. The Chat API enables you to build chatbots or conversational applications using OpenAI’s powerful natural language models, like GPT-3.

    + +

    Example:

    +
    let query = ChatQuery(model: .gpt3_5Turbo, messages: [.init(role: "user", content: "who are you")])
    +openAI.chats(query: query) { result in
    +  //Handle response here
    +}
    +
    + +
    +
    +

    Declaration

    +
    +

    Swift

    +
    func chats(query: ChatQuery, completion: @escaping (Result<ChatResult, Error>) -> Void)
    + +
    +
    +
    +

    Parameters

    + + + + + + + + + + + +
    + + query + + +
    +

    A ChatQuery object containing the input parameters for the API request. This includes the lists of message objects for the conversation, the model to be used, and other settings.

    +
    +
    + + completion + + +
    +

    A closure which receives the result when the API request finishes. The closure’s parameter, Result<ChatResult, Error>, will contain either the ChatResult object with the model’s response to the conversation, or an error if the request failed.

    +
    +
    +
    +
    +
    +
  • +
  • + +
    +
    +
    +
    +
    +

    This function sends a chat query to the OpenAI API and retrieves chat stream conversation responses. The Chat API enables you to build chatbots or conversational applications using OpenAI’s powerful natural language models, like GPT-3. The result is returned by chunks.

    + +

    Example:

    +
    let query = ChatQuery(model: .gpt3_5Turbo, messages: [.init(role: "user", content: "who are you")])
    +openAI.chats(query: query) { result in
    +  //Handle response here
    +}
    +
    + +
    +
    +

    Declaration

    +
    +

    Swift

    +
    func chatsStream(query: ChatQuery, onResult: @escaping (Result<ChatStreamResult, Error>) -> Void, completion: ((Error?) -> Void)?)
    + +
    +
    +
    +

    Parameters

    + + + + + + + + + + + + + + + +
    + + query + + +
    +

    A ChatQuery object containing the input parameters for the API request. This includes the lists of message objects for the conversation, the model to be used, and other settings.

    +
    +
    + + onResult + + +
    +

    A closure which receives the result when the API request finishes. The closure’s parameter, Result<ChatStreamResult, Error>, will contain either the ChatStreamResult object with the model’s response to the conversation, or an error if the request failed.

    +
    +
    + + completion + + +
    +

    A closure that is called when all chunks are delivered or an unrecoverable error occurred

    +
    +
    +
    +
    +
    +
  • +
  • + +
    +
    +
    +
    +
    +

    This function sends an edits query to the OpenAI API and retrieves an edited version of the prompt based on the instruction given.

    + +

    Example:

    +
    let query = EditsQuery(model: .gpt4, input: "What day of the wek is it?", instruction: "Fix the spelling mistakes")
    +openAI.edits(query: query) { result in
    +  //Handle response here
    +}
    +
    + +
    +
    +

    Declaration

    +
    +

    Swift

    +
    func edits(query: EditsQuery, completion: @escaping (Result<EditsResult, Error>) -> Void)
    + +
    +
    +
    +

    Parameters

    + + + + + + + + + + + +
    + + query + + +
    +

    An EditsQuery object containing the input parameters for the API request. This includes the input to be edited, the instruction specifying how it should be edited, and other settings.

    +
    +
    + + completion + + +
    +

    A closure which receives the result when the API request finishes. The closure’s parameter, Result<EditsResult, Error>, will contain either the EditsResult object with the model’s response to the queried edit, or an error if the request failed.

    +
    +
    +
    +
    +
    +
  • +
  • + +
    +
    +
    +
    +
    +

    This function sends a model query to the OpenAI API and retrieves a model instance, providing owner information. The Models API in this usage enables you to gather detailed information on the model in question, like GPT-3.

    + +

    Example:

    +
    let query = ModelQuery(model: .gpt3_5Turbo)
    +openAI.model(query: query) { result in
    +  //Handle response here
    +}
    +
    + +
    +
    +

    Declaration

    +
    +

    Swift

    +
    func model(query: ModelQuery, completion: @escaping (Result<ModelResult, Error>) -> Void)
    + +
    +
    +
    +

    Parameters

    + + + + + + + + + + + +
    + + query + + +
    +

    A ModelQuery object containing the input parameters for the API request, which is only the model to be queried.

    +
    +
    + + completion + + +
    +

    A closure which receives the result when the API request finishes. The closure’s parameter, Result<ModelResult, Error>, will contain either the ModelResult object with more information about the model, or an error if the request failed.

    +
    +
    +
    +
    +
    +
  • +
  • +
    + + + + models(completion:) + +
    +
    +
    +
    +
    +
    +

    This function sends a models query to the OpenAI API and retrieves a list of models. The Models API in this usage enables you to list all the available models.

    + +

    Example:

    +
    openAI.models() { result in
    +  //Handle response here
    +}
    +
    + +
    +
    +

    Declaration

    +
    +

    Swift

    +
    func models(completion: @escaping (Result<ModelsResult, Error>) -> Void)
    + +
    +
    +
    +

    Parameters

    + + + + + + + +
    + + completion + + +
    +

    A closure which receives the result when the API request finishes. The closure’s parameter, Result<ModelsResult, Error>, will contain either the ModelsResult object with the list of model types, or an error if the request failed.

    +
    +
    +
    +
    +
    +
  • +
  • + +
    +
    +
    +
    +
    +

    This function sends a moderations query to the OpenAI API and retrieves a list of category results to classify how text may violate OpenAI’s Content Policy.

    + +

    Example:

    +
    let query = ModerationsQuery(input: "I want to kill them.")
    +openAI.moderations(query: query) { result in
    +  //Handle response here
    +}
    +
    + +
    +
    +

    Declaration

    +
    +

    Swift

    +
    @available(iOS 13.0, *)
    +func moderations(query: ModerationsQuery, completion: @escaping (Result<ModerationsResult, Error>) -> Void)
    + +
    +
    +
    +

    Parameters

    + + + + + + + + + + + +
    + + query + + +
    +

    A ModerationsQuery object containing the input parameters for the API request. This includes the input text and optionally the model to be used.

    +
    +
    + + completion + + +
    +

    A closure which receives the result when the API request finishes. The closure’s parameter, Result<ModerationsResult, Error>, will contain either the ModerationsResult object with the list of category results, or an error if the request failed.

    +
    +
    +
    +
    +
    +
  • +
  • + +
    +
    +
    +
    +
    +

    This function sends an AudioSpeechQuery to the OpenAI API to create audio speech from text using a specific voice and format.

    + +

    Example:

    +
    let query = AudioSpeechQuery(model: .tts_1, input: "Hello, world!", voice: .alloy, responseFormat: .mp3, speed: 1.0)
    +openAI.audioCreateSpeech(query: query) { result in
    +   // Handle response here
    +}
    +
    + +
    +
    +

    Declaration

    +
    +

    Swift

    +
    func audioCreateSpeech(query: AudioSpeechQuery, completion: @escaping (Result<AudioSpeechResult, Error>) -> Void)
    + +
    +
    +
    +

    Parameters

    + + + + + + + + + + + +
    + + query + + +
    +

    An AudioSpeechQuery object containing the parameters for the API request. This includes the Text-to-Speech model to be used, input text, voice to be used for generating the audio, the desired audio format, and the speed of the generated audio.

    +
    +
    + + completion + + +
    +

    A closure which receives the result. The closure’s parameter, Result<AudioSpeechResult, Error>, will either contain the AudioSpeechResult object with the audio data or an error if the request failed.

    +
    +
    +
    +
    +
    +
  • +
  • + +
    +
    +
    +
    +
    +

    Transcribes audio data using OpenAI’s audio transcription API and completes the operation asynchronously.

    + +
    +
    +

    Declaration

    +
    +

    Swift

    +
    func audioTranscriptions(query: AudioTranscriptionQuery, completion: @escaping (Result<AudioTranscriptionResult, Error>) -> Void)
    + +
    +
    +
    +

    Parameters

    + + + + + + + + + + + +
    + + query + + +
    +

    The AudioTranscriptionQuery instance, containing the information required for the transcription request.

    +
    +
    + + completion + + +
    +

    The completion handler to be executed upon completion of the transcription request. + Returns a Result of type AudioTranscriptionResult if successful, or an Error if an error occurs. +*

    +
    +
    +
    +
    +
    +
  • +
  • + +
    +
    +
    +
    +
    +

    Translates audio data using OpenAI’s audio translation API and completes the operation asynchronously.

    + +
    +
    +

    Declaration

    +
    +

    Swift

    +
    func audioTranslations(query: AudioTranslationQuery, completion: @escaping (Result<AudioTranslationResult, Error>) -> Void)
    + +
    +
    +
    +

    Parameters

    + + + + + + + + + + + +
    + + query + + +
    +

    The AudioTranslationQuery instance, containing the information required for the translation request.

    +
    +
    + + completion + + +
    +

    The completion handler to be executed upon completion of the translation request. + Returns a Result of type AudioTranslationResult if successful, or an Error if an error occurs. +*

    +
    +
    +
    +
    +
    +
  • +
  • +
    + + + + completions(query:) + + + Extension method, asynchronous + +
    +
    +
    +
    +
    +
    + +
    +
    +

    Declaration

    +
    +

    Swift

    +
    func completions(
    +    query: CompletionsQuery
    +) async throws -> CompletionsResult
    + +
    +
    +
    +
    +
  • +
  • +
    + + + + completionsStream(query:) + + + Extension method + +
    +
    +
    +
    +
    +
    + +
    +
    +

    Declaration

    +
    +

    Swift

    +
    func completionsStream(
    +    query: CompletionsQuery
    +) -> AsyncThrowingStream<CompletionsResult, Error>
    + +
    +
    +
    +
    +
  • +
  • +
    + + + + images(query:) + + + Extension method, asynchronous + +
    +
    +
    +
    +
    +
    + +
    +
    +

    Declaration

    +
    +

    Swift

    +
    func images(
    +    query: ImagesQuery
    +) async throws -> ImagesResult
    + +
    +
    +
    +
    +
  • +
  • +
    + + + + imageEdits(query:) + + + Extension method, asynchronous + +
    +
    +
    +
    +
    +
    + +
    +
    +

    Declaration

    +
    +

    Swift

    +
    func imageEdits(
    +    query: ImageEditsQuery
    +) async throws -> ImagesResult
    + +
    +
    +
    +
    +
  • +
  • +
    + + + + imageVariations(query:) + + + Extension method, asynchronous + +
    +
    +
    +
    +
    +
    + +
    +
    +

    Declaration

    +
    +

    Swift

    +
    func imageVariations(
    +    query: ImageVariationsQuery
    +) async throws -> ImagesResult
    + +
    +
    +
    +
    +
  • +
  • +
    + + + + embeddings(query:) + + + Extension method, asynchronous + +
    +
    +
    +
    +
    +
    + +
    +
    +

    Declaration

    +
    +

    Swift

    +
    func embeddings(
    +    query: EmbeddingsQuery
    +) async throws -> EmbeddingsResult
    + +
    +
    +
    +
    +
  • +
  • +
    + + + + chats(query:) + + + Extension method, asynchronous + +
    +
    +
    +
    +
    +
    + +
    +
    +

    Declaration

    +
    +

    Swift

    +
    func chats(
    +    query: ChatQuery
    +) async throws -> ChatResult
    + +
    +
    +
    +
    +
  • +
  • +
    + + + + chatsStream(query:) + + + Extension method + +
    +
    +
    +
    +
    +
    + +
    +
    +

    Declaration

    +
    +

    Swift

    +
    func chatsStream(
    +    query: ChatQuery
    +) -> AsyncThrowingStream<ChatStreamResult, Error>
    + +
    +
    +
    +
    +
  • +
  • +
    + + + + edits(query:) + + + Extension method, asynchronous + +
    +
    +
    +
    +
    +
    + +
    +
    +

    Declaration

    +
    +

    Swift

    +
    func edits(
    +    query: EditsQuery
    +) async throws -> EditsResult
    + +
    +
    +
    +
    +
  • +
  • +
    + + + + model(query:) + + + Extension method, asynchronous + +
    +
    +
    +
    +
    +
    + +
    +
    +

    Declaration

    +
    +

    Swift

    +
    func model(
    +    query: ModelQuery
    +) async throws -> ModelResult
    + +
    +
    +
    +
    +
  • +
  • +
    + + + + models() + + + Extension method, asynchronous + +
    +
    +
    +
    +
    +
    + +
    +
    +

    Declaration

    +
    +

    Swift

    +
    func models() async throws -> ModelsResult
    + +
    +
    +
    +
    +
  • +
  • +
    + + + + moderations(query:) + + + Extension method, asynchronous + +
    +
    +
    +
    +
    +
    + +
    +
    +

    Declaration

    +
    +

    Swift

    +
    func moderations(
    +    query: ModerationsQuery
    +) async throws -> ModerationsResult
    + +
    +
    +
    +
    +
  • +
  • +
    + + + + audioCreateSpeech(query:) + + + Extension method, asynchronous + +
    +
    +
    +
    +
    +
    + +
    +
    +

    Declaration

    +
    +

    Swift

    +
    func audioCreateSpeech(
    +    query: AudioSpeechQuery
    +) async throws -> AudioSpeechResult
    + +
    +
    +
    +
    +
  • +
  • +
    + + + + audioTranscriptions(query:) + + + Extension method, asynchronous + +
    +
    +
    +
    +
    +
    + +
    +
    +

    Declaration

    +
    +

    Swift

    +
    func audioTranscriptions(
    +    query: AudioTranscriptionQuery
    +) async throws -> AudioTranscriptionResult
    + +
    +
    +
    +
    +
  • +
  • +
    + + + + audioTranslations(query:) + + + Extension method, asynchronous + +
    +
    +
    +
    +
    +
    + +
    +
    +

    Declaration

    +
    +

    Swift

    +
    func audioTranslations(
    +    query: AudioTranslationQuery
    +) async throws -> AudioTranslationResult
    + +
    +
    +
    +
    +
  • +
  • +
    + + + + completions(query:) + + + Extension method + +
    +
    +
    +
    +
    +
    + +
    +
    +

    Declaration

    +
    +

    Swift

    +
    func completions(query: CompletionsQuery) -> AnyPublisher<CompletionsResult, Error>
    + +
    +
    +
    +
    +
  • +
  • +
    + + + + completionsStream(query:) + + + Extension method + +
    +
    +
    +
    +
    +
    + +
    +
    +

    Declaration

    +
    +

    Swift

    +
    func completionsStream(query: CompletionsQuery) -> AnyPublisher<Result<CompletionsResult, Error>, Error>
    + +
    +
    +
    +
    +
  • +
  • +
    + + + + images(query:) + + + Extension method + +
    +
    +
    +
    +
    +
    + +
    +
    +

    Declaration

    +
    +

    Swift

    +
    func images(query: ImagesQuery) -> AnyPublisher<ImagesResult, Error>
    + +
    +
    +
    +
    +
  • +
  • +
    + + + + imageEdits(query:) + + + Extension method + +
    +
    +
    +
    +
    +
    + +
    +
    +

    Declaration

    +
    +

    Swift

    +
    func imageEdits(query: ImageEditsQuery) -> AnyPublisher<ImagesResult, Error>
    + +
    +
    +
    +
    +
  • +
  • +
    + + + + imageVariations(query:) + + + Extension method + +
    +
    +
    +
    +
    +
    + +
    +
    +

    Declaration

    +
    +

    Swift

    +
    func imageVariations(query: ImageVariationsQuery) -> AnyPublisher<ImagesResult, Error>
    + +
    +
    +
    +
    +
  • +
  • +
    + + + + embeddings(query:) + + + Extension method + +
    +
    +
    +
    +
    +
    + +
    +
    +

    Declaration

    +
    +

    Swift

    +
    func embeddings(query: EmbeddingsQuery) -> AnyPublisher<EmbeddingsResult, Error>
    + +
    +
    +
    +
    +
  • +
  • +
    + + + + chats(query:) + + + Extension method + +
    +
    +
    +
    +
    +
    + +
    +
    +

    Declaration

    +
    +

    Swift

    +
    func chats(query: ChatQuery) -> AnyPublisher<ChatResult, Error>
    + +
    +
    +
    +
    +
  • +
  • +
    + + + + chatsStream(query:) + + + Extension method + +
    +
    +
    +
    +
    +
    + +
    +
    +

    Declaration

    +
    +

    Swift

    +
    func chatsStream(query: ChatQuery) -> AnyPublisher<Result<ChatStreamResult, Error>, Error>
    + +
    +
    +
    +
    +
  • +
  • +
    + + + + edits(query:) + + + Extension method + +
    +
    +
    +
    +
    +
    + +
    +
    +

    Declaration

    +
    +

    Swift

    +
    func edits(query: EditsQuery) -> AnyPublisher<EditsResult, Error>
    + +
    +
    +
    +
    +
  • +
  • +
    + + + + model(query:) + + + Extension method + +
    +
    +
    +
    +
    +
    + +
    +
    +

    Declaration

    +
    +

    Swift

    +
    func model(query: ModelQuery) -> AnyPublisher<ModelResult, Error>
    + +
    +
    +
    +
    +
  • +
  • +
    + + + + models() + + + Extension method + +
    +
    +
    +
    +
    +
    + +
    +
    +

    Declaration

    +
    +

    Swift

    +
    func models() -> AnyPublisher<ModelsResult, Error>
    + +
    +
    +
    +
    +
  • +
  • +
    + + + + moderations(query:) + + + Extension method + +
    +
    +
    +
    +
    +
    + +
    +
    +

    Declaration

    +
    +

    Swift

    +
    func moderations(query: ModerationsQuery) -> AnyPublisher<ModerationsResult, Error>
    + +
    +
    +
    +
    +
  • +
  • +
    + + + + audioCreateSpeech(query:) + + + Extension method + +
    +
    +
    +
    +
    +
    + +
    +
    +

    Declaration

    +
    +

    Swift

    +
    func audioCreateSpeech(query: AudioSpeechQuery) -> AnyPublisher<AudioSpeechResult, Error>
    + +
    +
    +
    +
    +
  • +
  • +
    + + + + audioTranscriptions(query:) + + + Extension method + +
    +
    +
    +
    +
    +
    + +
    +
    +

    Declaration

    +
    +

    Swift

    +
    func audioTranscriptions(query: AudioTranscriptionQuery) -> AnyPublisher<AudioTranscriptionResult, Error>
    + +
    +
    +
    +
    +
  • +
  • +
    + + + + audioTranslations(query:) + + + Extension method + +
    +
    +
    +
    +
    +
    + +
    +
    +

    Declaration

    +
    +

    Swift

    +
    func audioTranslations(query: AudioTranslationQuery) -> AnyPublisher<AudioTranslationResult, Error>
    + +
    +
    +
    +
    +
  • +
+
+
+
+ +
+
+ + diff --git a/docs/Structs.html b/docs/Structs.html new file mode 100644 index 00000000..423988ff --- /dev/null +++ b/docs/Structs.html @@ -0,0 +1,1077 @@ + + + + Structures Reference + + + + + + + + + + + + + +
+
+

Docs (100% documented)

+
+
+ +
+
+
+
+
+ +
+
+ +
+
+
+

Structures

+

The following structures are available globally.

+ +
+
+
+
    +
  • +
    + + + + APIError + +
    +
    +
    +
    +
    +
    + + See more +
    +
    +

    Declaration

    +
    +

    Swift

    +
    public struct APIError : Error, Decodable, Equatable
    +
    extension APIError: LocalizedError
    + +
    +
    +
    +
    +
  • +
  • +
    + + + + APIErrorResponse + +
    +
    +
    +
    +
    +
    + + See more +
    +
    +

    Declaration

    +
    +

    Swift

    +
    public struct APIErrorResponse : Error, Decodable, Equatable
    +
    extension APIErrorResponse: LocalizedError
    + +
    +
    +
    +
    +
  • +
  • +
    + + + + AudioSpeechQuery + +
    +
    +
    +
    +
    +
    +

    Generates audio from the input text. +Learn more: OpenAI Speech – Documentation

    + + See more +
    +
    +

    Declaration

    +
    +

    Swift

    +
    public struct AudioSpeechQuery : Codable
    + +
    +
    +
    +
    +
  • +
  • +
    + + + + AudioSpeechResult + +
    +
    +
    +
    +
    +
    +

    The audio file content. +Learn more: OpenAI Speech – Documentation

    + + See more +
    +
    +

    Declaration

    +
    +

    Swift

    +
    public struct AudioSpeechResult : Codable, Equatable
    + +
    +
    +
    +
    +
  • +
  • + +
    +
    +
    +
    +
    + + See more +
    +
    +

    Declaration

    +
    +

    Swift

    +
    public struct AudioTranscriptionQuery : Codable
    + +
    +
    +
    +
    +
  • +
  • + +
    +
    +
    +
    +
    + + See more +
    +
    +

    Declaration

    +
    +

    Swift

    +
    public struct AudioTranscriptionResult : Codable, Equatable
    + +
    +
    +
    +
    +
  • +
  • +
    + + + + AudioTranslationQuery + +
    +
    +
    +
    +
    +
    +

    Translates audio into English.

    + + See more +
    +
    +

    Declaration

    +
    +

    Swift

    +
    public struct AudioTranslationQuery : Codable
    + +
    +
    +
    +
    +
  • +
  • + +
    +
    +
    +
    +
    + + See more +
    +
    +

    Declaration

    +
    +

    Swift

    +
    public struct AudioTranslationResult : Codable, Equatable
    + +
    +
    +
    +
    +
  • +
  • +
    + + + + ChatQuery + +
    +
    +
    +
    +
    + +
    +

    Declaration

    +
    +

    Swift

    +
    public struct ChatQuery : Equatable, Codable, Streamable
    + +
    +
    +
    +
    +
  • +
  • +
    + + + + ChatResult + +
    +
    +
    +
    +
    +
    +

    https://platform.openai.com/docs/api-reference/chat/object +Example Completion object print

    +
    {
    + "id": "chatcmpl-123456",
    + "object": "chat.completion",
    + "created": 1728933352,
    + "model": "gpt-4o-2024-08-06",
    + "choices": [
    +   {
    +     "index": 0,
    +     "message": {
    +       "role": "assistant",
    +       "content": "Hi there! How can I assist you today?",
    +       "refusal": null
    +     },
    +     "logprobs": null,
    +     "finish_reason": "stop"
    +   }
    + ],
    + "usage": {
    +   "prompt_tokens": 19,
    +   "completion_tokens": 10,
    +   "total_tokens": 29,
    +   "prompt_tokens_details": {
    +     "cached_tokens": 0
    +   },
    +   "completion_tokens_details": {
    +     "reasoning_tokens": 0
    +   }
    + },
    + "system_fingerprint": "fp_6b68a8204b"
    +}
    +
    + + See more +
    +
    +

    Declaration

    +
    +

    Swift

    +
    public struct ChatResult : Codable, Equatable
    + +
    +
    +
    +
    +
  • +
  • +
    + + + + ChatStreamResult + +
    +
    +
    +
    +
    +
    + + See more +
    +
    +

    Declaration

    +
    +

    Swift

    +
    public struct ChatStreamResult : Codable, Equatable
    + +
    +
    +
    +
    +
  • +
  • +
    + + + + CompletionsQuery + +
    +
    +
    +
    +
    +
    + + See more +
    +
    +

    Declaration

    +
    +

    Swift

    +
    public struct CompletionsQuery : Codable, Streamable
    + +
    +
    +
    +
    +
  • +
  • +
    + + + + CompletionsResult + +
    +
    +
    +
    +
    +
    + + See more +
    +
    +

    Declaration

    +
    +

    Swift

    +
    public struct CompletionsResult : Codable, Equatable
    + +
    +
    +
    +
    +
  • +
  • +
    + + + + EditsQuery + +
    +
    +
    +
    +
    +
    + + See more +
    +
    +

    Declaration

    +
    +

    Swift

    +
    public struct EditsQuery : Codable
    + +
    +
    +
    +
    +
  • +
  • +
    + + + + EditsResult + +
    +
    +
    +
    +
    +
    + + See more +
    +
    +

    Declaration

    +
    +

    Swift

    +
    public struct EditsResult : Codable, Equatable
    + +
    +
    +
    +
    +
  • +
  • +
    + + + + EmbeddingsQuery + +
    +
    +
    +
    +
    +
    + + See more +
    +
    +

    Declaration

    +
    +

    Swift

    +
    public struct EmbeddingsQuery : Codable
    + +
    +
    +
    +
    +
  • +
  • +
    + + + + EmbeddingsResult + +
    +
    +
    +
    +
    +
    + + See more +
    +
    +

    Declaration

    +
    +

    Swift

    +
    public struct EmbeddingsResult : Codable, Equatable
    + +
    +
    +
    +
    +
  • +
  • +
    + + + + ImageEditsQuery + +
    +
    +
    +
    +
    +
    + + See more +
    +
    +

    Declaration

    +
    +

    Swift

    +
    public struct ImageEditsQuery : Codable
    + +
    +
    +
    +
    +
  • +
  • +
    + + + + ImageVariationsQuery + +
    +
    +
    +
    +
    +
    + + See more +
    +
    +

    Declaration

    +
    +

    Swift

    +
    public struct ImageVariationsQuery : Codable
    + +
    +
    +
    +
    +
  • +
  • +
    + + + + ImagesQuery + +
    +
    +
    +
    +
    +
    +

    Given a prompt and/or an input image, the model will generate a new image. +https://platform.openai.com/docs/guides/images

    + + See more +
    +
    +

    Declaration

    +
    +

    Swift

    +
    public struct ImagesQuery : Codable
    + +
    +
    +
    +
    +
  • +
  • +
    + + + + ImagesResult + +
    +
    +
    +
    +
    +
    +

    Returns a list of image objects.

    + + See more +
    +
    +

    Declaration

    +
    +

    Swift

    +
    public struct ImagesResult : Codable, Equatable
    + +
    +
    +
    +
    +
  • +
  • +
    + + + + ModelQuery + +
    +
    +
    +
    +
    +
    +

    Retrieves a model instance, providing basic information about the model such as the owner and permissioning.

    + + See more +
    +
    +

    Declaration

    +
    +

    Swift

    +
    public struct ModelQuery : Codable, Equatable
    + +
    +
    +
    +
    +
  • +
  • +
    + + + + ModelResult + +
    +
    +
    +
    +
    +
    +

    The model object matching the specified ID.

    + + See more +
    +
    +

    Declaration

    +
    +

    Swift

    +
    public struct ModelResult : Codable, Equatable
    + +
    +
    +
    +
    +
  • +
  • +
    + + + + ModelsResult + +
    +
    +
    +
    +
    +
    +

    A list of model objects.

    + + See more +
    +
    +

    Declaration

    +
    +

    Swift

    +
    public struct ModelsResult : Codable, Equatable
    + +
    +
    +
    +
    +
  • +
  • +
    + + + + ModerationsQuery + +
    +
    +
    +
    +
    +
    + + See more +
    +
    +

    Declaration

    +
    +

    Swift

    +
    public struct ModerationsQuery : Codable
    + +
    +
    +
    +
    +
  • +
  • +
    + + + + ModerationsResult + +
    +
    +
    +
    +
    +
    + + See more +
    +
    +

    Declaration

    +
    +

    Swift

    +
    public struct ModerationsResult : Codable, Equatable
    +
    extension ModerationsResult: Identifiable
    + +
    +
    +
    +
    +
  • +
  • +
    + + + + Vector + +
    +
    +
    +
    +
    +
    + + See more +
    +
    +

    Declaration

    +
    +

    Swift

    +
    public struct Vector
    + +
    +
    +
    +
    +
  • +
+
+
+
+ +
+
+ + diff --git a/docs/Structs/APIError.html b/docs/Structs/APIError.html new file mode 100644 index 00000000..2ddf50e6 --- /dev/null +++ b/docs/Structs/APIError.html @@ -0,0 +1,489 @@ + + + + APIError Structure Reference + + + + + + + + + + + + + +
+
+

Docs (100% documented)

+
+
+ +
+
+
+
+
+ +
+
+ +
+
+
+

APIError

+
+
+ +
public struct APIError : Error, Decodable, Equatable
+
extension APIError: LocalizedError
+ +
+
+ +
+
+
+
    +
  • +
    + + + + message + +
    +
    +
    +
    +
    +
    + +
    +
    +

    Declaration

    +
    +

    Swift

    +
    public let message: String
    + +
    +
    +
    +
    +
  • +
  • +
    + + + + type + +
    +
    +
    +
    +
    +
    + +
    +
    +

    Declaration

    +
    +

    Swift

    +
    public let type: String
    + +
    +
    +
    +
    +
  • +
  • +
    + + + + param + +
    +
    +
    +
    +
    +
    + +
    +
    +

    Declaration

    +
    +

    Swift

    +
    public let param: String?
    + +
    +
    +
    +
    +
  • +
  • +
    + + + + code + +
    +
    +
    +
    +
    +
    + +
    +
    +

    Declaration

    +
    +

    Swift

    +
    public let code: String?
    + +
    +
    +
    +
    +
  • +
  • + +
    +
    +
    +
    +
    + +
    +
    +

    Declaration

    +
    +

    Swift

    +
    public init(message: String, type: String, param: String?, code: String?)
    + +
    +
    +
    +
    +
  • +
  • +
    + + + + init(from:) + +
    +
    +
    +
    +
    +
    + +
    +
    +

    Declaration

    +
    +

    Swift

    +
    public init(from decoder: Decoder) throws
    + +
    +
    +
    +
    +
  • +
  • +
    + + + + errorDescription + +
    +
    +
    +
    +
    +
    + +
    +
    +

    Declaration

    +
    +

    Swift

    +
    public var errorDescription: String? { get }
    + +
    +
    +
    +
    +
  • +
+
+
+
+ +
+
+ + diff --git a/docs/Structs/APIErrorResponse.html b/docs/Structs/APIErrorResponse.html new file mode 100644 index 00000000..9b5a0389 --- /dev/null +++ b/docs/Structs/APIErrorResponse.html @@ -0,0 +1,359 @@ + + + + APIErrorResponse Structure Reference + + + + + + + + + + + + + +
+
+

Docs (100% documented)

+
+
+ +
+
+
+
+
+ +
+
+ +
+
+
+

APIErrorResponse

+
+
+ +
public struct APIErrorResponse : Error, Decodable, Equatable
+
extension APIErrorResponse: LocalizedError
+ +
+
+ +
+
+
+
    +
  • +
    + + + + error + +
    +
    +
    +
    +
    +
    + +
    +
    +

    Declaration

    +
    +

    Swift

    +
    public let error: APIError
    + +
    +
    +
    +
    +
  • +
  • +
    + + + + errorDescription + +
    +
    +
    +
    +
    +
    + +
    +
    +

    Declaration

    +
    +

    Swift

    +
    public var errorDescription: String? { get }
    + +
    +
    +
    +
    +
  • +
+
+
+
+ +
+
+ + diff --git a/docs/Structs/AudioSpeechQuery.html b/docs/Structs/AudioSpeechQuery.html new file mode 100644 index 00000000..b63839da --- /dev/null +++ b/docs/Structs/AudioSpeechQuery.html @@ -0,0 +1,620 @@ + + + + AudioSpeechQuery Structure Reference + + + + + + + + + + + + + +
+
+

Docs (100% documented)

+
+
+ +
+
+
+
+
+ +
+
+ +
+
+
+

AudioSpeechQuery

+
+
+ +
public struct AudioSpeechQuery : Codable
+ +
+
+

Generates audio from the input text. +Learn more: OpenAI Speech – Documentation

+ +
+
+
+
    +
  • +
    + + + + AudioSpeechVoice + +
    +
    +
    +
    +
    +
    +

    Encapsulates the voices available for audio generation.

    + +

    To get acquainted with each of the voices and listen to the samples visit: +OpenAI Text-to-Speech – Voice Options

    + + See more +
    +
    +

    Declaration

    +
    +

    Swift

    +
    public enum AudioSpeechVoice : String, Codable, CaseIterable
    + +
    +
    +
    +
    +
  • +
  • + +
    +
    +
    +
    +
    +

    Encapsulates the response formats available for audio data.

    + +

    Formats:

    + +
      +
    • mp3
    • +
    • opus
    • +
    • aac
    • +
    • flac
    • +
    + + See more +
    +
    +

    Declaration

    +
    +

    Swift

    +
    public enum AudioSpeechResponseFormat : String, Codable, CaseIterable
    + +
    +
    +
    +
    +
  • +
  • +
    + + + + input + +
    +
    +
    +
    +
    +
    +

    The text to generate audio for. The maximum length is 4096 characters.

    + +
    +
    +

    Declaration

    +
    +

    Swift

    +
    public let input: String
    + +
    +
    +
    +
    +
  • +
  • +
    + + + + model + +
    +
    +
    +
    +
    +
    +

    One of the available TTS models: tts-1 or tts-1-hd

    + +
    +
    +

    Declaration

    +
    +

    Swift

    +
    public let model: Model
    + +
    +
    +
    +
    +
  • +
  • +
    + + + + voice + +
    +
    +
    +
    +
    +
    +

    The voice to use when generating the audio. Supported voices are alloy, echo, fable, onyx, nova, and shimmer. Previews of the voices are available in the Text to speech guide. +https://platform.openai.com/docs/guides/text-to-speech/voice-options

    + +
    +
    +

    Declaration

    +
    +

    Swift

    +
    public let voice: AudioSpeechVoice
    + +
    +
    +
    +
    +
  • +
  • +
    + + + + responseFormat + +
    +
    +
    +
    +
    +
    +

    The format to return audio in. Supported formats are mp3, opus, aac, and flac. +Defaults to mp3

    + +
    +
    +

    Declaration

    +
    +

    Swift

    +
    public let responseFormat: AudioSpeechResponseFormat?
    + +
    +
    +
    +
    +
  • +
  • +
    + + + + speed + +
    +
    +
    +
    +
    +
    +

    The speed of the generated audio. Select a value from 0.25 to 4.0. 1.0 is the default. +Defaults to 1

    + +
    +
    +

    Declaration

    +
    +

    Swift

    +
    public let speed: String?
    + +
    +
    +
    +
    +
  • +
  • +
    + + + + CodingKeys + +
    +
    +
    +
    +
    +
    + + See more +
    +
    +

    Declaration

    +
    +

    Swift

    +
    public enum CodingKeys : String, CodingKey
    + +
    +
    +
    +
    +
  • +
  • + +
    +
    +
    +
    +
    + +
    +
    +

    Declaration

    +
    +

    Swift

    +
    public init(model: Model, input: String, voice: AudioSpeechVoice, responseFormat: AudioSpeechResponseFormat = .mp3, speed: Double?)
    + +
    +
    +
    +
    +
  • +
  • +
    + + + + Speed + +
    +
    +
    +
    +
    +
    + + See more +
    +
    +

    Declaration

    +
    +

    Swift

    +
    enum Speed : Double
    + +
    +
    +
    +
    +
  • +
  • + +
    +
    +
    +
    +
    + +
    +
    +

    Declaration

    +
    +

    Swift

    +
    static func normalizeSpeechSpeed(_ inputSpeed: Double?) -> String
    + +
    +
    +
    +
    +
  • +
+
+
+
+ +
+
+ + diff --git a/docs/Structs/AudioSpeechQuery/AudioSpeechResponseFormat.html b/docs/Structs/AudioSpeechQuery/AudioSpeechResponseFormat.html new file mode 100644 index 00000000..b68d2557 --- /dev/null +++ b/docs/Structs/AudioSpeechQuery/AudioSpeechResponseFormat.html @@ -0,0 +1,422 @@ + + + + AudioSpeechResponseFormat Enumeration Reference + + + + + + + + + + + + + +
+
+

Docs (100% documented)

+
+
+ +
+
+
+
+
+ +
+
+ +
+
+
+

AudioSpeechResponseFormat

+
+
+ +
public enum AudioSpeechResponseFormat : String, Codable, CaseIterable
+ +
+
+

Encapsulates the response formats available for audio data.

+ +

Formats:

+ +
    +
  • mp3
  • +
  • opus
  • +
  • aac
  • +
  • flac
  • +
+ +
+
+
+
    +
  • +
    + + + + mp3 + +
    +
    +
    +
    +
    +
    + +
    +
    +

    Declaration

    +
    +

    Swift

    +
    case mp3
    + +
    +
    +
    +
    +
  • +
  • +
    + + + + opus + +
    +
    +
    +
    +
    +
    + +
    +
    +

    Declaration

    +
    +

    Swift

    +
    case opus
    + +
    +
    +
    +
    +
  • +
  • +
    + + + + aac + +
    +
    +
    +
    +
    +
    + +
    +
    +

    Declaration

    +
    +

    Swift

    +
    case aac
    + +
    +
    +
    +
    +
  • +
  • +
    + + + + flac + +
    +
    +
    +
    +
    +
    + +
    +
    +

    Declaration

    +
    +

    Swift

    +
    case flac
    + +
    +
    +
    +
    +
  • +
+
+
+
+ +
+
+ + diff --git a/docs/Structs/AudioSpeechQuery/AudioSpeechVoice.html b/docs/Structs/AudioSpeechQuery/AudioSpeechVoice.html new file mode 100644 index 00000000..2cdea3a2 --- /dev/null +++ b/docs/Structs/AudioSpeechQuery/AudioSpeechVoice.html @@ -0,0 +1,468 @@ + + + + AudioSpeechVoice Enumeration Reference + + + + + + + + + + + + + +
+
+

Docs (100% documented)

+
+
+ +
+
+
+
+
+ +
+
+ +
+
+
+

AudioSpeechVoice

+
+
+ +
public enum AudioSpeechVoice : String, Codable, CaseIterable
+ +
+
+

Encapsulates the voices available for audio generation.

+ +

To get acquainted with each of the voices and listen to the samples visit: +OpenAI Text-to-Speech – Voice Options

+ +
+
+
+
    +
  • +
    + + + + alloy + +
    +
    +
    +
    +
    +
    + +
    +
    +

    Declaration

    +
    +

    Swift

    +
    case alloy
    + +
    +
    +
    +
    +
  • +
  • +
    + + + + echo + +
    +
    +
    +
    +
    +
    + +
    +
    +

    Declaration

    +
    +

    Swift

    +
    case echo
    + +
    +
    +
    +
    +
  • +
  • +
    + + + + fable + +
    +
    +
    +
    +
    +
    + +
    +
    +

    Declaration

    +
    +

    Swift

    +
    case fable
    + +
    +
    +
    +
    +
  • +
  • +
    + + + + onyx + +
    +
    +
    +
    +
    +
    + +
    +
    +

    Declaration

    +
    +

    Swift

    +
    case onyx
    + +
    +
    +
    +
    +
  • +
  • +
    + + + + nova + +
    +
    +
    +
    +
    +
    + +
    +
    +

    Declaration

    +
    +

    Swift

    +
    case nova
    + +
    +
    +
    +
    +
  • +
  • +
    + + + + shimmer + +
    +
    +
    +
    +
    +
    + +
    +
    +

    Declaration

    +
    +

    Swift

    +
    case shimmer
    + +
    +
    +
    +
    +
  • +
+
+
+
+ +
+
+ + diff --git a/docs/Structs/AudioSpeechQuery/CodingKeys.html b/docs/Structs/AudioSpeechQuery/CodingKeys.html new file mode 100644 index 00000000..88e7eaf0 --- /dev/null +++ b/docs/Structs/AudioSpeechQuery/CodingKeys.html @@ -0,0 +1,438 @@ + + + + CodingKeys Enumeration Reference + + + + + + + + + + + + + +
+
+

Docs (100% documented)

+
+
+ +
+
+
+
+
+ +
+
+ +
+
+
+

CodingKeys

+
+
+ +
public enum CodingKeys : String, CodingKey
+ +
+
+ +
+
+
+
    +
  • +
    + + + + model + +
    +
    +
    +
    +
    +
    + +
    +
    +

    Declaration

    +
    +

    Swift

    +
    case model
    + +
    +
    +
    +
    +
  • +
  • +
    + + + + input + +
    +
    +
    +
    +
    +
    + +
    +
    +

    Declaration

    +
    +

    Swift

    +
    case input
    + +
    +
    +
    +
    +
  • +
  • +
    + + + + voice + +
    +
    +
    +
    +
    +
    + +
    +
    +

    Declaration

    +
    +

    Swift

    +
    case voice
    + +
    +
    +
    +
    +
  • +
  • +
    + + + + responseFormat + +
    +
    +
    +
    +
    +
    + +
    +
    +

    Declaration

    +
    +

    Swift

    +
    case responseFormat = "response_format"
    + +
    +
    +
    +
    +
  • +
  • +
    + + + + speed + +
    +
    +
    +
    +
    +
    + +
    +
    +

    Declaration

    +
    +

    Swift

    +
    case speed
    + +
    +
    +
    +
    +
  • +
+
+
+
+ +
+
+ + diff --git a/docs/Structs/AudioSpeechQuery/Speed.html b/docs/Structs/AudioSpeechQuery/Speed.html new file mode 100644 index 00000000..37d4ed0c --- /dev/null +++ b/docs/Structs/AudioSpeechQuery/Speed.html @@ -0,0 +1,386 @@ + + + + Speed Enumeration Reference + + + + + + + + + + + + + +
+
+

Docs (100% documented)

+
+
+ +
+
+
+
+
+ +
+
+ +
+
+
+

Speed

+
+
+ +
enum Speed : Double
+ +
+
+ +
+
+
+
    +
  • +
    + + + + normal + +
    +
    +
    +
    +
    +
    + +
    +
    +

    Declaration

    +
    +

    Swift

    +
    case normal = 1.0
    + +
    +
    +
    +
    +
  • +
  • +
    + + + + max + +
    +
    +
    +
    +
    +
    + +
    +
    +

    Declaration

    +
    +

    Swift

    +
    case max = 4.0
    + +
    +
    +
    +
    +
  • +
  • +
    + + + + min + +
    +
    +
    +
    +
    +
    + +
    +
    +

    Declaration

    +
    +

    Swift

    +
    case min = 0.25
    + +
    +
    +
    +
    +
  • +
+
+
+
+ +
+
+ + diff --git a/docs/Structs/AudioSpeechResult.html b/docs/Structs/AudioSpeechResult.html new file mode 100644 index 00000000..1daa0273 --- /dev/null +++ b/docs/Structs/AudioSpeechResult.html @@ -0,0 +1,335 @@ + + + + AudioSpeechResult Structure Reference + + + + + + + + + + + + + +
+
+

Docs (100% documented)

+
+
+ +
+
+
+
+
+ +
+
+ +
+
+
+

AudioSpeechResult

+
+
+ +
public struct AudioSpeechResult : Codable, Equatable
+ +
+
+

The audio file content. +Learn more: OpenAI Speech – Documentation

+ +
+
+
+
    +
  • +
    + + + + audio + +
    +
    +
    +
    +
    +
    +

    Audio data for one of the following formats :mp3, opus, aac, flac

    + +
    +
    +

    Declaration

    +
    +

    Swift

    +
    public let audio: Data
    + +
    +
    +
    +
    +
  • +
+
+
+
+ +
+
+ + diff --git a/docs/Structs/AudioTranscriptionQuery.html b/docs/Structs/AudioTranscriptionQuery.html new file mode 100644 index 00000000..f96d291b --- /dev/null +++ b/docs/Structs/AudioTranscriptionQuery.html @@ -0,0 +1,577 @@ + + + + AudioTranscriptionQuery Structure Reference + + + + + + + + + + + + + +
+
+

Docs (100% documented)

+
+
+ +
+
+
+
+
+ +
+
+ +
+
+
+

AudioTranscriptionQuery

+
+
+ +
public struct AudioTranscriptionQuery : Codable
+ +
+
+ +
+
+
+
    +
  • +
    + + + + ResponseFormat + +
    +
    +
    +
    +
    +
    + + See more +
    +
    +

    Declaration

    +
    +

    Swift

    +
    public enum ResponseFormat : String, Codable, Equatable, CaseIterable
    + +
    +
    +
    +
    +
  • +
  • +
    + + + + file + +
    +
    +
    +
    +
    +
    +

    The audio file object (not file name) to transcribe, in one of these formats: flac, mp3, mp4, mpeg, mpga, m4a, ogg, wav, or webm.

    + +
    +
    +

    Declaration

    +
    +

    Swift

    +
    public let file: Data
    + +
    +
    +
    +
    +
  • +
  • +
    + + + + fileType + +
    +
    +
    +
    +
    +
    + +
    +
    +

    Declaration

    +
    +

    Swift

    +
    public let fileType: `Self`.FileType
    + +
    +
    +
    +
    +
  • +
  • +
    + + + + model + +
    +
    +
    +
    +
    +
    +

    ID of the model to use. Only whisper-1 is currently available.

    + +
    +
    +

    Declaration

    +
    +

    Swift

    +
    public let model: Model
    + +
    +
    +
    +
    +
  • +
  • +
    + + + + responseFormat + +
    +
    +
    +
    +
    +
    +

    The format of the transcript output, in one of these options: json, text, srt, verbose_json, or vtt. +Defaults to json

    + +
    +
    +

    Declaration

    +
    +

    Swift

    +
    public let responseFormat: `Self`.ResponseFormat?
    + +
    +
    +
    +
    +
  • +
  • +
    + + + + prompt + +
    +
    +
    +
    +
    +
    +

    An optional text to guide the model’s style or continue a previous audio segment. The prompt should match the audio language.

    + +
    +
    +

    Declaration

    +
    +

    Swift

    +
    public let prompt: String?
    + +
    +
    +
    +
    +
  • +
  • +
    + + + + temperature + +
    +
    +
    +
    +
    +
    +

    The sampling temperature, between 0 and 1. Higher values like 0.8 will make the output more random, while lower values like 0.2 will make it more focused and deterministic. If set to 0, the model will use log probability to automatically increase the temperature until certain thresholds are hit. +Defaults to 0

    + +
    +
    +

    Declaration

    +
    +

    Swift

    +
    public let temperature: Double?
    + +
    +
    +
    +
    +
  • +
  • +
    + + + + language + +
    +
    +
    +
    +
    +
    +

    The language of the input audio. Supplying the input language in ISO-639-1 format will improve accuracy and latency. +https://platform.openai.com/docs/guides/speech-to-text/prompting

    + +
    +
    +

    Declaration

    +
    +

    Swift

    +
    public let language: String?
    + +
    +
    +
    +
    +
  • +
  • + +
    +
    +
    +
    +
    + +
    +
    +

    Declaration

    +
    +

    Swift

    +
    public init(file: Data, fileType: `Self`.FileType, model: Model, prompt: String? = nil, temperature: Double? = nil, language: String? = nil, responseFormat: `Self`.ResponseFormat? = nil)
    + +
    +
    +
    +
    +
  • +
  • +
    + + + + FileType + +
    +
    +
    +
    +
    +
    + + See more +
    +
    +

    Declaration

    +
    +

    Swift

    +
    public enum FileType : String, Codable, Equatable, CaseIterable
    + +
    +
    +
    +
    +
  • +
+
+
+
+ +
+
+ + diff --git a/docs/Structs/AudioTranscriptionQuery/FileType.html b/docs/Structs/AudioTranscriptionQuery/FileType.html new file mode 100644 index 00000000..6ed68c6f --- /dev/null +++ b/docs/Structs/AudioTranscriptionQuery/FileType.html @@ -0,0 +1,542 @@ + + + + FileType Enumeration Reference + + + + + + + + + + + + + +
+
+

Docs (100% documented)

+
+
+ +
+
+
+
+
+ +
+
+ +
+
+
+

FileType

+
+
+ +
public enum FileType : String, Codable, Equatable, CaseIterable
+ +
+
+ +
+
+
+
    +
  • +
    + + + + flac + +
    +
    +
    +
    +
    +
    + +
    +
    +

    Declaration

    +
    +

    Swift

    +
    case flac
    + +
    +
    +
    +
    +
  • +
  • +
    + + + + mp3 + +
    +
    +
    +
    +
    +
    + +
    +
    +

    Declaration

    +
    +

    Swift

    +
    case mp3
    + +
    +
    +
    +
    +
  • +
  • +
    + + + + mpga + +
    +
    +
    +
    +
    +
    + +
    +
    +

    Declaration

    +
    +

    Swift

    +
    case mpga
    + +
    +
    +
    +
    +
  • +
  • +
    + + + + mp4 + +
    +
    +
    +
    +
    +
    + +
    +
    +

    Declaration

    +
    +

    Swift

    +
    case mp4
    + +
    +
    +
    +
    +
  • +
  • +
    + + + + m4a + +
    +
    +
    +
    +
    +
    + +
    +
    +

    Declaration

    +
    +

    Swift

    +
    case m4a
    + +
    +
    +
    +
    +
  • +
  • +
    + + + + mpeg + +
    +
    +
    +
    +
    +
    + +
    +
    +

    Declaration

    +
    +

    Swift

    +
    case mpeg
    + +
    +
    +
    +
    +
  • +
  • +
    + + + + ogg + +
    +
    +
    +
    +
    +
    + +
    +
    +

    Declaration

    +
    +

    Swift

    +
    case ogg
    + +
    +
    +
    +
    +
  • +
  • +
    + + + + wav + +
    +
    +
    +
    +
    +
    + +
    +
    +

    Declaration

    +
    +

    Swift

    +
    case wav
    + +
    +
    +
    +
    +
  • +
  • +
    + + + + webm + +
    +
    +
    +
    +
    +
    + +
    +
    +

    Declaration

    +
    +

    Swift

    +
    case webm
    + +
    +
    +
    +
    +
  • +
+
+
+
+ +
+
+ + diff --git a/docs/Structs/AudioTranscriptionQuery/ResponseFormat.html b/docs/Structs/AudioTranscriptionQuery/ResponseFormat.html new file mode 100644 index 00000000..9d2100c0 --- /dev/null +++ b/docs/Structs/AudioTranscriptionQuery/ResponseFormat.html @@ -0,0 +1,438 @@ + + + + ResponseFormat Enumeration Reference + + + + + + + + + + + + + +
+
+

Docs (100% documented)

+
+
+ +
+
+
+
+
+ +
+
+ +
+
+
+

ResponseFormat

+
+
+ +
public enum ResponseFormat : String, Codable, Equatable, CaseIterable
+ +
+
+ +
+
+
+
    +
  • +
    + + + + json + +
    +
    +
    +
    +
    +
    + +
    +
    +

    Declaration

    +
    +

    Swift

    +
    case json
    + +
    +
    +
    +
    +
  • +
  • +
    + + + + text + +
    +
    +
    +
    +
    +
    + +
    +
    +

    Declaration

    +
    +

    Swift

    +
    case text
    + +
    +
    +
    +
    +
  • +
  • +
    + + + + verboseJson + +
    +
    +
    +
    +
    +
    + +
    +
    +

    Declaration

    +
    +

    Swift

    +
    case verboseJson = "verbose_json"
    + +
    +
    +
    +
    +
  • +
  • +
    + + + + srt + +
    +
    +
    +
    +
    +
    + +
    +
    +

    Declaration

    +
    +

    Swift

    +
    case srt
    + +
    +
    +
    +
    +
  • +
  • +
    + + + + vtt + +
    +
    +
    +
    +
    +
    + +
    +
    +

    Declaration

    +
    +

    Swift

    +
    case vtt
    + +
    +
    +
    +
    +
  • +
+
+
+
+ +
+
+ + diff --git a/docs/Structs/AudioTranscriptionResult.html b/docs/Structs/AudioTranscriptionResult.html new file mode 100644 index 00000000..5477b0e7 --- /dev/null +++ b/docs/Structs/AudioTranscriptionResult.html @@ -0,0 +1,333 @@ + + + + AudioTranscriptionResult Structure Reference + + + + + + + + + + + + + +
+
+

Docs (100% documented)

+
+
+ +
+
+
+
+
+ +
+
+ +
+
+
+

AudioTranscriptionResult

+
+
+ +
public struct AudioTranscriptionResult : Codable, Equatable
+ +
+
+ +
+
+
+
    +
  • +
    + + + + text + +
    +
    +
    +
    +
    +
    +

    The transcribed text.

    + +
    +
    +

    Declaration

    +
    +

    Swift

    +
    public let text: String
    + +
    +
    +
    +
    +
  • +
+
+
+
+ +
+
+ + diff --git a/docs/Structs/AudioTranslationQuery.html b/docs/Structs/AudioTranslationQuery.html new file mode 100644 index 00000000..bc412efa --- /dev/null +++ b/docs/Structs/AudioTranslationQuery.html @@ -0,0 +1,549 @@ + + + + AudioTranslationQuery Structure Reference + + + + + + + + + + + + + +
+
+

Docs (100% documented)

+
+
+ +
+
+
+
+
+ +
+
+ +
+
+
+

AudioTranslationQuery

+
+
+ +
public struct AudioTranslationQuery : Codable
+ +
+
+

Translates audio into English.

+ +
+
+
+
    +
  • +
    + + + + FileType + +
    +
    +
    +
    +
    +
    + +
    +
    +

    Declaration

    +
    +

    Swift

    +
    public typealias FileType = AudioTranscriptionQuery.FileType
    + +
    +
    +
    +
    +
  • +
  • +
    + + + + ResponseFormat + +
    +
    +
    +
    +
    +
    + +
    +
    +

    Declaration

    +
    +

    Swift

    +
    public typealias ResponseFormat = AudioTranscriptionQuery.ResponseFormat
    + +
    +
    +
    +
    +
  • +
  • +
    + + + + file + +
    +
    +
    +
    +
    +
    +

    The audio file object (not file name) to translate, in one of these formats: flac, mp3, mp4, mpeg, mpga, m4a, ogg, wav, or webm.

    + +
    +
    +

    Declaration

    +
    +

    Swift

    +
    public let file: Data
    + +
    +
    +
    +
    +
  • +
  • +
    + + + + fileType + +
    +
    +
    +
    +
    +
    + +
    +
    +

    Declaration

    +
    +

    Swift

    +
    public let fileType: `Self`.FileType
    + +
    +
    +
    +
    +
  • +
  • +
    + + + + model + +
    +
    +
    +
    +
    +
    +

    ID of the model to use. Only whisper-1 is currently available.

    + +
    +
    +

    Declaration

    +
    +

    Swift

    +
    public let model: Model
    + +
    +
    +
    +
    +
  • +
  • +
    + + + + responseFormat + +
    +
    +
    +
    +
    +
    +

    The format of the transcript output, in one of these options: json, text, srt, verbose_json, or vtt. +Defaults to json

    + +
    +
    +

    Declaration

    +
    +

    Swift

    +
    public let responseFormat: `Self`.ResponseFormat?
    + +
    +
    +
    +
    +
  • +
  • +
    + + + + prompt + +
    +
    +
    +
    +
    +
    +

    An optional text to guide the model’s style or continue a previous audio segment. The prompt should be in English. +https://platform.openai.com/docs/guides/speech-to-text/prompting

    + +
    +
    +

    Declaration

    +
    +

    Swift

    +
    public let prompt: String?
    + +
    +
    +
    +
    +
  • +
  • +
    + + + + temperature + +
    +
    +
    +
    +
    +
    +

    The sampling temperature, between 0 and 1. Higher values like 0.8 will make the output more random, while lower values like 0.2 will make it more focused and deterministic. If set to 0, the model will use log probability to automatically increase the temperature until certain thresholds are hit. +Defaults to 0

    + +
    +
    +

    Declaration

    +
    +

    Swift

    +
    public let temperature: Double?
    + +
    +
    +
    +
    +
  • +
  • + +
    +
    +
    +
    +
    + +
    +
    +

    Declaration

    +
    +

    Swift

    +
    public init(file: Data, fileType: `Self`.FileType, model: Model, prompt: String? = nil, temperature: Double? = nil, responseFormat: `Self`.ResponseFormat? = nil)
    + +
    +
    +
    +
    +
  • +
+
+
+
+ +
+
+ + diff --git a/docs/Structs/AudioTranslationResult.html b/docs/Structs/AudioTranslationResult.html new file mode 100644 index 00000000..d64aa14a --- /dev/null +++ b/docs/Structs/AudioTranslationResult.html @@ -0,0 +1,333 @@ + + + + AudioTranslationResult Structure Reference + + + + + + + + + + + + + +
+
+

Docs (100% documented)

+
+
+ +
+
+
+
+
+ +
+
+ +
+
+
+

AudioTranslationResult

+
+
+ +
public struct AudioTranslationResult : Codable, Equatable
+ +
+
+ +
+
+
+
    +
  • +
    + + + + text + +
    +
    +
    +
    +
    +
    +

    The translated text.

    + +
    +
    +

    Declaration

    +
    +

    Swift

    +
    public let text: String
    + +
    +
    +
    +
    +
  • +
+
+
+
+ +
+
+ + diff --git a/docs/Structs/ChatQuery.html b/docs/Structs/ChatQuery.html new file mode 100644 index 00000000..ffd9d479 --- /dev/null +++ b/docs/Structs/ChatQuery.html @@ -0,0 +1,1022 @@ + + + + ChatQuery Structure Reference + + + + + + + + + + + + + +
+
+

Docs (100% documented)

+
+
+ +
+
+
+
+
+ +
+
+ +
+
+
+

ChatQuery

+
+
+ +
public struct ChatQuery : Equatable, Codable, Streamable
+ +
+
+

Creates a model response for the given chat conversation +https://platform.openai.com/docs/guides/text-generation +https://platform.openai.com/docs/api-reference/chat/create

+ +
+
+
+
    +
  • +
    + + + + messages + +
    +
    +
    +
    +
    +
    +

    A list of messages comprising the conversation so far

    + +
    +
    +

    Declaration

    +
    +

    Swift

    +
    public let messages: [`Self`.ChatCompletionMessageParam]
    + +
    +
    +
    +
    +
  • +
  • +
    + + + + model + +
    +
    +
    +
    +
    +
    +

    ID of the model to use. See the model endpoint compatibility table for details on which models work with the Chat API. +https://platform.openai.com/docs/models/model-endpoint-compatibility

    + +
    +
    +

    Declaration

    +
    +

    Swift

    +
    public let model: Model
    + +
    +
    +
    +
    +
  • +
  • +
    + + + + frequencyPenalty + +
    +
    +
    +
    +
    +
    +

    Number between -2.0 and 2.0. Positive values penalize new tokens based on their existing frequency in the text so far, decreasing the model’s likelihood to repeat the same line verbatim. +Defaults to 0 +https://platform.openai.com/docs/guides/text-generation/parameter-details

    + +
    +
    +

    Declaration

    +
    +

    Swift

    +
    public let frequencyPenalty: Double?
    + +
    +
    +
    +
    +
  • +
  • +
    + + + + logitBias + +
    +
    +
    +
    +
    +
    +

    Modify the likelihood of specified tokens appearing in the completion. +Accepts a JSON object that maps tokens (specified by their token ID in the tokenizer) to an associated bias value from -100 to 100. Mathematically, the bias is added to the logits generated by the model prior to sampling. The exact effect will vary per model, but values between -1 and 1 should decrease or increase likelihood of selection; values like -100 or 100 should result in a ban or exclusive selection of the relevant token. +Defaults to null

    + +
    +
    +

    Declaration

    +
    +

    Swift

    +
    public let logitBias: [String : Int]?
    + +
    +
    +
    +
    +
  • +
  • +
    + + + + logprobs + +
    +
    +
    +
    +
    +
    +

    Whether to return log probabilities of the output tokens or not. If true, returns the log probabilities of each output token returned in the content of message. This option is currently not available on the gpt-4-vision-preview model. +Defaults to false

    + +
    +
    +

    Declaration

    +
    +

    Swift

    +
    public let logprobs: Bool?
    + +
    +
    +
    +
    +
  • +
  • +
    + + + + maxTokens + +
    +
    +
    +
    +
    +
    +

    The maximum number of tokens to generate in the completion. +The total length of input tokens and generated tokens is limited by the model’s context length. +https://platform.openai.com/tokenizer

    + +
    +
    +

    Declaration

    +
    +

    Swift

    +
    public let maxTokens: Int?
    + +
    +
    +
    +
    +
  • +
  • +
    + + + + n + +
    +
    +
    +
    +
    +
    +

    How many chat completion choices to generate for each input message. Note that you will be charged based on the number of generated tokens across all of the choices. Keep n as 1 to minimize costs. +Defaults to 1

    + +
    +
    +

    Declaration

    +
    +

    Swift

    +
    public let n: Int?
    + +
    +
    +
    +
    +
  • +
  • +
    + + + + presencePenalty + +
    +
    +
    +
    +
    +
    +

    Number between -2.0 and 2.0. Positive values penalize new tokens based on whether they appear in the text so far, increasing the model’s likelihood to talk about new topics. +https://platform.openai.com/docs/guides/text-generation/parameter-details

    + +
    +
    +

    Declaration

    +
    +

    Swift

    +
    public let presencePenalty: Double?
    + +
    +
    +
    +
    +
  • +
  • +
    + + + + responseFormat + +
    +
    +
    +
    +
    +
    +

    An object specifying the format that the model must output. Compatible with gpt-4-1106-preview and gpt-3.5-turbo-1106. +Setting to { “type”: “json_object” } enables JSON mode, which guarantees the message the model generates is valid JSON. +Important: when using JSON mode, you must also instruct the model to produce JSON yourself via a system or user message. Without this, the model may generate an unending stream of whitespace until the generation reaches the token limit, resulting in a long-running and seemingly “stuck” request. Also note that the message content may be partially cut off if finish_reason=“length”, which indicates the generation exceeded max_tokens or the conversation exceeded the max context length.

    + +
    +
    +

    Declaration

    +
    +

    Swift

    +
    public let responseFormat: `Self`.ResponseFormat?
    + +
    +
    +
    +
    +
  • +
  • +
    + + + + seed + +
    +
    +
    +
    +
    +
    +

    This feature is in Beta. If specified, our system will make a best effort to sample deterministically, such that repeated requests with the same seed and parameters should return the same result. Determinism is not guaranteed, and you should refer to the system_fingerprint response parameter to monitor changes in the backend.

    + +
    +
    +

    Declaration

    +
    +

    Swift

    +
    public let seed: Int?
    + +
    +
    +
    +
    +
  • +
  • +
    + + + + stop + +
    +
    +
    +
    +
    +
    +

    Up to 4 sequences where the API will stop generating further tokens. The returned text will not contain the stop sequence. +Defaults to null

    + +
    +
    +

    Declaration

    +
    +

    Swift

    +
    public let stop: Stop?
    + +
    +
    +
    +
    +
  • +
  • +
    + + + + temperature + +
    +
    +
    +
    +
    +
    +

    What sampling temperature to use, between 0 and 2. Higher values like 0.8 will make the output more random, while lower values like 0.2 will make it more focused and deterministic. +We generally recommend altering this or top_p but not both. +Defaults to 1

    + +
    +
    +

    Declaration

    +
    +

    Swift

    +
    public let temperature: Double?
    + +
    +
    +
    +
    +
  • +
  • +
    + + + + toolChoice + +
    +
    +
    +
    +
    +
    +

    Controls which (if any) function is called by the model. none means the model will not call a function and instead generates a message. auto means the model can pick between generating a message or calling a function. Specifying a particular function via {“type”: “function”, “function”: {“name”: “my_function”}} forces the model to call that function. +none is the default when no functions are present. auto is the default if functions are present

    + +
    +
    +

    Declaration

    +
    +

    Swift

    +
    public let toolChoice: `Self`.ChatCompletionFunctionCallOptionParam?
    + +
    +
    +
    +
    +
  • +
  • +
    + + + + tools + +
    +
    +
    +
    +
    +
    +

    A list of tools the model may call. Currently, only functions are supported as a tool. Use this to provide a list of functions the model may generate JSON inputs for.

    + +
    +
    +

    Declaration

    +
    +

    Swift

    +
    public let tools: [`Self`.ChatCompletionToolParam]?
    + +
    +
    +
    +
    +
  • +
  • +
    + + + + topLogprobs + +
    +
    +
    +
    +
    +
    +

    An integer between 0 and 5 specifying the number of most likely tokens to return at each token position, each with an associated log probability. logprobs must be set to true if this parameter is used.

    + +
    +
    +

    Declaration

    +
    +

    Swift

    +
    public let topLogprobs: Int?
    + +
    +
    +
    +
    +
  • +
  • +
    + + + + topP + +
    +
    +
    +
    +
    +
    +

    An alternative to sampling with temperature, called nucleus sampling, where the model considers the results of the tokens with top_p probability mass. So 0.1 means only the tokens comprising the top 10% probability mass are considered. +We generally recommend altering this or temperature but not both. +Defaults to 1

    + +
    +
    +

    Declaration

    +
    +

    Swift

    +
    public let topP: Double?
    + +
    +
    +
    +
    +
  • +
  • +
    + + + + user + +
    +
    +
    +
    +
    +
    +

    A unique identifier representing your end-user, which can help OpenAI to monitor and detect abuse. +https://platform.openai.com/docs/guides/safety-best-practices/end-user-ids

    + +
    +
    +

    Declaration

    +
    +

    Swift

    +
    public let user: String?
    + +
    +
    +
    +
    +
  • +
  • +
    + + + + stream + +
    +
    +
    +
    +
    +
    +

    If set, partial message deltas will be sent, like in ChatGPT. Tokens will be sent as data-only server-sent events as they become available, with the stream terminated by a data: [DONE] message. +https://developer.mozilla.org/en-US/docs/Web/API/Server-sent_events/Using_server-sent_events#Event_stream_format

    + +
    +
    +

    Declaration

    +
    +

    Swift

    +
    public var stream: Bool
    + +
    +
    +
    +
    +
  • +
  • + +
    +
    +
    +
    +
    + +
    +
    +

    Declaration

    +
    +

    Swift

    +
    public init(
    +    messages: [Self.ChatCompletionMessageParam],
    +    model: Model,
    +    frequencyPenalty: Double? = nil,
    +    logitBias: [String : Int]? = nil,
    +    logprobs: Bool? = nil,
    +    maxTokens: Int? = nil,
    +    n: Int? = nil,
    +    presencePenalty: Double? = nil,
    +    responseFormat: Self.ResponseFormat? = nil,
    +    seed: Int? = nil,
    +    stop: Self.Stop? = nil,
    +    temperature: Double? = nil,
    +    toolChoice: Self.ChatCompletionFunctionCallOptionParam? = nil,
    +    tools: [Self.ChatCompletionToolParam]? = nil,
    +    topLogprobs: Int? = nil,
    +    topP: Double? = nil,
    +    user: String? = nil,
    +    stream: Bool = false
    +)
    + +
    +
    +
    +
    +
  • +
  • + +
    +
    +
    +
    +
    + + See more +
    +
    +

    Declaration

    +
    +

    Swift

    +
    public enum ChatCompletionMessageParam : Codable, Equatable
    + +
    +
    +
    +
    +
  • +
  • +
    + + + + Stop + +
    +
    +
    +
    +
    +
    + + See more +
    +
    +

    Declaration

    +
    +

    Swift

    +
    public enum Stop : Codable, Equatable
    + +
    +
    +
    +
    +
  • +
  • +
    + + + + ResponseFormat + +
    +
    +
    +
    +
    +
    + + See more +
    +
    +

    Declaration

    +
    +

    Swift

    +
    public enum ResponseFormat : String, Codable, Equatable
    + +
    +
    +
    +
    +
  • +
  • + +
    +
    +
    +
    +
    + + See more +
    +
    +

    Declaration

    +
    +

    Swift

    +
    public enum ChatCompletionFunctionCallOptionParam : Codable, Equatable
    + +
    +
    +
    +
    +
  • +
  • + +
    +
    +
    +
    +
    + + See more +
    +
    +

    Declaration

    +
    +

    Swift

    +
    public struct ChatCompletionToolParam : Codable, Equatable
    + +
    +
    +
    +
    +
  • +
  • +
    + + + + CodingKeys + +
    +
    +
    +
    +
    +
    + + See more +
    +
    +

    Declaration

    +
    +

    Swift

    +
    public enum CodingKeys : String, CodingKey
    + +
    +
    +
    +
    +
  • +
+
+
+
+ +
+
+ + diff --git a/docs/Structs/ChatQuery/ChatCompletionFunctionCallOptionParam.html b/docs/Structs/ChatQuery/ChatCompletionFunctionCallOptionParam.html new file mode 100644 index 00000000..14274779 --- /dev/null +++ b/docs/Structs/ChatQuery/ChatCompletionFunctionCallOptionParam.html @@ -0,0 +1,438 @@ + + + + ChatCompletionFunctionCallOptionParam Enumeration Reference + + + + + + + + + + + + + +
+
+

Docs (100% documented)

+
+
+ +
+
+
+
+
+ +
+
+ +
+
+
+

ChatCompletionFunctionCallOptionParam

+
+
+ +
public enum ChatCompletionFunctionCallOptionParam : Codable, Equatable
+ +
+
+ +
+
+
+
    +
  • +
    + + + + none + +
    +
    +
    +
    +
    +
    + +
    +
    +

    Declaration

    +
    +

    Swift

    +
    case none
    + +
    +
    +
    +
    +
  • +
  • +
    + + + + auto + +
    +
    +
    +
    +
    +
    + +
    +
    +

    Declaration

    +
    +

    Swift

    +
    case auto
    + +
    +
    +
    +
    +
  • +
  • +
    + + + + function(_:) + +
    +
    +
    +
    +
    +
    + +
    +
    +

    Declaration

    +
    +

    Swift

    +
    case function(String)
    + +
    +
    +
    +
    +
  • +
  • +
    + + + + encode(to:) + +
    +
    +
    +
    +
    +
    + +
    +
    +

    Declaration

    +
    +

    Swift

    +
    public func encode(to encoder: Encoder) throws
    + +
    +
    +
    +
    +
  • +
  • +
    + + + + init(function:) + +
    +
    +
    +
    +
    +
    + +
    +
    +

    Declaration

    +
    +

    Swift

    +
    public init(function: String)
    + +
    +
    +
    +
    +
  • +
+
+
+
+ +
+
+ + diff --git a/docs/Structs/ChatQuery/ChatCompletionMessageParam.html b/docs/Structs/ChatQuery/ChatCompletionMessageParam.html new file mode 100644 index 00000000..f6323aa5 --- /dev/null +++ b/docs/Structs/ChatQuery/ChatCompletionMessageParam.html @@ -0,0 +1,791 @@ + + + + ChatCompletionMessageParam Enumeration Reference + + + + + + + + + + + + + +
+
+

Docs (100% documented)

+
+
+ +
+
+
+
+
+ +
+
+ +
+
+
+

ChatCompletionMessageParam

+
+
+ +
public enum ChatCompletionMessageParam : Codable, Equatable
+ +
+
+ +
+
+
+ +
+
+
+ +
+
+ + diff --git a/docs/Structs/ChatQuery/ChatCompletionMessageParam/ChatCompletionAssistantMessageParam.html b/docs/Structs/ChatQuery/ChatCompletionMessageParam/ChatCompletionAssistantMessageParam.html new file mode 100644 index 00000000..20a6e7df --- /dev/null +++ b/docs/Structs/ChatQuery/ChatCompletionMessageParam/ChatCompletionAssistantMessageParam.html @@ -0,0 +1,528 @@ + + + + ChatCompletionAssistantMessageParam Structure Reference + + + + + + + + + + + + + +
+
+

Docs (100% documented)

+
+
+ +
+
+
+
+
+ +
+
+ +
+
+
+

ChatCompletionAssistantMessageParam

+
+
+ +
public struct ChatCompletionAssistantMessageParam : Codable, Equatable
+ +
+
+ +
+
+
+
    +
  • +
    + + + + Role + +
    +
    +
    +
    +
    +
    + +
    +
    +

    Declaration

    +
    +

    Swift

    +
    public typealias Role = ChatQuery.ChatCompletionMessageParam.Role
    + +
    +
    +
    +
    +
  • +
  • +
    + + + + role + +
    +
    +
    +
    +
    +
    +

    / The role of the messages author, in this case assistant.

    + +
    +
    +

    Declaration

    +
    +

    Swift

    +
    public let role: `Self`.Role
    + +
    +
    +
    +
    +
  • +
  • +
    + + + + content + +
    +
    +
    +
    +
    +
    +

    The contents of the assistant message. Required unless tool_calls is specified.

    + +
    +
    +

    Declaration

    +
    +

    Swift

    +
    public let content: String?
    + +
    +
    +
    +
    +
  • +
  • +
    + + + + name + +
    +
    +
    +
    +
    +
    +

    The name of the author of this message. name is required if role is function, and it should be the name of the function whose response is in the content. May contain a-z, A-Z, 0-9, and underscores, with a maximum length of 64 characters.

    + +
    +
    +

    Declaration

    +
    +

    Swift

    +
    public let name: String?
    + +
    +
    +
    +
    +
  • +
  • +
    + + + + toolCalls + +
    +
    +
    +
    +
    +
    +

    The tool calls generated by the model, such as function calls.

    + +
    +
    +

    Declaration

    +
    +

    Swift

    +
    public let toolCalls: [`Self`.ChatCompletionMessageToolCallParam]?
    + +
    +
    +
    +
    +
  • +
  • + +
    +
    +
    +
    +
    + +
    +
    +

    Declaration

    +
    +

    Swift

    +
    public init(
    +    content: String? = nil,
    +    name: String? = nil,
    +    toolCalls: [Self.ChatCompletionMessageToolCallParam]? = nil
    +)
    + +
    +
    +
    +
    +
  • +
  • +
    + + + + CodingKeys + +
    +
    +
    +
    +
    +
    + + See more +
    +
    +

    Declaration

    +
    +

    Swift

    +
    public enum CodingKeys : String, CodingKey
    + +
    +
    +
    +
    +
  • +
  • + +
    +
    +
    +
    +
    + + See more +
    +
    +

    Declaration

    +
    +

    Swift

    +
    public struct ChatCompletionMessageToolCallParam : Codable, Equatable
    + +
    +
    +
    +
    +
  • +
+
+
+
+ +
+
+ + diff --git a/docs/Structs/ChatQuery/ChatCompletionMessageParam/ChatCompletionAssistantMessageParam/ChatCompletionMessageToolCallParam.html b/docs/Structs/ChatQuery/ChatCompletionMessageParam/ChatCompletionAssistantMessageParam/ChatCompletionMessageToolCallParam.html new file mode 100644 index 00000000..4c08eaf2 --- /dev/null +++ b/docs/Structs/ChatQuery/ChatCompletionMessageParam/ChatCompletionAssistantMessageParam/ChatCompletionMessageToolCallParam.html @@ -0,0 +1,475 @@ + + + + ChatCompletionMessageToolCallParam Structure Reference + + + + + + + + + + + + + +
+
+

Docs (100% documented)

+
+
+ +
+
+
+
+
+ +
+
+ +
+
+
+

ChatCompletionMessageToolCallParam

+
+
+ +
public struct ChatCompletionMessageToolCallParam : Codable, Equatable
+ +
+
+ +
+
+
+
    +
  • +
    + + + + ToolsType + +
    +
    +
    +
    +
    +
    + +
    +
    +

    Declaration

    +
    +

    Swift

    +
    public typealias ToolsType = ChatQuery.ChatCompletionToolParam.ToolsType
    + +
    +
    +
    +
    +
  • +
  • +
    + + + + id + +
    +
    +
    +
    +
    +
    +

    The ID of the tool call.

    + +
    +
    +

    Declaration

    +
    +

    Swift

    +
    public let id: String
    + +
    +
    +
    +
    +
  • +
  • +
    + + + + function + +
    +
    +
    +
    +
    +
    +

    The function that the model called.

    + +
    +
    +

    Declaration

    +
    +

    Swift

    +
    public let function: `Self`.FunctionCall
    + +
    +
    +
    +
    +
  • +
  • +
    + + + + type + +
    +
    +
    +
    +
    +
    +

    The type of the tool. Currently, only function is supported.

    + +
    +
    +

    Declaration

    +
    +

    Swift

    +
    public let type: `Self`.ToolsType
    + +
    +
    +
    +
    +
  • +
  • +
    + + + + init(id:function:) + +
    +
    +
    +
    +
    +
    + +
    +
    +

    Declaration

    +
    +

    Swift

    +
    public init(
    +    id: String,
    +    function:  Self.FunctionCall
    +)
    + +
    +
    +
    +
    +
  • +
  • +
    + + + + FunctionCall + +
    +
    +
    +
    +
    +
    + + See more +
    +
    +

    Declaration

    +
    +

    Swift

    +
    public struct FunctionCall : Codable, Equatable
    + +
    +
    +
    +
    +
  • +
+
+
+
+ +
+
+ + diff --git a/docs/Structs/ChatQuery/ChatCompletionMessageParam/ChatCompletionAssistantMessageParam/ChatCompletionMessageToolCallParam/FunctionCall.html b/docs/Structs/ChatQuery/ChatCompletionMessageParam/ChatCompletionAssistantMessageParam/ChatCompletionMessageToolCallParam/FunctionCall.html new file mode 100644 index 00000000..8a50f6cc --- /dev/null +++ b/docs/Structs/ChatQuery/ChatCompletionMessageParam/ChatCompletionAssistantMessageParam/ChatCompletionMessageToolCallParam/FunctionCall.html @@ -0,0 +1,368 @@ + + + + FunctionCall Structure Reference + + + + + + + + + + + + + +
+
+

Docs (100% documented)

+
+
+ +
+
+
+
+
+ +
+
+ +
+
+
+

FunctionCall

+
+
+ +
public struct FunctionCall : Codable, Equatable
+ +
+
+ +
+
+
+
    +
  • +
    + + + + arguments + +
    +
    +
    +
    +
    +
    +

    The arguments to call the function with, as generated by the model in JSON format. Note that the model does not always generate valid JSON, and may hallucinate parameters not defined by your function schema. Validate the arguments in your code before calling your function.

    + +
    +
    +

    Declaration

    +
    +

    Swift

    +
    public let arguments: String
    + +
    +
    +
    +
    +
  • +
  • +
    + + + + name + +
    +
    +
    +
    +
    +
    +

    The name of the function to call.

    + +
    +
    +

    Declaration

    +
    +

    Swift

    +
    public let name: String
    + +
    +
    +
    +
    +
  • +
+
+
+
+ +
+
+ + diff --git a/docs/Structs/ChatQuery/ChatCompletionMessageParam/ChatCompletionAssistantMessageParam/CodingKeys.html b/docs/Structs/ChatQuery/ChatCompletionMessageParam/ChatCompletionAssistantMessageParam/CodingKeys.html new file mode 100644 index 00000000..a451143d --- /dev/null +++ b/docs/Structs/ChatQuery/ChatCompletionMessageParam/ChatCompletionAssistantMessageParam/CodingKeys.html @@ -0,0 +1,416 @@ + + + + CodingKeys Enumeration Reference + + + + + + + + + + + + + +
+
+

Docs (100% documented)

+
+
+ +
+
+
+
+
+ +
+
+ +
+
+
+

CodingKeys

+
+
+ +
public enum CodingKeys : String, CodingKey
+ +
+
+ +
+
+
+
    +
  • +
    + + + + name + +
    +
    +
    +
    +
    +
    + +
    +
    +

    Declaration

    +
    +

    Swift

    +
    case name
    + +
    +
    +
    +
    +
  • +
  • +
    + + + + role + +
    +
    +
    +
    +
    +
    + +
    +
    +

    Declaration

    +
    +

    Swift

    +
    case role
    + +
    +
    +
    +
    +
  • +
  • +
    + + + + content + +
    +
    +
    +
    +
    +
    + +
    +
    +

    Declaration

    +
    +

    Swift

    +
    case content
    + +
    +
    +
    +
    +
  • +
  • +
    + + + + toolCalls + +
    +
    +
    +
    +
    +
    + +
    +
    +

    Declaration

    +
    +

    Swift

    +
    case toolCalls = "tool_calls"
    + +
    +
    +
    +
    +
  • +
+
+
+
+ +
+
+ + diff --git a/docs/Structs/ChatQuery/ChatCompletionMessageParam/ChatCompletionSystemMessageParam.html b/docs/Structs/ChatQuery/ChatCompletionMessageParam/ChatCompletionSystemMessageParam.html new file mode 100644 index 00000000..4dd31f2c --- /dev/null +++ b/docs/Structs/ChatQuery/ChatCompletionMessageParam/ChatCompletionSystemMessageParam.html @@ -0,0 +1,446 @@ + + + + ChatCompletionSystemMessageParam Structure Reference + + + + + + + + + + + + + +
+
+

Docs (100% documented)

+
+
+ +
+
+
+
+
+ +
+
+ +
+
+
+

ChatCompletionSystemMessageParam

+
+
+ +
public struct ChatCompletionSystemMessageParam : Codable, Equatable
+ +
+
+ +
+
+
+
    +
  • +
    + + + + Role + +
    +
    +
    +
    +
    +
    + +
    +
    +

    Declaration

    +
    +

    Swift

    +
    public typealias Role = ChatQuery.ChatCompletionMessageParam.Role
    + +
    +
    +
    +
    +
  • +
  • +
    + + + + content + +
    +
    +
    +
    +
    +
    +

    The contents of the system message.

    + +
    +
    +

    Declaration

    +
    +

    Swift

    +
    public let content: String
    + +
    +
    +
    +
    +
  • +
  • +
    + + + + role + +
    +
    +
    +
    +
    +
    +

    The role of the messages author, in this case system.

    + +
    +
    +

    Declaration

    +
    +

    Swift

    +
    public let role: `Self`.Role
    + +
    +
    +
    +
    +
  • +
  • +
    + + + + name + +
    +
    +
    +
    +
    +
    +

    An optional name for the participant. Provides the model information to differentiate between participants of the same role.

    + +
    +
    +

    Declaration

    +
    +

    Swift

    +
    public let name: String?
    + +
    +
    +
    +
    +
  • +
  • +
    + + + + init(content:name:) + +
    +
    +
    +
    +
    +
    + +
    +
    +

    Declaration

    +
    +

    Swift

    +
    public init(
    +    content: String,
    +    name: String? = nil
    +)
    + +
    +
    +
    +
    +
  • +
+
+
+
+ +
+
+ + diff --git a/docs/Structs/ChatQuery/ChatCompletionMessageParam/ChatCompletionToolMessageParam.html b/docs/Structs/ChatQuery/ChatCompletionMessageParam/ChatCompletionToolMessageParam.html new file mode 100644 index 00000000..87fcde21 --- /dev/null +++ b/docs/Structs/ChatQuery/ChatCompletionMessageParam/ChatCompletionToolMessageParam.html @@ -0,0 +1,473 @@ + + + + ChatCompletionToolMessageParam Structure Reference + + + + + + + + + + + + + +
+
+

Docs (100% documented)

+
+
+ +
+
+
+
+
+ +
+
+ +
+
+
+

ChatCompletionToolMessageParam

+
+
+ +
public struct ChatCompletionToolMessageParam : Codable, Equatable
+ +
+
+ +
+
+
+
    +
  • +
    + + + + Role + +
    +
    +
    +
    +
    +
    + +
    +
    +

    Declaration

    +
    +

    Swift

    +
    public typealias Role = ChatQuery.ChatCompletionMessageParam.Role
    + +
    +
    +
    +
    +
  • +
  • +
    + + + + content + +
    +
    +
    +
    +
    +
    +

    The contents of the tool message.

    + +
    +
    +

    Declaration

    +
    +

    Swift

    +
    public let content: String
    + +
    +
    +
    +
    +
  • +
  • +
    + + + + role + +
    +
    +
    +
    +
    +
    +

    The role of the messages author, in this case tool.

    + +
    +
    +

    Declaration

    +
    +

    Swift

    +
    public let role: `Self`.Role
    + +
    +
    +
    +
    +
  • +
  • +
    + + + + toolCallId + +
    +
    +
    +
    +
    +
    +

    Tool call that this message is responding to.

    + +
    +
    +

    Declaration

    +
    +

    Swift

    +
    public let toolCallId: String
    + +
    +
    +
    +
    +
  • +
  • + +
    +
    +
    +
    +
    + +
    +
    +

    Declaration

    +
    +

    Swift

    +
    public init(
    +    content: String,
    +    toolCallId: String
    +)
    + +
    +
    +
    +
    +
  • +
  • +
    + + + + CodingKeys + +
    +
    +
    +
    +
    +
    + + See more +
    +
    +

    Declaration

    +
    +

    Swift

    +
    public enum CodingKeys : String, CodingKey
    + +
    +
    +
    +
    +
  • +
+
+
+
+ +
+
+ + diff --git a/docs/Structs/ChatQuery/ChatCompletionMessageParam/ChatCompletionToolMessageParam/CodingKeys.html b/docs/Structs/ChatQuery/ChatCompletionMessageParam/ChatCompletionToolMessageParam/CodingKeys.html new file mode 100644 index 00000000..21ab5d41 --- /dev/null +++ b/docs/Structs/ChatQuery/ChatCompletionMessageParam/ChatCompletionToolMessageParam/CodingKeys.html @@ -0,0 +1,390 @@ + + + + CodingKeys Enumeration Reference + + + + + + + + + + + + + +
+
+

Docs (100% documented)

+
+
+ +
+
+
+
+
+ +
+
+ +
+
+
+

CodingKeys

+
+
+ +
public enum CodingKeys : String, CodingKey
+ +
+
+ +
+
+
+
    +
  • +
    + + + + content + +
    +
    +
    +
    +
    +
    + +
    +
    +

    Declaration

    +
    +

    Swift

    +
    case content
    + +
    +
    +
    +
    +
  • +
  • +
    + + + + role + +
    +
    +
    +
    +
    +
    + +
    +
    +

    Declaration

    +
    +

    Swift

    +
    case role
    + +
    +
    +
    +
    +
  • +
  • +
    + + + + toolCallId + +
    +
    +
    +
    +
    +
    + +
    +
    +

    Declaration

    +
    +

    Swift

    +
    case toolCallId = "tool_call_id"
    + +
    +
    +
    +
    +
  • +
+
+
+
+ +
+
+ + diff --git a/docs/Structs/ChatQuery/ChatCompletionMessageParam/ChatCompletionUserMessageParam.html b/docs/Structs/ChatQuery/ChatCompletionMessageParam/ChatCompletionUserMessageParam.html new file mode 100644 index 00000000..efc3ba92 --- /dev/null +++ b/docs/Structs/ChatQuery/ChatCompletionMessageParam/ChatCompletionUserMessageParam.html @@ -0,0 +1,473 @@ + + + + ChatCompletionUserMessageParam Structure Reference + + + + + + + + + + + + + +
+
+

Docs (100% documented)

+
+
+ +
+
+
+
+
+ +
+
+ +
+
+
+

ChatCompletionUserMessageParam

+
+
+ +
public struct ChatCompletionUserMessageParam : Codable, Equatable
+ +
+
+ +
+
+
+
    +
  • +
    + + + + Role + +
    +
    +
    +
    +
    +
    + +
    +
    +

    Declaration

    +
    +

    Swift

    +
    public typealias Role = ChatQuery.ChatCompletionMessageParam.Role
    + +
    +
    +
    +
    +
  • +
  • +
    + + + + content + +
    +
    +
    +
    +
    +
    +

    The contents of the user message.

    + +
    +
    +

    Declaration

    +
    +

    Swift

    +
    public let content: Content
    + +
    +
    +
    +
    +
  • +
  • +
    + + + + role + +
    +
    +
    +
    +
    +
    +

    The role of the messages author, in this case user.

    + +
    +
    +

    Declaration

    +
    +

    Swift

    +
    public let role: `Self`.Role
    + +
    +
    +
    +
    +
  • +
  • +
    + + + + name + +
    +
    +
    +
    +
    +
    +

    An optional name for the participant. Provides the model information to differentiate between participants of the same role.

    + +
    +
    +

    Declaration

    +
    +

    Swift

    +
    public let name: String?
    + +
    +
    +
    +
    +
  • +
  • +
    + + + + init(content:name:) + +
    +
    +
    +
    +
    +
    + +
    +
    +

    Declaration

    +
    +

    Swift

    +
    public init(
    +    content: Content,
    +    name: String? = nil
    +)
    + +
    +
    +
    +
    +
  • +
  • +
    + + + + Content + +
    +
    +
    +
    +
    +
    + + See more +
    +
    +

    Declaration

    +
    +

    Swift

    +
    public enum Content : Codable, Equatable
    + +
    +
    +
    +
    +
  • +
+
+
+
+ +
+
+ + diff --git a/docs/Structs/ChatQuery/ChatCompletionMessageParam/ChatCompletionUserMessageParam/Content.html b/docs/Structs/ChatQuery/ChatCompletionMessageParam/ChatCompletionUserMessageParam/Content.html new file mode 100644 index 00000000..9a7eff8e --- /dev/null +++ b/docs/Structs/ChatQuery/ChatCompletionMessageParam/ChatCompletionUserMessageParam/Content.html @@ -0,0 +1,548 @@ + + + + Content Enumeration Reference + + + + + + + + + + + + + +
+
+

Docs (100% documented)

+
+
+ +
+
+
+
+
+ +
+
+ +
+
+
+

Content

+
+
+ +
public enum Content : Codable, Equatable
+ +
+
+ +
+
+
+
    +
  • +
    + + + + string(_:) + +
    +
    +
    +
    +
    +
    + +
    +
    +

    Declaration

    +
    +

    Swift

    +
    case string(String)
    + +
    +
    +
    +
    +
  • +
  • +
    + + + + vision(_:) + +
    +
    +
    +
    +
    +
    + +
    +
    +

    Declaration

    +
    +

    Swift

    +
    case vision([VisionContent])
    + +
    +
    +
    +
    +
  • +
  • +
    + + + + string + +
    +
    +
    +
    +
    +
    + +
    +
    +

    Declaration

    +
    +

    Swift

    +
    public var string: String? { get }
    + +
    +
    +
    +
    +
  • +
  • +
    + + + + init(string:) + +
    +
    +
    +
    +
    +
    + +
    +
    +

    Declaration

    +
    +

    Swift

    +
    public init(string: String)
    + +
    +
    +
    +
    +
  • +
  • +
    + + + + init(vision:) + +
    +
    +
    +
    +
    +
    + +
    +
    +

    Declaration

    +
    +

    Swift

    +
    public init(vision: [VisionContent])
    + +
    +
    +
    +
    +
  • +
  • +
    + + + + CodingKeys + +
    +
    +
    +
    +
    +
    + + See more +
    +
    +

    Declaration

    +
    +

    Swift

    +
    public enum CodingKeys : CodingKey
    + +
    +
    +
    +
    +
  • +
  • +
    + + + + encode(to:) + +
    +
    +
    +
    +
    +
    + +
    +
    +

    Declaration

    +
    +

    Swift

    +
    public func encode(to encoder: Encoder) throws
    + +
    +
    +
    +
    +
  • +
  • +
    + + + + VisionContent + +
    +
    +
    +
    +
    +
    + + See more +
    +
    +

    Declaration

    +
    +

    Swift

    +
    public enum VisionContent : Codable, Equatable
    + +
    +
    +
    +
    +
  • +
  • +
    + + + + init(from:) + +
    +
    +
    +
    +
    +
    + +
    +
    +

    Declaration

    +
    +

    Swift

    +
    public init(from decoder: Decoder) throws
    + +
    +
    +
    +
    +
  • +
+
+
+
+ +
+
+ + diff --git a/docs/Structs/ChatQuery/ChatCompletionMessageParam/ChatCompletionUserMessageParam/Content/CodingKeys.html b/docs/Structs/ChatQuery/ChatCompletionMessageParam/ChatCompletionUserMessageParam/Content/CodingKeys.html new file mode 100644 index 00000000..0503ce5d --- /dev/null +++ b/docs/Structs/ChatQuery/ChatCompletionMessageParam/ChatCompletionUserMessageParam/Content/CodingKeys.html @@ -0,0 +1,366 @@ + + + + CodingKeys Enumeration Reference + + + + + + + + + + + + + +
+
+

Docs (100% documented)

+
+
+ +
+
+
+
+
+ +
+
+ +
+
+
+

CodingKeys

+
+
+ +
public enum CodingKeys : CodingKey
+ +
+
+ +
+
+
+
    +
  • +
    + + + + string + +
    +
    +
    +
    +
    +
    + +
    +
    +

    Declaration

    +
    +

    Swift

    +
    case string
    + +
    +
    +
    +
    +
  • +
  • +
    + + + + vision + +
    +
    +
    +
    +
    +
    + +
    +
    +

    Declaration

    +
    +

    Swift

    +
    case vision
    + +
    +
    +
    +
    +
  • +
+
+
+
+ +
+
+ + diff --git a/docs/Structs/ChatQuery/ChatCompletionMessageParam/ChatCompletionUserMessageParam/Content/VisionContent.html b/docs/Structs/ChatQuery/ChatCompletionMessageParam/ChatCompletionUserMessageParam/Content/VisionContent.html new file mode 100644 index 00000000..c34fc068 --- /dev/null +++ b/docs/Structs/ChatQuery/ChatCompletionMessageParam/ChatCompletionUserMessageParam/Content/VisionContent.html @@ -0,0 +1,550 @@ + + + + VisionContent Enumeration Reference + + + + + + + + + + + + + +
+
+

Docs (100% documented)

+
+
+ +
+
+
+
+
+ +
+
+ +
+
+
+

VisionContent

+
+
+ +
public enum VisionContent : Codable, Equatable
+ +
+
+ +
+
+
+ +
+
+
+ +
+
+ + diff --git a/docs/Structs/ChatQuery/ChatCompletionMessageParam/ChatCompletionUserMessageParam/Content/VisionContent/ChatCompletionContentPartImageParam.html b/docs/Structs/ChatQuery/ChatCompletionMessageParam/ChatCompletionUserMessageParam/Content/VisionContent/ChatCompletionContentPartImageParam.html new file mode 100644 index 00000000..0da0715f --- /dev/null +++ b/docs/Structs/ChatQuery/ChatCompletionMessageParam/ChatCompletionUserMessageParam/Content/VisionContent/ChatCompletionContentPartImageParam.html @@ -0,0 +1,449 @@ + + + + ChatCompletionContentPartImageParam Structure Reference + + + + + + + + + + + + + +
+
+

Docs (100% documented)

+
+
+ +
+
+
+
+
+ +
+
+ +
+
+
+

ChatCompletionContentPartImageParam

+
+
+ +
public struct ChatCompletionContentPartImageParam : Codable, Equatable
+ +
+
+ +
+
+
+
    +
  • +
    + + + + imageUrl + +
    +
    +
    +
    +
    +
    + +
    +
    +

    Declaration

    +
    +

    Swift

    +
    public let imageUrl: ImageURL
    + +
    +
    +
    +
    +
  • +
  • +
    + + + + type + +
    +
    +
    +
    +
    +
    +

    The type of the content part.

    + +
    +
    +

    Declaration

    +
    +

    Swift

    +
    public let type: String
    + +
    +
    +
    +
    +
  • +
  • +
    + + + + init(imageUrl:) + +
    +
    +
    +
    +
    +
    + +
    +
    +

    Declaration

    +
    +

    Swift

    +
    public init(imageUrl: ImageURL)
    + +
    +
    +
    +
    +
  • +
  • +
    + + + + ImageURL + +
    +
    +
    +
    +
    +
    + + See more +
    +
    +

    Declaration

    +
    +

    Swift

    +
    public struct ImageURL : Codable, Equatable
    + +
    +
    +
    +
    +
  • +
  • +
    + + + + CodingKeys + +
    +
    +
    +
    +
    +
    + + See more +
    +
    +

    Declaration

    +
    +

    Swift

    +
    public enum CodingKeys : String, CodingKey
    + +
    +
    +
    +
    +
  • +
+
+
+
+ +
+
+ + diff --git a/docs/Structs/ChatQuery/ChatCompletionMessageParam/ChatCompletionUserMessageParam/Content/VisionContent/ChatCompletionContentPartImageParam/CodingKeys.html b/docs/Structs/ChatQuery/ChatCompletionMessageParam/ChatCompletionUserMessageParam/Content/VisionContent/ChatCompletionContentPartImageParam/CodingKeys.html new file mode 100644 index 00000000..8af7efdb --- /dev/null +++ b/docs/Structs/ChatQuery/ChatCompletionMessageParam/ChatCompletionUserMessageParam/Content/VisionContent/ChatCompletionContentPartImageParam/CodingKeys.html @@ -0,0 +1,370 @@ + + + + CodingKeys Enumeration Reference + + + + + + + + + + + + + +
+
+

Docs (100% documented)

+
+
+ +
+
+
+
+
+ +
+
+ +
+
+
+

CodingKeys

+
+
+ +
public enum CodingKeys : String, CodingKey
+ +
+
+ +
+
+
+
    +
  • +
    + + + + imageUrl + +
    +
    +
    +
    +
    +
    + +
    +
    +

    Declaration

    +
    +

    Swift

    +
    case imageUrl = "image_url"
    + +
    +
    +
    +
    +
  • +
  • +
    + + + + type + +
    +
    +
    +
    +
    +
    + +
    +
    +

    Declaration

    +
    +

    Swift

    +
    case type
    + +
    +
    +
    +
    +
  • +
+
+
+
+ +
+
+ + diff --git a/docs/Structs/ChatQuery/ChatCompletionMessageParam/ChatCompletionUserMessageParam/Content/VisionContent/ChatCompletionContentPartImageParam/ImageURL.html b/docs/Structs/ChatQuery/ChatCompletionMessageParam/ChatCompletionUserMessageParam/Content/VisionContent/ChatCompletionContentPartImageParam/ImageURL.html new file mode 100644 index 00000000..3e54f49b --- /dev/null +++ b/docs/Structs/ChatQuery/ChatCompletionMessageParam/ChatCompletionUserMessageParam/Content/VisionContent/ChatCompletionContentPartImageParam/ImageURL.html @@ -0,0 +1,452 @@ + + + + ImageURL Structure Reference + + + + + + + + + + + + + +
+
+

Docs (100% documented)

+
+
+ +
+
+
+
+
+ +
+
+ +
+
+
+

ImageURL

+
+
+ +
public struct ImageURL : Codable, Equatable
+ +
+
+ +
+
+
+
    +
  • +
    + + + + url + +
    +
    +
    +
    +
    +
    +

    Either a URL of the image or the base64 encoded image data.

    + +
    +
    +

    Declaration

    +
    +

    Swift

    +
    public let url: String
    + +
    +
    +
    +
    +
  • +
  • +
    + + + + detail + +
    +
    +
    +
    +
    +
    +

    Specifies the detail level of the image. Learn more in the +Vision guide https://platform.openai.com/docs/guides/vision/low-or-high-fidelity-image-understanding

    + +
    +
    +

    Declaration

    +
    +

    Swift

    +
    public let detail: Detail
    + +
    +
    +
    +
    +
  • +
  • +
    + + + + init(url:detail:) + +
    +
    +
    +
    +
    +
    + +
    +
    +

    Declaration

    +
    +

    Swift

    +
    public init(url: String, detail: Detail)
    + +
    +
    +
    +
    +
  • +
  • +
    + + + + init(url:detail:) + +
    +
    +
    +
    +
    +
    + +
    +
    +

    Declaration

    +
    +

    Swift

    +
    public init(url: Data, detail: Detail)
    + +
    +
    +
    +
    +
  • +
  • +
    + + + + Detail + +
    +
    +
    +
    +
    +
    + + See more +
    +
    +

    Declaration

    +
    +

    Swift

    +
    public enum Detail : String, Codable, Equatable, CaseIterable
    + +
    +
    +
    +
    +
  • +
+
+
+
+ +
+
+ + diff --git a/docs/Structs/ChatQuery/ChatCompletionMessageParam/ChatCompletionUserMessageParam/Content/VisionContent/ChatCompletionContentPartImageParam/ImageURL/Detail.html b/docs/Structs/ChatQuery/ChatCompletionMessageParam/ChatCompletionUserMessageParam/Content/VisionContent/ChatCompletionContentPartImageParam/ImageURL/Detail.html new file mode 100644 index 00000000..30667954 --- /dev/null +++ b/docs/Structs/ChatQuery/ChatCompletionMessageParam/ChatCompletionUserMessageParam/Content/VisionContent/ChatCompletionContentPartImageParam/ImageURL/Detail.html @@ -0,0 +1,398 @@ + + + + Detail Enumeration Reference + + + + + + + + + + + + + +
+
+

Docs (100% documented)

+
+
+ +
+
+
+
+
+ +
+
+ +
+
+
+

Detail

+
+
+ +
public enum Detail : String, Codable, Equatable, CaseIterable
+ +
+
+ +
+
+
+
    +
  • +
    + + + + auto + +
    +
    +
    +
    +
    +
    + +
    +
    +

    Declaration

    +
    +

    Swift

    +
    case auto
    + +
    +
    +
    +
    +
  • +
  • +
    + + + + low + +
    +
    +
    +
    +
    +
    + +
    +
    +

    Declaration

    +
    +

    Swift

    +
    case low
    + +
    +
    +
    +
    +
  • +
  • +
    + + + + high + +
    +
    +
    +
    +
    +
    + +
    +
    +

    Declaration

    +
    +

    Swift

    +
    case high
    + +
    +
    +
    +
    +
  • +
+
+
+
+ +
+
+ + diff --git a/docs/Structs/ChatQuery/ChatCompletionMessageParam/ChatCompletionUserMessageParam/Content/VisionContent/ChatCompletionContentPartTextParam.html b/docs/Structs/ChatQuery/ChatCompletionMessageParam/ChatCompletionUserMessageParam/Content/VisionContent/ChatCompletionContentPartTextParam.html new file mode 100644 index 00000000..2d4e9b1a --- /dev/null +++ b/docs/Structs/ChatQuery/ChatCompletionMessageParam/ChatCompletionUserMessageParam/Content/VisionContent/ChatCompletionContentPartTextParam.html @@ -0,0 +1,396 @@ + + + + ChatCompletionContentPartTextParam Structure Reference + + + + + + + + + + + + + +
+
+

Docs (100% documented)

+
+
+ +
+
+
+
+
+ +
+
+ +
+
+
+

ChatCompletionContentPartTextParam

+
+
+ +
public struct ChatCompletionContentPartTextParam : Codable, Equatable
+ +
+
+ +
+
+
+
    +
  • +
    + + + + text + +
    +
    +
    +
    +
    +
    +

    The text content.

    + +
    +
    +

    Declaration

    +
    +

    Swift

    +
    public let text: String
    + +
    +
    +
    +
    +
  • +
  • +
    + + + + type + +
    +
    +
    +
    +
    +
    +

    The type of the content part.

    + +
    +
    +

    Declaration

    +
    +

    Swift

    +
    public let type: String
    + +
    +
    +
    +
    +
  • +
  • +
    + + + + init(text:) + +
    +
    +
    +
    +
    +
    + +
    +
    +

    Declaration

    +
    +

    Swift

    +
    public init(text: String)
    + +
    +
    +
    +
    +
  • +
+
+
+
+ +
+
+ + diff --git a/docs/Structs/ChatQuery/ChatCompletionMessageParam/Role.html b/docs/Structs/ChatQuery/ChatCompletionMessageParam/Role.html new file mode 100644 index 00000000..2a48f81b --- /dev/null +++ b/docs/Structs/ChatQuery/ChatCompletionMessageParam/Role.html @@ -0,0 +1,414 @@ + + + + Role Enumeration Reference + + + + + + + + + + + + + +
+
+

Docs (100% documented)

+
+
+ +
+
+
+
+
+ +
+
+ +
+
+
+

Role

+
+
+ +
public enum Role : String, Codable, Equatable, CaseIterable
+ +
+
+ +
+
+
+
    +
  • +
    + + + + system + +
    +
    +
    +
    +
    +
    + +
    +
    +

    Declaration

    +
    +

    Swift

    +
    case system
    + +
    +
    +
    +
    +
  • +
  • +
    + + + + user + +
    +
    +
    +
    +
    +
    + +
    +
    +

    Declaration

    +
    +

    Swift

    +
    case user
    + +
    +
    +
    +
    +
  • +
  • +
    + + + + assistant + +
    +
    +
    +
    +
    +
    + +
    +
    +

    Declaration

    +
    +

    Swift

    +
    case assistant
    + +
    +
    +
    +
    +
  • +
  • +
    + + + + tool + +
    +
    +
    +
    +
    +
    + +
    +
    +

    Declaration

    +
    +

    Swift

    +
    case tool
    + +
    +
    +
    +
    +
  • +
+
+
+
+ +
+
+ + diff --git a/docs/Structs/ChatQuery/ChatCompletionToolParam.html b/docs/Structs/ChatQuery/ChatCompletionToolParam.html new file mode 100644 index 00000000..c1b69352 --- /dev/null +++ b/docs/Structs/ChatQuery/ChatCompletionToolParam.html @@ -0,0 +1,442 @@ + + + + ChatCompletionToolParam Structure Reference + + + + + + + + + + + + + +
+
+

Docs (100% documented)

+
+
+ +
+
+
+
+
+ +
+
+ +
+
+
+

ChatCompletionToolParam

+
+
+ +
public struct ChatCompletionToolParam : Codable, Equatable
+ +
+
+ +
+
+
+
    +
  • +
    + + + + function + +
    +
    +
    +
    +
    +
    + +
    +
    +

    Declaration

    +
    +

    Swift

    +
    public let function: `Self`.FunctionDefinition
    + +
    +
    +
    +
    +
  • +
  • +
    + + + + type + +
    +
    +
    +
    +
    +
    + +
    +
    +

    Declaration

    +
    +

    Swift

    +
    public let type: `Self`.ToolsType
    + +
    +
    +
    +
    +
  • +
  • +
    + + + + init(function:) + +
    +
    +
    +
    +
    +
    + +
    +
    +

    Declaration

    +
    +

    Swift

    +
    public init(
    +    function: Self.FunctionDefinition
    +)
    + +
    +
    +
    +
    +
  • +
  • +
    + + + + FunctionDefinition + +
    +
    +
    +
    +
    +
    + + See more +
    +
    +

    Declaration

    +
    +

    Swift

    +
    public struct FunctionDefinition : Codable, Equatable
    + +
    +
    +
    +
    +
  • +
  • +
    + + + + ToolsType + +
    +
    +
    +
    +
    +
    + + See more +
    +
    +

    Declaration

    +
    +

    Swift

    +
    public enum ToolsType : String, Codable, Equatable
    + +
    +
    +
    +
    +
  • +
+
+
+
+ +
+
+ + diff --git a/docs/Structs/ChatQuery/ChatCompletionToolParam/FunctionDefinition.html b/docs/Structs/ChatQuery/ChatCompletionToolParam/FunctionDefinition.html new file mode 100644 index 00000000..41495366 --- /dev/null +++ b/docs/Structs/ChatQuery/ChatCompletionToolParam/FunctionDefinition.html @@ -0,0 +1,452 @@ + + + + FunctionDefinition Structure Reference + + + + + + + + + + + + + +
+
+

Docs (100% documented)

+
+
+ +
+
+
+
+
+ +
+
+ +
+
+
+

FunctionDefinition

+
+
+ +
public struct FunctionDefinition : Codable, Equatable
+ +
+
+ +
+
+
+ +
+
+
+ +
+
+ + diff --git a/docs/Structs/ChatQuery/ChatCompletionToolParam/FunctionDefinition/FunctionParameters.html b/docs/Structs/ChatQuery/ChatCompletionToolParam/FunctionDefinition/FunctionParameters.html new file mode 100644 index 00000000..704e3d15 --- /dev/null +++ b/docs/Structs/ChatQuery/ChatCompletionToolParam/FunctionDefinition/FunctionParameters.html @@ -0,0 +1,637 @@ + + + + FunctionParameters Structure Reference + + + + + + + + + + + + + +
+
+

Docs (100% documented)

+
+
+ +
+
+
+
+
+ +
+
+ +
+
+
+

FunctionParameters

+
+
+ +
public struct FunctionParameters : Codable, Equatable
+ +
+
+

See the guide for examples, and the JSON Schema reference for documentation about the format.

+ +
+
+
+
    +
  • +
    + + + + type + +
    +
    +
    +
    +
    +
    + +
    +
    +

    Declaration

    +
    +

    Swift

    +
    public let type: `Self`.JSONType
    + +
    +
    +
    +
    +
  • +
  • +
    + + + + properties + +
    +
    +
    +
    +
    +
    + +
    +
    +

    Declaration

    +
    +

    Swift

    +
    public let properties: [String : Property]?
    + +
    +
    +
    +
    +
  • +
  • +
    + + + + required + +
    +
    +
    +
    +
    +
    + +
    +
    +

    Declaration

    +
    +

    Swift

    +
    public let required: [String]?
    + +
    +
    +
    +
    +
  • +
  • +
    + + + + pattern + +
    +
    +
    +
    +
    +
    + +
    +
    +

    Declaration

    +
    +

    Swift

    +
    public let pattern: String?
    + +
    +
    +
    +
    +
  • +
  • +
    + + + + const + +
    +
    +
    +
    +
    +
    + +
    +
    +

    Declaration

    +
    +

    Swift

    +
    public let const: String?
    + +
    +
    +
    +
    +
  • +
  • +
    + + + + enum + +
    +
    +
    +
    +
    +
    + +
    +
    +

    Declaration

    +
    +

    Swift

    +
    public let `enum`: [String]?
    + +
    +
    +
    +
    +
  • +
  • +
    + + + + multipleOf + +
    +
    +
    +
    +
    +
    + +
    +
    +

    Declaration

    +
    +

    Swift

    +
    public let multipleOf: Int?
    + +
    +
    +
    +
    +
  • +
  • +
    + + + + minimum + +
    +
    +
    +
    +
    +
    + +
    +
    +

    Declaration

    +
    +

    Swift

    +
    public let minimum: Int?
    + +
    +
    +
    +
    +
  • +
  • +
    + + + + maximum + +
    +
    +
    +
    +
    +
    + +
    +
    +

    Declaration

    +
    +

    Swift

    +
    public let maximum: Int?
    + +
    +
    +
    +
    +
  • +
  • + +
    +
    +
    +
    +
    + +
    +
    +

    Declaration

    +
    +

    Swift

    +
    public init(
    +    type: Self.JSONType,
    +    properties: [String : Property]? = nil,
    +    required: [String]? = nil,
    +    pattern: String? = nil,
    +    const: String? = nil,
    +    enum: [String]? = nil,
    +    multipleOf: Int? = nil,
    +    minimum: Int? = nil,
    +    maximum: Int? = nil
    +)
    + +
    +
    +
    +
    +
  • +
  • +
    + + + + Property + +
    +
    +
    +
    +
    +
    + + See more +
    +
    +

    Declaration

    +
    +

    Swift

    +
    public struct Property : Codable, Equatable
    + +
    +
    +
    +
    +
  • +
  • +
    + + + + JSONType + +
    +
    +
    +
    +
    +
    + + See more +
    +
    +

    Declaration

    +
    +

    Swift

    +
    public enum JSONType : String, Codable
    + +
    +
    +
    +
    +
  • +
+
+
+
+ +
+
+ + diff --git a/docs/Structs/ChatQuery/ChatCompletionToolParam/FunctionDefinition/FunctionParameters/JSONType.html b/docs/Structs/ChatQuery/ChatCompletionToolParam/FunctionDefinition/FunctionParameters/JSONType.html new file mode 100644 index 00000000..c7ca614d --- /dev/null +++ b/docs/Structs/ChatQuery/ChatCompletionToolParam/FunctionDefinition/FunctionParameters/JSONType.html @@ -0,0 +1,496 @@ + + + + JSONType Enumeration Reference + + + + + + + + + + + + + +
+
+

Docs (100% documented)

+
+
+ +
+
+
+
+
+ +
+
+ +
+
+
+

JSONType

+
+
+ +
public enum JSONType : String, Codable
+ +
+
+ +
+
+
+
    +
  • +
    + + + + integer + +
    +
    +
    +
    +
    +
    + +
    +
    +

    Declaration

    +
    +

    Swift

    +
    case integer
    + +
    +
    +
    +
    +
  • +
  • +
    + + + + string + +
    +
    +
    +
    +
    +
    + +
    +
    +

    Declaration

    +
    +

    Swift

    +
    case string
    + +
    +
    +
    +
    +
  • +
  • +
    + + + + boolean + +
    +
    +
    +
    +
    +
    + +
    +
    +

    Declaration

    +
    +

    Swift

    +
    case boolean
    + +
    +
    +
    +
    +
  • +
  • +
    + + + + array + +
    +
    +
    +
    +
    +
    + +
    +
    +

    Declaration

    +
    +

    Swift

    +
    case array
    + +
    +
    +
    +
    +
  • +
  • +
    + + + + object + +
    +
    +
    +
    +
    +
    + +
    +
    +

    Declaration

    +
    +

    Swift

    +
    case object
    + +
    +
    +
    +
    +
  • +
  • +
    + + + + number + +
    +
    +
    +
    +
    +
    + +
    +
    +

    Declaration

    +
    +

    Swift

    +
    case number
    + +
    +
    +
    +
    +
  • +
  • +
    + + + + null + +
    +
    +
    +
    +
    +
    + +
    +
    +

    Declaration

    +
    +

    Swift

    +
    case null
    + +
    +
    +
    +
    +
  • +
+
+
+
+ +
+
+ + diff --git a/docs/Structs/ChatQuery/ChatCompletionToolParam/FunctionDefinition/FunctionParameters/Property.html b/docs/Structs/ChatQuery/ChatCompletionToolParam/FunctionDefinition/FunctionParameters/Property.html new file mode 100644 index 00000000..3a32d386 --- /dev/null +++ b/docs/Structs/ChatQuery/ChatCompletionToolParam/FunctionDefinition/FunctionParameters/Property.html @@ -0,0 +1,772 @@ + + + + Property Structure Reference + + + + + + + + + + + + + +
+
+

Docs (100% documented)

+
+
+ +
+
+
+
+
+ +
+
+ +
+
+
+

Property

+
+
+ +
public struct Property : Codable, Equatable
+ +
+
+ +
+
+
+
    +
  • +
    + + + + JSONType + +
    +
    +
    +
    +
    +
    + +
    +
    +

    Declaration

    +
    +

    Swift

    +
    public typealias JSONType = ChatQuery.ChatCompletionToolParam.FunctionDefinition.FunctionParameters.JSONType
    + +
    +
    +
    +
    +
  • +
  • +
    + + + + type + +
    +
    +
    +
    +
    +
    + +
    +
    +

    Declaration

    +
    +

    Swift

    +
    public let type: `Self`.JSONType
    + +
    +
    +
    +
    +
  • +
  • +
    + + + + description + +
    +
    +
    +
    +
    +
    + +
    +
    +

    Declaration

    +
    +

    Swift

    +
    public let description: String?
    + +
    +
    +
    +
    +
  • +
  • +
    + + + + format + +
    +
    +
    +
    +
    +
    + +
    +
    +

    Declaration

    +
    +

    Swift

    +
    public let format: String?
    + +
    +
    +
    +
    +
  • +
  • +
    + + + + items + +
    +
    +
    +
    +
    +
    + +
    +
    +

    Declaration

    +
    +

    Swift

    +
    public let items: `Self`.Items?
    + +
    +
    +
    +
    +
  • +
  • +
    + + + + required + +
    +
    +
    +
    +
    +
    + +
    +
    +

    Declaration

    +
    +

    Swift

    +
    public let required: [String]?
    + +
    +
    +
    +
    +
  • +
  • +
    + + + + pattern + +
    +
    +
    +
    +
    +
    + +
    +
    +

    Declaration

    +
    +

    Swift

    +
    public let pattern: String?
    + +
    +
    +
    +
    +
  • +
  • +
    + + + + const + +
    +
    +
    +
    +
    +
    + +
    +
    +

    Declaration

    +
    +

    Swift

    +
    public let const: String?
    + +
    +
    +
    +
    +
  • +
  • +
    + + + + enum + +
    +
    +
    +
    +
    +
    + +
    +
    +

    Declaration

    +
    +

    Swift

    +
    public let `enum`: [String]?
    + +
    +
    +
    +
    +
  • +
  • +
    + + + + multipleOf + +
    +
    +
    +
    +
    +
    + +
    +
    +

    Declaration

    +
    +

    Swift

    +
    public let multipleOf: Int?
    + +
    +
    +
    +
    +
  • +
  • +
    + + + + minimum + +
    +
    +
    +
    +
    +
    + +
    +
    +

    Declaration

    +
    +

    Swift

    +
    public let minimum: Double?
    + +
    +
    +
    +
    +
  • +
  • +
    + + + + maximum + +
    +
    +
    +
    +
    +
    + +
    +
    +

    Declaration

    +
    +

    Swift

    +
    public let maximum: Double?
    + +
    +
    +
    +
    +
  • +
  • +
    + + + + minItems + +
    +
    +
    +
    +
    +
    + +
    +
    +

    Declaration

    +
    +

    Swift

    +
    public let minItems: Int?
    + +
    +
    +
    +
    +
  • +
  • +
    + + + + maxItems + +
    +
    +
    +
    +
    +
    + +
    +
    +

    Declaration

    +
    +

    Swift

    +
    public let maxItems: Int?
    + +
    +
    +
    +
    +
  • +
  • +
    + + + + uniqueItems + +
    +
    +
    +
    +
    +
    + +
    +
    +

    Declaration

    +
    +

    Swift

    +
    public let uniqueItems: Bool?
    + +
    +
    +
    +
    +
  • +
  • + +
    +
    +
    +
    +
    + +
    +
    +

    Declaration

    +
    +

    Swift

    +
    public init(
    +    type: Self.JSONType,
    +    description: String? = nil,
    +    format: String? = nil,
    +    items: Self.Items? = nil,
    +    required: [String]? = nil,
    +    pattern: String? = nil,
    +    const: String? = nil,
    +    enum: [String]? = nil,
    +    multipleOf: Int? = nil,
    +    minimum: Double? = nil,
    +    maximum: Double? = nil,
    +    minItems: Int? = nil,
    +    maxItems: Int? = nil,
    +    uniqueItems: Bool? = nil
    +)
    + +
    +
    +
    +
    +
  • +
  • +
    + + + + Items + +
    +
    +
    +
    +
    +
    + + See more +
    +
    +

    Declaration

    +
    +

    Swift

    +
    public struct Items : Codable, Equatable
    + +
    +
    +
    +
    +
  • +
+
+
+
+ +
+
+ + diff --git a/docs/Structs/ChatQuery/ChatCompletionToolParam/FunctionDefinition/FunctionParameters/Property/Items.html b/docs/Structs/ChatQuery/ChatCompletionToolParam/FunctionDefinition/FunctionParameters/Property/Items.html new file mode 100644 index 00000000..b94c4d7b --- /dev/null +++ b/docs/Structs/ChatQuery/ChatCompletionToolParam/FunctionDefinition/FunctionParameters/Property/Items.html @@ -0,0 +1,666 @@ + + + + Items Structure Reference + + + + + + + + + + + + + +
+
+

Docs (100% documented)

+
+
+ +
+
+
+
+
+ +
+
+ +
+
+
+

Items

+
+
+ +
public struct Items : Codable, Equatable
+ +
+
+ +
+
+
+
    +
  • +
    + + + + JSONType + +
    +
    +
    +
    +
    +
    + +
    +
    +

    Declaration

    +
    +

    Swift

    +
    public typealias JSONType = ChatQuery.ChatCompletionToolParam.FunctionDefinition.FunctionParameters.JSONType
    + +
    +
    +
    +
    +
  • +
  • +
    + + + + type + +
    +
    +
    +
    +
    +
    + +
    +
    +

    Declaration

    +
    +

    Swift

    +
    public let type: `Self`.JSONType
    + +
    +
    +
    +
    +
  • +
  • +
    + + + + properties + +
    +
    +
    +
    +
    +
    + +
    +
    +

    Declaration

    +
    +

    Swift

    +
    public let properties: [String : Property]?
    + +
    +
    +
    +
    +
  • +
  • +
    + + + + pattern + +
    +
    +
    +
    +
    +
    + +
    +
    +

    Declaration

    +
    +

    Swift

    +
    public let pattern: String?
    + +
    +
    +
    +
    +
  • +
  • +
    + + + + const + +
    +
    +
    +
    +
    +
    + +
    +
    +

    Declaration

    +
    +

    Swift

    +
    public let const: String?
    + +
    +
    +
    +
    +
  • +
  • +
    + + + + enum + +
    +
    +
    +
    +
    +
    + +
    +
    +

    Declaration

    +
    +

    Swift

    +
    public let `enum`: [String]?
    + +
    +
    +
    +
    +
  • +
  • +
    + + + + multipleOf + +
    +
    +
    +
    +
    +
    + +
    +
    +

    Declaration

    +
    +

    Swift

    +
    public let multipleOf: Int?
    + +
    +
    +
    +
    +
  • +
  • +
    + + + + minimum + +
    +
    +
    +
    +
    +
    + +
    +
    +

    Declaration

    +
    +

    Swift

    +
    public let minimum: Double?
    + +
    +
    +
    +
    +
  • +
  • +
    + + + + maximum + +
    +
    +
    +
    +
    +
    + +
    +
    +

    Declaration

    +
    +

    Swift

    +
    public let maximum: Double?
    + +
    +
    +
    +
    +
  • +
  • +
    + + + + minItems + +
    +
    +
    +
    +
    +
    + +
    +
    +

    Declaration

    +
    +

    Swift

    +
    public let minItems: Int?
    + +
    +
    +
    +
    +
  • +
  • +
    + + + + maxItems + +
    +
    +
    +
    +
    +
    + +
    +
    +

    Declaration

    +
    +

    Swift

    +
    public let maxItems: Int?
    + +
    +
    +
    +
    +
  • +
  • +
    + + + + uniqueItems + +
    +
    +
    +
    +
    +
    + +
    +
    +

    Declaration

    +
    +

    Swift

    +
    public let uniqueItems: Bool?
    + +
    +
    +
    +
    +
  • +
  • + +
    +
    +
    +
    +
    + +
    +
    +

    Declaration

    +
    +

    Swift

    +
    public init(
    +    type: Self.JSONType,
    +    properties: [String : Property]? = nil,
    +    pattern: String? = nil,
    +    const: String? = nil,
    +    `enum`: [String]? = nil,
    +    multipleOf: Int? = nil,
    +    minimum: Double? = nil,
    +    maximum: Double? = nil,
    +    minItems: Int? = nil,
    +    maxItems: Int? = nil,
    +    uniqueItems: Bool? = nil
    +)
    + +
    +
    +
    +
    +
  • +
+
+
+
+ +
+
+ + diff --git a/docs/Structs/ChatQuery/ChatCompletionToolParam/ToolsType.html b/docs/Structs/ChatQuery/ChatCompletionToolParam/ToolsType.html new file mode 100644 index 00000000..cddc3257 --- /dev/null +++ b/docs/Structs/ChatQuery/ChatCompletionToolParam/ToolsType.html @@ -0,0 +1,336 @@ + + + + ToolsType Enumeration Reference + + + + + + + + + + + + + +
+
+

Docs (100% documented)

+
+
+ +
+
+
+
+
+ +
+
+ +
+
+
+

ToolsType

+
+
+ +
public enum ToolsType : String, Codable, Equatable
+ +
+
+ +
+
+
+
    +
  • +
    + + + + function + +
    +
    +
    +
    +
    +
    + +
    +
    +

    Declaration

    +
    +

    Swift

    +
    case function
    + +
    +
    +
    +
    +
  • +
+
+
+
+ +
+
+ + diff --git a/docs/Structs/ChatQuery/CodingKeys.html b/docs/Structs/ChatQuery/CodingKeys.html new file mode 100644 index 00000000..62e5f81e --- /dev/null +++ b/docs/Structs/ChatQuery/CodingKeys.html @@ -0,0 +1,776 @@ + + + + CodingKeys Enumeration Reference + + + + + + + + + + + + + +
+
+

Docs (100% documented)

+
+
+ +
+
+
+
+
+ +
+
+ +
+
+
+

CodingKeys

+
+
+ +
public enum CodingKeys : String, CodingKey
+ +
+
+ +
+
+
+
    +
  • +
    + + + + messages + +
    +
    +
    +
    +
    +
    + +
    +
    +

    Declaration

    +
    +

    Swift

    +
    case messages
    + +
    +
    +
    +
    +
  • +
  • +
    + + + + model + +
    +
    +
    +
    +
    +
    + +
    +
    +

    Declaration

    +
    +

    Swift

    +
    case model
    + +
    +
    +
    +
    +
  • +
  • +
    + + + + frequencyPenalty + +
    +
    +
    +
    +
    +
    + +
    +
    +

    Declaration

    +
    +

    Swift

    +
    case frequencyPenalty = "frequency_penalty"
    + +
    +
    +
    +
    +
  • +
  • +
    + + + + logitBias + +
    +
    +
    +
    +
    +
    + +
    +
    +

    Declaration

    +
    +

    Swift

    +
    case logitBias = "logit_bias"
    + +
    +
    +
    +
    +
  • +
  • +
    + + + + logprobs + +
    +
    +
    +
    +
    +
    + +
    +
    +

    Declaration

    +
    +

    Swift

    +
    case logprobs
    + +
    +
    +
    +
    +
  • +
  • +
    + + + + maxTokens + +
    +
    +
    +
    +
    +
    + +
    +
    +

    Declaration

    +
    +

    Swift

    +
    case maxTokens = "max_tokens"
    + +
    +
    +
    +
    +
  • +
  • +
    + + + + n + +
    +
    +
    +
    +
    +
    + +
    +
    +

    Declaration

    +
    +

    Swift

    +
    case n
    + +
    +
    +
    +
    +
  • +
  • +
    + + + + presencePenalty + +
    +
    +
    +
    +
    +
    + +
    +
    +

    Declaration

    +
    +

    Swift

    +
    case presencePenalty = "presence_penalty"
    + +
    +
    +
    +
    +
  • +
  • +
    + + + + responseFormat + +
    +
    +
    +
    +
    +
    + +
    +
    +

    Declaration

    +
    +

    Swift

    +
    case responseFormat = "response_format"
    + +
    +
    +
    +
    +
  • +
  • +
    + + + + seed + +
    +
    +
    +
    +
    +
    + +
    +
    +

    Declaration

    +
    +

    Swift

    +
    case seed
    + +
    +
    +
    +
    +
  • +
  • +
    + + + + stop + +
    +
    +
    +
    +
    +
    + +
    +
    +

    Declaration

    +
    +

    Swift

    +
    case stop
    + +
    +
    +
    +
    +
  • +
  • +
    + + + + temperature + +
    +
    +
    +
    +
    +
    + +
    +
    +

    Declaration

    +
    +

    Swift

    +
    case temperature
    + +
    +
    +
    +
    +
  • +
  • +
    + + + + toolChoice + +
    +
    +
    +
    +
    +
    + +
    +
    +

    Declaration

    +
    +

    Swift

    +
    case toolChoice = "tool_choice"
    + +
    +
    +
    +
    +
  • +
  • +
    + + + + tools + +
    +
    +
    +
    +
    +
    + +
    +
    +

    Declaration

    +
    +

    Swift

    +
    case tools
    + +
    +
    +
    +
    +
  • +
  • +
    + + + + topLogprobs + +
    +
    +
    +
    +
    +
    + +
    +
    +

    Declaration

    +
    +

    Swift

    +
    case topLogprobs = "top_logprobs"
    + +
    +
    +
    +
    +
  • +
  • +
    + + + + topP + +
    +
    +
    +
    +
    +
    + +
    +
    +

    Declaration

    +
    +

    Swift

    +
    case topP = "top_p"
    + +
    +
    +
    +
    +
  • +
  • +
    + + + + user + +
    +
    +
    +
    +
    +
    + +
    +
    +

    Declaration

    +
    +

    Swift

    +
    case user
    + +
    +
    +
    +
    +
  • +
  • +
    + + + + stream + +
    +
    +
    +
    +
    +
    + +
    +
    +

    Declaration

    +
    +

    Swift

    +
    case stream
    + +
    +
    +
    +
    +
  • +
+
+
+
+ +
+
+ + diff --git a/docs/Structs/ChatQuery/ResponseFormat.html b/docs/Structs/ChatQuery/ResponseFormat.html new file mode 100644 index 00000000..12b0450d --- /dev/null +++ b/docs/Structs/ChatQuery/ResponseFormat.html @@ -0,0 +1,386 @@ + + + + ResponseFormat Enumeration Reference + + + + + + + + + + + + + +
+
+

Docs (100% documented)

+
+
+ +
+
+
+
+
+ +
+
+ +
+
+
+

ResponseFormat

+
+
+ +
public enum ResponseFormat : String, Codable, Equatable
+ +
+
+ +
+
+
+
    +
  • +
    + + + + jsonObject + +
    +
    +
    +
    +
    +
    + +
    +
    +

    Declaration

    +
    +

    Swift

    +
    case jsonObject = "json_object"
    + +
    +
    +
    +
    +
  • +
  • +
    + + + + text + +
    +
    +
    +
    +
    +
    + +
    +
    +

    Declaration

    +
    +

    Swift

    +
    case text
    + +
    +
    +
    +
    +
  • +
  • +
    + + + + encode(to:) + +
    +
    +
    +
    +
    +
    + +
    +
    +

    Declaration

    +
    +

    Swift

    +
    public func encode(to encoder: Encoder) throws
    + +
    +
    +
    +
    +
  • +
+
+
+
+ +
+
+ + diff --git a/docs/Structs/ChatQuery/Stop.html b/docs/Structs/ChatQuery/Stop.html new file mode 100644 index 00000000..c03477e1 --- /dev/null +++ b/docs/Structs/ChatQuery/Stop.html @@ -0,0 +1,438 @@ + + + + Stop Enumeration Reference + + + + + + + + + + + + + +
+
+

Docs (100% documented)

+
+
+ +
+
+
+
+
+ +
+
+ +
+
+
+

Stop

+
+
+ +
public enum Stop : Codable, Equatable
+ +
+
+ +
+
+
+
    +
  • +
    + + + + string(_:) + +
    +
    +
    +
    +
    +
    + +
    +
    +

    Declaration

    +
    +

    Swift

    +
    case string(String)
    + +
    +
    +
    +
    +
  • +
  • +
    + + + + stringList(_:) + +
    +
    +
    +
    +
    +
    + +
    +
    +

    Declaration

    +
    +

    Swift

    +
    case stringList([String])
    + +
    +
    +
    +
    +
  • +
  • +
    + + + + encode(to:) + +
    +
    +
    +
    +
    +
    + +
    +
    +

    Declaration

    +
    +

    Swift

    +
    public func encode(to encoder: Encoder) throws
    + +
    +
    +
    +
    +
  • +
  • +
    + + + + init(string:) + +
    +
    +
    +
    +
    +
    + +
    +
    +

    Declaration

    +
    +

    Swift

    +
    public init(string: String)
    + +
    +
    +
    +
    +
  • +
  • +
    + + + + init(stringList:) + +
    +
    +
    +
    +
    +
    + +
    +
    +

    Declaration

    +
    +

    Swift

    +
    public init(stringList: [String])
    + +
    +
    +
    +
    +
  • +
+
+
+
+ +
+
+ + diff --git a/docs/Structs/ChatResult.html b/docs/Structs/ChatResult.html new file mode 100644 index 00000000..21f86f60 --- /dev/null +++ b/docs/Structs/ChatResult.html @@ -0,0 +1,611 @@ + + + + ChatResult Structure Reference + + + + + + + + + + + + + +
+
+

Docs (100% documented)

+
+
+ +
+
+
+
+
+ +
+
+ +
+
+
+

ChatResult

+
+
+ +
public struct ChatResult : Codable, Equatable
+ +
+
+

https://platform.openai.com/docs/api-reference/chat/object +Example Completion object print

+
{
+ "id": "chatcmpl-123456",
+ "object": "chat.completion",
+ "created": 1728933352,
+ "model": "gpt-4o-2024-08-06",
+ "choices": [
+   {
+     "index": 0,
+     "message": {
+       "role": "assistant",
+       "content": "Hi there! How can I assist you today?",
+       "refusal": null
+     },
+     "logprobs": null,
+     "finish_reason": "stop"
+   }
+ ],
+ "usage": {
+   "prompt_tokens": 19,
+   "completion_tokens": 10,
+   "total_tokens": 29,
+   "prompt_tokens_details": {
+     "cached_tokens": 0
+   },
+   "completion_tokens_details": {
+     "reasoning_tokens": 0
+   }
+ },
+ "system_fingerprint": "fp_6b68a8204b"
+}
+
+ +
+
+
+
    +
  • +
    + + + + Choice + +
    +
    +
    +
    +
    +
    +

    mimic the choices array in the chat completion object

    + + See more +
    +
    +

    Declaration

    +
    +

    Swift

    +
    public struct Choice : Codable, Equatable
    + +
    +
    +
    +
    +
  • +
  • +
    + + + + CompletionUsage + +
    +
    +
    +
    +
    +
    + + See more +
    +
    +

    Declaration

    +
    +

    Swift

    +
    public struct CompletionUsage : Codable, Equatable
    + +
    +
    +
    +
    +
  • +
  • +
    + + + + id + +
    +
    +
    +
    +
    +
    +

    A unique identifier for the chat completion.

    + +
    +
    +

    Declaration

    +
    +

    Swift

    +
    public let id: String
    + +
    +
    +
    +
    +
  • +
  • +
    + + + + object + +
    +
    +
    +
    +
    +
    +

    The object type, which is always chat.completion.

    + +
    +
    +

    Declaration

    +
    +

    Swift

    +
    public let object: String
    + +
    +
    +
    +
    +
  • +
  • +
    + + + + created + +
    +
    +
    +
    +
    +
    +

    The Unix timestamp (in seconds) of when the chat completion was created.

    + +
    +
    +

    Declaration

    +
    +

    Swift

    +
    public let created: TimeInterval
    + +
    +
    +
    +
    +
  • +
  • +
    + + + + model + +
    +
    +
    +
    +
    +
    +

    The model used for the chat completion.

    + +
    +
    +

    Declaration

    +
    +

    Swift

    +
    public let model: String
    + +
    +
    +
    +
    +
  • +
  • +
    + + + + choices + +
    +
    +
    +
    +
    +
    +

    A list of chat completion choices. Can be more than one if n is greater than 1.

    + +
    +
    +

    Declaration

    +
    +

    Swift

    +
    public let choices: [Choice]
    + +
    +
    +
    +
    +
  • +
  • +
    + + + + usage + +
    +
    +
    +
    +
    +
    +

    Usage statistics for the completion request.

    + +
    +
    +

    Declaration

    +
    +

    Swift

    +
    public let usage: `Self`.CompletionUsage?
    + +
    +
    +
    +
    +
  • +
  • +
    + + + + systemFingerprint + +
    +
    +
    +
    +
    +
    +

    This fingerprint represents the backend configuration that the model runs with. +Can be used in conjunction with the seed request parameter to understand when backend changes have been made that might impact determinism.

    + +
    +
    +

    Declaration

    +
    +

    Swift

    +
    public let systemFingerprint: String?
    + +
    +
    +
    +
    +
  • +
  • +
    + + + + CodingKeys + +
    +
    +
    +
    +
    +
    + + See more +
    +
    +

    Declaration

    +
    +

    Swift

    +
    public enum CodingKeys : String, CodingKey
    + +
    +
    +
    +
    +
  • +
+
+
+
+ +
+
+ + diff --git a/docs/Structs/ChatResult/Choice.html b/docs/Structs/ChatResult/Choice.html new file mode 100644 index 00000000..11c1df48 --- /dev/null +++ b/docs/Structs/ChatResult/Choice.html @@ -0,0 +1,524 @@ + + + + Choice Structure Reference + + + + + + + + + + + + + +
+
+

Docs (100% documented)

+
+
+ +
+
+
+
+
+ +
+
+ +
+
+
+

Choice

+
+
+ +
public struct Choice : Codable, Equatable
+ +
+
+

mimic the choices array in the chat completion object

+ +
+
+
+
    +
  • +
    + + + + ChatCompletionMessage + +
    +
    +
    +
    +
    +
    + +
    +
    +

    Declaration

    +
    +

    Swift

    +
    public typealias ChatCompletionMessage = ChatQuery.ChatCompletionMessageParam
    + +
    +
    +
    +
    +
  • +
  • +
    + + + + index + +
    +
    +
    +
    +
    +
    +

    The index of the choice in the list of choices.

    + +
    +
    +

    Declaration

    +
    +

    Swift

    +
    public let index: Int
    + +
    +
    +
    +
    +
  • +
  • +
    + + + + logprobs + +
    +
    +
    +
    +
    +
    +

    Log probability information for the choice.

    + +
    +
    +

    Declaration

    +
    +

    Swift

    +
    public let logprobs: `Self`.ChoiceLogprobs?
    + +
    +
    +
    +
    +
  • +
  • +
    + + + + message + +
    +
    +
    +
    +
    +
    +

    A chat completion message generated by the model.

    + +
    +
    +

    Declaration

    +
    +

    Swift

    +
    public let message: `Self`.ChatCompletionMessage
    + +
    +
    +
    +
    +
  • +
  • +
    + + + + finishReason + +
    +
    +
    +
    +
    +
    +

    The reason the model stopped generating tokens. This will be stop if the model hit a natural stop point or a provided stop sequence, length if the maximum number of tokens specified in the request was reached, content_filter if content was omitted due to a flag from our content filters, tool_calls if the model called a tool, or function_call (deprecated) if the model called a function.

    + +
    +
    +

    Declaration

    +
    +

    Swift

    +
    public let finishReason: String?
    + +
    +
    +
    +
    +
  • +
  • +
    + + + + ChoiceLogprobs + +
    +
    +
    +
    +
    +
    + + See more +
    +
    +

    Declaration

    +
    +

    Swift

    +
    public struct ChoiceLogprobs : Codable, Equatable
    + +
    +
    +
    +
    +
  • +
  • +
    + + + + CodingKeys + +
    +
    +
    +
    +
    +
    + + See more +
    +
    +

    Declaration

    +
    +

    Swift

    +
    public enum CodingKeys : String, CodingKey
    + +
    +
    +
    +
    +
  • +
  • +
    + + + + FinishReason + +
    +
    +
    +
    +
    +
    + + See more +
    +
    +

    Declaration

    +
    +

    Swift

    +
    public enum FinishReason : String, Codable, Equatable
    + +
    +
    +
    +
    +
  • +
+
+
+
+ +
+
+ + diff --git a/docs/Structs/ChatResult/Choice/ChoiceLogprobs.html b/docs/Structs/ChatResult/Choice/ChoiceLogprobs.html new file mode 100644 index 00000000..dfb22cde --- /dev/null +++ b/docs/Structs/ChatResult/Choice/ChoiceLogprobs.html @@ -0,0 +1,363 @@ + + + + ChoiceLogprobs Structure Reference + + + + + + + + + + + + + +
+
+

Docs (100% documented)

+
+
+ +
+
+
+
+
+ +
+
+ +
+
+
+

ChoiceLogprobs

+
+
+ +
public struct ChoiceLogprobs : Codable, Equatable
+ +
+
+ +
+
+
+ +
+
+
+ +
+
+ + diff --git a/docs/Structs/ChatResult/Choice/ChoiceLogprobs/ChatCompletionTokenLogprob.html b/docs/Structs/ChatResult/Choice/ChoiceLogprobs/ChatCompletionTokenLogprob.html new file mode 100644 index 00000000..4e8d67fc --- /dev/null +++ b/docs/Structs/ChatResult/Choice/ChoiceLogprobs/ChatCompletionTokenLogprob.html @@ -0,0 +1,478 @@ + + + + ChatCompletionTokenLogprob Structure Reference + + + + + + + + + + + + + +
+
+

Docs (100% documented)

+
+
+ +
+
+
+
+
+ +
+
+ +
+
+
+

ChatCompletionTokenLogprob

+
+
+ +
public struct ChatCompletionTokenLogprob : Codable, Equatable
+ +
+
+ +
+
+
+
    +
  • +
    + + + + token + +
    +
    +
    +
    +
    +
    +

    The token.

    + +
    +
    +

    Declaration

    +
    +

    Swift

    +
    public let token: String
    + +
    +
    +
    +
    +
  • +
  • +
    + + + + bytes + +
    +
    +
    +
    +
    +
    +

    A list of integers representing the UTF-8 bytes representation of the token. +Useful in instances where characters are represented by multiple tokens and +their byte representations must be combined to generate the correct text +representation. Can be null if there is no bytes representation for the token.

    + +
    +
    +

    Declaration

    +
    +

    Swift

    +
    public let bytes: [Int]?
    + +
    +
    +
    +
    +
  • +
  • +
    + + + + logprob + +
    +
    +
    +
    +
    +
    +

    The log probability of this token.

    + +
    +
    +

    Declaration

    +
    +

    Swift

    +
    public let logprob: Double
    + +
    +
    +
    +
    +
  • +
  • +
    + + + + topLogprobs + +
    +
    +
    +
    +
    +
    +

    List of the most likely tokens and their log probability, at this token position. +In rare cases, there may be fewer than the number of requested top_logprobs returned.

    + +
    +
    +

    Declaration

    +
    +

    Swift

    +
    public let topLogprobs: [TopLogprob]
    + +
    +
    +
    +
    +
  • +
  • +
    + + + + TopLogprob + +
    +
    +
    +
    +
    +
    + + See more +
    +
    +

    Declaration

    +
    +

    Swift

    +
    public struct TopLogprob : Codable, Equatable
    + +
    +
    +
    +
    +
  • +
  • +
    + + + + CodingKeys + +
    +
    +
    +
    +
    +
    + + See more +
    +
    +

    Declaration

    +
    +

    Swift

    +
    public enum CodingKeys : String, CodingKey
    + +
    +
    +
    +
    +
  • +
+
+
+
+ +
+
+ + diff --git a/docs/Structs/ChatResult/Choice/ChoiceLogprobs/ChatCompletionTokenLogprob/CodingKeys.html b/docs/Structs/ChatResult/Choice/ChoiceLogprobs/ChatCompletionTokenLogprob/CodingKeys.html new file mode 100644 index 00000000..c15be5f8 --- /dev/null +++ b/docs/Structs/ChatResult/Choice/ChoiceLogprobs/ChatCompletionTokenLogprob/CodingKeys.html @@ -0,0 +1,418 @@ + + + + CodingKeys Enumeration Reference + + + + + + + + + + + + + +
+
+

Docs (100% documented)

+
+
+ +
+
+
+
+
+ +
+
+ +
+
+
+

CodingKeys

+
+
+ +
public enum CodingKeys : String, CodingKey
+ +
+
+ +
+
+
+
    +
  • +
    + + + + token + +
    +
    +
    +
    +
    +
    + +
    +
    +

    Declaration

    +
    +

    Swift

    +
    case token
    + +
    +
    +
    +
    +
  • +
  • +
    + + + + bytes + +
    +
    +
    +
    +
    +
    + +
    +
    +

    Declaration

    +
    +

    Swift

    +
    case bytes
    + +
    +
    +
    +
    +
  • +
  • +
    + + + + logprob + +
    +
    +
    +
    +
    +
    + +
    +
    +

    Declaration

    +
    +

    Swift

    +
    case logprob
    + +
    +
    +
    +
    +
  • +
  • +
    + + + + topLogprobs + +
    +
    +
    +
    +
    +
    + +
    +
    +

    Declaration

    +
    +

    Swift

    +
    case topLogprobs = "top_logprobs"
    + +
    +
    +
    +
    +
  • +
+
+
+
+ +
+
+ + diff --git a/docs/Structs/ChatResult/Choice/ChoiceLogprobs/ChatCompletionTokenLogprob/TopLogprob.html b/docs/Structs/ChatResult/Choice/ChoiceLogprobs/ChatCompletionTokenLogprob/TopLogprob.html new file mode 100644 index 00000000..dbcf56c9 --- /dev/null +++ b/docs/Structs/ChatResult/Choice/ChoiceLogprobs/ChatCompletionTokenLogprob/TopLogprob.html @@ -0,0 +1,396 @@ + + + + TopLogprob Structure Reference + + + + + + + + + + + + + +
+
+

Docs (100% documented)

+
+
+ +
+
+
+
+
+ +
+
+ +
+
+
+

TopLogprob

+
+
+ +
public struct TopLogprob : Codable, Equatable
+ +
+
+ +
+
+
+
    +
  • +
    + + + + token + +
    +
    +
    +
    +
    +
    +

    The token.

    + +
    +
    +

    Declaration

    +
    +

    Swift

    +
    public let token: String
    + +
    +
    +
    +
    +
  • +
  • +
    + + + + bytes + +
    +
    +
    +
    +
    +
    +

    A list of integers representing the UTF-8 bytes representation of the token. +Useful in instances where characters are represented by multiple tokens and their byte representations must be combined to generate the correct text representation. Can be null if there is no bytes representation for the token.

    + +
    +
    +

    Declaration

    +
    +

    Swift

    +
    public let bytes: [Int]?
    + +
    +
    +
    +
    +
  • +
  • +
    + + + + logprob + +
    +
    +
    +
    +
    +
    +

    The log probability of this token.

    + +
    +
    +

    Declaration

    +
    +

    Swift

    +
    public let logprob: Double
    + +
    +
    +
    +
    +
  • +
+
+
+
+ +
+
+ + diff --git a/docs/Structs/ChatResult/Choice/CodingKeys.html b/docs/Structs/ChatResult/Choice/CodingKeys.html new file mode 100644 index 00000000..c74c9053 --- /dev/null +++ b/docs/Structs/ChatResult/Choice/CodingKeys.html @@ -0,0 +1,414 @@ + + + + CodingKeys Enumeration Reference + + + + + + + + + + + + + +
+
+

Docs (100% documented)

+
+
+ +
+
+
+
+
+ +
+
+ +
+
+
+

CodingKeys

+
+
+ +
public enum CodingKeys : String, CodingKey
+ +
+
+ +
+
+
+
    +
  • +
    + + + + index + +
    +
    +
    +
    +
    +
    + +
    +
    +

    Declaration

    +
    +

    Swift

    +
    case index
    + +
    +
    +
    +
    +
  • +
  • +
    + + + + logprobs + +
    +
    +
    +
    +
    +
    + +
    +
    +

    Declaration

    +
    +

    Swift

    +
    case logprobs
    + +
    +
    +
    +
    +
  • +
  • +
    + + + + message + +
    +
    +
    +
    +
    +
    + +
    +
    +

    Declaration

    +
    +

    Swift

    +
    case message
    + +
    +
    +
    +
    +
  • +
  • +
    + + + + finishReason + +
    +
    +
    +
    +
    +
    + +
    +
    +

    Declaration

    +
    +

    Swift

    +
    case finishReason = "finish_reason"
    + +
    +
    +
    +
    +
  • +
+
+
+
+ +
+
+ + diff --git a/docs/Structs/ChatResult/Choice/FinishReason.html b/docs/Structs/ChatResult/Choice/FinishReason.html new file mode 100644 index 00000000..9a2ee189 --- /dev/null +++ b/docs/Structs/ChatResult/Choice/FinishReason.html @@ -0,0 +1,440 @@ + + + + FinishReason Enumeration Reference + + + + + + + + + + + + + +
+
+

Docs (100% documented)

+
+
+ +
+
+
+
+
+ +
+
+ +
+
+
+

FinishReason

+
+
+ +
public enum FinishReason : String, Codable, Equatable
+ +
+
+ +
+
+
+
    +
  • +
    + + + + stop + +
    +
    +
    +
    +
    +
    + +
    +
    +

    Declaration

    +
    +

    Swift

    +
    case stop
    + +
    +
    +
    +
    +
  • +
  • +
    + + + + length + +
    +
    +
    +
    +
    +
    + +
    +
    +

    Declaration

    +
    +

    Swift

    +
    case length
    + +
    +
    +
    +
    +
  • +
  • +
    + + + + toolCalls + +
    +
    +
    +
    +
    +
    + +
    +
    +

    Declaration

    +
    +

    Swift

    +
    case toolCalls = "tool_calls"
    + +
    +
    +
    +
    +
  • +
  • +
    + + + + contentFilter + +
    +
    +
    +
    +
    +
    + +
    +
    +

    Declaration

    +
    +

    Swift

    +
    case contentFilter = "content_filter"
    + +
    +
    +
    +
    +
  • +
  • +
    + + + + functionCall + +
    +
    +
    +
    +
    +
    + +
    +
    +

    Declaration

    +
    +

    Swift

    +
    case functionCall = "function_call"
    + +
    +
    +
    +
    +
  • +
+
+
+
+ +
+
+ + diff --git a/docs/Structs/ChatResult/CodingKeys.html b/docs/Structs/ChatResult/CodingKeys.html new file mode 100644 index 00000000..74f6690a --- /dev/null +++ b/docs/Structs/ChatResult/CodingKeys.html @@ -0,0 +1,490 @@ + + + + CodingKeys Enumeration Reference + + + + + + + + + + + + + +
+
+

Docs (100% documented)

+
+
+ +
+
+
+
+
+ +
+
+ +
+
+
+

CodingKeys

+
+
+ +
public enum CodingKeys : String, CodingKey
+ +
+
+ +
+
+
+
    +
  • +
    + + + + id + +
    +
    +
    +
    +
    +
    + +
    +
    +

    Declaration

    +
    +

    Swift

    +
    case id
    + +
    +
    +
    +
    +
  • +
  • +
    + + + + object + +
    +
    +
    +
    +
    +
    + +
    +
    +

    Declaration

    +
    +

    Swift

    +
    case object
    + +
    +
    +
    +
    +
  • +
  • +
    + + + + created + +
    +
    +
    +
    +
    +
    + +
    +
    +

    Declaration

    +
    +

    Swift

    +
    case created
    + +
    +
    +
    +
    +
  • +
  • +
    + + + + model + +
    +
    +
    +
    +
    +
    + +
    +
    +

    Declaration

    +
    +

    Swift

    +
    case model
    + +
    +
    +
    +
    +
  • +
  • +
    + + + + choices + +
    +
    +
    +
    +
    +
    + +
    +
    +

    Declaration

    +
    +

    Swift

    +
    case choices
    + +
    +
    +
    +
    +
  • +
  • +
    + + + + usage + +
    +
    +
    +
    +
    +
    + +
    +
    +

    Declaration

    +
    +

    Swift

    +
    case usage
    + +
    +
    +
    +
    +
  • +
  • +
    + + + + systemFingerprint + +
    +
    +
    +
    +
    +
    + +
    +
    +

    Declaration

    +
    +

    Swift

    +
    case systemFingerprint = "system_fingerprint"
    + +
    +
    +
    +
    +
  • +
+
+
+
+ +
+
+ + diff --git a/docs/Structs/ChatResult/CompletionUsage.html b/docs/Structs/ChatResult/CompletionUsage.html new file mode 100644 index 00000000..d670ccde --- /dev/null +++ b/docs/Structs/ChatResult/CompletionUsage.html @@ -0,0 +1,389 @@ + + + + CompletionUsage Structure Reference + + + + + + + + + + + + + +
+
+

Docs (100% documented)

+
+
+ +
+
+
+
+
+ +
+
+ +
+
+
+

CompletionUsage

+
+
+ +
public struct CompletionUsage : Codable, Equatable
+ +
+
+ +
+
+
+
    +
  • +
    + + + + completionTokens + +
    +
    +
    +
    +
    +
    +

    Number of tokens in the generated completion.

    + +
    +
    +

    Declaration

    +
    +

    Swift

    +
    public let completionTokens: Int
    + +
    +
    +
    +
    +
  • +
  • +
    + + + + promptTokens + +
    +
    +
    +
    +
    +
    +

    Number of tokens in the prompt.

    + +
    +
    +

    Declaration

    +
    +

    Swift

    +
    public let promptTokens: Int
    + +
    +
    +
    +
    +
  • +
  • +
    + + + + totalTokens + +
    +
    +
    +
    +
    +
    +

    Total number of tokens used in the request (prompt + completion).

    + +
    +
    +

    Declaration

    +
    +

    Swift

    +
    public let totalTokens: Int
    + +
    +
    +
    +
    +
  • +
+
+
+
+ +
+
+ + diff --git a/docs/Structs/ChatStreamResult.html b/docs/Structs/ChatStreamResult.html new file mode 100644 index 00000000..ea9c41c8 --- /dev/null +++ b/docs/Structs/ChatStreamResult.html @@ -0,0 +1,524 @@ + + + + ChatStreamResult Structure Reference + + + + + + + + + + + + + +
+
+

Docs (100% documented)

+
+
+ +
+
+
+
+
+ +
+
+ +
+
+
+

ChatStreamResult

+
+
+ +
public struct ChatStreamResult : Codable, Equatable
+ +
+
+ +
+
+
+
    +
  • +
    + + + + Choice + +
    +
    +
    +
    +
    +
    + + See more +
    +
    +

    Declaration

    +
    +

    Swift

    +
    public struct Choice : Codable, Equatable
    + +
    +
    +
    +
    +
  • +
  • +
    + + + + id + +
    +
    +
    +
    +
    +
    +

    A unique identifier for the chat completion. Each chunk has the same ID.

    + +
    +
    +

    Declaration

    +
    +

    Swift

    +
    public let id: String
    + +
    +
    +
    +
    +
  • +
  • +
    + + + + object + +
    +
    +
    +
    +
    +
    +

    The object type, which is always chat.completion.chunk.

    + +
    +
    +

    Declaration

    +
    +

    Swift

    +
    public let object: String
    + +
    +
    +
    +
    +
  • +
  • +
    + + + + created + +
    +
    +
    +
    +
    +
    +

    The Unix timestamp (in seconds) of when the chat completion was created. +Each chunk has the same timestamp.

    + +
    +
    +

    Declaration

    +
    +

    Swift

    +
    public let created: TimeInterval
    + +
    +
    +
    +
    +
  • +
  • +
    + + + + model + +
    +
    +
    +
    +
    +
    +

    The model to generate the completion.

    + +
    +
    +

    Declaration

    +
    +

    Swift

    +
    public let model: String
    + +
    +
    +
    +
    +
  • +
  • +
    + + + + choices + +
    +
    +
    +
    +
    +
    +

    A list of chat completion choices. +Can be more than one if n is greater than 1.

    + +
    +
    +

    Declaration

    +
    +

    Swift

    +
    public let choices: [Choice]
    + +
    +
    +
    +
    +
  • +
  • +
    + + + + systemFingerprint + +
    +
    +
    +
    +
    +
    +

    This fingerprint represents the backend configuration that the model runs with. Can be used in conjunction with the seed request parameter to understand when backend changes have been made that might impact determinism.

    + +
    +
    +

    Declaration

    +
    +

    Swift

    +
    public let systemFingerprint: String?
    + +
    +
    +
    +
    +
  • +
  • +
    + + + + CodingKeys + +
    +
    +
    +
    +
    +
    + + See more +
    +
    +

    Declaration

    +
    +

    Swift

    +
    public enum CodingKeys : String, CodingKey
    + +
    +
    +
    +
    +
  • +
+
+
+
+ +
+
+ + diff --git a/docs/Structs/ChatStreamResult/Choice.html b/docs/Structs/ChatStreamResult/Choice.html new file mode 100644 index 00000000..07ec3ba2 --- /dev/null +++ b/docs/Structs/ChatStreamResult/Choice.html @@ -0,0 +1,524 @@ + + + + Choice Structure Reference + + + + + + + + + + + + + +
+
+

Docs (100% documented)

+
+
+ +
+
+
+
+
+ +
+
+ +
+
+
+

Choice

+
+
+ +
public struct Choice : Codable, Equatable
+ +
+
+ +
+
+
+
    +
  • +
    + + + + FinishReason + +
    +
    +
    +
    +
    +
    + +
    +
    +

    Declaration

    +
    +

    Swift

    +
    public typealias FinishReason = ChatResult.Choice.FinishReason
    + +
    +
    +
    +
    +
  • +
  • +
    + + + + ChoiceDelta + +
    +
    +
    +
    +
    +
    + + See more +
    +
    +

    Declaration

    +
    +

    Swift

    +
    public struct ChoiceDelta : Codable, Equatable
    + +
    +
    +
    +
    +
  • +
  • +
    + + + + index + +
    +
    +
    +
    +
    +
    +

    The index of the choice in the list of choices.

    + +
    +
    +

    Declaration

    +
    +

    Swift

    +
    public let index: Int
    + +
    +
    +
    +
    +
  • +
  • +
    + + + + delta + +
    +
    +
    +
    +
    +
    +

    A chat completion delta generated by streamed model responses.

    + +
    +
    +

    Declaration

    +
    +

    Swift

    +
    public let delta: `Self`.ChoiceDelta
    + +
    +
    +
    +
    +
  • +
  • +
    + + + + finishReason + +
    +
    +
    +
    +
    +
    +

    The reason the model stopped generating tokens. +This will be stop if the model hit a natural stop point or a provided stop sequence, length if the maximum number of tokens specified in the request was reached, content_filter if content was omitted due to a flag from our content filters, tool_calls if the model called a tool, or function_call (deprecated) if the model called a function.

    + +
    +
    +

    Declaration

    +
    +

    Swift

    +
    public let finishReason: FinishReason?
    + +
    +
    +
    +
    +
  • +
  • +
    + + + + logprobs + +
    +
    +
    +
    +
    +
    +

    Log probability information for the choice.

    + +
    +
    +

    Declaration

    +
    +

    Swift

    +
    public let logprobs: `Self`.ChoiceLogprobs?
    + +
    +
    +
    +
    +
  • +
  • +
    + + + + ChoiceLogprobs + +
    +
    +
    +
    +
    +
    + + See more +
    +
    +

    Declaration

    +
    +

    Swift

    +
    public struct ChoiceLogprobs : Codable, Equatable
    + +
    +
    +
    +
    +
  • +
  • +
    + + + + CodingKeys + +
    +
    +
    +
    +
    +
    + + See more +
    +
    +

    Declaration

    +
    +

    Swift

    +
    public enum CodingKeys : String, CodingKey
    + +
    +
    +
    +
    +
  • +
+
+
+
+ +
+
+ + diff --git a/docs/Structs/ChatStreamResult/Choice/ChoiceDelta.html b/docs/Structs/ChatStreamResult/Choice/ChoiceDelta.html new file mode 100644 index 00000000..1d2aaf57 --- /dev/null +++ b/docs/Structs/ChatStreamResult/Choice/ChoiceDelta.html @@ -0,0 +1,470 @@ + + + + ChoiceDelta Structure Reference + + + + + + + + + + + + + +
+
+

Docs (100% documented)

+
+
+ +
+
+
+
+
+ +
+
+ +
+
+
+

ChoiceDelta

+
+
+ +
public struct ChoiceDelta : Codable, Equatable
+ +
+
+ +
+
+
+
    +
  • +
    + + + + Role + +
    +
    +
    +
    +
    +
    + +
    +
    +

    Declaration

    +
    +

    Swift

    +
    public typealias Role = ChatQuery.ChatCompletionMessageParam.Role
    + +
    +
    +
    +
    +
  • +
  • +
    + + + + content + +
    +
    +
    +
    +
    +
    +

    The contents of the chunk message.

    + +
    +
    +

    Declaration

    +
    +

    Swift

    +
    public let content: String?
    + +
    +
    +
    +
    +
  • +
  • +
    + + + + role + +
    +
    +
    +
    +
    +
    +

    The role of the author of this message.

    + +
    +
    +

    Declaration

    +
    +

    Swift

    +
    public let role: `Self`.Role?
    + +
    +
    +
    +
    +
  • +
  • +
    + + + + toolCalls + +
    +
    +
    +
    +
    +
    + +
    +
    +

    Declaration

    +
    +

    Swift

    +
    public let toolCalls: [`Self`.ChoiceDeltaToolCall]?
    + +
    +
    +
    +
    +
  • +
  • +
    + + + + ChoiceDeltaToolCall + +
    +
    +
    +
    +
    +
    + + See more +
    +
    +

    Declaration

    +
    +

    Swift

    +
    public struct ChoiceDeltaToolCall : Codable, Equatable
    + +
    +
    +
    +
    +
  • +
  • +
    + + + + CodingKeys + +
    +
    +
    +
    +
    +
    + + See more +
    +
    +

    Declaration

    +
    +

    Swift

    +
    public enum CodingKeys : String, CodingKey
    + +
    +
    +
    +
    +
  • +
+
+
+
+ +
+
+ + diff --git a/docs/Structs/ChatStreamResult/Choice/ChoiceDelta/ChoiceDeltaToolCall.html b/docs/Structs/ChatStreamResult/Choice/ChoiceDelta/ChoiceDeltaToolCall.html new file mode 100644 index 00000000..80421b18 --- /dev/null +++ b/docs/Structs/ChatStreamResult/Choice/ChoiceDelta/ChoiceDeltaToolCall.html @@ -0,0 +1,476 @@ + + + + ChoiceDeltaToolCall Structure Reference + + + + + + + + + + + + + +
+
+

Docs (100% documented)

+
+
+ +
+
+
+
+
+ +
+
+ +
+
+
+

ChoiceDeltaToolCall

+
+
+ +
public struct ChoiceDeltaToolCall : Codable, Equatable
+ +
+
+ +
+
+
+
    +
  • +
    + + + + index + +
    +
    +
    +
    +
    +
    + +
    +
    +

    Declaration

    +
    +

    Swift

    +
    public let index: Int
    + +
    +
    +
    +
    +
  • +
  • +
    + + + + id + +
    +
    +
    +
    +
    +
    +

    The ID of the tool call.

    + +
    +
    +

    Declaration

    +
    +

    Swift

    +
    public let id: String?
    + +
    +
    +
    +
    +
  • +
  • +
    + + + + function + +
    +
    +
    +
    +
    +
    +

    The function that the model called.

    + +
    +
    +

    Declaration

    +
    +

    Swift

    +
    public let function: `Self`.ChoiceDeltaToolCallFunction?
    + +
    +
    +
    +
    +
  • +
  • +
    + + + + type + +
    +
    +
    +
    +
    +
    +

    The type of the tool. Currently, only function is supported.

    + +
    +
    +

    Declaration

    +
    +

    Swift

    +
    public let type: String?
    + +
    +
    +
    +
    +
  • +
  • + +
    +
    +
    +
    +
    + +
    +
    +

    Declaration

    +
    +

    Swift

    +
    public init(
    +    index: Int,
    +    id: String? = nil,
    +    function: Self.ChoiceDeltaToolCallFunction? = nil
    +)
    + +
    +
    +
    +
    +
  • +
  • + +
    +
    +
    +
    +
    + + See more +
    +
    +

    Declaration

    +
    +

    Swift

    +
    public struct ChoiceDeltaToolCallFunction : Codable, Equatable
    + +
    +
    +
    +
    +
  • +
+
+
+
+ +
+
+ + diff --git a/docs/Structs/ChatStreamResult/Choice/ChoiceDelta/ChoiceDeltaToolCall/ChoiceDeltaToolCallFunction.html b/docs/Structs/ChatStreamResult/Choice/ChoiceDelta/ChoiceDeltaToolCall/ChoiceDeltaToolCallFunction.html new file mode 100644 index 00000000..7859e501 --- /dev/null +++ b/docs/Structs/ChatStreamResult/Choice/ChoiceDelta/ChoiceDeltaToolCall/ChoiceDeltaToolCallFunction.html @@ -0,0 +1,397 @@ + + + + ChoiceDeltaToolCallFunction Structure Reference + + + + + + + + + + + + + +
+
+

Docs (100% documented)

+
+
+ +
+
+
+
+
+ +
+
+ +
+
+
+

ChoiceDeltaToolCallFunction

+
+
+ +
public struct ChoiceDeltaToolCallFunction : Codable, Equatable
+ +
+
+ +
+
+
+
    +
  • +
    + + + + arguments + +
    +
    +
    +
    +
    +
    +

    The arguments to call the function with, as generated by the model in JSON format. Note that the model does not always generate valid JSON, and may hallucinate parameters not defined by your function schema. Validate the arguments in your code before calling your function.

    + +
    +
    +

    Declaration

    +
    +

    Swift

    +
    public let arguments: String?
    + +
    +
    +
    +
    +
  • +
  • +
    + + + + name + +
    +
    +
    +
    +
    +
    +

    The name of the function to call.

    + +
    +
    +

    Declaration

    +
    +

    Swift

    +
    public let name: String?
    + +
    +
    +
    +
    +
  • +
  • +
    + + + + init(arguments:name:) + +
    +
    +
    +
    +
    +
    + +
    +
    +

    Declaration

    +
    +

    Swift

    +
    public init(
    +    arguments: String? = nil,
    +    name: String? = nil
    +)
    + +
    +
    +
    +
    +
  • +
+
+
+
+ +
+
+ + diff --git a/docs/Structs/ChatStreamResult/Choice/ChoiceDelta/CodingKeys.html b/docs/Structs/ChatStreamResult/Choice/ChoiceDelta/CodingKeys.html new file mode 100644 index 00000000..b066c3d9 --- /dev/null +++ b/docs/Structs/ChatStreamResult/Choice/ChoiceDelta/CodingKeys.html @@ -0,0 +1,390 @@ + + + + CodingKeys Enumeration Reference + + + + + + + + + + + + + +
+
+

Docs (100% documented)

+
+
+ +
+
+
+
+
+ +
+
+ +
+
+
+

CodingKeys

+
+
+ +
public enum CodingKeys : String, CodingKey
+ +
+
+ +
+
+
+
    +
  • +
    + + + + content + +
    +
    +
    +
    +
    +
    + +
    +
    +

    Declaration

    +
    +

    Swift

    +
    case content
    + +
    +
    +
    +
    +
  • +
  • +
    + + + + role + +
    +
    +
    +
    +
    +
    + +
    +
    +

    Declaration

    +
    +

    Swift

    +
    case role
    + +
    +
    +
    +
    +
  • +
  • +
    + + + + toolCalls + +
    +
    +
    +
    +
    +
    + +
    +
    +

    Declaration

    +
    +

    Swift

    +
    case toolCalls = "tool_calls"
    + +
    +
    +
    +
    +
  • +
+
+
+
+ +
+
+ + diff --git a/docs/Structs/ChatStreamResult/Choice/ChoiceLogprobs.html b/docs/Structs/ChatStreamResult/Choice/ChoiceLogprobs.html new file mode 100644 index 00000000..6d3adc29 --- /dev/null +++ b/docs/Structs/ChatStreamResult/Choice/ChoiceLogprobs.html @@ -0,0 +1,364 @@ + + + + ChoiceLogprobs Structure Reference + + + + + + + + + + + + + +
+
+

Docs (100% documented)

+
+
+ +
+
+
+
+
+ +
+
+ +
+
+
+

ChoiceLogprobs

+
+
+ +
public struct ChoiceLogprobs : Codable, Equatable
+ +
+
+ +
+
+
+
    +
  • +
    + + + + content + +
    +
    +
    +
    +
    +
    +

    A list of message content tokens with log probability information.

    + +
    +
    +

    Declaration

    +
    +

    Swift

    +
    public let content: [`Self`.ChatCompletionTokenLogprob]?
    + +
    +
    +
    +
    +
  • +
  • + +
    +
    +
    +
    +
    + + See more +
    +
    +

    Declaration

    +
    +

    Swift

    +
    public struct ChatCompletionTokenLogprob : Codable, Equatable
    + +
    +
    +
    +
    +
  • +
+
+
+
+ +
+
+ + diff --git a/docs/Structs/ChatStreamResult/Choice/ChoiceLogprobs/ChatCompletionTokenLogprob.html b/docs/Structs/ChatStreamResult/Choice/ChoiceLogprobs/ChatCompletionTokenLogprob.html new file mode 100644 index 00000000..87b531b2 --- /dev/null +++ b/docs/Structs/ChatStreamResult/Choice/ChoiceLogprobs/ChatCompletionTokenLogprob.html @@ -0,0 +1,474 @@ + + + + ChatCompletionTokenLogprob Structure Reference + + + + + + + + + + + + + +
+
+

Docs (100% documented)

+
+
+ +
+
+
+
+
+ +
+
+ +
+
+
+

ChatCompletionTokenLogprob

+
+
+ +
public struct ChatCompletionTokenLogprob : Codable, Equatable
+ +
+
+ +
+
+
+
    +
  • +
    + + + + token + +
    +
    +
    +
    +
    +
    +

    The token.

    + +
    +
    +

    Declaration

    +
    +

    Swift

    +
    public let token: String
    + +
    +
    +
    +
    +
  • +
  • +
    + + + + bytes + +
    +
    +
    +
    +
    +
    +

    A list of integers representing the UTF-8 bytes representation of the token. Useful in instances where characters are represented by multiple tokens and their byte representations must be combined to generate the correct text representation. Can be null if there is no bytes representation for the token.

    + +
    +
    +

    Declaration

    +
    +

    Swift

    +
    public let bytes: [Int]?
    + +
    +
    +
    +
    +
  • +
  • +
    + + + + logprob + +
    +
    +
    +
    +
    +
    +

    The log probability of this token.

    + +
    +
    +

    Declaration

    +
    +

    Swift

    +
    public let logprob: Double
    + +
    +
    +
    +
    +
  • +
  • +
    + + + + topLogprobs + +
    +
    +
    +
    +
    +
    +

    List of the most likely tokens and their log probability, at this token position. In rare cases, there may be fewer than the number of requested top_logprobs returned.

    + +
    +
    +

    Declaration

    +
    +

    Swift

    +
    public let topLogprobs: [`Self`.TopLogprob]?
    + +
    +
    +
    +
    +
  • +
  • +
    + + + + TopLogprob + +
    +
    +
    +
    +
    +
    + + See more +
    +
    +

    Declaration

    +
    +

    Swift

    +
    public struct TopLogprob : Codable, Equatable
    + +
    +
    +
    +
    +
  • +
  • +
    + + + + CodingKeys + +
    +
    +
    +
    +
    +
    + + See more +
    +
    +

    Declaration

    +
    +

    Swift

    +
    public enum CodingKeys : String, CodingKey
    + +
    +
    +
    +
    +
  • +
+
+
+
+ +
+
+ + diff --git a/docs/Structs/ChatStreamResult/Choice/ChoiceLogprobs/ChatCompletionTokenLogprob/CodingKeys.html b/docs/Structs/ChatStreamResult/Choice/ChoiceLogprobs/ChatCompletionTokenLogprob/CodingKeys.html new file mode 100644 index 00000000..8cd920fa --- /dev/null +++ b/docs/Structs/ChatStreamResult/Choice/ChoiceLogprobs/ChatCompletionTokenLogprob/CodingKeys.html @@ -0,0 +1,418 @@ + + + + CodingKeys Enumeration Reference + + + + + + + + + + + + + +
+
+

Docs (100% documented)

+
+
+ +
+
+
+
+
+ +
+
+ +
+
+
+

CodingKeys

+
+
+ +
public enum CodingKeys : String, CodingKey
+ +
+
+ +
+
+
+
    +
  • +
    + + + + token + +
    +
    +
    +
    +
    +
    + +
    +
    +

    Declaration

    +
    +

    Swift

    +
    case token
    + +
    +
    +
    +
    +
  • +
  • +
    + + + + bytes + +
    +
    +
    +
    +
    +
    + +
    +
    +

    Declaration

    +
    +

    Swift

    +
    case bytes
    + +
    +
    +
    +
    +
  • +
  • +
    + + + + logprob + +
    +
    +
    +
    +
    +
    + +
    +
    +

    Declaration

    +
    +

    Swift

    +
    case logprob
    + +
    +
    +
    +
    +
  • +
  • +
    + + + + topLogprobs + +
    +
    +
    +
    +
    +
    + +
    +
    +

    Declaration

    +
    +

    Swift

    +
    case topLogprobs = "top_logprobs"
    + +
    +
    +
    +
    +
  • +
+
+
+
+ +
+
+ + diff --git a/docs/Structs/ChatStreamResult/Choice/ChoiceLogprobs/ChatCompletionTokenLogprob/TopLogprob.html b/docs/Structs/ChatStreamResult/Choice/ChoiceLogprobs/ChatCompletionTokenLogprob/TopLogprob.html new file mode 100644 index 00000000..532a768e --- /dev/null +++ b/docs/Structs/ChatStreamResult/Choice/ChoiceLogprobs/ChatCompletionTokenLogprob/TopLogprob.html @@ -0,0 +1,395 @@ + + + + TopLogprob Structure Reference + + + + + + + + + + + + + +
+
+

Docs (100% documented)

+
+
+ +
+
+
+
+
+ +
+
+ +
+
+
+

TopLogprob

+
+
+ +
public struct TopLogprob : Codable, Equatable
+ +
+
+ +
+
+
+
    +
  • +
    + + + + token + +
    +
    +
    +
    +
    +
    +

    The token.

    + +
    +
    +

    Declaration

    +
    +

    Swift

    +
    public let token: String
    + +
    +
    +
    +
    +
  • +
  • +
    + + + + bytes + +
    +
    +
    +
    +
    +
    +

    A list of integers representing the UTF-8 bytes representation of the token. Useful in instances where characters are represented by multiple tokens and their byte representations must be combined to generate the correct text representation. Can be null if there is no bytes representation for the token.

    + +
    +
    +

    Declaration

    +
    +

    Swift

    +
    public let bytes: [Int]?
    + +
    +
    +
    +
    +
  • +
  • +
    + + + + logprob + +
    +
    +
    +
    +
    +
    +

    The log probability of this token.

    + +
    +
    +

    Declaration

    +
    +

    Swift

    +
    public let logprob: Double
    + +
    +
    +
    +
    +
  • +
+
+
+
+ +
+
+ + diff --git a/docs/Structs/ChatStreamResult/Choice/CodingKeys.html b/docs/Structs/ChatStreamResult/Choice/CodingKeys.html new file mode 100644 index 00000000..11ad4d0e --- /dev/null +++ b/docs/Structs/ChatStreamResult/Choice/CodingKeys.html @@ -0,0 +1,414 @@ + + + + CodingKeys Enumeration Reference + + + + + + + + + + + + + +
+
+

Docs (100% documented)

+
+
+ +
+
+
+
+
+ +
+
+ +
+
+
+

CodingKeys

+
+
+ +
public enum CodingKeys : String, CodingKey
+ +
+
+ +
+
+
+
    +
  • +
    + + + + index + +
    +
    +
    +
    +
    +
    + +
    +
    +

    Declaration

    +
    +

    Swift

    +
    case index
    + +
    +
    +
    +
    +
  • +
  • +
    + + + + delta + +
    +
    +
    +
    +
    +
    + +
    +
    +

    Declaration

    +
    +

    Swift

    +
    case delta
    + +
    +
    +
    +
    +
  • +
  • +
    + + + + finishReason + +
    +
    +
    +
    +
    +
    + +
    +
    +

    Declaration

    +
    +

    Swift

    +
    case finishReason = "finish_reason"
    + +
    +
    +
    +
    +
  • +
  • +
    + + + + logprobs + +
    +
    +
    +
    +
    +
    + +
    +
    +

    Declaration

    +
    +

    Swift

    +
    case logprobs
    + +
    +
    +
    +
    +
  • +
+
+
+
+ +
+
+ + diff --git a/docs/Structs/ChatStreamResult/CodingKeys.html b/docs/Structs/ChatStreamResult/CodingKeys.html new file mode 100644 index 00000000..6458b785 --- /dev/null +++ b/docs/Structs/ChatStreamResult/CodingKeys.html @@ -0,0 +1,464 @@ + + + + CodingKeys Enumeration Reference + + + + + + + + + + + + + +
+
+

Docs (100% documented)

+
+
+ +
+
+
+
+
+ +
+
+ +
+
+
+

CodingKeys

+
+
+ +
public enum CodingKeys : String, CodingKey
+ +
+
+ +
+
+
+
    +
  • +
    + + + + id + +
    +
    +
    +
    +
    +
    + +
    +
    +

    Declaration

    +
    +

    Swift

    +
    case id
    + +
    +
    +
    +
    +
  • +
  • +
    + + + + object + +
    +
    +
    +
    +
    +
    + +
    +
    +

    Declaration

    +
    +

    Swift

    +
    case object
    + +
    +
    +
    +
    +
  • +
  • +
    + + + + created + +
    +
    +
    +
    +
    +
    + +
    +
    +

    Declaration

    +
    +

    Swift

    +
    case created
    + +
    +
    +
    +
    +
  • +
  • +
    + + + + model + +
    +
    +
    +
    +
    +
    + +
    +
    +

    Declaration

    +
    +

    Swift

    +
    case model
    + +
    +
    +
    +
    +
  • +
  • +
    + + + + choices + +
    +
    +
    +
    +
    +
    + +
    +
    +

    Declaration

    +
    +

    Swift

    +
    case choices
    + +
    +
    +
    +
    +
  • +
  • +
    + + + + systemFingerprint + +
    +
    +
    +
    +
    +
    + +
    +
    +

    Declaration

    +
    +

    Swift

    +
    case systemFingerprint = "system_fingerprint"
    + +
    +
    +
    +
    +
  • +
+
+
+
+ +
+
+ + diff --git a/docs/Structs/CompletionsQuery.html b/docs/Structs/CompletionsQuery.html new file mode 100644 index 00000000..5b863fac --- /dev/null +++ b/docs/Structs/CompletionsQuery.html @@ -0,0 +1,575 @@ + + + + CompletionsQuery Structure Reference + + + + + + + + + + + + + +
+
+

Docs (100% documented)

+
+
+ +
+
+
+
+
+ +
+
+ +
+
+
+

CompletionsQuery

+
+
+ +
public struct CompletionsQuery : Codable, Streamable
+ +
+
+ +
+
+
+
    +
  • +
    + + + + model + +
    +
    +
    +
    +
    +
    +

    ID of the model to use.

    + +
    +
    +

    Declaration

    +
    +

    Swift

    +
    public let model: Model
    + +
    +
    +
    +
    +
  • +
  • +
    + + + + prompt + +
    +
    +
    +
    +
    +
    +

    The prompt(s) to generate completions for, encoded as a string, array of strings, array of tokens, or array of token arrays.

    + +
    +
    +

    Declaration

    +
    +

    Swift

    +
    public let prompt: String
    + +
    +
    +
    +
    +
  • +
  • +
    + + + + temperature + +
    +
    +
    +
    +
    +
    +

    What sampling temperature to use. Higher values means the model will take more risks. Try 0.9 for more creative applications, and 0 (argmax sampling) for ones with a well-defined answer.

    + +
    +
    +

    Declaration

    +
    +

    Swift

    +
    public let temperature: Double?
    + +
    +
    +
    +
    +
  • +
  • +
    + + + + maxTokens + +
    +
    +
    +
    +
    +
    +

    The maximum number of tokens to generate in the completion.

    + +
    +
    +

    Declaration

    +
    +

    Swift

    +
    public let maxTokens: Int?
    + +
    +
    +
    +
    +
  • +
  • +
    + + + + topP + +
    +
    +
    +
    +
    +
    +

    An alternative to sampling with temperature, called nucleus sampling, where the model considers the results of the tokens with top_p probability mass. So 0.1 means only the tokens comprising the top 10% probability mass are considered.

    + +
    +
    +

    Declaration

    +
    +

    Swift

    +
    public let topP: Double?
    + +
    +
    +
    +
    +
  • +
  • +
    + + + + frequencyPenalty + +
    +
    +
    +
    +
    +
    +

    Number between -2.0 and 2.0. Positive values penalize new tokens based on their existing frequency in the text so far, decreasing the model’s likelihood to repeat the same line verbatim.

    + +
    +
    +

    Declaration

    +
    +

    Swift

    +
    public let frequencyPenalty: Double?
    + +
    +
    +
    +
    +
  • +
  • +
    + + + + presencePenalty + +
    +
    +
    +
    +
    +
    +

    Number between -2.0 and 2.0. Positive values penalize new tokens based on whether they appear in the text so far, increasing the model’s likelihood to talk about new topics.

    + +
    +
    +

    Declaration

    +
    +

    Swift

    +
    public let presencePenalty: Double?
    + +
    +
    +
    +
    +
  • +
  • +
    + + + + stop + +
    +
    +
    +
    +
    +
    +

    Up to 4 sequences where the API will stop generating further tokens. The returned text will not contain the stop sequence.

    + +
    +
    +

    Declaration

    +
    +

    Swift

    +
    public let stop: [String]?
    + +
    +
    +
    +
    +
  • +
  • +
    + + + + user + +
    +
    +
    +
    +
    +
    +

    A unique identifier representing your end-user, which can help OpenAI to monitor and detect abuse.

    + +
    +
    +

    Declaration

    +
    +

    Swift

    +
    public let user: String?
    + +
    +
    +
    +
    +
  • +
  • + +
    +
    +
    +
    +
    + +
    +
    +

    Declaration

    +
    +

    Swift

    +
    public init(model: Model, prompt: String, temperature: Double? = nil, maxTokens: Int? = nil, topP: Double? = nil, frequencyPenalty: Double? = nil, presencePenalty: Double? = nil, stop: [String]? = nil, user: String? = nil)
    + +
    +
    +
    +
    +
  • +
+
+
+
+ +
+
+ + diff --git a/docs/Structs/CompletionsResult.html b/docs/Structs/CompletionsResult.html new file mode 100644 index 00000000..ac1aaf9e --- /dev/null +++ b/docs/Structs/CompletionsResult.html @@ -0,0 +1,516 @@ + + + + CompletionsResult Structure Reference + + + + + + + + + + + + + +
+
+

Docs (100% documented)

+
+
+ +
+
+
+
+
+ +
+
+ +
+
+
+

CompletionsResult

+
+
+ +
public struct CompletionsResult : Codable, Equatable
+ +
+
+ +
+
+
+
    +
  • +
    + + + + Usage + +
    +
    +
    +
    +
    +
    + + See more +
    +
    +

    Declaration

    +
    +

    Swift

    +
    public struct Usage : Codable, Equatable
    + +
    +
    +
    +
    +
  • +
  • +
    + + + + Choice + +
    +
    +
    +
    +
    +
    + + See more +
    +
    +

    Declaration

    +
    +

    Swift

    +
    public struct Choice : Codable, Equatable
    + +
    +
    +
    +
    +
  • +
  • +
    + + + + id + +
    +
    +
    +
    +
    +
    + +
    +
    +

    Declaration

    +
    +

    Swift

    +
    public let id: String
    + +
    +
    +
    +
    +
  • +
  • +
    + + + + object + +
    +
    +
    +
    +
    +
    + +
    +
    +

    Declaration

    +
    +

    Swift

    +
    public let object: String
    + +
    +
    +
    +
    +
  • +
  • +
    + + + + created + +
    +
    +
    +
    +
    +
    + +
    +
    +

    Declaration

    +
    +

    Swift

    +
    public let created: TimeInterval
    + +
    +
    +
    +
    +
  • +
  • +
    + + + + model + +
    +
    +
    +
    +
    +
    + +
    +
    +

    Declaration

    +
    +

    Swift

    +
    public let model: Model
    + +
    +
    +
    +
    +
  • +
  • +
    + + + + choices + +
    +
    +
    +
    +
    +
    + +
    +
    +

    Declaration

    +
    +

    Swift

    +
    public let choices: [Choice]
    + +
    +
    +
    +
    +
  • +
  • +
    + + + + usage + +
    +
    +
    +
    +
    +
    + +
    +
    +

    Declaration

    +
    +

    Swift

    +
    public let usage: Usage?
    + +
    +
    +
    +
    +
  • +
+
+
+
+ +
+
+ + diff --git a/docs/Structs/CompletionsResult/Choice.html b/docs/Structs/CompletionsResult/Choice.html new file mode 100644 index 00000000..afada57d --- /dev/null +++ b/docs/Structs/CompletionsResult/Choice.html @@ -0,0 +1,386 @@ + + + + Choice Structure Reference + + + + + + + + + + + + + +
+
+

Docs (100% documented)

+
+
+ +
+
+
+
+
+ +
+
+ +
+
+
+

Choice

+
+
+ +
public struct Choice : Codable, Equatable
+ +
+
+ +
+
+
+
    +
  • +
    + + + + text + +
    +
    +
    +
    +
    +
    + +
    +
    +

    Declaration

    +
    +

    Swift

    +
    public let text: String
    + +
    +
    +
    +
    +
  • +
  • +
    + + + + index + +
    +
    +
    +
    +
    +
    + +
    +
    +

    Declaration

    +
    +

    Swift

    +
    public let index: Int
    + +
    +
    +
    +
    +
  • +
  • +
    + + + + finishReason + +
    +
    +
    +
    +
    +
    + +
    +
    +

    Declaration

    +
    +

    Swift

    +
    public let finishReason: String?
    + +
    +
    +
    +
    +
  • +
+
+
+
+ +
+
+ + diff --git a/docs/Structs/CompletionsResult/Usage.html b/docs/Structs/CompletionsResult/Usage.html new file mode 100644 index 00000000..e83845d5 --- /dev/null +++ b/docs/Structs/CompletionsResult/Usage.html @@ -0,0 +1,386 @@ + + + + Usage Structure Reference + + + + + + + + + + + + + +
+
+

Docs (100% documented)

+
+
+ +
+
+
+
+
+ +
+
+ +
+
+
+

Usage

+
+
+ +
public struct Usage : Codable, Equatable
+ +
+
+ +
+
+
+
    +
  • +
    + + + + promptTokens + +
    +
    +
    +
    +
    +
    + +
    +
    +

    Declaration

    +
    +

    Swift

    +
    public let promptTokens: Int
    + +
    +
    +
    +
    +
  • +
  • +
    + + + + completionTokens + +
    +
    +
    +
    +
    +
    + +
    +
    +

    Declaration

    +
    +

    Swift

    +
    public let completionTokens: Int
    + +
    +
    +
    +
    +
  • +
  • +
    + + + + totalTokens + +
    +
    +
    +
    +
    +
    + +
    +
    +

    Declaration

    +
    +

    Swift

    +
    public let totalTokens: Int
    + +
    +
    +
    +
    +
  • +
+
+
+
+ +
+
+ + diff --git a/docs/Structs/EditsQuery.html b/docs/Structs/EditsQuery.html new file mode 100644 index 00000000..d6efa469 --- /dev/null +++ b/docs/Structs/EditsQuery.html @@ -0,0 +1,494 @@ + + + + EditsQuery Structure Reference + + + + + + + + + + + + + +
+
+

Docs (100% documented)

+
+
+ +
+
+
+
+
+ +
+
+ +
+
+
+

EditsQuery

+
+
+ +
public struct EditsQuery : Codable
+ +
+
+ +
+
+
+
    +
  • +
    + + + + model + +
    +
    +
    +
    +
    +
    +

    ID of the model to use.

    + +
    +
    +

    Declaration

    +
    +

    Swift

    +
    public let model: Model
    + +
    +
    +
    +
    +
  • +
  • +
    + + + + input + +
    +
    +
    +
    +
    +
    +

    The input text to use as a starting point for the edit.

    + +
    +
    +

    Declaration

    +
    +

    Swift

    +
    public let input: String?
    + +
    +
    +
    +
    +
  • +
  • +
    + + + + instruction + +
    +
    +
    +
    +
    +
    +

    The instruction that tells the model how to edit the prompt.

    + +
    +
    +

    Declaration

    +
    +

    Swift

    +
    public let instruction: String
    + +
    +
    +
    +
    +
  • +
  • +
    + + + + n + +
    +
    +
    +
    +
    +
    +

    How many edits to generate for the input and instruction.

    + +
    +
    +

    Declaration

    +
    +

    Swift

    +
    public let n: Int?
    + +
    +
    +
    +
    +
  • +
  • +
    + + + + temperature + +
    +
    +
    +
    +
    +
    +

    What sampling temperature to use. Higher values means the model will take more risks. Try 0.9 for more creative applications, and 0 (argmax sampling) for ones with a well-defined answer.

    + +
    +
    +

    Declaration

    +
    +

    Swift

    +
    public let temperature: Double?
    + +
    +
    +
    +
    +
  • +
  • +
    + + + + topP + +
    +
    +
    +
    +
    +
    +

    An alternative to sampling with temperature, called nucleus sampling, where the model considers the results of the tokens with top_p probability mass. So 0.1 means only the tokens comprising the top 10% probability mass are considered.

    + +
    +
    +

    Declaration

    +
    +

    Swift

    +
    public let topP: Double?
    + +
    +
    +
    +
    +
  • +
  • + +
    +
    +
    +
    +
    + +
    +
    +

    Declaration

    +
    +

    Swift

    +
    public init(model: Model, input: String?, instruction: String, n: Int? = nil, temperature: Double? = nil, topP: Double? = nil)
    + +
    +
    +
    +
    +
  • +
+
+
+
+ +
+
+ + diff --git a/docs/Structs/EditsResult.html b/docs/Structs/EditsResult.html new file mode 100644 index 00000000..9c4b1915 --- /dev/null +++ b/docs/Structs/EditsResult.html @@ -0,0 +1,464 @@ + + + + EditsResult Structure Reference + + + + + + + + + + + + + +
+
+

Docs (100% documented)

+
+
+ +
+
+
+
+
+ +
+
+ +
+
+
+

EditsResult

+
+
+ +
public struct EditsResult : Codable, Equatable
+ +
+
+ +
+
+
+
    +
  • +
    + + + + Choice + +
    +
    +
    +
    +
    +
    + + See more +
    +
    +

    Declaration

    +
    +

    Swift

    +
    public struct Choice : Codable, Equatable
    + +
    +
    +
    +
    +
  • +
  • +
    + + + + Usage + +
    +
    +
    +
    +
    +
    + + See more +
    +
    +

    Declaration

    +
    +

    Swift

    +
    public struct Usage : Codable, Equatable
    + +
    +
    +
    +
    +
  • +
  • +
    + + + + object + +
    +
    +
    +
    +
    +
    + +
    +
    +

    Declaration

    +
    +

    Swift

    +
    public let object: String
    + +
    +
    +
    +
    +
  • +
  • +
    + + + + created + +
    +
    +
    +
    +
    +
    + +
    +
    +

    Declaration

    +
    +

    Swift

    +
    public let created: TimeInterval
    + +
    +
    +
    +
    +
  • +
  • +
    + + + + choices + +
    +
    +
    +
    +
    +
    + +
    +
    +

    Declaration

    +
    +

    Swift

    +
    public let choices: [Choice]
    + +
    +
    +
    +
    +
  • +
  • +
    + + + + usage + +
    +
    +
    +
    +
    +
    + +
    +
    +

    Declaration

    +
    +

    Swift

    +
    public let usage: Usage
    + +
    +
    +
    +
    +
  • +
+
+
+
+ +
+
+ + diff --git a/docs/Structs/EditsResult/Choice.html b/docs/Structs/EditsResult/Choice.html new file mode 100644 index 00000000..0cfccc44 --- /dev/null +++ b/docs/Structs/EditsResult/Choice.html @@ -0,0 +1,360 @@ + + + + Choice Structure Reference + + + + + + + + + + + + + +
+
+

Docs (100% documented)

+
+
+ +
+
+
+
+
+ +
+
+ +
+
+
+

Choice

+
+
+ +
public struct Choice : Codable, Equatable
+ +
+
+ +
+
+
+
    +
  • +
    + + + + text + +
    +
    +
    +
    +
    +
    + +
    +
    +

    Declaration

    +
    +

    Swift

    +
    public let text: String
    + +
    +
    +
    +
    +
  • +
  • +
    + + + + index + +
    +
    +
    +
    +
    +
    + +
    +
    +

    Declaration

    +
    +

    Swift

    +
    public let index: Int
    + +
    +
    +
    +
    +
  • +
+
+
+
+ +
+
+ + diff --git a/docs/Structs/EditsResult/Usage.html b/docs/Structs/EditsResult/Usage.html new file mode 100644 index 00000000..1e848023 --- /dev/null +++ b/docs/Structs/EditsResult/Usage.html @@ -0,0 +1,386 @@ + + + + Usage Structure Reference + + + + + + + + + + + + + +
+
+

Docs (100% documented)

+
+
+ +
+
+
+
+
+ +
+
+ +
+
+
+

Usage

+
+
+ +
public struct Usage : Codable, Equatable
+ +
+
+ +
+
+
+
    +
  • +
    + + + + promptTokens + +
    +
    +
    +
    +
    +
    + +
    +
    +

    Declaration

    +
    +

    Swift

    +
    public let promptTokens: Int
    + +
    +
    +
    +
    +
  • +
  • +
    + + + + completionTokens + +
    +
    +
    +
    +
    +
    + +
    +
    +

    Declaration

    +
    +

    Swift

    +
    public let completionTokens: Int
    + +
    +
    +
    +
    +
  • +
  • +
    + + + + totalTokens + +
    +
    +
    +
    +
    +
    + +
    +
    +

    Declaration

    +
    +

    Swift

    +
    public let totalTokens: Int
    + +
    +
    +
    +
    +
  • +
+
+
+
+ +
+
+ + diff --git a/docs/Structs/EmbeddingsQuery.html b/docs/Structs/EmbeddingsQuery.html new file mode 100644 index 00000000..fb78cd32 --- /dev/null +++ b/docs/Structs/EmbeddingsQuery.html @@ -0,0 +1,530 @@ + + + + EmbeddingsQuery Structure Reference + + + + + + + + + + + + + +
+
+

Docs (100% documented)

+
+
+ +
+
+
+
+
+ +
+
+ +
+
+
+

EmbeddingsQuery

+
+
+ +
public struct EmbeddingsQuery : Codable
+ +
+
+ +
+
+
+
    +
  • +
    + + + + input + +
    +
    +
    +
    +
    +
    +

    Input text to embed, encoded as a string or array of tokens. To embed multiple inputs in a single request, pass an array of strings or array of token arrays. The input must not exceed the max input tokens for the model (8192 tokens for text-embedding-ada-002), cannot be an empty string, and any array must be 2048 dimensions or less.

    + +
    +
    +

    Declaration

    +
    +

    Swift

    +
    public let input: `Self`.Input
    + +
    +
    +
    +
    +
  • +
  • +
    + + + + model + +
    +
    +
    +
    +
    +
    +

    ID of the model to use. You can use the List models API to see all of your available models, or see our Model overview for descriptions of them. +https://platform.openai.com/docs/api-reference/models/list +https://platform.openai.com/docs/models/overview

    + +
    +
    +

    Declaration

    +
    +

    Swift

    +
    public let model: Model
    + +
    +
    +
    +
    +
  • +
  • +
    + + + + encodingFormat + +
    +
    +
    +
    +
    +
    +

    The format to return the embeddings in. Can be either float or base64. +https://pypi.org/project/pybase64/

    + +
    +
    +

    Declaration

    +
    +

    Swift

    +
    public let encodingFormat: `Self`.EncodingFormat?
    + +
    +
    +
    +
    +
  • +
  • +
    + + + + user + +
    +
    +
    +
    +
    +
    +

    A unique identifier representing your end-user, which can help OpenAI to monitor and detect abuse. +https://platform.openai.com/docs/guides/safety-best-practices/end-user-ids

    + +
    +
    +

    Declaration

    +
    +

    Swift

    +
    public let user: String?
    + +
    +
    +
    +
    +
  • +
  • + +
    +
    +
    +
    +
    + +
    +
    +

    Declaration

    +
    +

    Swift

    +
    public init(
    +    input: Self.Input,
    +    model: Model,
    +    encodingFormat: Self.EncodingFormat? = nil,
    +    user: String? = nil
    +)
    + +
    +
    +
    +
    +
  • +
  • +
    + + + + Input + +
    +
    +
    +
    +
    +
    + + See more +
    +
    +

    Declaration

    +
    +

    Swift

    +
    public enum Input : Codable, Equatable
    + +
    +
    +
    +
    +
  • +
  • +
    + + + + EncodingFormat + +
    +
    +
    +
    +
    +
    + + See more +
    +
    +

    Declaration

    +
    +

    Swift

    +
    public enum EncodingFormat : String, Codable
    + +
    +
    +
    +
    +
  • +
  • +
    + + + + CodingKeys + +
    +
    +
    +
    +
    +
    + + See more +
    +
    +

    Declaration

    +
    +

    Swift

    +
    public enum CodingKeys : String, CodingKey
    + +
    +
    +
    +
    +
  • +
+
+
+
+ +
+
+ + diff --git a/docs/Structs/EmbeddingsQuery/CodingKeys.html b/docs/Structs/EmbeddingsQuery/CodingKeys.html new file mode 100644 index 00000000..994c7dcf --- /dev/null +++ b/docs/Structs/EmbeddingsQuery/CodingKeys.html @@ -0,0 +1,412 @@ + + + + CodingKeys Enumeration Reference + + + + + + + + + + + + + +
+
+

Docs (100% documented)

+
+
+ +
+
+
+
+
+ +
+
+ +
+
+
+

CodingKeys

+
+
+ +
public enum CodingKeys : String, CodingKey
+ +
+
+ +
+
+
+
    +
  • +
    + + + + input + +
    +
    +
    +
    +
    +
    + +
    +
    +

    Declaration

    +
    +

    Swift

    +
    case input
    + +
    +
    +
    +
    +
  • +
  • +
    + + + + model + +
    +
    +
    +
    +
    +
    + +
    +
    +

    Declaration

    +
    +

    Swift

    +
    case model
    + +
    +
    +
    +
    +
  • +
  • +
    + + + + encodingFormat + +
    +
    +
    +
    +
    +
    + +
    +
    +

    Declaration

    +
    +

    Swift

    +
    case encodingFormat = "encoding_format"
    + +
    +
    +
    +
    +
  • +
  • +
    + + + + user + +
    +
    +
    +
    +
    +
    + +
    +
    +

    Declaration

    +
    +

    Swift

    +
    case user
    + +
    +
    +
    +
    +
  • +
+
+
+
+ +
+
+ + diff --git a/docs/Structs/EmbeddingsQuery/EncodingFormat.html b/docs/Structs/EmbeddingsQuery/EncodingFormat.html new file mode 100644 index 00000000..d1d98399 --- /dev/null +++ b/docs/Structs/EmbeddingsQuery/EncodingFormat.html @@ -0,0 +1,360 @@ + + + + EncodingFormat Enumeration Reference + + + + + + + + + + + + + +
+
+

Docs (100% documented)

+
+
+ +
+
+
+
+
+ +
+
+ +
+
+
+

EncodingFormat

+
+
+ +
public enum EncodingFormat : String, Codable
+ +
+
+ +
+
+
+
    +
  • +
    + + + + float + +
    +
    +
    +
    +
    +
    + +
    +
    +

    Declaration

    +
    +

    Swift

    +
    case float
    + +
    +
    +
    +
    +
  • +
  • +
    + + + + base64 + +
    +
    +
    +
    +
    +
    + +
    +
    +

    Declaration

    +
    +

    Swift

    +
    case base64
    + +
    +
    +
    +
    +
  • +
+
+
+
+ +
+
+ + diff --git a/docs/Structs/EmbeddingsQuery/Input.html b/docs/Structs/EmbeddingsQuery/Input.html new file mode 100644 index 00000000..ff7e775a --- /dev/null +++ b/docs/Structs/EmbeddingsQuery/Input.html @@ -0,0 +1,542 @@ + + + + Input Enumeration Reference + + + + + + + + + + + + + +
+
+

Docs (100% documented)

+
+
+ +
+
+
+
+
+ +
+
+ +
+
+
+

Input

+
+
+ +
public enum Input : Codable, Equatable
+ +
+
+ +
+
+
+
    +
  • +
    + + + + string(_:) + +
    +
    +
    +
    +
    +
    + +
    +
    +

    Declaration

    +
    +

    Swift

    +
    case string(String)
    + +
    +
    +
    +
    +
  • +
  • +
    + + + + stringList(_:) + +
    +
    +
    +
    +
    +
    + +
    +
    +

    Declaration

    +
    +

    Swift

    +
    case stringList([String])
    + +
    +
    +
    +
    +
  • +
  • +
    + + + + intList(_:) + +
    +
    +
    +
    +
    +
    + +
    +
    +

    Declaration

    +
    +

    Swift

    +
    case intList([Int])
    + +
    +
    +
    +
    +
  • +
  • +
    + + + + intMatrix(_:) + +
    +
    +
    +
    +
    +
    + +
    +
    +

    Declaration

    +
    +

    Swift

    +
    case intMatrix([[Int]])
    + +
    +
    +
    +
    +
  • +
  • +
    + + + + encode(to:) + +
    +
    +
    +
    +
    +
    + +
    +
    +

    Declaration

    +
    +

    Swift

    +
    public func encode(to encoder: Encoder) throws
    + +
    +
    +
    +
    +
  • +
  • +
    + + + + init(string:) + +
    +
    +
    +
    +
    +
    + +
    +
    +

    Declaration

    +
    +

    Swift

    +
    public init(string: String)
    + +
    +
    +
    +
    +
  • +
  • +
    + + + + init(stringList:) + +
    +
    +
    +
    +
    +
    + +
    +
    +

    Declaration

    +
    +

    Swift

    +
    public init(stringList: [String])
    + +
    +
    +
    +
    +
  • +
  • +
    + + + + init(intList:) + +
    +
    +
    +
    +
    +
    + +
    +
    +

    Declaration

    +
    +

    Swift

    +
    public init(intList: [Int])
    + +
    +
    +
    +
    +
  • +
  • +
    + + + + init(intMatrix:) + +
    +
    +
    +
    +
    +
    + +
    +
    +

    Declaration

    +
    +

    Swift

    +
    public init(intMatrix: [[Int]])
    + +
    +
    +
    +
    +
  • +
+
+
+
+ +
+
+ + diff --git a/docs/Structs/EmbeddingsResult.html b/docs/Structs/EmbeddingsResult.html new file mode 100644 index 00000000..d8251f10 --- /dev/null +++ b/docs/Structs/EmbeddingsResult.html @@ -0,0 +1,465 @@ + + + + EmbeddingsResult Structure Reference + + + + + + + + + + + + + +
+
+

Docs (100% documented)

+
+
+ +
+
+
+
+
+ +
+
+ +
+
+
+

EmbeddingsResult

+
+
+ +
public struct EmbeddingsResult : Codable, Equatable
+ +
+
+ +
+
+
+
    +
  • +
    + + + + Embedding + +
    +
    +
    +
    +
    +
    + + See more +
    +
    +

    Declaration

    +
    +

    Swift

    +
    public struct Embedding : Codable, Equatable
    + +
    +
    +
    +
    +
  • +
  • +
    + + + + Usage + +
    +
    +
    +
    +
    +
    + + See more +
    +
    +

    Declaration

    +
    +

    Swift

    +
    public struct Usage : Codable, Equatable
    + +
    +
    +
    +
    +
  • +
  • +
    + + + + data + +
    +
    +
    +
    +
    +
    + +
    +
    +

    Declaration

    +
    +

    Swift

    +
    public let data: [Embedding]
    + +
    +
    +
    +
    +
  • +
  • +
    + + + + model + +
    +
    +
    +
    +
    +
    + +
    +
    +

    Declaration

    +
    +

    Swift

    +
    public let model: String
    + +
    +
    +
    +
    +
  • +
  • +
    + + + + usage + +
    +
    +
    +
    +
    +
    + +
    +
    +

    Declaration

    +
    +

    Swift

    +
    public let usage: Usage
    + +
    +
    +
    +
    +
  • +
  • +
    + + + + object + +
    +
    +
    +
    +
    +
    +

    The object type, which is always “list”.

    + +
    +
    +

    Declaration

    +
    +

    Swift

    +
    public let object: String
    + +
    +
    +
    +
    +
  • +
+
+
+
+ +
+
+ + diff --git a/docs/Structs/EmbeddingsResult/Embedding.html b/docs/Structs/EmbeddingsResult/Embedding.html new file mode 100644 index 00000000..71f10564 --- /dev/null +++ b/docs/Structs/EmbeddingsResult/Embedding.html @@ -0,0 +1,390 @@ + + + + Embedding Structure Reference + + + + + + + + + + + + + +
+
+

Docs (100% documented)

+
+
+ +
+
+
+
+
+ +
+
+ +
+
+
+

Embedding

+
+
+ +
public struct Embedding : Codable, Equatable
+ +
+
+ +
+
+
+
    +
  • +
    + + + + object + +
    +
    +
    +
    +
    +
    +

    The object type, which is always “embedding”.

    + +
    +
    +

    Declaration

    +
    +

    Swift

    +
    public let object: String
    + +
    +
    +
    +
    +
  • +
  • +
    + + + + embedding + +
    +
    +
    +
    +
    +
    +

    The embedding vector, which is a list of floats. The length of vector depends on the model as listed in the embedding guide. +https://platform.openai.com/docs/guides/embeddings

    + +
    +
    +

    Declaration

    +
    +

    Swift

    +
    public let embedding: [Double]
    + +
    +
    +
    +
    +
  • +
  • +
    + + + + index + +
    +
    +
    +
    +
    +
    +

    The index of the embedding in the list of embeddings.

    + +
    +
    +

    Declaration

    +
    +

    Swift

    +
    public let index: Int
    + +
    +
    +
    +
    +
  • +
+
+
+
+ +
+
+ + diff --git a/docs/Structs/EmbeddingsResult/Usage.html b/docs/Structs/EmbeddingsResult/Usage.html new file mode 100644 index 00000000..ac8f6faf --- /dev/null +++ b/docs/Structs/EmbeddingsResult/Usage.html @@ -0,0 +1,360 @@ + + + + Usage Structure Reference + + + + + + + + + + + + + +
+
+

Docs (100% documented)

+
+
+ +
+
+
+
+
+ +
+
+ +
+
+
+

Usage

+
+
+ +
public struct Usage : Codable, Equatable
+ +
+
+ +
+
+
+
    +
  • +
    + + + + promptTokens + +
    +
    +
    +
    +
    +
    + +
    +
    +

    Declaration

    +
    +

    Swift

    +
    public let promptTokens: Int
    + +
    +
    +
    +
    +
  • +
  • +
    + + + + totalTokens + +
    +
    +
    +
    +
    +
    + +
    +
    +

    Declaration

    +
    +

    Swift

    +
    public let totalTokens: Int
    + +
    +
    +
    +
    +
  • +
+
+
+
+ +
+
+ + diff --git a/docs/Structs/ImageEditsQuery.html b/docs/Structs/ImageEditsQuery.html new file mode 100644 index 00000000..b4f190f8 --- /dev/null +++ b/docs/Structs/ImageEditsQuery.html @@ -0,0 +1,639 @@ + + + + ImageEditsQuery Structure Reference + + + + + + + + + + + + + +
+
+

Docs (100% documented)

+
+
+ +
+
+
+
+
+ +
+
+ +
+
+
+

ImageEditsQuery

+
+
+ +
public struct ImageEditsQuery : Codable
+ +
+
+ +
+
+
+
    +
  • +
    + + + + ResponseFormat + +
    +
    +
    +
    +
    +
    + +
    +
    +

    Declaration

    +
    +

    Swift

    +
    public typealias ResponseFormat = ImagesQuery.ResponseFormat
    + +
    +
    +
    +
    +
  • +
  • +
    + + + + Size + +
    +
    +
    +
    +
    +
    + +
    +
    +

    Declaration

    +
    +

    Swift

    +
    public typealias Size = ImagesQuery.Size
    + +
    +
    +
    +
    +
  • +
  • +
    + + + + image + +
    +
    +
    +
    +
    +
    +

    The image to edit. Must be a valid PNG file, less than 4MB, and square. If mask is not provided, image must have transparency, which will be used as the mask.

    + +
    +
    +

    Declaration

    +
    +

    Swift

    +
    public let image: Data
    + +
    +
    +
    +
    +
  • +
  • +
    + + + + mask + +
    +
    +
    +
    +
    +
    +

    An additional image whose fully transparent areas (e.g. where alpha is zero) indicate where image should be edited. Must be a valid PNG file, less than 4MB, and have the same dimensions as image.

    + +
    +
    +

    Declaration

    +
    +

    Swift

    +
    public let mask: Data?
    + +
    +
    +
    +
    +
  • +
  • +
    + + + + prompt + +
    +
    +
    +
    +
    +
    +

    A text description of the desired image(s). The maximum length is 1000 characters.

    + +
    +
    +

    Declaration

    +
    +

    Swift

    +
    public let prompt: String
    + +
    +
    +
    +
    +
  • +
  • +
    + + + + model + +
    +
    +
    +
    +
    +
    +

    The model to use for image generation. +Defaults to dall-e-2

    + +
    +
    +

    Declaration

    +
    +

    Swift

    +
    public let model: Model?
    + +
    +
    +
    +
    +
  • +
  • +
    + + + + n + +
    +
    +
    +
    +
    +
    +

    The number of images to generate. Must be between 1 and 10.

    + +
    +
    +

    Declaration

    +
    +

    Swift

    +
    public let n: Int?
    + +
    +
    +
    +
    +
  • +
  • +
    + + + + responseFormat + +
    +
    +
    +
    +
    +
    +

    The format in which the generated images are returned. Must be one of url or b64_json. +Defaults to url

    + +
    +
    +

    Declaration

    +
    +

    Swift

    +
    public let responseFormat: `Self`.ResponseFormat?
    + +
    +
    +
    +
    +
  • +
  • +
    + + + + size + +
    +
    +
    +
    +
    +
    +

    The size of the generated images. Must be one of 256x256, 512x512, or 1024x1024.

    + +
    +
    +

    Declaration

    +
    +

    Swift

    +
    public let size: Size?
    + +
    +
    +
    +
    +
  • +
  • +
    + + + + user + +
    +
    +
    +
    +
    +
    +

    A unique identifier representing your end-user, which can help OpenAI to monitor and detect abuse. +https://platform.openai.com/docs/guides/safety-best-practices/end-user-ids

    + +
    +
    +

    Declaration

    +
    +

    Swift

    +
    public let user: String?
    + +
    +
    +
    +
    +
  • +
  • + +
    +
    +
    +
    +
    + +
    +
    +

    Declaration

    +
    +

    Swift

    +
    public init(
    +    image: Data,
    +    prompt: String,
    +    mask: Data? = nil,
    +    model: Model? = nil,
    +    n: Int? = nil,
    +    responseFormat: Self.ResponseFormat? = nil,
    +    size: Self.Size? = nil,
    +    user: String? = nil
    +)
    + +
    +
    +
    +
    +
  • +
  • +
    + + + + CodingKeys + +
    +
    +
    +
    +
    +
    + + See more +
    +
    +

    Declaration

    +
    +

    Swift

    +
    public enum CodingKeys : String, CodingKey
    + +
    +
    +
    +
    +
  • +
+
+
+
+ +
+
+ + diff --git a/docs/Structs/ImageEditsQuery/CodingKeys.html b/docs/Structs/ImageEditsQuery/CodingKeys.html new file mode 100644 index 00000000..003f87f3 --- /dev/null +++ b/docs/Structs/ImageEditsQuery/CodingKeys.html @@ -0,0 +1,516 @@ + + + + CodingKeys Enumeration Reference + + + + + + + + + + + + + +
+
+

Docs (100% documented)

+
+
+ +
+
+
+
+
+ +
+
+ +
+
+
+

CodingKeys

+
+
+ +
public enum CodingKeys : String, CodingKey
+ +
+
+ +
+
+
+
    +
  • +
    + + + + image + +
    +
    +
    +
    +
    +
    + +
    +
    +

    Declaration

    +
    +

    Swift

    +
    case image
    + +
    +
    +
    +
    +
  • +
  • +
    + + + + mask + +
    +
    +
    +
    +
    +
    + +
    +
    +

    Declaration

    +
    +

    Swift

    +
    case mask
    + +
    +
    +
    +
    +
  • +
  • +
    + + + + prompt + +
    +
    +
    +
    +
    +
    + +
    +
    +

    Declaration

    +
    +

    Swift

    +
    case prompt
    + +
    +
    +
    +
    +
  • +
  • +
    + + + + model + +
    +
    +
    +
    +
    +
    + +
    +
    +

    Declaration

    +
    +

    Swift

    +
    case model
    + +
    +
    +
    +
    +
  • +
  • +
    + + + + n + +
    +
    +
    +
    +
    +
    + +
    +
    +

    Declaration

    +
    +

    Swift

    +
    case n
    + +
    +
    +
    +
    +
  • +
  • +
    + + + + responseFormat + +
    +
    +
    +
    +
    +
    + +
    +
    +

    Declaration

    +
    +

    Swift

    +
    case responseFormat = "response_format"
    + +
    +
    +
    +
    +
  • +
  • +
    + + + + size + +
    +
    +
    +
    +
    +
    + +
    +
    +

    Declaration

    +
    +

    Swift

    +
    case size
    + +
    +
    +
    +
    +
  • +
  • +
    + + + + user + +
    +
    +
    +
    +
    +
    + +
    +
    +

    Declaration

    +
    +

    Swift

    +
    case user
    + +
    +
    +
    +
    +
  • +
+
+
+
+ +
+
+ + diff --git a/docs/Structs/ImageVariationsQuery.html b/docs/Structs/ImageVariationsQuery.html new file mode 100644 index 00000000..a2e95236 --- /dev/null +++ b/docs/Structs/ImageVariationsQuery.html @@ -0,0 +1,559 @@ + + + + ImageVariationsQuery Structure Reference + + + + + + + + + + + + + +
+
+

Docs (100% documented)

+
+
+ +
+
+
+
+
+ +
+
+ +
+
+
+

ImageVariationsQuery

+
+
+ +
public struct ImageVariationsQuery : Codable
+ +
+
+ +
+
+
+
    +
  • +
    + + + + ResponseFormat + +
    +
    +
    +
    +
    +
    + +
    +
    +

    Declaration

    +
    +

    Swift

    +
    public typealias ResponseFormat = ImagesQuery.ResponseFormat
    + +
    +
    +
    +
    +
  • +
  • +
    + + + + image + +
    +
    +
    +
    +
    +
    +

    The image to use as the basis for the variation(s). Must be a valid PNG file, less than 4MB, and square.

    + +
    +
    +

    Declaration

    +
    +

    Swift

    +
    public let image: Data
    + +
    +
    +
    +
    +
  • +
  • +
    + + + + model + +
    +
    +
    +
    +
    +
    +

    The model to use for image generation. Only dall-e-2 is supported at this time. +Defaults to dall-e-2

    + +
    +
    +

    Declaration

    +
    +

    Swift

    +
    public let model: Model?
    + +
    +
    +
    +
    +
  • +
  • +
    + + + + n + +
    +
    +
    +
    +
    +
    +

    The number of images to generate. Must be between 1 and 10. +Defaults to 1

    + +
    +
    +

    Declaration

    +
    +

    Swift

    +
    public let n: Int?
    + +
    +
    +
    +
    +
  • +
  • +
    + + + + responseFormat + +
    +
    +
    +
    +
    +
    +

    The format in which the generated images are returned. Must be one of url or b64_json. +Defaults to url

    + +
    +
    +

    Declaration

    +
    +

    Swift

    +
    public let responseFormat: `Self`.ResponseFormat?
    + +
    +
    +
    +
    +
  • +
  • +
    + + + + size + +
    +
    +
    +
    +
    +
    +

    The size of the generated images. Must be one of 256x256, 512x512, or 1024x1024. +Defaults to 1024x1024

    + +
    +
    +

    Declaration

    +
    +

    Swift

    +
    public let size: String?
    + +
    +
    +
    +
    +
  • +
  • +
    + + + + user + +
    +
    +
    +
    +
    +
    +

    A unique identifier representing your end-user, which can help OpenAI to monitor and detect abuse. +https://platform.openai.com/docs/guides/safety-best-practices/end-user-ids

    + +
    +
    +

    Declaration

    +
    +

    Swift

    +
    public let user: String?
    + +
    +
    +
    +
    +
  • +
  • + +
    +
    +
    +
    +
    + +
    +
    +

    Declaration

    +
    +

    Swift

    +
    public init(
    +    image: Data,
    +    model: Model? = nil,
    +    n: Int? = nil,
    +    responseFormat: Self.ResponseFormat? = nil,
    +    size: String? = nil,
    +    user: String? = nil
    +)
    + +
    +
    +
    +
    +
  • +
  • +
    + + + + CodingKeys + +
    +
    +
    +
    +
    +
    + + See more +
    +
    +

    Declaration

    +
    +

    Swift

    +
    public enum CodingKeys : String, CodingKey
    + +
    +
    +
    +
    +
  • +
+
+
+
+ +
+
+ + diff --git a/docs/Structs/ImageVariationsQuery/CodingKeys.html b/docs/Structs/ImageVariationsQuery/CodingKeys.html new file mode 100644 index 00000000..d3c555a5 --- /dev/null +++ b/docs/Structs/ImageVariationsQuery/CodingKeys.html @@ -0,0 +1,464 @@ + + + + CodingKeys Enumeration Reference + + + + + + + + + + + + + +
+
+

Docs (100% documented)

+
+
+ +
+
+
+
+
+ +
+
+ +
+
+
+

CodingKeys

+
+
+ +
public enum CodingKeys : String, CodingKey
+ +
+
+ +
+
+
+
    +
  • +
    + + + + image + +
    +
    +
    +
    +
    +
    + +
    +
    +

    Declaration

    +
    +

    Swift

    +
    case image
    + +
    +
    +
    +
    +
  • +
  • +
    + + + + model + +
    +
    +
    +
    +
    +
    + +
    +
    +

    Declaration

    +
    +

    Swift

    +
    case model
    + +
    +
    +
    +
    +
  • +
  • +
    + + + + n + +
    +
    +
    +
    +
    +
    + +
    +
    +

    Declaration

    +
    +

    Swift

    +
    case n
    + +
    +
    +
    +
    +
  • +
  • +
    + + + + responseFormat + +
    +
    +
    +
    +
    +
    + +
    +
    +

    Declaration

    +
    +

    Swift

    +
    case responseFormat = "response_format"
    + +
    +
    +
    +
    +
  • +
  • +
    + + + + size + +
    +
    +
    +
    +
    +
    + +
    +
    +

    Declaration

    +
    +

    Swift

    +
    case size
    + +
    +
    +
    +
    +
  • +
  • +
    + + + + user + +
    +
    +
    +
    +
    +
    + +
    +
    +

    Declaration

    +
    +

    Swift

    +
    case user
    + +
    +
    +
    +
    +
  • +
+
+
+
+ +
+
+ + diff --git a/docs/Structs/ImagesQuery.html b/docs/Structs/ImagesQuery.html new file mode 100644 index 00000000..4812d094 --- /dev/null +++ b/docs/Structs/ImagesQuery.html @@ -0,0 +1,701 @@ + + + + ImagesQuery Structure Reference + + + + + + + + + + + + + +
+
+

Docs (100% documented)

+
+
+ +
+
+
+
+
+ +
+
+ +
+
+
+

ImagesQuery

+
+
+ +
public struct ImagesQuery : Codable
+ +
+
+

Given a prompt and/or an input image, the model will generate a new image. +https://platform.openai.com/docs/guides/images

+ +
+
+
+
    +
  • +
    + + + + ResponseFormat + +
    +
    +
    +
    +
    +
    + + See more +
    +
    +

    Declaration

    +
    +

    Swift

    +
    public enum ResponseFormat : String, Codable, Equatable
    + +
    +
    +
    +
    +
  • +
  • +
    + + + + prompt + +
    +
    +
    +
    +
    +
    +

    A text description of the desired image(s). The maximum length is 1000 characters for dall-e-2 and 4000 characters for dall-e-3.

    + +
    +
    +

    Declaration

    +
    +

    Swift

    +
    public let prompt: String
    + +
    +
    +
    +
    +
  • +
  • +
    + + + + model + +
    +
    +
    +
    +
    +
    +

    The model to use for image generation. +Defaults to dall-e-2

    + +
    +
    +

    Declaration

    +
    +

    Swift

    +
    public let model: Model?
    + +
    +
    +
    +
    +
  • +
  • +
    + + + + responseFormat + +
    +
    +
    +
    +
    +
    +

    The format in which the generated images are returned. Must be one of url or b64_json. +Defaults to url

    + +
    +
    +

    Declaration

    +
    +

    Swift

    +
    public let responseFormat: `Self`.ResponseFormat?
    + +
    +
    +
    +
    +
  • +
  • +
    + + + + n + +
    +
    +
    +
    +
    +
    +

    The number of images to generate. Must be between 1 and 10. For dall-e-3, only n=1 is supported. +Defaults to 1

    + +
    +
    +

    Declaration

    +
    +

    Swift

    +
    public let n: Int?
    + +
    +
    +
    +
    +
  • +
  • +
    + + + + size + +
    +
    +
    +
    +
    +
    +

    The size of the generated images. Must be one of 256x256, 512x512, or 1024x1024 for dall-e-2. Must be one of 1024x1024, 1792x1024, or 1024x1792 for dall-e-3 models. +Defaults to 1024x1024

    + +
    +
    +

    Declaration

    +
    +

    Swift

    +
    public let size: `Self`.Size?
    + +
    +
    +
    +
    +
  • +
  • +
    + + + + user + +
    +
    +
    +
    +
    +
    +

    A unique identifier representing your end-user, which can help OpenAI to monitor and detect abuse. +https://platform.openai.com/docs/guides/safety-best-practices/end-user-ids

    + +
    +
    +

    Declaration

    +
    +

    Swift

    +
    public let user: String?
    + +
    +
    +
    +
    +
  • +
  • +
    + + + + style + +
    +
    +
    +
    +
    +
    +

    The style of the generated images. Must be one of vivid or natural. Vivid causes the model to lean towards generating hyper-real and dramatic images. Natural causes the model to produce more natural, less hyper-real looking images. This param is only supported for dall-e-3. +Defaults to vivid

    + +
    +
    +

    Declaration

    +
    +

    Swift

    +
    public let style: `Self`.Style?
    + +
    +
    +
    +
    +
  • +
  • +
    + + + + quality + +
    +
    +
    +
    +
    +
    +

    The quality of the image that will be generated. hd creates images with finer details and greater consistency across the image. This param is only supported for dall-e-3. +Defaults to standard

    + +
    +
    +

    Declaration

    +
    +

    Swift

    +
    public let quality: `Self`.Quality?
    + +
    +
    +
    +
    +
  • +
  • + +
    +
    +
    +
    +
    + +
    +
    +

    Declaration

    +
    +

    Swift

    +
    public init(
    +    prompt: String,
    +    model: Model? = nil,
    +    n: Int? = nil,
    +    quality:Self.Quality? = nil,
    +    responseFormat: Self.ResponseFormat? = nil,
    +    size: Size? = nil,
    +    style: Self.Style? = nil,
    +    user: String? = nil
    +)
    + +
    +
    +
    +
    +
  • +
  • +
    + + + + CodingKeys + +
    +
    +
    +
    +
    +
    + + See more +
    +
    +

    Declaration

    +
    +

    Swift

    +
    public enum CodingKeys : String, CodingKey
    + +
    +
    +
    +
    +
  • +
  • +
    + + + + Style + +
    +
    +
    +
    +
    +
    + + See more +
    +
    +

    Declaration

    +
    +

    Swift

    +
    public enum Style : String, Codable, CaseIterable
    + +
    +
    +
    +
    +
  • +
  • +
    + + + + Quality + +
    +
    +
    +
    +
    +
    + + See more +
    +
    +

    Declaration

    +
    +

    Swift

    +
    public enum Quality : String, Codable, CaseIterable
    + +
    +
    +
    +
    +
  • +
  • +
    + + + + Size + +
    +
    +
    +
    +
    +
    + + See more +
    +
    +

    Declaration

    +
    +

    Swift

    +
    public enum Size : String, Codable, CaseIterable
    + +
    +
    +
    +
    +
  • +
+
+
+
+ +
+
+ + diff --git a/docs/Structs/ImagesQuery/CodingKeys.html b/docs/Structs/ImagesQuery/CodingKeys.html new file mode 100644 index 00000000..bf4b7cbf --- /dev/null +++ b/docs/Structs/ImagesQuery/CodingKeys.html @@ -0,0 +1,516 @@ + + + + CodingKeys Enumeration Reference + + + + + + + + + + + + + +
+
+

Docs (100% documented)

+
+
+ +
+
+
+
+
+ +
+
+ +
+
+
+

CodingKeys

+
+
+ +
public enum CodingKeys : String, CodingKey
+ +
+
+ +
+
+
+
    +
  • +
    + + + + model + +
    +
    +
    +
    +
    +
    + +
    +
    +

    Declaration

    +
    +

    Swift

    +
    case model
    + +
    +
    +
    +
    +
  • +
  • +
    + + + + prompt + +
    +
    +
    +
    +
    +
    + +
    +
    +

    Declaration

    +
    +

    Swift

    +
    case prompt
    + +
    +
    +
    +
    +
  • +
  • +
    + + + + n + +
    +
    +
    +
    +
    +
    + +
    +
    +

    Declaration

    +
    +

    Swift

    +
    case n
    + +
    +
    +
    +
    +
  • +
  • +
    + + + + size + +
    +
    +
    +
    +
    +
    + +
    +
    +

    Declaration

    +
    +

    Swift

    +
    case size
    + +
    +
    +
    +
    +
  • +
  • +
    + + + + user + +
    +
    +
    +
    +
    +
    + +
    +
    +

    Declaration

    +
    +

    Swift

    +
    case user
    + +
    +
    +
    +
    +
  • +
  • +
    + + + + style + +
    +
    +
    +
    +
    +
    + +
    +
    +

    Declaration

    +
    +

    Swift

    +
    case style
    + +
    +
    +
    +
    +
  • +
  • +
    + + + + responseFormat + +
    +
    +
    +
    +
    +
    + +
    +
    +

    Declaration

    +
    +

    Swift

    +
    case responseFormat = "response_format"
    + +
    +
    +
    +
    +
  • +
  • +
    + + + + quality + +
    +
    +
    +
    +
    +
    + +
    +
    +

    Declaration

    +
    +

    Swift

    +
    case quality
    + +
    +
    +
    +
    +
  • +
+
+
+
+ +
+
+ + diff --git a/docs/Structs/ImagesQuery/Quality.html b/docs/Structs/ImagesQuery/Quality.html new file mode 100644 index 00000000..99029b9a --- /dev/null +++ b/docs/Structs/ImagesQuery/Quality.html @@ -0,0 +1,360 @@ + + + + Quality Enumeration Reference + + + + + + + + + + + + + +
+
+

Docs (100% documented)

+
+
+ +
+
+
+
+
+ +
+
+ +
+
+
+

Quality

+
+
+ +
public enum Quality : String, Codable, CaseIterable
+ +
+
+ +
+
+
+
    +
  • +
    + + + + standard + +
    +
    +
    +
    +
    +
    + +
    +
    +

    Declaration

    +
    +

    Swift

    +
    case standard
    + +
    +
    +
    +
    +
  • +
  • +
    + + + + hd + +
    +
    +
    +
    +
    +
    + +
    +
    +

    Declaration

    +
    +

    Swift

    +
    case hd
    + +
    +
    +
    +
    +
  • +
+
+
+
+ +
+
+ + diff --git a/docs/Structs/ImagesQuery/ResponseFormat.html b/docs/Structs/ImagesQuery/ResponseFormat.html new file mode 100644 index 00000000..6b771624 --- /dev/null +++ b/docs/Structs/ImagesQuery/ResponseFormat.html @@ -0,0 +1,360 @@ + + + + ResponseFormat Enumeration Reference + + + + + + + + + + + + + +
+
+

Docs (100% documented)

+
+
+ +
+
+
+
+
+ +
+
+ +
+
+
+

ResponseFormat

+
+
+ +
public enum ResponseFormat : String, Codable, Equatable
+ +
+
+ +
+
+
+
    +
  • +
    + + + + url + +
    +
    +
    +
    +
    +
    + +
    +
    +

    Declaration

    +
    +

    Swift

    +
    case url
    + +
    +
    +
    +
    +
  • +
  • +
    + + + + b64_json + +
    +
    +
    +
    +
    +
    + +
    +
    +

    Declaration

    +
    +

    Swift

    +
    case b64_json
    + +
    +
    +
    +
    +
  • +
+
+
+
+ +
+
+ + diff --git a/docs/Structs/ImagesQuery/Size.html b/docs/Structs/ImagesQuery/Size.html new file mode 100644 index 00000000..4bee5dbb --- /dev/null +++ b/docs/Structs/ImagesQuery/Size.html @@ -0,0 +1,438 @@ + + + + Size Enumeration Reference + + + + + + + + + + + + + +
+
+

Docs (100% documented)

+
+
+ +
+
+
+
+
+ +
+
+ +
+
+
+

Size

+
+
+ +
public enum Size : String, Codable, CaseIterable
+ +
+
+ +
+
+
+
    +
  • +
    + + + + _256 + +
    +
    +
    +
    +
    +
    + +
    +
    +

    Declaration

    +
    +

    Swift

    +
    case _256 = "256x256"
    + +
    +
    +
    +
    +
  • +
  • +
    + + + + _512 + +
    +
    +
    +
    +
    +
    + +
    +
    +

    Declaration

    +
    +

    Swift

    +
    case _512 = "512x512"
    + +
    +
    +
    +
    +
  • +
  • +
    + + + + _1024 + +
    +
    +
    +
    +
    +
    + +
    +
    +

    Declaration

    +
    +

    Swift

    +
    case _1024 = "1024x1024"
    + +
    +
    +
    +
    +
  • +
  • +
    + + + + _1792_1024 + +
    +
    +
    +
    +
    +
    + +
    +
    +

    Declaration

    +
    +

    Swift

    +
    case _1792_1024 = "1792x1024"
    + +
    +
    +
    +
    +
  • +
  • +
    + + + + _1024_1792 + +
    +
    +
    +
    +
    +
    + +
    +
    +

    Declaration

    +
    +

    Swift

    +
    case _1024_1792 = "1024x1792"
    + +
    +
    +
    +
    +
  • +
+
+
+
+ +
+
+ + diff --git a/docs/Structs/ImagesQuery/Style.html b/docs/Structs/ImagesQuery/Style.html new file mode 100644 index 00000000..6376cc05 --- /dev/null +++ b/docs/Structs/ImagesQuery/Style.html @@ -0,0 +1,360 @@ + + + + Style Enumeration Reference + + + + + + + + + + + + + +
+
+

Docs (100% documented)

+
+
+ +
+
+
+
+
+ +
+
+ +
+
+
+

Style

+
+
+ +
public enum Style : String, Codable, CaseIterable
+ +
+
+ +
+
+
+
    +
  • +
    + + + + natural + +
    +
    +
    +
    +
    +
    + +
    +
    +

    Declaration

    +
    +

    Swift

    +
    case natural
    + +
    +
    +
    +
    +
  • +
  • +
    + + + + vivid + +
    +
    +
    +
    +
    +
    + +
    +
    +

    Declaration

    +
    +

    Swift

    +
    case vivid
    + +
    +
    +
    +
    +
  • +
+
+
+
+ +
+
+ + diff --git a/docs/Structs/ImagesResult.html b/docs/Structs/ImagesResult.html new file mode 100644 index 00000000..80096634 --- /dev/null +++ b/docs/Structs/ImagesResult.html @@ -0,0 +1,387 @@ + + + + ImagesResult Structure Reference + + + + + + + + + + + + + +
+
+

Docs (100% documented)

+
+
+ +
+
+
+
+
+ +
+
+ +
+
+
+

ImagesResult

+
+
+ +
public struct ImagesResult : Codable, Equatable
+ +
+
+

Returns a list of image objects.

+ +
+
+
+
    +
  • +
    + + + + created + +
    +
    +
    +
    +
    +
    + +
    +
    +

    Declaration

    +
    +

    Swift

    +
    public let created: TimeInterval
    + +
    +
    +
    +
    +
  • +
  • +
    + + + + data + +
    +
    +
    +
    +
    +
    + +
    +
    +

    Declaration

    +
    +

    Swift

    +
    public let data: [`Self`.Image]
    + +
    +
    +
    +
    +
  • +
  • +
    + + + + Image + +
    +
    +
    +
    +
    +
    +

    Represents the url or the content of an image generated by the OpenAI API.

    + + See more +
    +
    +

    Declaration

    +
    +

    Swift

    +
    public struct Image : Codable, Equatable
    + +
    +
    +
    +
    +
  • +
+
+
+
+ +
+
+ + diff --git a/docs/Structs/ImagesResult/Image.html b/docs/Structs/ImagesResult/Image.html new file mode 100644 index 00000000..3facad36 --- /dev/null +++ b/docs/Structs/ImagesResult/Image.html @@ -0,0 +1,417 @@ + + + + Image Structure Reference + + + + + + + + + + + + + +
+
+

Docs (100% documented)

+
+
+ +
+
+
+
+
+ +
+
+ +
+
+
+

Image

+
+
+ +
public struct Image : Codable, Equatable
+ +
+
+

Represents the url or the content of an image generated by the OpenAI API.

+ +
+
+
+
    +
  • +
    + + + + b64Json + +
    +
    +
    +
    +
    +
    +

    The base64-encoded JSON of the generated image, if response_format is b64_json

    + +
    +
    +

    Declaration

    +
    +

    Swift

    +
    public let b64Json: String?
    + +
    +
    +
    +
    +
  • +
  • +
    + + + + revisedPrompt + +
    +
    +
    +
    +
    +
    +

    The prompt that was used to generate the image, if there was any revision to the prompt.

    + +
    +
    +

    Declaration

    +
    +

    Swift

    +
    public let revisedPrompt: String?
    + +
    +
    +
    +
    +
  • +
  • +
    + + + + url + +
    +
    +
    +
    +
    +
    +

    The URL of the generated image, if response_format is url (default).

    + +
    +
    +

    Declaration

    +
    +

    Swift

    +
    public let url: String?
    + +
    +
    +
    +
    +
  • +
  • +
    + + + + CodingKeys + +
    +
    +
    +
    +
    +
    + + See more +
    +
    +

    Declaration

    +
    +

    Swift

    +
    public enum CodingKeys : String, CodingKey
    + +
    +
    +
    +
    +
  • +
+
+
+
+ +
+
+ + diff --git a/docs/Structs/ImagesResult/Image/CodingKeys.html b/docs/Structs/ImagesResult/Image/CodingKeys.html new file mode 100644 index 00000000..6993ab03 --- /dev/null +++ b/docs/Structs/ImagesResult/Image/CodingKeys.html @@ -0,0 +1,388 @@ + + + + CodingKeys Enumeration Reference + + + + + + + + + + + + + +
+
+

Docs (100% documented)

+
+
+ +
+
+
+
+
+ +
+
+ +
+
+
+

CodingKeys

+
+
+ +
public enum CodingKeys : String, CodingKey
+ +
+
+ +
+
+
+
    +
  • +
    + + + + b64Json + +
    +
    +
    +
    +
    +
    + +
    +
    +

    Declaration

    +
    +

    Swift

    +
    case b64Json = "b64_json"
    + +
    +
    +
    +
    +
  • +
  • +
    + + + + revisedPrompt + +
    +
    +
    +
    +
    +
    + +
    +
    +

    Declaration

    +
    +

    Swift

    +
    case revisedPrompt = "revised_prompt"
    + +
    +
    +
    +
    +
  • +
  • +
    + + + + url + +
    +
    +
    +
    +
    +
    + +
    +
    +

    Declaration

    +
    +

    Swift

    +
    case url
    + +
    +
    +
    +
    +
  • +
+
+
+
+ +
+
+ + diff --git a/docs/Structs/ModelQuery.html b/docs/Structs/ModelQuery.html new file mode 100644 index 00000000..f359c1fc --- /dev/null +++ b/docs/Structs/ModelQuery.html @@ -0,0 +1,360 @@ + + + + ModelQuery Structure Reference + + + + + + + + + + + + + +
+
+

Docs (100% documented)

+
+
+ +
+
+
+
+
+ +
+
+ +
+
+
+

ModelQuery

+
+
+ +
public struct ModelQuery : Codable, Equatable
+ +
+
+

Retrieves a model instance, providing basic information about the model such as the owner and permissioning.

+ +
+
+
+
    +
  • +
    + + + + model + +
    +
    +
    +
    +
    +
    +

    The ID of the model to use for this request.

    + +
    +
    +

    Declaration

    +
    +

    Swift

    +
    public let model: Model
    + +
    +
    +
    +
    +
  • +
  • +
    + + + + init(model:) + +
    +
    +
    +
    +
    +
    + +
    +
    +

    Declaration

    +
    +

    Swift

    +
    public init(model: Model)
    + +
    +
    +
    +
    +
  • +
+
+
+
+ +
+
+ + diff --git a/docs/Structs/ModelResult.html b/docs/Structs/ModelResult.html new file mode 100644 index 00000000..2e416535 --- /dev/null +++ b/docs/Structs/ModelResult.html @@ -0,0 +1,442 @@ + + + + ModelResult Structure Reference + + + + + + + + + + + + + +
+
+

Docs (100% documented)

+
+
+ +
+
+
+
+
+ +
+
+ +
+
+
+

ModelResult

+
+
+ +
public struct ModelResult : Codable, Equatable
+ +
+
+

The model object matching the specified ID.

+ +
+
+
+
    +
  • +
    + + + + id + +
    +
    +
    +
    +
    +
    +

    The model identifier, which can be referenced in the API endpoints.

    + +
    +
    +

    Declaration

    +
    +

    Swift

    +
    public let id: String
    + +
    +
    +
    +
    +
  • +
  • +
    + + + + created + +
    +
    +
    +
    +
    +
    +

    The Unix timestamp (in seconds) when the model was created.

    + +
    +
    +

    Declaration

    +
    +

    Swift

    +
    public let created: TimeInterval
    + +
    +
    +
    +
    +
  • +
  • +
    + + + + object + +
    +
    +
    +
    +
    +
    +

    The object type, which is always “model”.

    + +
    +
    +

    Declaration

    +
    +

    Swift

    +
    public let object: String
    + +
    +
    +
    +
    +
  • +
  • +
    + + + + ownedBy + +
    +
    +
    +
    +
    +
    +

    The organization that owns the model.

    + +
    +
    +

    Declaration

    +
    +

    Swift

    +
    public let ownedBy: String
    + +
    +
    +
    +
    +
  • +
  • +
    + + + + CodingKeys + +
    +
    +
    +
    +
    +
    + + See more +
    +
    +

    Declaration

    +
    +

    Swift

    +
    public enum CodingKeys : String, CodingKey
    + +
    +
    +
    +
    +
  • +
+
+
+
+ +
+
+ + diff --git a/docs/Structs/ModelResult/CodingKeys.html b/docs/Structs/ModelResult/CodingKeys.html new file mode 100644 index 00000000..cc3da332 --- /dev/null +++ b/docs/Structs/ModelResult/CodingKeys.html @@ -0,0 +1,412 @@ + + + + CodingKeys Enumeration Reference + + + + + + + + + + + + + +
+
+

Docs (100% documented)

+
+
+ +
+
+
+
+
+ +
+
+ +
+
+
+

CodingKeys

+
+
+ +
public enum CodingKeys : String, CodingKey
+ +
+
+ +
+
+
+
    +
  • +
    + + + + id + +
    +
    +
    +
    +
    +
    + +
    +
    +

    Declaration

    +
    +

    Swift

    +
    case id
    + +
    +
    +
    +
    +
  • +
  • +
    + + + + created + +
    +
    +
    +
    +
    +
    + +
    +
    +

    Declaration

    +
    +

    Swift

    +
    case created
    + +
    +
    +
    +
    +
  • +
  • +
    + + + + object + +
    +
    +
    +
    +
    +
    + +
    +
    +

    Declaration

    +
    +

    Swift

    +
    case object
    + +
    +
    +
    +
    +
  • +
  • +
    + + + + ownedBy + +
    +
    +
    +
    +
    +
    + +
    +
    +

    Declaration

    +
    +

    Swift

    +
    case ownedBy = "owned_by"
    + +
    +
    +
    +
    +
  • +
+
+
+
+ +
+
+ + diff --git a/docs/Structs/ModelsResult.html b/docs/Structs/ModelsResult.html new file mode 100644 index 00000000..0aadfcf5 --- /dev/null +++ b/docs/Structs/ModelsResult.html @@ -0,0 +1,361 @@ + + + + ModelsResult Structure Reference + + + + + + + + + + + + + +
+
+

Docs (100% documented)

+
+
+ +
+
+
+
+
+ +
+
+ +
+
+
+

ModelsResult

+
+
+ +
public struct ModelsResult : Codable, Equatable
+ +
+
+

A list of model objects.

+ +
+
+
+
    +
  • +
    + + + + data + +
    +
    +
    +
    +
    +
    +

    A list of model objects.

    + +
    +
    +

    Declaration

    +
    +

    Swift

    +
    public let data: [ModelResult]
    + +
    +
    +
    +
    +
  • +
  • +
    + + + + object + +
    +
    +
    +
    +
    +
    +

    The object type, which is always list

    + +
    +
    +

    Declaration

    +
    +

    Swift

    +
    public let object: String
    + +
    +
    +
    +
    +
  • +
+
+
+
+ +
+
+ + diff --git a/docs/Structs/ModerationsQuery.html b/docs/Structs/ModerationsQuery.html new file mode 100644 index 00000000..8749658a --- /dev/null +++ b/docs/Structs/ModerationsQuery.html @@ -0,0 +1,386 @@ + + + + ModerationsQuery Structure Reference + + + + + + + + + + + + + +
+
+

Docs (100% documented)

+
+
+ +
+
+
+
+
+ +
+
+ +
+
+
+

ModerationsQuery

+
+
+ +
public struct ModerationsQuery : Codable
+ +
+
+ +
+
+
+
    +
  • +
    + + + + input + +
    +
    +
    +
    +
    +
    +

    The input text to classify.

    + +
    +
    +

    Declaration

    +
    +

    Swift

    +
    public let input: String
    + +
    +
    +
    +
    +
  • +
  • +
    + + + + model + +
    +
    +
    +
    +
    +
    +

    ID of the model to use.

    + +
    +
    +

    Declaration

    +
    +

    Swift

    +
    public let model: Model?
    + +
    +
    +
    +
    +
  • +
  • +
    + + + + init(input:model:) + +
    +
    +
    +
    +
    +
    + +
    +
    +

    Declaration

    +
    +

    Swift

    +
    public init(input: String, model: Model? = nil)
    + +
    +
    +
    +
    +
  • +
+
+
+
+ +
+
+ + diff --git a/docs/Structs/ModerationsResult.html b/docs/Structs/ModerationsResult.html new file mode 100644 index 00000000..6a579df1 --- /dev/null +++ b/docs/Structs/ModerationsResult.html @@ -0,0 +1,412 @@ + + + + ModerationsResult Structure Reference + + + + + + + + + + + + + +
+
+

Docs (100% documented)

+
+
+ +
+
+
+
+
+ +
+
+ +
+
+
+

ModerationsResult

+
+
+ +
public struct ModerationsResult : Codable, Equatable
+
extension ModerationsResult: Identifiable
+ +
+
+ +
+
+
+
    +
  • +
    + + + + Moderation + +
    +
    +
    +
    +
    +
    + + See more +
    +
    +

    Declaration

    +
    +

    Swift

    +
    public struct Moderation : Codable, Equatable
    + +
    +
    +
    +
    +
  • +
  • +
    + + + + id + +
    +
    +
    +
    +
    +
    + +
    +
    +

    Declaration

    +
    +

    Swift

    +
    public let id: String
    + +
    +
    +
    +
    +
  • +
  • +
    + + + + model + +
    +
    +
    +
    +
    +
    + +
    +
    +

    Declaration

    +
    +

    Swift

    +
    public let model: Model
    + +
    +
    +
    +
    +
  • +
  • +
    + + + + results + +
    +
    +
    +
    +
    +
    + +
    +
    +

    Declaration

    +
    +

    Swift

    +
    public let results: [`Self`.Moderation]
    + +
    +
    +
    +
    +
  • +
+
+
+
+ +
+
+ + diff --git a/docs/Structs/ModerationsResult/Moderation.html b/docs/Structs/ModerationsResult/Moderation.html new file mode 100644 index 00000000..634e1d49 --- /dev/null +++ b/docs/Structs/ModerationsResult/Moderation.html @@ -0,0 +1,443 @@ + + + + Moderation Structure Reference + + + + + + + + + + + + + +
+
+

Docs (100% documented)

+
+
+ +
+
+
+
+
+ +
+
+ +
+
+
+

Moderation

+
+
+ +
public struct Moderation : Codable, Equatable
+ +
+
+ +
+
+
+
    +
  • +
    + + + + Categories + +
    +
    +
    +
    +
    +
    + + See more +
    +
    +

    Declaration

    +
    +

    Swift

    +
    public struct Categories : Codable, Equatable, Sequence
    + +
    +
    +
    +
    +
  • +
  • +
    + + + + CategoryScores + +
    +
    +
    +
    +
    +
    + + See more +
    +
    +

    Declaration

    +
    +

    Swift

    +
    public struct CategoryScores : Codable, Equatable, Sequence
    + +
    +
    +
    +
    +
  • +
  • +
    + + + + categories + +
    +
    +
    +
    +
    +
    +

    Collection of per-category binary usage policies violation flags. For each category, the value is true if the model flags the corresponding category as violated, false otherwise.

    + +
    +
    +

    Declaration

    +
    +

    Swift

    +
    public let categories: Categories
    + +
    +
    +
    +
    +
  • +
  • +
    + + + + categoryScores + +
    +
    +
    +
    +
    +
    +

    Collection of per-category raw scores output by the model, denoting the model’s confidence that the input violates the OpenAI’s policy for the category. The value is between 0 and 1, where higher values denote higher confidence. The scores should not be interpreted as probabilities.

    + +
    +
    +

    Declaration

    +
    +

    Swift

    +
    public let categoryScores: CategoryScores
    + +
    +
    +
    +
    +
  • +
  • +
    + + + + flagged + +
    +
    +
    +
    +
    +
    +

    True if the model classifies the content as violating OpenAI’s usage policies, false otherwise.

    + +
    +
    +

    Declaration

    +
    +

    Swift

    +
    public let flagged: Bool
    + +
    +
    +
    +
    +
  • +
+
+
+
+ +
+
+ + diff --git a/docs/Structs/ModerationsResult/Moderation/Categories.html b/docs/Structs/ModerationsResult/Moderation/Categories.html new file mode 100644 index 00000000..24b32f05 --- /dev/null +++ b/docs/Structs/ModerationsResult/Moderation/Categories.html @@ -0,0 +1,660 @@ + + + + Categories Structure Reference + + + + + + + + + + + + + +
+
+

Docs (100% documented)

+
+
+ +
+
+
+
+
+ +
+
+ +
+
+
+

Categories

+
+
+ +
public struct Categories : Codable, Equatable, Sequence
+ +
+
+ +
+
+
+
    +
  • +
    + + + + harassment + +
    +
    +
    +
    +
    +
    +

    Content that expresses, incites, or promotes harassing language towards any target.

    + +
    +
    +

    Declaration

    +
    +

    Swift

    +
    public let harassment: Bool
    + +
    +
    +
    +
    +
  • +
  • +
    + + + + harassmentThreatening + +
    +
    +
    +
    +
    +
    +

    Harassment content that also includes violence or serious harm towards any target.

    + +
    +
    +

    Declaration

    +
    +

    Swift

    +
    public let harassmentThreatening: Bool
    + +
    +
    +
    +
    +
  • +
  • +
    + + + + hate + +
    +
    +
    +
    +
    +
    +

    Content that expresses, incites, or promotes hate based on race, gender, ethnicity, religion, nationality, sexual orientation, disability status, or caste.

    + +
    +
    +

    Declaration

    +
    +

    Swift

    +
    public let hate: Bool
    + +
    +
    +
    +
    +
  • +
  • +
    + + + + hateThreatening + +
    +
    +
    +
    +
    +
    +

    Hateful content that also includes violence or serious harm towards the targeted group.

    + +
    +
    +

    Declaration

    +
    +

    Swift

    +
    public let hateThreatening: Bool
    + +
    +
    +
    +
    +
  • +
  • +
    + + + + selfHarm + +
    +
    +
    +
    +
    +
    +

    Content that promotes, encourages, or depicts acts of self-harm, such as suicide, cutting, and eating disorders.

    + +
    +
    +

    Declaration

    +
    +

    Swift

    +
    public let selfHarm: Bool
    + +
    +
    +
    +
    +
  • +
  • +
    + + + + selfHarmIntent + +
    +
    +
    +
    +
    +
    +

    Content where the speaker expresses that they are engaging or intend to engage in acts of self-harm, such as suicide, cutting, and eating disorders.

    + +
    +
    +

    Declaration

    +
    +

    Swift

    +
    public let selfHarmIntent: Bool
    + +
    +
    +
    +
    +
  • +
  • +
    + + + + selfHarmInstructions + +
    +
    +
    +
    +
    +
    +

    Content that encourages performing acts of self-harm, such as suicide, cutting, and eating disorders, or that gives instructions or advice on how to commit such acts.

    + +
    +
    +

    Declaration

    +
    +

    Swift

    +
    public let selfHarmInstructions: Bool
    + +
    +
    +
    +
    +
  • +
  • +
    + + + + sexual + +
    +
    +
    +
    +
    +
    +

    Content meant to arouse sexual excitement, such as the description of sexual activity, or that promotes sexual services (excluding sex education and wellness).

    + +
    +
    +

    Declaration

    +
    +

    Swift

    +
    public let sexual: Bool
    + +
    +
    +
    +
    +
  • +
  • +
    + + + + sexualMinors + +
    +
    +
    +
    +
    +
    +

    Sexual content that includes an individual who is under 18 years old.

    + +
    +
    +

    Declaration

    +
    +

    Swift

    +
    public let sexualMinors: Bool
    + +
    +
    +
    +
    +
  • +
  • +
    + + + + violence + +
    +
    +
    +
    +
    +
    +

    Content that promotes or glorifies violence or celebrates the suffering or humiliation of others.

    + +
    +
    +

    Declaration

    +
    +

    Swift

    +
    public let violence: Bool
    + +
    +
    +
    +
    +
  • +
  • +
    + + + + violenceGraphic + +
    +
    +
    +
    +
    +
    +

    Violent content that depicts death, violence, or serious physical injury in extreme graphic detail.

    + +
    +
    +

    Declaration

    +
    +

    Swift

    +
    public let violenceGraphic: Bool
    + +
    +
    +
    +
    +
  • +
  • +
    + + + + CodingKeys + +
    +
    +
    +
    +
    +
    + + See more +
    +
    +

    Declaration

    +
    +

    Swift

    +
    public enum CodingKeys : String, CodingKey, CaseIterable
    + +
    +
    +
    +
    +
  • +
  • +
    + + + + makeIterator() + +
    +
    +
    +
    +
    +
    + +
    +
    +

    Declaration

    +
    +

    Swift

    +
    public func makeIterator() -> IndexingIterator<[(String, Bool)]>
    + +
    +
    +
    +
    +
  • +
+
+
+
+ +
+
+ + diff --git a/docs/Structs/ModerationsResult/Moderation/Categories/CodingKeys.html b/docs/Structs/ModerationsResult/Moderation/Categories/CodingKeys.html new file mode 100644 index 00000000..1f7905fe --- /dev/null +++ b/docs/Structs/ModerationsResult/Moderation/Categories/CodingKeys.html @@ -0,0 +1,598 @@ + + + + CodingKeys Enumeration Reference + + + + + + + + + + + + + +
+
+

Docs (100% documented)

+
+
+ +
+
+
+
+
+ +
+
+ +
+
+
+

CodingKeys

+
+
+ +
public enum CodingKeys : String, CodingKey, CaseIterable
+ +
+
+ +
+
+
+
    +
  • +
    + + + + harassment + +
    +
    +
    +
    +
    +
    + +
    +
    +

    Declaration

    +
    +

    Swift

    +
    case harassment
    + +
    +
    +
    +
    +
  • +
  • +
    + + + + harassmentThreatening + +
    +
    +
    +
    +
    +
    + +
    +
    +

    Declaration

    +
    +

    Swift

    +
    case harassmentThreatening = "harassment/threatening"
    + +
    +
    +
    +
    +
  • +
  • +
    + + + + hate + +
    +
    +
    +
    +
    +
    + +
    +
    +

    Declaration

    +
    +

    Swift

    +
    case hate
    + +
    +
    +
    +
    +
  • +
  • +
    + + + + hateThreatening + +
    +
    +
    +
    +
    +
    + +
    +
    +

    Declaration

    +
    +

    Swift

    +
    case hateThreatening = "hate/threatening"
    + +
    +
    +
    +
    +
  • +
  • +
    + + + + selfHarm + +
    +
    +
    +
    +
    +
    + +
    +
    +

    Declaration

    +
    +

    Swift

    +
    case selfHarm = "self-harm"
    + +
    +
    +
    +
    +
  • +
  • +
    + + + + selfHarmIntent + +
    +
    +
    +
    +
    +
    + +
    +
    +

    Declaration

    +
    +

    Swift

    +
    case selfHarmIntent = "self-harm/intent"
    + +
    +
    +
    +
    +
  • +
  • +
    + + + + selfHarmInstructions + +
    +
    +
    +
    +
    +
    + +
    +
    +

    Declaration

    +
    +

    Swift

    +
    case selfHarmInstructions = "self-harm/instructions"
    + +
    +
    +
    +
    +
  • +
  • +
    + + + + sexual + +
    +
    +
    +
    +
    +
    + +
    +
    +

    Declaration

    +
    +

    Swift

    +
    case sexual
    + +
    +
    +
    +
    +
  • +
  • +
    + + + + sexualMinors + +
    +
    +
    +
    +
    +
    + +
    +
    +

    Declaration

    +
    +

    Swift

    +
    case sexualMinors = "sexual/minors"
    + +
    +
    +
    +
    +
  • +
  • +
    + + + + violence + +
    +
    +
    +
    +
    +
    + +
    +
    +

    Declaration

    +
    +

    Swift

    +
    case violence
    + +
    +
    +
    +
    +
  • +
  • +
    + + + + violenceGraphic + +
    +
    +
    +
    +
    +
    + +
    +
    +

    Declaration

    +
    +

    Swift

    +
    case violenceGraphic = "violence/graphic"
    + +
    +
    +
    +
    +
  • +
+
+
+
+ +
+
+ + diff --git a/docs/Structs/ModerationsResult/Moderation/CategoryScores.html b/docs/Structs/ModerationsResult/Moderation/CategoryScores.html new file mode 100644 index 00000000..ebc27305 --- /dev/null +++ b/docs/Structs/ModerationsResult/Moderation/CategoryScores.html @@ -0,0 +1,660 @@ + + + + CategoryScores Structure Reference + + + + + + + + + + + + + +
+
+

Docs (100% documented)

+
+
+ +
+
+
+
+
+ +
+
+ +
+
+
+

CategoryScores

+
+
+ +
public struct CategoryScores : Codable, Equatable, Sequence
+ +
+
+ +
+
+
+
    +
  • +
    + + + + harassment + +
    +
    +
    +
    +
    +
    +

    Content that expresses, incites, or promotes harassing language towards any target.

    + +
    +
    +

    Declaration

    +
    +

    Swift

    +
    public let harassment: Double
    + +
    +
    +
    +
    +
  • +
  • +
    + + + + harassmentThreatening + +
    +
    +
    +
    +
    +
    +

    Harassment content that also includes violence or serious harm towards any target.

    + +
    +
    +

    Declaration

    +
    +

    Swift

    +
    public let harassmentThreatening: Double
    + +
    +
    +
    +
    +
  • +
  • +
    + + + + hate + +
    +
    +
    +
    +
    +
    +

    Content that expresses, incites, or promotes hate based on race, gender, ethnicity, religion, nationality, sexual orientation, disability status, or caste.

    + +
    +
    +

    Declaration

    +
    +

    Swift

    +
    public let hate: Double
    + +
    +
    +
    +
    +
  • +
  • +
    + + + + hateThreatening + +
    +
    +
    +
    +
    +
    +

    Hateful content that also includes violence or serious harm towards the targeted group.

    + +
    +
    +

    Declaration

    +
    +

    Swift

    +
    public let hateThreatening: Double
    + +
    +
    +
    +
    +
  • +
  • +
    + + + + selfHarm + +
    +
    +
    +
    +
    +
    +

    Content that promotes, encourages, or depicts acts of self-harm, such as suicide, cutting, and eating disorders.

    + +
    +
    +

    Declaration

    +
    +

    Swift

    +
    public let selfHarm: Double
    + +
    +
    +
    +
    +
  • +
  • +
    + + + + selfHarmIntent + +
    +
    +
    +
    +
    +
    +

    Content where the speaker expresses that they are engaging or intend to engage in acts of self-harm, such as suicide, cutting, and eating disorders.

    + +
    +
    +

    Declaration

    +
    +

    Swift

    +
    public let selfHarmIntent: Double
    + +
    +
    +
    +
    +
  • +
  • +
    + + + + selfHarmInstructions + +
    +
    +
    +
    +
    +
    +

    Content that encourages performing acts of self-harm, such as suicide, cutting, and eating disorders, or that gives instructions or advice on how to commit such acts.

    + +
    +
    +

    Declaration

    +
    +

    Swift

    +
    public let selfHarmInstructions: Double
    + +
    +
    +
    +
    +
  • +
  • +
    + + + + sexual + +
    +
    +
    +
    +
    +
    +

    Content meant to arouse sexual excitement, such as the description of sexual activity, or that promotes sexual services (excluding sex education and wellness).

    + +
    +
    +

    Declaration

    +
    +

    Swift

    +
    public let sexual: Double
    + +
    +
    +
    +
    +
  • +
  • +
    + + + + sexualMinors + +
    +
    +
    +
    +
    +
    +

    Sexual content that includes an individual who is under 18 years old.

    + +
    +
    +

    Declaration

    +
    +

    Swift

    +
    public let sexualMinors: Double
    + +
    +
    +
    +
    +
  • +
  • +
    + + + + violence + +
    +
    +
    +
    +
    +
    +

    Content that promotes or glorifies violence or celebrates the suffering or humiliation of others.

    + +
    +
    +

    Declaration

    +
    +

    Swift

    +
    public let violence: Double
    + +
    +
    +
    +
    +
  • +
  • +
    + + + + violenceGraphic + +
    +
    +
    +
    +
    +
    +

    Violent content that depicts death, violence, or serious physical injury in extreme graphic detail.

    + +
    +
    +

    Declaration

    +
    +

    Swift

    +
    public let violenceGraphic: Double
    + +
    +
    +
    +
    +
  • +
  • +
    + + + + CodingKeys + +
    +
    +
    +
    +
    +
    + + See more +
    +
    +

    Declaration

    +
    +

    Swift

    +
    public enum CodingKeys : String, CodingKey, CaseIterable
    + +
    +
    +
    +
    +
  • +
  • +
    + + + + makeIterator() + +
    +
    +
    +
    +
    +
    + +
    +
    +

    Declaration

    +
    +

    Swift

    +
    public func makeIterator() -> IndexingIterator<[(String, Bool)]>
    + +
    +
    +
    +
    +
  • +
+
+
+
+ +
+
+ + diff --git a/docs/Structs/ModerationsResult/Moderation/CategoryScores/CodingKeys.html b/docs/Structs/ModerationsResult/Moderation/CategoryScores/CodingKeys.html new file mode 100644 index 00000000..9ceabfbf --- /dev/null +++ b/docs/Structs/ModerationsResult/Moderation/CategoryScores/CodingKeys.html @@ -0,0 +1,598 @@ + + + + CodingKeys Enumeration Reference + + + + + + + + + + + + + +
+
+

Docs (100% documented)

+
+
+ +
+
+
+
+
+ +
+
+ +
+
+
+

CodingKeys

+
+
+ +
public enum CodingKeys : String, CodingKey, CaseIterable
+ +
+
+ +
+
+
+
    +
  • +
    + + + + harassment + +
    +
    +
    +
    +
    +
    + +
    +
    +

    Declaration

    +
    +

    Swift

    +
    case harassment
    + +
    +
    +
    +
    +
  • +
  • +
    + + + + harassmentThreatening + +
    +
    +
    +
    +
    +
    + +
    +
    +

    Declaration

    +
    +

    Swift

    +
    case harassmentThreatening = "harassment/threatening"
    + +
    +
    +
    +
    +
  • +
  • +
    + + + + hate + +
    +
    +
    +
    +
    +
    + +
    +
    +

    Declaration

    +
    +

    Swift

    +
    case hate
    + +
    +
    +
    +
    +
  • +
  • +
    + + + + hateThreatening + +
    +
    +
    +
    +
    +
    + +
    +
    +

    Declaration

    +
    +

    Swift

    +
    case hateThreatening = "hate/threatening"
    + +
    +
    +
    +
    +
  • +
  • +
    + + + + selfHarm + +
    +
    +
    +
    +
    +
    + +
    +
    +

    Declaration

    +
    +

    Swift

    +
    case selfHarm = "self-harm"
    + +
    +
    +
    +
    +
  • +
  • +
    + + + + selfHarmIntent + +
    +
    +
    +
    +
    +
    + +
    +
    +

    Declaration

    +
    +

    Swift

    +
    case selfHarmIntent = "self-harm/intent"
    + +
    +
    +
    +
    +
  • +
  • +
    + + + + selfHarmInstructions + +
    +
    +
    +
    +
    +
    + +
    +
    +

    Declaration

    +
    +

    Swift

    +
    case selfHarmInstructions = "self-harm/instructions"
    + +
    +
    +
    +
    +
  • +
  • +
    + + + + sexual + +
    +
    +
    +
    +
    +
    + +
    +
    +

    Declaration

    +
    +

    Swift

    +
    case sexual
    + +
    +
    +
    +
    +
  • +
  • +
    + + + + sexualMinors + +
    +
    +
    +
    +
    +
    + +
    +
    +

    Declaration

    +
    +

    Swift

    +
    case sexualMinors = "sexual/minors"
    + +
    +
    +
    +
    +
  • +
  • +
    + + + + violence + +
    +
    +
    +
    +
    +
    + +
    +
    +

    Declaration

    +
    +

    Swift

    +
    case violence
    + +
    +
    +
    +
    +
  • +
  • +
    + + + + violenceGraphic + +
    +
    +
    +
    +
    +
    + +
    +
    +

    Declaration

    +
    +

    Swift

    +
    case violenceGraphic = "violence/graphic"
    + +
    +
    +
    +
    +
  • +
+
+
+
+ +
+
+ + diff --git a/docs/Structs/Vector.html b/docs/Structs/Vector.html new file mode 100644 index 00000000..353605b1 --- /dev/null +++ b/docs/Structs/Vector.html @@ -0,0 +1,422 @@ + + + + Vector Structure Reference + + + + + + + + + + + + + +
+
+

Docs (100% documented)

+
+
+ +
+
+
+
+
+ +
+
+ +
+
+
+

Vector

+
+
+ +
public struct Vector
+ +
+
+ +
+
+
+
    +
  • + +
    +
    +
    +
    +
    +

    Returns the similarity between two vectors

    + +
    +
    +

    Declaration

    +
    +

    Swift

    +
    public static func cosineSimilarity(a: [Double], b: [Double]) -> Double
    + +
    +
    +
    +

    Parameters

    + + + + + + + + + + + +
    + + a + + +
    +

    The first vector

    +
    +
    + + b + + +
    +

    The second vector

    +
    +
    +
    +
    +
    +
  • +
  • + +
    +
    +
    +
    +
    +

    Returns the difference between two vectors. Cosine distance is defined as 1 - cosineSimilarity(a, b)

    + +
    +
    +

    Declaration

    +
    +

    Swift

    +
    public func cosineDifference(a: [Double], b: [Double]) -> Double
    + +
    +
    +
    +

    Parameters

    + + + + + + + + + + + +
    + + a + + +
    +

    The first vector

    +
    +
    + + b + + +
    +

    The second vector

    +
    +
    +
    +
    +
    +
  • +
+
+
+
+ +
+
+ + diff --git a/docs/Typealiases.html b/docs/Typealiases.html new file mode 100644 index 00000000..1b045dd1 --- /dev/null +++ b/docs/Typealiases.html @@ -0,0 +1,325 @@ + + + + Type Aliases Reference + + + + + + + + + + + + + +
+
+

Docs (100% documented)

+
+
+ +
+
+
+
+
+ +
+
+ +
+
+
+

Type Aliases

+

The following type aliases are available globally.

+ +
+
+
+
    +
  • +
    + + + + Model + +
    +
    +
    +
    +
    +
    +

    Defines all available OpenAI models supported by the library.

    + +
    +
    +

    Declaration

    +
    +

    Swift

    +
    public typealias Model = String
    + +
    +
    +
    +
    +
  • +
+
+
+
+ +
+
+ + diff --git a/docs/badge.svg b/docs/badge.svg new file mode 100644 index 00000000..a096feca --- /dev/null +++ b/docs/badge.svg @@ -0,0 +1,28 @@ + + + + + + + + + + + + + + + + documentation + + + documentation + + + 100% + + + 100% + + + diff --git a/docs/css/highlight.css b/docs/css/highlight.css new file mode 100644 index 00000000..c170357c --- /dev/null +++ b/docs/css/highlight.css @@ -0,0 +1,202 @@ +/*! Jazzy - https://github.com/realm/jazzy + * Copyright Realm Inc. + * SPDX-License-Identifier: MIT + */ +/* Credit to https://gist.github.com/wataru420/2048287 */ +.highlight .c { + color: #999988; + font-style: italic; } + +.highlight .err { + color: #a61717; + background-color: #e3d2d2; } + +.highlight .k { + color: #000000; + font-weight: bold; } + +.highlight .o { + color: #000000; + font-weight: bold; } + +.highlight .cm { + color: #999988; + font-style: italic; } + +.highlight .cp { + color: #999999; + font-weight: bold; } + +.highlight .c1 { + color: #999988; + font-style: italic; } + +.highlight .cs { + color: #999999; + font-weight: bold; + font-style: italic; } + +.highlight .gd { + color: #000000; + background-color: #ffdddd; } + +.highlight .gd .x { + color: #000000; + background-color: #ffaaaa; } + +.highlight .ge { + color: #000000; + font-style: italic; } + +.highlight .gr { + color: #aa0000; } + +.highlight .gh { + color: #999999; } + +.highlight .gi { + color: #000000; + background-color: #ddffdd; } + +.highlight .gi .x { + color: #000000; + background-color: #aaffaa; } + +.highlight .go { + color: #888888; } + +.highlight .gp { + color: #555555; } + +.highlight .gs { + font-weight: bold; } + +.highlight .gu { + color: #aaaaaa; } + +.highlight .gt { + color: #aa0000; } + +.highlight .kc { + color: #000000; + font-weight: bold; } + +.highlight .kd { + color: #000000; + font-weight: bold; } + +.highlight .kp { + color: #000000; + font-weight: bold; } + +.highlight .kr { + color: #000000; + font-weight: bold; } + +.highlight .kt { + color: 
#445588; } + +.highlight .m { + color: #009999; } + +.highlight .s { + color: #d14; } + +.highlight .na { + color: #008080; } + +.highlight .nb { + color: #0086B3; } + +.highlight .nc { + color: #445588; + font-weight: bold; } + +.highlight .no { + color: #008080; } + +.highlight .ni { + color: #800080; } + +.highlight .ne { + color: #990000; + font-weight: bold; } + +.highlight .nf { + color: #990000; } + +.highlight .nn { + color: #555555; } + +.highlight .nt { + color: #000080; } + +.highlight .nv { + color: #008080; } + +.highlight .ow { + color: #000000; + font-weight: bold; } + +.highlight .w { + color: #bbbbbb; } + +.highlight .mf { + color: #009999; } + +.highlight .mh { + color: #009999; } + +.highlight .mi { + color: #009999; } + +.highlight .mo { + color: #009999; } + +.highlight .sb { + color: #d14; } + +.highlight .sc { + color: #d14; } + +.highlight .sd { + color: #d14; } + +.highlight .s2 { + color: #d14; } + +.highlight .se { + color: #d14; } + +.highlight .sh { + color: #d14; } + +.highlight .si { + color: #d14; } + +.highlight .sx { + color: #d14; } + +.highlight .sr { + color: #009926; } + +.highlight .s1 { + color: #d14; } + +.highlight .ss { + color: #990073; } + +.highlight .bp { + color: #999999; } + +.highlight .vc { + color: #008080; } + +.highlight .vg { + color: #008080; } + +.highlight .vi { + color: #008080; } + +.highlight .il { + color: #009999; } diff --git a/docs/css/jazzy.css b/docs/css/jazzy.css new file mode 100644 index 00000000..f84ef864 --- /dev/null +++ b/docs/css/jazzy.css @@ -0,0 +1,442 @@ +/*! Jazzy - https://github.com/realm/jazzy + * Copyright Realm Inc. 
+ * SPDX-License-Identifier: MIT + */ +html, body, div, span, h1, h3, h4, p, a, code, em, img, ul, li, table, tbody, tr, td { + background: transparent; + border: 0; + margin: 0; + outline: 0; + padding: 0; + vertical-align: baseline; } + +body { + background-color: #f2f2f2; + font-family: Helvetica, freesans, Arial, sans-serif; + font-size: 14px; + -webkit-font-smoothing: subpixel-antialiased; + word-wrap: break-word; } + +h1, h2, h3 { + margin-top: 0.8em; + margin-bottom: 0.3em; + font-weight: 100; + color: black; } + +h1 { + font-size: 2.5em; } + +h2 { + font-size: 2em; + border-bottom: 1px solid #e2e2e2; } + +h4 { + font-size: 13px; + line-height: 1.5; + margin-top: 21px; } + +h5 { + font-size: 1.1em; } + +h6 { + font-size: 1.1em; + color: #777; } + +.section-name { + color: gray; + display: block; + font-family: Helvetica; + font-size: 22px; + font-weight: 100; + margin-bottom: 15px; } + +pre, code { + font: 0.95em Menlo, monospace; + color: #777; + word-wrap: normal; } + +p code, li code { + background-color: #eee; + padding: 2px 4px; + border-radius: 4px; } + +pre > code { + padding: 0; } + +a { + color: #0088cc; + text-decoration: none; } + a code { + color: inherit; } + +ul { + padding-left: 15px; } + +li { + line-height: 1.8em; } + +img { + max-width: 100%; } + +blockquote { + margin-left: 0; + padding: 0 10px; + border-left: 4px solid #ccc; } + +hr { + height: 1px; + border: none; + background-color: #e2e2e2; } + +.footnote-ref { + display: inline-block; + scroll-margin-top: 70px; } + +.footnote-def { + scroll-margin-top: 70px; } + +.content-wrapper { + margin: 0 auto; + width: 980px; } + +header { + font-size: 0.85em; + line-height: 32px; + background-color: #414141; + position: fixed; + width: 100%; + z-index: 3; } + header img { + padding-right: 6px; + vertical-align: -3px; + height: 16px; } + header a { + color: #fff; } + header p { + float: left; + color: #999; } + header .header-right { + float: right; + margin-left: 16px; } + +#breadcrumbs { + 
background-color: #f2f2f2; + height: 26px; + padding-top: 12px; + position: fixed; + width: inherit; + z-index: 2; + margin-top: 32px; + white-space: nowrap; + overflow-x: scroll; } + #breadcrumbs #carat { + height: 10px; + margin: 0 5px; } + +.sidebar { + background-color: #f9f9f9; + border: 1px solid #e2e2e2; + overflow-y: auto; + overflow-x: hidden; + position: fixed; + top: 70px; + bottom: 0; + width: 230px; + word-wrap: normal; } + +.nav-groups { + list-style-type: none; + background: #fff; + padding-left: 0; } + +.nav-group-name { + border-bottom: 1px solid #e2e2e2; + font-size: 1.1em; + font-weight: 100; + padding: 15px 0 15px 20px; } + .nav-group-name > a { + color: #333; } + +.nav-group-tasks { + margin-top: 5px; } + +.nav-group-task { + font-size: 0.9em; + list-style-type: none; + white-space: nowrap; } + .nav-group-task a { + color: #888; } + +.main-content { + background-color: #fff; + border: 1px solid #e2e2e2; + margin-left: 246px; + position: absolute; + overflow: hidden; + padding-bottom: 20px; + top: 70px; + width: 734px; } + .main-content p, .main-content a, .main-content code, .main-content em, .main-content ul, .main-content table, .main-content blockquote { + margin-bottom: 1em; } + .main-content p { + line-height: 1.8em; } + .main-content section .section:first-child { + margin-top: 0; + padding-top: 0; } + .main-content section .task-group-section .task-group:first-of-type { + padding-top: 10px; } + .main-content section .task-group-section .task-group:first-of-type .section-name { + padding-top: 15px; } + .main-content section .heading:before { + content: ""; + display: block; + padding-top: 70px; + margin: -70px 0 0; } + .main-content .section-name p { + margin-bottom: inherit; + line-height: inherit; } + .main-content .section-name code { + background-color: inherit; + padding: inherit; + color: inherit; } + +.section { + padding: 0 25px; } + +.highlight { + background-color: #eee; + padding: 10px 12px; + border: 1px solid #e2e2e2; + 
border-radius: 4px; + overflow-x: auto; } + +.declaration .highlight { + overflow-x: initial; + padding: 0 40px 40px 0; + margin-bottom: -25px; + background-color: transparent; + border: none; } + +.section-name { + margin: 0; + margin-left: 18px; } + +.task-group-section { + margin-top: 10px; + padding-left: 6px; + border-top: 1px solid #e2e2e2; } + +.task-group { + padding-top: 0px; } + +.task-name-container a[name]:before { + content: ""; + display: block; + padding-top: 70px; + margin: -70px 0 0; } + +.section-name-container { + position: relative; + display: inline-block; } + .section-name-container .section-name-link { + position: absolute; + top: 0; + left: 0; + bottom: 0; + right: 0; + margin-bottom: 0; } + .section-name-container .section-name { + position: relative; + pointer-events: none; + z-index: 1; } + .section-name-container .section-name a { + pointer-events: auto; } + +.item { + padding-top: 8px; + width: 100%; + list-style-type: none; } + .item a[name]:before { + content: ""; + display: block; + padding-top: 70px; + margin: -70px 0 0; } + .item code { + background-color: transparent; + padding: 0; } + .item .token, .item .direct-link { + display: inline-block; + text-indent: -20px; + padding-left: 3px; + margin-left: 35px; + font-size: 11.9px; + transition: all 300ms; } + .item .token-open { + margin-left: 20px; } + .item .discouraged { + text-decoration: line-through; } + +.declaration-note { + font-size: .85em; + color: gray; + font-style: italic; } + +.pointer-container { + border-bottom: 1px solid #e2e2e2; + left: -23px; + padding-bottom: 13px; + position: relative; + width: 110%; } + +.pointer { + background: #f9f9f9; + border-left: 1px solid #e2e2e2; + border-top: 1px solid #e2e2e2; + height: 12px; + left: 21px; + top: -7px; + -webkit-transform: rotate(45deg); + -moz-transform: rotate(45deg); + -o-transform: rotate(45deg); + transform: rotate(45deg); + position: absolute; + width: 12px; } + +.height-container { + display: none; + left: 
-25px; + padding: 0 25px; + position: relative; + width: 100%; + overflow: hidden; } + .height-container .section { + background: #f9f9f9; + border-bottom: 1px solid #e2e2e2; + left: -25px; + position: relative; + width: 100%; + padding-top: 10px; + padding-bottom: 5px; } + +.aside, .language { + padding: 6px 12px; + margin: 12px 0; + border-left: 5px solid #dddddd; + overflow-y: hidden; } + .aside .aside-title, .language .aside-title { + font-size: 9px; + letter-spacing: 2px; + text-transform: uppercase; + padding-bottom: 0; + margin: 0; + color: #aaa; + -webkit-user-select: none; } + .aside p:last-child, .language p:last-child { + margin-bottom: 0; } + +.language { + border-left: 5px solid #cde9f4; } + .language .aside-title { + color: #4b8afb; } + +.aside-warning, .aside-deprecated, .aside-unavailable { + border-left: 5px solid #ff6666; } + .aside-warning .aside-title, .aside-deprecated .aside-title, .aside-unavailable .aside-title { + color: #ff0000; } + +.graybox { + border-collapse: collapse; + width: 100%; } + .graybox p { + margin: 0; + word-break: break-word; + min-width: 50px; } + .graybox td { + border: 1px solid #e2e2e2; + padding: 5px 25px 5px 10px; + vertical-align: middle; } + .graybox tr td:first-of-type { + text-align: right; + padding: 7px; + vertical-align: top; + word-break: normal; + width: 40px; } + +.slightly-smaller { + font-size: 0.9em; } + +#footer { + position: relative; + top: 10px; + bottom: 0px; + margin-left: 25px; } + #footer p { + margin: 0; + color: #aaa; + font-size: 0.8em; } + +html.dash header, html.dash #breadcrumbs, html.dash .sidebar { + display: none; } + +html.dash .main-content { + width: 980px; + margin-left: 0; + border: none; + width: 100%; + top: 0; + padding-bottom: 0; } + +html.dash .height-container { + display: block; } + +html.dash .item .token { + margin-left: 0; } + +html.dash .content-wrapper { + width: auto; } + +html.dash #footer { + position: static; } + +form[role=search] { + float: right; } + 
form[role=search] input { + font: Helvetica, freesans, Arial, sans-serif; + margin-top: 6px; + font-size: 13px; + line-height: 20px; + padding: 0px 10px; + border: none; + border-radius: 1em; } + .loading form[role=search] input { + background: white url(../img/spinner.gif) center right 4px no-repeat; } + form[role=search] .tt-menu { + margin: 0; + min-width: 300px; + background: #fff; + color: #333; + border: 1px solid #e2e2e2; + z-index: 4; } + form[role=search] .tt-highlight { + font-weight: bold; } + form[role=search] .tt-suggestion { + font: Helvetica, freesans, Arial, sans-serif; + font-size: 14px; + padding: 0 8px; } + form[role=search] .tt-suggestion span { + display: table-cell; + white-space: nowrap; } + form[role=search] .tt-suggestion .doc-parent-name { + width: 100%; + text-align: right; + font-weight: normal; + font-size: 0.9em; + padding-left: 16px; } + form[role=search] .tt-suggestion:hover, + form[role=search] .tt-suggestion.tt-cursor { + cursor: pointer; + background-color: #4183c4; + color: #fff; } + form[role=search] .tt-suggestion:hover .doc-parent-name, + form[role=search] .tt-suggestion.tt-cursor .doc-parent-name { + color: #fff; } diff --git a/docs/docsets/.docset/Contents/Info.plist b/docs/docsets/.docset/Contents/Info.plist new file mode 100644 index 00000000..61863ec4 --- /dev/null +++ b/docs/docsets/.docset/Contents/Info.plist @@ -0,0 +1,20 @@ + + + + + CFBundleIdentifier + com.jazzy. + CFBundleName + + DocSetPlatformFamily + + isDashDocset + + dashIndexFilePath + index.html + isJavaScriptEnabled + + DashDocSetFamily + dashtoc + + diff --git a/docs/docsets/.docset/Contents/Resources/Documents/Classes.html b/docs/docsets/.docset/Contents/Resources/Documents/Classes.html new file mode 100644 index 00000000..2831f986 --- /dev/null +++ b/docs/docsets/.docset/Contents/Resources/Documents/Classes.html @@ -0,0 +1,325 @@ + + + + Classes Reference + + + + + + + + + + + + + +
+
+

Docs (100% documented)

+
+
+ +
+
+
+
+
+ +
+
+ +
+
+
+

Classes

+

The following classes are available globally.

+ +
+
+
+ +
+
+
+ +
+
+ + diff --git a/docs/docsets/.docset/Contents/Resources/Documents/Classes/OpenAI.html b/docs/docsets/.docset/Contents/Resources/Documents/Classes/OpenAI.html new file mode 100644 index 00000000..5404d16c --- /dev/null +++ b/docs/docsets/.docset/Contents/Resources/Documents/Classes/OpenAI.html @@ -0,0 +1,828 @@ + + + + OpenAI Class Reference + + + + + + + + + + + + + +
+
+

Docs (100% documented)

+
+
+ +
+
+
+
+
+ +
+
+ +
+
+
+

OpenAI

+
+
+ +
final public class OpenAI : OpenAIProtocol
+ +
+
+ +
+
+
+ +
+
+
+ +
+
+ + diff --git a/docs/docsets/.docset/Contents/Resources/Documents/Classes/OpenAI/Configuration.html b/docs/docsets/.docset/Contents/Resources/Documents/Classes/OpenAI/Configuration.html new file mode 100644 index 00000000..e07e9912 --- /dev/null +++ b/docs/docsets/.docset/Contents/Resources/Documents/Classes/OpenAI/Configuration.html @@ -0,0 +1,494 @@ + + + + Configuration Structure Reference + + + + + + + + + + + + + +
+
+

Docs (100% documented)

+
+
+ +
+
+
+
+
+ +
+
+ +
+
+
+

Configuration

+
+
+ +
public struct Configuration
+ +
+
+ +
+
+
+
    +
  • +
    + + + + token + +
    +
    +
    +
    +
    + +
    +

    Declaration

    +
    +

    Swift

    +
    public let token: String
    + +
    +
    +
    +
    +
  • +
  • + +
    +
    +
    +
    +
    +

    Optional OpenAI organization identifier. See https://platform.openai.com/docs/api-reference/authentication

    + +
    +
    +

    Declaration

    +
    +

    Swift

    +
    public let organizationIdentifier: String?
    + +
    +
    +
    +
    +
  • +
  • +
    + + + + host + +
    +
    +
    +
    +
    +
    +

    API host. Set this property if you use some kind of proxy or your own server. Default is api.openai.com

    + +
    +
    +

    Declaration

    +
    +

    Swift

    +
    public let host: String
    + +
    +
    +
    +
    +
  • +
  • +
    + + + + port + +
    +
    +
    +
    +
    +
    + +
    +
    +

    Declaration

    +
    +

    Swift

    +
    public let port: Int
    + +
    +
    +
    +
    +
  • +
  • +
    + + + + scheme + +
    +
    +
    +
    +
    +
    + +
    +
    +

    Declaration

    +
    +

    Swift

    +
    public let scheme: String
    + +
    +
    +
    +
    +
  • +
  • +
    + + + + timeoutInterval + +
    +
    +
    +
    +
    +
    +

    Default request timeout

    + +
    +
    +

    Declaration

    +
    +

    Swift

    +
    public let timeoutInterval: TimeInterval
    + +
    +
    +
    +
    +
  • +
  • + +
    +
    +
    +
    +
    + +
    +
    +

    Declaration

    +
    +

    Swift

    +
    public init(token: String, organizationIdentifier: String? = nil, host: String = "api.openai.com", port: Int = 443, scheme: String = "https", timeoutInterval: TimeInterval = 60.0)
    + +
    +
    +
    +
    +
  • +
+
+
+
+ +
+
+ + diff --git a/docs/docsets/.docset/Contents/Resources/Documents/Enums.html b/docs/docsets/.docset/Contents/Resources/Documents/Enums.html new file mode 100644 index 00000000..4aef1836 --- /dev/null +++ b/docs/docsets/.docset/Contents/Resources/Documents/Enums.html @@ -0,0 +1,325 @@ + + + + Enumerations Reference + + + + + + + + + + + + + +
+
+

Docs (100% documented)

+
+
+ +
+
+
+
+
+ +
+
+ +
+
+
+

Enumerations

+

The following enumerations are available globally.

+ +
+
+
+
    +
  • +
    + + + + OpenAIError + +
    +
    +
    +
    +
    +
    + + See more +
    +
    +

    Declaration

    +
    +

    Swift

    +
    public enum OpenAIError : Error
    + +
    +
    +
    +
    +
  • +
+
+
+
+ +
+
+ + diff --git a/docs/docsets/.docset/Contents/Resources/Documents/Enums/OpenAIError.html b/docs/docsets/.docset/Contents/Resources/Documents/Enums/OpenAIError.html new file mode 100644 index 00000000..682f456c --- /dev/null +++ b/docs/docsets/.docset/Contents/Resources/Documents/Enums/OpenAIError.html @@ -0,0 +1,332 @@ + + + + OpenAIError Enumeration Reference + + + + + + + + + + + + + +
+
+

Docs (100% documented)

+
+
+ +
+
+
+
+
+ +
+
+ +
+
+
+

OpenAIError

+
+
+ +
public enum OpenAIError : Error
+ +
+
+ +
+
+
+
    +
  • +
    + + + + emptyData + +
    +
    +
    +
    +
    +
    + +
    +
    +

    Declaration

    +
    +

    Swift

    +
    case emptyData
    + +
    +
    +
    +
    +
  • +
+
+
+
+ +
+
+ + diff --git a/docs/docsets/.docset/Contents/Resources/Documents/Extensions.html b/docs/docsets/.docset/Contents/Resources/Documents/Extensions.html new file mode 100644 index 00000000..56666ea1 --- /dev/null +++ b/docs/docsets/.docset/Contents/Resources/Documents/Extensions.html @@ -0,0 +1,325 @@ + + + + Extensions Reference + + + + + + + + + + + + + +
+
+

Docs (100% documented)

+
+
+ +
+
+
+
+
+ +
+
+ +
+
+
+

Extensions

+

The following extensions are available globally.

+ +
+
+
+
    +
  • +
    + + + + Model + +
    +
    +
    +
    +
    +
    + + See more +
    +
    +

    Declaration

    +
    +

    Swift

    +
    public extension Model
    + +
    +
    +
    +
    +
  • +
+
+
+
+ +
+
+ + diff --git a/docs/docsets/.docset/Contents/Resources/Documents/Extensions/Model.html b/docs/docsets/.docset/Contents/Resources/Documents/Extensions/Model.html new file mode 100644 index 00000000..e1d188ed --- /dev/null +++ b/docs/docsets/.docset/Contents/Resources/Documents/Extensions/Model.html @@ -0,0 +1,1516 @@ + + + + Model Extension Reference + + + + + + + + + + + + + +
+
+

Docs (100% documented)

+
+
+ +
+
+
+
+
+ +
+
+ +
+
+
+

Model

+
+
+ +
public extension Model
+ +
+
+ +
+
+
+
    +
  • +
    + + + + gpt4_o + +
    +
    +
    +
    +
    +
    +

    gpt-4o, currently the most advanced, multimodal flagship model that’s cheaper and faster than GPT-4 Turbo.

    + +
    +
    +

    Declaration

    +
    +

    Swift

    +
    static let gpt4_o: String
    + +
    +
    +
    +
    +
  • +
  • +
    + + + + gpt4_o_mini + +
    +
    +
    +
    +
    +
    +

    gpt-4o-mini, currently the most affordable and intelligent model for fast and lightweight requests.

    + +
    +
    +

    Declaration

    +
    +

    Swift

    +
    static let gpt4_o_mini: String
    + +
    +
    +
    +
    +
  • +
  • +
    + + + + gpt4_turbo + +
    +
    +
    +
    +
    +
    +

    gpt-4-turbo, The latest GPT-4 Turbo model with vision capabilities. Vision requests can now use JSON mode and function calling and more. Context window: 128,000 tokens

    + +
    +
    +

    Declaration

    +
    +

    Swift

    +
    static let gpt4_turbo: String
    + +
    +
    +
    +
    +
  • +
  • +
    + + + + gpt4_turbo_preview + +
    +
    +
    +
    +
    +
    +

    gpt-4-turbo, gpt-4 model with improved instruction following, JSON mode, reproducible outputs, parallel function calling and more. Maximum of 4096 output tokens

    + +
    +
    +

    Declaration

    +
    +

    Swift

    +
    @available(*, deprecated, message: "Please upgrade to the newer model")
    +static let gpt4_turbo_preview: String
    + +
    +
    +
    +
    +
  • +
  • +
    + + + + gpt4_vision_preview + +
    +
    +
    +
    +
    +
    +

    gpt-4-vision-preview, able to understand images, in addition to all other GPT-4 Turbo capabilities.

    + +
    +
    +

    Declaration

    +
    +

    Swift

    +
    static let gpt4_vision_preview: String
    + +
    +
    +
    +
    +
  • +
  • +
    + + + + gpt4_0125_preview + +
    +
    +
    +
    +
    +
    +

    Snapshot of gpt-4-turbo-preview from January 25th 2024. This model reduces cases of “laziness” where the model doesn’t complete a task. Also fixes the bug impacting non-English UTF-8 generations. Maximum of 4096 output tokens

    + +
    +
    +

    Declaration

    +
    +

    Swift

    +
    static let gpt4_0125_preview: String
    + +
    +
    +
    +
    +
  • +
  • +
    + + + + gpt4_1106_preview + +
    +
    +
    +
    +
    +
    +

    Snapshot of gpt-4-turbo-preview from November 6th 2023. Improved instruction following, JSON mode, reproducible outputs, parallel function calling and more. Maximum of 4096 output tokens

    + +
    +
    +

    Declaration

    +
    +

    Swift

    +
    @available(*, deprecated, message: "Please upgrade to the newer model")
    +static let gpt4_1106_preview: String
    + +
    +
    +
    +
    +
  • +
  • +
    + + + + gpt4 + +
    +
    +
    +
    +
    +
    +

    Most capable gpt-4 model, outperforms any GPT-3.5 model, able to do more complex tasks, and optimized for chat.

    + +
    +
    +

    Declaration

    +
    +

    Swift

    +
    static let gpt4: String
    + +
    +
    +
    +
    +
  • +
  • +
    + + + + gpt4_0613 + +
    +
    +
    +
    +
    +
    +

    Snapshot of gpt-4 from June 13th 2023 with function calling data. Unlike gpt-4, this model will not receive updates, and will be deprecated 3 months after a new version is released.

    + +
    +
    +

    Declaration

    +
    +

    Swift

    +
    static let gpt4_0613: String
    + +
    +
    +
    +
    +
  • +
  • +
    + + + + gpt4_0314 + +
    +
    +
    +
    +
    +
    +

    Snapshot of gpt-4 from March 14th 2023. Unlike gpt-4, this model will not receive updates, and will only be supported for a three month period ending on June 14th 2023.

    + +
    +
    +

    Declaration

    +
    +

    Swift

    +
    @available(*, deprecated, message: "Please upgrade to the newer model")
    +static let gpt4_0314: String
    + +
    +
    +
    +
    +
  • +
  • +
    + + + + gpt4_32k + +
    +
    +
    +
    +
    +
    +

    Same capabilities as the base gpt-4 model but with 4x the context length. Will be updated with our latest model iteration.

    + +
    +
    +

    Declaration

    +
    +

    Swift

    +
    static let gpt4_32k: String
    + +
    +
    +
    +
    +
  • +
  • +
    + + + + gpt4_32k_0613 + +
    +
    +
    +
    +
    +
    +

    Snapshot of gpt-4-32k from June 13th 2023. Unlike gpt-4-32k, this model will not receive updates, and will be deprecated 3 months after a new version is released.

    + +
    +
    +

    Declaration

    +
    +

    Swift

    +
    static let gpt4_32k_0613: String
    + +
    +
    +
    +
    +
  • +
  • +
    + + + + gpt4_32k_0314 + +
    +
    +
    +
    +
    +
    +

    Snapshot of gpt-4-32k from March 14th 2023. Unlike gpt-4-32k, this model will not receive updates, and will only be supported for a three month period ending on June 14th 2023.

    + +
    +
    +

    Declaration

    +
    +

    Swift

    +
    @available(*, deprecated, message: "Please upgrade to the newer model")
    +static let gpt4_32k_0314: String
    + +
    +
    +
    +
    +
  • +
  • +
    + + + + gpt3_5Turbo + +
    +
    +
    +
    +
    +
    +

    Most capable gpt-3.5-turbo model and optimized for chat. Will be updated with our latest model iteration.

    + +
    +
    +

    Declaration

    +
    +

    Swift

    +
    static let gpt3_5Turbo: String
    + +
    +
    +
    +
    +
  • +
  • +
    + + + + gpt3_5Turbo_0125 + +
    +
    +
    +
    +
    +
    +

    Snapshot of gpt-3.5-turbo from January 25th 2024. Decreased prices by 50%. Various improvements including higher accuracy at responding in requested formats and a fix for a bug which caused a text encoding issue for non-English language function calls.

    + +
    +
    +

    Declaration

    +
    +

    Swift

    +
    static let gpt3_5Turbo_0125: String
    + +
    +
    +
    +
    +
  • +
  • +
    + + + + gpt3_5Turbo_1106 + +
    +
    +
    +
    +
    +
    +

    Snapshot of gpt-3.5-turbo from November 6th 2023. The latest gpt-3.5-turbo model with improved instruction following, JSON mode, reproducible outputs, parallel function calling and more.

    + +
    +
    +

    Declaration

    +
    +

    Swift

    +
    @available(*, deprecated, message: "Please upgrade to the newer model")
    +static let gpt3_5Turbo_1106: String
    + +
    +
    +
    +
    +
  • +
  • +
    + + + + gpt3_5Turbo_0613 + +
    +
    +
    +
    +
    +
    +

    Snapshot of gpt-3.5-turbo from June 13th 2023 with function calling data. Unlike gpt-3.5-turbo, this model will not receive updates, and will be deprecated 3 months after a new version is released.

    + +
    +
    +

    Declaration

    +
    +

    Swift

    +
    @available(*, deprecated, message: "Please upgrade to the newer model")
    +static let gpt3_5Turbo_0613: String
    + +
    +
    +
    +
    +
  • +
  • +
    + + + + gpt3_5Turbo_0301 + +
    +
    +
    +
    +
    +
    +

    Snapshot of gpt-3.5-turbo from March 1st 2023. Unlike gpt-3.5-turbo, this model will not receive updates, and will only be supported for a three month period ending on June 1st 2023.

    + +
    +
    +

    Declaration

    +
    +

    Swift

    +
    @available(*, deprecated, message: "Please upgrade to the newer model")
    +static let gpt3_5Turbo_0301: String
    + +
    +
    +
    +
    +
  • +
  • +
    + + + + gpt3_5Turbo_16k + +
    +
    +
    +
    +
    +
    +

    Same capabilities as the standard gpt-3.5-turbo model but with 4 times the context.

    + +
    +
    +

    Declaration

    +
    +

    Swift

    +
    static let gpt3_5Turbo_16k: String
    + +
    +
    +
    +
    +
  • +
  • +
    + + + + gpt3_5Turbo_16k_0613 + +
    +
    +
    +
    +
    +
    +

    Snapshot of gpt-3.5-turbo-16k from June 13th 2023. Unlike gpt-3.5-turbo-16k, this model will not receive updates, and will be deprecated 3 months after a new version is released.

    + +
    +
    +

    Declaration

    +
    +

    Swift

    +
    static let gpt3_5Turbo_16k_0613: String
    + +
    +
    +
    +
    +
  • +
  • +
    + + + + textDavinci_003 + +
    +
    +
    +
    +
    +
    +

    Can do any language task with better quality, longer output, and consistent instruction-following than the curie, babbage, or ada models. Also supports inserting completions within text.

    + +
    +
    +

    Declaration

    +
    +

    Swift

    +
    static let textDavinci_003: String
    + +
    +
    +
    +
    +
  • +
  • +
    + + + + textDavinci_002 + +
    +
    +
    +
    +
    +
    +

    Similar capabilities to text-davinci-003 but trained with supervised fine-tuning instead of reinforcement learning.

    + +
    +
    +

    Declaration

    +
    +

    Swift

    +
    static let textDavinci_002: String
    + +
    +
    +
    +
    +
  • +
  • +
    + + + + textCurie + +
    +
    +
    +
    +
    +
    +

    Very capable, faster and lower cost than Davinci.

    + +
    +
    +

    Declaration

    +
    +

    Swift

    +
    static let textCurie: String
    + +
    +
    +
    +
    +
  • +
  • +
    + + + + textBabbage + +
    +
    +
    +
    +
    +
    +

    Capable of straightforward tasks, very fast, and lower cost.

    + +
    +
    +

    Declaration

    +
    +

    Swift

    +
    static let textBabbage: String
    + +
    +
    +
    +
    +
  • +
  • +
    + + + + textAda + +
    +
    +
    +
    +
    +
    +

    Capable of very simple tasks, usually the fastest model in the GPT-3 series, and lowest cost.

    + +
    +
    +

    Declaration

    +
    +

    Swift

    +
    static let textAda: String
    + +
    +
    +
    +
    +
  • +
  • +
    + + + + textDavinci_001 + +
    +
    +
    +
    +
    +
    + +
    +
    +

    Declaration

    +
    +

    Swift

    +
    static let textDavinci_001: String
    + +
    +
    +
    +
    +
  • +
  • +
    + + + + codeDavinciEdit_001 + +
    +
    +
    +
    +
    +
    + +
    +
    +

    Declaration

    +
    +

    Swift

    +
    static let codeDavinciEdit_001: String
    + +
    +
    +
    +
    +
  • +
  • +
    + + + + tts_1 + +
    +
    +
    +
    +
    +
    +

    The latest text to speech model, optimized for speed.

    + +
    +
    +

    Declaration

    +
    +

    Swift

    +
    static let tts_1: String
    + +
    +
    +
    +
    +
  • +
  • +
    + + + + tts_1_hd + +
    +
    +
    +
    +
    +
    +

    The latest text to speech model, optimized for quality.

    + +
    +
    +

    Declaration

    +
    +

    Swift

    +
    static let tts_1_hd: String
    + +
    +
    +
    +
    +
  • +
  • +
    + + + + whisper_1 + +
    +
    +
    +
    +
    +
    + +
    +
    +

    Declaration

    +
    +

    Swift

    +
    static let whisper_1: String
    + +
    +
    +
    +
    +
  • +
  • +
    + + + + dall_e_2 + +
    +
    +
    +
    +
    +
    + +
    +
    +

    Declaration

    +
    +

    Swift

    +
    static let dall_e_2: String
    + +
    +
    +
    +
    +
  • +
  • +
    + + + + dall_e_3 + +
    +
    +
    +
    +
    +
    + +
    +
    +

    Declaration

    +
    +

    Swift

    +
    static let dall_e_3: String
    + +
    +
    +
    +
    +
  • +
  • +
    + + + + davinci + +
    +
    +
    +
    +
    +
    +

    Most capable GPT-3 model. Can do any task the other models can do, often with higher quality.

    + +
    +
    +

    Declaration

    +
    +

    Swift

    +
    static let davinci: String
    + +
    +
    +
    +
    +
  • +
  • +
    + + + + curie + +
    +
    +
    +
    +
    +
    +

    Very capable, but faster and lower cost than Davinci.

    + +
    +
    +

    Declaration

    +
    +

    Swift

    +
    static let curie: String
    + +
    +
    +
    +
    +
  • +
  • +
    + + + + babbage + +
    +
    +
    +
    +
    +
    +

    Capable of straightforward tasks, very fast, and lower cost.

    + +
    +
    +

    Declaration

    +
    +

    Swift

    +
    static let babbage: String
    + +
    +
    +
    +
    +
  • +
  • +
    + + + + ada + +
    +
    +
    +
    +
    +
    +

    Capable of very simple tasks, usually the fastest model in the GPT-3 series, and lowest cost.

    + +
    +
    +

    Declaration

    +
    +

    Swift

    +
    static let ada: String
    + +
    +
    +
    +
    +
  • +
  • +
    + + + + textEmbeddingAda + +
    +
    +
    +
    +
    +
    + +
    +
    +

    Declaration

    +
    +

    Swift

    +
    static let textEmbeddingAda: String
    + +
    +
    +
    +
    +
  • +
  • +
    + + + + textSearchAda + +
    +
    +
    +
    +
    +
    + +
    +
    +

    Declaration

    +
    +

    Swift

    +
    static let textSearchAda: String
    + +
    +
    +
    +
    +
  • +
  • +
    + + + + textSearchBabbageDoc + +
    +
    +
    +
    +
    +
    + +
    +
    +

    Declaration

    +
    +

    Swift

    +
    static let textSearchBabbageDoc: String
    + +
    +
    +
    +
    +
  • +
  • + +
    +
    +
    +
    +
    + +
    +
    +

    Declaration

    +
    +

    Swift

    +
    static let textSearchBabbageQuery001: String
    + +
    +
    +
    +
    +
  • +
  • +
    + + + + textEmbedding3 + +
    +
    +
    +
    +
    +
    + +
    +
    +

    Declaration

    +
    +

    Swift

    +
    static let textEmbedding3: String
    + +
    +
    +
    +
    +
  • +
  • +
    + + + + textEmbedding3Large + +
    +
    +
    +
    +
    +
    + +
    +
    +

    Declaration

    +
    +

    Swift

    +
    static let textEmbedding3Large: String
    + +
    +
    +
    +
    +
  • +
  • +
    + + + + textModerationStable + +
    +
    +
    +
    +
    +
    +

    Almost as capable as the latest model, but slightly older.

    + +
    +
    +

    Declaration

    +
    +

    Swift

    +
    static let textModerationStable: String
    + +
    +
    +
    +
    +
  • +
  • +
    + + + + textModerationLatest + +
    +
    +
    +
    +
    +
    +

    Most capable moderation model. Accuracy will be slightly higher than the stable model.

    + +
    +
    +

    Declaration

    +
    +

    Swift

    +
    static let textModerationLatest: String
    + +
    +
    +
    +
    +
  • +
  • +
    + + + + moderation + +
    +
    +
    +
    +
    +
    + +
    +
    +

    Declaration

    +
    +

    Swift

    +
    static let moderation: String
    + +
    +
    +
    +
    +
  • +
+
+
+
+ +
+
+ + diff --git a/docs/docsets/.docset/Contents/Resources/Documents/Protocols.html b/docs/docsets/.docset/Contents/Resources/Documents/Protocols.html new file mode 100644 index 00000000..4cfb4346 --- /dev/null +++ b/docs/docsets/.docset/Contents/Resources/Documents/Protocols.html @@ -0,0 +1,325 @@ + + + + Protocols Reference + + + + + + + + + + + + + +
+
+

Docs (100% documented)

+
+
+ +
+
+
+
+
+ +
+
+ +
+
+
+

Protocols

+

The following protocols are available globally.

+ +
+
+
+
    +
  • +
    + + + + OpenAIProtocol + +
    +
    +
    +
    +
    +
    + + See more +
    +
    +

    Declaration

    +
    +

    Swift

    +
    public protocol OpenAIProtocol
    + +
    +
    +
    +
    +
  • +
+
+
+
+ +
+
+ + diff --git a/docs/docsets/.docset/Contents/Resources/Documents/Protocols/OpenAIProtocol.html b/docs/docsets/.docset/Contents/Resources/Documents/Protocols/OpenAIProtocol.html new file mode 100644 index 00000000..eeb3620a --- /dev/null +++ b/docs/docsets/.docset/Contents/Resources/Documents/Protocols/OpenAIProtocol.html @@ -0,0 +1,2181 @@ + + + + OpenAIProtocol Protocol Reference + + + + + + + + + + + + + +
+
+

Docs (100% documented)

+
+
+ +
+
+
+
+
+ +
+
+ +
+
+
+

OpenAIProtocol

+
+
+ +
public protocol OpenAIProtocol
+ +
+
+ +
+
+
+
    +
  • + +
    +
    +
    +
    +
    +

    This function sends a completions query to the OpenAI API and retrieves generated completions in response. The Completions API enables you to build applications using OpenAI’s language models, like the powerful GPT-3.

    + +

    Example:

    +
    let query = CompletionsQuery(model: .textDavinci_003, prompt: "What is 42?")
    +openAI.completions(query: query) { result in
    +  //Handle result here
    +}
    +
    + +
    +
    +

    Declaration

    +
    +

    Swift

    +
    func completions(query: CompletionsQuery, completion: @escaping (Result<CompletionsResult, Error>) -> Void)
    + +
    +
    +
    +

    Parameters

    + + + + + + + + + + + +
    + + query + + +
    +

    A CompletionsQuery object containing the input parameters for the API request. This includes the prompt, model, temperature, max tokens, and other settings.

    +
    +
    + + completion + + +
    +

    A closure which receives the result when the API request finishes. The closure’s parameter, Result<CompletionsResult, Error>, will contain either the CompletionsResult object with the generated completions, or an error if the request failed.

    +
    +
    +
    +
    +
    +
  • +
  • + +
    +
    +
    +
    +
    +

    This function sends a completions query to the OpenAI API and retrieves generated completions in response. The Completions API enables you to build applications using OpenAI’s language models, like the powerful GPT-3. The result is returned by chunks.

    + +

    Example:

    +
    let query = CompletionsQuery(model: .textDavinci_003, prompt: "What is 42?")
    +openAI.completions(query: query) { result in
    +  //Handle result here
    +}
    +
    + +
    +
    +

    Declaration

    +
    +

    Swift

    +
    func completionsStream(query: CompletionsQuery, onResult: @escaping (Result<CompletionsResult, Error>) -> Void, completion: ((Error?) -> Void)?)
    + +
    +
    +
    +

    Parameters

    + + + + + + + + + + + + + + + +
    + + query + + +
    +

    A CompletionsQuery object containing the input parameters for the API request. This includes the prompt, model, temperature, max tokens, and other settings.

    +
    +
    + + onResult + + +
    +

    A closure which receives the result when the API request finishes. The closure’s parameter, Result<CompletionsResult, Error>, will contain either the CompletionsResult object with the generated completions, or an error if the request failed.

    +
    +
    + + completion + + +
    +

    A closure that is called when all chunks are delivered or an unrecoverable error occurs

    +
    +
    +
    +
    +
    +
  • +
  • + +
    +
    +
    +
    +
    +

    This function sends an images query to the OpenAI API and retrieves generated images in response. The Images Generation API enables you to create various images or graphics using OpenAI’s powerful deep learning models.

    + +

    Example:

    +
    let query = ImagesQuery(prompt: "White cat with heterochromia sitting on the kitchen table", n: 1, size: ImagesQuery.Size._1024)
    +openAI.images(query: query) { result in
    +  //Handle result here
    +}
    +
    + +
    +
    +

    Declaration

    +
    +

    Swift

    +
    func images(query: ImagesQuery, completion: @escaping (Result<ImagesResult, Error>) -> Void)
    + +
    +
    +
    +

    Parameters

    + + + + + + + + + + + +
    + + query + + +
    +

    An ImagesQuery object containing the input parameters for the API request. This includes the query parameters such as the text prompt, image size, and other settings.

    +
    +
    + + completion + + +
    +

    A closure which receives the result when the API request finishes. The closure’s parameter, Result<ImagesResult, Error>, will contain either the ImagesResult object with the generated images, or an error if the request failed.

    +
    +
    +
    +
    +
    +
  • +
  • + +
    +
    +
    +
    +
    +

    This function sends an image edit query to the OpenAI API and retrieves generated images in response. The Images Edit API enables you to edit images or graphics using OpenAI’s powerful deep learning models.

    + +

    Example:

    +
    let query = ImagesEditQuery(image: "@whitecat.png", prompt: "White cat with heterochromia sitting on the kitchen table with a bowl of food", n: 1, size: ImagesQuery.Size._1024)
    +openAI.imageEdits(query: query) { result in
    +  //Handle result here
    +}
    +
    + +
    +
    +

    Declaration

    +
    +

    Swift

    +
    func imageEdits(query: ImageEditsQuery, completion: @escaping (Result<ImagesResult, Error>) -> Void)
    + +
    +
    +
    +

    Parameters

    + + + + + + + + + + + +
    + + query + + +
    +

    An ImagesEditQuery object containing the input parameters for the API request. This includes the query parameters such as the image to be edited, an image to be used as a mask if applicable, text prompt, image size, and other settings.

    +
    +
    + + completion + + +
    +

    A closure which receives the result when the API request finishes. The closure’s parameter, Result<ImagesResult, Error>, will contain either the ImagesResult object with the generated images, or an error if the request failed.

    +
    +
    +
    +
    +
    +
  • +
  • + +
    +
    +
    +
    +
    +

    This function sends an image variation query to the OpenAI API and retrieves generated images in response. The Images Variations API enables you to create a variation of a given image using OpenAI’s powerful deep learning models.

    + +

    Example:

    +
    let query = ImagesVariationQuery(image: "@whitecat.png", n: 1, size: ImagesQuery.Size._1024)
    +openAI.imageVariations(query: query) { result in
    +  //Handle result here
    +}
    +
    + +
    +
    +

    Declaration

    +
    +

    Swift

    +
    func imageVariations(query: ImageVariationsQuery, completion: @escaping (Result<ImagesResult, Error>) -> Void)
    + +
    +
    +
    +

    Parameters

    + + + + + + + + + + + +
    + + query + + +
    +

    An ImagesVariationQuery object containing the input parameters for the API request. This includes the query parameters such as the image to use as a basis for the variation(s), image size, and other settings.

    +
    +
    + + completion + + +
    +

    A closure which receives the result when the API request finishes. The closure’s parameter, Result<ImagesResult, Error>, will contain either the ImagesResult object with the generated images, or an error if the request failed.

    +
    +
    +
    +
    +
    +
  • +
  • + +
    +
    +
    +
    +
    +

    This function sends an embeddings query to the OpenAI API and retrieves embeddings in response. The Embeddings API enables you to generate high-dimensional vector representations of texts, which can be used for various natural language processing tasks such as semantic similarity, clustering, and classification.

    + +

    Example:

    +
    let query = EmbeddingsQuery(model: .textSearchBabbageDoc, input: "The food was delicious and the waiter...")
    +openAI.embeddings(query: query) { result in
    +  //Handle response here
    +}
    +
    + +
    +
    +

    Declaration

    +
    +

    Swift

    +
    func embeddings(query: EmbeddingsQuery, completion: @escaping (Result<EmbeddingsResult, Error>) -> Void)
    + +
    +
    +
    +

    Parameters

    + + + + + + + + + + + +
    + + query + + +
    +

    An EmbeddingsQuery object containing the input parameters for the API request. This includes the list of text prompts to be converted into embeddings, the model to be used, and other settings.

    +
    +
    + + completion + + +
    +

    A closure which receives the result when the API request finishes. The closure’s parameter, Result<EmbeddingsResult, Error>, will contain either the EmbeddingsResult object with the generated embeddings, or an error if the request failed.

    +
    +
    +
    +
    +
    +
  • +
  • + +
    +
    +
    +
    +
    +

    This function sends a chat query to the OpenAI API and retrieves chat conversation responses. The Chat API enables you to build chatbots or conversational applications using OpenAI’s powerful natural language models, like GPT-3.

    + +

    Example:

    +
    let query = ChatQuery(model: .gpt3_5Turbo, messages: [.init(role: "user", content: "who are you")])
    +openAI.chats(query: query) { result in
    +  //Handle response here
    +}
    +
    + +
    +
    +

    Declaration

    +
    +

    Swift

    +
    func chats(query: ChatQuery, completion: @escaping (Result<ChatResult, Error>) -> Void)
    + +
    +
    +
    +

    Parameters

    + + + + + + + + + + + +
    + + query + + +
    +

    A ChatQuery object containing the input parameters for the API request. This includes the lists of message objects for the conversation, the model to be used, and other settings.

    +
    +
    + + completion + + +
    +

    A closure which receives the result when the API request finishes. The closure’s parameter, Result<ChatResult, Error>, will contain either the ChatResult object with the model’s response to the conversation, or an error if the request failed.

    +
    +
    +
    +
    +
    +
  • +
  • + +
    +
    +
    +
    +
    +

    This function sends a chat query to the OpenAI API and retrieves chat stream conversation responses. The Chat API enables you to build chatbots or conversational applications using OpenAI’s powerful natural language models, like GPT-3. The result is returned by chunks.

    + +

    Example:

    +
    let query = ChatQuery(model: .gpt3_5Turbo, messages: [.init(role: "user", content: "who are you")])
    +openAI.chats(query: query) { result in
    +  //Handle response here
    +}
    +
    + +
    +
    +

    Declaration

    +
    +

    Swift

    +
    func chatsStream(query: ChatQuery, onResult: @escaping (Result<ChatStreamResult, Error>) -> Void, completion: ((Error?) -> Void)?)
    + +
    +
    +
    +

    Parameters

    + + + + + + + + + + + + + + + +
    + + query + + +
    +

    A ChatQuery object containing the input parameters for the API request. This includes the lists of message objects for the conversation, the model to be used, and other settings.

    +
    +
    + + onResult + + +
    +

    A closure which receives the result when the API request finishes. The closure’s parameter, Result<ChatStreamResult, Error>, will contain either the ChatStreamResult object with the model’s response to the conversation, or an error if the request failed.

    +
    +
    + + completion + + +
    +

    A closure that is called when all chunks are delivered or an unrecoverable error occurs

    +
    +
    +
    +
    +
    +
  • +
  • + +
    +
    +
    +
    +
    +

    This function sends an edits query to the OpenAI API and retrieves an edited version of the prompt based on the instruction given.

    + +

    Example:

    +
    let query = EditsQuery(model: .gpt4, input: "What day of the wek is it?", instruction: "Fix the spelling mistakes")
    +openAI.edits(query: query) { result in
    +  //Handle response here
    +}
    +
    + +
    +
    +

    Declaration

    +
    +

    Swift

    +
    func edits(query: EditsQuery, completion: @escaping (Result<EditsResult, Error>) -> Void)
    + +
    +
    +
    +

    Parameters

    + + + + + + + + + + + +
    + + query + + +
    +

    An EditsQuery object containing the input parameters for the API request. This includes the input to be edited, the instruction specifying how it should be edited, and other settings.

    +
    +
    + + completion + + +
    +

    A closure which receives the result when the API request finishes. The closure’s parameter, Result<EditsResult, Error>, will contain either the EditsResult object with the model’s response to the queried edit, or an error if the request failed.

    +
    +
    +
    +
    +
    +
  • +
  • + +
    +
    +
    +
    +
    +

    This function sends a model query to the OpenAI API and retrieves a model instance, providing owner information. The Models API in this usage enables you to gather detailed information on the model in question, like GPT-3.

    + +

    Example:

    +
    let query = ModelQuery(model: .gpt3_5Turbo)
    +openAI.model(query: query) { result in
    +  //Handle response here
    +}
    +
    + +
    +
    +

    Declaration

    +
    +

    Swift

    +
    func model(query: ModelQuery, completion: @escaping (Result<ModelResult, Error>) -> Void)
    + +
    +
    +
    +

    Parameters

    + + + + + + + + + + + +
    + + query + + +
    +

    A ModelQuery object containing the input parameters for the API request, which is only the model to be queried.

    +
    +
    + + completion + + +
    +

    A closure which receives the result when the API request finishes. The closure’s parameter, Result<ModelResult, Error>, will contain either the ModelResult object with more information about the model, or an error if the request failed.

    +
    +
    +
    +
    +
    +
  • +
  • +
    + + + + models(completion:) + +
    +
    +
    +
    +
    +
    +

    This function sends a models query to the OpenAI API and retrieves a list of models. The Models API in this usage enables you to list all the available models.

    + +

    Example:

    +
    openAI.models() { result in
    +  //Handle response here
    +}
    +
    + +
    +
    +

    Declaration

    +
    +

    Swift

    +
    func models(completion: @escaping (Result<ModelsResult, Error>) -> Void)
    + +
    +
    +
    +

    Parameters

    + + + + + + + +
    + + completion + + +
    +

    A closure which receives the result when the API request finishes. The closure’s parameter, Result<ModelsResult, Error>, will contain either the ModelsResult object with the list of model types, or an error if the request failed.

    +
    +
    +
    +
    +
    +
  • +
  • + +
    +
    +
    +
    +
    +

    This function sends a moderations query to the OpenAI API and retrieves a list of category results to classify how text may violate OpenAI’s Content Policy.

    + +

    Example:

    +
    let query = ModerationsQuery(input: "I want to kill them.")
    +openAI.moderations(query: query) { result in
    +  //Handle response here
    +}
    +
    + +
    +
    +

    Declaration

    +
    +

    Swift

    +
    @available(iOS 13.0, *)
    +func moderations(query: ModerationsQuery, completion: @escaping (Result<ModerationsResult, Error>) -> Void)
    + +
    +
    +
    +

    Parameters

    + + + + + + + + + + + +
    + + query + + +
    +

    A ModerationsQuery object containing the input parameters for the API request. This includes the input text and optionally the model to be used.

    +
    +
    + + completion + + +
    +

    A closure which receives the result when the API request finishes. The closure’s parameter, Result<ModerationsResult, Error>, will contain either the ModerationsResult object with the list of category results, or an error if the request failed.

    +
    +
    +
    +
    +
    +
  • +
  • + +
    +
    +
    +
    +
    +

    This function sends an AudioSpeechQuery to the OpenAI API to create audio speech from text using a specific voice and format.

    + +

    Example:

    +
    let query = AudioSpeechQuery(model: .tts_1, input: "Hello, world!", voice: .alloy, responseFormat: .mp3, speed: 1.0)
    +openAI.audioCreateSpeech(query: query) { result in
    +   // Handle response here
    +}
    +
    + +
    +
    +

    Declaration

    +
    +

    Swift

    +
    func audioCreateSpeech(query: AudioSpeechQuery, completion: @escaping (Result<AudioSpeechResult, Error>) -> Void)
    + +
    +
    +
    +

    Parameters

    + + + + + + + + + + + +
    + + query + + +
    +

    An AudioSpeechQuery object containing the parameters for the API request. This includes the Text-to-Speech model to be used, input text, voice to be used for generating the audio, the desired audio format, and the speed of the generated audio.

    +
    +
    + + completion + + +
    +

    A closure which receives the result. The closure’s parameter, Result<AudioSpeechResult, Error>, will either contain the AudioSpeechResult object with the audio data or an error if the request failed.

    +
    +
    +
    +
    +
    +
  • +
  • + +
    +
    +
    +
    +
    +

    Transcribes audio data using OpenAI’s audio transcription API and completes the operation asynchronously.

    + +
    +
    +

    Declaration

    +
    +

    Swift

    +
    func audioTranscriptions(query: AudioTranscriptionQuery, completion: @escaping (Result<AudioTranscriptionResult, Error>) -> Void)
    + +
    +
    +
    +

    Parameters

    + + + + + + + + + + + +
    + + query + + +
    +

    The AudioTranscriptionQuery instance, containing the information required for the transcription request.

    +
    +
    + + completion + + +
    +

    The completion handler to be executed upon completion of the transcription request. + Returns a Result of type AudioTranscriptionResult if successful, or an Error if an error occurs. +*

    +
    +
    +
    +
    +
    +
  • +
  • + +
    +
    +
    +
    +
    +

    Translates audio data using OpenAI’s audio translation API and completes the operation asynchronously.

    + +
    +
    +

    Declaration

    +
    +

    Swift

    +
    func audioTranslations(query: AudioTranslationQuery, completion: @escaping (Result<AudioTranslationResult, Error>) -> Void)
    + +
    +
    +
    +

    Parameters

    + + + + + + + + + + + +
    + + query + + +
    +

    The AudioTranslationQuery instance, containing the information required for the translation request.

    +
    +
    + + completion + + +
    +

    The completion handler to be executed upon completion of the translation request. + Returns a Result of type AudioTranslationResult if successful, or an Error if an error occurs. +*

    +
    +
    +
    +
    +
    +
  • +
  • +
    + + + + completions(query:) + + + Extension method, asynchronous + +
    +
    +
    +
    +
    +
    + +
    +
    +

    Declaration

    +
    +

    Swift

    +
    func completions(
    +    query: CompletionsQuery
    +) async throws -> CompletionsResult
    + +
    +
    +
    +
    +
  • +
  • +
    + + + + completionsStream(query:) + + + Extension method + +
    +
    +
    +
    +
    +
    + +
    +
    +

    Declaration

    +
    +

    Swift

    +
    func completionsStream(
    +    query: CompletionsQuery
    +) -> AsyncThrowingStream<CompletionsResult, Error>
    + +
    +
    +
    +
    +
  • +
  • +
    + + + + images(query:) + + + Extension method, asynchronous + +
    +
    +
    +
    +
    +
    + +
    +
    +

    Declaration

    +
    +

    Swift

    +
    func images(
    +    query: ImagesQuery
    +) async throws -> ImagesResult
    + +
    +
    +
    +
    +
  • +
  • +
    + + + + imageEdits(query:) + + + Extension method, asynchronous + +
    +
    +
    +
    +
    +
    + +
    +
    +

    Declaration

    +
    +

    Swift

    +
    func imageEdits(
    +    query: ImageEditsQuery
    +) async throws -> ImagesResult
    + +
    +
    +
    +
    +
  • +
  • +
    + + + + imageVariations(query:) + + + Extension method, asynchronous + +
    +
    +
    +
    +
    +
    + +
    +
    +

    Declaration

    +
    +

    Swift

    +
    func imageVariations(
    +    query: ImageVariationsQuery
    +) async throws -> ImagesResult
    + +
    +
    +
    +
    +
  • +
  • +
    + + + + embeddings(query:) + + + Extension method, asynchronous + +
    +
    +
    +
    +
    +
    + +
    +
    +

    Declaration

    +
    +

    Swift

    +
    func embeddings(
    +    query: EmbeddingsQuery
    +) async throws -> EmbeddingsResult
    + +
    +
    +
    +
    +
  • +
  • +
    + + + + chats(query:) + + + Extension method, asynchronous + +
    +
    +
    +
    +
    +
    + +
    +
    +

    Declaration

    +
    +

    Swift

    +
    func chats(
    +    query: ChatQuery
    +) async throws -> ChatResult
    + +
    +
    +
    +
    +
  • +
  • +
    + + + + chatsStream(query:) + + + Extension method + +
    +
    +
    +
    +
    +
    + +
    +
    +

    Declaration

    +
    +

    Swift

    +
    func chatsStream(
    +    query: ChatQuery
    +) -> AsyncThrowingStream<ChatStreamResult, Error>
    + +
    +
    +
    +
    +
  • +
  • +
    + + + + edits(query:) + + + Extension method, asynchronous + +
    +
    +
    +
    +
    +
    + +
    +
    +

    Declaration

    +
    +

    Swift

    +
    func edits(
    +    query: EditsQuery
    +) async throws -> EditsResult
    + +
    +
    +
    +
    +
  • +
  • +
    + + + + model(query:) + + + Extension method, asynchronous + +
    +
    +
    +
    +
    +
    + +
    +
    +

    Declaration

    +
    +

    Swift

    +
    func model(
    +    query: ModelQuery
    +) async throws -> ModelResult
    + +
    +
    +
    +
    +
  • +
  • +
    + + + + models() + + + Extension method, asynchronous + +
    +
    +
    +
    +
    +
    + +
    +
    +

    Declaration

    +
    +

    Swift

    +
    func models() async throws -> ModelsResult
    + +
    +
    +
    +
    +
  • +
  • +
    + + + + moderations(query:) + + + Extension method, asynchronous + +
    +
    +
    +
    +
    +
    + +
    +
    +

    Declaration

    +
    +

    Swift

    +
    func moderations(
    +    query: ModerationsQuery
    +) async throws -> ModerationsResult
    + +
    +
    +
    +
    +
  • +
  • +
    + + + + audioCreateSpeech(query:) + + + Extension method, asynchronous + +
    +
    +
    +
    +
    +
    + +
    +
    +

    Declaration

    +
    +

    Swift

    +
    func audioCreateSpeech(
    +    query: AudioSpeechQuery
    +) async throws -> AudioSpeechResult
    + +
    +
    +
    +
    +
  • +
  • +
    + + + + audioTranscriptions(query:) + + + Extension method, asynchronous + +
    +
    +
    +
    +
    +
    + +
    +
    +

    Declaration

    +
    +

    Swift

    +
    func audioTranscriptions(
    +    query: AudioTranscriptionQuery
    +) async throws -> AudioTranscriptionResult
    + +
    +
    +
    +
    +
  • +
  • +
    + + + + audioTranslations(query:) + + + Extension method, asynchronous + +
    +
    +
    +
    +
    +
    + +
    +
    +

    Declaration

    +
    +

    Swift

    +
    func audioTranslations(
    +    query: AudioTranslationQuery
    +) async throws -> AudioTranslationResult
    + +
    +
    +
    +
    +
  • +
  • +
    + + + + completions(query:) + + + Extension method + +
    +
    +
    +
    +
    +
    + +
    +
    +

    Declaration

    +
    +

    Swift

    +
    func completions(query: CompletionsQuery) -> AnyPublisher<CompletionsResult, Error>
    + +
    +
    +
    +
    +
  • +
  • +
    + + + + completionsStream(query:) + + + Extension method + +
    +
    +
    +
    +
    +
    + +
    +
    +

    Declaration

    +
    +

    Swift

    +
    func completionsStream(query: CompletionsQuery) -> AnyPublisher<Result<CompletionsResult, Error>, Error>
    + +
    +
    +
    +
    +
  • +
  • +
    + + + + images(query:) + + + Extension method + +
    +
    +
    +
    +
    +
    + +
    +
    +

    Declaration

    +
    +

    Swift

    +
    func images(query: ImagesQuery) -> AnyPublisher<ImagesResult, Error>
    + +
    +
    +
    +
    +
  • +
  • +
    + + + + imageEdits(query:) + + + Extension method + +
    +
    +
    +
    +
    +
    + +
    +
    +

    Declaration

    +
    +

    Swift

    +
    func imageEdits(query: ImageEditsQuery) -> AnyPublisher<ImagesResult, Error>
    + +
    +
    +
    +
    +
  • +
  • +
    + + + + imageVariations(query:) + + + Extension method + +
    +
    +
    +
    +
    +
    + +
    +
    +

    Declaration

    +
    +

    Swift

    +
    func imageVariations(query: ImageVariationsQuery) -> AnyPublisher<ImagesResult, Error>
    + +
    +
    +
    +
    +
  • +
  • +
    + + + + embeddings(query:) + + + Extension method + +
    +
    +
    +
    +
    +
    + +
    +
    +

    Declaration

    +
    +

    Swift

    +
    func embeddings(query: EmbeddingsQuery) -> AnyPublisher<EmbeddingsResult, Error>
    + +
    +
    +
    +
    +
  • +
  • +
    + + + + chats(query:) + + + Extension method + +
    +
    +
    +
    +
    +
    + +
    +
    +

    Declaration

    +
    +

    Swift

    +
    func chats(query: ChatQuery) -> AnyPublisher<ChatResult, Error>
    + +
    +
    +
    +
    +
  • +
  • +
    + + + + chatsStream(query:) + + + Extension method + +
    +
    +
    +
    +
    +
    + +
    +
    +

    Declaration

    +
    +

    Swift

    +
    func chatsStream(query: ChatQuery) -> AnyPublisher<Result<ChatStreamResult, Error>, Error>
    + +
    +
    +
    +
    +
  • +
  • +
    + + + + edits(query:) + + + Extension method + +
    +
    +
    +
    +
    +
    + +
    +
    +

    Declaration

    +
    +

    Swift

    +
    func edits(query: EditsQuery) -> AnyPublisher<EditsResult, Error>
    + +
    +
    +
    +
    +
  • +
  • +
    + + + + model(query:) + + + Extension method + +
    +
    +
    +
    +
    +
    + +
    +
    +

    Declaration

    +
    +

    Swift

    +
    func model(query: ModelQuery) -> AnyPublisher<ModelResult, Error>
    + +
    +
    +
    +
    +
  • +
  • +
    + + + + models() + + + Extension method + +
    +
    +
    +
    +
    +
    + +
    +
    +

    Declaration

    +
    +

    Swift

    +
    func models() -> AnyPublisher<ModelsResult, Error>
    + +
    +
    +
    +
    +
  • +
  • +
    + + + + moderations(query:) + + + Extension method + +
    +
    +
    +
    +
    +
    + +
    +
    +

    Declaration

    +
    +

    Swift

    +
    func moderations(query: ModerationsQuery) -> AnyPublisher<ModerationsResult, Error>
    + +
    +
    +
    +
    +
  • +
  • +
    + + + + audioCreateSpeech(query:) + + + Extension method + +
    +
    +
    +
    +
    +
    + +
    +
    +

    Declaration

    +
    +

    Swift

    +
    func audioCreateSpeech(query: AudioSpeechQuery) -> AnyPublisher<AudioSpeechResult, Error>
    + +
    +
    +
    +
    +
  • +
  • +
    + + + + audioTranscriptions(query:) + + + Extension method + +
    +
    +
    +
    +
    +
    + +
    +
    +

    Declaration

    +
    +

    Swift

    +
    func audioTranscriptions(query: AudioTranscriptionQuery) -> AnyPublisher<AudioTranscriptionResult, Error>
    + +
    +
    +
    +
    +
  • +
  • +
    + + + + audioTranslations(query:) + + + Extension method + +
    +
    +
    +
    +
    +
    + +
    +
    +

    Declaration

    +
    +

    Swift

    +
    func audioTranslations(query: AudioTranslationQuery) -> AnyPublisher<AudioTranslationResult, Error>
    + +
    +
    +
    +
    +
  • +
+
+
+
+ +
+
+ + diff --git a/docs/docsets/.docset/Contents/Resources/Documents/Structs.html b/docs/docsets/.docset/Contents/Resources/Documents/Structs.html new file mode 100644 index 00000000..423988ff --- /dev/null +++ b/docs/docsets/.docset/Contents/Resources/Documents/Structs.html @@ -0,0 +1,1077 @@ + + + + Structures Reference + + + + + + + + + + + + + +
+
+

Docs (100% documented)

+
+
+ +
+
+
+
+
+ +
+
+ +
+
+
+

Structures

+

The following structures are available globally.

+ +
+
+
+
    +
  • +
    + + + + APIError + +
    +
    +
    +
    +
    +
    + + See more +
    +
    +

    Declaration

    +
    +

    Swift

    +
    public struct APIError : Error, Decodable, Equatable
    +
    extension APIError: LocalizedError
    + +
    +
    +
    +
    +
  • +
  • +
    + + + + APIErrorResponse + +
    +
    +
    +
    +
    +
    + + See more +
    +
    +

    Declaration

    +
    +

    Swift

    +
    public struct APIErrorResponse : Error, Decodable, Equatable
    +
    extension APIErrorResponse: LocalizedError
    + +
    +
    +
    +
    +
  • +
  • +
    + + + + AudioSpeechQuery + +
    +
    +
    +
    +
    +
    +

    Generates audio from the input text. +Learn more: OpenAI Speech – Documentation

    + + See more +
    +
    +

    Declaration

    +
    +

    Swift

    +
    public struct AudioSpeechQuery : Codable
    + +
    +
    +
    +
    +
  • +
  • +
    + + + + AudioSpeechResult + +
    +
    +
    +
    +
    +
    +

    The audio file content. +Learn more: OpenAI Speech – Documentation

    + + See more +
    +
    +

    Declaration

    +
    +

    Swift

    +
    public struct AudioSpeechResult : Codable, Equatable
    + +
    +
    +
    +
    +
  • +
  • + +
    +
    +
    +
    +
    + + See more +
    +
    +

    Declaration

    +
    +

    Swift

    +
    public struct AudioTranscriptionQuery : Codable
    + +
    +
    +
    +
    +
  • +
  • + +
    +
    +
    +
    +
    + + See more +
    +
    +

    Declaration

    +
    +

    Swift

    +
    public struct AudioTranscriptionResult : Codable, Equatable
    + +
    +
    +
    +
    +
  • +
  • +
    + + + + AudioTranslationQuery + +
    +
    +
    +
    +
    +
    +

    Translates audio into English.

    + + See more +
    +
    +

    Declaration

    +
    +

    Swift

    +
    public struct AudioTranslationQuery : Codable
    + +
    +
    +
    +
    +
  • +
  • + +
    +
    +
    +
    +
    + + See more +
    +
    +

    Declaration

    +
    +

    Swift

    +
    public struct AudioTranslationResult : Codable, Equatable
    + +
    +
    +
    +
    +
  • +
  • +
    + + + + ChatQuery + +
    +
    +
    +
    +
    + +
    +

    Declaration

    +
    +

    Swift

    +
    public struct ChatQuery : Equatable, Codable, Streamable
    + +
    +
    +
    +
    +
  • +
  • +
    + + + + ChatResult + +
    +
    +
    +
    +
    +
    +

    https://platform.openai.com/docs/api-reference/chat/object +Example Completion object print

    +
    {
    + "id": "chatcmpl-123456",
    + "object": "chat.completion",
    + "created": 1728933352,
    + "model": "gpt-4o-2024-08-06",
    + "choices": [
    +   {
    +     "index": 0,
    +     "message": {
    +       "role": "assistant",
    +       "content": "Hi there! How can I assist you today?",
    +       "refusal": null
    +     },
    +     "logprobs": null,
    +     "finish_reason": "stop"
    +   }
    + ],
    + "usage": {
    +   "prompt_tokens": 19,
    +   "completion_tokens": 10,
    +   "total_tokens": 29,
    +   "prompt_tokens_details": {
    +     "cached_tokens": 0
    +   },
    +   "completion_tokens_details": {
    +     "reasoning_tokens": 0
    +   }
    + },
    + "system_fingerprint": "fp_6b68a8204b"
    +}
    +
    + + See more +
    +
    +

    Declaration

    +
    +

    Swift

    +
    public struct ChatResult : Codable, Equatable
    + +
    +
    +
    +
    +
  • +
  • +
    + + + + ChatStreamResult + +
    +
    +
    +
    +
    +
    + + See more +
    +
    +

    Declaration

    +
    +

    Swift

    +
    public struct ChatStreamResult : Codable, Equatable
    + +
    +
    +
    +
    +
  • +
  • +
    + + + + CompletionsQuery + +
    +
    +
    +
    +
    +
    + + See more +
    +
    +

    Declaration

    +
    +

    Swift

    +
    public struct CompletionsQuery : Codable, Streamable
    + +
    +
    +
    +
    +
  • +
  • +
    + + + + CompletionsResult + +
    +
    +
    +
    +
    +
    + + See more +
    +
    +

    Declaration

    +
    +

    Swift

    +
    public struct CompletionsResult : Codable, Equatable
    + +
    +
    +
    +
    +
  • +
  • +
    + + + + EditsQuery + +
    +
    +
    +
    +
    +
    + + See more +
    +
    +

    Declaration

    +
    +

    Swift

    +
    public struct EditsQuery : Codable
    + +
    +
    +
    +
    +
  • +
  • +
    + + + + EditsResult + +
    +
    +
    +
    +
    +
    + + See more +
    +
    +

    Declaration

    +
    +

    Swift

    +
    public struct EditsResult : Codable, Equatable
    + +
    +
    +
    +
    +
  • +
  • +
    + + + + EmbeddingsQuery + +
    +
    +
    +
    +
    +
    + + See more +
    +
    +

    Declaration

    +
    +

    Swift

    +
    public struct EmbeddingsQuery : Codable
    + +
    +
    +
    +
    +
  • +
  • +
    + + + + EmbeddingsResult + +
    +
    +
    +
    +
    +
    + + See more +
    +
    +

    Declaration

    +
    +

    Swift

    +
    public struct EmbeddingsResult : Codable, Equatable
    + +
    +
    +
    +
    +
  • +
  • +
    + + + + ImageEditsQuery + +
    +
    +
    +
    +
    +
    + + See more +
    +
    +

    Declaration

    +
    +

    Swift

    +
    public struct ImageEditsQuery : Codable
    + +
    +
    +
    +
    +
  • +
  • +
    + + + + ImageVariationsQuery + +
    +
    +
    +
    +
    +
    + + See more +
    +
    +

    Declaration

    +
    +

    Swift

    +
    public struct ImageVariationsQuery : Codable
    + +
    +
    +
    +
    +
  • +
  • +
    + + + + ImagesQuery + +
    +
    +
    +
    +
    +
    +

    Given a prompt and/or an input image, the model will generate a new image. +https://platform.openai.com/docs/guides/images

    + + See more +
    +
    +

    Declaration

    +
    +

    Swift

    +
    public struct ImagesQuery : Codable
    + +
    +
    +
    +
    +
  • +
  • +
    + + + + ImagesResult + +
    +
    +
    +
    +
    +
    +

    Returns a list of image objects.

    + + See more +
    +
    +

    Declaration

    +
    +

    Swift

    +
    public struct ImagesResult : Codable, Equatable
    + +
    +
    +
    +
    +
  • +
  • +
    + + + + ModelQuery + +
    +
    +
    +
    +
    +
    +

    Retrieves a model instance, providing basic information about the model such as the owner and permissioning.

    + + See more +
    +
    +

    Declaration

    +
    +

    Swift

    +
    public struct ModelQuery : Codable, Equatable
    + +
    +
    +
    +
    +
  • +
  • +
    + + + + ModelResult + +
    +
    +
    +
    +
    +
    +

    The model object matching the specified ID.

    + + See more +
    +
    +

    Declaration

    +
    +

    Swift

    +
    public struct ModelResult : Codable, Equatable
    + +
    +
    +
    +
    +
  • +
  • +
    + + + + ModelsResult + +
    +
    +
    +
    +
    +
    +

    A list of model objects.

    + + See more +
    +
    +

    Declaration

    +
    +

    Swift

    +
    public struct ModelsResult : Codable, Equatable
    + +
    +
    +
    +
    +
  • +
  • +
    + + + + ModerationsQuery + +
    +
    +
    +
    +
    +
    + + See more +
    +
    +

    Declaration

    +
    +

    Swift

    +
    public struct ModerationsQuery : Codable
    + +
    +
    +
    +
    +
  • +
  • +
    + + + + ModerationsResult + +
    +
    +
    +
    +
    +
    + + See more +
    +
    +

    Declaration

    +
    +

    Swift

    +
    public struct ModerationsResult : Codable, Equatable
    +
    extension ModerationsResult: Identifiable
    + +
    +
    +
    +
    +
  • +
  • +
    + + + + Vector + +
    +
    +
    +
    +
    +
    + + See more +
    +
    +

    Declaration

    +
    +

    Swift

    +
    public struct Vector
    + +
    +
    +
    +
    +
  • +
+
+
+
+ +
+
+ + diff --git a/docs/docsets/.docset/Contents/Resources/Documents/Structs/APIError.html b/docs/docsets/.docset/Contents/Resources/Documents/Structs/APIError.html new file mode 100644 index 00000000..2ddf50e6 --- /dev/null +++ b/docs/docsets/.docset/Contents/Resources/Documents/Structs/APIError.html @@ -0,0 +1,489 @@ + + + + APIError Structure Reference + + + + + + + + + + + + + +
+
+

Docs (100% documented)

+
+
+ +
+
+
+
+
+ +
+
+ +
+
+
+

APIError

+
+
+ +
public struct APIError : Error, Decodable, Equatable
+
extension APIError: LocalizedError
+ +
+
+ +
+
+
+
    +
  • +
    + + + + message + +
    +
    +
    +
    +
    +
    + +
    +
    +

    Declaration

    +
    +

    Swift

    +
    public let message: String
    + +
    +
    +
    +
    +
  • +
  • +
    + + + + type + +
    +
    +
    +
    +
    +
    + +
    +
    +

    Declaration

    +
    +

    Swift

    +
    public let type: String
    + +
    +
    +
    +
    +
  • +
  • +
    + + + + param + +
    +
    +
    +
    +
    +
    + +
    +
    +

    Declaration

    +
    +

    Swift

    +
    public let param: String?
    + +
    +
    +
    +
    +
  • +
  • +
    + + + + code + +
    +
    +
    +
    +
    +
    + +
    +
    +

    Declaration

    +
    +

    Swift

    +
    public let code: String?
    + +
    +
    +
    +
    +
  • +
  • + +
    +
    +
    +
    +
    + +
    +
    +

    Declaration

    +
    +

    Swift

    +
    public init(message: String, type: String, param: String?, code: String?)
    + +
    +
    +
    +
    +
  • +
  • +
    + + + + init(from:) + +
    +
    +
    +
    +
    +
    + +
    +
    +

    Declaration

    +
    +

    Swift

    +
    public init(from decoder: Decoder) throws
    + +
    +
    +
    +
    +
  • +
  • +
    + + + + errorDescription + +
    +
    +
    +
    +
    +
    + +
    +
    +

    Declaration

    +
    +

    Swift

    +
    public var errorDescription: String? { get }
    + +
    +
    +
    +
    +
  • +
+
+
+
+ +
+
+ + diff --git a/docs/docsets/.docset/Contents/Resources/Documents/Structs/APIErrorResponse.html b/docs/docsets/.docset/Contents/Resources/Documents/Structs/APIErrorResponse.html new file mode 100644 index 00000000..9b5a0389 --- /dev/null +++ b/docs/docsets/.docset/Contents/Resources/Documents/Structs/APIErrorResponse.html @@ -0,0 +1,359 @@ + + + + APIErrorResponse Structure Reference + + + + + + + + + + + + + +
+
+

Docs (100% documented)

+
+
+ +
+
+
+
+
+ +
+
+ +
+
+
+

APIErrorResponse

+
+
+ +
public struct APIErrorResponse : Error, Decodable, Equatable
+
extension APIErrorResponse: LocalizedError
+ +
+
+ +
+
+
+
    +
  • +
    + + + + error + +
    +
    +
    +
    +
    +
    + +
    +
    +

    Declaration

    +
    +

    Swift

    +
    public let error: APIError
    + +
    +
    +
    +
    +
  • +
  • +
    + + + + errorDescription + +
    +
    +
    +
    +
    +
    + +
    +
    +

    Declaration

    +
    +

    Swift

    +
    public var errorDescription: String? { get }
    + +
    +
    +
    +
    +
  • +
+
+
+
+ +
+
+ + diff --git a/docs/docsets/.docset/Contents/Resources/Documents/Structs/AudioSpeechQuery.html b/docs/docsets/.docset/Contents/Resources/Documents/Structs/AudioSpeechQuery.html new file mode 100644 index 00000000..b63839da --- /dev/null +++ b/docs/docsets/.docset/Contents/Resources/Documents/Structs/AudioSpeechQuery.html @@ -0,0 +1,620 @@ + + + + AudioSpeechQuery Structure Reference + + + + + + + + + + + + + +
+
+

Docs (100% documented)

+
+
+ +
+
+
+
+
+ +
+
+ +
+
+
+

AudioSpeechQuery

+
+
+ +
public struct AudioSpeechQuery : Codable
+ +
+
+

Generates audio from the input text. +Learn more: OpenAI Speech – Documentation

+ +
+
+
+
    +
  • +
    + + + + AudioSpeechVoice + +
    +
    +
    +
    +
    +
    +

    Encapsulates the voices available for audio generation.

    + +

    To get aquinted with each of the voices and listen to the samples visit: +OpenAI Text-to-Speech – Voice Options

    + + See more +
    +
    +

    Declaration

    +
    +

    Swift

    +
    public enum AudioSpeechVoice : String, Codable, CaseIterable
    + +
    +
    +
    +
    +
  • +
  • + +
    +
    +
    +
    +
    +

    Encapsulates the response formats available for audio data.

    + +

    Formats:

    + +
      +
    • mp3
    • +
    • opus
    • +
    • aac
    • +
    • flac
    • +
    + + See more +
    +
    +

    Declaration

    +
    +

    Swift

    +
    public enum AudioSpeechResponseFormat : String, Codable, CaseIterable
    + +
    +
    +
    +
    +
  • +
  • +
    + + + + input + +
    +
    +
    +
    +
    +
    +

    The text to generate audio for. The maximum length is 4096 characters.

    + +
    +
    +

    Declaration

    +
    +

    Swift

    +
    public let input: String
    + +
    +
    +
    +
    +
  • +
  • +
    + + + + model + +
    +
    +
    +
    +
    +
    +

    One of the available TTS models: tts-1 or tts-1-hd

    + +
    +
    +

    Declaration

    +
    +

    Swift

    +
    public let model: Model
    + +
    +
    +
    +
    +
  • +
  • +
    + + + + voice + +
    +
    +
    +
    +
    +
    +

    The voice to use when generating the audio. Supported voices are alloy, echo, fable, onyx, nova, and shimmer. Previews of the voices are available in the Text to speech guide. +https://platform.openai.com/docs/guides/text-to-speech/voice-options

    + +
    +
    +

    Declaration

    +
    +

    Swift

    +
    public let voice: AudioSpeechVoice
    + +
    +
    +
    +
    +
  • +
  • +
    + + + + responseFormat + +
    +
    +
    +
    +
    +
    +

    The format to audio in. Supported formats are mp3, opus, aac, and flac. +Defaults to mp3

    + +
    +
    +

    Declaration

    +
    +

    Swift

    +
    public let responseFormat: AudioSpeechResponseFormat?
    + +
    +
    +
    +
    +
  • +
  • +
    + + + + speed + +
    +
    +
    +
    +
    +
    +

    The speed of the generated audio. Select a value from 0.25 to 4.0. 1.0 is the default. +Defaults to 1

    + +
    +
    +

    Declaration

    +
    +

    Swift

    +
    public let speed: String?
    + +
    +
    +
    +
    +
  • +
  • +
    + + + + CodingKeys + +
    +
    +
    +
    +
    +
    + + See more +
    +
    +

    Declaration

    +
    +

    Swift

    +
    public enum CodingKeys : String, CodingKey
    + +
    +
    +
    +
    +
  • +
  • + +
    +
    +
    +
    +
    + +
    +
    +

    Declaration

    +
    +

    Swift

    +
    public init(model: Model, input: String, voice: AudioSpeechVoice, responseFormat: AudioSpeechResponseFormat = .mp3, speed: Double?)
    + +
    +
    +
    +
    +
  • +
  • +
    + + + + Speed + +
    +
    +
    +
    +
    +
    + + See more +
    +
    +

    Declaration

    +
    +

    Swift

    +
    enum Speed : Double
    + +
    +
    +
    +
    +
  • +
  • + +
    +
    +
    +
    +
    + +
    +
    +

    Declaration

    +
    +

    Swift

    +
    static func normalizeSpeechSpeed(_ inputSpeed: Double?) -> String
    + +
    +
    +
    +
    +
  • +
+
+
+
+ +
+
+ + diff --git a/docs/docsets/.docset/Contents/Resources/Documents/Structs/AudioSpeechQuery/AudioSpeechResponseFormat.html b/docs/docsets/.docset/Contents/Resources/Documents/Structs/AudioSpeechQuery/AudioSpeechResponseFormat.html new file mode 100644 index 00000000..b68d2557 --- /dev/null +++ b/docs/docsets/.docset/Contents/Resources/Documents/Structs/AudioSpeechQuery/AudioSpeechResponseFormat.html @@ -0,0 +1,422 @@ + + + + AudioSpeechResponseFormat Enumeration Reference + + + + + + + + + + + + + +
+
+

Docs (100% documented)

+
+
+ +
+
+
+
+
+ +
+
+ +
+
+
+

AudioSpeechResponseFormat

+
+
+ +
public enum AudioSpeechResponseFormat : String, Codable, CaseIterable
+ +
+
+

Encapsulates the response formats available for audio data.

+ +

Formats:

+ +
    +
  • mp3
  • +
  • opus
  • +
  • aac
  • +
  • flac
  • +
+ +
+
+
+
    +
  • +
    + + + + mp3 + +
    +
    +
    +
    +
    +
    + +
    +
    +

    Declaration

    +
    +

    Swift

    +
    case mp3
    + +
    +
    +
    +
    +
  • +
  • +
    + + + + opus + +
    +
    +
    +
    +
    +
    + +
    +
    +

    Declaration

    +
    +

    Swift

    +
    case opus
    + +
    +
    +
    +
    +
  • +
  • +
    + + + + aac + +
    +
    +
    +
    +
    +
    + +
    +
    +

    Declaration

    +
    +

    Swift

    +
    case aac
    + +
    +
    +
    +
    +
  • +
  • +
    + + + + flac + +
    +
    +
    +
    +
    +
    + +
    +
    +

    Declaration

    +
    +

    Swift

    +
    case flac
    + +
    +
    +
    +
    +
  • +
+
+
+
+ +
+
+ + diff --git a/docs/docsets/.docset/Contents/Resources/Documents/Structs/AudioSpeechQuery/AudioSpeechVoice.html b/docs/docsets/.docset/Contents/Resources/Documents/Structs/AudioSpeechQuery/AudioSpeechVoice.html new file mode 100644 index 00000000..2cdea3a2 --- /dev/null +++ b/docs/docsets/.docset/Contents/Resources/Documents/Structs/AudioSpeechQuery/AudioSpeechVoice.html @@ -0,0 +1,468 @@ + + + + AudioSpeechVoice Enumeration Reference + + + + + + + + + + + + + +
+
+

Docs (100% documented)

+
+
+ +
+
+
+
+
+ +
+
+ +
+
+
+

AudioSpeechVoice

+
+
+ +
public enum AudioSpeechVoice : String, Codable, CaseIterable
+ +
+
+

Encapsulates the voices available for audio generation.

+ +

To get aquinted with each of the voices and listen to the samples visit: +OpenAI Text-to-Speech – Voice Options

+ +
+
+
+
    +
  • +
    + + + + alloy + +
    +
    +
    +
    +
    +
    + +
    +
    +

    Declaration

    +
    +

    Swift

    +
    case alloy
    + +
    +
    +
    +
    +
  • +
  • +
    + + + + echo + +
    +
    +
    +
    +
    +
    + +
    +
    +

    Declaration

    +
    +

    Swift

    +
    case echo
    + +
    +
    +
    +
    +
  • +
  • +
    + + + + fable + +
    +
    +
    +
    +
    +
    + +
    +
    +

    Declaration

    +
    +

    Swift

    +
    case fable
    + +
    +
    +
    +
    +
  • +
  • +
    + + + + onyx + +
    +
    +
    +
    +
    +
    + +
    +
    +

    Declaration

    +
    +

    Swift

    +
    case onyx
    + +
    +
    +
    +
    +
  • +
  • +
    + + + + nova + +
    +
    +
    +
    +
    +
    + +
    +
    +

    Declaration

    +
    +

    Swift

    +
    case nova
    + +
    +
    +
    +
    +
  • +
  • +
    + + + + shimmer + +
    +
    +
    +
    +
    +
    + +
    +
    +

    Declaration

    +
    +

    Swift

    +
    case shimmer
    + +
    +
    +
    +
    +
  • +
+
+
+
+ +
+
+ + diff --git a/docs/docsets/.docset/Contents/Resources/Documents/Structs/AudioSpeechQuery/CodingKeys.html b/docs/docsets/.docset/Contents/Resources/Documents/Structs/AudioSpeechQuery/CodingKeys.html new file mode 100644 index 00000000..88e7eaf0 --- /dev/null +++ b/docs/docsets/.docset/Contents/Resources/Documents/Structs/AudioSpeechQuery/CodingKeys.html @@ -0,0 +1,438 @@ + + + + CodingKeys Enumeration Reference + + + + + + + + + + + + + +
+
+

Docs (100% documented)

+
+
+ +
+
+
+
+
+ +
+
+ +
+
+
+

CodingKeys

+
+
+ +
public enum CodingKeys : String, CodingKey
+ +
+
+ +
+
+
+
    +
  • +
    + + + + model + +
    +
    +
    +
    +
    +
    + +
    +
    +

    Declaration

    +
    +

    Swift

    +
    case model
    + +
    +
    +
    +
    +
  • +
  • +
    + + + + input + +
    +
    +
    +
    +
    +
    + +
    +
    +

    Declaration

    +
    +

    Swift

    +
    case input
    + +
    +
    +
    +
    +
  • +
  • +
    + + + + voice + +
    +
    +
    +
    +
    +
    + +
    +
    +

    Declaration

    +
    +

    Swift

    +
    case voice
    + +
    +
    +
    +
    +
  • +
  • +
    + + + + responseFormat + +
    +
    +
    +
    +
    +
    + +
    +
    +

    Declaration

    +
    +

    Swift

    +
    case responseFormat = "response_format"
    + +
    +
    +
    +
    +
  • +
  • +
    + + + + speed + +
    +
    +
    +
    +
    +
    + +
    +
    +

    Declaration

    +
    +

    Swift

    +
    case speed
    + +
    +
    +
    +
    +
  • +
+
+
+
+ +
+
+ + diff --git a/docs/docsets/.docset/Contents/Resources/Documents/Structs/AudioSpeechQuery/Speed.html b/docs/docsets/.docset/Contents/Resources/Documents/Structs/AudioSpeechQuery/Speed.html new file mode 100644 index 00000000..37d4ed0c --- /dev/null +++ b/docs/docsets/.docset/Contents/Resources/Documents/Structs/AudioSpeechQuery/Speed.html @@ -0,0 +1,386 @@ + + + + Speed Enumeration Reference + + + + + + + + + + + + + +
+
+

Docs (100% documented)

+
+
+ +
+
+
+
+
+ +
+
+ +
+
+
+

Speed

+
+
+ +
enum Speed : Double
+ +
+
+ +
+
+
+
    +
  • +
    + + + + normal + +
    +
    +
    +
    +
    +
    + +
    +
    +

    Declaration

    +
    +

    Swift

    +
    case normal = 1.0
    + +
    +
    +
    +
    +
  • +
  • +
    + + + + max + +
    +
    +
    +
    +
    +
    + +
    +
    +

    Declaration

    +
    +

    Swift

    +
    case max = 4.0
    + +
    +
    +
    +
    +
  • +
  • +
    + + + + min + +
    +
    +
    +
    +
    +
    + +
    +
    +

    Declaration

    +
    +

    Swift

    +
    case min = 0.25
    + +
    +
    +
    +
    +
  • +
+
+
+
+ +
+
+ + diff --git a/docs/docsets/.docset/Contents/Resources/Documents/Structs/AudioSpeechResult.html b/docs/docsets/.docset/Contents/Resources/Documents/Structs/AudioSpeechResult.html new file mode 100644 index 00000000..1daa0273 --- /dev/null +++ b/docs/docsets/.docset/Contents/Resources/Documents/Structs/AudioSpeechResult.html @@ -0,0 +1,335 @@ + + + + AudioSpeechResult Structure Reference + + + + + + + + + + + + + +
+
+

Docs (100% documented)

+
+
+ +
+
+
+
+
+ +
+
+ +
+
+
+

AudioSpeechResult

+
+
+ +
public struct AudioSpeechResult : Codable, Equatable
+ +
+
+

The audio file content. +Learn more: OpenAI Speech – Documentation

+ +
+
+
+
    +
  • +
    + + + + audio + +
    +
    +
    +
    +
    +
    +

    Audio data for one of the following formats :mp3, opus, aac, flac

    + +
    +
    +

    Declaration

    +
    +

    Swift

    +
    public let audio: Data
    + +
    +
    +
    +
    +
  • +
+
+
+
+ +
+
+ + diff --git a/docs/docsets/.docset/Contents/Resources/Documents/Structs/AudioTranscriptionQuery.html b/docs/docsets/.docset/Contents/Resources/Documents/Structs/AudioTranscriptionQuery.html new file mode 100644 index 00000000..f96d291b --- /dev/null +++ b/docs/docsets/.docset/Contents/Resources/Documents/Structs/AudioTranscriptionQuery.html @@ -0,0 +1,577 @@ + + + + AudioTranscriptionQuery Structure Reference + + + + + + + + + + + + + +
+
+

Docs (100% documented)

+
+
+ +
+
+
+
+
+ +
+
+ +
+
+
+

AudioTranscriptionQuery

+
+
+ +
public struct AudioTranscriptionQuery : Codable
+ +
+
+ +
+
+
+
    +
  • +
    + + + + ResponseFormat + +
    +
    +
    +
    +
    +
    + + See more +
    +
    +

    Declaration

    +
    +

    Swift

    +
    public enum ResponseFormat : String, Codable, Equatable, CaseIterable
    + +
    +
    +
    +
    +
  • +
  • +
    + + + + file + +
    +
    +
    +
    +
    +
    +

    The audio file object (not file name) to transcribe, in one of these formats: flac, mp3, mp4, mpeg, mpga, m4a, ogg, wav, or webm.

    + +
    +
    +

    Declaration

    +
    +

    Swift

    +
    public let file: Data
    + +
    +
    +
    +
    +
  • +
  • +
    + + + + fileType + +
    +
    +
    +
    +
    +
    + +
    +
    +

    Declaration

    +
    +

    Swift

    +
    public let fileType: `Self`.FileType
    + +
    +
    +
    +
    +
  • +
  • +
    + + + + model + +
    +
    +
    +
    +
    +
    +

    ID of the model to use. Only whisper-1 is currently available.

    + +
    +
    +

    Declaration

    +
    +

    Swift

    +
    public let model: Model
    + +
    +
    +
    +
    +
  • +
  • +
    + + + + responseFormat + +
    +
    +
    +
    +
    +
    +

    The format of the transcript output, in one of these options: json, text, srt, verbose_json, or vtt. +Defaults to json

    + +
    +
    +

    Declaration

    +
    +

    Swift

    +
    public let responseFormat: `Self`.ResponseFormat?
    + +
    +
    +
    +
    +
  • +
  • +
    + + + + prompt + +
    +
    +
    +
    +
    +
    +

    An optional text to guide the model’s style or continue a previous audio segment. The prompt should match the audio language.

    + +
    +
    +

    Declaration

    +
    +

    Swift

    +
    public let prompt: String?
    + +
    +
    +
    +
    +
  • +
  • +
    + + + + temperature + +
    +
    +
    +
    +
    +
    +

    The sampling temperature, between 0 and 1. Higher values like 0.8 will make the output more random, while lower values like 0.2 will make it more focused and deterministic. If set to 0, the model will use log probability to automatically increase the temperature until certain thresholds are hit. +Defaults to 0

    + +
    +
    +

    Declaration

    +
    +

    Swift

    +
    public let temperature: Double?
    + +
    +
    +
    +
    +
  • +
  • +
    + + + + language + +
    +
    +
    +
    +
    +
    +

    The language of the input audio. Supplying the input language in ISO-639-1 format will improve accuracy and latency. +https://platform.openai.com/docs/guides/speech-to-text/prompting

    + +
    +
    +

    Declaration

    +
    +

    Swift

    +
    public let language: String?
    + +
    +
    +
    +
    +
  • +
  • + +
    +
    +
    +
    +
    + +
    +
    +

    Declaration

    +
    +

    Swift

    +
    public init(file: Data, fileType: `Self`.FileType, model: Model, prompt: String? = nil, temperature: Double? = nil, language: String? = nil, responseFormat: `Self`.ResponseFormat? = nil)
    + +
    +
    +
    +
    +
  • +
  • +
    + + + + FileType + +
    +
    +
    +
    +
    +
    + + See more +
    +
    +

    Declaration

    +
    +

    Swift

    +
    public enum FileType : String, Codable, Equatable, CaseIterable
    + +
    +
    +
    +
    +
  • +
+
+
+
+ +
+
+ + diff --git a/docs/docsets/.docset/Contents/Resources/Documents/Structs/AudioTranscriptionQuery/FileType.html b/docs/docsets/.docset/Contents/Resources/Documents/Structs/AudioTranscriptionQuery/FileType.html new file mode 100644 index 00000000..6ed68c6f --- /dev/null +++ b/docs/docsets/.docset/Contents/Resources/Documents/Structs/AudioTranscriptionQuery/FileType.html @@ -0,0 +1,542 @@ + + + + FileType Enumeration Reference + + + + + + + + + + + + + +
+
+

Docs (100% documented)

+
+
+ +
+
+
+
+
+ +
+
+ +
+
+
+

FileType

+
+
+ +
public enum FileType : String, Codable, Equatable, CaseIterable
+ +
+
+ +
+
+
+
    +
  • +
    + + + + flac + +
    +
    +
    +
    +
    +
    + +
    +
    +

    Declaration

    +
    +

    Swift

    +
    case flac
    + +
    +
    +
    +
    +
  • +
  • +
    + + + + mp3 + +
    +
    +
    +
    +
    +
    + +
    +
    +

    Declaration

    +
    +

    Swift

    +
    case mp3
    + +
    +
    +
    +
    +
  • +
  • +
    + + + + mpga + +
    +
    +
    +
    +
    +
    + +
    +
    +

    Declaration

    +
    +

    Swift

    +
    case mpga
    + +
    +
    +
    +
    +
  • +
  • +
    + + + + mp4 + +
    +
    +
    +
    +
    +
    + +
    +
    +

    Declaration

    +
    +

    Swift

    +
    case mp4
    + +
    +
    +
    +
    +
  • +
  • +
    + + + + m4a + +
    +
    +
    +
    +
    +
    + +
    +
    +

    Declaration

    +
    +

    Swift

    +
    case m4a
    + +
    +
    +
    +
    +
  • +
  • +
    + + + + mpeg + +
    +
    +
    +
    +
    +
    + +
    +
    +

    Declaration

    +
    +

    Swift

    +
    case mpeg
    + +
    +
    +
    +
    +
  • +
  • +
    + + + + ogg + +
    +
    +
    +
    +
    +
    + +
    +
    +

    Declaration

    +
    +

    Swift

    +
    case ogg
    + +
    +
    +
    +
    +
  • +
  • +
    + + + + wav + +
    +
    +
    +
    +
    +
    + +
    +
    +

    Declaration

    +
    +

    Swift

    +
    case wav
    + +
    +
    +
    +
    +
  • +
  • +
    + + + + webm + +
    +
    +
    +
    +
    +
    + +
    +
    +

    Declaration

    +
    +

    Swift

    +
    case webm
    + +
    +
    +
    +
    +
  • +
+
+
+
+ +
+
+ + diff --git a/docs/docsets/.docset/Contents/Resources/Documents/Structs/AudioTranscriptionQuery/ResponseFormat.html b/docs/docsets/.docset/Contents/Resources/Documents/Structs/AudioTranscriptionQuery/ResponseFormat.html new file mode 100644 index 00000000..9d2100c0 --- /dev/null +++ b/docs/docsets/.docset/Contents/Resources/Documents/Structs/AudioTranscriptionQuery/ResponseFormat.html @@ -0,0 +1,438 @@ + + + + ResponseFormat Enumeration Reference + + + + + + + + + + + + + +
+
+

Docs (100% documented)

+
+
+ +
+
+
+
+
+ +
+
+ +
+
+
+

ResponseFormat

+
+
+ +
public enum ResponseFormat : String, Codable, Equatable, CaseIterable
+ +
+
+ +
+
+
+
    +
  • +
    + + + + json + +
    +
    +
    +
    +
    +
    + +
    +
    +

    Declaration

    +
    +

    Swift

    +
    case json
    + +
    +
    +
    +
    +
  • +
  • +
    + + + + text + +
    +
    +
    +
    +
    +
    + +
    +
    +

    Declaration

    +
    +

    Swift

    +
    case text
    + +
    +
    +
    +
    +
  • +
  • +
    + + + + verboseJson + +
    +
    +
    +
    +
    +
    + +
    +
    +

    Declaration

    +
    +

    Swift

    +
    case verboseJson = "verbose_json"
    + +
    +
    +
    +
    +
  • +
  • +
    + + + + srt + +
    +
    +
    +
    +
    +
    + +
    +
    +

    Declaration

    +
    +

    Swift

    +
    case srt
    + +
    +
    +
    +
    +
  • +
  • +
    + + + + vtt + +
    +
    +
    +
    +
    +
    + +
    +
    +

    Declaration

    +
    +

    Swift

    +
    case vtt
    + +
    +
    +
    +
    +
  • +
+
+
+
+ +
+
+ + diff --git a/docs/docsets/.docset/Contents/Resources/Documents/Structs/AudioTranscriptionResult.html b/docs/docsets/.docset/Contents/Resources/Documents/Structs/AudioTranscriptionResult.html new file mode 100644 index 00000000..5477b0e7 --- /dev/null +++ b/docs/docsets/.docset/Contents/Resources/Documents/Structs/AudioTranscriptionResult.html @@ -0,0 +1,333 @@ + + + + AudioTranscriptionResult Structure Reference + + + + + + + + + + + + + +
+
+

Docs (100% documented)

+
+
+ +
+
+
+
+
+ +
+
+ +
+
+
+

AudioTranscriptionResult

+
+
+ +
public struct AudioTranscriptionResult : Codable, Equatable
+ +
+
+ +
+
+
+
    +
  • +
    + + + + text + +
    +
    +
    +
    +
    +
    +

    The transcribed text.

    + +
    +
    +

    Declaration

    +
    +

    Swift

    +
    public let text: String
    + +
    +
    +
    +
    +
  • +
+
+
+
+ +
+
+ + diff --git a/docs/docsets/.docset/Contents/Resources/Documents/Structs/AudioTranslationQuery.html b/docs/docsets/.docset/Contents/Resources/Documents/Structs/AudioTranslationQuery.html new file mode 100644 index 00000000..bc412efa --- /dev/null +++ b/docs/docsets/.docset/Contents/Resources/Documents/Structs/AudioTranslationQuery.html @@ -0,0 +1,549 @@ + + + + AudioTranslationQuery Structure Reference + + + + + + + + + + + + + +
+
+

Docs (100% documented)

+
+
+ +
+
+
+
+
+ +
+
+ +
+
+
+

AudioTranslationQuery

+
+
+ +
public struct AudioTranslationQuery : Codable
+ +
+
+

Translates audio into English.

+ +
+
+
+
    +
  • +
    + + + + FileType + +
    +
    +
    +
    +
    +
    + +
    +
    +

    Declaration

    +
    +

    Swift

    +
    public typealias FileType = AudioTranscriptionQuery.FileType
    + +
    +
    +
    +
    +
  • +
  • +
    + + + + ResponseFormat + +
    +
    +
    +
    +
    +
    + +
    +
    +

    Declaration

    +
    +

    Swift

    +
    public typealias ResponseFormat = AudioTranscriptionQuery.ResponseFormat
    + +
    +
    +
    +
    +
  • +
  • +
    + + + + file + +
    +
    +
    +
    +
    +
    +

    The audio file object (not file name) translate, in one of these formats: flac, mp3, mp4, mpeg, mpga, m4a, ogg, wav, or webm.

    + +
    +
    +

    Declaration

    +
    +

    Swift

    +
    public let file: Data
    + +
    +
    +
    +
    +
  • +
  • +
    + + + + fileType + +
    +
    +
    +
    +
    +
    + +
    +
    +

    Declaration

    +
    +

    Swift

    +
    public let fileType: `Self`.FileType
    + +
    +
    +
    +
    +
  • +
  • +
    + + + + model + +
    +
    +
    +
    +
    +
    +

    ID of the model to use. Only whisper-1 is currently available.

    + +
    +
    +

    Declaration

    +
    +

    Swift

    +
    public let model: Model
    + +
    +
    +
    +
    +
  • +
  • +
    + + + + responseFormat + +
    +
    +
    +
    +
    +
    +

    The format of the transcript output, in one of these options: json, text, srt, verbose_json, or vtt. +Defaults to json

    + +
    +
    +

    Declaration

    +
    +

    Swift

    +
    public let responseFormat: `Self`.ResponseFormat?
    + +
    +
    +
    +
    +
  • +
  • +
    + + + + prompt + +
    +
    +
    +
    +
    +
    +

    An optional text to guide the model’s style or continue a previous audio segment. The prompt should be in English. +https://platform.openai.com/docs/guides/speech-to-text/prompting

    + +
    +
    +

    Declaration

    +
    +

    Swift

    +
    public let prompt: String?
    + +
    +
    +
    +
    +
  • +
  • +
    + + + + temperature + +
    +
    +
    +
    +
    +
    +

    The sampling temperature, between 0 and 1. Higher values like 0.8 will make the output more random, while lower values like 0.2 will make it more focused and deterministic. If set to 0, the model will use log probability to automatically increase the temperature until certain thresholds are hit. +Defaults to 0

    + +
    +
    +

    Declaration

    +
    +

    Swift

    +
    public let temperature: Double?
    + +
    +
    +
    +
    +
  • +
  • + +
    +
    +
    +
    +
    + +
    +
    +

    Declaration

    +
    +

    Swift

    +
    public init(file: Data, fileType: `Self`.FileType, model: Model, prompt: String? = nil, temperature: Double? = nil, responseFormat: `Self`.ResponseFormat? = nil)
    + +
    +
    +
    +
    +
  • +
+
+
+
+ +
+
+ + diff --git a/docs/docsets/.docset/Contents/Resources/Documents/Structs/AudioTranslationResult.html b/docs/docsets/.docset/Contents/Resources/Documents/Structs/AudioTranslationResult.html new file mode 100644 index 00000000..d64aa14a --- /dev/null +++ b/docs/docsets/.docset/Contents/Resources/Documents/Structs/AudioTranslationResult.html @@ -0,0 +1,333 @@ + + + + AudioTranslationResult Structure Reference + + + + + + + + + + + + + +
+
+

Docs (100% documented)

+
+
+ +
+
+
+
+
+ +
+
+ +
+
+
+

AudioTranslationResult

+
+
+ +
public struct AudioTranslationResult : Codable, Equatable
+ +
+
+ +
+
+
+
    +
  • +
    + + + + text + +
    +
    +
    +
    +
    +
    +

    The translated text.

    + +
    +
    +

    Declaration

    +
    +

    Swift

    +
    public let text: String
    + +
    +
    +
    +
    +
  • +
+
+
+
+ +
+
+ + diff --git a/docs/docsets/.docset/Contents/Resources/Documents/Structs/ChatQuery.html b/docs/docsets/.docset/Contents/Resources/Documents/Structs/ChatQuery.html new file mode 100644 index 00000000..ffd9d479 --- /dev/null +++ b/docs/docsets/.docset/Contents/Resources/Documents/Structs/ChatQuery.html @@ -0,0 +1,1022 @@ + + + + ChatQuery Structure Reference + + + + + + + + + + + + + +
+
+

Docs (100% documented)

+
+
+ +
+
+
+
+
+ +
+
+ +
+
+
+

ChatQuery

+
+
+ +
public struct ChatQuery : Equatable, Codable, Streamable
+ +
+
+

Creates a model response for the given chat conversation +https://platform.openai.com/docs/guides/text-generation +https://platform.openai.com/docs/api-reference/chat/create

+ +
+
+
+
    +
  • +
    + + + + messages + +
    +
    +
    +
    +
    +
    +

    A list of messages comprising the conversation so far

    + +
    +
    +

    Declaration

    +
    +

    Swift

    +
    public let messages: [`Self`.ChatCompletionMessageParam]
    + +
    +
    +
    +
    +
  • +
  • +
    + + + + model + +
    +
    +
    +
    +
    +
    +

    ID of the model to use. See the model endpoint compatibility table for details on which models work with the Chat API. +https://platform.openai.com/docs/models/model-endpoint-compatibility

    + +
    +
    +

    Declaration

    +
    +

    Swift

    +
    public let model: Model
    + +
    +
    +
    +
    +
  • +
  • +
    + + + + frequencyPenalty + +
    +
    +
    +
    +
    +
    +

    Number between -2.0 and 2.0. Positive values penalize new tokens based on their existing frequency in the text so far, decreasing the model’s likelihood to repeat the same line verbatim. +Defaults to 0 +https://platform.openai.com/docs/guides/text-generation/parameter-details

    + +
    +
    +

    Declaration

    +
    +

    Swift

    +
    public let frequencyPenalty: Double?
    + +
    +
    +
    +
    +
  • +
  • +
    + + + + logitBias + +
    +
    +
    +
    +
    +
    +

    Modify the likelihood of specified tokens appearing in the completion. +Accepts a JSON object that maps tokens (specified by their token ID in the tokenizer) to an associated bias value from -100 to 100. Mathematically, the bias is added to the logits generated by the model prior to sampling. The exact effect will vary per model, but values between -1 and 1 should decrease or increase likelihood of selection; values like -100 or 100 should result in a ban or exclusive selection of the relevant token. +Defaults to null

    + +
    +
    +

    Declaration

    +
    +

    Swift

    +
    public let logitBias: [String : Int]?
    + +
    +
    +
    +
    +
  • +
  • +
    + + + + logprobs + +
    +
    +
    +
    +
    +
    +

    Whether to return log probabilities of the output tokens or not. If true, returns the log probabilities of each output token returned in the content of message. This option is currently not available on the gpt-4-vision-preview model. +Defaults to false

    + +
    +
    +

    Declaration

    +
    +

    Swift

    +
    public let logprobs: Bool?
    + +
    +
    +
    +
    +
  • +
  • +
    + + + + maxTokens + +
    +
    +
    +
    +
    +
    +

    The maximum number of tokens to generate in the completion. +The total length of input tokens and generated tokens is limited by the model’s context length. +https://platform.openai.com/tokenizer

    + +
    +
    +

    Declaration

    +
    +

    Swift

    +
    public let maxTokens: Int?
    + +
    +
    +
    +
    +
  • +
  • +
    + + + + n + +
    +
    +
    +
    +
    +
    +

    How many chat completion choices to generate for each input message. Note that you will be charged based on the number of generated tokens across all of the choices. Keep n as 1 to minimize costs. +Defaults to 1

    + +
    +
    +

    Declaration

    +
    +

    Swift

    +
    public let n: Int?
    + +
    +
    +
    +
    +
  • +
  • +
    + + + + presencePenalty + +
    +
    +
    +
    +
    +
    +

    Number between -2.0 and 2.0. Positive values penalize new tokens based on whether they appear in the text so far, increasing the model’s likelihood to talk about new topics. +https://platform.openai.com/docs/guides/text-generation/parameter-details

    + +
    +
    +

    Declaration

    +
    +

    Swift

    +
    public let presencePenalty: Double?
    + +
    +
    +
    +
    +
  • +
  • +
    + + + + responseFormat + +
    +
    +
    +
    +
    +
    +

    An object specifying the format that the model must output. Compatible with gpt-4-1106-preview and gpt-3.5-turbo-1106. +Setting to { “type”: “json_object” } enables JSON mode, which guarantees the message the model generates is valid JSON. +Important: when using JSON mode, you must also instruct the model to produce JSON yourself via a system or user message. Without this, the model may generate an unending stream of whitespace until the generation reaches the token limit, resulting in a long-running and seemingly “stuck” request. Also note that the message content may be partially cut off if finish_reason=“length”, which indicates the generation exceeded max_tokens or the conversation exceeded the max context length.

    + +
    +
    +

    Declaration

    +
    +

    Swift

    +
    public let responseFormat: `Self`.ResponseFormat?
    + +
    +
    +
    +
    +
  • +
  • +
    + + + + seed + +
    +
    +
    +
    +
    +
    +

    This feature is in Beta. If specified, our system will make a best effort to sample deterministically, such that repeated requests with the same seed and parameters should return the same result. Determinism is not guaranteed, and you should refer to the system_fingerprint response parameter to monitor changes in the backend.

    + +
    +
    +

    Declaration

    +
    +

    Swift

    +
    public let seed: Int?
    + +
    +
    +
    +
    +
  • +
  • +
    + + + + stop + +
    +
    +
    +
    +
    +
    +

    Up to 4 sequences where the API will stop generating further tokens. The returned text will not contain the stop sequence. +Defaults to null

    + +
    +
    +

    Declaration

    +
    +

    Swift

    +
    public let stop: Stop?
    + +
    +
    +
    +
    +
  • +
  • +
    + + + + temperature + +
    +
    +
    +
    +
    +
    +

    What sampling temperature to use, between 0 and 2. Higher values like 0.8 will make the output more random, while lower values like 0.2 will make it more focused and deterministic. +We generally recommend altering this or top_p but not both. +Defaults to 1

    + +
    +
    +

    Declaration

    +
    +

    Swift

    +
    public let temperature: Double?
    + +
    +
    +
    +
    +
  • +
  • +
    + + + + toolChoice + +
    +
    +
    +
    +
    +
    +

    Controls which (if any) function is called by the model. none means the model will not call a function and instead generates a message. auto means the model can pick between generating a message or calling a function. Specifying a particular function via {“type”: “function”, “function”: {“name”: “my_function”}} forces the model to call that function. +none is the default when no functions are present. auto is the default if functions are present

    + +
    +
    +

    Declaration

    +
    +

    Swift

    +
    public let toolChoice: `Self`.ChatCompletionFunctionCallOptionParam?
    + +
    +
    +
    +
    +
  • +
  • +
    + + + + tools + +
    +
    +
    +
    +
    +
    +

    A list of tools the model may call. Currently, only functions are supported as a tool. Use this to provide a list of functions the model may generate JSON inputs for.

    + +
    +
    +

    Declaration

    +
    +

    Swift

    +
    public let tools: [`Self`.ChatCompletionToolParam]?
    + +
    +
    +
    +
    +
  • +
  • +
    + + + + topLogprobs + +
    +
    +
    +
    +
    +
    +

    An integer between 0 and 5 specifying the number of most likely tokens to return at each token position, each with an associated log probability. logprobs must be set to true if this parameter is used.

    + +
    +
    +

    Declaration

    +
    +

    Swift

    +
    public let topLogprobs: Int?
    + +
    +
    +
    +
    +
  • +
  • +
    + + + + topP + +
    +
    +
    +
    +
    +
    +

    An alternative to sampling with temperature, called nucleus sampling, where the model considers the results of the tokens with top_p probability mass. So 0.1 means only the tokens comprising the top 10% probability mass are considered. +We generally recommend altering this or temperature but not both. +Defaults to 1

    + +
    +
    +

    Declaration

    +
    +

    Swift

    +
    public let topP: Double?
    + +
    +
    +
    +
    +
  • +
  • +
    + + + + user + +
    +
    +
    +
    +
    +
    +

    A unique identifier representing your end-user, which can help OpenAI to monitor and detect abuse. +https://platform.openai.com/docs/guides/safety-best-practices/end-user-ids

    + +
    +
    +

    Declaration

    +
    +

    Swift

    +
    public let user: String?
    + +
    +
    +
    +
    +
  • +
  • +
    + + + + stream + +
    +
    +
    +
    +
    +
    +

    If set, partial message deltas will be sent, like in ChatGPT. Tokens will be sent as data-only server-sent events as they become available, with the stream terminated by a data: [DONE] message. +https://developer.mozilla.org/en-US/docs/Web/API/Server-sent_events/Using_server-sent_events#Event_stream_format

    + +
    +
    +

    Declaration

    +
    +

    Swift

    +
    public var stream: Bool
    + +
    +
    +
    +
    +
  • +
  • + +
    +
    +
    +
    +
    + +
    +
    +

    Declaration

    +
    +

    Swift

    +
    public init(
    +    messages: [Self.ChatCompletionMessageParam],
    +    model: Model,
    +    frequencyPenalty: Double? = nil,
    +    logitBias: [String : Int]? = nil,
    +    logprobs: Bool? = nil,
    +    maxTokens: Int? = nil,
    +    n: Int? = nil,
    +    presencePenalty: Double? = nil,
    +    responseFormat: Self.ResponseFormat? = nil,
    +    seed: Int? = nil,
    +    stop: Self.Stop? = nil,
    +    temperature: Double? = nil,
    +    toolChoice: Self.ChatCompletionFunctionCallOptionParam? = nil,
    +    tools: [Self.ChatCompletionToolParam]? = nil,
    +    topLogprobs: Int? = nil,
    +    topP: Double? = nil,
    +    user: String? = nil,
    +    stream: Bool = false
    +)
    + +
    +
    +
    +
    +
  • +
  • + +
    +
    +
    +
    +
    + + See more +
    +
    +

    Declaration

    +
    +

    Swift

    +
    public enum ChatCompletionMessageParam : Codable, Equatable
    + +
    +
    +
    +
    +
  • +
  • +
    + + + + Stop + +
    +
    +
    +
    +
    +
    + + See more +
    +
    +

    Declaration

    +
    +

    Swift

    +
    public enum Stop : Codable, Equatable
    + +
    +
    +
    +
    +
  • +
  • +
    + + + + ResponseFormat + +
    +
    +
    +
    +
    +
    + + See more +
    +
    +

    Declaration

    +
    +

    Swift

    +
    public enum ResponseFormat : String, Codable, Equatable
    + +
    +
    +
    +
    +
  • +
  • + +
    +
    +
    +
    +
    + + See more +
    +
    +

    Declaration

    +
    +

    Swift

    +
    public enum ChatCompletionFunctionCallOptionParam : Codable, Equatable
    + +
    +
    +
    +
    +
  • +
  • + +
    +
    +
    +
    +
    + + See more +
    +
    +

    Declaration

    +
    +

    Swift

    +
    public struct ChatCompletionToolParam : Codable, Equatable
    + +
    +
    +
    +
    +
  • +
  • +
    + + + + CodingKeys + +
    +
    +
    +
    +
    +
    + + See more +
    +
    +

    Declaration

    +
    +

    Swift

    +
    public enum CodingKeys : String, CodingKey
    + +
    +
    +
    +
    +
  • +
+
+
+
+ +
+
+ + diff --git a/docs/docsets/.docset/Contents/Resources/Documents/Structs/ChatQuery/ChatCompletionFunctionCallOptionParam.html b/docs/docsets/.docset/Contents/Resources/Documents/Structs/ChatQuery/ChatCompletionFunctionCallOptionParam.html new file mode 100644 index 00000000..14274779 --- /dev/null +++ b/docs/docsets/.docset/Contents/Resources/Documents/Structs/ChatQuery/ChatCompletionFunctionCallOptionParam.html @@ -0,0 +1,438 @@ + + + + ChatCompletionFunctionCallOptionParam Enumeration Reference + + + + + + + + + + + + + +
+
+

Docs (100% documented)

+
+
+ +
+
+
+
+
+ +
+
+ +
+
+
+

ChatCompletionFunctionCallOptionParam

+
+
+ +
public enum ChatCompletionFunctionCallOptionParam : Codable, Equatable
+ +
+
+ +
+
+
+
    +
  • +
    + + + + none + +
    +
    +
    +
    +
    +
    + +
    +
    +

    Declaration

    +
    +

    Swift

    +
    case none
    + +
    +
    +
    +
    +
  • +
  • +
    + + + + auto + +
    +
    +
    +
    +
    +
    + +
    +
    +

    Declaration

    +
    +

    Swift

    +
    case auto
    + +
    +
    +
    +
    +
  • +
  • +
    + + + + function(_:) + +
    +
    +
    +
    +
    +
    + +
    +
    +

    Declaration

    +
    +

    Swift

    +
    case function(String)
    + +
    +
    +
    +
    +
  • +
  • +
    + + + + encode(to:) + +
    +
    +
    +
    +
    +
    + +
    +
    +

    Declaration

    +
    +

    Swift

    +
    public func encode(to encoder: Encoder) throws
    + +
    +
    +
    +
    +
  • +
  • +
    + + + + init(function:) + +
    +
    +
    +
    +
    +
    + +
    +
    +

    Declaration

    +
    +

    Swift

    +
    public init(function: String)
    + +
    +
    +
    +
    +
  • +
+
+
+
+ +
+
+ + diff --git a/docs/docsets/.docset/Contents/Resources/Documents/Structs/ChatQuery/ChatCompletionMessageParam.html b/docs/docsets/.docset/Contents/Resources/Documents/Structs/ChatQuery/ChatCompletionMessageParam.html new file mode 100644 index 00000000..f6323aa5 --- /dev/null +++ b/docs/docsets/.docset/Contents/Resources/Documents/Structs/ChatQuery/ChatCompletionMessageParam.html @@ -0,0 +1,791 @@ + + + + ChatCompletionMessageParam Enumeration Reference + + + + + + + + + + + + + +
+
+

Docs (100% documented)

+
+
+ +
+
+
+
+
+ +
+
+ +
+
+
+

ChatCompletionMessageParam

+
+
+ +
public enum ChatCompletionMessageParam : Codable, Equatable
+ +
+
+ +
+
+
+ +
+
+
+ +
+
+ + diff --git a/docs/docsets/.docset/Contents/Resources/Documents/Structs/ChatQuery/ChatCompletionMessageParam/ChatCompletionAssistantMessageParam.html b/docs/docsets/.docset/Contents/Resources/Documents/Structs/ChatQuery/ChatCompletionMessageParam/ChatCompletionAssistantMessageParam.html new file mode 100644 index 00000000..20a6e7df --- /dev/null +++ b/docs/docsets/.docset/Contents/Resources/Documents/Structs/ChatQuery/ChatCompletionMessageParam/ChatCompletionAssistantMessageParam.html @@ -0,0 +1,528 @@ + + + + ChatCompletionAssistantMessageParam Structure Reference + + + + + + + + + + + + + +
+
+

Docs (100% documented)

+
+
+ +
+
+
+
+
+ +
+
+ +
+
+
+

ChatCompletionAssistantMessageParam

+
+
+ +
public struct ChatCompletionAssistantMessageParam : Codable, Equatable
+ +
+
+ +
+
+
+
    +
  • +
    + + + + Role + +
    +
    +
    +
    +
    +
    + +
    +
    +

    Declaration

    +
    +

    Swift

    +
    public typealias Role = ChatQuery.ChatCompletionMessageParam.Role
    + +
    +
    +
    +
    +
  • +
  • +
    + + + + role + +
    +
    +
    +
    +
    +
    +

    / The role of the messages author, in this case assistant.

    + +
    +
    +

    Declaration

    +
    +

    Swift

    +
    public let role: `Self`.Role
    + +
    +
    +
    +
    +
  • +
  • +
    + + + + content + +
    +
    +
    +
    +
    +
    +

    The contents of the assistant message. Required unless tool_calls is specified.

    + +
    +
    +

    Declaration

    +
    +

    Swift

    +
    public let content: String?
    + +
    +
    +
    +
    +
  • +
  • +
    + + + + name + +
    +
    +
    +
    +
    +
    +

    The name of the author of this message. name is required if role is function, and it should be the name of the function whose response is in the content. May contain a-z, A-Z, 0-9, and underscores, with a maximum length of 64 characters.

    + +
    +
    +

    Declaration

    +
    +

    Swift

    +
    public let name: String?
    + +
    +
    +
    +
    +
  • +
  • +
    + + + + toolCalls + +
    +
    +
    +
    +
    +
    +

    The tool calls generated by the model, such as function calls.

    + +
    +
    +

    Declaration

    +
    +

    Swift

    +
    public let toolCalls: [`Self`.ChatCompletionMessageToolCallParam]?
    + +
    +
    +
    +
    +
  • +
  • + +
    +
    +
    +
    +
    + +
    +
    +

    Declaration

    +
    +

    Swift

    +
    public init(
    +    content: String? = nil,
    +    name: String? = nil,
    +    toolCalls: [Self.ChatCompletionMessageToolCallParam]? = nil
    +)
    + +
    +
    +
    +
    +
  • +
  • +
    + + + + CodingKeys + +
    +
    +
    +
    +
    +
    + + See more +
    +
    +

    Declaration

    +
    +

    Swift

    +
    public enum CodingKeys : String, CodingKey
    + +
    +
    +
    +
    +
  • +
  • + +
    +
    +
    +
    +
    + + See more +
    +
    +

    Declaration

    +
    +

    Swift

    +
    public struct ChatCompletionMessageToolCallParam : Codable, Equatable
    + +
    +
    +
    +
    +
  • +
+
+
+
+ +
+
+ + diff --git a/docs/docsets/.docset/Contents/Resources/Documents/Structs/ChatQuery/ChatCompletionMessageParam/ChatCompletionAssistantMessageParam/ChatCompletionMessageToolCallParam.html b/docs/docsets/.docset/Contents/Resources/Documents/Structs/ChatQuery/ChatCompletionMessageParam/ChatCompletionAssistantMessageParam/ChatCompletionMessageToolCallParam.html new file mode 100644 index 00000000..4c08eaf2 --- /dev/null +++ b/docs/docsets/.docset/Contents/Resources/Documents/Structs/ChatQuery/ChatCompletionMessageParam/ChatCompletionAssistantMessageParam/ChatCompletionMessageToolCallParam.html @@ -0,0 +1,475 @@ + + + + ChatCompletionMessageToolCallParam Structure Reference + + + + + + + + + + + + + +
+
+

Docs (100% documented)

+
+
+ +
+
+
+
+
+ +
+
+ +
+
+
+

ChatCompletionMessageToolCallParam

+
+
+ +
public struct ChatCompletionMessageToolCallParam : Codable, Equatable
+ +
+
+ +
+
+
+
    +
  • +
    + + + + ToolsType + +
    +
    +
    +
    +
    +
    + +
    +
    +

    Declaration

    +
    +

    Swift

    +
    public typealias ToolsType = ChatQuery.ChatCompletionToolParam.ToolsType
    + +
    +
    +
    +
    +
  • +
  • +
    + + + + id + +
    +
    +
    +
    +
    +
    +

    The ID of the tool call.

    + +
    +
    +

    Declaration

    +
    +

    Swift

    +
    public let id: String
    + +
    +
    +
    +
    +
  • +
  • +
    + + + + function + +
    +
    +
    +
    +
    +
    +

    The function that the model called.

    + +
    +
    +

    Declaration

    +
    +

    Swift

    +
    public let function: `Self`.FunctionCall
    + +
    +
    +
    +
    +
  • +
  • +
    + + + + type + +
    +
    +
    +
    +
    +
    +

    The type of the tool. Currently, only function is supported.

    + +
    +
    +

    Declaration

    +
    +

    Swift

    +
    public let type: `Self`.ToolsType
    + +
    +
    +
    +
    +
  • +
  • +
    + + + + init(id:function:) + +
    +
    +
    +
    +
    +
    + +
    +
    +

    Declaration

    +
    +

    Swift

    +
    public init(
    +    id: String,
    +    function:  Self.FunctionCall
    +)
    + +
    +
    +
    +
    +
  • +
  • +
    + + + + FunctionCall + +
    +
    +
    +
    +
    +
    + + See more +
    +
    +

    Declaration

    +
    +

    Swift

    +
    public struct FunctionCall : Codable, Equatable
    + +
    +
    +
    +
    +
  • +
+
+
+
+ +
+
+ + diff --git a/docs/docsets/.docset/Contents/Resources/Documents/Structs/ChatQuery/ChatCompletionMessageParam/ChatCompletionAssistantMessageParam/ChatCompletionMessageToolCallParam/FunctionCall.html b/docs/docsets/.docset/Contents/Resources/Documents/Structs/ChatQuery/ChatCompletionMessageParam/ChatCompletionAssistantMessageParam/ChatCompletionMessageToolCallParam/FunctionCall.html new file mode 100644 index 00000000..8a50f6cc --- /dev/null +++ b/docs/docsets/.docset/Contents/Resources/Documents/Structs/ChatQuery/ChatCompletionMessageParam/ChatCompletionAssistantMessageParam/ChatCompletionMessageToolCallParam/FunctionCall.html @@ -0,0 +1,368 @@ + + + + FunctionCall Structure Reference + + + + + + + + + + + + + +
+
+

Docs (100% documented)

+
+
+ +
+
+
+
+
+ +
+
+ +
+
+
+

FunctionCall

+
+
+ +
public struct FunctionCall : Codable, Equatable
+ +
+
+ +
+
+
+
    +
  • +
    + + + + arguments + +
    +
    +
    +
    +
    +
    +

    The arguments to call the function with, as generated by the model in JSON format. Note that the model does not always generate valid JSON, and may hallucinate parameters not defined by your function schema. Validate the arguments in your code before calling your function.

    + +
    +
    +

    Declaration

    +
    +

    Swift

    +
    public let arguments: String
    + +
    +
    +
    +
    +
  • +
  • +
    + + + + name + +
    +
    +
    +
    +
    +
    +

    The name of the function to call.

    + +
    +
    +

    Declaration

    +
    +

    Swift

    +
    public let name: String
    + +
    +
    +
    +
    +
  • +
+
+
+
+ +
+
+ + diff --git a/docs/docsets/.docset/Contents/Resources/Documents/Structs/ChatQuery/ChatCompletionMessageParam/ChatCompletionAssistantMessageParam/CodingKeys.html b/docs/docsets/.docset/Contents/Resources/Documents/Structs/ChatQuery/ChatCompletionMessageParam/ChatCompletionAssistantMessageParam/CodingKeys.html new file mode 100644 index 00000000..a451143d --- /dev/null +++ b/docs/docsets/.docset/Contents/Resources/Documents/Structs/ChatQuery/ChatCompletionMessageParam/ChatCompletionAssistantMessageParam/CodingKeys.html @@ -0,0 +1,416 @@ + + + + CodingKeys Enumeration Reference + + + + + + + + + + + + + +
+
+

Docs (100% documented)

+
+
+ +
+
+
+
+
+ +
+
+ +
+
+
+

CodingKeys

+
+
+ +
public enum CodingKeys : String, CodingKey
+ +
+
+ +
+
+
+
    +
  • +
    + + + + name + +
    +
    +
    +
    +
    +
    + +
    +
    +

    Declaration

    +
    +

    Swift

    +
    case name
    + +
    +
    +
    +
    +
  • +
  • +
    + + + + role + +
    +
    +
    +
    +
    +
    + +
    +
    +

    Declaration

    +
    +

    Swift

    +
    case role
    + +
    +
    +
    +
    +
  • +
  • +
    + + + + content + +
    +
    +
    +
    +
    +
    + +
    +
    +

    Declaration

    +
    +

    Swift

    +
    case content
    + +
    +
    +
    +
    +
  • +
  • +
    + + + + toolCalls + +
    +
    +
    +
    +
    +
    + +
    +
    +

    Declaration

    +
    +

    Swift

    +
    case toolCalls = "tool_calls"
    + +
    +
    +
    +
    +
  • +
+
+
+
+ +
+
+ + diff --git a/docs/docsets/.docset/Contents/Resources/Documents/Structs/ChatQuery/ChatCompletionMessageParam/ChatCompletionSystemMessageParam.html b/docs/docsets/.docset/Contents/Resources/Documents/Structs/ChatQuery/ChatCompletionMessageParam/ChatCompletionSystemMessageParam.html new file mode 100644 index 00000000..4dd31f2c --- /dev/null +++ b/docs/docsets/.docset/Contents/Resources/Documents/Structs/ChatQuery/ChatCompletionMessageParam/ChatCompletionSystemMessageParam.html @@ -0,0 +1,446 @@ + + + + ChatCompletionSystemMessageParam Structure Reference + + + + + + + + + + + + + +
+
+

Docs (100% documented)

+
+
+ +
+
+
+
+
+ +
+
+ +
+
+
+

ChatCompletionSystemMessageParam

+
+
+ +
public struct ChatCompletionSystemMessageParam : Codable, Equatable
+ +
+
+ +
+
+
+
    +
  • +
    + + + + Role + +
    +
    +
    +
    +
    +
    + +
    +
    +

    Declaration

    +
    +

    Swift

    +
    public typealias Role = ChatQuery.ChatCompletionMessageParam.Role
    + +
    +
    +
    +
    +
  • +
  • +
    + + + + content + +
    +
    +
    +
    +
    +
    +

    The contents of the system message.

    + +
    +
    +

    Declaration

    +
    +

    Swift

    +
    public let content: String
    + +
    +
    +
    +
    +
  • +
  • +
    + + + + role + +
    +
    +
    +
    +
    +
    +

    The role of the messages author, in this case system.

    + +
    +
    +

    Declaration

    +
    +

    Swift

    +
    public let role: `Self`.Role
    + +
    +
    +
    +
    +
  • +
  • +
    + + + + name + +
    +
    +
    +
    +
    +
    +

    An optional name for the participant. Provides the model information to differentiate between participants of the same role.

    + +
    +
    +

    Declaration

    +
    +

    Swift

    +
    public let name: String?
    + +
    +
    +
    +
    +
  • +
  • +
    + + + + init(content:name:) + +
    +
    +
    +
    +
    +
    + +
    +
    +

    Declaration

    +
    +

    Swift

    +
    public init(
    +    content: String,
    +    name: String? = nil
    +)
    + +
    +
    +
    +
    +
  • +
+
+
+
+ +
+
+ + diff --git a/docs/docsets/.docset/Contents/Resources/Documents/Structs/ChatQuery/ChatCompletionMessageParam/ChatCompletionToolMessageParam.html b/docs/docsets/.docset/Contents/Resources/Documents/Structs/ChatQuery/ChatCompletionMessageParam/ChatCompletionToolMessageParam.html new file mode 100644 index 00000000..87fcde21 --- /dev/null +++ b/docs/docsets/.docset/Contents/Resources/Documents/Structs/ChatQuery/ChatCompletionMessageParam/ChatCompletionToolMessageParam.html @@ -0,0 +1,473 @@ + + + + ChatCompletionToolMessageParam Structure Reference + + + + + + + + + + + + + +
+
+

Docs (100% documented)

+
+
+ +
+
+
+
+
+ +
+
+ +
+
+
+

ChatCompletionToolMessageParam

+
+
+ +
public struct ChatCompletionToolMessageParam : Codable, Equatable
+ +
+
+ +
+
+
+
    +
  • +
    + + + + Role + +
    +
    +
    +
    +
    +
    + +
    +
    +

    Declaration

    +
    +

    Swift

    +
    public typealias Role = ChatQuery.ChatCompletionMessageParam.Role
    + +
    +
    +
    +
    +
  • +
  • +
    + + + + content + +
    +
    +
    +
    +
    +
    +

    The contents of the tool message.

    + +
    +
    +

    Declaration

    +
    +

    Swift

    +
    public let content: String
    + +
    +
    +
    +
    +
  • +
  • +
    + + + + role + +
    +
    +
    +
    +
    +
    +

    The role of the messages author, in this case tool.

    + +
    +
    +

    Declaration

    +
    +

    Swift

    +
    public let role: `Self`.Role
    + +
    +
    +
    +
    +
  • +
  • +
    + + + + toolCallId + +
    +
    +
    +
    +
    +
    +

    Tool call that this message is responding to.

    + +
    +
    +

    Declaration

    +
    +

    Swift

    +
    public let toolCallId: String
    + +
    +
    +
    +
    +
  • +
  • + +
    +
    +
    +
    +
    + +
    +
    +

    Declaration

    +
    +

    Swift

    +
    public init(
    +    content: String,
    +    toolCallId: String
    +)
    + +
    +
    +
    +
    +
  • +
  • +
    + + + + CodingKeys + +
    +
    +
    +
    +
    +
    + + See more +
    +
    +

    Declaration

    +
    +

    Swift

    +
    public enum CodingKeys : String, CodingKey
    + +
    +
    +
    +
    +
  • +
+
+
+
+ +
+
+ + diff --git a/docs/docsets/.docset/Contents/Resources/Documents/Structs/ChatQuery/ChatCompletionMessageParam/ChatCompletionToolMessageParam/CodingKeys.html b/docs/docsets/.docset/Contents/Resources/Documents/Structs/ChatQuery/ChatCompletionMessageParam/ChatCompletionToolMessageParam/CodingKeys.html new file mode 100644 index 00000000..21ab5d41 --- /dev/null +++ b/docs/docsets/.docset/Contents/Resources/Documents/Structs/ChatQuery/ChatCompletionMessageParam/ChatCompletionToolMessageParam/CodingKeys.html @@ -0,0 +1,390 @@ + + + + CodingKeys Enumeration Reference + + + + + + + + + + + + + +
+
+

Docs (100% documented)

+
+
+ +
+
+
+
+
+ +
+
+ +
+
+
+

CodingKeys

+
+
+ +
public enum CodingKeys : String, CodingKey
+ +
+
+ +
+
+
+
    +
  • +
    + + + + content + +
    +
    +
    +
    +
    +
    + +
    +
    +

    Declaration

    +
    +

    Swift

    +
    case content
    + +
    +
    +
    +
    +
  • +
  • +
    + + + + role + +
    +
    +
    +
    +
    +
    + +
    +
    +

    Declaration

    +
    +

    Swift

    +
    case role
    + +
    +
    +
    +
    +
  • +
  • +
    + + + + toolCallId + +
    +
    +
    +
    +
    +
    + +
    +
    +

    Declaration

    +
    +

    Swift

    +
    case toolCallId = "tool_call_id"
    + +
    +
    +
    +
    +
  • +
+
+
+
+ +
+
+ + diff --git a/docs/docsets/.docset/Contents/Resources/Documents/Structs/ChatQuery/ChatCompletionMessageParam/ChatCompletionUserMessageParam.html b/docs/docsets/.docset/Contents/Resources/Documents/Structs/ChatQuery/ChatCompletionMessageParam/ChatCompletionUserMessageParam.html new file mode 100644 index 00000000..efc3ba92 --- /dev/null +++ b/docs/docsets/.docset/Contents/Resources/Documents/Structs/ChatQuery/ChatCompletionMessageParam/ChatCompletionUserMessageParam.html @@ -0,0 +1,473 @@ + + + + ChatCompletionUserMessageParam Structure Reference + + + + + + + + + + + + + +
+
+

Docs (100% documented)

+
+
+ +
+
+
+
+
+ +
+
+ +
+
+
+

ChatCompletionUserMessageParam

+
+
+ +
public struct ChatCompletionUserMessageParam : Codable, Equatable
+ +
+
+ +
+
+
+
    +
  • +
    + + + + Role + +
    +
    +
    +
    +
    +
    + +
    +
    +

    Declaration

    +
    +

    Swift

    +
    public typealias Role = ChatQuery.ChatCompletionMessageParam.Role
    + +
    +
    +
    +
    +
  • +
  • +
    + + + + content + +
    +
    +
    +
    +
    +
    +

    The contents of the user message.

    + +
    +
    +

    Declaration

    +
    +

    Swift

    +
    public let content: Content
    + +
    +
    +
    +
    +
  • +
  • +
    + + + + role + +
    +
    +
    +
    +
    +
    +

    The role of the messages author, in this case user.

    + +
    +
    +

    Declaration

    +
    +

    Swift

    +
    public let role: `Self`.Role
    + +
    +
    +
    +
    +
  • +
  • +
    + + + + name + +
    +
    +
    +
    +
    +
    +

    An optional name for the participant. Provides the model information to differentiate between participants of the same role.

    + +
    +
    +

    Declaration

    +
    +

    Swift

    +
    public let name: String?
    + +
    +
    +
    +
    +
  • +
  • +
    + + + + init(content:name:) + +
    +
    +
    +
    +
    +
    + +
    +
    +

    Declaration

    +
    +

    Swift

    +
    public init(
    +    content: Content,
    +    name: String? = nil
    +)
    + +
    +
    +
    +
    +
  • +
  • +
    + + + + Content + +
    +
    +
    +
    +
    +
    + + See more +
    +
    +

    Declaration

    +
    +

    Swift

    +
    public enum Content : Codable, Equatable
    + +
    +
    +
    +
    +
  • +
+
+
+
+ +
+
+ + diff --git a/docs/docsets/.docset/Contents/Resources/Documents/Structs/ChatQuery/ChatCompletionMessageParam/ChatCompletionUserMessageParam/Content.html b/docs/docsets/.docset/Contents/Resources/Documents/Structs/ChatQuery/ChatCompletionMessageParam/ChatCompletionUserMessageParam/Content.html new file mode 100644 index 00000000..9a7eff8e --- /dev/null +++ b/docs/docsets/.docset/Contents/Resources/Documents/Structs/ChatQuery/ChatCompletionMessageParam/ChatCompletionUserMessageParam/Content.html @@ -0,0 +1,548 @@ + + + + Content Enumeration Reference + + + + + + + + + + + + + +
+
+

Docs (100% documented)

+
+
+ +
+
+
+
+
+ +
+
+ +
+
+
+

Content

+
+
+ +
public enum Content : Codable, Equatable
+ +
+
+ +
+
+
+
    +
  • +
    + + + + string(_:) + +
    +
    +
    +
    +
    +
    + +
    +
    +

    Declaration

    +
    +

    Swift

    +
    case string(String)
    + +
    +
    +
    +
    +
  • +
  • +
    + + + + vision(_:) + +
    +
    +
    +
    +
    +
    + +
    +
    +

    Declaration

    +
    +

    Swift

    +
    case vision([VisionContent])
    + +
    +
    +
    +
    +
  • +
  • +
    + + + + string + +
    +
    +
    +
    +
    +
    + +
    +
    +

    Declaration

    +
    +

    Swift

    +
    public var string: String? { get }
    + +
    +
    +
    +
    +
  • +
  • +
    + + + + init(string:) + +
    +
    +
    +
    +
    +
    + +
    +
    +

    Declaration

    +
    +

    Swift

    +
    public init(string: String)
    + +
    +
    +
    +
    +
  • +
  • +
    + + + + init(vision:) + +
    +
    +
    +
    +
    +
    + +
    +
    +

    Declaration

    +
    +

    Swift

    +
    public init(vision: [VisionContent])
    + +
    +
    +
    +
    +
  • +
  • +
    + + + + CodingKeys + +
    +
    +
    +
    +
    +
    + + See more +
    +
    +

    Declaration

    +
    +

    Swift

    +
    public enum CodingKeys : CodingKey
    + +
    +
    +
    +
    +
  • +
  • +
    + + + + encode(to:) + +
    +
    +
    +
    +
    +
    + +
    +
    +

    Declaration

    +
    +

    Swift

    +
    public func encode(to encoder: Encoder) throws
    + +
    +
    +
    +
    +
  • +
  • +
    + + + + VisionContent + +
    +
    +
    +
    +
    +
    + + See more +
    +
    +

    Declaration

    +
    +

    Swift

    +
    public enum VisionContent : Codable, Equatable
    + +
    +
    +
    +
    +
  • +
  • +
    + + + + init(from:) + +
    +
    +
    +
    +
    +
    + +
    +
    +

    Declaration

    +
    +

    Swift

    +
    public init(from decoder: Decoder) throws
    + +
    +
    +
    +
    +
  • +
+
+
+
+ +
+
+ + diff --git a/docs/docsets/.docset/Contents/Resources/Documents/Structs/ChatQuery/ChatCompletionMessageParam/ChatCompletionUserMessageParam/Content/CodingKeys.html b/docs/docsets/.docset/Contents/Resources/Documents/Structs/ChatQuery/ChatCompletionMessageParam/ChatCompletionUserMessageParam/Content/CodingKeys.html new file mode 100644 index 00000000..0503ce5d --- /dev/null +++ b/docs/docsets/.docset/Contents/Resources/Documents/Structs/ChatQuery/ChatCompletionMessageParam/ChatCompletionUserMessageParam/Content/CodingKeys.html @@ -0,0 +1,366 @@ + + + + CodingKeys Enumeration Reference + + + + + + + + + + + + + +
+
+

Docs (100% documented)

+
+
+ +
+
+
+
+
+ +
+
+ +
+
+
+

CodingKeys

+
+
+ +
public enum CodingKeys : CodingKey
+ +
+
+ +
+
+
+
    +
  • +
    + + + + string + +
    +
    +
    +
    +
    +
    + +
    +
    +

    Declaration

    +
    +

    Swift

    +
    case string
    + +
    +
    +
    +
    +
  • +
  • +
    + + + + vision + +
    +
    +
    +
    +
    +
    + +
    +
    +

    Declaration

    +
    +

    Swift

    +
    case vision
    + +
    +
    +
    +
    +
  • +
+
+
+
+ +
+
+ + diff --git a/docs/docsets/.docset/Contents/Resources/Documents/Structs/ChatQuery/ChatCompletionMessageParam/ChatCompletionUserMessageParam/Content/VisionContent.html b/docs/docsets/.docset/Contents/Resources/Documents/Structs/ChatQuery/ChatCompletionMessageParam/ChatCompletionUserMessageParam/Content/VisionContent.html new file mode 100644 index 00000000..c34fc068 --- /dev/null +++ b/docs/docsets/.docset/Contents/Resources/Documents/Structs/ChatQuery/ChatCompletionMessageParam/ChatCompletionUserMessageParam/Content/VisionContent.html @@ -0,0 +1,550 @@ + + + + VisionContent Enumeration Reference + + + + + + + + + + + + + +
+
+

Docs (100% documented)

+
+
+ +
+
+
+
+
+ +
+
+ +
+
+
+

VisionContent

+
+
+ +
public enum VisionContent : Codable, Equatable
+ +
+
+ +
+
+
+ +
+
+
+ +
+
+ + diff --git a/docs/docsets/.docset/Contents/Resources/Documents/Structs/ChatQuery/ChatCompletionMessageParam/ChatCompletionUserMessageParam/Content/VisionContent/ChatCompletionContentPartImageParam.html b/docs/docsets/.docset/Contents/Resources/Documents/Structs/ChatQuery/ChatCompletionMessageParam/ChatCompletionUserMessageParam/Content/VisionContent/ChatCompletionContentPartImageParam.html new file mode 100644 index 00000000..0da0715f --- /dev/null +++ b/docs/docsets/.docset/Contents/Resources/Documents/Structs/ChatQuery/ChatCompletionMessageParam/ChatCompletionUserMessageParam/Content/VisionContent/ChatCompletionContentPartImageParam.html @@ -0,0 +1,449 @@ + + + + ChatCompletionContentPartImageParam Structure Reference + + + + + + + + + + + + + +
+
+

Docs (100% documented)

+
+
+ +
+
+
+
+
+ +
+
+ +
+
+
+

ChatCompletionContentPartImageParam

+
+
+ +
public struct ChatCompletionContentPartImageParam : Codable, Equatable
+ +
+
+ +
+
+
+
    +
  • +
    + + + + imageUrl + +
    +
    +
    +
    +
    +
    + +
    +
    +

    Declaration

    +
    +

    Swift

    +
    public let imageUrl: ImageURL
    + +
    +
    +
    +
    +
  • +
  • +
    + + + + type + +
    +
    +
    +
    +
    +
    +

    The type of the content part.

    + +
    +
    +

    Declaration

    +
    +

    Swift

    +
    public let type: String
    + +
    +
    +
    +
    +
  • +
  • +
    + + + + init(imageUrl:) + +
    +
    +
    +
    +
    +
    + +
    +
    +

    Declaration

    +
    +

    Swift

    +
    public init(imageUrl: ImageURL)
    + +
    +
    +
    +
    +
  • +
  • +
    + + + + ImageURL + +
    +
    +
    +
    +
    +
    + + See more +
    +
    +

    Declaration

    +
    +

    Swift

    +
    public struct ImageURL : Codable, Equatable
    + +
    +
    +
    +
    +
  • +
  • +
    + + + + CodingKeys + +
    +
    +
    +
    +
    +
    + + See more +
    +
    +

    Declaration

    +
    +

    Swift

    +
    public enum CodingKeys : String, CodingKey
    + +
    +
    +
    +
    +
  • +
+
+
+
+ +
+
+ + diff --git a/docs/docsets/.docset/Contents/Resources/Documents/Structs/ChatQuery/ChatCompletionMessageParam/ChatCompletionUserMessageParam/Content/VisionContent/ChatCompletionContentPartImageParam/CodingKeys.html b/docs/docsets/.docset/Contents/Resources/Documents/Structs/ChatQuery/ChatCompletionMessageParam/ChatCompletionUserMessageParam/Content/VisionContent/ChatCompletionContentPartImageParam/CodingKeys.html new file mode 100644 index 00000000..8af7efdb --- /dev/null +++ b/docs/docsets/.docset/Contents/Resources/Documents/Structs/ChatQuery/ChatCompletionMessageParam/ChatCompletionUserMessageParam/Content/VisionContent/ChatCompletionContentPartImageParam/CodingKeys.html @@ -0,0 +1,370 @@ + + + + CodingKeys Enumeration Reference + + + + + + + + + + + + + +
+
+

Docs (100% documented)

+
+
+ +
+
+
+
+
+ +
+
+ +
+
+
+

CodingKeys

+
+
+ +
public enum CodingKeys : String, CodingKey
+ +
+
+ +
+
+
+
    +
  • +
    + + + + imageUrl + +
    +
    +
    +
    +
    +
    + +
    +
    +

    Declaration

    +
    +

    Swift

    +
    case imageUrl = "image_url"
    + +
    +
    +
    +
    +
  • +
  • +
    + + + + type + +
    +
    +
    +
    +
    +
    + +
    +
    +

    Declaration

    +
    +

    Swift

    +
    case type
    + +
    +
    +
    +
    +
  • +
+
+
+
+ +
+
+ + diff --git a/docs/docsets/.docset/Contents/Resources/Documents/Structs/ChatQuery/ChatCompletionMessageParam/ChatCompletionUserMessageParam/Content/VisionContent/ChatCompletionContentPartImageParam/ImageURL.html b/docs/docsets/.docset/Contents/Resources/Documents/Structs/ChatQuery/ChatCompletionMessageParam/ChatCompletionUserMessageParam/Content/VisionContent/ChatCompletionContentPartImageParam/ImageURL.html new file mode 100644 index 00000000..3e54f49b --- /dev/null +++ b/docs/docsets/.docset/Contents/Resources/Documents/Structs/ChatQuery/ChatCompletionMessageParam/ChatCompletionUserMessageParam/Content/VisionContent/ChatCompletionContentPartImageParam/ImageURL.html @@ -0,0 +1,452 @@ + + + + ImageURL Structure Reference + + + + + + + + + + + + + +
+
+

Docs (100% documented)

+
+
+ +
+
+
+
+
+ +
+
+ +
+
+
+

ImageURL

+
+
+ +
public struct ImageURL : Codable, Equatable
+ +
+
+ +
+
+
+
    +
  • +
    + + + + url + +
    +
    +
    +
    +
    +
    +

    Either a URL of the image or the base64 encoded image data.

    + +
    +
    +

    Declaration

    +
    +

    Swift

    +
    public let url: String
    + +
    +
    +
    +
    +
  • +
  • +
    + + + + detail + +
    +
    +
    +
    +
    +
    +

    Specifies the detail level of the image. Learn more in the +Vision guide https://platform.openai.com/docs/guides/vision/low-or-high-fidelity-image-understanding

    + +
    +
    +

    Declaration

    +
    +

    Swift

    +
    public let detail: Detail
    + +
    +
    +
    +
    +
  • +
  • +
    + + + + init(url:detail:) + +
    +
    +
    +
    +
    +
    + +
    +
    +

    Declaration

    +
    +

    Swift

    +
    public init(url: String, detail: Detail)
    + +
    +
    +
    +
    +
  • +
  • +
    + + + + init(url:detail:) + +
    +
    +
    +
    +
    +
    + +
    +
    +

    Declaration

    +
    +

    Swift

    +
    public init(url: Data, detail: Detail)
    + +
    +
    +
    +
    +
  • +
  • +
    + + + + Detail + +
    +
    +
    +
    +
    +
    + + See more +
    +
    +

    Declaration

    +
    +

    Swift

    +
    public enum Detail : String, Codable, Equatable, CaseIterable
    + +
    +
    +
    +
    +
  • +
+
+
+
+ +
+
+ + diff --git a/docs/docsets/.docset/Contents/Resources/Documents/Structs/ChatQuery/ChatCompletionMessageParam/ChatCompletionUserMessageParam/Content/VisionContent/ChatCompletionContentPartImageParam/ImageURL/Detail.html b/docs/docsets/.docset/Contents/Resources/Documents/Structs/ChatQuery/ChatCompletionMessageParam/ChatCompletionUserMessageParam/Content/VisionContent/ChatCompletionContentPartImageParam/ImageURL/Detail.html new file mode 100644 index 00000000..30667954 --- /dev/null +++ b/docs/docsets/.docset/Contents/Resources/Documents/Structs/ChatQuery/ChatCompletionMessageParam/ChatCompletionUserMessageParam/Content/VisionContent/ChatCompletionContentPartImageParam/ImageURL/Detail.html @@ -0,0 +1,398 @@ + + + + Detail Enumeration Reference + + + + + + + + + + + + + +
+
+

Docs (100% documented)

+
+
+ +
+
+
+
+
+ +
+
+ +
+
+
+

Detail

+
+
+ +
public enum Detail : String, Codable, Equatable, CaseIterable
+ +
+
+ +
+
+
+
    +
  • +
    + + + + auto + +
    +
    +
    +
    +
    +
    + +
    +
    +

    Declaration

    +
    +

    Swift

    +
    case auto
    + +
    +
    +
    +
    +
  • +
  • +
    + + + + low + +
    +
    +
    +
    +
    +
    + +
    +
    +

    Declaration

    +
    +

    Swift

    +
    case low
    + +
    +
    +
    +
    +
  • +
  • +
    + + + + high + +
    +
    +
    +
    +
    +
    + +
    +
    +

    Declaration

    +
    +

    Swift

    +
    case high
    + +
    +
    +
    +
    +
  • +
+
+
+
+ +
+
+ + diff --git a/docs/docsets/.docset/Contents/Resources/Documents/Structs/ChatQuery/ChatCompletionMessageParam/ChatCompletionUserMessageParam/Content/VisionContent/ChatCompletionContentPartTextParam.html b/docs/docsets/.docset/Contents/Resources/Documents/Structs/ChatQuery/ChatCompletionMessageParam/ChatCompletionUserMessageParam/Content/VisionContent/ChatCompletionContentPartTextParam.html new file mode 100644 index 00000000..2d4e9b1a --- /dev/null +++ b/docs/docsets/.docset/Contents/Resources/Documents/Structs/ChatQuery/ChatCompletionMessageParam/ChatCompletionUserMessageParam/Content/VisionContent/ChatCompletionContentPartTextParam.html @@ -0,0 +1,396 @@ + + + + ChatCompletionContentPartTextParam Structure Reference + + + + + + + + + + + + + +
+
+

Docs (100% documented)

+
+
+ +
+
+
+
+
+ +
+
+ +
+
+
+

ChatCompletionContentPartTextParam

+
+
+ +
public struct ChatCompletionContentPartTextParam : Codable, Equatable
+ +
+
+ +
+
+
+
    +
  • +
    + + + + text + +
    +
    +
    +
    +
    +
    +

    The text content.

    + +
    +
    +

    Declaration

    +
    +

    Swift

    +
    public let text: String
    + +
    +
    +
    +
    +
  • +
  • +
    + + + + type + +
    +
    +
    +
    +
    +
    +

    The type of the content part.

    + +
    +
    +

    Declaration

    +
    +

    Swift

    +
    public let type: String
    + +
    +
    +
    +
    +
  • +
  • +
    + + + + init(text:) + +
    +
    +
    +
    +
    +
    + +
    +
    +

    Declaration

    +
    +

    Swift

    +
    public init(text: String)
    + +
    +
    +
    +
    +
  • +
+
+
+
+ +
+
+ + diff --git a/docs/docsets/.docset/Contents/Resources/Documents/Structs/ChatQuery/ChatCompletionMessageParam/Role.html b/docs/docsets/.docset/Contents/Resources/Documents/Structs/ChatQuery/ChatCompletionMessageParam/Role.html new file mode 100644 index 00000000..2a48f81b --- /dev/null +++ b/docs/docsets/.docset/Contents/Resources/Documents/Structs/ChatQuery/ChatCompletionMessageParam/Role.html @@ -0,0 +1,414 @@ + + + + Role Enumeration Reference + + + + + + + + + + + + + +
+
+

Docs (100% documented)

+
+
+ +
+
+
+
+
+ +
+
+ +
+
+
+

Role

+
+
+ +
public enum Role : String, Codable, Equatable, CaseIterable
+ +
+
+ +
+
+
+
    +
  • +
    + + + + system + +
    +
    +
    +
    +
    +
    + +
    +
    +

    Declaration

    +
    +

    Swift

    +
    case system
    + +
    +
    +
    +
    +
  • +
  • +
    + + + + user + +
    +
    +
    +
    +
    +
    + +
    +
    +

    Declaration

    +
    +

    Swift

    +
    case user
    + +
    +
    +
    +
    +
  • +
  • +
    + + + + assistant + +
    +
    +
    +
    +
    +
    + +
    +
    +

    Declaration

    +
    +

    Swift

    +
    case assistant
    + +
    +
    +
    +
    +
  • +
  • +
    + + + + tool + +
    +
    +
    +
    +
    +
    + +
    +
    +

    Declaration

    +
    +

    Swift

    +
    case tool
    + +
    +
    +
    +
    +
  • +
+
+
+
+ +
+
+ + diff --git a/docs/docsets/.docset/Contents/Resources/Documents/Structs/ChatQuery/ChatCompletionToolParam.html b/docs/docsets/.docset/Contents/Resources/Documents/Structs/ChatQuery/ChatCompletionToolParam.html new file mode 100644 index 00000000..c1b69352 --- /dev/null +++ b/docs/docsets/.docset/Contents/Resources/Documents/Structs/ChatQuery/ChatCompletionToolParam.html @@ -0,0 +1,442 @@ + + + + ChatCompletionToolParam Structure Reference + + + + + + + + + + + + + +
+
+

Docs (100% documented)

+
+
+ +
+
+
+
+
+ +
+
+ +
+
+
+

ChatCompletionToolParam

+
+
+ +
public struct ChatCompletionToolParam : Codable, Equatable
+ +
+
+ +
+
+
+
    +
  • +
    + + + + function + +
    +
    +
    +
    +
    +
    + +
    +
    +

    Declaration

    +
    +

    Swift

    +
    public let function: `Self`.FunctionDefinition
    + +
    +
    +
    +
    +
  • +
  • +
    + + + + type + +
    +
    +
    +
    +
    +
    + +
    +
    +

    Declaration

    +
    +

    Swift

    +
    public let type: `Self`.ToolsType
    + +
    +
    +
    +
    +
  • +
  • +
    + + + + init(function:) + +
    +
    +
    +
    +
    +
    + +
    +
    +

    Declaration

    +
    +

    Swift

    +
    public init(
    +    function: Self.FunctionDefinition
    +)
    + +
    +
    +
    +
    +
  • +
  • +
    + + + + FunctionDefinition + +
    +
    +
    +
    +
    +
    + + See more +
    +
    +

    Declaration

    +
    +

    Swift

    +
    public struct FunctionDefinition : Codable, Equatable
    + +
    +
    +
    +
    +
  • +
  • +
    + + + + ToolsType + +
    +
    +
    +
    +
    +
    + + See more +
    +
    +

    Declaration

    +
    +

    Swift

    +
    public enum ToolsType : String, Codable, Equatable
    + +
    +
    +
    +
    +
  • +
+
+
+
+ +
+
+ + diff --git a/docs/docsets/.docset/Contents/Resources/Documents/Structs/ChatQuery/ChatCompletionToolParam/FunctionDefinition.html b/docs/docsets/.docset/Contents/Resources/Documents/Structs/ChatQuery/ChatCompletionToolParam/FunctionDefinition.html new file mode 100644 index 00000000..41495366 --- /dev/null +++ b/docs/docsets/.docset/Contents/Resources/Documents/Structs/ChatQuery/ChatCompletionToolParam/FunctionDefinition.html @@ -0,0 +1,452 @@ + + + + FunctionDefinition Structure Reference + + + + + + + + + + + + + +
+
+

Docs (100% documented)

+
+
+ +
+
+
+
+
+ +
+
+ +
+
+
+

FunctionDefinition

+
+
+ +
public struct FunctionDefinition : Codable, Equatable
+ +
+
+ +
+
+
+ +
+
+
+ +
+
+ + diff --git a/docs/docsets/.docset/Contents/Resources/Documents/Structs/ChatQuery/ChatCompletionToolParam/FunctionDefinition/FunctionParameters.html b/docs/docsets/.docset/Contents/Resources/Documents/Structs/ChatQuery/ChatCompletionToolParam/FunctionDefinition/FunctionParameters.html new file mode 100644 index 00000000..704e3d15 --- /dev/null +++ b/docs/docsets/.docset/Contents/Resources/Documents/Structs/ChatQuery/ChatCompletionToolParam/FunctionDefinition/FunctionParameters.html @@ -0,0 +1,637 @@ + + + + FunctionParameters Structure Reference + + + + + + + + + + + + + +
+
+

Docs (100% documented)

+
+
+ +
+
+
+
+
+ +
+
+ +
+
+
+

FunctionParameters

+
+
+ +
public struct FunctionParameters : Codable, Equatable
+ +
+
+

See the guide for examples, and the JSON Schema reference for documentation about the format.

+ +
+
+
+
    +
  • +
    + + + + type + +
    +
    +
    +
    +
    +
    + +
    +
    +

    Declaration

    +
    +

    Swift

    +
    public let type: `Self`.JSONType
    + +
    +
    +
    +
    +
  • +
  • +
    + + + + properties + +
    +
    +
    +
    +
    +
    + +
    +
    +

    Declaration

    +
    +

    Swift

    +
    public let properties: [String : Property]?
    + +
    +
    +
    +
    +
  • +
  • +
    + + + + required + +
    +
    +
    +
    +
    +
    + +
    +
    +

    Declaration

    +
    +

    Swift

    +
    public let required: [String]?
    + +
    +
    +
    +
    +
  • +
  • +
    + + + + pattern + +
    +
    +
    +
    +
    +
    + +
    +
    +

    Declaration

    +
    +

    Swift

    +
    public let pattern: String?
    + +
    +
    +
    +
    +
  • +
  • +
    + + + + const + +
    +
    +
    +
    +
    +
    + +
    +
    +

    Declaration

    +
    +

    Swift

    +
    public let const: String?
    + +
    +
    +
    +
    +
  • +
  • +
    + + + + enum + +
    +
    +
    +
    +
    +
    + +
    +
    +

    Declaration

    +
    +

    Swift

    +
    public let `enum`: [String]?
    + +
    +
    +
    +
    +
  • +
  • +
    + + + + multipleOf + +
    +
    +
    +
    +
    +
    + +
    +
    +

    Declaration

    +
    +

    Swift

    +
    public let multipleOf: Int?
    + +
    +
    +
    +
    +
  • +
  • +
    + + + + minimum + +
    +
    +
    +
    +
    +
    + +
    +
    +

    Declaration

    +
    +

    Swift

    +
    public let minimum: Int?
    + +
    +
    +
    +
    +
  • +
  • +
    + + + + maximum + +
    +
    +
    +
    +
    +
    + +
    +
    +

    Declaration

    +
    +

    Swift

    +
    public let maximum: Int?
    + +
    +
    +
    +
    +
  • +
  • + +
    +
    +
    +
    +
    + +
    +
    +

    Declaration

    +
    +

    Swift

    +
    public init(
    +    type: Self.JSONType,
    +    properties: [String : Property]? = nil,
    +    required: [String]? = nil,
    +    pattern: String? = nil,
    +    const: String? = nil,
    +    enum: [String]? = nil,
    +    multipleOf: Int? = nil,
    +    minimum: Int? = nil,
    +    maximum: Int? = nil
    +)
    + +
    +
    +
    +
    +
  • +
  • +
    + + + + Property + +
    +
    +
    +
    +
    +
    + + See more +
    +
    +

    Declaration

    +
    +

    Swift

    +
    public struct Property : Codable, Equatable
    + +
    +
    +
    +
    +
  • +
  • +
    + + + + JSONType + +
    +
    +
    +
    +
    +
    + + See more +
    +
    +

    Declaration

    +
    +

    Swift

    +
    public enum JSONType : String, Codable
    + +
    +
    +
    +
    +
  • +
+
+
+
+ +
+
+ + diff --git a/docs/docsets/.docset/Contents/Resources/Documents/Structs/ChatQuery/ChatCompletionToolParam/FunctionDefinition/FunctionParameters/JSONType.html b/docs/docsets/.docset/Contents/Resources/Documents/Structs/ChatQuery/ChatCompletionToolParam/FunctionDefinition/FunctionParameters/JSONType.html new file mode 100644 index 00000000..c7ca614d --- /dev/null +++ b/docs/docsets/.docset/Contents/Resources/Documents/Structs/ChatQuery/ChatCompletionToolParam/FunctionDefinition/FunctionParameters/JSONType.html @@ -0,0 +1,496 @@ + + + + JSONType Enumeration Reference + + + + + + + + + + + + + +
+
+

Docs (100% documented)

+
+
+ +
+
+
+
+
+ +
+
+ +
+
+
+

JSONType

+
+
+ +
public enum JSONType : String, Codable
+ +
+
+ +
+
+
+
    +
  • +
    + + + + integer + +
    +
    +
    +
    +
    +
    + +
    +
    +

    Declaration

    +
    +

    Swift

    +
    case integer
    + +
    +
    +
    +
    +
  • +
  • +
    + + + + string + +
    +
    +
    +
    +
    +
    + +
    +
    +

    Declaration

    +
    +

    Swift

    +
    case string
    + +
    +
    +
    +
    +
  • +
  • +
    + + + + boolean + +
    +
    +
    +
    +
    +
    + +
    +
    +

    Declaration

    +
    +

    Swift

    +
    case boolean
    + +
    +
    +
    +
    +
  • +
  • +
    + + + + array + +
    +
    +
    +
    +
    +
    + +
    +
    +

    Declaration

    +
    +

    Swift

    +
    case array
    + +
    +
    +
    +
    +
  • +
  • +
    + + + + object + +
    +
    +
    +
    +
    +
    + +
    +
    +

    Declaration

    +
    +

    Swift

    +
    case object
    + +
    +
    +
    +
    +
  • +
  • +
    + + + + number + +
    +
    +
    +
    +
    +
    + +
    +
    +

    Declaration

    +
    +

    Swift

    +
    case number
    + +
    +
    +
    +
    +
  • +
  • +
    + + + + null + +
    +
    +
    +
    +
    +
    + +
    +
    +

    Declaration

    +
    +

    Swift

    +
    case null
    + +
    +
    +
    +
    +
  • +
+
+
+
+ +
+
+ + diff --git a/docs/docsets/.docset/Contents/Resources/Documents/Structs/ChatQuery/ChatCompletionToolParam/FunctionDefinition/FunctionParameters/Property.html b/docs/docsets/.docset/Contents/Resources/Documents/Structs/ChatQuery/ChatCompletionToolParam/FunctionDefinition/FunctionParameters/Property.html new file mode 100644 index 00000000..3a32d386 --- /dev/null +++ b/docs/docsets/.docset/Contents/Resources/Documents/Structs/ChatQuery/ChatCompletionToolParam/FunctionDefinition/FunctionParameters/Property.html @@ -0,0 +1,772 @@ + + + + Property Structure Reference + + + + + + + + + + + + + +
+
+

Docs (100% documented)

+
+
+ +
+
+
+
+
+ +
+
+ +
+
+
+

Property

+
+
+ +
public struct Property : Codable, Equatable
+ +
+
+ +
+
+
+
    +
  • +
    + + + + JSONType + +
    +
    +
    +
    +
    +
    + +
    +
    +

    Declaration

    +
    +

    Swift

    +
    public typealias JSONType = ChatQuery.ChatCompletionToolParam.FunctionDefinition.FunctionParameters.JSONType
    + +
    +
    +
    +
    +
  • +
  • +
    + + + + type + +
    +
    +
    +
    +
    +
    + +
    +
    +

    Declaration

    +
    +

    Swift

    +
    public let type: `Self`.JSONType
    + +
    +
    +
    +
    +
  • +
  • +
    + + + + description + +
    +
    +
    +
    +
    +
    + +
    +
    +

    Declaration

    +
    +

    Swift

    +
    public let description: String?
    + +
    +
    +
    +
    +
  • +
  • +
    + + + + format + +
    +
    +
    +
    +
    +
    + +
    +
    +

    Declaration

    +
    +

    Swift

    +
    public let format: String?
    + +
    +
    +
    +
    +
  • +
  • +
    + + + + items + +
    +
    +
    +
    +
    +
    + +
    +
    +

    Declaration

    +
    +

    Swift

    +
    public let items: `Self`.Items?
    + +
    +
    +
    +
    +
  • +
  • +
    + + + + required + +
    +
    +
    +
    +
    +
    + +
    +
    +

    Declaration

    +
    +

    Swift

    +
    public let required: [String]?
    + +
    +
    +
    +
    +
  • +
  • +
    + + + + pattern + +
    +
    +
    +
    +
    +
    + +
    +
    +

    Declaration

    +
    +

    Swift

    +
    public let pattern: String?
    + +
    +
    +
    +
    +
  • +
  • +
    + + + + const + +
    +
    +
    +
    +
    +
    + +
    +
    +

    Declaration

    +
    +

    Swift

    +
    public let const: String?
    + +
    +
    +
    +
    +
  • +
  • +
    + + + + enum + +
    +
    +
    +
    +
    +
    + +
    +
    +

    Declaration

    +
    +

    Swift

    +
    public let `enum`: [String]?
    + +
    +
    +
    +
    +
  • +
  • +
    + + + + multipleOf + +
    +
    +
    +
    +
    +
    + +
    +
    +

    Declaration

    +
    +

    Swift

    +
    public let multipleOf: Int?
    + +
    +
    +
    +
    +
  • +
  • +
    + + + + minimum + +
    +
    +
    +
    +
    +
    + +
    +
    +

    Declaration

    +
    +

    Swift

    +
    public let minimum: Double?
    + +
    +
    +
    +
    +
  • +
  • +
    + + + + maximum + +
    +
    +
    +
    +
    +
    + +
    +
    +

    Declaration

    +
    +

    Swift

    +
    public let maximum: Double?
    + +
    +
    +
    +
    +
  • +
  • +
    + + + + minItems + +
    +
    +
    +
    +
    +
    + +
    +
    +

    Declaration

    +
    +

    Swift

    +
    public let minItems: Int?
    + +
    +
    +
    +
    +
  • +
  • +
    + + + + maxItems + +
    +
    +
    +
    +
    +
    + +
    +
    +

    Declaration

    +
    +

    Swift

    +
    public let maxItems: Int?
    + +
    +
    +
    +
    +
  • +
  • +
    + + + + uniqueItems + +
    +
    +
    +
    +
    +
    + +
    +
    +

    Declaration

    +
    +

    Swift

    +
    public let uniqueItems: Bool?
    + +
    +
    +
    +
    +
  • +
  • + +
    +
    +
    +
    +
    + +
    +
    +

    Declaration

    +
    +

    Swift

    +
    public init(
    +    type: Self.JSONType,
    +    description: String? = nil,
    +    format: String? = nil,
    +    items: Self.Items? = nil,
    +    required: [String]? = nil,
    +    pattern: String? = nil,
    +    const: String? = nil,
    +    enum: [String]? = nil,
    +    multipleOf: Int? = nil,
    +    minimum: Double? = nil,
    +    maximum: Double? = nil,
    +    minItems: Int? = nil,
    +    maxItems: Int? = nil,
    +    uniqueItems: Bool? = nil
    +)
    + +
    +
    +
    +
    +
  • +
  • +
    + + + + Items + +
    +
    +
    +
    +
    +
    + + See more +
    +
    +

    Declaration

    +
    +

    Swift

    +
    public struct Items : Codable, Equatable
    + +
    +
    +
    +
    +
  • +
+
+
+
+ +
+
+ + diff --git a/docs/docsets/.docset/Contents/Resources/Documents/Structs/ChatQuery/ChatCompletionToolParam/FunctionDefinition/FunctionParameters/Property/Items.html b/docs/docsets/.docset/Contents/Resources/Documents/Structs/ChatQuery/ChatCompletionToolParam/FunctionDefinition/FunctionParameters/Property/Items.html new file mode 100644 index 00000000..b94c4d7b --- /dev/null +++ b/docs/docsets/.docset/Contents/Resources/Documents/Structs/ChatQuery/ChatCompletionToolParam/FunctionDefinition/FunctionParameters/Property/Items.html @@ -0,0 +1,666 @@ + + + + Items Structure Reference + + + + + + + + + + + + + +
+
+

Docs (100% documented)

+
+
+ +
+
+
+
+
+ +
+
+ +
+
+
+

Items

+
+
+ +
public struct Items : Codable, Equatable
+ +
+
+ +
+
+
+
    +
  • +
    + + + + JSONType + +
    +
    +
    +
    +
    +
    + +
    +
    +

    Declaration

    +
    +

    Swift

    +
    public typealias JSONType = ChatQuery.ChatCompletionToolParam.FunctionDefinition.FunctionParameters.JSONType
    + +
    +
    +
    +
    +
  • +
  • +
    + + + + type + +
    +
    +
    +
    +
    +
    + +
    +
    +

    Declaration

    +
    +

    Swift

    +
    public let type: `Self`.JSONType
    + +
    +
    +
    +
    +
  • +
  • +
    + + + + properties + +
    +
    +
    +
    +
    +
    + +
    +
    +

    Declaration

    +
    +

    Swift

    +
    public let properties: [String : Property]?
    + +
    +
    +
    +
    +
  • +
  • +
    + + + + pattern + +
    +
    +
    +
    +
    +
    + +
    +
    +

    Declaration

    +
    +

    Swift

    +
    public let pattern: String?
    + +
    +
    +
    +
    +
  • +
  • +
    + + + + const + +
    +
    +
    +
    +
    +
    + +
    +
    +

    Declaration

    +
    +

    Swift

    +
    public let const: String?
    + +
    +
    +
    +
    +
  • +
  • +
    + + + + enum + +
    +
    +
    +
    +
    +
    + +
    +
    +

    Declaration

    +
    +

    Swift

    +
    public let `enum`: [String]?
    + +
    +
    +
    +
    +
  • +
  • +
    + + + + multipleOf + +
    +
    +
    +
    +
    +
    + +
    +
    +

    Declaration

    +
    +

    Swift

    +
    public let multipleOf: Int?
    + +
    +
    +
    +
    +
  • +
  • +
    + + + + minimum + +
    +
    +
    +
    +
    +
    + +
    +
    +

    Declaration

    +
    +

    Swift

    +
    public let minimum: Double?
    + +
    +
    +
    +
    +
  • +
  • +
    + + + + maximum + +
    +
    +
    +
    +
    +
    + +
    +
    +

    Declaration

    +
    +

    Swift

    +
    public let maximum: Double?
    + +
    +
    +
    +
    +
  • +
  • +
    + + + + minItems + +
    +
    +
    +
    +
    +
    + +
    +
    +

    Declaration

    +
    +

    Swift

    +
    public let minItems: Int?
    + +
    +
    +
    +
    +
  • +
  • +
    + + + + maxItems + +
    +
    +
    +
    +
    +
    + +
    +
    +

    Declaration

    +
    +

    Swift

    +
    public let maxItems: Int?
    + +
    +
    +
    +
    +
  • +
  • +
    + + + + uniqueItems + +
    +
    +
    +
    +
    +
    + +
    +
    +

    Declaration

    +
    +

    Swift

    +
    public let uniqueItems: Bool?
    + +
    +
    +
    +
    +
  • +
  • + +
    +
    +
    +
    +
    + +
    +
    +

    Declaration

    +
    +

    Swift

    +
    public init(
    +    type: Self.JSONType,
    +    properties: [String : Property]? = nil,
    +    pattern: String? = nil,
    +    const: String? = nil,
    +    `enum`: [String]? = nil,
    +    multipleOf: Int? = nil,
    +    minimum: Double? = nil,
    +    maximum: Double? = nil,
    +    minItems: Int? = nil,
    +    maxItems: Int? = nil,
    +    uniqueItems: Bool? = nil
    +)
    + +
    +
    +
    +
    +
  • +
+
+
+
+ +
+
+ + diff --git a/docs/docsets/.docset/Contents/Resources/Documents/Structs/ChatQuery/ChatCompletionToolParam/ToolsType.html b/docs/docsets/.docset/Contents/Resources/Documents/Structs/ChatQuery/ChatCompletionToolParam/ToolsType.html new file mode 100644 index 00000000..cddc3257 --- /dev/null +++ b/docs/docsets/.docset/Contents/Resources/Documents/Structs/ChatQuery/ChatCompletionToolParam/ToolsType.html @@ -0,0 +1,336 @@ + + + + ToolsType Enumeration Reference + + + + + + + + + + + + + +
+
+

Docs (100% documented)

+
+
+ +
+
+
+
+
+ +
+
+ +
+
+
+

ToolsType

+
+
+ +
public enum ToolsType : String, Codable, Equatable
+ +
+
+ +
+
+
+
    +
  • +
    + + + + function + +
    +
    +
    +
    +
    +
    + +
    +
    +

    Declaration

    +
    +

    Swift

    +
    case function
    + +
    +
    +
    +
    +
  • +
+
+
+
+ +
+
+ + diff --git a/docs/docsets/.docset/Contents/Resources/Documents/Structs/ChatQuery/CodingKeys.html b/docs/docsets/.docset/Contents/Resources/Documents/Structs/ChatQuery/CodingKeys.html new file mode 100644 index 00000000..62e5f81e --- /dev/null +++ b/docs/docsets/.docset/Contents/Resources/Documents/Structs/ChatQuery/CodingKeys.html @@ -0,0 +1,776 @@ + + + + CodingKeys Enumeration Reference + + + + + + + + + + + + + +
+
+

Docs (100% documented)

+
+
+ +
+
+
+
+
+ +
+
+ +
+
+
+

CodingKeys

+
+
+ +
public enum CodingKeys : String, CodingKey
+ +
+
+ +
+
+
+
    +
  • +
    + + + + messages + +
    +
    +
    +
    +
    +
    + +
    +
    +

    Declaration

    +
    +

    Swift

    +
    case messages
    + +
    +
    +
    +
    +
  • +
  • +
    + + + + model + +
    +
    +
    +
    +
    +
    + +
    +
    +

    Declaration

    +
    +

    Swift

    +
    case model
    + +
    +
    +
    +
    +
  • +
  • +
    + + + + frequencyPenalty + +
    +
    +
    +
    +
    +
    + +
    +
    +

    Declaration

    +
    +

    Swift

    +
    case frequencyPenalty = "frequency_penalty"
    + +
    +
    +
    +
    +
  • +
  • +
    + + + + logitBias + +
    +
    +
    +
    +
    +
    + +
    +
    +

    Declaration

    +
    +

    Swift

    +
    case logitBias = "logit_bias"
    + +
    +
    +
    +
    +
  • +
  • +
    + + + + logprobs + +
    +
    +
    +
    +
    +
    + +
    +
    +

    Declaration

    +
    +

    Swift

    +
    case logprobs
    + +
    +
    +
    +
    +
  • +
  • +
    + + + + maxTokens + +
    +
    +
    +
    +
    +
    + +
    +
    +

    Declaration

    +
    +

    Swift

    +
    case maxTokens = "max_tokens"
    + +
    +
    +
    +
    +
  • +
  • +
    + + + + n + +
    +
    +
    +
    +
    +
    + +
    +
    +

    Declaration

    +
    +

    Swift

    +
    case n
    + +
    +
    +
    +
    +
  • +
  • +
    + + + + presencePenalty + +
    +
    +
    +
    +
    +
    + +
    +
    +

    Declaration

    +
    +

    Swift

    +
    case presencePenalty = "presence_penalty"
    + +
    +
    +
    +
    +
  • +
  • +
    + + + + responseFormat + +
    +
    +
    +
    +
    +
    + +
    +
    +

    Declaration

    +
    +

    Swift

    +
    case responseFormat = "response_format"
    + +
    +
    +
    +
    +
  • +
  • +
    + + + + seed + +
    +
    +
    +
    +
    +
    + +
    +
    +

    Declaration

    +
    +

    Swift

    +
    case seed
    + +
    +
    +
    +
    +
  • +
  • +
    + + + + stop + +
    +
    +
    +
    +
    +
    + +
    +
    +

    Declaration

    +
    +

    Swift

    +
    case stop
    + +
    +
    +
    +
    +
  • +
  • +
    + + + + temperature + +
    +
    +
    +
    +
    +
    + +
    +
    +

    Declaration

    +
    +

    Swift

    +
    case temperature
    + +
    +
    +
    +
    +
  • +
  • +
    + + + + toolChoice + +
    +
    +
    +
    +
    +
    + +
    +
    +

    Declaration

    +
    +

    Swift

    +
    case toolChoice = "tool_choice"
    + +
    +
    +
    +
    +
  • +
  • +
    + + + + tools + +
    +
    +
    +
    +
    +
    + +
    +
    +

    Declaration

    +
    +

    Swift

    +
    case tools
    + +
    +
    +
    +
    +
  • +
  • +
    + + + + topLogprobs + +
    +
    +
    +
    +
    +
    + +
    +
    +

    Declaration

    +
    +

    Swift

    +
    case topLogprobs = "top_logprobs"
    + +
    +
    +
    +
    +
  • +
  • +
    + + + + topP + +
    +
    +
    +
    +
    +
    + +
    +
    +

    Declaration

    +
    +

    Swift

    +
    case topP = "top_p"
    + +
    +
    +
    +
    +
  • +
  • +
    + + + + user + +
    +
    +
    +
    +
    +
    + +
    +
    +

    Declaration

    +
    +

    Swift

    +
    case user
    + +
    +
    +
    +
    +
  • +
  • +
    + + + + stream + +
    +
    +
    +
    +
    +
    + +
    +
    +

    Declaration

    +
    +

    Swift

    +
    case stream
    + +
    +
    +
    +
    +
  • +
+
+
+
+ +
+
+ + diff --git a/docs/docsets/.docset/Contents/Resources/Documents/Structs/ChatQuery/ResponseFormat.html b/docs/docsets/.docset/Contents/Resources/Documents/Structs/ChatQuery/ResponseFormat.html new file mode 100644 index 00000000..12b0450d --- /dev/null +++ b/docs/docsets/.docset/Contents/Resources/Documents/Structs/ChatQuery/ResponseFormat.html @@ -0,0 +1,386 @@ + + + + ResponseFormat Enumeration Reference + + + + + + + + + + + + + +
+
+

Docs (100% documented)

+
+
+ +
+
+
+
+
+ +
+
+ +
+
+
+

ResponseFormat

+
+
+ +
public enum ResponseFormat : String, Codable, Equatable
+ +
+
+ +
+
+
+
    +
  • +
    + + + + jsonObject + +
    +
    +
    +
    +
    +
    + +
    +
    +

    Declaration

    +
    +

    Swift

    +
    case jsonObject = "json_object"
    + +
    +
    +
    +
    +
  • +
  • +
    + + + + text + +
    +
    +
    +
    +
    +
    + +
    +
    +

    Declaration

    +
    +

    Swift

    +
    case text
    + +
    +
    +
    +
    +
  • +
  • +
    + + + + encode(to:) + +
    +
    +
    +
    +
    +
    + +
    +
    +

    Declaration

    +
    +

    Swift

    +
    public func encode(to encoder: Encoder) throws
    + +
    +
    +
    +
    +
  • +
+
+
+
+ +
+
+ + diff --git a/docs/docsets/.docset/Contents/Resources/Documents/Structs/ChatQuery/Stop.html b/docs/docsets/.docset/Contents/Resources/Documents/Structs/ChatQuery/Stop.html new file mode 100644 index 00000000..c03477e1 --- /dev/null +++ b/docs/docsets/.docset/Contents/Resources/Documents/Structs/ChatQuery/Stop.html @@ -0,0 +1,438 @@ + + + + Stop Enumeration Reference + + + + + + + + + + + + + +
+
+

Docs (100% documented)

+
+
+ +
+
+
+
+
+ +
+
+ +
+
+
+

Stop

+
+
+ +
public enum Stop : Codable, Equatable
+ +
+
+ +
+
+
+
    +
  • +
    + + + + string(_:) + +
    +
    +
    +
    +
    +
    + +
    +
    +

    Declaration

    +
    +

    Swift

    +
    case string(String)
    + +
    +
    +
    +
    +
  • +
  • +
    + + + + stringList(_:) + +
    +
    +
    +
    +
    +
    + +
    +
    +

    Declaration

    +
    +

    Swift

    +
    case stringList([String])
    + +
    +
    +
    +
    +
  • +
  • +
    + + + + encode(to:) + +
    +
    +
    +
    +
    +
    + +
    +
    +

    Declaration

    +
    +

    Swift

    +
    public func encode(to encoder: Encoder) throws
    + +
    +
    +
    +
    +
  • +
  • +
    + + + + init(string:) + +
    +
    +
    +
    +
    +
    + +
    +
    +

    Declaration

    +
    +

    Swift

    +
    public init(string: String)
    + +
    +
    +
    +
    +
  • +
  • +
    + + + + init(stringList:) + +
    +
    +
    +
    +
    +
    + +
    +
    +

    Declaration

    +
    +

    Swift

    +
    public init(stringList: [String])
    + +
    +
    +
    +
    +
  • +
+
+
+
+ +
+
+ + diff --git a/docs/docsets/.docset/Contents/Resources/Documents/Structs/ChatResult.html b/docs/docsets/.docset/Contents/Resources/Documents/Structs/ChatResult.html new file mode 100644 index 00000000..21f86f60 --- /dev/null +++ b/docs/docsets/.docset/Contents/Resources/Documents/Structs/ChatResult.html @@ -0,0 +1,611 @@ + + + + ChatResult Structure Reference + + + + + + + + + + + + + +
+
+

Docs (100% documented)

+
+
+ +
+
+
+
+
+ +
+
+ +
+
+
+

ChatResult

+
+
+ +
public struct ChatResult : Codable, Equatable
+ +
+
+

https://platform.openai.com/docs/api-reference/chat/object +Example Completion object print

+
{
+ "id": "chatcmpl-123456",
+ "object": "chat.completion",
+ "created": 1728933352,
+ "model": "gpt-4o-2024-08-06",
+ "choices": [
+   {
+     "index": 0,
+     "message": {
+       "role": "assistant",
+       "content": "Hi there! How can I assist you today?",
+       "refusal": null
+     },
+     "logprobs": null,
+     "finish_reason": "stop"
+   }
+ ],
+ "usage": {
+   "prompt_tokens": 19,
+   "completion_tokens": 10,
+   "total_tokens": 29,
+   "prompt_tokens_details": {
+     "cached_tokens": 0
+   },
+   "completion_tokens_details": {
+     "reasoning_tokens": 0
+   }
+ },
+ "system_fingerprint": "fp_6b68a8204b"
+}
+
+ +
+
+
+
    +
  • +
    + + + + Choice + +
    +
    +
    +
    +
    +
    +

    mimic the choices array in the chat completion object

    + + See more +
    +
    +

    Declaration

    +
    +

    Swift

    +
    public struct Choice : Codable, Equatable
    + +
    +
    +
    +
    +
  • +
  • +
    + + + + CompletionUsage + +
    +
    +
    +
    +
    +
    + + See more +
    +
    +

    Declaration

    +
    +

    Swift

    +
    public struct CompletionUsage : Codable, Equatable
    + +
    +
    +
    +
    +
  • +
  • +
    + + + + id + +
    +
    +
    +
    +
    +
    +

    A unique identifier for the chat completion.

    + +
    +
    +

    Declaration

    +
    +

    Swift

    +
    public let id: String
    + +
    +
    +
    +
    +
  • +
  • +
    + + + + object + +
    +
    +
    +
    +
    +
    +

    The object type, which is always chat.completion.

    + +
    +
    +

    Declaration

    +
    +

    Swift

    +
    public let object: String
    + +
    +
    +
    +
    +
  • +
  • +
    + + + + created + +
    +
    +
    +
    +
    +
    +

    The Unix timestamp (in seconds) of when the chat completion was created.

    + +
    +
    +

    Declaration

    +
    +

    Swift

    +
    public let created: TimeInterval
    + +
    +
    +
    +
    +
  • +
  • +
    + + + + model + +
    +
    +
    +
    +
    +
    +

    The model used for the chat completion.

    + +
    +
    +

    Declaration

    +
    +

    Swift

    +
    public let model: String
    + +
    +
    +
    +
    +
  • +
  • +
    + + + + choices + +
    +
    +
    +
    +
    +
    +

    A list of chat completion choices. Can be more than one if n is greater than 1.

    + +
    +
    +

    Declaration

    +
    +

    Swift

    +
    public let choices: [Choice]
    + +
    +
    +
    +
    +
  • +
  • +
    + + + + usage + +
    +
    +
    +
    +
    +
    +

    Usage statistics for the completion request.

    + +
    +
    +

    Declaration

    +
    +

    Swift

    +
    public let usage: `Self`.CompletionUsage?
    + +
    +
    +
    +
    +
  • +
  • +
    + + + + systemFingerprint + +
    +
    +
    +
    +
    +
    +

    This fingerprint represents the backend configuration that the model runs with. +Can be used in conjunction with the seed request parameter to understand when backend changes have been made that might impact determinism.

    + +
    +
    +

    Declaration

    +
    +

    Swift

    +
    public let systemFingerprint: String?
    + +
    +
    +
    +
    +
  • +
  • +
    + + + + CodingKeys + +
    +
    +
    +
    +
    +
    + + See more +
    +
    +

    Declaration

    +
    +

    Swift

    +
    public enum CodingKeys : String, CodingKey
    + +
    +
    +
    +
    +
  • +
+
+
+
+ +
+
+ + diff --git a/docs/docsets/.docset/Contents/Resources/Documents/Structs/ChatResult/Choice.html b/docs/docsets/.docset/Contents/Resources/Documents/Structs/ChatResult/Choice.html new file mode 100644 index 00000000..11c1df48 --- /dev/null +++ b/docs/docsets/.docset/Contents/Resources/Documents/Structs/ChatResult/Choice.html @@ -0,0 +1,524 @@ + + + + Choice Structure Reference + + + + + + + + + + + + + +
+
+

Docs (100% documented)

+
+
+ +
+
+
+
+
+ +
+
+ +
+
+
+

Choice

+
+
+ +
public struct Choice : Codable, Equatable
+ +
+
+

mimic the choices array in the chat completion object

+ +
+
+
+
    +
  • +
    + + + + ChatCompletionMessage + +
    +
    +
    +
    +
    +
    + +
    +
    +

    Declaration

    +
    +

    Swift

    +
    public typealias ChatCompletionMessage = ChatQuery.ChatCompletionMessageParam
    + +
    +
    +
    +
    +
  • +
  • +
    + + + + index + +
    +
    +
    +
    +
    +
    +

    The index of the choice in the list of choices.

    + +
    +
    +

    Declaration

    +
    +

    Swift

    +
    public let index: Int
    + +
    +
    +
    +
    +
  • +
  • +
    + + + + logprobs + +
    +
    +
    +
    +
    +
    +

    Log probability information for the choice.

    + +
    +
    +

    Declaration

    +
    +

    Swift

    +
    public let logprobs: `Self`.ChoiceLogprobs?
    + +
    +
    +
    +
    +
  • +
  • +
    + + + + message + +
    +
    +
    +
    +
    +
    +

    A chat completion message generated by the model.

    + +
    +
    +

    Declaration

    +
    +

    Swift

    +
    public let message: `Self`.ChatCompletionMessage
    + +
    +
    +
    +
    +
  • +
  • +
    + + + + finishReason + +
    +
    +
    +
    +
    +
    +

    The reason the model stopped generating tokens. This will be stop if the model hit a natural stop point or a provided stop sequence, length if the maximum number of tokens specified in the request was reached, content_filter if content was omitted due to a flag from our content filters, tool_calls if the model called a tool, or function_call (deprecated) if the model called a function.

    + +
    +
    +

    Declaration

    +
    +

    Swift

    +
    public let finishReason: String?
    + +
    +
    +
    +
    +
  • +
  • +
    + + + + ChoiceLogprobs + +
    +
    +
    +
    +
    +
    + + See more +
    +
    +

    Declaration

    +
    +

    Swift

    +
    public struct ChoiceLogprobs : Codable, Equatable
    + +
    +
    +
    +
    +
  • +
  • +
    + + + + CodingKeys + +
    +
    +
    +
    +
    +
    + + See more +
    +
    +

    Declaration

    +
    +

    Swift

    +
    public enum CodingKeys : String, CodingKey
    + +
    +
    +
    +
    +
  • +
  • +
    + + + + FinishReason + +
    +
    +
    +
    +
    +
    + + See more +
    +
    +

    Declaration

    +
    +

    Swift

    +
    public enum FinishReason : String, Codable, Equatable
    + +
    +
    +
    +
    +
  • +
+
+
+
+ +
+
+ + diff --git a/docs/docsets/.docset/Contents/Resources/Documents/Structs/ChatResult/Choice/ChoiceLogprobs.html b/docs/docsets/.docset/Contents/Resources/Documents/Structs/ChatResult/Choice/ChoiceLogprobs.html new file mode 100644 index 00000000..dfb22cde --- /dev/null +++ b/docs/docsets/.docset/Contents/Resources/Documents/Structs/ChatResult/Choice/ChoiceLogprobs.html @@ -0,0 +1,363 @@ + + + + ChoiceLogprobs Structure Reference + + + + + + + + + + + + + +
+
+

Docs (100% documented)

+
+
+ +
+
+
+
+
+ +
+
+ +
+
+
+

ChoiceLogprobs

+
+
+ +
public struct ChoiceLogprobs : Codable, Equatable
+ +
+
+ +
+
+
+ +
+
+
+ +
+
+ + diff --git a/docs/docsets/.docset/Contents/Resources/Documents/Structs/ChatResult/Choice/ChoiceLogprobs/ChatCompletionTokenLogprob.html b/docs/docsets/.docset/Contents/Resources/Documents/Structs/ChatResult/Choice/ChoiceLogprobs/ChatCompletionTokenLogprob.html new file mode 100644 index 00000000..4e8d67fc --- /dev/null +++ b/docs/docsets/.docset/Contents/Resources/Documents/Structs/ChatResult/Choice/ChoiceLogprobs/ChatCompletionTokenLogprob.html @@ -0,0 +1,478 @@ + + + + ChatCompletionTokenLogprob Structure Reference + + + + + + + + + + + + + +
+
+

Docs (100% documented)

+
+
+ +
+
+
+
+
+ +
+
+ +
+
+
+

ChatCompletionTokenLogprob

+
+
+ +
public struct ChatCompletionTokenLogprob : Codable, Equatable
+ +
+
+ +
+
+
+
    +
  • +
    + + + + token + +
    +
    +
    +
    +
    +
    +

    The token.

    + +
    +
    +

    Declaration

    +
    +

    Swift

    +
    public let token: String
    + +
    +
    +
    +
    +
  • +
  • +
    + + + + bytes + +
    +
    +
    +
    +
    +
    +

    A list of integers representing the UTF-8 bytes representation of the token. +Useful in instances where characters are represented by multiple tokens and +their byte representations must be combined to generate the correct text +representation. Can be null if there is no bytes representation for the token.

    + +
    +
    +

    Declaration

    +
    +

    Swift

    +
    public let bytes: [Int]?
    + +
    +
    +
    +
    +
  • +
  • +
    + + + + logprob + +
    +
    +
    +
    +
    +
    +

    The log probability of this token.

    + +
    +
    +

    Declaration

    +
    +

    Swift

    +
    public let logprob: Double
    + +
    +
    +
    +
    +
  • +
  • +
    + + + + topLogprobs + +
    +
    +
    +
    +
    +
    +

    List of the most likely tokens and their log probability, at this token position. +In rare cases, there may be fewer than the number of requested top_logprobs returned.

    + +
    +
    +

    Declaration

    +
    +

    Swift

    +
    public let topLogprobs: [TopLogprob]
    + +
    +
    +
    +
    +
  • +
  • +
    + + + + TopLogprob + +
    +
    +
    +
    +
    +
    + + See more +
    +
    +

    Declaration

    +
    +

    Swift

    +
    public struct TopLogprob : Codable, Equatable
    + +
    +
    +
    +
    +
  • +
  • +
    + + + + CodingKeys + +
    +
    +
    +
    +
    +
    + + See more +
    +
    +

    Declaration

    +
    +

    Swift

    +
    public enum CodingKeys : String, CodingKey
    + +
    +
    +
    +
    +
  • +
+
+
+
+ +
+
+ + diff --git a/docs/docsets/.docset/Contents/Resources/Documents/Structs/ChatResult/Choice/ChoiceLogprobs/ChatCompletionTokenLogprob/CodingKeys.html b/docs/docsets/.docset/Contents/Resources/Documents/Structs/ChatResult/Choice/ChoiceLogprobs/ChatCompletionTokenLogprob/CodingKeys.html new file mode 100644 index 00000000..c15be5f8 --- /dev/null +++ b/docs/docsets/.docset/Contents/Resources/Documents/Structs/ChatResult/Choice/ChoiceLogprobs/ChatCompletionTokenLogprob/CodingKeys.html @@ -0,0 +1,418 @@ + + + + CodingKeys Enumeration Reference + + + + + + + + + + + + + +
+
+

Docs (100% documented)

+
+
+ +
+
+
+
+
+ +
+
+ +
+
+
+

CodingKeys

+
+
+ +
public enum CodingKeys : String, CodingKey
+ +
+
+ +
+
+
+
    +
  • +
    + + + + token + +
    +
    +
    +
    +
    +
    + +
    +
    +

    Declaration

    +
    +

    Swift

    +
    case token
    + +
    +
    +
    +
    +
  • +
  • +
    + + + + bytes + +
    +
    +
    +
    +
    +
    + +
    +
    +

    Declaration

    +
    +

    Swift

    +
    case bytes
    + +
    +
    +
    +
    +
  • +
  • +
    + + + + logprob + +
    +
    +
    +
    +
    +
    + +
    +
    +

    Declaration

    +
    +

    Swift

    +
    case logprob
    + +
    +
    +
    +
    +
  • +
  • +
    + + + + topLogprobs + +
    +
    +
    +
    +
    +
    + +
    +
    +

    Declaration

    +
    +

    Swift

    +
    case topLogprobs = "top_logprobs"
    + +
    +
    +
    +
    +
  • +
+
+
+
+ +
+
+ + diff --git a/docs/docsets/.docset/Contents/Resources/Documents/Structs/ChatResult/Choice/ChoiceLogprobs/ChatCompletionTokenLogprob/TopLogprob.html b/docs/docsets/.docset/Contents/Resources/Documents/Structs/ChatResult/Choice/ChoiceLogprobs/ChatCompletionTokenLogprob/TopLogprob.html new file mode 100644 index 00000000..dbcf56c9 --- /dev/null +++ b/docs/docsets/.docset/Contents/Resources/Documents/Structs/ChatResult/Choice/ChoiceLogprobs/ChatCompletionTokenLogprob/TopLogprob.html @@ -0,0 +1,396 @@ + + + + TopLogprob Structure Reference + + + + + + + + + + + + + +
+
+

Docs (100% documented)

+
+
+ +
+
+
+
+
+ +
+
+ +
+
+
+

TopLogprob

+
+
+ +
public struct TopLogprob : Codable, Equatable
+ +
+
+ +
+
+
+
    +
  • +
    + + + + token + +
    +
    +
    +
    +
    +
    +

    The token.

    + +
    +
    +

    Declaration

    +
    +

    Swift

    +
    public let token: String
    + +
    +
    +
    +
    +
  • +
  • +
    + + + + bytes + +
    +
    +
    +
    +
    +
    +

    A list of integers representing the UTF-8 bytes representation of the token. +Useful in instances where characters are represented by multiple tokens and their byte representations must be combined to generate the correct text representation. Can be null if there is no bytes representation for the token.

    + +
    +
    +

    Declaration

    +
    +

    Swift

    +
    public let bytes: [Int]?
    + +
    +
    +
    +
    +
  • +
  • +
    + + + + logprob + +
    +
    +
    +
    +
    +
    +

    The log probability of this token.

    + +
    +
    +

    Declaration

    +
    +

    Swift

    +
    public let logprob: Double
    + +
    +
    +
    +
    +
  • +
+
+
+
+ +
+
+ + diff --git a/docs/docsets/.docset/Contents/Resources/Documents/Structs/ChatResult/Choice/CodingKeys.html b/docs/docsets/.docset/Contents/Resources/Documents/Structs/ChatResult/Choice/CodingKeys.html new file mode 100644 index 00000000..c74c9053 --- /dev/null +++ b/docs/docsets/.docset/Contents/Resources/Documents/Structs/ChatResult/Choice/CodingKeys.html @@ -0,0 +1,414 @@ + + + + CodingKeys Enumeration Reference + + + + + + + + + + + + + +
+
+

Docs (100% documented)

+
+
+ +
+
+
+
+
+ +
+
+ +
+
+
+

CodingKeys

+
+
+ +
public enum CodingKeys : String, CodingKey
+ +
+
+ +
+
+
+
    +
  • +
    + + + + index + +
    +
    +
    +
    +
    +
    + +
    +
    +

    Declaration

    +
    +

    Swift

    +
    case index
    + +
    +
    +
    +
    +
  • +
  • +
    + + + + logprobs + +
    +
    +
    +
    +
    +
    + +
    +
    +

    Declaration

    +
    +

    Swift

    +
    case logprobs
    + +
    +
    +
    +
    +
  • +
  • +
    + + + + message + +
    +
    +
    +
    +
    +
    + +
    +
    +

    Declaration

    +
    +

    Swift

    +
    case message
    + +
    +
    +
    +
    +
  • +
  • +
    + + + + finishReason + +
    +
    +
    +
    +
    +
    + +
    +
    +

    Declaration

    +
    +

    Swift

    +
    case finishReason = "finish_reason"
    + +
    +
    +
    +
    +
  • +
+
+
+
+ +
+
+ + diff --git a/docs/docsets/.docset/Contents/Resources/Documents/Structs/ChatResult/Choice/FinishReason.html b/docs/docsets/.docset/Contents/Resources/Documents/Structs/ChatResult/Choice/FinishReason.html new file mode 100644 index 00000000..9a2ee189 --- /dev/null +++ b/docs/docsets/.docset/Contents/Resources/Documents/Structs/ChatResult/Choice/FinishReason.html @@ -0,0 +1,440 @@ + + + + FinishReason Enumeration Reference + + + + + + + + + + + + + +
+
+

Docs (100% documented)

+
+
+ +
+
+
+
+
+ +
+
+ +
+
+
+

FinishReason

+
+
+ +
public enum FinishReason : String, Codable, Equatable
+ +
+
+ +
+
+
+
    +
  • +
    + + + + stop + +
    +
    +
    +
    +
    +
    + +
    +
    +

    Declaration

    +
    +

    Swift

    +
    case stop
    + +
    +
    +
    +
    +
  • +
  • +
    + + + + length + +
    +
    +
    +
    +
    +
    + +
    +
    +

    Declaration

    +
    +

    Swift

    +
    case length
    + +
    +
    +
    +
    +
  • +
  • +
    + + + + toolCalls + +
    +
    +
    +
    +
    +
    + +
    +
    +

    Declaration

    +
    +

    Swift

    +
    case toolCalls = "tool_calls"
    + +
    +
    +
    +
    +
  • +
  • +
    + + + + contentFilter + +
    +
    +
    +
    +
    +
    + +
    +
    +

    Declaration

    +
    +

    Swift

    +
    case contentFilter = "content_filter"
    + +
    +
    +
    +
    +
  • +
  • +
    + + + + functionCall + +
    +
    +
    +
    +
    +
    + +
    +
    +

    Declaration

    +
    +

    Swift

    +
    case functionCall = "function_call"
    + +
    +
    +
    +
    +
  • +
+
+
+
+ +
+
+ + diff --git a/docs/docsets/.docset/Contents/Resources/Documents/Structs/ChatResult/CodingKeys.html b/docs/docsets/.docset/Contents/Resources/Documents/Structs/ChatResult/CodingKeys.html new file mode 100644 index 00000000..74f6690a --- /dev/null +++ b/docs/docsets/.docset/Contents/Resources/Documents/Structs/ChatResult/CodingKeys.html @@ -0,0 +1,490 @@ + + + + CodingKeys Enumeration Reference + + + + + + + + + + + + + +
+
+

Docs (100% documented)

+
+
+ +
+
+
+
+
+ +
+
+ +
+
+
+

CodingKeys

+
+
+ +
public enum CodingKeys : String, CodingKey
+ +
+
+ +
+
+
+
    +
  • +
    + + + + id + +
    +
    +
    +
    +
    +
    + +
    +
    +

    Declaration

    +
    +

    Swift

    +
    case id
    + +
    +
    +
    +
    +
  • +
  • +
    + + + + object + +
    +
    +
    +
    +
    +
    + +
    +
    +

    Declaration

    +
    +

    Swift

    +
    case object
    + +
    +
    +
    +
    +
  • +
  • +
    + + + + created + +
    +
    +
    +
    +
    +
    + +
    +
    +

    Declaration

    +
    +

    Swift

    +
    case created
    + +
    +
    +
    +
    +
  • +
  • +
    + + + + model + +
    +
    +
    +
    +
    +
    + +
    +
    +

    Declaration

    +
    +

    Swift

    +
    case model
    + +
    +
    +
    +
    +
  • +
  • +
    + + + + choices + +
    +
    +
    +
    +
    +
    + +
    +
    +

    Declaration

    +
    +

    Swift

    +
    case choices
    + +
    +
    +
    +
    +
  • +
  • +
    + + + + usage + +
    +
    +
    +
    +
    +
    + +
    +
    +

    Declaration

    +
    +

    Swift

    +
    case usage
    + +
    +
    +
    +
    +
  • +
  • +
    + + + + systemFingerprint + +
    +
    +
    +
    +
    +
    + +
    +
    +

    Declaration

    +
    +

    Swift

    +
    case systemFingerprint = "system_fingerprint"
    + +
    +
    +
    +
    +
  • +
+
+
+
+ +
+
+ + diff --git a/docs/docsets/.docset/Contents/Resources/Documents/Structs/ChatResult/CompletionUsage.html b/docs/docsets/.docset/Contents/Resources/Documents/Structs/ChatResult/CompletionUsage.html new file mode 100644 index 00000000..d670ccde --- /dev/null +++ b/docs/docsets/.docset/Contents/Resources/Documents/Structs/ChatResult/CompletionUsage.html @@ -0,0 +1,389 @@ + + + + CompletionUsage Structure Reference + + + + + + + + + + + + + +
+
+

Docs (100% documented)

+
+
+ +
+
+
+
+
+ +
+
+ +
+
+
+

CompletionUsage

+
+
+ +
public struct CompletionUsage : Codable, Equatable
+ +
+
+ +
+
+
+
    +
  • +
    + + + + completionTokens + +
    +
    +
    +
    +
    +
    +

    Number of tokens in the generated completion.

    + +
    +
    +

    Declaration

    +
    +

    Swift

    +
    public let completionTokens: Int
    + +
    +
    +
    +
    +
  • +
  • +
    + + + + promptTokens + +
    +
    +
    +
    +
    +
    +

    Number of tokens in the prompt.

    + +
    +
    +

    Declaration

    +
    +

    Swift

    +
    public let promptTokens: Int
    + +
    +
    +
    +
    +
  • +
  • +
    + + + + totalTokens + +
    +
    +
    +
    +
    +
    +

    Total number of tokens used in the request (prompt + completion).

    + +
    +
    +

    Declaration

    +
    +

    Swift

    +
    public let totalTokens: Int
    + +
    +
    +
    +
    +
  • +
+
+
+
+ +
+
+ + diff --git a/docs/docsets/.docset/Contents/Resources/Documents/Structs/ChatStreamResult.html b/docs/docsets/.docset/Contents/Resources/Documents/Structs/ChatStreamResult.html new file mode 100644 index 00000000..ea9c41c8 --- /dev/null +++ b/docs/docsets/.docset/Contents/Resources/Documents/Structs/ChatStreamResult.html @@ -0,0 +1,524 @@ + + + + ChatStreamResult Structure Reference + + + + + + + + + + + + + +
+
+

Docs (100% documented)

+
+
+ +
+
+
+
+
+ +
+
+ +
+
+
+

ChatStreamResult

+
+
+ +
public struct ChatStreamResult : Codable, Equatable
+ +
+
+ +
+
+
+
    +
  • +
    + + + + Choice + +
    +
    +
    +
    +
    +
    + + See more +
    +
    +

    Declaration

    +
    +

    Swift

    +
    public struct Choice : Codable, Equatable
    + +
    +
    +
    +
    +
  • +
  • +
    + + + + id + +
    +
    +
    +
    +
    +
    +

    A unique identifier for the chat completion. Each chunk has the same ID.

    + +
    +
    +

    Declaration

    +
    +

    Swift

    +
    public let id: String
    + +
    +
    +
    +
    +
  • +
  • +
    + + + + object + +
    +
    +
    +
    +
    +
    +

    The object type, which is always chat.completion.chunk.

    + +
    +
    +

    Declaration

    +
    +

    Swift

    +
    public let object: String
    + +
    +
    +
    +
    +
  • +
  • +
    + + + + created + +
    +
    +
    +
    +
    +
    +

    The Unix timestamp (in seconds) of when the chat completion was created. +Each chunk has the same timestamp.

    + +
    +
    +

    Declaration

    +
    +

    Swift

    +
    public let created: TimeInterval
    + +
    +
    +
    +
    +
  • +
  • +
    + + + + model + +
    +
    +
    +
    +
    +
    +

    The model to generate the completion.

    + +
    +
    +

    Declaration

    +
    +

    Swift

    +
    public let model: String
    + +
    +
    +
    +
    +
  • +
  • +
    + + + + choices + +
    +
    +
    +
    +
    +
    +

    A list of chat completion choices. +Can be more than one if n is greater than 1.

    + +
    +
    +

    Declaration

    +
    +

    Swift

    +
    public let choices: [Choice]
    + +
    +
    +
    +
    +
  • +
  • +
    + + + + systemFingerprint + +
    +
    +
    +
    +
    +
    +

    This fingerprint represents the backend configuration that the model runs with. Can be used in conjunction with the seed request parameter to understand when backend changes have been made that might impact determinism.

    + +
    +
    +

    Declaration

    +
    +

    Swift

    +
    public let systemFingerprint: String?
    + +
    +
    +
    +
    +
  • +
  • +
    + + + + CodingKeys + +
    +
    +
    +
    +
    +
    + + See more +
    +
    +

    Declaration

    +
    +

    Swift

    +
    public enum CodingKeys : String, CodingKey
    + +
    +
    +
    +
    +
  • +
+
+
+
+ +
+
+ + diff --git a/docs/docsets/.docset/Contents/Resources/Documents/Structs/ChatStreamResult/Choice.html b/docs/docsets/.docset/Contents/Resources/Documents/Structs/ChatStreamResult/Choice.html new file mode 100644 index 00000000..07ec3ba2 --- /dev/null +++ b/docs/docsets/.docset/Contents/Resources/Documents/Structs/ChatStreamResult/Choice.html @@ -0,0 +1,524 @@ + + + + Choice Structure Reference + + + + + + + + + + + + + +
+
+

Docs (100% documented)

+
+
+ +
+
+
+
+
+ +
+
+ +
+
+
+

Choice

+
+
+ +
public struct Choice : Codable, Equatable
+ +
+
+ +
+
+
+
    +
  • +
    + + + + FinishReason + +
    +
    +
    +
    +
    +
    + +
    +
    +

    Declaration

    +
    +

    Swift

    +
    public typealias FinishReason = ChatResult.Choice.FinishReason
    + +
    +
    +
    +
    +
  • +
  • +
    + + + + ChoiceDelta + +
    +
    +
    +
    +
    +
    + + See more +
    +
    +

    Declaration

    +
    +

    Swift

    +
    public struct ChoiceDelta : Codable, Equatable
    + +
    +
    +
    +
    +
  • +
  • +
    + + + + index + +
    +
    +
    +
    +
    +
    +

    The index of the choice in the list of choices.

    + +
    +
    +

    Declaration

    +
    +

    Swift

    +
    public let index: Int
    + +
    +
    +
    +
    +
  • +
  • +
    + + + + delta + +
    +
    +
    +
    +
    +
    +

    A chat completion delta generated by streamed model responses.

    + +
    +
    +

    Declaration

    +
    +

    Swift

    +
    public let delta: `Self`.ChoiceDelta
    + +
    +
    +
    +
    +
  • +
  • +
    + + + + finishReason + +
    +
    +
    +
    +
    +
    +

    The reason the model stopped generating tokens. +This will be stop if the model hit a natural stop point or a provided stop sequence, length if the maximum number of tokens specified in the request was reached, content_filter if content was omitted due to a flag from our content filters, tool_calls if the model called a tool, or function_call (deprecated) if the model called a function.

    + +
    +
    +

    Declaration

    +
    +

    Swift

    +
    public let finishReason: FinishReason?
    + +
    +
    +
    +
    +
  • +
  • +
    + + + + logprobs + +
    +
    +
    +
    +
    +
    +

    Log probability information for the choice.

    + +
    +
    +

    Declaration

    +
    +

    Swift

    +
    public let logprobs: `Self`.ChoiceLogprobs?
    + +
    +
    +
    +
    +
  • +
  • +
    + + + + ChoiceLogprobs + +
    +
    +
    +
    +
    +
    + + See more +
    +
    +

    Declaration

    +
    +

    Swift

    +
    public struct ChoiceLogprobs : Codable, Equatable
    + +
    +
    +
    +
    +
  • +
  • +
    + + + + CodingKeys + +
    +
    +
    +
    +
    +
    + + See more +
    +
    +

    Declaration

    +
    +

    Swift

    +
    public enum CodingKeys : String, CodingKey
    + +
    +
    +
    +
    +
  • +
+
+
+
+ +
+
+ + diff --git a/docs/docsets/.docset/Contents/Resources/Documents/Structs/ChatStreamResult/Choice/ChoiceDelta.html b/docs/docsets/.docset/Contents/Resources/Documents/Structs/ChatStreamResult/Choice/ChoiceDelta.html new file mode 100644 index 00000000..1d2aaf57 --- /dev/null +++ b/docs/docsets/.docset/Contents/Resources/Documents/Structs/ChatStreamResult/Choice/ChoiceDelta.html @@ -0,0 +1,470 @@ + + + + ChoiceDelta Structure Reference + + + + + + + + + + + + + +
+
+

Docs (100% documented)

+
+
+ +
+
+
+
+
+ +
+
+ +
+
+
+

ChoiceDelta

+
+
+ +
public struct ChoiceDelta : Codable, Equatable
+ +
+
+ +
+
+
+
    +
  • +
    + + + + Role + +
    +
    +
    +
    +
    +
    + +
    +
    +

    Declaration

    +
    +

    Swift

    +
    public typealias Role = ChatQuery.ChatCompletionMessageParam.Role
    + +
    +
    +
    +
    +
  • +
  • +
    + + + + content + +
    +
    +
    +
    +
    +
    +

    The contents of the chunk message.

    + +
    +
    +

    Declaration

    +
    +

    Swift

    +
    public let content: String?
    + +
    +
    +
    +
    +
  • +
  • +
    + + + + role + +
    +
    +
    +
    +
    +
    +

    The role of the author of this message.

    + +
    +
    +

    Declaration

    +
    +

    Swift

    +
    public let role: `Self`.Role?
    + +
    +
    +
    +
    +
  • +
  • +
    + + + + toolCalls + +
    +
    +
    +
    +
    +
    + +
    +
    +

    Declaration

    +
    +

    Swift

    +
    public let toolCalls: [`Self`.ChoiceDeltaToolCall]?
    + +
    +
    +
    +
    +
  • +
  • +
    + + + + ChoiceDeltaToolCall + +
    +
    +
    +
    +
    +
    + + See more +
    +
    +

    Declaration

    +
    +

    Swift

    +
    public struct ChoiceDeltaToolCall : Codable, Equatable
    + +
    +
    +
    +
    +
  • +
  • +
    + + + + CodingKeys + +
    +
    +
    +
    +
    +
    + + See more +
    +
    +

    Declaration

    +
    +

    Swift

    +
    public enum CodingKeys : String, CodingKey
    + +
    +
    +
    +
    +
  • +
+
+
+
+ +
+
+ + diff --git a/docs/docsets/.docset/Contents/Resources/Documents/Structs/ChatStreamResult/Choice/ChoiceDelta/ChoiceDeltaToolCall.html b/docs/docsets/.docset/Contents/Resources/Documents/Structs/ChatStreamResult/Choice/ChoiceDelta/ChoiceDeltaToolCall.html new file mode 100644 index 00000000..80421b18 --- /dev/null +++ b/docs/docsets/.docset/Contents/Resources/Documents/Structs/ChatStreamResult/Choice/ChoiceDelta/ChoiceDeltaToolCall.html @@ -0,0 +1,476 @@ + + + + ChoiceDeltaToolCall Structure Reference + + + + + + + + + + + + + +
+
+

Docs (100% documented)

+
+
+ +
+
+
+
+
+ +
+
+ +
+
+
+

ChoiceDeltaToolCall

+
+
+ +
public struct ChoiceDeltaToolCall : Codable, Equatable
+ +
+
+ +
+
+
+
    +
  • +
    + + + + index + +
    +
    +
    +
    +
    +
    + +
    +
    +

    Declaration

    +
    +

    Swift

    +
    public let index: Int
    + +
    +
    +
    +
    +
  • +
  • +
    + + + + id + +
    +
    +
    +
    +
    +
    +

    The ID of the tool call.

    + +
    +
    +

    Declaration

    +
    +

    Swift

    +
    public let id: String?
    + +
    +
    +
    +
    +
  • +
  • +
    + + + + function + +
    +
    +
    +
    +
    +
    +

    The function that the model called.

    + +
    +
    +

    Declaration

    +
    +

    Swift

    +
    public let function: `Self`.ChoiceDeltaToolCallFunction?
    + +
    +
    +
    +
    +
  • +
  • +
    + + + + type + +
    +
    +
    +
    +
    +
    +

    The type of the tool. Currently, only function is supported.

    + +
    +
    +

    Declaration

    +
    +

    Swift

    +
    public let type: String?
    + +
    +
    +
    +
    +
  • +
  • + +
    +
    +
    +
    +
    + +
    +
    +

    Declaration

    +
    +

    Swift

    +
    public init(
    +    index: Int,
    +    id: String? = nil,
    +    function: Self.ChoiceDeltaToolCallFunction? = nil
    +)
    + +
    +
    +
    +
    +
  • +
  • + +
    +
    +
    +
    +
    + + See more +
    +
    +

    Declaration

    +
    +

    Swift

    +
    public struct ChoiceDeltaToolCallFunction : Codable, Equatable
    + +
    +
    +
    +
    +
  • +
+
+
+
+ +
+
+ + diff --git a/docs/docsets/.docset/Contents/Resources/Documents/Structs/ChatStreamResult/Choice/ChoiceDelta/ChoiceDeltaToolCall/ChoiceDeltaToolCallFunction.html b/docs/docsets/.docset/Contents/Resources/Documents/Structs/ChatStreamResult/Choice/ChoiceDelta/ChoiceDeltaToolCall/ChoiceDeltaToolCallFunction.html new file mode 100644 index 00000000..7859e501 --- /dev/null +++ b/docs/docsets/.docset/Contents/Resources/Documents/Structs/ChatStreamResult/Choice/ChoiceDelta/ChoiceDeltaToolCall/ChoiceDeltaToolCallFunction.html @@ -0,0 +1,397 @@ + + + + ChoiceDeltaToolCallFunction Structure Reference + + + + + + + + + + + + + +
+
+

Docs (100% documented)

+
+
+ +
+
+
+
+
+ +
+
+ +
+
+
+

ChoiceDeltaToolCallFunction

+
+
+ +
public struct ChoiceDeltaToolCallFunction : Codable, Equatable
+ +
+
+ +
+
+
+
    +
  • +
    + + + + arguments + +
    +
    +
    +
    +
    +
    +

    The arguments to call the function with, as generated by the model in JSON format. Note that the model does not always generate valid JSON, and may hallucinate parameters not defined by your function schema. Validate the arguments in your code before calling your function.

    + +
    +
    +

    Declaration

    +
    +

    Swift

    +
    public let arguments: String?
    + +
    +
    +
    +
    +
  • +
  • +
    + + + + name + +
    +
    +
    +
    +
    +
    +

    The name of the function to call.

    + +
    +
    +

    Declaration

    +
    +

    Swift

    +
    public let name: String?
    + +
    +
    +
    +
    +
  • +
  • +
    + + + + init(arguments:name:) + +
    +
    +
    +
    +
    +
    + +
    +
    +

    Declaration

    +
    +

    Swift

    +
    public init(
    +    arguments: String? = nil,
    +    name: String? = nil
    +)
    + +
    +
    +
    +
    +
  • +
+
+
+
+ +
+
+ + diff --git a/docs/docsets/.docset/Contents/Resources/Documents/Structs/ChatStreamResult/Choice/ChoiceDelta/CodingKeys.html b/docs/docsets/.docset/Contents/Resources/Documents/Structs/ChatStreamResult/Choice/ChoiceDelta/CodingKeys.html new file mode 100644 index 00000000..b066c3d9 --- /dev/null +++ b/docs/docsets/.docset/Contents/Resources/Documents/Structs/ChatStreamResult/Choice/ChoiceDelta/CodingKeys.html @@ -0,0 +1,390 @@ + + + + CodingKeys Enumeration Reference + + + + + + + + + + + + + +
+
+

Docs (100% documented)

+
+
+ +
+
+
+
+
+ +
+
+ +
+
+
+

CodingKeys

+
+
+ +
public enum CodingKeys : String, CodingKey
+ +
+
+ +
+
+
+
    +
  • +
    + + + + content + +
    +
    +
    +
    +
    +
    + +
    +
    +

    Declaration

    +
    +

    Swift

    +
    case content
    + +
    +
    +
    +
    +
  • +
  • +
    + + + + role + +
    +
    +
    +
    +
    +
    + +
    +
    +

    Declaration

    +
    +

    Swift

    +
    case role
    + +
    +
    +
    +
    +
  • +
  • +
    + + + + toolCalls + +
    +
    +
    +
    +
    +
    + +
    +
    +

    Declaration

    +
    +

    Swift

    +
    case toolCalls = "tool_calls"
    + +
    +
    +
    +
    +
  • +
+
+
+
+ +
+
+ + diff --git a/docs/docsets/.docset/Contents/Resources/Documents/Structs/ChatStreamResult/Choice/ChoiceLogprobs.html b/docs/docsets/.docset/Contents/Resources/Documents/Structs/ChatStreamResult/Choice/ChoiceLogprobs.html new file mode 100644 index 00000000..6d3adc29 --- /dev/null +++ b/docs/docsets/.docset/Contents/Resources/Documents/Structs/ChatStreamResult/Choice/ChoiceLogprobs.html @@ -0,0 +1,364 @@ + + + + ChoiceLogprobs Structure Reference + + + + + + + + + + + + + +
+
+

Docs (100% documented)

+
+
+ +
+
+
+
+
+ +
+
+ +
+
+
+

ChoiceLogprobs

+
+
+ +
public struct ChoiceLogprobs : Codable, Equatable
+ +
+
+ +
+
+
+
    +
  • +
    + + + + content + +
    +
    +
    +
    +
    +
    +

    A list of message content tokens with log probability information.

    + +
    +
    +

    Declaration

    +
    +

    Swift

    +
    public let content: [`Self`.ChatCompletionTokenLogprob]?
    + +
    +
    +
    +
    +
  • +
  • + +
    +
    +
    +
    +
    + + See more +
    +
    +

    Declaration

    +
    +

    Swift

    +
    public struct ChatCompletionTokenLogprob : Codable, Equatable
    + +
    +
    +
    +
    +
  • +
+
+
+
+ +
+
+ + diff --git a/docs/docsets/.docset/Contents/Resources/Documents/Structs/ChatStreamResult/Choice/ChoiceLogprobs/ChatCompletionTokenLogprob.html b/docs/docsets/.docset/Contents/Resources/Documents/Structs/ChatStreamResult/Choice/ChoiceLogprobs/ChatCompletionTokenLogprob.html new file mode 100644 index 00000000..87b531b2 --- /dev/null +++ b/docs/docsets/.docset/Contents/Resources/Documents/Structs/ChatStreamResult/Choice/ChoiceLogprobs/ChatCompletionTokenLogprob.html @@ -0,0 +1,474 @@ + + + + ChatCompletionTokenLogprob Structure Reference + + + + + + + + + + + + + +
+
+

Docs (100% documented)

+
+
+ +
+
+
+
+
+ +
+
+ +
+
+
+

ChatCompletionTokenLogprob

+
+
+ +
public struct ChatCompletionTokenLogprob : Codable, Equatable
+ +
+
+ +
+
+
+
    +
  • +
    + + + + token + +
    +
    +
    +
    +
    +
    +

    The token.

    + +
    +
    +

    Declaration

    +
    +

    Swift

    +
    public let token: String
    + +
    +
    +
    +
    +
  • +
  • +
    + + + + bytes + +
    +
    +
    +
    +
    +
    +

    A list of integers representing the UTF-8 bytes representation of the token. Useful in instances where characters are represented by multiple tokens and their byte representations must be combined to generate the correct text representation. Can be null if there is no bytes representation for the token.

    + +
    +
    +

    Declaration

    +
    +

    Swift

    +
    public let bytes: [Int]?
    + +
    +
    +
    +
    +
  • +
  • +
    + + + + logprob + +
    +
    +
    +
    +
    +
    +

    The log probability of this token.

    + +
    +
    +

    Declaration

    +
    +

    Swift

    +
    public let logprob: Double
    + +
    +
    +
    +
    +
  • +
  • +
    + + + + topLogprobs + +
    +
    +
    +
    +
    +
    +

    List of the most likely tokens and their log probability, at this token position. In rare cases, there may be fewer than the number of requested top_logprobs returned.

    + +
    +
    +

    Declaration

    +
    +

    Swift

    +
    public let topLogprobs: [`Self`.TopLogprob]?
    + +
    +
    +
    +
    +
  • +
  • +
    + + + + TopLogprob + +
    +
    +
    +
    +
    +
    + + See more +
    +
    +

    Declaration

    +
    +

    Swift

    +
    public struct TopLogprob : Codable, Equatable
    + +
    +
    +
    +
    +
  • +
  • +
    + + + + CodingKeys + +
    +
    +
    +
    +
    +
    + + See more +
    +
    +

    Declaration

    +
    +

    Swift

    +
    public enum CodingKeys : String, CodingKey
    + +
    +
    +
    +
    +
  • +
+
+
+
+ +
+
+ + diff --git a/docs/docsets/.docset/Contents/Resources/Documents/Structs/ChatStreamResult/Choice/ChoiceLogprobs/ChatCompletionTokenLogprob/CodingKeys.html b/docs/docsets/.docset/Contents/Resources/Documents/Structs/ChatStreamResult/Choice/ChoiceLogprobs/ChatCompletionTokenLogprob/CodingKeys.html new file mode 100644 index 00000000..8cd920fa --- /dev/null +++ b/docs/docsets/.docset/Contents/Resources/Documents/Structs/ChatStreamResult/Choice/ChoiceLogprobs/ChatCompletionTokenLogprob/CodingKeys.html @@ -0,0 +1,418 @@ + + + + CodingKeys Enumeration Reference + + + + + + + + + + + + + +
+
+

Docs (100% documented)

+
+
+ +
+
+
+
+
+ +
+
+ +
+
+
+

CodingKeys

+
+
+ +
public enum CodingKeys : String, CodingKey
+ +
+
+ +
+
+
+
    +
  • +
    + + + + token + +
    +
    +
    +
    +
    +
    + +
    +
    +

    Declaration

    +
    +

    Swift

    +
    case token
    + +
    +
    +
    +
    +
  • +
  • +
    + + + + bytes + +
    +
    +
    +
    +
    +
    + +
    +
    +

    Declaration

    +
    +

    Swift

    +
    case bytes
    + +
    +
    +
    +
    +
  • +
  • +
    + + + + logprob + +
    +
    +
    +
    +
    +
    + +
    +
    +

    Declaration

    +
    +

    Swift

    +
    case logprob
    + +
    +
    +
    +
    +
  • +
  • +
    + + + + topLogprobs + +
    +
    +
    +
    +
    +
    + +
    +
    +

    Declaration

    +
    +

    Swift

    +
    case topLogprobs = "top_logprobs"
    + +
    +
    +
    +
    +
  • +
+
+
+
+ +
+
+ + diff --git a/docs/docsets/.docset/Contents/Resources/Documents/Structs/ChatStreamResult/Choice/ChoiceLogprobs/ChatCompletionTokenLogprob/TopLogprob.html b/docs/docsets/.docset/Contents/Resources/Documents/Structs/ChatStreamResult/Choice/ChoiceLogprobs/ChatCompletionTokenLogprob/TopLogprob.html new file mode 100644 index 00000000..532a768e --- /dev/null +++ b/docs/docsets/.docset/Contents/Resources/Documents/Structs/ChatStreamResult/Choice/ChoiceLogprobs/ChatCompletionTokenLogprob/TopLogprob.html @@ -0,0 +1,395 @@ + + + + TopLogprob Structure Reference + + + + + + + + + + + + + +
+
+

Docs (100% documented)

+
+
+ +
+
+
+
+
+ +
+
+ +
+
+
+

TopLogprob

+
+
+ +
public struct TopLogprob : Codable, Equatable
+ +
+
+ +
+
+
+
    +
  • +
    + + + + token + +
    +
    +
    +
    +
    +
    +

    The token.

    + +
    +
    +

    Declaration

    +
    +

    Swift

    +
    public let token: String
    + +
    +
    +
    +
    +
  • +
  • +
    + + + + bytes + +
    +
    +
    +
    +
    +
    +

    A list of integers representing the UTF-8 bytes representation of the token. Useful in instances where characters are represented by multiple tokens and their byte representations must be combined to generate the correct text representation. Can be null if there is no bytes representation for the token.

    + +
    +
    +

    Declaration

    +
    +

    Swift

    +
    public let bytes: [Int]?
    + +
    +
    +
    +
    +
  • +
  • +
    + + + + logprob + +
    +
    +
    +
    +
    +
    +

    The log probability of this token.

    + +
    +
    +

    Declaration

    +
    +

    Swift

    +
    public let logprob: Double
    + +
    +
    +
    +
    +
  • +
+
+
+
+ +
+
+ + diff --git a/docs/docsets/.docset/Contents/Resources/Documents/Structs/ChatStreamResult/Choice/CodingKeys.html b/docs/docsets/.docset/Contents/Resources/Documents/Structs/ChatStreamResult/Choice/CodingKeys.html new file mode 100644 index 00000000..11ad4d0e --- /dev/null +++ b/docs/docsets/.docset/Contents/Resources/Documents/Structs/ChatStreamResult/Choice/CodingKeys.html @@ -0,0 +1,414 @@ + + + + CodingKeys Enumeration Reference + + + + + + + + + + + + + +
+
+

Docs (100% documented)

+
+
+ +
+
+
+
+
+ +
+
+ +
+
+
+

CodingKeys

+
+
+ +
public enum CodingKeys : String, CodingKey
+ +
+
+ +
+
+
+
    +
  • +
    + + + + index + +
    +
    +
    +
    +
    +
    + +
    +
    +

    Declaration

    +
    +

    Swift

    +
    case index
    + +
    +
    +
    +
    +
  • +
  • +
    + + + + delta + +
    +
    +
    +
    +
    +
    + +
    +
    +

    Declaration

    +
    +

    Swift

    +
    case delta
    + +
    +
    +
    +
    +
  • +
  • +
    + + + + finishReason + +
    +
    +
    +
    +
    +
    + +
    +
    +

    Declaration

    +
    +

    Swift

    +
    case finishReason = "finish_reason"
    + +
    +
    +
    +
    +
  • +
  • +
    + + + + logprobs + +
    +
    +
    +
    +
    +
    + +
    +
    +

    Declaration

    +
    +

    Swift

    +
    case logprobs
    + +
    +
    +
    +
    +
  • +
+
+
+
+ +
+
+ + diff --git a/docs/docsets/.docset/Contents/Resources/Documents/Structs/ChatStreamResult/CodingKeys.html b/docs/docsets/.docset/Contents/Resources/Documents/Structs/ChatStreamResult/CodingKeys.html new file mode 100644 index 00000000..6458b785 --- /dev/null +++ b/docs/docsets/.docset/Contents/Resources/Documents/Structs/ChatStreamResult/CodingKeys.html @@ -0,0 +1,464 @@ + + + + CodingKeys Enumeration Reference + + + + + + + + + + + + + +
+
+

Docs (100% documented)

+
+
+ +
+
+
+
+
+ +
+
+ +
+
+
+

CodingKeys

+
+
+ +
public enum CodingKeys : String, CodingKey
+ +
+
+ +
+
+
+
    +
  • +
    + + + + id + +
    +
    +
    +
    +
    +
    + +
    +
    +

    Declaration

    +
    +

    Swift

    +
    case id
    + +
    +
    +
    +
    +
  • +
  • +
    + + + + object + +
    +
    +
    +
    +
    +
    + +
    +
    +

    Declaration

    +
    +

    Swift

    +
    case object
    + +
    +
    +
    +
    +
  • +
  • +
    + + + + created + +
    +
    +
    +
    +
    +
    + +
    +
    +

    Declaration

    +
    +

    Swift

    +
    case created
    + +
    +
    +
    +
    +
  • +
  • +
    + + + + model + +
    +
    +
    +
    +
    +
    + +
    +
    +

    Declaration

    +
    +

    Swift

    +
    case model
    + +
    +
    +
    +
    +
  • +
  • +
    + + + + choices + +
    +
    +
    +
    +
    +
    + +
    +
    +

    Declaration

    +
    +

    Swift

    +
    case choices
    + +
    +
    +
    +
    +
  • +
  • +
    + + + + systemFingerprint + +
    +
    +
    +
    +
    +
    + +
    +
    +

    Declaration

    +
    +

    Swift

    +
    case systemFingerprint = "system_fingerprint"
    + +
    +
    +
    +
    +
  • +
+
+
+
+ +
+
+ + diff --git a/docs/docsets/.docset/Contents/Resources/Documents/Structs/CompletionsQuery.html b/docs/docsets/.docset/Contents/Resources/Documents/Structs/CompletionsQuery.html new file mode 100644 index 00000000..5b863fac --- /dev/null +++ b/docs/docsets/.docset/Contents/Resources/Documents/Structs/CompletionsQuery.html @@ -0,0 +1,575 @@ + + + + CompletionsQuery Structure Reference + + + + + + + + + + + + + +
+
+

Docs (100% documented)

+
+
+ +
+
+
+
+
+ +
+
+ +
+
+
+

CompletionsQuery

+
+
+ +
public struct CompletionsQuery : Codable, Streamable
+ +
+
+ +
+
+
+
    +
  • +
    + + + + model + +
    +
    +
    +
    +
    +
    +

    ID of the model to use.

    + +
    +
    +

    Declaration

    +
    +

    Swift

    +
    public let model: Model
    + +
    +
    +
    +
    +
  • +
  • +
    + + + + prompt + +
    +
    +
    +
    +
    +
    +

    The prompt(s) to generate completions for, encoded as a string, array of strings, array of tokens, or array of token arrays.

    + +
    +
    +

    Declaration

    +
    +

    Swift

    +
    public let prompt: String
    + +
    +
    +
    +
    +
  • +
  • +
    + + + + temperature + +
    +
    +
    +
    +
    +
    +

    What sampling temperature to use. Higher values means the model will take more risks. Try 0.9 for more creative applications, and 0 (argmax sampling) for ones with a well-defined answer.

    + +
    +
    +

    Declaration

    +
    +

    Swift

    +
    public let temperature: Double?
    + +
    +
    +
    +
    +
  • +
  • +
    + + + + maxTokens + +
    +
    +
    +
    +
    +
    +

    The maximum number of tokens to generate in the completion.

    + +
    +
    +

    Declaration

    +
    +

    Swift

    +
    public let maxTokens: Int?
    + +
    +
    +
    +
    +
  • +
  • +
    + + + + topP + +
    +
    +
    +
    +
    +
    +

    An alternative to sampling with temperature, called nucleus sampling, where the model considers the results of the tokens with top_p probability mass. So 0.1 means only the tokens comprising the top 10% probability mass are considered.

    + +
    +
    +

    Declaration

    +
    +

    Swift

    +
    public let topP: Double?
    + +
    +
    +
    +
    +
  • +
  • +
    + + + + frequencyPenalty + +
    +
    +
    +
    +
    +
    +

    Number between -2.0 and 2.0. Positive values penalize new tokens based on their existing frequency in the text so far, decreasing the model’s likelihood to repeat the same line verbatim.

    + +
    +
    +

    Declaration

    +
    +

    Swift

    +
    public let frequencyPenalty: Double?
    + +
    +
    +
    +
    +
  • +
  • +
    + + + + presencePenalty + +
    +
    +
    +
    +
    +
    +

    Number between -2.0 and 2.0. Positive values penalize new tokens based on whether they appear in the text so far, increasing the model’s likelihood to talk about new topics.

    + +
    +
    +

    Declaration

    +
    +

    Swift

    +
    public let presencePenalty: Double?
    + +
    +
    +
    +
    +
  • +
  • +
    + + + + stop + +
    +
    +
    +
    +
    +
    +

    Up to 4 sequences where the API will stop generating further tokens. The returned text will not contain the stop sequence.

    + +
    +
    +

    Declaration

    +
    +

    Swift

    +
    public let stop: [String]?
    + +
    +
    +
    +
    +
  • +
  • +
    + + + + user + +
    +
    +
    +
    +
    +
    +

    A unique identifier representing your end-user, which can help OpenAI to monitor and detect abuse.

    + +
    +
    +

    Declaration

    +
    +

    Swift

    +
    public let user: String?
    + +
    +
    +
    +
    +
  • +
  • + +
    +
    +
    +
    +
    + +
    +
    +

    Declaration

    +
    +

    Swift

    +
    public init(model: Model, prompt: String, temperature: Double? = nil, maxTokens: Int? = nil, topP: Double? = nil, frequencyPenalty: Double? = nil, presencePenalty: Double? = nil, stop: [String]? = nil, user: String? = nil)
    + +
    +
    +
    +
    +
  • +
+
+
+
+ +
+
+ + diff --git a/docs/docsets/.docset/Contents/Resources/Documents/Structs/CompletionsResult.html b/docs/docsets/.docset/Contents/Resources/Documents/Structs/CompletionsResult.html new file mode 100644 index 00000000..ac1aaf9e --- /dev/null +++ b/docs/docsets/.docset/Contents/Resources/Documents/Structs/CompletionsResult.html @@ -0,0 +1,516 @@ + + + + CompletionsResult Structure Reference + + + + + + + + + + + + + +
+
+

Docs (100% documented)

+
+
+ +
+
+
+
+
+ +
+
+ +
+
+
+

CompletionsResult

+
+
+ +
public struct CompletionsResult : Codable, Equatable
+ +
+
+ +
+
+
+
    +
  • +
    + + + + Usage + +
    +
    +
    +
    +
    +
    + + See more +
    +
    +

    Declaration

    +
    +

    Swift

    +
    public struct Usage : Codable, Equatable
    + +
    +
    +
    +
    +
  • +
  • +
    + + + + Choice + +
    +
    +
    +
    +
    +
    + + See more +
    +
    +

    Declaration

    +
    +

    Swift

    +
    public struct Choice : Codable, Equatable
    + +
    +
    +
    +
    +
  • +
  • +
    + + + + id + +
    +
    +
    +
    +
    +
    + +
    +
    +

    Declaration

    +
    +

    Swift

    +
    public let id: String
    + +
    +
    +
    +
    +
  • +
  • +
    + + + + object + +
    +
    +
    +
    +
    +
    + +
    +
    +

    Declaration

    +
    +

    Swift

    +
    public let object: String
    + +
    +
    +
    +
    +
  • +
  • +
    + + + + created + +
    +
    +
    +
    +
    +
    + +
    +
    +

    Declaration

    +
    +

    Swift

    +
    public let created: TimeInterval
    + +
    +
    +
    +
    +
  • +
  • +
    + + + + model + +
    +
    +
    +
    +
    +
    + +
    +
    +

    Declaration

    +
    +

    Swift

    +
    public let model: Model
    + +
    +
    +
    +
    +
  • +
  • +
    + + + + choices + +
    +
    +
    +
    +
    +
    + +
    +
    +

    Declaration

    +
    +

    Swift

    +
    public let choices: [Choice]
    + +
    +
    +
    +
    +
  • +
  • +
    + + + + usage + +
    +
    +
    +
    +
    +
    + +
    +
    +

    Declaration

    +
    +

    Swift

    +
    public let usage: Usage?
    + +
    +
    +
    +
    +
  • +
+
+
+
+ +
+
+ + diff --git a/docs/docsets/.docset/Contents/Resources/Documents/Structs/CompletionsResult/Choice.html b/docs/docsets/.docset/Contents/Resources/Documents/Structs/CompletionsResult/Choice.html new file mode 100644 index 00000000..afada57d --- /dev/null +++ b/docs/docsets/.docset/Contents/Resources/Documents/Structs/CompletionsResult/Choice.html @@ -0,0 +1,386 @@ + + + + Choice Structure Reference + + + + + + + + + + + + + +
+
+

Docs (100% documented)

+
+
+ +
+
+
+
+
+ +
+
+ +
+
+
+

Choice

+
+
+ +
public struct Choice : Codable, Equatable
+ +
+
+ +
+
+
+
    +
  • +
    + + + + text + +
    +
    +
    +
    +
    +
    + +
    +
    +

    Declaration

    +
    +

    Swift

    +
    public let text: String
    + +
    +
    +
    +
    +
  • +
  • +
    + + + + index + +
    +
    +
    +
    +
    +
    + +
    +
    +

    Declaration

    +
    +

    Swift

    +
    public let index: Int
    + +
    +
    +
    +
    +
  • +
  • +
    + + + + finishReason + +
    +
    +
    +
    +
    +
    + +
    +
    +

    Declaration

    +
    +

    Swift

    +
    public let finishReason: String?
    + +
    +
    +
    +
    +
  • +
+
+
+
+ +
+
+ + diff --git a/docs/docsets/.docset/Contents/Resources/Documents/Structs/CompletionsResult/Usage.html b/docs/docsets/.docset/Contents/Resources/Documents/Structs/CompletionsResult/Usage.html new file mode 100644 index 00000000..e83845d5 --- /dev/null +++ b/docs/docsets/.docset/Contents/Resources/Documents/Structs/CompletionsResult/Usage.html @@ -0,0 +1,386 @@ + + + + Usage Structure Reference + + + + + + + + + + + + + +
+
+

Docs (100% documented)

+
+
+ +
+
+
+
+
+ +
+
+ +
+
+
+

Usage

+
+
+ +
public struct Usage : Codable, Equatable
+ +
+
+ +
+
+
+
    +
  • +
    + + + + promptTokens + +
    +
    +
    +
    +
    +
    + +
    +
    +

    Declaration

    +
    +

    Swift

    +
    public let promptTokens: Int
    + +
    +
    +
    +
    +
  • +
  • +
    + + + + completionTokens + +
    +
    +
    +
    +
    +
    + +
    +
    +

    Declaration

    +
    +

    Swift

    +
    public let completionTokens: Int
    + +
    +
    +
    +
    +
  • +
  • +
    + + + + totalTokens + +
    +
    +
    +
    +
    +
    + +
    +
    +

    Declaration

    +
    +

    Swift

    +
    public let totalTokens: Int
    + +
    +
    +
    +
    +
  • +
+
+
+
+ +
+
+ + diff --git a/docs/docsets/.docset/Contents/Resources/Documents/Structs/EditsQuery.html b/docs/docsets/.docset/Contents/Resources/Documents/Structs/EditsQuery.html new file mode 100644 index 00000000..d6efa469 --- /dev/null +++ b/docs/docsets/.docset/Contents/Resources/Documents/Structs/EditsQuery.html @@ -0,0 +1,494 @@ + + + + EditsQuery Structure Reference + + + + + + + + + + + + + +
+
+

Docs (100% documented)

+
+
+ +
+
+
+
+
+ +
+
+ +
+
+
+

EditsQuery

+
+
+ +
public struct EditsQuery : Codable
+ +
+
+ +
+
+
+
    +
  • +
    + + + + model + +
    +
    +
    +
    +
    +
    +

    ID of the model to use.

    + +
    +
    +

    Declaration

    +
    +

    Swift

    +
    public let model: Model
    + +
    +
    +
    +
    +
  • +
  • +
    + + + + input + +
    +
    +
    +
    +
    +
    +

    Input text to get embeddings for.

    + +
    +
    +

    Declaration

    +
    +

    Swift

    +
    public let input: String?
    + +
    +
    +
    +
    +
  • +
  • +
    + + + + instruction + +
    +
    +
    +
    +
    +
    +

    The instruction that tells the model how to edit the prompt.

    + +
    +
    +

    Declaration

    +
    +

    Swift

    +
    public let instruction: String
    + +
    +
    +
    +
    +
  • +
  • +
    + + + + n + +
    +
    +
    +
    +
    +
    +

    The number of images to generate. Must be between 1 and 10.

    + +
    +
    +

    Declaration

    +
    +

    Swift

    +
    public let n: Int?
    + +
    +
    +
    +
    +
  • +
  • +
    + + + + temperature + +
    +
    +
    +
    +
    +
    +

    What sampling temperature to use. Higher values means the model will take more risks. Try 0.9 for more creative applications, and 0 (argmax sampling) for ones with a well-defined answer.

    + +
    +
    +

    Declaration

    +
    +

    Swift

    +
    public let temperature: Double?
    + +
    +
    +
    +
    +
  • +
  • +
    + + + + topP + +
    +
    +
    +
    +
    +
    +

    An alternative to sampling with temperature, called nucleus sampling, where the model considers the results of the tokens with top_p probability mass. So 0.1 means only the tokens comprising the top 10% probability mass are considered.

    + +
    +
    +

    Declaration

    +
    +

    Swift

    +
    public let topP: Double?
    + +
    +
    +
    +
    +
  • +
  • + +
    +
    +
    +
    +
    + +
    +
    +

    Declaration

    +
    +

    Swift

    +
    public init(model: Model, input: String?, instruction: String, n: Int? = nil, temperature: Double? = nil, topP: Double? = nil)
    + +
    +
    +
    +
    +
  • +
+
+
+
+ +
+
+ + diff --git a/docs/docsets/.docset/Contents/Resources/Documents/Structs/EditsResult.html b/docs/docsets/.docset/Contents/Resources/Documents/Structs/EditsResult.html new file mode 100644 index 00000000..9c4b1915 --- /dev/null +++ b/docs/docsets/.docset/Contents/Resources/Documents/Structs/EditsResult.html @@ -0,0 +1,464 @@ + + + + EditsResult Structure Reference + + + + + + + + + + + + + +
+
+

Docs (100% documented)

+
+
+ +
+
+
+
+
+ +
+
+ +
+
+
+

EditsResult

+
+
+ +
public struct EditsResult : Codable, Equatable
+ +
+
+ +
+
+
+
    +
  • +
    + + + + Choice + +
    +
    +
    +
    +
    +
    + + See more +
    +
    +

    Declaration

    +
    +

    Swift

    +
    public struct Choice : Codable, Equatable
    + +
    +
    +
    +
    +
  • +
  • +
    + + + + Usage + +
    +
    +
    +
    +
    +
    + + See more +
    +
    +

    Declaration

    +
    +

    Swift

    +
    public struct Usage : Codable, Equatable
    + +
    +
    +
    +
    +
  • +
  • +
    + + + + object + +
    +
    +
    +
    +
    +
    + +
    +
    +

    Declaration

    +
    +

    Swift

    +
    public let object: String
    + +
    +
    +
    +
    +
  • +
  • +
    + + + + created + +
    +
    +
    +
    +
    +
    + +
    +
    +

    Declaration

    +
    +

    Swift

    +
    public let created: TimeInterval
    + +
    +
    +
    +
    +
  • +
  • +
    + + + + choices + +
    +
    +
    +
    +
    +
    + +
    +
    +

    Declaration

    +
    +

    Swift

    +
    public let choices: [Choice]
    + +
    +
    +
    +
    +
  • +
  • +
    + + + + usage + +
    +
    +
    +
    +
    +
    + +
    +
    +

    Declaration

    +
    +

    Swift

    +
    public let usage: Usage
    + +
    +
    +
    +
    +
  • +
+
+
+
+ +
+
+ + diff --git a/docs/docsets/.docset/Contents/Resources/Documents/Structs/EditsResult/Choice.html b/docs/docsets/.docset/Contents/Resources/Documents/Structs/EditsResult/Choice.html new file mode 100644 index 00000000..0cfccc44 --- /dev/null +++ b/docs/docsets/.docset/Contents/Resources/Documents/Structs/EditsResult/Choice.html @@ -0,0 +1,360 @@ + + + + Choice Structure Reference + + + + + + + + + + + + + +
+
+

Docs (100% documented)

+
+
+ +
+
+
+
+
+ +
+
+ +
+
+
+

Choice

+
+
+ +
public struct Choice : Codable, Equatable
+ +
+
+ +
+
+
+
    +
  • +
    + + + + text + +
    +
    +
    +
    +
    +
    + +
    +
    +

    Declaration

    +
    +

    Swift

    +
    public let text: String
    + +
    +
    +
    +
    +
  • +
  • +
    + + + + index + +
    +
    +
    +
    +
    +
    + +
    +
    +

    Declaration

    +
    +

    Swift

    +
    public let index: Int
    + +
    +
    +
    +
    +
  • +
+
+
+
+ +
+
+ + diff --git a/docs/docsets/.docset/Contents/Resources/Documents/Structs/EditsResult/Usage.html b/docs/docsets/.docset/Contents/Resources/Documents/Structs/EditsResult/Usage.html new file mode 100644 index 00000000..1e848023 --- /dev/null +++ b/docs/docsets/.docset/Contents/Resources/Documents/Structs/EditsResult/Usage.html @@ -0,0 +1,386 @@ + + + + Usage Structure Reference + + + + + + + + + + + + + +
+
+

Docs (100% documented)

+
+
+ +
+
+
+
+
+ +
+
+ +
+
+
+

Usage

+
+
+ +
public struct Usage : Codable, Equatable
+ +
+
+ +
+
+
+
    +
  • +
    + + + + promptTokens + +
    +
    +
    +
    +
    +
    + +
    +
    +

    Declaration

    +
    +

    Swift

    +
    public let promptTokens: Int
    + +
    +
    +
    +
    +
  • +
  • +
    + + + + completionTokens + +
    +
    +
    +
    +
    +
    + +
    +
    +

    Declaration

    +
    +

    Swift

    +
    public let completionTokens: Int
    + +
    +
    +
    +
    +
  • +
  • +
    + + + + totalTokens + +
    +
    +
    +
    +
    +
    + +
    +
    +

    Declaration

    +
    +

    Swift

    +
    public let totalTokens: Int
    + +
    +
    +
    +
    +
  • +
+
+
+
+ +
+
+ + diff --git a/docs/docsets/.docset/Contents/Resources/Documents/Structs/EmbeddingsQuery.html b/docs/docsets/.docset/Contents/Resources/Documents/Structs/EmbeddingsQuery.html new file mode 100644 index 00000000..fb78cd32 --- /dev/null +++ b/docs/docsets/.docset/Contents/Resources/Documents/Structs/EmbeddingsQuery.html @@ -0,0 +1,530 @@ + + + + EmbeddingsQuery Structure Reference + + + + + + + + + + + + + +
+
+

Docs (100% documented)

+
+
+ +
+
+
+
+
+ +
+
+ +
+
+
+

EmbeddingsQuery

+
+
+ +
public struct EmbeddingsQuery : Codable
+ +
+
+ +
+
+
+
    +
  • +
    + + + + input + +
    +
    +
    +
    +
    +
    +

    Input text to embed, encoded as a string or array of tokens. To embed multiple inputs in a single request, pass an array of strings or array of token arrays. The input must not exceed the max input tokens for the model (8192 tokens for text-embedding-ada-002), cannot be an empty string, and any array must be 2048 dimensions or less.

    + +
    +
    +

    Declaration

    +
    +

    Swift

    +
    public let input: `Self`.Input
    + +
    +
    +
    +
    +
  • +
  • +
    + + + + model + +
    +
    +
    +
    +
    +
    +

    ID of the model to use. You can use the List models API to see all of your available models, or see our Model overview for descriptions of them. +https://platform.openai.com/docs/api-reference/models/list +https://platform.openai.com/docs/models/overview

    + +
    +
    +

    Declaration

    +
    +

    Swift

    +
    public let model: Model
    + +
    +
    +
    +
    +
  • +
  • +
    + + + + encodingFormat + +
    +
    +
    +
    +
    +
    +

    The format to return the embeddings in. Can be either float or base64. +https://pypi.org/project/pybase64/

    + +
    +
    +

    Declaration

    +
    +

    Swift

    +
    public let encodingFormat: `Self`.EncodingFormat?
    + +
    +
    +
    +
    +
  • +
  • +
    + + + + user + +
    +
    +
    +
    +
    +
    +

    A unique identifier representing your end-user, which can help OpenAI to monitor and detect abuse. +https://platform.openai.com/docs/guides/safety-best-practices/end-user-ids

    + +
    +
    +

    Declaration

    +
    +

    Swift

    +
    public let user: String?
    + +
    +
    +
    +
    +
  • +
  • + +
    +
    +
    +
    +
    + +
    +
    +

    Declaration

    +
    +

    Swift

    +
    public init(
    +    input: Self.Input,
    +    model: Model,
    +    encodingFormat: Self.EncodingFormat? = nil,
    +    user: String? = nil
    +)
    + +
    +
    +
    +
    +
  • +
  • +
    + + + + Input + +
    +
    +
    +
    +
    +
    + + See more +
    +
    +

    Declaration

    +
    +

    Swift

    +
    public enum Input : Codable, Equatable
    + +
    +
    +
    +
    +
  • +
  • +
    + + + + EncodingFormat + +
    +
    +
    +
    +
    +
    + + See more +
    +
    +

    Declaration

    +
    +

    Swift

    +
    public enum EncodingFormat : String, Codable
    + +
    +
    +
    +
    +
  • +
  • +
    + + + + CodingKeys + +
    +
    +
    +
    +
    +
    + + See more +
    +
    +

    Declaration

    +
    +

    Swift

    +
    public enum CodingKeys : String, CodingKey
    + +
    +
    +
    +
    +
  • +
+
+
+
+ +
+
+ + diff --git a/docs/docsets/.docset/Contents/Resources/Documents/Structs/EmbeddingsQuery/CodingKeys.html b/docs/docsets/.docset/Contents/Resources/Documents/Structs/EmbeddingsQuery/CodingKeys.html new file mode 100644 index 00000000..994c7dcf --- /dev/null +++ b/docs/docsets/.docset/Contents/Resources/Documents/Structs/EmbeddingsQuery/CodingKeys.html @@ -0,0 +1,412 @@ + + + + CodingKeys Enumeration Reference + + + + + + + + + + + + + +
+
+

Docs (100% documented)

+
+
+ +
+
+
+
+
+ +
+
+ +
+
+
+

CodingKeys

+
+
+ +
public enum CodingKeys : String, CodingKey
+ +
+
+ +
+
+
+
    +
  • +
    + + + + input + +
    +
    +
    +
    +
    +
    + +
    +
    +

    Declaration

    +
    +

    Swift

    +
    case input
    + +
    +
    +
    +
    +
  • +
  • +
    + + + + model + +
    +
    +
    +
    +
    +
    + +
    +
    +

    Declaration

    +
    +

    Swift

    +
    case model
    + +
    +
    +
    +
    +
  • +
  • +
    + + + + encodingFormat + +
    +
    +
    +
    +
    +
    + +
    +
    +

    Declaration

    +
    +

    Swift

    +
    case encodingFormat = "encoding_format"
    + +
    +
    +
    +
    +
  • +
  • +
    + + + + user + +
    +
    +
    +
    +
    +
    + +
    +
    +

    Declaration

    +
    +

    Swift

    +
    case user
    + +
    +
    +
    +
    +
  • +
+
+
+
+ +
+
+ + diff --git a/docs/docsets/.docset/Contents/Resources/Documents/Structs/EmbeddingsQuery/EncodingFormat.html b/docs/docsets/.docset/Contents/Resources/Documents/Structs/EmbeddingsQuery/EncodingFormat.html new file mode 100644 index 00000000..d1d98399 --- /dev/null +++ b/docs/docsets/.docset/Contents/Resources/Documents/Structs/EmbeddingsQuery/EncodingFormat.html @@ -0,0 +1,360 @@ + + + + EncodingFormat Enumeration Reference + + + + + + + + + + + + + +
+
+

Docs (100% documented)

+
+
+ +
+
+
+
+
+ +
+
+ +
+
+
+

EncodingFormat

+
+
+ +
public enum EncodingFormat : String, Codable
+ +
+
+ +
+
+
+
    +
  • +
    + + + + float + +
    +
    +
    +
    +
    +
    + +
    +
    +

    Declaration

    +
    +

    Swift

    +
    case float
    + +
    +
    +
    +
    +
  • +
  • +
    + + + + base64 + +
    +
    +
    +
    +
    +
    + +
    +
    +

    Declaration

    +
    +

    Swift

    +
    case base64
    + +
    +
    +
    +
    +
  • +
+
+
+
+ +
+
+ + diff --git a/docs/docsets/.docset/Contents/Resources/Documents/Structs/EmbeddingsQuery/Input.html b/docs/docsets/.docset/Contents/Resources/Documents/Structs/EmbeddingsQuery/Input.html new file mode 100644 index 00000000..ff7e775a --- /dev/null +++ b/docs/docsets/.docset/Contents/Resources/Documents/Structs/EmbeddingsQuery/Input.html @@ -0,0 +1,542 @@ + + + + Input Enumeration Reference + + + + + + + + + + + + + +
+
+

Docs (100% documented)

+
+
+ +
+
+
+
+
+ +
+
+ +
+
+
+

Input

+
+
+ +
public enum Input : Codable, Equatable
+ +
+
+ +
+
+
+
    +
  • +
    + + + + string(_:) + +
    +
    +
    +
    +
    +
    + +
    +
    +

    Declaration

    +
    +

    Swift

    +
    case string(String)
    + +
    +
    +
    +
    +
  • +
  • +
    + + + + stringList(_:) + +
    +
    +
    +
    +
    +
    + +
    +
    +

    Declaration

    +
    +

    Swift

    +
    case stringList([String])
    + +
    +
    +
    +
    +
  • +
  • +
    + + + + intList(_:) + +
    +
    +
    +
    +
    +
    + +
    +
    +

    Declaration

    +
    +

    Swift

    +
    case intList([Int])
    + +
    +
    +
    +
    +
  • +
  • +
    + + + + intMatrix(_:) + +
    +
    +
    +
    +
    +
    + +
    +
    +

    Declaration

    +
    +

    Swift

    +
    case intMatrix([[Int]])
    + +
    +
    +
    +
    +
  • +
  • +
    + + + + encode(to:) + +
    +
    +
    +
    +
    +
    + +
    +
    +

    Declaration

    +
    +

    Swift

    +
    public func encode(to encoder: Encoder) throws
    + +
    +
    +
    +
    +
  • +
  • +
    + + + + init(string:) + +
    +
    +
    +
    +
    +
    + +
    +
    +

    Declaration

    +
    +

    Swift

    +
    public init(string: String)
    + +
    +
    +
    +
    +
  • +
  • +
    + + + + init(stringList:) + +
    +
    +
    +
    +
    +
    + +
    +
    +

    Declaration

    +
    +

    Swift

    +
    public init(stringList: [String])
    + +
    +
    +
    +
    +
  • +
  • +
    + + + + init(intList:) + +
    +
    +
    +
    +
    +
    + +
    +
    +

    Declaration

    +
    +

    Swift

    +
    public init(intList: [Int])
    + +
    +
    +
    +
    +
  • +
  • +
    + + + + init(intMatrix:) + +
    +
    +
    +
    +
    +
    + +
    +
    +

    Declaration

    +
    +

    Swift

    +
    public init(intMatrix: [[Int]])
    + +
    +
    +
    +
    +
  • +
+
+
+
+ +
+
+ + diff --git a/docs/docsets/.docset/Contents/Resources/Documents/Structs/EmbeddingsResult.html b/docs/docsets/.docset/Contents/Resources/Documents/Structs/EmbeddingsResult.html new file mode 100644 index 00000000..d8251f10 --- /dev/null +++ b/docs/docsets/.docset/Contents/Resources/Documents/Structs/EmbeddingsResult.html @@ -0,0 +1,465 @@ + + + + EmbeddingsResult Structure Reference + + + + + + + + + + + + + +
+
+

Docs (100% documented)

+
+
+ +
+
+
+
+
+ +
+
+ +
+
+
+

EmbeddingsResult

+
+
+ +
public struct EmbeddingsResult : Codable, Equatable
+ +
+
+ +
+
+
+
    +
  • +
    + + + + Embedding + +
    +
    +
    +
    +
    +
    + + See more +
    +
    +

    Declaration

    +
    +

    Swift

    +
    public struct Embedding : Codable, Equatable
    + +
    +
    +
    +
    +
  • +
  • +
    + + + + Usage + +
    +
    +
    +
    +
    +
    + + See more +
    +
    +

    Declaration

    +
    +

    Swift

    +
    public struct Usage : Codable, Equatable
    + +
    +
    +
    +
    +
  • +
  • +
    + + + + data + +
    +
    +
    +
    +
    +
    + +
    +
    +

    Declaration

    +
    +

    Swift

    +
    public let data: [Embedding]
    + +
    +
    +
    +
    +
  • +
  • +
    + + + + model + +
    +
    +
    +
    +
    +
    + +
    +
    +

    Declaration

    +
    +

    Swift

    +
    public let model: String
    + +
    +
    +
    +
    +
  • +
  • +
    + + + + usage + +
    +
    +
    +
    +
    +
    + +
    +
    +

    Declaration

    +
    +

    Swift

    +
    public let usage: Usage
    + +
    +
    +
    +
    +
  • +
  • +
    + + + + object + +
    +
    +
    +
    +
    +
    +

    The object type, which is always “list”.

    + +
    +
    +

    Declaration

    +
    +

    Swift

    +
    public let object: String
    + +
    +
    +
    +
    +
  • +
+
+
+
+ +
+
+ + diff --git a/docs/docsets/.docset/Contents/Resources/Documents/Structs/EmbeddingsResult/Embedding.html b/docs/docsets/.docset/Contents/Resources/Documents/Structs/EmbeddingsResult/Embedding.html new file mode 100644 index 00000000..71f10564 --- /dev/null +++ b/docs/docsets/.docset/Contents/Resources/Documents/Structs/EmbeddingsResult/Embedding.html @@ -0,0 +1,390 @@ + + + + Embedding Structure Reference + + + + + + + + + + + + + +
+
+

Docs (100% documented)

+
+
+ +
+
+
+
+
+ +
+
+ +
+
+
+

Embedding

+
+
+ +
public struct Embedding : Codable, Equatable
+ +
+
+ +
+
+
+
    +
  • +
    + + + + object + +
    +
    +
    +
    +
    +
    +

    The object type, which is always “embedding”.

    + +
    +
    +

    Declaration

    +
    +

    Swift

    +
    public let object: String
    + +
    +
    +
    +
    +
  • +
  • +
    + + + + embedding + +
    +
    +
    +
    +
    +
    +

    The embedding vector, which is a list of floats. The length of vector depends on the model as listed in the embedding guide. +https://platform.openai.com/docs/guides/embeddings

    + +
    +
    +

    Declaration

    +
    +

    Swift

    +
    public let embedding: [Double]
    + +
    +
    +
    +
    +
  • +
  • +
    + + + + index + +
    +
    +
    +
    +
    +
    +

    The index of the embedding in the list of embeddings.

    + +
    +
    +

    Declaration

    +
    +

    Swift

    +
    public let index: Int
    + +
    +
    +
    +
    +
  • +
+
+
+
+ +
+
+ + diff --git a/docs/docsets/.docset/Contents/Resources/Documents/Structs/EmbeddingsResult/Usage.html b/docs/docsets/.docset/Contents/Resources/Documents/Structs/EmbeddingsResult/Usage.html new file mode 100644 index 00000000..ac8f6faf --- /dev/null +++ b/docs/docsets/.docset/Contents/Resources/Documents/Structs/EmbeddingsResult/Usage.html @@ -0,0 +1,360 @@ + + + + Usage Structure Reference + + + + + + + + + + + + + +
+
+

Docs (100% documented)

+
+
+ +
+
+
+
+
+ +
+
+ +
+
+
+

Usage

+
+
+ +
public struct Usage : Codable, Equatable
+ +
+
+ +
+
+
+
    +
  • +
    + + + + promptTokens + +
    +
    +
    +
    +
    +
    + +
    +
    +

    Declaration

    +
    +

    Swift

    +
    public let promptTokens: Int
    + +
    +
    +
    +
    +
  • +
  • +
    + + + + totalTokens + +
    +
    +
    +
    +
    +
    + +
    +
    +

    Declaration

    +
    +

    Swift

    +
    public let totalTokens: Int
    + +
    +
    +
    +
    +
  • +
+
+
+
+ +
+
+ + diff --git a/docs/docsets/.docset/Contents/Resources/Documents/Structs/ImageEditsQuery.html b/docs/docsets/.docset/Contents/Resources/Documents/Structs/ImageEditsQuery.html new file mode 100644 index 00000000..b4f190f8 --- /dev/null +++ b/docs/docsets/.docset/Contents/Resources/Documents/Structs/ImageEditsQuery.html @@ -0,0 +1,639 @@ + + + + ImageEditsQuery Structure Reference + + + + + + + + + + + + + +
+
+

Docs (100% documented)

+
+
+ +
+
+
+
+
+ +
+
+ +
+
+
+

ImageEditsQuery

+
+
+ +
public struct ImageEditsQuery : Codable
+ +
+
+ +
+
+
+
    +
  • +
    + + + + ResponseFormat + +
    +
    +
    +
    +
    +
    + +
    +
    +

    Declaration

    +
    +

    Swift

    +
    public typealias ResponseFormat = ImagesQuery.ResponseFormat
    + +
    +
    +
    +
    +
  • +
  • +
    + + + + Size + +
    +
    +
    +
    +
    +
    + +
    +
    +

    Declaration

    +
    +

    Swift

    +
    public typealias Size = ImagesQuery.Size
    + +
    +
    +
    +
    +
  • +
  • +
    + + + + image + +
    +
    +
    +
    +
    +
    +

    The image to edit. Must be a valid PNG file, less than 4MB, and square. If mask is not provided, image must have transparency, which will be used as the mask.

    + +
    +
    +

    Declaration

    +
    +

    Swift

    +
    public let image: Data
    + +
    +
    +
    +
    +
  • +
  • +
    + + + + mask + +
    +
    +
    +
    +
    +
    +

    An additional image whose fully transparent areas (e.g. where alpha is zero) indicate where image should be edited. Must be a valid PNG file, less than 4MB, and have the same dimensions as image.

    + +
    +
    +

    Declaration

    +
    +

    Swift

    +
    public let mask: Data?
    + +
    +
    +
    +
    +
  • +
  • +
    + + + + prompt + +
    +
    +
    +
    +
    +
    +

    A text description of the desired image(s). The maximum length is 1000 characters.

    + +
    +
    +

    Declaration

    +
    +

    Swift

    +
    public let prompt: String
    + +
    +
    +
    +
    +
  • +
  • +
    + + + + model + +
    +
    +
    +
    +
    +
    +

    The model to use for image generation. +Defaults to dall-e-2

    + +
    +
    +

    Declaration

    +
    +

    Swift

    +
    public let model: Model?
    + +
    +
    +
    +
    +
  • +
  • +
    + + + + n + +
    +
    +
    +
    +
    +
    +

    The number of images to generate. Must be between 1 and 10.

    + +
    +
    +

    Declaration

    +
    +

    Swift

    +
    public let n: Int?
    + +
    +
    +
    +
    +
  • +
  • +
    + + + + responseFormat + +
    +
    +
    +
    +
    +
    +

    The format in which the generated images are returned. Must be one of url or b64_json. +Defaults to url

    + +
    +
    +

    Declaration

    +
    +

    Swift

    +
    public let responseFormat: `Self`.ResponseFormat?
    + +
    +
    +
    +
    +
  • +
  • +
    + + + + size + +
    +
    +
    +
    +
    +
    +

    The size of the generated images. Must be one of 256x256, 512x512, or 1024x1024.

    + +
    +
    +

    Declaration

    +
    +

    Swift

    +
    public let size: Size?
    + +
    +
    +
    +
    +
  • +
  • +
    + + + + user + +
    +
    +
    +
    +
    +
    +

    A unique identifier representing your end-user, which can help OpenAI to monitor and detect abuse. +https://platform.openai.com/docs/guides/safety-best-practices/end-user-ids

    + +
    +
    +

    Declaration

    +
    +

    Swift

    +
    public let user: String?
    + +
    +
    +
    +
    +
  • +
  • + +
    +
    +
    +
    +
    + +
    +
    +

    Declaration

    +
    +

    Swift

    +
    public init(
    +    image: Data,
    +    prompt: String,
    +    mask: Data? = nil,
    +    model: Model? = nil,
    +    n: Int? = nil,
    +    responseFormat: Self.ResponseFormat? = nil,
    +    size: Self.Size? = nil,
    +    user: String? = nil
    +)
    + +
    +
    +
    +
    +
  • +
  • +
    + + + + CodingKeys + +
    +
    +
    +
    +
    +
    + + See more +
    +
    +

    Declaration

    +
    +

    Swift

    +
    public enum CodingKeys : String, CodingKey
    + +
    +
    +
    +
    +
  • +
+
+
+
+ +
+
+ + diff --git a/docs/docsets/.docset/Contents/Resources/Documents/Structs/ImageEditsQuery/CodingKeys.html b/docs/docsets/.docset/Contents/Resources/Documents/Structs/ImageEditsQuery/CodingKeys.html new file mode 100644 index 00000000..003f87f3 --- /dev/null +++ b/docs/docsets/.docset/Contents/Resources/Documents/Structs/ImageEditsQuery/CodingKeys.html @@ -0,0 +1,516 @@ + + + + CodingKeys Enumeration Reference + + + + + + + + + + + + + +
+
+

Docs (100% documented)

+
+
+ +
+
+
+
+
+ +
+
+ +
+
+
+

CodingKeys

+
+
+ +
public enum CodingKeys : String, CodingKey
+ +
+
+ +
+
+
+
    +
  • +
    + + + + image + +
    +
    +
    +
    +
    +
    + +
    +
    +

    Declaration

    +
    +

    Swift

    +
    case image
    + +
    +
    +
    +
    +
  • +
  • +
    + + + + mask + +
    +
    +
    +
    +
    +
    + +
    +
    +

    Declaration

    +
    +

    Swift

    +
    case mask
    + +
    +
    +
    +
    +
  • +
  • +
    + + + + prompt + +
    +
    +
    +
    +
    +
    + +
    +
    +

    Declaration

    +
    +

    Swift

    +
    case prompt
    + +
    +
    +
    +
    +
  • +
  • +
    + + + + model + +
    +
    +
    +
    +
    +
    + +
    +
    +

    Declaration

    +
    +

    Swift

    +
    case model
    + +
    +
    +
    +
    +
  • +
  • +
    + + + + n + +
    +
    +
    +
    +
    +
    + +
    +
    +

    Declaration

    +
    +

    Swift

    +
    case n
    + +
    +
    +
    +
    +
  • +
  • +
    + + + + responseFormat + +
    +
    +
    +
    +
    +
    + +
    +
    +

    Declaration

    +
    +

    Swift

    +
    case responseFormat = "response_format"
    + +
    +
    +
    +
    +
  • +
  • +
    + + + + size + +
    +
    +
    +
    +
    +
    + +
    +
    +

    Declaration

    +
    +

    Swift

    +
    case size
    + +
    +
    +
    +
    +
  • +
  • +
    + + + + user + +
    +
    +
    +
    +
    +
    + +
    +
    +

    Declaration

    +
    +

    Swift

    +
    case user
    + +
    +
    +
    +
    +
  • +
+
+
+
+ +
+
+ + diff --git a/docs/docsets/.docset/Contents/Resources/Documents/Structs/ImageVariationsQuery.html b/docs/docsets/.docset/Contents/Resources/Documents/Structs/ImageVariationsQuery.html new file mode 100644 index 00000000..a2e95236 --- /dev/null +++ b/docs/docsets/.docset/Contents/Resources/Documents/Structs/ImageVariationsQuery.html @@ -0,0 +1,559 @@ + + + + ImageVariationsQuery Structure Reference + + + + + + + + + + + + + +
+
+

Docs (100% documented)

+
+
+ +
+
+
+
+
+ +
+
+ +
+
+
+

ImageVariationsQuery

+
+
+ +
public struct ImageVariationsQuery : Codable
+ +
+
+ +
+
+
+
    +
  • +
    + + + + ResponseFormat + +
    +
    +
    +
    +
    +
    + +
    +
    +

    Declaration

    +
    +

    Swift

    +
    public typealias ResponseFormat = ImagesQuery.ResponseFormat
    + +
    +
    +
    +
    +
  • +
  • +
    + + + + image + +
    +
    +
    +
    +
    +
    +

    The image to edit. Must be a valid PNG file, less than 4MB, and square.

    + +
    +
    +

    Declaration

    +
    +

    Swift

    +
    public let image: Data
    + +
    +
    +
    +
    +
  • +
  • +
    + + + + model + +
    +
    +
    +
    +
    +
    +

    The model to use for image generation. Only dall-e-2 is supported at this time. +Defaults to dall-e-2

    + +
    +
    +

    Declaration

    +
    +

    Swift

    +
    public let model: Model?
    + +
    +
    +
    +
    +
  • +
  • +
    + + + + n + +
    +
    +
    +
    +
    +
    +

    The number of images to generate. Must be between 1 and 10. +Defaults to 1

    + +
    +
    +

    Declaration

    +
    +

    Swift

    +
    public let n: Int?
    + +
    +
    +
    +
    +
  • +
  • +
    + + + + responseFormat + +
    +
    +
    +
    +
    +
    +

    The format in which the generated images are returned. Must be one of url or b64_json. +Defaults to url

    + +
    +
    +

    Declaration

    +
    +

    Swift

    +
    public let responseFormat: `Self`.ResponseFormat?
    + +
    +
    +
    +
    +
  • +
  • +
    + + + + size + +
    +
    +
    +
    +
    +
    +

    The size of the generated images. Must be one of 256x256, 512x512, or 1024x1024. +Defaults to 1024x1024

    + +
    +
    +

    Declaration

    +
    +

    Swift

    +
    public let size: String?
    + +
    +
    +
    +
    +
  • +
  • +
    + + + + user + +
    +
    +
    +
    +
    +
    +

    A unique identifier representing your end-user, which can help OpenAI to monitor and detect abuse. +https://platform.openai.com/docs/guides/safety-best-practices/end-user-ids

    + +
    +
    +

    Declaration

    +
    +

    Swift

    +
    public let user: String?
    + +
    +
    +
    +
    +
  • +
  • + +
    +
    +
    +
    +
    + +
    +
    +

    Declaration

    +
    +

    Swift

    +
    public init(
    +    image: Data,
    +    model: Model? = nil,
    +    n: Int? = nil,
    +    responseFormat: Self.ResponseFormat? = nil,
    +    size: String? = nil,
    +    user: String? = nil
    +)
    + +
    +
    +
    +
    +
  • +
  • +
    + + + + CodingKeys + +
    +
    +
    +
    +
    +
    + + See more +
    +
    +

    Declaration

    +
    +

    Swift

    +
    public enum CodingKeys : String, CodingKey
    + +
    +
    +
    +
    +
  • +
+
+
+
+ +
+
+ + diff --git a/docs/docsets/.docset/Contents/Resources/Documents/Structs/ImageVariationsQuery/CodingKeys.html b/docs/docsets/.docset/Contents/Resources/Documents/Structs/ImageVariationsQuery/CodingKeys.html new file mode 100644 index 00000000..d3c555a5 --- /dev/null +++ b/docs/docsets/.docset/Contents/Resources/Documents/Structs/ImageVariationsQuery/CodingKeys.html @@ -0,0 +1,464 @@ + + + + CodingKeys Enumeration Reference + + + + + + + + + + + + + +
+
+

Docs (100% documented)

+
+
+ +
+
+
+
+
+ +
+
+ +
+
+
+

CodingKeys

+
+
+ +
public enum CodingKeys : String, CodingKey
+ +
+
+ +
+
+
+
    +
  • +
    + + + + image + +
    +
    +
    +
    +
    +
    + +
    +
    +

    Declaration

    +
    +

    Swift

    +
    case image
    + +
    +
    +
    +
    +
  • +
  • +
    + + + + model + +
    +
    +
    +
    +
    +
    + +
    +
    +

    Declaration

    +
    +

    Swift

    +
    case model
    + +
    +
    +
    +
    +
  • +
  • +
    + + + + n + +
    +
    +
    +
    +
    +
    + +
    +
    +

    Declaration

    +
    +

    Swift

    +
    case n
    + +
    +
    +
    +
    +
  • +
  • +
    + + + + responseFormat + +
    +
    +
    +
    +
    +
    + +
    +
    +

    Declaration

    +
    +

    Swift

    +
    case responseFormat = "response_format"
    + +
    +
    +
    +
    +
  • +
  • +
    + + + + size + +
    +
    +
    +
    +
    +
    + +
    +
    +

    Declaration

    +
    +

    Swift

    +
    case size
    + +
    +
    +
    +
    +
  • +
  • +
    + + + + user + +
    +
    +
    +
    +
    +
    + +
    +
    +

    Declaration

    +
    +

    Swift

    +
    case user
    + +
    +
    +
    +
    +
  • +
+
+
+
+ +
+
+ + diff --git a/docs/docsets/.docset/Contents/Resources/Documents/Structs/ImagesQuery.html b/docs/docsets/.docset/Contents/Resources/Documents/Structs/ImagesQuery.html new file mode 100644 index 00000000..4812d094 --- /dev/null +++ b/docs/docsets/.docset/Contents/Resources/Documents/Structs/ImagesQuery.html @@ -0,0 +1,701 @@ + + + + ImagesQuery Structure Reference + + + + + + + + + + + + + +
+
+

Docs (100% documented)

+
+
+ +
+
+
+
+
+ +
+
+ +
+
+
+

ImagesQuery

+
+
+ +
public struct ImagesQuery : Codable
+ +
+
+

Given a prompt and/or an input image, the model will generate a new image. +https://platform.openai.com/docs/guides/images

+ +
+
+
+
    +
  • +
    + + + + ResponseFormat + +
    +
    +
    +
    +
    +
    + + See more +
    +
    +

    Declaration

    +
    +

    Swift

    +
    public enum ResponseFormat : String, Codable, Equatable
    + +
    +
    +
    +
    +
  • +
  • +
    + + + + prompt + +
    +
    +
    +
    +
    +
    +

    A text description of the desired image(s). The maximum length is 1000 characters for dall-e-2 and 4000 characters for dall-e-3.

    + +
    +
    +

    Declaration

    +
    +

    Swift

    +
    public let prompt: String
    + +
    +
    +
    +
    +
  • +
  • +
    + + + + model + +
    +
    +
    +
    +
    +
    +

    The model to use for image generation. +Defaults to dall-e-2

    + +
    +
    +

    Declaration

    +
    +

    Swift

    +
    public let model: Model?
    + +
    +
    +
    +
    +
  • +
  • +
    + + + + responseFormat + +
    +
    +
    +
    +
    +
    +

    The format in which the generated images are returned. Must be one of url or b64_json. +Defaults to url

    + +
    +
    +

    Declaration

    +
    +

    Swift

    +
    public let responseFormat: `Self`.ResponseFormat?
    + +
    +
    +
    +
    +
  • +
  • +
    + + + + n + +
    +
    +
    +
    +
    +
    +

    The number of images to generate. Must be between 1 and 10. For dall-e-3, only n=1 is supported. +Defaults to 1

    + +
    +
    +

    Declaration

    +
    +

    Swift

    +
    public let n: Int?
    + +
    +
    +
    +
    +
  • +
  • +
    + + + + size + +
    +
    +
    +
    +
    +
    +

    The size of the generated images. Must be one of 256x256, 512x512, or 1024x1024 for dall-e-2. Must be one of 1024x1024, 1792x1024, or 1024x1792 for dall-e-3 models. +Defaults to 1024x1024

    + +
    +
    +

    Declaration

    +
    +

    Swift

    +
    public let size: `Self`.Size?
    + +
    +
    +
    +
    +
  • +
  • +
    + + + + user + +
    +
    +
    +
    +
    +
    +

    A unique identifier representing your end-user, which can help OpenAI to monitor and detect abuse. +https://platform.openai.com/docs/guides/safety-best-practices/end-user-ids

    + +
    +
    +

    Declaration

    +
    +

    Swift

    +
    public let user: String?
    + +
    +
    +
    +
    +
  • +
  • +
    + + + + style + +
    +
    +
    +
    +
    +
    +

    The style of the generated images. Must be one of vivid or natural. Vivid causes the model to lean towards generating hyper-real and dramatic images. Natural causes the model to produce more natural, less hyper-real looking images. This param is only supported for dall-e-3. +Defaults to vivid

    + +
    +
    +

    Declaration

    +
    +

    Swift

    +
    public let style: `Self`.Style?
    + +
    +
    +
    +
    +
  • +
  • +
    + + + + quality + +
    +
    +
    +
    +
    +
    +

    The quality of the image that will be generated. hd creates images with finer details and greater consistency across the image. This param is only supported for dall-e-3. +Defaults to standard

    + +
    +
    +

    Declaration

    +
    +

    Swift

    +
    public let quality: `Self`.Quality?
    + +
    +
    +
    +
    +
  • +
  • + +
    +
    +
    +
    +
    + +
    +
    +

    Declaration

    +
    +

    Swift

    +
    public init(
    +    prompt: String,
    +    model: Model? = nil,
    +    n: Int? = nil,
    +    quality:Self.Quality? = nil,
    +    responseFormat: Self.ResponseFormat? = nil,
    +    size: Size? = nil,
    +    style: Self.Style? = nil,
    +    user: String? = nil
    +)
    + +
    +
    +
    +
    +
  • +
  • +
    + + + + CodingKeys + +
    +
    +
    +
    +
    +
    + + See more +
    +
    +

    Declaration

    +
    +

    Swift

    +
    public enum CodingKeys : String, CodingKey
    + +
    +
    +
    +
    +
  • +
  • +
    + + + + Style + +
    +
    +
    +
    +
    +
    + + See more +
    +
    +

    Declaration

    +
    +

    Swift

    +
    public enum Style : String, Codable, CaseIterable
    + +
    +
    +
    +
    +
  • +
  • +
    + + + + Quality + +
    +
    +
    +
    +
    +
    + + See more +
    +
    +

    Declaration

    +
    +

    Swift

    +
    public enum Quality : String, Codable, CaseIterable
    + +
    +
    +
    +
    +
  • +
  • +
    + + + + Size + +
    +
    +
    +
    +
    +
    + + See more +
    +
    +

    Declaration

    +
    +

    Swift

    +
    public enum Size : String, Codable, CaseIterable
    + +
    +
    +
    +
    +
  • +
+
+
+
+ +
+
+ + diff --git a/docs/docsets/.docset/Contents/Resources/Documents/Structs/ImagesQuery/CodingKeys.html b/docs/docsets/.docset/Contents/Resources/Documents/Structs/ImagesQuery/CodingKeys.html new file mode 100644 index 00000000..bf4b7cbf --- /dev/null +++ b/docs/docsets/.docset/Contents/Resources/Documents/Structs/ImagesQuery/CodingKeys.html @@ -0,0 +1,516 @@ + + + + CodingKeys Enumeration Reference + + + + + + + + + + + + + +
+
+

Docs (100% documented)

+
+
+ +
+
+
+
+
+ +
+
+ +
+
+
+

CodingKeys

+
+
+ +
public enum CodingKeys : String, CodingKey
+ +
+
+ +
+
+
+
    +
  • +
    + + + + model + +
    +
    +
    +
    +
    +
    + +
    +
    +

    Declaration

    +
    +

    Swift

    +
    case model
    + +
    +
    +
    +
    +
  • +
  • +
    + + + + prompt + +
    +
    +
    +
    +
    +
    + +
    +
    +

    Declaration

    +
    +

    Swift

    +
    case prompt
    + +
    +
    +
    +
    +
  • +
  • +
    + + + + n + +
    +
    +
    +
    +
    +
    + +
    +
    +

    Declaration

    +
    +

    Swift

    +
    case n
    + +
    +
    +
    +
    +
  • +
  • +
    + + + + size + +
    +
    +
    +
    +
    +
    + +
    +
    +

    Declaration

    +
    +

    Swift

    +
    case size
    + +
    +
    +
    +
    +
  • +
  • +
    + + + + user + +
    +
    +
    +
    +
    +
    + +
    +
    +

    Declaration

    +
    +

    Swift

    +
    case user
    + +
    +
    +
    +
    +
  • +
  • +
    + + + + style + +
    +
    +
    +
    +
    +
    + +
    +
    +

    Declaration

    +
    +

    Swift

    +
    case style
    + +
    +
    +
    +
    +
  • +
  • +
    + + + + responseFormat + +
    +
    +
    +
    +
    +
    + +
    +
    +

    Declaration

    +
    +

    Swift

    +
    case responseFormat = "response_format"
    + +
    +
    +
    +
    +
  • +
  • +
    + + + + quality + +
    +
    +
    +
    +
    +
    + +
    +
    +

    Declaration

    +
    +

    Swift

    +
    case quality
    + +
    +
    +
    +
    +
  • +
+
+
+
+ +
+
+ + diff --git a/docs/docsets/.docset/Contents/Resources/Documents/Structs/ImagesQuery/Quality.html b/docs/docsets/.docset/Contents/Resources/Documents/Structs/ImagesQuery/Quality.html new file mode 100644 index 00000000..99029b9a --- /dev/null +++ b/docs/docsets/.docset/Contents/Resources/Documents/Structs/ImagesQuery/Quality.html @@ -0,0 +1,360 @@ + + + + Quality Enumeration Reference + + + + + + + + + + + + + +
+
+

Docs (100% documented)

+
+
+ +
+
+
+
+
+ +
+
+ +
+
+
+

Quality

+
+
+ +
public enum Quality : String, Codable, CaseIterable
+ +
+
+ +
+
+
+
    +
  • +
    + + + + standard + +
    +
    +
    +
    +
    +
    + +
    +
    +

    Declaration

    +
    +

    Swift

    +
    case standard
    + +
    +
    +
    +
    +
  • +
  • +
    + + + + hd + +
    +
    +
    +
    +
    +
    + +
    +
    +

    Declaration

    +
    +

    Swift

    +
    case hd
    + +
    +
    +
    +
    +
  • +
+
+
+
+ +
+
+ + diff --git a/docs/docsets/.docset/Contents/Resources/Documents/Structs/ImagesQuery/ResponseFormat.html b/docs/docsets/.docset/Contents/Resources/Documents/Structs/ImagesQuery/ResponseFormat.html new file mode 100644 index 00000000..6b771624 --- /dev/null +++ b/docs/docsets/.docset/Contents/Resources/Documents/Structs/ImagesQuery/ResponseFormat.html @@ -0,0 +1,360 @@ + + + + ResponseFormat Enumeration Reference + + + + + + + + + + + + + +
+
+

Docs (100% documented)

+
+
+ +
+
+
+
+
+ +
+
+ +
+
+
+

ResponseFormat

+
+
+ +
public enum ResponseFormat : String, Codable, Equatable
+ +
+
+ +
+
+
+
    +
  • +
    + + + + url + +
    +
    +
    +
    +
    +
    + +
    +
    +

    Declaration

    +
    +

    Swift

    +
    case url
    + +
    +
    +
    +
    +
  • +
  • +
    + + + + b64_json + +
    +
    +
    +
    +
    +
    + +
    +
    +

    Declaration

    +
    +

    Swift

    +
    case b64_json
    + +
    +
    +
    +
    +
  • +
+
+
+
+ +
+
+ + diff --git a/docs/docsets/.docset/Contents/Resources/Documents/Structs/ImagesQuery/Size.html b/docs/docsets/.docset/Contents/Resources/Documents/Structs/ImagesQuery/Size.html new file mode 100644 index 00000000..4bee5dbb --- /dev/null +++ b/docs/docsets/.docset/Contents/Resources/Documents/Structs/ImagesQuery/Size.html @@ -0,0 +1,438 @@ + + + + Size Enumeration Reference + + + + + + + + + + + + + +
+
+

Docs (100% documented)

+
+
+ +
+
+
+
+
+ +
+
+ +
+
+
+

Size

+
+
+ +
public enum Size : String, Codable, CaseIterable
+ +
+
+ +
+
+
+
    +
  • +
    + + + + _256 + +
    +
    +
    +
    +
    +
    + +
    +
    +

    Declaration

    +
    +

    Swift

    +
    case _256 = "256x256"
    + +
    +
    +
    +
    +
  • +
  • +
    + + + + _512 + +
    +
    +
    +
    +
    +
    + +
    +
    +

    Declaration

    +
    +

    Swift

    +
    case _512 = "512x512"
    + +
    +
    +
    +
    +
  • +
  • +
    + + + + _1024 + +
    +
    +
    +
    +
    +
    + +
    +
    +

    Declaration

    +
    +

    Swift

    +
    case _1024 = "1024x1024"
    + +
    +
    +
    +
    +
  • +
  • +
    + + + + _1792_1024 + +
    +
    +
    +
    +
    +
    + +
    +
    +

    Declaration

    +
    +

    Swift

    +
    case _1792_1024 = "1792x1024"
    + +
    +
    +
    +
    +
  • +
  • +
    + + + + _1024_1792 + +
    +
    +
    +
    +
    +
    + +
    +
    +

    Declaration

    +
    +

    Swift

    +
    case _1024_1792 = "1024x1792"
    + +
    +
    +
    +
    +
  • +
+
+
+
+ +
+
+ + diff --git a/docs/docsets/.docset/Contents/Resources/Documents/Structs/ImagesQuery/Style.html b/docs/docsets/.docset/Contents/Resources/Documents/Structs/ImagesQuery/Style.html new file mode 100644 index 00000000..6376cc05 --- /dev/null +++ b/docs/docsets/.docset/Contents/Resources/Documents/Structs/ImagesQuery/Style.html @@ -0,0 +1,360 @@ + + + + Style Enumeration Reference + + + + + + + + + + + + + +
+
+

Docs (100% documented)

+
+
+ +
+
+
+
+
+ +
+
+ +
+
+
+

Style

+
+
+ +
public enum Style : String, Codable, CaseIterable
+ +
+
+ +
+
+
+
    +
  • +
    + + + + natural + +
    +
    +
    +
    +
    +
    + +
    +
    +

    Declaration

    +
    +

    Swift

    +
    case natural
    + +
    +
    +
    +
    +
  • +
  • +
    + + + + vivid + +
    +
    +
    +
    +
    +
    + +
    +
    +

    Declaration

    +
    +

    Swift

    +
    case vivid
    + +
    +
    +
    +
    +
  • +
+
+
+
+ +
+
+ + diff --git a/docs/docsets/.docset/Contents/Resources/Documents/Structs/ImagesResult.html b/docs/docsets/.docset/Contents/Resources/Documents/Structs/ImagesResult.html new file mode 100644 index 00000000..80096634 --- /dev/null +++ b/docs/docsets/.docset/Contents/Resources/Documents/Structs/ImagesResult.html @@ -0,0 +1,387 @@ + + + + ImagesResult Structure Reference + + + + + + + + + + + + + +
+
+

Docs (100% documented)

+
+
+ +
+
+
+
+
+ +
+
+ +
+
+
+

ImagesResult

+
+
+ +
public struct ImagesResult : Codable, Equatable
+ +
+
+

Returns a list of image objects.

+ +
+
+
+
    +
  • +
    + + + + created + +
    +
    +
    +
    +
    +
    + +
    +
    +

    Declaration

    +
    +

    Swift

    +
    public let created: TimeInterval
    + +
    +
    +
    +
    +
  • +
  • +
    + + + + data + +
    +
    +
    +
    +
    +
    + +
    +
    +

    Declaration

    +
    +

    Swift

    +
    public let data: [`Self`.Image]
    + +
    +
    +
    +
    +
  • +
  • +
    + + + + Image + +
    +
    +
    +
    +
    +
    +

    Represents the url or the content of an image generated by the OpenAI API.

    + + See more +
    +
    +

    Declaration

    +
    +

    Swift

    +
    public struct Image : Codable, Equatable
    + +
    +
    +
    +
    +
  • +
+
+
+
+ +
+
+ + diff --git a/docs/docsets/.docset/Contents/Resources/Documents/Structs/ImagesResult/Image.html b/docs/docsets/.docset/Contents/Resources/Documents/Structs/ImagesResult/Image.html new file mode 100644 index 00000000..3facad36 --- /dev/null +++ b/docs/docsets/.docset/Contents/Resources/Documents/Structs/ImagesResult/Image.html @@ -0,0 +1,417 @@ + + + + Image Structure Reference + + + + + + + + + + + + + +
+
+

Docs (100% documented)

+
+
+ +
+
+
+
+
+ +
+
+ +
+
+
+

Image

+
+
+ +
public struct Image : Codable, Equatable
+ +
+
+

Represents the url or the content of an image generated by the OpenAI API.

+ +
+
+
+
    +
  • +
    + + + + b64Json + +
    +
    +
    +
    +
    +
    +

    The base64-encoded JSON of the generated image, if response_format is b64_json

    + +
    +
    +

    Declaration

    +
    +

    Swift

    +
    public let b64Json: String?
    + +
    +
    +
    +
    +
  • +
  • +
    + + + + revisedPrompt + +
    +
    +
    +
    +
    +
    +

    The prompt that was used to generate the image, if there was any revision to the prompt.

    + +
    +
    +

    Declaration

    +
    +

    Swift

    +
    public let revisedPrompt: String?
    + +
    +
    +
    +
    +
  • +
  • +
    + + + + url + +
    +
    +
    +
    +
    +
    +

    The URL of the generated image, if response_format is url (default).

    + +
    +
    +

    Declaration

    +
    +

    Swift

    +
    public let url: String?
    + +
    +
    +
    +
    +
  • +
  • +
    + + + + CodingKeys + +
    +
    +
    +
    +
    +
    + + See more +
    +
    +

    Declaration

    +
    +

    Swift

    +
    public enum CodingKeys : String, CodingKey
    + +
    +
    +
    +
    +
  • +
+
+
+
+ +
+
+ + diff --git a/docs/docsets/.docset/Contents/Resources/Documents/Structs/ImagesResult/Image/CodingKeys.html b/docs/docsets/.docset/Contents/Resources/Documents/Structs/ImagesResult/Image/CodingKeys.html new file mode 100644 index 00000000..6993ab03 --- /dev/null +++ b/docs/docsets/.docset/Contents/Resources/Documents/Structs/ImagesResult/Image/CodingKeys.html @@ -0,0 +1,388 @@ + + + + CodingKeys Enumeration Reference + + + + + + + + + + + + + +
+
+

Docs (100% documented)

+
+
+ +
+
+
+
+
+ +
+
+ +
+
+
+

CodingKeys

+
+
+ +
public enum CodingKeys : String, CodingKey
+ +
+
+ +
+
+
+
    +
  • +
    + + + + b64Json + +
    +
    +
    +
    +
    +
    + +
    +
    +

    Declaration

    +
    +

    Swift

    +
    case b64Json = "b64_json"
    + +
    +
    +
    +
    +
  • +
  • +
    + + + + revisedPrompt + +
    +
    +
    +
    +
    +
    + +
    +
    +

    Declaration

    +
    +

    Swift

    +
    case revisedPrompt = "revised_prompt"
    + +
    +
    +
    +
    +
  • +
  • +
    + + + + url + +
    +
    +
    +
    +
    +
    + +
    +
    +

    Declaration

    +
    +

    Swift

    +
    case url
    + +
    +
    +
    +
    +
  • +
+
+
+
+ +
+
+ + diff --git a/docs/docsets/.docset/Contents/Resources/Documents/Structs/ModelQuery.html b/docs/docsets/.docset/Contents/Resources/Documents/Structs/ModelQuery.html new file mode 100644 index 00000000..f359c1fc --- /dev/null +++ b/docs/docsets/.docset/Contents/Resources/Documents/Structs/ModelQuery.html @@ -0,0 +1,360 @@ + + + + ModelQuery Structure Reference + + + + + + + + + + + + + +
+
+

Docs (100% documented)

+
+
+ +
+
+
+
+
+ +
+
+ +
+
+
+

ModelQuery

+
+
+ +
public struct ModelQuery : Codable, Equatable
+ +
+
+

Retrieves a model instance, providing basic information about the model such as the owner and permissioning.

+ +
+
+
+
    +
  • +
    + + + + model + +
    +
    +
    +
    +
    +
    +

    The ID of the model to use for this request.

    + +
    +
    +

    Declaration

    +
    +

    Swift

    +
    public let model: Model
    + +
    +
    +
    +
    +
  • +
  • +
    + + + + init(model:) + +
    +
    +
    +
    +
    +
    + +
    +
    +

    Declaration

    +
    +

    Swift

    +
    public init(model: Model)
    + +
    +
    +
    +
    +
  • +
+
+
+
+ +
+
+ + diff --git a/docs/docsets/.docset/Contents/Resources/Documents/Structs/ModelResult.html b/docs/docsets/.docset/Contents/Resources/Documents/Structs/ModelResult.html new file mode 100644 index 00000000..2e416535 --- /dev/null +++ b/docs/docsets/.docset/Contents/Resources/Documents/Structs/ModelResult.html @@ -0,0 +1,442 @@ + + + + ModelResult Structure Reference + + + + + + + + + + + + + +
+
+

Docs (100% documented)

+
+
+ +
+
+
+
+
+ +
+
+ +
+
+
+

ModelResult

+
+
+ +
public struct ModelResult : Codable, Equatable
+ +
+
+

The model object matching the specified ID.

+ +
+
+
+
    +
  • +
    + + + + id + +
    +
    +
    +
    +
    +
    +

    The model identifier, which can be referenced in the API endpoints.

    + +
    +
    +

    Declaration

    +
    +

    Swift

    +
    public let id: String
    + +
    +
    +
    +
    +
  • +
  • +
    + + + + created + +
    +
    +
    +
    +
    +
    +

    The Unix timestamp (in seconds) when the model was created.

    + +
    +
    +

    Declaration

    +
    +

    Swift

    +
    public let created: TimeInterval
    + +
    +
    +
    +
    +
  • +
  • +
    + + + + object + +
    +
    +
    +
    +
    +
    +

    The object type, which is always “model”.

    + +
    +
    +

    Declaration

    +
    +

    Swift

    +
    public let object: String
    + +
    +
    +
    +
    +
  • +
  • +
    + + + + ownedBy + +
    +
    +
    +
    +
    +
    +

    The organization that owns the model.

    + +
    +
    +

    Declaration

    +
    +

    Swift

    +
    public let ownedBy: String
    + +
    +
    +
    +
    +
  • +
  • +
    + + + + CodingKeys + +
    +
    +
    +
    +
    +
    + + See more +
    +
    +

    Declaration

    +
    +

    Swift

    +
    public enum CodingKeys : String, CodingKey
    + +
    +
    +
    +
    +
  • +
+
+
+
+ +
+
+ + diff --git a/docs/docsets/.docset/Contents/Resources/Documents/Structs/ModelResult/CodingKeys.html b/docs/docsets/.docset/Contents/Resources/Documents/Structs/ModelResult/CodingKeys.html new file mode 100644 index 00000000..cc3da332 --- /dev/null +++ b/docs/docsets/.docset/Contents/Resources/Documents/Structs/ModelResult/CodingKeys.html @@ -0,0 +1,412 @@ + + + + CodingKeys Enumeration Reference + + + + + + + + + + + + + +
+
+

Docs (100% documented)

+
+
+ +
+
+
+
+
+ +
+
+ +
+
+
+

CodingKeys

+
+
+ +
public enum CodingKeys : String, CodingKey
+ +
+
+ +
+
+
+
    +
  • +
    + + + + id + +
    +
    +
    +
    +
    +
    + +
    +
    +

    Declaration

    +
    +

    Swift

    +
    case id
    + +
    +
    +
    +
    +
  • +
  • +
    + + + + created + +
    +
    +
    +
    +
    +
    + +
    +
    +

    Declaration

    +
    +

    Swift

    +
    case created
    + +
    +
    +
    +
    +
  • +
  • +
    + + + + object + +
    +
    +
    +
    +
    +
    + +
    +
    +

    Declaration

    +
    +

    Swift

    +
    case object
    + +
    +
    +
    +
    +
  • +
  • +
    + + + + ownedBy + +
    +
    +
    +
    +
    +
    + +
    +
    +

    Declaration

    +
    +

    Swift

    +
    case ownedBy = "owned_by"
    + +
    +
    +
    +
    +
  • +
+
+
+
+ +
+
+ + diff --git a/docs/docsets/.docset/Contents/Resources/Documents/Structs/ModelsResult.html b/docs/docsets/.docset/Contents/Resources/Documents/Structs/ModelsResult.html new file mode 100644 index 00000000..0aadfcf5 --- /dev/null +++ b/docs/docsets/.docset/Contents/Resources/Documents/Structs/ModelsResult.html @@ -0,0 +1,361 @@ + + + + ModelsResult Structure Reference + + + + + + + + + + + + + +
+
+

Docs (100% documented)

+
+
+ +
+
+
+
+
+ +
+
+ +
+
+
+

ModelsResult

+
+
+ +
public struct ModelsResult : Codable, Equatable
+ +
+
+

A list of model objects.

+ +
+
+
+
    +
  • +
    + + + + data + +
    +
    +
    +
    +
    +
    +

    A list of model objects.

    + +
    +
    +

    Declaration

    +
    +

    Swift

    +
    public let data: [ModelResult]
    + +
    +
    +
    +
    +
  • +
  • +
    + + + + object + +
    +
    +
    +
    +
    +
    +

    The object type, which is always list

    + +
    +
    +

    Declaration

    +
    +

    Swift

    +
    public let object: String
    + +
    +
    +
    +
    +
  • +
+
+
+
+ +
+
+ + diff --git a/docs/docsets/.docset/Contents/Resources/Documents/Structs/ModerationsQuery.html b/docs/docsets/.docset/Contents/Resources/Documents/Structs/ModerationsQuery.html new file mode 100644 index 00000000..8749658a --- /dev/null +++ b/docs/docsets/.docset/Contents/Resources/Documents/Structs/ModerationsQuery.html @@ -0,0 +1,386 @@ + + + + ModerationsQuery Structure Reference + + + + + + + + + + + + + +
+
+

Docs (100% documented)

+
+
+ +
+
+
+
+
+ +
+
+ +
+
+
+

ModerationsQuery

+
+
+ +
public struct ModerationsQuery : Codable
+ +
+
+ +
+
+
+
    +
  • +
    + + + + input + +
    +
    +
    +
    +
    +
    +

    The input text to classify.

    + +
    +
    +

    Declaration

    +
    +

    Swift

    +
    public let input: String
    + +
    +
    +
    +
    +
  • +
  • +
    + + + + model + +
    +
    +
    +
    +
    +
    +

    ID of the model to use.

    + +
    +
    +

    Declaration

    +
    +

    Swift

    +
    public let model: Model?
    + +
    +
    +
    +
    +
  • +
  • +
    + + + + init(input:model:) + +
    +
    +
    +
    +
    +
    + +
    +
    +

    Declaration

    +
    +

    Swift

    +
    public init(input: String, model: Model? = nil)
    + +
    +
    +
    +
    +
  • +
+
+
+
+ +
+
+ + diff --git a/docs/docsets/.docset/Contents/Resources/Documents/Structs/ModerationsResult.html b/docs/docsets/.docset/Contents/Resources/Documents/Structs/ModerationsResult.html new file mode 100644 index 00000000..6a579df1 --- /dev/null +++ b/docs/docsets/.docset/Contents/Resources/Documents/Structs/ModerationsResult.html @@ -0,0 +1,412 @@ + + + + ModerationsResult Structure Reference + + + + + + + + + + + + + +
+
+

Docs (100% documented)

+
+
+ +
+
+
+
+
+ +
+
+ +
+
+
+

ModerationsResult

+
+
+ +
public struct ModerationsResult : Codable, Equatable
+
extension ModerationsResult: Identifiable
+ +
+
+ +
+
+
+
    +
  • +
    + + + + Moderation + +
    +
    +
    +
    +
    +
    + + See more +
    +
    +

    Declaration

    +
    +

    Swift

    +
    public struct Moderation : Codable, Equatable
    + +
    +
    +
    +
    +
  • +
  • +
    + + + + id + +
    +
    +
    +
    +
    +
    + +
    +
    +

    Declaration

    +
    +

    Swift

    +
    public let id: String
    + +
    +
    +
    +
    +
  • +
  • +
    + + + + model + +
    +
    +
    +
    +
    +
    + +
    +
    +

    Declaration

    +
    +

    Swift

    +
    public let model: Model
    + +
    +
    +
    +
    +
  • +
  • +
    + + + + results + +
    +
    +
    +
    +
    +
    + +
    +
    +

    Declaration

    +
    +

    Swift

    +
    public let results: [`Self`.Moderation]
    + +
    +
    +
    +
    +
  • +
+
+
+
+ +
+
+ + diff --git a/docs/docsets/.docset/Contents/Resources/Documents/Structs/ModerationsResult/Moderation.html b/docs/docsets/.docset/Contents/Resources/Documents/Structs/ModerationsResult/Moderation.html new file mode 100644 index 00000000..634e1d49 --- /dev/null +++ b/docs/docsets/.docset/Contents/Resources/Documents/Structs/ModerationsResult/Moderation.html @@ -0,0 +1,443 @@ + + + + Moderation Structure Reference + + + + + + + + + + + + + +
+
+

Docs (100% documented)

+
+
+ +
+
+
+
+
+ +
+
+ +
+
+
+

Moderation

+
+
+ +
public struct Moderation : Codable, Equatable
+ +
+
+ +
+
+
+
    +
  • +
    + + + + Categories + +
    +
    +
    +
    +
    +
    + + See more +
    +
    +

    Declaration

    +
    +

    Swift

    +
    public struct Categories : Codable, Equatable, Sequence
    + +
    +
    +
    +
    +
  • +
  • +
    + + + + CategoryScores + +
    +
    +
    +
    +
    +
    + + See more +
    +
    +

    Declaration

    +
    +

    Swift

    +
    public struct CategoryScores : Codable, Equatable, Sequence
    + +
    +
    +
    +
    +
  • +
  • +
    + + + + categories + +
    +
    +
    +
    +
    +
    +

    Collection of per-category binary usage policies violation flags. For each category, the value is true if the model flags the corresponding category as violated, false otherwise.

    + +
    +
    +

    Declaration

    +
    +

    Swift

    +
    public let categories: Categories
    + +
    +
    +
    +
    +
  • +
  • +
    + + + + categoryScores + +
    +
    +
    +
    +
    +
    +

    Collection of per-category raw scores output by the model, denoting the model’s confidence that the input violates the OpenAI’s policy for the category. The value is between 0 and 1, where higher values denote higher confidence. The scores should not be interpreted as probabilities.

    + +
    +
    +

    Declaration

    +
    +

    Swift

    +
    public let categoryScores: CategoryScores
    + +
    +
    +
    +
    +
  • +
  • +
    + + + + flagged + +
    +
    +
    +
    +
    +
    +

    True if the model classifies the content as violating OpenAI’s usage policies, false otherwise.

    + +
    +
    +

    Declaration

    +
    +

    Swift

    +
    public let flagged: Bool
    + +
    +
    +
    +
    +
  • +
+
+
+
+ +
+
+ + diff --git a/docs/docsets/.docset/Contents/Resources/Documents/Structs/ModerationsResult/Moderation/Categories.html b/docs/docsets/.docset/Contents/Resources/Documents/Structs/ModerationsResult/Moderation/Categories.html new file mode 100644 index 00000000..24b32f05 --- /dev/null +++ b/docs/docsets/.docset/Contents/Resources/Documents/Structs/ModerationsResult/Moderation/Categories.html @@ -0,0 +1,660 @@ + + + + Categories Structure Reference + + + + + + + + + + + + + +
+
+

Docs (100% documented)

+
+
+ +
+
+
+
+
+ +
+
+ +
+
+
+

Categories

+
+
+ +
public struct Categories : Codable, Equatable, Sequence
+ +
+
+ +
+
+
+
    +
  • +
    + + + + harassment + +
    +
    +
    +
    +
    +
    +

    Content that expresses, incites, or promotes harassing language towards any target.

    + +
    +
    +

    Declaration

    +
    +

    Swift

    +
    public let harassment: Bool
    + +
    +
    +
    +
    +
  • +
  • +
    + + + + harassmentThreatening + +
    +
    +
    +
    +
    +
    +

    Harassment content that also includes violence or serious harm towards any target.

    + +
    +
    +

    Declaration

    +
    +

    Swift

    +
    public let harassmentThreatening: Bool
    + +
    +
    +
    +
    +
  • +
  • +
    + + + + hate + +
    +
    +
    +
    +
    +
    +

    Content that expresses, incites, or promotes hate based on race, gender, ethnicity, religion, nationality, sexual orientation, disability status, or caste.

    + +
    +
    +

    Declaration

    +
    +

    Swift

    +
    public let hate: Bool
    + +
    +
    +
    +
    +
  • +
  • +
    + + + + hateThreatening + +
    +
    +
    +
    +
    +
    +

    Hateful content that also includes violence or serious harm towards the targeted group.

    + +
    +
    +

    Declaration

    +
    +

    Swift

    +
    public let hateThreatening: Bool
    + +
    +
    +
    +
    +
  • +
  • +
    + + + + selfHarm + +
    +
    +
    +
    +
    +
    +

    Content that promotes, encourages, or depicts acts of self-harm, such as suicide, cutting, and eating disorders.

    + +
    +
    +

    Declaration

    +
    +

    Swift

    +
    public let selfHarm: Bool
    + +
    +
    +
    +
    +
  • +
  • +
    + + + + selfHarmIntent + +
    +
    +
    +
    +
    +
    +

    Content where the speaker expresses that they are engaging or intend to engage in acts of self-harm, such as suicide, cutting, and eating disorders.

    + +
    +
    +

    Declaration

    +
    +

    Swift

    +
    public let selfHarmIntent: Bool
    + +
    +
    +
    +
    +
  • +
  • +
    + + + + selfHarmInstructions + +
    +
    +
    +
    +
    +
    +

    Content that encourages performing acts of self-harm, such as suicide, cutting, and eating disorders, or that gives instructions or advice on how to commit such acts.

    + +
    +
    +

    Declaration

    +
    +

    Swift

    +
    public let selfHarmInstructions: Bool
    + +
    +
    +
    +
    +
  • +
  • +
    + + + + sexual + +
    +
    +
    +
    +
    +
    +

    Content meant to arouse sexual excitement, such as the description of sexual activity, or that promotes sexual services (excluding sex education and wellness).

    + +
    +
    +

    Declaration

    +
    +

    Swift

    +
    public let sexual: Bool
    + +
    +
    +
    +
    +
  • +
  • +
    + + + + sexualMinors + +
    +
    +
    +
    +
    +
    +

    Sexual content that includes an individual who is under 18 years old.

    + +
    +
    +

    Declaration

    +
    +

    Swift

    +
    public let sexualMinors: Bool
    + +
    +
    +
    +
    +
  • +
  • +
    + + + + violence + +
    +
    +
    +
    +
    +
    +

    Content that promotes or glorifies violence or celebrates the suffering or humiliation of others.

    + +
    +
    +

    Declaration

    +
    +

    Swift

    +
    public let violence: Bool
    + +
    +
    +
    +
    +
  • +
  • +
    + + + + violenceGraphic + +
    +
    +
    +
    +
    +
    +

    Violent content that depicts death, violence, or serious physical injury in extreme graphic detail.

    + +
    +
    +

    Declaration

    +
    +

    Swift

    +
    public let violenceGraphic: Bool
    + +
    +
    +
    +
    +
  • +
  • +
    + + + + CodingKeys + +
    +
    +
    +
    +
    +
    + + See more +
    +
    +

    Declaration

    +
    +

    Swift

    +
    public enum CodingKeys : String, CodingKey, CaseIterable
    + +
    +
    +
    +
    +
  • +
  • +
    + + + + makeIterator() + +
    +
    +
    +
    +
    +
    + +
    +
    +

    Declaration

    +
    +

    Swift

    +
    public func makeIterator() -> IndexingIterator<[(String, Bool)]>
    + +
    +
    +
    +
    +
  • +
+
+
+
+ +
+
+ + diff --git a/docs/docsets/.docset/Contents/Resources/Documents/Structs/ModerationsResult/Moderation/Categories/CodingKeys.html b/docs/docsets/.docset/Contents/Resources/Documents/Structs/ModerationsResult/Moderation/Categories/CodingKeys.html new file mode 100644 index 00000000..1f7905fe --- /dev/null +++ b/docs/docsets/.docset/Contents/Resources/Documents/Structs/ModerationsResult/Moderation/Categories/CodingKeys.html @@ -0,0 +1,598 @@ + + + + CodingKeys Enumeration Reference + + + + + + + + + + + + + +
+
+

Docs (100% documented)

+
+
+ +
+
+
+
+
+ +
+
+ +
+
+
+

CodingKeys

+
+
+ +
public enum CodingKeys : String, CodingKey, CaseIterable
+ +
+
+ +
+
+
+
    +
  • +
    + + + + harassment + +
    +
    +
    +
    +
    +
    + +
    +
    +

    Declaration

    +
    +

    Swift

    +
    case harassment
    + +
    +
    +
    +
    +
  • +
  • +
    + + + + harassmentThreatening + +
    +
    +
    +
    +
    +
    + +
    +
    +

    Declaration

    +
    +

    Swift

    +
    case harassmentThreatening = "harassment/threatening"
    + +
    +
    +
    +
    +
  • +
  • +
    + + + + hate + +
    +
    +
    +
    +
    +
    + +
    +
    +

    Declaration

    +
    +

    Swift

    +
    case hate
    + +
    +
    +
    +
    +
  • +
  • +
    + + + + hateThreatening + +
    +
    +
    +
    +
    +
    + +
    +
    +

    Declaration

    +
    +

    Swift

    +
    case hateThreatening = "hate/threatening"
    + +
    +
    +
    +
    +
  • +
  • +
    + + + + selfHarm + +
    +
    +
    +
    +
    +
    + +
    +
    +

    Declaration

    +
    +

    Swift

    +
    case selfHarm = "self-harm"
    + +
    +
    +
    +
    +
  • +
  • +
    + + + + selfHarmIntent + +
    +
    +
    +
    +
    +
    + +
    +
    +

    Declaration

    +
    +

    Swift

    +
    case selfHarmIntent = "self-harm/intent"
    + +
    +
    +
    +
    +
  • +
  • +
    + + + + selfHarmInstructions + +
    +
    +
    +
    +
    +
    + +
    +
    +

    Declaration

    +
    +

    Swift

    +
    case selfHarmInstructions = "self-harm/instructions"
    + +
    +
    +
    +
    +
  • +
  • +
    + + + + sexual + +
    +
    +
    +
    +
    +
    + +
    +
    +

    Declaration

    +
    +

    Swift

    +
    case sexual
    + +
    +
    +
    +
    +
  • +
  • +
    + + + + sexualMinors + +
    +
    +
    +
    +
    +
    + +
    +
    +

    Declaration

    +
    +

    Swift

    +
    case sexualMinors = "sexual/minors"
    + +
    +
    +
    +
    +
  • +
  • +
    + + + + violence + +
    +
    +
    +
    +
    +
    + +
    +
    +

    Declaration

    +
    +

    Swift

    +
    case violence
    + +
    +
    +
    +
    +
  • +
  • +
    + + + + violenceGraphic + +
    +
    +
    +
    +
    +
    + +
    +
    +

    Declaration

    +
    +

    Swift

    +
    case violenceGraphic = "violence/graphic"
    + +
    +
    +
    +
    +
  • +
+
+
+
+ +
+
+ + diff --git a/docs/docsets/.docset/Contents/Resources/Documents/Structs/ModerationsResult/Moderation/CategoryScores.html b/docs/docsets/.docset/Contents/Resources/Documents/Structs/ModerationsResult/Moderation/CategoryScores.html new file mode 100644 index 00000000..ebc27305 --- /dev/null +++ b/docs/docsets/.docset/Contents/Resources/Documents/Structs/ModerationsResult/Moderation/CategoryScores.html @@ -0,0 +1,660 @@ + + + + CategoryScores Structure Reference + + + + + + + + + + + + + +
+
+

Docs (100% documented)

+
+
+ +
+
+
+
+
+ +
+
+ +
+
+
+

CategoryScores

+
+
+ +
public struct CategoryScores : Codable, Equatable, Sequence
+ +
+
+ +
+
+
+
    +
  • +
    + + + + harassment + +
    +
    +
    +
    +
    +
    +

    Content that expresses, incites, or promotes harassing language towards any target.

    + +
    +
    +

    Declaration

    +
    +

    Swift

    +
    public let harassment: Double
    + +
    +
    +
    +
    +
  • +
  • +
    + + + + harassmentThreatening + +
    +
    +
    +
    +
    +
    +

    Harassment content that also includes violence or serious harm towards any target.

    + +
    +
    +

    Declaration

    +
    +

    Swift

    +
    public let harassmentThreatening: Double
    + +
    +
    +
    +
    +
  • +
  • +
    + + + + hate + +
    +
    +
    +
    +
    +
    +

    Content that expresses, incites, or promotes hate based on race, gender, ethnicity, religion, nationality, sexual orientation, disability status, or caste.

    + +
    +
    +

    Declaration

    +
    +

    Swift

    +
    public let hate: Double
    + +
    +
    +
    +
    +
  • +
  • +
    + + + + hateThreatening + +
    +
    +
    +
    +
    +
    +

    Hateful content that also includes violence or serious harm towards the targeted group.

    + +
    +
    +

    Declaration

    +
    +

    Swift

    +
    public let hateThreatening: Double
    + +
    +
    +
    +
    +
  • +
  • +
    + + + + selfHarm + +
    +
    +
    +
    +
    +
    +

    Content that promotes, encourages, or depicts acts of self-harm, such as suicide, cutting, and eating disorders.

    + +
    +
    +

    Declaration

    +
    +

    Swift

    +
    public let selfHarm: Double
    + +
    +
    +
    +
    +
  • +
  • +
    + + + + selfHarmIntent + +
    +
    +
    +
    +
    +
    +

    Content where the speaker expresses that they are engaging or intend to engage in acts of self-harm, such as suicide, cutting, and eating disorders.

    + +
    +
    +

    Declaration

    +
    +

    Swift

    +
    public let selfHarmIntent: Double
    + +
    +
    +
    +
    +
  • +
  • +
    + + + + selfHarmInstructions + +
    +
    +
    +
    +
    +
    +

    Content that encourages performing acts of self-harm, such as suicide, cutting, and eating disorders, or that gives instructions or advice on how to commit such acts.

    + +
    +
    +

    Declaration

    +
    +

    Swift

    +
    public let selfHarmInstructions: Double
    + +
    +
    +
    +
    +
  • +
  • +
    + + + + sexual + +
    +
    +
    +
    +
    +
    +

    Content meant to arouse sexual excitement, such as the description of sexual activity, or that promotes sexual services (excluding sex education and wellness).

    + +
    +
    +

    Declaration

    +
    +

    Swift

    +
    public let sexual: Double
    + +
    +
    +
    +
    +
  • +
  • +
    + + + + sexualMinors + +
    +
    +
    +
    +
    +
    +

    Sexual content that includes an individual who is under 18 years old.

    + +
    +
    +

    Declaration

    +
    +

    Swift

    +
    public let sexualMinors: Double
    + +
    +
    +
    +
    +
  • +
  • +
    + + + + violence + +
    +
    +
    +
    +
    +
    +

    Content that promotes or glorifies violence or celebrates the suffering or humiliation of others.

    + +
    +
    +

    Declaration

    +
    +

    Swift

    +
    public let violence: Double
    + +
    +
    +
    +
    +
  • +
  • +
    + + + + violenceGraphic + +
    +
    +
    +
    +
    +
    +

    Violent content that depicts death, violence, or serious physical injury in extreme graphic detail.

    + +
    +
    +

    Declaration

    +
    +

    Swift

    +
    public let violenceGraphic: Double
    + +
    +
    +
    +
    +
  • +
  • +
    + + + + CodingKeys + +
    +
    +
    +
    +
    +
    + + See more +
    +
    +

    Declaration

    +
    +

    Swift

    +
    public enum CodingKeys : String, CodingKey, CaseIterable
    + +
    +
    +
    +
    +
  • +
  • +
    + + + + makeIterator() + +
    +
    +
    +
    +
    +
    + +
    +
    +

    Declaration

    +
    +

    Swift

    +
    public func makeIterator() -> IndexingIterator<[(String, Bool)]>
    + +
    +
    +
    +
    +
  • +
+
+
+
+ +
+
+ + diff --git a/docs/docsets/.docset/Contents/Resources/Documents/Structs/ModerationsResult/Moderation/CategoryScores/CodingKeys.html b/docs/docsets/.docset/Contents/Resources/Documents/Structs/ModerationsResult/Moderation/CategoryScores/CodingKeys.html new file mode 100644 index 00000000..9ceabfbf --- /dev/null +++ b/docs/docsets/.docset/Contents/Resources/Documents/Structs/ModerationsResult/Moderation/CategoryScores/CodingKeys.html @@ -0,0 +1,598 @@ + + + + CodingKeys Enumeration Reference + + + + + + + + + + + + + +
+
+

Docs (100% documented)

+
+
+ +
+
+
+
+
+ +
+
+ +
+
+
+

CodingKeys

+
+
+ +
public enum CodingKeys : String, CodingKey, CaseIterable
+ +
+
+ +
+
+
+
    +
  • +
    + + + + harassment + +
    +
    +
    +
    +
    +
    + +
    +
    +

    Declaration

    +
    +

    Swift

    +
    case harassment
    + +
    +
    +
    +
    +
  • +
  • +
    + + + + harassmentThreatening + +
    +
    +
    +
    +
    +
    + +
    +
    +

    Declaration

    +
    +

    Swift

    +
    case harassmentThreatening = "harassment/threatening"
    + +
    +
    +
    +
    +
  • +
  • +
    + + + + hate + +
    +
    +
    +
    +
    +
    + +
    +
    +

    Declaration

    +
    +

    Swift

    +
    case hate
    + +
    +
    +
    +
    +
  • +
  • +
    + + + + hateThreatening + +
    +
    +
    +
    +
    +
    + +
    +
    +

    Declaration

    +
    +

    Swift

    +
    case hateThreatening = "hate/threatening"
    + +
    +
    +
    +
    +
  • +
  • +
    + + + + selfHarm + +
    +
    +
    +
    +
    +
    + +
    +
    +

    Declaration

    +
    +

    Swift

    +
    case selfHarm = "self-harm"
    + +
    +
    +
    +
    +
  • +
  • +
    + + + + selfHarmIntent + +
    +
    +
    +
    +
    +
    + +
    +
    +

    Declaration

    +
    +

    Swift

    +
    case selfHarmIntent = "self-harm/intent"
    + +
    +
    +
    +
    +
  • +
  • +
    + + + + selfHarmInstructions + +
    +
    +
    +
    +
    +
    + +
    +
    +

    Declaration

    +
    +

    Swift

    +
    case selfHarmInstructions = "self-harm/instructions"
    + +
    +
    +
    +
    +
  • +
  • +
    + + + + sexual + +
    +
    +
    +
    +
    +
    + +
    +
    +

    Declaration

    +
    +

    Swift

    +
    case sexual
    + +
    +
    +
    +
    +
  • +
  • +
    + + + + sexualMinors + +
    +
    +
    +
    +
    +
    + +
    +
    +

    Declaration

    +
    +

    Swift

    +
    case sexualMinors = "sexual/minors"
    + +
    +
    +
    +
    +
  • +
  • +
    + + + + violence + +
    +
    +
    +
    +
    +
    + +
    +
    +

    Declaration

    +
    +

    Swift

    +
    case violence
    + +
    +
    +
    +
    +
  • +
  • +
    + + + + violenceGraphic + +
    +
    +
    +
    +
    +
    + +
    +
    +

    Declaration

    +
    +

    Swift

    +
    case violenceGraphic = "violence/graphic"
    + +
    +
    +
    +
    +
  • +
+
+
+
+ +
+
+ + diff --git a/docs/docsets/.docset/Contents/Resources/Documents/Structs/Vector.html b/docs/docsets/.docset/Contents/Resources/Documents/Structs/Vector.html new file mode 100644 index 00000000..353605b1 --- /dev/null +++ b/docs/docsets/.docset/Contents/Resources/Documents/Structs/Vector.html @@ -0,0 +1,422 @@ + + + + Vector Structure Reference + + + + + + + + + + + + + +
+
+

Docs (100% documented)

+
+
+ +
+
+
+
+
+ +
+
+ +
+
+
+

Vector

+
+
+ +
public struct Vector
+ +
+
+ +
+
+
+
    +
  • + +
    +
    +
    +
    +
    +

    Returns the similarity between two vectors

    + +
    +
    +

    Declaration

    +
    +

    Swift

    +
    public static func cosineSimilarity(a: [Double], b: [Double]) -> Double
    + +
    +
    +
    +

    Parameters

    + + + + + + + + + + + +
    + + a + + +
    +

    The first vector

    +
    +
    + + b + + +
    +

    The second vector

    +
    +
    +
    +
    +
    +
  • +
  • + +
    +
    +
    +
    +
    +

    Returns the difference between two vectors. Cosine distance is defined as 1 - cosineSimilarity(a, b)

    + +
    +
    +

    Declaration

    +
    +

    Swift

    +
    public func cosineDifference(a: [Double], b: [Double]) -> Double
    + +
    +
    +
    +

    Parameters

    + + + + + + + + + + + +
    + + a + + +
    +

    The first vector

    +
    +
    + + b + + +
    +

    The second vector

    +
    +
    +
    +
    +
    +
  • +
+
+
+
+ +
+
+ + diff --git a/docs/docsets/.docset/Contents/Resources/Documents/Typealiases.html b/docs/docsets/.docset/Contents/Resources/Documents/Typealiases.html new file mode 100644 index 00000000..1b045dd1 --- /dev/null +++ b/docs/docsets/.docset/Contents/Resources/Documents/Typealiases.html @@ -0,0 +1,325 @@ + + + + Type Aliases Reference + + + + + + + + + + + + + +
+
+

Docs (100% documented)

+
+
+ +
+
+
+
+
+ +
+
+ +
+
+
+

Type Aliases

+

The following type aliases are available globally.

+ +
+
+
+
    +
  • +
    + + + + Model + +
    +
    +
    +
    +
    +
    +

    Defines all available OpenAI models supported by the library.

    + +
    +
    +

    Declaration

    +
    +

    Swift

    +
    public typealias Model = String
    + +
    +
    +
    +
    +
  • +
+
+
+
+ +
+
+ + diff --git a/docs/docsets/.docset/Contents/Resources/Documents/badge.svg b/docs/docsets/.docset/Contents/Resources/Documents/badge.svg new file mode 100644 index 00000000..a096feca --- /dev/null +++ b/docs/docsets/.docset/Contents/Resources/Documents/badge.svg @@ -0,0 +1,28 @@ + + + + + + + + + + + + + + + + documentation + + + documentation + + + 100% + + + 100% + + + diff --git a/docs/docsets/.docset/Contents/Resources/Documents/css/highlight.css b/docs/docsets/.docset/Contents/Resources/Documents/css/highlight.css new file mode 100644 index 00000000..c170357c --- /dev/null +++ b/docs/docsets/.docset/Contents/Resources/Documents/css/highlight.css @@ -0,0 +1,202 @@ +/*! Jazzy - https://github.com/realm/jazzy + * Copyright Realm Inc. + * SPDX-License-Identifier: MIT + */ +/* Credit to https://gist.github.com/wataru420/2048287 */ +.highlight .c { + color: #999988; + font-style: italic; } + +.highlight .err { + color: #a61717; + background-color: #e3d2d2; } + +.highlight .k { + color: #000000; + font-weight: bold; } + +.highlight .o { + color: #000000; + font-weight: bold; } + +.highlight .cm { + color: #999988; + font-style: italic; } + +.highlight .cp { + color: #999999; + font-weight: bold; } + +.highlight .c1 { + color: #999988; + font-style: italic; } + +.highlight .cs { + color: #999999; + font-weight: bold; + font-style: italic; } + +.highlight .gd { + color: #000000; + background-color: #ffdddd; } + +.highlight .gd .x { + color: #000000; + background-color: #ffaaaa; } + +.highlight .ge { + color: #000000; + font-style: italic; } + +.highlight .gr { + color: #aa0000; } + +.highlight .gh { + color: #999999; } + +.highlight .gi { + color: #000000; + background-color: #ddffdd; } + +.highlight .gi .x { + color: #000000; + background-color: #aaffaa; } + +.highlight .go { + color: #888888; } + +.highlight .gp { + color: #555555; } + +.highlight .gs { + font-weight: bold; } + +.highlight .gu { + color: #aaaaaa; } + +.highlight .gt { + color: #aa0000; } + 
+.highlight .kc { + color: #000000; + font-weight: bold; } + +.highlight .kd { + color: #000000; + font-weight: bold; } + +.highlight .kp { + color: #000000; + font-weight: bold; } + +.highlight .kr { + color: #000000; + font-weight: bold; } + +.highlight .kt { + color: #445588; } + +.highlight .m { + color: #009999; } + +.highlight .s { + color: #d14; } + +.highlight .na { + color: #008080; } + +.highlight .nb { + color: #0086B3; } + +.highlight .nc { + color: #445588; + font-weight: bold; } + +.highlight .no { + color: #008080; } + +.highlight .ni { + color: #800080; } + +.highlight .ne { + color: #990000; + font-weight: bold; } + +.highlight .nf { + color: #990000; } + +.highlight .nn { + color: #555555; } + +.highlight .nt { + color: #000080; } + +.highlight .nv { + color: #008080; } + +.highlight .ow { + color: #000000; + font-weight: bold; } + +.highlight .w { + color: #bbbbbb; } + +.highlight .mf { + color: #009999; } + +.highlight .mh { + color: #009999; } + +.highlight .mi { + color: #009999; } + +.highlight .mo { + color: #009999; } + +.highlight .sb { + color: #d14; } + +.highlight .sc { + color: #d14; } + +.highlight .sd { + color: #d14; } + +.highlight .s2 { + color: #d14; } + +.highlight .se { + color: #d14; } + +.highlight .sh { + color: #d14; } + +.highlight .si { + color: #d14; } + +.highlight .sx { + color: #d14; } + +.highlight .sr { + color: #009926; } + +.highlight .s1 { + color: #d14; } + +.highlight .ss { + color: #990073; } + +.highlight .bp { + color: #999999; } + +.highlight .vc { + color: #008080; } + +.highlight .vg { + color: #008080; } + +.highlight .vi { + color: #008080; } + +.highlight .il { + color: #009999; } diff --git a/docs/docsets/.docset/Contents/Resources/Documents/css/jazzy.css b/docs/docsets/.docset/Contents/Resources/Documents/css/jazzy.css new file mode 100644 index 00000000..f84ef864 --- /dev/null +++ b/docs/docsets/.docset/Contents/Resources/Documents/css/jazzy.css @@ -0,0 +1,442 @@ +/*! 
Jazzy - https://github.com/realm/jazzy + * Copyright Realm Inc. + * SPDX-License-Identifier: MIT + */ +html, body, div, span, h1, h3, h4, p, a, code, em, img, ul, li, table, tbody, tr, td { + background: transparent; + border: 0; + margin: 0; + outline: 0; + padding: 0; + vertical-align: baseline; } + +body { + background-color: #f2f2f2; + font-family: Helvetica, freesans, Arial, sans-serif; + font-size: 14px; + -webkit-font-smoothing: subpixel-antialiased; + word-wrap: break-word; } + +h1, h2, h3 { + margin-top: 0.8em; + margin-bottom: 0.3em; + font-weight: 100; + color: black; } + +h1 { + font-size: 2.5em; } + +h2 { + font-size: 2em; + border-bottom: 1px solid #e2e2e2; } + +h4 { + font-size: 13px; + line-height: 1.5; + margin-top: 21px; } + +h5 { + font-size: 1.1em; } + +h6 { + font-size: 1.1em; + color: #777; } + +.section-name { + color: gray; + display: block; + font-family: Helvetica; + font-size: 22px; + font-weight: 100; + margin-bottom: 15px; } + +pre, code { + font: 0.95em Menlo, monospace; + color: #777; + word-wrap: normal; } + +p code, li code { + background-color: #eee; + padding: 2px 4px; + border-radius: 4px; } + +pre > code { + padding: 0; } + +a { + color: #0088cc; + text-decoration: none; } + a code { + color: inherit; } + +ul { + padding-left: 15px; } + +li { + line-height: 1.8em; } + +img { + max-width: 100%; } + +blockquote { + margin-left: 0; + padding: 0 10px; + border-left: 4px solid #ccc; } + +hr { + height: 1px; + border: none; + background-color: #e2e2e2; } + +.footnote-ref { + display: inline-block; + scroll-margin-top: 70px; } + +.footnote-def { + scroll-margin-top: 70px; } + +.content-wrapper { + margin: 0 auto; + width: 980px; } + +header { + font-size: 0.85em; + line-height: 32px; + background-color: #414141; + position: fixed; + width: 100%; + z-index: 3; } + header img { + padding-right: 6px; + vertical-align: -3px; + height: 16px; } + header a { + color: #fff; } + header p { + float: left; + color: #999; } + header .header-right 
{ + float: right; + margin-left: 16px; } + +#breadcrumbs { + background-color: #f2f2f2; + height: 26px; + padding-top: 12px; + position: fixed; + width: inherit; + z-index: 2; + margin-top: 32px; + white-space: nowrap; + overflow-x: scroll; } + #breadcrumbs #carat { + height: 10px; + margin: 0 5px; } + +.sidebar { + background-color: #f9f9f9; + border: 1px solid #e2e2e2; + overflow-y: auto; + overflow-x: hidden; + position: fixed; + top: 70px; + bottom: 0; + width: 230px; + word-wrap: normal; } + +.nav-groups { + list-style-type: none; + background: #fff; + padding-left: 0; } + +.nav-group-name { + border-bottom: 1px solid #e2e2e2; + font-size: 1.1em; + font-weight: 100; + padding: 15px 0 15px 20px; } + .nav-group-name > a { + color: #333; } + +.nav-group-tasks { + margin-top: 5px; } + +.nav-group-task { + font-size: 0.9em; + list-style-type: none; + white-space: nowrap; } + .nav-group-task a { + color: #888; } + +.main-content { + background-color: #fff; + border: 1px solid #e2e2e2; + margin-left: 246px; + position: absolute; + overflow: hidden; + padding-bottom: 20px; + top: 70px; + width: 734px; } + .main-content p, .main-content a, .main-content code, .main-content em, .main-content ul, .main-content table, .main-content blockquote { + margin-bottom: 1em; } + .main-content p { + line-height: 1.8em; } + .main-content section .section:first-child { + margin-top: 0; + padding-top: 0; } + .main-content section .task-group-section .task-group:first-of-type { + padding-top: 10px; } + .main-content section .task-group-section .task-group:first-of-type .section-name { + padding-top: 15px; } + .main-content section .heading:before { + content: ""; + display: block; + padding-top: 70px; + margin: -70px 0 0; } + .main-content .section-name p { + margin-bottom: inherit; + line-height: inherit; } + .main-content .section-name code { + background-color: inherit; + padding: inherit; + color: inherit; } + +.section { + padding: 0 25px; } + +.highlight { + background-color: 
#eee; + padding: 10px 12px; + border: 1px solid #e2e2e2; + border-radius: 4px; + overflow-x: auto; } + +.declaration .highlight { + overflow-x: initial; + padding: 0 40px 40px 0; + margin-bottom: -25px; + background-color: transparent; + border: none; } + +.section-name { + margin: 0; + margin-left: 18px; } + +.task-group-section { + margin-top: 10px; + padding-left: 6px; + border-top: 1px solid #e2e2e2; } + +.task-group { + padding-top: 0px; } + +.task-name-container a[name]:before { + content: ""; + display: block; + padding-top: 70px; + margin: -70px 0 0; } + +.section-name-container { + position: relative; + display: inline-block; } + .section-name-container .section-name-link { + position: absolute; + top: 0; + left: 0; + bottom: 0; + right: 0; + margin-bottom: 0; } + .section-name-container .section-name { + position: relative; + pointer-events: none; + z-index: 1; } + .section-name-container .section-name a { + pointer-events: auto; } + +.item { + padding-top: 8px; + width: 100%; + list-style-type: none; } + .item a[name]:before { + content: ""; + display: block; + padding-top: 70px; + margin: -70px 0 0; } + .item code { + background-color: transparent; + padding: 0; } + .item .token, .item .direct-link { + display: inline-block; + text-indent: -20px; + padding-left: 3px; + margin-left: 35px; + font-size: 11.9px; + transition: all 300ms; } + .item .token-open { + margin-left: 20px; } + .item .discouraged { + text-decoration: line-through; } + +.declaration-note { + font-size: .85em; + color: gray; + font-style: italic; } + +.pointer-container { + border-bottom: 1px solid #e2e2e2; + left: -23px; + padding-bottom: 13px; + position: relative; + width: 110%; } + +.pointer { + background: #f9f9f9; + border-left: 1px solid #e2e2e2; + border-top: 1px solid #e2e2e2; + height: 12px; + left: 21px; + top: -7px; + -webkit-transform: rotate(45deg); + -moz-transform: rotate(45deg); + -o-transform: rotate(45deg); + transform: rotate(45deg); + position: absolute; + width: 
12px; } + +.height-container { + display: none; + left: -25px; + padding: 0 25px; + position: relative; + width: 100%; + overflow: hidden; } + .height-container .section { + background: #f9f9f9; + border-bottom: 1px solid #e2e2e2; + left: -25px; + position: relative; + width: 100%; + padding-top: 10px; + padding-bottom: 5px; } + +.aside, .language { + padding: 6px 12px; + margin: 12px 0; + border-left: 5px solid #dddddd; + overflow-y: hidden; } + .aside .aside-title, .language .aside-title { + font-size: 9px; + letter-spacing: 2px; + text-transform: uppercase; + padding-bottom: 0; + margin: 0; + color: #aaa; + -webkit-user-select: none; } + .aside p:last-child, .language p:last-child { + margin-bottom: 0; } + +.language { + border-left: 5px solid #cde9f4; } + .language .aside-title { + color: #4b8afb; } + +.aside-warning, .aside-deprecated, .aside-unavailable { + border-left: 5px solid #ff6666; } + .aside-warning .aside-title, .aside-deprecated .aside-title, .aside-unavailable .aside-title { + color: #ff0000; } + +.graybox { + border-collapse: collapse; + width: 100%; } + .graybox p { + margin: 0; + word-break: break-word; + min-width: 50px; } + .graybox td { + border: 1px solid #e2e2e2; + padding: 5px 25px 5px 10px; + vertical-align: middle; } + .graybox tr td:first-of-type { + text-align: right; + padding: 7px; + vertical-align: top; + word-break: normal; + width: 40px; } + +.slightly-smaller { + font-size: 0.9em; } + +#footer { + position: relative; + top: 10px; + bottom: 0px; + margin-left: 25px; } + #footer p { + margin: 0; + color: #aaa; + font-size: 0.8em; } + +html.dash header, html.dash #breadcrumbs, html.dash .sidebar { + display: none; } + +html.dash .main-content { + width: 980px; + margin-left: 0; + border: none; + width: 100%; + top: 0; + padding-bottom: 0; } + +html.dash .height-container { + display: block; } + +html.dash .item .token { + margin-left: 0; } + +html.dash .content-wrapper { + width: auto; } + +html.dash #footer { + position: static; } 
+ +form[role=search] { + float: right; } + form[role=search] input { + font: Helvetica, freesans, Arial, sans-serif; + margin-top: 6px; + font-size: 13px; + line-height: 20px; + padding: 0px 10px; + border: none; + border-radius: 1em; } + .loading form[role=search] input { + background: white url(../img/spinner.gif) center right 4px no-repeat; } + form[role=search] .tt-menu { + margin: 0; + min-width: 300px; + background: #fff; + color: #333; + border: 1px solid #e2e2e2; + z-index: 4; } + form[role=search] .tt-highlight { + font-weight: bold; } + form[role=search] .tt-suggestion { + font: Helvetica, freesans, Arial, sans-serif; + font-size: 14px; + padding: 0 8px; } + form[role=search] .tt-suggestion span { + display: table-cell; + white-space: nowrap; } + form[role=search] .tt-suggestion .doc-parent-name { + width: 100%; + text-align: right; + font-weight: normal; + font-size: 0.9em; + padding-left: 16px; } + form[role=search] .tt-suggestion:hover, + form[role=search] .tt-suggestion.tt-cursor { + cursor: pointer; + background-color: #4183c4; + color: #fff; } + form[role=search] .tt-suggestion:hover .doc-parent-name, + form[role=search] .tt-suggestion.tt-cursor .doc-parent-name { + color: #fff; } diff --git a/docs/docsets/.docset/Contents/Resources/Documents/img/carat.png b/docs/docsets/.docset/Contents/Resources/Documents/img/carat.png new file mode 100755 index 00000000..29d2f7fd Binary files /dev/null and b/docs/docsets/.docset/Contents/Resources/Documents/img/carat.png differ diff --git a/docs/docsets/.docset/Contents/Resources/Documents/img/dash.png b/docs/docsets/.docset/Contents/Resources/Documents/img/dash.png new file mode 100755 index 00000000..6f694c7a Binary files /dev/null and b/docs/docsets/.docset/Contents/Resources/Documents/img/dash.png differ diff --git a/docs/docsets/.docset/Contents/Resources/Documents/img/spinner.gif b/docs/docsets/.docset/Contents/Resources/Documents/img/spinner.gif new file mode 100644 index 00000000..e3038d0a Binary files 
/dev/null and b/docs/docsets/.docset/Contents/Resources/Documents/img/spinner.gif differ diff --git a/docs/docsets/.docset/Contents/Resources/Documents/index.html b/docs/docsets/.docset/Contents/Resources/Documents/index.html new file mode 100644 index 00000000..38545af2 --- /dev/null +++ b/docs/docsets/.docset/Contents/Resources/Documents/index.html @@ -0,0 +1,1280 @@ + + + + Index Reference + + + + + + + + + + + + +
+
+

Docs (100% documented)

+
+
+ +
+
+
+
+
+ +
+
+ +
+
+
+ +

OpenAI

+ +

logo

+ +
+ +

Swift Workflow + + +Twitter

+ +

This repository contains Swift community-maintained implementation over OpenAI public API.

+ + +

What is OpenAI

+ +

OpenAI is a non-profit artificial intelligence research organization founded in San Francisco, California in 2015. It was created with the purpose of advancing digital intelligence in ways that benefit humanity as a whole and promote societal progress. The organization strives to develop AI (Artificial Intelligence) programs and systems that can think, act and adapt quickly on their own – autonomously. OpenAI’s mission is to ensure safe and responsible use of AI for civic good, economic growth and other public benefits; this includes cutting-edge research into important topics such as general AI safety, natural language processing, applied reinforcement learning methods, machine vision algorithms etc.

+ +
+

The OpenAI API can be applied to virtually any task that involves understanding or generating natural language or code. We offer a spectrum of models with different levels of power suitable for different tasks, as well as the ability to fine-tune your own custom models. These models can be used for everything from content generation to semantic search and classification.

+
+

Installation

+ +

OpenAI is available with Swift Package Manager. +The Swift Package Manager is a tool for automating the distribution of Swift code and is integrated into the swift compiler. +Once you have your Swift package set up, adding OpenAI as a dependency is as easy as adding it to the dependencies value of your Package.swift.

+
dependencies: [
+    .package(url: "https://github.com/MacPaw/OpenAI.git", branch: "main")
+]
+
+

Usage

+

Initialization

+ +

To initialize API instance you need to obtain API token from your Open AI organization.

+ +

Remember that your API key is a secret! Do not share it with others or expose it in any client-side code (browsers, apps). Production requests must be routed through your own backend server where your API key can be securely loaded from an environment variable or key management service.

+ +

company

+ +

Once you have a token, you can initialize OpenAI class, which is an entry point to the API.

+ +
+

⚠️ OpenAI strongly recommends developers of client-side applications proxy requests through a separate backend service to keep their API key safe. API keys can access and manipulate customer billing, usage, and organizational data, so it’s a significant risk to expose them.

+
+
let openAI = OpenAI(apiToken: "YOUR_TOKEN_HERE")
+
+ +

Optionally you can initialize OpenAI with token, organization identifier and timeoutInterval.

+
let configuration = OpenAI.Configuration(token: "YOUR_TOKEN_HERE", organizationIdentifier: "YOUR_ORGANIZATION_ID_HERE", timeoutInterval: 60.0)
+let openAI = OpenAI(configuration: configuration)
+
+ +

Once you possess the token and the instance is initialized, you are ready to make requests.

+

Completions

+ +

Given a prompt, the model will return one or more predicted completions, and can also return the probabilities of alternative tokens at each position.

+ +

Request

+
struct CompletionsQuery: Codable {
+    /// ID of the model to use.
+    public let model: Model
+    /// The prompt(s) to generate completions for, encoded as a string, array of strings, array of tokens, or array of token arrays.
+    public let prompt: String
+    /// What sampling temperature to use. Higher values means the model will take more risks. Try 0.9 for more creative applications, and 0 (argmax sampling) for ones with a well-defined answer.
+    public let temperature: Double?
+    /// The maximum number of tokens to generate in the completion.
+    public let maxTokens: Int?
+    /// An alternative to sampling with temperature, called nucleus sampling, where the model considers the results of the tokens with top_p probability mass. So 0.1 means only the tokens comprising the top 10% probability mass are considered.
+    public let topP: Double?
+    /// Number between -2.0 and 2.0. Positive values penalize new tokens based on their existing frequency in the text so far, decreasing the model's likelihood to repeat the same line verbatim.
+    public let frequencyPenalty: Double?
+    /// Number between -2.0 and 2.0. Positive values penalize new tokens based on whether they appear in the text so far, increasing the model's likelihood to talk about new topics.
+    public let presencePenalty: Double?
+    /// Up to 4 sequences where the API will stop generating further tokens. The returned text will not contain the stop sequence.
+    public let stop: [String]?
+    /// A unique identifier representing your end-user, which can help OpenAI to monitor and detect abuse.
+    public let user: String?
+}
+
+ +

Response

+
struct CompletionsResult: Codable, Equatable {
+    public struct Choice: Codable, Equatable {
+        public let text: String
+        public let index: Int
+    }
+
+    public let id: String
+    public let object: String
+    public let created: TimeInterval
+    public let model: Model
+    public let choices: [Choice]
+    public let usage: Usage
+}
+
+ +

Example

+
let query = CompletionsQuery(model: .textDavinci_003, prompt: "What is 42?", temperature: 0, maxTokens: 100, topP: 1, frequencyPenalty: 0, presencePenalty: 0, stop: ["\\n"])
+openAI.completions(query: query) { result in
+  //Handle result here
+}
+//or
+let result = try await openAI.completions(query: query)
+
+
(lldb) po result
+▿ CompletionsResult
+  - id : "cmpl-6P9be2p2fQlwB7zTOl0NxCOetGmX3"
+  - object : "text_completion"
+  - created : 1671453146.0
+  - model : OpenAI.Model.textDavinci_003
+  ▿ choices : 1 element
+    ▿ 0 : Choice
+      - text : "\n\n42 is the answer to the ultimate question of life, the universe, and everything, according to the book The Hitchhiker\'s Guide to the Galaxy."
+      - index : 0
+
+

Completions Streaming

+ +

Completions streaming is available by using completionsStream function. Tokens will be sent one-by-one.

+ +

Closures

+
openAI.completionsStream(query: query) { partialResult in
+    switch partialResult {
+    case .success(let result):
+        print(result.choices)
+    case .failure(let error):
+        //Handle chunk error here
+    }
+} completion: { error in
+    //Handle streaming error here
+}
+
+ +

Combine

+
openAI
+    .completionsStream(query: query)
+    .sink { completion in
+        //Handle completion result here
+    } receiveValue: { result in
+        //Handle chunk here
+    }.store(in: &cancellables)
+
+ +

Structured concurrency

+
for try await result in openAI.completionsStream(query: query) {
+   //Handle result here
+}
+
+ +

Review Completions Documentation for more info.

+

Chats

+ +

Using the OpenAI Chat API, you can build your own applications with gpt-3.5-turbo to do things like:

+ +
    +
  • Draft an email or other piece of writing
  • +
  • Write Python code
  • +
  • Answer questions about a set of documents
  • +
  • Create conversational agents
  • +
  • Give your software a natural language interface
  • +
  • Tutor in a range of subjects
  • +
  • Translate languages
  • +
  • Simulate characters for video games and much more
  • +
+ +

Request

+
 struct ChatQuery: Codable {
+     /// ID of the model to use. Currently, only gpt-3.5-turbo and gpt-3.5-turbo-0301 are supported.
+     public let model: Model
+     /// The messages to generate chat completions for
+     public let messages: [Chat]
+     /// A list of functions the model may generate JSON inputs for.
+     public let functions: [ChatFunctionDeclaration]?
+     /// What sampling temperature to use, between 0 and 2. Higher values like 0.8 will make the output more random, while lower values like 0.2 will make it more focused and deterministic. We generally recommend altering this or top_p but not both.
+     public let temperature: Double?
+     /// An alternative to sampling with temperature, called nucleus sampling, where the model considers the results of the tokens with top_p probability mass. So 0.1 means only the tokens comprising the top 10% probability mass are considered.
+     public let topP: Double?
+     /// How many chat completion choices to generate for each input message.
+     public let n: Int?
+     /// Up to 4 sequences where the API will stop generating further tokens. The returned text will not contain the stop sequence.
+     public let stop: [String]?
+     /// The maximum number of tokens to generate in the completion.
+     public let maxTokens: Int?
+     /// Number between -2.0 and 2.0. Positive values penalize new tokens based on whether they appear in the text so far, increasing the model's likelihood to talk about new topics.
+     public let presencePenalty: Double?
+     /// Number between -2.0 and 2.0. Positive values penalize new tokens based on their existing frequency in the text so far, decreasing the model's likelihood to repeat the same line verbatim.
+     public let frequencyPenalty: Double?
+     ///Modify the likelihood of specified tokens appearing in the completion.
+     public let logitBias: [String:Int]?
+     /// A unique identifier representing your end-user, which can help OpenAI to monitor and detect abuse.
+     public let user: String?
+}
+
+ +

Response

+
struct ChatResult: Codable, Equatable {
+    public struct Choice: Codable, Equatable {
+        public let index: Int
+        public let message: Chat
+        public let finishReason: String
+    }
+
+    public struct Usage: Codable, Equatable {
+        public let promptTokens: Int
+        public let completionTokens: Int
+        public let totalTokens: Int
+    }
+
+    public let id: String
+    public let object: String
+    public let created: TimeInterval
+    public let model: Model
+    public let choices: [Choice]
+    public let usage: Usage
+}
+
+ +

Example

+
let query = ChatQuery(model: .gpt3_5Turbo, messages: [.init(role: .user, content: "who are you")])
+let result = try await openAI.chats(query: query)
+
+
(lldb) po result
+▿ ChatResult
+  - id : "chatcmpl-6pwjgxGV2iPP4QGdyOLXnTY0LE3F8"
+  - object : "chat.completion"
+  - created : 1677838528.0
+  - model : "gpt-3.5-turbo-0301"
+  ▿ choices : 1 element
+    ▿ 0 : Choice
+      - index : 0
+      ▿ message : Chat
+        - role : "assistant"
+        - content : "\n\nI\'m an AI language model developed by OpenAI, created to provide assistance and support for various tasks such as answering questions, generating text, and providing recommendations. Nice to meet you!"
+      - finish_reason : "stop"
+  ▿ usage : Usage
+    - prompt_tokens : 10
+    - completion_tokens : 39
+    - total_tokens : 49
+
+

Chats Streaming

+ +

Chats streaming is available by using chatStream function. Tokens will be sent one-by-one.

+ +

Closures

+
openAI.chatsStream(query: query) { partialResult in
+    switch partialResult {
+    case .success(let result):
+        print(result.choices)
+    case .failure(let error):
+        //Handle chunk error here
+    }
+} completion: { error in
+    //Handle streaming error here
+}
+
+ +

Combine

+
openAI
+    .chatsStream(query: query)
+    .sink { completion in
+        //Handle completion result here
+    } receiveValue: { result in
+        //Handle chunk here
+    }.store(in: &cancellables)
+
+ +

Structured concurrency

+
for try await result in openAI.chatsStream(query: query) {
+   //Handle result here
+}
+
+ +

Function calls

+
let openAI = OpenAI(apiToken: "...")
+// Declare functions which GPT-3 might decide to call.
+let functions = [
+  ChatFunctionDeclaration(
+      name: "get_current_weather",
+      description: "Get the current weather in a given location",
+      parameters:
+        JSONSchema(
+          type: .object,
+          properties: [
+            "location": .init(type: .string, description: "The city and state, e.g. San Francisco, CA"),
+            "unit": .init(type: .string, enumValues: ["celsius", "fahrenheit"])
+          ],
+          required: ["location"]
+        )
+  )
+]
+let query = ChatQuery(
+  model: "gpt-3.5-turbo-0613",  // 0613 is the earliest version with function calls support.
+  messages: [
+      Chat(role: .user, content: "What's the weather like in Boston?")
+  ],
+  functions: functions
+)
+let result = try await openAI.chats(query: query)
+
+ +

Result will be (serialized as JSON here for readability):

+
{
+  "id": "chatcmpl-1234",
+  "object": "chat.completion",
+  "created": 1686000000,
+  "model": "gpt-3.5-turbo-0613",
+  "choices": [
+    {
+      "index": 0,
+      "message": {
+        "role": "assistant",
+        "function_call": {
+          "name": "get_current_weather",
+          "arguments": "{\n  \"location\": \"Boston, MA\"\n}"
+        }
+      },
+      "finish_reason": "function_call"
+    }
+  ],
+  "usage": { "total_tokens": 100, "completion_tokens": 18, "prompt_tokens": 82 }
+}
+
+
+ +

Review Chat Documentation for more info.

+

Images

+ +

Given a prompt and/or an input image, the model will generate a new image.

+ +

As Artificial Intelligence continues to develop, so too does the intriguing concept of Dall-E. Developed by OpenAI, a research lab for artificial intelligence purposes, Dall-E has been classified as an AI system that can generate images based on descriptions provided by humans. With its potential applications spanning from animation and illustration to design and engineering - not to mention the endless possibilities in between - it’s easy to see why there is such excitement over this new technology.

+

Create Image

+ +

Request

+
struct ImagesQuery: Codable {
+    /// A text description of the desired image(s). The maximum length is 1000 characters.
+    public let prompt: String
+    /// The number of images to generate. Must be between 1 and 10.
+    public let n: Int?
+    /// The size of the generated images. Must be one of 256x256, 512x512, or 1024x1024.
+    public let size: String?
+}
+
+ +

Response

+
struct ImagesResult: Codable, Equatable {
+    public struct URLResult: Codable, Equatable {
+        public let url: String
+    }
+    public let created: TimeInterval
+    public let data: [URLResult]
+}
+
+ +

Example

+
let query = ImagesQuery(prompt: "White cat with heterochromia sitting on the kitchen table", n: 1, size: "1024x1024")
+openAI.images(query: query) { result in
+  //Handle result here
+}
+//or
+let result = try await openAI.images(query: query)
+
+
(lldb) po result
+▿ ImagesResult
+  - created : 1671453505.0
+  ▿ data : 1 element
+    ▿ 0 : URLResult
+      - url : "https://oaidalleapiprodscus.blob.core.windows.net/private/org-CWjU5cDIzgCcVjq10pp5yX5Q/user-GoBXgChvLBqLHdBiMJBUbPqF/img-WZVUK2dOD4HKbKwW1NeMJHBd.png?st=2022-12-19T11%3A38%3A25Z&se=2022-12-19T13%3A38%3A25Z&sp=r&sv=2021-08-06&sr=b&rscd=inline&rsct=image/png&skoid=6aaadede-4fb3-4698-a8f6-684d7786b067&sktid=a48cca56-e6da-484e-a814-9c849652bcb3&skt=2022-12-19T09%3A35%3A16Z&ske=2022-12-20T09%3A35%3A16Z&sks=b&skv=2021-08-06&sig=mh52rmtbQ8CXArv5bMaU6lhgZHFBZz/ePr4y%2BJwLKOc%3D"
+
+ +

Generated image

+ +

Generated Image

+

Create Image Edit

+ +

Creates an edited or extended image given an original image and a prompt.

+ +

Request

+
public struct ImageEditsQuery: Codable {
+    /// The image to edit. Must be a valid PNG file, less than 4MB, and square. If mask is not provided, image must have transparency, which will be used as the mask.
+    public let image: Data
+    public let fileName: String
+    /// An additional image whose fully transparent areas (e.g. where alpha is zero) indicate where image should be edited. Must be a valid PNG file, less than 4MB, and have the same dimensions as image.
+    public let mask: Data?
+    public let maskFileName: String?
+    /// A text description of the desired image(s). The maximum length is 1000 characters.
+    public let prompt: String
+    /// The number of images to generate. Must be between 1 and 10.
+    public let n: Int?
+    /// The size of the generated images. Must be one of 256x256, 512x512, or 1024x1024.
+    public let size: String?
+}
+
+ +

Response

+ +

Uses the ImagesResult response similarly to ImagesQuery.

+ +

Example

+
let data = image.pngData()
+let query = ImageEditQuery(image: data, fileName: "whitecat.png", prompt: "White cat with heterochromia sitting on the kitchen table with a bowl of food", n: 1, size: "1024x1024")
+openAI.imageEdits(query: query) { result in
+  //Handle result here
+}
+//or
+let result = try await openAI.imageEdits(query: query)
+
+

Create Image Variation

+ +

Creates a variation of a given image.

+ +

Request

+
public struct ImageVariationsQuery: Codable {
+    /// The image to edit. Must be a valid PNG file, less than 4MB, and square. If mask is not provided, image must have transparency, which will be used as the mask.
+    public let image: Data
+    public let fileName: String
+    /// The number of images to generate. Must be between 1 and 10.
+    public let n: Int?
+    /// The size of the generated images. Must be one of 256x256, 512x512, or 1024x1024.
+    public let size: String?
+}
+
+ +

Response

+ +

Uses the ImagesResult response similarly to ImagesQuery.

+ +

Example

+
let data = image.pngData()
+let query = ImageVariationQuery(image: data, fileName: "whitecat.png", n: 1, size: "1024x1024")
+openAI.imageVariations(query: query) { result in
+  //Handle result here
+}
+//or
+let result = try await openAI.imageVariations(query: query)
+
+ +

Review Images Documentation for more info.

+

Audio

+ +

The speech to text API provides two endpoints, transcriptions and translations, based on our state-of-the-art open source large-v2 Whisper model. They can be used to:

+ +

Transcribe audio into whatever language the audio is in. +Translate and transcribe the audio into English. +File uploads are currently limited to 25 MB and the following input file types are supported: mp3, mp4, mpeg, mpga, m4a, wav, and webm.

+

Audio Create Speech

+ +

This function sends an AudioSpeechQuery to the OpenAI API to create audio speech from text using a specific voice and format.

+ +

Learn more about voices.
+Learn more about models.

+ +

Request:

+
public struct AudioSpeechQuery: Codable, Equatable {
+    //...
+    public let model: Model // tts-1 or tts-1-hd  
+    public let input: String
+    public let voice: AudioSpeechVoice
+    public let responseFormat: AudioSpeechResponseFormat
+    public let speed: String? // Initializes with Double?
+    //...
+}
+
+ +

Response:

+
/// Audio data for one of the following formats :`mp3`, `opus`, `aac`, `flac`
+public let audioData: Data?
+
+ +

Example:

+
let query = AudioSpeechQuery(model: .tts_1, input: "Hello, world!", voice: .alloy, responseFormat: .mp3, speed: 1.0)
+
+openAI.audioCreateSpeech(query: query) { result in
+    // Handle response here
+}
+//or
+let result = try await openAI.audioCreateSpeech(query: query)
+
+ +

OpenAI Create Speech – Documentation

+

Audio Transcriptions

+ +

Transcribes audio into the input language.

+ +

Request

+
public struct AudioTranscriptionQuery: Codable, Equatable {
+
+    public let file: Data
+    public let fileName: String
+    public let model: Model
+
+    public let prompt: String?
+    public let temperature: Double?
+    public let language: String?
+}
+
+ +

Response

+
public struct AudioTranscriptionResult: Codable, Equatable {
+
+    public let text: String
+}
+
+ +

Example

+
let data = Data(contentsOfURL:...)
+let query = AudioTranscriptionQuery(file: data, fileName: "audio.m4a", model: .whisper_1)        
+
+openAI.audioTranscriptions(query: query) { result in
+    //Handle result here
+}
+//or
+let result = try await openAI.audioTranscriptions(query: query)
+
+

Audio Translations

+ +

Translates audio into English.

+ +

Request

+
public struct AudioTranslationQuery: Codable, Equatable {
+
+    public let file: Data
+    public let fileName: String
+    public let model: Model
+
+    public let prompt: String?
+    public let temperature: Double?
+}    
+
+ +

Response

+
public struct AudioTranslationResult: Codable, Equatable {
+
+    public let text: String
+}
+
+ +

Example

+
let data = Data(contentsOfURL:...)
+let query = AudioTranslationQuery(file: data, fileName: "audio.m4a", model: .whisper_1)  
+
+openAI.audioTranslations(query: query) { result in
+    //Handle result here
+}
+//or
+let result = try await openAI.audioTranslations(query: query)
+
+ +

Review Audio Documentation for more info.

+

Edits

+ +

Creates a new edit for the provided input, instruction, and parameters.

+ +

Request

+
struct EditsQuery: Codable {
+    /// ID of the model to use.
+    public let model: Model
+    /// Input text to get embeddings for.
+    public let input: String?
+    /// The instruction that tells the model how to edit the prompt.
+    public let instruction: String
+    /// The number of images to generate. Must be between 1 and 10.
+    public let n: Int?
+    /// What sampling temperature to use. Higher values means the model will take more risks. Try 0.9 for more creative applications, and 0 (argmax sampling) for ones with a well-defined answer.
+    public let temperature: Double?
+    /// An alternative to sampling with temperature, called nucleus sampling, where the model considers the results of the tokens with top_p probability mass. So 0.1 means only the tokens comprising the top 10% probability mass are considered.
+    public let topP: Double?
+}
+
+ +

Response

+
struct EditsResult: Codable, Equatable {
+
+    public struct Choice: Codable, Equatable {
+        public let text: String
+        public let index: Int
+    }
+
+    public struct Usage: Codable, Equatable {
+        public let promptTokens: Int
+        public let completionTokens: Int
+        public let totalTokens: Int
+
+        enum CodingKeys: String, CodingKey {
+            case promptTokens = "prompt_tokens"
+            case completionTokens = "completion_tokens"
+            case totalTokens = "total_tokens"
+        }
+    }
+
+    public let object: String
+    public let created: TimeInterval
+    public let choices: [Choice]
+    public let usage: Usage
+}
+
+ +

Example

+
let query = EditsQuery(model: .gpt4, input: "What day of the wek is it?", instruction: "Fix the spelling mistakes")
+openAI.edits(query: query) { result in
+  //Handle response here
+}
+//or
+let result = try await openAI.edits(query: query)
+
+ +

Review Edits Documentation for more info.

+

Embeddings

+ +

Get a vector representation of a given input that can be easily consumed by machine learning models and algorithms.

+ +

Request

+
struct EmbeddingsQuery: Codable {
+    /// ID of the model to use.
+    public let model: Model
+    /// Input text to get embeddings for
+    public let input: String
+}
+
+ +

Response

+
struct EmbeddingsResult: Codable, Equatable {
+
+    public struct Embedding: Codable, Equatable {
+
+        public let object: String
+        public let embedding: [Double]
+        public let index: Int
+    }
+    public let data: [Embedding]
+    public let usage: Usage
+}
+
+ +

Example

+
let query = EmbeddingsQuery(model: .textSearchBabbageDoc, input: "The food was delicious and the waiter...")
+openAI.embeddings(query: query) { result in
+  //Handle response here
+}
+//or
+let result = try await openAI.embeddings(query: query)
+
+
(lldb) po result
+▿ EmbeddingsResult
+  ▿ data : 1 element
+    ▿ 0 : Embedding
+      - object : "embedding"
+      ▿ embedding : 2048 elements
+        - 0 : 0.0010535449
+        - 1 : 0.024234328
+        - 2 : -0.0084999
+        - 3 : 0.008647452
+    .......
+        - 2044 : 0.017536353
+        - 2045 : -0.005897616
+        - 2046 : -0.026559394
+        - 2047 : -0.016633155
+      - index : 0
+
+(lldb)
+
+ +

Review Embeddings Documentation for more info.

+

Models

+ +

Models are represented as a typealias typealias Model = String.

+
public extension Model {
+    static let gpt4_turbo_preview = "gpt-4-turbo-preview"
+    static let gpt4_vision_preview = "gpt-4-vision-preview"
+    static let gpt4_0125_preview = "gpt-4-0125-preview"
+    static let gpt4_1106_preview = "gpt-4-1106-preview"
+    static let gpt4 = "gpt-4"
+    static let gpt4_0613 = "gpt-4-0613"
+    static let gpt4_0314 = "gpt-4-0314"
+    static let gpt4_32k = "gpt-4-32k"
+    static let gpt4_32k_0613 = "gpt-4-32k-0613"
+    static let gpt4_32k_0314 = "gpt-4-32k-0314"
+
+    static let gpt3_5Turbo = "gpt-3.5-turbo"
+    static let gpt3_5Turbo_0125 = "gpt-3.5-turbo-0125"
+    static let gpt3_5Turbo_1106 = "gpt-3.5-turbo-1106"
+    static let gpt3_5Turbo_0613 = "gpt-3.5-turbo-0613"
+    static let gpt3_5Turbo_0301 = "gpt-3.5-turbo-0301"
+    static let gpt3_5Turbo_16k = "gpt-3.5-turbo-16k"
+    static let gpt3_5Turbo_16k_0613 = "gpt-3.5-turbo-16k-0613"
+
+    static let textDavinci_003 = "text-davinci-003"
+    static let textDavinci_002 = "text-davinci-002"
+    static let textCurie = "text-curie-001"
+    static let textBabbage = "text-babbage-001"
+    static let textAda = "text-ada-001"
+
+    static let textDavinci_001 = "text-davinci-001"
+    static let codeDavinciEdit_001 = "code-davinci-edit-001"
+
+    static let tts_1 = "tts-1"
+    static let tts_1_hd = "tts-1-hd"
+
+    static let whisper_1 = "whisper-1"
+
+    static let dall_e_2 = "dall-e-2"
+    static let dall_e_3 = "dall-e-3"
+
+    static let davinci = "davinci"
+    static let curie = "curie"
+    static let babbage = "babbage"
+    static let ada = "ada"
+
+    static let textEmbeddingAda = "text-embedding-ada-002"
+    static let textSearchAda = "text-search-ada-doc-001"
+    static let textSearchBabbageDoc = "text-search-babbage-doc-001"
+    static let textSearchBabbageQuery001 = "text-search-babbage-query-001"
+    static let textEmbedding3 = "text-embedding-3-small"
+    static let textEmbedding3Large = "text-embedding-3-large"
+
+    static let textModerationStable = "text-moderation-stable"
+    static let textModerationLatest = "text-moderation-latest"
+    static let moderation = "text-moderation-007"
+}
+
+ +

GPT-4 models are supported.

+ +

As an example: To use the gpt-4-turbo-preview model, pass .gpt4_turbo_preview as the parameter to the ChatQuery init.

+
let query = ChatQuery(model: .gpt4_turbo_preview, messages: [
+    .init(role: .system, content: "You are Librarian-GPT. You know everything about the books."),
+    .init(role: .user, content: "Who wrote Harry Potter?")
+])
+let result = try await openAI.chats(query: query)
+XCTAssertFalse(result.choices.isEmpty)
+
+ +

You can also pass a custom string if you need to use a model that is not represented above.

+

List Models

+ +

Lists the currently available models.

+ +

Response

+
public struct ModelsResult: Codable, Equatable {
+
+    public let data: [ModelResult]
+    public let object: String
+}
+
+
+ +

Example

+
openAI.models() { result in
+  //Handle result here
+}
+//or
+let result = try await openAI.models()
+
+

Retrieve Model

+ +

Retrieves a model instance, providing ownership information.

+ +

Request

+
public struct ModelQuery: Codable, Equatable {
+
+    public let model: Model
+}    
+
+ +

Response

+
public struct ModelResult: Codable, Equatable {
+
+    public let id: Model
+    public let object: String
+    public let ownedBy: String
+}
+
+ +

Example

+
let query = ModelQuery(model: .gpt4)
+openAI.model(query: query) { result in
+  //Handle result here
+}
+//or
+let result = try await openAI.model(query: query)
+
+ +

Review Models Documentation for more info.

+

Moderations

+ +

Given an input text, outputs whether the model classifies it as violating OpenAI’s content policy.

+ +

Request

+
public struct ModerationsQuery: Codable {
+
+    public let input: String
+    public let model: Model?
+}    
+
+ +

Response

+
public struct ModerationsResult: Codable, Equatable {
+
+    public let id: String
+    public let model: Model
+    public let results: [CategoryResult]
+}
+
+ +

Example

+
let query = ModerationsQuery(input: "I want to kill them.")
+openAI.moderations(query: query) { result in
+  //Handle result here
+}
+//or
+let result = try await openAI.moderations(query: query)
+
+ +

Review Moderations Documentation for more info.

+

Utilities

+ +

The component comes with several handy utility functions for working with vectors.

+
/// Stateless helpers for comparing embedding vectors.
public struct Vector {

    /// Returns the cosine similarity between two vectors.
    ///
    /// - Parameters:
    ///     - a: The first vector
    ///     - b: The second vector
    public static func cosineSimilarity(a: [Double], b: [Double]) -> Double {
        return dot(a, b) / (mag(a) * mag(b))
    }

    /// Returns the difference between two vectors. Cosine distance is defined as `1 - cosineSimilarity(a, b)`
    ///
    /// Declared `static` for consistency with `cosineSimilarity(a:b:)` — the
    /// struct holds no state, so there is nothing for an instance method to use.
    ///
    /// - Parameters:
    ///     - a: The first vector
    ///     - b: The second vector
    public static func cosineDifference(a: [Double], b: [Double]) -> Double {
        return 1 - cosineSimilarity(a: a, b: b)
    }
}
+
+ +

Example

+
let vector1 = [0.213123, 0.3214124, 0.421412, 0.3214521251, 0.412412, 0.3214124, 0.1414124, 0.3214521251, 0.213123, 0.3214124, 0.1414124, 0.4214214, 0.213123, 0.3214124, 0.1414124, 0.3214521251, 0.213123, 0.3214124, 0.1414124, 0.3214521251]
+let vector2 = [0.213123, 0.3214124, 0.1414124, 0.3214521251, 0.213123, 0.3214124, 0.1414124, 0.3214521251, 0.213123, 0.511515, 0.1414124, 0.3214521251, 0.213123, 0.3214124, 0.1414124, 0.3214521251, 0.213123, 0.3214124, 0.1414124, 0.3213213]
+let similarity = Vector.cosineSimilarity(a: vector1, b: vector2)
+print(similarity) //0.9510201910206734
+
+ +
+

In data analysis, cosine similarity is a measure of similarity between two sequences of numbers.

+
+ +

Screenshot 2022-12-19 at 6 00 33 PM

+ +

Read more about Cosine Similarity here.

+

Combine Extensions

+ +

The library contains built-in Combine extensions.

+
func completions(query: CompletionsQuery) -> AnyPublisher<CompletionsResult, Error>
+func images(query: ImagesQuery) -> AnyPublisher<ImagesResult, Error>
+func embeddings(query: EmbeddingsQuery) -> AnyPublisher<EmbeddingsResult, Error>
+func chats(query: ChatQuery) -> AnyPublisher<ChatResult, Error>
+func edits(query: EditsQuery) -> AnyPublisher<EditsResult, Error>
+func model(query: ModelQuery) -> AnyPublisher<ModelResult, Error>
+func models() -> AnyPublisher<ModelsResult, Error>
+func moderations(query: ModerationsQuery) -> AnyPublisher<ModerationsResult, Error>
+func audioTranscriptions(query: AudioTranscriptionQuery) -> AnyPublisher<AudioTranscriptionResult, Error>
+func audioTranslations(query: AudioTranslationQuery) -> AnyPublisher<AudioTranslationResult, Error>
+
+

Example Project

+ +

You can find an example iOS application in the Demo folder.

+ +

mockuuups-iphone-13-pro-mockup-perspective-right

+

Contribution Guidelines

+ +

Make your Pull Requests clear and obvious to anyone viewing them.
+Set main as your target branch.

+

Use Conventional Commits principles in naming PRs and branches:

+ +
    +
  • Feat: ... for new features and new functionality implementations.
  • +
  • Bug: ... for bug fixes.
  • +
  • Fix: ... for minor issues fixing, like typos or inaccuracies in code.
  • +
  • Chore: ... for boring stuff like code polishing, refactoring, deprecation fixing etc.
  • +
+ +

PR naming example: Feat: Add Threads API handling or Bug: Fix message result duplication

+ +

Branch naming example: feat/add-threads-API-handling or bug/fix-message-result-duplication

+

Write description to pull requests in following format:

+ +
    +
  • What
  • +
+ +

+ +
    +
  • Why
  • +
+ +

+ +
    +
  • Affected Areas
  • +
+ +

+ +
    +
  • More Info
  • +
+ +

+ +

We’ll appreciate it if you include tests with your code where needed and possible. ❤️

+ + + +

License

+
MIT License
+
+Copyright (c) 2023 MacPaw Inc.
+
+Permission is hereby granted, free of charge, to any person obtaining a copy
+of this software and associated documentation files (the "Software"), to deal
+in the Software without restriction, including without limitation the rights
+to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
+copies of the Software, and to permit persons to whom the Software is
+furnished to do so, subject to the following conditions:
+
+The above copyright notice and this permission notice shall be included in all
+copies or substantial portions of the Software.
+
+THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
+IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
+FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
+AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
+LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
+OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
+SOFTWARE.
+
+ +
+
+ +
+
+ + diff --git a/docs/docsets/.docset/Contents/Resources/Documents/js/jazzy.js b/docs/docsets/.docset/Contents/Resources/Documents/js/jazzy.js new file mode 100755 index 00000000..1ac86992 --- /dev/null +++ b/docs/docsets/.docset/Contents/Resources/Documents/js/jazzy.js @@ -0,0 +1,74 @@ +// Jazzy - https://github.com/realm/jazzy +// Copyright Realm Inc. +// SPDX-License-Identifier: MIT + +window.jazzy = {'docset': false} +if (typeof window.dash != 'undefined') { + document.documentElement.className += ' dash' + window.jazzy.docset = true +} +if (navigator.userAgent.match(/xcode/i)) { + document.documentElement.className += ' xcode' + window.jazzy.docset = true +} + +function toggleItem($link, $content) { + var animationDuration = 300; + $link.toggleClass('token-open'); + $content.slideToggle(animationDuration); +} + +function itemLinkToContent($link) { + return $link.parent().parent().next(); +} + +// On doc load + hash-change, open any targeted item +function openCurrentItemIfClosed() { + if (window.jazzy.docset) { + return; + } + var $link = $(`a[name="${location.hash.substring(1)}"]`).nextAll('.token'); + $content = itemLinkToContent($link); + if ($content.is(':hidden')) { + toggleItem($link, $content); + } +} + +$(openCurrentItemIfClosed); +$(window).on('hashchange', openCurrentItemIfClosed); + +// On item link ('token') click, toggle its discussion +$('.token').on('click', function(event) { + if (window.jazzy.docset) { + return; + } + var $link = $(this); + toggleItem($link, itemLinkToContent($link)); + + // Keeps the document from jumping to the hash. 
+ var href = $link.attr('href'); + if (history.pushState) { + history.pushState({}, '', href); + } else { + location.hash = href; + } + event.preventDefault(); +}); + +// Clicks on links to the current, closed, item need to open the item +$("a:not('.token')").on('click', function() { + if (location == this.href) { + openCurrentItemIfClosed(); + } +}); + +// KaTeX rendering +if ("katex" in window) { + $($('.math').each( (_, element) => { + katex.render(element.textContent, element, { + displayMode: $(element).hasClass('m-block'), + throwOnError: false, + trust: true + }); + })) +} diff --git a/docs/docsets/.docset/Contents/Resources/Documents/js/jazzy.search.js b/docs/docsets/.docset/Contents/Resources/Documents/js/jazzy.search.js new file mode 100644 index 00000000..359cdbb8 --- /dev/null +++ b/docs/docsets/.docset/Contents/Resources/Documents/js/jazzy.search.js @@ -0,0 +1,74 @@ +// Jazzy - https://github.com/realm/jazzy +// Copyright Realm Inc. +// SPDX-License-Identifier: MIT + +$(function(){ + var $typeahead = $('[data-typeahead]'); + var $form = $typeahead.parents('form'); + var searchURL = $form.attr('action'); + + function displayTemplate(result) { + return result.name; + } + + function suggestionTemplate(result) { + var t = '
'; + t += '' + result.name + ''; + if (result.parent_name) { + t += '' + result.parent_name + ''; + } + t += '
'; + return t; + } + + $typeahead.one('focus', function() { + $form.addClass('loading'); + + $.getJSON(searchURL).then(function(searchData) { + const searchIndex = lunr(function() { + this.ref('url'); + this.field('name'); + this.field('abstract'); + for (const [url, doc] of Object.entries(searchData)) { + this.add({url: url, name: doc.name, abstract: doc.abstract}); + } + }); + + $typeahead.typeahead( + { + highlight: true, + minLength: 3, + autoselect: true + }, + { + limit: 10, + display: displayTemplate, + templates: { suggestion: suggestionTemplate }, + source: function(query, sync) { + const lcSearch = query.toLowerCase(); + const results = searchIndex.query(function(q) { + q.term(lcSearch, { boost: 100 }); + q.term(lcSearch, { + boost: 10, + wildcard: lunr.Query.wildcard.TRAILING + }); + }).map(function(result) { + var doc = searchData[result.ref]; + doc.url = result.ref; + return doc; + }); + sync(results); + } + } + ); + $form.removeClass('loading'); + $typeahead.trigger('focus'); + }); + }); + + var baseURL = searchURL.slice(0, -"search.json".length); + + $typeahead.on('typeahead:select', function(e, result) { + window.location = baseURL + result.url; + }); +}); diff --git a/docs/docsets/.docset/Contents/Resources/Documents/js/jquery.min.js b/docs/docsets/.docset/Contents/Resources/Documents/js/jquery.min.js new file mode 100644 index 00000000..7f37b5d9 --- /dev/null +++ b/docs/docsets/.docset/Contents/Resources/Documents/js/jquery.min.js @@ -0,0 +1,2 @@ +/*! 
jQuery v3.7.1 | (c) OpenJS Foundation and other contributors | jquery.org/license */ +!function(e,t){"use strict";"object"==typeof module&&"object"==typeof module.exports?module.exports=e.document?t(e,!0):function(e){if(!e.document)throw new Error("jQuery requires a window with a document");return t(e)}:t(e)}("undefined"!=typeof window?window:this,function(ie,e){"use strict";var oe=[],r=Object.getPrototypeOf,ae=oe.slice,g=oe.flat?function(e){return oe.flat.call(e)}:function(e){return oe.concat.apply([],e)},s=oe.push,se=oe.indexOf,n={},i=n.toString,ue=n.hasOwnProperty,o=ue.toString,a=o.call(Object),le={},v=function(e){return"function"==typeof e&&"number"!=typeof e.nodeType&&"function"!=typeof e.item},y=function(e){return null!=e&&e===e.window},C=ie.document,u={type:!0,src:!0,nonce:!0,noModule:!0};function m(e,t,n){var r,i,o=(n=n||C).createElement("script");if(o.text=e,t)for(r in u)(i=t[r]||t.getAttribute&&t.getAttribute(r))&&o.setAttribute(r,i);n.head.appendChild(o).parentNode.removeChild(o)}function x(e){return null==e?e+"":"object"==typeof e||"function"==typeof e?n[i.call(e)]||"object":typeof e}var t="3.7.1",l=/HTML$/i,ce=function(e,t){return new ce.fn.init(e,t)};function c(e){var t=!!e&&"length"in e&&e.length,n=x(e);return!v(e)&&!y(e)&&("array"===n||0===t||"number"==typeof t&&0+~]|"+ge+")"+ge+"*"),x=new RegExp(ge+"|>"),j=new RegExp(g),A=new RegExp("^"+t+"$"),D={ID:new RegExp("^#("+t+")"),CLASS:new RegExp("^\\.("+t+")"),TAG:new RegExp("^("+t+"|[*])"),ATTR:new RegExp("^"+p),PSEUDO:new RegExp("^"+g),CHILD:new RegExp("^:(only|first|last|nth|nth-last)-(child|of-type)(?:\\("+ge+"*(even|odd|(([+-]|)(\\d*)n|)"+ge+"*(?:([+-]|)"+ge+"*(\\d+)|))"+ge+"*\\)|)","i"),bool:new RegExp("^(?:"+f+")$","i"),needsContext:new RegExp("^"+ge+"*[>+~]|:(even|odd|eq|gt|lt|nth|first|last)(?:\\("+ge+"*((?:-\\d)?\\d*)"+ge+"*\\)|)(?=[^-]|$)","i")},N=/^(?:input|select|textarea|button)$/i,q=/^h\d$/i,L=/^(?:#([\w-]+)|(\w+)|\.([\w-]+))$/,H=/[+~]/,O=new 
RegExp("\\\\[\\da-fA-F]{1,6}"+ge+"?|\\\\([^\\r\\n\\f])","g"),P=function(e,t){var n="0x"+e.slice(1)-65536;return t||(n<0?String.fromCharCode(n+65536):String.fromCharCode(n>>10|55296,1023&n|56320))},M=function(){V()},R=J(function(e){return!0===e.disabled&&fe(e,"fieldset")},{dir:"parentNode",next:"legend"});try{k.apply(oe=ae.call(ye.childNodes),ye.childNodes),oe[ye.childNodes.length].nodeType}catch(e){k={apply:function(e,t){me.apply(e,ae.call(t))},call:function(e){me.apply(e,ae.call(arguments,1))}}}function I(t,e,n,r){var i,o,a,s,u,l,c,f=e&&e.ownerDocument,p=e?e.nodeType:9;if(n=n||[],"string"!=typeof t||!t||1!==p&&9!==p&&11!==p)return n;if(!r&&(V(e),e=e||T,C)){if(11!==p&&(u=L.exec(t)))if(i=u[1]){if(9===p){if(!(a=e.getElementById(i)))return n;if(a.id===i)return k.call(n,a),n}else if(f&&(a=f.getElementById(i))&&I.contains(e,a)&&a.id===i)return k.call(n,a),n}else{if(u[2])return k.apply(n,e.getElementsByTagName(t)),n;if((i=u[3])&&e.getElementsByClassName)return k.apply(n,e.getElementsByClassName(i)),n}if(!(h[t+" "]||d&&d.test(t))){if(c=t,f=e,1===p&&(x.test(t)||m.test(t))){(f=H.test(t)&&U(e.parentNode)||e)==e&&le.scope||((s=e.getAttribute("id"))?s=ce.escapeSelector(s):e.setAttribute("id",s=S)),o=(l=Y(t)).length;while(o--)l[o]=(s?"#"+s:":scope")+" "+Q(l[o]);c=l.join(",")}try{return k.apply(n,f.querySelectorAll(c)),n}catch(e){h(t,!0)}finally{s===S&&e.removeAttribute("id")}}}return re(t.replace(ve,"$1"),e,n,r)}function W(){var r=[];return function e(t,n){return r.push(t+" ")>b.cacheLength&&delete e[r.shift()],e[t+" "]=n}}function F(e){return e[S]=!0,e}function $(e){var t=T.createElement("fieldset");try{return!!e(t)}catch(e){return!1}finally{t.parentNode&&t.parentNode.removeChild(t),t=null}}function B(t){return function(e){return fe(e,"input")&&e.type===t}}function _(t){return function(e){return(fe(e,"input")||fe(e,"button"))&&e.type===t}}function z(t){return function(e){return"form"in e?e.parentNode&&!1===e.disabled?"label"in e?"label"in 
e.parentNode?e.parentNode.disabled===t:e.disabled===t:e.isDisabled===t||e.isDisabled!==!t&&R(e)===t:e.disabled===t:"label"in e&&e.disabled===t}}function X(a){return F(function(o){return o=+o,F(function(e,t){var n,r=a([],e.length,o),i=r.length;while(i--)e[n=r[i]]&&(e[n]=!(t[n]=e[n]))})})}function U(e){return e&&"undefined"!=typeof e.getElementsByTagName&&e}function V(e){var t,n=e?e.ownerDocument||e:ye;return n!=T&&9===n.nodeType&&n.documentElement&&(r=(T=n).documentElement,C=!ce.isXMLDoc(T),i=r.matches||r.webkitMatchesSelector||r.msMatchesSelector,r.msMatchesSelector&&ye!=T&&(t=T.defaultView)&&t.top!==t&&t.addEventListener("unload",M),le.getById=$(function(e){return r.appendChild(e).id=ce.expando,!T.getElementsByName||!T.getElementsByName(ce.expando).length}),le.disconnectedMatch=$(function(e){return i.call(e,"*")}),le.scope=$(function(){return T.querySelectorAll(":scope")}),le.cssHas=$(function(){try{return T.querySelector(":has(*,:jqfake)"),!1}catch(e){return!0}}),le.getById?(b.filter.ID=function(e){var t=e.replace(O,P);return function(e){return e.getAttribute("id")===t}},b.find.ID=function(e,t){if("undefined"!=typeof t.getElementById&&C){var n=t.getElementById(e);return n?[n]:[]}}):(b.filter.ID=function(e){var n=e.replace(O,P);return function(e){var t="undefined"!=typeof e.getAttributeNode&&e.getAttributeNode("id");return t&&t.value===n}},b.find.ID=function(e,t){if("undefined"!=typeof t.getElementById&&C){var n,r,i,o=t.getElementById(e);if(o){if((n=o.getAttributeNode("id"))&&n.value===e)return[o];i=t.getElementsByName(e),r=0;while(o=i[r++])if((n=o.getAttributeNode("id"))&&n.value===e)return[o]}return[]}}),b.find.TAG=function(e,t){return"undefined"!=typeof t.getElementsByTagName?t.getElementsByTagName(e):t.querySelectorAll(e)},b.find.CLASS=function(e,t){if("undefined"!=typeof t.getElementsByClassName&&C)return t.getElementsByClassName(e)},d=[],$(function(e){var 
t;r.appendChild(e).innerHTML="",e.querySelectorAll("[selected]").length||d.push("\\["+ge+"*(?:value|"+f+")"),e.querySelectorAll("[id~="+S+"-]").length||d.push("~="),e.querySelectorAll("a#"+S+"+*").length||d.push(".#.+[+~]"),e.querySelectorAll(":checked").length||d.push(":checked"),(t=T.createElement("input")).setAttribute("type","hidden"),e.appendChild(t).setAttribute("name","D"),r.appendChild(e).disabled=!0,2!==e.querySelectorAll(":disabled").length&&d.push(":enabled",":disabled"),(t=T.createElement("input")).setAttribute("name",""),e.appendChild(t),e.querySelectorAll("[name='']").length||d.push("\\["+ge+"*name"+ge+"*="+ge+"*(?:''|\"\")")}),le.cssHas||d.push(":has"),d=d.length&&new RegExp(d.join("|")),l=function(e,t){if(e===t)return a=!0,0;var n=!e.compareDocumentPosition-!t.compareDocumentPosition;return n||(1&(n=(e.ownerDocument||e)==(t.ownerDocument||t)?e.compareDocumentPosition(t):1)||!le.sortDetached&&t.compareDocumentPosition(e)===n?e===T||e.ownerDocument==ye&&I.contains(ye,e)?-1:t===T||t.ownerDocument==ye&&I.contains(ye,t)?1:o?se.call(o,e)-se.call(o,t):0:4&n?-1:1)}),T}for(e in I.matches=function(e,t){return I(e,null,null,t)},I.matchesSelector=function(e,t){if(V(e),C&&!h[t+" "]&&(!d||!d.test(t)))try{var n=i.call(e,t);if(n||le.disconnectedMatch||e.document&&11!==e.document.nodeType)return n}catch(e){h(t,!0)}return 0":{dir:"parentNode",first:!0}," ":{dir:"parentNode"},"+":{dir:"previousSibling",first:!0},"~":{dir:"previousSibling"}},preFilter:{ATTR:function(e){return e[1]=e[1].replace(O,P),e[3]=(e[3]||e[4]||e[5]||"").replace(O,P),"~="===e[2]&&(e[3]=" "+e[3]+" "),e.slice(0,4)},CHILD:function(e){return e[1]=e[1].toLowerCase(),"nth"===e[1].slice(0,3)?(e[3]||I.error(e[0]),e[4]=+(e[4]?e[5]+(e[6]||1):2*("even"===e[3]||"odd"===e[3])),e[5]=+(e[7]+e[8]||"odd"===e[3])):e[3]&&I.error(e[0]),e},PSEUDO:function(e){var t,n=!e[6]&&e[2];return 
D.CHILD.test(e[0])?null:(e[3]?e[2]=e[4]||e[5]||"":n&&j.test(n)&&(t=Y(n,!0))&&(t=n.indexOf(")",n.length-t)-n.length)&&(e[0]=e[0].slice(0,t),e[2]=n.slice(0,t)),e.slice(0,3))}},filter:{TAG:function(e){var t=e.replace(O,P).toLowerCase();return"*"===e?function(){return!0}:function(e){return fe(e,t)}},CLASS:function(e){var t=s[e+" "];return t||(t=new RegExp("(^|"+ge+")"+e+"("+ge+"|$)"))&&s(e,function(e){return t.test("string"==typeof e.className&&e.className||"undefined"!=typeof e.getAttribute&&e.getAttribute("class")||"")})},ATTR:function(n,r,i){return function(e){var t=I.attr(e,n);return null==t?"!="===r:!r||(t+="","="===r?t===i:"!="===r?t!==i:"^="===r?i&&0===t.indexOf(i):"*="===r?i&&-1:\x20\t\r\n\f]*)[\x20\t\r\n\f]*\/?>(?:<\/\1>|)$/i;function T(e,n,r){return v(n)?ce.grep(e,function(e,t){return!!n.call(e,t,e)!==r}):n.nodeType?ce.grep(e,function(e){return e===n!==r}):"string"!=typeof n?ce.grep(e,function(e){return-1)[^>]*|#([\w-]+))$/;(ce.fn.init=function(e,t,n){var r,i;if(!e)return this;if(n=n||k,"string"==typeof e){if(!(r="<"===e[0]&&">"===e[e.length-1]&&3<=e.length?[null,e,null]:S.exec(e))||!r[1]&&t)return!t||t.jquery?(t||n).find(e):this.constructor(t).find(e);if(r[1]){if(t=t instanceof ce?t[0]:t,ce.merge(this,ce.parseHTML(r[1],t&&t.nodeType?t.ownerDocument||t:C,!0)),w.test(r[1])&&ce.isPlainObject(t))for(r in t)v(this[r])?this[r](t[r]):this.attr(r,t[r]);return this}return(i=C.getElementById(r[2]))&&(this[0]=i,this.length=1),this}return e.nodeType?(this[0]=e,this.length=1,this):v(e)?void 0!==n.ready?n.ready(e):e(ce):ce.makeArray(e,this)}).prototype=ce.fn,k=ce(C);var E=/^(?:parents|prev(?:Until|All))/,j={children:!0,contents:!0,next:!0,prev:!0};function A(e,t){while((e=e[t])&&1!==e.nodeType);return e}ce.fn.extend({has:function(e){var t=ce(e,this),n=t.length;return this.filter(function(){for(var 
e=0;e\x20\t\r\n\f]*)/i,Ce=/^$|^module$|\/(?:java|ecma)script/i;xe=C.createDocumentFragment().appendChild(C.createElement("div")),(be=C.createElement("input")).setAttribute("type","radio"),be.setAttribute("checked","checked"),be.setAttribute("name","t"),xe.appendChild(be),le.checkClone=xe.cloneNode(!0).cloneNode(!0).lastChild.checked,xe.innerHTML="",le.noCloneChecked=!!xe.cloneNode(!0).lastChild.defaultValue,xe.innerHTML="",le.option=!!xe.lastChild;var ke={thead:[1,"","
"],col:[2,"","
"],tr:[2,"","
"],td:[3,"","
"],_default:[0,"",""]};function Se(e,t){var n;return n="undefined"!=typeof e.getElementsByTagName?e.getElementsByTagName(t||"*"):"undefined"!=typeof e.querySelectorAll?e.querySelectorAll(t||"*"):[],void 0===t||t&&fe(e,t)?ce.merge([e],n):n}function Ee(e,t){for(var n=0,r=e.length;n",""]);var je=/<|&#?\w+;/;function Ae(e,t,n,r,i){for(var o,a,s,u,l,c,f=t.createDocumentFragment(),p=[],d=0,h=e.length;d\s*$/g;function Re(e,t){return fe(e,"table")&&fe(11!==t.nodeType?t:t.firstChild,"tr")&&ce(e).children("tbody")[0]||e}function Ie(e){return e.type=(null!==e.getAttribute("type"))+"/"+e.type,e}function We(e){return"true/"===(e.type||"").slice(0,5)?e.type=e.type.slice(5):e.removeAttribute("type"),e}function Fe(e,t){var n,r,i,o,a,s;if(1===t.nodeType){if(_.hasData(e)&&(s=_.get(e).events))for(i in _.remove(t,"handle events"),s)for(n=0,r=s[i].length;n").attr(n.scriptAttrs||{}).prop({charset:n.scriptCharset,src:n.url}).on("load error",i=function(e){r.remove(),i=null,e&&t("error"===e.type?404:200,e.type)}),C.head.appendChild(r[0])},abort:function(){i&&i()}}});var Jt,Kt=[],Zt=/(=)\?(?=&|$)|\?\?/;ce.ajaxSetup({jsonp:"callback",jsonpCallback:function(){var e=Kt.pop()||ce.expando+"_"+jt.guid++;return this[e]=!0,e}}),ce.ajaxPrefilter("json jsonp",function(e,t,n){var r,i,o,a=!1!==e.jsonp&&(Zt.test(e.url)?"url":"string"==typeof e.data&&0===(e.contentType||"").indexOf("application/x-www-form-urlencoded")&&Zt.test(e.data)&&"data");if(a||"jsonp"===e.dataTypes[0])return r=e.jsonpCallback=v(e.jsonpCallback)?e.jsonpCallback():e.jsonpCallback,a?e[a]=e[a].replace(Zt,"$1"+r):!1!==e.jsonp&&(e.url+=(At.test(e.url)?"&":"?")+e.jsonp+"="+r),e.converters["script json"]=function(){return o||ce.error(r+" was not called"),o[0]},e.dataTypes[0]="json",i=ie[r],ie[r]=function(){o=arguments},n.always(function(){void 0===i?ce(ie).removeProp(r):ie[r]=i,e[r]&&(e.jsonpCallback=t.jsonpCallback,Kt.push(r)),o&&v(i)&&i(o[0]),o=i=void 
0}),"script"}),le.createHTMLDocument=((Jt=C.implementation.createHTMLDocument("").body).innerHTML="
",2===Jt.childNodes.length),ce.parseHTML=function(e,t,n){return"string"!=typeof e?[]:("boolean"==typeof t&&(n=t,t=!1),t||(le.createHTMLDocument?((r=(t=C.implementation.createHTMLDocument("")).createElement("base")).href=C.location.href,t.head.appendChild(r)):t=C),o=!n&&[],(i=w.exec(e))?[t.createElement(i[1])]:(i=Ae([e],t,o),o&&o.length&&ce(o).remove(),ce.merge([],i.childNodes)));var r,i,o},ce.fn.load=function(e,t,n){var r,i,o,a=this,s=e.indexOf(" ");return-1").append(ce.parseHTML(e)).find(r):e)}).always(n&&function(e,t){a.each(function(){n.apply(this,o||[e.responseText,t,e])})}),this},ce.expr.pseudos.animated=function(t){return ce.grep(ce.timers,function(e){return t===e.elem}).length},ce.offset={setOffset:function(e,t,n){var r,i,o,a,s,u,l=ce.css(e,"position"),c=ce(e),f={};"static"===l&&(e.style.position="relative"),s=c.offset(),o=ce.css(e,"top"),u=ce.css(e,"left"),("absolute"===l||"fixed"===l)&&-1<(o+u).indexOf("auto")?(a=(r=c.position()).top,i=r.left):(a=parseFloat(o)||0,i=parseFloat(u)||0),v(t)&&(t=t.call(e,n,ce.extend({},s))),null!=t.top&&(f.top=t.top-s.top+a),null!=t.left&&(f.left=t.left-s.left+i),"using"in t?t.using.call(e,f):c.css(f)}},ce.fn.extend({offset:function(t){if(arguments.length)return void 0===t?this:this.each(function(e){ce.offset.setOffset(this,t,e)});var e,n,r=this[0];return r?r.getClientRects().length?(e=r.getBoundingClientRect(),n=r.ownerDocument.defaultView,{top:e.top+n.pageYOffset,left:e.left+n.pageXOffset}):{top:0,left:0}:void 0},position:function(){if(this[0]){var 
e,t,n,r=this[0],i={top:0,left:0};if("fixed"===ce.css(r,"position"))t=r.getBoundingClientRect();else{t=this.offset(),n=r.ownerDocument,e=r.offsetParent||n.documentElement;while(e&&(e===n.body||e===n.documentElement)&&"static"===ce.css(e,"position"))e=e.parentNode;e&&e!==r&&1===e.nodeType&&((i=ce(e).offset()).top+=ce.css(e,"borderTopWidth",!0),i.left+=ce.css(e,"borderLeftWidth",!0))}return{top:t.top-i.top-ce.css(r,"marginTop",!0),left:t.left-i.left-ce.css(r,"marginLeft",!0)}}},offsetParent:function(){return this.map(function(){var e=this.offsetParent;while(e&&"static"===ce.css(e,"position"))e=e.offsetParent;return e||J})}}),ce.each({scrollLeft:"pageXOffset",scrollTop:"pageYOffset"},function(t,i){var o="pageYOffset"===i;ce.fn[t]=function(e){return M(this,function(e,t,n){var r;if(y(e)?r=e:9===e.nodeType&&(r=e.defaultView),void 0===n)return r?r[i]:e[t];r?r.scrollTo(o?r.pageXOffset:n,o?n:r.pageYOffset):e[t]=n},t,e,arguments.length)}}),ce.each(["top","left"],function(e,n){ce.cssHooks[n]=Ye(le.pixelPosition,function(e,t){if(t)return t=Ge(e,n),_e.test(t)?ce(e).position()[n]+"px":t})}),ce.each({Height:"height",Width:"width"},function(a,s){ce.each({padding:"inner"+a,content:s,"":"outer"+a},function(r,o){ce.fn[o]=function(e,t){var n=arguments.length&&(r||"boolean"!=typeof e),i=r||(!0===e||!0===t?"margin":"border");return M(this,function(e,t,n){var r;return y(e)?0===o.indexOf("outer")?e["inner"+a]:e.document.documentElement["client"+a]:9===e.nodeType?(r=e.documentElement,Math.max(e.body["scroll"+a],r["scroll"+a],e.body["offset"+a],r["offset"+a],r["client"+a])):void 0===n?ce.css(e,t,i):ce.style(e,t,n,i)},s,n?e:void 0,n)}})}),ce.each(["ajaxStart","ajaxStop","ajaxComplete","ajaxError","ajaxSuccess","ajaxSend"],function(e,t){ce.fn[t]=function(e){return this.on(t,e)}}),ce.fn.extend({bind:function(e,t,n){return this.on(e,null,t,n)},unbind:function(e,t){return this.off(e,null,t)},delegate:function(e,t,n,r){return this.on(t,e,n,r)},undelegate:function(e,t,n){return 
1===arguments.length?this.off(e,"**"):this.off(t,e||"**",n)},hover:function(e,t){return this.on("mouseenter",e).on("mouseleave",t||e)}}),ce.each("blur focus focusin focusout resize scroll click dblclick mousedown mouseup mousemove mouseover mouseout mouseenter mouseleave change select submit keydown keypress keyup contextmenu".split(" "),function(e,n){ce.fn[n]=function(e,t){return 00){var c=e.utils.clone(r)||{};c.position=[a,l],c.index=s.length,s.push(new e.Token(i.slice(a,o),c))}a=o+1}}return s},e.tokenizer.separator=/[\s\-]+/,e.Pipeline=function(){this._stack=[]},e.Pipeline.registeredFunctions=Object.create(null),e.Pipeline.registerFunction=function(t,r){r in this.registeredFunctions&&e.utils.warn("Overwriting existing registered function: "+r),t.label=r,e.Pipeline.registeredFunctions[t.label]=t},e.Pipeline.warnIfFunctionNotRegistered=function(t){var r=t.label&&t.label in this.registeredFunctions;r||e.utils.warn("Function is not registered with pipeline. This may cause problems when serialising the index.\n",t)},e.Pipeline.load=function(t){var r=new e.Pipeline;return t.forEach(function(t){var i=e.Pipeline.registeredFunctions[t];if(!i)throw new Error("Cannot load unregistered function: "+t);r.add(i)}),r},e.Pipeline.prototype.add=function(){var t=Array.prototype.slice.call(arguments);t.forEach(function(t){e.Pipeline.warnIfFunctionNotRegistered(t),this._stack.push(t)},this)},e.Pipeline.prototype.after=function(t,r){e.Pipeline.warnIfFunctionNotRegistered(r);var i=this._stack.indexOf(t);if(i==-1)throw new Error("Cannot find existingFn");i+=1,this._stack.splice(i,0,r)},e.Pipeline.prototype.before=function(t,r){e.Pipeline.warnIfFunctionNotRegistered(r);var i=this._stack.indexOf(t);if(i==-1)throw new Error("Cannot find existingFn");this._stack.splice(i,0,r)},e.Pipeline.prototype.remove=function(e){var t=this._stack.indexOf(e);t!=-1&&this._stack.splice(t,1)},e.Pipeline.prototype.run=function(e){for(var 
t=this._stack.length,r=0;r1&&(se&&(r=n),s!=e);)i=r-t,n=t+Math.floor(i/2),s=this.elements[2*n];return s==e?2*n:s>e?2*n:sa?l+=2:o==a&&(t+=r[u+1]*i[l+1],u+=2,l+=2);return t},e.Vector.prototype.similarity=function(e){return this.dot(e)/this.magnitude()||0},e.Vector.prototype.toArray=function(){for(var e=new Array(this.elements.length/2),t=1,r=0;t0){var o,a=s.str.charAt(0);a in s.node.edges?o=s.node.edges[a]:(o=new e.TokenSet,s.node.edges[a]=o),1==s.str.length&&(o["final"]=!0),n.push({node:o,editsRemaining:s.editsRemaining,str:s.str.slice(1)})}if(0!=s.editsRemaining){if("*"in s.node.edges)var u=s.node.edges["*"];else{var u=new e.TokenSet;s.node.edges["*"]=u}if(0==s.str.length&&(u["final"]=!0),n.push({node:u,editsRemaining:s.editsRemaining-1,str:s.str}),s.str.length>1&&n.push({node:s.node,editsRemaining:s.editsRemaining-1,str:s.str.slice(1)}),1==s.str.length&&(s.node["final"]=!0),s.str.length>=1){if("*"in s.node.edges)var l=s.node.edges["*"];else{var l=new e.TokenSet;s.node.edges["*"]=l}1==s.str.length&&(l["final"]=!0),n.push({node:l,editsRemaining:s.editsRemaining-1,str:s.str.slice(1)})}if(s.str.length>1){var c,h=s.str.charAt(0),d=s.str.charAt(1);d in s.node.edges?c=s.node.edges[d]:(c=new e.TokenSet,s.node.edges[d]=c),1==s.str.length&&(c["final"]=!0),n.push({node:c,editsRemaining:s.editsRemaining-1,str:h+s.str.slice(2)})}}}return i},e.TokenSet.fromString=function(t){for(var r=new e.TokenSet,i=r,n=0,s=t.length;n=e;t--){var r=this.uncheckedNodes[t],i=r.child.toString();i in this.minimizedNodes?r.parent.edges[r["char"]]=this.minimizedNodes[i]:(r.child._str=i,this.minimizedNodes[i]=r.child),this.uncheckedNodes.pop()}},e.Index=function(e){this.invertedIndex=e.invertedIndex,this.fieldVectors=e.fieldVectors,this.tokenSet=e.tokenSet,this.fields=e.fields,this.pipeline=e.pipeline},e.Index.prototype.search=function(t){return this.query(function(r){var i=new e.QueryParser(t,r);i.parse()})},e.Index.prototype.query=function(t){for(var r=new 
e.Query(this.fields),i=Object.create(null),n=Object.create(null),s=Object.create(null),o=Object.create(null),a=Object.create(null),u=0;u1?this._b=1:this._b=e},e.Builder.prototype.k1=function(e){this._k1=e},e.Builder.prototype.add=function(t,r){var i=t[this._ref],n=Object.keys(this._fields);this._documents[i]=r||{},this.documentCount+=1;for(var s=0;s=this.length)return e.QueryLexer.EOS;var t=this.str.charAt(this.pos);return this.pos+=1,t},e.QueryLexer.prototype.width=function(){return this.pos-this.start},e.QueryLexer.prototype.ignore=function(){this.start==this.pos&&(this.pos+=1),this.start=this.pos},e.QueryLexer.prototype.backup=function(){this.pos-=1},e.QueryLexer.prototype.acceptDigitRun=function(){var t,r;do t=this.next(),r=t.charCodeAt(0);while(r>47&&r<58);t!=e.QueryLexer.EOS&&this.backup()},e.QueryLexer.prototype.more=function(){return this.pos1&&(t.backup(),t.emit(e.QueryLexer.TERM)),t.ignore(),t.more())return e.QueryLexer.lexText},e.QueryLexer.lexEditDistance=function(t){return t.ignore(),t.acceptDigitRun(),t.emit(e.QueryLexer.EDIT_DISTANCE),e.QueryLexer.lexText},e.QueryLexer.lexBoost=function(t){return t.ignore(),t.acceptDigitRun(),t.emit(e.QueryLexer.BOOST),e.QueryLexer.lexText},e.QueryLexer.lexEOS=function(t){t.width()>0&&t.emit(e.QueryLexer.TERM)},e.QueryLexer.termSeparator=e.tokenizer.separator,e.QueryLexer.lexText=function(t){for(;;){var r=t.next();if(r==e.QueryLexer.EOS)return e.QueryLexer.lexEOS;if(92!=r.charCodeAt(0)){if(":"==r)return e.QueryLexer.lexField;if("~"==r)return t.backup(),t.width()>0&&t.emit(e.QueryLexer.TERM),e.QueryLexer.lexEditDistance;if("^"==r)return t.backup(),t.width()>0&&t.emit(e.QueryLexer.TERM),e.QueryLexer.lexBoost;if("+"==r&&1===t.width())return t.emit(e.QueryLexer.PRESENCE),e.QueryLexer.lexText;if("-"==r&&1===t.width())return t.emit(e.QueryLexer.PRESENCE),e.QueryLexer.lexText;if(r.match(e.QueryLexer.termSeparator))return e.QueryLexer.lexTerm}else t.escapeCharacter()}},e.QueryParser=function(t,r){this.lexer=new 
e.QueryLexer(t),this.query=r,this.currentClause={},this.lexemeIdx=0},e.QueryParser.prototype.parse=function(){this.lexer.run(),this.lexemes=this.lexer.lexemes;for(var t=e.QueryParser.parseClause;t;)t=t(this);return this.query},e.QueryParser.prototype.peekLexeme=function(){return this.lexemes[this.lexemeIdx]},e.QueryParser.prototype.consumeLexeme=function(){var e=this.peekLexeme();return this.lexemeIdx+=1,e},e.QueryParser.prototype.nextClause=function(){var e=this.currentClause;this.query.clause(e),this.currentClause={}},e.QueryParser.parseClause=function(t){var r=t.peekLexeme();if(void 0!=r)switch(r.type){case e.QueryLexer.PRESENCE:return e.QueryParser.parsePresence;case e.QueryLexer.FIELD:return e.QueryParser.parseField;case e.QueryLexer.TERM:return e.QueryParser.parseTerm;default:var i="expected either a field or a term, found "+r.type;throw r.str.length>=1&&(i+=" with value '"+r.str+"'"),new e.QueryParseError(i,r.start,r.end)}},e.QueryParser.parsePresence=function(t){var r=t.consumeLexeme();if(void 0!=r){switch(r.str){case"-":t.currentClause.presence=e.Query.presence.PROHIBITED;break;case"+":t.currentClause.presence=e.Query.presence.REQUIRED;break;default:var i="unrecognised presence operator'"+r.str+"'";throw new e.QueryParseError(i,r.start,r.end)}var n=t.peekLexeme();if(void 0==n){var i="expecting term or field, found nothing";throw new e.QueryParseError(i,r.start,r.end)}switch(n.type){case e.QueryLexer.FIELD:return e.QueryParser.parseField;case e.QueryLexer.TERM:return e.QueryParser.parseTerm;default:var i="expecting term or field, found '"+n.type+"'";throw new e.QueryParseError(i,n.start,n.end)}}},e.QueryParser.parseField=function(t){var r=t.consumeLexeme();if(void 0!=r){if(t.query.allFields.indexOf(r.str)==-1){var i=t.query.allFields.map(function(e){return"'"+e+"'"}).join(", "),n="unrecognised field '"+r.str+"', possible fields: "+i;throw new e.QueryParseError(n,r.start,r.end)}t.currentClause.fields=[r.str];var s=t.peekLexeme();if(void 0==s){var 
n="expecting term, found nothing";throw new e.QueryParseError(n,r.start,r.end)}switch(s.type){case e.QueryLexer.TERM:return e.QueryParser.parseTerm;default:var n="expecting term, found '"+s.type+"'";throw new e.QueryParseError(n,s.start,s.end)}}},e.QueryParser.parseTerm=function(t){var r=t.consumeLexeme();if(void 0!=r){t.currentClause.term=r.str.toLowerCase(),r.str.indexOf("*")!=-1&&(t.currentClause.usePipeline=!1);var i=t.peekLexeme();if(void 0==i)return void t.nextClause();switch(i.type){case e.QueryLexer.TERM:return t.nextClause(),e.QueryParser.parseTerm;case e.QueryLexer.FIELD:return t.nextClause(),e.QueryParser.parseField;case e.QueryLexer.EDIT_DISTANCE:return e.QueryParser.parseEditDistance;case e.QueryLexer.BOOST:return e.QueryParser.parseBoost;case e.QueryLexer.PRESENCE:return t.nextClause(),e.QueryParser.parsePresence;default:var n="Unexpected lexeme type '"+i.type+"'";throw new e.QueryParseError(n,i.start,i.end)}}},e.QueryParser.parseEditDistance=function(t){var r=t.consumeLexeme();if(void 0!=r){var i=parseInt(r.str,10);if(isNaN(i)){var n="edit distance must be numeric";throw new e.QueryParseError(n,r.start,r.end)}t.currentClause.editDistance=i;var s=t.peekLexeme();if(void 0==s)return void t.nextClause();switch(s.type){case e.QueryLexer.TERM:return t.nextClause(),e.QueryParser.parseTerm;case e.QueryLexer.FIELD:return t.nextClause(),e.QueryParser.parseField;case e.QueryLexer.EDIT_DISTANCE:return e.QueryParser.parseEditDistance;case e.QueryLexer.BOOST:return e.QueryParser.parseBoost;case e.QueryLexer.PRESENCE:return t.nextClause(),e.QueryParser.parsePresence;default:var n="Unexpected lexeme type '"+s.type+"'";throw new e.QueryParseError(n,s.start,s.end)}}},e.QueryParser.parseBoost=function(t){var r=t.consumeLexeme();if(void 0!=r){var i=parseInt(r.str,10);if(isNaN(i)){var n="boost must be numeric";throw new e.QueryParseError(n,r.start,r.end)}t.currentClause.boost=i;var s=t.peekLexeme();if(void 0==s)return void t.nextClause();switch(s.type){case 
e.QueryLexer.TERM:return t.nextClause(),e.QueryParser.parseTerm;case e.QueryLexer.FIELD:return t.nextClause(),e.QueryParser.parseField;case e.QueryLexer.EDIT_DISTANCE:return e.QueryParser.parseEditDistance;case e.QueryLexer.BOOST:return e.QueryParser.parseBoost;case e.QueryLexer.PRESENCE:return t.nextClause(),e.QueryParser.parsePresence;default:var n="Unexpected lexeme type '"+s.type+"'";throw new e.QueryParseError(n,s.start,s.end)}}},function(e,t){"function"==typeof define&&define.amd?define(t):"object"==typeof exports?module.exports=t():e.lunr=t()}(this,function(){return e})}(); diff --git a/docs/docsets/.docset/Contents/Resources/Documents/js/typeahead.jquery.js b/docs/docsets/.docset/Contents/Resources/Documents/js/typeahead.jquery.js new file mode 100644 index 00000000..bcb734be --- /dev/null +++ b/docs/docsets/.docset/Contents/Resources/Documents/js/typeahead.jquery.js @@ -0,0 +1,1695 @@ +/*! + * typeahead.js 1.3.3 + * https://github.com/corejavascript/typeahead.js + * Copyright 2013-2024 Twitter, Inc. and other contributors; Licensed MIT + */ + + +(function(root, factory) { + if (typeof define === "function" && define.amd) { + define([ "jquery" ], function(a0) { + return factory(a0); + }); + } else if (typeof module === "object" && module.exports) { + module.exports = factory(require("jquery")); + } else { + factory(root["jQuery"]); + } +})(this, function($) { + var _ = function() { + "use strict"; + return { + isMsie: function() { + return /(msie|trident)/i.test(navigator.userAgent) ? 
navigator.userAgent.match(/(msie |rv:)(\d+(.\d+)?)/i)[2] : false; + }, + isBlankString: function(str) { + return !str || /^\s*$/.test(str); + }, + escapeRegExChars: function(str) { + return str.replace(/[\-\[\]\/\{\}\(\)\*\+\?\.\\\^\$\|]/g, "\\$&"); + }, + isString: function(obj) { + return typeof obj === "string"; + }, + isNumber: function(obj) { + return typeof obj === "number"; + }, + isArray: $.isArray, + isFunction: $.isFunction, + isObject: $.isPlainObject, + isUndefined: function(obj) { + return typeof obj === "undefined"; + }, + isElement: function(obj) { + return !!(obj && obj.nodeType === 1); + }, + isJQuery: function(obj) { + return obj instanceof $; + }, + toStr: function toStr(s) { + return _.isUndefined(s) || s === null ? "" : s + ""; + }, + bind: $.proxy, + each: function(collection, cb) { + $.each(collection, reverseArgs); + function reverseArgs(index, value) { + return cb(value, index); + } + }, + map: $.map, + filter: $.grep, + every: function(obj, test) { + var result = true; + if (!obj) { + return result; + } + $.each(obj, function(key, val) { + if (!(result = test.call(null, val, key, obj))) { + return false; + } + }); + return !!result; + }, + some: function(obj, test) { + var result = false; + if (!obj) { + return result; + } + $.each(obj, function(key, val) { + if (result = test.call(null, val, key, obj)) { + return false; + } + }); + return !!result; + }, + mixin: $.extend, + identity: function(x) { + return x; + }, + clone: function(obj) { + return $.extend(true, {}, obj); + }, + getIdGenerator: function() { + var counter = 0; + return function() { + return counter++; + }; + }, + templatify: function templatify(obj) { + return $.isFunction(obj) ? 
obj : template; + function template() { + return String(obj); + } + }, + defer: function(fn) { + setTimeout(fn, 0); + }, + debounce: function(func, wait, immediate) { + var timeout, result; + return function() { + var context = this, args = arguments, later, callNow; + later = function() { + timeout = null; + if (!immediate) { + result = func.apply(context, args); + } + }; + callNow = immediate && !timeout; + clearTimeout(timeout); + timeout = setTimeout(later, wait); + if (callNow) { + result = func.apply(context, args); + } + return result; + }; + }, + throttle: function(func, wait) { + var context, args, timeout, result, previous, later; + previous = 0; + later = function() { + previous = new Date(); + timeout = null; + result = func.apply(context, args); + }; + return function() { + var now = new Date(), remaining = wait - (now - previous); + context = this; + args = arguments; + if (remaining <= 0) { + clearTimeout(timeout); + timeout = null; + previous = now; + result = func.apply(context, args); + } else if (!timeout) { + timeout = setTimeout(later, remaining); + } + return result; + }; + }, + stringify: function(val) { + return _.isString(val) ? val : JSON.stringify(val); + }, + guid: function() { + function _p8(s) { + var p = (Math.random().toString(16) + "000000000").substr(2, 8); + return s ? 
"-" + p.substr(0, 4) + "-" + p.substr(4, 4) : p; + } + return "tt-" + _p8() + _p8(true) + _p8(true) + _p8(); + }, + noop: function() {} + }; + }(); + var WWW = function() { + "use strict"; + var defaultClassNames = { + wrapper: "twitter-typeahead", + input: "tt-input", + hint: "tt-hint", + menu: "tt-menu", + dataset: "tt-dataset", + suggestion: "tt-suggestion", + selectable: "tt-selectable", + empty: "tt-empty", + open: "tt-open", + cursor: "tt-cursor", + highlight: "tt-highlight" + }; + return build; + function build(o) { + var www, classes; + classes = _.mixin({}, defaultClassNames, o); + www = { + css: buildCss(), + classes: classes, + html: buildHtml(classes), + selectors: buildSelectors(classes) + }; + return { + css: www.css, + html: www.html, + classes: www.classes, + selectors: www.selectors, + mixin: function(o) { + _.mixin(o, www); + } + }; + } + function buildHtml(c) { + return { + wrapper: '', + menu: '
' + }; + } + function buildSelectors(classes) { + var selectors = {}; + _.each(classes, function(v, k) { + selectors[k] = "." + v; + }); + return selectors; + } + function buildCss() { + var css = { + wrapper: { + position: "relative", + display: "inline-block" + }, + hint: { + position: "absolute", + top: "0", + left: "0", + borderColor: "transparent", + boxShadow: "none", + opacity: "1" + }, + input: { + position: "relative", + verticalAlign: "top", + backgroundColor: "transparent" + }, + inputWithNoHint: { + position: "relative", + verticalAlign: "top" + }, + menu: { + position: "absolute", + top: "100%", + left: "0", + zIndex: "100", + display: "none" + }, + ltr: { + left: "0", + right: "auto" + }, + rtl: { + left: "auto", + right: " 0" + } + }; + if (_.isMsie()) { + _.mixin(css.input, { + backgroundImage: "url(data:image/gif;base64,R0lGODlhAQABAIAAAAAAAP///yH5BAEAAAAALAAAAAABAAEAAAIBRAA7)" + }); + } + return css; + } + }(); + var EventBus = function() { + "use strict"; + var namespace, deprecationMap; + namespace = "typeahead:"; + deprecationMap = { + render: "rendered", + cursorchange: "cursorchanged", + select: "selected", + autocomplete: "autocompleted" + }; + function EventBus(o) { + if (!o || !o.el) { + $.error("EventBus initialized without el"); + } + this.$el = $(o.el); + } + _.mixin(EventBus.prototype, { + _trigger: function(type, args) { + var $e = $.Event(namespace + type); + this.$el.trigger.call(this.$el, $e, args || []); + return $e; + }, + before: function(type) { + var args, $e; + args = [].slice.call(arguments, 1); + $e = this._trigger("before" + type, args); + return $e.isDefaultPrevented(); + }, + trigger: function(type) { + var deprecatedType; + this._trigger(type, [].slice.call(arguments, 1)); + if (deprecatedType = deprecationMap[type]) { + this._trigger(deprecatedType, [].slice.call(arguments, 1)); + } + } + }); + return EventBus; + }(); + var EventEmitter = function() { + "use strict"; + var splitter = /\s+/, nextTick = getNextTick(); + 
return { + onSync: onSync, + onAsync: onAsync, + off: off, + trigger: trigger + }; + function on(method, types, cb, context) { + var type; + if (!cb) { + return this; + } + types = types.split(splitter); + cb = context ? bindContext(cb, context) : cb; + this._callbacks = this._callbacks || {}; + while (type = types.shift()) { + this._callbacks[type] = this._callbacks[type] || { + sync: [], + async: [] + }; + this._callbacks[type][method].push(cb); + } + return this; + } + function onAsync(types, cb, context) { + return on.call(this, "async", types, cb, context); + } + function onSync(types, cb, context) { + return on.call(this, "sync", types, cb, context); + } + function off(types) { + var type; + if (!this._callbacks) { + return this; + } + types = types.split(splitter); + while (type = types.shift()) { + delete this._callbacks[type]; + } + return this; + } + function trigger(types) { + var type, callbacks, args, syncFlush, asyncFlush; + if (!this._callbacks) { + return this; + } + types = types.split(splitter); + args = [].slice.call(arguments, 1); + while ((type = types.shift()) && (callbacks = this._callbacks[type])) { + syncFlush = getFlush(callbacks.sync, this, [ type ].concat(args)); + asyncFlush = getFlush(callbacks.async, this, [ type ].concat(args)); + syncFlush() && nextTick(asyncFlush); + } + return this; + } + function getFlush(callbacks, context, args) { + return flush; + function flush() { + var cancelled; + for (var i = 0, len = callbacks.length; !cancelled && i < len; i += 1) { + cancelled = callbacks[i].apply(context, args) === false; + } + return !cancelled; + } + } + function getNextTick() { + var nextTickFn; + if (window.setImmediate) { + nextTickFn = function nextTickSetImmediate(fn) { + setImmediate(function() { + fn(); + }); + }; + } else { + nextTickFn = function nextTickSetTimeout(fn) { + setTimeout(function() { + fn(); + }, 0); + }; + } + return nextTickFn; + } + function bindContext(fn, context) { + return fn.bind ? 
fn.bind(context) : function() { + fn.apply(context, [].slice.call(arguments, 0)); + }; + } + }(); + var highlight = function(doc) { + "use strict"; + var defaults = { + node: null, + pattern: null, + tagName: "strong", + className: null, + wordsOnly: false, + caseSensitive: false, + diacriticInsensitive: false + }; + var accented = { + A: "[AaªÀ-Åà-åĀ-ąǍǎȀ-ȃȦȧᴬᵃḀḁẚẠ-ảₐ℀℁℻⒜Ⓐⓐ㍱-㍴㎀-㎄㎈㎉㎩-㎯㏂㏊㏟㏿Aa]", + B: "[BbᴮᵇḂ-ḇℬ⒝Ⓑⓑ㍴㎅-㎇㏃㏈㏔㏝Bb]", + C: "[CcÇçĆ-čᶜ℀ℂ℃℅℆ℭⅭⅽ⒞Ⓒⓒ㍶㎈㎉㎝㎠㎤㏄-㏇Cc]", + D: "[DdĎďDŽ-džDZ-dzᴰᵈḊ-ḓⅅⅆⅮⅾ⒟Ⓓⓓ㋏㍲㍷-㍹㎗㎭-㎯㏅㏈Dd]", + E: "[EeÈ-Ëè-ëĒ-ěȄ-ȇȨȩᴱᵉḘ-ḛẸ-ẽₑ℡ℯℰⅇ⒠Ⓔⓔ㉐㋍㋎Ee]", + F: "[FfᶠḞḟ℉ℱ℻⒡Ⓕⓕ㎊-㎌㎙ff-fflFf]", + G: "[GgĜ-ģǦǧǴǵᴳᵍḠḡℊ⒢Ⓖⓖ㋌㋍㎇㎍-㎏㎓㎬㏆㏉㏒㏿Gg]", + H: "[HhĤĥȞȟʰᴴḢ-ḫẖℋ-ℎ⒣Ⓗⓗ㋌㍱㎐-㎔㏊㏋㏗Hh]", + I: "[IiÌ-Ïì-ïĨ-İIJijǏǐȈ-ȋᴵᵢḬḭỈ-ịⁱℐℑℹⅈⅠ-ⅣⅥ-ⅨⅪⅫⅰ-ⅳⅵ-ⅸⅺⅻ⒤Ⓘⓘ㍺㏌㏕fiffiIi]", + J: "[JjIJ-ĵLJ-njǰʲᴶⅉ⒥ⒿⓙⱼJj]", + K: "[KkĶķǨǩᴷᵏḰ-ḵK⒦Ⓚⓚ㎄㎅㎉㎏㎑㎘㎞㎢㎦㎪㎸㎾㏀㏆㏍-㏏Kk]", + L: "[LlĹ-ŀLJ-ljˡᴸḶḷḺ-ḽℒℓ℡Ⅼⅼ⒧Ⓛⓛ㋏㎈㎉㏐-㏓㏕㏖㏿flfflLl]", + M: "[MmᴹᵐḾ-ṃ℠™ℳⅯⅿ⒨Ⓜⓜ㍷-㍹㎃㎆㎎㎒㎖㎙-㎨㎫㎳㎷㎹㎽㎿㏁㏂㏎㏐㏔-㏖㏘㏙㏞㏟Mm]", + N: "[NnÑñŃ-ʼnNJ-njǸǹᴺṄ-ṋⁿℕ№⒩Ⓝⓝ㎁㎋㎚㎱㎵㎻㏌㏑Nn]", + O: "[OoºÒ-Öò-öŌ-őƠơǑǒǪǫȌ-ȏȮȯᴼᵒỌ-ỏₒ℅№ℴ⒪Ⓞⓞ㍵㏇㏒㏖Oo]", + P: "[PpᴾᵖṔ-ṗℙ⒫Ⓟⓟ㉐㍱㍶㎀㎊㎩-㎬㎰㎴㎺㏋㏗-㏚Pp]", + Q: "[Qqℚ⒬Ⓠⓠ㏃Qq]", + R: "[RrŔ-řȐ-ȓʳᴿᵣṘ-ṛṞṟ₨ℛ-ℝ⒭Ⓡⓡ㋍㍴㎭-㎯㏚㏛Rr]", + S: "[SsŚ-šſȘșˢṠ-ṣ₨℁℠⒮Ⓢⓢ㎧㎨㎮-㎳㏛㏜stSs]", + T: "[TtŢ-ťȚțᵀᵗṪ-ṱẗ℡™⒯Ⓣⓣ㉐㋏㎔㏏ſtstTt]", + U: "[UuÙ-Üù-üŨ-ųƯưǓǔȔ-ȗᵁᵘᵤṲ-ṷỤ-ủ℆⒰Ⓤⓤ㍳㍺Uu]", + V: "[VvᵛᵥṼ-ṿⅣ-Ⅷⅳ-ⅷ⒱Ⓥⓥⱽ㋎㍵㎴-㎹㏜㏞Vv]", + W: "[WwŴŵʷᵂẀ-ẉẘ⒲Ⓦⓦ㎺-㎿㏝Ww]", + X: "[XxˣẊ-ẍₓ℻Ⅸ-Ⅻⅸ-ⅻ⒳Ⓧⓧ㏓Xx]", + Y: "[YyÝýÿŶ-ŸȲȳʸẎẏẙỲ-ỹ⒴Ⓨⓨ㏉Yy]", + Z: "[ZzŹ-žDZ-dzᶻẐ-ẕℤℨ⒵Ⓩⓩ㎐-㎔Zz]" + }; + return function hightlight(o) { + var regex; + o = _.mixin({}, defaults, o); + if (!o.node || !o.pattern) { + return; + } + o.pattern = _.isArray(o.pattern) ? 
o.pattern : [ o.pattern ]; + regex = getRegex(o.pattern, o.caseSensitive, o.wordsOnly, o.diacriticInsensitive); + traverse(o.node, hightlightTextNode); + function hightlightTextNode(textNode) { + var match, patternNode, wrapperNode; + if (match = regex.exec(textNode.data)) { + wrapperNode = doc.createElement(o.tagName); + o.className && (wrapperNode.className = o.className); + patternNode = textNode.splitText(match.index); + patternNode.splitText(match[0].length); + wrapperNode.appendChild(patternNode.cloneNode(true)); + textNode.parentNode.replaceChild(wrapperNode, patternNode); + } + return !!match; + } + function traverse(el, hightlightTextNode) { + var childNode, TEXT_NODE_TYPE = 3; + for (var i = 0; i < el.childNodes.length; i++) { + childNode = el.childNodes[i]; + if (childNode.nodeType === TEXT_NODE_TYPE) { + i += hightlightTextNode(childNode) ? 1 : 0; + } else { + traverse(childNode, hightlightTextNode); + } + } + } + }; + function accent_replacer(chr) { + return accented[chr.toUpperCase()] || chr; + } + function getRegex(patterns, caseSensitive, wordsOnly, diacriticInsensitive) { + var escapedPatterns = [], regexStr; + for (var i = 0, len = patterns.length; i < len; i++) { + var escapedWord = _.escapeRegExChars(patterns[i]); + if (diacriticInsensitive) { + escapedWord = escapedWord.replace(/\S/g, accent_replacer); + } + escapedPatterns.push(escapedWord); + } + regexStr = wordsOnly ? "\\b(" + escapedPatterns.join("|") + ")\\b" : "(" + escapedPatterns.join("|") + ")"; + return caseSensitive ? 
new RegExp(regexStr) : new RegExp(regexStr, "i"); + } + }(window.document); + var Input = function() { + "use strict"; + var specialKeyCodeMap; + specialKeyCodeMap = { + 9: "tab", + 27: "esc", + 37: "left", + 39: "right", + 13: "enter", + 38: "up", + 40: "down" + }; + function Input(o, www) { + var id; + o = o || {}; + if (!o.input) { + $.error("input is missing"); + } + www.mixin(this); + this.$hint = $(o.hint); + this.$input = $(o.input); + this.$menu = $(o.menu); + id = this.$input.attr("id") || _.guid(); + this.$menu.attr("id", id + "_listbox"); + this.$hint.attr({ + "aria-hidden": true + }); + this.$input.attr({ + "aria-owns": id + "_listbox", + "aria-controls": id + "_listbox", + role: "combobox", + "aria-autocomplete": "list", + "aria-expanded": false + }); + this.query = this.$input.val(); + this.queryWhenFocused = this.hasFocus() ? this.query : null; + this.$overflowHelper = buildOverflowHelper(this.$input); + this._checkLanguageDirection(); + if (this.$hint.length === 0) { + this.setHint = this.getHint = this.clearHint = this.clearHintIfInvalid = _.noop; + } + this.onSync("cursorchange", this._updateDescendent); + } + Input.normalizeQuery = function(str) { + return _.toStr(str).replace(/^\s*/g, "").replace(/\s{2,}/g, " "); + }; + _.mixin(Input.prototype, EventEmitter, { + _onBlur: function onBlur() { + this.resetInputValue(); + this.trigger("blurred"); + }, + _onFocus: function onFocus() { + this.queryWhenFocused = this.query; + this.trigger("focused"); + }, + _onKeydown: function onKeydown($e) { + var keyName = specialKeyCodeMap[$e.which || $e.keyCode]; + this._managePreventDefault(keyName, $e); + if (keyName && this._shouldTrigger(keyName, $e)) { + this.trigger(keyName + "Keyed", $e); + } + }, + _onInput: function onInput() { + this._setQuery(this.getInputValue()); + this.clearHintIfInvalid(); + this._checkLanguageDirection(); + }, + _managePreventDefault: function managePreventDefault(keyName, $e) { + var preventDefault; + switch (keyName) { + case 
"up": + case "down": + preventDefault = !withModifier($e); + break; + + default: + preventDefault = false; + } + preventDefault && $e.preventDefault(); + }, + _shouldTrigger: function shouldTrigger(keyName, $e) { + var trigger; + switch (keyName) { + case "tab": + trigger = !withModifier($e); + break; + + default: + trigger = true; + } + return trigger; + }, + _checkLanguageDirection: function checkLanguageDirection() { + var dir = (this.$input.css("direction") || "ltr").toLowerCase(); + if (this.dir !== dir) { + this.dir = dir; + this.$hint.attr("dir", dir); + this.trigger("langDirChanged", dir); + } + }, + _setQuery: function setQuery(val, silent) { + var areEquivalent, hasDifferentWhitespace; + areEquivalent = areQueriesEquivalent(val, this.query); + hasDifferentWhitespace = areEquivalent ? this.query.length !== val.length : false; + this.query = val; + if (!silent && !areEquivalent) { + this.trigger("queryChanged", this.query); + } else if (!silent && hasDifferentWhitespace) { + this.trigger("whitespaceChanged", this.query); + } + }, + _updateDescendent: function updateDescendent(event, id) { + this.$input.attr("aria-activedescendant", id); + }, + bind: function() { + var that = this, onBlur, onFocus, onKeydown, onInput; + onBlur = _.bind(this._onBlur, this); + onFocus = _.bind(this._onFocus, this); + onKeydown = _.bind(this._onKeydown, this); + onInput = _.bind(this._onInput, this); + this.$input.on("blur.tt", onBlur).on("focus.tt", onFocus).on("keydown.tt", onKeydown); + if (!_.isMsie() || _.isMsie() > 9) { + this.$input.on("input.tt", onInput); + } else { + this.$input.on("keydown.tt keypress.tt cut.tt paste.tt", function($e) { + if (specialKeyCodeMap[$e.which || $e.keyCode]) { + return; + } + _.defer(_.bind(that._onInput, that, $e)); + }); + } + return this; + }, + focus: function focus() { + this.$input.focus(); + }, + blur: function blur() { + this.$input.blur(); + }, + getLangDir: function getLangDir() { + return this.dir; + }, + getQuery: function 
getQuery() { + return this.query || ""; + }, + setQuery: function setQuery(val, silent) { + this.setInputValue(val); + this._setQuery(val, silent); + }, + hasQueryChangedSinceLastFocus: function hasQueryChangedSinceLastFocus() { + return this.query !== this.queryWhenFocused; + }, + getInputValue: function getInputValue() { + return this.$input.val(); + }, + setInputValue: function setInputValue(value) { + this.$input.val(value); + this.clearHintIfInvalid(); + this._checkLanguageDirection(); + }, + resetInputValue: function resetInputValue() { + this.setInputValue(this.query); + }, + getHint: function getHint() { + return this.$hint.val(); + }, + setHint: function setHint(value) { + this.$hint.val(value); + }, + clearHint: function clearHint() { + this.setHint(""); + }, + clearHintIfInvalid: function clearHintIfInvalid() { + var val, hint, valIsPrefixOfHint, isValid; + val = this.getInputValue(); + hint = this.getHint(); + valIsPrefixOfHint = val !== hint && hint.indexOf(val) === 0; + isValid = val !== "" && valIsPrefixOfHint && !this.hasOverflow(); + !isValid && this.clearHint(); + }, + hasFocus: function hasFocus() { + return this.$input.is(":focus"); + }, + hasOverflow: function hasOverflow() { + var constraint = this.$input.width() - 2; + this.$overflowHelper.text(this.getInputValue()); + return this.$overflowHelper.width() >= constraint; + }, + isCursorAtEnd: function() { + var valueLength, selectionStart, range; + valueLength = this.$input.val().length; + selectionStart = this.$input[0].selectionStart; + if (_.isNumber(selectionStart)) { + return selectionStart === valueLength; + } else if (document.selection) { + range = document.selection.createRange(); + range.moveStart("character", -valueLength); + return valueLength === range.text.length; + } + return true; + }, + destroy: function destroy() { + this.$hint.off(".tt"); + this.$input.off(".tt"); + this.$overflowHelper.remove(); + this.$hint = this.$input = this.$overflowHelper = $("
"); + }, + setAriaExpanded: function setAriaExpanded(value) { + this.$input.attr("aria-expanded", value); + } + }); + return Input; + function buildOverflowHelper($input) { + return $('').css({ + position: "absolute", + visibility: "hidden", + whiteSpace: "pre", + fontFamily: $input.css("font-family"), + fontSize: $input.css("font-size"), + fontStyle: $input.css("font-style"), + fontVariant: $input.css("font-variant"), + fontWeight: $input.css("font-weight"), + wordSpacing: $input.css("word-spacing"), + letterSpacing: $input.css("letter-spacing"), + textIndent: $input.css("text-indent"), + textRendering: $input.css("text-rendering"), + textTransform: $input.css("text-transform") + }).insertAfter($input); + } + function areQueriesEquivalent(a, b) { + return Input.normalizeQuery(a) === Input.normalizeQuery(b); + } + function withModifier($e) { + return $e.altKey || $e.ctrlKey || $e.metaKey || $e.shiftKey; + } + }(); + var Dataset = function() { + "use strict"; + var keys, nameGenerator; + keys = { + dataset: "tt-selectable-dataset", + val: "tt-selectable-display", + obj: "tt-selectable-object" + }; + nameGenerator = _.getIdGenerator(); + function Dataset(o, www) { + o = o || {}; + o.templates = o.templates || {}; + o.templates.notFound = o.templates.notFound || o.templates.empty; + if (!o.source) { + $.error("missing source"); + } + if (!o.node) { + $.error("missing node"); + } + if (o.name && !isValidName(o.name)) { + $.error("invalid dataset name: " + o.name); + } + www.mixin(this); + this.highlight = !!o.highlight; + this.name = _.toStr(o.name || nameGenerator()); + this.limit = o.limit || 5; + this.displayFn = getDisplayFn(o.display || o.displayKey); + this.templates = getTemplates(o.templates, this.displayFn); + this.source = o.source.__ttAdapter ? o.source.__ttAdapter() : o.source; + this.async = _.isUndefined(o.async) ? 
this.source.length > 2 : !!o.async; + this._resetLastSuggestion(); + this.$el = $(o.node).attr("role", "presentation").addClass(this.classes.dataset).addClass(this.classes.dataset + "-" + this.name); + } + Dataset.extractData = function extractData(el) { + var $el = $(el); + if ($el.data(keys.obj)) { + return { + dataset: $el.data(keys.dataset) || "", + val: $el.data(keys.val) || "", + obj: $el.data(keys.obj) || null + }; + } + return null; + }; + _.mixin(Dataset.prototype, EventEmitter, { + _overwrite: function overwrite(query, suggestions) { + suggestions = suggestions || []; + if (suggestions.length) { + this._renderSuggestions(query, suggestions); + } else if (this.async && this.templates.pending) { + this._renderPending(query); + } else if (!this.async && this.templates.notFound) { + this._renderNotFound(query); + } else { + this._empty(); + } + this.trigger("rendered", suggestions, false, this.name); + }, + _append: function append(query, suggestions) { + suggestions = suggestions || []; + if (suggestions.length && this.$lastSuggestion.length) { + this._appendSuggestions(query, suggestions); + } else if (suggestions.length) { + this._renderSuggestions(query, suggestions); + } else if (!this.$lastSuggestion.length && this.templates.notFound) { + this._renderNotFound(query); + } + this.trigger("rendered", suggestions, true, this.name); + }, + _renderSuggestions: function renderSuggestions(query, suggestions) { + var $fragment; + $fragment = this._getSuggestionsFragment(query, suggestions); + this.$lastSuggestion = $fragment.children().last(); + this.$el.html($fragment).prepend(this._getHeader(query, suggestions)).append(this._getFooter(query, suggestions)); + }, + _appendSuggestions: function appendSuggestions(query, suggestions) { + var $fragment, $lastSuggestion; + $fragment = this._getSuggestionsFragment(query, suggestions); + $lastSuggestion = $fragment.children().last(); + this.$lastSuggestion.after($fragment); + this.$lastSuggestion = $lastSuggestion; + 
}, + _renderPending: function renderPending(query) { + var template = this.templates.pending; + this._resetLastSuggestion(); + template && this.$el.html(template({ + query: query, + dataset: this.name + })); + }, + _renderNotFound: function renderNotFound(query) { + var template = this.templates.notFound; + this._resetLastSuggestion(); + template && this.$el.html(template({ + query: query, + dataset: this.name + })); + }, + _empty: function empty() { + this.$el.empty(); + this._resetLastSuggestion(); + }, + _getSuggestionsFragment: function getSuggestionsFragment(query, suggestions) { + var that = this, fragment; + fragment = document.createDocumentFragment(); + _.each(suggestions, function getSuggestionNode(suggestion) { + var $el, context; + context = that._injectQuery(query, suggestion); + $el = $(that.templates.suggestion(context)).data(keys.dataset, that.name).data(keys.obj, suggestion).data(keys.val, that.displayFn(suggestion)).addClass(that.classes.suggestion + " " + that.classes.selectable); + fragment.appendChild($el[0]); + }); + this.highlight && highlight({ + className: this.classes.highlight, + node: fragment, + pattern: query + }); + return $(fragment); + }, + _getFooter: function getFooter(query, suggestions) { + return this.templates.footer ? this.templates.footer({ + query: query, + suggestions: suggestions, + dataset: this.name + }) : null; + }, + _getHeader: function getHeader(query, suggestions) { + return this.templates.header ? this.templates.header({ + query: query, + suggestions: suggestions, + dataset: this.name + }) : null; + }, + _resetLastSuggestion: function resetLastSuggestion() { + this.$lastSuggestion = $(); + }, + _injectQuery: function injectQuery(query, obj) { + return _.isObject(obj) ? 
_.mixin({ + _query: query + }, obj) : obj; + }, + update: function update(query) { + var that = this, canceled = false, syncCalled = false, rendered = 0; + this.cancel(); + this.cancel = function cancel() { + canceled = true; + that.cancel = $.noop; + that.async && that.trigger("asyncCanceled", query, that.name); + }; + this.source(query, sync, async); + !syncCalled && sync([]); + function sync(suggestions) { + if (syncCalled) { + return; + } + syncCalled = true; + suggestions = (suggestions || []).slice(0, that.limit); + rendered = suggestions.length; + that._overwrite(query, suggestions); + if (rendered < that.limit && that.async) { + that.trigger("asyncRequested", query, that.name); + } + } + function async(suggestions) { + suggestions = suggestions || []; + if (!canceled && rendered < that.limit) { + that.cancel = $.noop; + var idx = Math.abs(rendered - that.limit); + rendered += idx; + that._append(query, suggestions.slice(0, idx)); + that.async && that.trigger("asyncReceived", query, that.name); + } + } + }, + cancel: $.noop, + clear: function clear() { + this._empty(); + this.cancel(); + this.trigger("cleared"); + }, + isEmpty: function isEmpty() { + return this.$el.is(":empty"); + }, + destroy: function destroy() { + this.$el = $("
"); + } + }); + return Dataset; + function getDisplayFn(display) { + display = display || _.stringify; + return _.isFunction(display) ? display : displayFn; + function displayFn(obj) { + return obj[display]; + } + } + function getTemplates(templates, displayFn) { + return { + notFound: templates.notFound && _.templatify(templates.notFound), + pending: templates.pending && _.templatify(templates.pending), + header: templates.header && _.templatify(templates.header), + footer: templates.footer && _.templatify(templates.footer), + suggestion: templates.suggestion ? userSuggestionTemplate : suggestionTemplate + }; + function userSuggestionTemplate(context) { + var template = templates.suggestion; + return $(template(context)).attr("id", _.guid()); + } + function suggestionTemplate(context) { + return $('
').attr("id", _.guid()).text(displayFn(context)); + } + } + function isValidName(str) { + return /^[_a-zA-Z0-9-]+$/.test(str); + } + }(); + var Menu = function() { + "use strict"; + function Menu(o, www) { + var that = this; + o = o || {}; + if (!o.node) { + $.error("node is required"); + } + www.mixin(this); + this.$node = $(o.node); + this.query = null; + this.datasets = _.map(o.datasets, initializeDataset); + function initializeDataset(oDataset) { + var node = that.$node.find(oDataset.node).first(); + oDataset.node = node.length ? node : $("
").appendTo(that.$node); + return new Dataset(oDataset, www); + } + } + _.mixin(Menu.prototype, EventEmitter, { + _onSelectableClick: function onSelectableClick($e) { + this.trigger("selectableClicked", $($e.currentTarget)); + }, + _onRendered: function onRendered(type, dataset, suggestions, async) { + this.$node.toggleClass(this.classes.empty, this._allDatasetsEmpty()); + this.trigger("datasetRendered", dataset, suggestions, async); + }, + _onCleared: function onCleared() { + this.$node.toggleClass(this.classes.empty, this._allDatasetsEmpty()); + this.trigger("datasetCleared"); + }, + _propagate: function propagate() { + this.trigger.apply(this, arguments); + }, + _allDatasetsEmpty: function allDatasetsEmpty() { + return _.every(this.datasets, _.bind(function isDatasetEmpty(dataset) { + var isEmpty = dataset.isEmpty(); + this.$node.attr("aria-expanded", !isEmpty); + return isEmpty; + }, this)); + }, + _getSelectables: function getSelectables() { + return this.$node.find(this.selectors.selectable); + }, + _removeCursor: function _removeCursor() { + var $selectable = this.getActiveSelectable(); + $selectable && $selectable.removeClass(this.classes.cursor); + }, + _ensureVisible: function ensureVisible($el) { + var elTop, elBottom, nodeScrollTop, nodeHeight; + elTop = $el.position().top; + elBottom = elTop + $el.outerHeight(true); + nodeScrollTop = this.$node.scrollTop(); + nodeHeight = this.$node.height() + parseInt(this.$node.css("paddingTop"), 10) + parseInt(this.$node.css("paddingBottom"), 10); + if (elTop < 0) { + this.$node.scrollTop(nodeScrollTop + elTop); + } else if (nodeHeight < elBottom) { + this.$node.scrollTop(nodeScrollTop + (elBottom - nodeHeight)); + } + }, + bind: function() { + var that = this, onSelectableClick; + onSelectableClick = _.bind(this._onSelectableClick, this); + this.$node.on("click.tt", this.selectors.selectable, onSelectableClick); + this.$node.on("mouseover", this.selectors.selectable, function() { + that.setCursor($(this)); + }); + 
this.$node.on("mouseleave", function() { + that._removeCursor(); + }); + _.each(this.datasets, function(dataset) { + dataset.onSync("asyncRequested", that._propagate, that).onSync("asyncCanceled", that._propagate, that).onSync("asyncReceived", that._propagate, that).onSync("rendered", that._onRendered, that).onSync("cleared", that._onCleared, that); + }); + return this; + }, + isOpen: function isOpen() { + return this.$node.hasClass(this.classes.open); + }, + open: function open() { + this.$node.scrollTop(0); + this.$node.addClass(this.classes.open); + }, + close: function close() { + this.$node.attr("aria-expanded", false); + this.$node.removeClass(this.classes.open); + this._removeCursor(); + }, + setLanguageDirection: function setLanguageDirection(dir) { + this.$node.attr("dir", dir); + }, + selectableRelativeToCursor: function selectableRelativeToCursor(delta) { + var $selectables, $oldCursor, oldIndex, newIndex; + $oldCursor = this.getActiveSelectable(); + $selectables = this._getSelectables(); + oldIndex = $oldCursor ? $selectables.index($oldCursor) : -1; + newIndex = oldIndex + delta; + newIndex = (newIndex + 1) % ($selectables.length + 1) - 1; + newIndex = newIndex < -1 ? $selectables.length - 1 : newIndex; + return newIndex === -1 ? null : $selectables.eq(newIndex); + }, + setCursor: function setCursor($selectable) { + this._removeCursor(); + if ($selectable = $selectable && $selectable.first()) { + $selectable.addClass(this.classes.cursor); + this._ensureVisible($selectable); + } + }, + getSelectableData: function getSelectableData($el) { + return $el && $el.length ? Dataset.extractData($el) : null; + }, + getActiveSelectable: function getActiveSelectable() { + var $selectable = this._getSelectables().filter(this.selectors.cursor).first(); + return $selectable.length ? $selectable : null; + }, + getTopSelectable: function getTopSelectable() { + var $selectable = this._getSelectables().first(); + return $selectable.length ? 
$selectable : null; + }, + update: function update(query) { + var isValidUpdate = query !== this.query; + if (isValidUpdate) { + this.query = query; + _.each(this.datasets, updateDataset); + } + return isValidUpdate; + function updateDataset(dataset) { + dataset.update(query); + } + }, + empty: function empty() { + _.each(this.datasets, clearDataset); + this.query = null; + this.$node.addClass(this.classes.empty); + function clearDataset(dataset) { + dataset.clear(); + } + }, + destroy: function destroy() { + this.$node.off(".tt"); + this.$node = $("
"); + _.each(this.datasets, destroyDataset); + function destroyDataset(dataset) { + dataset.destroy(); + } + } + }); + return Menu; + }(); + var Status = function() { + "use strict"; + function Status(options) { + this.$el = $("", { + role: "status", + "aria-live": "polite" + }).css({ + position: "absolute", + padding: "0", + border: "0", + height: "1px", + width: "1px", + "margin-bottom": "-1px", + "margin-right": "-1px", + overflow: "hidden", + clip: "rect(0 0 0 0)", + "white-space": "nowrap" + }); + options.$input.after(this.$el); + _.each(options.menu.datasets, _.bind(function(dataset) { + if (dataset.onSync) { + dataset.onSync("rendered", _.bind(this.update, this)); + dataset.onSync("cleared", _.bind(this.cleared, this)); + } + }, this)); + } + _.mixin(Status.prototype, { + update: function update(event, suggestions) { + var length = suggestions.length; + var words; + if (length === 1) { + words = { + result: "result", + is: "is" + }; + } else { + words = { + result: "results", + is: "are" + }; + } + this.$el.text(length + " " + words.result + " " + words.is + " available, use up and down arrow keys to navigate."); + }, + cleared: function() { + this.$el.text(""); + } + }); + return Status; + }(); + var DefaultMenu = function() { + "use strict"; + var s = Menu.prototype; + function DefaultMenu() { + Menu.apply(this, [].slice.call(arguments, 0)); + } + _.mixin(DefaultMenu.prototype, Menu.prototype, { + open: function open() { + !this._allDatasetsEmpty() && this._show(); + return s.open.apply(this, [].slice.call(arguments, 0)); + }, + close: function close() { + this._hide(); + return s.close.apply(this, [].slice.call(arguments, 0)); + }, + _onRendered: function onRendered() { + if (this._allDatasetsEmpty()) { + this._hide(); + } else { + this.isOpen() && this._show(); + } + return s._onRendered.apply(this, [].slice.call(arguments, 0)); + }, + _onCleared: function onCleared() { + if (this._allDatasetsEmpty()) { + this._hide(); + } else { + this.isOpen() && 
this._show(); + } + return s._onCleared.apply(this, [].slice.call(arguments, 0)); + }, + setLanguageDirection: function setLanguageDirection(dir) { + this.$node.css(dir === "ltr" ? this.css.ltr : this.css.rtl); + return s.setLanguageDirection.apply(this, [].slice.call(arguments, 0)); + }, + _hide: function hide() { + this.$node.hide(); + }, + _show: function show() { + this.$node.css("display", "block"); + } + }); + return DefaultMenu; + }(); + var Typeahead = function() { + "use strict"; + function Typeahead(o, www) { + var onFocused, onBlurred, onEnterKeyed, onTabKeyed, onEscKeyed, onUpKeyed, onDownKeyed, onLeftKeyed, onRightKeyed, onQueryChanged, onWhitespaceChanged; + o = o || {}; + if (!o.input) { + $.error("missing input"); + } + if (!o.menu) { + $.error("missing menu"); + } + if (!o.eventBus) { + $.error("missing event bus"); + } + www.mixin(this); + this.eventBus = o.eventBus; + this.minLength = _.isNumber(o.minLength) ? o.minLength : 1; + this.input = o.input; + this.menu = o.menu; + this.enabled = true; + this.autoselect = !!o.autoselect; + this.active = false; + this.input.hasFocus() && this.activate(); + this.dir = this.input.getLangDir(); + this._hacks(); + this.menu.bind().onSync("selectableClicked", this._onSelectableClicked, this).onSync("asyncRequested", this._onAsyncRequested, this).onSync("asyncCanceled", this._onAsyncCanceled, this).onSync("asyncReceived", this._onAsyncReceived, this).onSync("datasetRendered", this._onDatasetRendered, this).onSync("datasetCleared", this._onDatasetCleared, this); + onFocused = c(this, "activate", "open", "_onFocused"); + onBlurred = c(this, "deactivate", "_onBlurred"); + onEnterKeyed = c(this, "isActive", "isOpen", "_onEnterKeyed"); + onTabKeyed = c(this, "isActive", "isOpen", "_onTabKeyed"); + onEscKeyed = c(this, "isActive", "_onEscKeyed"); + onUpKeyed = c(this, "isActive", "open", "_onUpKeyed"); + onDownKeyed = c(this, "isActive", "open", "_onDownKeyed"); + onLeftKeyed = c(this, "isActive", "isOpen", 
"_onLeftKeyed"); + onRightKeyed = c(this, "isActive", "isOpen", "_onRightKeyed"); + onQueryChanged = c(this, "_openIfActive", "_onQueryChanged"); + onWhitespaceChanged = c(this, "_openIfActive", "_onWhitespaceChanged"); + this.input.bind().onSync("focused", onFocused, this).onSync("blurred", onBlurred, this).onSync("enterKeyed", onEnterKeyed, this).onSync("tabKeyed", onTabKeyed, this).onSync("escKeyed", onEscKeyed, this).onSync("upKeyed", onUpKeyed, this).onSync("downKeyed", onDownKeyed, this).onSync("leftKeyed", onLeftKeyed, this).onSync("rightKeyed", onRightKeyed, this).onSync("queryChanged", onQueryChanged, this).onSync("whitespaceChanged", onWhitespaceChanged, this).onSync("langDirChanged", this._onLangDirChanged, this); + } + _.mixin(Typeahead.prototype, { + _hacks: function hacks() { + var $input, $menu; + $input = this.input.$input || $("
"); + $menu = this.menu.$node || $("
"); + $input.on("blur.tt", function($e) { + var active, isActive, hasActive; + active = document.activeElement; + isActive = $menu.is(active); + hasActive = $menu.has(active).length > 0; + if (_.isMsie() && (isActive || hasActive)) { + $e.preventDefault(); + $e.stopImmediatePropagation(); + _.defer(function() { + $input.focus(); + }); + } + }); + $menu.on("mousedown.tt", function($e) { + $e.preventDefault(); + }); + }, + _onSelectableClicked: function onSelectableClicked(type, $el) { + this.select($el); + }, + _onDatasetCleared: function onDatasetCleared() { + this._updateHint(); + }, + _onDatasetRendered: function onDatasetRendered(type, suggestions, async, dataset) { + this._updateHint(); + if (this.autoselect) { + var cursorClass = this.selectors.cursor.substr(1); + this.menu.$node.find(this.selectors.suggestion).first().addClass(cursorClass); + } + this.eventBus.trigger("render", suggestions, async, dataset); + }, + _onAsyncRequested: function onAsyncRequested(type, dataset, query) { + this.eventBus.trigger("asyncrequest", query, dataset); + }, + _onAsyncCanceled: function onAsyncCanceled(type, dataset, query) { + this.eventBus.trigger("asynccancel", query, dataset); + }, + _onAsyncReceived: function onAsyncReceived(type, dataset, query) { + this.eventBus.trigger("asyncreceive", query, dataset); + }, + _onFocused: function onFocused() { + this._minLengthMet() && this.menu.update(this.input.getQuery()); + }, + _onBlurred: function onBlurred() { + if (this.input.hasQueryChangedSinceLastFocus()) { + this.eventBus.trigger("change", this.input.getQuery()); + } + }, + _onEnterKeyed: function onEnterKeyed(type, $e) { + var $selectable; + if ($selectable = this.menu.getActiveSelectable()) { + if (this.select($selectable)) { + $e.preventDefault(); + $e.stopPropagation(); + } + } else if (this.autoselect) { + if (this.select(this.menu.getTopSelectable())) { + $e.preventDefault(); + $e.stopPropagation(); + } + } + }, + _onTabKeyed: function onTabKeyed(type, $e) { + var 
$selectable; + if ($selectable = this.menu.getActiveSelectable()) { + this.select($selectable) && $e.preventDefault(); + } else if (this.autoselect) { + if ($selectable = this.menu.getTopSelectable()) { + this.autocomplete($selectable) && $e.preventDefault(); + } + } + }, + _onEscKeyed: function onEscKeyed() { + this.close(); + }, + _onUpKeyed: function onUpKeyed() { + this.moveCursor(-1); + }, + _onDownKeyed: function onDownKeyed() { + this.moveCursor(+1); + }, + _onLeftKeyed: function onLeftKeyed() { + if (this.dir === "rtl" && this.input.isCursorAtEnd()) { + this.autocomplete(this.menu.getActiveSelectable() || this.menu.getTopSelectable()); + } + }, + _onRightKeyed: function onRightKeyed() { + if (this.dir === "ltr" && this.input.isCursorAtEnd()) { + this.autocomplete(this.menu.getActiveSelectable() || this.menu.getTopSelectable()); + } + }, + _onQueryChanged: function onQueryChanged(e, query) { + this._minLengthMet(query) ? this.menu.update(query) : this.menu.empty(); + }, + _onWhitespaceChanged: function onWhitespaceChanged() { + this._updateHint(); + }, + _onLangDirChanged: function onLangDirChanged(e, dir) { + if (this.dir !== dir) { + this.dir = dir; + this.menu.setLanguageDirection(dir); + } + }, + _openIfActive: function openIfActive() { + this.isActive() && this.open(); + }, + _minLengthMet: function minLengthMet(query) { + query = _.isString(query) ? 
query : this.input.getQuery() || ""; + return query.length >= this.minLength; + }, + _updateHint: function updateHint() { + var $selectable, data, val, query, escapedQuery, frontMatchRegEx, match; + $selectable = this.menu.getTopSelectable(); + data = this.menu.getSelectableData($selectable); + val = this.input.getInputValue(); + if (data && !_.isBlankString(val) && !this.input.hasOverflow()) { + query = Input.normalizeQuery(val); + escapedQuery = _.escapeRegExChars(query); + frontMatchRegEx = new RegExp("^(?:" + escapedQuery + ")(.+$)", "i"); + match = frontMatchRegEx.exec(data.val); + match && this.input.setHint(val + match[1]); + } else { + this.input.clearHint(); + } + }, + isEnabled: function isEnabled() { + return this.enabled; + }, + enable: function enable() { + this.enabled = true; + }, + disable: function disable() { + this.enabled = false; + }, + isActive: function isActive() { + return this.active; + }, + activate: function activate() { + if (this.isActive()) { + return true; + } else if (!this.isEnabled() || this.eventBus.before("active")) { + return false; + } else { + this.active = true; + this.eventBus.trigger("active"); + return true; + } + }, + deactivate: function deactivate() { + if (!this.isActive()) { + return true; + } else if (this.eventBus.before("idle")) { + return false; + } else { + this.active = false; + this.close(); + this.eventBus.trigger("idle"); + return true; + } + }, + isOpen: function isOpen() { + return this.menu.isOpen(); + }, + open: function open() { + if (!this.isOpen() && !this.eventBus.before("open")) { + this.input.setAriaExpanded(true); + this.menu.open(); + this._updateHint(); + this.eventBus.trigger("open"); + } + return this.isOpen(); + }, + close: function close() { + if (this.isOpen() && !this.eventBus.before("close")) { + this.input.setAriaExpanded(false); + this.menu.close(); + this.input.clearHint(); + this.input.resetInputValue(); + this.eventBus.trigger("close"); + } + return !this.isOpen(); + }, + setVal: 
function setVal(val) { + this.input.setQuery(_.toStr(val)); + }, + getVal: function getVal() { + return this.input.getQuery(); + }, + select: function select($selectable) { + var data = this.menu.getSelectableData($selectable); + if (data && !this.eventBus.before("select", data.obj, data.dataset)) { + this.input.setQuery(data.val, true); + this.eventBus.trigger("select", data.obj, data.dataset); + this.close(); + return true; + } + return false; + }, + autocomplete: function autocomplete($selectable) { + var query, data, isValid; + query = this.input.getQuery(); + data = this.menu.getSelectableData($selectable); + isValid = data && query !== data.val; + if (isValid && !this.eventBus.before("autocomplete", data.obj, data.dataset)) { + this.input.setQuery(data.val); + this.eventBus.trigger("autocomplete", data.obj, data.dataset); + return true; + } + return false; + }, + moveCursor: function moveCursor(delta) { + var query, $candidate, data, suggestion, datasetName, cancelMove, id; + query = this.input.getQuery(); + $candidate = this.menu.selectableRelativeToCursor(delta); + data = this.menu.getSelectableData($candidate); + suggestion = data ? data.obj : null; + datasetName = data ? data.dataset : null; + id = $candidate ? 
$candidate.attr("id") : null; + this.input.trigger("cursorchange", id); + cancelMove = this._minLengthMet() && this.menu.update(query); + if (!cancelMove && !this.eventBus.before("cursorchange", suggestion, datasetName)) { + this.menu.setCursor($candidate); + if (data) { + if (typeof data.val === "string") { + this.input.setInputValue(data.val); + } + } else { + this.input.resetInputValue(); + this._updateHint(); + } + this.eventBus.trigger("cursorchange", suggestion, datasetName); + return true; + } + return false; + }, + destroy: function destroy() { + this.input.destroy(); + this.menu.destroy(); + } + }); + return Typeahead; + function c(ctx) { + var methods = [].slice.call(arguments, 1); + return function() { + var args = [].slice.call(arguments); + _.each(methods, function(method) { + return ctx[method].apply(ctx, args); + }); + }; + } + }(); + (function() { + "use strict"; + var old, keys, methods; + old = $.fn.typeahead; + keys = { + www: "tt-www", + attrs: "tt-attrs", + typeahead: "tt-typeahead" + }; + methods = { + initialize: function initialize(o, datasets) { + var www; + datasets = _.isArray(datasets) ? datasets : [].slice.call(arguments, 1); + o = o || {}; + www = WWW(o.classNames); + return this.each(attach); + function attach() { + var $input, $wrapper, $hint, $menu, defaultHint, defaultMenu, eventBus, input, menu, status, typeahead, MenuConstructor; + _.each(datasets, function(d) { + d.highlight = !!o.highlight; + }); + $input = $(this); + $wrapper = $(www.html.wrapper); + $hint = $elOrNull(o.hint); + $menu = $elOrNull(o.menu); + defaultHint = o.hint !== false && !$hint; + defaultMenu = o.menu !== false && !$menu; + defaultHint && ($hint = buildHintFromInput($input, www)); + defaultMenu && ($menu = $(www.html.menu).css(www.css.menu)); + $hint && $hint.val(""); + $input = prepInput($input, www); + if (defaultHint || defaultMenu) { + $wrapper.css(www.css.wrapper); + $input.css(defaultHint ? 
www.css.input : www.css.inputWithNoHint); + $input.wrap($wrapper).parent().prepend(defaultHint ? $hint : null).append(defaultMenu ? $menu : null); + } + MenuConstructor = defaultMenu ? DefaultMenu : Menu; + eventBus = new EventBus({ + el: $input + }); + input = new Input({ + hint: $hint, + input: $input, + menu: $menu + }, www); + menu = new MenuConstructor({ + node: $menu, + datasets: datasets + }, www); + status = new Status({ + $input: $input, + menu: menu + }); + typeahead = new Typeahead({ + input: input, + menu: menu, + eventBus: eventBus, + minLength: o.minLength, + autoselect: o.autoselect + }, www); + $input.data(keys.www, www); + $input.data(keys.typeahead, typeahead); + } + }, + isEnabled: function isEnabled() { + var enabled; + ttEach(this.first(), function(t) { + enabled = t.isEnabled(); + }); + return enabled; + }, + enable: function enable() { + ttEach(this, function(t) { + t.enable(); + }); + return this; + }, + disable: function disable() { + ttEach(this, function(t) { + t.disable(); + }); + return this; + }, + isActive: function isActive() { + var active; + ttEach(this.first(), function(t) { + active = t.isActive(); + }); + return active; + }, + activate: function activate() { + ttEach(this, function(t) { + t.activate(); + }); + return this; + }, + deactivate: function deactivate() { + ttEach(this, function(t) { + t.deactivate(); + }); + return this; + }, + isOpen: function isOpen() { + var open; + ttEach(this.first(), function(t) { + open = t.isOpen(); + }); + return open; + }, + open: function open() { + ttEach(this, function(t) { + t.open(); + }); + return this; + }, + close: function close() { + ttEach(this, function(t) { + t.close(); + }); + return this; + }, + select: function select(el) { + var success = false, $el = $(el); + ttEach(this.first(), function(t) { + success = t.select($el); + }); + return success; + }, + autocomplete: function autocomplete(el) { + var success = false, $el = $(el); + ttEach(this.first(), function(t) { + success 
= t.autocomplete($el); + }); + return success; + }, + moveCursor: function moveCursoe(delta) { + var success = false; + ttEach(this.first(), function(t) { + success = t.moveCursor(delta); + }); + return success; + }, + val: function val(newVal) { + var query; + if (!arguments.length) { + ttEach(this.first(), function(t) { + query = t.getVal(); + }); + return query; + } else { + ttEach(this, function(t) { + t.setVal(_.toStr(newVal)); + }); + return this; + } + }, + destroy: function destroy() { + ttEach(this, function(typeahead, $input) { + revert($input); + typeahead.destroy(); + }); + return this; + } + }; + $.fn.typeahead = function(method) { + if (methods[method]) { + return methods[method].apply(this, [].slice.call(arguments, 1)); + } else { + return methods.initialize.apply(this, arguments); + } + }; + $.fn.typeahead.noConflict = function noConflict() { + $.fn.typeahead = old; + return this; + }; + function ttEach($els, fn) { + $els.each(function() { + var $input = $(this), typeahead; + (typeahead = $input.data(keys.typeahead)) && fn(typeahead, $input); + }); + } + function buildHintFromInput($input, www) { + return $input.clone().addClass(www.classes.hint).removeData().css(www.css.hint).css(getBackgroundStyles($input)).prop({ + readonly: true, + required: false + }).removeAttr("id name placeholder").removeClass("required").attr({ + spellcheck: "false", + tabindex: -1 + }); + } + function prepInput($input, www) { + $input.data(keys.attrs, { + dir: $input.attr("dir"), + autocomplete: $input.attr("autocomplete"), + spellcheck: $input.attr("spellcheck"), + style: $input.attr("style") + }); + $input.addClass(www.classes.input).attr({ + spellcheck: false + }); + try { + !$input.attr("dir") && $input.attr("dir", "auto"); + } catch (e) {} + return $input; + } + function getBackgroundStyles($el) { + return { + backgroundAttachment: $el.css("background-attachment"), + backgroundClip: $el.css("background-clip"), + backgroundColor: $el.css("background-color"), + 
backgroundImage: $el.css("background-image"), + backgroundOrigin: $el.css("background-origin"), + backgroundPosition: $el.css("background-position"), + backgroundRepeat: $el.css("background-repeat"), + backgroundSize: $el.css("background-size") + }; + } + function revert($input) { + var www, $wrapper; + www = $input.data(keys.www); + $wrapper = $input.parent().filter(www.selectors.wrapper); + _.each($input.data(keys.attrs), function(val, key) { + _.isUndefined(val) ? $input.removeAttr(key) : $input.attr(key, val); + }); + $input.removeData(keys.typeahead).removeData(keys.www).removeData(keys.attr).removeClass(www.classes.input); + if ($wrapper.length) { + $input.detach().insertAfter($wrapper); + $wrapper.remove(); + } + } + function $elOrNull(obj) { + var isValid, $el; + isValid = _.isJQuery(obj) || _.isElement(obj); + $el = isValid ? $(obj).first() : []; + return $el.length ? $el : null; + } + })(); +}); \ No newline at end of file diff --git a/docs/docsets/.docset/Contents/Resources/Documents/search.json b/docs/docsets/.docset/Contents/Resources/Documents/search.json new file mode 100644 index 00000000..a853d0b5 --- /dev/null +++ b/docs/docsets/.docset/Contents/Resources/Documents/search.json @@ -0,0 +1 @@ +{"Typealiases.html#/s:6OpenAI5Modela":{"name":"Model","abstract":"

Defines all available OpenAI models supported by the library.

"},"Structs/Vector.html#/s:6OpenAI6VectorV16cosineSimilarity1a1bSdSaySdG_AGtFZ":{"name":"cosineSimilarity(a:b:)","abstract":"

Returns the similarity between two vectors

","parent_name":"Vector"},"Structs/Vector.html#/s:6OpenAI6VectorV16cosineDifference1a1bSdSaySdG_AGtF":{"name":"cosineDifference(a:b:)","abstract":"

Returns the difference between two vectors. Cosine distance is defined as 1 - cosineSimilarity(a, b)

","parent_name":"Vector"},"Structs/ModerationsResult/Moderation/CategoryScores/CodingKeys.html#/s:6OpenAI17ModerationsResultV10ModerationV14CategoryScoresV10CodingKeysO10harassmentyA2ImF":{"name":"harassment","parent_name":"CodingKeys"},"Structs/ModerationsResult/Moderation/CategoryScores/CodingKeys.html#/s:6OpenAI17ModerationsResultV10ModerationV14CategoryScoresV10CodingKeysO21harassmentThreateningyA2ImF":{"name":"harassmentThreatening","parent_name":"CodingKeys"},"Structs/ModerationsResult/Moderation/CategoryScores/CodingKeys.html#/s:6OpenAI17ModerationsResultV10ModerationV14CategoryScoresV10CodingKeysO4hateyA2ImF":{"name":"hate","parent_name":"CodingKeys"},"Structs/ModerationsResult/Moderation/CategoryScores/CodingKeys.html#/s:6OpenAI17ModerationsResultV10ModerationV14CategoryScoresV10CodingKeysO15hateThreateningyA2ImF":{"name":"hateThreatening","parent_name":"CodingKeys"},"Structs/ModerationsResult/Moderation/CategoryScores/CodingKeys.html#/s:6OpenAI17ModerationsResultV10ModerationV14CategoryScoresV10CodingKeysO8selfHarmyA2ImF":{"name":"selfHarm","parent_name":"CodingKeys"},"Structs/ModerationsResult/Moderation/CategoryScores/CodingKeys.html#/s:6OpenAI17ModerationsResultV10ModerationV14CategoryScoresV10CodingKeysO14selfHarmIntentyA2ImF":{"name":"selfHarmIntent","parent_name":"CodingKeys"},"Structs/ModerationsResult/Moderation/CategoryScores/CodingKeys.html#/s:6OpenAI17ModerationsResultV10ModerationV14CategoryScoresV10CodingKeysO20selfHarmInstructionsyA2ImF":{"name":"selfHarmInstructions","parent_name":"CodingKeys"},"Structs/ModerationsResult/Moderation/CategoryScores/CodingKeys.html#/s:6OpenAI17ModerationsResultV10ModerationV14CategoryScoresV10CodingKeysO6sexualyA2ImF":{"name":"sexual","parent_name":"CodingKeys"},"Structs/ModerationsResult/Moderation/CategoryScores/CodingKeys.html#/s:6OpenAI17ModerationsResultV10ModerationV14CategoryScoresV10CodingKeysO12sexualMinorsyA2ImF":{"name":"sexualMinors","parent_name":"CodingKeys"},"Structs/ModerationsResult/Moderation/
CategoryScores/CodingKeys.html#/s:6OpenAI17ModerationsResultV10ModerationV14CategoryScoresV10CodingKeysO8violenceyA2ImF":{"name":"violence","parent_name":"CodingKeys"},"Structs/ModerationsResult/Moderation/CategoryScores/CodingKeys.html#/s:6OpenAI17ModerationsResultV10ModerationV14CategoryScoresV10CodingKeysO15violenceGraphicyA2ImF":{"name":"violenceGraphic","parent_name":"CodingKeys"},"Structs/ModerationsResult/Moderation/CategoryScores.html#/s:6OpenAI17ModerationsResultV10ModerationV14CategoryScoresV10harassmentSdvp":{"name":"harassment","abstract":"

Content that expresses, incites, or promotes harassing language towards any target.

","parent_name":"CategoryScores"},"Structs/ModerationsResult/Moderation/CategoryScores.html#/s:6OpenAI17ModerationsResultV10ModerationV14CategoryScoresV21harassmentThreateningSdvp":{"name":"harassmentThreatening","abstract":"

Harassment content that also includes violence or serious harm towards any target.

","parent_name":"CategoryScores"},"Structs/ModerationsResult/Moderation/CategoryScores.html#/s:6OpenAI17ModerationsResultV10ModerationV14CategoryScoresV4hateSdvp":{"name":"hate","abstract":"

Content that expresses, incites, or promotes hate based on race, gender, ethnicity, religion, nationality, sexual orientation, disability status, or caste.

","parent_name":"CategoryScores"},"Structs/ModerationsResult/Moderation/CategoryScores.html#/s:6OpenAI17ModerationsResultV10ModerationV14CategoryScoresV15hateThreateningSdvp":{"name":"hateThreatening","abstract":"

Hateful content that also includes violence or serious harm towards the targeted group.

","parent_name":"CategoryScores"},"Structs/ModerationsResult/Moderation/CategoryScores.html#/s:6OpenAI17ModerationsResultV10ModerationV14CategoryScoresV8selfHarmSdvp":{"name":"selfHarm","abstract":"

Content that promotes, encourages, or depicts acts of self-harm, such as suicide, cutting, and eating disorders.

","parent_name":"CategoryScores"},"Structs/ModerationsResult/Moderation/CategoryScores.html#/s:6OpenAI17ModerationsResultV10ModerationV14CategoryScoresV14selfHarmIntentSdvp":{"name":"selfHarmIntent","abstract":"

Content where the speaker expresses that they are engaging or intend to engage in acts of self-harm, such as suicide, cutting, and eating disorders.

","parent_name":"CategoryScores"},"Structs/ModerationsResult/Moderation/CategoryScores.html#/s:6OpenAI17ModerationsResultV10ModerationV14CategoryScoresV20selfHarmInstructionsSdvp":{"name":"selfHarmInstructions","abstract":"

Content that encourages performing acts of self-harm, such as suicide, cutting, and eating disorders, or that gives instructions or advice on how to commit such acts.

","parent_name":"CategoryScores"},"Structs/ModerationsResult/Moderation/CategoryScores.html#/s:6OpenAI17ModerationsResultV10ModerationV14CategoryScoresV6sexualSdvp":{"name":"sexual","abstract":"

Content meant to arouse sexual excitement, such as the description of sexual activity, or that promotes sexual services (excluding sex education and wellness).

","parent_name":"CategoryScores"},"Structs/ModerationsResult/Moderation/CategoryScores.html#/s:6OpenAI17ModerationsResultV10ModerationV14CategoryScoresV12sexualMinorsSdvp":{"name":"sexualMinors","abstract":"

Sexual content that includes an individual who is under 18 years old.

","parent_name":"CategoryScores"},"Structs/ModerationsResult/Moderation/CategoryScores.html#/s:6OpenAI17ModerationsResultV10ModerationV14CategoryScoresV8violenceSdvp":{"name":"violence","abstract":"

Content that promotes or glorifies violence or celebrates the suffering or humiliation of others.

","parent_name":"CategoryScores"},"Structs/ModerationsResult/Moderation/CategoryScores.html#/s:6OpenAI17ModerationsResultV10ModerationV14CategoryScoresV15violenceGraphicSdvp":{"name":"violenceGraphic","abstract":"

Violent content that depicts death, violence, or serious physical injury in extreme graphic detail.

","parent_name":"CategoryScores"},"Structs/ModerationsResult/Moderation/CategoryScores/CodingKeys.html":{"name":"CodingKeys","parent_name":"CategoryScores"},"Structs/ModerationsResult/Moderation/CategoryScores.html#/s:ST12makeIterator0B0QzyF":{"name":"makeIterator()","parent_name":"CategoryScores"},"Structs/ModerationsResult/Moderation/Categories/CodingKeys.html#/s:6OpenAI17ModerationsResultV10ModerationV10CategoriesV10CodingKeysO10harassmentyA2ImF":{"name":"harassment","parent_name":"CodingKeys"},"Structs/ModerationsResult/Moderation/Categories/CodingKeys.html#/s:6OpenAI17ModerationsResultV10ModerationV10CategoriesV10CodingKeysO21harassmentThreateningyA2ImF":{"name":"harassmentThreatening","parent_name":"CodingKeys"},"Structs/ModerationsResult/Moderation/Categories/CodingKeys.html#/s:6OpenAI17ModerationsResultV10ModerationV10CategoriesV10CodingKeysO4hateyA2ImF":{"name":"hate","parent_name":"CodingKeys"},"Structs/ModerationsResult/Moderation/Categories/CodingKeys.html#/s:6OpenAI17ModerationsResultV10ModerationV10CategoriesV10CodingKeysO15hateThreateningyA2ImF":{"name":"hateThreatening","parent_name":"CodingKeys"},"Structs/ModerationsResult/Moderation/Categories/CodingKeys.html#/s:6OpenAI17ModerationsResultV10ModerationV10CategoriesV10CodingKeysO8selfHarmyA2ImF":{"name":"selfHarm","parent_name":"CodingKeys"},"Structs/ModerationsResult/Moderation/Categories/CodingKeys.html#/s:6OpenAI17ModerationsResultV10ModerationV10CategoriesV10CodingKeysO14selfHarmIntentyA2ImF":{"name":"selfHarmIntent","parent_name":"CodingKeys"},"Structs/ModerationsResult/Moderation/Categories/CodingKeys.html#/s:6OpenAI17ModerationsResultV10ModerationV10CategoriesV10CodingKeysO20selfHarmInstructionsyA2ImF":{"name":"selfHarmInstructions","parent_name":"CodingKeys"},"Structs/ModerationsResult/Moderation/Categories/CodingKeys.html#/s:6OpenAI17ModerationsResultV10ModerationV10CategoriesV10CodingKeysO6sexualyA2ImF":{"name":"sexual","parent_name":"CodingKeys"},"Structs/ModerationsResult/Moderation/Categ
ories/CodingKeys.html#/s:6OpenAI17ModerationsResultV10ModerationV10CategoriesV10CodingKeysO12sexualMinorsyA2ImF":{"name":"sexualMinors","parent_name":"CodingKeys"},"Structs/ModerationsResult/Moderation/Categories/CodingKeys.html#/s:6OpenAI17ModerationsResultV10ModerationV10CategoriesV10CodingKeysO8violenceyA2ImF":{"name":"violence","parent_name":"CodingKeys"},"Structs/ModerationsResult/Moderation/Categories/CodingKeys.html#/s:6OpenAI17ModerationsResultV10ModerationV10CategoriesV10CodingKeysO15violenceGraphicyA2ImF":{"name":"violenceGraphic","parent_name":"CodingKeys"},"Structs/ModerationsResult/Moderation/Categories.html#/s:6OpenAI17ModerationsResultV10ModerationV10CategoriesV10harassmentSbvp":{"name":"harassment","abstract":"

Content that expresses, incites, or promotes harassing language towards any target.

","parent_name":"Categories"},"Structs/ModerationsResult/Moderation/Categories.html#/s:6OpenAI17ModerationsResultV10ModerationV10CategoriesV21harassmentThreateningSbvp":{"name":"harassmentThreatening","abstract":"

Harassment content that also includes violence or serious harm towards any target.

","parent_name":"Categories"},"Structs/ModerationsResult/Moderation/Categories.html#/s:6OpenAI17ModerationsResultV10ModerationV10CategoriesV4hateSbvp":{"name":"hate","abstract":"

Content that expresses, incites, or promotes hate based on race, gender, ethnicity, religion, nationality, sexual orientation, disability status, or caste.

","parent_name":"Categories"},"Structs/ModerationsResult/Moderation/Categories.html#/s:6OpenAI17ModerationsResultV10ModerationV10CategoriesV15hateThreateningSbvp":{"name":"hateThreatening","abstract":"

Hateful content that also includes violence or serious harm towards the targeted group.

","parent_name":"Categories"},"Structs/ModerationsResult/Moderation/Categories.html#/s:6OpenAI17ModerationsResultV10ModerationV10CategoriesV8selfHarmSbvp":{"name":"selfHarm","abstract":"

Content that promotes, encourages, or depicts acts of self-harm, such as suicide, cutting, and eating disorders.

","parent_name":"Categories"},"Structs/ModerationsResult/Moderation/Categories.html#/s:6OpenAI17ModerationsResultV10ModerationV10CategoriesV14selfHarmIntentSbvp":{"name":"selfHarmIntent","abstract":"

Content where the speaker expresses that they are engaging or intend to engage in acts of self-harm, such as suicide, cutting, and eating disorders.

","parent_name":"Categories"},"Structs/ModerationsResult/Moderation/Categories.html#/s:6OpenAI17ModerationsResultV10ModerationV10CategoriesV20selfHarmInstructionsSbvp":{"name":"selfHarmInstructions","abstract":"

Content that encourages performing acts of self-harm, such as suicide, cutting, and eating disorders, or that gives instructions or advice on how to commit such acts.

","parent_name":"Categories"},"Structs/ModerationsResult/Moderation/Categories.html#/s:6OpenAI17ModerationsResultV10ModerationV10CategoriesV6sexualSbvp":{"name":"sexual","abstract":"

Content meant to arouse sexual excitement, such as the description of sexual activity, or that promotes sexual services (excluding sex education and wellness).

","parent_name":"Categories"},"Structs/ModerationsResult/Moderation/Categories.html#/s:6OpenAI17ModerationsResultV10ModerationV10CategoriesV12sexualMinorsSbvp":{"name":"sexualMinors","abstract":"

Sexual content that includes an individual who is under 18 years old.

","parent_name":"Categories"},"Structs/ModerationsResult/Moderation/Categories.html#/s:6OpenAI17ModerationsResultV10ModerationV10CategoriesV8violenceSbvp":{"name":"violence","abstract":"

Content that promotes or glorifies violence or celebrates the suffering or humiliation of others.

","parent_name":"Categories"},"Structs/ModerationsResult/Moderation/Categories.html#/s:6OpenAI17ModerationsResultV10ModerationV10CategoriesV15violenceGraphicSbvp":{"name":"violenceGraphic","abstract":"

Violent content that depicts death, violence, or serious physical injury in extreme graphic detail.

","parent_name":"Categories"},"Structs/ModerationsResult/Moderation/Categories/CodingKeys.html":{"name":"CodingKeys","parent_name":"Categories"},"Structs/ModerationsResult/Moderation/Categories.html#/s:ST12makeIterator0B0QzyF":{"name":"makeIterator()","parent_name":"Categories"},"Structs/ModerationsResult/Moderation/Categories.html":{"name":"Categories","parent_name":"Moderation"},"Structs/ModerationsResult/Moderation/CategoryScores.html":{"name":"CategoryScores","parent_name":"Moderation"},"Structs/ModerationsResult/Moderation.html#/s:6OpenAI17ModerationsResultV10ModerationV10categoriesAE10CategoriesVvp":{"name":"categories","abstract":"

Collection of per-category binary usage policies violation flags. For each category, the value is true if the model flags the corresponding category as violated, false otherwise.

","parent_name":"Moderation"},"Structs/ModerationsResult/Moderation.html#/s:6OpenAI17ModerationsResultV10ModerationV14categoryScoresAE08CategoryG0Vvp":{"name":"categoryScores","abstract":"

Collection of per-category raw scores output by the model, denoting the model’s confidence that the input violates the OpenAI’s policy for the category. The value is between 0 and 1, where higher values denote higher confidence. The scores should not be interpreted as probabilities.

","parent_name":"Moderation"},"Structs/ModerationsResult/Moderation.html#/s:6OpenAI17ModerationsResultV10ModerationV7flaggedSbvp":{"name":"flagged","abstract":"

True if the model classifies the content as violating OpenAI’s usage policies, false otherwise.

","parent_name":"Moderation"},"Structs/ModerationsResult/Moderation.html":{"name":"Moderation","parent_name":"ModerationsResult"},"Structs/ModerationsResult.html#/s:s12IdentifiableP2id2IDQzvp":{"name":"id","parent_name":"ModerationsResult"},"Structs/ModerationsResult.html#/s:6OpenAI17ModerationsResultV5modelSSvp":{"name":"model","parent_name":"ModerationsResult"},"Structs/ModerationsResult.html#/s:6OpenAI17ModerationsResultV7resultsSayAC10ModerationVGvp":{"name":"results","parent_name":"ModerationsResult"},"Structs/ModerationsQuery.html#/s:6OpenAI16ModerationsQueryV5inputSSvp":{"name":"input","abstract":"

The input text to classify.

","parent_name":"ModerationsQuery"},"Structs/ModerationsQuery.html#/s:6OpenAI16ModerationsQueryV5modelSSSgvp":{"name":"model","abstract":"

ID of the model to use.

","parent_name":"ModerationsQuery"},"Structs/ModerationsQuery.html#/s:6OpenAI16ModerationsQueryV5input5modelACSS_SSSgtcfc":{"name":"init(input:model:)","parent_name":"ModerationsQuery"},"Structs/ModelsResult.html#/s:6OpenAI12ModelsResultV4dataSayAA05ModelD0VGvp":{"name":"data","abstract":"

A list of model objects.

","parent_name":"ModelsResult"},"Structs/ModelsResult.html#/s:6OpenAI12ModelsResultV6objectSSvp":{"name":"object","abstract":"

The object type, which is always list

","parent_name":"ModelsResult"},"Structs/ModelResult/CodingKeys.html#/s:6OpenAI11ModelResultV10CodingKeysO2idyA2EmF":{"name":"id","parent_name":"CodingKeys"},"Structs/ModelResult/CodingKeys.html#/s:6OpenAI11ModelResultV10CodingKeysO7createdyA2EmF":{"name":"created","parent_name":"CodingKeys"},"Structs/ModelResult/CodingKeys.html#/s:6OpenAI11ModelResultV10CodingKeysO6objectyA2EmF":{"name":"object","parent_name":"CodingKeys"},"Structs/ModelResult/CodingKeys.html#/s:6OpenAI11ModelResultV10CodingKeysO7ownedByyA2EmF":{"name":"ownedBy","parent_name":"CodingKeys"},"Structs/ModelResult.html#/s:6OpenAI11ModelResultV2idSSvp":{"name":"id","abstract":"

The model identifier, which can be referenced in the API endpoints.

","parent_name":"ModelResult"},"Structs/ModelResult.html#/s:6OpenAI11ModelResultV7createdSdvp":{"name":"created","abstract":"

The Unix timestamp (in seconds) when the model was created.

","parent_name":"ModelResult"},"Structs/ModelResult.html#/s:6OpenAI11ModelResultV6objectSSvp":{"name":"object","abstract":"

The object type, which is always “model”.

","parent_name":"ModelResult"},"Structs/ModelResult.html#/s:6OpenAI11ModelResultV7ownedBySSvp":{"name":"ownedBy","abstract":"

The organization that owns the model.

","parent_name":"ModelResult"},"Structs/ModelResult/CodingKeys.html":{"name":"CodingKeys","parent_name":"ModelResult"},"Structs/ModelQuery.html#/s:6OpenAI10ModelQueryV5modelSSvp":{"name":"model","abstract":"

The ID of the model to use for this request.

","parent_name":"ModelQuery"},"Structs/ModelQuery.html#/s:6OpenAI10ModelQueryV5modelACSS_tcfc":{"name":"init(model:)","parent_name":"ModelQuery"},"Structs/ImagesResult/Image/CodingKeys.html#/s:6OpenAI12ImagesResultV5ImageV10CodingKeysO7b64JsonyA2GmF":{"name":"b64Json","parent_name":"CodingKeys"},"Structs/ImagesResult/Image/CodingKeys.html#/s:6OpenAI12ImagesResultV5ImageV10CodingKeysO13revisedPromptyA2GmF":{"name":"revisedPrompt","parent_name":"CodingKeys"},"Structs/ImagesResult/Image/CodingKeys.html#/s:6OpenAI12ImagesResultV5ImageV10CodingKeysO3urlyA2GmF":{"name":"url","parent_name":"CodingKeys"},"Structs/ImagesResult/Image.html#/s:6OpenAI12ImagesResultV5ImageV7b64JsonSSSgvp":{"name":"b64Json","abstract":"

The base64-encoded JSON of the generated image, if response_format is b64_json

","parent_name":"Image"},"Structs/ImagesResult/Image.html#/s:6OpenAI12ImagesResultV5ImageV13revisedPromptSSSgvp":{"name":"revisedPrompt","abstract":"

The prompt that was used to generate the image, if there was any revision to the prompt.

","parent_name":"Image"},"Structs/ImagesResult/Image.html#/s:6OpenAI12ImagesResultV5ImageV3urlSSSgvp":{"name":"url","abstract":"

The URL of the generated image, if response_format is url (default).

","parent_name":"Image"},"Structs/ImagesResult/Image/CodingKeys.html":{"name":"CodingKeys","parent_name":"Image"},"Structs/ImagesResult.html#/s:6OpenAI12ImagesResultV7createdSdvp":{"name":"created","parent_name":"ImagesResult"},"Structs/ImagesResult.html#/s:6OpenAI12ImagesResultV4dataSayAC5ImageVGvp":{"name":"data","parent_name":"ImagesResult"},"Structs/ImagesResult/Image.html":{"name":"Image","abstract":"

Represents the url or the content of an image generated by the OpenAI API.

","parent_name":"ImagesResult"},"Structs/ImagesQuery/Size.html#/s:6OpenAI11ImagesQueryV4SizeO4_256yA2EmF":{"name":"_256","parent_name":"Size"},"Structs/ImagesQuery/Size.html#/s:6OpenAI11ImagesQueryV4SizeO4_512yA2EmF":{"name":"_512","parent_name":"Size"},"Structs/ImagesQuery/Size.html#/s:6OpenAI11ImagesQueryV4SizeO5_1024yA2EmF":{"name":"_1024","parent_name":"Size"},"Structs/ImagesQuery/Size.html#/s:6OpenAI11ImagesQueryV4SizeO10_1792_1024yA2EmF":{"name":"_1792_1024","parent_name":"Size"},"Structs/ImagesQuery/Size.html#/s:6OpenAI11ImagesQueryV4SizeO10_1024_1792yA2EmF":{"name":"_1024_1792","parent_name":"Size"},"Structs/ImagesQuery/Quality.html#/s:6OpenAI11ImagesQueryV7QualityO8standardyA2EmF":{"name":"standard","parent_name":"Quality"},"Structs/ImagesQuery/Quality.html#/s:6OpenAI11ImagesQueryV7QualityO2hdyA2EmF":{"name":"hd","parent_name":"Quality"},"Structs/ImagesQuery/Style.html#/s:6OpenAI11ImagesQueryV5StyleO7naturalyA2EmF":{"name":"natural","parent_name":"Style"},"Structs/ImagesQuery/Style.html#/s:6OpenAI11ImagesQueryV5StyleO5vividyA2EmF":{"name":"vivid","parent_name":"Style"},"Structs/ImagesQuery/CodingKeys.html#/s:6OpenAI11ImagesQueryV10CodingKeysO5modelyA2EmF":{"name":"model","parent_name":"CodingKeys"},"Structs/ImagesQuery/CodingKeys.html#/s:6OpenAI11ImagesQueryV10CodingKeysO6promptyA2EmF":{"name":"prompt","parent_name":"CodingKeys"},"Structs/ImagesQuery/CodingKeys.html#/s:6OpenAI11ImagesQueryV10CodingKeysO1nyA2EmF":{"name":"n","parent_name":"CodingKeys"},"Structs/ImagesQuery/CodingKeys.html#/s:6OpenAI11ImagesQueryV10CodingKeysO4sizeyA2EmF":{"name":"size","parent_name":"CodingKeys"},"Structs/ImagesQuery/CodingKeys.html#/s:6OpenAI11ImagesQueryV10CodingKeysO4useryA2EmF":{"name":"user","parent_name":"CodingKeys"},"Structs/ImagesQuery/CodingKeys.html#/s:6OpenAI11ImagesQueryV10CodingKeysO5styleyA2EmF":{"name":"style","parent_name":"CodingKeys"},"Structs/ImagesQuery/CodingKeys.html#/s:6OpenAI11ImagesQueryV10CodingKeysO14responseFormatyA2EmF":{"name":"responseFormat",
"parent_name":"CodingKeys"},"Structs/ImagesQuery/CodingKeys.html#/s:6OpenAI11ImagesQueryV10CodingKeysO7qualityyA2EmF":{"name":"quality","parent_name":"CodingKeys"},"Structs/ImagesQuery/ResponseFormat.html#/s:6OpenAI11ImagesQueryV14ResponseFormatO3urlyA2EmF":{"name":"url","parent_name":"ResponseFormat"},"Structs/ImagesQuery/ResponseFormat.html#/s:6OpenAI11ImagesQueryV14ResponseFormatO8b64_jsonyA2EmF":{"name":"b64_json","parent_name":"ResponseFormat"},"Structs/ImagesQuery/ResponseFormat.html":{"name":"ResponseFormat","parent_name":"ImagesQuery"},"Structs/ImagesQuery.html#/s:6OpenAI11ImagesQueryV6promptSSvp":{"name":"prompt","abstract":"

A text description of the desired image(s). The maximum length is 1000 characters for dall-e-2 and 4000 characters for dall-e-3.

","parent_name":"ImagesQuery"},"Structs/ImagesQuery.html#/s:6OpenAI11ImagesQueryV5modelSSSgvp":{"name":"model","abstract":"

The model to use for image generation.","parent_name":"ImagesQuery"},"Structs/ImagesQuery.html#/s:6OpenAI11ImagesQueryV14responseFormatAC08ResponseF0OSgvp":{"name":"responseFormat","abstract":"

The format in which the generated images are returned. Must be one of url or b64_json.","parent_name":"ImagesQuery"},"Structs/ImagesQuery.html#/s:6OpenAI11ImagesQueryV1nSiSgvp":{"name":"n","abstract":"

The number of images to generate. Must be between 1 and 10. For dall-e-3, only n=1 is supported.","parent_name":"ImagesQuery"},"Structs/ImagesQuery.html#/s:6OpenAI11ImagesQueryV4sizeAC4SizeOSgvp":{"name":"size","abstract":"

The size of the generated images. Must be one of 256x256, 512x512, or 1024x1024 for dall-e-2. Must be one of 1024x1024, 1792x1024, or 1024x1792 for dall-e-3 models.","parent_name":"ImagesQuery"},"Structs/ImagesQuery.html#/s:6OpenAI11ImagesQueryV4userSSSgvp":{"name":"user","abstract":"

A unique identifier representing your end-user, which can help OpenAI to monitor and detect abuse.","parent_name":"ImagesQuery"},"Structs/ImagesQuery.html#/s:6OpenAI11ImagesQueryV5styleAC5StyleOSgvp":{"name":"style","abstract":"

The style of the generated images. Must be one of vivid or natural. Vivid causes the model to lean towards generating hyper-real and dramatic images. Natural causes the model to produce more natural, less hyper-real looking images. This param is only supported for dall-e-3.","parent_name":"ImagesQuery"},"Structs/ImagesQuery.html#/s:6OpenAI11ImagesQueryV7qualityAC7QualityOSgvp":{"name":"quality","abstract":"

The quality of the image that will be generated. hd creates images with finer details and greater consistency across the image. This param is only supported for dall-e-3.","parent_name":"ImagesQuery"},"Structs/ImagesQuery.html#/s:6OpenAI11ImagesQueryV6prompt5model1n7quality14responseFormat4size5style4userACSS_SSSgSiSgAC7QualityOSgAC08ResponseI0OSgAC4SizeOSgAC5StyleOSgALtcfc":{"name":"init(prompt:model:n:quality:responseFormat:size:style:user:)","parent_name":"ImagesQuery"},"Structs/ImagesQuery/CodingKeys.html":{"name":"CodingKeys","parent_name":"ImagesQuery"},"Structs/ImagesQuery/Style.html":{"name":"Style","parent_name":"ImagesQuery"},"Structs/ImagesQuery/Quality.html":{"name":"Quality","parent_name":"ImagesQuery"},"Structs/ImagesQuery/Size.html":{"name":"Size","parent_name":"ImagesQuery"},"Structs/ImageVariationsQuery/CodingKeys.html#/s:6OpenAI20ImageVariationsQueryV10CodingKeysO5imageyA2EmF":{"name":"image","parent_name":"CodingKeys"},"Structs/ImageVariationsQuery/CodingKeys.html#/s:6OpenAI20ImageVariationsQueryV10CodingKeysO5modelyA2EmF":{"name":"model","parent_name":"CodingKeys"},"Structs/ImageVariationsQuery/CodingKeys.html#/s:6OpenAI20ImageVariationsQueryV10CodingKeysO1nyA2EmF":{"name":"n","parent_name":"CodingKeys"},"Structs/ImageVariationsQuery/CodingKeys.html#/s:6OpenAI20ImageVariationsQueryV10CodingKeysO14responseFormatyA2EmF":{"name":"responseFormat","parent_name":"CodingKeys"},"Structs/ImageVariationsQuery/CodingKeys.html#/s:6OpenAI20ImageVariationsQueryV10CodingKeysO4sizeyA2EmF":{"name":"size","parent_name":"CodingKeys"},"Structs/ImageVariationsQuery/CodingKeys.html#/s:6OpenAI20ImageVariationsQueryV10CodingKeysO4useryA2EmF":{"name":"user","parent_name":"CodingKeys"},"Structs/ImageVariationsQuery.html#/s:6OpenAI20ImageVariationsQueryV14ResponseFormata":{"name":"ResponseFormat","parent_name":"ImageVariationsQuery"},"Structs/ImageVariationsQuery.html#/s:6OpenAI20ImageVariationsQueryV5image10Foundation4DataVvp":{"name":"image","abstract":"

The image to edit. Must be a valid PNG file, less than 4MB, and square.

","parent_name":"ImageVariationsQuery"},"Structs/ImageVariationsQuery.html#/s:6OpenAI20ImageVariationsQueryV5modelSSSgvp":{"name":"model","abstract":"

The model to use for image generation. Only dall-e-2 is supported at this time.","parent_name":"ImageVariationsQuery"},"Structs/ImageVariationsQuery.html#/s:6OpenAI20ImageVariationsQueryV1nSiSgvp":{"name":"n","abstract":"

The number of images to generate. Must be between 1 and 10.","parent_name":"ImageVariationsQuery"},"Structs/ImageVariationsQuery.html#/s:6OpenAI20ImageVariationsQueryV14responseFormatAA06ImagesE0V08ResponseG0OSgvp":{"name":"responseFormat","abstract":"

The format in which the generated images are returned. Must be one of url or b64_json.","parent_name":"ImageVariationsQuery"},"Structs/ImageVariationsQuery.html#/s:6OpenAI20ImageVariationsQueryV4sizeSSSgvp":{"name":"size","abstract":"

The size of the generated images. Must be one of 256x256, 512x512, or 1024x1024.","parent_name":"ImageVariationsQuery"},"Structs/ImageVariationsQuery.html#/s:6OpenAI20ImageVariationsQueryV4userSSSgvp":{"name":"user","abstract":"

A unique identifier representing your end-user, which can help OpenAI to monitor and detect abuse.","parent_name":"ImageVariationsQuery"},"Structs/ImageVariationsQuery.html#/s:6OpenAI20ImageVariationsQueryV5image5model1n14responseFormat4size4userAC10Foundation4DataV_SSSgSiSgAA06ImagesE0V08ResponseI0OSgA2Mtcfc":{"name":"init(image:model:n:responseFormat:size:user:)","parent_name":"ImageVariationsQuery"},"Structs/ImageVariationsQuery/CodingKeys.html":{"name":"CodingKeys","parent_name":"ImageVariationsQuery"},"Structs/ImageEditsQuery/CodingKeys.html#/s:6OpenAI15ImageEditsQueryV10CodingKeysO5imageyA2EmF":{"name":"image","parent_name":"CodingKeys"},"Structs/ImageEditsQuery/CodingKeys.html#/s:6OpenAI15ImageEditsQueryV10CodingKeysO4maskyA2EmF":{"name":"mask","parent_name":"CodingKeys"},"Structs/ImageEditsQuery/CodingKeys.html#/s:6OpenAI15ImageEditsQueryV10CodingKeysO6promptyA2EmF":{"name":"prompt","parent_name":"CodingKeys"},"Structs/ImageEditsQuery/CodingKeys.html#/s:6OpenAI15ImageEditsQueryV10CodingKeysO5modelyA2EmF":{"name":"model","parent_name":"CodingKeys"},"Structs/ImageEditsQuery/CodingKeys.html#/s:6OpenAI15ImageEditsQueryV10CodingKeysO1nyA2EmF":{"name":"n","parent_name":"CodingKeys"},"Structs/ImageEditsQuery/CodingKeys.html#/s:6OpenAI15ImageEditsQueryV10CodingKeysO14responseFormatyA2EmF":{"name":"responseFormat","parent_name":"CodingKeys"},"Structs/ImageEditsQuery/CodingKeys.html#/s:6OpenAI15ImageEditsQueryV10CodingKeysO4sizeyA2EmF":{"name":"size","parent_name":"CodingKeys"},"Structs/ImageEditsQuery/CodingKeys.html#/s:6OpenAI15ImageEditsQueryV10CodingKeysO4useryA2EmF":{"name":"user","parent_name":"CodingKeys"},"Structs/ImageEditsQuery.html#/s:6OpenAI15ImageEditsQueryV14ResponseFormata":{"name":"ResponseFormat","parent_name":"ImageEditsQuery"},"Structs/ImageEditsQuery.html#/s:6OpenAI15ImageEditsQueryV4Sizea":{"name":"Size","parent_name":"ImageEditsQuery"},"Structs/ImageEditsQuery.html#/s:6OpenAI15ImageEditsQueryV5image10Foundation4DataVvp":{"name":"image","abstract"
:"

The image to edit. Must be a valid PNG file, less than 4MB, and square. If mask is not provided, image must have transparency, which will be used as the mask.

","parent_name":"ImageEditsQuery"},"Structs/ImageEditsQuery.html#/s:6OpenAI15ImageEditsQueryV4mask10Foundation4DataVSgvp":{"name":"mask","abstract":"

An additional image whose fully transparent areas (e.g. where alpha is zero) indicate where image should be edited. Must be a valid PNG file, less than 4MB, and have the same dimensions as image.

","parent_name":"ImageEditsQuery"},"Structs/ImageEditsQuery.html#/s:6OpenAI15ImageEditsQueryV6promptSSvp":{"name":"prompt","abstract":"

A text description of the desired image(s). The maximum length is 1000 characters.

","parent_name":"ImageEditsQuery"},"Structs/ImageEditsQuery.html#/s:6OpenAI15ImageEditsQueryV5modelSSSgvp":{"name":"model","abstract":"

The model to use for image generation.","parent_name":"ImageEditsQuery"},"Structs/ImageEditsQuery.html#/s:6OpenAI15ImageEditsQueryV1nSiSgvp":{"name":"n","abstract":"

The number of images to generate. Must be between 1 and 10.

","parent_name":"ImageEditsQuery"},"Structs/ImageEditsQuery.html#/s:6OpenAI15ImageEditsQueryV14responseFormatAA06ImagesE0V08ResponseG0OSgvp":{"name":"responseFormat","abstract":"

The format in which the generated images are returned. Must be one of url or b64_json.","parent_name":"ImageEditsQuery"},"Structs/ImageEditsQuery.html#/s:6OpenAI15ImageEditsQueryV4sizeAA06ImagesE0V4SizeOSgvp":{"name":"size","abstract":"

The size of the generated images. Must be one of 256x256, 512x512, or 1024x1024.

","parent_name":"ImageEditsQuery"},"Structs/ImageEditsQuery.html#/s:6OpenAI15ImageEditsQueryV4userSSSgvp":{"name":"user","abstract":"

A unique identifier representing your end-user, which can help OpenAI to monitor and detect abuse.","parent_name":"ImageEditsQuery"},"Structs/ImageEditsQuery.html#/s:6OpenAI15ImageEditsQueryV5image6prompt4mask5model1n14responseFormat4size4userAC10Foundation4DataV_SSANSgSSSgSiSgAA06ImagesE0V08ResponseK0OSgAS4SizeOSgAPtcfc":{"name":"init(image:prompt:mask:model:n:responseFormat:size:user:)","parent_name":"ImageEditsQuery"},"Structs/ImageEditsQuery/CodingKeys.html":{"name":"CodingKeys","parent_name":"ImageEditsQuery"},"Structs/EmbeddingsResult/Usage.html#/s:6OpenAI16EmbeddingsResultV5UsageV12promptTokensSivp":{"name":"promptTokens","parent_name":"Usage"},"Structs/EmbeddingsResult/Usage.html#/s:6OpenAI16EmbeddingsResultV5UsageV11totalTokensSivp":{"name":"totalTokens","parent_name":"Usage"},"Structs/EmbeddingsResult/Embedding.html#/s:6OpenAI16EmbeddingsResultV9EmbeddingV6objectSSvp":{"name":"object","abstract":"

The object type, which is always “embedding”.

","parent_name":"Embedding"},"Structs/EmbeddingsResult/Embedding.html#/s:6OpenAI16EmbeddingsResultV9EmbeddingV9embeddingSaySdGvp":{"name":"embedding","abstract":"

The embedding vector, which is a list of floats. The length of vector depends on the model as listed in the embedding guide.","parent_name":"Embedding"},"Structs/EmbeddingsResult/Embedding.html":{"name":"Embedding","parent_name":"EmbeddingsResult"},"Structs/EmbeddingsResult/Usage.html":{"name":"Usage","parent_name":"EmbeddingsResult"},"Structs/EmbeddingsResult.html#/s:6OpenAI16EmbeddingsResultV4dataSayAC9EmbeddingVGvp":{"name":"data","parent_name":"EmbeddingsResult"},"Structs/EmbeddingsResult.html#/s:6OpenAI16EmbeddingsResultV5modelSSvp":{"name":"model","parent_name":"EmbeddingsResult"},"Structs/EmbeddingsResult.html#/s:6OpenAI16EmbeddingsResultV5usageAC5UsageVvp":{"name":"usage","parent_name":"EmbeddingsResult"},"Structs/EmbeddingsResult.html#/s:6OpenAI16EmbeddingsResultV6objectSSvp":{"name":"object","abstract":"

The object type, which is always “list”.

","parent_name":"EmbeddingsResult"},"Structs/EmbeddingsQuery/CodingKeys.html#/s:6OpenAI15EmbeddingsQueryV10CodingKeysO5inputyA2EmF":{"name":"input","parent_name":"CodingKeys"},"Structs/EmbeddingsQuery/CodingKeys.html#/s:6OpenAI15EmbeddingsQueryV10CodingKeysO5modelyA2EmF":{"name":"model","parent_name":"CodingKeys"},"Structs/EmbeddingsQuery/CodingKeys.html#/s:6OpenAI15EmbeddingsQueryV10CodingKeysO14encodingFormatyA2EmF":{"name":"encodingFormat","parent_name":"CodingKeys"},"Structs/EmbeddingsQuery/CodingKeys.html#/s:6OpenAI15EmbeddingsQueryV10CodingKeysO4useryA2EmF":{"name":"user","parent_name":"CodingKeys"},"Structs/EmbeddingsQuery/EncodingFormat.html#/s:6OpenAI15EmbeddingsQueryV14EncodingFormatO5floatyA2EmF":{"name":"float","parent_name":"EncodingFormat"},"Structs/EmbeddingsQuery/EncodingFormat.html#/s:6OpenAI15EmbeddingsQueryV14EncodingFormatO6base64yA2EmF":{"name":"base64","parent_name":"EncodingFormat"},"Structs/EmbeddingsQuery/Input.html#/s:6OpenAI15EmbeddingsQueryV5InputO6stringyAESScAEmF":{"name":"string(_:)","parent_name":"Input"},"Structs/EmbeddingsQuery/Input.html#/s:6OpenAI15EmbeddingsQueryV5InputO10stringListyAESaySSGcAEmF":{"name":"stringList(_:)","parent_name":"Input"},"Structs/EmbeddingsQuery/Input.html#/s:6OpenAI15EmbeddingsQueryV5InputO7intListyAESaySiGcAEmF":{"name":"intList(_:)","parent_name":"Input"},"Structs/EmbeddingsQuery/Input.html#/s:6OpenAI15EmbeddingsQueryV5InputO9intMatrixyAESaySaySiGGcAEmF":{"name":"intMatrix(_:)","parent_name":"Input"},"Structs/EmbeddingsQuery/Input.html#/s:SE6encode2toys7Encoder_p_tKF":{"name":"encode(to:)","parent_name":"Input"},"Structs/EmbeddingsQuery/Input.html#/s:6OpenAI15EmbeddingsQueryV5InputO6stringAESS_tcfc":{"name":"init(string:)","parent_name":"Input"},"Structs/EmbeddingsQuery/Input.html#/s:6OpenAI15EmbeddingsQueryV5InputO10stringListAESaySSG_tcfc":{"name":"init(stringList:)","parent_name":"Input"},"Structs/EmbeddingsQuery/Input.html#/s:6OpenAI15EmbeddingsQueryV5InputO7intListAESaySiG_tcfc":{"name":"init(intLi
st:)","parent_name":"Input"},"Structs/EmbeddingsQuery/Input.html#/s:6OpenAI15EmbeddingsQueryV5InputO9intMatrixAESaySaySiGG_tcfc":{"name":"init(intMatrix:)","parent_name":"Input"},"Structs/EmbeddingsQuery.html#/s:6OpenAI15EmbeddingsQueryV5inputAC5InputOvp":{"name":"input","abstract":"

Input text to embed, encoded as a string or array of tokens. To embed multiple inputs in a single request, pass an array of strings or array of token arrays. The input must not exceed the max input tokens for the model (8192 tokens for text-embedding-ada-002), cannot be an empty string, and any array must be 2048 dimensions or less.

","parent_name":"EmbeddingsQuery"},"Structs/EmbeddingsQuery.html#/s:6OpenAI15EmbeddingsQueryV5modelSSvp":{"name":"model","abstract":"

ID of the model to use. You can use the List models API to see all of your available models, or see our Model overview for descriptions of them.","parent_name":"EmbeddingsQuery"},"Structs/EmbeddingsQuery.html#/s:6OpenAI15EmbeddingsQueryV14encodingFormatAC08EncodingF0OSgvp":{"name":"encodingFormat","abstract":"

The format to return the embeddings in. Can be either float or base64.","parent_name":"EmbeddingsQuery"},"Structs/EmbeddingsQuery.html#/s:6OpenAI15EmbeddingsQueryV4userSSSgvp":{"name":"user","abstract":"

A unique identifier representing your end-user, which can help OpenAI to monitor and detect abuse.","parent_name":"EmbeddingsQuery"},"Structs/EmbeddingsQuery.html#/s:6OpenAI15EmbeddingsQueryV5input5model14encodingFormat4userA2C5InputO_SSAC08EncodingH0OSgSSSgtcfc":{"name":"init(input:model:encodingFormat:user:)","parent_name":"EmbeddingsQuery"},"Structs/EmbeddingsQuery/Input.html":{"name":"Input","parent_name":"EmbeddingsQuery"},"Structs/EmbeddingsQuery/EncodingFormat.html":{"name":"EncodingFormat","parent_name":"EmbeddingsQuery"},"Structs/EmbeddingsQuery/CodingKeys.html":{"name":"CodingKeys","parent_name":"EmbeddingsQuery"},"Structs/EditsResult/Usage.html#/s:6OpenAI11EditsResultV5UsageV12promptTokensSivp":{"name":"promptTokens","parent_name":"Usage"},"Structs/EditsResult/Usage.html#/s:6OpenAI11EditsResultV5UsageV16completionTokensSivp":{"name":"completionTokens","parent_name":"Usage"},"Structs/EditsResult/Usage.html#/s:6OpenAI11EditsResultV5UsageV11totalTokensSivp":{"name":"totalTokens","parent_name":"Usage"},"Structs/EditsResult/Choice.html#/s:6OpenAI11EditsResultV6ChoiceV4textSSvp":{"name":"text","parent_name":"Choice"},"Structs/EditsResult/Choice.html":{"name":"Choice","parent_name":"EditsResult"},"Structs/EditsResult/Usage.html":{"name":"Usage","parent_name":"EditsResult"},"Structs/EditsResult.html#/s:6OpenAI11EditsResultV6objectSSvp":{"name":"object","parent_name":"EditsResult"},"Structs/EditsResult.html#/s:6OpenAI11EditsResultV7createdSdvp":{"name":"created","parent_name":"EditsResult"},"Structs/EditsResult.html#/s:6OpenAI11EditsResultV7choicesSayAC6ChoiceVGvp":{"name":"choices","parent_name":"EditsResult"},"Structs/EditsResult.html#/s:6OpenAI11EditsResultV5usageAC5UsageVvp":{"name":"usage","parent_name":"EditsResult"},"Structs/EditsQuery.html#/s:6OpenAI10EditsQueryV5modelSSvp":{"name":"model","abstract":"

ID of the model to use.

","parent_name":"EditsQuery"},"Structs/EditsQuery.html#/s:6OpenAI10EditsQueryV5inputSSSgvp":{"name":"input","abstract":"

Input text to get embeddings for.

","parent_name":"EditsQuery"},"Structs/EditsQuery.html#/s:6OpenAI10EditsQueryV11instructionSSvp":{"name":"instruction","abstract":"

The instruction that tells the model how to edit the prompt.

","parent_name":"EditsQuery"},"Structs/EditsQuery.html#/s:6OpenAI10EditsQueryV1nSiSgvp":{"name":"n","abstract":"

The number of images to generate. Must be between 1 and 10.

","parent_name":"EditsQuery"},"Structs/EditsQuery.html#/s:6OpenAI10EditsQueryV11temperatureSdSgvp":{"name":"temperature","abstract":"

What sampling temperature to use. Higher values means the model will take more risks. Try 0.9 for more creative applications, and 0 (argmax sampling) for ones with a well-defined answer.

","parent_name":"EditsQuery"},"Structs/EditsQuery.html#/s:6OpenAI10EditsQueryV4topPSdSgvp":{"name":"topP","abstract":"

An alternative to sampling with temperature, called nucleus sampling, where the model considers the results of the tokens with top_p probability mass. So 0.1 means only the tokens comprising the top 10% probability mass are considered.

","parent_name":"EditsQuery"},"Structs/EditsQuery.html#/s:6OpenAI10EditsQueryV5model5input11instruction1n11temperature4topPACSS_SSSgSSSiSgSdSgALtcfc":{"name":"init(model:input:instruction:n:temperature:topP:)","parent_name":"EditsQuery"},"Structs/CompletionsResult/Choice.html#/s:6OpenAI17CompletionsResultV6ChoiceV4textSSvp":{"name":"text","parent_name":"Choice"},"Structs/CompletionsResult/Choice.html#/s:6OpenAI17CompletionsResultV6ChoiceV12finishReasonSSSgvp":{"name":"finishReason","parent_name":"Choice"},"Structs/CompletionsResult/Usage.html#/s:6OpenAI17CompletionsResultV5UsageV12promptTokensSivp":{"name":"promptTokens","parent_name":"Usage"},"Structs/CompletionsResult/Usage.html#/s:6OpenAI17CompletionsResultV5UsageV16completionTokensSivp":{"name":"completionTokens","parent_name":"Usage"},"Structs/CompletionsResult/Usage.html#/s:6OpenAI17CompletionsResultV5UsageV11totalTokensSivp":{"name":"totalTokens","parent_name":"Usage"},"Structs/CompletionsResult/Usage.html":{"name":"Usage","parent_name":"CompletionsResult"},"Structs/CompletionsResult/Choice.html":{"name":"Choice","parent_name":"CompletionsResult"},"Structs/CompletionsResult.html#/s:6OpenAI17CompletionsResultV2idSSvp":{"name":"id","parent_name":"CompletionsResult"},"Structs/CompletionsResult.html#/s:6OpenAI17CompletionsResultV6objectSSvp":{"name":"object","parent_name":"CompletionsResult"},"Structs/CompletionsResult.html#/s:6OpenAI17CompletionsResultV7createdSdvp":{"name":"created","parent_name":"CompletionsResult"},"Structs/CompletionsResult.html#/s:6OpenAI17CompletionsResultV5modelSSvp":{"name":"model","parent_name":"CompletionsResult"},"Structs/CompletionsResult.html#/s:6OpenAI17CompletionsResultV7choicesSayAC6ChoiceVGvp":{"name":"choices","parent_name":"CompletionsResult"},"Structs/CompletionsResult.html#/s:6OpenAI17CompletionsResultV5usageAC5UsageVSgvp":{"name":"usage","parent_name":"CompletionsResult"},"Structs/CompletionsQuery.html#/s:6OpenAI16CompletionsQueryV5modelSSvp":{"name":"model","abstract":"

ID of the model to use.

","parent_name":"CompletionsQuery"},"Structs/CompletionsQuery.html#/s:6OpenAI16CompletionsQueryV6promptSSvp":{"name":"prompt","abstract":"

The prompt(s) to generate completions for, encoded as a string, array of strings, array of tokens, or array of token arrays.

","parent_name":"CompletionsQuery"},"Structs/CompletionsQuery.html#/s:6OpenAI16CompletionsQueryV11temperatureSdSgvp":{"name":"temperature","abstract":"

What sampling temperature to use. Higher values means the model will take more risks. Try 0.9 for more creative applications, and 0 (argmax sampling) for ones with a well-defined answer.

","parent_name":"CompletionsQuery"},"Structs/CompletionsQuery.html#/s:6OpenAI16CompletionsQueryV9maxTokensSiSgvp":{"name":"maxTokens","abstract":"

The maximum number of tokens to generate in the completion.

","parent_name":"CompletionsQuery"},"Structs/CompletionsQuery.html#/s:6OpenAI16CompletionsQueryV4topPSdSgvp":{"name":"topP","abstract":"

An alternative to sampling with temperature, called nucleus sampling, where the model considers the results of the tokens with top_p probability mass. So 0.1 means only the tokens comprising the top 10% probability mass are considered.

","parent_name":"CompletionsQuery"},"Structs/CompletionsQuery.html#/s:6OpenAI16CompletionsQueryV16frequencyPenaltySdSgvp":{"name":"frequencyPenalty","abstract":"

Number between -2.0 and 2.0. Positive values penalize new tokens based on their existing frequency in the text so far, decreasing the model’s likelihood to repeat the same line verbatim.

","parent_name":"CompletionsQuery"},"Structs/CompletionsQuery.html#/s:6OpenAI16CompletionsQueryV15presencePenaltySdSgvp":{"name":"presencePenalty","abstract":"

Number between -2.0 and 2.0. Positive values penalize new tokens based on whether they appear in the text so far, increasing the model’s likelihood to talk about new topics.

","parent_name":"CompletionsQuery"},"Structs/CompletionsQuery.html#/s:6OpenAI16CompletionsQueryV4stopSaySSGSgvp":{"name":"stop","abstract":"

Up to 4 sequences where the API will stop generating further tokens. The returned text will not contain the stop sequence.

","parent_name":"CompletionsQuery"},"Structs/CompletionsQuery.html#/s:6OpenAI16CompletionsQueryV4userSSSgvp":{"name":"user","abstract":"

A unique identifier representing your end-user, which can help OpenAI to monitor and detect abuse.

","parent_name":"CompletionsQuery"},"Structs/CompletionsQuery.html#/s:6OpenAI16CompletionsQueryV5model6prompt11temperature9maxTokens4topP16frequencyPenalty08presenceL04stop4userACSS_SSSdSgSiSgA3MSaySSGSgSSSgtcfc":{"name":"init(model:prompt:temperature:maxTokens:topP:frequencyPenalty:presencePenalty:stop:user:)","parent_name":"CompletionsQuery"},"Structs/ChatStreamResult/CodingKeys.html#/s:6OpenAI16ChatStreamResultV10CodingKeysO2idyA2EmF":{"name":"id","parent_name":"CodingKeys"},"Structs/ChatStreamResult/CodingKeys.html#/s:6OpenAI16ChatStreamResultV10CodingKeysO6objectyA2EmF":{"name":"object","parent_name":"CodingKeys"},"Structs/ChatStreamResult/CodingKeys.html#/s:6OpenAI16ChatStreamResultV10CodingKeysO7createdyA2EmF":{"name":"created","parent_name":"CodingKeys"},"Structs/ChatStreamResult/CodingKeys.html#/s:6OpenAI16ChatStreamResultV10CodingKeysO5modelyA2EmF":{"name":"model","parent_name":"CodingKeys"},"Structs/ChatStreamResult/CodingKeys.html#/s:6OpenAI16ChatStreamResultV10CodingKeysO7choicesyA2EmF":{"name":"choices","parent_name":"CodingKeys"},"Structs/ChatStreamResult/CodingKeys.html#/s:6OpenAI16ChatStreamResultV10CodingKeysO17systemFingerprintyA2EmF":{"name":"systemFingerprint","parent_name":"CodingKeys"},"Structs/ChatStreamResult/Choice/CodingKeys.html#/s:6OpenAI16ChatStreamResultV6ChoiceV10CodingKeysO5deltayA2GmF":{"name":"delta","parent_name":"CodingKeys"},"Structs/ChatStreamResult/Choice/CodingKeys.html#/s:6OpenAI16ChatStreamResultV6ChoiceV10CodingKeysO12finishReasonyA2GmF":{"name":"finishReason","parent_name":"CodingKeys"},"Structs/ChatStreamResult/Choice/CodingKeys.html#/s:6OpenAI16ChatStreamResultV6ChoiceV10CodingKeysO8logprobsyA2GmF":{"name":"logprobs","parent_name":"CodingKeys"},"Structs/ChatStreamResult/Choice/ChoiceLogprobs/ChatCompletionTokenLogprob/CodingKeys.html#/s:6OpenAI16ChatStreamResultV6ChoiceV0F8LogprobsV0C22CompletionTokenLogprobV10CodingKeysO5tokenyA2KmF":{"name":"token","parent_name":"CodingKeys"},"Structs/ChatStreamResult/Choice/ChoiceLog
probs/ChatCompletionTokenLogprob/CodingKeys.html#/s:6OpenAI16ChatStreamResultV6ChoiceV0F8LogprobsV0C22CompletionTokenLogprobV10CodingKeysO5bytesyA2KmF":{"name":"bytes","parent_name":"CodingKeys"},"Structs/ChatStreamResult/Choice/ChoiceLogprobs/ChatCompletionTokenLogprob/CodingKeys.html#/s:6OpenAI16ChatStreamResultV6ChoiceV0F8LogprobsV0C22CompletionTokenLogprobV10CodingKeysO7logprobyA2KmF":{"name":"logprob","parent_name":"CodingKeys"},"Structs/ChatStreamResult/Choice/ChoiceLogprobs/ChatCompletionTokenLogprob/CodingKeys.html#/s:6OpenAI16ChatStreamResultV6ChoiceV0F8LogprobsV0C22CompletionTokenLogprobV10CodingKeysO03topG0yA2KmF":{"name":"topLogprobs","parent_name":"CodingKeys"},"Structs/ChatStreamResult/Choice/ChoiceLogprobs/ChatCompletionTokenLogprob/TopLogprob.html#/s:6OpenAI16ChatStreamResultV6ChoiceV0F8LogprobsV0C22CompletionTokenLogprobV03TopJ0V5tokenSSvp":{"name":"token","abstract":"

The token.

","parent_name":"TopLogprob"},"Structs/ChatStreamResult/Choice/ChoiceLogprobs/ChatCompletionTokenLogprob/TopLogprob.html#/s:6OpenAI16ChatStreamResultV6ChoiceV0F8LogprobsV0C22CompletionTokenLogprobV03TopJ0V5bytesSaySiGSgvp":{"name":"bytes","abstract":"

A list of integers representing the UTF-8 bytes representation of the token. Useful in instances where characters are represented by multiple tokens and their byte representations must be combined to generate the correct text representation. Can be null if there is no bytes representation for the token.

","parent_name":"TopLogprob"},"Structs/ChatStreamResult/Choice/ChoiceLogprobs/ChatCompletionTokenLogprob/TopLogprob.html#/s:6OpenAI16ChatStreamResultV6ChoiceV0F8LogprobsV0C22CompletionTokenLogprobV03TopJ0V7logprobSdvp":{"name":"logprob","abstract":"

The log probability of this token.

","parent_name":"TopLogprob"},"Structs/ChatStreamResult/Choice/ChoiceLogprobs/ChatCompletionTokenLogprob.html#/s:6OpenAI16ChatStreamResultV6ChoiceV0F8LogprobsV0C22CompletionTokenLogprobV5tokenSSvp":{"name":"token","abstract":"

The token.

","parent_name":"ChatCompletionTokenLogprob"},"Structs/ChatStreamResult/Choice/ChoiceLogprobs/ChatCompletionTokenLogprob.html#/s:6OpenAI16ChatStreamResultV6ChoiceV0F8LogprobsV0C22CompletionTokenLogprobV5bytesSaySiGSgvp":{"name":"bytes","abstract":"

A list of integers representing the UTF-8 bytes representation of the token. Useful in instances where characters are represented by multiple tokens and their byte representations must be combined to generate the correct text representation. Can be null if there is no bytes representation for the token.

","parent_name":"ChatCompletionTokenLogprob"},"Structs/ChatStreamResult/Choice/ChoiceLogprobs/ChatCompletionTokenLogprob.html#/s:6OpenAI16ChatStreamResultV6ChoiceV0F8LogprobsV0C22CompletionTokenLogprobV7logprobSdvp":{"name":"logprob","abstract":"

The log probability of this token.

","parent_name":"ChatCompletionTokenLogprob"},"Structs/ChatStreamResult/Choice/ChoiceLogprobs/ChatCompletionTokenLogprob.html#/s:6OpenAI16ChatStreamResultV6ChoiceV0F8LogprobsV0C22CompletionTokenLogprobV03topG0SayAI03TopJ0VGSgvp":{"name":"topLogprobs","abstract":"

List of the most likely tokens and their log probability, at this token position. In rare cases, there may be fewer than the number of requested top_logprobs returned.

","parent_name":"ChatCompletionTokenLogprob"},"Structs/ChatStreamResult/Choice/ChoiceLogprobs/ChatCompletionTokenLogprob/TopLogprob.html":{"name":"TopLogprob","parent_name":"ChatCompletionTokenLogprob"},"Structs/ChatStreamResult/Choice/ChoiceLogprobs/ChatCompletionTokenLogprob/CodingKeys.html":{"name":"CodingKeys","parent_name":"ChatCompletionTokenLogprob"},"Structs/ChatStreamResult/Choice/ChoiceLogprobs.html#/s:6OpenAI16ChatStreamResultV6ChoiceV0F8LogprobsV7contentSayAG0C22CompletionTokenLogprobVGSgvp":{"name":"content","abstract":"

A list of message content tokens with log probability information.

","parent_name":"ChoiceLogprobs"},"Structs/ChatStreamResult/Choice/ChoiceLogprobs/ChatCompletionTokenLogprob.html":{"name":"ChatCompletionTokenLogprob","parent_name":"ChoiceLogprobs"},"Structs/ChatStreamResult/Choice/ChoiceDelta/CodingKeys.html#/s:6OpenAI16ChatStreamResultV6ChoiceV0F5DeltaV10CodingKeysO7contentyA2ImF":{"name":"content","parent_name":"CodingKeys"},"Structs/ChatStreamResult/Choice/ChoiceDelta/CodingKeys.html#/s:6OpenAI16ChatStreamResultV6ChoiceV0F5DeltaV10CodingKeysO4roleyA2ImF":{"name":"role","parent_name":"CodingKeys"},"Structs/ChatStreamResult/Choice/ChoiceDelta/CodingKeys.html#/s:6OpenAI16ChatStreamResultV6ChoiceV0F5DeltaV10CodingKeysO9toolCallsyA2ImF":{"name":"toolCalls","parent_name":"CodingKeys"},"Structs/ChatStreamResult/Choice/ChoiceDelta/ChoiceDeltaToolCall/ChoiceDeltaToolCallFunction.html#/s:6OpenAI16ChatStreamResultV6ChoiceV0F5DeltaV0fG8ToolCallV0fghI8FunctionV9argumentsSSSgvp":{"name":"arguments","abstract":"

The arguments to call the function with, as generated by the model in JSON format. Note that the model does not always generate valid JSON, and may hallucinate parameters not defined by your function schema. Validate the arguments in your code before calling your function.

","parent_name":"ChoiceDeltaToolCallFunction"},"Structs/ChatStreamResult/Choice/ChoiceDelta/ChoiceDeltaToolCall/ChoiceDeltaToolCallFunction.html#/s:6OpenAI16ChatStreamResultV6ChoiceV0F5DeltaV0fG8ToolCallV0fghI8FunctionV4nameSSSgvp":{"name":"name","abstract":"

The name of the function to call.

","parent_name":"ChoiceDeltaToolCallFunction"},"Structs/ChatStreamResult/Choice/ChoiceDelta/ChoiceDeltaToolCall/ChoiceDeltaToolCallFunction.html#/s:6OpenAI16ChatStreamResultV6ChoiceV0F5DeltaV0fG8ToolCallV0fghI8FunctionV9arguments4nameAKSSSg_ANtcfc":{"name":"init(arguments:name:)","parent_name":"ChoiceDeltaToolCallFunction"},"Structs/ChatStreamResult/Choice/ChoiceDelta/ChoiceDeltaToolCall.html#/s:6OpenAI16ChatStreamResultV6ChoiceV0F5DeltaV0fG8ToolCallV2idSSSgvp":{"name":"id","abstract":"

The ID of the tool call.

","parent_name":"ChoiceDeltaToolCall"},"Structs/ChatStreamResult/Choice/ChoiceDelta/ChoiceDeltaToolCall.html#/s:6OpenAI16ChatStreamResultV6ChoiceV0F5DeltaV0fG8ToolCallV8functionAI0fghI8FunctionVSgvp":{"name":"function","abstract":"

The function that the model called.

","parent_name":"ChoiceDeltaToolCall"},"Structs/ChatStreamResult/Choice/ChoiceDelta/ChoiceDeltaToolCall.html#/s:6OpenAI16ChatStreamResultV6ChoiceV0F5DeltaV0fG8ToolCallV4typeSSSgvp":{"name":"type","abstract":"

The type of the tool. Currently, only function is supported.

","parent_name":"ChoiceDeltaToolCall"},"Structs/ChatStreamResult/Choice/ChoiceDelta/ChoiceDeltaToolCall.html#/s:6OpenAI16ChatStreamResultV6ChoiceV0F5DeltaV0fG8ToolCallV5index2id8functionAISi_SSSgAI0fghI8FunctionVSgtcfc":{"name":"init(index:id:function:)","parent_name":"ChoiceDeltaToolCall"},"Structs/ChatStreamResult/Choice/ChoiceDelta/ChoiceDeltaToolCall/ChoiceDeltaToolCallFunction.html":{"name":"ChoiceDeltaToolCallFunction","parent_name":"ChoiceDeltaToolCall"},"Structs/ChatStreamResult/Choice/ChoiceDelta.html#/s:6OpenAI16ChatStreamResultV6ChoiceV0F5DeltaV4Rolea":{"name":"Role","parent_name":"ChoiceDelta"},"Structs/ChatStreamResult/Choice/ChoiceDelta.html#/s:6OpenAI16ChatStreamResultV6ChoiceV0F5DeltaV7contentSSSgvp":{"name":"content","abstract":"

The contents of the chunk message.

","parent_name":"ChoiceDelta"},"Structs/ChatStreamResult/Choice/ChoiceDelta.html#/s:6OpenAI16ChatStreamResultV6ChoiceV0F5DeltaV4roleAA0C5QueryV0C22CompletionMessageParamO4RoleOSgvp":{"name":"role","abstract":"

The role of the author of this message.

","parent_name":"ChoiceDelta"},"Structs/ChatStreamResult/Choice/ChoiceDelta.html#/s:6OpenAI16ChatStreamResultV6ChoiceV0F5DeltaV9toolCallsSayAG0fG8ToolCallVGSgvp":{"name":"toolCalls","parent_name":"ChoiceDelta"},"Structs/ChatStreamResult/Choice/ChoiceDelta/ChoiceDeltaToolCall.html":{"name":"ChoiceDeltaToolCall","parent_name":"ChoiceDelta"},"Structs/ChatStreamResult/Choice/ChoiceDelta/CodingKeys.html":{"name":"CodingKeys","parent_name":"ChoiceDelta"},"Structs/ChatStreamResult/Choice.html#/s:6OpenAI16ChatStreamResultV6ChoiceV12FinishReasona":{"name":"FinishReason","parent_name":"Choice"},"Structs/ChatStreamResult/Choice/ChoiceDelta.html":{"name":"ChoiceDelta","parent_name":"Choice"},"Structs/ChatStreamResult/Choice.html#/s:6OpenAI16ChatStreamResultV6ChoiceV5deltaAE0F5DeltaVvp":{"name":"delta","abstract":"

A chat completion delta generated by streamed model responses.

","parent_name":"Choice"},"Structs/ChatStreamResult/Choice.html#/s:6OpenAI16ChatStreamResultV6ChoiceV12finishReasonAA0cE0VADV06FinishH0OSgvp":{"name":"finishReason","abstract":"

The reason the model stopped generating tokens.","parent_name":"Choice"},"Structs/ChatStreamResult/Choice.html#/s:6OpenAI16ChatStreamResultV6ChoiceV8logprobsAE0F8LogprobsVSgvp":{"name":"logprobs","abstract":"

Log probability information for the choice.

","parent_name":"Choice"},"Structs/ChatStreamResult/Choice/ChoiceLogprobs.html":{"name":"ChoiceLogprobs","parent_name":"Choice"},"Structs/ChatStreamResult/Choice/CodingKeys.html":{"name":"CodingKeys","parent_name":"Choice"},"Structs/ChatStreamResult/Choice.html":{"name":"Choice","parent_name":"ChatStreamResult"},"Structs/ChatStreamResult.html#/s:6OpenAI16ChatStreamResultV2idSSvp":{"name":"id","abstract":"

A unique identifier for the chat completion. Each chunk has the same ID.

","parent_name":"ChatStreamResult"},"Structs/ChatStreamResult.html#/s:6OpenAI16ChatStreamResultV6objectSSvp":{"name":"object","abstract":"

The object type, which is always chat.completion.chunk.

","parent_name":"ChatStreamResult"},"Structs/ChatStreamResult.html#/s:6OpenAI16ChatStreamResultV7createdSdvp":{"name":"created","abstract":"

The Unix timestamp (in seconds) of when the chat completion was created.","parent_name":"ChatStreamResult"},"Structs/ChatStreamResult.html#/s:6OpenAI16ChatStreamResultV5modelSSvp":{"name":"model","abstract":"

The model to generate the completion.

","parent_name":"ChatStreamResult"},"Structs/ChatStreamResult.html#/s:6OpenAI16ChatStreamResultV7choicesSayAC6ChoiceVGvp":{"name":"choices","abstract":"

A list of chat completion choices.","parent_name":"ChatStreamResult"},"Structs/ChatStreamResult.html#/s:6OpenAI16ChatStreamResultV17systemFingerprintSSSgvp":{"name":"systemFingerprint","abstract":"

This fingerprint represents the backend configuration that the model runs with. Can be used in conjunction with the seed request parameter to understand when backend changes have been made that might impact determinism.

","parent_name":"ChatStreamResult"},"Structs/ChatStreamResult/CodingKeys.html":{"name":"CodingKeys","parent_name":"ChatStreamResult"},"Structs/ChatResult/CodingKeys.html#/s:6OpenAI10ChatResultV10CodingKeysO2idyA2EmF":{"name":"id","parent_name":"CodingKeys"},"Structs/ChatResult/CodingKeys.html#/s:6OpenAI10ChatResultV10CodingKeysO6objectyA2EmF":{"name":"object","parent_name":"CodingKeys"},"Structs/ChatResult/CodingKeys.html#/s:6OpenAI10ChatResultV10CodingKeysO7createdyA2EmF":{"name":"created","parent_name":"CodingKeys"},"Structs/ChatResult/CodingKeys.html#/s:6OpenAI10ChatResultV10CodingKeysO5modelyA2EmF":{"name":"model","parent_name":"CodingKeys"},"Structs/ChatResult/CodingKeys.html#/s:6OpenAI10ChatResultV10CodingKeysO7choicesyA2EmF":{"name":"choices","parent_name":"CodingKeys"},"Structs/ChatResult/CodingKeys.html#/s:6OpenAI10ChatResultV10CodingKeysO5usageyA2EmF":{"name":"usage","parent_name":"CodingKeys"},"Structs/ChatResult/CodingKeys.html#/s:6OpenAI10ChatResultV10CodingKeysO17systemFingerprintyA2EmF":{"name":"systemFingerprint","parent_name":"CodingKeys"},"Structs/ChatResult/CompletionUsage.html#/s:6OpenAI10ChatResultV15CompletionUsageV16completionTokensSivp":{"name":"completionTokens","abstract":"

Number of tokens in the generated completion.

","parent_name":"CompletionUsage"},"Structs/ChatResult/CompletionUsage.html#/s:6OpenAI10ChatResultV15CompletionUsageV12promptTokensSivp":{"name":"promptTokens","abstract":"

Number of tokens in the prompt.

","parent_name":"CompletionUsage"},"Structs/ChatResult/CompletionUsage.html#/s:6OpenAI10ChatResultV15CompletionUsageV11totalTokensSivp":{"name":"totalTokens","abstract":"

Total number of tokens used in the request (prompt + completion).

","parent_name":"CompletionUsage"},"Structs/ChatResult/Choice/FinishReason.html#/s:6OpenAI10ChatResultV6ChoiceV12FinishReasonO4stopyA2GmF":{"name":"stop","parent_name":"FinishReason"},"Structs/ChatResult/Choice/FinishReason.html#/s:6OpenAI10ChatResultV6ChoiceV12FinishReasonO6lengthyA2GmF":{"name":"length","parent_name":"FinishReason"},"Structs/ChatResult/Choice/FinishReason.html#/s:6OpenAI10ChatResultV6ChoiceV12FinishReasonO9toolCallsyA2GmF":{"name":"toolCalls","parent_name":"FinishReason"},"Structs/ChatResult/Choice/FinishReason.html#/s:6OpenAI10ChatResultV6ChoiceV12FinishReasonO13contentFilteryA2GmF":{"name":"contentFilter","parent_name":"FinishReason"},"Structs/ChatResult/Choice/FinishReason.html#/s:6OpenAI10ChatResultV6ChoiceV12FinishReasonO12functionCallyA2GmF":{"name":"functionCall","parent_name":"FinishReason"},"Structs/ChatResult/Choice/CodingKeys.html#/s:6OpenAI10ChatResultV6ChoiceV10CodingKeysO8logprobsyA2GmF":{"name":"logprobs","parent_name":"CodingKeys"},"Structs/ChatResult/Choice/CodingKeys.html#/s:6OpenAI10ChatResultV6ChoiceV10CodingKeysO7messageyA2GmF":{"name":"message","parent_name":"CodingKeys"},"Structs/ChatResult/Choice/CodingKeys.html#/s:6OpenAI10ChatResultV6ChoiceV10CodingKeysO12finishReasonyA2GmF":{"name":"finishReason","parent_name":"CodingKeys"},"Structs/ChatResult/Choice/ChoiceLogprobs/ChatCompletionTokenLogprob/CodingKeys.html#/s:6OpenAI10ChatResultV6ChoiceV0E8LogprobsV0C22CompletionTokenLogprobV10CodingKeysO5tokenyA2KmF":{"name":"token","parent_name":"CodingKeys"},"Structs/ChatResult/Choice/ChoiceLogprobs/ChatCompletionTokenLogprob/CodingKeys.html#/s:6OpenAI10ChatResultV6ChoiceV0E8LogprobsV0C22CompletionTokenLogprobV10CodingKeysO5bytesyA2KmF":{"name":"bytes","parent_name":"CodingKeys"},"Structs/ChatResult/Choice/ChoiceLogprobs/ChatCompletionTokenLogprob/CodingKeys.html#/s:6OpenAI10ChatResultV6ChoiceV0E8LogprobsV0C22CompletionTokenLogprobV10CodingKeysO7logprobyA2KmF":{"name":"logprob","parent_name":"CodingKeys"},"Structs/ChatResult/Choice/C
hoiceLogprobs/ChatCompletionTokenLogprob/CodingKeys.html#/s:6OpenAI10ChatResultV6ChoiceV0E8LogprobsV0C22CompletionTokenLogprobV10CodingKeysO03topF0yA2KmF":{"name":"topLogprobs","parent_name":"CodingKeys"},"Structs/ChatResult/Choice/ChoiceLogprobs/ChatCompletionTokenLogprob/TopLogprob.html#/s:6OpenAI10ChatResultV6ChoiceV0E8LogprobsV0C22CompletionTokenLogprobV03TopI0V5tokenSSvp":{"name":"token","abstract":"

The token.

","parent_name":"TopLogprob"},"Structs/ChatResult/Choice/ChoiceLogprobs/ChatCompletionTokenLogprob/TopLogprob.html#/s:6OpenAI10ChatResultV6ChoiceV0E8LogprobsV0C22CompletionTokenLogprobV03TopI0V5bytesSaySiGSgvp":{"name":"bytes","abstract":"

A list of integers representing the UTF-8 bytes representation of the token.","parent_name":"TopLogprob"},"Structs/ChatResult/Choice/ChoiceLogprobs/ChatCompletionTokenLogprob/TopLogprob.html#/s:6OpenAI10ChatResultV6ChoiceV0E8LogprobsV0C22CompletionTokenLogprobV03TopI0V7logprobSdvp":{"name":"logprob","abstract":"

The log probability of this token.

","parent_name":"TopLogprob"},"Structs/ChatResult/Choice/ChoiceLogprobs/ChatCompletionTokenLogprob.html#/s:6OpenAI10ChatResultV6ChoiceV0E8LogprobsV0C22CompletionTokenLogprobV5tokenSSvp":{"name":"token","abstract":"

The token.

","parent_name":"ChatCompletionTokenLogprob"},"Structs/ChatResult/Choice/ChoiceLogprobs/ChatCompletionTokenLogprob.html#/s:6OpenAI10ChatResultV6ChoiceV0E8LogprobsV0C22CompletionTokenLogprobV5bytesSaySiGSgvp":{"name":"bytes","abstract":"

A list of integers representing the UTF-8 bytes representation of the token.","parent_name":"ChatCompletionTokenLogprob"},"Structs/ChatResult/Choice/ChoiceLogprobs/ChatCompletionTokenLogprob.html#/s:6OpenAI10ChatResultV6ChoiceV0E8LogprobsV0C22CompletionTokenLogprobV7logprobSdvp":{"name":"logprob","abstract":"

The log probability of this token.

","parent_name":"ChatCompletionTokenLogprob"},"Structs/ChatResult/Choice/ChoiceLogprobs/ChatCompletionTokenLogprob.html#/s:6OpenAI10ChatResultV6ChoiceV0E8LogprobsV0C22CompletionTokenLogprobV03topF0SayAI03TopI0VGvp":{"name":"topLogprobs","abstract":"

List of the most likely tokens and their log probability, at this token position.","parent_name":"ChatCompletionTokenLogprob"},"Structs/ChatResult/Choice/ChoiceLogprobs/ChatCompletionTokenLogprob/TopLogprob.html":{"name":"TopLogprob","parent_name":"ChatCompletionTokenLogprob"},"Structs/ChatResult/Choice/ChoiceLogprobs/ChatCompletionTokenLogprob/CodingKeys.html":{"name":"CodingKeys","parent_name":"ChatCompletionTokenLogprob"},"Structs/ChatResult/Choice/ChoiceLogprobs.html#/s:6OpenAI10ChatResultV6ChoiceV0E8LogprobsV7contentSayAG0C22CompletionTokenLogprobVGSgvp":{"name":"content","parent_name":"ChoiceLogprobs"},"Structs/ChatResult/Choice/ChoiceLogprobs/ChatCompletionTokenLogprob.html":{"name":"ChatCompletionTokenLogprob","parent_name":"ChoiceLogprobs"},"Structs/ChatResult/Choice.html#/s:6OpenAI10ChatResultV6ChoiceV0C17CompletionMessagea":{"name":"ChatCompletionMessage","parent_name":"Choice"},"Structs/ChatResult/Choice.html#/s:6OpenAI10ChatResultV6ChoiceV8logprobsAE0E8LogprobsVSgvp":{"name":"logprobs","abstract":"

Log probability information for the choice.

","parent_name":"Choice"},"Structs/ChatResult/Choice.html#/s:6OpenAI10ChatResultV6ChoiceV7messageAA0C5QueryV0C22CompletionMessageParamOvp":{"name":"message","abstract":"

A chat completion message generated by the model.

","parent_name":"Choice"},"Structs/ChatResult/Choice.html#/s:6OpenAI10ChatResultV6ChoiceV12finishReasonSSSgvp":{"name":"finishReason","abstract":"

The reason the model stopped generating tokens. This will be stop if the model hit a natural stop point or a provided stop sequence, length if the maximum number of tokens specified in the request was reached, content_filter if content was omitted due to a flag from our content filters, tool_calls if the model called a tool, or function_call (deprecated) if the model called a function.

","parent_name":"Choice"},"Structs/ChatResult/Choice/ChoiceLogprobs.html":{"name":"ChoiceLogprobs","parent_name":"Choice"},"Structs/ChatResult/Choice/CodingKeys.html":{"name":"CodingKeys","parent_name":"Choice"},"Structs/ChatResult/Choice/FinishReason.html":{"name":"FinishReason","parent_name":"Choice"},"Structs/ChatResult/Choice.html":{"name":"Choice","abstract":"

Mimics the choices array in the chat completion object.

","parent_name":"ChatResult"},"Structs/ChatResult/CompletionUsage.html":{"name":"CompletionUsage","parent_name":"ChatResult"},"Structs/ChatResult.html#/s:6OpenAI10ChatResultV2idSSvp":{"name":"id","abstract":"

A unique identifier for the chat completion.

","parent_name":"ChatResult"},"Structs/ChatResult.html#/s:6OpenAI10ChatResultV6objectSSvp":{"name":"object","abstract":"

The object type, which is always chat.completion.

","parent_name":"ChatResult"},"Structs/ChatResult.html#/s:6OpenAI10ChatResultV7createdSdvp":{"name":"created","abstract":"

The Unix timestamp (in seconds) of when the chat completion was created.

","parent_name":"ChatResult"},"Structs/ChatResult.html#/s:6OpenAI10ChatResultV5modelSSvp":{"name":"model","abstract":"

The model used for the chat completion.

","parent_name":"ChatResult"},"Structs/ChatResult.html#/s:6OpenAI10ChatResultV7choicesSayAC6ChoiceVGvp":{"name":"choices","abstract":"

A list of chat completion choices. Can be more than one if n is greater than 1.

","parent_name":"ChatResult"},"Structs/ChatResult.html#/s:6OpenAI10ChatResultV5usageAC15CompletionUsageVSgvp":{"name":"usage","abstract":"

Usage statistics for the completion request.

","parent_name":"ChatResult"},"Structs/ChatResult.html#/s:6OpenAI10ChatResultV17systemFingerprintSSSgvp":{"name":"systemFingerprint","abstract":"

This fingerprint represents the backend configuration that the model runs with.","parent_name":"ChatResult"},"Structs/ChatResult/CodingKeys.html":{"name":"CodingKeys","parent_name":"ChatResult"},"Structs/ChatQuery/CodingKeys.html#/s:6OpenAI9ChatQueryV10CodingKeysO8messagesyA2EmF":{"name":"messages","parent_name":"CodingKeys"},"Structs/ChatQuery/CodingKeys.html#/s:6OpenAI9ChatQueryV10CodingKeysO5modelyA2EmF":{"name":"model","parent_name":"CodingKeys"},"Structs/ChatQuery/CodingKeys.html#/s:6OpenAI9ChatQueryV10CodingKeysO16frequencyPenaltyyA2EmF":{"name":"frequencyPenalty","parent_name":"CodingKeys"},"Structs/ChatQuery/CodingKeys.html#/s:6OpenAI9ChatQueryV10CodingKeysO9logitBiasyA2EmF":{"name":"logitBias","parent_name":"CodingKeys"},"Structs/ChatQuery/CodingKeys.html#/s:6OpenAI9ChatQueryV10CodingKeysO8logprobsyA2EmF":{"name":"logprobs","parent_name":"CodingKeys"},"Structs/ChatQuery/CodingKeys.html#/s:6OpenAI9ChatQueryV10CodingKeysO9maxTokensyA2EmF":{"name":"maxTokens","parent_name":"CodingKeys"},"Structs/ChatQuery/CodingKeys.html#/s:6OpenAI9ChatQueryV10CodingKeysO1nyA2EmF":{"name":"n","parent_name":"CodingKeys"},"Structs/ChatQuery/CodingKeys.html#/s:6OpenAI9ChatQueryV10CodingKeysO15presencePenaltyyA2EmF":{"name":"presencePenalty","parent_name":"CodingKeys"},"Structs/ChatQuery/CodingKeys.html#/s:6OpenAI9ChatQueryV10CodingKeysO14responseFormatyA2EmF":{"name":"responseFormat","parent_name":"CodingKeys"},"Structs/ChatQuery/CodingKeys.html#/s:6OpenAI9ChatQueryV10CodingKeysO4seedyA2EmF":{"name":"seed","parent_name":"CodingKeys"},"Structs/ChatQuery/CodingKeys.html#/s:6OpenAI9ChatQueryV10CodingKeysO4stopyA2EmF":{"name":"stop","parent_name":"CodingKeys"},"Structs/ChatQuery/CodingKeys.html#/s:6OpenAI9ChatQueryV10CodingKeysO11temperatureyA2EmF":{"name":"temperature","parent_name":"CodingKeys"},"Structs/ChatQuery/CodingKeys.html#/s:6OpenAI9ChatQueryV10CodingKeysO10toolChoiceyA2EmF":{"name":"toolChoice","parent_name":"CodingKeys"},"Structs/ChatQuery/CodingKeys.html#/s:6OpenAI9ChatQ
ueryV10CodingKeysO5toolsyA2EmF":{"name":"tools","parent_name":"CodingKeys"},"Structs/ChatQuery/CodingKeys.html#/s:6OpenAI9ChatQueryV10CodingKeysO11topLogprobsyA2EmF":{"name":"topLogprobs","parent_name":"CodingKeys"},"Structs/ChatQuery/CodingKeys.html#/s:6OpenAI9ChatQueryV10CodingKeysO4topPyA2EmF":{"name":"topP","parent_name":"CodingKeys"},"Structs/ChatQuery/CodingKeys.html#/s:6OpenAI9ChatQueryV10CodingKeysO4useryA2EmF":{"name":"user","parent_name":"CodingKeys"},"Structs/ChatQuery/CodingKeys.html#/s:6OpenAI9ChatQueryV10CodingKeysO6streamyA2EmF":{"name":"stream","parent_name":"CodingKeys"},"Structs/ChatQuery/ChatCompletionToolParam/ToolsType.html#/s:6OpenAI9ChatQueryV0C19CompletionToolParamV9ToolsTypeO8functionyA2GmF":{"name":"function","parent_name":"ToolsType"},"Structs/ChatQuery/ChatCompletionToolParam/FunctionDefinition/FunctionParameters/JSONType.html#/s:6OpenAI9ChatQueryV0C19CompletionToolParamV18FunctionDefinitionV0H10ParametersV8JSONTypeO7integeryA2KmF":{"name":"integer","parent_name":"JSONType"},"Structs/ChatQuery/ChatCompletionToolParam/FunctionDefinition/FunctionParameters/JSONType.html#/s:6OpenAI9ChatQueryV0C19CompletionToolParamV18FunctionDefinitionV0H10ParametersV8JSONTypeO6stringyA2KmF":{"name":"string","parent_name":"JSONType"},"Structs/ChatQuery/ChatCompletionToolParam/FunctionDefinition/FunctionParameters/JSONType.html#/s:6OpenAI9ChatQueryV0C19CompletionToolParamV18FunctionDefinitionV0H10ParametersV8JSONTypeO7booleanyA2KmF":{"name":"boolean","parent_name":"JSONType"},"Structs/ChatQuery/ChatCompletionToolParam/FunctionDefinition/FunctionParameters/JSONType.html#/s:6OpenAI9ChatQueryV0C19CompletionToolParamV18FunctionDefinitionV0H10ParametersV8JSONTypeO5arrayyA2KmF":{"name":"array","parent_name":"JSONType"},"Structs/ChatQuery/ChatCompletionToolParam/FunctionDefinition/FunctionParameters/JSONType.html#/s:6OpenAI9ChatQueryV0C19CompletionToolParamV18FunctionDefinitionV0H10ParametersV8JSONTypeO6objectyA2KmF":{"name":"object","parent_name":"JSONType"},"Struc
ts/ChatQuery/ChatCompletionToolParam/FunctionDefinition/FunctionParameters/JSONType.html#/s:6OpenAI9ChatQueryV0C19CompletionToolParamV18FunctionDefinitionV0H10ParametersV8JSONTypeO6numberyA2KmF":{"name":"number","parent_name":"JSONType"},"Structs/ChatQuery/ChatCompletionToolParam/FunctionDefinition/FunctionParameters/JSONType.html#/s:6OpenAI9ChatQueryV0C19CompletionToolParamV18FunctionDefinitionV0H10ParametersV8JSONTypeO4nullyA2KmF":{"name":"null","parent_name":"JSONType"},"Structs/ChatQuery/ChatCompletionToolParam/FunctionDefinition/FunctionParameters/Property/Items.html#/s:6OpenAI9ChatQueryV0C19CompletionToolParamV18FunctionDefinitionV0H10ParametersV8PropertyV5ItemsV8JSONTypea":{"name":"JSONType","parent_name":"Items"},"Structs/ChatQuery/ChatCompletionToolParam/FunctionDefinition/FunctionParameters/Property/Items.html#/s:6OpenAI9ChatQueryV0C19CompletionToolParamV18FunctionDefinitionV0H10ParametersV8PropertyV5ItemsV4typeAI8JSONTypeOvp":{"name":"type","parent_name":"Items"},"Structs/ChatQuery/ChatCompletionToolParam/FunctionDefinition/FunctionParameters/Property/Items.html#/s:6OpenAI9ChatQueryV0C19CompletionToolParamV18FunctionDefinitionV0H10ParametersV8PropertyV5ItemsV10propertiesSDySSAKGSgvp":{"name":"properties","parent_name":"Items"},"Structs/ChatQuery/ChatCompletionToolParam/FunctionDefinition/FunctionParameters/Property/Items.html#/s:6OpenAI9ChatQueryV0C19CompletionToolParamV18FunctionDefinitionV0H10ParametersV8PropertyV5ItemsV7patternSSSgvp":{"name":"pattern","parent_name":"Items"},"Structs/ChatQuery/ChatCompletionToolParam/FunctionDefinition/FunctionParameters/Property/Items.html#/s:6OpenAI9ChatQueryV0C19CompletionToolParamV18FunctionDefinitionV0H10ParametersV8PropertyV5ItemsV5constSSSgvp":{"name":"const","parent_name":"Items"},"Structs/ChatQuery/ChatCompletionToolParam/FunctionDefinition/FunctionParameters/Property/Items.html#/s:6OpenAI9ChatQueryV0C19CompletionToolParamV18FunctionDefinitionV0H10ParametersV8PropertyV5ItemsV4enumSaySSGSgvp":{"name":"enum","pa
rent_name":"Items"},"Structs/ChatQuery/ChatCompletionToolParam/FunctionDefinition/FunctionParameters/Property/Items.html#/s:6OpenAI9ChatQueryV0C19CompletionToolParamV18FunctionDefinitionV0H10ParametersV8PropertyV5ItemsV10multipleOfSiSgvp":{"name":"multipleOf","parent_name":"Items"},"Structs/ChatQuery/ChatCompletionToolParam/FunctionDefinition/FunctionParameters/Property/Items.html#/s:6OpenAI9ChatQueryV0C19CompletionToolParamV18FunctionDefinitionV0H10ParametersV8PropertyV5ItemsV7minimumSdSgvp":{"name":"minimum","parent_name":"Items"},"Structs/ChatQuery/ChatCompletionToolParam/FunctionDefinition/FunctionParameters/Property/Items.html#/s:6OpenAI9ChatQueryV0C19CompletionToolParamV18FunctionDefinitionV0H10ParametersV8PropertyV5ItemsV7maximumSdSgvp":{"name":"maximum","parent_name":"Items"},"Structs/ChatQuery/ChatCompletionToolParam/FunctionDefinition/FunctionParameters/Property/Items.html#/s:6OpenAI9ChatQueryV0C19CompletionToolParamV18FunctionDefinitionV0H10ParametersV8PropertyV5ItemsV03minL0SiSgvp":{"name":"minItems","parent_name":"Items"},"Structs/ChatQuery/ChatCompletionToolParam/FunctionDefinition/FunctionParameters/Property/Items.html#/s:6OpenAI9ChatQueryV0C19CompletionToolParamV18FunctionDefinitionV0H10ParametersV8PropertyV5ItemsV03maxL0SiSgvp":{"name":"maxItems","parent_name":"Items"},"Structs/ChatQuery/ChatCompletionToolParam/FunctionDefinition/FunctionParameters/Property/Items.html#/s:6OpenAI9ChatQueryV0C19CompletionToolParamV18FunctionDefinitionV0H10ParametersV8PropertyV5ItemsV06uniqueL0SbSgvp":{"name":"uniqueItems","parent_name":"Items"},"Structs/ChatQuery/ChatCompletionToolParam/FunctionDefinition/FunctionParameters/Property/Items.html#/s:6OpenAI9ChatQueryV0C19CompletionToolParamV18FunctionDefinitionV0H10ParametersV8PropertyV5ItemsV4type10properties7pattern5const4enum10multipleOf7minimum7maximum03minL003maxL006uniqueL0AmI8JSONTypeO_SDySSAKGSgSSSgA1_SaySSGSgSiSgSdSgA5_A4_A4_SbSgtcfc":{"name":"init(type:properties:pattern:const:enum:multipleOf:minimum:maximum:mi
nItems:maxItems:uniqueItems:)","parent_name":"Items"},"Structs/ChatQuery/ChatCompletionToolParam/FunctionDefinition/FunctionParameters/Property.html#/s:6OpenAI9ChatQueryV0C19CompletionToolParamV18FunctionDefinitionV0H10ParametersV8PropertyV8JSONTypea":{"name":"JSONType","parent_name":"Property"},"Structs/ChatQuery/ChatCompletionToolParam/FunctionDefinition/FunctionParameters/Property.html#/s:6OpenAI9ChatQueryV0C19CompletionToolParamV18FunctionDefinitionV0H10ParametersV8PropertyV4typeAI8JSONTypeOvp":{"name":"type","parent_name":"Property"},"Structs/ChatQuery/ChatCompletionToolParam/FunctionDefinition/FunctionParameters/Property.html#/s:6OpenAI9ChatQueryV0C19CompletionToolParamV18FunctionDefinitionV0H10ParametersV8PropertyV11descriptionSSSgvp":{"name":"description","parent_name":"Property"},"Structs/ChatQuery/ChatCompletionToolParam/FunctionDefinition/FunctionParameters/Property.html#/s:6OpenAI9ChatQueryV0C19CompletionToolParamV18FunctionDefinitionV0H10ParametersV8PropertyV6formatSSSgvp":{"name":"format","parent_name":"Property"},"Structs/ChatQuery/ChatCompletionToolParam/FunctionDefinition/FunctionParameters/Property.html#/s:6OpenAI9ChatQueryV0C19CompletionToolParamV18FunctionDefinitionV0H10ParametersV8PropertyV5itemsAK5ItemsVSgvp":{"name":"items","parent_name":"Property"},"Structs/ChatQuery/ChatCompletionToolParam/FunctionDefinition/FunctionParameters/Property.html#/s:6OpenAI9ChatQueryV0C19CompletionToolParamV18FunctionDefinitionV0H10ParametersV8PropertyV8requiredSaySSGSgvp":{"name":"required","parent_name":"Property"},"Structs/ChatQuery/ChatCompletionToolParam/FunctionDefinition/FunctionParameters/Property.html#/s:6OpenAI9ChatQueryV0C19CompletionToolParamV18FunctionDefinitionV0H10ParametersV8PropertyV7patternSSSgvp":{"name":"pattern","parent_name":"Property"},"Structs/ChatQuery/ChatCompletionToolParam/FunctionDefinition/FunctionParameters/Property.html#/s:6OpenAI9ChatQueryV0C19CompletionToolParamV18FunctionDefinitionV0H10ParametersV8PropertyV5constSSSgvp":{"name":"
const","parent_name":"Property"},"Structs/ChatQuery/ChatCompletionToolParam/FunctionDefinition/FunctionParameters/Property.html#/s:6OpenAI9ChatQueryV0C19CompletionToolParamV18FunctionDefinitionV0H10ParametersV8PropertyV4enumSaySSGSgvp":{"name":"enum","parent_name":"Property"},"Structs/ChatQuery/ChatCompletionToolParam/FunctionDefinition/FunctionParameters/Property.html#/s:6OpenAI9ChatQueryV0C19CompletionToolParamV18FunctionDefinitionV0H10ParametersV8PropertyV10multipleOfSiSgvp":{"name":"multipleOf","parent_name":"Property"},"Structs/ChatQuery/ChatCompletionToolParam/FunctionDefinition/FunctionParameters/Property.html#/s:6OpenAI9ChatQueryV0C19CompletionToolParamV18FunctionDefinitionV0H10ParametersV8PropertyV7minimumSdSgvp":{"name":"minimum","parent_name":"Property"},"Structs/ChatQuery/ChatCompletionToolParam/FunctionDefinition/FunctionParameters/Property.html#/s:6OpenAI9ChatQueryV0C19CompletionToolParamV18FunctionDefinitionV0H10ParametersV8PropertyV7maximumSdSgvp":{"name":"maximum","parent_name":"Property"},"Structs/ChatQuery/ChatCompletionToolParam/FunctionDefinition/FunctionParameters/Property.html#/s:6OpenAI9ChatQueryV0C19CompletionToolParamV18FunctionDefinitionV0H10ParametersV8PropertyV8minItemsSiSgvp":{"name":"minItems","parent_name":"Property"},"Structs/ChatQuery/ChatCompletionToolParam/FunctionDefinition/FunctionParameters/Property.html#/s:6OpenAI9ChatQueryV0C19CompletionToolParamV18FunctionDefinitionV0H10ParametersV8PropertyV8maxItemsSiSgvp":{"name":"maxItems","parent_name":"Property"},"Structs/ChatQuery/ChatCompletionToolParam/FunctionDefinition/FunctionParameters/Property.html#/s:6OpenAI9ChatQueryV0C19CompletionToolParamV18FunctionDefinitionV0H10ParametersV8PropertyV11uniqueItemsSbSgvp":{"name":"uniqueItems","parent_name":"Property"},"Structs/ChatQuery/ChatCompletionToolParam/FunctionDefinition/FunctionParameters/Property.html#/s:6OpenAI9ChatQueryV0C19CompletionToolParamV18FunctionDefinitionV0H10ParametersV8PropertyV4type11description6format5items8required7
pattern5const4enum10multipleOf7minimum7maximum8minItems03maxY006uniqueY0AkI8JSONTypeO_SSSgA0_AK0Y0VSgSaySSGSgA0_A0_A5_SiSgSdSgA7_A6_A6_SbSgtcfc":{"name":"init(type:description:format:items:required:pattern:const:enum:multipleOf:minimum:maximum:minItems:maxItems:uniqueItems:)","parent_name":"Property"},"Structs/ChatQuery/ChatCompletionToolParam/FunctionDefinition/FunctionParameters/Property/Items.html":{"name":"Items","parent_name":"Property"},"Structs/ChatQuery/ChatCompletionToolParam/FunctionDefinition/FunctionParameters.html#/s:6OpenAI9ChatQueryV0C19CompletionToolParamV18FunctionDefinitionV0H10ParametersV4typeAI8JSONTypeOvp":{"name":"type","parent_name":"FunctionParameters"},"Structs/ChatQuery/ChatCompletionToolParam/FunctionDefinition/FunctionParameters.html#/s:6OpenAI9ChatQueryV0C19CompletionToolParamV18FunctionDefinitionV0H10ParametersV10propertiesSDySSAI8PropertyVGSgvp":{"name":"properties","parent_name":"FunctionParameters"},"Structs/ChatQuery/ChatCompletionToolParam/FunctionDefinition/FunctionParameters.html#/s:6OpenAI9ChatQueryV0C19CompletionToolParamV18FunctionDefinitionV0H10ParametersV8requiredSaySSGSgvp":{"name":"required","parent_name":"FunctionParameters"},"Structs/ChatQuery/ChatCompletionToolParam/FunctionDefinition/FunctionParameters.html#/s:6OpenAI9ChatQueryV0C19CompletionToolParamV18FunctionDefinitionV0H10ParametersV7patternSSSgvp":{"name":"pattern","parent_name":"FunctionParameters"},"Structs/ChatQuery/ChatCompletionToolParam/FunctionDefinition/FunctionParameters.html#/s:6OpenAI9ChatQueryV0C19CompletionToolParamV18FunctionDefinitionV0H10ParametersV5constSSSgvp":{"name":"const","parent_name":"FunctionParameters"},"Structs/ChatQuery/ChatCompletionToolParam/FunctionDefinition/FunctionParameters.html#/s:6OpenAI9ChatQueryV0C19CompletionToolParamV18FunctionDefinitionV0H10ParametersV4enumSaySSGSgvp":{"name":"enum","parent_name":"FunctionParameters"},"Structs/ChatQuery/ChatCompletionToolParam/FunctionDefinition/FunctionParameters.html#/s:6OpenAI9ChatQuery
V0C19CompletionToolParamV18FunctionDefinitionV0H10ParametersV10multipleOfSiSgvp":{"name":"multipleOf","parent_name":"FunctionParameters"},"Structs/ChatQuery/ChatCompletionToolParam/FunctionDefinition/FunctionParameters.html#/s:6OpenAI9ChatQueryV0C19CompletionToolParamV18FunctionDefinitionV0H10ParametersV7minimumSiSgvp":{"name":"minimum","parent_name":"FunctionParameters"},"Structs/ChatQuery/ChatCompletionToolParam/FunctionDefinition/FunctionParameters.html#/s:6OpenAI9ChatQueryV0C19CompletionToolParamV18FunctionDefinitionV0H10ParametersV7maximumSiSgvp":{"name":"maximum","parent_name":"FunctionParameters"},"Structs/ChatQuery/ChatCompletionToolParam/FunctionDefinition/FunctionParameters.html#/s:6OpenAI9ChatQueryV0C19CompletionToolParamV18FunctionDefinitionV0H10ParametersV4type10properties8required7pattern5const4enum10multipleOf7minimum7maximumA2I8JSONTypeO_SDySSAI8PropertyVGSgSaySSGSgSSSgA_AZSiSgA0_A0_tcfc":{"name":"init(type:properties:required:pattern:const:enum:multipleOf:minimum:maximum:)","parent_name":"FunctionParameters"},"Structs/ChatQuery/ChatCompletionToolParam/FunctionDefinition/FunctionParameters/Property.html":{"name":"Property","parent_name":"FunctionParameters"},"Structs/ChatQuery/ChatCompletionToolParam/FunctionDefinition/FunctionParameters/JSONType.html":{"name":"JSONType","parent_name":"FunctionParameters"},"Structs/ChatQuery/ChatCompletionToolParam/FunctionDefinition.html#/s:6OpenAI9ChatQueryV0C19CompletionToolParamV18FunctionDefinitionV4nameSSvp":{"name":"name","abstract":"

The name of the function to be called. Must be a-z, A-Z, 0-9, or contain underscores and dashes, with a maximum length of 64.

","parent_name":"FunctionDefinition"},"Structs/ChatQuery/ChatCompletionToolParam/FunctionDefinition.html#/s:6OpenAI9ChatQueryV0C19CompletionToolParamV18FunctionDefinitionV11descriptionSSSgvp":{"name":"description","abstract":"

The description of what the function does.

","parent_name":"FunctionDefinition"},"Structs/ChatQuery/ChatCompletionToolParam/FunctionDefinition.html#/s:6OpenAI9ChatQueryV0C19CompletionToolParamV18FunctionDefinitionV10parametersAG0H10ParametersVSgvp":{"name":"parameters","abstract":"

The parameters the functions accepts, described as a JSON Schema object.","parent_name":"FunctionDefinition"},"Structs/ChatQuery/ChatCompletionToolParam/FunctionDefinition.html#/s:6OpenAI9ChatQueryV0C19CompletionToolParamV18FunctionDefinitionV4name11description10parametersAGSS_SSSgAG0H10ParametersVSgtcfc":{"name":"init(name:description:parameters:)","parent_name":"FunctionDefinition"},"Structs/ChatQuery/ChatCompletionToolParam/FunctionDefinition/FunctionParameters.html":{"name":"FunctionParameters","abstract":"

See the guide for examples, and the JSON Schema reference for documentation about the format.

","parent_name":"FunctionDefinition"},"Structs/ChatQuery/ChatCompletionToolParam.html#/s:6OpenAI9ChatQueryV0C19CompletionToolParamV8functionAE18FunctionDefinitionVvp":{"name":"function","parent_name":"ChatCompletionToolParam"},"Structs/ChatQuery/ChatCompletionToolParam.html#/s:6OpenAI9ChatQueryV0C19CompletionToolParamV4typeAE9ToolsTypeOvp":{"name":"type","parent_name":"ChatCompletionToolParam"},"Structs/ChatQuery/ChatCompletionToolParam.html#/s:6OpenAI9ChatQueryV0C19CompletionToolParamV8functionA2E18FunctionDefinitionV_tcfc":{"name":"init(function:)","parent_name":"ChatCompletionToolParam"},"Structs/ChatQuery/ChatCompletionToolParam/FunctionDefinition.html":{"name":"FunctionDefinition","parent_name":"ChatCompletionToolParam"},"Structs/ChatQuery/ChatCompletionToolParam/ToolsType.html":{"name":"ToolsType","parent_name":"ChatCompletionToolParam"},"Structs/ChatQuery/ChatCompletionFunctionCallOptionParam.html#/s:6OpenAI9ChatQueryV0C33CompletionFunctionCallOptionParamO4noneyA2EmF":{"name":"none","parent_name":"ChatCompletionFunctionCallOptionParam"},"Structs/ChatQuery/ChatCompletionFunctionCallOptionParam.html#/s:6OpenAI9ChatQueryV0C33CompletionFunctionCallOptionParamO4autoyA2EmF":{"name":"auto","parent_name":"ChatCompletionFunctionCallOptionParam"},"Structs/ChatQuery/ChatCompletionFunctionCallOptionParam.html#/s:6OpenAI9ChatQueryV0C33CompletionFunctionCallOptionParamO8functionyAESScAEmF":{"name":"function(_:)","parent_name":"ChatCompletionFunctionCallOptionParam"},"Structs/ChatQuery/ChatCompletionFunctionCallOptionParam.html#/s:SE6encode2toys7Encoder_p_tKF":{"name":"encode(to:)","parent_name":"ChatCompletionFunctionCallOptionParam"},"Structs/ChatQuery/ChatCompletionFunctionCallOptionParam.html#/s:6OpenAI9ChatQueryV0C33CompletionFunctionCallOptionParamO8functionAESS_tcfc":{"name":"init(function:)","parent_name":"ChatCompletionFunctionCallOptionParam"},"Structs/ChatQuery/ResponseFormat.html#/s:6OpenAI9ChatQueryV14ResponseFormatO10jsonObjectyA2EmF":{"name":"jsonObject","par
ent_name":"ResponseFormat"},"Structs/ChatQuery/ResponseFormat.html#/s:6OpenAI9ChatQueryV14ResponseFormatO4textyA2EmF":{"name":"text","parent_name":"ResponseFormat"},"Structs/ChatQuery/ResponseFormat.html#/s:SE6encode2toys7Encoder_p_tKF":{"name":"encode(to:)","parent_name":"ResponseFormat"},"Structs/ChatQuery/Stop.html#/s:6OpenAI9ChatQueryV4StopO6stringyAESScAEmF":{"name":"string(_:)","parent_name":"Stop"},"Structs/ChatQuery/Stop.html#/s:6OpenAI9ChatQueryV4StopO10stringListyAESaySSGcAEmF":{"name":"stringList(_:)","parent_name":"Stop"},"Structs/ChatQuery/Stop.html#/s:SE6encode2toys7Encoder_p_tKF":{"name":"encode(to:)","parent_name":"Stop"},"Structs/ChatQuery/Stop.html#/s:6OpenAI9ChatQueryV4StopO6stringAESS_tcfc":{"name":"init(string:)","parent_name":"Stop"},"Structs/ChatQuery/Stop.html#/s:6OpenAI9ChatQueryV4StopO10stringListAESaySSG_tcfc":{"name":"init(stringList:)","parent_name":"Stop"},"Structs/ChatQuery/ChatCompletionMessageParam/Role.html#/s:6OpenAI9ChatQueryV0C22CompletionMessageParamO4RoleO6systemyA2GmF":{"name":"system","parent_name":"Role"},"Structs/ChatQuery/ChatCompletionMessageParam/Role.html#/s:6OpenAI9ChatQueryV0C22CompletionMessageParamO4RoleO4useryA2GmF":{"name":"user","parent_name":"Role"},"Structs/ChatQuery/ChatCompletionMessageParam/Role.html#/s:6OpenAI9ChatQueryV0C22CompletionMessageParamO4RoleO9assistantyA2GmF":{"name":"assistant","parent_name":"Role"},"Structs/ChatQuery/ChatCompletionMessageParam/Role.html#/s:6OpenAI9ChatQueryV0C22CompletionMessageParamO4RoleO4toolyA2GmF":{"name":"tool","parent_name":"Role"},"Structs/ChatQuery/ChatCompletionMessageParam/ChatCompletionToolMessageParam/CodingKeys.html#/s:6OpenAI9ChatQueryV0C22CompletionMessageParamO0ce4ToolfG0V10CodingKeysO7contentyA2ImF":{"name":"content","parent_name":"CodingKeys"},"Structs/ChatQuery/ChatCompletionMessageParam/ChatCompletionToolMessageParam/CodingKeys.html#/s:6OpenAI9ChatQueryV0C22CompletionMessageParamO0ce4ToolfG0V10CodingKeysO4roleyA2ImF":{"name":"role","parent_name":"CodingKeys
"},"Structs/ChatQuery/ChatCompletionMessageParam/ChatCompletionToolMessageParam/CodingKeys.html#/s:6OpenAI9ChatQueryV0C22CompletionMessageParamO0ce4ToolfG0V10CodingKeysO10toolCallIdyA2ImF":{"name":"toolCallId","parent_name":"CodingKeys"},"Structs/ChatQuery/ChatCompletionMessageParam/ChatCompletionToolMessageParam.html#/s:6OpenAI9ChatQueryV0C22CompletionMessageParamO0ce4ToolfG0V4Rolea":{"name":"Role","parent_name":"ChatCompletionToolMessageParam"},"Structs/ChatQuery/ChatCompletionMessageParam/ChatCompletionToolMessageParam.html#/s:6OpenAI9ChatQueryV0C22CompletionMessageParamO0ce4ToolfG0V7contentSSvp":{"name":"content","abstract":"

The contents of the tool message.

","parent_name":"ChatCompletionToolMessageParam"},"Structs/ChatQuery/ChatCompletionMessageParam/ChatCompletionToolMessageParam.html#/s:6OpenAI9ChatQueryV0C22CompletionMessageParamO0ce4ToolfG0V4roleAE4RoleOvp":{"name":"role","abstract":"

The role of the messages author, in this case tool.

","parent_name":"ChatCompletionToolMessageParam"},"Structs/ChatQuery/ChatCompletionMessageParam/ChatCompletionToolMessageParam.html#/s:6OpenAI9ChatQueryV0C22CompletionMessageParamO0ce4ToolfG0V10toolCallIdSSvp":{"name":"toolCallId","abstract":"

Tool call that this message is responding to.

","parent_name":"ChatCompletionToolMessageParam"},"Structs/ChatQuery/ChatCompletionMessageParam/ChatCompletionToolMessageParam.html#/s:6OpenAI9ChatQueryV0C22CompletionMessageParamO0ce4ToolfG0V7content10toolCallIdAGSS_SStcfc":{"name":"init(content:toolCallId:)","parent_name":"ChatCompletionToolMessageParam"},"Structs/ChatQuery/ChatCompletionMessageParam/ChatCompletionToolMessageParam/CodingKeys.html":{"name":"CodingKeys","parent_name":"ChatCompletionToolMessageParam"},"Structs/ChatQuery/ChatCompletionMessageParam/ChatCompletionAssistantMessageParam/ChatCompletionMessageToolCallParam/FunctionCall.html#/s:6OpenAI9ChatQueryV0C22CompletionMessageParamO0ce9AssistantfG0V0cef8ToolCallG0V08FunctionJ0V9argumentsSSvp":{"name":"arguments","abstract":"

The arguments to call the function with, as generated by the model in JSON format. Note that the model does not always generate valid JSON, and may hallucinate parameters not defined by your function schema. Validate the arguments in your code before calling your function.

","parent_name":"FunctionCall"},"Structs/ChatQuery/ChatCompletionMessageParam/ChatCompletionAssistantMessageParam/ChatCompletionMessageToolCallParam/FunctionCall.html#/s:6OpenAI9ChatQueryV0C22CompletionMessageParamO0ce9AssistantfG0V0cef8ToolCallG0V08FunctionJ0V4nameSSvp":{"name":"name","abstract":"

The name of the function to call.

","parent_name":"FunctionCall"},"Structs/ChatQuery/ChatCompletionMessageParam/ChatCompletionAssistantMessageParam/ChatCompletionMessageToolCallParam.html#/s:6OpenAI9ChatQueryV0C22CompletionMessageParamO0ce9AssistantfG0V0cef8ToolCallG0V9ToolsTypea":{"name":"ToolsType","parent_name":"ChatCompletionMessageToolCallParam"},"Structs/ChatQuery/ChatCompletionMessageParam/ChatCompletionAssistantMessageParam/ChatCompletionMessageToolCallParam.html#/s:6OpenAI9ChatQueryV0C22CompletionMessageParamO0ce9AssistantfG0V0cef8ToolCallG0V2idSSvp":{"name":"id","abstract":"

The ID of the tool call.

","parent_name":"ChatCompletionMessageToolCallParam"},"Structs/ChatQuery/ChatCompletionMessageParam/ChatCompletionAssistantMessageParam/ChatCompletionMessageToolCallParam.html#/s:6OpenAI9ChatQueryV0C22CompletionMessageParamO0ce9AssistantfG0V0cef8ToolCallG0V8functionAI08FunctionJ0Vvp":{"name":"function","abstract":"

The function that the model called.

","parent_name":"ChatCompletionMessageToolCallParam"},"Structs/ChatQuery/ChatCompletionMessageParam/ChatCompletionAssistantMessageParam/ChatCompletionMessageToolCallParam.html#/s:6OpenAI9ChatQueryV0C22CompletionMessageParamO0ce9AssistantfG0V0cef8ToolCallG0V4typeAC0ceiG0V9ToolsTypeOvp":{"name":"type","abstract":"

The type of the tool. Currently, only function is supported.

","parent_name":"ChatCompletionMessageToolCallParam"},"Structs/ChatQuery/ChatCompletionMessageParam/ChatCompletionAssistantMessageParam/ChatCompletionMessageToolCallParam.html#/s:6OpenAI9ChatQueryV0C22CompletionMessageParamO0ce9AssistantfG0V0cef8ToolCallG0V2id8functionAISS_AI08FunctionJ0Vtcfc":{"name":"init(id:function:)","parent_name":"ChatCompletionMessageToolCallParam"},"Structs/ChatQuery/ChatCompletionMessageParam/ChatCompletionAssistantMessageParam/ChatCompletionMessageToolCallParam/FunctionCall.html":{"name":"FunctionCall","parent_name":"ChatCompletionMessageToolCallParam"},"Structs/ChatQuery/ChatCompletionMessageParam/ChatCompletionAssistantMessageParam/CodingKeys.html#/s:6OpenAI9ChatQueryV0C22CompletionMessageParamO0ce9AssistantfG0V10CodingKeysO4nameyA2ImF":{"name":"name","parent_name":"CodingKeys"},"Structs/ChatQuery/ChatCompletionMessageParam/ChatCompletionAssistantMessageParam/CodingKeys.html#/s:6OpenAI9ChatQueryV0C22CompletionMessageParamO0ce9AssistantfG0V10CodingKeysO4roleyA2ImF":{"name":"role","parent_name":"CodingKeys"},"Structs/ChatQuery/ChatCompletionMessageParam/ChatCompletionAssistantMessageParam/CodingKeys.html#/s:6OpenAI9ChatQueryV0C22CompletionMessageParamO0ce9AssistantfG0V10CodingKeysO7contentyA2ImF":{"name":"content","parent_name":"CodingKeys"},"Structs/ChatQuery/ChatCompletionMessageParam/ChatCompletionAssistantMessageParam/CodingKeys.html#/s:6OpenAI9ChatQueryV0C22CompletionMessageParamO0ce9AssistantfG0V10CodingKeysO9toolCallsyA2ImF":{"name":"toolCalls","parent_name":"CodingKeys"},"Structs/ChatQuery/ChatCompletionMessageParam/ChatCompletionAssistantMessageParam.html#/s:6OpenAI9ChatQueryV0C22CompletionMessageParamO0ce9AssistantfG0V4Rolea":{"name":"Role","parent_name":"ChatCompletionAssistantMessageParam"},"Structs/ChatQuery/ChatCompletionMessageParam/ChatCompletionAssistantMessageParam.html#/s:6OpenAI9ChatQueryV0C22CompletionMessageParamO0ce9AssistantfG0V4roleAE4RoleOvp":{"name":"role","abstract":"

/ The role of the messages author, in this case assistant.

","parent_name":"ChatCompletionAssistantMessageParam"},"Structs/ChatQuery/ChatCompletionMessageParam/ChatCompletionAssistantMessageParam.html#/s:6OpenAI9ChatQueryV0C22CompletionMessageParamO0ce9AssistantfG0V7contentSSSgvp":{"name":"content","abstract":"

The contents of the assistant message. Required unless tool_calls is specified.

","parent_name":"ChatCompletionAssistantMessageParam"},"Structs/ChatQuery/ChatCompletionMessageParam/ChatCompletionAssistantMessageParam.html#/s:6OpenAI9ChatQueryV0C22CompletionMessageParamO0ce9AssistantfG0V4nameSSSgvp":{"name":"name","abstract":"

The name of the author of this message. name is required if role is function, and it should be the name of the function whose response is in the content. May contain a-z, A-Z, 0-9, and underscores, with a maximum length of 64 characters.

","parent_name":"ChatCompletionAssistantMessageParam"},"Structs/ChatQuery/ChatCompletionMessageParam/ChatCompletionAssistantMessageParam.html#/s:6OpenAI9ChatQueryV0C22CompletionMessageParamO0ce9AssistantfG0V9toolCallsSayAG0cef8ToolCallG0VGSgvp":{"name":"toolCalls","abstract":"

The tool calls generated by the model, such as function calls.

","parent_name":"ChatCompletionAssistantMessageParam"},"Structs/ChatQuery/ChatCompletionMessageParam/ChatCompletionAssistantMessageParam.html#/s:6OpenAI9ChatQueryV0C22CompletionMessageParamO0ce9AssistantfG0V7content4name9toolCallsAGSSSg_AKSayAG0cef8ToolCallG0VGSgtcfc":{"name":"init(content:name:toolCalls:)","parent_name":"ChatCompletionAssistantMessageParam"},"Structs/ChatQuery/ChatCompletionMessageParam/ChatCompletionAssistantMessageParam/CodingKeys.html":{"name":"CodingKeys","parent_name":"ChatCompletionAssistantMessageParam"},"Structs/ChatQuery/ChatCompletionMessageParam/ChatCompletionAssistantMessageParam/ChatCompletionMessageToolCallParam.html":{"name":"ChatCompletionMessageToolCallParam","parent_name":"ChatCompletionAssistantMessageParam"},"Structs/ChatQuery/ChatCompletionMessageParam/ChatCompletionUserMessageParam/Content/VisionContent/ChatCompletionContentPartImageParam/CodingKeys.html#/s:6OpenAI9ChatQueryV0C22CompletionMessageParamO0ce4UserfG0V7ContentO06VisionI0O0cei9PartImageG0V10CodingKeysO8imageUrlyA2OmF":{"name":"imageUrl","parent_name":"CodingKeys"},"Structs/ChatQuery/ChatCompletionMessageParam/ChatCompletionUserMessageParam/Content/VisionContent/ChatCompletionContentPartImageParam/CodingKeys.html#/s:6OpenAI9ChatQueryV0C22CompletionMessageParamO0ce4UserfG0V7ContentO06VisionI0O0cei9PartImageG0V10CodingKeysO4typeyA2OmF":{"name":"type","parent_name":"CodingKeys"},"Structs/ChatQuery/ChatCompletionMessageParam/ChatCompletionUserMessageParam/Content/VisionContent/ChatCompletionContentPartImageParam/ImageURL/Detail.html#/s:6OpenAI9ChatQueryV0C22CompletionMessageParamO0ce4UserfG0V7ContentO06VisionI0O0cei9PartImageG0V0L3URLV6DetailO4autoyA2QmF":{"name":"auto","parent_name":"Detail"},"Structs/ChatQuery/ChatCompletionMessageParam/ChatCompletionUserMessageParam/Content/VisionContent/ChatCompletionContentPartImageParam/ImageURL/Detail.html#/s:6OpenAI9ChatQueryV0C22CompletionMessageParamO0ce4UserfG0V7ContentO06VisionI0O0cei9PartImageG0V0L3URLV6DetailO3lowyA2QmF":{"
name":"low","parent_name":"Detail"},"Structs/ChatQuery/ChatCompletionMessageParam/ChatCompletionUserMessageParam/Content/VisionContent/ChatCompletionContentPartImageParam/ImageURL/Detail.html#/s:6OpenAI9ChatQueryV0C22CompletionMessageParamO0ce4UserfG0V7ContentO06VisionI0O0cei9PartImageG0V0L3URLV6DetailO4highyA2QmF":{"name":"high","parent_name":"Detail"},"Structs/ChatQuery/ChatCompletionMessageParam/ChatCompletionUserMessageParam/Content/VisionContent/ChatCompletionContentPartImageParam/ImageURL.html#/s:6OpenAI9ChatQueryV0C22CompletionMessageParamO0ce4UserfG0V7ContentO06VisionI0O0cei9PartImageG0V0L3URLV3urlSSvp":{"name":"url","abstract":"

Either a URL of the image or the base64 encoded image data.

","parent_name":"ImageURL"},"Structs/ChatQuery/ChatCompletionMessageParam/ChatCompletionUserMessageParam/Content/VisionContent/ChatCompletionContentPartImageParam/ImageURL.html#/s:6OpenAI9ChatQueryV0C22CompletionMessageParamO0ce4UserfG0V7ContentO06VisionI0O0cei9PartImageG0V0L3URLV6detailAO6DetailOvp":{"name":"detail","abstract":"

Specifies the detail level of the image. Learn more in the","parent_name":"ImageURL"},"Structs/ChatQuery/ChatCompletionMessageParam/ChatCompletionUserMessageParam/Content/VisionContent/ChatCompletionContentPartImageParam/ImageURL.html#/s:6OpenAI9ChatQueryV0C22CompletionMessageParamO0ce4UserfG0V7ContentO06VisionI0O0cei9PartImageG0V0L3URLV3url6detailAOSS_AO6DetailOtcfc":{"name":"init(url:detail:)","parent_name":"ImageURL"},"Structs/ChatQuery/ChatCompletionMessageParam/ChatCompletionUserMessageParam/Content/VisionContent/ChatCompletionContentPartImageParam/ImageURL.html#/s:6OpenAI9ChatQueryV0C22CompletionMessageParamO0ce4UserfG0V7ContentO06VisionI0O0cei9PartImageG0V0L3URLV3url6detailAO10Foundation4DataV_AO6DetailOtcfc":{"name":"init(url:detail:)","parent_name":"ImageURL"},"Structs/ChatQuery/ChatCompletionMessageParam/ChatCompletionUserMessageParam/Content/VisionContent/ChatCompletionContentPartImageParam/ImageURL/Detail.html":{"name":"Detail","parent_name":"ImageURL"},"Structs/ChatQuery/ChatCompletionMessageParam/ChatCompletionUserMessageParam/Content/VisionContent/ChatCompletionContentPartImageParam.html#/s:6OpenAI9ChatQueryV0C22CompletionMessageParamO0ce4UserfG0V7ContentO06VisionI0O0cei9PartImageG0V8imageUrlAM0L3URLVvp":{"name":"imageUrl","parent_name":"ChatCompletionContentPartImageParam"},"Structs/ChatQuery/ChatCompletionMessageParam/ChatCompletionUserMessageParam/Content/VisionContent/ChatCompletionContentPartImageParam.html#/s:6OpenAI9ChatQueryV0C22CompletionMessageParamO0ce4UserfG0V7ContentO06VisionI0O0cei9PartImageG0V4typeSSvp":{"name":"type","abstract":"

The type of the content part.

","parent_name":"ChatCompletionContentPartImageParam"},"Structs/ChatQuery/ChatCompletionMessageParam/ChatCompletionUserMessageParam/Content/VisionContent/ChatCompletionContentPartImageParam.html#/s:6OpenAI9ChatQueryV0C22CompletionMessageParamO0ce4UserfG0V7ContentO06VisionI0O0cei9PartImageG0V8imageUrlA2M0L3URLV_tcfc":{"name":"init(imageUrl:)","parent_name":"ChatCompletionContentPartImageParam"},"Structs/ChatQuery/ChatCompletionMessageParam/ChatCompletionUserMessageParam/Content/VisionContent/ChatCompletionContentPartImageParam/ImageURL.html":{"name":"ImageURL","parent_name":"ChatCompletionContentPartImageParam"},"Structs/ChatQuery/ChatCompletionMessageParam/ChatCompletionUserMessageParam/Content/VisionContent/ChatCompletionContentPartImageParam/CodingKeys.html":{"name":"CodingKeys","parent_name":"ChatCompletionContentPartImageParam"},"Structs/ChatQuery/ChatCompletionMessageParam/ChatCompletionUserMessageParam/Content/VisionContent/ChatCompletionContentPartTextParam.html#/s:6OpenAI9ChatQueryV0C22CompletionMessageParamO0ce4UserfG0V7ContentO06VisionI0O0cei8PartTextG0V4textSSvp":{"name":"text","abstract":"

The text content.

","parent_name":"ChatCompletionContentPartTextParam"},"Structs/ChatQuery/ChatCompletionMessageParam/ChatCompletionUserMessageParam/Content/VisionContent/ChatCompletionContentPartTextParam.html#/s:6OpenAI9ChatQueryV0C22CompletionMessageParamO0ce4UserfG0V7ContentO06VisionI0O0cei8PartTextG0V4typeSSvp":{"name":"type","abstract":"

The type of the content part.

","parent_name":"ChatCompletionContentPartTextParam"},"Structs/ChatQuery/ChatCompletionMessageParam/ChatCompletionUserMessageParam/Content/VisionContent/ChatCompletionContentPartTextParam.html#/s:6OpenAI9ChatQueryV0C22CompletionMessageParamO0ce4UserfG0V7ContentO06VisionI0O0cei8PartTextG0V4textAMSS_tcfc":{"name":"init(text:)","parent_name":"ChatCompletionContentPartTextParam"},"Structs/ChatQuery/ChatCompletionMessageParam/ChatCompletionUserMessageParam/Content/VisionContent.html#/s:6OpenAI9ChatQueryV0C22CompletionMessageParamO0ce4UserfG0V7ContentO06VisionI0O04chatei8PartTextG0yA2K0ceilmG0VcAKmF":{"name":"chatCompletionContentPartTextParam(_:)","parent_name":"VisionContent"},"Structs/ChatQuery/ChatCompletionMessageParam/ChatCompletionUserMessageParam/Content/VisionContent.html#/s:6OpenAI9ChatQueryV0C22CompletionMessageParamO0ce4UserfG0V7ContentO06VisionI0O04chatei9PartImageG0yA2K0ceilmG0VcAKmF":{"name":"chatCompletionContentPartImageParam(_:)","parent_name":"VisionContent"},"Structs/ChatQuery/ChatCompletionMessageParam/ChatCompletionUserMessageParam/Content/VisionContent.html#/s:6OpenAI9ChatQueryV0C22CompletionMessageParamO0ce4UserfG0V7ContentO06VisionI0O4textSSSgvp":{"name":"text","parent_name":"VisionContent"},"Structs/ChatQuery/ChatCompletionMessageParam/ChatCompletionUserMessageParam/Content/VisionContent.html#/s:6OpenAI9ChatQueryV0C22CompletionMessageParamO0ce4UserfG0V7ContentO06VisionI0O8imageUrlAK0cei9PartImageG0V0N3URLVSgvp":{"name":"imageUrl","parent_name":"VisionContent"},"Structs/ChatQuery/ChatCompletionMessageParam/ChatCompletionUserMessageParam/Content/VisionContent.html#/s:6OpenAI9ChatQueryV0C22CompletionMessageParamO0ce4UserfG0V7ContentO06VisionI0O04chatei8PartTextG0A2K0ceilmG0V_tcfc":{"name":"init(chatCompletionContentPartTextParam:)","parent_name":"VisionContent"},"Structs/ChatQuery/ChatCompletionMessageParam/ChatCompletionUserMessageParam/Content/VisionContent.html#/s:6OpenAI9ChatQueryV0C22CompletionMessageParamO0ce4UserfG0V7ContentO06VisionI0O04chat
ei9PartImageG0A2K0ceilmG0V_tcfc":{"name":"init(chatCompletionContentPartImageParam:)","parent_name":"VisionContent"},"Structs/ChatQuery/ChatCompletionMessageParam/ChatCompletionUserMessageParam/Content/VisionContent.html#/s:SE6encode2toys7Encoder_p_tKF":{"name":"encode(to:)","parent_name":"VisionContent"},"Structs/ChatQuery/ChatCompletionMessageParam/ChatCompletionUserMessageParam/Content/VisionContent/ChatCompletionContentPartTextParam.html":{"name":"ChatCompletionContentPartTextParam","parent_name":"VisionContent"},"Structs/ChatQuery/ChatCompletionMessageParam/ChatCompletionUserMessageParam/Content/VisionContent/ChatCompletionContentPartImageParam.html":{"name":"ChatCompletionContentPartImageParam","parent_name":"VisionContent"},"Structs/ChatQuery/ChatCompletionMessageParam/ChatCompletionUserMessageParam/Content/CodingKeys.html#/s:6OpenAI9ChatQueryV0C22CompletionMessageParamO0ce4UserfG0V7ContentO10CodingKeysO6stringyA2KmF":{"name":"string","parent_name":"CodingKeys"},"Structs/ChatQuery/ChatCompletionMessageParam/ChatCompletionUserMessageParam/Content/CodingKeys.html#/s:6OpenAI9ChatQueryV0C22CompletionMessageParamO0ce4UserfG0V7ContentO10CodingKeysO6visionyA2KmF":{"name":"vision","parent_name":"CodingKeys"},"Structs/ChatQuery/ChatCompletionMessageParam/ChatCompletionUserMessageParam/Content.html#/s:6OpenAI9ChatQueryV0C22CompletionMessageParamO0ce4UserfG0V7ContentO6stringyAISScAImF":{"name":"string(_:)","parent_name":"Content"},"Structs/ChatQuery/ChatCompletionMessageParam/ChatCompletionUserMessageParam/Content.html#/s:6OpenAI9ChatQueryV0C22CompletionMessageParamO0ce4UserfG0V7ContentO6visionyAISayAI06VisionI0OGcAImF":{"name":"vision(_:)","parent_name":"Content"},"Structs/ChatQuery/ChatCompletionMessageParam/ChatCompletionUserMessageParam/Content.html#/s:6OpenAI9ChatQueryV0C22CompletionMessageParamO0ce4UserfG0V7ContentO6stringSSSgvp":{"name":"string","parent_name":"Content"},"Structs/ChatQuery/ChatCompletionMessageParam/ChatCompletionUserMessageParam/Content.html#/s:6
OpenAI9ChatQueryV0C22CompletionMessageParamO0ce4UserfG0V7ContentO6stringAISS_tcfc":{"name":"init(string:)","parent_name":"Content"},"Structs/ChatQuery/ChatCompletionMessageParam/ChatCompletionUserMessageParam/Content.html#/s:6OpenAI9ChatQueryV0C22CompletionMessageParamO0ce4UserfG0V7ContentO6visionAISayAI06VisionI0OG_tcfc":{"name":"init(vision:)","parent_name":"Content"},"Structs/ChatQuery/ChatCompletionMessageParam/ChatCompletionUserMessageParam/Content/CodingKeys.html":{"name":"CodingKeys","parent_name":"Content"},"Structs/ChatQuery/ChatCompletionMessageParam/ChatCompletionUserMessageParam/Content.html#/s:SE6encode2toys7Encoder_p_tKF":{"name":"encode(to:)","parent_name":"Content"},"Structs/ChatQuery/ChatCompletionMessageParam/ChatCompletionUserMessageParam/Content/VisionContent.html":{"name":"VisionContent","parent_name":"Content"},"Structs/ChatQuery/ChatCompletionMessageParam/ChatCompletionUserMessageParam/Content.html#/s:Se4fromxs7Decoder_p_tKcfc":{"name":"init(from:)","parent_name":"Content"},"Structs/ChatQuery/ChatCompletionMessageParam/ChatCompletionUserMessageParam.html#/s:6OpenAI9ChatQueryV0C22CompletionMessageParamO0ce4UserfG0V4Rolea":{"name":"Role","parent_name":"ChatCompletionUserMessageParam"},"Structs/ChatQuery/ChatCompletionMessageParam/ChatCompletionUserMessageParam.html#/s:6OpenAI9ChatQueryV0C22CompletionMessageParamO0ce4UserfG0V7contentAG7ContentOvp":{"name":"content","abstract":"

The contents of the user message.

","parent_name":"ChatCompletionUserMessageParam"},"Structs/ChatQuery/ChatCompletionMessageParam/ChatCompletionUserMessageParam.html#/s:6OpenAI9ChatQueryV0C22CompletionMessageParamO0ce4UserfG0V4roleAE4RoleOvp":{"name":"role","abstract":"

The role of the messages author, in this case user.

","parent_name":"ChatCompletionUserMessageParam"},"Structs/ChatQuery/ChatCompletionMessageParam/ChatCompletionUserMessageParam.html#/s:6OpenAI9ChatQueryV0C22CompletionMessageParamO0ce4UserfG0V4nameSSSgvp":{"name":"name","abstract":"

An optional name for the participant. Provides the model information to differentiate between participants of the same role.

","parent_name":"ChatCompletionUserMessageParam"},"Structs/ChatQuery/ChatCompletionMessageParam/ChatCompletionUserMessageParam.html#/s:6OpenAI9ChatQueryV0C22CompletionMessageParamO0ce4UserfG0V7content4nameA2G7ContentO_SSSgtcfc":{"name":"init(content:name:)","parent_name":"ChatCompletionUserMessageParam"},"Structs/ChatQuery/ChatCompletionMessageParam/ChatCompletionUserMessageParam/Content.html":{"name":"Content","parent_name":"ChatCompletionUserMessageParam"},"Structs/ChatQuery/ChatCompletionMessageParam/ChatCompletionSystemMessageParam.html#/s:6OpenAI9ChatQueryV0C22CompletionMessageParamO0ce6SystemfG0V4Rolea":{"name":"Role","parent_name":"ChatCompletionSystemMessageParam"},"Structs/ChatQuery/ChatCompletionMessageParam/ChatCompletionSystemMessageParam.html#/s:6OpenAI9ChatQueryV0C22CompletionMessageParamO0ce6SystemfG0V7contentSSvp":{"name":"content","abstract":"

The contents of the system message.

","parent_name":"ChatCompletionSystemMessageParam"},"Structs/ChatQuery/ChatCompletionMessageParam/ChatCompletionSystemMessageParam.html#/s:6OpenAI9ChatQueryV0C22CompletionMessageParamO0ce6SystemfG0V4roleAE4RoleOvp":{"name":"role","abstract":"

The role of the messages author, in this case system.

","parent_name":"ChatCompletionSystemMessageParam"},"Structs/ChatQuery/ChatCompletionMessageParam/ChatCompletionSystemMessageParam.html#/s:6OpenAI9ChatQueryV0C22CompletionMessageParamO0ce6SystemfG0V4nameSSSgvp":{"name":"name","abstract":"

An optional name for the participant. Provides the model information to differentiate between participants of the same role.

","parent_name":"ChatCompletionSystemMessageParam"},"Structs/ChatQuery/ChatCompletionMessageParam/ChatCompletionSystemMessageParam.html#/s:6OpenAI9ChatQueryV0C22CompletionMessageParamO0ce6SystemfG0V7content4nameAGSS_SSSgtcfc":{"name":"init(content:name:)","parent_name":"ChatCompletionSystemMessageParam"},"Structs/ChatQuery/ChatCompletionMessageParam.html#/s:6OpenAI9ChatQueryV0C22CompletionMessageParamO6systemyA2E0ce6SystemfG0VcAEmF":{"name":"system(_:)","parent_name":"ChatCompletionMessageParam"},"Structs/ChatQuery/ChatCompletionMessageParam.html#/s:6OpenAI9ChatQueryV0C22CompletionMessageParamO4useryA2E0ce4UserfG0VcAEmF":{"name":"user(_:)","parent_name":"ChatCompletionMessageParam"},"Structs/ChatQuery/ChatCompletionMessageParam.html#/s:6OpenAI9ChatQueryV0C22CompletionMessageParamO9assistantyA2E0ce9AssistantfG0VcAEmF":{"name":"assistant(_:)","parent_name":"ChatCompletionMessageParam"},"Structs/ChatQuery/ChatCompletionMessageParam.html#/s:6OpenAI9ChatQueryV0C22CompletionMessageParamO4toolyA2E0ce4ToolfG0VcAEmF":{"name":"tool(_:)","parent_name":"ChatCompletionMessageParam"},"Structs/ChatQuery/ChatCompletionMessageParam.html#/s:6OpenAI9ChatQueryV0C22CompletionMessageParamO7contentAE0ce4UserfG0V7ContentOSgvp":{"name":"content","parent_name":"ChatCompletionMessageParam"},"Structs/ChatQuery/ChatCompletionMessageParam.html#/s:6OpenAI9ChatQueryV0C22CompletionMessageParamO4roleAE4RoleOvp":{"name":"role","parent_name":"ChatCompletionMessageParam"},"Structs/ChatQuery/ChatCompletionMessageParam.html#/s:6OpenAI9ChatQueryV0C22CompletionMessageParamO4nameSSSgvp":{"name":"name","parent_name":"ChatCompletionMessageParam"},"Structs/ChatQuery/ChatCompletionMessageParam.html#/s:6OpenAI9ChatQueryV0C22CompletionMessageParamO10toolCallIdSSSgvp":{"name":"toolCallId","parent_name":"ChatCompletionMessageParam"},"Structs/ChatQuery/ChatCompletionMessageParam.html#/s:6OpenAI9ChatQueryV0C22CompletionMessageParamO9toolCallsSayAE0ce9AssistantfG0V0cef8ToolCallG0VGSgvp":{"name":"toolCalls","parent_nam
e":"ChatCompletionMessageParam"},"Structs/ChatQuery/ChatCompletionMessageParam.html#/s:6OpenAI9ChatQueryV0C22CompletionMessageParamO4role7content4name9toolCalls0K6CallIdAESgAE4RoleO_SSSgANSayAE0ce9AssistantfG0V0cef4ToolmG0VGSgANtcfc":{"name":"init(role:content:name:toolCalls:toolCallId:)","parent_name":"ChatCompletionMessageParam"},"Structs/ChatQuery/ChatCompletionMessageParam.html#/s:6OpenAI9ChatQueryV0C22CompletionMessageParamO4role7content4nameAESgAE4RoleO_SayAE0ce4UserfG0V7ContentO06VisionM0OGSSSgtcfc":{"name":"init(role:content:name:)","parent_name":"ChatCompletionMessageParam"},"Structs/ChatQuery/ChatCompletionMessageParam.html#/s:SE6encode2toys7Encoder_p_tKF":{"name":"encode(to:)","parent_name":"ChatCompletionMessageParam"},"Structs/ChatQuery/ChatCompletionMessageParam/ChatCompletionSystemMessageParam.html":{"name":"ChatCompletionSystemMessageParam","parent_name":"ChatCompletionMessageParam"},"Structs/ChatQuery/ChatCompletionMessageParam/ChatCompletionUserMessageParam.html":{"name":"ChatCompletionUserMessageParam","parent_name":"ChatCompletionMessageParam"},"Structs/ChatQuery/ChatCompletionMessageParam/ChatCompletionAssistantMessageParam.html":{"name":"ChatCompletionAssistantMessageParam","parent_name":"ChatCompletionMessageParam"},"Structs/ChatQuery/ChatCompletionMessageParam/ChatCompletionToolMessageParam.html":{"name":"ChatCompletionToolMessageParam","parent_name":"ChatCompletionMessageParam"},"Structs/ChatQuery/ChatCompletionMessageParam/Role.html":{"name":"Role","parent_name":"ChatCompletionMessageParam"},"Structs/ChatQuery/ChatCompletionMessageParam.html#/s:6OpenAI9ChatQueryV0C22CompletionMessageParamO4fromAEs7Decoder_p_tKcfc":{"name":"init(from:)","parent_name":"ChatCompletionMessageParam"},"Structs/ChatQuery.html#/s:6OpenAI9ChatQueryV8messagesSayAC0C22CompletionMessageParamOGvp":{"name":"messages","abstract":"

A list of messages comprising the conversation so far

","parent_name":"ChatQuery"},"Structs/ChatQuery.html#/s:6OpenAI9ChatQueryV5modelSSvp":{"name":"model","abstract":"

ID of the model to use. See the model endpoint compatibility table for details on which models work with the Chat API.","parent_name":"ChatQuery"},"Structs/ChatQuery.html#/s:6OpenAI9ChatQueryV16frequencyPenaltySdSgvp":{"name":"frequencyPenalty","abstract":"

Number between -2.0 and 2.0. Positive values penalize new tokens based on their existing frequency in the text so far, decreasing the model’s likelihood to repeat the same line verbatim.","parent_name":"ChatQuery"},"Structs/ChatQuery.html#/s:6OpenAI9ChatQueryV9logitBiasSDySSSiGSgvp":{"name":"logitBias","abstract":"

Modify the likelihood of specified tokens appearing in the completion.","parent_name":"ChatQuery"},"Structs/ChatQuery.html#/s:6OpenAI9ChatQueryV8logprobsSbSgvp":{"name":"logprobs","abstract":"

Whether to return log probabilities of the output tokens or not. If true, returns the log probabilities of each output token returned in the content of message. This option is currently not available on the gpt-4-vision-preview model.","parent_name":"ChatQuery"},"Structs/ChatQuery.html#/s:6OpenAI9ChatQueryV9maxTokensSiSgvp":{"name":"maxTokens","abstract":"

The maximum number of tokens to generate in the completion.","parent_name":"ChatQuery"},"Structs/ChatQuery.html#/s:6OpenAI9ChatQueryV1nSiSgvp":{"name":"n","abstract":"

How many chat completion choices to generate for each input message. Note that you will be charged based on the number of generated tokens across all of the choices. Keep n as 1 to minimize costs.","parent_name":"ChatQuery"},"Structs/ChatQuery.html#/s:6OpenAI9ChatQueryV15presencePenaltySdSgvp":{"name":"presencePenalty","abstract":"

Number between -2.0 and 2.0. Positive values penalize new tokens based on whether they appear in the text so far, increasing the model’s likelihood to talk about new topics.","parent_name":"ChatQuery"},"Structs/ChatQuery.html#/s:6OpenAI9ChatQueryV14responseFormatAC08ResponseF0OSgvp":{"name":"responseFormat","abstract":"

An object specifying the format that the model must output. Compatible with gpt-4-1106-preview and gpt-3.5-turbo-1106.","parent_name":"ChatQuery"},"Structs/ChatQuery.html#/s:6OpenAI9ChatQueryV4seedSiSgvp":{"name":"seed","abstract":"

This feature is in Beta. If specified, our system will make a best effort to sample deterministically, such that repeated requests with the same seed and parameters should return the same result. Determinism is not guaranteed, and you should refer to the system_fingerprint response parameter to monitor changes in the backend.

","parent_name":"ChatQuery"},"Structs/ChatQuery.html#/s:6OpenAI9ChatQueryV4stopAC4StopOSgvp":{"name":"stop","abstract":"

Up to 4 sequences where the API will stop generating further tokens. The returned text will not contain the stop sequence.","parent_name":"ChatQuery"},"Structs/ChatQuery.html#/s:6OpenAI9ChatQueryV11temperatureSdSgvp":{"name":"temperature","abstract":"

What sampling temperature to use, between 0 and 2. Higher values like 0.8 will make the output more random, while lower values like 0.2 will make it more focused and deterministic.","parent_name":"ChatQuery"},"Structs/ChatQuery.html#/s:6OpenAI9ChatQueryV10toolChoiceAC0C33CompletionFunctionCallOptionParamOSgvp":{"name":"toolChoice","abstract":"

Controls which (if any) function is called by the model. none means the model will not call a function and instead generates a message. auto means the model can pick between generating a message or calling a function. Specifying a particular function via {“type”: “function”, “function”: {“name”: “my_function”}} forces the model to call that function.","parent_name":"ChatQuery"},"Structs/ChatQuery.html#/s:6OpenAI9ChatQueryV5toolsSayAC0C19CompletionToolParamVGSgvp":{"name":"tools","abstract":"

A list of tools the model may call. Currently, only functions are supported as a tool. Use this to provide a list of functions the model may generate JSON inputs for.

","parent_name":"ChatQuery"},"Structs/ChatQuery.html#/s:6OpenAI9ChatQueryV11topLogprobsSiSgvp":{"name":"topLogprobs","abstract":"

An integer between 0 and 5 specifying the number of most likely tokens to return at each token position, each with an associated log probability. logprobs must be set to true if this parameter is used.

","parent_name":"ChatQuery"},"Structs/ChatQuery.html#/s:6OpenAI9ChatQueryV4topPSdSgvp":{"name":"topP","abstract":"

An alternative to sampling with temperature, called nucleus sampling, where the model considers the results of the tokens with top_p probability mass. So 0.1 means only the tokens comprising the top 10% probability mass are considered.","parent_name":"ChatQuery"},"Structs/ChatQuery.html#/s:6OpenAI9ChatQueryV4userSSSgvp":{"name":"user","abstract":"

A unique identifier representing your end-user, which can help OpenAI to monitor and detect abuse.","parent_name":"ChatQuery"},"Structs/ChatQuery.html#/s:6OpenAI9ChatQueryV6streamSbvp":{"name":"stream","abstract":"

If set, partial message deltas will be sent, like in ChatGPT. Tokens will be sent as data-only server-sent events as they become available, with the stream terminated by a data: [DONE] message.","parent_name":"ChatQuery"},"Structs/ChatQuery.html#/s:6OpenAI9ChatQueryV8messages5model16frequencyPenalty9logitBias8logprobs9maxTokens1n08presenceH014responseFormat4seed4stop11temperature10toolChoice5tools11topLogprobs0W1P4user6streamACSayAC0C22CompletionMessageParamOG_SSSdSgSDySSSiGSgSbSgSiSgA1_AyC08ResponseP0OSgA1_AC4StopOSgAyC0C33CompletionFunctionCallOptionParamOSgSayAC0C19CompletionToolParamVGSgA1_AYSSSgSbtcfc":{"name":"init(messages:model:frequencyPenalty:logitBias:logprobs:maxTokens:n:presencePenalty:responseFormat:seed:stop:temperature:toolChoice:tools:topLogprobs:topP:user:stream:)","parent_name":"ChatQuery"},"Structs/ChatQuery/ChatCompletionMessageParam.html":{"name":"ChatCompletionMessageParam","parent_name":"ChatQuery"},"Structs/ChatQuery/Stop.html":{"name":"Stop","parent_name":"ChatQuery"},"Structs/ChatQuery/ResponseFormat.html":{"name":"ResponseFormat","parent_name":"ChatQuery"},"Structs/ChatQuery/ChatCompletionFunctionCallOptionParam.html":{"name":"ChatCompletionFunctionCallOptionParam","parent_name":"ChatQuery"},"Structs/ChatQuery/ChatCompletionToolParam.html":{"name":"ChatCompletionToolParam","parent_name":"ChatQuery"},"Structs/ChatQuery/CodingKeys.html":{"name":"CodingKeys","parent_name":"ChatQuery"},"Structs/AudioTranslationResult.html#/s:6OpenAI22AudioTranslationResultV4textSSvp":{"name":"text","abstract":"

The translated text.

","parent_name":"AudioTranslationResult"},"Structs/AudioTranslationQuery.html#/s:6OpenAI21AudioTranslationQueryV8FileTypea":{"name":"FileType","parent_name":"AudioTranslationQuery"},"Structs/AudioTranslationQuery.html#/s:6OpenAI21AudioTranslationQueryV14ResponseFormata":{"name":"ResponseFormat","parent_name":"AudioTranslationQuery"},"Structs/AudioTranslationQuery.html#/s:6OpenAI21AudioTranslationQueryV4file10Foundation4DataVvp":{"name":"file","abstract":"

The audio file object (not file name) translate, in one of these formats: flac, mp3, mp4, mpeg, mpga, m4a, ogg, wav, or webm.

","parent_name":"AudioTranslationQuery"},"Structs/AudioTranslationQuery.html#/s:6OpenAI21AudioTranslationQueryV8fileTypeAA0c13TranscriptionE0V04FileG0Ovp":{"name":"fileType","parent_name":"AudioTranslationQuery"},"Structs/AudioTranslationQuery.html#/s:6OpenAI21AudioTranslationQueryV5modelSSvp":{"name":"model","abstract":"

ID of the model to use. Only whisper-1 is currently available.

","parent_name":"AudioTranslationQuery"},"Structs/AudioTranslationQuery.html#/s:6OpenAI21AudioTranslationQueryV14responseFormatAA0c13TranscriptionE0V08ResponseG0OSgvp":{"name":"responseFormat","abstract":"

The format of the transcript output, in one of these options: json, text, srt, verbose_json, or vtt.","parent_name":"AudioTranslationQuery"},"Structs/AudioTranslationQuery.html#/s:6OpenAI21AudioTranslationQueryV6promptSSSgvp":{"name":"prompt","abstract":"

An optional text to guide the model’s style or continue a previous audio segment. The prompt should be in English.","parent_name":"AudioTranslationQuery"},"Structs/AudioTranslationQuery.html#/s:6OpenAI21AudioTranslationQueryV11temperatureSdSgvp":{"name":"temperature","abstract":"

The sampling temperature, between 0 and 1. Higher values like 0.8 will make the output more random, while lower values like 0.2 will make it more focused and deterministic. If set to 0, the model will use log probability to automatically increase the temperature until certain thresholds are hit.","parent_name":"AudioTranslationQuery"},"Structs/AudioTranslationQuery.html#/s:6OpenAI21AudioTranslationQueryV4file0F4Type5model6prompt11temperature14responseFormatAC10Foundation4DataV_AA0c13TranscriptionE0V04FileG0OS2SSgSdSgAN08ResponseL0OSgtcfc":{"name":"init(file:fileType:model:prompt:temperature:responseFormat:)","parent_name":"AudioTranslationQuery"},"Structs/AudioTranscriptionResult.html#/s:6OpenAI24AudioTranscriptionResultV4textSSvp":{"name":"text","abstract":"

The transcribed text.

","parent_name":"AudioTranscriptionResult"},"Structs/AudioTranscriptionQuery/FileType.html#/s:6OpenAI23AudioTranscriptionQueryV8FileTypeO4flacyA2EmF":{"name":"flac","parent_name":"FileType"},"Structs/AudioTranscriptionQuery/FileType.html#/s:6OpenAI23AudioTranscriptionQueryV8FileTypeO3mp3yA2EmF":{"name":"mp3","parent_name":"FileType"},"Structs/AudioTranscriptionQuery/FileType.html#/s:6OpenAI23AudioTranscriptionQueryV8FileTypeO4mpgayA2EmF":{"name":"mpga","parent_name":"FileType"},"Structs/AudioTranscriptionQuery/FileType.html#/s:6OpenAI23AudioTranscriptionQueryV8FileTypeO3mp4yA2EmF":{"name":"mp4","parent_name":"FileType"},"Structs/AudioTranscriptionQuery/FileType.html#/s:6OpenAI23AudioTranscriptionQueryV8FileTypeO3m4ayA2EmF":{"name":"m4a","parent_name":"FileType"},"Structs/AudioTranscriptionQuery/FileType.html#/s:6OpenAI23AudioTranscriptionQueryV8FileTypeO4mpegyA2EmF":{"name":"mpeg","parent_name":"FileType"},"Structs/AudioTranscriptionQuery/FileType.html#/s:6OpenAI23AudioTranscriptionQueryV8FileTypeO3oggyA2EmF":{"name":"ogg","parent_name":"FileType"},"Structs/AudioTranscriptionQuery/FileType.html#/s:6OpenAI23AudioTranscriptionQueryV8FileTypeO3wavyA2EmF":{"name":"wav","parent_name":"FileType"},"Structs/AudioTranscriptionQuery/FileType.html#/s:6OpenAI23AudioTranscriptionQueryV8FileTypeO4webmyA2EmF":{"name":"webm","parent_name":"FileType"},"Structs/AudioTranscriptionQuery/ResponseFormat.html#/s:6OpenAI23AudioTranscriptionQueryV14ResponseFormatO4jsonyA2EmF":{"name":"json","parent_name":"ResponseFormat"},"Structs/AudioTranscriptionQuery/ResponseFormat.html#/s:6OpenAI23AudioTranscriptionQueryV14ResponseFormatO4textyA2EmF":{"name":"text","parent_name":"ResponseFormat"},"Structs/AudioTranscriptionQuery/ResponseFormat.html#/s:6OpenAI23AudioTranscriptionQueryV14ResponseFormatO11verboseJsonyA2EmF":{"name":"verboseJson","parent_name":"ResponseFormat"},"Structs/AudioTranscriptionQuery/ResponseFormat.html#/s:6OpenAI23AudioTranscriptionQueryV14ResponseFormatO3srtyA2EmF":{"name":"srt
","parent_name":"ResponseFormat"},"Structs/AudioTranscriptionQuery/ResponseFormat.html#/s:6OpenAI23AudioTranscriptionQueryV14ResponseFormatO3vttyA2EmF":{"name":"vtt","parent_name":"ResponseFormat"},"Structs/AudioTranscriptionQuery/ResponseFormat.html":{"name":"ResponseFormat","parent_name":"AudioTranscriptionQuery"},"Structs/AudioTranscriptionQuery.html#/s:6OpenAI23AudioTranscriptionQueryV4file10Foundation4DataVvp":{"name":"file","abstract":"

The audio file object (not file name) to transcribe, in one of these formats: flac, mp3, mp4, mpeg, mpga, m4a, ogg, wav, or webm.

","parent_name":"AudioTranscriptionQuery"},"Structs/AudioTranscriptionQuery.html#/s:6OpenAI23AudioTranscriptionQueryV8fileTypeAC04FileG0Ovp":{"name":"fileType","parent_name":"AudioTranscriptionQuery"},"Structs/AudioTranscriptionQuery.html#/s:6OpenAI23AudioTranscriptionQueryV5modelSSvp":{"name":"model","abstract":"

ID of the model to use. Only whisper-1 is currently available.

","parent_name":"AudioTranscriptionQuery"},"Structs/AudioTranscriptionQuery.html#/s:6OpenAI23AudioTranscriptionQueryV14responseFormatAC08ResponseG0OSgvp":{"name":"responseFormat","abstract":"

The format of the transcript output, in one of these options: json, text, srt, verbose_json, or vtt.","parent_name":"AudioTranscriptionQuery"},"Structs/AudioTranscriptionQuery.html#/s:6OpenAI23AudioTranscriptionQueryV6promptSSSgvp":{"name":"prompt","abstract":"

An optional text to guide the model’s style or continue a previous audio segment. The prompt should match the audio language.

","parent_name":"AudioTranscriptionQuery"},"Structs/AudioTranscriptionQuery.html#/s:6OpenAI23AudioTranscriptionQueryV11temperatureSdSgvp":{"name":"temperature","abstract":"

The sampling temperature, between 0 and 1. Higher values like 0.8 will make the output more random, while lower values like 0.2 will make it more focused and deterministic. If set to 0, the model will use log probability to automatically increase the temperature until certain thresholds are hit.","parent_name":"AudioTranscriptionQuery"},"Structs/AudioTranscriptionQuery.html#/s:6OpenAI23AudioTranscriptionQueryV8languageSSSgvp":{"name":"language","abstract":"

The language of the input audio. Supplying the input language in ISO-639-1 format will improve accuracy and latency.","parent_name":"AudioTranscriptionQuery"},"Structs/AudioTranscriptionQuery.html#/s:6OpenAI23AudioTranscriptionQueryV4file0F4Type5model6prompt11temperature8language14responseFormatAC10Foundation4DataV_AC04FileG0OS2SSgSdSgApC08ResponseM0OSgtcfc":{"name":"init(file:fileType:model:prompt:temperature:language:responseFormat:)","parent_name":"AudioTranscriptionQuery"},"Structs/AudioTranscriptionQuery/FileType.html":{"name":"FileType","parent_name":"AudioTranscriptionQuery"},"Structs/AudioSpeechResult.html#/s:6OpenAI17AudioSpeechResultV5audio10Foundation4DataVvp":{"name":"audio","abstract":"

Audio data for one of the following formats :mp3, opus, aac, flac

","parent_name":"AudioSpeechResult"},"Structs/AudioSpeechQuery/Speed.html#/s:6OpenAI16AudioSpeechQueryV5SpeedO6normalyA2EmF":{"name":"normal","parent_name":"Speed"},"Structs/AudioSpeechQuery/Speed.html#/s:6OpenAI16AudioSpeechQueryV5SpeedO3maxyA2EmF":{"name":"max","parent_name":"Speed"},"Structs/AudioSpeechQuery/Speed.html#/s:6OpenAI16AudioSpeechQueryV5SpeedO3minyA2EmF":{"name":"min","parent_name":"Speed"},"Structs/AudioSpeechQuery/CodingKeys.html#/s:6OpenAI16AudioSpeechQueryV10CodingKeysO5modelyA2EmF":{"name":"model","parent_name":"CodingKeys"},"Structs/AudioSpeechQuery/CodingKeys.html#/s:6OpenAI16AudioSpeechQueryV10CodingKeysO5inputyA2EmF":{"name":"input","parent_name":"CodingKeys"},"Structs/AudioSpeechQuery/CodingKeys.html#/s:6OpenAI16AudioSpeechQueryV10CodingKeysO5voiceyA2EmF":{"name":"voice","parent_name":"CodingKeys"},"Structs/AudioSpeechQuery/CodingKeys.html#/s:6OpenAI16AudioSpeechQueryV10CodingKeysO14responseFormatyA2EmF":{"name":"responseFormat","parent_name":"CodingKeys"},"Structs/AudioSpeechQuery/CodingKeys.html#/s:6OpenAI16AudioSpeechQueryV10CodingKeysO5speedyA2EmF":{"name":"speed","parent_name":"CodingKeys"},"Structs/AudioSpeechQuery/AudioSpeechResponseFormat.html#/s:6OpenAI16AudioSpeechQueryV0cD14ResponseFormatO3mp3yA2EmF":{"name":"mp3","parent_name":"AudioSpeechResponseFormat"},"Structs/AudioSpeechQuery/AudioSpeechResponseFormat.html#/s:6OpenAI16AudioSpeechQueryV0cD14ResponseFormatO4opusyA2EmF":{"name":"opus","parent_name":"AudioSpeechResponseFormat"},"Structs/AudioSpeechQuery/AudioSpeechResponseFormat.html#/s:6OpenAI16AudioSpeechQueryV0cD14ResponseFormatO3aacyA2EmF":{"name":"aac","parent_name":"AudioSpeechResponseFormat"},"Structs/AudioSpeechQuery/AudioSpeechResponseFormat.html#/s:6OpenAI16AudioSpeechQueryV0cD14ResponseFormatO4flacyA2EmF":{"name":"flac","parent_name":"AudioSpeechResponseFormat"},"Structs/AudioSpeechQuery/AudioSpeechVoice.html#/s:6OpenAI16AudioSpeechQueryV0cD5VoiceO5alloyyA2EmF":{"name":"alloy","parent_name":"AudioSpeechVoice"},"Struct
s/AudioSpeechQuery/AudioSpeechVoice.html#/s:6OpenAI16AudioSpeechQueryV0cD5VoiceO4echoyA2EmF":{"name":"echo","parent_name":"AudioSpeechVoice"},"Structs/AudioSpeechQuery/AudioSpeechVoice.html#/s:6OpenAI16AudioSpeechQueryV0cD5VoiceO5fableyA2EmF":{"name":"fable","parent_name":"AudioSpeechVoice"},"Structs/AudioSpeechQuery/AudioSpeechVoice.html#/s:6OpenAI16AudioSpeechQueryV0cD5VoiceO4onyxyA2EmF":{"name":"onyx","parent_name":"AudioSpeechVoice"},"Structs/AudioSpeechQuery/AudioSpeechVoice.html#/s:6OpenAI16AudioSpeechQueryV0cD5VoiceO4novayA2EmF":{"name":"nova","parent_name":"AudioSpeechVoice"},"Structs/AudioSpeechQuery/AudioSpeechVoice.html#/s:6OpenAI16AudioSpeechQueryV0cD5VoiceO7shimmeryA2EmF":{"name":"shimmer","parent_name":"AudioSpeechVoice"},"Structs/AudioSpeechQuery/AudioSpeechVoice.html":{"name":"AudioSpeechVoice","abstract":"

Encapsulates the voices available for audio generation.

","parent_name":"AudioSpeechQuery"},"Structs/AudioSpeechQuery/AudioSpeechResponseFormat.html":{"name":"AudioSpeechResponseFormat","abstract":"

Encapsulates the response formats available for audio data.

","parent_name":"AudioSpeechQuery"},"Structs/AudioSpeechQuery.html#/s:6OpenAI16AudioSpeechQueryV5inputSSvp":{"name":"input","abstract":"

The text to generate audio for. The maximum length is 4096 characters.

","parent_name":"AudioSpeechQuery"},"Structs/AudioSpeechQuery.html#/s:6OpenAI16AudioSpeechQueryV5modelSSvp":{"name":"model","abstract":"

One of the available TTS models: tts-1 or tts-1-hd

","parent_name":"AudioSpeechQuery"},"Structs/AudioSpeechQuery.html#/s:6OpenAI16AudioSpeechQueryV5voiceAC0cD5VoiceOvp":{"name":"voice","abstract":"

The voice to use when generating the audio. Supported voices are alloy, echo, fable, onyx, nova, and shimmer. Previews of the voices are available in the Text to speech guide.","parent_name":"AudioSpeechQuery"},"Structs/AudioSpeechQuery.html#/s:6OpenAI16AudioSpeechQueryV14responseFormatAC0cd8ResponseG0OSgvp":{"name":"responseFormat","abstract":"

The format to audio in. Supported formats are mp3, opus, aac, and flac.","parent_name":"AudioSpeechQuery"},"Structs/AudioSpeechQuery.html#/s:6OpenAI16AudioSpeechQueryV5speedSSSgvp":{"name":"speed","abstract":"

The speed of the generated audio. Select a value from 0.25 to 4.0. 1.0 is the default.","parent_name":"AudioSpeechQuery"},"Structs/AudioSpeechQuery/CodingKeys.html":{"name":"CodingKeys","parent_name":"AudioSpeechQuery"},"Structs/AudioSpeechQuery.html#/s:6OpenAI16AudioSpeechQueryV5model5input5voice14responseFormat5speedACSS_SSAC0cD5VoiceOAC0cd8ResponseJ0OSdSgtcfc":{"name":"init(model:input:voice:responseFormat:speed:)","parent_name":"AudioSpeechQuery"},"Structs/AudioSpeechQuery/Speed.html":{"name":"Speed","parent_name":"AudioSpeechQuery"},"Structs/AudioSpeechQuery.html#/s:6OpenAI16AudioSpeechQueryV09normalizeD5SpeedySSSdSgFZ":{"name":"normalizeSpeechSpeed(_:)","parent_name":"AudioSpeechQuery"},"Structs/APIErrorResponse.html#/s:6OpenAI16APIErrorResponseV5errorAA0C0Vvp":{"name":"error","parent_name":"APIErrorResponse"},"Structs/APIErrorResponse.html#/s:10Foundation14LocalizedErrorP16errorDescriptionSSSgvp":{"name":"errorDescription","parent_name":"APIErrorResponse"},"Structs/APIError.html#/s:6OpenAI8APIErrorV7messageSSvp":{"name":"message","parent_name":"APIError"},"Structs/APIError.html#/s:6OpenAI8APIErrorV4typeSSvp":{"name":"type","parent_name":"APIError"},"Structs/APIError.html#/s:6OpenAI8APIErrorV5paramSSSgvp":{"name":"param","parent_name":"APIError"},"Structs/APIError.html#/s:6OpenAI8APIErrorV4codeSSSgvp":{"name":"code","parent_name":"APIError"},"Structs/APIError.html#/s:6OpenAI8APIErrorV7message4type5param4codeACSS_S2SSgAHtcfc":{"name":"init(message:type:param:code:)","parent_name":"APIError"},"Structs/APIError.html#/s:Se4fromxs7Decoder_p_tKcfc":{"name":"init(from:)","parent_name":"APIError"},"Structs/APIError.html#/s:10Foundation14LocalizedErrorP16errorDescriptionSSSgvp":{"name":"errorDescription","parent_name":"APIError"},"Structs/APIError.html":{"name":"APIError"},"Structs/APIErrorResponse.html":{"name":"APIErrorResponse"},"Structs/AudioSpeechQuery.html":{"name":"AudioSpeechQuery","abstract":"

Generates audio from the input text."},"Structs/AudioSpeechResult.html":{"name":"AudioSpeechResult","abstract":"

The audio file content."},"Structs/AudioTranscriptionQuery.html":{"name":"AudioTranscriptionQuery"},"Structs/AudioTranscriptionResult.html":{"name":"AudioTranscriptionResult"},"Structs/AudioTranslationQuery.html":{"name":"AudioTranslationQuery","abstract":"

Translates audio into English.

"},"Structs/AudioTranslationResult.html":{"name":"AudioTranslationResult"},"Structs/ChatQuery.html":{"name":"ChatQuery","abstract":"

Creates a model response for the given chat conversation"},"Structs/ChatResult.html":{"name":"ChatResult","abstract":"

https://platform.openai.com/docs/api-reference/chat/object"},"Structs/ChatStreamResult.html":{"name":"ChatStreamResult"},"Structs/CompletionsQuery.html":{"name":"CompletionsQuery"},"Structs/CompletionsResult.html":{"name":"CompletionsResult"},"Structs/EditsQuery.html":{"name":"EditsQuery"},"Structs/EditsResult.html":{"name":"EditsResult"},"Structs/EmbeddingsQuery.html":{"name":"EmbeddingsQuery"},"Structs/EmbeddingsResult.html":{"name":"EmbeddingsResult"},"Structs/ImageEditsQuery.html":{"name":"ImageEditsQuery"},"Structs/ImageVariationsQuery.html":{"name":"ImageVariationsQuery"},"Structs/ImagesQuery.html":{"name":"ImagesQuery","abstract":"

Given a prompt and/or an input image, the model will generate a new image."},"Structs/ImagesResult.html":{"name":"ImagesResult","abstract":"

Returns a list of image objects.

"},"Structs/ModelQuery.html":{"name":"ModelQuery","abstract":"

Retrieves a model instance, providing basic information about the model such as the owner and permissioning.

"},"Structs/ModelResult.html":{"name":"ModelResult","abstract":"

The model object matching the specified ID.

"},"Structs/ModelsResult.html":{"name":"ModelsResult","abstract":"

A list of model objects.

"},"Structs/ModerationsQuery.html":{"name":"ModerationsQuery"},"Structs/ModerationsResult.html":{"name":"ModerationsResult"},"Structs/Vector.html":{"name":"Vector"},"Protocols/OpenAIProtocol.html#/s:6OpenAI0A10AIProtocolP11completions5query10completionyAA16CompletionsQueryV_ys6ResultOyAA0gI0Vs5Error_pGctF":{"name":"completions(query:completion:)","abstract":"

This function sends a completions query to the OpenAI API and retrieves generated completions in response. The Completions API enables you to build applications using OpenAI’s language models, like the powerful GPT-3.

","parent_name":"OpenAIProtocol"},"Protocols/OpenAIProtocol.html#/s:6OpenAI0A10AIProtocolP17completionsStream5query8onResult10completionyAA16CompletionsQueryV_ys0H0OyAA0jH0Vs5Error_pGcysAN_pSgcSgtF":{"name":"completionsStream(query:onResult:completion:)","abstract":"

This function sends a completions query to the OpenAI API and retrieves generated completions in response. The Completions API enables you to build applications using OpenAI’s language models, like the powerful GPT-3. The result is returned by chunks.

","parent_name":"OpenAIProtocol"},"Protocols/OpenAIProtocol.html#/s:6OpenAI0A10AIProtocolP6images5query10completionyAA11ImagesQueryV_ys6ResultOyAA0gI0Vs5Error_pGctF":{"name":"images(query:completion:)","abstract":"

This function sends an images query to the OpenAI API and retrieves generated images in response. The Images Generation API enables you to create various images or graphics using OpenAI’s powerful deep learning models.

","parent_name":"OpenAIProtocol"},"Protocols/OpenAIProtocol.html#/s:6OpenAI0A10AIProtocolP10imageEdits5query10completionyAA05ImageE5QueryV_ys6ResultOyAA06ImagesJ0Vs5Error_pGctF":{"name":"imageEdits(query:completion:)","abstract":"

This function sends an image edit query to the OpenAI API and retrieves generated images in response. The Images Edit API enables you to edit images or graphics using OpenAI’s powerful deep learning models.

","parent_name":"OpenAIProtocol"},"Protocols/OpenAIProtocol.html#/s:6OpenAI0A10AIProtocolP15imageVariations5query10completionyAA05ImageE5QueryV_ys6ResultOyAA06ImagesJ0Vs5Error_pGctF":{"name":"imageVariations(query:completion:)","abstract":"

This function sends an image variation query to the OpenAI API and retrieves generated images in response. The Images Variations API enables you to create a variation of a given image using OpenAI’s powerful deep learning models.

","parent_name":"OpenAIProtocol"},"Protocols/OpenAIProtocol.html#/s:6OpenAI0A10AIProtocolP10embeddings5query10completionyAA15EmbeddingsQueryV_ys6ResultOyAA0gI0Vs5Error_pGctF":{"name":"embeddings(query:completion:)","abstract":"

This function sends an embeddings query to the OpenAI API and retrieves embeddings in response. The Embeddings API enables you to generate high-dimensional vector representations of texts, which can be used for various natural language processing tasks such as semantic similarity, clustering, and classification.

","parent_name":"OpenAIProtocol"},"Protocols/OpenAIProtocol.html#/s:6OpenAI0A10AIProtocolP5chats5query10completionyAA9ChatQueryV_ys6ResultOyAA0gI0Vs5Error_pGctF":{"name":"chats(query:completion:)","abstract":"

This function sends a chat query to the OpenAI API and retrieves chat conversation responses. The Chat API enables you to build chatbots or conversational applications using OpenAI’s powerful natural language models, like GPT-3.

","parent_name":"OpenAIProtocol"},"Protocols/OpenAIProtocol.html#/s:6OpenAI0A10AIProtocolP11chatsStream5query8onResult10completionyAA9ChatQueryV_ys0H0OyAA0jeH0Vs5Error_pGcysAN_pSgcSgtF":{"name":"chatsStream(query:onResult:completion:)","abstract":"

This function sends a chat query to the OpenAI API and retrieves chat stream conversation responses. The Chat API enables you to build chatbots or conversational applications using OpenAI’s powerful natural language models, like GPT-3. The result is returned by chunks.

","parent_name":"OpenAIProtocol"},"Protocols/OpenAIProtocol.html#/s:6OpenAI0A10AIProtocolP5edits5query10completionyAA10EditsQueryV_ys6ResultOyAA0gI0Vs5Error_pGctF":{"name":"edits(query:completion:)","abstract":"

This function sends an edits query to the OpenAI API and retrieves an edited version of the prompt based on the instruction given.

","parent_name":"OpenAIProtocol"},"Protocols/OpenAIProtocol.html#/s:6OpenAI0A10AIProtocolP5model5query10completionyAA10ModelQueryV_ys6ResultOyAA0gI0Vs5Error_pGctF":{"name":"model(query:completion:)","abstract":"

This function sends a model query to the OpenAI API and retrieves a model instance, providing owner information. The Models API in this usage enables you to gather detailed information on the model in question, like GPT-3.

","parent_name":"OpenAIProtocol"},"Protocols/OpenAIProtocol.html#/s:6OpenAI0A10AIProtocolP6models10completionyys6ResultOyAA06ModelsF0Vs5Error_pGc_tF":{"name":"models(completion:)","abstract":"

This function sends a models query to the OpenAI API and retrieves a list of models. The Models API in this usage enables you to list all the available models.

","parent_name":"OpenAIProtocol"},"Protocols/OpenAIProtocol.html#/s:6OpenAI0A10AIProtocolP11moderations5query10completionyAA16ModerationsQueryV_ys6ResultOyAA0gI0Vs5Error_pGctF":{"name":"moderations(query:completion:)","abstract":"

This function sends a moderations query to the OpenAI API and retrieves a list of category results to classify how text may violate OpenAI’s Content Policy.

","parent_name":"OpenAIProtocol"},"Protocols/OpenAIProtocol.html#/s:6OpenAI0A10AIProtocolP17audioCreateSpeech5query10completionyAA05AudioF5QueryV_ys6ResultOyAA0ifK0Vs5Error_pGctF":{"name":"audioCreateSpeech(query:completion:)","abstract":"

This function sends an AudioSpeechQuery to the OpenAI API to create audio speech from text using a specific voice and format.

","parent_name":"OpenAIProtocol"},"Protocols/OpenAIProtocol.html#/s:6OpenAI0A10AIProtocolP19audioTranscriptions5query10completionyAA23AudioTranscriptionQueryV_ys6ResultOyAA0hiK0Vs5Error_pGctF":{"name":"audioTranscriptions(query:completion:)","abstract":"

Transcribes audio data using OpenAI’s audio transcription API and completes the operation asynchronously.

","parent_name":"OpenAIProtocol"},"Protocols/OpenAIProtocol.html#/s:6OpenAI0A10AIProtocolP17audioTranslations5query10completionyAA21AudioTranslationQueryV_ys6ResultOyAA0hiK0Vs5Error_pGctF":{"name":"audioTranslations(query:completion:)","abstract":"

Translates audio data using OpenAI’s audio translation API and completes the operation asynchronously.

","parent_name":"OpenAIProtocol"},"Protocols/OpenAIProtocol.html#/s:6OpenAI0A10AIProtocolPAAE11completions5queryAA17CompletionsResultVAA0F5QueryV_tYaKF":{"name":"completions(query:)","parent_name":"OpenAIProtocol"},"Protocols/OpenAIProtocol.html#/s:6OpenAI0A10AIProtocolPAAE17completionsStream5queryScsyAA17CompletionsResultVs5Error_pGAA0G5QueryV_tF":{"name":"completionsStream(query:)","parent_name":"OpenAIProtocol"},"Protocols/OpenAIProtocol.html#/s:6OpenAI0A10AIProtocolPAAE6images5queryAA12ImagesResultVAA0F5QueryV_tYaKF":{"name":"images(query:)","parent_name":"OpenAIProtocol"},"Protocols/OpenAIProtocol.html#/s:6OpenAI0A10AIProtocolPAAE10imageEdits5queryAA12ImagesResultVAA05ImageE5QueryV_tYaKF":{"name":"imageEdits(query:)","parent_name":"OpenAIProtocol"},"Protocols/OpenAIProtocol.html#/s:6OpenAI0A10AIProtocolPAAE15imageVariations5queryAA12ImagesResultVAA05ImageE5QueryV_tYaKF":{"name":"imageVariations(query:)","parent_name":"OpenAIProtocol"},"Protocols/OpenAIProtocol.html#/s:6OpenAI0A10AIProtocolPAAE10embeddings5queryAA16EmbeddingsResultVAA0F5QueryV_tYaKF":{"name":"embeddings(query:)","parent_name":"OpenAIProtocol"},"Protocols/OpenAIProtocol.html#/s:6OpenAI0A10AIProtocolPAAE5chats5queryAA10ChatResultVAA0F5QueryV_tYaKF":{"name":"chats(query:)","parent_name":"OpenAIProtocol"},"Protocols/OpenAIProtocol.html#/s:6OpenAI0A10AIProtocolPAAE11chatsStream5queryScsyAA04ChatE6ResultVs5Error_pGAA0G5QueryV_tF":{"name":"chatsStream(query:)","parent_name":"OpenAIProtocol"},"Protocols/OpenAIProtocol.html#/s:6OpenAI0A10AIProtocolPAAE5edits5queryAA11EditsResultVAA0F5QueryV_tYaKF":{"name":"edits(query:)","parent_name":"OpenAIProtocol"},"Protocols/OpenAIProtocol.html#/s:6OpenAI0A10AIProtocolPAAE5model5queryAA11ModelResultVAA0F5QueryV_tYaKF":{"name":"model(query:)","parent_name":"OpenAIProtocol"},"Protocols/OpenAIProtocol.html#/s:6OpenAI0A10AIProtocolPAAE6modelsAA12ModelsResultVyYaKF":{"name":"models()","parent_name":"OpenAIProtocol"},"Protocols/OpenAIProtocol.html#/s:6OpenAI0A10AIProtocol
PAAE11moderations5queryAA17ModerationsResultVAA0F5QueryV_tYaKF":{"name":"moderations(query:)","parent_name":"OpenAIProtocol"},"Protocols/OpenAIProtocol.html#/s:6OpenAI0A10AIProtocolPAAE17audioCreateSpeech5queryAA05AudioF6ResultVAA0hF5QueryV_tYaKF":{"name":"audioCreateSpeech(query:)","parent_name":"OpenAIProtocol"},"Protocols/OpenAIProtocol.html#/s:6OpenAI0A10AIProtocolPAAE19audioTranscriptions5queryAA24AudioTranscriptionResultVAA0gH5QueryV_tYaKF":{"name":"audioTranscriptions(query:)","parent_name":"OpenAIProtocol"},"Protocols/OpenAIProtocol.html#/s:6OpenAI0A10AIProtocolPAAE17audioTranslations5queryAA22AudioTranslationResultVAA0gH5QueryV_tYaKF":{"name":"audioTranslations(query:)","parent_name":"OpenAIProtocol"},"Protocols/OpenAIProtocol.html#/s:6OpenAI0A10AIProtocolPAAE11completions5query7Combine12AnyPublisherVyAA17CompletionsResultVs5Error_pGAA0I5QueryV_tF":{"name":"completions(query:)","parent_name":"OpenAIProtocol"},"Protocols/OpenAIProtocol.html#/s:6OpenAI0A10AIProtocolPAAE17completionsStream5query7Combine12AnyPublisherVys6ResultOyAA011CompletionsJ0Vs5Error_pGsAM_pGAA0K5QueryV_tF":{"name":"completionsStream(query:)","parent_name":"OpenAIProtocol"},"Protocols/OpenAIProtocol.html#/s:6OpenAI0A10AIProtocolPAAE6images5query7Combine12AnyPublisherVyAA12ImagesResultVs5Error_pGAA0I5QueryV_tF":{"name":"images(query:)","parent_name":"OpenAIProtocol"},"Protocols/OpenAIProtocol.html#/s:6OpenAI0A10AIProtocolPAAE10imageEdits5query7Combine12AnyPublisherVyAA12ImagesResultVs5Error_pGAA05ImageE5QueryV_tF":{"name":"imageEdits(query:)","parent_name":"OpenAIProtocol"},"Protocols/OpenAIProtocol.html#/s:6OpenAI0A10AIProtocolPAAE15imageVariations5query7Combine12AnyPublisherVyAA12ImagesResultVs5Error_pGAA05ImageE5QueryV_tF":{"name":"imageVariations(query:)","parent_name":"OpenAIProtocol"},"Protocols/OpenAIProtocol.html#/s:6OpenAI0A10AIProtocolPAAE10embeddings5query7Combine12AnyPublisherVyAA16EmbeddingsResultVs5Error_pGAA0I5QueryV_tF":{"name":"embeddings(query:)","parent_name":"OpenAIProto
col"},"Protocols/OpenAIProtocol.html#/s:6OpenAI0A10AIProtocolPAAE5chats5query7Combine12AnyPublisherVyAA10ChatResultVs5Error_pGAA0I5QueryV_tF":{"name":"chats(query:)","parent_name":"OpenAIProtocol"},"Protocols/OpenAIProtocol.html#/s:6OpenAI0A10AIProtocolPAAE11chatsStream5query7Combine12AnyPublisherVys6ResultOyAA04ChateJ0Vs5Error_pGsAM_pGAA0K5QueryV_tF":{"name":"chatsStream(query:)","parent_name":"OpenAIProtocol"},"Protocols/OpenAIProtocol.html#/s:6OpenAI0A10AIProtocolPAAE5edits5query7Combine12AnyPublisherVyAA11EditsResultVs5Error_pGAA0I5QueryV_tF":{"name":"edits(query:)","parent_name":"OpenAIProtocol"},"Protocols/OpenAIProtocol.html#/s:6OpenAI0A10AIProtocolPAAE5model5query7Combine12AnyPublisherVyAA11ModelResultVs5Error_pGAA0I5QueryV_tF":{"name":"model(query:)","parent_name":"OpenAIProtocol"},"Protocols/OpenAIProtocol.html#/s:6OpenAI0A10AIProtocolPAAE6models7Combine12AnyPublisherVyAA12ModelsResultVs5Error_pGyF":{"name":"models()","parent_name":"OpenAIProtocol"},"Protocols/OpenAIProtocol.html#/s:6OpenAI0A10AIProtocolPAAE11moderations5query7Combine12AnyPublisherVyAA17ModerationsResultVs5Error_pGAA0I5QueryV_tF":{"name":"moderations(query:)","parent_name":"OpenAIProtocol"},"Protocols/OpenAIProtocol.html#/s:6OpenAI0A10AIProtocolPAAE17audioCreateSpeech5query7Combine12AnyPublisherVyAA05AudioF6ResultVs5Error_pGAA0kF5QueryV_tF":{"name":"audioCreateSpeech(query:)","parent_name":"OpenAIProtocol"},"Protocols/OpenAIProtocol.html#/s:6OpenAI0A10AIProtocolPAAE19audioTranscriptions5query7Combine12AnyPublisherVyAA24AudioTranscriptionResultVs5Error_pGAA0jK5QueryV_tF":{"name":"audioTranscriptions(query:)","parent_name":"OpenAIProtocol"},"Protocols/OpenAIProtocol.html#/s:6OpenAI0A10AIProtocolPAAE17audioTranslations5query7Combine12AnyPublisherVyAA22AudioTranslationResultVs5Error_pGAA0jK5QueryV_tF":{"name":"audioTranslations(query:)","parent_name":"OpenAIProtocol"},"Protocols/OpenAIProtocol.html":{"name":"OpenAIProtocol"},"Extensions/Model.html#/s:SS6OpenAIE6gpt4_oSSvpZ":{"name":"gpt4_o","a
bstract":"

gpt-4o, currently the most advanced, multimodal flagship model that’s cheaper and faster than GPT-4 Turbo.

","parent_name":"Model"},"Extensions/Model.html#/s:SS6OpenAIE11gpt4_o_miniSSvpZ":{"name":"gpt4_o_mini","abstract":"

gpt-4o-mini, currently the most affordable and intelligent model for fast and lightweight requests.

","parent_name":"Model"},"Extensions/Model.html#/s:SS6OpenAIE10gpt4_turboSSvpZ":{"name":"gpt4_turbo","abstract":"

gpt-4-turbo, The latest GPT-4 Turbo model with vision capabilities. Vision requests can now use JSON mode and function calling and more. Context window: 128,000 tokens

","parent_name":"Model"},"Extensions/Model.html#/s:SS6OpenAIE18gpt4_turbo_previewSSvpZ":{"name":"gpt4_turbo_preview","abstract":"

gpt-4-turbo, gpt-4 model with improved instruction following, JSON mode, reproducible outputs, parallel function calling and more. Maximum of 4096 output tokens

","parent_name":"Model"},"Extensions/Model.html#/s:SS6OpenAIE19gpt4_vision_previewSSvpZ":{"name":"gpt4_vision_preview","abstract":"

gpt-4-vision-preview, able to understand images, in addition to all other GPT-4 Turbo capabilities.

","parent_name":"Model"},"Extensions/Model.html#/s:SS6OpenAIE17gpt4_0125_previewSSvpZ":{"name":"gpt4_0125_preview","abstract":"

Snapshot of gpt-4-turbo-preview from January 25th 2024. This model reduces cases of “laziness” where the model doesn’t complete a task. Also fixes the bug impacting non-English UTF-8 generations. Maximum of 4096 output tokens

","parent_name":"Model"},"Extensions/Model.html#/s:SS6OpenAIE17gpt4_1106_previewSSvpZ":{"name":"gpt4_1106_preview","abstract":"

Snapshot of gpt-4-turbo-preview from November 6th 2023. Improved instruction following, JSON mode, reproducible outputs, parallel function calling and more. Maximum of 4096 output tokens

","parent_name":"Model"},"Extensions/Model.html#/s:SS6OpenAIE4gpt4SSvpZ":{"name":"gpt4","abstract":"

Most capable gpt-4 model, outperforms any GPT-3.5 model, able to do more complex tasks, and optimized for chat.

","parent_name":"Model"},"Extensions/Model.html#/s:SS6OpenAIE9gpt4_0613SSvpZ":{"name":"gpt4_0613","abstract":"

Snapshot of gpt-4 from June 13th 2023 with function calling data. Unlike gpt-4, this model will not receive updates, and will be deprecated 3 months after a new version is released.

","parent_name":"Model"},"Extensions/Model.html#/s:SS6OpenAIE9gpt4_0314SSvpZ":{"name":"gpt4_0314","abstract":"

Snapshot of gpt-4 from March 14th 2023. Unlike gpt-4, this model will not receive updates, and will only be supported for a three month period ending on June 14th 2023.

","parent_name":"Model"},"Extensions/Model.html#/s:SS6OpenAIE8gpt4_32kSSvpZ":{"name":"gpt4_32k","abstract":"

Same capabilities as the base gpt-4 model but with 4x the context length. Will be updated with our latest model iteration.

","parent_name":"Model"},"Extensions/Model.html#/s:SS6OpenAIE13gpt4_32k_0613SSvpZ":{"name":"gpt4_32k_0613","abstract":"

Snapshot of gpt-4-32k from June 13th 2023. Unlike gpt-4-32k, this model will not receive updates, and will be deprecated 3 months after a new version is released.

","parent_name":"Model"},"Extensions/Model.html#/s:SS6OpenAIE13gpt4_32k_0314SSvpZ":{"name":"gpt4_32k_0314","abstract":"

Snapshot of gpt-4-32k from March 14th 2023. Unlike gpt-4-32k, this model will not receive updates, and will only be supported for a three month period ending on June 14th 2023.

","parent_name":"Model"},"Extensions/Model.html#/s:SS6OpenAIE11gpt3_5TurboSSvpZ":{"name":"gpt3_5Turbo","abstract":"

Most capable gpt-3.5-turbo model and optimized for chat. Will be updated with our latest model iteration.

","parent_name":"Model"},"Extensions/Model.html#/s:SS6OpenAIE16gpt3_5Turbo_0125SSvpZ":{"name":"gpt3_5Turbo_0125","abstract":"

Snapshot of gpt-3.5-turbo from January 25th 2024. Decreased prices by 50%. Various improvements including higher accuracy at responding in requested formats and a fix for a bug which caused a text encoding issue for non-English language function calls.

","parent_name":"Model"},"Extensions/Model.html#/s:SS6OpenAIE16gpt3_5Turbo_1106SSvpZ":{"name":"gpt3_5Turbo_1106","abstract":"

Snapshot of gpt-3.5-turbo from November 6th 2023. The latest gpt-3.5-turbo model with improved instruction following, JSON mode, reproducible outputs, parallel function calling and more.

","parent_name":"Model"},"Extensions/Model.html#/s:SS6OpenAIE16gpt3_5Turbo_0613SSvpZ":{"name":"gpt3_5Turbo_0613","abstract":"

Snapshot of gpt-3.5-turbo from June 13th 2023 with function calling data. Unlike gpt-3.5-turbo, this model will not receive updates, and will be deprecated 3 months after a new version is released.

","parent_name":"Model"},"Extensions/Model.html#/s:SS6OpenAIE16gpt3_5Turbo_0301SSvpZ":{"name":"gpt3_5Turbo_0301","abstract":"

Snapshot of gpt-3.5-turbo from March 1st 2023. Unlike gpt-3.5-turbo, this model will not receive updates, and will only be supported for a three month period ending on June 1st 2023.

","parent_name":"Model"},"Extensions/Model.html#/s:SS6OpenAIE15gpt3_5Turbo_16kSSvpZ":{"name":"gpt3_5Turbo_16k","abstract":"

Same capabilities as the standard gpt-3.5-turbo model but with 4 times the context.

","parent_name":"Model"},"Extensions/Model.html#/s:SS6OpenAIE20gpt3_5Turbo_16k_0613SSvpZ":{"name":"gpt3_5Turbo_16k_0613","abstract":"

Snapshot of gpt-3.5-turbo-16k from June 13th 2023. Unlike gpt-3.5-turbo-16k, this model will not receive updates, and will be deprecated 3 months after a new version is released.

","parent_name":"Model"},"Extensions/Model.html#/s:SS6OpenAIE15textDavinci_003SSvpZ":{"name":"textDavinci_003","abstract":"

Can do any language task with better quality, longer output, and consistent instruction-following than the curie, babbage, or ada models. Also supports inserting completions within text.

","parent_name":"Model"},"Extensions/Model.html#/s:SS6OpenAIE15textDavinci_002SSvpZ":{"name":"textDavinci_002","abstract":"

Similar capabilities to text-davinci-003 but trained with supervised fine-tuning instead of reinforcement learning.

","parent_name":"Model"},"Extensions/Model.html#/s:SS6OpenAIE9textCurieSSvpZ":{"name":"textCurie","abstract":"

Very capable, faster and lower cost than Davinci.

","parent_name":"Model"},"Extensions/Model.html#/s:SS6OpenAIE11textBabbageSSvpZ":{"name":"textBabbage","abstract":"

Capable of straightforward tasks, very fast, and lower cost.

","parent_name":"Model"},"Extensions/Model.html#/s:SS6OpenAIE7textAdaSSvpZ":{"name":"textAda","abstract":"

Capable of very simple tasks, usually the fastest model in the GPT-3 series, and lowest cost.

","parent_name":"Model"},"Extensions/Model.html#/s:SS6OpenAIE15textDavinci_001SSvpZ":{"name":"textDavinci_001","parent_name":"Model"},"Extensions/Model.html#/s:SS6OpenAIE19codeDavinciEdit_001SSvpZ":{"name":"codeDavinciEdit_001","parent_name":"Model"},"Extensions/Model.html#/s:SS6OpenAIE5tts_1SSvpZ":{"name":"tts_1","abstract":"

The latest text to speech model, optimized for speed.

","parent_name":"Model"},"Extensions/Model.html#/s:SS6OpenAIE8tts_1_hdSSvpZ":{"name":"tts_1_hd","abstract":"

The latest text to speech model, optimized for quality.

","parent_name":"Model"},"Extensions/Model.html#/s:SS6OpenAIE9whisper_1SSvpZ":{"name":"whisper_1","parent_name":"Model"},"Extensions/Model.html#/s:SS6OpenAIE8dall_e_2SSvpZ":{"name":"dall_e_2","parent_name":"Model"},"Extensions/Model.html#/s:SS6OpenAIE8dall_e_3SSvpZ":{"name":"dall_e_3","parent_name":"Model"},"Extensions/Model.html#/s:SS6OpenAIE7davinciSSvpZ":{"name":"davinci","abstract":"

Most capable GPT-3 model. Can do any task the other models can do, often with higher quality.

","parent_name":"Model"},"Extensions/Model.html#/s:SS6OpenAIE5curieSSvpZ":{"name":"curie","abstract":"

Very capable, but faster and lower cost than Davinci.

","parent_name":"Model"},"Extensions/Model.html#/s:SS6OpenAIE7babbageSSvpZ":{"name":"babbage","abstract":"

Capable of straightforward tasks, very fast, and lower cost.

","parent_name":"Model"},"Extensions/Model.html#/s:SS6OpenAIE3adaSSvpZ":{"name":"ada","abstract":"

Capable of very simple tasks, usually the fastest model in the GPT-3 series, and lowest cost.

","parent_name":"Model"},"Extensions/Model.html#/s:SS6OpenAIE16textEmbeddingAdaSSvpZ":{"name":"textEmbeddingAda","parent_name":"Model"},"Extensions/Model.html#/s:SS6OpenAIE13textSearchAdaSSvpZ":{"name":"textSearchAda","parent_name":"Model"},"Extensions/Model.html#/s:SS6OpenAIE20textSearchBabbageDocSSvpZ":{"name":"textSearchBabbageDoc","parent_name":"Model"},"Extensions/Model.html#/s:SS6OpenAIE25textSearchBabbageQuery001SSvpZ":{"name":"textSearchBabbageQuery001","parent_name":"Model"},"Extensions/Model.html#/s:SS6OpenAIE14textEmbedding3SSvpZ":{"name":"textEmbedding3","parent_name":"Model"},"Extensions/Model.html#/s:SS6OpenAIE19textEmbedding3LargeSSvpZ":{"name":"textEmbedding3Large","parent_name":"Model"},"Extensions/Model.html#/s:SS6OpenAIE20textModerationStableSSvpZ":{"name":"textModerationStable","abstract":"

Almost as capable as the latest model, but slightly older.

","parent_name":"Model"},"Extensions/Model.html#/s:SS6OpenAIE20textModerationLatestSSvpZ":{"name":"textModerationLatest","abstract":"

Most capable moderation model. Accuracy will be slightly higher than the stable model.

","parent_name":"Model"},"Extensions/Model.html#/s:SS6OpenAIE10moderationSSvpZ":{"name":"moderation","parent_name":"Model"},"Extensions/Model.html":{"name":"Model"},"Enums/OpenAIError.html#/s:6OpenAI0A7AIErrorO9emptyDatayA2CmF":{"name":"emptyData","parent_name":"OpenAIError"},"Enums/OpenAIError.html":{"name":"OpenAIError"},"Classes/OpenAI/Configuration.html#/s:6OpenAIAAC13ConfigurationV5tokenSSvp":{"name":"token","abstract":"

OpenAI API token. See https://platform.openai.com/docs/api-reference/authentication

","parent_name":"Configuration"},"Classes/OpenAI/Configuration.html#/s:6OpenAIAAC13ConfigurationV22organizationIdentifierSSSgvp":{"name":"organizationIdentifier","abstract":"

Optional OpenAI organization identifier. See https://platform.openai.com/docs/api-reference/authentication

","parent_name":"Configuration"},"Classes/OpenAI/Configuration.html#/s:6OpenAIAAC13ConfigurationV4hostSSvp":{"name":"host","abstract":"

API host. Set this property if you use some kind of proxy or your own server. Default is api.openai.com

","parent_name":"Configuration"},"Classes/OpenAI/Configuration.html#/s:6OpenAIAAC13ConfigurationV4portSivp":{"name":"port","parent_name":"Configuration"},"Classes/OpenAI/Configuration.html#/s:6OpenAIAAC13ConfigurationV6schemeSSvp":{"name":"scheme","parent_name":"Configuration"},"Classes/OpenAI/Configuration.html#/s:6OpenAIAAC13ConfigurationV15timeoutIntervalSdvp":{"name":"timeoutInterval","abstract":"

Default request timeout

","parent_name":"Configuration"},"Classes/OpenAI/Configuration.html#/s:6OpenAIAAC13ConfigurationV5token22organizationIdentifier4host4port6scheme15timeoutIntervalADSS_SSSgSSSiSSSdtcfc":{"name":"init(token:organizationIdentifier:host:port:scheme:timeoutInterval:)","parent_name":"Configuration"},"Classes/OpenAI/Configuration.html":{"name":"Configuration","parent_name":"OpenAI"},"Classes/OpenAI.html#/s:6OpenAIAAC13configurationAB13ConfigurationVvp":{"name":"configuration","parent_name":"OpenAI"},"Classes/OpenAI.html#/s:6OpenAIAAC8apiTokenABSS_tcfc":{"name":"init(apiToken:)","parent_name":"OpenAI"},"Classes/OpenAI.html#/s:6OpenAIAAC13configurationA2B13ConfigurationV_tcfc":{"name":"init(configuration:)","parent_name":"OpenAI"},"Classes/OpenAI.html#/s:6OpenAIAAC13configuration7sessionA2B13ConfigurationV_So12NSURLSessionCtcfc":{"name":"init(configuration:session:)","parent_name":"OpenAI"},"Classes/OpenAI.html#/s:6OpenAI0A10AIProtocolP11completions5query10completionyAA16CompletionsQueryV_ys6ResultOyAA0gI0Vs5Error_pGctF":{"name":"completions(query:completion:)","parent_name":"OpenAI"},"Classes/OpenAI.html#/s:6OpenAI0A10AIProtocolP17completionsStream5query8onResult10completionyAA16CompletionsQueryV_ys0H0OyAA0jH0Vs5Error_pGcysAN_pSgcSgtF":{"name":"completionsStream(query:onResult:completion:)","parent_name":"OpenAI"},"Classes/OpenAI.html#/s:6OpenAI0A10AIProtocolP6images5query10completionyAA11ImagesQueryV_ys6ResultOyAA0gI0Vs5Error_pGctF":{"name":"images(query:completion:)","parent_name":"OpenAI"},"Classes/OpenAI.html#/s:6OpenAI0A10AIProtocolP10imageEdits5query10completionyAA05ImageE5QueryV_ys6ResultOyAA06ImagesJ0Vs5Error_pGctF":{"name":"imageEdits(query:completion:)","parent_name":"OpenAI"},"Classes/OpenAI.html#/s:6OpenAI0A10AIProtocolP15imageVariations5query10completionyAA05ImageE5QueryV_ys6ResultOyAA06ImagesJ0Vs5Error_pGctF":{"name":"imageVariations(query:completion:)","parent_name":"OpenAI"},"Classes/OpenAI.html#/s:6OpenAI0A10AIProtocolP10embeddings5query10completionyAA15Embe
ddingsQueryV_ys6ResultOyAA0gI0Vs5Error_pGctF":{"name":"embeddings(query:completion:)","parent_name":"OpenAI"},"Classes/OpenAI.html#/s:6OpenAI0A10AIProtocolP5chats5query10completionyAA9ChatQueryV_ys6ResultOyAA0gI0Vs5Error_pGctF":{"name":"chats(query:completion:)","parent_name":"OpenAI"},"Classes/OpenAI.html#/s:6OpenAI0A10AIProtocolP11chatsStream5query8onResult10completionyAA9ChatQueryV_ys0H0OyAA0jeH0Vs5Error_pGcysAN_pSgcSgtF":{"name":"chatsStream(query:onResult:completion:)","parent_name":"OpenAI"},"Classes/OpenAI.html#/s:6OpenAI0A10AIProtocolP5edits5query10completionyAA10EditsQueryV_ys6ResultOyAA0gI0Vs5Error_pGctF":{"name":"edits(query:completion:)","parent_name":"OpenAI"},"Classes/OpenAI.html#/s:6OpenAI0A10AIProtocolP5model5query10completionyAA10ModelQueryV_ys6ResultOyAA0gI0Vs5Error_pGctF":{"name":"model(query:completion:)","parent_name":"OpenAI"},"Classes/OpenAI.html#/s:6OpenAI0A10AIProtocolP6models10completionyys6ResultOyAA06ModelsF0Vs5Error_pGc_tF":{"name":"models(completion:)","parent_name":"OpenAI"},"Classes/OpenAI.html#/s:6OpenAI0A10AIProtocolP11moderations5query10completionyAA16ModerationsQueryV_ys6ResultOyAA0gI0Vs5Error_pGctF":{"name":"moderations(query:completion:)","parent_name":"OpenAI"},"Classes/OpenAI.html#/s:6OpenAI0A10AIProtocolP19audioTranscriptions5query10completionyAA23AudioTranscriptionQueryV_ys6ResultOyAA0hiK0Vs5Error_pGctF":{"name":"audioTranscriptions(query:completion:)","parent_name":"OpenAI"},"Classes/OpenAI.html#/s:6OpenAI0A10AIProtocolP17audioTranslations5query10completionyAA21AudioTranslationQueryV_ys6ResultOyAA0hiK0Vs5Error_pGctF":{"name":"audioTranslations(query:completion:)","parent_name":"OpenAI"},"Classes/OpenAI.html#/s:6OpenAI0A10AIProtocolP17audioCreateSpeech5query10completionyAA05AudioF5QueryV_ys6ResultOyAA0ifK0Vs5Error_pGctF":{"name":"audioCreateSpeech(query:completion:)","parent_name":"OpenAI"},"Classes/OpenAI.html":{"name":"OpenAI"},"Classes.html":{"name":"Classes","abstract":"

The following classes are available globally.

"},"Enums.html":{"name":"Enumerations","abstract":"

The following enumerations are available globally.

"},"Extensions.html":{"name":"Extensions","abstract":"

The following extensions are available globally.

"},"Protocols.html":{"name":"Protocols","abstract":"

The following protocols are available globally.

"},"Structs.html":{"name":"Structures","abstract":"

The following structures are available globally.

"},"Typealiases.html":{"name":"Type Aliases","abstract":"

The following type aliases are available globally.

"}} \ No newline at end of file diff --git a/docs/docsets/.docset/Contents/Resources/Documents/undocumented.json b/docs/docsets/.docset/Contents/Resources/Documents/undocumented.json new file mode 100644 index 00000000..8b7f1785 --- /dev/null +++ b/docs/docsets/.docset/Contents/Resources/Documents/undocumented.json @@ -0,0 +1,6 @@ +{ + "warnings": [ + + ], + "source_directory": "/Users/dingxiancao/OpenAI" +} \ No newline at end of file diff --git a/docs/docsets/.docset/Contents/Resources/docSet.dsidx b/docs/docsets/.docset/Contents/Resources/docSet.dsidx new file mode 100644 index 00000000..ee954744 Binary files /dev/null and b/docs/docsets/.docset/Contents/Resources/docSet.dsidx differ diff --git a/docs/docsets/.tgz b/docs/docsets/.tgz new file mode 100644 index 00000000..64fe7e1b Binary files /dev/null and b/docs/docsets/.tgz differ diff --git a/docs/img/carat.png b/docs/img/carat.png new file mode 100755 index 00000000..29d2f7fd Binary files /dev/null and b/docs/img/carat.png differ diff --git a/docs/img/dash.png b/docs/img/dash.png new file mode 100755 index 00000000..6f694c7a Binary files /dev/null and b/docs/img/dash.png differ diff --git a/docs/img/spinner.gif b/docs/img/spinner.gif new file mode 100644 index 00000000..e3038d0a Binary files /dev/null and b/docs/img/spinner.gif differ diff --git a/docs/index.html b/docs/index.html new file mode 100644 index 00000000..38545af2 --- /dev/null +++ b/docs/index.html @@ -0,0 +1,1280 @@ + + + + Index Reference + + + + + + + + + + + + +
+
+

Docs (100% documented)

+
+
+ +
+
+
+
+
+ +
+
+ +
+
+
+ +

OpenAI

+ +

logo

+ +
+ +

Swift Workflow + + +Twitter

+ +

This repository contains Swift community-maintained implementation over OpenAI public API.

+ + +

What is OpenAI

+ +

OpenAI is a non-profit artificial intelligence research organization founded in San Francisco, California in 2015. It was created with the purpose of advancing digital intelligence in ways that benefit humanity as a whole and promote societal progress. The organization strives to develop AI (Artificial Intelligence) programs and systems that can think, act and adapt quickly on their own – autonomously. OpenAI’s mission is to ensure safe and responsible use of AI for civic good, economic growth and other public benefits; this includes cutting-edge research into important topics such as general AI safety, natural language processing, applied reinforcement learning methods, machine vision algorithms etc.

+ +
+

The OpenAI API can be applied to virtually any task that involves understanding or generating natural language or code. We offer a spectrum of models with different levels of power suitable for different tasks, as well as the ability to fine-tune your own custom models. These models can be used for everything from content generation to semantic search and classification.

+
+

Installation

+ +

OpenAI is available with Swift Package Manager. +The Swift Package Manager is a tool for automating the distribution of Swift code and is integrated into the swift compiler. +Once you have your Swift package set up, adding OpenAI as a dependency is as easy as adding it to the dependencies value of your Package.swift.

+
dependencies: [
+    .package(url: "https://github.com/MacPaw/OpenAI.git", branch: "main")
+]
+
+

Usage

+

Initialization

+ +

To initialize API instance you need to obtain API token from your Open AI organization.

+ +

Remember that your API key is a secret! Do not share it with others or expose it in any client-side code (browsers, apps). Production requests must be routed through your own backend server where your API key can be securely loaded from an environment variable or key management service.

+ +

company

+ +

Once you have a token, you can initialize OpenAI class, which is an entry point to the API.

+ +
+

⚠️ OpenAI strongly recommends developers of client-side applications proxy requests through a separate backend service to keep their API key safe. API keys can access and manipulate customer billing, usage, and organizational data, so it’s a significant risk to expose them.

+
+
let openAI = OpenAI(apiToken: "YOUR_TOKEN_HERE")
+
+ +

Optionally you can initialize OpenAI with a token, an organization identifier, and a timeoutInterval.

+
let configuration = OpenAI.Configuration(token: "YOUR_TOKEN_HERE", organizationIdentifier: "YOUR_ORGANIZATION_ID_HERE", timeoutInterval: 60.0)
+let openAI = OpenAI(configuration: configuration)
+
+ +

Once you possess the token, and the instance is initialized, you are ready to make requests.

+

Completions

+ +

Given a prompt, the model will return one or more predicted completions, and can also return the probabilities of alternative tokens at each position.

+ +

Request

+
struct CompletionsQuery: Codable {
+    /// ID of the model to use.
+    public let model: Model
+    /// The prompt(s) to generate completions for, encoded as a string, array of strings, array of tokens, or array of token arrays.
+    public let prompt: String
+    /// What sampling temperature to use. Higher values means the model will take more risks. Try 0.9 for more creative applications, and 0 (argmax sampling) for ones with a well-defined answer.
+    public let temperature: Double?
+    /// The maximum number of tokens to generate in the completion.
+    public let maxTokens: Int?
+    /// An alternative to sampling with temperature, called nucleus sampling, where the model considers the results of the tokens with top_p probability mass. So 0.1 means only the tokens comprising the top 10% probability mass are considered.
+    public let topP: Double?
+    /// Number between -2.0 and 2.0. Positive values penalize new tokens based on their existing frequency in the text so far, decreasing the model's likelihood to repeat the same line verbatim.
+    public let frequencyPenalty: Double?
+    /// Number between -2.0 and 2.0. Positive values penalize new tokens based on whether they appear in the text so far, increasing the model's likelihood to talk about new topics.
+    public let presencePenalty: Double?
+    /// Up to 4 sequences where the API will stop generating further tokens. The returned text will not contain the stop sequence.
+    public let stop: [String]?
+    /// A unique identifier representing your end-user, which can help OpenAI to monitor and detect abuse.
+    public let user: String?
+}
+
+ +

Response

+
struct CompletionsResult: Codable, Equatable {
+    public struct Choice: Codable, Equatable {
+        public let text: String
+        public let index: Int
+    }
+
+    public let id: String
+    public let object: String
+    public let created: TimeInterval
+    public let model: Model
+    public let choices: [Choice]
+    public let usage: Usage
+}
+
+ +

Example

+
let query = CompletionsQuery(model: .textDavinci_003, prompt: "What is 42?", temperature: 0, maxTokens: 100, topP: 1, frequencyPenalty: 0, presencePenalty: 0, stop: ["\\n"])
+openAI.completions(query: query) { result in
+  //Handle result here
+}
+//or
+let result = try await openAI.completions(query: query)
+
+
(lldb) po result
+▿ CompletionsResult
+  - id : "cmpl-6P9be2p2fQlwB7zTOl0NxCOetGmX3"
+  - object : "text_completion"
+  - created : 1671453146.0
+  - model : OpenAI.Model.textDavinci_003
+  ▿ choices : 1 element
+    ▿ 0 : Choice
+      - text : "\n\n42 is the answer to the ultimate question of life, the universe, and everything, according to the book The Hitchhiker\'s Guide to the Galaxy."
+      - index : 0
+
+

Completions Streaming

+ +

Completions streaming is available by using completionsStream function. Tokens will be sent one-by-one.

+ +

Closures

+
openAI.completionsStream(query: query) { partialResult in
+    switch partialResult {
+    case .success(let result):
+        print(result.choices)
+    case .failure(let error):
+        //Handle chunk error here
+    }
+} completion: { error in
+    //Handle streaming error here
+}
+
+ +

Combine

+
openAI
+    .completionsStream(query: query)
+    .sink { completion in
+        //Handle completion result here
+    } receiveValue: { result in
+        //Handle chunk here
+    }.store(in: &cancellables)
+
+ +

Structured concurrency

+
for try await result in openAI.completionsStream(query: query) {
+   //Handle result here
+}
+
+ +

Review Completions Documentation for more info.

+

Chats

+ +

Using the OpenAI Chat API, you can build your own applications with gpt-3.5-turbo to do things like:

+ +
    +
  • Draft an email or other piece of writing
  • +
  • Write Python code
  • +
  • Answer questions about a set of documents
  • +
  • Create conversational agents
  • +
  • Give your software a natural language interface
  • +
  • Tutor in a range of subjects
  • +
  • Translate languages
  • +
  • Simulate characters for video games and much more
  • +
+ +

Request

+
 struct ChatQuery: Codable {
+     /// ID of the model to use. Currently, only gpt-3.5-turbo and gpt-3.5-turbo-0301 are supported.
+     public let model: Model
+     /// The messages to generate chat completions for
+     public let messages: [Chat]
+     /// A list of functions the model may generate JSON inputs for.
+     public let functions: [ChatFunctionDeclaration]?
+     /// What sampling temperature to use, between 0 and 2. Higher values like 0.8 will make the output more random, while lower values like 0.2 will make it more focused and  We generally recommend altering this or top_p but not both.
+     public let temperature: Double?
+     /// An alternative to sampling with temperature, called nucleus sampling, where the model considers the results of the tokens with top_p probability mass. So 0.1 means only the tokens comprising the top 10% probability mass are considered.
+     public let topP: Double?
+     /// How many chat completion choices to generate for each input message.
+     public let n: Int?
+     /// Up to 4 sequences where the API will stop generating further tokens. The returned text will not contain the stop sequence.
+     public let stop: [String]?
+     /// The maximum number of tokens to generate in the completion.
+     public let maxTokens: Int?
+     /// Number between -2.0 and 2.0. Positive values penalize new tokens based on whether they appear in the text so far, increasing the model's likelihood to talk about new topics.
+     public let presencePenalty: Double?
+     /// Number between -2.0 and 2.0. Positive values penalize new tokens based on their existing frequency in the text so far, decreasing the model's likelihood to repeat the same line verbatim.
+     public let frequencyPenalty: Double?
+     ///Modify the likelihood of specified tokens appearing in the completion.
+     public let logitBias: [String:Int]?
+     /// A unique identifier representing your end-user, which can help OpenAI to monitor and detect abuse.
+     public let user: String?
+}
+
+ +

Response

+
struct ChatResult: Codable, Equatable {
+    public struct Choice: Codable, Equatable {
+        public let index: Int
+        public let message: Chat
+        public let finishReason: String
+    }
+
+    public struct Usage: Codable, Equatable {
+        public let promptTokens: Int
+        public let completionTokens: Int
+        public let totalTokens: Int
+    }
+
+    public let id: String
+    public let object: String
+    public let created: TimeInterval
+    public let model: Model
+    public let choices: [Choice]
+    public let usage: Usage
+}
+
+ +

Example

+
let query = ChatQuery(model: .gpt3_5Turbo, messages: [.init(role: .user, content: "who are you")])
+let result = try await openAI.chats(query: query)
+
+
(lldb) po result
+▿ ChatResult
+  - id : "chatcmpl-6pwjgxGV2iPP4QGdyOLXnTY0LE3F8"
+  - object : "chat.completion"
+  - created : 1677838528.0
+  - model : "gpt-3.5-turbo-0301"
+  ▿ choices : 1 element
+    ▿ 0 : Choice
+      - index : 0
+      ▿ message : Chat
+        - role : "assistant"
+        - content : "\n\nI\'m an AI language model developed by OpenAI, created to provide assistance and support for various tasks such as answering questions, generating text, and providing recommendations. Nice to meet you!"
+      - finish_reason : "stop"
+  ▿ usage : Usage
+    - prompt_tokens : 10
+    - completion_tokens : 39
+    - total_tokens : 49
+
+

Chats Streaming

+ +

Chats streaming is available by using chatStream function. Tokens will be sent one-by-one.

+ +

Closures

+
openAI.chatsStream(query: query) { partialResult in
+    switch partialResult {
+    case .success(let result):
+        print(result.choices)
+    case .failure(let error):
+        //Handle chunk error here
+    }
+} completion: { error in
+    //Handle streaming error here
+}
+
+ +

Combine

+
openAI
+    .chatsStream(query: query)
+    .sink { completion in
+        //Handle completion result here
+    } receiveValue: { result in
+        //Handle chunk here
+    }.store(in: &cancellables)
+
+ +

Structured concurrency

+
for try await result in openAI.chatsStream(query: query) {
+   //Handle result here
+}
+
+ +

Function calls

+
let openAI = OpenAI(apiToken: "...")
+// Declare functions which GPT-3 might decide to call.
+let functions = [
+  ChatFunctionDeclaration(
+      name: "get_current_weather",
+      description: "Get the current weather in a given location",
+      parameters:
+        JSONSchema(
+          type: .object,
+          properties: [
+            "location": .init(type: .string, description: "The city and state, e.g. San Francisco, CA"),
+            "unit": .init(type: .string, enumValues: ["celsius", "fahrenheit"])
+          ],
+          required: ["location"]
+        )
+  )
+]
+let query = ChatQuery(
+  model: "gpt-3.5-turbo-0613",  // 0613 is the earliest version with function calls support.
+  messages: [
+      Chat(role: .user, content: "What's the weather like in Boston?")
+  ],
+  functions: functions
+)
+let result = try await openAI.chats(query: query)
+
+ +

Result will be (serialized as JSON here for readability):

+
{
+  "id": "chatcmpl-1234",
+  "object": "chat.completion",
+  "created": 1686000000,
+  "model": "gpt-3.5-turbo-0613",
+  "choices": [
+    {
+      "index": 0,
+      "message": {
+        "role": "assistant",
+        "function_call": {
+          "name": "get_current_weather",
+          "arguments": "{\n  \"location\": \"Boston, MA\"\n}"
+        }
+      },
+      "finish_reason": "function_call"
+    }
+  ],
+  "usage": { "total_tokens": 100, "completion_tokens": 18, "prompt_tokens": 82 }
+}
+
+
+ +

Review Chat Documentation for more info.

+

Images

+ +

Given a prompt and/or an input image, the model will generate a new image.

+ +

As Artificial Intelligence continues to develop, so too does the intriguing concept of Dall-E. Developed by OpenAI, a research lab for artificial intelligence purposes, Dall-E has been classified as an AI system that can generate images based on descriptions provided by humans. With its potential applications spanning from animation and illustration to design and engineering - not to mention the endless possibilities in between - it’s easy to see why there is such excitement over this new technology.

+

Create Image

+ +

Request

+
struct ImagesQuery: Codable {
+    /// A text description of the desired image(s). The maximum length is 1000 characters.
+    public let prompt: String
+    /// The number of images to generate. Must be between 1 and 10.
+    public let n: Int?
+    /// The size of the generated images. Must be one of 256x256, 512x512, or 1024x1024.
+    public let size: String?
+}
+
+ +

Response

+
struct ImagesResult: Codable, Equatable {
+    public struct URLResult: Codable, Equatable {
+        public let url: String
+    }
+    public let created: TimeInterval
+    public let data: [URLResult]
+}
+
+ +

Example

+
let query = ImagesQuery(prompt: "White cat with heterochromia sitting on the kitchen table", n: 1, size: "1024x1024")
+openAI.images(query: query) { result in
+  //Handle result here
+}
+//or
+let result = try await openAI.images(query: query)
+
+
(lldb) po result
+▿ ImagesResult
+  - created : 1671453505.0
+  ▿ data : 1 element
+    ▿ 0 : URLResult
+      - url : "https://oaidalleapiprodscus.blob.core.windows.net/private/org-CWjU5cDIzgCcVjq10pp5yX5Q/user-GoBXgChvLBqLHdBiMJBUbPqF/img-WZVUK2dOD4HKbKwW1NeMJHBd.png?st=2022-12-19T11%3A38%3A25Z&se=2022-12-19T13%3A38%3A25Z&sp=r&sv=2021-08-06&sr=b&rscd=inline&rsct=image/png&skoid=6aaadede-4fb3-4698-a8f6-684d7786b067&sktid=a48cca56-e6da-484e-a814-9c849652bcb3&skt=2022-12-19T09%3A35%3A16Z&ske=2022-12-20T09%3A35%3A16Z&sks=b&skv=2021-08-06&sig=mh52rmtbQ8CXArv5bMaU6lhgZHFBZz/ePr4y%2BJwLKOc%3D"
+
+ +

Generated image

+ +

Generated Image

+

Create Image Edit

+ +

Creates an edited or extended image given an original image and a prompt.

+ +

Request

+
public struct ImageEditsQuery: Codable {
+    /// The image to edit. Must be a valid PNG file, less than 4MB, and square. If mask is not provided, image must have transparency, which will be used as the mask.
+    public let image: Data
+    public let fileName: String
+    /// An additional image whose fully transparent areas (e.g. where alpha is zero) indicate where image should be edited. Must be a valid PNG file, less than 4MB, and have the same dimensions as image.
+    public let mask: Data?
+    public let maskFileName: String?
+    /// A text description of the desired image(s). The maximum length is 1000 characters.
+    public let prompt: String
+    /// The number of images to generate. Must be between 1 and 10.
+    public let n: Int?
+    /// The size of the generated images. Must be one of 256x256, 512x512, or 1024x1024.
+    public let size: String?
+}
+
+ +

Response

+ +

Uses the ImagesResult response similarly to ImagesQuery.

+ +

Example

+
let data = image.pngData()
+let query = ImageEditQuery(image: data, fileName: "whitecat.png", prompt: "White cat with heterochromia sitting on the kitchen table with a bowl of food", n: 1, size: "1024x1024")
+openAI.imageEdits(query: query) { result in
+  //Handle result here
+}
+//or
+let result = try await openAI.imageEdits(query: query)
+
+

Create Image Variation

+ +

Creates a variation of a given image.

+ +

Request

+
public struct ImageVariationsQuery: Codable {
+    /// The image to edit. Must be a valid PNG file, less than 4MB, and square. If mask is not provided, image must have transparency, which will be used as the mask.
+    public let image: Data
+    public let fileName: String
+    /// The number of images to generate. Must be between 1 and 10.
+    public let n: Int?
+    /// The size of the generated images. Must be one of 256x256, 512x512, or 1024x1024.
+    public let size: String?
+}
+
+ +

Response

+ +

Uses the ImagesResult response similarly to ImagesQuery.

+ +

Example

+
let data = image.pngData()
+let query = ImageVariationQuery(image: data, fileName: "whitecat.png", n: 1, size: "1024x1024")
+openAI.imageVariations(query: query) { result in
+  //Handle result here
+}
+//or
+let result = try await openAI.imageVariations(query: query)
+
+ +

Review Images Documentation for more info.

+

Audio

+ +

The speech to text API provides two endpoints, transcriptions and translations, based on our state-of-the-art open source large-v2 Whisper model. They can be used to:

+ +

Transcribe audio into whatever language the audio is in. +Translate and transcribe the audio into english. +File uploads are currently limited to 25 MB and the following input file types are supported: mp3, mp4, mpeg, mpga, m4a, wav, and webm.

+

Audio Create Speech

+ +

This function sends an AudioSpeechQuery to the OpenAI API to create audio speech from text using a specific voice and format.

+ +

Learn more about voices.
+Learn more about models.

+ +

Request:

+
public struct AudioSpeechQuery: Codable, Equatable {
+    //...
+    public let model: Model // tts-1 or tts-1-hd  
+    public let input: String
+    public let voice: AudioSpeechVoice
+    public let responseFormat: AudioSpeechResponseFormat
+    public let speed: String? // Initializes with Double?
+    //...
+}
+
+ +

Response:

+
/// Audio data for one of the following formats :`mp3`, `opus`, `aac`, `flac`
+public let audioData: Data?
+
+ +

Example:

+
let query = AudioSpeechQuery(model: .tts_1, input: "Hello, world!", voice: .alloy, responseFormat: .mp3, speed: 1.0)
+
+openAI.audioCreateSpeech(query: query) { result in
+    // Handle response here
+}
+//or
+let result = try await openAI.audioCreateSpeech(query: query)
+
+ +

OpenAI Create Speech – Documentation

+

Audio Transcriptions

+ +

Transcribes audio into the input language.

+ +

Request

+
public struct AudioTranscriptionQuery: Codable, Equatable {
+
+    public let file: Data
+    public let fileName: String
+    public let model: Model
+
+    public let prompt: String?
+    public let temperature: Double?
+    public let language: String?
+}
+
+ +

Response

+
public struct AudioTranscriptionResult: Codable, Equatable {
+
+    public let text: String
+}
+
+ +

Example

+
let data = Data(contentsOfURL:...)
+let query = AudioTranscriptionQuery(file: data, fileName: "audio.m4a", model: .whisper_1)        
+
+openAI.audioTranscriptions(query: query) { result in
+    //Handle result here
+}
+//or
+let result = try await openAI.audioTranscriptions(query: query)
+
+

Audio Translations

+ +

Translates audio into English.

+ +

Request

+
public struct AudioTranslationQuery: Codable, Equatable {
+
+    public let file: Data
+    public let fileName: String
+    public let model: Model
+
+    public let prompt: String?
+    public let temperature: Double?
+}    
+
+ +

Response

+
public struct AudioTranslationResult: Codable, Equatable {
+
+    public let text: String
+}
+
+ +

Example

+
let data = Data(contentsOfURL:...)
+let query = AudioTranslationQuery(file: data, fileName: "audio.m4a", model: .whisper_1)  
+
+openAI.audioTranslations(query: query) { result in
+    //Handle result here
+}
+//or
+let result = try await openAI.audioTranslations(query: query)
+
+ +

Review Audio Documentation for more info.

+

Edits

+ +

Creates a new edit for the provided input, instruction, and parameters.

+ +

Request

+
struct EditsQuery: Codable {
+    /// ID of the model to use.
+    public let model: Model
+    /// Input text to get embeddings for.
+    public let input: String?
+    /// The instruction that tells the model how to edit the prompt.
+    public let instruction: String
+    /// The number of images to generate. Must be between 1 and 10.
+    public let n: Int?
+    /// What sampling temperature to use. Higher values means the model will take more risks. Try 0.9 for more creative applications, and 0 (argmax sampling) for ones with a well-defined answer.
+    public let temperature: Double?
+    /// An alternative to sampling with temperature, called nucleus sampling, where the model considers the results of the tokens with top_p probability mass. So 0.1 means only the tokens comprising the top 10% probability mass are considered.
+    public let topP: Double?
+}
+
+ +

Response

+
struct EditsResult: Codable, Equatable {
+
+    public struct Choice: Codable, Equatable {
+        public let text: String
+        public let index: Int
+    }
+
+    public struct Usage: Codable, Equatable {
+        public let promptTokens: Int
+        public let completionTokens: Int
+        public let totalTokens: Int
+
+        enum CodingKeys: String, CodingKey {
+            case promptTokens = "prompt_tokens"
+            case completionTokens = "completion_tokens"
+            case totalTokens = "total_tokens"
+        }
+    }
+
+    public let object: String
+    public let created: TimeInterval
+    public let choices: [Choice]
+    public let usage: Usage
+}
+
+ +

Example

+
let query = EditsQuery(model: .gpt4, input: "What day of the wek is it?", instruction: "Fix the spelling mistakes")
+openAI.edits(query: query) { result in
+  //Handle response here
+}
+//or
+let result = try await openAI.edits(query: query)
+
+ +

Review Edits Documentation for more info.

+

Embeddings

+ +

Get a vector representation of a given input that can be easily consumed by machine learning models and algorithms.

+ +

Request

+
struct EmbeddingsQuery: Codable {
+    /// ID of the model to use.
+    public let model: Model
+    /// Input text to get embeddings for
+    public let input: String
+}
+
+ +

Response

+
struct EmbeddingsResult: Codable, Equatable {
+
+    public struct Embedding: Codable, Equatable {
+
+        public let object: String
+        public let embedding: [Double]
+        public let index: Int
+    }
+    public let data: [Embedding]
+    public let usage: Usage
+}
+
+ +

Example

+
let query = EmbeddingsQuery(model: .textSearchBabbageDoc, input: "The food was delicious and the waiter...")
+openAI.embeddings(query: query) { result in
+  //Handle response here
+}
+//or
+let result = try await openAI.embeddings(query: query)
+
+
(lldb) po result
+▿ EmbeddingsResult
+  ▿ data : 1 element
+    ▿ 0 : Embedding
+      - object : "embedding"
+      ▿ embedding : 2048 elements
+        - 0 : 0.0010535449
+        - 1 : 0.024234328
+        - 2 : -0.0084999
+        - 3 : 0.008647452
+    .......
+        - 2044 : 0.017536353
+        - 2045 : -0.005897616
+        - 2046 : -0.026559394
+        - 2047 : -0.016633155
+      - index : 0
+
+(lldb)
+
+ +

Review Embeddings Documentation for more info.

+

Models

+ +

Models are represented as a typealias typealias Model = String.

+
public extension Model {
+    static let gpt4_turbo_preview = "gpt-4-turbo-preview"
+    static let gpt4_vision_preview = "gpt-4-vision-preview"
+    static let gpt4_0125_preview = "gpt-4-0125-preview"
+    static let gpt4_1106_preview = "gpt-4-1106-preview"
+    static let gpt4 = "gpt-4"
+    static let gpt4_0613 = "gpt-4-0613"
+    static let gpt4_0314 = "gpt-4-0314"
+    static let gpt4_32k = "gpt-4-32k"
+    static let gpt4_32k_0613 = "gpt-4-32k-0613"
+    static let gpt4_32k_0314 = "gpt-4-32k-0314"
+
+    static let gpt3_5Turbo = "gpt-3.5-turbo"
+    static let gpt3_5Turbo_0125 = "gpt-3.5-turbo-0125"
+    static let gpt3_5Turbo_1106 = "gpt-3.5-turbo-1106"
+    static let gpt3_5Turbo_0613 = "gpt-3.5-turbo-0613"
+    static let gpt3_5Turbo_0301 = "gpt-3.5-turbo-0301"
+    static let gpt3_5Turbo_16k = "gpt-3.5-turbo-16k"
+    static let gpt3_5Turbo_16k_0613 = "gpt-3.5-turbo-16k-0613"
+
+    static let textDavinci_003 = "text-davinci-003"
+    static let textDavinci_002 = "text-davinci-002"
+    static let textCurie = "text-curie-001"
+    static let textBabbage = "text-babbage-001"
+    static let textAda = "text-ada-001"
+
+    static let textDavinci_001 = "text-davinci-001"
+    static let codeDavinciEdit_001 = "code-davinci-edit-001"
+
+    static let tts_1 = "tts-1"
+    static let tts_1_hd = "tts-1-hd"
+
+    static let whisper_1 = "whisper-1"
+
+    static let dall_e_2 = "dall-e-2"
+    static let dall_e_3 = "dall-e-3"
+
+    static let davinci = "davinci"
+    static let curie = "curie"
+    static let babbage = "babbage"
+    static let ada = "ada"
+
+    static let textEmbeddingAda = "text-embedding-ada-002"
+    static let textSearchAda = "text-search-ada-doc-001"
+    static let textSearchBabbageDoc = "text-search-babbage-doc-001"
+    static let textSearchBabbageQuery001 = "text-search-babbage-query-001"
+    static let textEmbedding3 = "text-embedding-3-small"
+    static let textEmbedding3Large = "text-embedding-3-large"
+
+    static let textModerationStable = "text-moderation-stable"
+    static let textModerationLatest = "text-moderation-latest"
+    static let moderation = "text-moderation-007"
+}
+
+ +

GPT-4 models are supported.

+ +

As an example: To use the gpt-4-turbo-preview model, pass .gpt4_turbo_preview as the parameter to the ChatQuery init.

+
let query = ChatQuery(model: .gpt4_turbo_preview, messages: [
+    .init(role: .system, content: "You are Librarian-GPT. You know everything about the books."),
+    .init(role: .user, content: "Who wrote Harry Potter?")
+])
+let result = try await openAI.chats(query: query)
+XCTAssertFalse(result.choices.isEmpty)
+
+ +

You can also pass a custom string if you need to use some model, that is not represented above.

+

List Models

+ +

Lists the currently available models.

+ +

Response

+
public struct ModelsResult: Codable, Equatable {
+
+    public let data: [ModelResult]
+    public let object: String
+}
+
+
+ +

Example

+
openAI.models() { result in
+  //Handle result here
+}
+//or
+let result = try await openAI.models()
+
+

Retrieve Model

+ +

Retrieves a model instance, providing ownership information.

+ +

Request

+
public struct ModelQuery: Codable, Equatable {
+
+    public let model: Model
+}    
+
+ +

Response

+
public struct ModelResult: Codable, Equatable {
+
+    public let id: Model
+    public let object: String
+    public let ownedBy: String
+}
+
+ +

Example

+
let query = ModelQuery(model: .gpt4)
+openAI.model(query: query) { result in
+  //Handle result here
+}
+//or
+let result = try await openAI.model(query: query)
+
+ +

Review Models Documentation for more info.

+

Moderations

+ +

Given a input text, outputs if the model classifies it as violating OpenAI’s content policy.

+ +

Request

+
public struct ModerationsQuery: Codable {
+
+    public let input: String
+    public let model: Model?
+}    
+
+ +

Response

+
public struct ModerationsResult: Codable, Equatable {
+
+    public let id: String
+    public let model: Model
+    public let results: [CategoryResult]
+}
+
+ +

Example

+
let query = ModerationsQuery(input: "I want to kill them.")
+openAI.moderations(query: query) { result in
+  //Handle result here
+}
+//or
+let result = try await openAI.moderations(query: query)
+
+ +

Review Moderations Documentation for more info.

+

Utilities

+ +

The component comes with several handy utility functions to work with the vectors.

+
public struct Vector {
+
+    /// Returns the similarity between two vectors
+    ///
+    /// - Parameters:
+    ///     - a: The first vector
+    ///     - b: The second vector
+    public static func cosineSimilarity(a: [Double], b: [Double]) -> Double {
+        return dot(a, b) / (mag(a) * mag(b))
+    }
+
+    /// Returns the difference between two vectors. Cosine distance is defined as `1 - cosineSimilarity(a, b)`
+    ///
+    /// - Parameters:
+    ///     - a: The first vector
+    ///     - b: The second vector
+    public func cosineDifference(a: [Double], b: [Double]) -> Double {
+        return 1 - Self.cosineSimilarity(a: a, b: b)
+    }
+}
+
+ +

Example

+
let vector1 = [0.213123, 0.3214124, 0.421412, 0.3214521251, 0.412412, 0.3214124, 0.1414124, 0.3214521251, 0.213123, 0.3214124, 0.1414124, 0.4214214, 0.213123, 0.3214124, 0.1414124, 0.3214521251, 0.213123, 0.3214124, 0.1414124, 0.3214521251]
+let vector2 = [0.213123, 0.3214124, 0.1414124, 0.3214521251, 0.213123, 0.3214124, 0.1414124, 0.3214521251, 0.213123, 0.511515, 0.1414124, 0.3214521251, 0.213123, 0.3214124, 0.1414124, 0.3214521251, 0.213123, 0.3214124, 0.1414124, 0.3213213]
+let similarity = Vector.cosineSimilarity(a: vector1, b: vector2)
+print(similarity) //0.9510201910206734
+
+ +
+

In data analysis, cosine similarity is a measure of similarity between two sequences of numbers.

+
+ +

Screenshot 2022-12-19 at 6 00 33 PM

+ +

Read more about Cosine Similarity here.

+

Combine Extensions

+ +

The library contains built-in Combine extensions.

+
func completions(query: CompletionsQuery) -> AnyPublisher<CompletionsResult, Error>
+func images(query: ImagesQuery) -> AnyPublisher<ImagesResult, Error>
+func embeddings(query: EmbeddingsQuery) -> AnyPublisher<EmbeddingsResult, Error>
+func chats(query: ChatQuery) -> AnyPublisher<ChatResult, Error>
+func edits(query: EditsQuery) -> AnyPublisher<EditsResult, Error>
+func model(query: ModelQuery) -> AnyPublisher<ModelResult, Error>
+func models() -> AnyPublisher<ModelsResult, Error>
+func moderations(query: ModerationsQuery) -> AnyPublisher<ModerationsResult, Error>
+func audioTranscriptions(query: AudioTranscriptionQuery) -> AnyPublisher<AudioTranscriptionResult, Error>
+func audioTranslations(query: AudioTranslationQuery) -> AnyPublisher<AudioTranslationResult, Error>
+
+

Example Project

+ +

You can find example iOS application in Demo folder.

+ +

mockuuups-iphone-13-pro-mockup-perspective-right

+

Contribution Guidelines

+ +

Make your Pull Requests clear and obvious to anyone viewing them.
+Set main as your target branch.

+

Use Conventional Commits principles in naming PRs and branches:

+ +
    +
  • Feat: ... for new features and new functionality implementations.
  • +
  • Bug: ... for bug fixes.
  • +
  • Fix: ... for minor issues fixing, like typos or inaccuracies in code.
  • +
  • Chore: ... for boring stuff like code polishing, refactoring, deprecation fixing etc.
  • +
+ +

PR naming example: Feat: Add Threads API handling or Bug: Fix message result duplication

+ +

Branch naming example: feat/add-threads-API-handling or bug/fix-message-result-duplication

+

Write description to pull requests in following format:

+ +
    +
  • What
  • +
+ +

+ +
    +
  • Why
  • +
+ +

+ +
    +
  • Affected Areas
  • +
+ +

+ +
    +
  • More Info
  • +
+ +

+ +

We’ll appreciate you including tests to your code if it is needed and possible. ❤️

+ + + +

License

+
MIT License
+
+Copyright (c) 2023 MacPaw Inc.
+
+Permission is hereby granted, free of charge, to any person obtaining a copy
+of this software and associated documentation files (the "Software"), to deal
+in the Software without restriction, including without limitation the rights
+to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
+copies of the Software, and to permit persons to whom the Software is
+furnished to do so, subject to the following conditions:
+
+The above copyright notice and this permission notice shall be included in all
+copies or substantial portions of the Software.
+
+THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
+IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
+FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
+AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
+LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
+OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
+SOFTWARE.
+
+ +
+
+ +
+
+ + diff --git a/docs/js/jazzy.js b/docs/js/jazzy.js new file mode 100755 index 00000000..1ac86992 --- /dev/null +++ b/docs/js/jazzy.js @@ -0,0 +1,74 @@ +// Jazzy - https://github.com/realm/jazzy +// Copyright Realm Inc. +// SPDX-License-Identifier: MIT + +window.jazzy = {'docset': false} +if (typeof window.dash != 'undefined') { + document.documentElement.className += ' dash' + window.jazzy.docset = true +} +if (navigator.userAgent.match(/xcode/i)) { + document.documentElement.className += ' xcode' + window.jazzy.docset = true +} + +function toggleItem($link, $content) { + var animationDuration = 300; + $link.toggleClass('token-open'); + $content.slideToggle(animationDuration); +} + +function itemLinkToContent($link) { + return $link.parent().parent().next(); +} + +// On doc load + hash-change, open any targeted item +function openCurrentItemIfClosed() { + if (window.jazzy.docset) { + return; + } + var $link = $(`a[name="${location.hash.substring(1)}"]`).nextAll('.token'); + $content = itemLinkToContent($link); + if ($content.is(':hidden')) { + toggleItem($link, $content); + } +} + +$(openCurrentItemIfClosed); +$(window).on('hashchange', openCurrentItemIfClosed); + +// On item link ('token') click, toggle its discussion +$('.token').on('click', function(event) { + if (window.jazzy.docset) { + return; + } + var $link = $(this); + toggleItem($link, itemLinkToContent($link)); + + // Keeps the document from jumping to the hash. 
+ var href = $link.attr('href'); + if (history.pushState) { + history.pushState({}, '', href); + } else { + location.hash = href; + } + event.preventDefault(); +}); + +// Clicks on links to the current, closed, item need to open the item +$("a:not('.token')").on('click', function() { + if (location == this.href) { + openCurrentItemIfClosed(); + } +}); + +// KaTeX rendering +if ("katex" in window) { + $($('.math').each( (_, element) => { + katex.render(element.textContent, element, { + displayMode: $(element).hasClass('m-block'), + throwOnError: false, + trust: true + }); + })) +} diff --git a/docs/js/jazzy.search.js b/docs/js/jazzy.search.js new file mode 100644 index 00000000..359cdbb8 --- /dev/null +++ b/docs/js/jazzy.search.js @@ -0,0 +1,74 @@ +// Jazzy - https://github.com/realm/jazzy +// Copyright Realm Inc. +// SPDX-License-Identifier: MIT + +$(function(){ + var $typeahead = $('[data-typeahead]'); + var $form = $typeahead.parents('form'); + var searchURL = $form.attr('action'); + + function displayTemplate(result) { + return result.name; + } + + function suggestionTemplate(result) { + var t = '
'; + t += '' + result.name + ''; + if (result.parent_name) { + t += '' + result.parent_name + ''; + } + t += '
'; + return t; + } + + $typeahead.one('focus', function() { + $form.addClass('loading'); + + $.getJSON(searchURL).then(function(searchData) { + const searchIndex = lunr(function() { + this.ref('url'); + this.field('name'); + this.field('abstract'); + for (const [url, doc] of Object.entries(searchData)) { + this.add({url: url, name: doc.name, abstract: doc.abstract}); + } + }); + + $typeahead.typeahead( + { + highlight: true, + minLength: 3, + autoselect: true + }, + { + limit: 10, + display: displayTemplate, + templates: { suggestion: suggestionTemplate }, + source: function(query, sync) { + const lcSearch = query.toLowerCase(); + const results = searchIndex.query(function(q) { + q.term(lcSearch, { boost: 100 }); + q.term(lcSearch, { + boost: 10, + wildcard: lunr.Query.wildcard.TRAILING + }); + }).map(function(result) { + var doc = searchData[result.ref]; + doc.url = result.ref; + return doc; + }); + sync(results); + } + } + ); + $form.removeClass('loading'); + $typeahead.trigger('focus'); + }); + }); + + var baseURL = searchURL.slice(0, -"search.json".length); + + $typeahead.on('typeahead:select', function(e, result) { + window.location = baseURL + result.url; + }); +}); diff --git a/docs/js/jquery.min.js b/docs/js/jquery.min.js new file mode 100644 index 00000000..7f37b5d9 --- /dev/null +++ b/docs/js/jquery.min.js @@ -0,0 +1,2 @@ +/*! 
jQuery v3.7.1 | (c) OpenJS Foundation and other contributors | jquery.org/license */ +!function(e,t){"use strict";"object"==typeof module&&"object"==typeof module.exports?module.exports=e.document?t(e,!0):function(e){if(!e.document)throw new Error("jQuery requires a window with a document");return t(e)}:t(e)}("undefined"!=typeof window?window:this,function(ie,e){"use strict";var oe=[],r=Object.getPrototypeOf,ae=oe.slice,g=oe.flat?function(e){return oe.flat.call(e)}:function(e){return oe.concat.apply([],e)},s=oe.push,se=oe.indexOf,n={},i=n.toString,ue=n.hasOwnProperty,o=ue.toString,a=o.call(Object),le={},v=function(e){return"function"==typeof e&&"number"!=typeof e.nodeType&&"function"!=typeof e.item},y=function(e){return null!=e&&e===e.window},C=ie.document,u={type:!0,src:!0,nonce:!0,noModule:!0};function m(e,t,n){var r,i,o=(n=n||C).createElement("script");if(o.text=e,t)for(r in u)(i=t[r]||t.getAttribute&&t.getAttribute(r))&&o.setAttribute(r,i);n.head.appendChild(o).parentNode.removeChild(o)}function x(e){return null==e?e+"":"object"==typeof e||"function"==typeof e?n[i.call(e)]||"object":typeof e}var t="3.7.1",l=/HTML$/i,ce=function(e,t){return new ce.fn.init(e,t)};function c(e){var t=!!e&&"length"in e&&e.length,n=x(e);return!v(e)&&!y(e)&&("array"===n||0===t||"number"==typeof t&&0+~]|"+ge+")"+ge+"*"),x=new RegExp(ge+"|>"),j=new RegExp(g),A=new RegExp("^"+t+"$"),D={ID:new RegExp("^#("+t+")"),CLASS:new RegExp("^\\.("+t+")"),TAG:new RegExp("^("+t+"|[*])"),ATTR:new RegExp("^"+p),PSEUDO:new RegExp("^"+g),CHILD:new RegExp("^:(only|first|last|nth|nth-last)-(child|of-type)(?:\\("+ge+"*(even|odd|(([+-]|)(\\d*)n|)"+ge+"*(?:([+-]|)"+ge+"*(\\d+)|))"+ge+"*\\)|)","i"),bool:new RegExp("^(?:"+f+")$","i"),needsContext:new RegExp("^"+ge+"*[>+~]|:(even|odd|eq|gt|lt|nth|first|last)(?:\\("+ge+"*((?:-\\d)?\\d*)"+ge+"*\\)|)(?=[^-]|$)","i")},N=/^(?:input|select|textarea|button)$/i,q=/^h\d$/i,L=/^(?:#([\w-]+)|(\w+)|\.([\w-]+))$/,H=/[+~]/,O=new 
RegExp("\\\\[\\da-fA-F]{1,6}"+ge+"?|\\\\([^\\r\\n\\f])","g"),P=function(e,t){var n="0x"+e.slice(1)-65536;return t||(n<0?String.fromCharCode(n+65536):String.fromCharCode(n>>10|55296,1023&n|56320))},M=function(){V()},R=J(function(e){return!0===e.disabled&&fe(e,"fieldset")},{dir:"parentNode",next:"legend"});try{k.apply(oe=ae.call(ye.childNodes),ye.childNodes),oe[ye.childNodes.length].nodeType}catch(e){k={apply:function(e,t){me.apply(e,ae.call(t))},call:function(e){me.apply(e,ae.call(arguments,1))}}}function I(t,e,n,r){var i,o,a,s,u,l,c,f=e&&e.ownerDocument,p=e?e.nodeType:9;if(n=n||[],"string"!=typeof t||!t||1!==p&&9!==p&&11!==p)return n;if(!r&&(V(e),e=e||T,C)){if(11!==p&&(u=L.exec(t)))if(i=u[1]){if(9===p){if(!(a=e.getElementById(i)))return n;if(a.id===i)return k.call(n,a),n}else if(f&&(a=f.getElementById(i))&&I.contains(e,a)&&a.id===i)return k.call(n,a),n}else{if(u[2])return k.apply(n,e.getElementsByTagName(t)),n;if((i=u[3])&&e.getElementsByClassName)return k.apply(n,e.getElementsByClassName(i)),n}if(!(h[t+" "]||d&&d.test(t))){if(c=t,f=e,1===p&&(x.test(t)||m.test(t))){(f=H.test(t)&&U(e.parentNode)||e)==e&&le.scope||((s=e.getAttribute("id"))?s=ce.escapeSelector(s):e.setAttribute("id",s=S)),o=(l=Y(t)).length;while(o--)l[o]=(s?"#"+s:":scope")+" "+Q(l[o]);c=l.join(",")}try{return k.apply(n,f.querySelectorAll(c)),n}catch(e){h(t,!0)}finally{s===S&&e.removeAttribute("id")}}}return re(t.replace(ve,"$1"),e,n,r)}function W(){var r=[];return function e(t,n){return r.push(t+" ")>b.cacheLength&&delete e[r.shift()],e[t+" "]=n}}function F(e){return e[S]=!0,e}function $(e){var t=T.createElement("fieldset");try{return!!e(t)}catch(e){return!1}finally{t.parentNode&&t.parentNode.removeChild(t),t=null}}function B(t){return function(e){return fe(e,"input")&&e.type===t}}function _(t){return function(e){return(fe(e,"input")||fe(e,"button"))&&e.type===t}}function z(t){return function(e){return"form"in e?e.parentNode&&!1===e.disabled?"label"in e?"label"in 
e.parentNode?e.parentNode.disabled===t:e.disabled===t:e.isDisabled===t||e.isDisabled!==!t&&R(e)===t:e.disabled===t:"label"in e&&e.disabled===t}}function X(a){return F(function(o){return o=+o,F(function(e,t){var n,r=a([],e.length,o),i=r.length;while(i--)e[n=r[i]]&&(e[n]=!(t[n]=e[n]))})})}function U(e){return e&&"undefined"!=typeof e.getElementsByTagName&&e}function V(e){var t,n=e?e.ownerDocument||e:ye;return n!=T&&9===n.nodeType&&n.documentElement&&(r=(T=n).documentElement,C=!ce.isXMLDoc(T),i=r.matches||r.webkitMatchesSelector||r.msMatchesSelector,r.msMatchesSelector&&ye!=T&&(t=T.defaultView)&&t.top!==t&&t.addEventListener("unload",M),le.getById=$(function(e){return r.appendChild(e).id=ce.expando,!T.getElementsByName||!T.getElementsByName(ce.expando).length}),le.disconnectedMatch=$(function(e){return i.call(e,"*")}),le.scope=$(function(){return T.querySelectorAll(":scope")}),le.cssHas=$(function(){try{return T.querySelector(":has(*,:jqfake)"),!1}catch(e){return!0}}),le.getById?(b.filter.ID=function(e){var t=e.replace(O,P);return function(e){return e.getAttribute("id")===t}},b.find.ID=function(e,t){if("undefined"!=typeof t.getElementById&&C){var n=t.getElementById(e);return n?[n]:[]}}):(b.filter.ID=function(e){var n=e.replace(O,P);return function(e){var t="undefined"!=typeof e.getAttributeNode&&e.getAttributeNode("id");return t&&t.value===n}},b.find.ID=function(e,t){if("undefined"!=typeof t.getElementById&&C){var n,r,i,o=t.getElementById(e);if(o){if((n=o.getAttributeNode("id"))&&n.value===e)return[o];i=t.getElementsByName(e),r=0;while(o=i[r++])if((n=o.getAttributeNode("id"))&&n.value===e)return[o]}return[]}}),b.find.TAG=function(e,t){return"undefined"!=typeof t.getElementsByTagName?t.getElementsByTagName(e):t.querySelectorAll(e)},b.find.CLASS=function(e,t){if("undefined"!=typeof t.getElementsByClassName&&C)return t.getElementsByClassName(e)},d=[],$(function(e){var 
t;r.appendChild(e).innerHTML="",e.querySelectorAll("[selected]").length||d.push("\\["+ge+"*(?:value|"+f+")"),e.querySelectorAll("[id~="+S+"-]").length||d.push("~="),e.querySelectorAll("a#"+S+"+*").length||d.push(".#.+[+~]"),e.querySelectorAll(":checked").length||d.push(":checked"),(t=T.createElement("input")).setAttribute("type","hidden"),e.appendChild(t).setAttribute("name","D"),r.appendChild(e).disabled=!0,2!==e.querySelectorAll(":disabled").length&&d.push(":enabled",":disabled"),(t=T.createElement("input")).setAttribute("name",""),e.appendChild(t),e.querySelectorAll("[name='']").length||d.push("\\["+ge+"*name"+ge+"*="+ge+"*(?:''|\"\")")}),le.cssHas||d.push(":has"),d=d.length&&new RegExp(d.join("|")),l=function(e,t){if(e===t)return a=!0,0;var n=!e.compareDocumentPosition-!t.compareDocumentPosition;return n||(1&(n=(e.ownerDocument||e)==(t.ownerDocument||t)?e.compareDocumentPosition(t):1)||!le.sortDetached&&t.compareDocumentPosition(e)===n?e===T||e.ownerDocument==ye&&I.contains(ye,e)?-1:t===T||t.ownerDocument==ye&&I.contains(ye,t)?1:o?se.call(o,e)-se.call(o,t):0:4&n?-1:1)}),T}for(e in I.matches=function(e,t){return I(e,null,null,t)},I.matchesSelector=function(e,t){if(V(e),C&&!h[t+" "]&&(!d||!d.test(t)))try{var n=i.call(e,t);if(n||le.disconnectedMatch||e.document&&11!==e.document.nodeType)return n}catch(e){h(t,!0)}return 0":{dir:"parentNode",first:!0}," ":{dir:"parentNode"},"+":{dir:"previousSibling",first:!0},"~":{dir:"previousSibling"}},preFilter:{ATTR:function(e){return e[1]=e[1].replace(O,P),e[3]=(e[3]||e[4]||e[5]||"").replace(O,P),"~="===e[2]&&(e[3]=" "+e[3]+" "),e.slice(0,4)},CHILD:function(e){return e[1]=e[1].toLowerCase(),"nth"===e[1].slice(0,3)?(e[3]||I.error(e[0]),e[4]=+(e[4]?e[5]+(e[6]||1):2*("even"===e[3]||"odd"===e[3])),e[5]=+(e[7]+e[8]||"odd"===e[3])):e[3]&&I.error(e[0]),e},PSEUDO:function(e){var t,n=!e[6]&&e[2];return 
D.CHILD.test(e[0])?null:(e[3]?e[2]=e[4]||e[5]||"":n&&j.test(n)&&(t=Y(n,!0))&&(t=n.indexOf(")",n.length-t)-n.length)&&(e[0]=e[0].slice(0,t),e[2]=n.slice(0,t)),e.slice(0,3))}},filter:{TAG:function(e){var t=e.replace(O,P).toLowerCase();return"*"===e?function(){return!0}:function(e){return fe(e,t)}},CLASS:function(e){var t=s[e+" "];return t||(t=new RegExp("(^|"+ge+")"+e+"("+ge+"|$)"))&&s(e,function(e){return t.test("string"==typeof e.className&&e.className||"undefined"!=typeof e.getAttribute&&e.getAttribute("class")||"")})},ATTR:function(n,r,i){return function(e){var t=I.attr(e,n);return null==t?"!="===r:!r||(t+="","="===r?t===i:"!="===r?t!==i:"^="===r?i&&0===t.indexOf(i):"*="===r?i&&-1:\x20\t\r\n\f]*)[\x20\t\r\n\f]*\/?>(?:<\/\1>|)$/i;function T(e,n,r){return v(n)?ce.grep(e,function(e,t){return!!n.call(e,t,e)!==r}):n.nodeType?ce.grep(e,function(e){return e===n!==r}):"string"!=typeof n?ce.grep(e,function(e){return-1)[^>]*|#([\w-]+))$/;(ce.fn.init=function(e,t,n){var r,i;if(!e)return this;if(n=n||k,"string"==typeof e){if(!(r="<"===e[0]&&">"===e[e.length-1]&&3<=e.length?[null,e,null]:S.exec(e))||!r[1]&&t)return!t||t.jquery?(t||n).find(e):this.constructor(t).find(e);if(r[1]){if(t=t instanceof ce?t[0]:t,ce.merge(this,ce.parseHTML(r[1],t&&t.nodeType?t.ownerDocument||t:C,!0)),w.test(r[1])&&ce.isPlainObject(t))for(r in t)v(this[r])?this[r](t[r]):this.attr(r,t[r]);return this}return(i=C.getElementById(r[2]))&&(this[0]=i,this.length=1),this}return e.nodeType?(this[0]=e,this.length=1,this):v(e)?void 0!==n.ready?n.ready(e):e(ce):ce.makeArray(e,this)}).prototype=ce.fn,k=ce(C);var E=/^(?:parents|prev(?:Until|All))/,j={children:!0,contents:!0,next:!0,prev:!0};function A(e,t){while((e=e[t])&&1!==e.nodeType);return e}ce.fn.extend({has:function(e){var t=ce(e,this),n=t.length;return this.filter(function(){for(var 
e=0;e\x20\t\r\n\f]*)/i,Ce=/^$|^module$|\/(?:java|ecma)script/i;xe=C.createDocumentFragment().appendChild(C.createElement("div")),(be=C.createElement("input")).setAttribute("type","radio"),be.setAttribute("checked","checked"),be.setAttribute("name","t"),xe.appendChild(be),le.checkClone=xe.cloneNode(!0).cloneNode(!0).lastChild.checked,xe.innerHTML="",le.noCloneChecked=!!xe.cloneNode(!0).lastChild.defaultValue,xe.innerHTML="",le.option=!!xe.lastChild;var ke={thead:[1,"","
"],col:[2,"","
"],tr:[2,"","
"],td:[3,"","
"],_default:[0,"",""]};function Se(e,t){var n;return n="undefined"!=typeof e.getElementsByTagName?e.getElementsByTagName(t||"*"):"undefined"!=typeof e.querySelectorAll?e.querySelectorAll(t||"*"):[],void 0===t||t&&fe(e,t)?ce.merge([e],n):n}function Ee(e,t){for(var n=0,r=e.length;n",""]);var je=/<|&#?\w+;/;function Ae(e,t,n,r,i){for(var o,a,s,u,l,c,f=t.createDocumentFragment(),p=[],d=0,h=e.length;d\s*$/g;function Re(e,t){return fe(e,"table")&&fe(11!==t.nodeType?t:t.firstChild,"tr")&&ce(e).children("tbody")[0]||e}function Ie(e){return e.type=(null!==e.getAttribute("type"))+"/"+e.type,e}function We(e){return"true/"===(e.type||"").slice(0,5)?e.type=e.type.slice(5):e.removeAttribute("type"),e}function Fe(e,t){var n,r,i,o,a,s;if(1===t.nodeType){if(_.hasData(e)&&(s=_.get(e).events))for(i in _.remove(t,"handle events"),s)for(n=0,r=s[i].length;n").attr(n.scriptAttrs||{}).prop({charset:n.scriptCharset,src:n.url}).on("load error",i=function(e){r.remove(),i=null,e&&t("error"===e.type?404:200,e.type)}),C.head.appendChild(r[0])},abort:function(){i&&i()}}});var Jt,Kt=[],Zt=/(=)\?(?=&|$)|\?\?/;ce.ajaxSetup({jsonp:"callback",jsonpCallback:function(){var e=Kt.pop()||ce.expando+"_"+jt.guid++;return this[e]=!0,e}}),ce.ajaxPrefilter("json jsonp",function(e,t,n){var r,i,o,a=!1!==e.jsonp&&(Zt.test(e.url)?"url":"string"==typeof e.data&&0===(e.contentType||"").indexOf("application/x-www-form-urlencoded")&&Zt.test(e.data)&&"data");if(a||"jsonp"===e.dataTypes[0])return r=e.jsonpCallback=v(e.jsonpCallback)?e.jsonpCallback():e.jsonpCallback,a?e[a]=e[a].replace(Zt,"$1"+r):!1!==e.jsonp&&(e.url+=(At.test(e.url)?"&":"?")+e.jsonp+"="+r),e.converters["script json"]=function(){return o||ce.error(r+" was not called"),o[0]},e.dataTypes[0]="json",i=ie[r],ie[r]=function(){o=arguments},n.always(function(){void 0===i?ce(ie).removeProp(r):ie[r]=i,e[r]&&(e.jsonpCallback=t.jsonpCallback,Kt.push(r)),o&&v(i)&&i(o[0]),o=i=void 
0}),"script"}),le.createHTMLDocument=((Jt=C.implementation.createHTMLDocument("").body).innerHTML="
",2===Jt.childNodes.length),ce.parseHTML=function(e,t,n){return"string"!=typeof e?[]:("boolean"==typeof t&&(n=t,t=!1),t||(le.createHTMLDocument?((r=(t=C.implementation.createHTMLDocument("")).createElement("base")).href=C.location.href,t.head.appendChild(r)):t=C),o=!n&&[],(i=w.exec(e))?[t.createElement(i[1])]:(i=Ae([e],t,o),o&&o.length&&ce(o).remove(),ce.merge([],i.childNodes)));var r,i,o},ce.fn.load=function(e,t,n){var r,i,o,a=this,s=e.indexOf(" ");return-1").append(ce.parseHTML(e)).find(r):e)}).always(n&&function(e,t){a.each(function(){n.apply(this,o||[e.responseText,t,e])})}),this},ce.expr.pseudos.animated=function(t){return ce.grep(ce.timers,function(e){return t===e.elem}).length},ce.offset={setOffset:function(e,t,n){var r,i,o,a,s,u,l=ce.css(e,"position"),c=ce(e),f={};"static"===l&&(e.style.position="relative"),s=c.offset(),o=ce.css(e,"top"),u=ce.css(e,"left"),("absolute"===l||"fixed"===l)&&-1<(o+u).indexOf("auto")?(a=(r=c.position()).top,i=r.left):(a=parseFloat(o)||0,i=parseFloat(u)||0),v(t)&&(t=t.call(e,n,ce.extend({},s))),null!=t.top&&(f.top=t.top-s.top+a),null!=t.left&&(f.left=t.left-s.left+i),"using"in t?t.using.call(e,f):c.css(f)}},ce.fn.extend({offset:function(t){if(arguments.length)return void 0===t?this:this.each(function(e){ce.offset.setOffset(this,t,e)});var e,n,r=this[0];return r?r.getClientRects().length?(e=r.getBoundingClientRect(),n=r.ownerDocument.defaultView,{top:e.top+n.pageYOffset,left:e.left+n.pageXOffset}):{top:0,left:0}:void 0},position:function(){if(this[0]){var 
e,t,n,r=this[0],i={top:0,left:0};if("fixed"===ce.css(r,"position"))t=r.getBoundingClientRect();else{t=this.offset(),n=r.ownerDocument,e=r.offsetParent||n.documentElement;while(e&&(e===n.body||e===n.documentElement)&&"static"===ce.css(e,"position"))e=e.parentNode;e&&e!==r&&1===e.nodeType&&((i=ce(e).offset()).top+=ce.css(e,"borderTopWidth",!0),i.left+=ce.css(e,"borderLeftWidth",!0))}return{top:t.top-i.top-ce.css(r,"marginTop",!0),left:t.left-i.left-ce.css(r,"marginLeft",!0)}}},offsetParent:function(){return this.map(function(){var e=this.offsetParent;while(e&&"static"===ce.css(e,"position"))e=e.offsetParent;return e||J})}}),ce.each({scrollLeft:"pageXOffset",scrollTop:"pageYOffset"},function(t,i){var o="pageYOffset"===i;ce.fn[t]=function(e){return M(this,function(e,t,n){var r;if(y(e)?r=e:9===e.nodeType&&(r=e.defaultView),void 0===n)return r?r[i]:e[t];r?r.scrollTo(o?r.pageXOffset:n,o?n:r.pageYOffset):e[t]=n},t,e,arguments.length)}}),ce.each(["top","left"],function(e,n){ce.cssHooks[n]=Ye(le.pixelPosition,function(e,t){if(t)return t=Ge(e,n),_e.test(t)?ce(e).position()[n]+"px":t})}),ce.each({Height:"height",Width:"width"},function(a,s){ce.each({padding:"inner"+a,content:s,"":"outer"+a},function(r,o){ce.fn[o]=function(e,t){var n=arguments.length&&(r||"boolean"!=typeof e),i=r||(!0===e||!0===t?"margin":"border");return M(this,function(e,t,n){var r;return y(e)?0===o.indexOf("outer")?e["inner"+a]:e.document.documentElement["client"+a]:9===e.nodeType?(r=e.documentElement,Math.max(e.body["scroll"+a],r["scroll"+a],e.body["offset"+a],r["offset"+a],r["client"+a])):void 0===n?ce.css(e,t,i):ce.style(e,t,n,i)},s,n?e:void 0,n)}})}),ce.each(["ajaxStart","ajaxStop","ajaxComplete","ajaxError","ajaxSuccess","ajaxSend"],function(e,t){ce.fn[t]=function(e){return this.on(t,e)}}),ce.fn.extend({bind:function(e,t,n){return this.on(e,null,t,n)},unbind:function(e,t){return this.off(e,null,t)},delegate:function(e,t,n,r){return this.on(t,e,n,r)},undelegate:function(e,t,n){return 
1===arguments.length?this.off(e,"**"):this.off(t,e||"**",n)},hover:function(e,t){return this.on("mouseenter",e).on("mouseleave",t||e)}}),ce.each("blur focus focusin focusout resize scroll click dblclick mousedown mouseup mousemove mouseover mouseout mouseenter mouseleave change select submit keydown keypress keyup contextmenu".split(" "),function(e,n){ce.fn[n]=function(e,t){return 00){var c=e.utils.clone(r)||{};c.position=[a,l],c.index=s.length,s.push(new e.Token(i.slice(a,o),c))}a=o+1}}return s},e.tokenizer.separator=/[\s\-]+/,e.Pipeline=function(){this._stack=[]},e.Pipeline.registeredFunctions=Object.create(null),e.Pipeline.registerFunction=function(t,r){r in this.registeredFunctions&&e.utils.warn("Overwriting existing registered function: "+r),t.label=r,e.Pipeline.registeredFunctions[t.label]=t},e.Pipeline.warnIfFunctionNotRegistered=function(t){var r=t.label&&t.label in this.registeredFunctions;r||e.utils.warn("Function is not registered with pipeline. This may cause problems when serialising the index.\n",t)},e.Pipeline.load=function(t){var r=new e.Pipeline;return t.forEach(function(t){var i=e.Pipeline.registeredFunctions[t];if(!i)throw new Error("Cannot load unregistered function: "+t);r.add(i)}),r},e.Pipeline.prototype.add=function(){var t=Array.prototype.slice.call(arguments);t.forEach(function(t){e.Pipeline.warnIfFunctionNotRegistered(t),this._stack.push(t)},this)},e.Pipeline.prototype.after=function(t,r){e.Pipeline.warnIfFunctionNotRegistered(r);var i=this._stack.indexOf(t);if(i==-1)throw new Error("Cannot find existingFn");i+=1,this._stack.splice(i,0,r)},e.Pipeline.prototype.before=function(t,r){e.Pipeline.warnIfFunctionNotRegistered(r);var i=this._stack.indexOf(t);if(i==-1)throw new Error("Cannot find existingFn");this._stack.splice(i,0,r)},e.Pipeline.prototype.remove=function(e){var t=this._stack.indexOf(e);t!=-1&&this._stack.splice(t,1)},e.Pipeline.prototype.run=function(e){for(var 
t=this._stack.length,r=0;r1&&(se&&(r=n),s!=e);)i=r-t,n=t+Math.floor(i/2),s=this.elements[2*n];return s==e?2*n:s>e?2*n:sa?l+=2:o==a&&(t+=r[u+1]*i[l+1],u+=2,l+=2);return t},e.Vector.prototype.similarity=function(e){return this.dot(e)/this.magnitude()||0},e.Vector.prototype.toArray=function(){for(var e=new Array(this.elements.length/2),t=1,r=0;t0){var o,a=s.str.charAt(0);a in s.node.edges?o=s.node.edges[a]:(o=new e.TokenSet,s.node.edges[a]=o),1==s.str.length&&(o["final"]=!0),n.push({node:o,editsRemaining:s.editsRemaining,str:s.str.slice(1)})}if(0!=s.editsRemaining){if("*"in s.node.edges)var u=s.node.edges["*"];else{var u=new e.TokenSet;s.node.edges["*"]=u}if(0==s.str.length&&(u["final"]=!0),n.push({node:u,editsRemaining:s.editsRemaining-1,str:s.str}),s.str.length>1&&n.push({node:s.node,editsRemaining:s.editsRemaining-1,str:s.str.slice(1)}),1==s.str.length&&(s.node["final"]=!0),s.str.length>=1){if("*"in s.node.edges)var l=s.node.edges["*"];else{var l=new e.TokenSet;s.node.edges["*"]=l}1==s.str.length&&(l["final"]=!0),n.push({node:l,editsRemaining:s.editsRemaining-1,str:s.str.slice(1)})}if(s.str.length>1){var c,h=s.str.charAt(0),d=s.str.charAt(1);d in s.node.edges?c=s.node.edges[d]:(c=new e.TokenSet,s.node.edges[d]=c),1==s.str.length&&(c["final"]=!0),n.push({node:c,editsRemaining:s.editsRemaining-1,str:h+s.str.slice(2)})}}}return i},e.TokenSet.fromString=function(t){for(var r=new e.TokenSet,i=r,n=0,s=t.length;n=e;t--){var r=this.uncheckedNodes[t],i=r.child.toString();i in this.minimizedNodes?r.parent.edges[r["char"]]=this.minimizedNodes[i]:(r.child._str=i,this.minimizedNodes[i]=r.child),this.uncheckedNodes.pop()}},e.Index=function(e){this.invertedIndex=e.invertedIndex,this.fieldVectors=e.fieldVectors,this.tokenSet=e.tokenSet,this.fields=e.fields,this.pipeline=e.pipeline},e.Index.prototype.search=function(t){return this.query(function(r){var i=new e.QueryParser(t,r);i.parse()})},e.Index.prototype.query=function(t){for(var r=new 
e.Query(this.fields),i=Object.create(null),n=Object.create(null),s=Object.create(null),o=Object.create(null),a=Object.create(null),u=0;u1?this._b=1:this._b=e},e.Builder.prototype.k1=function(e){this._k1=e},e.Builder.prototype.add=function(t,r){var i=t[this._ref],n=Object.keys(this._fields);this._documents[i]=r||{},this.documentCount+=1;for(var s=0;s=this.length)return e.QueryLexer.EOS;var t=this.str.charAt(this.pos);return this.pos+=1,t},e.QueryLexer.prototype.width=function(){return this.pos-this.start},e.QueryLexer.prototype.ignore=function(){this.start==this.pos&&(this.pos+=1),this.start=this.pos},e.QueryLexer.prototype.backup=function(){this.pos-=1},e.QueryLexer.prototype.acceptDigitRun=function(){var t,r;do t=this.next(),r=t.charCodeAt(0);while(r>47&&r<58);t!=e.QueryLexer.EOS&&this.backup()},e.QueryLexer.prototype.more=function(){return this.pos1&&(t.backup(),t.emit(e.QueryLexer.TERM)),t.ignore(),t.more())return e.QueryLexer.lexText},e.QueryLexer.lexEditDistance=function(t){return t.ignore(),t.acceptDigitRun(),t.emit(e.QueryLexer.EDIT_DISTANCE),e.QueryLexer.lexText},e.QueryLexer.lexBoost=function(t){return t.ignore(),t.acceptDigitRun(),t.emit(e.QueryLexer.BOOST),e.QueryLexer.lexText},e.QueryLexer.lexEOS=function(t){t.width()>0&&t.emit(e.QueryLexer.TERM)},e.QueryLexer.termSeparator=e.tokenizer.separator,e.QueryLexer.lexText=function(t){for(;;){var r=t.next();if(r==e.QueryLexer.EOS)return e.QueryLexer.lexEOS;if(92!=r.charCodeAt(0)){if(":"==r)return e.QueryLexer.lexField;if("~"==r)return t.backup(),t.width()>0&&t.emit(e.QueryLexer.TERM),e.QueryLexer.lexEditDistance;if("^"==r)return t.backup(),t.width()>0&&t.emit(e.QueryLexer.TERM),e.QueryLexer.lexBoost;if("+"==r&&1===t.width())return t.emit(e.QueryLexer.PRESENCE),e.QueryLexer.lexText;if("-"==r&&1===t.width())return t.emit(e.QueryLexer.PRESENCE),e.QueryLexer.lexText;if(r.match(e.QueryLexer.termSeparator))return e.QueryLexer.lexTerm}else t.escapeCharacter()}},e.QueryParser=function(t,r){this.lexer=new 
e.QueryLexer(t),this.query=r,this.currentClause={},this.lexemeIdx=0},e.QueryParser.prototype.parse=function(){this.lexer.run(),this.lexemes=this.lexer.lexemes;for(var t=e.QueryParser.parseClause;t;)t=t(this);return this.query},e.QueryParser.prototype.peekLexeme=function(){return this.lexemes[this.lexemeIdx]},e.QueryParser.prototype.consumeLexeme=function(){var e=this.peekLexeme();return this.lexemeIdx+=1,e},e.QueryParser.prototype.nextClause=function(){var e=this.currentClause;this.query.clause(e),this.currentClause={}},e.QueryParser.parseClause=function(t){var r=t.peekLexeme();if(void 0!=r)switch(r.type){case e.QueryLexer.PRESENCE:return e.QueryParser.parsePresence;case e.QueryLexer.FIELD:return e.QueryParser.parseField;case e.QueryLexer.TERM:return e.QueryParser.parseTerm;default:var i="expected either a field or a term, found "+r.type;throw r.str.length>=1&&(i+=" with value '"+r.str+"'"),new e.QueryParseError(i,r.start,r.end)}},e.QueryParser.parsePresence=function(t){var r=t.consumeLexeme();if(void 0!=r){switch(r.str){case"-":t.currentClause.presence=e.Query.presence.PROHIBITED;break;case"+":t.currentClause.presence=e.Query.presence.REQUIRED;break;default:var i="unrecognised presence operator'"+r.str+"'";throw new e.QueryParseError(i,r.start,r.end)}var n=t.peekLexeme();if(void 0==n){var i="expecting term or field, found nothing";throw new e.QueryParseError(i,r.start,r.end)}switch(n.type){case e.QueryLexer.FIELD:return e.QueryParser.parseField;case e.QueryLexer.TERM:return e.QueryParser.parseTerm;default:var i="expecting term or field, found '"+n.type+"'";throw new e.QueryParseError(i,n.start,n.end)}}},e.QueryParser.parseField=function(t){var r=t.consumeLexeme();if(void 0!=r){if(t.query.allFields.indexOf(r.str)==-1){var i=t.query.allFields.map(function(e){return"'"+e+"'"}).join(", "),n="unrecognised field '"+r.str+"', possible fields: "+i;throw new e.QueryParseError(n,r.start,r.end)}t.currentClause.fields=[r.str];var s=t.peekLexeme();if(void 0==s){var 
n="expecting term, found nothing";throw new e.QueryParseError(n,r.start,r.end)}switch(s.type){case e.QueryLexer.TERM:return e.QueryParser.parseTerm;default:var n="expecting term, found '"+s.type+"'";throw new e.QueryParseError(n,s.start,s.end)}}},e.QueryParser.parseTerm=function(t){var r=t.consumeLexeme();if(void 0!=r){t.currentClause.term=r.str.toLowerCase(),r.str.indexOf("*")!=-1&&(t.currentClause.usePipeline=!1);var i=t.peekLexeme();if(void 0==i)return void t.nextClause();switch(i.type){case e.QueryLexer.TERM:return t.nextClause(),e.QueryParser.parseTerm;case e.QueryLexer.FIELD:return t.nextClause(),e.QueryParser.parseField;case e.QueryLexer.EDIT_DISTANCE:return e.QueryParser.parseEditDistance;case e.QueryLexer.BOOST:return e.QueryParser.parseBoost;case e.QueryLexer.PRESENCE:return t.nextClause(),e.QueryParser.parsePresence;default:var n="Unexpected lexeme type '"+i.type+"'";throw new e.QueryParseError(n,i.start,i.end)}}},e.QueryParser.parseEditDistance=function(t){var r=t.consumeLexeme();if(void 0!=r){var i=parseInt(r.str,10);if(isNaN(i)){var n="edit distance must be numeric";throw new e.QueryParseError(n,r.start,r.end)}t.currentClause.editDistance=i;var s=t.peekLexeme();if(void 0==s)return void t.nextClause();switch(s.type){case e.QueryLexer.TERM:return t.nextClause(),e.QueryParser.parseTerm;case e.QueryLexer.FIELD:return t.nextClause(),e.QueryParser.parseField;case e.QueryLexer.EDIT_DISTANCE:return e.QueryParser.parseEditDistance;case e.QueryLexer.BOOST:return e.QueryParser.parseBoost;case e.QueryLexer.PRESENCE:return t.nextClause(),e.QueryParser.parsePresence;default:var n="Unexpected lexeme type '"+s.type+"'";throw new e.QueryParseError(n,s.start,s.end)}}},e.QueryParser.parseBoost=function(t){var r=t.consumeLexeme();if(void 0!=r){var i=parseInt(r.str,10);if(isNaN(i)){var n="boost must be numeric";throw new e.QueryParseError(n,r.start,r.end)}t.currentClause.boost=i;var s=t.peekLexeme();if(void 0==s)return void t.nextClause();switch(s.type){case 
e.QueryLexer.TERM:return t.nextClause(),e.QueryParser.parseTerm;case e.QueryLexer.FIELD:return t.nextClause(),e.QueryParser.parseField;case e.QueryLexer.EDIT_DISTANCE:return e.QueryParser.parseEditDistance;case e.QueryLexer.BOOST:return e.QueryParser.parseBoost;case e.QueryLexer.PRESENCE:return t.nextClause(),e.QueryParser.parsePresence;default:var n="Unexpected lexeme type '"+s.type+"'";throw new e.QueryParseError(n,s.start,s.end)}}},function(e,t){"function"==typeof define&&define.amd?define(t):"object"==typeof exports?module.exports=t():e.lunr=t()}(this,function(){return e})}(); diff --git a/docs/js/typeahead.jquery.js b/docs/js/typeahead.jquery.js new file mode 100644 index 00000000..bcb734be --- /dev/null +++ b/docs/js/typeahead.jquery.js @@ -0,0 +1,1695 @@ +/*! + * typeahead.js 1.3.3 + * https://github.com/corejavascript/typeahead.js + * Copyright 2013-2024 Twitter, Inc. and other contributors; Licensed MIT + */ + + +(function(root, factory) { + if (typeof define === "function" && define.amd) { + define([ "jquery" ], function(a0) { + return factory(a0); + }); + } else if (typeof module === "object" && module.exports) { + module.exports = factory(require("jquery")); + } else { + factory(root["jQuery"]); + } +})(this, function($) { + var _ = function() { + "use strict"; + return { + isMsie: function() { + return /(msie|trident)/i.test(navigator.userAgent) ? 
navigator.userAgent.match(/(msie |rv:)(\d+(.\d+)?)/i)[2] : false; + }, + isBlankString: function(str) { + return !str || /^\s*$/.test(str); + }, + escapeRegExChars: function(str) { + return str.replace(/[\-\[\]\/\{\}\(\)\*\+\?\.\\\^\$\|]/g, "\\$&"); + }, + isString: function(obj) { + return typeof obj === "string"; + }, + isNumber: function(obj) { + return typeof obj === "number"; + }, + isArray: $.isArray, + isFunction: $.isFunction, + isObject: $.isPlainObject, + isUndefined: function(obj) { + return typeof obj === "undefined"; + }, + isElement: function(obj) { + return !!(obj && obj.nodeType === 1); + }, + isJQuery: function(obj) { + return obj instanceof $; + }, + toStr: function toStr(s) { + return _.isUndefined(s) || s === null ? "" : s + ""; + }, + bind: $.proxy, + each: function(collection, cb) { + $.each(collection, reverseArgs); + function reverseArgs(index, value) { + return cb(value, index); + } + }, + map: $.map, + filter: $.grep, + every: function(obj, test) { + var result = true; + if (!obj) { + return result; + } + $.each(obj, function(key, val) { + if (!(result = test.call(null, val, key, obj))) { + return false; + } + }); + return !!result; + }, + some: function(obj, test) { + var result = false; + if (!obj) { + return result; + } + $.each(obj, function(key, val) { + if (result = test.call(null, val, key, obj)) { + return false; + } + }); + return !!result; + }, + mixin: $.extend, + identity: function(x) { + return x; + }, + clone: function(obj) { + return $.extend(true, {}, obj); + }, + getIdGenerator: function() { + var counter = 0; + return function() { + return counter++; + }; + }, + templatify: function templatify(obj) { + return $.isFunction(obj) ? 
obj : template; + function template() { + return String(obj); + } + }, + defer: function(fn) { + setTimeout(fn, 0); + }, + debounce: function(func, wait, immediate) { + var timeout, result; + return function() { + var context = this, args = arguments, later, callNow; + later = function() { + timeout = null; + if (!immediate) { + result = func.apply(context, args); + } + }; + callNow = immediate && !timeout; + clearTimeout(timeout); + timeout = setTimeout(later, wait); + if (callNow) { + result = func.apply(context, args); + } + return result; + }; + }, + throttle: function(func, wait) { + var context, args, timeout, result, previous, later; + previous = 0; + later = function() { + previous = new Date(); + timeout = null; + result = func.apply(context, args); + }; + return function() { + var now = new Date(), remaining = wait - (now - previous); + context = this; + args = arguments; + if (remaining <= 0) { + clearTimeout(timeout); + timeout = null; + previous = now; + result = func.apply(context, args); + } else if (!timeout) { + timeout = setTimeout(later, remaining); + } + return result; + }; + }, + stringify: function(val) { + return _.isString(val) ? val : JSON.stringify(val); + }, + guid: function() { + function _p8(s) { + var p = (Math.random().toString(16) + "000000000").substr(2, 8); + return s ? 
"-" + p.substr(0, 4) + "-" + p.substr(4, 4) : p; + } + return "tt-" + _p8() + _p8(true) + _p8(true) + _p8(); + }, + noop: function() {} + }; + }(); + var WWW = function() { + "use strict"; + var defaultClassNames = { + wrapper: "twitter-typeahead", + input: "tt-input", + hint: "tt-hint", + menu: "tt-menu", + dataset: "tt-dataset", + suggestion: "tt-suggestion", + selectable: "tt-selectable", + empty: "tt-empty", + open: "tt-open", + cursor: "tt-cursor", + highlight: "tt-highlight" + }; + return build; + function build(o) { + var www, classes; + classes = _.mixin({}, defaultClassNames, o); + www = { + css: buildCss(), + classes: classes, + html: buildHtml(classes), + selectors: buildSelectors(classes) + }; + return { + css: www.css, + html: www.html, + classes: www.classes, + selectors: www.selectors, + mixin: function(o) { + _.mixin(o, www); + } + }; + } + function buildHtml(c) { + return { + wrapper: '', + menu: '
' + }; + } + function buildSelectors(classes) { + var selectors = {}; + _.each(classes, function(v, k) { + selectors[k] = "." + v; + }); + return selectors; + } + function buildCss() { + var css = { + wrapper: { + position: "relative", + display: "inline-block" + }, + hint: { + position: "absolute", + top: "0", + left: "0", + borderColor: "transparent", + boxShadow: "none", + opacity: "1" + }, + input: { + position: "relative", + verticalAlign: "top", + backgroundColor: "transparent" + }, + inputWithNoHint: { + position: "relative", + verticalAlign: "top" + }, + menu: { + position: "absolute", + top: "100%", + left: "0", + zIndex: "100", + display: "none" + }, + ltr: { + left: "0", + right: "auto" + }, + rtl: { + left: "auto", + right: " 0" + } + }; + if (_.isMsie()) { + _.mixin(css.input, { + backgroundImage: "url(data:image/gif;base64,R0lGODlhAQABAIAAAAAAAP///yH5BAEAAAAALAAAAAABAAEAAAIBRAA7)" + }); + } + return css; + } + }(); + var EventBus = function() { + "use strict"; + var namespace, deprecationMap; + namespace = "typeahead:"; + deprecationMap = { + render: "rendered", + cursorchange: "cursorchanged", + select: "selected", + autocomplete: "autocompleted" + }; + function EventBus(o) { + if (!o || !o.el) { + $.error("EventBus initialized without el"); + } + this.$el = $(o.el); + } + _.mixin(EventBus.prototype, { + _trigger: function(type, args) { + var $e = $.Event(namespace + type); + this.$el.trigger.call(this.$el, $e, args || []); + return $e; + }, + before: function(type) { + var args, $e; + args = [].slice.call(arguments, 1); + $e = this._trigger("before" + type, args); + return $e.isDefaultPrevented(); + }, + trigger: function(type) { + var deprecatedType; + this._trigger(type, [].slice.call(arguments, 1)); + if (deprecatedType = deprecationMap[type]) { + this._trigger(deprecatedType, [].slice.call(arguments, 1)); + } + } + }); + return EventBus; + }(); + var EventEmitter = function() { + "use strict"; + var splitter = /\s+/, nextTick = getNextTick(); + 
return { + onSync: onSync, + onAsync: onAsync, + off: off, + trigger: trigger + }; + function on(method, types, cb, context) { + var type; + if (!cb) { + return this; + } + types = types.split(splitter); + cb = context ? bindContext(cb, context) : cb; + this._callbacks = this._callbacks || {}; + while (type = types.shift()) { + this._callbacks[type] = this._callbacks[type] || { + sync: [], + async: [] + }; + this._callbacks[type][method].push(cb); + } + return this; + } + function onAsync(types, cb, context) { + return on.call(this, "async", types, cb, context); + } + function onSync(types, cb, context) { + return on.call(this, "sync", types, cb, context); + } + function off(types) { + var type; + if (!this._callbacks) { + return this; + } + types = types.split(splitter); + while (type = types.shift()) { + delete this._callbacks[type]; + } + return this; + } + function trigger(types) { + var type, callbacks, args, syncFlush, asyncFlush; + if (!this._callbacks) { + return this; + } + types = types.split(splitter); + args = [].slice.call(arguments, 1); + while ((type = types.shift()) && (callbacks = this._callbacks[type])) { + syncFlush = getFlush(callbacks.sync, this, [ type ].concat(args)); + asyncFlush = getFlush(callbacks.async, this, [ type ].concat(args)); + syncFlush() && nextTick(asyncFlush); + } + return this; + } + function getFlush(callbacks, context, args) { + return flush; + function flush() { + var cancelled; + for (var i = 0, len = callbacks.length; !cancelled && i < len; i += 1) { + cancelled = callbacks[i].apply(context, args) === false; + } + return !cancelled; + } + } + function getNextTick() { + var nextTickFn; + if (window.setImmediate) { + nextTickFn = function nextTickSetImmediate(fn) { + setImmediate(function() { + fn(); + }); + }; + } else { + nextTickFn = function nextTickSetTimeout(fn) { + setTimeout(function() { + fn(); + }, 0); + }; + } + return nextTickFn; + } + function bindContext(fn, context) { + return fn.bind ? 
fn.bind(context) : function() { + fn.apply(context, [].slice.call(arguments, 0)); + }; + } + }(); + var highlight = function(doc) { + "use strict"; + var defaults = { + node: null, + pattern: null, + tagName: "strong", + className: null, + wordsOnly: false, + caseSensitive: false, + diacriticInsensitive: false + }; + var accented = { + A: "[AaªÀ-Åà-åĀ-ąǍǎȀ-ȃȦȧᴬᵃḀḁẚẠ-ảₐ℀℁℻⒜Ⓐⓐ㍱-㍴㎀-㎄㎈㎉㎩-㎯㏂㏊㏟㏿Aa]", + B: "[BbᴮᵇḂ-ḇℬ⒝Ⓑⓑ㍴㎅-㎇㏃㏈㏔㏝Bb]", + C: "[CcÇçĆ-čᶜ℀ℂ℃℅℆ℭⅭⅽ⒞Ⓒⓒ㍶㎈㎉㎝㎠㎤㏄-㏇Cc]", + D: "[DdĎďDŽ-džDZ-dzᴰᵈḊ-ḓⅅⅆⅮⅾ⒟Ⓓⓓ㋏㍲㍷-㍹㎗㎭-㎯㏅㏈Dd]", + E: "[EeÈ-Ëè-ëĒ-ěȄ-ȇȨȩᴱᵉḘ-ḛẸ-ẽₑ℡ℯℰⅇ⒠Ⓔⓔ㉐㋍㋎Ee]", + F: "[FfᶠḞḟ℉ℱ℻⒡Ⓕⓕ㎊-㎌㎙ff-fflFf]", + G: "[GgĜ-ģǦǧǴǵᴳᵍḠḡℊ⒢Ⓖⓖ㋌㋍㎇㎍-㎏㎓㎬㏆㏉㏒㏿Gg]", + H: "[HhĤĥȞȟʰᴴḢ-ḫẖℋ-ℎ⒣Ⓗⓗ㋌㍱㎐-㎔㏊㏋㏗Hh]", + I: "[IiÌ-Ïì-ïĨ-İIJijǏǐȈ-ȋᴵᵢḬḭỈ-ịⁱℐℑℹⅈⅠ-ⅣⅥ-ⅨⅪⅫⅰ-ⅳⅵ-ⅸⅺⅻ⒤Ⓘⓘ㍺㏌㏕fiffiIi]", + J: "[JjIJ-ĵLJ-njǰʲᴶⅉ⒥ⒿⓙⱼJj]", + K: "[KkĶķǨǩᴷᵏḰ-ḵK⒦Ⓚⓚ㎄㎅㎉㎏㎑㎘㎞㎢㎦㎪㎸㎾㏀㏆㏍-㏏Kk]", + L: "[LlĹ-ŀLJ-ljˡᴸḶḷḺ-ḽℒℓ℡Ⅼⅼ⒧Ⓛⓛ㋏㎈㎉㏐-㏓㏕㏖㏿flfflLl]", + M: "[MmᴹᵐḾ-ṃ℠™ℳⅯⅿ⒨Ⓜⓜ㍷-㍹㎃㎆㎎㎒㎖㎙-㎨㎫㎳㎷㎹㎽㎿㏁㏂㏎㏐㏔-㏖㏘㏙㏞㏟Mm]", + N: "[NnÑñŃ-ʼnNJ-njǸǹᴺṄ-ṋⁿℕ№⒩Ⓝⓝ㎁㎋㎚㎱㎵㎻㏌㏑Nn]", + O: "[OoºÒ-Öò-öŌ-őƠơǑǒǪǫȌ-ȏȮȯᴼᵒỌ-ỏₒ℅№ℴ⒪Ⓞⓞ㍵㏇㏒㏖Oo]", + P: "[PpᴾᵖṔ-ṗℙ⒫Ⓟⓟ㉐㍱㍶㎀㎊㎩-㎬㎰㎴㎺㏋㏗-㏚Pp]", + Q: "[Qqℚ⒬Ⓠⓠ㏃Qq]", + R: "[RrŔ-řȐ-ȓʳᴿᵣṘ-ṛṞṟ₨ℛ-ℝ⒭Ⓡⓡ㋍㍴㎭-㎯㏚㏛Rr]", + S: "[SsŚ-šſȘșˢṠ-ṣ₨℁℠⒮Ⓢⓢ㎧㎨㎮-㎳㏛㏜stSs]", + T: "[TtŢ-ťȚțᵀᵗṪ-ṱẗ℡™⒯Ⓣⓣ㉐㋏㎔㏏ſtstTt]", + U: "[UuÙ-Üù-üŨ-ųƯưǓǔȔ-ȗᵁᵘᵤṲ-ṷỤ-ủ℆⒰Ⓤⓤ㍳㍺Uu]", + V: "[VvᵛᵥṼ-ṿⅣ-Ⅷⅳ-ⅷ⒱Ⓥⓥⱽ㋎㍵㎴-㎹㏜㏞Vv]", + W: "[WwŴŵʷᵂẀ-ẉẘ⒲Ⓦⓦ㎺-㎿㏝Ww]", + X: "[XxˣẊ-ẍₓ℻Ⅸ-Ⅻⅸ-ⅻ⒳Ⓧⓧ㏓Xx]", + Y: "[YyÝýÿŶ-ŸȲȳʸẎẏẙỲ-ỹ⒴Ⓨⓨ㏉Yy]", + Z: "[ZzŹ-žDZ-dzᶻẐ-ẕℤℨ⒵Ⓩⓩ㎐-㎔Zz]" + }; + return function hightlight(o) { + var regex; + o = _.mixin({}, defaults, o); + if (!o.node || !o.pattern) { + return; + } + o.pattern = _.isArray(o.pattern) ? 
o.pattern : [ o.pattern ]; + regex = getRegex(o.pattern, o.caseSensitive, o.wordsOnly, o.diacriticInsensitive); + traverse(o.node, hightlightTextNode); + function hightlightTextNode(textNode) { + var match, patternNode, wrapperNode; + if (match = regex.exec(textNode.data)) { + wrapperNode = doc.createElement(o.tagName); + o.className && (wrapperNode.className = o.className); + patternNode = textNode.splitText(match.index); + patternNode.splitText(match[0].length); + wrapperNode.appendChild(patternNode.cloneNode(true)); + textNode.parentNode.replaceChild(wrapperNode, patternNode); + } + return !!match; + } + function traverse(el, hightlightTextNode) { + var childNode, TEXT_NODE_TYPE = 3; + for (var i = 0; i < el.childNodes.length; i++) { + childNode = el.childNodes[i]; + if (childNode.nodeType === TEXT_NODE_TYPE) { + i += hightlightTextNode(childNode) ? 1 : 0; + } else { + traverse(childNode, hightlightTextNode); + } + } + } + }; + function accent_replacer(chr) { + return accented[chr.toUpperCase()] || chr; + } + function getRegex(patterns, caseSensitive, wordsOnly, diacriticInsensitive) { + var escapedPatterns = [], regexStr; + for (var i = 0, len = patterns.length; i < len; i++) { + var escapedWord = _.escapeRegExChars(patterns[i]); + if (diacriticInsensitive) { + escapedWord = escapedWord.replace(/\S/g, accent_replacer); + } + escapedPatterns.push(escapedWord); + } + regexStr = wordsOnly ? "\\b(" + escapedPatterns.join("|") + ")\\b" : "(" + escapedPatterns.join("|") + ")"; + return caseSensitive ? 
new RegExp(regexStr) : new RegExp(regexStr, "i"); + } + }(window.document); + var Input = function() { + "use strict"; + var specialKeyCodeMap; + specialKeyCodeMap = { + 9: "tab", + 27: "esc", + 37: "left", + 39: "right", + 13: "enter", + 38: "up", + 40: "down" + }; + function Input(o, www) { + var id; + o = o || {}; + if (!o.input) { + $.error("input is missing"); + } + www.mixin(this); + this.$hint = $(o.hint); + this.$input = $(o.input); + this.$menu = $(o.menu); + id = this.$input.attr("id") || _.guid(); + this.$menu.attr("id", id + "_listbox"); + this.$hint.attr({ + "aria-hidden": true + }); + this.$input.attr({ + "aria-owns": id + "_listbox", + "aria-controls": id + "_listbox", + role: "combobox", + "aria-autocomplete": "list", + "aria-expanded": false + }); + this.query = this.$input.val(); + this.queryWhenFocused = this.hasFocus() ? this.query : null; + this.$overflowHelper = buildOverflowHelper(this.$input); + this._checkLanguageDirection(); + if (this.$hint.length === 0) { + this.setHint = this.getHint = this.clearHint = this.clearHintIfInvalid = _.noop; + } + this.onSync("cursorchange", this._updateDescendent); + } + Input.normalizeQuery = function(str) { + return _.toStr(str).replace(/^\s*/g, "").replace(/\s{2,}/g, " "); + }; + _.mixin(Input.prototype, EventEmitter, { + _onBlur: function onBlur() { + this.resetInputValue(); + this.trigger("blurred"); + }, + _onFocus: function onFocus() { + this.queryWhenFocused = this.query; + this.trigger("focused"); + }, + _onKeydown: function onKeydown($e) { + var keyName = specialKeyCodeMap[$e.which || $e.keyCode]; + this._managePreventDefault(keyName, $e); + if (keyName && this._shouldTrigger(keyName, $e)) { + this.trigger(keyName + "Keyed", $e); + } + }, + _onInput: function onInput() { + this._setQuery(this.getInputValue()); + this.clearHintIfInvalid(); + this._checkLanguageDirection(); + }, + _managePreventDefault: function managePreventDefault(keyName, $e) { + var preventDefault; + switch (keyName) { + case 
"up": + case "down": + preventDefault = !withModifier($e); + break; + + default: + preventDefault = false; + } + preventDefault && $e.preventDefault(); + }, + _shouldTrigger: function shouldTrigger(keyName, $e) { + var trigger; + switch (keyName) { + case "tab": + trigger = !withModifier($e); + break; + + default: + trigger = true; + } + return trigger; + }, + _checkLanguageDirection: function checkLanguageDirection() { + var dir = (this.$input.css("direction") || "ltr").toLowerCase(); + if (this.dir !== dir) { + this.dir = dir; + this.$hint.attr("dir", dir); + this.trigger("langDirChanged", dir); + } + }, + _setQuery: function setQuery(val, silent) { + var areEquivalent, hasDifferentWhitespace; + areEquivalent = areQueriesEquivalent(val, this.query); + hasDifferentWhitespace = areEquivalent ? this.query.length !== val.length : false; + this.query = val; + if (!silent && !areEquivalent) { + this.trigger("queryChanged", this.query); + } else if (!silent && hasDifferentWhitespace) { + this.trigger("whitespaceChanged", this.query); + } + }, + _updateDescendent: function updateDescendent(event, id) { + this.$input.attr("aria-activedescendant", id); + }, + bind: function() { + var that = this, onBlur, onFocus, onKeydown, onInput; + onBlur = _.bind(this._onBlur, this); + onFocus = _.bind(this._onFocus, this); + onKeydown = _.bind(this._onKeydown, this); + onInput = _.bind(this._onInput, this); + this.$input.on("blur.tt", onBlur).on("focus.tt", onFocus).on("keydown.tt", onKeydown); + if (!_.isMsie() || _.isMsie() > 9) { + this.$input.on("input.tt", onInput); + } else { + this.$input.on("keydown.tt keypress.tt cut.tt paste.tt", function($e) { + if (specialKeyCodeMap[$e.which || $e.keyCode]) { + return; + } + _.defer(_.bind(that._onInput, that, $e)); + }); + } + return this; + }, + focus: function focus() { + this.$input.focus(); + }, + blur: function blur() { + this.$input.blur(); + }, + getLangDir: function getLangDir() { + return this.dir; + }, + getQuery: function 
getQuery() { + return this.query || ""; + }, + setQuery: function setQuery(val, silent) { + this.setInputValue(val); + this._setQuery(val, silent); + }, + hasQueryChangedSinceLastFocus: function hasQueryChangedSinceLastFocus() { + return this.query !== this.queryWhenFocused; + }, + getInputValue: function getInputValue() { + return this.$input.val(); + }, + setInputValue: function setInputValue(value) { + this.$input.val(value); + this.clearHintIfInvalid(); + this._checkLanguageDirection(); + }, + resetInputValue: function resetInputValue() { + this.setInputValue(this.query); + }, + getHint: function getHint() { + return this.$hint.val(); + }, + setHint: function setHint(value) { + this.$hint.val(value); + }, + clearHint: function clearHint() { + this.setHint(""); + }, + clearHintIfInvalid: function clearHintIfInvalid() { + var val, hint, valIsPrefixOfHint, isValid; + val = this.getInputValue(); + hint = this.getHint(); + valIsPrefixOfHint = val !== hint && hint.indexOf(val) === 0; + isValid = val !== "" && valIsPrefixOfHint && !this.hasOverflow(); + !isValid && this.clearHint(); + }, + hasFocus: function hasFocus() { + return this.$input.is(":focus"); + }, + hasOverflow: function hasOverflow() { + var constraint = this.$input.width() - 2; + this.$overflowHelper.text(this.getInputValue()); + return this.$overflowHelper.width() >= constraint; + }, + isCursorAtEnd: function() { + var valueLength, selectionStart, range; + valueLength = this.$input.val().length; + selectionStart = this.$input[0].selectionStart; + if (_.isNumber(selectionStart)) { + return selectionStart === valueLength; + } else if (document.selection) { + range = document.selection.createRange(); + range.moveStart("character", -valueLength); + return valueLength === range.text.length; + } + return true; + }, + destroy: function destroy() { + this.$hint.off(".tt"); + this.$input.off(".tt"); + this.$overflowHelper.remove(); + this.$hint = this.$input = this.$overflowHelper = $("
"); + }, + setAriaExpanded: function setAriaExpanded(value) { + this.$input.attr("aria-expanded", value); + } + }); + return Input; + function buildOverflowHelper($input) { + return $('').css({ + position: "absolute", + visibility: "hidden", + whiteSpace: "pre", + fontFamily: $input.css("font-family"), + fontSize: $input.css("font-size"), + fontStyle: $input.css("font-style"), + fontVariant: $input.css("font-variant"), + fontWeight: $input.css("font-weight"), + wordSpacing: $input.css("word-spacing"), + letterSpacing: $input.css("letter-spacing"), + textIndent: $input.css("text-indent"), + textRendering: $input.css("text-rendering"), + textTransform: $input.css("text-transform") + }).insertAfter($input); + } + function areQueriesEquivalent(a, b) { + return Input.normalizeQuery(a) === Input.normalizeQuery(b); + } + function withModifier($e) { + return $e.altKey || $e.ctrlKey || $e.metaKey || $e.shiftKey; + } + }(); + var Dataset = function() { + "use strict"; + var keys, nameGenerator; + keys = { + dataset: "tt-selectable-dataset", + val: "tt-selectable-display", + obj: "tt-selectable-object" + }; + nameGenerator = _.getIdGenerator(); + function Dataset(o, www) { + o = o || {}; + o.templates = o.templates || {}; + o.templates.notFound = o.templates.notFound || o.templates.empty; + if (!o.source) { + $.error("missing source"); + } + if (!o.node) { + $.error("missing node"); + } + if (o.name && !isValidName(o.name)) { + $.error("invalid dataset name: " + o.name); + } + www.mixin(this); + this.highlight = !!o.highlight; + this.name = _.toStr(o.name || nameGenerator()); + this.limit = o.limit || 5; + this.displayFn = getDisplayFn(o.display || o.displayKey); + this.templates = getTemplates(o.templates, this.displayFn); + this.source = o.source.__ttAdapter ? o.source.__ttAdapter() : o.source; + this.async = _.isUndefined(o.async) ? 
this.source.length > 2 : !!o.async; + this._resetLastSuggestion(); + this.$el = $(o.node).attr("role", "presentation").addClass(this.classes.dataset).addClass(this.classes.dataset + "-" + this.name); + } + Dataset.extractData = function extractData(el) { + var $el = $(el); + if ($el.data(keys.obj)) { + return { + dataset: $el.data(keys.dataset) || "", + val: $el.data(keys.val) || "", + obj: $el.data(keys.obj) || null + }; + } + return null; + }; + _.mixin(Dataset.prototype, EventEmitter, { + _overwrite: function overwrite(query, suggestions) { + suggestions = suggestions || []; + if (suggestions.length) { + this._renderSuggestions(query, suggestions); + } else if (this.async && this.templates.pending) { + this._renderPending(query); + } else if (!this.async && this.templates.notFound) { + this._renderNotFound(query); + } else { + this._empty(); + } + this.trigger("rendered", suggestions, false, this.name); + }, + _append: function append(query, suggestions) { + suggestions = suggestions || []; + if (suggestions.length && this.$lastSuggestion.length) { + this._appendSuggestions(query, suggestions); + } else if (suggestions.length) { + this._renderSuggestions(query, suggestions); + } else if (!this.$lastSuggestion.length && this.templates.notFound) { + this._renderNotFound(query); + } + this.trigger("rendered", suggestions, true, this.name); + }, + _renderSuggestions: function renderSuggestions(query, suggestions) { + var $fragment; + $fragment = this._getSuggestionsFragment(query, suggestions); + this.$lastSuggestion = $fragment.children().last(); + this.$el.html($fragment).prepend(this._getHeader(query, suggestions)).append(this._getFooter(query, suggestions)); + }, + _appendSuggestions: function appendSuggestions(query, suggestions) { + var $fragment, $lastSuggestion; + $fragment = this._getSuggestionsFragment(query, suggestions); + $lastSuggestion = $fragment.children().last(); + this.$lastSuggestion.after($fragment); + this.$lastSuggestion = $lastSuggestion; + 
}, + _renderPending: function renderPending(query) { + var template = this.templates.pending; + this._resetLastSuggestion(); + template && this.$el.html(template({ + query: query, + dataset: this.name + })); + }, + _renderNotFound: function renderNotFound(query) { + var template = this.templates.notFound; + this._resetLastSuggestion(); + template && this.$el.html(template({ + query: query, + dataset: this.name + })); + }, + _empty: function empty() { + this.$el.empty(); + this._resetLastSuggestion(); + }, + _getSuggestionsFragment: function getSuggestionsFragment(query, suggestions) { + var that = this, fragment; + fragment = document.createDocumentFragment(); + _.each(suggestions, function getSuggestionNode(suggestion) { + var $el, context; + context = that._injectQuery(query, suggestion); + $el = $(that.templates.suggestion(context)).data(keys.dataset, that.name).data(keys.obj, suggestion).data(keys.val, that.displayFn(suggestion)).addClass(that.classes.suggestion + " " + that.classes.selectable); + fragment.appendChild($el[0]); + }); + this.highlight && highlight({ + className: this.classes.highlight, + node: fragment, + pattern: query + }); + return $(fragment); + }, + _getFooter: function getFooter(query, suggestions) { + return this.templates.footer ? this.templates.footer({ + query: query, + suggestions: suggestions, + dataset: this.name + }) : null; + }, + _getHeader: function getHeader(query, suggestions) { + return this.templates.header ? this.templates.header({ + query: query, + suggestions: suggestions, + dataset: this.name + }) : null; + }, + _resetLastSuggestion: function resetLastSuggestion() { + this.$lastSuggestion = $(); + }, + _injectQuery: function injectQuery(query, obj) { + return _.isObject(obj) ? 
_.mixin({ + _query: query + }, obj) : obj; + }, + update: function update(query) { + var that = this, canceled = false, syncCalled = false, rendered = 0; + this.cancel(); + this.cancel = function cancel() { + canceled = true; + that.cancel = $.noop; + that.async && that.trigger("asyncCanceled", query, that.name); + }; + this.source(query, sync, async); + !syncCalled && sync([]); + function sync(suggestions) { + if (syncCalled) { + return; + } + syncCalled = true; + suggestions = (suggestions || []).slice(0, that.limit); + rendered = suggestions.length; + that._overwrite(query, suggestions); + if (rendered < that.limit && that.async) { + that.trigger("asyncRequested", query, that.name); + } + } + function async(suggestions) { + suggestions = suggestions || []; + if (!canceled && rendered < that.limit) { + that.cancel = $.noop; + var idx = Math.abs(rendered - that.limit); + rendered += idx; + that._append(query, suggestions.slice(0, idx)); + that.async && that.trigger("asyncReceived", query, that.name); + } + } + }, + cancel: $.noop, + clear: function clear() { + this._empty(); + this.cancel(); + this.trigger("cleared"); + }, + isEmpty: function isEmpty() { + return this.$el.is(":empty"); + }, + destroy: function destroy() { + this.$el = $("
"); + } + }); + return Dataset; + function getDisplayFn(display) { + display = display || _.stringify; + return _.isFunction(display) ? display : displayFn; + function displayFn(obj) { + return obj[display]; + } + } + function getTemplates(templates, displayFn) { + return { + notFound: templates.notFound && _.templatify(templates.notFound), + pending: templates.pending && _.templatify(templates.pending), + header: templates.header && _.templatify(templates.header), + footer: templates.footer && _.templatify(templates.footer), + suggestion: templates.suggestion ? userSuggestionTemplate : suggestionTemplate + }; + function userSuggestionTemplate(context) { + var template = templates.suggestion; + return $(template(context)).attr("id", _.guid()); + } + function suggestionTemplate(context) { + return $('
').attr("id", _.guid()).text(displayFn(context)); + } + } + function isValidName(str) { + return /^[_a-zA-Z0-9-]+$/.test(str); + } + }(); + var Menu = function() { + "use strict"; + function Menu(o, www) { + var that = this; + o = o || {}; + if (!o.node) { + $.error("node is required"); + } + www.mixin(this); + this.$node = $(o.node); + this.query = null; + this.datasets = _.map(o.datasets, initializeDataset); + function initializeDataset(oDataset) { + var node = that.$node.find(oDataset.node).first(); + oDataset.node = node.length ? node : $("
").appendTo(that.$node); + return new Dataset(oDataset, www); + } + } + _.mixin(Menu.prototype, EventEmitter, { + _onSelectableClick: function onSelectableClick($e) { + this.trigger("selectableClicked", $($e.currentTarget)); + }, + _onRendered: function onRendered(type, dataset, suggestions, async) { + this.$node.toggleClass(this.classes.empty, this._allDatasetsEmpty()); + this.trigger("datasetRendered", dataset, suggestions, async); + }, + _onCleared: function onCleared() { + this.$node.toggleClass(this.classes.empty, this._allDatasetsEmpty()); + this.trigger("datasetCleared"); + }, + _propagate: function propagate() { + this.trigger.apply(this, arguments); + }, + _allDatasetsEmpty: function allDatasetsEmpty() { + return _.every(this.datasets, _.bind(function isDatasetEmpty(dataset) { + var isEmpty = dataset.isEmpty(); + this.$node.attr("aria-expanded", !isEmpty); + return isEmpty; + }, this)); + }, + _getSelectables: function getSelectables() { + return this.$node.find(this.selectors.selectable); + }, + _removeCursor: function _removeCursor() { + var $selectable = this.getActiveSelectable(); + $selectable && $selectable.removeClass(this.classes.cursor); + }, + _ensureVisible: function ensureVisible($el) { + var elTop, elBottom, nodeScrollTop, nodeHeight; + elTop = $el.position().top; + elBottom = elTop + $el.outerHeight(true); + nodeScrollTop = this.$node.scrollTop(); + nodeHeight = this.$node.height() + parseInt(this.$node.css("paddingTop"), 10) + parseInt(this.$node.css("paddingBottom"), 10); + if (elTop < 0) { + this.$node.scrollTop(nodeScrollTop + elTop); + } else if (nodeHeight < elBottom) { + this.$node.scrollTop(nodeScrollTop + (elBottom - nodeHeight)); + } + }, + bind: function() { + var that = this, onSelectableClick; + onSelectableClick = _.bind(this._onSelectableClick, this); + this.$node.on("click.tt", this.selectors.selectable, onSelectableClick); + this.$node.on("mouseover", this.selectors.selectable, function() { + that.setCursor($(this)); + }); + 
this.$node.on("mouseleave", function() { + that._removeCursor(); + }); + _.each(this.datasets, function(dataset) { + dataset.onSync("asyncRequested", that._propagate, that).onSync("asyncCanceled", that._propagate, that).onSync("asyncReceived", that._propagate, that).onSync("rendered", that._onRendered, that).onSync("cleared", that._onCleared, that); + }); + return this; + }, + isOpen: function isOpen() { + return this.$node.hasClass(this.classes.open); + }, + open: function open() { + this.$node.scrollTop(0); + this.$node.addClass(this.classes.open); + }, + close: function close() { + this.$node.attr("aria-expanded", false); + this.$node.removeClass(this.classes.open); + this._removeCursor(); + }, + setLanguageDirection: function setLanguageDirection(dir) { + this.$node.attr("dir", dir); + }, + selectableRelativeToCursor: function selectableRelativeToCursor(delta) { + var $selectables, $oldCursor, oldIndex, newIndex; + $oldCursor = this.getActiveSelectable(); + $selectables = this._getSelectables(); + oldIndex = $oldCursor ? $selectables.index($oldCursor) : -1; + newIndex = oldIndex + delta; + newIndex = (newIndex + 1) % ($selectables.length + 1) - 1; + newIndex = newIndex < -1 ? $selectables.length - 1 : newIndex; + return newIndex === -1 ? null : $selectables.eq(newIndex); + }, + setCursor: function setCursor($selectable) { + this._removeCursor(); + if ($selectable = $selectable && $selectable.first()) { + $selectable.addClass(this.classes.cursor); + this._ensureVisible($selectable); + } + }, + getSelectableData: function getSelectableData($el) { + return $el && $el.length ? Dataset.extractData($el) : null; + }, + getActiveSelectable: function getActiveSelectable() { + var $selectable = this._getSelectables().filter(this.selectors.cursor).first(); + return $selectable.length ? $selectable : null; + }, + getTopSelectable: function getTopSelectable() { + var $selectable = this._getSelectables().first(); + return $selectable.length ? 
$selectable : null; + }, + update: function update(query) { + var isValidUpdate = query !== this.query; + if (isValidUpdate) { + this.query = query; + _.each(this.datasets, updateDataset); + } + return isValidUpdate; + function updateDataset(dataset) { + dataset.update(query); + } + }, + empty: function empty() { + _.each(this.datasets, clearDataset); + this.query = null; + this.$node.addClass(this.classes.empty); + function clearDataset(dataset) { + dataset.clear(); + } + }, + destroy: function destroy() { + this.$node.off(".tt"); + this.$node = $("
"); + _.each(this.datasets, destroyDataset); + function destroyDataset(dataset) { + dataset.destroy(); + } + } + }); + return Menu; + }(); + var Status = function() { + "use strict"; + function Status(options) { + this.$el = $("", { + role: "status", + "aria-live": "polite" + }).css({ + position: "absolute", + padding: "0", + border: "0", + height: "1px", + width: "1px", + "margin-bottom": "-1px", + "margin-right": "-1px", + overflow: "hidden", + clip: "rect(0 0 0 0)", + "white-space": "nowrap" + }); + options.$input.after(this.$el); + _.each(options.menu.datasets, _.bind(function(dataset) { + if (dataset.onSync) { + dataset.onSync("rendered", _.bind(this.update, this)); + dataset.onSync("cleared", _.bind(this.cleared, this)); + } + }, this)); + } + _.mixin(Status.prototype, { + update: function update(event, suggestions) { + var length = suggestions.length; + var words; + if (length === 1) { + words = { + result: "result", + is: "is" + }; + } else { + words = { + result: "results", + is: "are" + }; + } + this.$el.text(length + " " + words.result + " " + words.is + " available, use up and down arrow keys to navigate."); + }, + cleared: function() { + this.$el.text(""); + } + }); + return Status; + }(); + var DefaultMenu = function() { + "use strict"; + var s = Menu.prototype; + function DefaultMenu() { + Menu.apply(this, [].slice.call(arguments, 0)); + } + _.mixin(DefaultMenu.prototype, Menu.prototype, { + open: function open() { + !this._allDatasetsEmpty() && this._show(); + return s.open.apply(this, [].slice.call(arguments, 0)); + }, + close: function close() { + this._hide(); + return s.close.apply(this, [].slice.call(arguments, 0)); + }, + _onRendered: function onRendered() { + if (this._allDatasetsEmpty()) { + this._hide(); + } else { + this.isOpen() && this._show(); + } + return s._onRendered.apply(this, [].slice.call(arguments, 0)); + }, + _onCleared: function onCleared() { + if (this._allDatasetsEmpty()) { + this._hide(); + } else { + this.isOpen() && 
this._show(); + } + return s._onCleared.apply(this, [].slice.call(arguments, 0)); + }, + setLanguageDirection: function setLanguageDirection(dir) { + this.$node.css(dir === "ltr" ? this.css.ltr : this.css.rtl); + return s.setLanguageDirection.apply(this, [].slice.call(arguments, 0)); + }, + _hide: function hide() { + this.$node.hide(); + }, + _show: function show() { + this.$node.css("display", "block"); + } + }); + return DefaultMenu; + }(); + var Typeahead = function() { + "use strict"; + function Typeahead(o, www) { + var onFocused, onBlurred, onEnterKeyed, onTabKeyed, onEscKeyed, onUpKeyed, onDownKeyed, onLeftKeyed, onRightKeyed, onQueryChanged, onWhitespaceChanged; + o = o || {}; + if (!o.input) { + $.error("missing input"); + } + if (!o.menu) { + $.error("missing menu"); + } + if (!o.eventBus) { + $.error("missing event bus"); + } + www.mixin(this); + this.eventBus = o.eventBus; + this.minLength = _.isNumber(o.minLength) ? o.minLength : 1; + this.input = o.input; + this.menu = o.menu; + this.enabled = true; + this.autoselect = !!o.autoselect; + this.active = false; + this.input.hasFocus() && this.activate(); + this.dir = this.input.getLangDir(); + this._hacks(); + this.menu.bind().onSync("selectableClicked", this._onSelectableClicked, this).onSync("asyncRequested", this._onAsyncRequested, this).onSync("asyncCanceled", this._onAsyncCanceled, this).onSync("asyncReceived", this._onAsyncReceived, this).onSync("datasetRendered", this._onDatasetRendered, this).onSync("datasetCleared", this._onDatasetCleared, this); + onFocused = c(this, "activate", "open", "_onFocused"); + onBlurred = c(this, "deactivate", "_onBlurred"); + onEnterKeyed = c(this, "isActive", "isOpen", "_onEnterKeyed"); + onTabKeyed = c(this, "isActive", "isOpen", "_onTabKeyed"); + onEscKeyed = c(this, "isActive", "_onEscKeyed"); + onUpKeyed = c(this, "isActive", "open", "_onUpKeyed"); + onDownKeyed = c(this, "isActive", "open", "_onDownKeyed"); + onLeftKeyed = c(this, "isActive", "isOpen", 
"_onLeftKeyed"); + onRightKeyed = c(this, "isActive", "isOpen", "_onRightKeyed"); + onQueryChanged = c(this, "_openIfActive", "_onQueryChanged"); + onWhitespaceChanged = c(this, "_openIfActive", "_onWhitespaceChanged"); + this.input.bind().onSync("focused", onFocused, this).onSync("blurred", onBlurred, this).onSync("enterKeyed", onEnterKeyed, this).onSync("tabKeyed", onTabKeyed, this).onSync("escKeyed", onEscKeyed, this).onSync("upKeyed", onUpKeyed, this).onSync("downKeyed", onDownKeyed, this).onSync("leftKeyed", onLeftKeyed, this).onSync("rightKeyed", onRightKeyed, this).onSync("queryChanged", onQueryChanged, this).onSync("whitespaceChanged", onWhitespaceChanged, this).onSync("langDirChanged", this._onLangDirChanged, this); + } + _.mixin(Typeahead.prototype, { + _hacks: function hacks() { + var $input, $menu; + $input = this.input.$input || $("
"); + $menu = this.menu.$node || $("
"); + $input.on("blur.tt", function($e) { + var active, isActive, hasActive; + active = document.activeElement; + isActive = $menu.is(active); + hasActive = $menu.has(active).length > 0; + if (_.isMsie() && (isActive || hasActive)) { + $e.preventDefault(); + $e.stopImmediatePropagation(); + _.defer(function() { + $input.focus(); + }); + } + }); + $menu.on("mousedown.tt", function($e) { + $e.preventDefault(); + }); + }, + _onSelectableClicked: function onSelectableClicked(type, $el) { + this.select($el); + }, + _onDatasetCleared: function onDatasetCleared() { + this._updateHint(); + }, + _onDatasetRendered: function onDatasetRendered(type, suggestions, async, dataset) { + this._updateHint(); + if (this.autoselect) { + var cursorClass = this.selectors.cursor.substr(1); + this.menu.$node.find(this.selectors.suggestion).first().addClass(cursorClass); + } + this.eventBus.trigger("render", suggestions, async, dataset); + }, + _onAsyncRequested: function onAsyncRequested(type, dataset, query) { + this.eventBus.trigger("asyncrequest", query, dataset); + }, + _onAsyncCanceled: function onAsyncCanceled(type, dataset, query) { + this.eventBus.trigger("asynccancel", query, dataset); + }, + _onAsyncReceived: function onAsyncReceived(type, dataset, query) { + this.eventBus.trigger("asyncreceive", query, dataset); + }, + _onFocused: function onFocused() { + this._minLengthMet() && this.menu.update(this.input.getQuery()); + }, + _onBlurred: function onBlurred() { + if (this.input.hasQueryChangedSinceLastFocus()) { + this.eventBus.trigger("change", this.input.getQuery()); + } + }, + _onEnterKeyed: function onEnterKeyed(type, $e) { + var $selectable; + if ($selectable = this.menu.getActiveSelectable()) { + if (this.select($selectable)) { + $e.preventDefault(); + $e.stopPropagation(); + } + } else if (this.autoselect) { + if (this.select(this.menu.getTopSelectable())) { + $e.preventDefault(); + $e.stopPropagation(); + } + } + }, + _onTabKeyed: function onTabKeyed(type, $e) { + var 
$selectable; + if ($selectable = this.menu.getActiveSelectable()) { + this.select($selectable) && $e.preventDefault(); + } else if (this.autoselect) { + if ($selectable = this.menu.getTopSelectable()) { + this.autocomplete($selectable) && $e.preventDefault(); + } + } + }, + _onEscKeyed: function onEscKeyed() { + this.close(); + }, + _onUpKeyed: function onUpKeyed() { + this.moveCursor(-1); + }, + _onDownKeyed: function onDownKeyed() { + this.moveCursor(+1); + }, + _onLeftKeyed: function onLeftKeyed() { + if (this.dir === "rtl" && this.input.isCursorAtEnd()) { + this.autocomplete(this.menu.getActiveSelectable() || this.menu.getTopSelectable()); + } + }, + _onRightKeyed: function onRightKeyed() { + if (this.dir === "ltr" && this.input.isCursorAtEnd()) { + this.autocomplete(this.menu.getActiveSelectable() || this.menu.getTopSelectable()); + } + }, + _onQueryChanged: function onQueryChanged(e, query) { + this._minLengthMet(query) ? this.menu.update(query) : this.menu.empty(); + }, + _onWhitespaceChanged: function onWhitespaceChanged() { + this._updateHint(); + }, + _onLangDirChanged: function onLangDirChanged(e, dir) { + if (this.dir !== dir) { + this.dir = dir; + this.menu.setLanguageDirection(dir); + } + }, + _openIfActive: function openIfActive() { + this.isActive() && this.open(); + }, + _minLengthMet: function minLengthMet(query) { + query = _.isString(query) ? 
query : this.input.getQuery() || ""; + return query.length >= this.minLength; + }, + _updateHint: function updateHint() { + var $selectable, data, val, query, escapedQuery, frontMatchRegEx, match; + $selectable = this.menu.getTopSelectable(); + data = this.menu.getSelectableData($selectable); + val = this.input.getInputValue(); + if (data && !_.isBlankString(val) && !this.input.hasOverflow()) { + query = Input.normalizeQuery(val); + escapedQuery = _.escapeRegExChars(query); + frontMatchRegEx = new RegExp("^(?:" + escapedQuery + ")(.+$)", "i"); + match = frontMatchRegEx.exec(data.val); + match && this.input.setHint(val + match[1]); + } else { + this.input.clearHint(); + } + }, + isEnabled: function isEnabled() { + return this.enabled; + }, + enable: function enable() { + this.enabled = true; + }, + disable: function disable() { + this.enabled = false; + }, + isActive: function isActive() { + return this.active; + }, + activate: function activate() { + if (this.isActive()) { + return true; + } else if (!this.isEnabled() || this.eventBus.before("active")) { + return false; + } else { + this.active = true; + this.eventBus.trigger("active"); + return true; + } + }, + deactivate: function deactivate() { + if (!this.isActive()) { + return true; + } else if (this.eventBus.before("idle")) { + return false; + } else { + this.active = false; + this.close(); + this.eventBus.trigger("idle"); + return true; + } + }, + isOpen: function isOpen() { + return this.menu.isOpen(); + }, + open: function open() { + if (!this.isOpen() && !this.eventBus.before("open")) { + this.input.setAriaExpanded(true); + this.menu.open(); + this._updateHint(); + this.eventBus.trigger("open"); + } + return this.isOpen(); + }, + close: function close() { + if (this.isOpen() && !this.eventBus.before("close")) { + this.input.setAriaExpanded(false); + this.menu.close(); + this.input.clearHint(); + this.input.resetInputValue(); + this.eventBus.trigger("close"); + } + return !this.isOpen(); + }, + setVal: 
function setVal(val) { + this.input.setQuery(_.toStr(val)); + }, + getVal: function getVal() { + return this.input.getQuery(); + }, + select: function select($selectable) { + var data = this.menu.getSelectableData($selectable); + if (data && !this.eventBus.before("select", data.obj, data.dataset)) { + this.input.setQuery(data.val, true); + this.eventBus.trigger("select", data.obj, data.dataset); + this.close(); + return true; + } + return false; + }, + autocomplete: function autocomplete($selectable) { + var query, data, isValid; + query = this.input.getQuery(); + data = this.menu.getSelectableData($selectable); + isValid = data && query !== data.val; + if (isValid && !this.eventBus.before("autocomplete", data.obj, data.dataset)) { + this.input.setQuery(data.val); + this.eventBus.trigger("autocomplete", data.obj, data.dataset); + return true; + } + return false; + }, + moveCursor: function moveCursor(delta) { + var query, $candidate, data, suggestion, datasetName, cancelMove, id; + query = this.input.getQuery(); + $candidate = this.menu.selectableRelativeToCursor(delta); + data = this.menu.getSelectableData($candidate); + suggestion = data ? data.obj : null; + datasetName = data ? data.dataset : null; + id = $candidate ? 
$candidate.attr("id") : null; + this.input.trigger("cursorchange", id); + cancelMove = this._minLengthMet() && this.menu.update(query); + if (!cancelMove && !this.eventBus.before("cursorchange", suggestion, datasetName)) { + this.menu.setCursor($candidate); + if (data) { + if (typeof data.val === "string") { + this.input.setInputValue(data.val); + } + } else { + this.input.resetInputValue(); + this._updateHint(); + } + this.eventBus.trigger("cursorchange", suggestion, datasetName); + return true; + } + return false; + }, + destroy: function destroy() { + this.input.destroy(); + this.menu.destroy(); + } + }); + return Typeahead; + function c(ctx) { + var methods = [].slice.call(arguments, 1); + return function() { + var args = [].slice.call(arguments); + _.each(methods, function(method) { + return ctx[method].apply(ctx, args); + }); + }; + } + }(); + (function() { + "use strict"; + var old, keys, methods; + old = $.fn.typeahead; + keys = { + www: "tt-www", + attrs: "tt-attrs", + typeahead: "tt-typeahead" + }; + methods = { + initialize: function initialize(o, datasets) { + var www; + datasets = _.isArray(datasets) ? datasets : [].slice.call(arguments, 1); + o = o || {}; + www = WWW(o.classNames); + return this.each(attach); + function attach() { + var $input, $wrapper, $hint, $menu, defaultHint, defaultMenu, eventBus, input, menu, status, typeahead, MenuConstructor; + _.each(datasets, function(d) { + d.highlight = !!o.highlight; + }); + $input = $(this); + $wrapper = $(www.html.wrapper); + $hint = $elOrNull(o.hint); + $menu = $elOrNull(o.menu); + defaultHint = o.hint !== false && !$hint; + defaultMenu = o.menu !== false && !$menu; + defaultHint && ($hint = buildHintFromInput($input, www)); + defaultMenu && ($menu = $(www.html.menu).css(www.css.menu)); + $hint && $hint.val(""); + $input = prepInput($input, www); + if (defaultHint || defaultMenu) { + $wrapper.css(www.css.wrapper); + $input.css(defaultHint ? 
www.css.input : www.css.inputWithNoHint); + $input.wrap($wrapper).parent().prepend(defaultHint ? $hint : null).append(defaultMenu ? $menu : null); + } + MenuConstructor = defaultMenu ? DefaultMenu : Menu; + eventBus = new EventBus({ + el: $input + }); + input = new Input({ + hint: $hint, + input: $input, + menu: $menu + }, www); + menu = new MenuConstructor({ + node: $menu, + datasets: datasets + }, www); + status = new Status({ + $input: $input, + menu: menu + }); + typeahead = new Typeahead({ + input: input, + menu: menu, + eventBus: eventBus, + minLength: o.minLength, + autoselect: o.autoselect + }, www); + $input.data(keys.www, www); + $input.data(keys.typeahead, typeahead); + } + }, + isEnabled: function isEnabled() { + var enabled; + ttEach(this.first(), function(t) { + enabled = t.isEnabled(); + }); + return enabled; + }, + enable: function enable() { + ttEach(this, function(t) { + t.enable(); + }); + return this; + }, + disable: function disable() { + ttEach(this, function(t) { + t.disable(); + }); + return this; + }, + isActive: function isActive() { + var active; + ttEach(this.first(), function(t) { + active = t.isActive(); + }); + return active; + }, + activate: function activate() { + ttEach(this, function(t) { + t.activate(); + }); + return this; + }, + deactivate: function deactivate() { + ttEach(this, function(t) { + t.deactivate(); + }); + return this; + }, + isOpen: function isOpen() { + var open; + ttEach(this.first(), function(t) { + open = t.isOpen(); + }); + return open; + }, + open: function open() { + ttEach(this, function(t) { + t.open(); + }); + return this; + }, + close: function close() { + ttEach(this, function(t) { + t.close(); + }); + return this; + }, + select: function select(el) { + var success = false, $el = $(el); + ttEach(this.first(), function(t) { + success = t.select($el); + }); + return success; + }, + autocomplete: function autocomplete(el) { + var success = false, $el = $(el); + ttEach(this.first(), function(t) { + success 
= t.autocomplete($el); + }); + return success; + }, + moveCursor: function moveCursoe(delta) { + var success = false; + ttEach(this.first(), function(t) { + success = t.moveCursor(delta); + }); + return success; + }, + val: function val(newVal) { + var query; + if (!arguments.length) { + ttEach(this.first(), function(t) { + query = t.getVal(); + }); + return query; + } else { + ttEach(this, function(t) { + t.setVal(_.toStr(newVal)); + }); + return this; + } + }, + destroy: function destroy() { + ttEach(this, function(typeahead, $input) { + revert($input); + typeahead.destroy(); + }); + return this; + } + }; + $.fn.typeahead = function(method) { + if (methods[method]) { + return methods[method].apply(this, [].slice.call(arguments, 1)); + } else { + return methods.initialize.apply(this, arguments); + } + }; + $.fn.typeahead.noConflict = function noConflict() { + $.fn.typeahead = old; + return this; + }; + function ttEach($els, fn) { + $els.each(function() { + var $input = $(this), typeahead; + (typeahead = $input.data(keys.typeahead)) && fn(typeahead, $input); + }); + } + function buildHintFromInput($input, www) { + return $input.clone().addClass(www.classes.hint).removeData().css(www.css.hint).css(getBackgroundStyles($input)).prop({ + readonly: true, + required: false + }).removeAttr("id name placeholder").removeClass("required").attr({ + spellcheck: "false", + tabindex: -1 + }); + } + function prepInput($input, www) { + $input.data(keys.attrs, { + dir: $input.attr("dir"), + autocomplete: $input.attr("autocomplete"), + spellcheck: $input.attr("spellcheck"), + style: $input.attr("style") + }); + $input.addClass(www.classes.input).attr({ + spellcheck: false + }); + try { + !$input.attr("dir") && $input.attr("dir", "auto"); + } catch (e) {} + return $input; + } + function getBackgroundStyles($el) { + return { + backgroundAttachment: $el.css("background-attachment"), + backgroundClip: $el.css("background-clip"), + backgroundColor: $el.css("background-color"), + 
backgroundImage: $el.css("background-image"), + backgroundOrigin: $el.css("background-origin"), + backgroundPosition: $el.css("background-position"), + backgroundRepeat: $el.css("background-repeat"), + backgroundSize: $el.css("background-size") + }; + } + function revert($input) { + var www, $wrapper; + www = $input.data(keys.www); + $wrapper = $input.parent().filter(www.selectors.wrapper); + _.each($input.data(keys.attrs), function(val, key) { + _.isUndefined(val) ? $input.removeAttr(key) : $input.attr(key, val); + }); + $input.removeData(keys.typeahead).removeData(keys.www).removeData(keys.attr).removeClass(www.classes.input); + if ($wrapper.length) { + $input.detach().insertAfter($wrapper); + $wrapper.remove(); + } + } + function $elOrNull(obj) { + var isValid, $el; + isValid = _.isJQuery(obj) || _.isElement(obj); + $el = isValid ? $(obj).first() : []; + return $el.length ? $el : null; + } + })(); +}); \ No newline at end of file diff --git a/docs/search.json b/docs/search.json new file mode 100644 index 00000000..a853d0b5 --- /dev/null +++ b/docs/search.json @@ -0,0 +1 @@ +{"Typealiases.html#/s:6OpenAI5Modela":{"name":"Model","abstract":"

Defines all available OpenAI models supported by the library.

"},"Structs/Vector.html#/s:6OpenAI6VectorV16cosineSimilarity1a1bSdSaySdG_AGtFZ":{"name":"cosineSimilarity(a:b:)","abstract":"

Returns the similarity between two vectors

","parent_name":"Vector"},"Structs/Vector.html#/s:6OpenAI6VectorV16cosineDifference1a1bSdSaySdG_AGtF":{"name":"cosineDifference(a:b:)","abstract":"

Returns the difference between two vectors. Cosine distance is defined as 1 - cosineSimilarity(a, b)

","parent_name":"Vector"},"Structs/ModerationsResult/Moderation/CategoryScores/CodingKeys.html#/s:6OpenAI17ModerationsResultV10ModerationV14CategoryScoresV10CodingKeysO10harassmentyA2ImF":{"name":"harassment","parent_name":"CodingKeys"},"Structs/ModerationsResult/Moderation/CategoryScores/CodingKeys.html#/s:6OpenAI17ModerationsResultV10ModerationV14CategoryScoresV10CodingKeysO21harassmentThreateningyA2ImF":{"name":"harassmentThreatening","parent_name":"CodingKeys"},"Structs/ModerationsResult/Moderation/CategoryScores/CodingKeys.html#/s:6OpenAI17ModerationsResultV10ModerationV14CategoryScoresV10CodingKeysO4hateyA2ImF":{"name":"hate","parent_name":"CodingKeys"},"Structs/ModerationsResult/Moderation/CategoryScores/CodingKeys.html#/s:6OpenAI17ModerationsResultV10ModerationV14CategoryScoresV10CodingKeysO15hateThreateningyA2ImF":{"name":"hateThreatening","parent_name":"CodingKeys"},"Structs/ModerationsResult/Moderation/CategoryScores/CodingKeys.html#/s:6OpenAI17ModerationsResultV10ModerationV14CategoryScoresV10CodingKeysO8selfHarmyA2ImF":{"name":"selfHarm","parent_name":"CodingKeys"},"Structs/ModerationsResult/Moderation/CategoryScores/CodingKeys.html#/s:6OpenAI17ModerationsResultV10ModerationV14CategoryScoresV10CodingKeysO14selfHarmIntentyA2ImF":{"name":"selfHarmIntent","parent_name":"CodingKeys"},"Structs/ModerationsResult/Moderation/CategoryScores/CodingKeys.html#/s:6OpenAI17ModerationsResultV10ModerationV14CategoryScoresV10CodingKeysO20selfHarmInstructionsyA2ImF":{"name":"selfHarmInstructions","parent_name":"CodingKeys"},"Structs/ModerationsResult/Moderation/CategoryScores/CodingKeys.html#/s:6OpenAI17ModerationsResultV10ModerationV14CategoryScoresV10CodingKeysO6sexualyA2ImF":{"name":"sexual","parent_name":"CodingKeys"},"Structs/ModerationsResult/Moderation/CategoryScores/CodingKeys.html#/s:6OpenAI17ModerationsResultV10ModerationV14CategoryScoresV10CodingKeysO12sexualMinorsyA2ImF":{"name":"sexualMinors","parent_name":"CodingKeys"},"Structs/ModerationsResult/Moderation/
CategoryScores/CodingKeys.html#/s:6OpenAI17ModerationsResultV10ModerationV14CategoryScoresV10CodingKeysO8violenceyA2ImF":{"name":"violence","parent_name":"CodingKeys"},"Structs/ModerationsResult/Moderation/CategoryScores/CodingKeys.html#/s:6OpenAI17ModerationsResultV10ModerationV14CategoryScoresV10CodingKeysO15violenceGraphicyA2ImF":{"name":"violenceGraphic","parent_name":"CodingKeys"},"Structs/ModerationsResult/Moderation/CategoryScores.html#/s:6OpenAI17ModerationsResultV10ModerationV14CategoryScoresV10harassmentSdvp":{"name":"harassment","abstract":"

Content that expresses, incites, or promotes harassing language towards any target.

","parent_name":"CategoryScores"},"Structs/ModerationsResult/Moderation/CategoryScores.html#/s:6OpenAI17ModerationsResultV10ModerationV14CategoryScoresV21harassmentThreateningSdvp":{"name":"harassmentThreatening","abstract":"

Harassment content that also includes violence or serious harm towards any target.

","parent_name":"CategoryScores"},"Structs/ModerationsResult/Moderation/CategoryScores.html#/s:6OpenAI17ModerationsResultV10ModerationV14CategoryScoresV4hateSdvp":{"name":"hate","abstract":"

Content that expresses, incites, or promotes hate based on race, gender, ethnicity, religion, nationality, sexual orientation, disability status, or caste.

","parent_name":"CategoryScores"},"Structs/ModerationsResult/Moderation/CategoryScores.html#/s:6OpenAI17ModerationsResultV10ModerationV14CategoryScoresV15hateThreateningSdvp":{"name":"hateThreatening","abstract":"

Hateful content that also includes violence or serious harm towards the targeted group.

","parent_name":"CategoryScores"},"Structs/ModerationsResult/Moderation/CategoryScores.html#/s:6OpenAI17ModerationsResultV10ModerationV14CategoryScoresV8selfHarmSdvp":{"name":"selfHarm","abstract":"

Content that promotes, encourages, or depicts acts of self-harm, such as suicide, cutting, and eating disorders.

","parent_name":"CategoryScores"},"Structs/ModerationsResult/Moderation/CategoryScores.html#/s:6OpenAI17ModerationsResultV10ModerationV14CategoryScoresV14selfHarmIntentSdvp":{"name":"selfHarmIntent","abstract":"

Content where the speaker expresses that they are engaging or intend to engage in acts of self-harm, such as suicide, cutting, and eating disorders.

","parent_name":"CategoryScores"},"Structs/ModerationsResult/Moderation/CategoryScores.html#/s:6OpenAI17ModerationsResultV10ModerationV14CategoryScoresV20selfHarmInstructionsSdvp":{"name":"selfHarmInstructions","abstract":"

Content that encourages performing acts of self-harm, such as suicide, cutting, and eating disorders, or that gives instructions or advice on how to commit such acts.

","parent_name":"CategoryScores"},"Structs/ModerationsResult/Moderation/CategoryScores.html#/s:6OpenAI17ModerationsResultV10ModerationV14CategoryScoresV6sexualSdvp":{"name":"sexual","abstract":"

Content meant to arouse sexual excitement, such as the description of sexual activity, or that promotes sexual services (excluding sex education and wellness).

","parent_name":"CategoryScores"},"Structs/ModerationsResult/Moderation/CategoryScores.html#/s:6OpenAI17ModerationsResultV10ModerationV14CategoryScoresV12sexualMinorsSdvp":{"name":"sexualMinors","abstract":"

Sexual content that includes an individual who is under 18 years old.

","parent_name":"CategoryScores"},"Structs/ModerationsResult/Moderation/CategoryScores.html#/s:6OpenAI17ModerationsResultV10ModerationV14CategoryScoresV8violenceSdvp":{"name":"violence","abstract":"

Content that promotes or glorifies violence or celebrates the suffering or humiliation of others.

","parent_name":"CategoryScores"},"Structs/ModerationsResult/Moderation/CategoryScores.html#/s:6OpenAI17ModerationsResultV10ModerationV14CategoryScoresV15violenceGraphicSdvp":{"name":"violenceGraphic","abstract":"

Violent content that depicts death, violence, or serious physical injury in extreme graphic detail.

","parent_name":"CategoryScores"},"Structs/ModerationsResult/Moderation/CategoryScores/CodingKeys.html":{"name":"CodingKeys","parent_name":"CategoryScores"},"Structs/ModerationsResult/Moderation/CategoryScores.html#/s:ST12makeIterator0B0QzyF":{"name":"makeIterator()","parent_name":"CategoryScores"},"Structs/ModerationsResult/Moderation/Categories/CodingKeys.html#/s:6OpenAI17ModerationsResultV10ModerationV10CategoriesV10CodingKeysO10harassmentyA2ImF":{"name":"harassment","parent_name":"CodingKeys"},"Structs/ModerationsResult/Moderation/Categories/CodingKeys.html#/s:6OpenAI17ModerationsResultV10ModerationV10CategoriesV10CodingKeysO21harassmentThreateningyA2ImF":{"name":"harassmentThreatening","parent_name":"CodingKeys"},"Structs/ModerationsResult/Moderation/Categories/CodingKeys.html#/s:6OpenAI17ModerationsResultV10ModerationV10CategoriesV10CodingKeysO4hateyA2ImF":{"name":"hate","parent_name":"CodingKeys"},"Structs/ModerationsResult/Moderation/Categories/CodingKeys.html#/s:6OpenAI17ModerationsResultV10ModerationV10CategoriesV10CodingKeysO15hateThreateningyA2ImF":{"name":"hateThreatening","parent_name":"CodingKeys"},"Structs/ModerationsResult/Moderation/Categories/CodingKeys.html#/s:6OpenAI17ModerationsResultV10ModerationV10CategoriesV10CodingKeysO8selfHarmyA2ImF":{"name":"selfHarm","parent_name":"CodingKeys"},"Structs/ModerationsResult/Moderation/Categories/CodingKeys.html#/s:6OpenAI17ModerationsResultV10ModerationV10CategoriesV10CodingKeysO14selfHarmIntentyA2ImF":{"name":"selfHarmIntent","parent_name":"CodingKeys"},"Structs/ModerationsResult/Moderation/Categories/CodingKeys.html#/s:6OpenAI17ModerationsResultV10ModerationV10CategoriesV10CodingKeysO20selfHarmInstructionsyA2ImF":{"name":"selfHarmInstructions","parent_name":"CodingKeys"},"Structs/ModerationsResult/Moderation/Categories/CodingKeys.html#/s:6OpenAI17ModerationsResultV10ModerationV10CategoriesV10CodingKeysO6sexualyA2ImF":{"name":"sexual","parent_name":"CodingKeys"},"Structs/ModerationsResult/Moderation/Categ
ories/CodingKeys.html#/s:6OpenAI17ModerationsResultV10ModerationV10CategoriesV10CodingKeysO12sexualMinorsyA2ImF":{"name":"sexualMinors","parent_name":"CodingKeys"},"Structs/ModerationsResult/Moderation/Categories/CodingKeys.html#/s:6OpenAI17ModerationsResultV10ModerationV10CategoriesV10CodingKeysO8violenceyA2ImF":{"name":"violence","parent_name":"CodingKeys"},"Structs/ModerationsResult/Moderation/Categories/CodingKeys.html#/s:6OpenAI17ModerationsResultV10ModerationV10CategoriesV10CodingKeysO15violenceGraphicyA2ImF":{"name":"violenceGraphic","parent_name":"CodingKeys"},"Structs/ModerationsResult/Moderation/Categories.html#/s:6OpenAI17ModerationsResultV10ModerationV10CategoriesV10harassmentSbvp":{"name":"harassment","abstract":"

Content that expresses, incites, or promotes harassing language towards any target.

","parent_name":"Categories"},"Structs/ModerationsResult/Moderation/Categories.html#/s:6OpenAI17ModerationsResultV10ModerationV10CategoriesV21harassmentThreateningSbvp":{"name":"harassmentThreatening","abstract":"

Harassment content that also includes violence or serious harm towards any target.

","parent_name":"Categories"},"Structs/ModerationsResult/Moderation/Categories.html#/s:6OpenAI17ModerationsResultV10ModerationV10CategoriesV4hateSbvp":{"name":"hate","abstract":"

Content that expresses, incites, or promotes hate based on race, gender, ethnicity, religion, nationality, sexual orientation, disability status, or caste.

","parent_name":"Categories"},"Structs/ModerationsResult/Moderation/Categories.html#/s:6OpenAI17ModerationsResultV10ModerationV10CategoriesV15hateThreateningSbvp":{"name":"hateThreatening","abstract":"

Hateful content that also includes violence or serious harm towards the targeted group.

","parent_name":"Categories"},"Structs/ModerationsResult/Moderation/Categories.html#/s:6OpenAI17ModerationsResultV10ModerationV10CategoriesV8selfHarmSbvp":{"name":"selfHarm","abstract":"

Content that promotes, encourages, or depicts acts of self-harm, such as suicide, cutting, and eating disorders.

","parent_name":"Categories"},"Structs/ModerationsResult/Moderation/Categories.html#/s:6OpenAI17ModerationsResultV10ModerationV10CategoriesV14selfHarmIntentSbvp":{"name":"selfHarmIntent","abstract":"

Content where the speaker expresses that they are engaging or intend to engage in acts of self-harm, such as suicide, cutting, and eating disorders.

","parent_name":"Categories"},"Structs/ModerationsResult/Moderation/Categories.html#/s:6OpenAI17ModerationsResultV10ModerationV10CategoriesV20selfHarmInstructionsSbvp":{"name":"selfHarmInstructions","abstract":"

Content that encourages performing acts of self-harm, such as suicide, cutting, and eating disorders, or that gives instructions or advice on how to commit such acts.

","parent_name":"Categories"},"Structs/ModerationsResult/Moderation/Categories.html#/s:6OpenAI17ModerationsResultV10ModerationV10CategoriesV6sexualSbvp":{"name":"sexual","abstract":"

Content meant to arouse sexual excitement, such as the description of sexual activity, or that promotes sexual services (excluding sex education and wellness).

","parent_name":"Categories"},"Structs/ModerationsResult/Moderation/Categories.html#/s:6OpenAI17ModerationsResultV10ModerationV10CategoriesV12sexualMinorsSbvp":{"name":"sexualMinors","abstract":"

Sexual content that includes an individual who is under 18 years old.

","parent_name":"Categories"},"Structs/ModerationsResult/Moderation/Categories.html#/s:6OpenAI17ModerationsResultV10ModerationV10CategoriesV8violenceSbvp":{"name":"violence","abstract":"

Content that promotes or glorifies violence or celebrates the suffering or humiliation of others.

","parent_name":"Categories"},"Structs/ModerationsResult/Moderation/Categories.html#/s:6OpenAI17ModerationsResultV10ModerationV10CategoriesV15violenceGraphicSbvp":{"name":"violenceGraphic","abstract":"

Violent content that depicts death, violence, or serious physical injury in extreme graphic detail.

","parent_name":"Categories"},"Structs/ModerationsResult/Moderation/Categories/CodingKeys.html":{"name":"CodingKeys","parent_name":"Categories"},"Structs/ModerationsResult/Moderation/Categories.html#/s:ST12makeIterator0B0QzyF":{"name":"makeIterator()","parent_name":"Categories"},"Structs/ModerationsResult/Moderation/Categories.html":{"name":"Categories","parent_name":"Moderation"},"Structs/ModerationsResult/Moderation/CategoryScores.html":{"name":"CategoryScores","parent_name":"Moderation"},"Structs/ModerationsResult/Moderation.html#/s:6OpenAI17ModerationsResultV10ModerationV10categoriesAE10CategoriesVvp":{"name":"categories","abstract":"

Collection of per-category binary usage policies violation flags. For each category, the value is true if the model flags the corresponding category as violated, false otherwise.

","parent_name":"Moderation"},"Structs/ModerationsResult/Moderation.html#/s:6OpenAI17ModerationsResultV10ModerationV14categoryScoresAE08CategoryG0Vvp":{"name":"categoryScores","abstract":"

Collection of per-category raw scores output by the model, denoting the model’s confidence that the input violates the OpenAI’s policy for the category. The value is between 0 and 1, where higher values denote higher confidence. The scores should not be interpreted as probabilities.

","parent_name":"Moderation"},"Structs/ModerationsResult/Moderation.html#/s:6OpenAI17ModerationsResultV10ModerationV7flaggedSbvp":{"name":"flagged","abstract":"

True if the model classifies the content as violating OpenAI’s usage policies, false otherwise.

","parent_name":"Moderation"},"Structs/ModerationsResult/Moderation.html":{"name":"Moderation","parent_name":"ModerationsResult"},"Structs/ModerationsResult.html#/s:s12IdentifiableP2id2IDQzvp":{"name":"id","parent_name":"ModerationsResult"},"Structs/ModerationsResult.html#/s:6OpenAI17ModerationsResultV5modelSSvp":{"name":"model","parent_name":"ModerationsResult"},"Structs/ModerationsResult.html#/s:6OpenAI17ModerationsResultV7resultsSayAC10ModerationVGvp":{"name":"results","parent_name":"ModerationsResult"},"Structs/ModerationsQuery.html#/s:6OpenAI16ModerationsQueryV5inputSSvp":{"name":"input","abstract":"

The input text to classify.

","parent_name":"ModerationsQuery"},"Structs/ModerationsQuery.html#/s:6OpenAI16ModerationsQueryV5modelSSSgvp":{"name":"model","abstract":"

ID of the model to use.

","parent_name":"ModerationsQuery"},"Structs/ModerationsQuery.html#/s:6OpenAI16ModerationsQueryV5input5modelACSS_SSSgtcfc":{"name":"init(input:model:)","parent_name":"ModerationsQuery"},"Structs/ModelsResult.html#/s:6OpenAI12ModelsResultV4dataSayAA05ModelD0VGvp":{"name":"data","abstract":"

A list of model objects.

","parent_name":"ModelsResult"},"Structs/ModelsResult.html#/s:6OpenAI12ModelsResultV6objectSSvp":{"name":"object","abstract":"

The object type, which is always list

","parent_name":"ModelsResult"},"Structs/ModelResult/CodingKeys.html#/s:6OpenAI11ModelResultV10CodingKeysO2idyA2EmF":{"name":"id","parent_name":"CodingKeys"},"Structs/ModelResult/CodingKeys.html#/s:6OpenAI11ModelResultV10CodingKeysO7createdyA2EmF":{"name":"created","parent_name":"CodingKeys"},"Structs/ModelResult/CodingKeys.html#/s:6OpenAI11ModelResultV10CodingKeysO6objectyA2EmF":{"name":"object","parent_name":"CodingKeys"},"Structs/ModelResult/CodingKeys.html#/s:6OpenAI11ModelResultV10CodingKeysO7ownedByyA2EmF":{"name":"ownedBy","parent_name":"CodingKeys"},"Structs/ModelResult.html#/s:6OpenAI11ModelResultV2idSSvp":{"name":"id","abstract":"

The model identifier, which can be referenced in the API endpoints.

","parent_name":"ModelResult"},"Structs/ModelResult.html#/s:6OpenAI11ModelResultV7createdSdvp":{"name":"created","abstract":"

The Unix timestamp (in seconds) when the model was created.

","parent_name":"ModelResult"},"Structs/ModelResult.html#/s:6OpenAI11ModelResultV6objectSSvp":{"name":"object","abstract":"

The object type, which is always “model”.

","parent_name":"ModelResult"},"Structs/ModelResult.html#/s:6OpenAI11ModelResultV7ownedBySSvp":{"name":"ownedBy","abstract":"

The organization that owns the model.

","parent_name":"ModelResult"},"Structs/ModelResult/CodingKeys.html":{"name":"CodingKeys","parent_name":"ModelResult"},"Structs/ModelQuery.html#/s:6OpenAI10ModelQueryV5modelSSvp":{"name":"model","abstract":"

The ID of the model to use for this request.

","parent_name":"ModelQuery"},"Structs/ModelQuery.html#/s:6OpenAI10ModelQueryV5modelACSS_tcfc":{"name":"init(model:)","parent_name":"ModelQuery"},"Structs/ImagesResult/Image/CodingKeys.html#/s:6OpenAI12ImagesResultV5ImageV10CodingKeysO7b64JsonyA2GmF":{"name":"b64Json","parent_name":"CodingKeys"},"Structs/ImagesResult/Image/CodingKeys.html#/s:6OpenAI12ImagesResultV5ImageV10CodingKeysO13revisedPromptyA2GmF":{"name":"revisedPrompt","parent_name":"CodingKeys"},"Structs/ImagesResult/Image/CodingKeys.html#/s:6OpenAI12ImagesResultV5ImageV10CodingKeysO3urlyA2GmF":{"name":"url","parent_name":"CodingKeys"},"Structs/ImagesResult/Image.html#/s:6OpenAI12ImagesResultV5ImageV7b64JsonSSSgvp":{"name":"b64Json","abstract":"

The base64-encoded JSON of the generated image, if response_format is b64_json

","parent_name":"Image"},"Structs/ImagesResult/Image.html#/s:6OpenAI12ImagesResultV5ImageV13revisedPromptSSSgvp":{"name":"revisedPrompt","abstract":"

The prompt that was used to generate the image, if there was any revision to the prompt.

","parent_name":"Image"},"Structs/ImagesResult/Image.html#/s:6OpenAI12ImagesResultV5ImageV3urlSSSgvp":{"name":"url","abstract":"

The URL of the generated image, if response_format is url (default).

","parent_name":"Image"},"Structs/ImagesResult/Image/CodingKeys.html":{"name":"CodingKeys","parent_name":"Image"},"Structs/ImagesResult.html#/s:6OpenAI12ImagesResultV7createdSdvp":{"name":"created","parent_name":"ImagesResult"},"Structs/ImagesResult.html#/s:6OpenAI12ImagesResultV4dataSayAC5ImageVGvp":{"name":"data","parent_name":"ImagesResult"},"Structs/ImagesResult/Image.html":{"name":"Image","abstract":"

Represents the url or the content of an image generated by the OpenAI API.

","parent_name":"ImagesResult"},"Structs/ImagesQuery/Size.html#/s:6OpenAI11ImagesQueryV4SizeO4_256yA2EmF":{"name":"_256","parent_name":"Size"},"Structs/ImagesQuery/Size.html#/s:6OpenAI11ImagesQueryV4SizeO4_512yA2EmF":{"name":"_512","parent_name":"Size"},"Structs/ImagesQuery/Size.html#/s:6OpenAI11ImagesQueryV4SizeO5_1024yA2EmF":{"name":"_1024","parent_name":"Size"},"Structs/ImagesQuery/Size.html#/s:6OpenAI11ImagesQueryV4SizeO10_1792_1024yA2EmF":{"name":"_1792_1024","parent_name":"Size"},"Structs/ImagesQuery/Size.html#/s:6OpenAI11ImagesQueryV4SizeO10_1024_1792yA2EmF":{"name":"_1024_1792","parent_name":"Size"},"Structs/ImagesQuery/Quality.html#/s:6OpenAI11ImagesQueryV7QualityO8standardyA2EmF":{"name":"standard","parent_name":"Quality"},"Structs/ImagesQuery/Quality.html#/s:6OpenAI11ImagesQueryV7QualityO2hdyA2EmF":{"name":"hd","parent_name":"Quality"},"Structs/ImagesQuery/Style.html#/s:6OpenAI11ImagesQueryV5StyleO7naturalyA2EmF":{"name":"natural","parent_name":"Style"},"Structs/ImagesQuery/Style.html#/s:6OpenAI11ImagesQueryV5StyleO5vividyA2EmF":{"name":"vivid","parent_name":"Style"},"Structs/ImagesQuery/CodingKeys.html#/s:6OpenAI11ImagesQueryV10CodingKeysO5modelyA2EmF":{"name":"model","parent_name":"CodingKeys"},"Structs/ImagesQuery/CodingKeys.html#/s:6OpenAI11ImagesQueryV10CodingKeysO6promptyA2EmF":{"name":"prompt","parent_name":"CodingKeys"},"Structs/ImagesQuery/CodingKeys.html#/s:6OpenAI11ImagesQueryV10CodingKeysO1nyA2EmF":{"name":"n","parent_name":"CodingKeys"},"Structs/ImagesQuery/CodingKeys.html#/s:6OpenAI11ImagesQueryV10CodingKeysO4sizeyA2EmF":{"name":"size","parent_name":"CodingKeys"},"Structs/ImagesQuery/CodingKeys.html#/s:6OpenAI11ImagesQueryV10CodingKeysO4useryA2EmF":{"name":"user","parent_name":"CodingKeys"},"Structs/ImagesQuery/CodingKeys.html#/s:6OpenAI11ImagesQueryV10CodingKeysO5styleyA2EmF":{"name":"style","parent_name":"CodingKeys"},"Structs/ImagesQuery/CodingKeys.html#/s:6OpenAI11ImagesQueryV10CodingKeysO14responseFormatyA2EmF":{"name":"responseFormat",
"parent_name":"CodingKeys"},"Structs/ImagesQuery/CodingKeys.html#/s:6OpenAI11ImagesQueryV10CodingKeysO7qualityyA2EmF":{"name":"quality","parent_name":"CodingKeys"},"Structs/ImagesQuery/ResponseFormat.html#/s:6OpenAI11ImagesQueryV14ResponseFormatO3urlyA2EmF":{"name":"url","parent_name":"ResponseFormat"},"Structs/ImagesQuery/ResponseFormat.html#/s:6OpenAI11ImagesQueryV14ResponseFormatO8b64_jsonyA2EmF":{"name":"b64_json","parent_name":"ResponseFormat"},"Structs/ImagesQuery/ResponseFormat.html":{"name":"ResponseFormat","parent_name":"ImagesQuery"},"Structs/ImagesQuery.html#/s:6OpenAI11ImagesQueryV6promptSSvp":{"name":"prompt","abstract":"

A text description of the desired image(s). The maximum length is 1000 characters for dall-e-2 and 4000 characters for dall-e-3.

","parent_name":"ImagesQuery"},"Structs/ImagesQuery.html#/s:6OpenAI11ImagesQueryV5modelSSSgvp":{"name":"model","abstract":"

The model to use for image generation.","parent_name":"ImagesQuery"},"Structs/ImagesQuery.html#/s:6OpenAI11ImagesQueryV14responseFormatAC08ResponseF0OSgvp":{"name":"responseFormat","abstract":"

The format in which the generated images are returned. Must be one of url or b64_json.","parent_name":"ImagesQuery"},"Structs/ImagesQuery.html#/s:6OpenAI11ImagesQueryV1nSiSgvp":{"name":"n","abstract":"

The number of images to generate. Must be between 1 and 10. For dall-e-3, only n=1 is supported.","parent_name":"ImagesQuery"},"Structs/ImagesQuery.html#/s:6OpenAI11ImagesQueryV4sizeAC4SizeOSgvp":{"name":"size","abstract":"

The size of the generated images. Must be one of 256x256, 512x512, or 1024x1024 for dall-e-2. Must be one of 1024x1024, 1792x1024, or 1024x1792 for dall-e-3 models.","parent_name":"ImagesQuery"},"Structs/ImagesQuery.html#/s:6OpenAI11ImagesQueryV4userSSSgvp":{"name":"user","abstract":"

A unique identifier representing your end-user, which can help OpenAI to monitor and detect abuse.","parent_name":"ImagesQuery"},"Structs/ImagesQuery.html#/s:6OpenAI11ImagesQueryV5styleAC5StyleOSgvp":{"name":"style","abstract":"

The style of the generated images. Must be one of vivid or natural. Vivid causes the model to lean towards generating hyper-real and dramatic images. Natural causes the model to produce more natural, less hyper-real looking images. This param is only supported for dall-e-3.","parent_name":"ImagesQuery"},"Structs/ImagesQuery.html#/s:6OpenAI11ImagesQueryV7qualityAC7QualityOSgvp":{"name":"quality","abstract":"

The quality of the image that will be generated. hd creates images with finer details and greater consistency across the image. This param is only supported for dall-e-3.","parent_name":"ImagesQuery"},"Structs/ImagesQuery.html#/s:6OpenAI11ImagesQueryV6prompt5model1n7quality14responseFormat4size5style4userACSS_SSSgSiSgAC7QualityOSgAC08ResponseI0OSgAC4SizeOSgAC5StyleOSgALtcfc":{"name":"init(prompt:model:n:quality:responseFormat:size:style:user:)","parent_name":"ImagesQuery"},"Structs/ImagesQuery/CodingKeys.html":{"name":"CodingKeys","parent_name":"ImagesQuery"},"Structs/ImagesQuery/Style.html":{"name":"Style","parent_name":"ImagesQuery"},"Structs/ImagesQuery/Quality.html":{"name":"Quality","parent_name":"ImagesQuery"},"Structs/ImagesQuery/Size.html":{"name":"Size","parent_name":"ImagesQuery"},"Structs/ImageVariationsQuery/CodingKeys.html#/s:6OpenAI20ImageVariationsQueryV10CodingKeysO5imageyA2EmF":{"name":"image","parent_name":"CodingKeys"},"Structs/ImageVariationsQuery/CodingKeys.html#/s:6OpenAI20ImageVariationsQueryV10CodingKeysO5modelyA2EmF":{"name":"model","parent_name":"CodingKeys"},"Structs/ImageVariationsQuery/CodingKeys.html#/s:6OpenAI20ImageVariationsQueryV10CodingKeysO1nyA2EmF":{"name":"n","parent_name":"CodingKeys"},"Structs/ImageVariationsQuery/CodingKeys.html#/s:6OpenAI20ImageVariationsQueryV10CodingKeysO14responseFormatyA2EmF":{"name":"responseFormat","parent_name":"CodingKeys"},"Structs/ImageVariationsQuery/CodingKeys.html#/s:6OpenAI20ImageVariationsQueryV10CodingKeysO4sizeyA2EmF":{"name":"size","parent_name":"CodingKeys"},"Structs/ImageVariationsQuery/CodingKeys.html#/s:6OpenAI20ImageVariationsQueryV10CodingKeysO4useryA2EmF":{"name":"user","parent_name":"CodingKeys"},"Structs/ImageVariationsQuery.html#/s:6OpenAI20ImageVariationsQueryV14ResponseFormata":{"name":"ResponseFormat","parent_name":"ImageVariationsQuery"},"Structs/ImageVariationsQuery.html#/s:6OpenAI20ImageVariationsQueryV5image10Foundation4DataVvp":{"name":"image","abstract":"

The image to edit. Must be a valid PNG file, less than 4MB, and square.

","parent_name":"ImageVariationsQuery"},"Structs/ImageVariationsQuery.html#/s:6OpenAI20ImageVariationsQueryV5modelSSSgvp":{"name":"model","abstract":"

The model to use for image generation. Only dall-e-2 is supported at this time.","parent_name":"ImageVariationsQuery"},"Structs/ImageVariationsQuery.html#/s:6OpenAI20ImageVariationsQueryV1nSiSgvp":{"name":"n","abstract":"

The number of images to generate. Must be between 1 and 10.","parent_name":"ImageVariationsQuery"},"Structs/ImageVariationsQuery.html#/s:6OpenAI20ImageVariationsQueryV14responseFormatAA06ImagesE0V08ResponseG0OSgvp":{"name":"responseFormat","abstract":"

The format in which the generated images are returned. Must be one of url or b64_json.","parent_name":"ImageVariationsQuery"},"Structs/ImageVariationsQuery.html#/s:6OpenAI20ImageVariationsQueryV4sizeSSSgvp":{"name":"size","abstract":"

The size of the generated images. Must be one of 256x256, 512x512, or 1024x1024.","parent_name":"ImageVariationsQuery"},"Structs/ImageVariationsQuery.html#/s:6OpenAI20ImageVariationsQueryV4userSSSgvp":{"name":"user","abstract":"

A unique identifier representing your end-user, which can help OpenAI to monitor and detect abuse.","parent_name":"ImageVariationsQuery"},"Structs/ImageVariationsQuery.html#/s:6OpenAI20ImageVariationsQueryV5image5model1n14responseFormat4size4userAC10Foundation4DataV_SSSgSiSgAA06ImagesE0V08ResponseI0OSgA2Mtcfc":{"name":"init(image:model:n:responseFormat:size:user:)","parent_name":"ImageVariationsQuery"},"Structs/ImageVariationsQuery/CodingKeys.html":{"name":"CodingKeys","parent_name":"ImageVariationsQuery"},"Structs/ImageEditsQuery/CodingKeys.html#/s:6OpenAI15ImageEditsQueryV10CodingKeysO5imageyA2EmF":{"name":"image","parent_name":"CodingKeys"},"Structs/ImageEditsQuery/CodingKeys.html#/s:6OpenAI15ImageEditsQueryV10CodingKeysO4maskyA2EmF":{"name":"mask","parent_name":"CodingKeys"},"Structs/ImageEditsQuery/CodingKeys.html#/s:6OpenAI15ImageEditsQueryV10CodingKeysO6promptyA2EmF":{"name":"prompt","parent_name":"CodingKeys"},"Structs/ImageEditsQuery/CodingKeys.html#/s:6OpenAI15ImageEditsQueryV10CodingKeysO5modelyA2EmF":{"name":"model","parent_name":"CodingKeys"},"Structs/ImageEditsQuery/CodingKeys.html#/s:6OpenAI15ImageEditsQueryV10CodingKeysO1nyA2EmF":{"name":"n","parent_name":"CodingKeys"},"Structs/ImageEditsQuery/CodingKeys.html#/s:6OpenAI15ImageEditsQueryV10CodingKeysO14responseFormatyA2EmF":{"name":"responseFormat","parent_name":"CodingKeys"},"Structs/ImageEditsQuery/CodingKeys.html#/s:6OpenAI15ImageEditsQueryV10CodingKeysO4sizeyA2EmF":{"name":"size","parent_name":"CodingKeys"},"Structs/ImageEditsQuery/CodingKeys.html#/s:6OpenAI15ImageEditsQueryV10CodingKeysO4useryA2EmF":{"name":"user","parent_name":"CodingKeys"},"Structs/ImageEditsQuery.html#/s:6OpenAI15ImageEditsQueryV14ResponseFormata":{"name":"ResponseFormat","parent_name":"ImageEditsQuery"},"Structs/ImageEditsQuery.html#/s:6OpenAI15ImageEditsQueryV4Sizea":{"name":"Size","parent_name":"ImageEditsQuery"},"Structs/ImageEditsQuery.html#/s:6OpenAI15ImageEditsQueryV5image10Foundation4DataVvp":{"name":"image","abstract"
:"

The image to edit. Must be a valid PNG file, less than 4MB, and square. If mask is not provided, image must have transparency, which will be used as the mask.

","parent_name":"ImageEditsQuery"},"Structs/ImageEditsQuery.html#/s:6OpenAI15ImageEditsQueryV4mask10Foundation4DataVSgvp":{"name":"mask","abstract":"

An additional image whose fully transparent areas (e.g. where alpha is zero) indicate where image should be edited. Must be a valid PNG file, less than 4MB, and have the same dimensions as image.

","parent_name":"ImageEditsQuery"},"Structs/ImageEditsQuery.html#/s:6OpenAI15ImageEditsQueryV6promptSSvp":{"name":"prompt","abstract":"

A text description of the desired image(s). The maximum length is 1000 characters.

","parent_name":"ImageEditsQuery"},"Structs/ImageEditsQuery.html#/s:6OpenAI15ImageEditsQueryV5modelSSSgvp":{"name":"model","abstract":"

The model to use for image generation.","parent_name":"ImageEditsQuery"},"Structs/ImageEditsQuery.html#/s:6OpenAI15ImageEditsQueryV1nSiSgvp":{"name":"n","abstract":"

The number of images to generate. Must be between 1 and 10.

","parent_name":"ImageEditsQuery"},"Structs/ImageEditsQuery.html#/s:6OpenAI15ImageEditsQueryV14responseFormatAA06ImagesE0V08ResponseG0OSgvp":{"name":"responseFormat","abstract":"

The format in which the generated images are returned. Must be one of url or b64_json.","parent_name":"ImageEditsQuery"},"Structs/ImageEditsQuery.html#/s:6OpenAI15ImageEditsQueryV4sizeAA06ImagesE0V4SizeOSgvp":{"name":"size","abstract":"

The size of the generated images. Must be one of 256x256, 512x512, or 1024x1024.

","parent_name":"ImageEditsQuery"},"Structs/ImageEditsQuery.html#/s:6OpenAI15ImageEditsQueryV4userSSSgvp":{"name":"user","abstract":"

A unique identifier representing your end-user, which can help OpenAI to monitor and detect abuse.","parent_name":"ImageEditsQuery"},"Structs/ImageEditsQuery.html#/s:6OpenAI15ImageEditsQueryV5image6prompt4mask5model1n14responseFormat4size4userAC10Foundation4DataV_SSANSgSSSgSiSgAA06ImagesE0V08ResponseK0OSgAS4SizeOSgAPtcfc":{"name":"init(image:prompt:mask:model:n:responseFormat:size:user:)","parent_name":"ImageEditsQuery"},"Structs/ImageEditsQuery/CodingKeys.html":{"name":"CodingKeys","parent_name":"ImageEditsQuery"},"Structs/EmbeddingsResult/Usage.html#/s:6OpenAI16EmbeddingsResultV5UsageV12promptTokensSivp":{"name":"promptTokens","parent_name":"Usage"},"Structs/EmbeddingsResult/Usage.html#/s:6OpenAI16EmbeddingsResultV5UsageV11totalTokensSivp":{"name":"totalTokens","parent_name":"Usage"},"Structs/EmbeddingsResult/Embedding.html#/s:6OpenAI16EmbeddingsResultV9EmbeddingV6objectSSvp":{"name":"object","abstract":"

The object type, which is always “embedding”.

","parent_name":"Embedding"},"Structs/EmbeddingsResult/Embedding.html#/s:6OpenAI16EmbeddingsResultV9EmbeddingV9embeddingSaySdGvp":{"name":"embedding","abstract":"

The embedding vector, which is a list of floats. The length of vector depends on the model as listed in the embedding guide.","parent_name":"Embedding"},"Structs/EmbeddingsResult/Embedding.html":{"name":"Embedding","parent_name":"EmbeddingsResult"},"Structs/EmbeddingsResult/Usage.html":{"name":"Usage","parent_name":"EmbeddingsResult"},"Structs/EmbeddingsResult.html#/s:6OpenAI16EmbeddingsResultV4dataSayAC9EmbeddingVGvp":{"name":"data","parent_name":"EmbeddingsResult"},"Structs/EmbeddingsResult.html#/s:6OpenAI16EmbeddingsResultV5modelSSvp":{"name":"model","parent_name":"EmbeddingsResult"},"Structs/EmbeddingsResult.html#/s:6OpenAI16EmbeddingsResultV5usageAC5UsageVvp":{"name":"usage","parent_name":"EmbeddingsResult"},"Structs/EmbeddingsResult.html#/s:6OpenAI16EmbeddingsResultV6objectSSvp":{"name":"object","abstract":"

The object type, which is always “list”.

","parent_name":"EmbeddingsResult"},"Structs/EmbeddingsQuery/CodingKeys.html#/s:6OpenAI15EmbeddingsQueryV10CodingKeysO5inputyA2EmF":{"name":"input","parent_name":"CodingKeys"},"Structs/EmbeddingsQuery/CodingKeys.html#/s:6OpenAI15EmbeddingsQueryV10CodingKeysO5modelyA2EmF":{"name":"model","parent_name":"CodingKeys"},"Structs/EmbeddingsQuery/CodingKeys.html#/s:6OpenAI15EmbeddingsQueryV10CodingKeysO14encodingFormatyA2EmF":{"name":"encodingFormat","parent_name":"CodingKeys"},"Structs/EmbeddingsQuery/CodingKeys.html#/s:6OpenAI15EmbeddingsQueryV10CodingKeysO4useryA2EmF":{"name":"user","parent_name":"CodingKeys"},"Structs/EmbeddingsQuery/EncodingFormat.html#/s:6OpenAI15EmbeddingsQueryV14EncodingFormatO5floatyA2EmF":{"name":"float","parent_name":"EncodingFormat"},"Structs/EmbeddingsQuery/EncodingFormat.html#/s:6OpenAI15EmbeddingsQueryV14EncodingFormatO6base64yA2EmF":{"name":"base64","parent_name":"EncodingFormat"},"Structs/EmbeddingsQuery/Input.html#/s:6OpenAI15EmbeddingsQueryV5InputO6stringyAESScAEmF":{"name":"string(_:)","parent_name":"Input"},"Structs/EmbeddingsQuery/Input.html#/s:6OpenAI15EmbeddingsQueryV5InputO10stringListyAESaySSGcAEmF":{"name":"stringList(_:)","parent_name":"Input"},"Structs/EmbeddingsQuery/Input.html#/s:6OpenAI15EmbeddingsQueryV5InputO7intListyAESaySiGcAEmF":{"name":"intList(_:)","parent_name":"Input"},"Structs/EmbeddingsQuery/Input.html#/s:6OpenAI15EmbeddingsQueryV5InputO9intMatrixyAESaySaySiGGcAEmF":{"name":"intMatrix(_:)","parent_name":"Input"},"Structs/EmbeddingsQuery/Input.html#/s:SE6encode2toys7Encoder_p_tKF":{"name":"encode(to:)","parent_name":"Input"},"Structs/EmbeddingsQuery/Input.html#/s:6OpenAI15EmbeddingsQueryV5InputO6stringAESS_tcfc":{"name":"init(string:)","parent_name":"Input"},"Structs/EmbeddingsQuery/Input.html#/s:6OpenAI15EmbeddingsQueryV5InputO10stringListAESaySSG_tcfc":{"name":"init(stringList:)","parent_name":"Input"},"Structs/EmbeddingsQuery/Input.html#/s:6OpenAI15EmbeddingsQueryV5InputO7intListAESaySiG_tcfc":{"name":"init(intLi
st:)","parent_name":"Input"},"Structs/EmbeddingsQuery/Input.html#/s:6OpenAI15EmbeddingsQueryV5InputO9intMatrixAESaySaySiGG_tcfc":{"name":"init(intMatrix:)","parent_name":"Input"},"Structs/EmbeddingsQuery.html#/s:6OpenAI15EmbeddingsQueryV5inputAC5InputOvp":{"name":"input","abstract":"

Input text to embed, encoded as a string or array of tokens. To embed multiple inputs in a single request, pass an array of strings or array of token arrays. The input must not exceed the max input tokens for the model (8192 tokens for text-embedding-ada-002), cannot be an empty string, and any array must be 2048 dimensions or less.

","parent_name":"EmbeddingsQuery"},"Structs/EmbeddingsQuery.html#/s:6OpenAI15EmbeddingsQueryV5modelSSvp":{"name":"model","abstract":"

ID of the model to use. You can use the List models API to see all of your available models, or see our Model overview for descriptions of them.","parent_name":"EmbeddingsQuery"},"Structs/EmbeddingsQuery.html#/s:6OpenAI15EmbeddingsQueryV14encodingFormatAC08EncodingF0OSgvp":{"name":"encodingFormat","abstract":"

The format to return the embeddings in. Can be either float or base64.","parent_name":"EmbeddingsQuery"},"Structs/EmbeddingsQuery.html#/s:6OpenAI15EmbeddingsQueryV4userSSSgvp":{"name":"user","abstract":"

A unique identifier representing your end-user, which can help OpenAI to monitor and detect abuse.","parent_name":"EmbeddingsQuery"},"Structs/EmbeddingsQuery.html#/s:6OpenAI15EmbeddingsQueryV5input5model14encodingFormat4userA2C5InputO_SSAC08EncodingH0OSgSSSgtcfc":{"name":"init(input:model:encodingFormat:user:)","parent_name":"EmbeddingsQuery"},"Structs/EmbeddingsQuery/Input.html":{"name":"Input","parent_name":"EmbeddingsQuery"},"Structs/EmbeddingsQuery/EncodingFormat.html":{"name":"EncodingFormat","parent_name":"EmbeddingsQuery"},"Structs/EmbeddingsQuery/CodingKeys.html":{"name":"CodingKeys","parent_name":"EmbeddingsQuery"},"Structs/EditsResult/Usage.html#/s:6OpenAI11EditsResultV5UsageV12promptTokensSivp":{"name":"promptTokens","parent_name":"Usage"},"Structs/EditsResult/Usage.html#/s:6OpenAI11EditsResultV5UsageV16completionTokensSivp":{"name":"completionTokens","parent_name":"Usage"},"Structs/EditsResult/Usage.html#/s:6OpenAI11EditsResultV5UsageV11totalTokensSivp":{"name":"totalTokens","parent_name":"Usage"},"Structs/EditsResult/Choice.html#/s:6OpenAI11EditsResultV6ChoiceV4textSSvp":{"name":"text","parent_name":"Choice"},"Structs/EditsResult/Choice.html":{"name":"Choice","parent_name":"EditsResult"},"Structs/EditsResult/Usage.html":{"name":"Usage","parent_name":"EditsResult"},"Structs/EditsResult.html#/s:6OpenAI11EditsResultV6objectSSvp":{"name":"object","parent_name":"EditsResult"},"Structs/EditsResult.html#/s:6OpenAI11EditsResultV7createdSdvp":{"name":"created","parent_name":"EditsResult"},"Structs/EditsResult.html#/s:6OpenAI11EditsResultV7choicesSayAC6ChoiceVGvp":{"name":"choices","parent_name":"EditsResult"},"Structs/EditsResult.html#/s:6OpenAI11EditsResultV5usageAC5UsageVvp":{"name":"usage","parent_name":"EditsResult"},"Structs/EditsQuery.html#/s:6OpenAI10EditsQueryV5modelSSvp":{"name":"model","abstract":"

ID of the model to use.

","parent_name":"EditsQuery"},"Structs/EditsQuery.html#/s:6OpenAI10EditsQueryV5inputSSSgvp":{"name":"input","abstract":"

Input text to get embeddings for.

","parent_name":"EditsQuery"},"Structs/EditsQuery.html#/s:6OpenAI10EditsQueryV11instructionSSvp":{"name":"instruction","abstract":"

The instruction that tells the model how to edit the prompt.

","parent_name":"EditsQuery"},"Structs/EditsQuery.html#/s:6OpenAI10EditsQueryV1nSiSgvp":{"name":"n","abstract":"

The number of images to generate. Must be between 1 and 10.

","parent_name":"EditsQuery"},"Structs/EditsQuery.html#/s:6OpenAI10EditsQueryV11temperatureSdSgvp":{"name":"temperature","abstract":"

What sampling temperature to use. Higher values means the model will take more risks. Try 0.9 for more creative applications, and 0 (argmax sampling) for ones with a well-defined answer.

","parent_name":"EditsQuery"},"Structs/EditsQuery.html#/s:6OpenAI10EditsQueryV4topPSdSgvp":{"name":"topP","abstract":"

An alternative to sampling with temperature, called nucleus sampling, where the model considers the results of the tokens with top_p probability mass. So 0.1 means only the tokens comprising the top 10% probability mass are considered.

","parent_name":"EditsQuery"},"Structs/EditsQuery.html#/s:6OpenAI10EditsQueryV5model5input11instruction1n11temperature4topPACSS_SSSgSSSiSgSdSgALtcfc":{"name":"init(model:input:instruction:n:temperature:topP:)","parent_name":"EditsQuery"},"Structs/CompletionsResult/Choice.html#/s:6OpenAI17CompletionsResultV6ChoiceV4textSSvp":{"name":"text","parent_name":"Choice"},"Structs/CompletionsResult/Choice.html#/s:6OpenAI17CompletionsResultV6ChoiceV12finishReasonSSSgvp":{"name":"finishReason","parent_name":"Choice"},"Structs/CompletionsResult/Usage.html#/s:6OpenAI17CompletionsResultV5UsageV12promptTokensSivp":{"name":"promptTokens","parent_name":"Usage"},"Structs/CompletionsResult/Usage.html#/s:6OpenAI17CompletionsResultV5UsageV16completionTokensSivp":{"name":"completionTokens","parent_name":"Usage"},"Structs/CompletionsResult/Usage.html#/s:6OpenAI17CompletionsResultV5UsageV11totalTokensSivp":{"name":"totalTokens","parent_name":"Usage"},"Structs/CompletionsResult/Usage.html":{"name":"Usage","parent_name":"CompletionsResult"},"Structs/CompletionsResult/Choice.html":{"name":"Choice","parent_name":"CompletionsResult"},"Structs/CompletionsResult.html#/s:6OpenAI17CompletionsResultV2idSSvp":{"name":"id","parent_name":"CompletionsResult"},"Structs/CompletionsResult.html#/s:6OpenAI17CompletionsResultV6objectSSvp":{"name":"object","parent_name":"CompletionsResult"},"Structs/CompletionsResult.html#/s:6OpenAI17CompletionsResultV7createdSdvp":{"name":"created","parent_name":"CompletionsResult"},"Structs/CompletionsResult.html#/s:6OpenAI17CompletionsResultV5modelSSvp":{"name":"model","parent_name":"CompletionsResult"},"Structs/CompletionsResult.html#/s:6OpenAI17CompletionsResultV7choicesSayAC6ChoiceVGvp":{"name":"choices","parent_name":"CompletionsResult"},"Structs/CompletionsResult.html#/s:6OpenAI17CompletionsResultV5usageAC5UsageVSgvp":{"name":"usage","parent_name":"CompletionsResult"},"Structs/CompletionsQuery.html#/s:6OpenAI16CompletionsQueryV5modelSSvp":{"name":"model","abstract":"

ID of the model to use.

","parent_name":"CompletionsQuery"},"Structs/CompletionsQuery.html#/s:6OpenAI16CompletionsQueryV6promptSSvp":{"name":"prompt","abstract":"

The prompt(s) to generate completions for, encoded as a string, array of strings, array of tokens, or array of token arrays.

","parent_name":"CompletionsQuery"},"Structs/CompletionsQuery.html#/s:6OpenAI16CompletionsQueryV11temperatureSdSgvp":{"name":"temperature","abstract":"

What sampling temperature to use. Higher values means the model will take more risks. Try 0.9 for more creative applications, and 0 (argmax sampling) for ones with a well-defined answer.

","parent_name":"CompletionsQuery"},"Structs/CompletionsQuery.html#/s:6OpenAI16CompletionsQueryV9maxTokensSiSgvp":{"name":"maxTokens","abstract":"

The maximum number of tokens to generate in the completion.

","parent_name":"CompletionsQuery"},"Structs/CompletionsQuery.html#/s:6OpenAI16CompletionsQueryV4topPSdSgvp":{"name":"topP","abstract":"

An alternative to sampling with temperature, called nucleus sampling, where the model considers the results of the tokens with top_p probability mass. So 0.1 means only the tokens comprising the top 10% probability mass are considered.

","parent_name":"CompletionsQuery"},"Structs/CompletionsQuery.html#/s:6OpenAI16CompletionsQueryV16frequencyPenaltySdSgvp":{"name":"frequencyPenalty","abstract":"

Number between -2.0 and 2.0. Positive values penalize new tokens based on their existing frequency in the text so far, decreasing the model’s likelihood to repeat the same line verbatim.

","parent_name":"CompletionsQuery"},"Structs/CompletionsQuery.html#/s:6OpenAI16CompletionsQueryV15presencePenaltySdSgvp":{"name":"presencePenalty","abstract":"

Number between -2.0 and 2.0. Positive values penalize new tokens based on whether they appear in the text so far, increasing the model’s likelihood to talk about new topics.

","parent_name":"CompletionsQuery"},"Structs/CompletionsQuery.html#/s:6OpenAI16CompletionsQueryV4stopSaySSGSgvp":{"name":"stop","abstract":"

Up to 4 sequences where the API will stop generating further tokens. The returned text will not contain the stop sequence.

","parent_name":"CompletionsQuery"},"Structs/CompletionsQuery.html#/s:6OpenAI16CompletionsQueryV4userSSSgvp":{"name":"user","abstract":"

A unique identifier representing your end-user, which can help OpenAI to monitor and detect abuse.

","parent_name":"CompletionsQuery"},"Structs/CompletionsQuery.html#/s:6OpenAI16CompletionsQueryV5model6prompt11temperature9maxTokens4topP16frequencyPenalty08presenceL04stop4userACSS_SSSdSgSiSgA3MSaySSGSgSSSgtcfc":{"name":"init(model:prompt:temperature:maxTokens:topP:frequencyPenalty:presencePenalty:stop:user:)","parent_name":"CompletionsQuery"},"Structs/ChatStreamResult/CodingKeys.html#/s:6OpenAI16ChatStreamResultV10CodingKeysO2idyA2EmF":{"name":"id","parent_name":"CodingKeys"},"Structs/ChatStreamResult/CodingKeys.html#/s:6OpenAI16ChatStreamResultV10CodingKeysO6objectyA2EmF":{"name":"object","parent_name":"CodingKeys"},"Structs/ChatStreamResult/CodingKeys.html#/s:6OpenAI16ChatStreamResultV10CodingKeysO7createdyA2EmF":{"name":"created","parent_name":"CodingKeys"},"Structs/ChatStreamResult/CodingKeys.html#/s:6OpenAI16ChatStreamResultV10CodingKeysO5modelyA2EmF":{"name":"model","parent_name":"CodingKeys"},"Structs/ChatStreamResult/CodingKeys.html#/s:6OpenAI16ChatStreamResultV10CodingKeysO7choicesyA2EmF":{"name":"choices","parent_name":"CodingKeys"},"Structs/ChatStreamResult/CodingKeys.html#/s:6OpenAI16ChatStreamResultV10CodingKeysO17systemFingerprintyA2EmF":{"name":"systemFingerprint","parent_name":"CodingKeys"},"Structs/ChatStreamResult/Choice/CodingKeys.html#/s:6OpenAI16ChatStreamResultV6ChoiceV10CodingKeysO5deltayA2GmF":{"name":"delta","parent_name":"CodingKeys"},"Structs/ChatStreamResult/Choice/CodingKeys.html#/s:6OpenAI16ChatStreamResultV6ChoiceV10CodingKeysO12finishReasonyA2GmF":{"name":"finishReason","parent_name":"CodingKeys"},"Structs/ChatStreamResult/Choice/CodingKeys.html#/s:6OpenAI16ChatStreamResultV6ChoiceV10CodingKeysO8logprobsyA2GmF":{"name":"logprobs","parent_name":"CodingKeys"},"Structs/ChatStreamResult/Choice/ChoiceLogprobs/ChatCompletionTokenLogprob/CodingKeys.html#/s:6OpenAI16ChatStreamResultV6ChoiceV0F8LogprobsV0C22CompletionTokenLogprobV10CodingKeysO5tokenyA2KmF":{"name":"token","parent_name":"CodingKeys"},"Structs/ChatStreamResult/Choice/ChoiceLog
probs/ChatCompletionTokenLogprob/CodingKeys.html#/s:6OpenAI16ChatStreamResultV6ChoiceV0F8LogprobsV0C22CompletionTokenLogprobV10CodingKeysO5bytesyA2KmF":{"name":"bytes","parent_name":"CodingKeys"},"Structs/ChatStreamResult/Choice/ChoiceLogprobs/ChatCompletionTokenLogprob/CodingKeys.html#/s:6OpenAI16ChatStreamResultV6ChoiceV0F8LogprobsV0C22CompletionTokenLogprobV10CodingKeysO7logprobyA2KmF":{"name":"logprob","parent_name":"CodingKeys"},"Structs/ChatStreamResult/Choice/ChoiceLogprobs/ChatCompletionTokenLogprob/CodingKeys.html#/s:6OpenAI16ChatStreamResultV6ChoiceV0F8LogprobsV0C22CompletionTokenLogprobV10CodingKeysO03topG0yA2KmF":{"name":"topLogprobs","parent_name":"CodingKeys"},"Structs/ChatStreamResult/Choice/ChoiceLogprobs/ChatCompletionTokenLogprob/TopLogprob.html#/s:6OpenAI16ChatStreamResultV6ChoiceV0F8LogprobsV0C22CompletionTokenLogprobV03TopJ0V5tokenSSvp":{"name":"token","abstract":"

The token.

","parent_name":"TopLogprob"},"Structs/ChatStreamResult/Choice/ChoiceLogprobs/ChatCompletionTokenLogprob/TopLogprob.html#/s:6OpenAI16ChatStreamResultV6ChoiceV0F8LogprobsV0C22CompletionTokenLogprobV03TopJ0V5bytesSaySiGSgvp":{"name":"bytes","abstract":"

A list of integers representing the UTF-8 bytes representation of the token. Useful in instances where characters are represented by multiple tokens and their byte representations must be combined to generate the correct text representation. Can be null if there is no bytes representation for the token.

","parent_name":"TopLogprob"},"Structs/ChatStreamResult/Choice/ChoiceLogprobs/ChatCompletionTokenLogprob/TopLogprob.html#/s:6OpenAI16ChatStreamResultV6ChoiceV0F8LogprobsV0C22CompletionTokenLogprobV03TopJ0V7logprobSdvp":{"name":"logprob","abstract":"

The log probability of this token.

","parent_name":"TopLogprob"},"Structs/ChatStreamResult/Choice/ChoiceLogprobs/ChatCompletionTokenLogprob.html#/s:6OpenAI16ChatStreamResultV6ChoiceV0F8LogprobsV0C22CompletionTokenLogprobV5tokenSSvp":{"name":"token","abstract":"

The token.

","parent_name":"ChatCompletionTokenLogprob"},"Structs/ChatStreamResult/Choice/ChoiceLogprobs/ChatCompletionTokenLogprob.html#/s:6OpenAI16ChatStreamResultV6ChoiceV0F8LogprobsV0C22CompletionTokenLogprobV5bytesSaySiGSgvp":{"name":"bytes","abstract":"

A list of integers representing the UTF-8 bytes representation of the token. Useful in instances where characters are represented by multiple tokens and their byte representations must be combined to generate the correct text representation. Can be null if there is no bytes representation for the token.

","parent_name":"ChatCompletionTokenLogprob"},"Structs/ChatStreamResult/Choice/ChoiceLogprobs/ChatCompletionTokenLogprob.html#/s:6OpenAI16ChatStreamResultV6ChoiceV0F8LogprobsV0C22CompletionTokenLogprobV7logprobSdvp":{"name":"logprob","abstract":"

The log probability of this token.

","parent_name":"ChatCompletionTokenLogprob"},"Structs/ChatStreamResult/Choice/ChoiceLogprobs/ChatCompletionTokenLogprob.html#/s:6OpenAI16ChatStreamResultV6ChoiceV0F8LogprobsV0C22CompletionTokenLogprobV03topG0SayAI03TopJ0VGSgvp":{"name":"topLogprobs","abstract":"

List of the most likely tokens and their log probability, at this token position. In rare cases, there may be fewer than the number of requested top_logprobs returned.

","parent_name":"ChatCompletionTokenLogprob"},"Structs/ChatStreamResult/Choice/ChoiceLogprobs/ChatCompletionTokenLogprob/TopLogprob.html":{"name":"TopLogprob","parent_name":"ChatCompletionTokenLogprob"},"Structs/ChatStreamResult/Choice/ChoiceLogprobs/ChatCompletionTokenLogprob/CodingKeys.html":{"name":"CodingKeys","parent_name":"ChatCompletionTokenLogprob"},"Structs/ChatStreamResult/Choice/ChoiceLogprobs.html#/s:6OpenAI16ChatStreamResultV6ChoiceV0F8LogprobsV7contentSayAG0C22CompletionTokenLogprobVGSgvp":{"name":"content","abstract":"

A list of message content tokens with log probability information.

","parent_name":"ChoiceLogprobs"},"Structs/ChatStreamResult/Choice/ChoiceLogprobs/ChatCompletionTokenLogprob.html":{"name":"ChatCompletionTokenLogprob","parent_name":"ChoiceLogprobs"},"Structs/ChatStreamResult/Choice/ChoiceDelta/CodingKeys.html#/s:6OpenAI16ChatStreamResultV6ChoiceV0F5DeltaV10CodingKeysO7contentyA2ImF":{"name":"content","parent_name":"CodingKeys"},"Structs/ChatStreamResult/Choice/ChoiceDelta/CodingKeys.html#/s:6OpenAI16ChatStreamResultV6ChoiceV0F5DeltaV10CodingKeysO4roleyA2ImF":{"name":"role","parent_name":"CodingKeys"},"Structs/ChatStreamResult/Choice/ChoiceDelta/CodingKeys.html#/s:6OpenAI16ChatStreamResultV6ChoiceV0F5DeltaV10CodingKeysO9toolCallsyA2ImF":{"name":"toolCalls","parent_name":"CodingKeys"},"Structs/ChatStreamResult/Choice/ChoiceDelta/ChoiceDeltaToolCall/ChoiceDeltaToolCallFunction.html#/s:6OpenAI16ChatStreamResultV6ChoiceV0F5DeltaV0fG8ToolCallV0fghI8FunctionV9argumentsSSSgvp":{"name":"arguments","abstract":"

The arguments to call the function with, as generated by the model in JSON format. Note that the model does not always generate valid JSON, and may hallucinate parameters not defined by your function schema. Validate the arguments in your code before calling your function.

","parent_name":"ChoiceDeltaToolCallFunction"},"Structs/ChatStreamResult/Choice/ChoiceDelta/ChoiceDeltaToolCall/ChoiceDeltaToolCallFunction.html#/s:6OpenAI16ChatStreamResultV6ChoiceV0F5DeltaV0fG8ToolCallV0fghI8FunctionV4nameSSSgvp":{"name":"name","abstract":"

The name of the function to call.

","parent_name":"ChoiceDeltaToolCallFunction"},"Structs/ChatStreamResult/Choice/ChoiceDelta/ChoiceDeltaToolCall/ChoiceDeltaToolCallFunction.html#/s:6OpenAI16ChatStreamResultV6ChoiceV0F5DeltaV0fG8ToolCallV0fghI8FunctionV9arguments4nameAKSSSg_ANtcfc":{"name":"init(arguments:name:)","parent_name":"ChoiceDeltaToolCallFunction"},"Structs/ChatStreamResult/Choice/ChoiceDelta/ChoiceDeltaToolCall.html#/s:6OpenAI16ChatStreamResultV6ChoiceV0F5DeltaV0fG8ToolCallV2idSSSgvp":{"name":"id","abstract":"

The ID of the tool call.

","parent_name":"ChoiceDeltaToolCall"},"Structs/ChatStreamResult/Choice/ChoiceDelta/ChoiceDeltaToolCall.html#/s:6OpenAI16ChatStreamResultV6ChoiceV0F5DeltaV0fG8ToolCallV8functionAI0fghI8FunctionVSgvp":{"name":"function","abstract":"

The function that the model called.

","parent_name":"ChoiceDeltaToolCall"},"Structs/ChatStreamResult/Choice/ChoiceDelta/ChoiceDeltaToolCall.html#/s:6OpenAI16ChatStreamResultV6ChoiceV0F5DeltaV0fG8ToolCallV4typeSSSgvp":{"name":"type","abstract":"

The type of the tool. Currently, only function is supported.

","parent_name":"ChoiceDeltaToolCall"},"Structs/ChatStreamResult/Choice/ChoiceDelta/ChoiceDeltaToolCall.html#/s:6OpenAI16ChatStreamResultV6ChoiceV0F5DeltaV0fG8ToolCallV5index2id8functionAISi_SSSgAI0fghI8FunctionVSgtcfc":{"name":"init(index:id:function:)","parent_name":"ChoiceDeltaToolCall"},"Structs/ChatStreamResult/Choice/ChoiceDelta/ChoiceDeltaToolCall/ChoiceDeltaToolCallFunction.html":{"name":"ChoiceDeltaToolCallFunction","parent_name":"ChoiceDeltaToolCall"},"Structs/ChatStreamResult/Choice/ChoiceDelta.html#/s:6OpenAI16ChatStreamResultV6ChoiceV0F5DeltaV4Rolea":{"name":"Role","parent_name":"ChoiceDelta"},"Structs/ChatStreamResult/Choice/ChoiceDelta.html#/s:6OpenAI16ChatStreamResultV6ChoiceV0F5DeltaV7contentSSSgvp":{"name":"content","abstract":"

The contents of the chunk message.

","parent_name":"ChoiceDelta"},"Structs/ChatStreamResult/Choice/ChoiceDelta.html#/s:6OpenAI16ChatStreamResultV6ChoiceV0F5DeltaV4roleAA0C5QueryV0C22CompletionMessageParamO4RoleOSgvp":{"name":"role","abstract":"

The role of the author of this message.

","parent_name":"ChoiceDelta"},"Structs/ChatStreamResult/Choice/ChoiceDelta.html#/s:6OpenAI16ChatStreamResultV6ChoiceV0F5DeltaV9toolCallsSayAG0fG8ToolCallVGSgvp":{"name":"toolCalls","parent_name":"ChoiceDelta"},"Structs/ChatStreamResult/Choice/ChoiceDelta/ChoiceDeltaToolCall.html":{"name":"ChoiceDeltaToolCall","parent_name":"ChoiceDelta"},"Structs/ChatStreamResult/Choice/ChoiceDelta/CodingKeys.html":{"name":"CodingKeys","parent_name":"ChoiceDelta"},"Structs/ChatStreamResult/Choice.html#/s:6OpenAI16ChatStreamResultV6ChoiceV12FinishReasona":{"name":"FinishReason","parent_name":"Choice"},"Structs/ChatStreamResult/Choice/ChoiceDelta.html":{"name":"ChoiceDelta","parent_name":"Choice"},"Structs/ChatStreamResult/Choice.html#/s:6OpenAI16ChatStreamResultV6ChoiceV5deltaAE0F5DeltaVvp":{"name":"delta","abstract":"

A chat completion delta generated by streamed model responses.

","parent_name":"Choice"},"Structs/ChatStreamResult/Choice.html#/s:6OpenAI16ChatStreamResultV6ChoiceV12finishReasonAA0cE0VADV06FinishH0OSgvp":{"name":"finishReason","abstract":"

The reason the model stopped generating tokens.","parent_name":"Choice"},"Structs/ChatStreamResult/Choice.html#/s:6OpenAI16ChatStreamResultV6ChoiceV8logprobsAE0F8LogprobsVSgvp":{"name":"logprobs","abstract":"

Log probability information for the choice.

","parent_name":"Choice"},"Structs/ChatStreamResult/Choice/ChoiceLogprobs.html":{"name":"ChoiceLogprobs","parent_name":"Choice"},"Structs/ChatStreamResult/Choice/CodingKeys.html":{"name":"CodingKeys","parent_name":"Choice"},"Structs/ChatStreamResult/Choice.html":{"name":"Choice","parent_name":"ChatStreamResult"},"Structs/ChatStreamResult.html#/s:6OpenAI16ChatStreamResultV2idSSvp":{"name":"id","abstract":"

A unique identifier for the chat completion. Each chunk has the same ID.

","parent_name":"ChatStreamResult"},"Structs/ChatStreamResult.html#/s:6OpenAI16ChatStreamResultV6objectSSvp":{"name":"object","abstract":"

The object type, which is always chat.completion.chunk.

","parent_name":"ChatStreamResult"},"Structs/ChatStreamResult.html#/s:6OpenAI16ChatStreamResultV7createdSdvp":{"name":"created","abstract":"

The Unix timestamp (in seconds) of when the chat completion was created.","parent_name":"ChatStreamResult"},"Structs/ChatStreamResult.html#/s:6OpenAI16ChatStreamResultV5modelSSvp":{"name":"model","abstract":"

The model to generate the completion.

","parent_name":"ChatStreamResult"},"Structs/ChatStreamResult.html#/s:6OpenAI16ChatStreamResultV7choicesSayAC6ChoiceVGvp":{"name":"choices","abstract":"

A list of chat completion choices.","parent_name":"ChatStreamResult"},"Structs/ChatStreamResult.html#/s:6OpenAI16ChatStreamResultV17systemFingerprintSSSgvp":{"name":"systemFingerprint","abstract":"

This fingerprint represents the backend configuration that the model runs with. Can be used in conjunction with the seed request parameter to understand when backend changes have been made that might impact determinism.

","parent_name":"ChatStreamResult"},"Structs/ChatStreamResult/CodingKeys.html":{"name":"CodingKeys","parent_name":"ChatStreamResult"},"Structs/ChatResult/CodingKeys.html#/s:6OpenAI10ChatResultV10CodingKeysO2idyA2EmF":{"name":"id","parent_name":"CodingKeys"},"Structs/ChatResult/CodingKeys.html#/s:6OpenAI10ChatResultV10CodingKeysO6objectyA2EmF":{"name":"object","parent_name":"CodingKeys"},"Structs/ChatResult/CodingKeys.html#/s:6OpenAI10ChatResultV10CodingKeysO7createdyA2EmF":{"name":"created","parent_name":"CodingKeys"},"Structs/ChatResult/CodingKeys.html#/s:6OpenAI10ChatResultV10CodingKeysO5modelyA2EmF":{"name":"model","parent_name":"CodingKeys"},"Structs/ChatResult/CodingKeys.html#/s:6OpenAI10ChatResultV10CodingKeysO7choicesyA2EmF":{"name":"choices","parent_name":"CodingKeys"},"Structs/ChatResult/CodingKeys.html#/s:6OpenAI10ChatResultV10CodingKeysO5usageyA2EmF":{"name":"usage","parent_name":"CodingKeys"},"Structs/ChatResult/CodingKeys.html#/s:6OpenAI10ChatResultV10CodingKeysO17systemFingerprintyA2EmF":{"name":"systemFingerprint","parent_name":"CodingKeys"},"Structs/ChatResult/CompletionUsage.html#/s:6OpenAI10ChatResultV15CompletionUsageV16completionTokensSivp":{"name":"completionTokens","abstract":"

Number of tokens in the generated completion.

","parent_name":"CompletionUsage"},"Structs/ChatResult/CompletionUsage.html#/s:6OpenAI10ChatResultV15CompletionUsageV12promptTokensSivp":{"name":"promptTokens","abstract":"

Number of tokens in the prompt.

","parent_name":"CompletionUsage"},"Structs/ChatResult/CompletionUsage.html#/s:6OpenAI10ChatResultV15CompletionUsageV11totalTokensSivp":{"name":"totalTokens","abstract":"

Total number of tokens used in the request (prompt + completion).

","parent_name":"CompletionUsage"},"Structs/ChatResult/Choice/FinishReason.html#/s:6OpenAI10ChatResultV6ChoiceV12FinishReasonO4stopyA2GmF":{"name":"stop","parent_name":"FinishReason"},"Structs/ChatResult/Choice/FinishReason.html#/s:6OpenAI10ChatResultV6ChoiceV12FinishReasonO6lengthyA2GmF":{"name":"length","parent_name":"FinishReason"},"Structs/ChatResult/Choice/FinishReason.html#/s:6OpenAI10ChatResultV6ChoiceV12FinishReasonO9toolCallsyA2GmF":{"name":"toolCalls","parent_name":"FinishReason"},"Structs/ChatResult/Choice/FinishReason.html#/s:6OpenAI10ChatResultV6ChoiceV12FinishReasonO13contentFilteryA2GmF":{"name":"contentFilter","parent_name":"FinishReason"},"Structs/ChatResult/Choice/FinishReason.html#/s:6OpenAI10ChatResultV6ChoiceV12FinishReasonO12functionCallyA2GmF":{"name":"functionCall","parent_name":"FinishReason"},"Structs/ChatResult/Choice/CodingKeys.html#/s:6OpenAI10ChatResultV6ChoiceV10CodingKeysO8logprobsyA2GmF":{"name":"logprobs","parent_name":"CodingKeys"},"Structs/ChatResult/Choice/CodingKeys.html#/s:6OpenAI10ChatResultV6ChoiceV10CodingKeysO7messageyA2GmF":{"name":"message","parent_name":"CodingKeys"},"Structs/ChatResult/Choice/CodingKeys.html#/s:6OpenAI10ChatResultV6ChoiceV10CodingKeysO12finishReasonyA2GmF":{"name":"finishReason","parent_name":"CodingKeys"},"Structs/ChatResult/Choice/ChoiceLogprobs/ChatCompletionTokenLogprob/CodingKeys.html#/s:6OpenAI10ChatResultV6ChoiceV0E8LogprobsV0C22CompletionTokenLogprobV10CodingKeysO5tokenyA2KmF":{"name":"token","parent_name":"CodingKeys"},"Structs/ChatResult/Choice/ChoiceLogprobs/ChatCompletionTokenLogprob/CodingKeys.html#/s:6OpenAI10ChatResultV6ChoiceV0E8LogprobsV0C22CompletionTokenLogprobV10CodingKeysO5bytesyA2KmF":{"name":"bytes","parent_name":"CodingKeys"},"Structs/ChatResult/Choice/ChoiceLogprobs/ChatCompletionTokenLogprob/CodingKeys.html#/s:6OpenAI10ChatResultV6ChoiceV0E8LogprobsV0C22CompletionTokenLogprobV10CodingKeysO7logprobyA2KmF":{"name":"logprob","parent_name":"CodingKeys"},"Structs/ChatResult/Choice/C
hoiceLogprobs/ChatCompletionTokenLogprob/CodingKeys.html#/s:6OpenAI10ChatResultV6ChoiceV0E8LogprobsV0C22CompletionTokenLogprobV10CodingKeysO03topF0yA2KmF":{"name":"topLogprobs","parent_name":"CodingKeys"},"Structs/ChatResult/Choice/ChoiceLogprobs/ChatCompletionTokenLogprob/TopLogprob.html#/s:6OpenAI10ChatResultV6ChoiceV0E8LogprobsV0C22CompletionTokenLogprobV03TopI0V5tokenSSvp":{"name":"token","abstract":"

The token.

","parent_name":"TopLogprob"},"Structs/ChatResult/Choice/ChoiceLogprobs/ChatCompletionTokenLogprob/TopLogprob.html#/s:6OpenAI10ChatResultV6ChoiceV0E8LogprobsV0C22CompletionTokenLogprobV03TopI0V5bytesSaySiGSgvp":{"name":"bytes","abstract":"

A list of integers representing the UTF-8 bytes representation of the token.","parent_name":"TopLogprob"},"Structs/ChatResult/Choice/ChoiceLogprobs/ChatCompletionTokenLogprob/TopLogprob.html#/s:6OpenAI10ChatResultV6ChoiceV0E8LogprobsV0C22CompletionTokenLogprobV03TopI0V7logprobSdvp":{"name":"logprob","abstract":"

The log probability of this token.

","parent_name":"TopLogprob"},"Structs/ChatResult/Choice/ChoiceLogprobs/ChatCompletionTokenLogprob.html#/s:6OpenAI10ChatResultV6ChoiceV0E8LogprobsV0C22CompletionTokenLogprobV5tokenSSvp":{"name":"token","abstract":"

The token.

","parent_name":"ChatCompletionTokenLogprob"},"Structs/ChatResult/Choice/ChoiceLogprobs/ChatCompletionTokenLogprob.html#/s:6OpenAI10ChatResultV6ChoiceV0E8LogprobsV0C22CompletionTokenLogprobV5bytesSaySiGSgvp":{"name":"bytes","abstract":"

A list of integers representing the UTF-8 bytes representation of the token.","parent_name":"ChatCompletionTokenLogprob"},"Structs/ChatResult/Choice/ChoiceLogprobs/ChatCompletionTokenLogprob.html#/s:6OpenAI10ChatResultV6ChoiceV0E8LogprobsV0C22CompletionTokenLogprobV7logprobSdvp":{"name":"logprob","abstract":"

The log probability of this token.

","parent_name":"ChatCompletionTokenLogprob"},"Structs/ChatResult/Choice/ChoiceLogprobs/ChatCompletionTokenLogprob.html#/s:6OpenAI10ChatResultV6ChoiceV0E8LogprobsV0C22CompletionTokenLogprobV03topF0SayAI03TopI0VGvp":{"name":"topLogprobs","abstract":"

List of the most likely tokens and their log probability, at this token position.","parent_name":"ChatCompletionTokenLogprob"},"Structs/ChatResult/Choice/ChoiceLogprobs/ChatCompletionTokenLogprob/TopLogprob.html":{"name":"TopLogprob","parent_name":"ChatCompletionTokenLogprob"},"Structs/ChatResult/Choice/ChoiceLogprobs/ChatCompletionTokenLogprob/CodingKeys.html":{"name":"CodingKeys","parent_name":"ChatCompletionTokenLogprob"},"Structs/ChatResult/Choice/ChoiceLogprobs.html#/s:6OpenAI10ChatResultV6ChoiceV0E8LogprobsV7contentSayAG0C22CompletionTokenLogprobVGSgvp":{"name":"content","parent_name":"ChoiceLogprobs"},"Structs/ChatResult/Choice/ChoiceLogprobs/ChatCompletionTokenLogprob.html":{"name":"ChatCompletionTokenLogprob","parent_name":"ChoiceLogprobs"},"Structs/ChatResult/Choice.html#/s:6OpenAI10ChatResultV6ChoiceV0C17CompletionMessagea":{"name":"ChatCompletionMessage","parent_name":"Choice"},"Structs/ChatResult/Choice.html#/s:6OpenAI10ChatResultV6ChoiceV8logprobsAE0E8LogprobsVSgvp":{"name":"logprobs","abstract":"

Log probability information for the choice.

","parent_name":"Choice"},"Structs/ChatResult/Choice.html#/s:6OpenAI10ChatResultV6ChoiceV7messageAA0C5QueryV0C22CompletionMessageParamOvp":{"name":"message","abstract":"

A chat completion message generated by the model.

","parent_name":"Choice"},"Structs/ChatResult/Choice.html#/s:6OpenAI10ChatResultV6ChoiceV12finishReasonSSSgvp":{"name":"finishReason","abstract":"

The reason the model stopped generating tokens. This will be stop if the model hit a natural stop point or a provided stop sequence, length if the maximum number of tokens specified in the request was reached, content_filter if content was omitted due to a flag from our content filters, tool_calls if the model called a tool, or function_call (deprecated) if the model called a function.

","parent_name":"Choice"},"Structs/ChatResult/Choice/ChoiceLogprobs.html":{"name":"ChoiceLogprobs","parent_name":"Choice"},"Structs/ChatResult/Choice/CodingKeys.html":{"name":"CodingKeys","parent_name":"Choice"},"Structs/ChatResult/Choice/FinishReason.html":{"name":"FinishReason","parent_name":"Choice"},"Structs/ChatResult/Choice.html":{"name":"Choice","abstract":"

mimic the choices array in the chat completion object

","parent_name":"ChatResult"},"Structs/ChatResult/CompletionUsage.html":{"name":"CompletionUsage","parent_name":"ChatResult"},"Structs/ChatResult.html#/s:6OpenAI10ChatResultV2idSSvp":{"name":"id","abstract":"

A unique identifier for the chat completion.

","parent_name":"ChatResult"},"Structs/ChatResult.html#/s:6OpenAI10ChatResultV6objectSSvp":{"name":"object","abstract":"

The object type, which is always chat.completion.

","parent_name":"ChatResult"},"Structs/ChatResult.html#/s:6OpenAI10ChatResultV7createdSdvp":{"name":"created","abstract":"

The Unix timestamp (in seconds) of when the chat completion was created.

","parent_name":"ChatResult"},"Structs/ChatResult.html#/s:6OpenAI10ChatResultV5modelSSvp":{"name":"model","abstract":"

The model used for the chat completion.

","parent_name":"ChatResult"},"Structs/ChatResult.html#/s:6OpenAI10ChatResultV7choicesSayAC6ChoiceVGvp":{"name":"choices","abstract":"

A list of chat completion choices. Can be more than one if n is greater than 1.

","parent_name":"ChatResult"},"Structs/ChatResult.html#/s:6OpenAI10ChatResultV5usageAC15CompletionUsageVSgvp":{"name":"usage","abstract":"

Usage statistics for the completion request.

","parent_name":"ChatResult"},"Structs/ChatResult.html#/s:6OpenAI10ChatResultV17systemFingerprintSSSgvp":{"name":"systemFingerprint","abstract":"

This fingerprint represents the backend configuration that the model runs with.","parent_name":"ChatResult"},"Structs/ChatResult/CodingKeys.html":{"name":"CodingKeys","parent_name":"ChatResult"},"Structs/ChatQuery/CodingKeys.html#/s:6OpenAI9ChatQueryV10CodingKeysO8messagesyA2EmF":{"name":"messages","parent_name":"CodingKeys"},"Structs/ChatQuery/CodingKeys.html#/s:6OpenAI9ChatQueryV10CodingKeysO5modelyA2EmF":{"name":"model","parent_name":"CodingKeys"},"Structs/ChatQuery/CodingKeys.html#/s:6OpenAI9ChatQueryV10CodingKeysO16frequencyPenaltyyA2EmF":{"name":"frequencyPenalty","parent_name":"CodingKeys"},"Structs/ChatQuery/CodingKeys.html#/s:6OpenAI9ChatQueryV10CodingKeysO9logitBiasyA2EmF":{"name":"logitBias","parent_name":"CodingKeys"},"Structs/ChatQuery/CodingKeys.html#/s:6OpenAI9ChatQueryV10CodingKeysO8logprobsyA2EmF":{"name":"logprobs","parent_name":"CodingKeys"},"Structs/ChatQuery/CodingKeys.html#/s:6OpenAI9ChatQueryV10CodingKeysO9maxTokensyA2EmF":{"name":"maxTokens","parent_name":"CodingKeys"},"Structs/ChatQuery/CodingKeys.html#/s:6OpenAI9ChatQueryV10CodingKeysO1nyA2EmF":{"name":"n","parent_name":"CodingKeys"},"Structs/ChatQuery/CodingKeys.html#/s:6OpenAI9ChatQueryV10CodingKeysO15presencePenaltyyA2EmF":{"name":"presencePenalty","parent_name":"CodingKeys"},"Structs/ChatQuery/CodingKeys.html#/s:6OpenAI9ChatQueryV10CodingKeysO14responseFormatyA2EmF":{"name":"responseFormat","parent_name":"CodingKeys"},"Structs/ChatQuery/CodingKeys.html#/s:6OpenAI9ChatQueryV10CodingKeysO4seedyA2EmF":{"name":"seed","parent_name":"CodingKeys"},"Structs/ChatQuery/CodingKeys.html#/s:6OpenAI9ChatQueryV10CodingKeysO4stopyA2EmF":{"name":"stop","parent_name":"CodingKeys"},"Structs/ChatQuery/CodingKeys.html#/s:6OpenAI9ChatQueryV10CodingKeysO11temperatureyA2EmF":{"name":"temperature","parent_name":"CodingKeys"},"Structs/ChatQuery/CodingKeys.html#/s:6OpenAI9ChatQueryV10CodingKeysO10toolChoiceyA2EmF":{"name":"toolChoice","parent_name":"CodingKeys"},"Structs/ChatQuery/CodingKeys.html#/s:6OpenAI9ChatQ
ueryV10CodingKeysO5toolsyA2EmF":{"name":"tools","parent_name":"CodingKeys"},"Structs/ChatQuery/CodingKeys.html#/s:6OpenAI9ChatQueryV10CodingKeysO11topLogprobsyA2EmF":{"name":"topLogprobs","parent_name":"CodingKeys"},"Structs/ChatQuery/CodingKeys.html#/s:6OpenAI9ChatQueryV10CodingKeysO4topPyA2EmF":{"name":"topP","parent_name":"CodingKeys"},"Structs/ChatQuery/CodingKeys.html#/s:6OpenAI9ChatQueryV10CodingKeysO4useryA2EmF":{"name":"user","parent_name":"CodingKeys"},"Structs/ChatQuery/CodingKeys.html#/s:6OpenAI9ChatQueryV10CodingKeysO6streamyA2EmF":{"name":"stream","parent_name":"CodingKeys"},"Structs/ChatQuery/ChatCompletionToolParam/ToolsType.html#/s:6OpenAI9ChatQueryV0C19CompletionToolParamV9ToolsTypeO8functionyA2GmF":{"name":"function","parent_name":"ToolsType"},"Structs/ChatQuery/ChatCompletionToolParam/FunctionDefinition/FunctionParameters/JSONType.html#/s:6OpenAI9ChatQueryV0C19CompletionToolParamV18FunctionDefinitionV0H10ParametersV8JSONTypeO7integeryA2KmF":{"name":"integer","parent_name":"JSONType"},"Structs/ChatQuery/ChatCompletionToolParam/FunctionDefinition/FunctionParameters/JSONType.html#/s:6OpenAI9ChatQueryV0C19CompletionToolParamV18FunctionDefinitionV0H10ParametersV8JSONTypeO6stringyA2KmF":{"name":"string","parent_name":"JSONType"},"Structs/ChatQuery/ChatCompletionToolParam/FunctionDefinition/FunctionParameters/JSONType.html#/s:6OpenAI9ChatQueryV0C19CompletionToolParamV18FunctionDefinitionV0H10ParametersV8JSONTypeO7booleanyA2KmF":{"name":"boolean","parent_name":"JSONType"},"Structs/ChatQuery/ChatCompletionToolParam/FunctionDefinition/FunctionParameters/JSONType.html#/s:6OpenAI9ChatQueryV0C19CompletionToolParamV18FunctionDefinitionV0H10ParametersV8JSONTypeO5arrayyA2KmF":{"name":"array","parent_name":"JSONType"},"Structs/ChatQuery/ChatCompletionToolParam/FunctionDefinition/FunctionParameters/JSONType.html#/s:6OpenAI9ChatQueryV0C19CompletionToolParamV18FunctionDefinitionV0H10ParametersV8JSONTypeO6objectyA2KmF":{"name":"object","parent_name":"JSONType"},"Struc
ts/ChatQuery/ChatCompletionToolParam/FunctionDefinition/FunctionParameters/JSONType.html#/s:6OpenAI9ChatQueryV0C19CompletionToolParamV18FunctionDefinitionV0H10ParametersV8JSONTypeO6numberyA2KmF":{"name":"number","parent_name":"JSONType"},"Structs/ChatQuery/ChatCompletionToolParam/FunctionDefinition/FunctionParameters/JSONType.html#/s:6OpenAI9ChatQueryV0C19CompletionToolParamV18FunctionDefinitionV0H10ParametersV8JSONTypeO4nullyA2KmF":{"name":"null","parent_name":"JSONType"},"Structs/ChatQuery/ChatCompletionToolParam/FunctionDefinition/FunctionParameters/Property/Items.html#/s:6OpenAI9ChatQueryV0C19CompletionToolParamV18FunctionDefinitionV0H10ParametersV8PropertyV5ItemsV8JSONTypea":{"name":"JSONType","parent_name":"Items"},"Structs/ChatQuery/ChatCompletionToolParam/FunctionDefinition/FunctionParameters/Property/Items.html#/s:6OpenAI9ChatQueryV0C19CompletionToolParamV18FunctionDefinitionV0H10ParametersV8PropertyV5ItemsV4typeAI8JSONTypeOvp":{"name":"type","parent_name":"Items"},"Structs/ChatQuery/ChatCompletionToolParam/FunctionDefinition/FunctionParameters/Property/Items.html#/s:6OpenAI9ChatQueryV0C19CompletionToolParamV18FunctionDefinitionV0H10ParametersV8PropertyV5ItemsV10propertiesSDySSAKGSgvp":{"name":"properties","parent_name":"Items"},"Structs/ChatQuery/ChatCompletionToolParam/FunctionDefinition/FunctionParameters/Property/Items.html#/s:6OpenAI9ChatQueryV0C19CompletionToolParamV18FunctionDefinitionV0H10ParametersV8PropertyV5ItemsV7patternSSSgvp":{"name":"pattern","parent_name":"Items"},"Structs/ChatQuery/ChatCompletionToolParam/FunctionDefinition/FunctionParameters/Property/Items.html#/s:6OpenAI9ChatQueryV0C19CompletionToolParamV18FunctionDefinitionV0H10ParametersV8PropertyV5ItemsV5constSSSgvp":{"name":"const","parent_name":"Items"},"Structs/ChatQuery/ChatCompletionToolParam/FunctionDefinition/FunctionParameters/Property/Items.html#/s:6OpenAI9ChatQueryV0C19CompletionToolParamV18FunctionDefinitionV0H10ParametersV8PropertyV5ItemsV4enumSaySSGSgvp":{"name":"enum","pa
rent_name":"Items"},"Structs/ChatQuery/ChatCompletionToolParam/FunctionDefinition/FunctionParameters/Property/Items.html#/s:6OpenAI9ChatQueryV0C19CompletionToolParamV18FunctionDefinitionV0H10ParametersV8PropertyV5ItemsV10multipleOfSiSgvp":{"name":"multipleOf","parent_name":"Items"},"Structs/ChatQuery/ChatCompletionToolParam/FunctionDefinition/FunctionParameters/Property/Items.html#/s:6OpenAI9ChatQueryV0C19CompletionToolParamV18FunctionDefinitionV0H10ParametersV8PropertyV5ItemsV7minimumSdSgvp":{"name":"minimum","parent_name":"Items"},"Structs/ChatQuery/ChatCompletionToolParam/FunctionDefinition/FunctionParameters/Property/Items.html#/s:6OpenAI9ChatQueryV0C19CompletionToolParamV18FunctionDefinitionV0H10ParametersV8PropertyV5ItemsV7maximumSdSgvp":{"name":"maximum","parent_name":"Items"},"Structs/ChatQuery/ChatCompletionToolParam/FunctionDefinition/FunctionParameters/Property/Items.html#/s:6OpenAI9ChatQueryV0C19CompletionToolParamV18FunctionDefinitionV0H10ParametersV8PropertyV5ItemsV03minL0SiSgvp":{"name":"minItems","parent_name":"Items"},"Structs/ChatQuery/ChatCompletionToolParam/FunctionDefinition/FunctionParameters/Property/Items.html#/s:6OpenAI9ChatQueryV0C19CompletionToolParamV18FunctionDefinitionV0H10ParametersV8PropertyV5ItemsV03maxL0SiSgvp":{"name":"maxItems","parent_name":"Items"},"Structs/ChatQuery/ChatCompletionToolParam/FunctionDefinition/FunctionParameters/Property/Items.html#/s:6OpenAI9ChatQueryV0C19CompletionToolParamV18FunctionDefinitionV0H10ParametersV8PropertyV5ItemsV06uniqueL0SbSgvp":{"name":"uniqueItems","parent_name":"Items"},"Structs/ChatQuery/ChatCompletionToolParam/FunctionDefinition/FunctionParameters/Property/Items.html#/s:6OpenAI9ChatQueryV0C19CompletionToolParamV18FunctionDefinitionV0H10ParametersV8PropertyV5ItemsV4type10properties7pattern5const4enum10multipleOf7minimum7maximum03minL003maxL006uniqueL0AmI8JSONTypeO_SDySSAKGSgSSSgA1_SaySSGSgSiSgSdSgA5_A4_A4_SbSgtcfc":{"name":"init(type:properties:pattern:const:enum:multipleOf:minimum:maximum:mi
nItems:maxItems:uniqueItems:)","parent_name":"Items"},"Structs/ChatQuery/ChatCompletionToolParam/FunctionDefinition/FunctionParameters/Property.html#/s:6OpenAI9ChatQueryV0C19CompletionToolParamV18FunctionDefinitionV0H10ParametersV8PropertyV8JSONTypea":{"name":"JSONType","parent_name":"Property"},"Structs/ChatQuery/ChatCompletionToolParam/FunctionDefinition/FunctionParameters/Property.html#/s:6OpenAI9ChatQueryV0C19CompletionToolParamV18FunctionDefinitionV0H10ParametersV8PropertyV4typeAI8JSONTypeOvp":{"name":"type","parent_name":"Property"},"Structs/ChatQuery/ChatCompletionToolParam/FunctionDefinition/FunctionParameters/Property.html#/s:6OpenAI9ChatQueryV0C19CompletionToolParamV18FunctionDefinitionV0H10ParametersV8PropertyV11descriptionSSSgvp":{"name":"description","parent_name":"Property"},"Structs/ChatQuery/ChatCompletionToolParam/FunctionDefinition/FunctionParameters/Property.html#/s:6OpenAI9ChatQueryV0C19CompletionToolParamV18FunctionDefinitionV0H10ParametersV8PropertyV6formatSSSgvp":{"name":"format","parent_name":"Property"},"Structs/ChatQuery/ChatCompletionToolParam/FunctionDefinition/FunctionParameters/Property.html#/s:6OpenAI9ChatQueryV0C19CompletionToolParamV18FunctionDefinitionV0H10ParametersV8PropertyV5itemsAK5ItemsVSgvp":{"name":"items","parent_name":"Property"},"Structs/ChatQuery/ChatCompletionToolParam/FunctionDefinition/FunctionParameters/Property.html#/s:6OpenAI9ChatQueryV0C19CompletionToolParamV18FunctionDefinitionV0H10ParametersV8PropertyV8requiredSaySSGSgvp":{"name":"required","parent_name":"Property"},"Structs/ChatQuery/ChatCompletionToolParam/FunctionDefinition/FunctionParameters/Property.html#/s:6OpenAI9ChatQueryV0C19CompletionToolParamV18FunctionDefinitionV0H10ParametersV8PropertyV7patternSSSgvp":{"name":"pattern","parent_name":"Property"},"Structs/ChatQuery/ChatCompletionToolParam/FunctionDefinition/FunctionParameters/Property.html#/s:6OpenAI9ChatQueryV0C19CompletionToolParamV18FunctionDefinitionV0H10ParametersV8PropertyV5constSSSgvp":{"name":"
const","parent_name":"Property"},"Structs/ChatQuery/ChatCompletionToolParam/FunctionDefinition/FunctionParameters/Property.html#/s:6OpenAI9ChatQueryV0C19CompletionToolParamV18FunctionDefinitionV0H10ParametersV8PropertyV4enumSaySSGSgvp":{"name":"enum","parent_name":"Property"},"Structs/ChatQuery/ChatCompletionToolParam/FunctionDefinition/FunctionParameters/Property.html#/s:6OpenAI9ChatQueryV0C19CompletionToolParamV18FunctionDefinitionV0H10ParametersV8PropertyV10multipleOfSiSgvp":{"name":"multipleOf","parent_name":"Property"},"Structs/ChatQuery/ChatCompletionToolParam/FunctionDefinition/FunctionParameters/Property.html#/s:6OpenAI9ChatQueryV0C19CompletionToolParamV18FunctionDefinitionV0H10ParametersV8PropertyV7minimumSdSgvp":{"name":"minimum","parent_name":"Property"},"Structs/ChatQuery/ChatCompletionToolParam/FunctionDefinition/FunctionParameters/Property.html#/s:6OpenAI9ChatQueryV0C19CompletionToolParamV18FunctionDefinitionV0H10ParametersV8PropertyV7maximumSdSgvp":{"name":"maximum","parent_name":"Property"},"Structs/ChatQuery/ChatCompletionToolParam/FunctionDefinition/FunctionParameters/Property.html#/s:6OpenAI9ChatQueryV0C19CompletionToolParamV18FunctionDefinitionV0H10ParametersV8PropertyV8minItemsSiSgvp":{"name":"minItems","parent_name":"Property"},"Structs/ChatQuery/ChatCompletionToolParam/FunctionDefinition/FunctionParameters/Property.html#/s:6OpenAI9ChatQueryV0C19CompletionToolParamV18FunctionDefinitionV0H10ParametersV8PropertyV8maxItemsSiSgvp":{"name":"maxItems","parent_name":"Property"},"Structs/ChatQuery/ChatCompletionToolParam/FunctionDefinition/FunctionParameters/Property.html#/s:6OpenAI9ChatQueryV0C19CompletionToolParamV18FunctionDefinitionV0H10ParametersV8PropertyV11uniqueItemsSbSgvp":{"name":"uniqueItems","parent_name":"Property"},"Structs/ChatQuery/ChatCompletionToolParam/FunctionDefinition/FunctionParameters/Property.html#/s:6OpenAI9ChatQueryV0C19CompletionToolParamV18FunctionDefinitionV0H10ParametersV8PropertyV4type11description6format5items8required7
pattern5const4enum10multipleOf7minimum7maximum8minItems03maxY006uniqueY0AkI8JSONTypeO_SSSgA0_AK0Y0VSgSaySSGSgA0_A0_A5_SiSgSdSgA7_A6_A6_SbSgtcfc":{"name":"init(type:description:format:items:required:pattern:const:enum:multipleOf:minimum:maximum:minItems:maxItems:uniqueItems:)","parent_name":"Property"},"Structs/ChatQuery/ChatCompletionToolParam/FunctionDefinition/FunctionParameters/Property/Items.html":{"name":"Items","parent_name":"Property"},"Structs/ChatQuery/ChatCompletionToolParam/FunctionDefinition/FunctionParameters.html#/s:6OpenAI9ChatQueryV0C19CompletionToolParamV18FunctionDefinitionV0H10ParametersV4typeAI8JSONTypeOvp":{"name":"type","parent_name":"FunctionParameters"},"Structs/ChatQuery/ChatCompletionToolParam/FunctionDefinition/FunctionParameters.html#/s:6OpenAI9ChatQueryV0C19CompletionToolParamV18FunctionDefinitionV0H10ParametersV10propertiesSDySSAI8PropertyVGSgvp":{"name":"properties","parent_name":"FunctionParameters"},"Structs/ChatQuery/ChatCompletionToolParam/FunctionDefinition/FunctionParameters.html#/s:6OpenAI9ChatQueryV0C19CompletionToolParamV18FunctionDefinitionV0H10ParametersV8requiredSaySSGSgvp":{"name":"required","parent_name":"FunctionParameters"},"Structs/ChatQuery/ChatCompletionToolParam/FunctionDefinition/FunctionParameters.html#/s:6OpenAI9ChatQueryV0C19CompletionToolParamV18FunctionDefinitionV0H10ParametersV7patternSSSgvp":{"name":"pattern","parent_name":"FunctionParameters"},"Structs/ChatQuery/ChatCompletionToolParam/FunctionDefinition/FunctionParameters.html#/s:6OpenAI9ChatQueryV0C19CompletionToolParamV18FunctionDefinitionV0H10ParametersV5constSSSgvp":{"name":"const","parent_name":"FunctionParameters"},"Structs/ChatQuery/ChatCompletionToolParam/FunctionDefinition/FunctionParameters.html#/s:6OpenAI9ChatQueryV0C19CompletionToolParamV18FunctionDefinitionV0H10ParametersV4enumSaySSGSgvp":{"name":"enum","parent_name":"FunctionParameters"},"Structs/ChatQuery/ChatCompletionToolParam/FunctionDefinition/FunctionParameters.html#/s:6OpenAI9ChatQuery
V0C19CompletionToolParamV18FunctionDefinitionV0H10ParametersV10multipleOfSiSgvp":{"name":"multipleOf","parent_name":"FunctionParameters"},"Structs/ChatQuery/ChatCompletionToolParam/FunctionDefinition/FunctionParameters.html#/s:6OpenAI9ChatQueryV0C19CompletionToolParamV18FunctionDefinitionV0H10ParametersV7minimumSiSgvp":{"name":"minimum","parent_name":"FunctionParameters"},"Structs/ChatQuery/ChatCompletionToolParam/FunctionDefinition/FunctionParameters.html#/s:6OpenAI9ChatQueryV0C19CompletionToolParamV18FunctionDefinitionV0H10ParametersV7maximumSiSgvp":{"name":"maximum","parent_name":"FunctionParameters"},"Structs/ChatQuery/ChatCompletionToolParam/FunctionDefinition/FunctionParameters.html#/s:6OpenAI9ChatQueryV0C19CompletionToolParamV18FunctionDefinitionV0H10ParametersV4type10properties8required7pattern5const4enum10multipleOf7minimum7maximumA2I8JSONTypeO_SDySSAI8PropertyVGSgSaySSGSgSSSgA_AZSiSgA0_A0_tcfc":{"name":"init(type:properties:required:pattern:const:enum:multipleOf:minimum:maximum:)","parent_name":"FunctionParameters"},"Structs/ChatQuery/ChatCompletionToolParam/FunctionDefinition/FunctionParameters/Property.html":{"name":"Property","parent_name":"FunctionParameters"},"Structs/ChatQuery/ChatCompletionToolParam/FunctionDefinition/FunctionParameters/JSONType.html":{"name":"JSONType","parent_name":"FunctionParameters"},"Structs/ChatQuery/ChatCompletionToolParam/FunctionDefinition.html#/s:6OpenAI9ChatQueryV0C19CompletionToolParamV18FunctionDefinitionV4nameSSvp":{"name":"name","abstract":"

The name of the function to be called. Must be a-z, A-Z, 0-9, or contain underscores and dashes, with a maximum length of 64.

","parent_name":"FunctionDefinition"},"Structs/ChatQuery/ChatCompletionToolParam/FunctionDefinition.html#/s:6OpenAI9ChatQueryV0C19CompletionToolParamV18FunctionDefinitionV11descriptionSSSgvp":{"name":"description","abstract":"

The description of what the function does.

","parent_name":"FunctionDefinition"},"Structs/ChatQuery/ChatCompletionToolParam/FunctionDefinition.html#/s:6OpenAI9ChatQueryV0C19CompletionToolParamV18FunctionDefinitionV10parametersAG0H10ParametersVSgvp":{"name":"parameters","abstract":"

The parameters the functions accepts, described as a JSON Schema object.","parent_name":"FunctionDefinition"},"Structs/ChatQuery/ChatCompletionToolParam/FunctionDefinition.html#/s:6OpenAI9ChatQueryV0C19CompletionToolParamV18FunctionDefinitionV4name11description10parametersAGSS_SSSgAG0H10ParametersVSgtcfc":{"name":"init(name:description:parameters:)","parent_name":"FunctionDefinition"},"Structs/ChatQuery/ChatCompletionToolParam/FunctionDefinition/FunctionParameters.html":{"name":"FunctionParameters","abstract":"

See the guide for examples, and the JSON Schema reference for documentation about the format.

","parent_name":"FunctionDefinition"},"Structs/ChatQuery/ChatCompletionToolParam.html#/s:6OpenAI9ChatQueryV0C19CompletionToolParamV8functionAE18FunctionDefinitionVvp":{"name":"function","parent_name":"ChatCompletionToolParam"},"Structs/ChatQuery/ChatCompletionToolParam.html#/s:6OpenAI9ChatQueryV0C19CompletionToolParamV4typeAE9ToolsTypeOvp":{"name":"type","parent_name":"ChatCompletionToolParam"},"Structs/ChatQuery/ChatCompletionToolParam.html#/s:6OpenAI9ChatQueryV0C19CompletionToolParamV8functionA2E18FunctionDefinitionV_tcfc":{"name":"init(function:)","parent_name":"ChatCompletionToolParam"},"Structs/ChatQuery/ChatCompletionToolParam/FunctionDefinition.html":{"name":"FunctionDefinition","parent_name":"ChatCompletionToolParam"},"Structs/ChatQuery/ChatCompletionToolParam/ToolsType.html":{"name":"ToolsType","parent_name":"ChatCompletionToolParam"},"Structs/ChatQuery/ChatCompletionFunctionCallOptionParam.html#/s:6OpenAI9ChatQueryV0C33CompletionFunctionCallOptionParamO4noneyA2EmF":{"name":"none","parent_name":"ChatCompletionFunctionCallOptionParam"},"Structs/ChatQuery/ChatCompletionFunctionCallOptionParam.html#/s:6OpenAI9ChatQueryV0C33CompletionFunctionCallOptionParamO4autoyA2EmF":{"name":"auto","parent_name":"ChatCompletionFunctionCallOptionParam"},"Structs/ChatQuery/ChatCompletionFunctionCallOptionParam.html#/s:6OpenAI9ChatQueryV0C33CompletionFunctionCallOptionParamO8functionyAESScAEmF":{"name":"function(_:)","parent_name":"ChatCompletionFunctionCallOptionParam"},"Structs/ChatQuery/ChatCompletionFunctionCallOptionParam.html#/s:SE6encode2toys7Encoder_p_tKF":{"name":"encode(to:)","parent_name":"ChatCompletionFunctionCallOptionParam"},"Structs/ChatQuery/ChatCompletionFunctionCallOptionParam.html#/s:6OpenAI9ChatQueryV0C33CompletionFunctionCallOptionParamO8functionAESS_tcfc":{"name":"init(function:)","parent_name":"ChatCompletionFunctionCallOptionParam"},"Structs/ChatQuery/ResponseFormat.html#/s:6OpenAI9ChatQueryV14ResponseFormatO10jsonObjectyA2EmF":{"name":"jsonObject","par
ent_name":"ResponseFormat"},"Structs/ChatQuery/ResponseFormat.html#/s:6OpenAI9ChatQueryV14ResponseFormatO4textyA2EmF":{"name":"text","parent_name":"ResponseFormat"},"Structs/ChatQuery/ResponseFormat.html#/s:SE6encode2toys7Encoder_p_tKF":{"name":"encode(to:)","parent_name":"ResponseFormat"},"Structs/ChatQuery/Stop.html#/s:6OpenAI9ChatQueryV4StopO6stringyAESScAEmF":{"name":"string(_:)","parent_name":"Stop"},"Structs/ChatQuery/Stop.html#/s:6OpenAI9ChatQueryV4StopO10stringListyAESaySSGcAEmF":{"name":"stringList(_:)","parent_name":"Stop"},"Structs/ChatQuery/Stop.html#/s:SE6encode2toys7Encoder_p_tKF":{"name":"encode(to:)","parent_name":"Stop"},"Structs/ChatQuery/Stop.html#/s:6OpenAI9ChatQueryV4StopO6stringAESS_tcfc":{"name":"init(string:)","parent_name":"Stop"},"Structs/ChatQuery/Stop.html#/s:6OpenAI9ChatQueryV4StopO10stringListAESaySSG_tcfc":{"name":"init(stringList:)","parent_name":"Stop"},"Structs/ChatQuery/ChatCompletionMessageParam/Role.html#/s:6OpenAI9ChatQueryV0C22CompletionMessageParamO4RoleO6systemyA2GmF":{"name":"system","parent_name":"Role"},"Structs/ChatQuery/ChatCompletionMessageParam/Role.html#/s:6OpenAI9ChatQueryV0C22CompletionMessageParamO4RoleO4useryA2GmF":{"name":"user","parent_name":"Role"},"Structs/ChatQuery/ChatCompletionMessageParam/Role.html#/s:6OpenAI9ChatQueryV0C22CompletionMessageParamO4RoleO9assistantyA2GmF":{"name":"assistant","parent_name":"Role"},"Structs/ChatQuery/ChatCompletionMessageParam/Role.html#/s:6OpenAI9ChatQueryV0C22CompletionMessageParamO4RoleO4toolyA2GmF":{"name":"tool","parent_name":"Role"},"Structs/ChatQuery/ChatCompletionMessageParam/ChatCompletionToolMessageParam/CodingKeys.html#/s:6OpenAI9ChatQueryV0C22CompletionMessageParamO0ce4ToolfG0V10CodingKeysO7contentyA2ImF":{"name":"content","parent_name":"CodingKeys"},"Structs/ChatQuery/ChatCompletionMessageParam/ChatCompletionToolMessageParam/CodingKeys.html#/s:6OpenAI9ChatQueryV0C22CompletionMessageParamO0ce4ToolfG0V10CodingKeysO4roleyA2ImF":{"name":"role","parent_name":"CodingKeys
"},"Structs/ChatQuery/ChatCompletionMessageParam/ChatCompletionToolMessageParam/CodingKeys.html#/s:6OpenAI9ChatQueryV0C22CompletionMessageParamO0ce4ToolfG0V10CodingKeysO10toolCallIdyA2ImF":{"name":"toolCallId","parent_name":"CodingKeys"},"Structs/ChatQuery/ChatCompletionMessageParam/ChatCompletionToolMessageParam.html#/s:6OpenAI9ChatQueryV0C22CompletionMessageParamO0ce4ToolfG0V4Rolea":{"name":"Role","parent_name":"ChatCompletionToolMessageParam"},"Structs/ChatQuery/ChatCompletionMessageParam/ChatCompletionToolMessageParam.html#/s:6OpenAI9ChatQueryV0C22CompletionMessageParamO0ce4ToolfG0V7contentSSvp":{"name":"content","abstract":"

The contents of the tool message.

","parent_name":"ChatCompletionToolMessageParam"},"Structs/ChatQuery/ChatCompletionMessageParam/ChatCompletionToolMessageParam.html#/s:6OpenAI9ChatQueryV0C22CompletionMessageParamO0ce4ToolfG0V4roleAE4RoleOvp":{"name":"role","abstract":"

The role of the messages author, in this case tool.

","parent_name":"ChatCompletionToolMessageParam"},"Structs/ChatQuery/ChatCompletionMessageParam/ChatCompletionToolMessageParam.html#/s:6OpenAI9ChatQueryV0C22CompletionMessageParamO0ce4ToolfG0V10toolCallIdSSvp":{"name":"toolCallId","abstract":"

Tool call that this message is responding to.

","parent_name":"ChatCompletionToolMessageParam"},"Structs/ChatQuery/ChatCompletionMessageParam/ChatCompletionToolMessageParam.html#/s:6OpenAI9ChatQueryV0C22CompletionMessageParamO0ce4ToolfG0V7content10toolCallIdAGSS_SStcfc":{"name":"init(content:toolCallId:)","parent_name":"ChatCompletionToolMessageParam"},"Structs/ChatQuery/ChatCompletionMessageParam/ChatCompletionToolMessageParam/CodingKeys.html":{"name":"CodingKeys","parent_name":"ChatCompletionToolMessageParam"},"Structs/ChatQuery/ChatCompletionMessageParam/ChatCompletionAssistantMessageParam/ChatCompletionMessageToolCallParam/FunctionCall.html#/s:6OpenAI9ChatQueryV0C22CompletionMessageParamO0ce9AssistantfG0V0cef8ToolCallG0V08FunctionJ0V9argumentsSSvp":{"name":"arguments","abstract":"

The arguments to call the function with, as generated by the model in JSON format. Note that the model does not always generate valid JSON, and may hallucinate parameters not defined by your function schema. Validate the arguments in your code before calling your function.

","parent_name":"FunctionCall"},"Structs/ChatQuery/ChatCompletionMessageParam/ChatCompletionAssistantMessageParam/ChatCompletionMessageToolCallParam/FunctionCall.html#/s:6OpenAI9ChatQueryV0C22CompletionMessageParamO0ce9AssistantfG0V0cef8ToolCallG0V08FunctionJ0V4nameSSvp":{"name":"name","abstract":"

The name of the function to call.

","parent_name":"FunctionCall"},"Structs/ChatQuery/ChatCompletionMessageParam/ChatCompletionAssistantMessageParam/ChatCompletionMessageToolCallParam.html#/s:6OpenAI9ChatQueryV0C22CompletionMessageParamO0ce9AssistantfG0V0cef8ToolCallG0V9ToolsTypea":{"name":"ToolsType","parent_name":"ChatCompletionMessageToolCallParam"},"Structs/ChatQuery/ChatCompletionMessageParam/ChatCompletionAssistantMessageParam/ChatCompletionMessageToolCallParam.html#/s:6OpenAI9ChatQueryV0C22CompletionMessageParamO0ce9AssistantfG0V0cef8ToolCallG0V2idSSvp":{"name":"id","abstract":"

The ID of the tool call.

","parent_name":"ChatCompletionMessageToolCallParam"},"Structs/ChatQuery/ChatCompletionMessageParam/ChatCompletionAssistantMessageParam/ChatCompletionMessageToolCallParam.html#/s:6OpenAI9ChatQueryV0C22CompletionMessageParamO0ce9AssistantfG0V0cef8ToolCallG0V8functionAI08FunctionJ0Vvp":{"name":"function","abstract":"

The function that the model called.

","parent_name":"ChatCompletionMessageToolCallParam"},"Structs/ChatQuery/ChatCompletionMessageParam/ChatCompletionAssistantMessageParam/ChatCompletionMessageToolCallParam.html#/s:6OpenAI9ChatQueryV0C22CompletionMessageParamO0ce9AssistantfG0V0cef8ToolCallG0V4typeAC0ceiG0V9ToolsTypeOvp":{"name":"type","abstract":"

The type of the tool. Currently, only function is supported.

","parent_name":"ChatCompletionMessageToolCallParam"},"Structs/ChatQuery/ChatCompletionMessageParam/ChatCompletionAssistantMessageParam/ChatCompletionMessageToolCallParam.html#/s:6OpenAI9ChatQueryV0C22CompletionMessageParamO0ce9AssistantfG0V0cef8ToolCallG0V2id8functionAISS_AI08FunctionJ0Vtcfc":{"name":"init(id:function:)","parent_name":"ChatCompletionMessageToolCallParam"},"Structs/ChatQuery/ChatCompletionMessageParam/ChatCompletionAssistantMessageParam/ChatCompletionMessageToolCallParam/FunctionCall.html":{"name":"FunctionCall","parent_name":"ChatCompletionMessageToolCallParam"},"Structs/ChatQuery/ChatCompletionMessageParam/ChatCompletionAssistantMessageParam/CodingKeys.html#/s:6OpenAI9ChatQueryV0C22CompletionMessageParamO0ce9AssistantfG0V10CodingKeysO4nameyA2ImF":{"name":"name","parent_name":"CodingKeys"},"Structs/ChatQuery/ChatCompletionMessageParam/ChatCompletionAssistantMessageParam/CodingKeys.html#/s:6OpenAI9ChatQueryV0C22CompletionMessageParamO0ce9AssistantfG0V10CodingKeysO4roleyA2ImF":{"name":"role","parent_name":"CodingKeys"},"Structs/ChatQuery/ChatCompletionMessageParam/ChatCompletionAssistantMessageParam/CodingKeys.html#/s:6OpenAI9ChatQueryV0C22CompletionMessageParamO0ce9AssistantfG0V10CodingKeysO7contentyA2ImF":{"name":"content","parent_name":"CodingKeys"},"Structs/ChatQuery/ChatCompletionMessageParam/ChatCompletionAssistantMessageParam/CodingKeys.html#/s:6OpenAI9ChatQueryV0C22CompletionMessageParamO0ce9AssistantfG0V10CodingKeysO9toolCallsyA2ImF":{"name":"toolCalls","parent_name":"CodingKeys"},"Structs/ChatQuery/ChatCompletionMessageParam/ChatCompletionAssistantMessageParam.html#/s:6OpenAI9ChatQueryV0C22CompletionMessageParamO0ce9AssistantfG0V4Rolea":{"name":"Role","parent_name":"ChatCompletionAssistantMessageParam"},"Structs/ChatQuery/ChatCompletionMessageParam/ChatCompletionAssistantMessageParam.html#/s:6OpenAI9ChatQueryV0C22CompletionMessageParamO0ce9AssistantfG0V4roleAE4RoleOvp":{"name":"role","abstract":"

/ The role of the messages author, in this case assistant.

","parent_name":"ChatCompletionAssistantMessageParam"},"Structs/ChatQuery/ChatCompletionMessageParam/ChatCompletionAssistantMessageParam.html#/s:6OpenAI9ChatQueryV0C22CompletionMessageParamO0ce9AssistantfG0V7contentSSSgvp":{"name":"content","abstract":"

The contents of the assistant message. Required unless tool_calls is specified.

","parent_name":"ChatCompletionAssistantMessageParam"},"Structs/ChatQuery/ChatCompletionMessageParam/ChatCompletionAssistantMessageParam.html#/s:6OpenAI9ChatQueryV0C22CompletionMessageParamO0ce9AssistantfG0V4nameSSSgvp":{"name":"name","abstract":"

The name of the author of this message. name is required if role is function, and it should be the name of the function whose response is in the content. May contain a-z, A-Z, 0-9, and underscores, with a maximum length of 64 characters.

","parent_name":"ChatCompletionAssistantMessageParam"},"Structs/ChatQuery/ChatCompletionMessageParam/ChatCompletionAssistantMessageParam.html#/s:6OpenAI9ChatQueryV0C22CompletionMessageParamO0ce9AssistantfG0V9toolCallsSayAG0cef8ToolCallG0VGSgvp":{"name":"toolCalls","abstract":"

The tool calls generated by the model, such as function calls.

","parent_name":"ChatCompletionAssistantMessageParam"},"Structs/ChatQuery/ChatCompletionMessageParam/ChatCompletionAssistantMessageParam.html#/s:6OpenAI9ChatQueryV0C22CompletionMessageParamO0ce9AssistantfG0V7content4name9toolCallsAGSSSg_AKSayAG0cef8ToolCallG0VGSgtcfc":{"name":"init(content:name:toolCalls:)","parent_name":"ChatCompletionAssistantMessageParam"},"Structs/ChatQuery/ChatCompletionMessageParam/ChatCompletionAssistantMessageParam/CodingKeys.html":{"name":"CodingKeys","parent_name":"ChatCompletionAssistantMessageParam"},"Structs/ChatQuery/ChatCompletionMessageParam/ChatCompletionAssistantMessageParam/ChatCompletionMessageToolCallParam.html":{"name":"ChatCompletionMessageToolCallParam","parent_name":"ChatCompletionAssistantMessageParam"},"Structs/ChatQuery/ChatCompletionMessageParam/ChatCompletionUserMessageParam/Content/VisionContent/ChatCompletionContentPartImageParam/CodingKeys.html#/s:6OpenAI9ChatQueryV0C22CompletionMessageParamO0ce4UserfG0V7ContentO06VisionI0O0cei9PartImageG0V10CodingKeysO8imageUrlyA2OmF":{"name":"imageUrl","parent_name":"CodingKeys"},"Structs/ChatQuery/ChatCompletionMessageParam/ChatCompletionUserMessageParam/Content/VisionContent/ChatCompletionContentPartImageParam/CodingKeys.html#/s:6OpenAI9ChatQueryV0C22CompletionMessageParamO0ce4UserfG0V7ContentO06VisionI0O0cei9PartImageG0V10CodingKeysO4typeyA2OmF":{"name":"type","parent_name":"CodingKeys"},"Structs/ChatQuery/ChatCompletionMessageParam/ChatCompletionUserMessageParam/Content/VisionContent/ChatCompletionContentPartImageParam/ImageURL/Detail.html#/s:6OpenAI9ChatQueryV0C22CompletionMessageParamO0ce4UserfG0V7ContentO06VisionI0O0cei9PartImageG0V0L3URLV6DetailO4autoyA2QmF":{"name":"auto","parent_name":"Detail"},"Structs/ChatQuery/ChatCompletionMessageParam/ChatCompletionUserMessageParam/Content/VisionContent/ChatCompletionContentPartImageParam/ImageURL/Detail.html#/s:6OpenAI9ChatQueryV0C22CompletionMessageParamO0ce4UserfG0V7ContentO06VisionI0O0cei9PartImageG0V0L3URLV6DetailO3lowyA2QmF":{"
name":"low","parent_name":"Detail"},"Structs/ChatQuery/ChatCompletionMessageParam/ChatCompletionUserMessageParam/Content/VisionContent/ChatCompletionContentPartImageParam/ImageURL/Detail.html#/s:6OpenAI9ChatQueryV0C22CompletionMessageParamO0ce4UserfG0V7ContentO06VisionI0O0cei9PartImageG0V0L3URLV6DetailO4highyA2QmF":{"name":"high","parent_name":"Detail"},"Structs/ChatQuery/ChatCompletionMessageParam/ChatCompletionUserMessageParam/Content/VisionContent/ChatCompletionContentPartImageParam/ImageURL.html#/s:6OpenAI9ChatQueryV0C22CompletionMessageParamO0ce4UserfG0V7ContentO06VisionI0O0cei9PartImageG0V0L3URLV3urlSSvp":{"name":"url","abstract":"

Either a URL of the image or the base64 encoded image data.

","parent_name":"ImageURL"},"Structs/ChatQuery/ChatCompletionMessageParam/ChatCompletionUserMessageParam/Content/VisionContent/ChatCompletionContentPartImageParam/ImageURL.html#/s:6OpenAI9ChatQueryV0C22CompletionMessageParamO0ce4UserfG0V7ContentO06VisionI0O0cei9PartImageG0V0L3URLV6detailAO6DetailOvp":{"name":"detail","abstract":"

Specifies the detail level of the image. Learn more in the","parent_name":"ImageURL"},"Structs/ChatQuery/ChatCompletionMessageParam/ChatCompletionUserMessageParam/Content/VisionContent/ChatCompletionContentPartImageParam/ImageURL.html#/s:6OpenAI9ChatQueryV0C22CompletionMessageParamO0ce4UserfG0V7ContentO06VisionI0O0cei9PartImageG0V0L3URLV3url6detailAOSS_AO6DetailOtcfc":{"name":"init(url:detail:)","parent_name":"ImageURL"},"Structs/ChatQuery/ChatCompletionMessageParam/ChatCompletionUserMessageParam/Content/VisionContent/ChatCompletionContentPartImageParam/ImageURL.html#/s:6OpenAI9ChatQueryV0C22CompletionMessageParamO0ce4UserfG0V7ContentO06VisionI0O0cei9PartImageG0V0L3URLV3url6detailAO10Foundation4DataV_AO6DetailOtcfc":{"name":"init(url:detail:)","parent_name":"ImageURL"},"Structs/ChatQuery/ChatCompletionMessageParam/ChatCompletionUserMessageParam/Content/VisionContent/ChatCompletionContentPartImageParam/ImageURL/Detail.html":{"name":"Detail","parent_name":"ImageURL"},"Structs/ChatQuery/ChatCompletionMessageParam/ChatCompletionUserMessageParam/Content/VisionContent/ChatCompletionContentPartImageParam.html#/s:6OpenAI9ChatQueryV0C22CompletionMessageParamO0ce4UserfG0V7ContentO06VisionI0O0cei9PartImageG0V8imageUrlAM0L3URLVvp":{"name":"imageUrl","parent_name":"ChatCompletionContentPartImageParam"},"Structs/ChatQuery/ChatCompletionMessageParam/ChatCompletionUserMessageParam/Content/VisionContent/ChatCompletionContentPartImageParam.html#/s:6OpenAI9ChatQueryV0C22CompletionMessageParamO0ce4UserfG0V7ContentO06VisionI0O0cei9PartImageG0V4typeSSvp":{"name":"type","abstract":"

The type of the content part.

","parent_name":"ChatCompletionContentPartImageParam"},"Structs/ChatQuery/ChatCompletionMessageParam/ChatCompletionUserMessageParam/Content/VisionContent/ChatCompletionContentPartImageParam.html#/s:6OpenAI9ChatQueryV0C22CompletionMessageParamO0ce4UserfG0V7ContentO06VisionI0O0cei9PartImageG0V8imageUrlA2M0L3URLV_tcfc":{"name":"init(imageUrl:)","parent_name":"ChatCompletionContentPartImageParam"},"Structs/ChatQuery/ChatCompletionMessageParam/ChatCompletionUserMessageParam/Content/VisionContent/ChatCompletionContentPartImageParam/ImageURL.html":{"name":"ImageURL","parent_name":"ChatCompletionContentPartImageParam"},"Structs/ChatQuery/ChatCompletionMessageParam/ChatCompletionUserMessageParam/Content/VisionContent/ChatCompletionContentPartImageParam/CodingKeys.html":{"name":"CodingKeys","parent_name":"ChatCompletionContentPartImageParam"},"Structs/ChatQuery/ChatCompletionMessageParam/ChatCompletionUserMessageParam/Content/VisionContent/ChatCompletionContentPartTextParam.html#/s:6OpenAI9ChatQueryV0C22CompletionMessageParamO0ce4UserfG0V7ContentO06VisionI0O0cei8PartTextG0V4textSSvp":{"name":"text","abstract":"

The text content.

","parent_name":"ChatCompletionContentPartTextParam"},"Structs/ChatQuery/ChatCompletionMessageParam/ChatCompletionUserMessageParam/Content/VisionContent/ChatCompletionContentPartTextParam.html#/s:6OpenAI9ChatQueryV0C22CompletionMessageParamO0ce4UserfG0V7ContentO06VisionI0O0cei8PartTextG0V4typeSSvp":{"name":"type","abstract":"

The type of the content part.

","parent_name":"ChatCompletionContentPartTextParam"},"Structs/ChatQuery/ChatCompletionMessageParam/ChatCompletionUserMessageParam/Content/VisionContent/ChatCompletionContentPartTextParam.html#/s:6OpenAI9ChatQueryV0C22CompletionMessageParamO0ce4UserfG0V7ContentO06VisionI0O0cei8PartTextG0V4textAMSS_tcfc":{"name":"init(text:)","parent_name":"ChatCompletionContentPartTextParam"},"Structs/ChatQuery/ChatCompletionMessageParam/ChatCompletionUserMessageParam/Content/VisionContent.html#/s:6OpenAI9ChatQueryV0C22CompletionMessageParamO0ce4UserfG0V7ContentO06VisionI0O04chatei8PartTextG0yA2K0ceilmG0VcAKmF":{"name":"chatCompletionContentPartTextParam(_:)","parent_name":"VisionContent"},"Structs/ChatQuery/ChatCompletionMessageParam/ChatCompletionUserMessageParam/Content/VisionContent.html#/s:6OpenAI9ChatQueryV0C22CompletionMessageParamO0ce4UserfG0V7ContentO06VisionI0O04chatei9PartImageG0yA2K0ceilmG0VcAKmF":{"name":"chatCompletionContentPartImageParam(_:)","parent_name":"VisionContent"},"Structs/ChatQuery/ChatCompletionMessageParam/ChatCompletionUserMessageParam/Content/VisionContent.html#/s:6OpenAI9ChatQueryV0C22CompletionMessageParamO0ce4UserfG0V7ContentO06VisionI0O4textSSSgvp":{"name":"text","parent_name":"VisionContent"},"Structs/ChatQuery/ChatCompletionMessageParam/ChatCompletionUserMessageParam/Content/VisionContent.html#/s:6OpenAI9ChatQueryV0C22CompletionMessageParamO0ce4UserfG0V7ContentO06VisionI0O8imageUrlAK0cei9PartImageG0V0N3URLVSgvp":{"name":"imageUrl","parent_name":"VisionContent"},"Structs/ChatQuery/ChatCompletionMessageParam/ChatCompletionUserMessageParam/Content/VisionContent.html#/s:6OpenAI9ChatQueryV0C22CompletionMessageParamO0ce4UserfG0V7ContentO06VisionI0O04chatei8PartTextG0A2K0ceilmG0V_tcfc":{"name":"init(chatCompletionContentPartTextParam:)","parent_name":"VisionContent"},"Structs/ChatQuery/ChatCompletionMessageParam/ChatCompletionUserMessageParam/Content/VisionContent.html#/s:6OpenAI9ChatQueryV0C22CompletionMessageParamO0ce4UserfG0V7ContentO06VisionI0O04chat
ei9PartImageG0A2K0ceilmG0V_tcfc":{"name":"init(chatCompletionContentPartImageParam:)","parent_name":"VisionContent"},"Structs/ChatQuery/ChatCompletionMessageParam/ChatCompletionUserMessageParam/Content/VisionContent.html#/s:SE6encode2toys7Encoder_p_tKF":{"name":"encode(to:)","parent_name":"VisionContent"},"Structs/ChatQuery/ChatCompletionMessageParam/ChatCompletionUserMessageParam/Content/VisionContent/ChatCompletionContentPartTextParam.html":{"name":"ChatCompletionContentPartTextParam","parent_name":"VisionContent"},"Structs/ChatQuery/ChatCompletionMessageParam/ChatCompletionUserMessageParam/Content/VisionContent/ChatCompletionContentPartImageParam.html":{"name":"ChatCompletionContentPartImageParam","parent_name":"VisionContent"},"Structs/ChatQuery/ChatCompletionMessageParam/ChatCompletionUserMessageParam/Content/CodingKeys.html#/s:6OpenAI9ChatQueryV0C22CompletionMessageParamO0ce4UserfG0V7ContentO10CodingKeysO6stringyA2KmF":{"name":"string","parent_name":"CodingKeys"},"Structs/ChatQuery/ChatCompletionMessageParam/ChatCompletionUserMessageParam/Content/CodingKeys.html#/s:6OpenAI9ChatQueryV0C22CompletionMessageParamO0ce4UserfG0V7ContentO10CodingKeysO6visionyA2KmF":{"name":"vision","parent_name":"CodingKeys"},"Structs/ChatQuery/ChatCompletionMessageParam/ChatCompletionUserMessageParam/Content.html#/s:6OpenAI9ChatQueryV0C22CompletionMessageParamO0ce4UserfG0V7ContentO6stringyAISScAImF":{"name":"string(_:)","parent_name":"Content"},"Structs/ChatQuery/ChatCompletionMessageParam/ChatCompletionUserMessageParam/Content.html#/s:6OpenAI9ChatQueryV0C22CompletionMessageParamO0ce4UserfG0V7ContentO6visionyAISayAI06VisionI0OGcAImF":{"name":"vision(_:)","parent_name":"Content"},"Structs/ChatQuery/ChatCompletionMessageParam/ChatCompletionUserMessageParam/Content.html#/s:6OpenAI9ChatQueryV0C22CompletionMessageParamO0ce4UserfG0V7ContentO6stringSSSgvp":{"name":"string","parent_name":"Content"},"Structs/ChatQuery/ChatCompletionMessageParam/ChatCompletionUserMessageParam/Content.html#/s:6
OpenAI9ChatQueryV0C22CompletionMessageParamO0ce4UserfG0V7ContentO6stringAISS_tcfc":{"name":"init(string:)","parent_name":"Content"},"Structs/ChatQuery/ChatCompletionMessageParam/ChatCompletionUserMessageParam/Content.html#/s:6OpenAI9ChatQueryV0C22CompletionMessageParamO0ce4UserfG0V7ContentO6visionAISayAI06VisionI0OG_tcfc":{"name":"init(vision:)","parent_name":"Content"},"Structs/ChatQuery/ChatCompletionMessageParam/ChatCompletionUserMessageParam/Content/CodingKeys.html":{"name":"CodingKeys","parent_name":"Content"},"Structs/ChatQuery/ChatCompletionMessageParam/ChatCompletionUserMessageParam/Content.html#/s:SE6encode2toys7Encoder_p_tKF":{"name":"encode(to:)","parent_name":"Content"},"Structs/ChatQuery/ChatCompletionMessageParam/ChatCompletionUserMessageParam/Content/VisionContent.html":{"name":"VisionContent","parent_name":"Content"},"Structs/ChatQuery/ChatCompletionMessageParam/ChatCompletionUserMessageParam/Content.html#/s:Se4fromxs7Decoder_p_tKcfc":{"name":"init(from:)","parent_name":"Content"},"Structs/ChatQuery/ChatCompletionMessageParam/ChatCompletionUserMessageParam.html#/s:6OpenAI9ChatQueryV0C22CompletionMessageParamO0ce4UserfG0V4Rolea":{"name":"Role","parent_name":"ChatCompletionUserMessageParam"},"Structs/ChatQuery/ChatCompletionMessageParam/ChatCompletionUserMessageParam.html#/s:6OpenAI9ChatQueryV0C22CompletionMessageParamO0ce4UserfG0V7contentAG7ContentOvp":{"name":"content","abstract":"

The contents of the user message.

","parent_name":"ChatCompletionUserMessageParam"},"Structs/ChatQuery/ChatCompletionMessageParam/ChatCompletionUserMessageParam.html#/s:6OpenAI9ChatQueryV0C22CompletionMessageParamO0ce4UserfG0V4roleAE4RoleOvp":{"name":"role","abstract":"

The role of the messages author, in this case user.

","parent_name":"ChatCompletionUserMessageParam"},"Structs/ChatQuery/ChatCompletionMessageParam/ChatCompletionUserMessageParam.html#/s:6OpenAI9ChatQueryV0C22CompletionMessageParamO0ce4UserfG0V4nameSSSgvp":{"name":"name","abstract":"

An optional name for the participant. Provides the model information to differentiate between participants of the same role.

","parent_name":"ChatCompletionUserMessageParam"},"Structs/ChatQuery/ChatCompletionMessageParam/ChatCompletionUserMessageParam.html#/s:6OpenAI9ChatQueryV0C22CompletionMessageParamO0ce4UserfG0V7content4nameA2G7ContentO_SSSgtcfc":{"name":"init(content:name:)","parent_name":"ChatCompletionUserMessageParam"},"Structs/ChatQuery/ChatCompletionMessageParam/ChatCompletionUserMessageParam/Content.html":{"name":"Content","parent_name":"ChatCompletionUserMessageParam"},"Structs/ChatQuery/ChatCompletionMessageParam/ChatCompletionSystemMessageParam.html#/s:6OpenAI9ChatQueryV0C22CompletionMessageParamO0ce6SystemfG0V4Rolea":{"name":"Role","parent_name":"ChatCompletionSystemMessageParam"},"Structs/ChatQuery/ChatCompletionMessageParam/ChatCompletionSystemMessageParam.html#/s:6OpenAI9ChatQueryV0C22CompletionMessageParamO0ce6SystemfG0V7contentSSvp":{"name":"content","abstract":"

The contents of the system message.

","parent_name":"ChatCompletionSystemMessageParam"},"Structs/ChatQuery/ChatCompletionMessageParam/ChatCompletionSystemMessageParam.html#/s:6OpenAI9ChatQueryV0C22CompletionMessageParamO0ce6SystemfG0V4roleAE4RoleOvp":{"name":"role","abstract":"

The role of the messages author, in this case system.

","parent_name":"ChatCompletionSystemMessageParam"},"Structs/ChatQuery/ChatCompletionMessageParam/ChatCompletionSystemMessageParam.html#/s:6OpenAI9ChatQueryV0C22CompletionMessageParamO0ce6SystemfG0V4nameSSSgvp":{"name":"name","abstract":"

An optional name for the participant. Provides the model information to differentiate between participants of the same role.

","parent_name":"ChatCompletionSystemMessageParam"},"Structs/ChatQuery/ChatCompletionMessageParam/ChatCompletionSystemMessageParam.html#/s:6OpenAI9ChatQueryV0C22CompletionMessageParamO0ce6SystemfG0V7content4nameAGSS_SSSgtcfc":{"name":"init(content:name:)","parent_name":"ChatCompletionSystemMessageParam"},"Structs/ChatQuery/ChatCompletionMessageParam.html#/s:6OpenAI9ChatQueryV0C22CompletionMessageParamO6systemyA2E0ce6SystemfG0VcAEmF":{"name":"system(_:)","parent_name":"ChatCompletionMessageParam"},"Structs/ChatQuery/ChatCompletionMessageParam.html#/s:6OpenAI9ChatQueryV0C22CompletionMessageParamO4useryA2E0ce4UserfG0VcAEmF":{"name":"user(_:)","parent_name":"ChatCompletionMessageParam"},"Structs/ChatQuery/ChatCompletionMessageParam.html#/s:6OpenAI9ChatQueryV0C22CompletionMessageParamO9assistantyA2E0ce9AssistantfG0VcAEmF":{"name":"assistant(_:)","parent_name":"ChatCompletionMessageParam"},"Structs/ChatQuery/ChatCompletionMessageParam.html#/s:6OpenAI9ChatQueryV0C22CompletionMessageParamO4toolyA2E0ce4ToolfG0VcAEmF":{"name":"tool(_:)","parent_name":"ChatCompletionMessageParam"},"Structs/ChatQuery/ChatCompletionMessageParam.html#/s:6OpenAI9ChatQueryV0C22CompletionMessageParamO7contentAE0ce4UserfG0V7ContentOSgvp":{"name":"content","parent_name":"ChatCompletionMessageParam"},"Structs/ChatQuery/ChatCompletionMessageParam.html#/s:6OpenAI9ChatQueryV0C22CompletionMessageParamO4roleAE4RoleOvp":{"name":"role","parent_name":"ChatCompletionMessageParam"},"Structs/ChatQuery/ChatCompletionMessageParam.html#/s:6OpenAI9ChatQueryV0C22CompletionMessageParamO4nameSSSgvp":{"name":"name","parent_name":"ChatCompletionMessageParam"},"Structs/ChatQuery/ChatCompletionMessageParam.html#/s:6OpenAI9ChatQueryV0C22CompletionMessageParamO10toolCallIdSSSgvp":{"name":"toolCallId","parent_name":"ChatCompletionMessageParam"},"Structs/ChatQuery/ChatCompletionMessageParam.html#/s:6OpenAI9ChatQueryV0C22CompletionMessageParamO9toolCallsSayAE0ce9AssistantfG0V0cef8ToolCallG0VGSgvp":{"name":"toolCalls","parent_nam
e":"ChatCompletionMessageParam"},"Structs/ChatQuery/ChatCompletionMessageParam.html#/s:6OpenAI9ChatQueryV0C22CompletionMessageParamO4role7content4name9toolCalls0K6CallIdAESgAE4RoleO_SSSgANSayAE0ce9AssistantfG0V0cef4ToolmG0VGSgANtcfc":{"name":"init(role:content:name:toolCalls:toolCallId:)","parent_name":"ChatCompletionMessageParam"},"Structs/ChatQuery/ChatCompletionMessageParam.html#/s:6OpenAI9ChatQueryV0C22CompletionMessageParamO4role7content4nameAESgAE4RoleO_SayAE0ce4UserfG0V7ContentO06VisionM0OGSSSgtcfc":{"name":"init(role:content:name:)","parent_name":"ChatCompletionMessageParam"},"Structs/ChatQuery/ChatCompletionMessageParam.html#/s:SE6encode2toys7Encoder_p_tKF":{"name":"encode(to:)","parent_name":"ChatCompletionMessageParam"},"Structs/ChatQuery/ChatCompletionMessageParam/ChatCompletionSystemMessageParam.html":{"name":"ChatCompletionSystemMessageParam","parent_name":"ChatCompletionMessageParam"},"Structs/ChatQuery/ChatCompletionMessageParam/ChatCompletionUserMessageParam.html":{"name":"ChatCompletionUserMessageParam","parent_name":"ChatCompletionMessageParam"},"Structs/ChatQuery/ChatCompletionMessageParam/ChatCompletionAssistantMessageParam.html":{"name":"ChatCompletionAssistantMessageParam","parent_name":"ChatCompletionMessageParam"},"Structs/ChatQuery/ChatCompletionMessageParam/ChatCompletionToolMessageParam.html":{"name":"ChatCompletionToolMessageParam","parent_name":"ChatCompletionMessageParam"},"Structs/ChatQuery/ChatCompletionMessageParam/Role.html":{"name":"Role","parent_name":"ChatCompletionMessageParam"},"Structs/ChatQuery/ChatCompletionMessageParam.html#/s:6OpenAI9ChatQueryV0C22CompletionMessageParamO4fromAEs7Decoder_p_tKcfc":{"name":"init(from:)","parent_name":"ChatCompletionMessageParam"},"Structs/ChatQuery.html#/s:6OpenAI9ChatQueryV8messagesSayAC0C22CompletionMessageParamOGvp":{"name":"messages","abstract":"

A list of messages comprising the conversation so far

","parent_name":"ChatQuery"},"Structs/ChatQuery.html#/s:6OpenAI9ChatQueryV5modelSSvp":{"name":"model","abstract":"

ID of the model to use. See the model endpoint compatibility table for details on which models work with the Chat API.","parent_name":"ChatQuery"},"Structs/ChatQuery.html#/s:6OpenAI9ChatQueryV16frequencyPenaltySdSgvp":{"name":"frequencyPenalty","abstract":"

Number between -2.0 and 2.0. Positive values penalize new tokens based on their existing frequency in the text so far, decreasing the model’s likelihood to repeat the same line verbatim.","parent_name":"ChatQuery"},"Structs/ChatQuery.html#/s:6OpenAI9ChatQueryV9logitBiasSDySSSiGSgvp":{"name":"logitBias","abstract":"

Modify the likelihood of specified tokens appearing in the completion.","parent_name":"ChatQuery"},"Structs/ChatQuery.html#/s:6OpenAI9ChatQueryV8logprobsSbSgvp":{"name":"logprobs","abstract":"

Whether to return log probabilities of the output tokens or not. If true, returns the log probabilities of each output token returned in the content of message. This option is currently not available on the gpt-4-vision-preview model.","parent_name":"ChatQuery"},"Structs/ChatQuery.html#/s:6OpenAI9ChatQueryV9maxTokensSiSgvp":{"name":"maxTokens","abstract":"

The maximum number of tokens to generate in the completion.","parent_name":"ChatQuery"},"Structs/ChatQuery.html#/s:6OpenAI9ChatQueryV1nSiSgvp":{"name":"n","abstract":"

How many chat completion choices to generate for each input message. Note that you will be charged based on the number of generated tokens across all of the choices. Keep n as 1 to minimize costs.","parent_name":"ChatQuery"},"Structs/ChatQuery.html#/s:6OpenAI9ChatQueryV15presencePenaltySdSgvp":{"name":"presencePenalty","abstract":"

Number between -2.0 and 2.0. Positive values penalize new tokens based on whether they appear in the text so far, increasing the model’s likelihood to talk about new topics.","parent_name":"ChatQuery"},"Structs/ChatQuery.html#/s:6OpenAI9ChatQueryV14responseFormatAC08ResponseF0OSgvp":{"name":"responseFormat","abstract":"

An object specifying the format that the model must output. Compatible with gpt-4-1106-preview and gpt-3.5-turbo-1106.","parent_name":"ChatQuery"},"Structs/ChatQuery.html#/s:6OpenAI9ChatQueryV4seedSiSgvp":{"name":"seed","abstract":"

This feature is in Beta. If specified, our system will make a best effort to sample deterministically, such that repeated requests with the same seed and parameters should return the same result. Determinism is not guaranteed, and you should refer to the system_fingerprint response parameter to monitor changes in the backend.

","parent_name":"ChatQuery"},"Structs/ChatQuery.html#/s:6OpenAI9ChatQueryV4stopAC4StopOSgvp":{"name":"stop","abstract":"

Up to 4 sequences where the API will stop generating further tokens. The returned text will not contain the stop sequence.","parent_name":"ChatQuery"},"Structs/ChatQuery.html#/s:6OpenAI9ChatQueryV11temperatureSdSgvp":{"name":"temperature","abstract":"

What sampling temperature to use, between 0 and 2. Higher values like 0.8 will make the output more random, while lower values like 0.2 will make it more focused and deterministic.","parent_name":"ChatQuery"},"Structs/ChatQuery.html#/s:6OpenAI9ChatQueryV10toolChoiceAC0C33CompletionFunctionCallOptionParamOSgvp":{"name":"toolChoice","abstract":"

Controls which (if any) function is called by the model. none means the model will not call a function and instead generates a message. auto means the model can pick between generating a message or calling a function. Specifying a particular function via {“type”: “function”, “function”: {“name”: “my_function”}} forces the model to call that function.","parent_name":"ChatQuery"},"Structs/ChatQuery.html#/s:6OpenAI9ChatQueryV5toolsSayAC0C19CompletionToolParamVGSgvp":{"name":"tools","abstract":"

A list of tools the model may call. Currently, only functions are supported as a tool. Use this to provide a list of functions the model may generate JSON inputs for.

","parent_name":"ChatQuery"},"Structs/ChatQuery.html#/s:6OpenAI9ChatQueryV11topLogprobsSiSgvp":{"name":"topLogprobs","abstract":"

An integer between 0 and 5 specifying the number of most likely tokens to return at each token position, each with an associated log probability. logprobs must be set to true if this parameter is used.

","parent_name":"ChatQuery"},"Structs/ChatQuery.html#/s:6OpenAI9ChatQueryV4topPSdSgvp":{"name":"topP","abstract":"

An alternative to sampling with temperature, called nucleus sampling, where the model considers the results of the tokens with top_p probability mass. So 0.1 means only the tokens comprising the top 10% probability mass are considered.","parent_name":"ChatQuery"},"Structs/ChatQuery.html#/s:6OpenAI9ChatQueryV4userSSSgvp":{"name":"user","abstract":"

A unique identifier representing your end-user, which can help OpenAI to monitor and detect abuse.","parent_name":"ChatQuery"},"Structs/ChatQuery.html#/s:6OpenAI9ChatQueryV6streamSbvp":{"name":"stream","abstract":"

If set, partial message deltas will be sent, like in ChatGPT. Tokens will be sent as data-only server-sent events as they become available, with the stream terminated by a data: [DONE] message.","parent_name":"ChatQuery"},"Structs/ChatQuery.html#/s:6OpenAI9ChatQueryV8messages5model16frequencyPenalty9logitBias8logprobs9maxTokens1n08presenceH014responseFormat4seed4stop11temperature10toolChoice5tools11topLogprobs0W1P4user6streamACSayAC0C22CompletionMessageParamOG_SSSdSgSDySSSiGSgSbSgSiSgA1_AyC08ResponseP0OSgA1_AC4StopOSgAyC0C33CompletionFunctionCallOptionParamOSgSayAC0C19CompletionToolParamVGSgA1_AYSSSgSbtcfc":{"name":"init(messages:model:frequencyPenalty:logitBias:logprobs:maxTokens:n:presencePenalty:responseFormat:seed:stop:temperature:toolChoice:tools:topLogprobs:topP:user:stream:)","parent_name":"ChatQuery"},"Structs/ChatQuery/ChatCompletionMessageParam.html":{"name":"ChatCompletionMessageParam","parent_name":"ChatQuery"},"Structs/ChatQuery/Stop.html":{"name":"Stop","parent_name":"ChatQuery"},"Structs/ChatQuery/ResponseFormat.html":{"name":"ResponseFormat","parent_name":"ChatQuery"},"Structs/ChatQuery/ChatCompletionFunctionCallOptionParam.html":{"name":"ChatCompletionFunctionCallOptionParam","parent_name":"ChatQuery"},"Structs/ChatQuery/ChatCompletionToolParam.html":{"name":"ChatCompletionToolParam","parent_name":"ChatQuery"},"Structs/ChatQuery/CodingKeys.html":{"name":"CodingKeys","parent_name":"ChatQuery"},"Structs/AudioTranslationResult.html#/s:6OpenAI22AudioTranslationResultV4textSSvp":{"name":"text","abstract":"

The translated text.

","parent_name":"AudioTranslationResult"},"Structs/AudioTranslationQuery.html#/s:6OpenAI21AudioTranslationQueryV8FileTypea":{"name":"FileType","parent_name":"AudioTranslationQuery"},"Structs/AudioTranslationQuery.html#/s:6OpenAI21AudioTranslationQueryV14ResponseFormata":{"name":"ResponseFormat","parent_name":"AudioTranslationQuery"},"Structs/AudioTranslationQuery.html#/s:6OpenAI21AudioTranslationQueryV4file10Foundation4DataVvp":{"name":"file","abstract":"

The audio file object (not file name) translate, in one of these formats: flac, mp3, mp4, mpeg, mpga, m4a, ogg, wav, or webm.

","parent_name":"AudioTranslationQuery"},"Structs/AudioTranslationQuery.html#/s:6OpenAI21AudioTranslationQueryV8fileTypeAA0c13TranscriptionE0V04FileG0Ovp":{"name":"fileType","parent_name":"AudioTranslationQuery"},"Structs/AudioTranslationQuery.html#/s:6OpenAI21AudioTranslationQueryV5modelSSvp":{"name":"model","abstract":"

ID of the model to use. Only whisper-1 is currently available.

","parent_name":"AudioTranslationQuery"},"Structs/AudioTranslationQuery.html#/s:6OpenAI21AudioTranslationQueryV14responseFormatAA0c13TranscriptionE0V08ResponseG0OSgvp":{"name":"responseFormat","abstract":"

The format of the transcript output, in one of these options: json, text, srt, verbose_json, or vtt.","parent_name":"AudioTranslationQuery"},"Structs/AudioTranslationQuery.html#/s:6OpenAI21AudioTranslationQueryV6promptSSSgvp":{"name":"prompt","abstract":"

An optional text to guide the model’s style or continue a previous audio segment. The prompt should be in English.","parent_name":"AudioTranslationQuery"},"Structs/AudioTranslationQuery.html#/s:6OpenAI21AudioTranslationQueryV11temperatureSdSgvp":{"name":"temperature","abstract":"

The sampling temperature, between 0 and 1. Higher values like 0.8 will make the output more random, while lower values like 0.2 will make it more focused and deterministic. If set to 0, the model will use log probability to automatically increase the temperature until certain thresholds are hit.","parent_name":"AudioTranslationQuery"},"Structs/AudioTranslationQuery.html#/s:6OpenAI21AudioTranslationQueryV4file0F4Type5model6prompt11temperature14responseFormatAC10Foundation4DataV_AA0c13TranscriptionE0V04FileG0OS2SSgSdSgAN08ResponseL0OSgtcfc":{"name":"init(file:fileType:model:prompt:temperature:responseFormat:)","parent_name":"AudioTranslationQuery"},"Structs/AudioTranscriptionResult.html#/s:6OpenAI24AudioTranscriptionResultV4textSSvp":{"name":"text","abstract":"

The transcribed text.

","parent_name":"AudioTranscriptionResult"},"Structs/AudioTranscriptionQuery/FileType.html#/s:6OpenAI23AudioTranscriptionQueryV8FileTypeO4flacyA2EmF":{"name":"flac","parent_name":"FileType"},"Structs/AudioTranscriptionQuery/FileType.html#/s:6OpenAI23AudioTranscriptionQueryV8FileTypeO3mp3yA2EmF":{"name":"mp3","parent_name":"FileType"},"Structs/AudioTranscriptionQuery/FileType.html#/s:6OpenAI23AudioTranscriptionQueryV8FileTypeO4mpgayA2EmF":{"name":"mpga","parent_name":"FileType"},"Structs/AudioTranscriptionQuery/FileType.html#/s:6OpenAI23AudioTranscriptionQueryV8FileTypeO3mp4yA2EmF":{"name":"mp4","parent_name":"FileType"},"Structs/AudioTranscriptionQuery/FileType.html#/s:6OpenAI23AudioTranscriptionQueryV8FileTypeO3m4ayA2EmF":{"name":"m4a","parent_name":"FileType"},"Structs/AudioTranscriptionQuery/FileType.html#/s:6OpenAI23AudioTranscriptionQueryV8FileTypeO4mpegyA2EmF":{"name":"mpeg","parent_name":"FileType"},"Structs/AudioTranscriptionQuery/FileType.html#/s:6OpenAI23AudioTranscriptionQueryV8FileTypeO3oggyA2EmF":{"name":"ogg","parent_name":"FileType"},"Structs/AudioTranscriptionQuery/FileType.html#/s:6OpenAI23AudioTranscriptionQueryV8FileTypeO3wavyA2EmF":{"name":"wav","parent_name":"FileType"},"Structs/AudioTranscriptionQuery/FileType.html#/s:6OpenAI23AudioTranscriptionQueryV8FileTypeO4webmyA2EmF":{"name":"webm","parent_name":"FileType"},"Structs/AudioTranscriptionQuery/ResponseFormat.html#/s:6OpenAI23AudioTranscriptionQueryV14ResponseFormatO4jsonyA2EmF":{"name":"json","parent_name":"ResponseFormat"},"Structs/AudioTranscriptionQuery/ResponseFormat.html#/s:6OpenAI23AudioTranscriptionQueryV14ResponseFormatO4textyA2EmF":{"name":"text","parent_name":"ResponseFormat"},"Structs/AudioTranscriptionQuery/ResponseFormat.html#/s:6OpenAI23AudioTranscriptionQueryV14ResponseFormatO11verboseJsonyA2EmF":{"name":"verboseJson","parent_name":"ResponseFormat"},"Structs/AudioTranscriptionQuery/ResponseFormat.html#/s:6OpenAI23AudioTranscriptionQueryV14ResponseFormatO3srtyA2EmF":{"name":"srt
","parent_name":"ResponseFormat"},"Structs/AudioTranscriptionQuery/ResponseFormat.html#/s:6OpenAI23AudioTranscriptionQueryV14ResponseFormatO3vttyA2EmF":{"name":"vtt","parent_name":"ResponseFormat"},"Structs/AudioTranscriptionQuery/ResponseFormat.html":{"name":"ResponseFormat","parent_name":"AudioTranscriptionQuery"},"Structs/AudioTranscriptionQuery.html#/s:6OpenAI23AudioTranscriptionQueryV4file10Foundation4DataVvp":{"name":"file","abstract":"

The audio file object (not file name) to transcribe, in one of these formats: flac, mp3, mp4, mpeg, mpga, m4a, ogg, wav, or webm.

","parent_name":"AudioTranscriptionQuery"},"Structs/AudioTranscriptionQuery.html#/s:6OpenAI23AudioTranscriptionQueryV8fileTypeAC04FileG0Ovp":{"name":"fileType","parent_name":"AudioTranscriptionQuery"},"Structs/AudioTranscriptionQuery.html#/s:6OpenAI23AudioTranscriptionQueryV5modelSSvp":{"name":"model","abstract":"

ID of the model to use. Only whisper-1 is currently available.

","parent_name":"AudioTranscriptionQuery"},"Structs/AudioTranscriptionQuery.html#/s:6OpenAI23AudioTranscriptionQueryV14responseFormatAC08ResponseG0OSgvp":{"name":"responseFormat","abstract":"

The format of the transcript output, in one of these options: json, text, srt, verbose_json, or vtt.","parent_name":"AudioTranscriptionQuery"},"Structs/AudioTranscriptionQuery.html#/s:6OpenAI23AudioTranscriptionQueryV6promptSSSgvp":{"name":"prompt","abstract":"

An optional text to guide the model’s style or continue a previous audio segment. The prompt should match the audio language.

","parent_name":"AudioTranscriptionQuery"},"Structs/AudioTranscriptionQuery.html#/s:6OpenAI23AudioTranscriptionQueryV11temperatureSdSgvp":{"name":"temperature","abstract":"

The sampling temperature, between 0 and 1. Higher values like 0.8 will make the output more random, while lower values like 0.2 will make it more focused and deterministic. If set to 0, the model will use log probability to automatically increase the temperature until certain thresholds are hit.","parent_name":"AudioTranscriptionQuery"},"Structs/AudioTranscriptionQuery.html#/s:6OpenAI23AudioTranscriptionQueryV8languageSSSgvp":{"name":"language","abstract":"

The language of the input audio. Supplying the input language in ISO-639-1 format will improve accuracy and latency.","parent_name":"AudioTranscriptionQuery"},"Structs/AudioTranscriptionQuery.html#/s:6OpenAI23AudioTranscriptionQueryV4file0F4Type5model6prompt11temperature8language14responseFormatAC10Foundation4DataV_AC04FileG0OS2SSgSdSgApC08ResponseM0OSgtcfc":{"name":"init(file:fileType:model:prompt:temperature:language:responseFormat:)","parent_name":"AudioTranscriptionQuery"},"Structs/AudioTranscriptionQuery/FileType.html":{"name":"FileType","parent_name":"AudioTranscriptionQuery"},"Structs/AudioSpeechResult.html#/s:6OpenAI17AudioSpeechResultV5audio10Foundation4DataVvp":{"name":"audio","abstract":"

Audio data for one of the following formats :mp3, opus, aac, flac

","parent_name":"AudioSpeechResult"},"Structs/AudioSpeechQuery/Speed.html#/s:6OpenAI16AudioSpeechQueryV5SpeedO6normalyA2EmF":{"name":"normal","parent_name":"Speed"},"Structs/AudioSpeechQuery/Speed.html#/s:6OpenAI16AudioSpeechQueryV5SpeedO3maxyA2EmF":{"name":"max","parent_name":"Speed"},"Structs/AudioSpeechQuery/Speed.html#/s:6OpenAI16AudioSpeechQueryV5SpeedO3minyA2EmF":{"name":"min","parent_name":"Speed"},"Structs/AudioSpeechQuery/CodingKeys.html#/s:6OpenAI16AudioSpeechQueryV10CodingKeysO5modelyA2EmF":{"name":"model","parent_name":"CodingKeys"},"Structs/AudioSpeechQuery/CodingKeys.html#/s:6OpenAI16AudioSpeechQueryV10CodingKeysO5inputyA2EmF":{"name":"input","parent_name":"CodingKeys"},"Structs/AudioSpeechQuery/CodingKeys.html#/s:6OpenAI16AudioSpeechQueryV10CodingKeysO5voiceyA2EmF":{"name":"voice","parent_name":"CodingKeys"},"Structs/AudioSpeechQuery/CodingKeys.html#/s:6OpenAI16AudioSpeechQueryV10CodingKeysO14responseFormatyA2EmF":{"name":"responseFormat","parent_name":"CodingKeys"},"Structs/AudioSpeechQuery/CodingKeys.html#/s:6OpenAI16AudioSpeechQueryV10CodingKeysO5speedyA2EmF":{"name":"speed","parent_name":"CodingKeys"},"Structs/AudioSpeechQuery/AudioSpeechResponseFormat.html#/s:6OpenAI16AudioSpeechQueryV0cD14ResponseFormatO3mp3yA2EmF":{"name":"mp3","parent_name":"AudioSpeechResponseFormat"},"Structs/AudioSpeechQuery/AudioSpeechResponseFormat.html#/s:6OpenAI16AudioSpeechQueryV0cD14ResponseFormatO4opusyA2EmF":{"name":"opus","parent_name":"AudioSpeechResponseFormat"},"Structs/AudioSpeechQuery/AudioSpeechResponseFormat.html#/s:6OpenAI16AudioSpeechQueryV0cD14ResponseFormatO3aacyA2EmF":{"name":"aac","parent_name":"AudioSpeechResponseFormat"},"Structs/AudioSpeechQuery/AudioSpeechResponseFormat.html#/s:6OpenAI16AudioSpeechQueryV0cD14ResponseFormatO4flacyA2EmF":{"name":"flac","parent_name":"AudioSpeechResponseFormat"},"Structs/AudioSpeechQuery/AudioSpeechVoice.html#/s:6OpenAI16AudioSpeechQueryV0cD5VoiceO5alloyyA2EmF":{"name":"alloy","parent_name":"AudioSpeechVoice"},"Struct
s/AudioSpeechQuery/AudioSpeechVoice.html#/s:6OpenAI16AudioSpeechQueryV0cD5VoiceO4echoyA2EmF":{"name":"echo","parent_name":"AudioSpeechVoice"},"Structs/AudioSpeechQuery/AudioSpeechVoice.html#/s:6OpenAI16AudioSpeechQueryV0cD5VoiceO5fableyA2EmF":{"name":"fable","parent_name":"AudioSpeechVoice"},"Structs/AudioSpeechQuery/AudioSpeechVoice.html#/s:6OpenAI16AudioSpeechQueryV0cD5VoiceO4onyxyA2EmF":{"name":"onyx","parent_name":"AudioSpeechVoice"},"Structs/AudioSpeechQuery/AudioSpeechVoice.html#/s:6OpenAI16AudioSpeechQueryV0cD5VoiceO4novayA2EmF":{"name":"nova","parent_name":"AudioSpeechVoice"},"Structs/AudioSpeechQuery/AudioSpeechVoice.html#/s:6OpenAI16AudioSpeechQueryV0cD5VoiceO7shimmeryA2EmF":{"name":"shimmer","parent_name":"AudioSpeechVoice"},"Structs/AudioSpeechQuery/AudioSpeechVoice.html":{"name":"AudioSpeechVoice","abstract":"

Encapsulates the voices available for audio generation.

","parent_name":"AudioSpeechQuery"},"Structs/AudioSpeechQuery/AudioSpeechResponseFormat.html":{"name":"AudioSpeechResponseFormat","abstract":"

Encapsulates the response formats available for audio data.

","parent_name":"AudioSpeechQuery"},"Structs/AudioSpeechQuery.html#/s:6OpenAI16AudioSpeechQueryV5inputSSvp":{"name":"input","abstract":"

The text to generate audio for. The maximum length is 4096 characters.

","parent_name":"AudioSpeechQuery"},"Structs/AudioSpeechQuery.html#/s:6OpenAI16AudioSpeechQueryV5modelSSvp":{"name":"model","abstract":"

One of the available TTS models: tts-1 or tts-1-hd

","parent_name":"AudioSpeechQuery"},"Structs/AudioSpeechQuery.html#/s:6OpenAI16AudioSpeechQueryV5voiceAC0cD5VoiceOvp":{"name":"voice","abstract":"

The voice to use when generating the audio. Supported voices are alloy, echo, fable, onyx, nova, and shimmer. Previews of the voices are available in the Text to speech guide.","parent_name":"AudioSpeechQuery"},"Structs/AudioSpeechQuery.html#/s:6OpenAI16AudioSpeechQueryV14responseFormatAC0cd8ResponseG0OSgvp":{"name":"responseFormat","abstract":"

The format to audio in. Supported formats are mp3, opus, aac, and flac.","parent_name":"AudioSpeechQuery"},"Structs/AudioSpeechQuery.html#/s:6OpenAI16AudioSpeechQueryV5speedSSSgvp":{"name":"speed","abstract":"

The speed of the generated audio. Select a value from 0.25 to 4.0. 1.0 is the default.","parent_name":"AudioSpeechQuery"},"Structs/AudioSpeechQuery/CodingKeys.html":{"name":"CodingKeys","parent_name":"AudioSpeechQuery"},"Structs/AudioSpeechQuery.html#/s:6OpenAI16AudioSpeechQueryV5model5input5voice14responseFormat5speedACSS_SSAC0cD5VoiceOAC0cd8ResponseJ0OSdSgtcfc":{"name":"init(model:input:voice:responseFormat:speed:)","parent_name":"AudioSpeechQuery"},"Structs/AudioSpeechQuery/Speed.html":{"name":"Speed","parent_name":"AudioSpeechQuery"},"Structs/AudioSpeechQuery.html#/s:6OpenAI16AudioSpeechQueryV09normalizeD5SpeedySSSdSgFZ":{"name":"normalizeSpeechSpeed(_:)","parent_name":"AudioSpeechQuery"},"Structs/APIErrorResponse.html#/s:6OpenAI16APIErrorResponseV5errorAA0C0Vvp":{"name":"error","parent_name":"APIErrorResponse"},"Structs/APIErrorResponse.html#/s:10Foundation14LocalizedErrorP16errorDescriptionSSSgvp":{"name":"errorDescription","parent_name":"APIErrorResponse"},"Structs/APIError.html#/s:6OpenAI8APIErrorV7messageSSvp":{"name":"message","parent_name":"APIError"},"Structs/APIError.html#/s:6OpenAI8APIErrorV4typeSSvp":{"name":"type","parent_name":"APIError"},"Structs/APIError.html#/s:6OpenAI8APIErrorV5paramSSSgvp":{"name":"param","parent_name":"APIError"},"Structs/APIError.html#/s:6OpenAI8APIErrorV4codeSSSgvp":{"name":"code","parent_name":"APIError"},"Structs/APIError.html#/s:6OpenAI8APIErrorV7message4type5param4codeACSS_S2SSgAHtcfc":{"name":"init(message:type:param:code:)","parent_name":"APIError"},"Structs/APIError.html#/s:Se4fromxs7Decoder_p_tKcfc":{"name":"init(from:)","parent_name":"APIError"},"Structs/APIError.html#/s:10Foundation14LocalizedErrorP16errorDescriptionSSSgvp":{"name":"errorDescription","parent_name":"APIError"},"Structs/APIError.html":{"name":"APIError"},"Structs/APIErrorResponse.html":{"name":"APIErrorResponse"},"Structs/AudioSpeechQuery.html":{"name":"AudioSpeechQuery","abstract":"

Generates audio from the input text."},"Structs/AudioSpeechResult.html":{"name":"AudioSpeechResult","abstract":"

The audio file content."},"Structs/AudioTranscriptionQuery.html":{"name":"AudioTranscriptionQuery"},"Structs/AudioTranscriptionResult.html":{"name":"AudioTranscriptionResult"},"Structs/AudioTranslationQuery.html":{"name":"AudioTranslationQuery","abstract":"

Translates audio into English.

"},"Structs/AudioTranslationResult.html":{"name":"AudioTranslationResult"},"Structs/ChatQuery.html":{"name":"ChatQuery","abstract":"

Creates a model response for the given chat conversation"},"Structs/ChatResult.html":{"name":"ChatResult","abstract":"

https://platform.openai.com/docs/api-reference/chat/object"},"Structs/ChatStreamResult.html":{"name":"ChatStreamResult"},"Structs/CompletionsQuery.html":{"name":"CompletionsQuery"},"Structs/CompletionsResult.html":{"name":"CompletionsResult"},"Structs/EditsQuery.html":{"name":"EditsQuery"},"Structs/EditsResult.html":{"name":"EditsResult"},"Structs/EmbeddingsQuery.html":{"name":"EmbeddingsQuery"},"Structs/EmbeddingsResult.html":{"name":"EmbeddingsResult"},"Structs/ImageEditsQuery.html":{"name":"ImageEditsQuery"},"Structs/ImageVariationsQuery.html":{"name":"ImageVariationsQuery"},"Structs/ImagesQuery.html":{"name":"ImagesQuery","abstract":"

Given a prompt and/or an input image, the model will generate a new image."},"Structs/ImagesResult.html":{"name":"ImagesResult","abstract":"

Returns a list of image objects.

"},"Structs/ModelQuery.html":{"name":"ModelQuery","abstract":"

Retrieves a model instance, providing basic information about the model such as the owner and permissioning.

"},"Structs/ModelResult.html":{"name":"ModelResult","abstract":"

The model object matching the specified ID.

"},"Structs/ModelsResult.html":{"name":"ModelsResult","abstract":"

A list of model objects.

"},"Structs/ModerationsQuery.html":{"name":"ModerationsQuery"},"Structs/ModerationsResult.html":{"name":"ModerationsResult"},"Structs/Vector.html":{"name":"Vector"},"Protocols/OpenAIProtocol.html#/s:6OpenAI0A10AIProtocolP11completions5query10completionyAA16CompletionsQueryV_ys6ResultOyAA0gI0Vs5Error_pGctF":{"name":"completions(query:completion:)","abstract":"

This function sends a completions query to the OpenAI API and retrieves generated completions in response. The Completions API enables you to build applications using OpenAI’s language models, like the powerful GPT-3.

","parent_name":"OpenAIProtocol"},"Protocols/OpenAIProtocol.html#/s:6OpenAI0A10AIProtocolP17completionsStream5query8onResult10completionyAA16CompletionsQueryV_ys0H0OyAA0jH0Vs5Error_pGcysAN_pSgcSgtF":{"name":"completionsStream(query:onResult:completion:)","abstract":"

This function sends a completions query to the OpenAI API and retrieves generated completions in response. The Completions API enables you to build applications using OpenAI’s language models, like the powerful GPT-3. The result is returned by chunks.

","parent_name":"OpenAIProtocol"},"Protocols/OpenAIProtocol.html#/s:6OpenAI0A10AIProtocolP6images5query10completionyAA11ImagesQueryV_ys6ResultOyAA0gI0Vs5Error_pGctF":{"name":"images(query:completion:)","abstract":"

This function sends an images query to the OpenAI API and retrieves generated images in response. The Images Generation API enables you to create various images or graphics using OpenAI’s powerful deep learning models.

","parent_name":"OpenAIProtocol"},"Protocols/OpenAIProtocol.html#/s:6OpenAI0A10AIProtocolP10imageEdits5query10completionyAA05ImageE5QueryV_ys6ResultOyAA06ImagesJ0Vs5Error_pGctF":{"name":"imageEdits(query:completion:)","abstract":"

This function sends an image edit query to the OpenAI API and retrieves generated images in response. The Images Edit API enables you to edit images or graphics using OpenAI’s powerful deep learning models.

","parent_name":"OpenAIProtocol"},"Protocols/OpenAIProtocol.html#/s:6OpenAI0A10AIProtocolP15imageVariations5query10completionyAA05ImageE5QueryV_ys6ResultOyAA06ImagesJ0Vs5Error_pGctF":{"name":"imageVariations(query:completion:)","abstract":"

This function sends an image variation query to the OpenAI API and retrieves generated images in response. The Images Variations API enables you to create a variation of a given image using OpenAI’s powerful deep learning models.

","parent_name":"OpenAIProtocol"},"Protocols/OpenAIProtocol.html#/s:6OpenAI0A10AIProtocolP10embeddings5query10completionyAA15EmbeddingsQueryV_ys6ResultOyAA0gI0Vs5Error_pGctF":{"name":"embeddings(query:completion:)","abstract":"

This function sends an embeddings query to the OpenAI API and retrieves embeddings in response. The Embeddings API enables you to generate high-dimensional vector representations of texts, which can be used for various natural language processing tasks such as semantic similarity, clustering, and classification.

","parent_name":"OpenAIProtocol"},"Protocols/OpenAIProtocol.html#/s:6OpenAI0A10AIProtocolP5chats5query10completionyAA9ChatQueryV_ys6ResultOyAA0gI0Vs5Error_pGctF":{"name":"chats(query:completion:)","abstract":"

This function sends a chat query to the OpenAI API and retrieves chat conversation responses. The Chat API enables you to build chatbots or conversational applications using OpenAI’s powerful natural language models, like GPT-3.

","parent_name":"OpenAIProtocol"},"Protocols/OpenAIProtocol.html#/s:6OpenAI0A10AIProtocolP11chatsStream5query8onResult10completionyAA9ChatQueryV_ys0H0OyAA0jeH0Vs5Error_pGcysAN_pSgcSgtF":{"name":"chatsStream(query:onResult:completion:)","abstract":"

This function sends a chat query to the OpenAI API and retrieves chat stream conversation responses. The Chat API enables you to build chatbots or conversational applications using OpenAI’s powerful natural language models, like GPT-3. The result is returned by chunks.

","parent_name":"OpenAIProtocol"},"Protocols/OpenAIProtocol.html#/s:6OpenAI0A10AIProtocolP5edits5query10completionyAA10EditsQueryV_ys6ResultOyAA0gI0Vs5Error_pGctF":{"name":"edits(query:completion:)","abstract":"

This function sends an edits query to the OpenAI API and retrieves an edited version of the prompt based on the instruction given.

","parent_name":"OpenAIProtocol"},"Protocols/OpenAIProtocol.html#/s:6OpenAI0A10AIProtocolP5model5query10completionyAA10ModelQueryV_ys6ResultOyAA0gI0Vs5Error_pGctF":{"name":"model(query:completion:)","abstract":"

This function sends a model query to the OpenAI API and retrieves a model instance, providing owner information. The Models API in this usage enables you to gather detailed information on the model in question, like GPT-3.

","parent_name":"OpenAIProtocol"},"Protocols/OpenAIProtocol.html#/s:6OpenAI0A10AIProtocolP6models10completionyys6ResultOyAA06ModelsF0Vs5Error_pGc_tF":{"name":"models(completion:)","abstract":"

This function sends a models query to the OpenAI API and retrieves a list of models. The Models API in this usage enables you to list all the available models.

","parent_name":"OpenAIProtocol"},"Protocols/OpenAIProtocol.html#/s:6OpenAI0A10AIProtocolP11moderations5query10completionyAA16ModerationsQueryV_ys6ResultOyAA0gI0Vs5Error_pGctF":{"name":"moderations(query:completion:)","abstract":"

This function sends a moderations query to the OpenAI API and retrieves a list of category results to classify how text may violate OpenAI’s Content Policy.

","parent_name":"OpenAIProtocol"},"Protocols/OpenAIProtocol.html#/s:6OpenAI0A10AIProtocolP17audioCreateSpeech5query10completionyAA05AudioF5QueryV_ys6ResultOyAA0ifK0Vs5Error_pGctF":{"name":"audioCreateSpeech(query:completion:)","abstract":"

This function sends an AudioSpeechQuery to the OpenAI API to create audio speech from text using a specific voice and format.

","parent_name":"OpenAIProtocol"},"Protocols/OpenAIProtocol.html#/s:6OpenAI0A10AIProtocolP19audioTranscriptions5query10completionyAA23AudioTranscriptionQueryV_ys6ResultOyAA0hiK0Vs5Error_pGctF":{"name":"audioTranscriptions(query:completion:)","abstract":"

Transcribes audio data using OpenAI’s audio transcription API and completes the operation asynchronously.

","parent_name":"OpenAIProtocol"},"Protocols/OpenAIProtocol.html#/s:6OpenAI0A10AIProtocolP17audioTranslations5query10completionyAA21AudioTranslationQueryV_ys6ResultOyAA0hiK0Vs5Error_pGctF":{"name":"audioTranslations(query:completion:)","abstract":"

Translates audio data using OpenAI’s audio translation API and completes the operation asynchronously.

","parent_name":"OpenAIProtocol"},"Protocols/OpenAIProtocol.html#/s:6OpenAI0A10AIProtocolPAAE11completions5queryAA17CompletionsResultVAA0F5QueryV_tYaKF":{"name":"completions(query:)","parent_name":"OpenAIProtocol"},"Protocols/OpenAIProtocol.html#/s:6OpenAI0A10AIProtocolPAAE17completionsStream5queryScsyAA17CompletionsResultVs5Error_pGAA0G5QueryV_tF":{"name":"completionsStream(query:)","parent_name":"OpenAIProtocol"},"Protocols/OpenAIProtocol.html#/s:6OpenAI0A10AIProtocolPAAE6images5queryAA12ImagesResultVAA0F5QueryV_tYaKF":{"name":"images(query:)","parent_name":"OpenAIProtocol"},"Protocols/OpenAIProtocol.html#/s:6OpenAI0A10AIProtocolPAAE10imageEdits5queryAA12ImagesResultVAA05ImageE5QueryV_tYaKF":{"name":"imageEdits(query:)","parent_name":"OpenAIProtocol"},"Protocols/OpenAIProtocol.html#/s:6OpenAI0A10AIProtocolPAAE15imageVariations5queryAA12ImagesResultVAA05ImageE5QueryV_tYaKF":{"name":"imageVariations(query:)","parent_name":"OpenAIProtocol"},"Protocols/OpenAIProtocol.html#/s:6OpenAI0A10AIProtocolPAAE10embeddings5queryAA16EmbeddingsResultVAA0F5QueryV_tYaKF":{"name":"embeddings(query:)","parent_name":"OpenAIProtocol"},"Protocols/OpenAIProtocol.html#/s:6OpenAI0A10AIProtocolPAAE5chats5queryAA10ChatResultVAA0F5QueryV_tYaKF":{"name":"chats(query:)","parent_name":"OpenAIProtocol"},"Protocols/OpenAIProtocol.html#/s:6OpenAI0A10AIProtocolPAAE11chatsStream5queryScsyAA04ChatE6ResultVs5Error_pGAA0G5QueryV_tF":{"name":"chatsStream(query:)","parent_name":"OpenAIProtocol"},"Protocols/OpenAIProtocol.html#/s:6OpenAI0A10AIProtocolPAAE5edits5queryAA11EditsResultVAA0F5QueryV_tYaKF":{"name":"edits(query:)","parent_name":"OpenAIProtocol"},"Protocols/OpenAIProtocol.html#/s:6OpenAI0A10AIProtocolPAAE5model5queryAA11ModelResultVAA0F5QueryV_tYaKF":{"name":"model(query:)","parent_name":"OpenAIProtocol"},"Protocols/OpenAIProtocol.html#/s:6OpenAI0A10AIProtocolPAAE6modelsAA12ModelsResultVyYaKF":{"name":"models()","parent_name":"OpenAIProtocol"},"Protocols/OpenAIProtocol.html#/s:6OpenAI0A10AIProtocol
PAAE11moderations5queryAA17ModerationsResultVAA0F5QueryV_tYaKF":{"name":"moderations(query:)","parent_name":"OpenAIProtocol"},"Protocols/OpenAIProtocol.html#/s:6OpenAI0A10AIProtocolPAAE17audioCreateSpeech5queryAA05AudioF6ResultVAA0hF5QueryV_tYaKF":{"name":"audioCreateSpeech(query:)","parent_name":"OpenAIProtocol"},"Protocols/OpenAIProtocol.html#/s:6OpenAI0A10AIProtocolPAAE19audioTranscriptions5queryAA24AudioTranscriptionResultVAA0gH5QueryV_tYaKF":{"name":"audioTranscriptions(query:)","parent_name":"OpenAIProtocol"},"Protocols/OpenAIProtocol.html#/s:6OpenAI0A10AIProtocolPAAE17audioTranslations5queryAA22AudioTranslationResultVAA0gH5QueryV_tYaKF":{"name":"audioTranslations(query:)","parent_name":"OpenAIProtocol"},"Protocols/OpenAIProtocol.html#/s:6OpenAI0A10AIProtocolPAAE11completions5query7Combine12AnyPublisherVyAA17CompletionsResultVs5Error_pGAA0I5QueryV_tF":{"name":"completions(query:)","parent_name":"OpenAIProtocol"},"Protocols/OpenAIProtocol.html#/s:6OpenAI0A10AIProtocolPAAE17completionsStream5query7Combine12AnyPublisherVys6ResultOyAA011CompletionsJ0Vs5Error_pGsAM_pGAA0K5QueryV_tF":{"name":"completionsStream(query:)","parent_name":"OpenAIProtocol"},"Protocols/OpenAIProtocol.html#/s:6OpenAI0A10AIProtocolPAAE6images5query7Combine12AnyPublisherVyAA12ImagesResultVs5Error_pGAA0I5QueryV_tF":{"name":"images(query:)","parent_name":"OpenAIProtocol"},"Protocols/OpenAIProtocol.html#/s:6OpenAI0A10AIProtocolPAAE10imageEdits5query7Combine12AnyPublisherVyAA12ImagesResultVs5Error_pGAA05ImageE5QueryV_tF":{"name":"imageEdits(query:)","parent_name":"OpenAIProtocol"},"Protocols/OpenAIProtocol.html#/s:6OpenAI0A10AIProtocolPAAE15imageVariations5query7Combine12AnyPublisherVyAA12ImagesResultVs5Error_pGAA05ImageE5QueryV_tF":{"name":"imageVariations(query:)","parent_name":"OpenAIProtocol"},"Protocols/OpenAIProtocol.html#/s:6OpenAI0A10AIProtocolPAAE10embeddings5query7Combine12AnyPublisherVyAA16EmbeddingsResultVs5Error_pGAA0I5QueryV_tF":{"name":"embeddings(query:)","parent_name":"OpenAIProto
col"},"Protocols/OpenAIProtocol.html#/s:6OpenAI0A10AIProtocolPAAE5chats5query7Combine12AnyPublisherVyAA10ChatResultVs5Error_pGAA0I5QueryV_tF":{"name":"chats(query:)","parent_name":"OpenAIProtocol"},"Protocols/OpenAIProtocol.html#/s:6OpenAI0A10AIProtocolPAAE11chatsStream5query7Combine12AnyPublisherVys6ResultOyAA04ChateJ0Vs5Error_pGsAM_pGAA0K5QueryV_tF":{"name":"chatsStream(query:)","parent_name":"OpenAIProtocol"},"Protocols/OpenAIProtocol.html#/s:6OpenAI0A10AIProtocolPAAE5edits5query7Combine12AnyPublisherVyAA11EditsResultVs5Error_pGAA0I5QueryV_tF":{"name":"edits(query:)","parent_name":"OpenAIProtocol"},"Protocols/OpenAIProtocol.html#/s:6OpenAI0A10AIProtocolPAAE5model5query7Combine12AnyPublisherVyAA11ModelResultVs5Error_pGAA0I5QueryV_tF":{"name":"model(query:)","parent_name":"OpenAIProtocol"},"Protocols/OpenAIProtocol.html#/s:6OpenAI0A10AIProtocolPAAE6models7Combine12AnyPublisherVyAA12ModelsResultVs5Error_pGyF":{"name":"models()","parent_name":"OpenAIProtocol"},"Protocols/OpenAIProtocol.html#/s:6OpenAI0A10AIProtocolPAAE11moderations5query7Combine12AnyPublisherVyAA17ModerationsResultVs5Error_pGAA0I5QueryV_tF":{"name":"moderations(query:)","parent_name":"OpenAIProtocol"},"Protocols/OpenAIProtocol.html#/s:6OpenAI0A10AIProtocolPAAE17audioCreateSpeech5query7Combine12AnyPublisherVyAA05AudioF6ResultVs5Error_pGAA0kF5QueryV_tF":{"name":"audioCreateSpeech(query:)","parent_name":"OpenAIProtocol"},"Protocols/OpenAIProtocol.html#/s:6OpenAI0A10AIProtocolPAAE19audioTranscriptions5query7Combine12AnyPublisherVyAA24AudioTranscriptionResultVs5Error_pGAA0jK5QueryV_tF":{"name":"audioTranscriptions(query:)","parent_name":"OpenAIProtocol"},"Protocols/OpenAIProtocol.html#/s:6OpenAI0A10AIProtocolPAAE17audioTranslations5query7Combine12AnyPublisherVyAA22AudioTranslationResultVs5Error_pGAA0jK5QueryV_tF":{"name":"audioTranslations(query:)","parent_name":"OpenAIProtocol"},"Protocols/OpenAIProtocol.html":{"name":"OpenAIProtocol"},"Extensions/Model.html#/s:SS6OpenAIE6gpt4_oSSvpZ":{"name":"gpt4_o","a
bstract":"

gpt-4o, currently the most advanced, multimodal flagship model that’s cheaper and faster than GPT-4 Turbo.

","parent_name":"Model"},"Extensions/Model.html#/s:SS6OpenAIE11gpt4_o_miniSSvpZ":{"name":"gpt4_o_mini","abstract":"

gpt-4o-mini, currently the most affordable and intelligent model for fast and lightweight requests.

","parent_name":"Model"},"Extensions/Model.html#/s:SS6OpenAIE10gpt4_turboSSvpZ":{"name":"gpt4_turbo","abstract":"

gpt-4-turbo, The latest GPT-4 Turbo model with vision capabilities. Vision requests can now use JSON mode and function calling and more. Context window: 128,000 tokens

","parent_name":"Model"},"Extensions/Model.html#/s:SS6OpenAIE18gpt4_turbo_previewSSvpZ":{"name":"gpt4_turbo_preview","abstract":"

gpt-4-turbo, gpt-4 model with improved instruction following, JSON mode, reproducible outputs, parallel function calling and more. Maximum of 4096 output tokens

","parent_name":"Model"},"Extensions/Model.html#/s:SS6OpenAIE19gpt4_vision_previewSSvpZ":{"name":"gpt4_vision_preview","abstract":"

gpt-4-vision-preview, able to understand images, in addition to all other GPT-4 Turbo capabilities.

","parent_name":"Model"},"Extensions/Model.html#/s:SS6OpenAIE17gpt4_0125_previewSSvpZ":{"name":"gpt4_0125_preview","abstract":"

Snapshot of gpt-4-turbo-preview from January 25th 2024. This model reduces cases of “laziness” where the model doesn’t complete a task. Also fixes the bug impacting non-English UTF-8 generations. Maximum of 4096 output tokens

","parent_name":"Model"},"Extensions/Model.html#/s:SS6OpenAIE17gpt4_1106_previewSSvpZ":{"name":"gpt4_1106_preview","abstract":"

Snapshot of gpt-4-turbo-preview from November 6th 2023. Improved instruction following, JSON mode, reproducible outputs, parallel function calling and more. Maximum of 4096 output tokens

","parent_name":"Model"},"Extensions/Model.html#/s:SS6OpenAIE4gpt4SSvpZ":{"name":"gpt4","abstract":"

Most capable gpt-4 model, outperforms any GPT-3.5 model, able to do more complex tasks, and optimized for chat.

","parent_name":"Model"},"Extensions/Model.html#/s:SS6OpenAIE9gpt4_0613SSvpZ":{"name":"gpt4_0613","abstract":"

Snapshot of gpt-4 from June 13th 2023 with function calling data. Unlike gpt-4, this model will not receive updates, and will be deprecated 3 months after a new version is released.

","parent_name":"Model"},"Extensions/Model.html#/s:SS6OpenAIE9gpt4_0314SSvpZ":{"name":"gpt4_0314","abstract":"

Snapshot of gpt-4 from March 14th 2023. Unlike gpt-4, this model will not receive updates, and will only be supported for a three month period ending on June 14th 2023.

","parent_name":"Model"},"Extensions/Model.html#/s:SS6OpenAIE8gpt4_32kSSvpZ":{"name":"gpt4_32k","abstract":"

Same capabilities as the base gpt-4 model but with 4x the context length. Will be updated with our latest model iteration.

","parent_name":"Model"},"Extensions/Model.html#/s:SS6OpenAIE13gpt4_32k_0613SSvpZ":{"name":"gpt4_32k_0613","abstract":"

Snapshot of gpt-4-32k from June 13th 2023. Unlike gpt-4-32k, this model will not receive updates, and will be deprecated 3 months after a new version is released.

","parent_name":"Model"},"Extensions/Model.html#/s:SS6OpenAIE13gpt4_32k_0314SSvpZ":{"name":"gpt4_32k_0314","abstract":"

Snapshot of gpt-4-32k from March 14th 2023. Unlike gpt-4-32k, this model will not receive updates, and will only be supported for a three month period ending on June 14th 2023.

","parent_name":"Model"},"Extensions/Model.html#/s:SS6OpenAIE11gpt3_5TurboSSvpZ":{"name":"gpt3_5Turbo","abstract":"

Most capable gpt-3.5-turbo model and optimized for chat. Will be updated with our latest model iteration.

","parent_name":"Model"},"Extensions/Model.html#/s:SS6OpenAIE16gpt3_5Turbo_0125SSvpZ":{"name":"gpt3_5Turbo_0125","abstract":"

Snapshot of gpt-3.5-turbo from January 25th 2024. Decreased prices by 50%. Various improvements including higher accuracy at responding in requested formats and a fix for a bug which caused a text encoding issue for non-English language function calls.

","parent_name":"Model"},"Extensions/Model.html#/s:SS6OpenAIE16gpt3_5Turbo_1106SSvpZ":{"name":"gpt3_5Turbo_1106","abstract":"

Snapshot of gpt-3.5-turbo from November 6th 2023. The latest gpt-3.5-turbo model with improved instruction following, JSON mode, reproducible outputs, parallel function calling and more.

","parent_name":"Model"},"Extensions/Model.html#/s:SS6OpenAIE16gpt3_5Turbo_0613SSvpZ":{"name":"gpt3_5Turbo_0613","abstract":"

Snapshot of gpt-3.5-turbo from June 13th 2023 with function calling data. Unlike gpt-3.5-turbo, this model will not receive updates, and will be deprecated 3 months after a new version is released.

","parent_name":"Model"},"Extensions/Model.html#/s:SS6OpenAIE16gpt3_5Turbo_0301SSvpZ":{"name":"gpt3_5Turbo_0301","abstract":"

Snapshot of gpt-3.5-turbo from March 1st 2023. Unlike gpt-3.5-turbo, this model will not receive updates, and will only be supported for a three month period ending on June 1st 2023.

","parent_name":"Model"},"Extensions/Model.html#/s:SS6OpenAIE15gpt3_5Turbo_16kSSvpZ":{"name":"gpt3_5Turbo_16k","abstract":"

Same capabilities as the standard gpt-3.5-turbo model but with 4 times the context.

","parent_name":"Model"},"Extensions/Model.html#/s:SS6OpenAIE20gpt3_5Turbo_16k_0613SSvpZ":{"name":"gpt3_5Turbo_16k_0613","abstract":"

Snapshot of gpt-3.5-turbo-16k from June 13th 2023. Unlike gpt-3.5-turbo-16k, this model will not receive updates, and will be deprecated 3 months after a new version is released.

","parent_name":"Model"},"Extensions/Model.html#/s:SS6OpenAIE15textDavinci_003SSvpZ":{"name":"textDavinci_003","abstract":"

Can do any language task with better quality, longer output, and consistent instruction-following than the curie, babbage, or ada models. Also supports inserting completions within text.

","parent_name":"Model"},"Extensions/Model.html#/s:SS6OpenAIE15textDavinci_002SSvpZ":{"name":"textDavinci_002","abstract":"

Similar capabilities to text-davinci-003 but trained with supervised fine-tuning instead of reinforcement learning.

","parent_name":"Model"},"Extensions/Model.html#/s:SS6OpenAIE9textCurieSSvpZ":{"name":"textCurie","abstract":"

Very capable, faster and lower cost than Davinci.

","parent_name":"Model"},"Extensions/Model.html#/s:SS6OpenAIE11textBabbageSSvpZ":{"name":"textBabbage","abstract":"

Capable of straightforward tasks, very fast, and lower cost.

","parent_name":"Model"},"Extensions/Model.html#/s:SS6OpenAIE7textAdaSSvpZ":{"name":"textAda","abstract":"

Capable of very simple tasks, usually the fastest model in the GPT-3 series, and lowest cost.

","parent_name":"Model"},"Extensions/Model.html#/s:SS6OpenAIE15textDavinci_001SSvpZ":{"name":"textDavinci_001","parent_name":"Model"},"Extensions/Model.html#/s:SS6OpenAIE19codeDavinciEdit_001SSvpZ":{"name":"codeDavinciEdit_001","parent_name":"Model"},"Extensions/Model.html#/s:SS6OpenAIE5tts_1SSvpZ":{"name":"tts_1","abstract":"

The latest text to speech model, optimized for speed.

","parent_name":"Model"},"Extensions/Model.html#/s:SS6OpenAIE8tts_1_hdSSvpZ":{"name":"tts_1_hd","abstract":"

The latest text to speech model, optimized for quality.

","parent_name":"Model"},"Extensions/Model.html#/s:SS6OpenAIE9whisper_1SSvpZ":{"name":"whisper_1","parent_name":"Model"},"Extensions/Model.html#/s:SS6OpenAIE8dall_e_2SSvpZ":{"name":"dall_e_2","parent_name":"Model"},"Extensions/Model.html#/s:SS6OpenAIE8dall_e_3SSvpZ":{"name":"dall_e_3","parent_name":"Model"},"Extensions/Model.html#/s:SS6OpenAIE7davinciSSvpZ":{"name":"davinci","abstract":"

Most capable GPT-3 model. Can do any task the other models can do, often with higher quality.

","parent_name":"Model"},"Extensions/Model.html#/s:SS6OpenAIE5curieSSvpZ":{"name":"curie","abstract":"

Very capable, but faster and lower cost than Davinci.

","parent_name":"Model"},"Extensions/Model.html#/s:SS6OpenAIE7babbageSSvpZ":{"name":"babbage","abstract":"

Capable of straightforward tasks, very fast, and lower cost.

","parent_name":"Model"},"Extensions/Model.html#/s:SS6OpenAIE3adaSSvpZ":{"name":"ada","abstract":"

Capable of very simple tasks, usually the fastest model in the GPT-3 series, and lowest cost.

","parent_name":"Model"},"Extensions/Model.html#/s:SS6OpenAIE16textEmbeddingAdaSSvpZ":{"name":"textEmbeddingAda","parent_name":"Model"},"Extensions/Model.html#/s:SS6OpenAIE13textSearchAdaSSvpZ":{"name":"textSearchAda","parent_name":"Model"},"Extensions/Model.html#/s:SS6OpenAIE20textSearchBabbageDocSSvpZ":{"name":"textSearchBabbageDoc","parent_name":"Model"},"Extensions/Model.html#/s:SS6OpenAIE25textSearchBabbageQuery001SSvpZ":{"name":"textSearchBabbageQuery001","parent_name":"Model"},"Extensions/Model.html#/s:SS6OpenAIE14textEmbedding3SSvpZ":{"name":"textEmbedding3","parent_name":"Model"},"Extensions/Model.html#/s:SS6OpenAIE19textEmbedding3LargeSSvpZ":{"name":"textEmbedding3Large","parent_name":"Model"},"Extensions/Model.html#/s:SS6OpenAIE20textModerationStableSSvpZ":{"name":"textModerationStable","abstract":"

Almost as capable as the latest model, but slightly older.

","parent_name":"Model"},"Extensions/Model.html#/s:SS6OpenAIE20textModerationLatestSSvpZ":{"name":"textModerationLatest","abstract":"

Most capable moderation model. Accuracy will be slightly higher than the stable model.

","parent_name":"Model"},"Extensions/Model.html#/s:SS6OpenAIE10moderationSSvpZ":{"name":"moderation","parent_name":"Model"},"Extensions/Model.html":{"name":"Model"},"Enums/OpenAIError.html#/s:6OpenAI0A7AIErrorO9emptyDatayA2CmF":{"name":"emptyData","parent_name":"OpenAIError"},"Enums/OpenAIError.html":{"name":"OpenAIError"},"Classes/OpenAI/Configuration.html#/s:6OpenAIAAC13ConfigurationV5tokenSSvp":{"name":"token","abstract":"

OpenAI API token. See https://platform.openai.com/docs/api-reference/authentication

","parent_name":"Configuration"},"Classes/OpenAI/Configuration.html#/s:6OpenAIAAC13ConfigurationV22organizationIdentifierSSSgvp":{"name":"organizationIdentifier","abstract":"

Optional OpenAI organization identifier. See https://platform.openai.com/docs/api-reference/authentication

","parent_name":"Configuration"},"Classes/OpenAI/Configuration.html#/s:6OpenAIAAC13ConfigurationV4hostSSvp":{"name":"host","abstract":"

API host. Set this property if you use some kind of proxy or your own server. Default is api.openai.com

","parent_name":"Configuration"},"Classes/OpenAI/Configuration.html#/s:6OpenAIAAC13ConfigurationV4portSivp":{"name":"port","parent_name":"Configuration"},"Classes/OpenAI/Configuration.html#/s:6OpenAIAAC13ConfigurationV6schemeSSvp":{"name":"scheme","parent_name":"Configuration"},"Classes/OpenAI/Configuration.html#/s:6OpenAIAAC13ConfigurationV15timeoutIntervalSdvp":{"name":"timeoutInterval","abstract":"

Default request timeout

","parent_name":"Configuration"},"Classes/OpenAI/Configuration.html#/s:6OpenAIAAC13ConfigurationV5token22organizationIdentifier4host4port6scheme15timeoutIntervalADSS_SSSgSSSiSSSdtcfc":{"name":"init(token:organizationIdentifier:host:port:scheme:timeoutInterval:)","parent_name":"Configuration"},"Classes/OpenAI/Configuration.html":{"name":"Configuration","parent_name":"OpenAI"},"Classes/OpenAI.html#/s:6OpenAIAAC13configurationAB13ConfigurationVvp":{"name":"configuration","parent_name":"OpenAI"},"Classes/OpenAI.html#/s:6OpenAIAAC8apiTokenABSS_tcfc":{"name":"init(apiToken:)","parent_name":"OpenAI"},"Classes/OpenAI.html#/s:6OpenAIAAC13configurationA2B13ConfigurationV_tcfc":{"name":"init(configuration:)","parent_name":"OpenAI"},"Classes/OpenAI.html#/s:6OpenAIAAC13configuration7sessionA2B13ConfigurationV_So12NSURLSessionCtcfc":{"name":"init(configuration:session:)","parent_name":"OpenAI"},"Classes/OpenAI.html#/s:6OpenAI0A10AIProtocolP11completions5query10completionyAA16CompletionsQueryV_ys6ResultOyAA0gI0Vs5Error_pGctF":{"name":"completions(query:completion:)","parent_name":"OpenAI"},"Classes/OpenAI.html#/s:6OpenAI0A10AIProtocolP17completionsStream5query8onResult10completionyAA16CompletionsQueryV_ys0H0OyAA0jH0Vs5Error_pGcysAN_pSgcSgtF":{"name":"completionsStream(query:onResult:completion:)","parent_name":"OpenAI"},"Classes/OpenAI.html#/s:6OpenAI0A10AIProtocolP6images5query10completionyAA11ImagesQueryV_ys6ResultOyAA0gI0Vs5Error_pGctF":{"name":"images(query:completion:)","parent_name":"OpenAI"},"Classes/OpenAI.html#/s:6OpenAI0A10AIProtocolP10imageEdits5query10completionyAA05ImageE5QueryV_ys6ResultOyAA06ImagesJ0Vs5Error_pGctF":{"name":"imageEdits(query:completion:)","parent_name":"OpenAI"},"Classes/OpenAI.html#/s:6OpenAI0A10AIProtocolP15imageVariations5query10completionyAA05ImageE5QueryV_ys6ResultOyAA06ImagesJ0Vs5Error_pGctF":{"name":"imageVariations(query:completion:)","parent_name":"OpenAI"},"Classes/OpenAI.html#/s:6OpenAI0A10AIProtocolP10embeddings5query10completionyAA15Embe
ddingsQueryV_ys6ResultOyAA0gI0Vs5Error_pGctF":{"name":"embeddings(query:completion:)","parent_name":"OpenAI"},"Classes/OpenAI.html#/s:6OpenAI0A10AIProtocolP5chats5query10completionyAA9ChatQueryV_ys6ResultOyAA0gI0Vs5Error_pGctF":{"name":"chats(query:completion:)","parent_name":"OpenAI"},"Classes/OpenAI.html#/s:6OpenAI0A10AIProtocolP11chatsStream5query8onResult10completionyAA9ChatQueryV_ys0H0OyAA0jeH0Vs5Error_pGcysAN_pSgcSgtF":{"name":"chatsStream(query:onResult:completion:)","parent_name":"OpenAI"},"Classes/OpenAI.html#/s:6OpenAI0A10AIProtocolP5edits5query10completionyAA10EditsQueryV_ys6ResultOyAA0gI0Vs5Error_pGctF":{"name":"edits(query:completion:)","parent_name":"OpenAI"},"Classes/OpenAI.html#/s:6OpenAI0A10AIProtocolP5model5query10completionyAA10ModelQueryV_ys6ResultOyAA0gI0Vs5Error_pGctF":{"name":"model(query:completion:)","parent_name":"OpenAI"},"Classes/OpenAI.html#/s:6OpenAI0A10AIProtocolP6models10completionyys6ResultOyAA06ModelsF0Vs5Error_pGc_tF":{"name":"models(completion:)","parent_name":"OpenAI"},"Classes/OpenAI.html#/s:6OpenAI0A10AIProtocolP11moderations5query10completionyAA16ModerationsQueryV_ys6ResultOyAA0gI0Vs5Error_pGctF":{"name":"moderations(query:completion:)","parent_name":"OpenAI"},"Classes/OpenAI.html#/s:6OpenAI0A10AIProtocolP19audioTranscriptions5query10completionyAA23AudioTranscriptionQueryV_ys6ResultOyAA0hiK0Vs5Error_pGctF":{"name":"audioTranscriptions(query:completion:)","parent_name":"OpenAI"},"Classes/OpenAI.html#/s:6OpenAI0A10AIProtocolP17audioTranslations5query10completionyAA21AudioTranslationQueryV_ys6ResultOyAA0hiK0Vs5Error_pGctF":{"name":"audioTranslations(query:completion:)","parent_name":"OpenAI"},"Classes/OpenAI.html#/s:6OpenAI0A10AIProtocolP17audioCreateSpeech5query10completionyAA05AudioF5QueryV_ys6ResultOyAA0ifK0Vs5Error_pGctF":{"name":"audioCreateSpeech(query:completion:)","parent_name":"OpenAI"},"Classes/OpenAI.html":{"name":"OpenAI"},"Classes.html":{"name":"Classes","abstract":"

The following classes are available globally.

"},"Enums.html":{"name":"Enumerations","abstract":"

The following enumerations are available globally.

"},"Extensions.html":{"name":"Extensions","abstract":"

The following extensions are available globally.

"},"Protocols.html":{"name":"Protocols","abstract":"

The following protocols are available globally.

"},"Structs.html":{"name":"Structures","abstract":"

The following structures are available globally.

"},"Typealiases.html":{"name":"Type Aliases","abstract":"

The following type aliases are available globally.

"}} \ No newline at end of file diff --git a/docs/undocumented.json b/docs/undocumented.json new file mode 100644 index 00000000..8b7f1785 --- /dev/null +++ b/docs/undocumented.json @@ -0,0 +1,6 @@ +{ + "warnings": [ + + ], + "source_directory": "/Users/dingxiancao/OpenAI" +} \ No newline at end of file