diff --git a/Demo/App/APIProvidedView.swift b/Demo/App/APIProvidedView.swift index 9771e1fb..c9362a21 100644 --- a/Demo/App/APIProvidedView.swift +++ b/Demo/App/APIProvidedView.swift @@ -13,7 +13,9 @@ struct APIProvidedView: View { @Binding var apiKey: String @StateObject var chatStore: ChatStore @StateObject var imageStore: ImageStore + @StateObject var assistantStore: AssistantStore @StateObject var miscStore: MiscStore + @State var isShowingAPIConfigModal: Bool = true @Environment(\.idProviderValue) var idProvider @@ -35,6 +37,12 @@ struct APIProvidedView: View { openAIClient: OpenAI(apiToken: apiKey.wrappedValue) ) ) + self._assistantStore = StateObject( + wrappedValue: AssistantStore( + openAIClient: OpenAI(apiToken: apiKey.wrappedValue), + idProvider: idProvider + ) + ) self._miscStore = StateObject( wrappedValue: MiscStore( openAIClient: OpenAI(apiToken: apiKey.wrappedValue) @@ -46,12 +54,14 @@ struct APIProvidedView: View { ContentView( chatStore: chatStore, imageStore: imageStore, + assistantStore: assistantStore, miscStore: miscStore ) .onChange(of: apiKey) { newApiKey in let client = OpenAI(apiToken: newApiKey) chatStore.openAIClient = client imageStore.openAIClient = client + assistantStore.openAIClient = client miscStore.openAIClient = client } } diff --git a/Demo/App/ContentView.swift b/Demo/App/ContentView.swift index 2826e6bc..091951c7 100644 --- a/Demo/App/ContentView.swift +++ b/Demo/App/ContentView.swift @@ -12,26 +12,38 @@ import SwiftUI struct ContentView: View { @ObservedObject var chatStore: ChatStore @ObservedObject var imageStore: ImageStore + @ObservedObject var assistantStore: AssistantStore @ObservedObject var miscStore: MiscStore + @State private var selectedTab = 0 @Environment(\.idProviderValue) var idProvider var body: some View { TabView(selection: $selectedTab) { ChatView( - store: chatStore + store: chatStore, + assistantStore: assistantStore ) .tabItem { Label("Chats", systemImage: "message") } .tag(0) + AssistantsView( + store: chatStore, + assistantStore: assistantStore + ) + .tabItem { + Label("Assistants", systemImage: "eyeglasses") + } + .tag(1) + TranscribeView( ) .tabItem { Label("Transcribe", systemImage: "mic") } - .tag(1) + .tag(2) ImageView( store: imageStore @@ -39,26 +51,19 @@ struct ContentView: View { .tabItem { Label("Image", systemImage: "photo") } - .tag(2) - + .tag(3) + MiscView( store: miscStore ) .tabItem { Label("Misc", systemImage: "ellipsis") } - .tag(3) + .tag(4) } } } -struct ChatsView: View { - var body: some View { - Text("Chats") - .font(.largeTitle) - } -} - struct TranscribeView: View { var body: some View { Text("Transcribe: TBD") diff --git a/Demo/Demo.xcodeproj/project.pbxproj b/Demo/Demo.xcodeproj/project.pbxproj index edde7d8d..528156b6 100644 --- a/Demo/Demo.xcodeproj/project.pbxproj +++ b/Demo/Demo.xcodeproj/project.pbxproj @@ -234,6 +234,7 @@ GCC_WARN_UNINITIALIZED_AUTOS = YES_AGGRESSIVE; GCC_WARN_UNUSED_FUNCTION = YES; GCC_WARN_UNUSED_VARIABLE = YES; + IPHONEOS_DEPLOYMENT_TARGET = 16.0; MTL_ENABLE_DEBUG_INFO = INCLUDE_SOURCE; MTL_FAST_MATH = YES; ONLY_ACTIVE_ARCH = YES; @@ -286,6 +287,7 @@ GCC_WARN_UNINITIALIZED_AUTOS = YES_AGGRESSIVE; GCC_WARN_UNUSED_FUNCTION = YES; GCC_WARN_UNUSED_VARIABLE = YES; + IPHONEOS_DEPLOYMENT_TARGET = 16.0; MTL_ENABLE_DEBUG_INFO = NO; MTL_FAST_MATH = YES; SWIFT_COMPILATION_MODE = wholemodule; @@ -315,7 +317,7 @@ "INFOPLIST_KEY_UIStatusBarStyle[sdk=iphonesimulator*]" = UIStatusBarStyleDefault; INFOPLIST_KEY_UISupportedInterfaceOrientations_iPad = "UIInterfaceOrientationPortrait 
UIInterfaceOrientationPortraitUpsideDown UIInterfaceOrientationLandscapeLeft UIInterfaceOrientationLandscapeRight"; INFOPLIST_KEY_UISupportedInterfaceOrientations_iPhone = "UIInterfaceOrientationPortrait UIInterfaceOrientationLandscapeLeft UIInterfaceOrientationLandscapeRight"; - IPHONEOS_DEPLOYMENT_TARGET = 16.4; + IPHONEOS_DEPLOYMENT_TARGET = 16.0; LD_RUNPATH_SEARCH_PATHS = "@executable_path/Frameworks"; "LD_RUNPATH_SEARCH_PATHS[sdk=macosx*]" = "@executable_path/../Frameworks"; MACOSX_DEPLOYMENT_TARGET = 13.3; @@ -354,7 +356,7 @@ "INFOPLIST_KEY_UIStatusBarStyle[sdk=iphonesimulator*]" = UIStatusBarStyleDefault; INFOPLIST_KEY_UISupportedInterfaceOrientations_iPad = "UIInterfaceOrientationPortrait UIInterfaceOrientationPortraitUpsideDown UIInterfaceOrientationLandscapeLeft UIInterfaceOrientationLandscapeRight"; INFOPLIST_KEY_UISupportedInterfaceOrientations_iPhone = "UIInterfaceOrientationPortrait UIInterfaceOrientationLandscapeLeft UIInterfaceOrientationLandscapeRight"; - IPHONEOS_DEPLOYMENT_TARGET = 16.4; + IPHONEOS_DEPLOYMENT_TARGET = 16.0; LD_RUNPATH_SEARCH_PATHS = "@executable_path/Frameworks"; "LD_RUNPATH_SEARCH_PATHS[sdk=macosx*]" = "@executable_path/../Frameworks"; MACOSX_DEPLOYMENT_TARGET = 13.3; diff --git a/Demo/DemoChat/Sources/AssistantStore.swift b/Demo/DemoChat/Sources/AssistantStore.swift new file mode 100644 index 00000000..1cbc4fe1 --- /dev/null +++ b/Demo/DemoChat/Sources/AssistantStore.swift @@ -0,0 +1,146 @@ +// +// ChatStore.swift +// DemoChat +// +// Created by Sihao Lu on 3/25/23. +// + +import Foundation +import Combine +import OpenAI + +public final class AssistantStore: ObservableObject { + public var openAIClient: OpenAIProtocol + let idProvider: () -> String + @Published var selectedAssistantId: String? + + @Published var availableAssistants: [Assistant] = [] + + public init( + openAIClient: OpenAIProtocol, + idProvider: @escaping () -> String + ) { + self.openAIClient = openAIClient + self.idProvider = idProvider + } + + // MARK: Models + + @MainActor + func createAssistant(name: String, description: String, instructions: String, codeInterpreter: Bool, fileSearch: Bool, functions: [FunctionDeclaration], fileIds: [String]? = nil) async -> String? { + do { + let toolResources: ToolResources? = if let fileIds { + ToolResources(fileSearch: nil, codeInterpreter: .init(fileIds: fileIds)) + } else { + nil + } + + let tools = createToolsArray(codeInterpreter: codeInterpreter, fileSearch: fileSearch, functions: functions) + let query = AssistantsQuery(model: Model.gpt4_o_mini, name: name, description: description, instructions: instructions, tools:tools, toolResources: toolResources) + let response = try await openAIClient.assistantCreate(query: query) + + // Refresh assistants with one just created (or modified) + let _ = await getAssistants() + + // Returns assistantId + return response.id + + } catch { + // TODO: Better error handling + print(error.localizedDescription) + } + return nil + } + + @MainActor + func modifyAssistant(asstId: String, name: String, description: String, instructions: String, codeInterpreter: Bool, fileSearch: Bool, functions: [FunctionDeclaration], fileIds: [String]? = nil) async -> String? { + do { + let toolResources: ToolResources? 
= if let fileIds { + ToolResources(fileSearch: nil, codeInterpreter: .init(fileIds: fileIds)) + } else { + nil + } + + let tools = createToolsArray(codeInterpreter: codeInterpreter, fileSearch: fileSearch, functions: functions) + let query = AssistantsQuery(model: Model.gpt4_o_mini, name: name, description: description, instructions: instructions, tools:tools, toolResources: toolResources) + let response = try await openAIClient.assistantModify(query: query, assistantId: asstId) + + // Returns assistantId + return response.id + + } catch { + // TODO: Better error handling + print(error.localizedDescription) + } + return nil + } + + @MainActor + func getAssistants(limit: Int = 20, after: String? = nil) async -> [Assistant] { + do { + let response = try await openAIClient.assistants(after: after) + + var assistants = [Assistant]() + for result in response.data ?? [] { + let tools = result.tools ?? [] + let codeInterpreter = tools.contains { $0 == .codeInterpreter } + let fileSearch = tools.contains { $0 == .fileSearch } + let functions = tools.compactMap { + switch $0 { + case let .function(declaration): + return declaration + default: + return nil + } + } + let fileIds = result.toolResources.codeInterpreter?.fileIds ?? [] + + assistants.append(Assistant(id: result.id, name: result.name ?? "", description: result.description, instructions: result.instructions, codeInterpreter: codeInterpreter, fileSearch: fileSearch, fileIds: fileIds, functions: functions)) + } + if after == nil { + availableAssistants = assistants + } + else { + availableAssistants = availableAssistants + assistants + } + return assistants + + } catch { + // TODO: Better error handling + print(error.localizedDescription) + } + return [] + } + + func selectAssistant(_ assistantId: String?) { + selectedAssistantId = assistantId + } + + @MainActor + func uploadFile(url: URL) async -> FilesResult? { + do { + + let mimeType = url.mimeType() + + let fileData = try Data(contentsOf: url) + + let result = try await openAIClient.files(query: FilesQuery(purpose: "assistants", file: fileData, fileName: url.lastPathComponent, contentType: mimeType)) + return result + } + catch { + print("error = \(error)") + return nil + } + } + + func createToolsArray(codeInterpreter: Bool, fileSearch: Bool, functions: [FunctionDeclaration]) -> [Tool] { + var tools = [Tool]() + if codeInterpreter { + tools.append(.codeInterpreter) + } + if fileSearch { + tools.append(.fileSearch) + } + return tools + functions.map { .function($0) } + } +} diff --git a/Demo/DemoChat/Sources/ChatStore.swift b/Demo/DemoChat/Sources/ChatStore.swift index 0462e87a..1788de13 100644 --- a/Demo/DemoChat/Sources/ChatStore.swift +++ b/Demo/DemoChat/Sources/ChatStore.swift @@ -8,6 +8,7 @@ import Foundation import Combine import OpenAI +import SwiftUI public final class ChatStore: ObservableObject { public var openAIClient: OpenAIProtocol @@ -17,6 +18,15 @@ public final class ChatStore: ObservableObject { @Published var conversationErrors: [Conversation.ID: Error] = [:] @Published var selectedConversationID: Conversation.ID? + // Used for assistants API state. + private var timer: Timer? + private var timeInterval: TimeInterval = 1.0 + private var currentRunId: String? + private var currentThreadId: String? + private var currentConversationId: String? + + @Published var isSendingMessage = false + var selectedConversation: Conversation? 
{ selectedConversationID.flatMap { id in conversations.first { $0.id == id } @@ -39,19 +49,19 @@ public final class ChatStore: ObservableObject { } // MARK: - Events - func createConversation() { - let conversation = Conversation(id: idProvider(), messages: []) + func createConversation(type: ConversationType = .normal, assistantId: String? = nil) { + let conversation = Conversation(id: idProvider(), messages: [], type: type, assistantId: assistantId) conversations.append(conversation) } - + func selectConversation(_ conversationId: Conversation.ID?) { selectedConversationID = conversationId } - + func deleteConversation(_ conversationId: Conversation.ID) { conversations.removeAll(where: { $0.id == conversationId }) } - + @MainActor func sendMessage( _ message: Message, @@ -61,14 +71,75 @@ public final class ChatStore: ObservableObject { guard let conversationIndex = conversations.firstIndex(where: { $0.id == conversationId }) else { return } - conversations[conversationIndex].messages.append(message) - await completeChat( - conversationId: conversationId, - model: model - ) + switch conversations[conversationIndex].type { + case .normal: + conversations[conversationIndex].messages.append(message) + + await completeChat( + conversationId: conversationId, + model: model + ) + // For assistant case we send chats to thread and then poll, polling will receive sent chat + new assistant messages. + case .assistant: + + // First message in an assistant thread. + if conversations[conversationIndex].messages.count == 0 { + + var localMessage = message + localMessage.isLocal = true + conversations[conversationIndex].messages.append(localMessage) + + guard let newMessage = ChatQuery.ChatCompletionMessageParam(role: message.role, content: message.content) else { + print("error: Couldn't form message") + return + } + + do { + + let threadsQuery = ThreadsQuery(messages: [newMessage]) + let threadsResult = try await openAIClient.threads(query: threadsQuery) + + guard let currentAssistantId = conversations[conversationIndex].assistantId else { return print("No assistant selected.")} + + let runsQuery = RunsQuery(assistantId: currentAssistantId) + let runsResult = try await openAIClient.runs(threadId: threadsResult.id, query: runsQuery) + + // check in on the run every time the poller gets hit. + startPolling(conversationId: conversationId, runId: runsResult.id, threadId: threadsResult.id) + } + catch { + print("error: \(error) creating thread w/ message") + } + } + // Subsequent messages on the assistant thread. + else { + + var localMessage = message + localMessage.isLocal = true + conversations[conversationIndex].messages.append(localMessage) + + do { + guard let currentThreadId else { return print("No thread to add message to.")} + + let _ = try await openAIClient.threadsAddMessage(threadId: currentThreadId, + query: MessageQuery(role: message.role, content: message.content)) + + guard let currentAssistantId = conversations[conversationIndex].assistantId else { return print("No assistant selected.")} + + let runsQuery = RunsQuery(assistantId: currentAssistantId) + let runsResult = try await openAIClient.runs(threadId: currentThreadId, query: runsQuery) + + // check in on the run every time the poller gets hit. 
+ startPolling(conversationId: conversationId, runId: runsResult.id, threadId: currentThreadId) + } + catch { + print("error: \(error) adding to thread w/ message") + } + } + } } - + @MainActor func completeChat( conversationId: Conversation.ID, @@ -77,7 +148,7 @@ public final class ChatStore: ObservableObject { guard let conversation = conversations.first(where: { $0.id == conversationId }) else { return } - + conversationErrors[conversationId] = nil do { @@ -89,16 +160,16 @@ public final class ChatStore: ObservableObject { name: "getWeatherData", description: "Get the current weather in a given location", parameters: .init( - type: .object, - properties: [ - "location": .init(type: .string, description: "The city and state, e.g. San Francisco, CA") - ], - required: ["location"] + type: .object, + properties: [ + "location": .init(type: .string, description: "The city and state, e.g. San Francisco, CA") + ], + required: ["location"] ) )) let functions = [weatherFunction] - + let chatsStream: AsyncThrowingStream = openAIClient.chatsStream( query: ChatQuery( messages: conversation.messages.map { message in @@ -153,4 +224,179 @@ public final class ChatStore: ObservableObject { conversationErrors[conversationId] = error } } + + // Start Polling section + func startPolling(conversationId: Conversation.ID, runId: String, threadId: String) { + currentRunId = runId + currentThreadId = threadId + currentConversationId = conversationId + isSendingMessage = true + timer = Timer.scheduledTimer(withTimeInterval: timeInterval, repeats: true) { [weak self] _ in + DispatchQueue.main.async { + self?.timerFired() + } + } + } + + func stopPolling() { + isSendingMessage = false + timer?.invalidate() + timer = nil + } + + private func timerFired() { + Task { + let result = try await openAIClient.runRetrieve(threadId: currentThreadId ?? "", runId: currentRunId ?? "") + + // TESTING RETRIEVAL OF RUN STEPS + try await handleRunRetrieveSteps() + + switch result.status { + // Get threadsMesages. + case .completed: + handleCompleted() + case .failed: + // Handle more gracefully with a popup dialog or failure indicator + await MainActor.run { + self.stopPolling() + } + case .requiresAction: + try await handleRequiresAction(result) + default: + // Handle additional statuses "requires_action", "queued" ?, "expired", "cancelled" + // https://platform.openai.com/docs/assistants/how-it-works/runs-and-run-steps + break + } + } + } + // END Polling section + + // This function is called when a thread is marked "completed" by the run status API. + private func handleCompleted() { + guard let conversationIndex = conversations.firstIndex(where: { $0.id == currentConversationId }) else { + return + } + Task { + await MainActor.run { + self.stopPolling() + } + // Once a thread is marked "completed" by the status API, we can retrieve the threads messages, including a pagins cursor representing the last message we received. + var before: String? + if let lastNonLocalMessage = self.conversations[conversationIndex].messages.last(where: { $0.isLocal == false }) { + before = lastNonLocalMessage.id + } + + let result = try await openAIClient.threadsMessages(threadId: currentThreadId ?? "", before: before) + + for item in result.data.reversed() { + let role = item.role + for innerItem in item.content { + let message = Message( + id: item.id, + role: role, + content: innerItem.text?.value ?? 
"", + createdAt: Date(), + isLocal: false // Messages from the server are not local + ) + await MainActor.run { + // Check if this message from the API matches a local message + if let localMessageIndex = self.conversations[conversationIndex].messages.firstIndex(where: { $0.isLocal == true }) { + + // Replace the local message with the API message + self.conversations[conversationIndex].messages[localMessageIndex] = message + } else { + // This is a new message from the server, append it + self.conversations[conversationIndex].messages.append(message) + } + } + } + } + } + } + + // Store the function call as a message and submit tool outputs with a simple done message. + private func handleRequiresAction(_ result: RunResult) async throws { + guard let currentThreadId, let currentRunId else { + return + } + + guard let toolCalls = result.requiredAction?.submitToolOutputs.toolCalls else { + return + } + + var toolOutputs = [RunToolOutputsQuery.ToolOutput]() + + for toolCall in toolCalls { + let msgContent = "function\nname: \(toolCall.function.name ?? "")\nargs: \(toolCall.function.arguments ?? "{}")" + + let runStepMessage = Message( + id: toolCall.id, + role: .assistant, + content: msgContent, + createdAt: Date(), + isRunStep: true + ) + await addOrUpdateRunStepMessage(runStepMessage) + + // Just return a generic "Done" output for now + toolOutputs.append(.init(toolCallId: toolCall.id, output: "Done")) + } + + let query = RunToolOutputsQuery(toolOutputs: toolOutputs) + _ = try await openAIClient.runSubmitToolOutputs(threadId: currentThreadId, runId: currentRunId, query: query) + } + + // The run retrieval steps are fetched in a separate task. This request is fetched, checking for new run steps, each time the run is fetched. + private func handleRunRetrieveSteps() async throws { + var before: String? +// if let lastRunStepMessage = self.conversations[conversationIndex].messages.last(where: { $0.isRunStep == true }) { +// before = lastRunStepMessage.id +// } + + let stepsResult = try await openAIClient.runRetrieveSteps(threadId: currentThreadId ?? "", runId: currentRunId ?? "", before: before) + + for item in stepsResult.data.reversed() { + let toolCalls = item.stepDetails.toolCalls?.reversed() ?? [] + + for step in toolCalls { + // TODO: Depending on the type of tool tha is used we can add additional information here + // ie: if its a fileSearch: add file information, code_interpreter: add inputs and outputs info, or function: add arguemts and additional info. + let msgContent: String + switch step.type { + case .fileSearch: + msgContent = "RUN STEP: \(step.type)" + + case .codeInterpreter: + let code = step.codeInterpreter + msgContent = "code_interpreter\ninput:\n\(code?.input ?? "")\noutputs: \(code?.outputs?.first?.logs ?? "")" + + case .function: + msgContent = "function\nname: \(step.function?.name ?? "")\nargs: \(step.function?.arguments ?? 
"{}")" + + } + let runStepMessage = Message( + id: step.id, + role: .assistant, + content: msgContent, + createdAt: Date(), + isRunStep: true + ) + await addOrUpdateRunStepMessage(runStepMessage) + } + } + } + + @MainActor + private func addOrUpdateRunStepMessage(_ message: Message) async { + guard let conversationIndex = conversations.firstIndex(where: { $0.id == currentConversationId }) else { + return + } + + if let localMessageIndex = conversations[conversationIndex].messages.firstIndex(where: { $0.isRunStep == true && $0.id == message.id }) { + conversations[conversationIndex].messages[localMessageIndex] = message + } + else { + conversations[conversationIndex].messages.append(message) + } + } } diff --git a/Demo/DemoChat/Sources/Models/Assistant.swift b/Demo/DemoChat/Sources/Models/Assistant.swift new file mode 100644 index 00000000..2aecd037 --- /dev/null +++ b/Demo/DemoChat/Sources/Models/Assistant.swift @@ -0,0 +1,44 @@ +// +// Conversation.swift +// DemoChat +// +// Created by Sihao Lu on 3/25/23. +// + +import Foundation +import OpenAI + +struct Assistant: Hashable { + init(id: String, name: String, description: String? = nil, instructions: String? = nil, codeInterpreter: Bool, fileSearch: Bool, fileIds: [String]? = nil, functions: [FunctionDeclaration] = []) { + self.id = id + self.name = name + self.description = description + self.instructions = instructions + self.codeInterpreter = codeInterpreter + self.fileSearch = fileSearch + self.fileIds = fileIds + self.functions = functions + } + + typealias ID = String + + let id: String + let name: String + let description: String? + let instructions: String? + let fileIds: [String]? + var codeInterpreter: Bool + var fileSearch: Bool + var functions: [FunctionDeclaration] +} + + +extension Assistant: Equatable, Identifiable {} + +extension FunctionDeclaration: Hashable { + public func hash(into hasher: inout Hasher) { + hasher.combine(name) + hasher.combine(description) + hasher.combine(parameters) + } +} diff --git a/Demo/DemoChat/Sources/Models/Conversation.swift b/Demo/DemoChat/Sources/Models/Conversation.swift index 7d6f82b8..b1c3ab71 100644 --- a/Demo/DemoChat/Sources/Models/Conversation.swift +++ b/Demo/DemoChat/Sources/Models/Conversation.swift @@ -8,15 +8,24 @@ import Foundation struct Conversation { - init(id: String, messages: [Message] = []) { + init(id: String, messages: [Message] = [], type: ConversationType = .normal, assistantId: String? = nil) { self.id = id self.messages = messages + self.type = type + self.assistantId = assistantId } typealias ID = String let id: String var messages: [Message] + var type: ConversationType + var assistantId: String? +} + +enum ConversationType { + case normal + case assistant } extension Conversation: Equatable, Identifiable {} diff --git a/Demo/DemoChat/Sources/Models/Message.swift b/Demo/DemoChat/Sources/Models/Message.swift index a8429da0..005085d6 100644 --- a/Demo/DemoChat/Sources/Models/Message.swift +++ b/Demo/DemoChat/Sources/Models/Message.swift @@ -13,6 +13,9 @@ struct Message { var role: ChatQuery.ChatCompletionMessageParam.Role var content: String var createdAt: Date + + var isLocal: Bool? + var isRunStep: Bool? 
} extension Message: Equatable, Codable, Hashable, Identifiable {} diff --git a/Demo/DemoChat/Sources/SupportedFileType.swift b/Demo/DemoChat/Sources/SupportedFileType.swift new file mode 100644 index 00000000..f02b28d0 --- /dev/null +++ b/Demo/DemoChat/Sources/SupportedFileType.swift @@ -0,0 +1,92 @@ +// +// SupportedFileType.swift +// +// +// Created by Chris Dillard on 12/8/23. +// + +import Foundation +import UniformTypeIdentifiers + +struct SupportedFileType { + let fileFormat: String + let mimeType: String + let isCodeInterpreterSupported: Bool + let isFileSearchSupported: Bool +} + +let supportedFileTypes: [SupportedFileType] = [ + SupportedFileType(fileFormat: "c", mimeType: "text/x-c", + isCodeInterpreterSupported: true, isFileSearchSupported: true), + SupportedFileType(fileFormat: "cpp", mimeType: "text/x-c++", + isCodeInterpreterSupported: true, isFileSearchSupported: true), + SupportedFileType(fileFormat: "csv", mimeType: "application/csv", + isCodeInterpreterSupported: true, isFileSearchSupported: false), + SupportedFileType(fileFormat: "docx", mimeType: "application/vnd.openxmlformats-officedocument.wordprocessingml.document", + isCodeInterpreterSupported: true, isFileSearchSupported: true), + SupportedFileType(fileFormat: "html", mimeType: "text/html", + isCodeInterpreterSupported: true, isFileSearchSupported: true), + SupportedFileType(fileFormat: "java", mimeType: "text/x-java", + isCodeInterpreterSupported: true, isFileSearchSupported: true), + SupportedFileType(fileFormat: "json", mimeType: "application/json", + isCodeInterpreterSupported: true, isFileSearchSupported: true), + SupportedFileType(fileFormat: "md", mimeType: "text/markdown", + isCodeInterpreterSupported: true, isFileSearchSupported: true), + SupportedFileType(fileFormat: "pdf", mimeType: "application/pdf", + isCodeInterpreterSupported: true, isFileSearchSupported: true), + SupportedFileType(fileFormat: "php", mimeType: "text/x-php", + isCodeInterpreterSupported: true, isFileSearchSupported: true), + SupportedFileType(fileFormat: "pptx", mimeType: "application/vnd.openxmlformats-officedocument.presentationml.presentation", + isCodeInterpreterSupported: true, isFileSearchSupported: true), + SupportedFileType(fileFormat: "py", mimeType: "text/x-python", + isCodeInterpreterSupported: true, isFileSearchSupported: true), + SupportedFileType(fileFormat: "rb", mimeType: "text/x-ruby", + isCodeInterpreterSupported: true, isFileSearchSupported: true), + SupportedFileType(fileFormat: "tex", mimeType: "text/x-tex", + isCodeInterpreterSupported: true, isFileSearchSupported: true), + SupportedFileType(fileFormat: "txt", mimeType: "text/plain", + isCodeInterpreterSupported: true, isFileSearchSupported: true), + SupportedFileType(fileFormat: "css", mimeType: "text/css", + isCodeInterpreterSupported: true, isFileSearchSupported: false), + SupportedFileType(fileFormat: "jpeg", mimeType: "image/jpeg", + isCodeInterpreterSupported: true, isFileSearchSupported: false), + SupportedFileType(fileFormat: "jpg", mimeType: "image/jpeg", + isCodeInterpreterSupported: true, isFileSearchSupported: false), + SupportedFileType(fileFormat: "js", mimeType: "text/javascript", + isCodeInterpreterSupported: true, isFileSearchSupported: false), + SupportedFileType(fileFormat: "gif", mimeType: "image/gif", + isCodeInterpreterSupported: true, isFileSearchSupported: false), + SupportedFileType(fileFormat: "png", mimeType: "image/png", + isCodeInterpreterSupported: true, isFileSearchSupported: false), + SupportedFileType(fileFormat: "tar", 
mimeType: "application/x-tar", + isCodeInterpreterSupported: true, isFileSearchSupported: false), + SupportedFileType(fileFormat: "ts", mimeType: "application/typescript", + isCodeInterpreterSupported: true, isFileSearchSupported: false), + SupportedFileType(fileFormat: "xlsx", mimeType: "application/vnd.openxmlformats-officedocument.spreadsheetml.sheet", + isCodeInterpreterSupported: true, isFileSearchSupported: false), + SupportedFileType(fileFormat: "xml", mimeType: "application/xml", // or \"text/xml\" + isCodeInterpreterSupported: true, isFileSearchSupported: false), + SupportedFileType(fileFormat: "zip", mimeType: "application/zip", + isCodeInterpreterSupported: true, isFileSearchSupported: false) +] + +func supportedUITypes() -> [UTType] { + var supportedTypes: [UTType] = [] + + for supportedFileType in supportedFileTypes { + if let newType = UTType(filenameExtension: supportedFileType.fileFormat) { + supportedTypes += [newType] + } + } + + return supportedTypes +} + +extension URL { + func mimeType() -> String { + guard let utType = UTType(filenameExtension: self.pathExtension) else { + return "application/octet-stream" // Default type if unknown + } + return utType.preferredMIMEType ?? "application/octet-stream" + } +} diff --git a/Demo/DemoChat/Sources/UI/AssistantModalContentView.swift b/Demo/DemoChat/Sources/UI/AssistantModalContentView.swift new file mode 100644 index 00000000..556215ad --- /dev/null +++ b/Demo/DemoChat/Sources/UI/AssistantModalContentView.swift @@ -0,0 +1,190 @@ +// +// AssistantModalContentView.swift +// +// +// Created by Chris Dillard on 11/9/23. +// + +import SwiftUI +import OpenAI + +struct AssistantModalContentView: View { + enum Mode { + case modify + case create + } + + @Binding var name: String + @Binding var description: String + @Binding var customInstructions: String + + @Binding var codeInterpreter: Bool + @Binding var fileSearch: Bool + @Binding var functions: [FunctionDeclaration] + @Binding var fileIds: [String] + @Binding var isUploading: Bool + @State var isFunctionModalPresented = false + @State var newFunction: FunctionDeclaration? + + var modify: Bool + + @Environment(\.dismiss) var dismiss + + @Binding var isPickerPresented: Bool + // If a file has been selected for uploading and is currently in progress, this is set. + @Binding var selectedFileURL: URL? 
+ + var onCommit: () -> Void + var onFileUpload: () -> Void + + var body: some View { + if modify { + form + } else { + NavigationStack { + form + } + } + } + + @ViewBuilder + private var form: some View { + Form { + Section("Name") { + TextField("Name", text: $name) + } + Section("Description") { + TextEditor(text: $description) + .frame(minHeight: 50) + } + Section("Custom Instructions") { + TextEditor(text: $customInstructions) + .frame(minHeight: 100) + } + + Section("Tools") { + Toggle(isOn: $codeInterpreter, label: { + Text("Code interpreter") + }) + + Toggle(isOn: $fileSearch, label: { + Text("File Search") + }) + } + + Section("Functions") { + if !functions.isEmpty { + ForEach(functions, id: \.name) { function in + HStack { + VStack(alignment: .leading) { + Text(function.name).fontWeight(.semibold) + if let description = function.description { + Text(description) + .font(.caption) + } + if let parameters = function.parameterJSON { + Text(parameters) + .font(.caption2) + } + } + Spacer() + Button { + if let index = functions.firstIndex(of: function) { + functions.remove(at: index) + } + } label: { + Image(systemName: "xmark.circle.fill") // X button + .foregroundColor(.red) + } + } + } + } + Button("Create Function") { + isFunctionModalPresented = true + } + } + + Section("Files") { + if !fileIds.isEmpty { + ForEach(fileIds, id: \.self) { fileId in + HStack { + // File Id of each file added to the assistant. + Text("File: \(fileId)") + Spacer() + // Button to remove fileId from the list of fileIds to be used when create or modify assistant. + Button { + // Add action to remove the file from the list + if let index = fileIds.firstIndex(of: fileId) { + fileIds.remove(at: index) + } + } label: { + Image(systemName: "xmark.circle.fill") // X button + .foregroundColor(.red) + } + } + } + } + + if let selectedFileURL { + HStack { + Text("File: \(selectedFileURL.lastPathComponent)") + + Button("Remove") { + self.selectedFileURL = nil + } + } + } + else { + Button("Upload File") { + isPickerPresented = true + } + .sheet(isPresented: $isPickerPresented) { + DocumentPicker { url in + selectedFileURL = url + onFileUpload() + } + } + } + } + } + .navigationTitle("\(modify ? "Edit" : "Enter") Assistant Details") + .toolbar { + if !modify { + ToolbarItem(placement: .cancellationAction) { + Button("Cancel") { + dismiss() + } + } + } + ToolbarItemGroup(placement: .primaryAction) { + Button("Save") { + onCommit() + dismiss() + } + } + } + .sheet(isPresented: $isFunctionModalPresented) { + if let newFunction { + functions.append(newFunction) + self.newFunction = nil + } + } content: { + FunctionView(name: "", description: "", parameters: "", function: $newFunction) + } + } +} + +extension FunctionDeclaration { + var parameterJSON: String? { + guard let parameters else { + return nil + } + + do { + let parameterData = try JSONEncoder().encode(parameters) + return String(data: parameterData, encoding: .utf8) + } catch { + return nil + } + } +} diff --git a/Demo/DemoChat/Sources/UI/AssistantsListView.swift b/Demo/DemoChat/Sources/UI/AssistantsListView.swift new file mode 100644 index 00000000..3cd59f12 --- /dev/null +++ b/Demo/DemoChat/Sources/UI/AssistantsListView.swift @@ -0,0 +1,47 @@ +// +// ListView.swift +// DemoChat +// +// Created by Sihao Lu on 3/25/23. +// + +import SwiftUI + +struct AssistantsListView: View { + @Binding var assistants: [Assistant] + @Binding var selectedAssistantId: String? 
+ var onLoadMoreAssistants: () -> Void + @Binding var isLoadingMore: Bool + + var body: some View { + VStack { + List( + $assistants, + editActions: [.delete], + selection: $selectedAssistantId + ) { $assistant in + HStack { + Text(assistant.name) + .lineLimit(2) + Spacer() + if assistant.id == selectedAssistantId { + Image(systemName: "checkmark.circle.fill") + .foregroundColor(.accentColor) + } + } + .onAppear { + if assistant.id == assistants.last?.id { + onLoadMoreAssistants() + } + } + } + + + if isLoadingMore { + ProgressView() + .padding() + } + } + .navigationTitle("Assistants") + } +} diff --git a/Demo/DemoChat/Sources/UI/AssistantsView.swift b/Demo/DemoChat/Sources/UI/AssistantsView.swift new file mode 100644 index 00000000..4819c4e6 --- /dev/null +++ b/Demo/DemoChat/Sources/UI/AssistantsView.swift @@ -0,0 +1,215 @@ +// +// ChatView.swift +// DemoChat +// +// Created by Sihao Lu on 3/25/23. +// + +import Combine +import SwiftUI +import OpenAI + +public struct AssistantsView: View { + @ObservedObject var store: ChatStore + @ObservedObject var assistantStore: AssistantStore + + @Environment(\.dateProviderValue) var dateProvider + @Environment(\.idProviderValue) var idProvider + + // state to select file + @State private var isPickerPresented: Bool = false + @State private var fileURL: URL? + + // state to modify assistant + @State private var name: String = "" + @State private var description: String = "" + @State private var customInstructions: String = "" + @State private var fileIds: [String] = [] + + @State private var codeInterpreter: Bool = false + @State private var fileSearch: Bool = false + @State private var functions: [FunctionDeclaration] = [] + @State var isLoadingMore = false + @State private var isModalPresented = false + @State private var isUploading = false + + //If a file is selected via the document picker, this is set. + @State var selectedFileURL: URL? + @State var uploadedFileId: String? + + @State var mode: AssistantModalContentView.Mode = .create + + public init(store: ChatStore, assistantStore: AssistantStore) { + self.store = store + self.assistantStore = assistantStore + } + + public var body: some View { + ZStack { + NavigationSplitView { + AssistantsListView( + assistants: $assistantStore.availableAssistants, selectedAssistantId: Binding( + get: { + assistantStore.selectedAssistantId + + }, set: { newId in + guard newId != nil else { return } + + selectAssistant(newId: newId) + }), onLoadMoreAssistants: { + loadMoreAssistants() + }, isLoadingMore: $isLoadingMore + ) + .toolbar { + ToolbarItemGroup(placement: .primaryAction) { + Button { + mode = .create + isModalPresented = true + } label: { + Label("Create Assistant", systemImage: "plus") + } + Button { + guard let asstId = assistantStore.selectedAssistantId else { + return + } + + // Create new local conversation to represent new thread. 
+ store.createConversation(type: .assistant, assistantId: asstId) + } label: { + Label("Start Chat", systemImage: "plus.message") + } + .disabled(assistantStore.selectedAssistantId == nil) + Button { + Task { + let _ = await assistantStore.getAssistants() + } + } label: { + Label("Get Assistants", systemImage: "arrow.triangle.2.circlepath") + } + } + } + } detail: { + if assistantStore.selectedAssistantId != nil { + assistantContentView() + } else { + Text("Select an assistant") + } + } + .sheet(isPresented: $isModalPresented) { + resetAssistantCreator() + } content: { + assistantContentView() + } + } + } + + @ViewBuilder + private func assistantContentView() -> some View { + AssistantModalContentView(name: $name, description: $description, customInstructions: $customInstructions, + codeInterpreter: $codeInterpreter, fileSearch: $fileSearch, functions: $functions, fileIds: $fileIds, + isUploading: $isUploading, modify: mode == .modify, isPickerPresented: $isPickerPresented, selectedFileURL: $selectedFileURL) { + Task { + await handleOKTap() + } + } onFileUpload: { + Task { + guard let selectedFileURL else { return } + + isUploading = true + let file = await assistantStore.uploadFile(url: selectedFileURL) + uploadedFileId = file?.id + isUploading = false + + if uploadedFileId == nil { + print("Failed to upload") + self.selectedFileURL = nil + } + else { + // if successful upload , we can show it. + if let uploadedFileId = uploadedFileId { + self.selectedFileURL = nil + + fileIds += [uploadedFileId] + + print("Successful upload!") + } + } + } + } + } + + private func handleOKTap() async { + + var mergedFileIds = [String]() + + mergedFileIds += fileIds + + let asstId: String? + + switch mode { + // Create new Assistant and select it + case .create: + asstId = await assistantStore.createAssistant(name: name, description: description, instructions: customInstructions, codeInterpreter: codeInterpreter, fileSearch: fileSearch, functions: functions, fileIds: mergedFileIds.isEmpty ? nil : mergedFileIds) + assistantStore.selectedAssistantId = asstId + // Modify existing Assistant + case .modify: + guard let selectedAssistantId = assistantStore.selectedAssistantId else { + print("Cannot modify assistant, not selected.") + return + } + + asstId = await assistantStore.modifyAssistant(asstId: selectedAssistantId, name: name, description: description, instructions: customInstructions, codeInterpreter: codeInterpreter, fileSearch: fileSearch, functions: functions, fileIds: mergedFileIds.isEmpty ? nil : mergedFileIds) + } + + // Reset Assistant Creator after attempted creation or modification. + resetAssistantCreator() + + if asstId == nil { + print("Failed to modify or create Assistant.") + } + } + + private func loadMoreAssistants() { + guard !isLoadingMore else { return } + + isLoadingMore = true + let lastAssistantId = assistantStore.availableAssistants.last?.id ?? "" + + Task { + // Fetch more assistants and append to the list + let _ = await assistantStore.getAssistants(after: lastAssistantId) + isLoadingMore = false + } + } + + private func resetAssistantCreator() { + // Reset state for Assistant creator. + name = "" + description = "" + customInstructions = "" + + codeInterpreter = false + fileSearch = false + functions = [] + selectedFileURL = nil + uploadedFileId = nil + fileIds = [] + } + + private func selectAssistant(newId: String?) 
{ + assistantStore.selectAssistant(newId) + + let selectedAssistant = assistantStore.availableAssistants.filter { $0.id == assistantStore.selectedAssistantId }.first + + name = selectedAssistant?.name ?? "" + description = selectedAssistant?.description ?? "" + customInstructions = selectedAssistant?.instructions ?? "" + codeInterpreter = selectedAssistant?.codeInterpreter ?? false + fileSearch = selectedAssistant?.fileSearch ?? false + functions = selectedAssistant?.functions ?? [] + fileIds = selectedAssistant?.fileIds ?? [] + + mode = .modify + + } +} diff --git a/Demo/DemoChat/Sources/UI/ChatView.swift b/Demo/DemoChat/Sources/UI/ChatView.swift index 1b872c21..1812ed26 100644 --- a/Demo/DemoChat/Sources/UI/ChatView.swift +++ b/Demo/DemoChat/Sources/UI/ChatView.swift @@ -10,57 +10,63 @@ import SwiftUI public struct ChatView: View { @ObservedObject var store: ChatStore - + @ObservedObject var assistantStore: AssistantStore + @Environment(\.dateProviderValue) var dateProvider @Environment(\.idProviderValue) var idProvider - public init(store: ChatStore) { + public init(store: ChatStore, assistantStore: AssistantStore) { self.store = store + self.assistantStore = assistantStore } - + public var body: some View { - NavigationSplitView { - ListView( - conversations: $store.conversations, - selectedConversationId: Binding( - get: { - store.selectedConversationID - }, set: { newId in - store.selectConversation(newId) - }) - ) - .toolbar { - ToolbarItem( - placement: .primaryAction - ) { - Button(action: { - store.createConversation() - }) { - Image(systemName: "plus") - } - .buttonStyle(.borderedProminent) - } - } - } detail: { - if let conversation = store.selectedConversation { - DetailView( - conversation: conversation, - error: store.conversationErrors[conversation.id], - sendMessage: { message, selectedModel in - Task { - await store.sendMessage( - Message( - id: idProvider(), - role: .user, - content: message, - createdAt: dateProvider() - ), - conversationId: conversation.id, - model: selectedModel - ) + ZStack { + NavigationSplitView { + ListView( + conversations: $store.conversations, + selectedConversationId: Binding( + get: { + store.selectedConversationID + }, set: { newId in + store.selectConversation(newId) + }) + ) + .toolbar { + ToolbarItem( + placement: .primaryAction + ) { + Menu { + Button("Create Chat") { + store.createConversation() + } + } label: { + Image(systemName: "plus") } + .buttonStyle(.borderedProminent) } - ) + } + } detail: { + if let conversation = store.selectedConversation { + DetailView( + availableAssistants: assistantStore.availableAssistants, conversation: conversation, + error: store.conversationErrors[conversation.id], + sendMessage: { message, selectedModel in + Task { + await store.sendMessage( + Message( + id: idProvider(), + role: .user, + content: message, + createdAt: dateProvider() + ), + conversationId: conversation.id, + model: selectedModel + ) + } + }, isSendingMessage: $store.isSendingMessage + ) + } } } } diff --git a/Demo/DemoChat/Sources/UI/DetailView.swift b/Demo/DemoChat/Sources/UI/DetailView.swift index 7aa44479..05b8a4f6 100644 --- a/Demo/DemoChat/Sources/UI/DetailView.swift +++ b/Demo/DemoChat/Sources/UI/DetailView.swift @@ -18,6 +18,7 @@ struct DetailView: View { @FocusState private var isFocused: Bool @State private var showsModelSelectionSheet = false @State private var selectedChatModel: Model = .gpt4_0613 + var availableAssistants: [Assistant] private static let availableChatModels: [Model] = [.gpt3_5Turbo, .gpt4] @@ -25,6 +26,8 
@@ struct DetailView: View { let error: Error? let sendMessage: (String, Model) -> Void + @Binding var isSendingMessage: Bool + private var fillColor: Color { #if os(iOS) return Color(uiColor: UIColor.systemBackground) @@ -51,6 +54,10 @@ struct DetailView: View { } .listRowSeparator(.hidden) } + // Tapping on the message bubble area should dismiss the keyboard. + .onTapGesture { + self.hideKeyboard() + } .listStyle(.plain) .animation(.default, value: conversation.messages) // .onChange(of: conversation) { newValue in @@ -65,8 +72,69 @@ struct DetailView: View { inputBar(scrollViewProxy: scrollViewProxy) } - .navigationTitle("Chat", selectedModel: $selectedChatModel) - .modelSelect(selectedModel: $selectedChatModel, models: Self.availableChatModels, showsModelSelectionSheet: $showsModelSelectionSheet, help: "https://platform.openai.com/docs/models/overview") + .navigationTitle(conversation.type == .assistant ? "Assistant: \(currentAssistantName())" : "Chat") + .safeAreaInset(edge: .top) { + HStack { + Text( + "Model: \(conversation.type == .assistant ? Model.gpt4_o_mini : selectedChatModel)" + ) + .font(.caption) + .foregroundColor(.secondary) + Spacer() + } + .padding(.horizontal, 16) + .padding(.vertical, 8) + } + .toolbar { + if conversation.type == .assistant { + ToolbarItem(placement: .navigationBarTrailing) { + + Menu { + ForEach(availableAssistants, id: \.self) { item in + Button(item.name) { + print("Select assistant") + //selectedItem = item + } + } + } label: { + Image(systemName: "eyeglasses") + } + } + } + if conversation.type == .normal { + ToolbarItem(placement: .navigationBarTrailing) { + Button(action: { + showsModelSelectionSheet.toggle() + }) { + Image(systemName: "cpu") + } + } + } + } + .confirmationDialog( + "Select model", + isPresented: $showsModelSelectionSheet, + titleVisibility: .visible, + actions: { + ForEach(DetailView.availableChatModels, id: \.self) { model in + Button { + selectedChatModel = model + } label: { + Text(model) + } + } + + Button("Cancel", role: .cancel) { + showsModelSelectionSheet = false + } + }, + message: { + Text( + "View https://platform.openai.com/docs/models/overview for details" + ) + .font(.caption) + } + ) } } } @@ -121,18 +189,24 @@ struct DetailView: View { } .padding(.leading) - Button(action: { - withAnimation { - tapSendMessage(scrollViewProxy: scrollViewProxy) + if isSendingMessage { + ProgressView() + .progressViewStyle(CircularProgressViewStyle()) + .padding(.trailing) + } else { + Button(action: { + withAnimation { + tapSendMessage(scrollViewProxy: scrollViewProxy) + } + }) { + Image(systemName: "paperplane") + .resizable() + .aspectRatio(contentMode: .fit) + .frame(width: 24, height: 24) + .padding(.trailing) } - }) { - Image(systemName: "paperplane") - .resizable() - .aspectRatio(contentMode: .fit) - .frame(width: 24, height: 24) - .padding(.trailing) + .disabled(inputText.trimmingCharacters(in: .whitespacesAndNewlines).isEmpty) } - .disabled(inputText.trimmingCharacters(in: .whitespacesAndNewlines).isEmpty) } .padding(.bottom) } @@ -152,6 +226,13 @@ struct DetailView: View { // scrollViewProxy.scrollTo(lastMessage.id, anchor: .bottom) // } } + + func currentAssistantName() -> String { + availableAssistants.filter { conversation.assistantId == $0.id }.first?.name ?? 
"" + } + func hideKeyboard() { + UIApplication.shared.sendAction(#selector(UIResponder.resignFirstResponder), to: nil, from: nil, for: nil) + } } struct ChatBubble: View { @@ -217,6 +298,7 @@ struct ChatBubble: View { struct DetailView_Previews: PreviewProvider { static var previews: some View { DetailView( + availableAssistants: [], conversation: Conversation( id: "1", messages: [ @@ -233,7 +315,7 @@ struct DetailView_Previews: PreviewProvider { ] ), error: nil, - sendMessage: { _, _ in } + sendMessage: { _, _ in }, isSendingMessage: Binding.constant(false) ) } } diff --git a/Demo/DemoChat/Sources/UI/DocumentPicker.swift b/Demo/DemoChat/Sources/UI/DocumentPicker.swift new file mode 100644 index 00000000..3c960235 --- /dev/null +++ b/Demo/DemoChat/Sources/UI/DocumentPicker.swift @@ -0,0 +1,41 @@ +// +// DocumentPicker.swift +// +// +// Created by Chris Dillard on 11/10/23. +// + +import SwiftUI +import UniformTypeIdentifiers + +struct DocumentPicker: UIViewControllerRepresentable { + var callback: (URL) -> Void + + func makeUIViewController(context: Context) -> UIDocumentPickerViewController { + let pickerViewController = UIDocumentPickerViewController(forOpeningContentTypes: supportedUITypes(), asCopy: true) + pickerViewController.allowsMultipleSelection = false + pickerViewController.shouldShowFileExtensions = true + + pickerViewController.delegate = context.coordinator + return pickerViewController + } + + func updateUIViewController(_ uiViewController: UIDocumentPickerViewController, context: Context) {} + + func makeCoordinator() -> Coordinator { + return Coordinator(self) + } + + class Coordinator: NSObject, UIDocumentPickerDelegate { + var parent: DocumentPicker + + init(_ parent: DocumentPicker) { + self.parent = parent + } + + func documentPicker(_ controller: UIDocumentPickerViewController, didPickDocumentsAt urls: [URL]) { + guard let url = urls.first else { return } + parent.callback(url) + } + } +} diff --git a/Demo/DemoChat/Sources/UI/FunctionView.swift b/Demo/DemoChat/Sources/UI/FunctionView.swift new file mode 100644 index 00000000..97413bda --- /dev/null +++ b/Demo/DemoChat/Sources/UI/FunctionView.swift @@ -0,0 +1,72 @@ +// +// FunctionView.swift +// +// +// Created by Brent Whitman on 2024-01-31. +// + +import SwiftUI +import OpenAI + +struct FunctionView: View { + @Environment(\.dismiss) var dismiss + @State var name: String + @State var description: String + @State var parameters: String + @Binding var function: FunctionDeclaration? + @State var isShowingAlert = false + @State var alertMessage = "" + + var body: some View { + NavigationStack { + Form { + TextField("Name", text: $name) + TextField("Description", text: $description) + TextField("Parameters", text: $parameters) + } + .navigationTitle("Create Function") + .navigationBarTitleDisplayMode(.inline) + .toolbarBackground(.visible, for: .navigationBar) + .toolbar { + ToolbarItem(placement: .cancellationAction) { + Button("Cancel") { + dismiss() + } + } + ToolbarItem(placement: .confirmationAction) { + Button("Save") { + let parameters = validateParameters() + guard !isShowingAlert else { + return + } + + function = FunctionDeclaration(name: name, description: description, parameters: parameters) + dismiss() + } + } + } + .alert(isPresented: $isShowingAlert) { + Alert(title: Text("Parameters Error"), message: Text(alertMessage)) + } + } + } + + private func validateParameters() -> JSONSchema? 
{ + guard !parameters.isEmpty, let parametersData = parameters.data(using: .utf8) else { + return nil + } + + do { + let parametersJSON = try JSONDecoder().decode(JSONSchema.self, from: parametersData) + return parametersJSON + } catch { + alertMessage = error.localizedDescription + isShowingAlert = true + return nil + } + } +} + +#Preview { + FunctionView(name: "print", description: "Prints text to the console", parameters: "{\"type\": \"string\"}", function: .constant(nil)) +} diff --git a/Demo/DemoChat/Sources/UI/ListView.swift b/Demo/DemoChat/Sources/UI/ListView.swift index bfbdfc56..d8be5585 100644 --- a/Demo/DemoChat/Sources/UI/ListView.swift +++ b/Demo/DemoChat/Sources/UI/ListView.swift @@ -17,10 +17,28 @@ struct ListView: View { editActions: [.delete], selection: $selectedConversationId ) { $conversation in - Text( - conversation.messages.last?.content ?? "New Conversation" - ) - .lineLimit(2) + if let convoContent = conversation.messages.last?.content { + Text( + convoContent + ) + .lineLimit(2) + } + else { + if conversation.type == .assistant { + Text( + "New Assistant" + ) + .lineLimit(2) + } + else { + Text( + "New Conversation" + ) + .lineLimit(2) + } + } + + } .navigationTitle("Conversations") } diff --git a/Demo/DemoChat/Sources/UI/ModerationChatView.swift b/Demo/DemoChat/Sources/UI/ModerationChatView.swift index 41658845..ec66425e 100644 --- a/Demo/DemoChat/Sources/UI/ModerationChatView.swift +++ b/Demo/DemoChat/Sources/UI/ModerationChatView.swift @@ -19,7 +19,7 @@ public struct ModerationChatView: View { public var body: some View { DetailView( - conversation: store.moderationConversation, + availableAssistants: [], conversation: store.moderationConversation, error: store.moderationConversationError, sendMessage: { message, _ in Task { @@ -32,7 +32,7 @@ public struct ModerationChatView: View { ) ) } - } + }, isSendingMessage: Binding.constant(false) ) } } diff --git a/README.md b/README.md index 34f8d2d1..3dc37b88 100644 --- a/README.md +++ b/README.md @@ -32,6 +32,22 @@ This repository contains Swift community-maintained implementation over [OpenAI] - [Moderations](#moderations) - [Utilities](#utilities) - [Combine Extensions](#combine-extensions) + - [Assistants (Beta)](#assistants) + - [Create Assistant](#create-assistant) + - [Modify Assistant](#modify-assistant) + - [List Assistants](#list-assistants) + - [Threads](#threads) + - [Create Thread](#create-thread) + - [Create and Run Thread](#create-and-run-thread) + - [Get Threads Messages](#get-threads-messages) + - [Add Message to Thread](#add-message-to-thread) + - [Runs](#runs) + - [Create Run](#create-run) + - [Retrieve Run](#retrieve-run) + - [Retrieve Run Steps](#retrieve-run-steps) + - [Submit Tool Outputs for Run](#submit-tool-outputs-for-run) + - [Files](#files) + - [Upload File](#upload-file) - [Example Project](#example-project) - [Contribution Guidelines](#contribution-guidelines) - [Links](#links) @@ -98,31 +114,35 @@ Using the OpenAI Chat API, you can build your own applications with `gpt-3.5-tur **Request** ```swift - struct ChatQuery: Codable { - /// ID of the model to use. Currently, only gpt-3.5-turbo and gpt-3.5-turbo-0301 are supported. - public let model: Model - /// The messages to generate chat completions for - public let messages: [Chat] - /// A list of functions the model may generate JSON inputs for. - public let functions: [ChatFunctionDeclaration]? - /// What sampling temperature to use, between 0 and 2. 
Higher values like 0.8 will make the output more random, while lower values like 0.2 will make it more focused and We generally recommend altering this or top_p but not both. - public let temperature: Double? - /// An alternative to sampling with temperature, called nucleus sampling, where the model considers the results of the tokens with top_p probability mass. So 0.1 means only the tokens comprising the top 10% probability mass are considered. - public let topP: Double? - /// How many chat completion choices to generate for each input message. - public let n: Int? - /// Up to 4 sequences where the API will stop generating further tokens. The returned text will not contain the stop sequence. - public let stop: [String]? - /// The maximum number of tokens to generate in the completion. - public let maxTokens: Int? - /// Number between -2.0 and 2.0. Positive values penalize new tokens based on whether they appear in the text so far, increasing the model's likelihood to talk about new topics. - public let presencePenalty: Double? - /// Number between -2.0 and 2.0. Positive values penalize new tokens based on their existing frequency in the text so far, decreasing the model's likelihood to repeat the same line verbatim. - public let frequencyPenalty: Double? - ///Modify the likelihood of specified tokens appearing in the completion. - public let logitBias: [String:Int]? - /// A unique identifier representing your end-user, which can help OpenAI to monitor and detect abuse. - public let user: String? +struct ChatQuery: Codable { + /// ID of the model to use. + public let model: Model + /// An object specifying the format that the model must output. + public let responseFormat: ResponseFormat? + /// The messages to generate chat completions for + public let messages: [Message] + /// A list of tools the model may call. Currently, only functions are supported as a tool. Use this to provide a list of functions the model may generate JSON inputs for. + public let tools: [Tool]? + /// Controls how the model responds to tool calls. "none" means the model does not call a function, and responds to the end-user. "auto" means the model can pick between and end-user or calling a function. Specifying a particular function via `{"name": "my_function"}` forces the model to call that function. "none" is the default when no functions are present. "auto" is the default if functions are present. + public let toolChoice: ToolChoice? + /// What sampling temperature to use, between 0 and 2. Higher values like 0.8 will make the output more random, while lower values like 0.2 will make it more focused and We generally recommend altering this or top_p but not both. + public let temperature: Double? + /// An alternative to sampling with temperature, called nucleus sampling, where the model considers the results of the tokens with top_p probability mass. So 0.1 means only the tokens comprising the top 10% probability mass are considered. + public let topP: Double? + /// How many chat completion choices to generate for each input message. + public let n: Int? + /// Up to 4 sequences where the API will stop generating further tokens. The returned text will not contain the stop sequence. + public let stop: [String]? + /// The maximum number of tokens to generate in the completion. + public let maxTokens: Int? + /// Number between -2.0 and 2.0. Positive values penalize new tokens based on whether they appear in the text so far, increasing the model's likelihood to talk about new topics. 
+ public let presencePenalty: Double? + /// Number between -2.0 and 2.0. Positive values penalize new tokens based on their existing frequency in the text so far, decreasing the model's likelihood to repeat the same line verbatim. + public let frequencyPenalty: Double? + /// Modify the likelihood of specified tokens appearing in the completion. + public let logitBias: [String:Int]? + /// A unique identifier representing your end-user, which can help OpenAI to monitor and detect abuse. + public let user: String? } ``` @@ -220,7 +240,7 @@ for try await result in openAI.chatsStream(query: query) { let openAI = OpenAI(apiToken: "...") // Declare functions which GPT-3 might decide to call. let functions = [ - ChatFunctionDeclaration( + FunctionDeclaration( name: "get_current_weather", description: "Get the current weather in a given location", parameters: @@ -239,7 +259,7 @@ let query = ChatQuery( messages: [ Chat(role: .user, content: "What's the weather like in Boston?") ], - functions: functions + tools: functions.map { Tool.function($0) } ) let result = try await openAI.chats(query: query) ``` @@ -256,10 +276,16 @@ Result will be (serialized as JSON here for readability): "index": 0, "message": { "role": "assistant", - "function_call": { - "name": "get_current_weather", - "arguments": "{\n \"location\": \"Boston, MA\"\n}" - } + "tool_calls": [ + { + "id": "call-0", + "type": "function", + "function": { + "name": "get_current_weather", + "arguments": "{\n \"location\": \"Boston, MA\"\n}" + } + } + ] }, "finish_reason": "function_call" } @@ -832,6 +858,142 @@ func audioTranscriptions(query: AudioTranscriptionQuery) -> AnyPublisher AnyPublisher ``` +### Assistants + +Review [Assistants Documentation](https://platform.openai.com/docs/api-reference/assistants) for more info. + +#### Create Assistant + +Example: Create Assistant +``` +let query = AssistantsQuery(model: Model.gpt4_1106_preview, name: name, description: description, instructions: instructions, tools: tools, fileIds: fileIds) +openAI.assistantCreate(query: query) { result in + //Handle response here +} +``` + +#### Modify Assistant + +Example: Modify Assistant +``` +let query = AssistantsQuery(model: Model.gpt4_1106_preview, name: name, description: description, instructions: instructions, tools: tools, fileIds: fileIds) +openAI.assistantModify(query: query, assistantId: "asst_1234") { result in + //Handle response here +} +``` + +#### List Assistants + +Example: List Assistants +``` +openAI.assistants() { result in + //Handle response here +} +``` + +#### Threads + +Review [Threads Documentation](https://platform.openai.com/docs/api-reference/threads) for more info. + +##### Create Thread + +Example: Create Thread +``` +let threadsQuery = ThreadsQuery(messages: [Chat(role: message.role, content: message.content)]) +openAI.threads(query: threadsQuery) { result in + //Handle response here +} +``` + +##### Create and Run Thread + +Example: Create and Run Thread +``` +let threadsQuery = ThreadQuery(messages: [Chat(role: message.role, content: message.content)]) +let threadRunQuery = ThreadRunQuery(assistantId: "asst_1234" thread: threadsQuery) +openAI.threadRun(query: threadRunQuery) { result in + //Handle response here +} +``` + +##### Get Threads Messages + +Review [Messages Documentation](https://platform.openai.com/docs/api-reference/messages) for more info. 
+ +Example: Get Threads Messages +``` +openAI.threadsMessages(threadId: currentThreadId) { result in + //Handle response here +} +``` + +##### Add Message to Thread + +Example: Add Message to Thread +``` +let query = MessageQuery(role: message.role.rawValue, content: message.content) +openAI.threadsAddMessage(threadId: currentThreadId, query: query) { result in + //Handle response here +} +``` + +#### Runs + +Review [Runs Documentation](https://platform.openai.com/docs/api-reference/runs) for more info. + +##### Create Run + +Example: Create Run +``` +let runsQuery = RunsQuery(assistantId: currentAssistantId) +openAI.runs(threadId: threadsResult.id, query: runsQuery) { result in + //Handle response here +} +``` + +##### Retrieve Run + +Example: Retrieve Run +``` +openAI.runRetrieve(threadId: currentThreadId, runId: currentRunId) { result in + //Handle response here +} +``` + +##### Retrieve Run Steps + +Example: Retrieve Run Steps +``` +openAI.runRetrieveSteps(threadId: currentThreadId, runId: currentRunId) { result in + //Handle response here +} +``` + +##### Submit Tool Outputs for Run + +Example: Submit Tool Outputs for Run +``` +let output = RunToolOutputsQuery.ToolOutput(toolCallId: "call123", output: "Success") +let query = RunToolOutputsQuery(toolOutputs: [output]) +openAI.runSubmitToolOutputs(threadId: currentThreadId, runId: currentRunId, query: query) { result in + //Handle response here +} +``` + +#### Files + +Review [Files Documentation](https://platform.openai.com/docs/api-reference/files) for more info. + +##### Upload file + +Example: Upload file +``` +let query = FilesQuery(purpose: "assistants", file: fileData, fileName: url.lastPathComponent, contentType: "application/pdf") +openAI.files(query: query) { result in + //Handle response here +} +``` + ## Example Project You can find example iOS application in [Demo](/Demo) folder. diff --git a/Sources/OpenAI/OpenAI.swift b/Sources/OpenAI/OpenAI.swift index a0a4d15d..0af51d9f 100644 --- a/Sources/OpenAI/OpenAI.swift +++ b/Sources/OpenAI/OpenAI.swift @@ -64,7 +64,145 @@ final public class OpenAI: OpenAIProtocol { public convenience init(configuration: Configuration, session: URLSession = URLSession.shared) { self.init(configuration: configuration, session: session as URLSessionProtocol) } + + // UPDATES FROM 11-06-23 + public func threadsAddMessage(threadId: String, query: MessageQuery, completion: @escaping (Result) -> Void) { + performRequest( + request: AssistantsRequest.jsonRequest( + urlBuilder: RunsURLBuilder( + configuration: configuration, + path: .threadsMessages, + threadId: threadId + ), + body: query + ), + completion: completion + ) + } + + public func threadsMessages(threadId: String, before: String? = nil, completion: @escaping (Result) -> Void) { + performRequest( + request: AssistantsRequest.jsonRequest( + urlBuilder: RunsURLBuilder(configuration: configuration, path: .threadsMessages, threadId: threadId), + body: nil, + method: "GET" + ), + completion: completion + ) + } + + public func runRetrieve(threadId: String, runId: String, completion: @escaping (Result) -> Void) { + performRequest( + request: AssistantsRequest.jsonRequest( + urlBuilder: RunRetrieveURLBuilder(configuration: configuration, path: .runRetrieve, threadId: threadId, runId: runId), + body: nil, + method: "GET" + ), + completion: completion + ) + } + + public func runRetrieveSteps(threadId: String, runId: String, before: String? 
= nil, completion: @escaping (Result) -> Void) { + performRequest( + request: AssistantsRequest.jsonRequest( + urlBuilder: RunRetrieveURLBuilder( + configuration: configuration, + path: .runRetrieveSteps, + threadId: threadId, + runId: runId, + before: before + ), + body: nil, + method: "GET" + ), + completion: completion + ) + } + + public func runSubmitToolOutputs(threadId: String, runId: String, query: RunToolOutputsQuery, completion: @escaping (Result) -> Void) { + performRequest( + request: AssistantsRequest.jsonRequest( + urlBuilder: DefaultURLBuilder( + configuration: configuration, + path: .Assistants.runSubmitToolOutputs(threadId: threadId, runId: runId).stringValue + ), + body: query + ), + completion: completion + ) + } + + public func runs(threadId: String, query: RunsQuery, completion: @escaping (Result) -> Void) { + performRequest( + request: AssistantsRequest.jsonRequest( + urlBuilder: RunsURLBuilder(configuration: configuration, path: .runs, threadId: threadId), + body: query + ), + completion: completion + ) + } + + public func threads(query: ThreadsQuery, completion: @escaping (Result) -> Void) { + performRequest( + request: AssistantsRequest.jsonRequest( + urlBuilder: DefaultURLBuilder(configuration: configuration, path: .Assistants.threads.stringValue), + body: query + ), + completion: completion + ) + } + public func threadRun(query: ThreadRunQuery, completion: @escaping (Result) -> Void) { + performRequest( + request: AssistantsRequest.jsonRequest( + urlBuilder: DefaultURLBuilder(configuration: configuration, path: .Assistants.threadRun.stringValue), + body: query + ), + completion: completion + ) + } + + public func assistants(after: String? = nil, completion: @escaping (Result) -> Void) { + performRequest( + request: AssistantsRequest.jsonRequest( + urlBuilder: DefaultURLBuilder(configuration: configuration, path: .Assistants.assistants.stringValue, after: after), + body: nil, + method: "GET" + ), + completion: completion + ) + } + + public func assistantCreate(query: AssistantsQuery, completion: @escaping (Result) -> Void) { + performRequest( + request: AssistantsRequest.jsonRequest( + urlBuilder: DefaultURLBuilder(configuration: configuration, path: .Assistants.assistants.stringValue), + body: query + ), + completion: completion + ) + } + + public func assistantModify(query: AssistantsQuery, assistantId: String, completion: @escaping (Result) -> Void) { + performRequest( + request: AssistantsRequest.jsonRequest( + urlBuilder: AssistantsURLBuilder(configuration: configuration, path: .assistantsModify, assistantId: assistantId), + body: query + ), + completion: completion + ) + } + + public func files(query: FilesQuery, completion: @escaping (Result) -> Void) { + performRequest( + request: AssistantsRequest.multipartFormDataRequest( + urlBuilder: DefaultURLBuilder(configuration: configuration, path: .Assistants.files.stringValue), + body: query + ), + completion: completion + ) + } + public func images(query: ImagesQuery, completion: @escaping (Result) -> Void) { performRequest(request: JSONRequest(body: query, url: buildURL(path: .images)), completion: completion) } @@ -190,33 +328,46 @@ extension OpenAI { } extension OpenAI { - - func buildURL(path: String) -> URL { - var components = URLComponents() - components.scheme = configuration.scheme - components.host = configuration.host - components.port = configuration.port - - let pathComponents = [configuration.basePath, path] - .filter { !$0.isEmpty } - .map { $0.trimmingCharacters(in: ["/"]) } - - components.path = 
"/" + pathComponents.joined(separator: "/") - - if let url = components.url { - return url - } else { - // We're expecting components.url to be not nil - // But if it isn't, let's just use some URL api that returns non-nil url - // Let all requests fail, but so that we don't crash on explicit unwrapping - return URL(fileURLWithPath: "") - } + func buildURL(path: String, after: String? = nil) -> URL { + DefaultURLBuilder(configuration: configuration, path: path, after: after) + .buildURL() + } + + func buildRunsURL(path: String, threadId: String, before: String? = nil) -> URL { + RunsURLBuilder(configuration: configuration, path: .init(stringValue: path), threadId: threadId) + .buildURL() + } + + func buildRunRetrieveURL(path: String, threadId: String, runId: String, before: String? = nil) -> URL { + RunRetrieveURLBuilder(configuration: configuration, path: .init(stringValue: path), threadId: threadId, runId: runId, before: before) + .buildURL() + } + + func buildAssistantURL(path: APIPath.Assistants, assistantId: String) -> URL { + AssistantsURLBuilder(configuration: configuration, path: path, assistantId: assistantId) + .buildURL() } } typealias APIPath = String extension APIPath { - + struct Assistants { + static let assistants = Assistants(stringValue: "/v1/assistants") + static let assistantsModify = Assistants(stringValue: "/v1/assistants/ASST_ID") + static let threads = Assistants(stringValue: "/v1/threads") + static let threadRun = Assistants(stringValue: "/v1/threads/runs") + static let runs = Assistants(stringValue: "/v1/threads/THREAD_ID/runs") + static let runRetrieve = Assistants(stringValue: "/v1/threads/THREAD_ID/runs/RUN_ID") + static let runRetrieveSteps = Assistants(stringValue: "/v1/threads/THREAD_ID/runs/RUN_ID/steps") + static func runSubmitToolOutputs(threadId: String, runId: String) -> Assistants { + Assistants(stringValue: "/v1/threads/\(threadId)/runs/\(runId)/submit_tool_outputs") + } + static let threadsMessages = Assistants(stringValue: "/v1/threads/THREAD_ID/messages") + static let files = Assistants(stringValue: "/v1/files") + + let stringValue: String + } + static let embeddings = "/v1/embeddings" static let chats = "/v1/chat/completions" static let models = "/v1/models" diff --git a/Sources/OpenAI/Private/AssistantsRequest.swift b/Sources/OpenAI/Private/AssistantsRequest.swift new file mode 100644 index 00000000..5617eb06 --- /dev/null +++ b/Sources/OpenAI/Private/AssistantsRequest.swift @@ -0,0 +1,67 @@ +// +// AssistantsRequest.swift +// OpenAI +// +// Created by Oleksii Nezhyborets on 22.01.2025. +// + +import Foundation +#if canImport(FoundationNetworking) +import FoundationNetworking +#endif + +struct AssistantsRequest: URLRequestBuildable { + private enum Body { + case json(Codable?) 
+ case multipartFormData(MultipartFormDataBodyEncodable) + } + + private let urlBuilder: URLBuilder + private let body: Body + private let method: String + + static func jsonRequest(urlBuilder: URLBuilder, body: Codable?, method: String = "POST") -> AssistantsRequest { + return .init(urlBuilder: urlBuilder, body: .json(body), method: method) + } + + static func multipartFormDataRequest(urlBuilder: URLBuilder, body: MultipartFormDataBodyEncodable, method: String = "POST") -> AssistantsRequest { + return .init(urlBuilder: urlBuilder, body: .multipartFormData(body), method: method) + } + + private init(urlBuilder: URLBuilder, body: Body, method: String = "POST") { + self.urlBuilder = urlBuilder + self.body = body + self.method = method + } + + func build(token: String, organizationIdentifier: String?, timeoutInterval: TimeInterval) throws -> URLRequest { + let customHeaders = ["OpenAI-Beta": "assistants=v2"] + + switch body { + case .json(let codable): + let jsonRequest = JSONRequest( + body: codable, + url: urlBuilder.buildURL(), + method: method, + customHeaders: customHeaders + ) + + return try jsonRequest.build( + token: token, + organizationIdentifier: organizationIdentifier, + timeoutInterval: timeoutInterval + ) + case .multipartFormData(let encodable): + let request = MultipartFormDataRequest( + body: encodable, + url: urlBuilder.buildURL(), + customHeaders: customHeaders + ) + return try request.build( + token: token, + organizationIdentifier: organizationIdentifier, + timeoutInterval: timeoutInterval + ) + } + } +} diff --git a/Sources/OpenAI/Private/JSONRequest.swift b/Sources/OpenAI/Private/JSONRequest.swift index 526f95c9..bf75a0e3 100644 --- a/Sources/OpenAI/Private/JSONRequest.swift +++ b/Sources/OpenAI/Private/JSONRequest.swift @@ -15,20 +15,31 @@ final class JSONRequest { let body: Codable? let url: URL let method: String + let customHeaders: [String: String] - init(body: Codable? = nil, url: URL, method: String = "POST") { + init(body: Codable? 
= nil, url: URL, method: String = "POST", customHeaders: [String: String] = [:]) { self.body = body self.url = url self.method = method + self.customHeaders = customHeaders } } extension JSONRequest: URLRequestBuildable { - func build(token: String, organizationIdentifier: String?, timeoutInterval: TimeInterval) throws -> URLRequest { + func build( + token: String, + organizationIdentifier: String?, + timeoutInterval: TimeInterval + ) throws -> URLRequest { var request = URLRequest(url: url, timeoutInterval: timeoutInterval) request.setValue("application/json", forHTTPHeaderField: "Content-Type") request.setValue("Bearer \(token)", forHTTPHeaderField: "Authorization") + + for (headerField, value) in customHeaders { + request.setValue(value, forHTTPHeaderField: headerField) + } + if let organizationIdentifier { request.setValue(organizationIdentifier, forHTTPHeaderField: "OpenAI-Organization") } @@ -39,3 +50,5 @@ extension JSONRequest: URLRequestBuildable { return request } } + + diff --git a/Sources/OpenAI/Private/MultipartFormDataRequest.swift b/Sources/OpenAI/Private/MultipartFormDataRequest.swift index 13764a58..923135a9 100644 --- a/Sources/OpenAI/Private/MultipartFormDataRequest.swift +++ b/Sources/OpenAI/Private/MultipartFormDataRequest.swift @@ -15,11 +15,13 @@ final class MultipartFormDataRequest { let body: MultipartFormDataBodyEncodable let url: URL let method: String + let customHeaders: [String: String] - init(body: MultipartFormDataBodyEncodable, url: URL, method: String = "POST") { + init(body: MultipartFormDataBodyEncodable, url: URL, method: String = "POST", customHeaders: [String: String] = [:]) { self.body = body self.url = url self.method = method + self.customHeaders = customHeaders } } @@ -35,6 +37,11 @@ extension MultipartFormDataRequest: URLRequestBuildable { if let organizationIdentifier { request.setValue(organizationIdentifier, forHTTPHeaderField: "OpenAI-Organization") } + + for (headerField, value) in customHeaders { + request.setValue(value, forHTTPHeaderField: headerField) + } + request.httpBody = body.encode(boundary: boundary) return request } diff --git a/Sources/OpenAI/Private/URLBuilder.swift b/Sources/OpenAI/Private/URLBuilder.swift new file mode 100644 index 00000000..899ea7b5 --- /dev/null +++ b/Sources/OpenAI/Private/URLBuilder.swift @@ -0,0 +1,95 @@ +// +// URLBuilder.swift +// OpenAI +// +// Created by Oleksii Nezhyborets on 22.01.2025. +// + +import Foundation +#if canImport(FoundationNetworking) +import FoundationNetworking +#endif + +protocol URLBuilder { + func buildURL() -> URL +} + +struct DefaultURLBuilder: URLBuilder { + private let configuration: OpenAI.Configuration + private let path: String + private let after: String? + + init(configuration: OpenAI.Configuration, path: String, after: String? 
= nil) { + self.configuration = configuration + self.path = path + self.after = after + } + + func buildURL() -> URL { + var components = URLComponents.components(perConfiguration: configuration, path: path) + + if let after { + components.queryItems = [URLQueryItem(name: "after", value: after)] + } + + return components.urlSafe + } +} + +struct AssistantsURLBuilder: URLBuilder { + let configuration: OpenAI.Configuration + let path: APIPath.Assistants + let assistantId: String + + func buildURL() -> URL { + var components = URLComponents.components(perConfiguration: configuration, path: path.stringValue) + components.path = components.path.replacingOccurrences(of: "ASST_ID", with: assistantId) + return components.urlSafe + } +} + +struct RunsURLBuilder: URLBuilder { + let configuration: OpenAI.Configuration + let path: APIPath.Assistants + let threadId: String + let before: String? = nil + + func buildURL() -> URL { + var components = URLComponents.components(perConfiguration: configuration, path: path.stringValue) + components.path = components.path.replacingOccurrences(of: "THREAD_ID", with: threadId) + if let before { + components.queryItems = [URLQueryItem(name: "before", value: before)] + } + return components.urlSafe + } +} + +struct RunRetrieveURLBuilder: URLBuilder { + private let configuration: OpenAI.Configuration + private let path: APIPath.Assistants + private let threadId: String + private let runId: String + private let before: String? + + init(configuration: OpenAI.Configuration, path: APIPath.Assistants, threadId: String, runId: String, before: String? = nil) { + self.configuration = configuration + self.path = path + self.threadId = threadId + self.runId = runId + self.before = before + } + + func buildURL() -> URL { + var components = URLComponents.components(perConfiguration: configuration, path: path.stringValue) + components.path = components.path + .replacingOccurrences(of: "THREAD_ID", with: threadId) + .replacingOccurrences(of: "RUN_ID", with: runId) + + if let before { + components.queryItems = [URLQueryItem(name: "before", value: before)] + } + return components.urlSafe + } +} + + diff --git a/Sources/OpenAI/Private/URLComponents+.swift b/Sources/OpenAI/Private/URLComponents+.swift new file mode 100644 index 00000000..5a71ffe5 --- /dev/null +++ b/Sources/OpenAI/Private/URLComponents+.swift @@ -0,0 +1,38 @@ +// +// URLComponents+.swift +// OpenAI +// +// Created by Oleksii Nezhyborets on 24.01.2025. 
+// + +import Foundation +#if canImport(FoundationNetworking) +import FoundationNetworking +#endif + +extension URLComponents { + static func components(perConfiguration configuration: OpenAI.Configuration, path: String) -> URLComponents { + var components = URLComponents() + components.scheme = configuration.scheme + components.host = configuration.host + components.port = configuration.port + + let pathComponents = [configuration.basePath, path] + .filter { !$0.isEmpty } + .map { $0.trimmingCharacters(in: ["/"]) } + + components.path = "/" + pathComponents.joined(separator: "/") + return components + } + + var urlSafe: URL { + if let url { + return url + } else { + // We're expecting components.url to be not nil + // But if it isn't, let's just use some URL api that returns non-nil url + // Let all requests fail, but so that we don't crash on explicit unwrapping + return URL(fileURLWithPath: "") + } + } +} diff --git a/Sources/OpenAI/Private/URLRequestBuildable.swift b/Sources/OpenAI/Private/URLRequestBuildable.swift index a10f3109..543ffd2e 100644 --- a/Sources/OpenAI/Private/URLRequestBuildable.swift +++ b/Sources/OpenAI/Private/URLRequestBuildable.swift @@ -11,8 +11,5 @@ import FoundationNetworking #endif protocol URLRequestBuildable { - - associatedtype ResultType - func build(token: String, organizationIdentifier: String?, timeoutInterval: TimeInterval) throws -> URLRequest } diff --git a/Sources/OpenAI/Public/Models/AssistantResult.swift b/Sources/OpenAI/Public/Models/AssistantResult.swift new file mode 100644 index 00000000..890f1275 --- /dev/null +++ b/Sources/OpenAI/Public/Models/AssistantResult.swift @@ -0,0 +1,61 @@ +// +// AssistantResult.swift +// +// +// Created by Brent Whitman on 2024-01-29. +// + +import Foundation + +public struct AssistantResult: Codable, Equatable { + public let id: String + public let name: String? + public let description: String? + public let instructions: String? + public let tools: [Tool]? + public let toolResources: ToolResources? + + enum CodingKeys: String, CodingKey { + case id + case name + case description + case instructions + case tools + case toolResources = "tool_resources" + } +} + +public struct ToolResources: Codable, Equatable { + public let fileSearch: FileSearchResources? + public let codeInterpreter: CodeInterpreterResources? + + public init(fileSearch: FileSearchResources?, codeInterpreter: CodeInterpreterResources?) { + self.fileSearch = fileSearch + self.codeInterpreter = codeInterpreter + } + + enum CodingKeys: String, CodingKey { + case fileSearch = "file_search" + case codeInterpreter = "code_interpreter" + } +} + +public struct FileSearchResources: Codable, Equatable { + public let vectorStoreIds: [String] + + enum CodingKeys: String, CodingKey { + case vectorStoreIds = "vector_store_ids" + } +} + +public struct CodeInterpreterResources: Codable, Equatable { + public let fileIds: [String] + + public init(fileIds: [String]) { + self.fileIds = fileIds + } + + enum CodingKeys: String, CodingKey { + case fileIds = "file_ids" + } +} diff --git a/Sources/OpenAI/Public/Models/AssistantsQuery.swift b/Sources/OpenAI/Public/Models/AssistantsQuery.swift new file mode 100644 index 00000000..988f7310 --- /dev/null +++ b/Sources/OpenAI/Public/Models/AssistantsQuery.swift @@ -0,0 +1,42 @@ +// +// AssistantsQuery.swift +// +// +// Created by Chris Dillard on 11/07/2023. +// + +import Foundation + +public struct AssistantsQuery: Codable, Equatable { + public let model: Model + public let name: String? + public let description: String? 
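+    /// The system instructions that the assistant uses.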
+ public let instructions: String? + public let tools: [Tool]? + public let toolResources: ToolResources? + + enum CodingKeys: String, CodingKey { + case model + case name + case description + case instructions + case tools + case toolResources = "tool_resources" + } + + public init( + model: Model, + name: String?, + description: String?, + instructions: String?, + tools: [Tool]?, + toolResources: ToolResources? = nil + ) { + self.model = model + self.name = name + self.description = description + self.instructions = instructions + self.tools = tools + self.toolResources = toolResources + } +} diff --git a/Sources/OpenAI/Public/Models/AssistantsResult.swift b/Sources/OpenAI/Public/Models/AssistantsResult.swift new file mode 100644 index 00000000..cef22fd8 --- /dev/null +++ b/Sources/OpenAI/Public/Models/AssistantsResult.swift @@ -0,0 +1,23 @@ +// +// AssistantsResult.swift +// +// +// Created by Chris Dillard on 11/07/2023. +// + +import Foundation + +public struct AssistantsResult: Codable, Equatable { + + public let data: [AssistantResult]? + public let firstId: String? + public let lastId: String? + public let hasMore: Bool + + enum CodingKeys: String, CodingKey { + case data + case firstId = "first_id" + case lastId = "last_id" + case hasMore = "has_more" + } +} diff --git a/Sources/OpenAI/Public/Models/ChatQuery.swift b/Sources/OpenAI/Public/Models/ChatQuery.swift index 27079d93..2e22d4a0 100644 --- a/Sources/OpenAI/Public/Models/ChatQuery.swift +++ b/Sources/OpenAI/Public/Models/ChatQuery.swift @@ -853,3 +853,147 @@ public struct ChatQuery: Equatable, Codable, Streamable { case stream } } + +/// See the [guide](/docs/guides/gpt/function-calling) for examples, and the [JSON Schema reference](https://json-schema.org/understanding-json-schema/) for documentation about the format. +public struct JSONSchema: Codable, Hashable { + public let type: JSONType + public let properties: [String: Property]? + public let required: [String]? + public let pattern: String? + public let const: String? + public let enumValues: [String]? + public let multipleOf: Int? + public let minimum: Int? + public let maximum: Int? + + private enum CodingKeys: String, CodingKey { + case type, properties, required, pattern, const + case enumValues = "enum" + case multipleOf, minimum, maximum + } + + public struct Property: Codable, Hashable { + public let type: JSONType + public let description: String? + public let format: String? + public let items: Items? + public let required: [String]? + public let pattern: String? + public let const: String? + public let enumValues: [String]? + public let multipleOf: Int? + public let minimum: Double? + public let maximum: Double? + public let minItems: Int? + public let maxItems: Int? + public let uniqueItems: Bool? + + private enum CodingKeys: String, CodingKey { + case type, description, format, items, required, pattern, const + case enumValues = "enum" + case multipleOf, minimum, maximum + case minItems, maxItems, uniqueItems + } + + public init(type: JSONType, description: String? = nil, format: String? = nil, items: Items? = nil, required: [String]? = nil, pattern: String? = nil, const: String? = nil, enumValues: [String]? = nil, multipleOf: Int? = nil, minimum: Double? = nil, maximum: Double? = nil, minItems: Int? = nil, maxItems: Int? = nil, uniqueItems: Bool? 
= nil) { + self.type = type + self.description = description + self.format = format + self.items = items + self.required = required + self.pattern = pattern + self.const = const + self.enumValues = enumValues + self.multipleOf = multipleOf + self.minimum = minimum + self.maximum = maximum + self.minItems = minItems + self.maxItems = maxItems + self.uniqueItems = uniqueItems + } + } + + public enum JSONType: String, Codable { + case integer = "integer" + case string = "string" + case boolean = "boolean" + case array = "array" + case object = "object" + case number = "number" + case `null` = "null" + } + + public struct Items: Codable, Hashable { + public let type: JSONType + public let properties: [String: Property]? + public let pattern: String? + public let const: String? + public let enumValues: [String]? + public let multipleOf: Int? + public let minimum: Double? + public let maximum: Double? + public let minItems: Int? + public let maxItems: Int? + public let uniqueItems: Bool? + + private enum CodingKeys: String, CodingKey { + case type, properties, pattern, const + case enumValues = "enum" + case multipleOf, minimum, maximum, minItems, maxItems, uniqueItems + } + + public init(type: JSONType, properties: [String : Property]? = nil, pattern: String? = nil, const: String? = nil, enumValues: [String]? = nil, multipleOf: Int? = nil, minimum: Double? = nil, maximum: Double? = nil, minItems: Int? = nil, maxItems: Int? = nil, uniqueItems: Bool? = nil) { + self.type = type + self.properties = properties + self.pattern = pattern + self.const = const + self.enumValues = enumValues + self.multipleOf = multipleOf + self.minimum = minimum + self.maximum = maximum + self.minItems = minItems + self.maxItems = maxItems + self.uniqueItems = uniqueItems + } + } + + public init(type: JSONType, properties: [String : Property]? = nil, required: [String]? = nil, pattern: String? = nil, const: String? = nil, enumValues: [String]? = nil, multipleOf: Int? = nil, minimum: Int? = nil, maximum: Int? = nil) { + self.type = type + self.properties = properties + self.required = required + self.pattern = pattern + self.const = const + self.enumValues = enumValues + self.multipleOf = multipleOf + self.minimum = minimum + self.maximum = maximum + } +} + +public struct ChatFunctionCall: Codable, Equatable { + /// The name of the function to call. + public let name: String + /// The arguments to call the function with, as generated by the model in JSON format. Note that the model does not always generate valid JSON, and may hallucinate parameters not defined by your function schema. Validate the arguments in your code before calling your function. + public let arguments: String + + public init(name: String, arguments: String) { + self.name = name + self.arguments = arguments + } +} + +public struct ChatToolCall: Codable, Equatable { + public enum ToolType: String, Codable, Equatable { + case function + } + + public let id: String + public let type: ToolType + public let function: ChatFunctionCall + + public init(id: String, type: ToolType = .function, function: ChatFunctionCall) { + self.id = id + self.type = type + self.function = function + } +} diff --git a/Sources/OpenAI/Public/Models/FilesQuery.swift b/Sources/OpenAI/Public/Models/FilesQuery.swift new file mode 100644 index 00000000..09d883dc --- /dev/null +++ b/Sources/OpenAI/Public/Models/FilesQuery.swift @@ -0,0 +1,42 @@ +// +// FilesQuery.swift +// +// +// Created by Chris Dillard on 11/07/2023. 
+// + +import Foundation + +public struct FilesQuery: Codable { + + public let purpose: String + + public let file: Data + public let fileName: String + + public let contentType: String + + enum CodingKeys: String, CodingKey { + case purpose + case file + case fileName + case contentType + } + + public init(purpose: String, file: Data, fileName: String, contentType: String) { + self.purpose = purpose + self.file = file + self.fileName = fileName + self.contentType = contentType + } +} + +extension FilesQuery: MultipartFormDataBodyEncodable { + func encode(boundary: String) -> Data { + let bodyBuilder = MultipartFormDataBodyBuilder(boundary: boundary, entries: [ + .string(paramName: "purpose", value: purpose), + .file(paramName: "file", fileName: fileName, fileData: file, contentType: contentType), + ]) + return bodyBuilder.build() + } +} diff --git a/Sources/OpenAI/Public/Models/FilesResult.swift b/Sources/OpenAI/Public/Models/FilesResult.swift new file mode 100644 index 00000000..d5012d46 --- /dev/null +++ b/Sources/OpenAI/Public/Models/FilesResult.swift @@ -0,0 +1,15 @@ +// +// FilesResult.swift +// +// +// Created by Chris Dillard on 11/07/2023. +// + +import Foundation + +public struct FilesResult: Codable, Equatable { + + public let id: String + public let name: String? + +} diff --git a/Sources/OpenAI/Public/Models/FunctionDeclaration.swift b/Sources/OpenAI/Public/Models/FunctionDeclaration.swift new file mode 100644 index 00000000..66e70164 --- /dev/null +++ b/Sources/OpenAI/Public/Models/FunctionDeclaration.swift @@ -0,0 +1,25 @@ +// +// FunctionDeclaration.swift +// +// +// Created by Brent Whitman on 2024-01-29. +// + +import Foundation + +public struct FunctionDeclaration: Codable, Equatable { + /// The name of the function to be called. Must be a-z, A-Z, 0-9, or contain underscores and dashes, with a maximum length of 64. + public let name: String + + /// The description of what the function does. + public let description: String? + + /// The parameters the functions accepts, described as a JSON Schema object. + public let parameters: JSONSchema? + + public init(name: String, description: String?, parameters: JSONSchema?) { + self.name = name + self.description = description + self.parameters = parameters + } +} diff --git a/Sources/OpenAI/Public/Models/ImagesQuery.swift b/Sources/OpenAI/Public/Models/ImagesQuery.swift index dae7cba2..b00f47b8 100644 --- a/Sources/OpenAI/Public/Models/ImagesQuery.swift +++ b/Sources/OpenAI/Public/Models/ImagesQuery.swift @@ -9,7 +9,7 @@ import Foundation /// Given a prompt and/or an input image, the model will generate a new image. /// https://platform.openai.com/docs/guides/images -public struct ImagesQuery: Codable { +public struct ImagesQuery: Codable, Equatable { public enum ResponseFormat: String, Codable, Equatable { case url diff --git a/Sources/OpenAI/Public/Models/MessageQuery.swift b/Sources/OpenAI/Public/Models/MessageQuery.swift new file mode 100644 index 00000000..d547a7df --- /dev/null +++ b/Sources/OpenAI/Public/Models/MessageQuery.swift @@ -0,0 +1,26 @@ +// +// MessageQuery.swift +// +// +// Created by Chris Dillard on 11/07/2023. +// + +import Foundation + +public struct MessageQuery: Equatable, Codable { + public let role: ChatQuery.ChatCompletionMessageParam.Role + public let content: String + public let fileIds: [String]? + + enum CodingKeys: String, CodingKey { + case role + case content + case fileIds = "file_ids" + } + + public init(role: ChatQuery.ChatCompletionMessageParam.Role, content: String, fileIds: [String]? 
= nil) { + self.role = role + self.content = content + self.fileIds = fileIds + } +} diff --git a/Sources/OpenAI/Public/Models/RunResult.swift b/Sources/OpenAI/Public/Models/RunResult.swift new file mode 100644 index 00000000..05009beb --- /dev/null +++ b/Sources/OpenAI/Public/Models/RunResult.swift @@ -0,0 +1,55 @@ +// +// RunResult.swift +// +// +// Created by Chris Dillard on 11/07/2023. +// + +import Foundation + +public struct RunResult: Codable, Equatable { + public enum Status: String, Codable { + case queued + case inProgress = "in_progress" + case requiresAction = "requires_action" + case cancelling + case cancelled + case failed + case completed + case expired + } + + public struct RequiredAction: Codable, Equatable { + public let submitToolOutputs: SubmitToolOutputs + + enum CodingKeys: String, CodingKey { + case submitToolOutputs = "submit_tool_outputs" + } + } + + public struct SubmitToolOutputs: Codable, Equatable { + public let toolCalls: [ToolCall] + + enum CodingKeys: String, CodingKey { + case toolCalls = "tool_calls" + } + } + + public struct ToolCall: Codable, Equatable { + public let id: String + public let type: String + public let function: ChatFunctionCall + } + + enum CodingKeys: String, CodingKey { + case id + case threadId = "thread_id" + case status + case requiredAction = "required_action" + } + + public let id: String + public let threadId: String + public let status: Status + public let requiredAction: RequiredAction? +} diff --git a/Sources/OpenAI/Public/Models/RunRetrieveQuery.swift b/Sources/OpenAI/Public/Models/RunRetrieveQuery.swift new file mode 100644 index 00000000..eef7e5d0 --- /dev/null +++ b/Sources/OpenAI/Public/Models/RunRetrieveQuery.swift @@ -0,0 +1,15 @@ +// +// RunRetrieveQuery.swift +// +// +// Created by Chris Dillard on 11/07/2023. +// + +import Foundation + +public struct RunRetrieveQuery: Equatable, Codable { + + public init() { + + } +} diff --git a/Sources/OpenAI/Public/Models/RunRetrieveStepsResult.swift b/Sources/OpenAI/Public/Models/RunRetrieveStepsResult.swift new file mode 100644 index 00000000..58257ebb --- /dev/null +++ b/Sources/OpenAI/Public/Models/RunRetrieveStepsResult.swift @@ -0,0 +1,68 @@ +// +// RunRetrieveStepsResult.swift +// +// +// Created by Chris Dillard on 11/07/2023. +// + +import Foundation + +public struct RunRetrieveStepsResult: Codable, Equatable { + + public struct StepDetailsTopLevel: Codable, Equatable { + public let id: String + public let stepDetails: StepDetailsSecondLevel + + enum CodingKeys: String, CodingKey { + case id + case stepDetails = "step_details" + } + + public struct StepDetailsSecondLevel: Codable, Equatable { + + public let toolCalls: [ToolCall]? + + enum CodingKeys: String, CodingKey { + case toolCalls = "tool_calls" + } + + public struct ToolCall: Codable, Equatable { + public enum ToolType: String, Codable { + case codeInterpreter = "code_interpreter" + case function + case fileSearch = "file_search" + } + + public let id: String + public let type: ToolType + public let codeInterpreter: CodeInterpreterCall? + public let function: FunctionCall? + + enum CodingKeys: String, CodingKey { + case id + case type + case codeInterpreter = "code_interpreter" + case function + } + + public struct CodeInterpreterCall: Codable, Equatable { + public let input: String + public let outputs: [CodeInterpreterCallOutput]? + + public struct CodeInterpreterCallOutput: Codable, Equatable { + public let type: String + public let logs: String? 
+ } + } + + public struct FunctionCall: Codable, Equatable { + public let name: String + public let arguments: String + public let output: String? + } + } + } + } + + public let data: [StepDetailsTopLevel] +} diff --git a/Sources/OpenAI/Public/Models/RunToolOutputsQuery.swift b/Sources/OpenAI/Public/Models/RunToolOutputsQuery.swift new file mode 100644 index 00000000..7fc42779 --- /dev/null +++ b/Sources/OpenAI/Public/Models/RunToolOutputsQuery.swift @@ -0,0 +1,35 @@ +// +// RunToolOutputsQuery.swift +// +// +// Created by Brent Whitman on 2024-01-29. +// + +import Foundation + +public struct RunToolOutputsQuery: Codable, Equatable { + public struct ToolOutput: Codable, Equatable { + public let toolCallId: String? + public let output: String? + + enum CodingKeys: String, CodingKey { + case toolCallId = "tool_call_id" + case output + } + + public init(toolCallId: String?, output: String?) { + self.toolCallId = toolCallId + self.output = output + } + } + + public let toolOutputs: [ToolOutput] + + enum CodingKeys: String, CodingKey { + case toolOutputs = "tool_outputs" + } + + public init(toolOutputs: [ToolOutput]) { + self.toolOutputs = toolOutputs + } +} diff --git a/Sources/OpenAI/Public/Models/RunsQuery.swift b/Sources/OpenAI/Public/Models/RunsQuery.swift new file mode 100644 index 00000000..b9ba2ad6 --- /dev/null +++ b/Sources/OpenAI/Public/Models/RunsQuery.swift @@ -0,0 +1,22 @@ +// +// AssistantsQuery.swift +// +// +// Created by Chris Dillard on 11/07/2023. +// + +import Foundation + +public struct RunsQuery: Codable { + + public let assistantId: String + + enum CodingKeys: String, CodingKey { + case assistantId = "assistant_id" + } + + public init(assistantId: String) { + + self.assistantId = assistantId + } +} diff --git a/Sources/OpenAI/Public/Models/ThreadAddMessagesResult.swift b/Sources/OpenAI/Public/Models/ThreadAddMessagesResult.swift new file mode 100644 index 00000000..f39736ef --- /dev/null +++ b/Sources/OpenAI/Public/Models/ThreadAddMessagesResult.swift @@ -0,0 +1,13 @@ +// +// ThreadsMessagesResult.swift +// +// +// Created by Chris Dillard on 11/07/2023. +// + +import Foundation + +public struct ThreadAddMessageResult: Codable, Equatable { + public let id: String + +} diff --git a/Sources/OpenAI/Public/Models/ThreadRunQuery.swift b/Sources/OpenAI/Public/Models/ThreadRunQuery.swift new file mode 100644 index 00000000..24eadaf8 --- /dev/null +++ b/Sources/OpenAI/Public/Models/ThreadRunQuery.swift @@ -0,0 +1,39 @@ +// +// ThreadsRunsQuery.swift +// +// +// Created by Brent Whitman on 2024-01-29. +// + +import Foundation + +public struct ThreadRunQuery: Equatable, Codable { + + public let assistantId: String + public let thread: ThreadsQuery + public let model: Model? + public let instructions: String? + public let tools: [Tool]? + + enum CodingKeys: String, CodingKey { + case assistantId = "assistant_id" + case thread + case model + case instructions + case tools + } + + public init( + assistantId: String, + thread: ThreadsQuery, + model: Model? = nil, + instructions: String? = nil, + tools: [Tool]? 
= nil + ) { + self.assistantId = assistantId + self.thread = thread + self.model = model + self.instructions = instructions + self.tools = tools + } +} diff --git a/Sources/OpenAI/Public/Models/ThreadsMessagesResult.swift b/Sources/OpenAI/Public/Models/ThreadsMessagesResult.swift new file mode 100644 index 00000000..0aee6ae4 --- /dev/null +++ b/Sources/OpenAI/Public/Models/ThreadsMessagesResult.swift @@ -0,0 +1,66 @@ +// +// ThreadsMessagesResult.swift +// +// +// Created by Chris Dillard on 11/07/2023. +// + +import Foundation + +public struct ThreadsMessagesResult: Codable, Equatable { + + public struct ThreadsMessage: Codable, Equatable { + + public struct ThreadsMessageContent: Codable, Equatable { + + public struct ThreadsMessageContentText: Codable, Equatable { + + public let value: String? + + enum CodingKeys: String, CodingKey { + case value + } + } + + public struct ImageFileContentText: Codable, Equatable { + + public let fildId: String + + enum CodingKeys: String, CodingKey { + case fildId = "file_id" + } + } + + public enum ContentType: String, Codable { + case text + case imageFile = "image_file" + } + + public let type: ContentType + public let text: ThreadsMessageContentText? + public let imageFile: ThreadsMessageContentText? + + enum CodingKeys: String, CodingKey { + case type + case text + case imageFile = "image_file" + } + } + + public let id: String + public let role: ChatQuery.ChatCompletionMessageParam.Role + public let content: [ThreadsMessageContent] + + enum CodingKeys: String, CodingKey { + case id + case content + case role + } + } + + public let data: [ThreadsMessage] + + enum CodingKeys: String, CodingKey { + case data + } +} diff --git a/Sources/OpenAI/Public/Models/ThreadsQuery.swift b/Sources/OpenAI/Public/Models/ThreadsQuery.swift new file mode 100644 index 00000000..369f7069 --- /dev/null +++ b/Sources/OpenAI/Public/Models/ThreadsQuery.swift @@ -0,0 +1,20 @@ +// +// ThreadsQuery.swift +// +// +// Created by Chris Dillard on 11/07/2023. +// + +import Foundation + +public struct ThreadsQuery: Equatable, Codable { + public let messages: [ChatQuery.ChatCompletionMessageParam] + + enum CodingKeys: String, CodingKey { + case messages + } + + public init(messages: [ChatQuery.ChatCompletionMessageParam]) { + self.messages = messages + } +} diff --git a/Sources/OpenAI/Public/Models/ThreadsResult.swift b/Sources/OpenAI/Public/Models/ThreadsResult.swift new file mode 100644 index 00000000..def6031c --- /dev/null +++ b/Sources/OpenAI/Public/Models/ThreadsResult.swift @@ -0,0 +1,13 @@ +// +// AssistantsResult.swift +// +// +// Created by Chris Dillard on 11/07/2023. +// + +import Foundation + +public struct ThreadsResult: Codable, Equatable { + + public let id: String +} diff --git a/Sources/OpenAI/Public/Models/Tool.swift b/Sources/OpenAI/Public/Models/Tool.swift new file mode 100644 index 00000000..02984466 --- /dev/null +++ b/Sources/OpenAI/Public/Models/Tool.swift @@ -0,0 +1,64 @@ +// +// Tool.swift +// +// +// Created by Brent Whitman on 2024-01-29. +// + +import Foundation + +/// The type of tool +/// +/// Refer to the [documentation](https://platform.openai.com/docs/assistants/tools/tools-beta) for more information on tools. +public enum Tool: Codable, Equatable { + /// Code Interpreter allows the Assistants API to write and run Python code in a sandboxed execution environment. 
+ case codeInterpreter + /// Function calling allows you to describe functions to the Assistants and have it intelligently return the functions that need to be called along with their arguments. + case function(FunctionDeclaration) + /// File Search augments the Assistant with knowledge from outside its model, such as proprietary product information or documents provided by your users. + case fileSearch + + enum CodingKeys: String, CodingKey { + case type + case function + } + + fileprivate var rawValue: String { + switch self { + case .codeInterpreter: + return "code_interpreter" + case .function: + return "function" + case .fileSearch: + return "file_search" + } + } + + public init(from decoder: Decoder) throws { + let container = try decoder.container(keyedBy: CodingKeys.self) + let toolTypeString = try container.decode(String.self, forKey: .type) + + switch toolTypeString { + case "code_interpreter": + self = .codeInterpreter + case "function": + let functionDeclaration = try container.decode(FunctionDeclaration.self, forKey: .function) + self = .function(functionDeclaration) + case "file_search": + self = .fileSearch + default: + throw DecodingError.dataCorruptedError(forKey: .type, in: container, debugDescription: "Invalid tool type") + } + } + + public func encode(to encoder: Encoder) throws { + var container = encoder.container(keyedBy: CodingKeys.self) + try container.encode(rawValue, forKey: .type) + switch self { + case let .function(declaration): + try container.encode(declaration, forKey: .function) + default: + break + } + } +} diff --git a/Sources/OpenAI/Public/Protocols/OpenAIProtocol+Async.swift b/Sources/OpenAI/Public/Protocols/OpenAIProtocol+Async.swift index 39362db3..db396349 100644 --- a/Sources/OpenAI/Public/Protocols/OpenAIProtocol+Async.swift +++ b/Sources/OpenAI/Public/Protocols/OpenAIProtocol+Async.swift @@ -187,4 +187,194 @@ public extension OpenAIProtocol { } } } + + // 1106 + func assistants( + after: String? 
= nil + ) async throws -> AssistantsResult { + try await withCheckedThrowingContinuation { continuation in + assistants(after: after) { result in + switch result { + case let .success(success): + return continuation.resume(returning: success) + case let .failure(failure): + return continuation.resume(throwing: failure) + } + } + } + } + + func assistantCreate( + query: AssistantsQuery + ) async throws -> AssistantResult { + try await withCheckedThrowingContinuation { continuation in + assistantCreate(query: query) { result in + switch result { + case let .success(success): + return continuation.resume(returning: success) + case let .failure(failure): + return continuation.resume(throwing: failure) + } + } + } + } + + func assistantModify( + query: AssistantsQuery, + assistantId: String + ) async throws -> AssistantResult { + try await withCheckedThrowingContinuation { continuation in + assistantModify(query: query, assistantId: assistantId) { result in + switch result { + case let .success(success): + return continuation.resume(returning: success) + case let .failure(failure): + return continuation.resume(throwing: failure) + } + } + } + } + + func threads( + query: ThreadsQuery + ) async throws -> ThreadsResult { + try await withCheckedThrowingContinuation { continuation in + threads(query: query) { result in + switch result { + case let .success(success): + return continuation.resume(returning: success) + case let .failure(failure): + return continuation.resume(throwing: failure) + } + } + } + } + + func threadRun( + query: ThreadRunQuery + ) async throws -> RunResult { + try await withCheckedThrowingContinuation { continuation in + threadRun(query: query) { result in + switch result { + case let .success(success): + return continuation.resume(returning: success) + case let .failure(failure): + return continuation.resume(throwing: failure) + } + } + } + } + + func runs( + threadId: String, + query: RunsQuery + ) async throws -> RunResult { + try await withCheckedThrowingContinuation { continuation in + runs(threadId: threadId, query: query) { result in + switch result { + case let .success(success): + return continuation.resume(returning: success) + case let .failure(failure): + return continuation.resume(throwing: failure) + } + } + } + } + + func runRetrieve( + threadId: String, + runId: String + ) async throws -> RunResult { + try await withCheckedThrowingContinuation { continuation in + runRetrieve(threadId: threadId, runId: runId) { result in + switch result { + case let .success(success): + return continuation.resume(returning: success) + case let .failure(failure): + return continuation.resume(throwing: failure) + } + } + } + } + + func runRetrieveSteps( + threadId: String, + runId: String, + before: String? 
= nil + ) async throws -> RunRetrieveStepsResult { + try await withCheckedThrowingContinuation { continuation in + runRetrieveSteps(threadId: threadId, runId: runId, before: before) { result in + switch result { + case let .success(success): + return continuation.resume(returning: success) + case let .failure(failure): + return continuation.resume(throwing: failure) + } + } + } + } + + func runSubmitToolOutputs( + threadId: String, + runId: String, + query: RunToolOutputsQuery + ) async throws -> RunResult { + try await withCheckedThrowingContinuation { continuation in + runSubmitToolOutputs(threadId: threadId, runId: runId, query: query) { result in + switch result { + case let .success(success): + return continuation.resume(returning: success) + case let .failure(failure): + return continuation.resume(throwing: failure) + } + } + } + } + + func threadsMessages( + threadId: String, + before: String? = nil + ) async throws -> ThreadsMessagesResult { + try await withCheckedThrowingContinuation { continuation in + threadsMessages(threadId: threadId, before: before) { result in + switch result { + case let .success(success): + return continuation.resume(returning: success) + case let .failure(failure): + return continuation.resume(throwing: failure) + } + } + } + } + + func threadsAddMessage( + threadId: String, + query: MessageQuery + ) async throws -> ThreadAddMessageResult { + try await withCheckedThrowingContinuation { continuation in + threadsAddMessage(threadId: threadId, query: query) { result in + switch result { + case let .success(success): + return continuation.resume(returning: success) + case let .failure(failure): + return continuation.resume(throwing: failure) + } + } + } + } + func files( + query: FilesQuery + ) async throws -> FilesResult { + try await withCheckedThrowingContinuation { continuation in + files(query: query) { result in + switch result { + case let .success(success): + return continuation.resume(returning: success) + case let .failure(failure): + return continuation.resume(throwing: failure) + } + } + } + } + // 1106 end } diff --git a/Sources/OpenAI/Public/Protocols/OpenAIProtocol+Combine.swift b/Sources/OpenAI/Public/Protocols/OpenAIProtocol+Combine.swift index fdb6671d..8ab9d105 100644 --- a/Sources/OpenAI/Public/Protocols/OpenAIProtocol+Combine.swift +++ b/Sources/OpenAI/Public/Protocols/OpenAIProtocol+Combine.swift @@ -105,6 +105,93 @@ public extension OpenAIProtocol { } .eraseToAnyPublisher() } + + // 1106 + func assistants(after: String? 
= nil) -> AnyPublisher { + Future { + assistants(after: after, completion: $0) + } + .eraseToAnyPublisher() + } + + func assistantCreate(query: AssistantsQuery) -> AnyPublisher { + Future { + assistantCreate(query: query, completion: $0) + } + .eraseToAnyPublisher() + } + + func assistantModify(query: AssistantsQuery, assistantId: String) -> AnyPublisher { + Future { + assistantModify(query: query, assistantId: assistantId, completion: $0) + } + .eraseToAnyPublisher() + } + + func threads(query: ThreadsQuery) -> AnyPublisher { + Future { + threads(query: query, completion: $0) + } + .eraseToAnyPublisher() + } + + func threadRun(query: ThreadRunQuery) -> AnyPublisher { + Future { + threadRun(query: query, completion: $0) + } + .eraseToAnyPublisher() + } + + func runs(threadId: String, query: RunsQuery) -> AnyPublisher { + Future { + runs(threadId: threadId, query: query, completion: $0) + } + .eraseToAnyPublisher() + } + + func runRetrieve(threadId: String, runId: String) -> AnyPublisher { + Future { + runRetrieve(threadId: threadId, runId: runId, completion: $0) + } + .eraseToAnyPublisher() + } + + func runRetrieveSteps(threadId: String, runId: String, before: String? = nil) -> AnyPublisher { + Future { + runRetrieveSteps(threadId: threadId, runId: runId, before: before, completion: $0) + } + .eraseToAnyPublisher() + } + + func runSubmitToolOutputs(threadId: String, runId: String, query: RunToolOutputsQuery) -> AnyPublisher { + Future { + runSubmitToolOutputs(threadId: threadId, runId: runId, query: query, completion: $0) + } + .eraseToAnyPublisher() + } + + func threadsMessages(threadId: String, before: String? = nil) -> AnyPublisher { + Future { + threadsMessages(threadId: threadId, before: before, completion: $0) + } + .eraseToAnyPublisher() + } + + func threadsAddMessage(threadId: String, query: MessageQuery) -> AnyPublisher { + Future { + threadsAddMessage(threadId: threadId, query: query, completion: $0) + } + .eraseToAnyPublisher() + } + + func files(query: FilesQuery) -> AnyPublisher { + Future { + files(query: query, completion: $0) + } + .eraseToAnyPublisher() + } + + // 1106 end } #endif diff --git a/Sources/OpenAI/Public/Protocols/OpenAIProtocol.swift b/Sources/OpenAI/Public/Protocols/OpenAIProtocol.swift index ae46daac..4643257f 100644 --- a/Sources/OpenAI/Public/Protocols/OpenAIProtocol.swift +++ b/Sources/OpenAI/Public/Protocols/OpenAIProtocol.swift @@ -196,4 +196,208 @@ public protocol OpenAIProtocol { Returns a `Result` of type `AudioTranslationResult` if successful, or an `Error` if an error occurs. **/ func audioTranslations(query: AudioTranslationQuery, completion: @escaping (Result) -> Void) + + /// + // The following functions represent new functionality added to OpenAI Beta on 11-06-23 + /// + + /** + This function sends a assistants query to the OpenAI API to list assistants that have been created. + + Example: List Assistants + ``` + openAI.assistants() { result in + //Handle response here + } + ``` + + - Parameter after: A cursor for use in pagination. after is an object ID that defines your place in the list. + - Parameter completion: The completion handler to be executed upon completion of the assistant request. + Returns a `Result` of type `AssistantsResult` if successful, or an `Error` if an error occurs. + **/ + func assistants(after: String?, completion: @escaping (Result) -> Void) + + /** + This function sends an assistants query to the OpenAI API and creates an assistant. 
+
+     Example: Create Assistant
+     ```
+     let query = AssistantsQuery(model: Model.gpt4_1106_preview, name: name, description: description, instructions: instructions, tools: tools, toolResources: toolResources)
+     openAI.assistantCreate(query: query) { result in
+        //Handle response here
+     }
+     ```
+
+     - Parameter query: The `AssistantsQuery` instance, containing the information required for the assistant request.
+     - Parameter completion: The completion handler to be executed upon completion of the assistant request.
+                             Returns a `Result` of type `AssistantResult` if successful, or an `Error` if an error occurs.
+     **/
+    func assistantCreate(query: AssistantsQuery, completion: @escaping (Result<AssistantResult, Error>) -> Void)
+
+    /**
+     This function sends an assistants query to the OpenAI API and modifies an assistant. The Assistants API in this usage enables you to modify an assistant.
+
+     Example: Modify Assistant
+     ```
+     let query = AssistantsQuery(model: Model.gpt4_1106_preview, name: name, description: description, instructions: instructions, tools: tools, toolResources: toolResources)
+     openAI.assistantModify(query: query, assistantId: "asst_1234") { result in
+        //Handle response here
+     }
+     ```
+
+     - Parameter query: The `AssistantsQuery` instance, containing the information required for the assistant request.
+     - Parameter assistantId: The assistant id for the assistant to modify.
+     - Parameter completion: The completion handler to be executed upon completion of the assistant request.
+                             Returns a `Result` of type `AssistantResult` if successful, or an `Error` if an error occurs.
+     **/
+    func assistantModify(query: AssistantsQuery, assistantId: String, completion: @escaping (Result<AssistantResult, Error>) -> Void)
+
+    /**
+     This function sends a threads query to the OpenAI API and creates a thread. The Threads API in this usage enables you to create a thread.
+
+     Example: Create Thread
+     ```
+     let threadsQuery = ThreadsQuery(messages: [Chat(role: message.role, content: message.content)])
+     openAI.threads(query: threadsQuery) { result in
+        //Handle response here
+     }
+     ```
+
+     - Parameter query: The `ThreadsQuery` instance, containing the information required for the threads request.
+     - Parameter completion: The completion handler to be executed upon completion of the threads request.
+                             Returns a `Result` of type `ThreadsResult` if successful, or an `Error` if an error occurs.
+     **/
+    func threads(query: ThreadsQuery, completion: @escaping (Result<ThreadsResult, Error>) -> Void)
+
+    /**
+     This function sends a threads query to the OpenAI API that creates and runs a thread in a single request.
+
+     Example: Create and Run Thread
+     ```
+     let threadsQuery = ThreadsQuery(messages: [Chat(role: message.role, content: message.content)])
+     let threadRunQuery = ThreadRunQuery(assistantId: "asst_1234", thread: threadsQuery)
+     openAI.threadRun(query: threadRunQuery) { result in
+        //Handle response here
+     }
+     ```
+
+     - Parameter query: The `ThreadRunQuery` instance, containing the information required for the request.
+     - Parameter completion: The completion handler to be executed upon completion of the threads request.
+                             Returns a `Result` of type `RunResult` if successful, or an `Error` if an error occurs.
+     **/
+    func threadRun(query: ThreadRunQuery, completion: @escaping (Result<RunResult, Error>) -> Void)
+
+    /**
+     This function sends a runs query to the OpenAI API and creates a run. The Runs API in this usage enables you to create a run.
+ + Example: Create Run + ``` + let runsQuery = RunsQuery(assistantId: currentAssistantId) + openAI.runs(threadId: threadsResult.id, query: runsQuery) { result in + //Handle response here + } + ``` + + - Parameter threadId: The thread id for the thread to run. + - Parameter query: The `RunsQuery` instance, containing the information required for the runs request. + - Parameter completion: The completion handler to be executed upon completion of the runs request. + Returns a `Result` of type `RunResult` if successful, or an `Error` if an error occurs. + **/ + func runs(threadId: String, query: RunsQuery, completion: @escaping (Result) -> Void) + + /** + This function sends a thread id and run id to the OpenAI API and retrieves a run. The Runs API in this usage enables you to retrieve a run. + + Example: Retrieve Run + ``` + openAI.runRetrieve(threadId: currentThreadId, runId: currentRunId) { result in + //Handle response here + } + ``` + - Parameter threadId: The thread id for the thread to run. + - Parameter runId: The run id for the run to retrieve. + - Parameter completion: The completion handler to be executed upon completion of the runRetrieve request. + Returns a `Result` of type `RunRetrieveResult` if successful, or an `Error` if an error occurs. + **/ + func runRetrieve(threadId: String, runId: String, completion: @escaping (Result) -> Void) + + /** + This function sends a thread id and run id to the OpenAI API and retrieves a list of run steps. The Runs API in this usage enables you to retrieve a runs run steps. + + Example: Retrieve Run Steps + ``` + openAI.runRetrieveSteps(threadId: currentThreadId, runId: currentRunId) { result in + //Handle response here + } + ``` + - Parameter threadId: The thread id for the thread to run. + - Parameter runId: The run id for the run to retrieve. + - Parameter before: String?: The message id for the run step that defines your place in the list of run steps. Pass nil to get all. + - Parameter completion: The completion handler to be executed upon completion of the runRetrieve request. + Returns a `Result` of type `RunRetrieveStepsResult` if successful, or an `Error` if an error occurs. + **/ + func runRetrieveSteps(threadId: String, runId: String, before: String?, completion: @escaping (Result) -> Void) + + /** + This function submits tool outputs for a run to the OpenAI API. It should be submitted when a run is in status `required_action` and `required_action.type` is `submit_tool_outputs` + + - Parameter threadId: The thread id for the thread which needs tool outputs. + - Parameter runId: The run id for the run which needs tool outputs. + - Parameter query: An object containing the tool outputs, populated based on the results of the requested function call + - Parameter completion: The completion handler to be executed upon completion of the runSubmitToolOutputs request. + Returns a `Result` of type `RunResult` if successful, or an `Error` if an error occurs. + */ + func runSubmitToolOutputs(threadId: String, runId: String, query: RunToolOutputsQuery, completion: @escaping (Result) -> Void) + + /** + This function sends a thread id and run id to the OpenAI API and retrieves a threads messages. + The Thread API in this usage enables you to retrieve a threads messages. + + Example: Get Threads Messages + ``` + openAI.threadsMessages(threadId: currentThreadId) { result in + //Handle response here + } + ``` + + - Parameter threadId: The thread id for the thread to run. 
+     - Parameter before: String?: The message id that defines your place in the list of messages. Pass nil to get all.
+     - Parameter completion: The completion handler to be executed upon completion of the threadsMessages request.
+                             Returns a `Result` of type `ThreadsMessagesResult` if successful, or an `Error` if an error occurs.
+    **/
+    func threadsMessages(threadId: String, before: String?, completion: @escaping (Result<ThreadsMessagesResult, Error>) -> Void)
+
+    /**
+     This function sends a thread id and message contents to the OpenAI API and adds the message to the thread.
+
+     Example: Add Message to Thread
+     ```
+     let query = MessageQuery(role: message.role.rawValue, content: message.content)
+     openAI.threadsAddMessage(threadId: currentThreadId, query: query) { result in
+        //Handle response here
+     }
+     ```
+
+     - Parameter threadId: The thread id for the thread to which the message will be added.
+     - Parameter query: The `MessageQuery` instance, containing the information required for the message request.
+     - Parameter completion: The completion handler to be executed upon completion of the threadsAddMessage request.
+                             Returns a `Result` of type `ThreadAddMessageResult` if successful, or an `Error` if an error occurs.
+    **/
+    func threadsAddMessage(threadId: String, query: MessageQuery, completion: @escaping (Result<ThreadAddMessageResult, Error>) -> Void)
+
+    /**
+     This function sends a purpose string, file data, and file name to the OpenAI API and returns a file id result.
+
+     Example: Upload file
+     ```
+     let query = FilesQuery(purpose: "assistants", file: fileData, fileName: url.lastPathComponent, contentType: "application/pdf")
+     openAI.files(query: query) { result in
+        //Handle response here
+     }
+     ```
+     - Parameter query: The `FilesQuery` instance, containing the information required for the files request.
+     - Parameter completion: The completion handler to be executed upon completion of the files request.
+                             Returns a `Result` of type `FilesResult` if successful, or an `Error` if an error occurs.
+    **/
+    func files(query: FilesQuery, completion: @escaping (Result<FilesResult, Error>) -> Void)
+
+    // END new functionality added to OpenAI Beta on 11-06-23 end
 }
diff --git a/Tests/OpenAITests/Extensions/AssistantResult+Mock.swift b/Tests/OpenAITests/Extensions/AssistantResult+Mock.swift
new file mode 100644
index 00000000..edf40391
--- /dev/null
+++ b/Tests/OpenAITests/Extensions/AssistantResult+Mock.swift
@@ -0,0 +1,15 @@
+//
+//  File.swift
+//  OpenAI
+//
+//  Created by Oleksii Nezhyborets on 24.01.2025.
+//
+
+import Foundation
+@testable import OpenAI
+
+extension AssistantResult {
+    static func makeMock() -> AssistantResult {
+        AssistantResult(id: "asst_9876", name: "My New Assistant", description: "Assistant Description", instructions: "You are a helpful assistant.", tools: nil, toolResources: nil)
+    }
+}
diff --git a/Tests/OpenAITests/Extensions/AssistantsQuery+Mock.swift b/Tests/OpenAITests/Extensions/AssistantsQuery+Mock.swift
new file mode 100644
index 00000000..11657dc0
--- /dev/null
+++ b/Tests/OpenAITests/Extensions/AssistantsQuery+Mock.swift
@@ -0,0 +1,15 @@
+//
+//  File.swift
+//  OpenAI
+//
+//  Created by Oleksii Nezhyborets on 21.01.2025.
+// + +import Foundation +@testable import OpenAI + +extension AssistantsQuery { + static func makeMock() -> AssistantsQuery { + .init(model: .gpt4, name: "My New Assistant", description: "Assistant Description", instructions: "You are a helpful assistant.", tools: []) + } +} diff --git a/Tests/OpenAITests/OpenAITests.swift b/Tests/OpenAITests/OpenAITests.swift index 51d85777..dc7b3d70 100644 --- a/Tests/OpenAITests/OpenAITests.swift +++ b/Tests/OpenAITests/OpenAITests.swift @@ -215,6 +215,7 @@ class OpenAITests: XCTestCase { XCTAssertEqual(result, moderationsResult) } + @available(iOS 16.0, *) func testModerationsIterable() { let categories = ModerationsResult.Moderation.Categories(harassment: false, harassmentThreatening: false, hate: false, hateThreatening: false, selfHarm: false, selfHarmIntent: false, selfHarmInstructions: false, sexual: false, sexualMinors: false, violence: false, violenceGraphic: false) Mirror(reflecting: categories).children.enumerated().forEach { index, element in @@ -427,6 +428,246 @@ class OpenAITests: XCTestCase { let openAI = OpenAI(configuration: configuration, session: URLSessionMock()) XCTAssertEqual(openAI.buildURL(path: "/foo"), URL(string: "https://bizbaz.com:443/openai/foo")) } + + // 1106 + func testAssistantCreateQuery() async throws { + let query = assistantsQuery() + let expectedResult = AssistantResult.makeMock() + try self.stub(result: expectedResult) + + let result = try await openAI.assistantCreate(query: query) + XCTAssertEqual(result, expectedResult) + } + + func testAssistantCreateQueryError() async throws { + let query = assistantsQuery() + + let inError = APIError(message: "foo", type: "bar", param: "baz", code: "100") + self.stub(error: inError) + + let apiError: APIError = try await XCTExpectError { try await openAI.assistantCreate(query: query) } + XCTAssertEqual(inError, apiError) + } + + func testListAssistantQuery() async throws { + let expectedAssistant = AssistantResult.makeMock() + let expectedResult = AssistantsResult(data: [expectedAssistant], firstId: expectedAssistant.id, lastId: expectedAssistant.id, hasMore: false) + try self.stub(result: expectedResult) + + let result = try await openAI.assistants() + XCTAssertEqual(result, expectedResult) + } + + func testListAssistantQueryError() async throws { + let inError = APIError(message: "foo", type: "bar", param: "baz", code: "100") + self.stub(error: inError) + + let apiError: APIError = try await XCTExpectError { try await openAI.assistants() } + XCTAssertEqual(inError, apiError) + } + + func testAssistantModifyQuery() async throws { + let query = assistantsQuery() + let expectedResult = AssistantResult.makeMock() + try self.stub(result: expectedResult) + + let result = try await openAI.assistantModify(query: query, assistantId: "asst_9876") + XCTAssertEqual(result, expectedResult) + } + + func testAssistantModifyQueryError() async throws { + let query = assistantsQuery() + let inError = APIError(message: "foo", type: "bar", param: "baz", code: "100") + self.stub(error: inError) + + let apiError: APIError = try await XCTExpectError { try await openAI.assistantModify(query: query, assistantId: "asst_9876") } + XCTAssertEqual(inError, apiError) + } + + func testThreadsQuery() async throws { + let query = ThreadsQuery(messages: [ChatQuery.ChatCompletionMessageParam(role: .user, content: "Hello, What is AI?")!]) + let expectedResult = ThreadsResult(id: "thread_1234") + try self.stub(result: expectedResult) + + let result = try await openAI.threads(query: query) + 
XCTAssertEqual(result, expectedResult) + } + + func testThreadsQueryError() async throws { + let query = ThreadsQuery(messages: [ChatQuery.ChatCompletionMessageParam(role: .user, content: "Hello, What is AI?")!]) + + let inError = APIError(message: "foo", type: "bar", param: "baz", code: "100") + self.stub(error: inError) + + let apiError: APIError = try await XCTExpectError { try await openAI.threads(query: query) } + XCTAssertEqual(inError, apiError) + } + + func testThreadRunQuery() async throws { + let query = ThreadRunQuery(assistantId: "asst_7654321", thread: .init(messages: [ChatQuery.ChatCompletionMessageParam(role: .user, content: "Hello, What is AI?")!])) + let expectedResult = RunResult(id: "run_1234", threadId: "thread_1234", status: .completed, requiredAction: nil) + try self.stub(result: expectedResult) + + let result = try await openAI.threadRun(query: query) + XCTAssertEqual(result, expectedResult) + } + + func testThreadRunQueryError() async throws { + let query = ThreadRunQuery(assistantId: "asst_7654321", thread: .init(messages: [ChatQuery.ChatCompletionMessageParam(role: .user, content: "Hello, What is AI?")!])) + let inError = APIError(message: "foo", type: "bar", param: "baz", code: "100") + self.stub(error: inError) + + let apiError: APIError = try await XCTExpectError { try await openAI.threadRun(query: query) } + XCTAssertEqual(inError, apiError) + } + + func testRunsQuery() async throws { + let query = RunsQuery(assistantId: "asst_7654321") + let expectedResult = RunResult(id: "run_1234", threadId: "thread_1234", status: .completed, requiredAction: nil) + try self.stub(result: expectedResult) + + let result = try await openAI.runs(threadId: "thread_1234", query: query) + XCTAssertEqual(result, expectedResult) + } + + func testRunsQueryError() async throws { + let query = RunsQuery(assistantId: "asst_7654321") + let inError = APIError(message: "foo", type: "bar", param: "baz", code: "100") + self.stub(error: inError) + + let apiError: APIError = try await XCTExpectError { try await openAI.runs(threadId: "thread_1234", query: query) } + XCTAssertEqual(inError, apiError) + } + + func testRunRetrieveQuery() async throws { + let expectedResult = RunResult(id: "run_1234", threadId: "thread_1234", status: .inProgress, requiredAction: nil) + try self.stub(result: expectedResult) + + let result = try await openAI.runRetrieve(threadId: "thread_1234", runId: "run_1234") + XCTAssertEqual(result, expectedResult) + } + + func testRunRetrieveQueryError() async throws { + let inError = APIError(message: "foo", type: "bar", param: "baz", code: "100") + self.stub(error: inError) + + let apiError: APIError = try await XCTExpectError { try await openAI.runRetrieve(threadId: "thread_1234", runId: "run_1234") } + XCTAssertEqual(inError, apiError) + } + + func testRunRetrieveStepsQuery() async throws { + let expectedResult = RunRetrieveStepsResult(data: [.init(id: "step_1234", stepDetails: .init(toolCalls: [.init(id: "tool_456", type: .fileSearch, codeInterpreter: nil, function: nil)]))]) + try self.stub(result: expectedResult) + + let result = try await openAI.runRetrieveSteps(threadId: "thread_1234", runId: "run_1234") + XCTAssertEqual(result, expectedResult) + } + + func testRunRetreiveStepsQueryError() async throws { + let inError = APIError(message: "foo", type: "bar", param: "baz", code: "100") + self.stub(error: inError) + + let apiError: APIError = try await XCTExpectError { try await openAI.runRetrieveSteps(threadId: "thread_1234", runId: "run_1234") } + XCTAssertEqual(inError, 
apiError) + } + + func testRunSubmitToolOutputsQuery() async throws { + let query = RunToolOutputsQuery(toolOutputs: [.init(toolCallId: "call_123", output: "Success")]) + let expectedResult = RunResult(id: "run_123", threadId: "thread_456", status: .inProgress, requiredAction: nil) + try self.stub(result: expectedResult) + + let result = try await openAI.runSubmitToolOutputs(threadId: "thread_456", runId: "run_123", query: query) + XCTAssertEqual(result, expectedResult) + } + + func testRunSubmitToolOutputsQueryError() async throws { + let query = RunToolOutputsQuery(toolOutputs: [.init(toolCallId: "call_123", output: "Success")]) + let inError = APIError(message: "foo", type: "bar", param: "baz", code: "100") + self.stub(error: inError) + + let apiError: APIError = try await XCTExpectError { try await openAI.runSubmitToolOutputs(threadId: "thread_456", runId: "run_123", query: query) } + XCTAssertEqual(inError, apiError) + } + + func testThreadAddMessageQuery() async throws { + let query = MessageQuery(role: .user, content: "Hello, What is AI?", fileIds: ["file_123"]) + let expectedResult = ThreadAddMessageResult(id: "message_1234") + try self.stub(result: expectedResult) + + let result = try await openAI.threadsAddMessage(threadId: "thread_1234", query: query) + XCTAssertEqual(result, expectedResult) + } + + func testThreadAddMessageQueryError() async throws { + let query = MessageQuery(role: .user, content: "Hello, What is AI?", fileIds: ["file_123"]) + let inError = APIError(message: "foo", type: "bar", param: "baz", code: "100") + self.stub(error: inError) + + let apiError: APIError = try await XCTExpectError { try await openAI.threadsAddMessage(threadId: "thread_1234", query: query) } + XCTAssertEqual(inError, apiError) + } + + func testThreadsMessageQuery() async throws { + let expectedResult = ThreadsMessagesResult(data: [ThreadsMessagesResult.ThreadsMessage(id: "thread_1234", role: ChatQuery.ChatCompletionMessageParam.Role.user, content: [ThreadsMessagesResult.ThreadsMessage.ThreadsMessageContent(type: .text, text: ThreadsMessagesResult.ThreadsMessage.ThreadsMessageContent.ThreadsMessageContentText(value: "Hello, What is AI?"), imageFile: nil)])]) + try self.stub(result: expectedResult) + + let result = try await openAI.threadsMessages(threadId: "thread_1234") + XCTAssertEqual(result, expectedResult) + } + + func testThreadsMessageQueryError() async throws { + let inError = APIError(message: "foo", type: "bar", param: "baz", code: "100") + self.stub(error: inError) + + let apiError: APIError = try await XCTExpectError { try await openAI.threadsMessages(threadId: "thread_1234") } + XCTAssertEqual(inError, apiError) + } + + func testFilesQuery() async throws { + let data = try XCTUnwrap("{\"test\":\"data\"}".data(using: .utf8)) + let query = FilesQuery(purpose: "assistant", file: data, fileName: "test.json", contentType: "application/json") + let expectedResult = FilesResult(id: "file_1234", name: "test.json") + try self.stub(result: expectedResult) + + let result = try await openAI.files(query: query) + XCTAssertEqual(result, expectedResult) + } + + func testFilesQueryError() async throws { + let data = try XCTUnwrap("{\"test\":\"data\"}".data(using: .utf8)) + let query = FilesQuery(purpose: "assistant", file: data, fileName: "test.json", contentType: "application/json") + let inError = APIError(message: "foo", type: "bar", param: "baz", code: "100") + self.stub(error: inError) + + let apiError: APIError = try await XCTExpectError { try await openAI.files(query: query) } + 
XCTAssertEqual(inError, apiError) + } + + func testCustomRunsURLBuilt() { + let configuration = OpenAI.Configuration(token: "foo", organizationIdentifier: "bar", host: "my.host.com", timeoutInterval: 14) + let openAI = OpenAI(configuration: configuration, session: self.urlSession) + let completionsURL = openAI.buildRunsURL(path: APIPath.Assistants.runs.stringValue, threadId: "thread_4321") + XCTAssertEqual(completionsURL, URL(string: "https://my.host.com:443/v1/threads/thread_4321/runs")) + } + + func testCustomRunsRetrieveURLBuilt() { + let configuration = OpenAI.Configuration(token: "foo", organizationIdentifier: "bar", host: "my.host.com", timeoutInterval: 14) + let openAI = OpenAI(configuration: configuration, session: self.urlSession) + let completionsURL = openAI.buildRunRetrieveURL(path: APIPath.Assistants.runRetrieve.stringValue, threadId: "thread_4321", runId: "run_1234") + XCTAssertEqual(completionsURL, URL(string: "https://my.host.com:443/v1/threads/thread_4321/runs/run_1234")) + } + + func testCustomRunRetrieveStepsURLBuilt() { + let configuration = OpenAI.Configuration(token: "foo", organizationIdentifier: "bar", host: "my.host.com", timeoutInterval: 14) + let openAI = OpenAI(configuration: configuration, session: self.urlSession) + let completionsURL = openAI.buildRunRetrieveURL(path: APIPath.Assistants.runRetrieveSteps.stringValue, threadId: "thread_4321", runId: "run_1234") + XCTAssertEqual(completionsURL, URL(string: "https://my.host.com:443/v1/threads/thread_4321/runs/run_1234/steps")) + } + // 1106 end + + private func assistantsQuery() -> AssistantsQuery { + .makeMock() + } } @available(tvOS 13.0, *) diff --git a/Tests/OpenAITests/OpenAITestsCombine.swift b/Tests/OpenAITests/OpenAITestsCombine.swift index e924cd0d..fe878f8a 100644 --- a/Tests/OpenAITests/OpenAITestsCombine.swift +++ b/Tests/OpenAITests/OpenAITestsCombine.swift @@ -102,6 +102,119 @@ final class OpenAITestsCombine: XCTestCase { let result = try awaitPublisher(openAI.audioTranslations(query: query)) XCTAssertEqual(result, transcriptionResult) } + + // 1106 + func testAssistantsQuery() throws { + let expectedAssistant = AssistantResult.makeMock() + let expectedResult = AssistantsResult(data: [expectedAssistant], firstId: expectedAssistant.id, lastId: expectedAssistant.id, hasMore: false) + try self.stub(result: expectedResult) + + let result = try awaitPublisher(openAI.assistants()) + XCTAssertEqual(result, expectedResult) + } + + func testAssistantCreateQuery() throws { + let query = AssistantsQuery.makeMock() + let expectedResult = AssistantResult.makeMock() + try self.stub(result: expectedResult) + + let result = try awaitPublisher(openAI.assistantCreate(query: query)) + XCTAssertEqual(result, expectedResult) + } + + func testAssistantModifyQuery() throws { + let query = AssistantsQuery.makeMock() + let expectedResult = AssistantResult.makeMock() + try self.stub(result: expectedResult) + + let result = try awaitPublisher(openAI.assistantModify(query: query, assistantId: "asst_9876")) + XCTAssertEqual(result, expectedResult) + } + + func testThreadsQuery() throws { + let query = ThreadsQuery(messages: [ ChatQuery.ChatCompletionMessageParam(role: .user, content: "Hello, What is AI?")!]) + let expectedResult = ThreadsResult(id: "thread_1234") + + try self.stub(result: expectedResult) + let result = try awaitPublisher(openAI.threads(query: query)) + + XCTAssertEqual(result, expectedResult) + } + + func testThreadRunQuery() throws { + let query = ThreadRunQuery(assistantId: "asst_7654321", thread: 
.init(messages: [.init(role: .user, content: "Hello, What is AI?")!])) + let expectedResult = RunResult(id: "run_1234", threadId: "thread_1234", status: .completed, requiredAction: nil) + try self.stub(result: expectedResult) + + let result = try awaitPublisher(openAI.threadRun(query: query)) + XCTAssertEqual(result, expectedResult) + } + + func testRunsQuery() throws { + let query = RunsQuery(assistantId: "asst_7654321") + let expectedResult = RunResult(id: "run_1234", threadId: "thread_1234", status: .inProgress, requiredAction: nil) + + try self.stub(result: expectedResult) + let result = try awaitPublisher(openAI.runs(threadId: "thread_1234", query: query)) + + XCTAssertEqual(result, expectedResult) + } + + func testRunRetrieveQuery() throws { + let expectedResult = RunResult(id: "run_1234", threadId: "thread_1234", status: .inProgress, requiredAction: nil) + try self.stub(result: expectedResult) + + let result = try awaitPublisher(openAI.runRetrieve(threadId: "thread_1234", runId: "run_1234")) + + XCTAssertEqual(result, expectedResult) + } + + func testRunRetrieveStepsQuery() throws { + let expectedResult = RunRetrieveStepsResult(data: [.init(id: "step_1234", stepDetails: .init(toolCalls: [.init(id: "tool_456", type: .fileSearch, codeInterpreter: nil, function: nil)]))]) + try self.stub(result: expectedResult) + + let result = try awaitPublisher(openAI.runRetrieveSteps(threadId: "thread_1234", runId: "run_1234")) + XCTAssertEqual(result, expectedResult) + } + + func testRunSubmitToolOutputsQuery() throws { + let query = RunToolOutputsQuery(toolOutputs: [.init(toolCallId: "call_123", output: "Success")]) + let expectedResult = RunResult(id: "run_123", threadId: "thread_456", status: .inProgress, requiredAction: nil) + try self.stub(result: expectedResult) + + let result = try awaitPublisher(openAI.runSubmitToolOutputs(threadId: "thread_456", runId: "run_123", query: query)) + XCTAssertEqual(result, expectedResult) + } + + func testThreadAddMessageQuery() throws { + let query = MessageQuery(role: .user, content: "Hello, What is AI?", fileIds: ["file_123"]) + let expectedResult = ThreadAddMessageResult(id: "message_1234") + try self.stub(result: expectedResult) + + let result = try awaitPublisher(openAI.threadsAddMessage(threadId: "thread_1234", query: query)) + XCTAssertEqual(result, expectedResult) + } + + func testThreadsMessageQuery() throws { + let expectedResult = ThreadsMessagesResult(data: [ThreadsMessagesResult.ThreadsMessage(id: "thread_1234", role: ChatQuery.ChatCompletionMessageParam.Role.user, content: [ThreadsMessagesResult.ThreadsMessage.ThreadsMessageContent(type: .text, text: ThreadsMessagesResult.ThreadsMessage.ThreadsMessageContent.ThreadsMessageContentText(value: "Hello, What is AI?"), imageFile: nil)])]) + try self.stub(result: expectedResult) + + let result = try awaitPublisher(openAI.threadsMessages(threadId: "thread_1234", before: nil)) + + XCTAssertEqual(result, expectedResult) + } + + func testFilesQuery() throws { + let data = try XCTUnwrap("{\"test\":\"data\"}".data(using: .utf8)) + let query = FilesQuery(purpose: "assistant", file: data, fileName: "test.json", contentType: "application/json") + let expectedResult = FilesResult(id: "file_1234", name: "test.json") + try self.stub(result: expectedResult) + + let result = try awaitPublisher(openAI.files(query: query)) + XCTAssertEqual(result, expectedResult) + } + // 1106 end + } @available(tvOS 13.0, *) diff --git a/Tests/OpenAITests/OpenAITestsDecoder.swift b/Tests/OpenAITests/OpenAITestsDecoder.swift index 
832270ae..55df332d 100644
--- a/Tests/OpenAITests/OpenAITestsDecoder.swift
+++ b/Tests/OpenAITests/OpenAITestsDecoder.swift
@@ -17,10 +17,17 @@ class OpenAITestsDecoder: XCTestCase {
         super.setUp()
     }
 
-    private func decode<T: Decodable & Equatable>(_ jsonString: String, _ expectedValue: T) throws {
+    private func decode<T: Decodable & Equatable>(_ jsonString: String, _ expectedValue: T, file: StaticString = #filePath, line: UInt = #line) throws {
         let data = jsonString.data(using: .utf8)!
         let decoded = try JSONDecoder().decode(T.self, from: data)
-        XCTAssertEqual(decoded, expectedValue)
+        XCTAssertEqual(decoded, expectedValue, file: file, line: line)
+    }
+
+    private func encode<T: Encodable>(_ expectedValue: T, _ jsonString: String, file: StaticString = #filePath, line: UInt = #line) throws {
+        // To compare serialized JSONs we first convert them both into NSDictionary which are comparable (unlike native swift dictionaries)
+        let expectedValueAsDict = try jsonDataAsNSDictionary(JSONEncoder().encode(expectedValue))
+        let jsonStringAsDict = try jsonDataAsNSDictionary(jsonString.data(using: .utf8)!)
+        XCTAssertEqual(jsonStringAsDict, expectedValueAsDict, file: file, line: line)
     }
 
     func jsonDataAsNSDictionary(_ data: Data) throws -> NSDictionary {
@@ -105,11 +112,7 @@ class OpenAITestsDecoder: XCTestCase {
         }
         """
 
-        // To compare serialized JSONs we first convert them both into NSDictionary which are comparable (unline native swift dictionaries)
-        let imageQueryAsDict = try jsonDataAsNSDictionary(JSONEncoder().encode(imageQuery))
-        let expectedValueAsDict = try jsonDataAsNSDictionary(expectedValue.data(using: .utf8)!)
-
-        XCTAssertEqual(imageQueryAsDict, expectedValueAsDict)
+        try encode(imageQuery, expectedValue)
     }
 
     func testChatQueryWithVision() async throws {
@@ -163,6 +166,7 @@ class OpenAITestsDecoder: XCTestCase {
             ],
             model: .gpt3_5Turbo,
             responseFormat: ChatQuery.ResponseFormat.jsonObject,
+            toolChoice: .function("get_current_weather"),
             tools: [
                 .init(function: .init(
                     name: "get_current_weather",
@@ -182,7 +186,10 @@ class OpenAITestsDecoder: XCTestCase {
         {
           "model": "gpt-3.5-turbo",
           "messages": [
-            { "role": "user", "content": "What's the weather like in Boston?" }
+            {
+              "role": "user",
+              "content": "What's the weather like in Boston?"
+            }
           ],
           "response_format": {
             "type": "json_object"
@@ -207,15 +214,17 @@ class OpenAITestsDecoder: XCTestCase {
               "type": "function"
             }
           ],
+          "tool_choice": {
+            "type": "function",
+            "function": {
+              "name": "get_current_weather"
+            }
+          },
           "stream": false
         }
         """
 
-        // To compare serialized JSONs we first convert them both into NSDictionary which are comparable (unline native swift dictionaries)
-        let chatQueryAsDict = try jsonDataAsNSDictionary(JSONEncoder().encode(chatQuery))
-        let expectedValueAsDict = try jsonDataAsNSDictionary(expectedValue.data(using: .utf8)!)
-
-        XCTAssertEqual(chatQueryAsDict, expectedValueAsDict)
+        try encode(chatQuery, expectedValue)
     }
 
     func testChatCompletionWithFunctionCall() async throws {
@@ -414,4 +423,198 @@ class OpenAITestsDecoder: XCTestCase {
         let expectedValue = AudioTranslationResult(text: "Hello, world!")
         try decode(data, expectedValue)
     }
+
+    func testAssistantResult() async throws {
+        let data = """
+        {
+          "id": "asst_abc123",
+          "object": "assistant",
+          "created_at": 1698984975,
+          "name": "Math Tutor",
+          "description": null,
+          "model": "gpt-4",
+          "instructions": "You are a personal math tutor. 
When asked a question, write and run Python code to answer the question.", + "tools": [ + { + "type": "code_interpreter" + } + ], + "file_ids": [], + "metadata": {} + } + """ + + let expectedValue = AssistantResult(id: "asst_abc123", name: "Math Tutor", description: nil, instructions: "You are a personal math tutor. When asked a question, write and run Python code to answer the question.", tools: [.codeInterpreter], toolResources: nil) + try decode(data, expectedValue) + } + + func testAssistantsQuery() async throws { + let assistantsQuery = AssistantsQuery( + model: .gpt4, + name: "Math Tutor", + description: nil, + instructions: "You are a personal math tutor. When asked a question, write and run Python code to answer the question.", + tools: [.codeInterpreter], + toolResources: nil + ) + + let expectedValue = """ + { + "instructions": "You are a personal math tutor. When asked a question, write and run Python code to answer the question.", + "name": "Math Tutor", + "tools": [ + {"type": "code_interpreter"} + ], + "model": "gpt-4" + } + """ + + try encode(assistantsQuery, expectedValue) + } + + func testAssistantsResult() async throws { + let data = """ + { + "object": "list", + "data": [ + { + "id": "asst_abc123", + "object": "assistant", + "created_at": 1698982736, + "name": "Coding Tutor", + "description": null, + "model": "gpt-4", + "instructions": "You are a helpful assistant designed to make me better at coding!", + "tools": [], + "file_ids": [], + "metadata": {} + }, + { + "id": "asst_abc456", + "object": "assistant", + "created_at": 1698982718, + "name": "My Assistant", + "description": null, + "model": "gpt-4", + "instructions": "You are a helpful assistant designed to teach me about AI!", + "tools": [], + "file_ids": [], + "metadata": {} + } + ], + "first_id": "asst_abc123", + "last_id": "asst_abc789", + "has_more": false + } + """ + + let expectedValue = AssistantsResult( + data: [ + .init(id: "asst_abc123", name: "Coding Tutor", description: nil, instructions: "You are a helpful assistant designed to make me better at coding!", tools: [], toolResources: nil), + .init(id: "asst_abc456", name: "My Assistant", description: nil, instructions: "You are a helpful assistant designed to teach me about AI!", tools: [], toolResources: nil), + ], + firstId: "asst_abc123", + lastId: "asst_abc789", + hasMore: false + ) + + try decode(data, expectedValue) + } + + func testMessageQuery() async throws { + let messageQuery = MessageQuery( + role: .user, + content: "How does AI work? Explain it in simple terms.", + fileIds: ["file_abc123"] + ) + + let expectedValue = """ + { + "role": "user", + "content": "How does AI work? 
Explain it in simple terms.", + "file_ids": ["file_abc123"] + } + """ + + try encode(messageQuery, expectedValue) + } + + func testRunResult() async throws { + let data = """ + { + "id": "run_1a", + "thread_id": "thread_2b", + "status": "requires_action", + "required_action": { + "type": "submit_tool_outputs", + "submit_tool_outputs": { + "tool_calls": [ + { + "id": "tool_abc890", + "type": "function", + "function": { + "name": "print", + "arguments": "{\\"text\\": \\"hello\\"}" + } + } + ] + } + } + } + """ + + let expectedValue = RunResult( + id: "run_1a", + threadId: "thread_2b", + status: .requiresAction, + requiredAction: .init( + submitToolOutputs: .init(toolCalls: [.init(id: "tool_abc890", type: "function", function: .init(name: "print", arguments: "{\"text\": \"hello\"}"))]) + ) + ) + + try decode(data, expectedValue) + } + + func testRunToolOutputsQuery() async throws { + let runToolOutputsQuery = RunToolOutputsQuery( + toolOutputs: [ + .init(toolCallId: "call_abc0", output: "success") + ] + ) + + let expectedValue = """ + { + "tool_outputs": [ + { + "tool_call_id": "call_abc0", + "output": "success" + } + ] + } + """ + + try encode(runToolOutputsQuery, expectedValue) + } + + func testThreadRunQuery() async throws { + let threadRunQuery = ThreadRunQuery( + assistantId: "asst_abc123", + thread: .init( + messages: [.init(role: .user, content: "Explain deep learning to a 5 year old.")!] + ) + ) + + let expectedValue = """ + { + "assistant_id": "asst_abc123", + "thread": { + "messages": [ + {"role": "user", "content": "Explain deep learning to a 5 year old."} + ] + } + } + """ + + try encode(threadRunQuery, expectedValue) + } }
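
The protocol doc comments above describe each Assistants call in isolation; the sketch below shows how the new surface is intended to compose end to end. It is a minimal, non-authoritative example written against the async variants exercised by the tests in this diff, and it assumes those async methods are exposed on `OpenAIProtocol`. The helper name `askMathTutor`, the model and instructions, the one-second polling delay, and the way message text is unwrapped are illustrative assumptions, not requirements of the library.

```swift
import Foundation
import OpenAI

/// Minimal Assistants flow: create an assistant, start a run on a new thread,
/// poll until the run finishes, then read the thread's messages back.
func askMathTutor(_ openAI: OpenAIProtocol, question: String) async throws -> [String] {
    // 1. Create an assistant with the code interpreter tool enabled.
    let assistant = try await openAI.assistantCreate(query: AssistantsQuery(
        model: .gpt4,
        name: "Math Tutor",
        description: nil,
        instructions: "You are a personal math tutor. Write and run code to answer math questions.",
        tools: [.codeInterpreter],
        toolResources: nil
    ))

    // 2. Create a thread containing the user's question and start a run in one request.
    var run = try await openAI.threadRun(query: ThreadRunQuery(
        assistantId: assistant.id,
        thread: ThreadsQuery(messages: [
            ChatQuery.ChatCompletionMessageParam(role: .user, content: question)!
        ])
    ))

    // 3. Poll until the run is no longer in progress. A real caller would also handle
    //    `.requiresAction` (see the tool-outputs sketch below) and failure states.
    while run.status == .inProgress {
        try await Task.sleep(nanoseconds: 1_000_000_000)
        run = try await openAI.runRetrieve(threadId: run.threadId, runId: run.id)
    }

    // 4. Read the thread's messages back and keep the text parts.
    //    `text` is treated as optional here, mirroring the fixture shape in the tests above.
    let messages = try await openAI.threadsMessages(threadId: run.threadId)
    return messages.data.flatMap { $0.content.compactMap { $0.text?.value } }
}
```

Using `threadRun` keeps the sketch short; the same flow could instead create the thread with `threads(query:)` and start it separately with `runs(threadId:query:)`, as the individual doc comments describe.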
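
`runSubmitToolOutputs` is the one new call documented above without an example, so here is a hedged sketch of the `requires_action` round trip. The property shapes (`requiredAction`, `submitToolOutputs`, `toolCalls`) follow the `RunResult` and `RunToolOutputsQuery` fixtures used in the tests; the helper name `resumeRunIfNeeded` and the canned JSON output are placeholders for app-specific function handling, and `submitToolOutputs` is assumed to be non-optional once a run requires action.

```swift
import OpenAI

/// Completes a run that stopped with `requires_action` by answering its tool calls.
func resumeRunIfNeeded(_ openAI: OpenAIProtocol, run: RunResult) async throws {
    // Only a run in `requires_action` carries a `requiredAction` payload.
    guard run.status == .requiresAction, let action = run.requiredAction else { return }

    // Answer every requested tool call by id. A real implementation would decode each
    // call's function name and JSON arguments (as modelled in testRunResult) and run
    // the matching local function instead of returning a canned result.
    let query = RunToolOutputsQuery(toolOutputs: action.submitToolOutputs.toolCalls.map {
        .init(toolCallId: $0.id, output: "{\"result\": \"ok\"}")
    })

    // Hand the outputs back so the run can continue; the returned RunResult can be
    // polled again with runRetrieve.
    _ = try await openAI.runSubmitToolOutputs(threadId: run.threadId, runId: run.id, query: query)
}
```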
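
Finally, a hedged sketch of the file-upload path that `files(query:)` and `threadsAddMessage(threadId:query:)` document above: upload a document for the Assistants API, then reference its id from a thread message. The `"assistants"` purpose string and the `FilesQuery`/`MessageQuery` shapes follow the tests in this diff; the helper name `attachDocument`, the PDF content type, the message text, and the thread id are illustrative placeholders.

```swift
import Foundation
import OpenAI

/// Uploads a local file and attaches it to an existing thread via a user message.
func attachDocument(_ openAI: OpenAIProtocol, url: URL, threadId: String) async throws {
    let data = try Data(contentsOf: url)

    // Upload the raw bytes; the result carries the server-side file id.
    let file = try await openAI.files(query: FilesQuery(
        purpose: "assistants",
        file: data,
        fileName: url.lastPathComponent,
        contentType: "application/pdf"
    ))

    // Add a message that references the uploaded file by id.
    _ = try await openAI.threadsAddMessage(
        threadId: threadId,
        query: MessageQuery(
            role: .user,
            content: "Please summarise the attached document.",
            fileIds: [file.id]
        )
    )
}
```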