diff --git a/Examples/iOS_Example/iOS_Example.xcodeproj/project.pbxproj b/Examples/iOS_Example/iOS_Example.xcodeproj/project.pbxproj index 052869c..666d901 100644 --- a/Examples/iOS_Example/iOS_Example.xcodeproj/project.pbxproj +++ b/Examples/iOS_Example/iOS_Example.xcodeproj/project.pbxproj @@ -7,7 +7,6 @@ objects = { /* Begin PBXBuildFile section */ - 761C24FD2940346B005066C2 /* EditExample.swift in Sources */ = {isa = PBXBuildFile; fileRef = 761C24FC2940346B005066C2 /* EditExample.swift */; }; 761C250429403883005066C2 /* EditImageExample.swift in Sources */ = {isa = PBXBuildFile; fileRef = 761C250329403883005066C2 /* EditImageExample.swift */; }; 761C250B2940392F005066C2 /* GenerateCompletionExample.swift in Sources */ = {isa = PBXBuildFile; fileRef = 761C250A2940392F005066C2 /* GenerateCompletionExample.swift */; }; 761C250D294039A8005066C2 /* GenerateImageVariationsExample.swift in Sources */ = {isa = PBXBuildFile; fileRef = 761C250C294039A8005066C2 /* GenerateImageVariationsExample.swift */; }; @@ -41,12 +40,11 @@ 7D4A910A2A87E110000A138A /* audio_translation.mp3 in Resources */ = {isa = PBXBuildFile; fileRef = 7D4A91092A87E110000A138A /* audio_translation.mp3 */; }; 7D76E4F82AD46B7500E2F4C3 /* CreateChatFunctionCallExample.swift in Sources */ = {isa = PBXBuildFile; fileRef = 7D76E4F72AD46B7500E2F4C3 /* CreateChatFunctionCallExample.swift */; }; 7DC0E9082A87CF97005C8763 /* CreateTranscription.swift in Sources */ = {isa = PBXBuildFile; fileRef = 7DC0E9072A87CF97005C8763 /* CreateTranscription.swift */; }; - 7DC0E90A2A87D046005C8763 /* audio.mp3 in Resources */ = {isa = PBXBuildFile; fileRef = 7DC0E9092A87D046005C8763 /* audio.mp3 */; }; 7DDE422E29AFE567000BBB01 /* CreateChatCompletion.swift in Sources */ = {isa = PBXBuildFile; fileRef = 7DDE422D29AFE567000BBB01 /* CreateChatCompletion.swift */; }; + 7DDF5CF62AD77AAC00A72E6B /* audio.mp3 in Resources */ = {isa = PBXBuildFile; fileRef = 7DDF5CF52AD77AAC00A72E6B /* audio.mp3 */; }; /* End PBXBuildFile section */ /* Begin PBXFileReference section */ - 761C24FC2940346B005066C2 /* EditExample.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = EditExample.swift; sourceTree = "<group>"; }; 761C250329403883005066C2 /* EditImageExample.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = EditImageExample.swift; sourceTree = "<group>"; }; 761C250A2940392F005066C2 /* GenerateCompletionExample.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = GenerateCompletionExample.swift; sourceTree = "<group>"; }; 761C250C294039A8005066C2 /* GenerateImageVariationsExample.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = GenerateImageVariationsExample.swift; sourceTree = "<group>"; }; @@ -81,8 +79,8 @@ 7D4A91092A87E110000A138A /* audio_translation.mp3 */ = {isa = PBXFileReference; lastKnownFileType = audio.mp3; path = audio_translation.mp3; sourceTree = "<group>"; }; 7D76E4F72AD46B7500E2F4C3 /* CreateChatFunctionCallExample.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = CreateChatFunctionCallExample.swift; sourceTree = "<group>"; }; 7DC0E9072A87CF97005C8763 /* CreateTranscription.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = CreateTranscription.swift; sourceTree = "<group>"; }; - 7DC0E9092A87D046005C8763 /* audio.mp3 */ = {isa = PBXFileReference; lastKnownFileType = audio.mp3; name = audio.mp3; path = ../audio.mp3; sourceTree = "<group>"; }; 7DDE422D29AFE567000BBB01 /* CreateChatCompletion.swift */ = {isa = PBXFileReference;
lastKnownFileType = sourcecode.swift; path = CreateChatCompletion.swift; sourceTree = "<group>"; }; + 7DDF5CF52AD77AAC00A72E6B /* audio.mp3 */ = {isa = PBXFileReference; lastKnownFileType = audio.mp3; path = audio.mp3; sourceTree = "<group>"; }; /* End PBXFileReference section */ /* Begin PBXFrameworksBuildPhase section */ @@ -101,7 +99,6 @@ isa = PBXGroup; children = ( 761C250A2940392F005066C2 /* GenerateCompletionExample.swift */, - 761C24FC2940346B005066C2 /* EditExample.swift */, 765BB1D829404D52000BF124 /* CompletionMainView.swift */, ); path = Completion; @@ -243,8 +240,8 @@ 7DC0E90B2A87D04D005C8763 /* Sound */ = { isa = PBXGroup; children = ( + 7DDF5CF52AD77AAC00A72E6B /* audio.mp3 */, 7D4A91092A87E110000A138A /* audio_translation.mp3 */, - 7DC0E9092A87D046005C8763 /* audio.mp3 */, ); path = Sound; sourceTree = "<group>"; @@ -291,7 +288,7 @@ attributes = { BuildIndependentTargetsInParallel = 1; LastSwiftUpdateCheck = 1410; - LastUpgradeCheck = 1410; + LastUpgradeCheck = 1500; TargetAttributes = { 76C0C24F293FB7CC003075D2 = { CreatedOnToolsVersion = 14.1; @@ -322,11 +319,11 @@ buildActionMask = 2147483647; files = ( 76C0C25B293FB7CE003075D2 /* Preview Assets.xcassets in Resources */, + 7DDF5CF62AD77AAC00A72E6B /* audio.mp3 in Resources */, 765BB20229405723000BF124 /* mask.png in Resources */, 765BB20129405723000BF124 /* variation.png in Resources */, 7D4A910A2A87E110000A138A /* audio_translation.mp3 in Resources */, 76C0C258293FB7CE003075D2 /* Assets.xcassets in Resources */, - 7DC0E90A2A87D046005C8763 /* audio.mp3 in Resources */, 761C253129404086005066C2 /* SampleData.jsonl in Resources */, 765BB20329405723000BF124 /* image.png in Resources */, ); @@ -351,7 +348,6 @@ 765BB1D929404D52000BF124 /* CompletionMainView.swift in Sources */, 76C0C256293FB7CC003075D2 /* ContentView.swift in Sources */, 761C251329403AC7005066C2 /* GenerateImageExample.swift in Sources */, - 761C24FD2940346B005066C2 /* EditExample.swift in Sources */, 761C251B29403CD2005066C2 /* FileDetailsView.swift in Sources */, 761C252C2940400B005066C2 /* RetrieveModelExample.swift in Sources */, 761C251729403CA0005066C2 /* ListFilesExample.swift in Sources */, @@ -408,6 +404,7 @@ DEBUG_INFORMATION_FORMAT = dwarf; ENABLE_STRICT_OBJC_MSGSEND = YES; ENABLE_TESTABILITY = YES; + ENABLE_USER_SCRIPT_SANDBOXING = YES; GCC_C_LANGUAGE_STANDARD = gnu11; GCC_DYNAMIC_NO_PIC = NO; GCC_NO_COMMON_BLOCKS = YES; @@ -468,6 +465,7 @@ DEBUG_INFORMATION_FORMAT = "dwarf-with-dsym"; ENABLE_NS_ASSERTIONS = NO; ENABLE_STRICT_OBJC_MSGSEND = YES; + ENABLE_USER_SCRIPT_SANDBOXING = YES; GCC_C_LANGUAGE_STANDARD = gnu11; GCC_NO_COMMON_BLOCKS = YES; GCC_WARN_64_TO_32_BIT_CONVERSION = YES; diff --git a/Examples/iOS_Example/iOS_Example/Audio/CreateTranscription.swift b/Examples/iOS_Example/iOS_Example/Audio/CreateTranscription.swift index 7194510..34e2cf5 100644 --- a/Examples/iOS_Example/iOS_Example/Audio/CreateTranscription.swift +++ b/Examples/iOS_Example/iOS_Example/Audio/CreateTranscription.swift @@ -101,6 +101,7 @@ struct CreateTranscriptionExample: View { Task { do { + // ⚠️🔑 NEVER store OpenAI API keys directly in code. Use environment variables or secrets management. Avoid git commits of keys!
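// Illustrative sketch only, not part of this commit: rather than hard-coding the key,
// it can be loaded at runtime, e.g. from the process environment or a git-ignored plist.
// `OPENAI_API_KEY` is an assumed variable name, not something this SDK defines:
//   let apiKey = ProcessInfo.processInfo.environment["OPENAI_API_KEY"] ?? ""
//   let config = Configuration(organizationId: "INSERT-ORGANIZATION-ID", apiKey: apiKey)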
๐Ÿ”‘โš ๏ธ let config = Configuration( organizationId: "INSERT-ORGANIZATION-ID", apiKey: "INSERT-API-KEY" diff --git a/Examples/iOS_Example/iOS_Example/Chat/CreateChatCompletion.swift b/Examples/iOS_Example/iOS_Example/Chat/CreateChatCompletion.swift index fb7596b..fc14237 100644 --- a/Examples/iOS_Example/iOS_Example/Chat/CreateChatCompletion.swift +++ b/Examples/iOS_Example/iOS_Example/Chat/CreateChatCompletion.swift @@ -63,6 +63,7 @@ struct CreateChatCompletionExample: View { Task { do { + // โš ๏ธ๐Ÿ”‘ NEVER store OpenAI API keys directly in code. Use environment variables or secrets management. Avoid git commits of keys! ๐Ÿ”‘โš ๏ธ let config = Configuration( organizationId: "INSERT-ORGANIZATION-ID", apiKey: "INSERT-API-KEY" diff --git a/Examples/iOS_Example/iOS_Example/Chat/CreateChatCompletionStreaming.swift b/Examples/iOS_Example/iOS_Example/Chat/CreateChatCompletionStreaming.swift index 7ac5a3f..5272a1c 100644 --- a/Examples/iOS_Example/iOS_Example/Chat/CreateChatCompletionStreaming.swift +++ b/Examples/iOS_Example/iOS_Example/Chat/CreateChatCompletionStreaming.swift @@ -59,6 +59,7 @@ struct CreateChatCompletionStreamingExample: View { Task { do { + // โš ๏ธ๐Ÿ”‘ NEVER store OpenAI API keys directly in code. Use environment variables or secrets management. Avoid git commits of keys! ๐Ÿ”‘โš ๏ธ let config = Configuration( organizationId: "INSERT-ORGANIZATION-ID", apiKey: "INSERT-API-KEY" diff --git a/Examples/iOS_Example/iOS_Example/Chat/CreateChatFunctionCallExample.swift b/Examples/iOS_Example/iOS_Example/Chat/CreateChatFunctionCallExample.swift index dccad08..66008b6 100644 --- a/Examples/iOS_Example/iOS_Example/Chat/CreateChatFunctionCallExample.swift +++ b/Examples/iOS_Example/iOS_Example/Chat/CreateChatFunctionCallExample.swift @@ -71,6 +71,7 @@ struct CreateChatFunctionCallExample: View { Task { do { + // โš ๏ธ๐Ÿ”‘ NEVER store OpenAI API keys directly in code. Use environment variables or secrets management. Avoid git commits of keys! ๐Ÿ”‘โš ๏ธ let config = Configuration( organizationId: "INSERT-ORGANIZATION-ID", apiKey: "INSERT-API-KEY" @@ -82,6 +83,7 @@ struct CreateChatFunctionCallExample: View { functionCall: "auto", functions: functions ) + let chatCompletion = try await openAI.generateChatCompletion( parameters: chatParameters ) diff --git a/Examples/iOS_Example/iOS_Example/Completion/CompletionMainView.swift b/Examples/iOS_Example/iOS_Example/Completion/CompletionMainView.swift index a16b433..affe123 100644 --- a/Examples/iOS_Example/iOS_Example/Completion/CompletionMainView.swift +++ b/Examples/iOS_Example/iOS_Example/Completion/CompletionMainView.swift @@ -31,10 +31,6 @@ struct CompletionMainView: View { NavigationLink(destination: GenerateCompletionExample()) { Text("Generate Completion Example") } - - NavigationLink(destination: EditExample()) { - Text("Edit Example") - } } .listStyle(.plain) .navigationTitle("Completion") diff --git a/Examples/iOS_Example/iOS_Example/Completion/GenerateCompletionExample.swift b/Examples/iOS_Example/iOS_Example/Completion/GenerateCompletionExample.swift index 7eb85a8..ddfd702 100644 --- a/Examples/iOS_Example/iOS_Example/Completion/GenerateCompletionExample.swift +++ b/Examples/iOS_Example/iOS_Example/Completion/GenerateCompletionExample.swift @@ -51,6 +51,7 @@ struct GenerateCompletionExample: View { Task { do { + // โš ๏ธ๐Ÿ”‘ NEVER store OpenAI API keys directly in code. Use environment variables or secrets management. Avoid git commits of keys! 
๐Ÿ”‘โš ๏ธ let config = Configuration( organizationId: "INSERT-ORGANIZATION-ID", apiKey: "INSERT-API-KEY" diff --git a/Examples/iOS_Example/iOS_Example/Content Policy/ContentPolicyExample.swift b/Examples/iOS_Example/iOS_Example/Content Policy/ContentPolicyExample.swift index 2db7620..f3a5741 100644 --- a/Examples/iOS_Example/iOS_Example/Content Policy/ContentPolicyExample.swift +++ b/Examples/iOS_Example/iOS_Example/Content Policy/ContentPolicyExample.swift @@ -49,6 +49,7 @@ struct ContentPolicyExample: View { Task { do { + // โš ๏ธ๐Ÿ”‘ NEVER store OpenAI API keys directly in code. Use environment variables or secrets management. Avoid git commits of keys! ๐Ÿ”‘โš ๏ธ let openAI = OpenAI(Configuration(organizationId: "INSERT-ORGANIZATION-ID", apiKey: "INSERT-API-KEY")) let contentParameter = ContentPolicyParameters(input: input) let contentResult = try await openAI.checkContentPolicy(parameters: contentParameter) diff --git a/Examples/iOS_Example/iOS_Example/Embeddings/CreateEmbeddingsExample.swift b/Examples/iOS_Example/iOS_Example/Embeddings/CreateEmbeddingsExample.swift index 5d5e124..9b945e3 100644 --- a/Examples/iOS_Example/iOS_Example/Embeddings/CreateEmbeddingsExample.swift +++ b/Examples/iOS_Example/iOS_Example/Embeddings/CreateEmbeddingsExample.swift @@ -63,9 +63,10 @@ struct CreateEmbeddingsExample: View { Task { do { + // โš ๏ธ๐Ÿ”‘ NEVER store OpenAI API keys directly in code. Use environment variables or secrets management. Avoid git commits of keys! ๐Ÿ”‘โš ๏ธ let config = Configuration(organizationId: "INSERT-ORGANIZATION-ID", apiKey: "INSERT-API-KEY") let openAI = OpenAI(config) - let embeddingsParam = EmbeddingsParameters(model: "text-similarity-babbage-001", input: input) + let embeddingsParam = EmbeddingsParameters(model: "text-similarity-ada-002", input: input) self.embeddingsResponse = try await openAI.createEmbeddings(parameters: embeddingsParam) diff --git a/Examples/iOS_Example/iOS_Example/File/DeleteFileExample.swift b/Examples/iOS_Example/iOS_Example/File/DeleteFileExample.swift index 25c1e63..aa55aa4 100644 --- a/Examples/iOS_Example/iOS_Example/File/DeleteFileExample.swift +++ b/Examples/iOS_Example/iOS_Example/File/DeleteFileExample.swift @@ -50,6 +50,7 @@ struct DeleteFileExample: View { Task { do { + // โš ๏ธ๐Ÿ”‘ NEVER store OpenAI API keys directly in code. Use environment variables or secrets management. Avoid git commits of keys! ๐Ÿ”‘โš ๏ธ let config = Configuration(organizationId: "INSERT-ORGANIZATION-ID", apiKey: "INSERT-API-KEY") let openAI = OpenAI(config) diff --git a/Examples/iOS_Example/iOS_Example/File/ListFilesExample.swift b/Examples/iOS_Example/iOS_Example/File/ListFilesExample.swift index c4f5207..b138edb 100644 --- a/Examples/iOS_Example/iOS_Example/File/ListFilesExample.swift +++ b/Examples/iOS_Example/iOS_Example/File/ListFilesExample.swift @@ -53,6 +53,7 @@ struct ListFilesExample: View { Task { do { + // โš ๏ธ๐Ÿ”‘ NEVER store OpenAI API keys directly in code. Use environment variables or secrets management. Avoid git commits of keys! 
๐Ÿ”‘โš ๏ธ let config = Configuration(organizationId: "INSERT-ORGANIZATION-ID", apiKey: "INSERT-API-KEY") let openAI = OpenAI(config) let filesResponse = try await openAI.listFiles() diff --git a/Examples/iOS_Example/iOS_Example/File/RetrieveFileContentsExample.swift b/Examples/iOS_Example/iOS_Example/File/RetrieveFileContentsExample.swift index 375e96c..8ec81b7 100644 --- a/Examples/iOS_Example/iOS_Example/File/RetrieveFileContentsExample.swift +++ b/Examples/iOS_Example/iOS_Example/File/RetrieveFileContentsExample.swift @@ -52,6 +52,7 @@ struct RetrieveFileContentsExample: View { Task { do { + // โš ๏ธ๐Ÿ”‘ NEVER store OpenAI API keys directly in code. Use environment variables or secrets management. Avoid git commits of keys! ๐Ÿ”‘โš ๏ธ let config = Configuration(organizationId: "INSERT-ORGANIZATION-ID", apiKey: "INSERT-API-KEY") let openAI = OpenAI(config) diff --git a/Examples/iOS_Example/iOS_Example/File/RetrieveFileExample.swift b/Examples/iOS_Example/iOS_Example/File/RetrieveFileExample.swift index aed73b2..4ded835 100644 --- a/Examples/iOS_Example/iOS_Example/File/RetrieveFileExample.swift +++ b/Examples/iOS_Example/iOS_Example/File/RetrieveFileExample.swift @@ -53,6 +53,7 @@ struct RetrieveFileExample: View { Task { do { + // โš ๏ธ๐Ÿ”‘ NEVER store OpenAI API keys directly in code. Use environment variables or secrets management. Avoid git commits of keys! ๐Ÿ”‘โš ๏ธ let config = Configuration(organizationId: "INSERT-ORGANIZATION-ID", apiKey: "INSERT-API-KEY") let openAI = OpenAI(config) diff --git a/Examples/iOS_Example/iOS_Example/File/UploadFileExample.swift b/Examples/iOS_Example/iOS_Example/File/UploadFileExample.swift index 5699e1d..1e6ad57 100644 --- a/Examples/iOS_Example/iOS_Example/File/UploadFileExample.swift +++ b/Examples/iOS_Example/iOS_Example/File/UploadFileExample.swift @@ -53,6 +53,7 @@ struct UploadFileExample: View { Task { do { + // โš ๏ธ๐Ÿ”‘ NEVER store OpenAI API keys directly in code. Use environment variables or secrets management. Avoid git commits of keys! ๐Ÿ”‘โš ๏ธ let config = Configuration(organizationId: "INSERT-ORGANIZATION-ID", apiKey: "INSERT-API-KEY") let openAI = OpenAI(config) diff --git a/Examples/iOS_Example/iOS_Example/Image/EditImageExample.swift b/Examples/iOS_Example/iOS_Example/Image/EditImageExample.swift index 4dbef10..4576a66 100644 --- a/Examples/iOS_Example/iOS_Example/Image/EditImageExample.swift +++ b/Examples/iOS_Example/iOS_Example/Image/EditImageExample.swift @@ -47,6 +47,7 @@ struct EditImageExample: View { Task { do { + // โš ๏ธ๐Ÿ”‘ NEVER store OpenAI API keys directly in code. Use environment variables or secrets management. Avoid git commits of keys! ๐Ÿ”‘โš ๏ธ let config = Configuration( organizationId: "INSERT-ORGANIZATION-ID", apiKey: "INSERT-API-KEY" diff --git a/Examples/iOS_Example/iOS_Example/Image/GenerateImageExample.swift b/Examples/iOS_Example/iOS_Example/Image/GenerateImageExample.swift index 78e4a26..9ba063d 100644 --- a/Examples/iOS_Example/iOS_Example/Image/GenerateImageExample.swift +++ b/Examples/iOS_Example/iOS_Example/Image/GenerateImageExample.swift @@ -40,9 +40,10 @@ struct GenerateImageExample: View { VStack { Button { isGenerating = true - + Task { do { + // โš ๏ธ๐Ÿ”‘ NEVER store OpenAI API keys directly in code. Use environment variables or secrets management. Avoid git commits of keys! 
๐Ÿ”‘โš ๏ธ let config = Configuration( organizationId: "INSERT-ORGANIZATION-ID", apiKey: "INSERT-API-KEY" diff --git a/Examples/iOS_Example/iOS_Example/Image/GenerateImageVariationsExample.swift b/Examples/iOS_Example/iOS_Example/Image/GenerateImageVariationsExample.swift index 1b8edd4..4789be6 100644 --- a/Examples/iOS_Example/iOS_Example/Image/GenerateImageVariationsExample.swift +++ b/Examples/iOS_Example/iOS_Example/Image/GenerateImageVariationsExample.swift @@ -47,6 +47,7 @@ struct GenerateImageVariationExample: View { Task { do { + // โš ๏ธ๐Ÿ”‘ NEVER store OpenAI API keys directly in code. Use environment variables or secrets management. Avoid git commits of keys! ๐Ÿ”‘โš ๏ธ let config = Configuration( organizationId: "INSERT-ORGANIZATION-ID", apiKey: "INSERT-API-KEY" diff --git a/Examples/iOS_Example/iOS_Example/Model/ListModelsExample.swift b/Examples/iOS_Example/iOS_Example/Model/ListModelsExample.swift index 293ba82..8f3a413 100644 --- a/Examples/iOS_Example/iOS_Example/Model/ListModelsExample.swift +++ b/Examples/iOS_Example/iOS_Example/Model/ListModelsExample.swift @@ -46,6 +46,7 @@ struct ListModelsExample: View { Task { do { + // โš ๏ธ๐Ÿ”‘ NEVER store OpenAI API keys directly in code. Use environment variables or secrets management. Avoid git commits of keys! ๐Ÿ”‘โš ๏ธ let config = Configuration( organizationId: "INSERT-ORGANIZATION-ID", apiKey: "INSERT-API-KEY" diff --git a/Examples/iOS_Example/iOS_Example/Model/RetrieveModelExample.swift b/Examples/iOS_Example/iOS_Example/Model/RetrieveModelExample.swift index b6368a1..d6d00a2 100644 --- a/Examples/iOS_Example/iOS_Example/Model/RetrieveModelExample.swift +++ b/Examples/iOS_Example/iOS_Example/Model/RetrieveModelExample.swift @@ -50,6 +50,7 @@ struct RetrieveModelExample: View { Task { do { + // โš ๏ธ๐Ÿ”‘ NEVER store OpenAI API keys directly in code. Use environment variables or secrets management. Avoid git commits of keys! ๐Ÿ”‘โš ๏ธ let config = Configuration(organizationId: "INSERT-ORGANIZATION-ID", apiKey: "INSERT-API-KEY") let openAI = OpenAI(config) diff --git a/README.md b/README.md index 8690e27..0cd7660 100644 --- a/README.md +++ b/README.md @@ -1,17 +1,15 @@ -[![Swift](https://img.shields.io/badge/Swift-5.5_5.6_5.7-orange?style=flat-square)](https://img.shields.io/badge/Swift-5.5_5.6_5.7-Orange?style=flat-square) -[![Platforms](https://img.shields.io/badge/Platforms-macOS_iOS_tvOS_watchOS_visionOS-green?style=flat-square)](https://img.shields.io/badge/Platforms-macOS_iOS_tvOS_watchOS_visionOS-green?style=flat-square) +[![](https://img.shields.io/endpoint?url=https%3A%2F%2Fswiftpackageindex.com%2Fapi%2Fpackages%2FMarcoDotIO%2FOpenAIKit%2Fbadge%3Ftype%3Dswift-versions)](https://swiftpackageindex.com/MarcoDotIO/OpenAIKit) +[![](https://img.shields.io/endpoint?url=https%3A%2F%2Fswiftpackageindex.com%2Fapi%2Fpackages%2FMarcoDotIO%2FOpenAIKit%2Fbadge%3Ftype%3Dplatforms)](https://swiftpackageindex.com/MarcoDotIO/OpenAIKit) [![Swift Package Manager](https://img.shields.io/badge/Swift_Package_Manager-compatible-orange?style=flat-square)](https://img.shields.io/badge/Swift_Package_Manager-compatible-orange?style=flat-square) [![Swift](https://github.com/MarcoDotIO/OpenAIKit/actions/workflows/swift.yml/badge.svg?branch=main)](https://github.com/MarcoDotIO/OpenAIKit/actions/workflows/swift.yml)
- OpenAIKit Logo
- OpenAIKit
+ OpenAIKit Logo
+ OpenAIKit
- A community-maintained Swift API for the OpenAI REST endpoint.
- Explore the docs »
+ A community-maintained Swift SDK for the OpenAI API.

Report Bug @@ -24,7 +22,6 @@ ## Table of Contents - [About the Project](#about-the-project) - - [Motivation](#motivation) - [Our Vision](#our-vision) - [Features](#features) @@ -34,8 +31,17 @@ - [SPM Through Xcode Project](#spm-through-xcode-project) - [SPM Through Xcode Package](#spm-through-xcode-package) - [Using OpenAIKit](#using-openaikit) + - [Obtaining API Keys](#obtaining-api-keys) + - [Image](#image) + - [Chat](#chat) + - [Audio](#audio) + - [Utilities](#utilities) + - [Embeddings](#embeddings) + - [Models](#models) + - [Completion](#completion) + - [Migration From 1.x to 2.0](#migration-from-1.x-to-2.0) +- [Example Projects](#example-projects) - [Development and Testing](#development-and-testing) -- [Next Steps](#next-steps) - [Credits](#credits) - [License](#license) @@ -55,7 +61,7 @@ As the lead developer for OpenAIKit, I envisioned a tool that not only provides ## Features -- [x] Generate new, edited, and variations of images using Dall-E 2 (with Dall-E 3 coming soon). - [x] Generate edits and completions using GPT-3 and GPT-4. - [x] List available models for use with GPT-3 and GPT-4. - [x] Retrieve embeddings for GPT-3 and GPT-4 prompts. @@ -71,7 +77,7 @@ As the lead developer for OpenAIKit, I envisioned a tool that not only provides | Platform | Minimum Swift Version | Installation | Status | | ------------------------------------------------------------ | --------------------- | ----------------------------------------------- | ------------ | -| iOS 13.0+ / macOS 10.15+ / tvOS 13.0+ / watchOS 6.0+ / visionOS 1.0+ | 5.5 | [Swift Package Manager](#swift-package-manager) | Fully Tested | +| iOS 13.0+ / macOS 10.15+ / tvOS 13.0+ / watchOS 6.0+ / visionOS 1.0+ | 5.7 | [Swift Package Manager](#swift-package-manager) | Fully Tested | ## Installation ### Swift Package Manager The [Swift Package Manager](https://swift.org/package-manager/) allows developers to easily integrate packages into their Xcode projects and packages, and is fully integrated into the `swift` compiler. #### SPM Through Xcode Project * File > Swift Packages > Add Package Dependency * Add `https://github.com/marcodotio/OpenAIKit.git` -* Select "Up to next Major" with "1.2" +* Select "Up to next Major" with "2.0" #### SPM Through Xcode Package Once you have your Swift package set up, add the Git link within the `dependencies` value of your `Package.swift` file. ```swift dependencies: [ - .package(url: "https://github.com/marcodotio/OpenAIKit.git", .upToNextMajor(from: "1.2")) + .package(url: "https://github.com/marcodotio/OpenAIKit.git", .upToNextMajor(from: "2.0")) ] ``` ## Using OpenAIKit -OpenAIKit is designed to be very easy to integrate into your own projects. The main method of utilizing OpenAIKit is to set a `OpenAI` class object: +### Obtaining API Keys -```swift -import OpenAIKit +To obtain an API Key, go to your `API Keys` page on your account page [here](https://platform.openai.com/account/api-keys). -// An API key and Organization ID is required to use the API library. -let config = Configuration(organizationId: "INSERT-ORGANIZATION-ID", apiKey: "INSERT-API-KEY") +API Keys Page + +> ## ⚠️ 🔑 **Important: Do Not Store OpenAI API Keys Directly in Code!** 🔑⚠️ +> +> When working with OpenAI's API, it's essential for security reasons not to embed your API keys directly within the codebase of your application. Embedding keys directly in your source code can expose them to unauthorized users and malicious actors, leading to potential misuse. +> +> **Best Practices:** +> +> 1. **Environment Variables:** Store your API keys in environment variables and access them in your code.
This way, they aren't hard-coded into your application and can be managed securely. +> 2. **Secrets Management:** Use a secure secrets management tool or service to store and retrieve your API keys. Tools like AWS Secrets Manager, HashiCorp Vault, Firebase, CloudKit, or Azure Key Vault are designed to store, manage, and retrieve sensitive data. This way, the user does not have on-device access to the keys. +> 3. **.gitignore:** If you store your API keys in a configuration file, always ensure that this file is added to `.gitignore` or a similar mechanism to prevent it from being accidentally committed to a source control system. +> 4. **Regularly Rotate Keys:** Rotate your API keys periodically to mitigate the potential impact of a leak. +> +> Remember, security is of utmost importance. Always prioritize the safe handling of sensitive information like API keys! +> +> For more information, check out [this article](https://nshipster.com/secrets/) all about API key management for Swift developers. -// Create an `OpenAI` object using the Configuration object. -let openAI = OpenAI(config) -``` -From there, it's as easy as calling one of the provided function members. The code below demonstrates how you can generate an image using `createImage()`: +### Image + +Dive into the next evolution of AI-powered image generation with DALL-E 2 (and coming soon, DALL-E 3), brought to you by OpenAI. Building upon the legacy of its predecessor, DALL-E 2 offers enhanced capabilities to materialize intricate visuals from textual prompts, creating a seamless bridge between language and imagery. Developers can harness this groundbreaking technology to enrich user experiences, craft dynamic content, and inspire new avenues of creativity. Revolutionize your applications and projects by integrating the cutting-edge power of DALL-E 2. + +There are three endpoints provided by OpenAI to interact with DALL-E 2: + - `createImage` is the main endpoint that allows direct image generation from a text prompt. Here's an example on how to use the endpoint, along with the correlating output: ```swift do { let imageParam = ImageParameters( - prompt: "a red apple", - resolution: .small, - responseFormat: .base64Json + // A text description of the desired image(s). + prompt: "An armchair in the shape of an avocado", + // The size of the generated images. + resolution: .large, + // The format in which the generated images are returned. + responseFormat: .base64Json ) let result = try await openAi.createImage( - parameters: imageParam + parameters: imageParam ) let b64Image = result.data[0].image let image = try openAi.decodeBase64Image(b64Image) @@ -127,207 +151,533 @@ do { // Insert your own error handling method here. } ``` - -As well, you are able to generate completions using GPT-3: - +Avocado Chair +โ€‹ +โ€‹ - `createImageEdit` allows the developer to create edits based on the original image inputted, along with a transparent-enabled image, with the transparent portions being the areas to edit in; and a prompt of the original image with the edit the developer wants. Here's an example on how to use the endpoint, along with the correlating output: ```swift do { - let completionParameter = CompletionParameters( - model: "text-davinci-001", - prompt: ["Say this is a test ->"], - maxTokens: 4, - temperature: 0.98 + let imageEditParam = try ImageEditParameters( + // The image to edit. + image: image, + // An additional image whose fully transparent areas indicate where image should be edited. 
+ mask: mask, + // A text description of the desired image(s). + prompt: "The Mona Lisa wearing a beret, in the style of Leonardo DaVinci", + // The size of the generated images. + resolution: .large, + // The format in which the generated images are returned. + responseFormat: .base64Json ) - let completionResponse = try await openAI.generateCompletion( - parameters: completionParameter + + let imageResponse = try await openAI.generateImageEdits( + parameters: imageEditParam ) - let responseText = completionResponse.choices[0].text + + let image = try openAI.decodeBase64Image(imageResponse.data[0].image) } catch { // Insert your own error handling method here. } ``` +| ![Original](Resources/OriginalMonaLisa.png) | ![Mask](Resources/MonaLisaMask.png) | ![Edit](Resources/MonaLisaBeret.png) | +|-----------------------|---------------------------|---------------------------------| +| **Original** | **Mask** | **Edit** | -## Development And Testing - -I welcome anyone to contribute to the project through posting issues, if they encounter any bugs / glitches while using OpenAIKit; and as well with creating pull issues that add any additional features to OpenAIKit. - -## Next Steps - -* In the near future, there will be full documentation outlining how a user can fully utilize OpenAIKit. -* As well, more features listed in [ToDo](#todo) will be fully implemented. -* More examples, from other platforms, will be uploaded for developers to be able to focus more on implementing the end user experience, and less time figuring out their project's architecture. - -## Credits - -I would like to personally thank the [OpenAI Team](https://openai.com) for implementing the REST endpoint and implementing the models themselves, as without them, this project wouldn't have been possible. - -As well, I would like to personally thank [YufeiG](https://github.com/YufeiG) for providing troubleshooting help on sending Image data for the Image Edit and Variations endpoints. - -## License - -OpenAIKit is released under the MIT license, and any use of OpenAI's REST endpoint will be under the [Usage policies](https://beta.openai.com/docs/usage-policies) set by them. [See LICENSE](https://github.com/MarcoDotIO/OpenAIKit/blob/main/LICENSE) for details. 
- - - - -[contributors-shield]: https://img.shields.io/github/contributors/othneildrew/Best-README-Template.svg?style=for-the-badge -[contributors-url]: https://github.com/MarcoDotIO/OpenAIKit/graphs/contributors -[forks-shield]: https://img.shields.io/github/forks/othneildrew/Best-README-Template.svg?style=for-the-badge -[forks-url]: https://github.com/MarcoDotIO/OpenAIKit/network/members -[stars-shield]: https://img.shields.io/github/stars/othneildrew/Best-README-Template.svg?style=for-the-badge -[stars-url]: https://github.com/MarcoDotIO/OpenAIKit/stargazers -[issues-shield]: https://img.shields.io/github/issues/othneildrew/Best-README-Template.svg?style=for-the-badge -[issues-url]: https://github.com/MarcoDotIO/OpenAIKit/issues -[license-shield]: https://img.shields.io/github/license/othneildrew/Best-README-Template.svg?style=for-the-badge -[license-url]: https://github.com/MarcoDotIO/OpenAIKit/blob/main/LICENSE.txt - -[![Swift](https://img.shields.io/badge/Swift-5.5_5.6_5.7-orange?style=flat-square)](https://img.shields.io/badge/Swift-5.5_5.6_5.7-Orange?style=flat-square) -[![Platforms](https://img.shields.io/badge/Platforms-macOS_iOS_tvOS_watchOS_visionOS-green?style=flat-square)](https://img.shields.io/badge/Platforms-macOS_iOS_tvOS_watchOS_visionOS-green?style=flat-square) -[![Swift Package Manager](https://img.shields.io/badge/Swift_Package_Manager-compatible-orange?style=flat-square)](https://img.shields.io/badge/Swift_Package_Manager-compatible-orange?style=flat-square) -[![Swift](https://github.com/MarcoDotIO/OpenAIKit/actions/workflows/swift.yml/badge.svg?branch=main)](https://github.com/MarcoDotIO/OpenAIKit/actions/workflows/swift.yml) +โ€‹ - `createImageVariation` allows the developer to create variations of a given input image. Here's an example on how to use the endpoint, along with the correlating output: +```swift +do { + let imageVariationParam = try ImageVariationParameters( + // The image to use as the basis for the variation(s). + image: image, + // The size of the generated images. + resolution: .large, + // The format in which the generated images are returned. + responseFormat: .base64Json + ) -- [Features](#features) -- [ToDo](#todo) -- [Requirements](#requirements) -- [Installation](#installation) - - [Swift Package Manager](#swift-package-manager) - - [SPM Through Xcode Project](#spm-through-xcode-project) - - [SPM Through Xcode Package](#spm-through-xcode-package) -- [Using OpenAIKit](#using-openaikit) -- [Development and Testing](#development-and-testing) -- [Next Steps](#next-steps) -- [Credits](#credits) -- [License](#license) + let variationResponse = try await openAI.generateImageVariations( + parameters: imageVariationParam + ) -## Features + self.image = try openAI.decodeBase64Image( + variationResponse.data[0].image + ) +} catch { + // Insert your own error handling method here. +} +``` +| ![Original](Resources/OriginalWave.png) | ![Variation](Resources/WaveVariation.png) | +|-----------------------|---------------------------------| +| **Original** | **Variation** | -- [x] Generate new, edited, and variations of images using Dall-E 2. -- [x] Generate edits and completions using GPT-3. -- [x] List avaiable models for use with GPT-3. -- [x] Retrieve embeddings for GPT-3 prompts. -- [x] Generate Chat responses using ChatGPT. -- [x] Upload and view training set files for Fine-tuning. -- [x] Generate and use Fine-tuned models. -- [x] View whether a prompt is flagged by the Moderations endpoint or not. 
-- [x] Comprehensive Unit and Integration Test coverage. -- [x] Swift Concurrency Support back to iOS 13, macOS 10.15, tvOS 13, and watchOS 6. -- [x] Complete documentation of OpenAIKit. +### Chat -## ToDo +ChatGPT, built on OpenAI's GPT-4 architecture, is a cutting-edge conversational AI model. It provides developers with a robust tool for integrating advanced natural language processing capabilities into applications. Using ChatGPT can enhance user interactions, improve efficiency, and offer AI-driven solutions in various use cases. Incorporate GPT-4's strength into your projects for tangible results. -- [ ] Implement data streaming for Fine-Tune and GPT-3 event streams. +There is a single endpoint for this feature, however, this SDK splits the endpoint into two functions with three separate features in total: + - `generateChatCompletion` allows the developer to generate chat completions using the provided models from OpenAI; or the developer's owned fine tuned models. Here's an example on how to use the endpoint, along with the correlating output: +```swift +do { + let chat: [ChatMessage] = [ + ChatMessage(role: .system, content: "You are a helpful assistant."), + ChatMessage(role: .user, content: "Who won the world series in 2020?"), + ChatMessage(role: .assistant, content: "The Los Angeles Dodgers won the World Series in 2020."), + ChatMessage(role: .user, content: "Where was it played?") + ] + + let chatParameters = ChatParameters( + model: .gpt4, // ID of the model to use. + messages: chat // A list of messages comprising the conversation so far. + ) -## Requirements + let chatCompletion = try await openAI.generateChatCompletion( + parameters: chatParameters + ) -| Platform | Minimum Swift Version | Installation | Status | -| ------------------------------------------------------------ | --------------------- | ----------------------------------------------- | ------------ | -| iOS 13.0+ / macOS 10.15+ / tvOS 13.0+ / watchOS 6.0+ / visionOS 1.0+ | 5.5 | [Swift Package Manager](#swift-package-manager) | Fully Tested | + if let message = chatCompletion.choices[0].message { + let content = message.content + } +} catch { + // Insert your own error handling method here. +} +``` +``` +ChatResponse( + id: "chatcmpl-88eG5VruffcNHPNVGBKGVAV5HGk4j", + object: OpenAIKit.OpenAIObject.chatCompletion, + created: 1697072069, + choices: [ + OpenAIKit.ChatChoice( + message: Optional( + OpenAIKit.ChatMessage( + id: "250FDA2D-2F38-4E6F-B97E-DAD74FED1FB6", + role: OpenAIKit.ChatRole.assistant, + content: Optional( + "The 2020 World Series was played at Globe Life Field in Arlington, Texas." + ), + functionCall: nil + ) + ), + delta: nil, + index: 0, + logprobs: nil, + finishReason: Optional("stop") + ) + ], + usage: Optional( + OpenAIKit.Usage( + promptTokens: 53, + completionTokens: 17, + totalTokens: 70 + ) + ) +) +``` +The developer is also able to use function calls to execute various functions (i.e., fetching weather info, uploading files, etc). Here's an example on how to use the parameter, the corresponding response, and example usage with a local function. +```swift +do { + let functions: [Function] = [ + Function( + name: "getCurrentWeather", + description: "Get the current weather in a given location", + parameters: Parameters( + type: "object", + properties: [ + "location": ParameterDetail( + type: "string", + description: "The city and state, e.g. 
San Francisco, CA" ), "unit": ParameterDetail( type: "string", enumValues: ["fahrenheit", "celsius"] ) ], required: ["location"] ) ) ] let messages: [ChatMessage] = [ ChatMessage(role: .user, content: "What's the weather like in Boston?") ] let chatParameters = ChatParameters( model: .gpt4, // ID of the model to use. messages: messages, // A list of messages comprising the conversation so far. functionCall: "auto", // Controls how the model calls functions. functions: functions // A list of functions the model may generate JSON inputs for. ) let chatCompletion = try await openAI.generateChatCompletion( parameters: chatParameters ) } catch { // Insert your own error handling method here. } ``` ``` ChatResponse( id: "chatcmpl-88eVjsHEPtDDiSEuCexsqO8iuhnfG", object: OpenAIKit.OpenAIObject.chatCompletion, created: 1697073039, choices: [ OpenAIKit.ChatChoice( message: Optional( OpenAIKit.ChatMessage( id: "DCE5EECB-9521-481D-9E75-C7FF9390E4CF", role: OpenAIKit.ChatRole.assistant, content: nil, functionCall: Optional( OpenAIKit.FunctionCall( arguments: "{\n\"location\": \"Boston, MA\"\n}", name: "getCurrentWeather" ) ) ) ), delta: nil, index: 0, logprobs: nil, finishReason: Optional("function_call") ) ], usage: Optional(OpenAIKit.Usage(promptTokens: 81, completionTokens: 16, totalTokens: 97)) ) ``` ```swift func getCurrentWeather(location: String, unit: TemperatureUnit = .fahrenheit) -> WeatherInfo { return WeatherInfo(location: location, temperature: "72", unit: unit, forecast: ["sunny", "windy"]) } if let message = chatCompletion.choices[0].message, let functionCall = message.functionCall { let jsonString = functionCall.arguments if let data = jsonString.data(using: .utf8) { do { if let json = try JSONSerialization.jsonObject(with: data, options: []) as? [String: Any], let location = json["location"] as? String { self.weatherInfo = self.getCurrentWeather(location: location) } } catch { // Insert your own error handling method here. } } } ``` - `generateChatCompletionStreaming` allows the developer to stream chat completion data from the endpoint. Here's an example on how to use the endpoint, along with the correlating output: ```swift do { let chat: [ChatMessage] = [ ChatMessage(role: .system, content: "You are a helpful assistant."), ChatMessage(role: .user, content: "Who won the world series in 2020?"), ChatMessage(role: .assistant, content: "The Los Angeles Dodgers won the World Series in 2020."), ChatMessage(role: .user, content: "Where was it played?") ] let chatParameters = ChatParameters(model: .chatGPTTurbo, messages: chat) let stream = try openAI.generateChatCompletionStreaming( parameters: chatParameters )
    // Consume the stream with `for try await` to receive each chunk as it arrives
    // (a hedged sketch; assumes the returned stream is an AsyncSequence of chunk responses).
    for try await chunk in stream {
        if let content = chunk.choices[0].delta?.content {
            print(content, terminator: "")
        }
    }
} catch { // Insert your own error handling method here. } } ``` ``` ChatResponse( id: "chatcmpl-88enklY0vmc4fNkM1mJQCkzW6hcST", object: OpenAIKit.OpenAIObject.chatCompletionChunk, created: 1697074156, choices: [ OpenAIKit.ChatChoice( message: nil, delta: Optional( OpenAIKit.ChatDelta( role: Optional(OpenAIKit.ChatRole.assistant), content: Optional("") ) ), index: 0, logprobs: nil, finishReason: nil ) ], usage: nil ) ChatResponse( id: "chatcmpl-88enklY0vmc4fNkM1mJQCkzW6hcST", object: OpenAIKit.OpenAIObject.chatCompletionChunk, created: 1697074156, choices: [ OpenAIKit.ChatChoice( message: nil, delta: Optional( OpenAIKit.ChatDelta( role: nil, content: Optional("The") ) ), index: 0, logprobs: nil, finishReason: nil ) ], usage: nil ) // ... ChatResponse( id: "chatcmpl-88enklY0vmc4fNkM1mJQCkzW6hcST", object: OpenAIKit.OpenAIObject.chatCompletionChunk, created: 1697074156, choices: [ OpenAIKit.ChatChoice( message: nil, delta: Optional( OpenAIKit.ChatDelta( role: nil, content: Optional(".") ) ), index: 0, logprobs: nil, finishReason: nil ) ], usage: nil ) ChatResponse( id: "chatcmpl-88enklY0vmc4fNkM1mJQCkzW6hcST", object: OpenAIKit.OpenAIObject.chatCompletionChunk, created: 1697074156, choices: [ OpenAIKit.ChatChoice( message: nil, delta: Optional( OpenAIKit.ChatDelta( role: nil, content: nil ) ), index: 0, logprobs: nil, finishReason: Optional("stop") ) ], usage: nil ) ``` ### Audio Whisper is OpenAI's speech-to-text AI model, designed for accurate transcription of spoken content. By converting audio into text, it offers developers a straightforward tool for tasks like transcription services, voice commands, vocal language translations, or audio indexing. Implementing Whisper can help streamline processes, make applications more accessible, and leverage voice data efficiently. There are two main endpoints that use the Whisper model: - `createTranscription` is the main endpoint that allows developers to transcribe spoken audio into text. Here's an example on how to use the endpoint, and the corresponding returned item: ```swift do { let audioParameters = TranscriptionParameters(file: audio) let transcriptionCompletion = try await openAI.createTranscription(parameters: audioParameters) } catch { // Insert your own error handling method here. } ``` ``` Arousing from the most profound of slumbers, we break the gossamer web of some dream. Yet in a second afterward, so frail may that web have been, we remember not that we have dreamed. In the return to life from the swoon there are two stages, first, that of the sense of mental or spiritual, secondly, that of the sense of physical existence. It seems probable that if, upon reaching the second stage, we could recall the impressions of the first, we should find these impressions eloquent in memories of the gulf beyond. And that gulf is what? How at least shall we distinguish its shadows from those of the tomb? ``` - `createTranslation` is the second endpoint that allows developers to translate any non-English audio into transcribed English text.
Here's an example on how to use the endpoint, and the corresponding returned item: ```swift do { let audioParameters = TranscriptionParameters(file: audio) let transcriptionCompletion = try await openAI.createTranslation(parameters: audioParameters) } catch { // Insert your own error handling method here. } ``` ``` In the night, when I was out of my mind, and I, pained, numb and tired, thought about the beauty and goodness of a long-forgotten lesson, and my head began to shake, I suddenly heard something tick that scared me out of my sleep. Who could it be in that weather? A visitor is knocking, I thought, at my room in that weather. That's all it is, and nothing more. ``` ### Utilities #### Embeddings Embeddings utilize OpenAI's GPT models to generate dense vector representations for text. These embeddings capture semantic information, enabling developers to perform tasks like similarity search, clustering, or fine-tuning on specific tasks. By integrating GPT Embeddings into applications, developers can enhance text analysis and retrieval capabilities, leading to more effective data processing and insights. Here's an example on how to use it, and the expected output for the endpoint: ```swift do { let embeddingsParam = EmbeddingsParameters(model: "text-similarity-ada-002", input: input) self.embeddingsResponse = try await openAI.createEmbeddings(parameters: embeddingsParam) } catch { // Insert your own error handling method here. } ``` ``` OpenAIKit.EmbeddingsResponse( object: OpenAIKit.OpenAIObject.list, data: [ OpenAIKit.EmbeddingsData( object: OpenAIKit.OpenAIObject.embedding, embedding: [ 0.0028667077, 0.018867997, -0.030135695, // ... -0.004177677, -0.015615467, -0.008131327 ], index: 0 ) ], model: "text-similarity-ada:002", usage: OpenAIKit.EmbeddingsUsage( promptTokens: 8, totalTokens: 8 ) ) ``` #### Models The models endpoint allows developers to fetch the currently available models from OpenAI, along with their own fine-tuned models. There are two endpoints available for use with this feature: - `listModels` fetches the entire list of models available to the developer. Here's an example usage, along with the corresponding output: ```swift do { let modelsResponse = try await openAi.listModels() } catch { // Insert your own error handling method here. } } ``` ``` ListModelResponse( object: OpenAIKit.OpenAIObject.list, data: [ OpenAIKit.Model( id: "text-search-babbage-doc-001", object: OpenAIKit.OpenAIObject.model, created: 1651172509, ownedBy: "openai-dev" ), OpenAIKit.Model( id: "curie-search-query", object: OpenAIKit.OpenAIObject.model, created: 1651172509, ownedBy: "openai-dev" ), OpenAIKit.Model( id: "text-search-babbage-query-001", object: OpenAIKit.OpenAIObject.model, created: 1651172509, ownedBy: "openai-dev" ), OpenAIKit.Model( id: "babbage", object: OpenAIKit.OpenAIObject.model, created: 1649358449, ownedBy: "openai" ), OpenAIKit.Model( id: "gpt-3.5-turbo-instruct-0914", object: OpenAIKit.OpenAIObject.model, created: 1694122472, ownedBy: "system" ) // ... ] ) ``` - `retrieveModel` fetches a single model, given the input model's ID. Here's an example usage, along with the corresponding output: ```swift do { let model = try await openAI.retrieveModel(modelId: "text-davinci-001") } catch { // Insert your own error handling method here. } ``` ``` OpenAIKit.Model( id: "text-davinci-001", object: OpenAIKit.OpenAIObject.model, created: 1649364042, ownedBy: "openai" ) ``` ### Completion #### ⚠️ Deprecation Notice for Completion API ⚠️ As of July 6, 2023, OpenAI has announced the deprecation of the older models in the Completions API, which are set to retire at the beginning of 2024. It's highly recommended to transition to the Chat Completions API, which provides a more structured prompt interface and multi-turn conversation capabilities. The Chat Completions API has proven to handle a vast majority of previous use cases and new conversational needs with higher flexibility and specificity, significantly enhancing the developer experience. For more details, refer to the [official announcement](https://openai.com/blog/gpt-4-api-general-availability). ### Migration From 1.x to 2.0 As of 2.0, Chat Completion Models use enums to label each available default model. Here is an example on how to migrate from using the older initializer for `ChatParameters` to the initializer for 2.0: ```swift // ❌ 1.x implementation let chatParameters = ChatParameters(model: "gpt4", messages: chat) // ✅ 2.0 implementation let chatParameters = ChatParameters(model: .gpt4, messages: chat) ``` * In the near future, there will be full documentation outlining how a user can fully utilize OpenAIKit. * As well, more features listed in [ToDo](#todo) will be fully implemented. * More examples, from other platforms, will be uploaded for developers to be able to focus more on implementing the end user experience, and less time figuring out their project's architecture.
If you need a custom string for fine-tuned models, use the `customModel` parameter: ```swift // ✅ 2.0 implementation for custom model IDs let chatParameters = ChatParameters(customModel: "INSERT-MODEL-ID", messages: chat) ``` ## Example Projects (TBD) ## Development And Testing I welcome anyone to contribute to the project, whether by posting issues for any bugs or glitches encountered while using OpenAIKit, or by creating pull requests that add additional features to OpenAIKit. ## Credits I would like to personally thank the [OpenAI Team](https://openai.com) for implementing the API endpoint and the models themselves, as without them, this project wouldn't have been possible. As well, I would like to personally thank [YufeiG](https://github.com/YufeiG) for providing troubleshooting help on sending Image data for the Image Edit and Variations endpoints. ## License OpenAIKit is released under the MIT license, and any use of OpenAI's REST endpoint will be under the [Usage policies](https://beta.openai.com/docs/usage-policies) set by them. ``` Permission is hereby granted, free of charge, to any person obtaining a copy of this software and associated documentation files (the "Software"), to deal in the Software without restriction, including without limitation the rights to use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of the Software, and to permit persons to whom the Software is furnished to do so, subject to the following conditions: The above copyright notice and this permission notice shall be included in all copies or substantial portions of the Software. THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
+``` diff --git a/Resources/InputTranslationAudio.mp3 b/Resources/InputTranslationAudio.mp3 new file mode 100644 index 0000000..82d5726 Binary files /dev/null and b/Resources/InputTranslationAudio.mp3 differ diff --git a/Sources/OpenAIKit/Types/Structs/Parameters/Chat/ChatParameters.swift b/Sources/OpenAIKit/Types/Structs/Parameters/Chat/ChatParameters.swift index 30cabc4..d762452 100644 --- a/Sources/OpenAIKit/Types/Structs/Parameters/Chat/ChatParameters.swift +++ b/Sources/OpenAIKit/Types/Structs/Parameters/Chat/ChatParameters.swift @@ -26,9 +26,12 @@ import Foundation public struct ChatParameters { - /// ID of the model to use. Currently, only `gpt-3.5-turbo` and `gpt-3.5-turbo-0301` are supported. + /// ID of the model to use. public var model: ChatModels + /// ID of the custom model created from fine-tuning. + public var customModel: String? + /// The messages to generate chat completions for, in the /// [chat format](https://platform.openai.com/docs/guides/chat/introduction). public var messages: [ChatMessage] @@ -101,6 +104,7 @@ public struct ChatParameters { public init( model: ChatModels, + customModel: String? = nil, messages: [ChatMessage], temperature: Double = 1.0, topP: Double = 1.0, @@ -115,6 +119,7 @@ public struct ChatParameters { functions: [Function]? = nil ) { self.model = model + self.customModel = customModel self.messages = messages self.temperature = temperature self.topP = topP @@ -132,7 +137,7 @@ public struct ChatParameters { /// The body of the URL used for OpenAI API requests. public var body: [String: Any] { var result: [String: Any] = [ - "model": self.model.description, + "model": self.customModel != nil ? self.customModel! : self.model.description, "temperature": self.temperature, "top_p": self.topP, "n": self.numberOfCompletions,