Release 1.1.7
sgusakovsky committed Mar 5, 2023
1 parent c40a26e commit 63118c2
Showing 7 changed files with 177 additions and 20 deletions.
2 changes: 1 addition & 1 deletion OpenAIService.podspec
@@ -1,6 +1,6 @@
Pod::Spec.new do |spec|
spec.name = 'OpenAIService'
spec.version = '1.1.6'
spec.version = '1.1.7'
spec.homepage = 'https://github.com/sgusakovsky/OpenAIService'
spec.license = {
:type => 'MIT',
46 changes: 44 additions & 2 deletions README.md
@@ -103,9 +103,51 @@ service?.sendImageGeneration(with: body, completionHandler: { result in
}
})
```
The API will return an `OpenAIGenerationImageResponse` object containing the corresponding image url items.
The API will return an `OpenAIImageResponse` object containing the corresponding image items, either hosted URLs or base64-encoded data, depending on the requested response format.

For a full list of the supported models see [OpenAICompletionModelType.swift](https://github.com/gusakovsky/OpenAIService/blob/main/Sources/OpenAIService/Models/Completion/OpenAICompletionModelType.swift), [OpenAIChatModelType.swift](https://github.com/gusakovsky/OpenAIService/blob/main/Sources/OpenAIService/Models/Chat/OpenAIChatModelType.swift), [OpenAIEditsModelType.swift](https://github.com/gusakovsky/OpenAIService/blob/main/Sources/OpenAIService/Models/Edits/OpenAIEditsModelType.swift). For more information on the models see the [OpenAI API Documentation](https://platform.openai.com/docs/models).
Create a call to the image edits API, passing in a source image, a mask, and a text prompt.

```swift
guard let body = OpenAIImageEditsBody(image: UIImage(named: "image")!, mask: UIImage(named: "mask")!, prompt: "A cute baby sea otter wearing a beret", size: .small, responseFormat: .base64) else {
    return
}
service?.sendImageEdits(with: body, completionHandler: { result in
    switch result {
    case .success(let response):
        if let image = response.data.first?.image {
            print(image)
        } else if let url = response.data.first?.url {
            print(url)
        }
    case .failure(let error):
        print(error.localizedDescription)
    }
})
```
The API will return an `OpenAIImageResponse` object containing the corresponding image items, either hosted URLs or base64-encoded data, depending on the requested response format.

Create a call to the image variation API, passing in a source image.

```swift
guard let body = OpenAIImageVariationBody(image: UIImage(named: "image")!, size: .small, responseFormat: .base64) else {
    return
}
service?.sendImageVariation(with: body, completionHandler: { result in
    switch result {
    case .success(let response):
        if let image = response.data.first?.image {
            print(image)
        } else if let url = response.data.first?.url {
            print(url)
        }
    case .failure(let error):
        print(error.localizedDescription)
    }
})
```
The API will return an `OpenAIImageResponse` object containing the corresponding image items, either hosted URLs or base64-encoded data, depending on the requested response format.

For a full list of the supported models see [OpenAICompletionModelType.swift](https://github.com/sgusakovsky/OpenAIService/blob/main/Sources/OpenAIService/Models/Completion/OpenAICompletionModelType.swift), [OpenAIChatModelType.swift](https://github.com/sgusakovsky/OpenAIService/blob/main/Sources/OpenAIService/Models/Chat/OpenAIChatModelType.swift), [OpenAIEditsModelType.swift](https://github.com/sgusakovsky/OpenAIService/blob/main/Sources/OpenAIService/Models/Edits/OpenAIEditsModelType.swift). For more information on the models see the [OpenAI API Documentation](https://platform.openai.com/docs/models).

OpenAIService also supports Swift concurrency so you can use Swift’s async/await syntax to fetch completions.
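
The same applies to the image variation request added in this release. Below is a minimal async/await sketch that mirrors the completion-handler example above; it assumes `service` is a non-nil, already configured `OpenAIService` instance and that the code runs in an async context on iOS:

```swift
guard let body = OpenAIImageVariationBody(image: UIImage(named: "image")!, size: .small, responseFormat: .base64) else {
    return
}

do {
    // Await the request directly instead of passing a completion handler.
    let response = try await service.sendImageVariation(with: body)
    if let image = response.data.first?.image {
        print(image)   // first returned image item
    } else if let url = response.data.first?.url {
        print(url)     // or its URL, depending on the requested response format
    }
} catch {
    print(error.localizedDescription)
}
```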

@@ -10,6 +10,7 @@ import Foundation
import UIKit
#endif

/// https://platform.openai.com/docs/api-reference/images/create-edit
public struct OpenAIImageEditsBody {
public let image: FormData
public let mask: FormData
@@ -11,11 +11,11 @@ import Foundation
import UIKit
#endif

public struct OpenAIGenerationImageResponse: Codable {
public let data: [OpenAIGenerationImageData]
public struct OpenAIImageResponse: Codable {
public let data: [OpenAIImageData]
}

public enum OpenAIGenerationImageData: Codable {
public enum OpenAIImageData: Codable {
case url(String)
case base64(String)

@@ -0,0 +1,65 @@
//
// OpenAIImageVariationBody.swift
// OpenAIDemo
//
// Created by Gusakovsky, Sergey on 5.03.23.
//

import Foundation
#if os(iOS)
import UIKit
#endif

/// https://platform.openai.com/docs/api-reference/images/create-variation
public struct OpenAIImageVariationBody {
public let image: FormData
public let numberOfImages: Int
public let size: OpenAIGenerationImageSize
public let responseFormat: OpenAIGenerationImageResponseFormat
public let user: String?

public init(
image: Data,
numberOfImages: Int = 1,
size: OpenAIGenerationImageSize = .large,
responseFormat: OpenAIGenerationImageResponseFormat = .url,
user: String? = nil
) {
self.image = FormData(data: image, mimeType: "image/png", fileName: "image.png")
self.numberOfImages = numberOfImages
self.size = size
self.responseFormat = responseFormat
self.user = user
}

#if os(iOS)
public init?(
image: UIImage,
numberOfImages: Int = 1,
size: OpenAIGenerationImageSize = .large,
responseFormat: OpenAIGenerationImageResponseFormat = .url,
user: String? = nil
) {
guard let imageData = image.pngData() else { return nil }
self.image = FormData(data: imageData, mimeType: "image/png", fileName: "image.png")
self.numberOfImages = numberOfImages
self.size = size
self.responseFormat = responseFormat
self.user = user
}
#endif

public var body: [String: Any] {
var result: [String: Any] = [
"image": self.image,
"n": self.numberOfImages,
"size": self.size.rawValue,
"response_format": self.responseFormat.rawValue
]
if let user = self.user {
result["user"] = user
}

return result
}
}
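
For platforms without UIKit, the new body type can also be built from raw PNG data through the `Data` initializer above. A brief usage sketch; `pngData` is a placeholder assumed to hold PNG-encoded bytes:

```swift
import Foundation
import OpenAIService

// Placeholder: load your PNG bytes from disk or the network.
let pngData = Data()

let variationBody = OpenAIImageVariationBody(
    image: pngData,        // wrapped into FormData as "image.png"
    numberOfImages: 2,     // sent as "n"
    size: .large,          // default size
    responseFormat: .url   // or .base64
)

// `variationBody.body` is the [String: Any] dictionary used for the multipart/form-data request.
```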
7 changes: 5 additions & 2 deletions Sources/OpenAIService/Networking/OpenAIEndpoint.swift
@@ -13,6 +13,7 @@ enum OpenAIEndpoint {
case chatCompletions
case imagesGenerations
case imageEdits
case imageVariation

var path: String {
switch self {
@@ -26,19 +27,21 @@
return "/v1/images/generations"
case .imageEdits:
return "/v1/images/edits"
case .imageVariation:
return "/v1/images/variations"
}
}

var method: HTTPMethod {
switch self {
case .completions, .edits, .chatCompletions, .imagesGenerations, .imageEdits:
case .completions, .edits, .chatCompletions, .imagesGenerations, .imageEdits, .imageVariation:
return .post
}
}

func baseURL() -> String {
switch self {
case .completions, .edits, .chatCompletions, .imagesGenerations, .imageEdits:
case .completions, .edits, .chatCompletions, .imagesGenerations, .imageEdits, .imageVariation:
return "https://api.openai.com"
}
}
70 changes: 58 additions & 12 deletions Sources/OpenAIService/OpenAIService.swift
@@ -71,7 +71,7 @@ public final class OpenAIService {

/// Send a Edit request to the OpenAI API
/// - Parameters:
/// - body: Body of chat completion request
/// - body: Body of text edits request
/// - completionHandler: Returns an OpenAIEditsResponse Data Model
public func sendEdits(
with body: OpenAIEditsBody,
@@ -94,13 +94,13 @@

/// Send a Image generation request to the OpenAI API
/// - Parameters:
/// - body: Body of chat completion request
/// - completionHandler: Returns an OpenAIGenerationImageResponse Data Model
/// - body: Body of image generation request
/// - completionHandler: Returns an OpenAIImageResponse Data Model
public func sendImageGeneration(
with body: OpenAIGenerationImageBody,
networkQueue: DispatchQueue = .global(qos: .background),
responseQueue: DispatchQueue = .main,
completionHandler: @escaping (Result<OpenAIGenerationImageResponse, OpenAIAPIError>) -> Void
completionHandler: @escaping (Result<OpenAIImageResponse, OpenAIAPIError>) -> Void
) {
let endpoint = OpenAIEndpoint.imagesGenerations
guard let request = apiClient.prepareRequest(endpoint, body: body, config: config) else {
@@ -117,13 +117,13 @@

/// Send a Image edits request to the OpenAI API
/// - Parameters:
/// - body: Body of chat completion request
/// - completionHandler: Returns an OpenAIGenerationImageResponse Data Model
/// - body: Body of image edits request
/// - completionHandler: Returns an OpenAIImageResponse Data Model
public func sendImageEdits(
with body: OpenAIImageEditsBody,
networkQueue: DispatchQueue = .global(qos: .background),
responseQueue: DispatchQueue = .main,
completionHandler: @escaping (Result<OpenAIGenerationImageResponse, OpenAIAPIError>) -> Void
completionHandler: @escaping (Result<OpenAIImageResponse, OpenAIAPIError>) -> Void
) {
let endpoint = OpenAIEndpoint.imageEdits
guard let request = apiClient.prepareMultipartFormDataRequest(endpoint, body: body.body, config: config) else {
@@ -139,6 +139,30 @@
)
}

/// Send an Image variation request to the OpenAI API
/// - Parameters:
/// - body: Body of image variation request
/// - completionHandler: Returns an OpenAIImageResponse Data Model
public func sendImageVariation(
with body: OpenAIImageVariationBody,
networkQueue: DispatchQueue = .global(qos: .background),
responseQueue: DispatchQueue = .main,
completionHandler: @escaping (Result<OpenAIImageResponse, OpenAIAPIError>) -> Void
) {
let endpoint = OpenAIEndpoint.imageVariation
guard let request = apiClient.prepareMultipartFormDataRequest(endpoint, body: body.body, config: config) else {
completionHandler(.failure(.genericError(error: RequestError())))
return
}

apiClient.makeRequest(
request: request,
networkQueue: networkQueue,
responseQueue: responseQueue,
completionHandler: completionHandler
)
}

/// Send a Completion to the OpenAI API
/// - Parameters:
/// - body: Body of chat completion request
@@ -185,7 +209,7 @@

/// Send a Edit request to the OpenAI API
/// - Parameters:
/// - body: Body of chat completion request
/// - body: Body of text edits request
/// - Returns: Returns an OpenAIEditsResponse Data Model
@available(swift 5.5)
@available(macOS 10.15, iOS 13, watchOS 6, tvOS 13, *)
@@ -207,15 +231,15 @@

/// Send a Image generation request to the OpenAI API
/// - Parameters:
/// - body: Body of chat completion request
/// - body: Body of image generation request
/// - Returns: Returns an OpenAIGenerationImageResponse Data Model
@available(swift 5.5)
@available(macOS 10.15, iOS 13, watchOS 6, tvOS 13, *)
public func sendImageGeneration(
with body: OpenAIGenerationImageBody,
networkQueue: DispatchQueue = .global(qos: .background),
responseQueue: DispatchQueue = .main
) async throws -> OpenAIGenerationImageResponse {
) async throws -> OpenAIImageResponse {
return try await withCheckedThrowingContinuation { continuation in
sendImageGeneration(
with: body,
@@ -229,15 +253,15 @@

/// Send a Image edits request to the OpenAI API
/// - Parameters:
/// - body: Body of chat completion request
/// - body: Body of image edits request
/// - Returns: Returns an OpenAIGenerationImageResponse Data Model
@available(swift 5.5)
@available(macOS 10.15, iOS 13, watchOS 6, tvOS 13, *)
public func sendImageEdits(
with body: OpenAIImageEditsBody,
networkQueue: DispatchQueue = .global(qos: .background),
responseQueue: DispatchQueue = .main
) async throws -> OpenAIGenerationImageResponse {
) async throws -> OpenAIImageResponse {
return try await withCheckedThrowingContinuation { continuation in
sendImageEdits(
with: body,
@@ -248,4 +272,26 @@
}
}
}

/// Send an Image variation request to the OpenAI API
/// - Parameters:
/// - body: Body of image variation request
/// - Returns: Returns an OpenAIImageResponse Data Model
@available(swift 5.5)
@available(macOS 10.15, iOS 13, watchOS 6, tvOS 13, *)
public func sendImageVariation(
with body: OpenAIImageVariationBody,
networkQueue: DispatchQueue = .global(qos: .background),
responseQueue: DispatchQueue = .main
) async throws -> OpenAIImageResponse {
return try await withCheckedThrowingContinuation { continuation in
sendImageVariation(
with: body,
networkQueue: networkQueue,
responseQueue: responseQueue
) { result in
continuation.resume(with: result)
}
}
}
}
