From ba47dfcc3305905b072f6e9239ec747f6bf87de2 Mon Sep 17 00:00:00 2001
From: Igor <igorshelopaev@gmail.com>
Date: Wed, 3 Jul 2024 09:30:46 +0200
Subject: [PATCH 01/39] Update .DS_Store

---
 .DS_Store | Bin 6148 -> 8196 bytes
 1 file changed, 0 insertions(+), 0 deletions(-)

diff --git a/.DS_Store b/.DS_Store
index 260ac9a974c3892801255650cbc02a59f4f89405..c2caeabe0d368f5dec73f7a5b34191c7773ea08c 100644
GIT binary patch
literal 8196
zcmeHMJ#SMn6g@5p4WNpQ9YA^#D~jll{WT;KMSxJl0#j*H0wU>)O4_b(XXy`s1%3b{
z6H*6|7(4JWB7OuQCOFr2Tl?ihOo)nW$v%#KuYG*3^BlhbFpcf*60iuM!Yo)l$6`W~
ze#v8L&bM438v3K(TWWW*!Enq{8&m;RKow90Q~_1sUr~T(wxr6GbKmu9sS2n9|D^)*
zd`OrDQ;)er`{-a{uK>gnn{8sfDivblG4+@`WDk>Zu0-c*{1wAEceW?qF7=o@bnY<z
z@?reU#@|qkot@(+)*L2vsHG~P3XCftclQ!%=;0AY(fxhz+nca&kTsj@{cN3Wm9Njf
zzgQ^0TaNp?Gy8`RgLeZj_vy-Sd<{>q%b6cwgdqme{CK@O$tL9S{MzaL&cR#ZF*ep9
zJLWUXF5=}LE@d(r;663kSm|dSziN4~$V|0gHn9c-mJ#>*^&fF>$olf7v*yER%R2B6
z`A)WonAX<{^Y65`Hv8FTkIk}*dECGXuA+roXronF$17Td>!{*3*W1Bkw`#^^3mc40
zhCX&!>&E_b9}E5qWBx~RjXll9g6A2mlH)R(xJj&0_rhG}X~AQj>&dW9h8^Kp82{&N
z>nKX7-=%e7yf9Mt-k-01`D7zCW_7%~rdvf3!vQDbj=5u%9Tl0+N#w$q1eP&tvlovO
zGHzTcAATbO|8Q?jwuqS?o}rv4VV3=rddwY4leFvkFG6Ti1x}y>ryS=+x&ODOzyCjh
zD^tp<fGY5(3Yb!}-E7d>p6V04<djdC*O(=VUGC7ju!w#L@bl3hhO8$v6`#~&?vO1^
P`Xa!}poJ>%R~7gT4|ffT

delta 138
zcmZp1XfcprU|?W$DortDU=RQ@Ie-{MGjUE#6q~50C=3!{1dFA5=Hw?Q<>V)AOkByx
z$T3+)&|>oyL2<^()xugE)0$WovvY6=G6R(ZfdDs<a0MB&G4VU|WPTY(kUb!SSU@xr
N#3HcR=6Iet%m8x;7DWI6


From af553e5d7d14ed05ac33eddfc508ade92efd77a4 Mon Sep 17 00:00:00 2001
From: Igor <igorshelopaev@gmail.com>
Date: Wed, 3 Jul 2024 18:31:25 +0200
Subject: [PATCH 02/39] Update OpenAIDefaultLoader.swift

---
 .../viewModel/OpenAIDefaultLoader.swift                         | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)

diff --git a/Sources/openai-async-image-swiftui/viewModel/OpenAIDefaultLoader.swift b/Sources/openai-async-image-swiftui/viewModel/OpenAIDefaultLoader.swift
index 584c2ec..cd51f88 100644
--- a/Sources/openai-async-image-swiftui/viewModel/OpenAIDefaultLoader.swift
+++ b/Sources/openai-async-image-swiftui/viewModel/OpenAIDefaultLoader.swift
@@ -50,7 +50,7 @@ public final class OpenAIDefaultLoader : IOpenAILoader{
         
         let body = Input(prompt: prompt, size: size, response_format: .b64, n: 1)
         
-        let headers = ["Authorization": "Bearer \(endpoint.apiKey)"]
+        let headers = ["Content-Type": "application/json","Authorization": "Bearer \(endpoint.apiKey)"]
         let path = endpoint.path
         
         guard let client = client else{
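
The headers added in this patch are attached to every generation request issued by the default loader. A minimal usage sketch, assuming the public API shown elsewhere in this series (OpenAIImageEndpoint.get(with:), OpenAIDefaultLoader(endpoint:)); the prompt and the "sk-..." key are placeholders:

    import SwiftUI

    func generate() async throws -> Image {
        let endpoint = OpenAIImageEndpoint.get(with: "sk-...")   // placeholder key
        let loader = OpenAIDefaultLoader(endpoint: endpoint)
        // The POST now carries both headers:
        // ["Content-Type": "application/json", "Authorization": "Bearer <apiKey>"]
        return try await loader.load("A lighthouse at dawn", with: .dpi256)
    }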

From eb1a284c69769843f28237a6f3cbea9da9a28271 Mon Sep 17 00:00:00 2001
From: Igor <igorshelopaev@gmail.com>
Date: Wed, 3 Jul 2024 18:38:11 +0200
Subject: [PATCH 03/39] Update README.md

---
 README.md | 8 ++++----
 1 file changed, 4 insertions(+), 4 deletions(-)

diff --git a/README.md b/README.md
index a7677eb..df088af 100644
--- a/README.md
+++ b/README.md
@@ -9,6 +9,10 @@ DALL-E and DALL-E 2 are deep learning models developed by OpenAI to generate dig
 [![](https://img.shields.io/endpoint?url=https%3A%2F%2Fswiftpackageindex.com%2Fapi%2Fpackages%2FThe-Igor%2Fopenai-async-image-swiftui%2Fbadge%3Ftype%3Dswift-versions)](https://swiftpackageindex.com/The-Igor/openai-async-image-swiftui)
 [![](https://img.shields.io/endpoint?url=https%3A%2F%2Fswiftpackageindex.com%2Fapi%2Fpackages%2FThe-Igor%2Fopenai-async-image-swiftui%2Fbadge%3Ftype%3Dplatforms)](https://swiftpackageindex.com/The-Igor/openai-async-image-swiftui)
 
+## Example for the package
+
+[OpenAI AsyncImage SwiftUI example](https://github.com/The-Igor/openai-async-image-swiftui-example)
+
 ## Features
 - [x] Multiplatform iOS, macOS, watchOS and tvOS
 - [x] Customizable in term of SwiftUI Image specs [renderingMode, resizable,  antialiased...]
@@ -65,10 +69,6 @@ or with custom **ViewBuilder**
 - You need to have Xcode 13 installed in order to have access to Documentation Compiler (DocC)
 - Go to Product > Build Documentation or **⌃⇧⌘ D**
 
-## SwiftUI example for the package
-
-[OpenAI AsyncImage SwiftUI example](https://github.com/The-Igor/openai-async-image-swiftui-example)
-
 
 ![OpenAI AsyncImage SwiftUI](https://github.com/The-Igor/openai-async-image-swiftui/blob/main/image/sun_11.png) 
 

From 328d1269df5d9662a9c115c0190595ad40273834 Mon Sep 17 00:00:00 2001
From: Igor <igorshelopaev@gmail.com>
Date: Thu, 4 Jul 2024 09:59:17 +0200
Subject: [PATCH 04/39] Update OpenAIAsyncImage.swift

---
 .../OpenAIAsyncImage.swift                    | 68 +++++++++++--------
 1 file changed, 38 insertions(+), 30 deletions(-)

diff --git a/Sources/openai-async-image-swiftui/OpenAIAsyncImage.swift b/Sources/openai-async-image-swiftui/OpenAIAsyncImage.swift
index ce59921..f554d1f 100644
--- a/Sources/openai-async-image-swiftui/OpenAIAsyncImage.swift
+++ b/Sources/openai-async-image-swiftui/OpenAIAsyncImage.swift
@@ -1,6 +1,6 @@
 //
 //  OpenAIAsyncImage.swift
-//  
+//
 //
 //  Created by Igor on 18.02.2023.
 //
@@ -13,44 +13,44 @@ fileprivate typealias ImageSize = OpenAIImageSize
 @available(iOS 15.0, macOS 12.0, tvOS 15.0, watchOS 8.0, *)
 public struct OpenAIAsyncImage<Content: View, T: IOpenAILoader>: View {
     
-    /// Custom view builder tpl
+    /// Custom view builder template type alias
     public typealias ImageProcess = (ImageState) -> Content
         
-    /// Default loader
+    /// Default loader, injected from environment
     @Environment(\.openAIDefaultLoader) var defaultLoader : OpenAIDefaultLoader
     
     // MARK: - Private properties
     
-    /// OpenAI image
+    /// State variable to hold the OpenAI image
     @State private var image: Image?
         
-    /// Error
+    /// State variable to hold any errors encountered during loading
     @State private var error: Error?
         
-    /// Current task
+    /// State variable to hold the current task responsible for loading the image
     @State private var task : Task<Void, Never>?
    
     // MARK: - Config
     
-    /// A text description of the desired image(s). The maximum length is 1000 characters
+    /// A binding to the text prompt describing the desired image. The maximum length is 1000 characters
     @Binding var prompt : String
         
-    /// Custom loader
+    /// Optional custom loader conforming to `IOpenAILoader` protocol
     let loader : T?
         
-    /// Image size
+    /// The size of the image to be generated
     let size : OpenAIImageSize
         
-    /// Custom view builder tpl
+    /// Optional custom view builder template
     let tpl : ImageProcess?
     
-    // MARK: - Life circle
+    // MARK: - Life cycle
         
     /// - Parameters:
     ///   - prompt: A text description of the desired image(s). The maximum length is 1000 characters
     ///   - size: The size of the generated images. Must be one of 256x256, 512x512, or 1024x1024
-    ///   - tpl: Custom view builder tpl
-    ///   - loader: Custom loader
+    ///   - tpl: Custom view builder template
+    ///   - loader: Custom loader conforming to `IOpenAILoader`
     public init(
         prompt : Binding<String>,
         size : OpenAIImageSize = .dpi256,
@@ -86,9 +86,9 @@ public struct OpenAIAsyncImage<Content: View, T: IOpenAILoader>: View {
         }
     }
     
-    // MARK: - Private
+    // MARK: - Private methods
        
-    /// - Returns: Current image state status
+    /// - Returns: The current image state status
     private func getState () -> ImageState{
         
         if let image { return .loaded(image) }
@@ -97,17 +97,20 @@ public struct OpenAIAsyncImage<Content: View, T: IOpenAILoader>: View {
         return .loading
     }
         
-    /// Load using default loader
+    /// Load using the default loader
+    /// - Parameters:
+    ///   - prompt: The text prompt for generating the image
+    ///   - size: The desired size of the image
     /// - Returns: OpenAI image
     private func loadImageDefault(_ prompt : String, with size : ImageSize) async throws -> Image{
         try await defaultLoader.load(prompt, with: size)
     }
     
-    /// Load image by text
+    /// Load image using the provided or default loader
     /// - Parameters:
-    ///   - prompt: Text
-    ///   - size: Image size
-    /// - Returns: Open AI Image
+    ///   - prompt: The text prompt for generating the image
+    ///   - size: The desired size of the image
+    /// - Returns: OpenAI image if successful, otherwise nil
     private func loadImage(_ prompt : String, with size : ImageSize) async -> Image?{
         do{
             if let loader = loader{
@@ -124,25 +127,28 @@ public struct OpenAIAsyncImage<Content: View, T: IOpenAILoader>: View {
         }
     }
     
-    /// - Parameter value: OpenAI image
+    /// Sets the image on the main thread
+    /// - Parameter value: The image to be set
     @MainActor
     private func setImage(_ value : Image){
         image = value
     }
     
-    /// Clear properties
+    /// Clears the image and error state properties
     @MainActor
     private func clear(){
         image = nil
         error = nil
     }
     
+    /// Cancels the current loading task if any
     private func cancelTask(){
         task?.cancel()
         task = nil
     }
     
-    /// - Returns: Task to fetch OpenAI image
+    /// Creates and returns a task to fetch the OpenAI image
+    /// - Returns: A task that fetches the OpenAI image
     private func getTask() -> Task<Void, Never>{
         Task{
             if let image = await loadImage(prompt, with: size){
@@ -152,13 +158,14 @@ public struct OpenAIAsyncImage<Content: View, T: IOpenAILoader>: View {
     }
 }
 
-// MARK: - Extension public -
+// MARK: - Public extensions -
 
 public extension OpenAIAsyncImage where Content == EmptyView, T == OpenAIDefaultLoader{
     
+    /// Convenience initializer for default loader without custom view template
     /// - Parameters:
-    ///   - prompt: Text
-    ///   - size: Image size
+    ///   - prompt: The text prompt for generating the image
+    ///   - size: The desired size of the image
     init(
         prompt : Binding<String>,
         size : OpenAIImageSize = .dpi256
@@ -172,10 +179,11 @@ public extension OpenAIAsyncImage where Content == EmptyView, T == OpenAIDefault
 
 public extension OpenAIAsyncImage where T == OpenAIDefaultLoader{
     
+    /// Convenience initializer for default loader with custom view template
     /// - Parameters:
-    ///   - prompt: Text
-    ///   - size: Image size
-    ///   - tpl: View tpl
+    ///   - prompt: The text prompt for generating the image
+    ///   - size: The desired size of the image
+    ///   - tpl: Custom view template
     init(
         prompt : Binding<String>,
         size : OpenAIImageSize = .dpi256,
@@ -188,7 +196,7 @@ public extension OpenAIAsyncImage where T == OpenAIDefaultLoader{
     }
 }
 
-// MARK: - File private -
+// MARK: - File private functions -
 
 @ViewBuilder
 fileprivate func imageTpl(_ state : ImageState) -> some View{
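
A minimal SwiftUI sketch of the API documented in this patch, pairing the tpl view builder with the ImageState cases; the prompt text and styling are illustrative only:

    import SwiftUI

    struct ArtView: View {
        @State private var prompt = "A city skyline in watercolor"

        var body: some View {
            OpenAIAsyncImage(prompt: $prompt, size: .dpi256) { state in
                switch state {
                case .loading: ProgressView()
                case .loaded(let image): image.resizable().scaledToFit()
                case .loadError(let error): Text(error.localizedDescription)
                }
            }
        }
    }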

From 86f6523db639b5aebf3a0d8796b13f11071d88b9 Mon Sep 17 00:00:00 2001
From: Igor <igorshelopaev@gmail.com>
Date: Thu, 4 Jul 2024 10:04:02 +0200
Subject: [PATCH 05/39] Update OpenAIDefaultLoader.swift

---
 .../viewModel/OpenAIDefaultLoader.swift       | 31 +++++++++++--------
 1 file changed, 18 insertions(+), 13 deletions(-)

diff --git a/Sources/openai-async-image-swiftui/viewModel/OpenAIDefaultLoader.swift b/Sources/openai-async-image-swiftui/viewModel/OpenAIDefaultLoader.swift
index cd51f88..f5f4a59 100644
--- a/Sources/openai-async-image-swiftui/viewModel/OpenAIDefaultLoader.swift
+++ b/Sources/openai-async-image-swiftui/viewModel/OpenAIDefaultLoader.swift
@@ -1,6 +1,6 @@
 //
 //  OpenAIViewModel.swift
-//  
+//
 //
 //  Created by Igor on 28.02.2023.
 //
@@ -19,13 +19,14 @@ import AppKit.NSImage
 @available(iOS 15.0, macOS 12.0, tvOS 15.0, watchOS 8.0, *)
 public final class OpenAIDefaultLoader : IOpenAILoader{
     
-    /// Http async client
+    /// HTTP async client to handle requests
     private let client : Http.Proxy<JsonReader, JsonWriter>?
     
-    /// Set of params for making requests
+    /// Endpoint parameters required for making requests
     private let endpoint : IOpenAIImageEndpoint
     
-    /// - Parameter endpoint: Set of params for making requests
+    /// Initializes the loader with endpoint parameters
+    /// - Parameter endpoint: Set of parameters for making requests
     public init(endpoint : IOpenAIImageEndpoint) {
         
         self.endpoint = endpoint
@@ -38,18 +39,20 @@ public final class OpenAIDefaultLoader : IOpenAILoader{
         client = Http.Proxy(baseURL: url)
     }
        
-    /// Load image by text
+    /// Loads an image from the OpenAI API based on a text prompt
     /// - Parameters:
-    ///   - prompt: Text
-    ///   - size: Image size
-    /// - Returns: Open AI Image
+    ///   - prompt: The text prompt describing the desired image
+    ///   - size: The size of the generated image
+    /// - Returns: OpenAI Image
     public func load(
         _ prompt : String,
         with size : OpenAIImageSize
     ) async throws -> Image{
         
+        // Prepare the request body with the prompt and size
         let body = Input(prompt: prompt, size: size, response_format: .b64, n: 1)
         
+        // Set the request headers, including authorization
         let headers = ["Content-Type": "application/json","Authorization": "Bearer \(endpoint.apiKey)"]
         let path = endpoint.path
         
@@ -57,14 +60,16 @@ public final class OpenAIDefaultLoader : IOpenAILoader{
             throw AsyncImageErrors.clientIsNotDefined
         }
         
+        // Send the request and get the response
         let result: Http.Response<Output> = try await client.post(path: path, body: body, headers: headers)
         
+        // Convert the response to an image
         return try imageBase64(from: result.value)
     }
         
-    /// Decode base64 to Data
-    /// - Parameter output: Received format from the endpoint
-    /// - Returns: Decoded data
+    /// Decodes base64 encoded string to Data
+    /// - Parameter output: The output received from the endpoint
+    /// - Returns: Decoded Data
     private func decodeBase64(from output: Output) throws -> Data?{
         guard let base64 = output.firstImage else  {
             throw AsyncImageErrors.returnedNoImages
@@ -74,7 +79,7 @@ public final class OpenAIDefaultLoader : IOpenAILoader{
     }
     
 #if os(iOS) || os(watchOS) || os(tvOS)
-    /// Base64 encoder for iOS
+    /// Converts base64 encoded string to UIImage for iOS
     /// - Parameter output: OpenAI response type
     /// - Returns: UIImage
     private func imageBase64(from output: Output) throws -> Image {
@@ -90,7 +95,7 @@ public final class OpenAIDefaultLoader : IOpenAILoader{
 #endif
     
 #if os(macOS)
-    /// Base64 encoder for macOS
+    /// Converts base64 encoded string to NSImage for macOS
     /// - Parameter output: OpenAI response type
     /// - Returns: NSImage
     private func imageBase64(from output: Output) throws -> Image {

From bb4c3e0766573e1e7c2c14c003f2b1791c647b93 Mon Sep 17 00:00:00 2001
From: Igor <igorshelopaev@gmail.com>
Date: Thu, 4 Jul 2024 10:30:33 +0200
Subject: [PATCH 06/39] update

---
 .../enum/AsyncImageErrors.swift               | 10 ++---
 .../enum/ImageState.swift                     | 11 +++--
 .../model/Output.swift                        | 20 +++++----
 .../net/OpenAIImageEndpoint.swift             | 42 ++++++++++---------
 .../protocol/IOpenAIImageEndpoint.swift       | 17 ++++----
 .../protocol/IOpenAILoader.swift              | 14 +++----
 6 files changed, 58 insertions(+), 56 deletions(-)

diff --git a/Sources/openai-async-image-swiftui/enum/AsyncImageErrors.swift b/Sources/openai-async-image-swiftui/enum/AsyncImageErrors.swift
index 11055d4..62b11b6 100644
--- a/Sources/openai-async-image-swiftui/enum/AsyncImageErrors.swift
+++ b/Sources/openai-async-image-swiftui/enum/AsyncImageErrors.swift
@@ -7,17 +7,17 @@
 
 import Foundation
 
-/// Set of errors for ``OpenAIAsyncImage``
+/// Enumeration representing the various errors that can occur in `OpenAIAsyncImage`
 @available(iOS 15.0, macOS 12.0, tvOS 15.0, watchOS 8.0, *)
-enum AsyncImageErrors: Error, Equatable{
+enum AsyncImageErrors: Error, Equatable {
     
-    /// Could not create Image from uiImage
+    /// Error indicating that an image could not be created from a `uiImage`
     case imageInit
     
-    /// Client not found - the reason url in not valid
+    /// Error indicating that the client was not found, likely due to an invalid URL
     case clientIsNotDefined
     
-    /// response returned no images
+    /// Error indicating that the response returned no images
     case returnedNoImages
     
 }
diff --git a/Sources/openai-async-image-swiftui/enum/ImageState.swift b/Sources/openai-async-image-swiftui/enum/ImageState.swift
index c302014..e10ecb9 100644
--- a/Sources/openai-async-image-swiftui/enum/ImageState.swift
+++ b/Sources/openai-async-image-swiftui/enum/ImageState.swift
@@ -7,17 +7,16 @@
 
 import SwiftUI
 
-/// Set of states  for ``OpenAIAsyncImage``
+/// Enumeration representing the various states of `OpenAIAsyncImage`
 @available(iOS 15.0, macOS 12.0, tvOS 15.0, watchOS 8.0, *)
-public enum ImageState{
+public enum ImageState {
     
-    /// Loading currently
+    /// State when the image is currently being loaded
     case loading
     
-    /// Loaded
+    /// State when the image has been successfully loaded
     case loaded(Image)
     
-    /// There's an error happened while fetching
+    /// State when an error occurred during image fetching
     case loadError(Error)
-    
 }
diff --git a/Sources/openai-async-image-swiftui/model/Output.swift b/Sources/openai-async-image-swiftui/model/Output.swift
index b107e52..5919549 100644
--- a/Sources/openai-async-image-swiftui/model/Output.swift
+++ b/Sources/openai-async-image-swiftui/model/Output.swift
@@ -7,22 +7,24 @@
 
 import Foundation
 
-/// Output format for OpenAI API
+/// Structure representing the output format for the OpenAI API response
 @available(iOS 15.0, macOS 12.0, tvOS 15.0, watchOS 8.0, *)
-struct Output: Decodable{
+struct Output: Decodable {
     
-    /// Date and time
-    let created : Int
+    /// The creation date and time of the response in UNIX timestamp format
+    let created: Int
     
-    /// Set of images
+    /// An array of base64 encoded images
     let data: [Base64]
         
-    /// Fist image from the received data set
-    var firstImage : String?{
+    /// The first image from the received data set, if available
+    var firstImage: String? {
         data.first?.b64_json
     }
 }
 
-struct Base64: Decodable{
-    let b64_json : String
+/// Structure representing a base64 encoded image
+struct Base64: Decodable {
+    /// The base64 encoded image data in JSON format
+    let b64_json: String
 }
diff --git a/Sources/openai-async-image-swiftui/net/OpenAIImageEndpoint.swift b/Sources/openai-async-image-swiftui/net/OpenAIImageEndpoint.swift
index a038334..034ecbb 100644
--- a/Sources/openai-async-image-swiftui/net/OpenAIImageEndpoint.swift
+++ b/Sources/openai-async-image-swiftui/net/OpenAIImageEndpoint.swift
@@ -7,45 +7,47 @@
 
 import Foundation
 
-/// Set of specs for access to OpenAPI image resource
+/// Struct providing specifications for accessing the OpenAI image resource
 @available(iOS 15.0, macOS 12.0, tvOS 15.0, watchOS 8.0, *)
-public struct OpenAIImageEndpoint: IOpenAIImageEndpoint{
+public struct OpenAIImageEndpoint: IOpenAIImageEndpoint {
         
-    // MARK: - Static
+    // MARK: - Static Properties
     
-    /// Base url to OpenAPI image resource
+    /// Static base URL for the OpenAI image resource
     public static var urlString = "https://api.openai.com"
     
-    /// Path to the point
+    /// Static path to the specific endpoint for generating images
     public static var path = "/v1/images/generations"
     
-    /// - Parameter apiKey: Api key for access
-    /// - Returns: Endpoint
-    static public func get(with apiKey: String) -> Self{
+    /// Creates an instance of `OpenAIImageEndpoint` with the provided API key
+    /// - Parameter apiKey: API key for accessing the OpenAI API
+    /// - Returns: Configured instance of `OpenAIImageEndpoint`
+    public static func get(with apiKey: String) -> Self {
         .init(
             urlString: Self.urlString,
             apiKey: apiKey,
-            path: Self.path)
+            path: Self.path
+        )
     }
     
-    // MARK: - Config
+    // MARK: - Instance Properties
     
-    /// Base url to OpenAPI image resource
+    /// Base URL for the OpenAI image resource
     public let urlString: String
     
-    /// Path to the point
-    public let path : String
+    /// Path to the specific endpoint
+    public let path: String
     
-    /// Api key for access
-    public let apiKey : String
+    /// API key for authentication and access to the OpenAI API
+    public let apiKey: String
 
-    // MARK: - Life circle
+    // MARK: - Initializer
     
+    /// Initializes a new instance of `OpenAIImageEndpoint`
     /// - Parameters:
-    ///   - urlString: Base url to OpenAPI image resource
-    ///   - httpMethod: Http method
-    ///   - apiKey: Api key for access
-    ///   - path: Path to the point
+    ///   - urlString: Base URL for the OpenAI image resource
+    ///   - apiKey: API key for accessing the OpenAI API
+    ///   - path: Path to the specific endpoint
     public init(urlString: String, apiKey: String, path: String) {
         self.urlString = urlString
         self.apiKey = apiKey
diff --git a/Sources/openai-async-image-swiftui/protocol/IOpenAIImageEndpoint.swift b/Sources/openai-async-image-swiftui/protocol/IOpenAIImageEndpoint.swift
index 0b30dac..636a8fb 100644
--- a/Sources/openai-async-image-swiftui/protocol/IOpenAIImageEndpoint.swift
+++ b/Sources/openai-async-image-swiftui/protocol/IOpenAIImageEndpoint.swift
@@ -7,18 +7,17 @@
 
 import Foundation
 
-/// Defines access API to OpenAI image API
+/// Protocol defining access to the OpenAI image API
 @available(iOS 15.0, macOS 12.0, tvOS 15.0, watchOS 8.0, *)
-public protocol IOpenAIImageEndpoint{
+public protocol IOpenAIImageEndpoint {
     
-    /// Base url to OpenAPI image resource
-    var urlString : String { get }
+    /// Base URL for the OpenAI image resource
+    var urlString: String { get }
     
-    /// Path to the point
-    var path : String { get }
-    
-    /// Api key for access
-    var apiKey : String { get }
+    /// Path to the specific endpoint within the OpenAI API
+    var path: String { get }
     
+    /// API key for authentication and access to the OpenAI API
+    var apiKey: String { get }
 
 }
diff --git a/Sources/openai-async-image-swiftui/protocol/IOpenAILoader.swift b/Sources/openai-async-image-swiftui/protocol/IOpenAILoader.swift
index d4ee384..a6b0198 100644
--- a/Sources/openai-async-image-swiftui/protocol/IOpenAILoader.swift
+++ b/Sources/openai-async-image-swiftui/protocol/IOpenAILoader.swift
@@ -7,14 +7,14 @@
 
 import SwiftUI
 
-/// Loader for getting images
+/// Protocol defining the loader for fetching images from the OpenAI API
 @available(iOS 15.0, macOS 12.0, tvOS 15.0, watchOS 8.0, *)
-public protocol IOpenAILoader{
+public protocol IOpenAILoader {
        
-    /// Load image by text
+    /// Asynchronously loads an image based on a provided text prompt and size
     /// - Parameters:
-    ///   - prompt: Text
-    ///   - size: Image size
-    /// - Returns: Open AI Image
-    func load(_ prompt : String, with size : OpenAIImageSize) async throws  -> Image
+    ///   - prompt: The text prompt describing the desired image
+    ///   - size: The size of the generated image
+    /// - Returns: The generated OpenAI image
+    func load(_ prompt: String, with size: OpenAIImageSize) async throws -> Image
 }

From cb5658d1a27d47413d41fc8fef48274e7e44bf52 Mon Sep 17 00:00:00 2001
From: Igor <igorshelopaev@gmail.com>
Date: Thu, 4 Jul 2024 14:11:07 +0200
Subject: [PATCH 07/39] updated loader

---
 .../enum/AsyncImageErrors.swift               | 19 ++++-
 .../viewModel/OpenAIDefaultLoader.swift       | 70 ++++++++++++-------
 2 files changed, 61 insertions(+), 28 deletions(-)

diff --git a/Sources/openai-async-image-swiftui/enum/AsyncImageErrors.swift b/Sources/openai-async-image-swiftui/enum/AsyncImageErrors.swift
index 62b11b6..12b55d1 100644
--- a/Sources/openai-async-image-swiftui/enum/AsyncImageErrors.swift
+++ b/Sources/openai-async-image-swiftui/enum/AsyncImageErrors.swift
@@ -7,7 +7,6 @@
 
 import Foundation
 
-/// Enumeration representing the various errors that can occur in `OpenAIAsyncImage`
 @available(iOS 15.0, macOS 12.0, tvOS 15.0, watchOS 8.0, *)
 enum AsyncImageErrors: Error, Equatable {
     
@@ -20,4 +19,22 @@ enum AsyncImageErrors: Error, Equatable {
     /// Error indicating that the response returned no images
     case returnedNoImages
     
+    /// Status is not valid
+    case httpStatus(String)
+}
+
+@available(iOS 15.0, macOS 12.0, tvOS 15.0, watchOS 8.0, *)
+extension AsyncImageErrors: LocalizedError {
+    public var errorDescription: String? {
+        switch self {
+        case .imageInit:
+            return NSLocalizedString("Unable to create image from the provided data.", comment: "")
+        case .clientIsNotDefined:
+            return NSLocalizedString("Client not found. The URL might be invalid.", comment: "")
+        case .returnedNoImages:
+            return NSLocalizedString("The response did not contain any images.", comment: "")
+        case .httpStatus(let data):
+            return NSLocalizedString("HTTP status error: \(data).", comment: "")
+        }
+    }
 }
diff --git a/Sources/openai-async-image-swiftui/viewModel/OpenAIDefaultLoader.swift b/Sources/openai-async-image-swiftui/viewModel/OpenAIDefaultLoader.swift
index f5f4a59..45e662e 100644
--- a/Sources/openai-async-image-swiftui/viewModel/OpenAIDefaultLoader.swift
+++ b/Sources/openai-async-image-swiftui/viewModel/OpenAIDefaultLoader.swift
@@ -17,21 +17,20 @@ import AppKit.NSImage
 #endif
 
 @available(iOS 15.0, macOS 12.0, tvOS 15.0, watchOS 8.0, *)
-public final class OpenAIDefaultLoader : IOpenAILoader{
+public final class OpenAIDefaultLoader: IOpenAILoader {
     
     /// HTTP async client to handle requests
-    private let client : Http.Proxy<JsonReader, JsonWriter>?
+    private let client: Http.Proxy<JsonReader, JsonWriter>?
     
     /// Endpoint parameters required for making requests
-    private let endpoint : IOpenAIImageEndpoint
+    private let endpoint: IOpenAIImageEndpoint
     
     /// Initializes the loader with endpoint parameters
     /// - Parameter endpoint: Set of parameters for making requests
-    public init(endpoint : IOpenAIImageEndpoint) {
-        
+    public init(endpoint: IOpenAIImageEndpoint) {
         self.endpoint = endpoint
         
-        guard let url = URL(string: endpoint.urlString) else{
+        guard let url = URL(string: endpoint.urlString) else {
             client = nil
             return
         }
@@ -45,33 +44,52 @@ public final class OpenAIDefaultLoader : IOpenAILoader{
     ///   - size: The size of the generated image
     /// - Returns: OpenAI Image
     public func load(
-        _ prompt : String,
-        with size : OpenAIImageSize
-    ) async throws -> Image{
-        
-        // Prepare the request body with the prompt and size
-        let body = Input(prompt: prompt, size: size, response_format: .b64, n: 1)
-        
-        // Set the request headers, including authorization
-        let headers = ["Content-Type": "application/json","Authorization": "Bearer \(endpoint.apiKey)"]
-        let path = endpoint.path
+        _ prompt: String,
+        with size: OpenAIImageSize
+    ) async throws -> Image {
         
-        guard let client = client else{
+        guard let client = client else {
             throw AsyncImageErrors.clientIsNotDefined
         }
         
-        // Send the request and get the response
-        let result: Http.Response<Output> = try await client.post(path: path, body: body, headers: headers)
+        do {
+            let (path, body, headers) = prepareRequest(prompt: prompt, size: size)
+            let result: Http.Response<Output> = try await client.post(path: path, body: body, headers: headers)
+            return try imageBase64(from: result.value)
+            
+        } catch {
+           try handleRequestError(error)
+        }
+    }
+    
+    /// Prepares the request with the necessary parameters
+    /// - Parameters:
+    ///   - prompt: The text prompt describing the desired image
+    ///   - size: The size of the generated image
+    /// - Returns: A tuple containing the path, body, and headers for the request
+    private func prepareRequest(prompt: String, size: OpenAIImageSize) -> (String, Input, [String: String]) {
+        let body = Input(prompt: prompt, size: size, response_format: .b64, n: 1)
+        let headers = ["Content-Type": "application/json", "Authorization": "Bearer \(endpoint.apiKey)"]
+        let path = endpoint.path
+        return (path, body, headers)
+    }
+    
+    /// Handles errors that occur during the request
+    /// - Parameter error: The error that occurred
+    private func handleRequestError(_ error: Error) throws -> Never {
+        if case let Http.Errors.status(_, _, data) = error, let responseData = data {
+            let data = String(data: responseData, encoding: .utf8) ?? "Unable to decode data"
+            throw AsyncImageErrors.httpStatus(data)
+        }
         
-        // Convert the response to an image
-        return try imageBase64(from: result.value)
+        throw error
     }
         
     /// Decodes base64 encoded string to Data
     /// - Parameter output: The output received from the endpoint
     /// - Returns: Decoded Data
-    private func decodeBase64(from output: Output) throws -> Data?{
-        guard let base64 = output.firstImage else  {
+    private func decodeBase64(from output: Output) throws -> Data? {
+        guard let base64 = output.firstImage else {
             throw AsyncImageErrors.returnedNoImages
         }
         
@@ -83,10 +101,9 @@ public final class OpenAIDefaultLoader : IOpenAILoader{
     /// - Parameter output: OpenAI response type
     /// - Returns: UIImage
     private func imageBase64(from output: Output) throws -> Image {
-        
         let data = try decodeBase64(from: output)
         
-        if let data, let image = UIImage(data: data){
+        if let data, let image = UIImage(data: data) {
             return Image(uiImage: image)
         }
         
@@ -99,10 +116,9 @@ public final class OpenAIDefaultLoader : IOpenAILoader{
     /// - Parameter output: OpenAI response type
     /// - Returns: NSImage
     private func imageBase64(from output: Output) throws -> Image {
-        
         let data = try decodeBase64(from: output)
         
-        if let data, let image = NSImage(data: data){
+        if let data, let image = NSImage(data: data) {
             return Image(nsImage: image)
         }
         

From a33a0dbae4f731f0fe032fb9b874e5965b39f830 Mon Sep 17 00:00:00 2001
From: Igor <igorshelopaev@gmail.com>
Date: Sun, 7 Jul 2024 13:08:26 +0200
Subject: [PATCH 08/39] refactor error handling

---
 .../enum/AsyncImageErrors.swift               | 69 ++++++++++++++-----
 .../viewModel/OpenAIDefaultLoader.swift       | 12 +---
 2 files changed, 54 insertions(+), 27 deletions(-)

diff --git a/Sources/openai-async-image-swiftui/enum/AsyncImageErrors.swift b/Sources/openai-async-image-swiftui/enum/AsyncImageErrors.swift
index 12b55d1..f8002f6 100644
--- a/Sources/openai-async-image-swiftui/enum/AsyncImageErrors.swift
+++ b/Sources/openai-async-image-swiftui/enum/AsyncImageErrors.swift
@@ -1,26 +1,21 @@
 //
 //  AsyncImageErrors.swift
-//  
+//
 //
 //  Created by Igor on 18.02.2023.
 //
 
 import Foundation
+import async_http_client
 
 @available(iOS 15.0, macOS 12.0, tvOS 15.0, watchOS 8.0, *)
-enum AsyncImageErrors: Error, Equatable {
-    
-    /// Error indicating that an image could not be created from a `uiImage`
-    case imageInit
-    
-    /// Error indicating that the client was not found, likely due to an invalid URL
-    case clientIsNotDefined
-    
-    /// Error indicating that the response returned no images
-    case returnedNoImages
-    
-    /// Status is not valid
-    case httpStatus(String)
+/// Enum representing different errors that can occur when loading images asynchronously
+enum AsyncImageErrors: Error {
+    case imageInit               // Error initializing an image from data
+    case clientIsNotDefined      // HTTP client is not defined
+    case returnedNoImages        // No images were returned in the response
+    case httpStatus(String)      // HTTP status error with a message
+    case responseError(Error)    // Generic response error
 }
 
 @available(iOS 15.0, macOS 12.0, tvOS 15.0, watchOS 8.0, *)
@@ -33,8 +28,50 @@ extension AsyncImageErrors: LocalizedError {
             return NSLocalizedString("Client not found. The URL might be invalid.", comment: "")
         case .returnedNoImages:
             return NSLocalizedString("The response did not contain any images.", comment: "")
-        case .httpStatus(let data):
-            return NSLocalizedString("HTTP status error: \(data).", comment: "")
+        case .httpStatus(let status):
+            return NSLocalizedString("HTTP status error: \(status).", comment: "")
+        case .responseError(let error):
+            return error.localizedDescription
+        }
+    }
+}
+
+@available(iOS 15.0, macOS 12.0, tvOS 15.0, watchOS 8.0, *)
+extension AsyncImageErrors {
+    /// Handles errors that occur during the request
+    /// - Parameter error: The error that occurred
+    /// - Returns: An instance of `AsyncImageErrors`
+    static func handleRequest(_ error: Error) -> AsyncImageErrors {
+        if let httpError = error as? Http.Errors,
+           case let .status(_, _, data) = httpError,
+           let responseData = data {
+            return decodeErrorResponse(from: responseData)
         }
+        return .responseError(error)
     }
 }
+
+/// Decodes the error response data
+/// - Parameter responseData: The response data to decode
+/// - Returns: An instance of `AsyncImageErrors` with a decoded message
+fileprivate func decodeErrorResponse(from responseData: Data) -> AsyncImageErrors {
+    if let apiResponse = try? JSONDecoder().decode(ErrorResponseWrapper.self, from: responseData) {
+        return .httpStatus(apiResponse.error.message)
+    }
+    
+    let dataString = String(data: responseData, encoding: .utf8) ?? "Unable to decode data"
+    return .httpStatus(dataString)
+}
+
+/// Defines the structure for the inner "error" object in the API response
+fileprivate struct ErrorResponse: Decodable {
+    let code: String?
+    let message: String
+    let param: String?
+    let type: String
+}
+
+/// Defines the structure for the overall response wrapper containing the error object
+fileprivate struct ErrorResponseWrapper: Decodable {
+    let error: ErrorResponse
+}
diff --git a/Sources/openai-async-image-swiftui/viewModel/OpenAIDefaultLoader.swift b/Sources/openai-async-image-swiftui/viewModel/OpenAIDefaultLoader.swift
index 45e662e..d975afb 100644
--- a/Sources/openai-async-image-swiftui/viewModel/OpenAIDefaultLoader.swift
+++ b/Sources/openai-async-image-swiftui/viewModel/OpenAIDefaultLoader.swift
@@ -58,7 +58,7 @@ public final class OpenAIDefaultLoader: IOpenAILoader {
             return try imageBase64(from: result.value)
             
         } catch {
-           try handleRequestError(error)
+            throw AsyncImageErrors.handleRequest(error)
         }
     }
     
@@ -74,16 +74,6 @@ public final class OpenAIDefaultLoader: IOpenAILoader {
         return (path, body, headers)
     }
     
-    /// Handles errors that occur during the request
-    /// - Parameter error: The error that occurred
-    private func handleRequestError(_ error: Error) throws -> Never {
-        if case let Http.Errors.status(_, _, data) = error, let responseData = data {
-            let data = String(data: responseData, encoding: .utf8) ?? "Unable to decode data"
-            throw AsyncImageErrors.httpStatus(data)
-        }
-        
-        throw error
-    }
         
     /// Decodes base64 encoded string to Data
     /// - Parameter output: The output received from the endpoint
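
A sketch of the payload shape decodeErrorResponse expects, using local mirror types for illustration (the package's ErrorResponse and ErrorResponseWrapper are fileprivate); the JSON literal is representative, not a captured response:

    import Foundation

    struct SampleError: Decodable { let code: String?; let message: String; let param: String?; let type: String }
    struct SampleWrapper: Decodable { let error: SampleError }

    func exampleMessage() -> String? {
        let json = #"{"error":{"code":null,"message":"Incorrect API key provided.","param":null,"type":"invalid_request_error"}}"#
        // The decoded message becomes the value carried by AsyncImageErrors.httpStatus
        // and surfaces to callers through localizedDescription.
        return (try? JSONDecoder().decode(SampleWrapper.self, from: Data(json.utf8)))?.error.message
    }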

From cbe62bceb22be14be0d645e61bce485207ae8f3f Mon Sep 17 00:00:00 2001
From: Igor <igorshelopaev@gmail.com>
Date: Sun, 7 Jul 2024 16:42:38 +0200
Subject: [PATCH 09/39] Update AsyncImageErrors.swift

---
 .../openai-async-image-swiftui/enum/AsyncImageErrors.swift    | 4 ++--
 1 file changed, 2 insertions(+), 2 deletions(-)

diff --git a/Sources/openai-async-image-swiftui/enum/AsyncImageErrors.swift b/Sources/openai-async-image-swiftui/enum/AsyncImageErrors.swift
index f8002f6..7546576 100644
--- a/Sources/openai-async-image-swiftui/enum/AsyncImageErrors.swift
+++ b/Sources/openai-async-image-swiftui/enum/AsyncImageErrors.swift
@@ -28,8 +28,8 @@ extension AsyncImageErrors: LocalizedError {
             return NSLocalizedString("Client not found. The URL might be invalid.", comment: "")
         case .returnedNoImages:
             return NSLocalizedString("The response did not contain any images.", comment: "")
-        case .httpStatus(let status):
-            return NSLocalizedString("HTTP status error: \(status).", comment: "")
+        case .httpStatus(let description):
+            return NSLocalizedString(description, comment: "")
         case .responseError(let error):
             return error.localizedDescription
         }

From a8bf018e2a3e6c9971e429b6655b10784c5fb5e7 Mon Sep 17 00:00:00 2001
From: Igor <igorshelopaev@gmail.com>
Date: Wed, 24 Jul 2024 09:33:04 +0200
Subject: [PATCH 10/39] update

---
 .../environmentKey/OpenAIAsyncImageLoaderKey.swift            | 2 +-
 .../openai-async-image-swiftui/net/OpenAIImageEndpoint.swift  | 4 ++--
 2 files changed, 3 insertions(+), 3 deletions(-)

diff --git a/Sources/openai-async-image-swiftui/environmentKey/OpenAIAsyncImageLoaderKey.swift b/Sources/openai-async-image-swiftui/environmentKey/OpenAIAsyncImageLoaderKey.swift
index 98fe531..d08a514 100644
--- a/Sources/openai-async-image-swiftui/environmentKey/OpenAIAsyncImageLoaderKey.swift
+++ b/Sources/openai-async-image-swiftui/environmentKey/OpenAIAsyncImageLoaderKey.swift
@@ -12,7 +12,7 @@ import SwiftUI
 public struct OpenAIDefaultLoaderKey : EnvironmentKey{
     public typealias Value = OpenAIDefaultLoader
     
-    public static var defaultValue = OpenAIDefaultLoader(endpoint: OpenAIImageEndpoint.get(with: ""))
+    public static let defaultValue = OpenAIDefaultLoader(endpoint: OpenAIImageEndpoint.get(with: ""))
 }
 
 public extension EnvironmentValues{
diff --git a/Sources/openai-async-image-swiftui/net/OpenAIImageEndpoint.swift b/Sources/openai-async-image-swiftui/net/OpenAIImageEndpoint.swift
index 034ecbb..c9242ce 100644
--- a/Sources/openai-async-image-swiftui/net/OpenAIImageEndpoint.swift
+++ b/Sources/openai-async-image-swiftui/net/OpenAIImageEndpoint.swift
@@ -14,10 +14,10 @@ public struct OpenAIImageEndpoint: IOpenAIImageEndpoint {
     // MARK: - Static Properties
     
     /// Static base URL for the OpenAI image resource
-    public static var urlString = "https://api.openai.com"
+    public static let urlString = "https://api.openai.com"
     
     /// Static path to the specific endpoint for generating images
-    public static var path = "/v1/images/generations"
+    public static let path = "/v1/images/generations"
     
     /// Creates an instance of `OpenAIImageEndpoint` with the provided API key
     /// - Parameter apiKey: API key for accessing the OpenAI API
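
A minimal sketch of overriding the environment default, assuming the EnvironmentValues extension exposes a writable openAIDefaultLoader property (only its getter is visible in this series); the key and prompt are placeholders:

    import SwiftUI

    struct RootView: View {
        @State private var prompt = "A fox in the snow"

        var body: some View {
            OpenAIAsyncImage(prompt: $prompt)
                .environment(\.openAIDefaultLoader,
                             OpenAIDefaultLoader(endpoint: OpenAIImageEndpoint.get(with: "sk-...")))
        }
    }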

From 323062b1874a9623f75414c230c5321fafebe82f Mon Sep 17 00:00:00 2001
From: Igor <igorshelopaev@gmail.com>
Date: Thu, 25 Jul 2024 12:28:01 +0200
Subject: [PATCH 11/39] update

---
 Package.resolved | 8 ++++----
 Package.swift    | 2 +-
 2 files changed, 5 insertions(+), 5 deletions(-)

diff --git a/Package.resolved b/Package.resolved
index c686312..1111124 100644
--- a/Package.resolved
+++ b/Package.resolved
@@ -5,8 +5,8 @@
       "kind" : "remoteSourceControl",
       "location" : "https://github.com/The-Igor/async-http-client.git",
       "state" : {
-        "revision" : "8be7d1c1f036350a8109b075052a71d3353ad366",
-        "version" : "1.4.4"
+        "revision" : "9a56240b21baff71f5399194a1f2af9ab8c4c076",
+        "version" : "1.4.6"
       }
     },
     {
@@ -14,8 +14,8 @@
       "kind" : "remoteSourceControl",
       "location" : "https://github.com/The-Igor/retry-policy-service.git",
       "state" : {
-        "revision" : "46ded002161a95c6b08ddd02c3f319891c773d14",
-        "version" : "1.0.0"
+        "revision" : "2a6a1f057fbf77337dfc73db98bd3d538127b3e2",
+        "version" : "1.0.1"
       }
     }
   ],
diff --git a/Package.swift b/Package.swift
index 793984b..f587b91 100644
--- a/Package.swift
+++ b/Package.swift
@@ -14,7 +14,7 @@ let package = Package(
     ],
     dependencies: [
         // Dependencies declare other packages that this package depends on.
-        .package(url: "https://github.com/The-Igor/async-http-client.git", from: "1.4.4")
+        .package(url: "https://github.com/The-Igor/async-http-client.git", from: "1.4.6")
     ],
     targets: [
         // Targets are the basic building blocks of a package. A target can define a module or a test suite.

From 9b02d7e476b36e9ce8497fbc38e9c7799d128640 Mon Sep 17 00:00:00 2001
From: Igor <igorshelopaev@gmail.com>
Date: Fri, 26 Jul 2024 09:43:25 +0200
Subject: [PATCH 12/39] update

---
 .../protocol/IOpenAIImageEndpoint.swift       |  2 +-
 .../viewModel/OpenAIDefaultLoader.swift       | 27 +++++++++----------
 2 files changed, 14 insertions(+), 15 deletions(-)

diff --git a/Sources/openai-async-image-swiftui/protocol/IOpenAIImageEndpoint.swift b/Sources/openai-async-image-swiftui/protocol/IOpenAIImageEndpoint.swift
index 636a8fb..8bf9894 100644
--- a/Sources/openai-async-image-swiftui/protocol/IOpenAIImageEndpoint.swift
+++ b/Sources/openai-async-image-swiftui/protocol/IOpenAIImageEndpoint.swift
@@ -9,7 +9,7 @@ import Foundation
 
 /// Protocol defining access to the OpenAI image API
 @available(iOS 15.0, macOS 12.0, tvOS 15.0, watchOS 8.0, *)
-public protocol IOpenAIImageEndpoint {
+public protocol IOpenAIImageEndpoint: Sendable {
     
     /// Base URL for the OpenAI image resource
     var urlString: String { get }
diff --git a/Sources/openai-async-image-swiftui/viewModel/OpenAIDefaultLoader.swift b/Sources/openai-async-image-swiftui/viewModel/OpenAIDefaultLoader.swift
index d975afb..8dfa1bf 100644
--- a/Sources/openai-async-image-swiftui/viewModel/OpenAIDefaultLoader.swift
+++ b/Sources/openai-async-image-swiftui/viewModel/OpenAIDefaultLoader.swift
@@ -17,7 +17,7 @@ import AppKit.NSImage
 #endif
 
 @available(iOS 15.0, macOS 12.0, tvOS 15.0, watchOS 8.0, *)
-public final class OpenAIDefaultLoader: IOpenAILoader {
+public final class OpenAIDefaultLoader: IOpenAILoader, Sendable {
     
     /// HTTP async client to handle requests
     private let client: Http.Proxy<JsonReader, JsonWriter>?
@@ -85,34 +85,33 @@ public final class OpenAIDefaultLoader: IOpenAILoader {
         
         return Data(base64Encoded: base64)
     }
-    
-#if os(iOS) || os(watchOS) || os(tvOS)
-    /// Converts base64 encoded string to UIImage for iOS
+       
+#if os(macOS)
+    /// Converts base64 encoded string to NSImage for macOS
     /// - Parameter output: OpenAI response type
-    /// - Returns: UIImage
+    /// - Returns: NSImage
     private func imageBase64(from output: Output) throws -> Image {
         let data = try decodeBase64(from: output)
         
-        if let data, let image = UIImage(data: data) {
-            return Image(uiImage: image)
+        if let data, let image = NSImage(data: data) {
+            return Image(nsImage: image)
         }
         
         throw AsyncImageErrors.imageInit
     }
-#endif
-    
-#if os(macOS)
-    /// Converts base64 encoded string to NSImage for macOS
+#else
+    /// Converts base64 encoded string to UIImage for iOS
     /// - Parameter output: OpenAI response type
-    /// - Returns: NSImage
+    /// - Returns: UIImage
     private func imageBase64(from output: Output) throws -> Image {
         let data = try decodeBase64(from: output)
         
-        if let data, let image = NSImage(data: data) {
-            return Image(nsImage: image)
+        if let data, let image = UIImage(data: data) {
+            return Image(uiImage: image)
         }
         
         throw AsyncImageErrors.imageInit
     }
 #endif
+    
 }

From aeda73110c177e4be7c6829485bb0d9a76419723 Mon Sep 17 00:00:00 2001
From: Igor <igorshelopaev@gmail.com>
Date: Wed, 21 Aug 2024 12:11:34 +0200
Subject: [PATCH 13/39] Update README.md

---
 README.md | 3 +--
 1 file changed, 1 insertion(+), 2 deletions(-)

diff --git a/README.md b/README.md
index df088af..f5fc301 100644
--- a/README.md
+++ b/README.md
@@ -6,8 +6,7 @@ You just type in any your idea and AI will give you an art solution
 
 DALL-E and DALL-E 2 are deep learning models developed by OpenAI to generate digital images from natural language descriptions, called "prompts"
 
-[![](https://img.shields.io/endpoint?url=https%3A%2F%2Fswiftpackageindex.com%2Fapi%2Fpackages%2FThe-Igor%2Fopenai-async-image-swiftui%2Fbadge%3Ftype%3Dswift-versions)](https://swiftpackageindex.com/The-Igor/openai-async-image-swiftui)
-[![](https://img.shields.io/endpoint?url=https%3A%2F%2Fswiftpackageindex.com%2Fapi%2Fpackages%2FThe-Igor%2Fopenai-async-image-swiftui%2Fbadge%3Ftype%3Dplatforms)](https://swiftpackageindex.com/The-Igor/openai-async-image-swiftui)
+[![](https://img.shields.io/endpoint?url=https%3A%2F%2Fswiftpackageindex.com%2Fapi%2Fpackages%2Figor11191708%2Fopenai-async-image-swiftui%2Fbadge%3Ftype%3Dplatforms)](https://swiftpackageindex.com/igor11191708/openai-async-image-swiftui)
 
 ## Example for the package
 

From d42caed08e1a914f9b2ace0fddeb9ed0c0f0a50a Mon Sep 17 00:00:00 2001
From: Software Engineer <igorshelopaev@gmail.com>
Date: Sat, 14 Sep 2024 18:21:43 +0300
Subject: [PATCH 14/39] Create .spi.yml

---
 .spi.yml | 4 ++++
 1 file changed, 4 insertions(+)
 create mode 100644 .spi.yml

diff --git a/.spi.yml b/.spi.yml
new file mode 100644
index 0000000..1600437
--- /dev/null
+++ b/.spi.yml
@@ -0,0 +1,4 @@
+version: 1
+builder:
+  configs:
+    - documentation_targets: [swiftui-loop-videoplayer]

From 768e1ad59053b9f48097296647bc87d8238656d3 Mon Sep 17 00:00:00 2001
From: Software Engineer <igorshelopaev@gmail.com>
Date: Sat, 14 Sep 2024 18:26:59 +0300
Subject: [PATCH 15/39] Update .spi.yml

---
 .spi.yml | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)

diff --git a/.spi.yml b/.spi.yml
index 1600437..544a002 100644
--- a/.spi.yml
+++ b/.spi.yml
@@ -1,4 +1,4 @@
 version: 1
 builder:
   configs:
-    - documentation_targets: [swiftui-loop-videoplayer]
+    - documentation_targets: [openai-async-image-swiftui]

From 12236fe31f095f74f078ab1ea4c81443301c0c12 Mon Sep 17 00:00:00 2001
From: Igor <igorshelopaev@gmail.com>
Date: Fri, 20 Sep 2024 14:51:21 +0200
Subject: [PATCH 16/39] Update README.md

---
 README.md | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)

diff --git a/README.md b/README.md
index f5fc301..ac3728f 100644
--- a/README.md
+++ b/README.md
@@ -6,7 +6,7 @@ You just type in any your idea and AI will give you an art solution
 
 DALL-E and DALL-E 2 are deep learning models developed by OpenAI to generate digital images from natural language descriptions, called "prompts"
 
-[![](https://img.shields.io/endpoint?url=https%3A%2F%2Fswiftpackageindex.com%2Fapi%2Fpackages%2Figor11191708%2Fopenai-async-image-swiftui%2Fbadge%3Ftype%3Dplatforms)](https://swiftpackageindex.com/igor11191708/openai-async-image-swiftui)
+[![](https://img.shields.io/endpoint?url=https%3A%2F%2Fswiftpackageindex.com%2Fapi%2Fpackages%2Figor11191708%2Fopenai-asyncimage-swiftui%2Fbadge%3Ftype%3Dplatforms)](https://swiftpackageindex.com/igor11191708/openai-asyncimage-swiftui)
 
 ## Example for the package
 

From cd87794bba59cf82cbf39ba012fd88876340a9a5 Mon Sep 17 00:00:00 2001
From: Igor <igorshelopaev@gmail.com>
Date: Fri, 20 Sep 2024 14:59:04 +0200
Subject: [PATCH 17/39] Update README.md

---
 README.md | 4 +---
 1 file changed, 1 insertion(+), 3 deletions(-)

diff --git a/README.md b/README.md
index ac3728f..7ad8787 100644
--- a/README.md
+++ b/README.md
@@ -8,9 +8,7 @@ DALL-E and DALL-E 2 are deep learning models developed by OpenAI to generate dig
 
 [![](https://img.shields.io/endpoint?url=https%3A%2F%2Fswiftpackageindex.com%2Fapi%2Fpackages%2Figor11191708%2Fopenai-asyncimage-swiftui%2Fbadge%3Ftype%3Dplatforms)](https://swiftpackageindex.com/igor11191708/openai-asyncimage-swiftui)
 
-## Example for the package
-
-[OpenAI AsyncImage SwiftUI example](https://github.com/The-Igor/openai-async-image-swiftui-example)
+## [Example for the package](https://github.com/The-Igor/openai-async-image-swiftui-example)
 
 ## Features
 - [x] Multiplatform iOS, macOS, watchOS and tvOS

From ac4601f7d9b8630b96411f700e689a02ec5a471c Mon Sep 17 00:00:00 2001
From: Igor <igorshelopaev@gmail.com>
Date: Fri, 20 Sep 2024 18:55:59 +0200
Subject: [PATCH 18/39] Update README.md

---
 README.md | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)

diff --git a/README.md b/README.md
index 7ad8787..21518ab 100644
--- a/README.md
+++ b/README.md
@@ -6,7 +6,7 @@ You just type in any your idea and AI will give you an art solution
 
 DALL-E and DALL-E 2 are deep learning models developed by OpenAI to generate digital images from natural language descriptions, called "prompts"
 
-[![](https://img.shields.io/endpoint?url=https%3A%2F%2Fswiftpackageindex.com%2Fapi%2Fpackages%2Figor11191708%2Fopenai-asyncimage-swiftui%2Fbadge%3Ftype%3Dplatforms)](https://swiftpackageindex.com/igor11191708/openai-asyncimage-swiftui)
+[![](https://img.shields.io/endpoint?url=https%3A%2F%2Fswiftpackageindex.com%2Fapi%2Fpackages%2Figor11191708%2Fopenai-async-image-swiftui%2Fbadge%3Ftype%3Dplatforms)](https://swiftpackageindex.com/igor11191708/openai-async-image-swiftui)
 
 ## [Example for the package](https://github.com/The-Igor/openai-async-image-swiftui-example)
 

From cd60e15c60513606f8c89ac9f97b79982845374e Mon Sep 17 00:00:00 2001
From: Igor <igorshelopaev@gmail.com>
Date: Tue, 26 Nov 2024 11:36:51 +0100
Subject: [PATCH 19/39] update

---
 .../OpenAIAsyncImage.swift                    | 115 +++++++++++-------
 .../enum/DalleModel.swift                     |  13 ++
 .../enum/OpenAIImageSize.swift                |   4 +
 .../model/Input.swift                         |   3 +
 .../protocol/IOpenAILoader.swift              |  13 +-
 .../viewModel/OpenAIDefaultLoader.swift       |  34 ++++--
 6 files changed, 120 insertions(+), 62 deletions(-)
 create mode 100644 Sources/openai-async-image-swiftui/enum/DalleModel.swift

diff --git a/Sources/openai-async-image-swiftui/OpenAIAsyncImage.swift b/Sources/openai-async-image-swiftui/OpenAIAsyncImage.swift
index f554d1f..e0d545b 100644
--- a/Sources/openai-async-image-swiftui/OpenAIAsyncImage.swift
+++ b/Sources/openai-async-image-swiftui/OpenAIAsyncImage.swift
@@ -44,21 +44,32 @@ public struct OpenAIAsyncImage<Content: View, T: IOpenAILoader>: View {
     /// Optional custom view builder template
     let tpl : ImageProcess?
     
+    /// Dall-e model type
+    let model : DalleModel
+    
     // MARK: - Life cycle
         
+    /// Initializes a view model for generating images using the OpenAI API with customizable parameters.
     /// - Parameters:
-    ///   - prompt: A text description of the desired image(s). The maximum length is 1000 characters
-    ///   - size: The size of the generated images. Must be one of 256x256, 512x512, or 1024x1024
-    ///   - tpl: Custom view builder template
-    ///   - loader: Custom loader conforming to `IOpenAILoader`
+    ///   - prompt: A `Binding` to a `String` that represents a text description of the desired image(s).
+    ///             The maximum length for the prompt is 1000 characters.
+    ///   - size: The size of the generated images, specified as an `OpenAIImageSize`.
+    ///           Defaults to `.dpi256`. Must be one of `.dpi256` (256x256), `.dpi512` (512x512), or `.dpi1024` (1024x1024).
+    ///   - model: The `DalleModel` specifying which model to use for generating the image(s).
+    ///            Defaults to `.dalle2`.
+    ///   - tpl: A custom SwiftUI `ViewBuilder` template for processing or rendering the generated image(s).
+    ///   - loader: A custom loader conforming to the `IOpenAILoader` protocol, responsible for handling
+    ///             the image generation process, such as communicating with the OpenAI API.
     public init(
-        prompt : Binding<String>,
-        size : OpenAIImageSize = .dpi256,
-        @ViewBuilder tpl : @escaping ImageProcess,
-        loader : T
-    ){
+        prompt: Binding<String>,
+        size: OpenAIImageSize = .dpi256,
+        model: DalleModel = .dalle2,
+        @ViewBuilder tpl: @escaping ImageProcess,
+        loader: T
+    ) {
         self._prompt = prompt
         self.size = size
+        self.model = model
         self.tpl = tpl
         self.loader = loader
     }
@@ -97,32 +108,41 @@ public struct OpenAIAsyncImage<Content: View, T: IOpenAILoader>: View {
         return .loading
     }
         
-    /// Load using the default loader
+    /// Loads an image using the default loader.
     /// - Parameters:
-    ///   - prompt: The text prompt for generating the image
-    ///   - size: The desired size of the image
-    /// - Returns: OpenAI image
-    private func loadImageDefault(_ prompt : String, with size : ImageSize) async throws -> Image{
-        try await defaultLoader.load(prompt, with: size)
+    ///   - prompt: The text prompt describing the desired image content.
+    ///   - size: The dimensions of the generated image, specified as `ImageSize`.
+    ///   - model: The `DalleModel` specifying the AI model to use for image generation.
+    /// - Returns: A generated `Image` object if successful.
+    /// - Throws: An error if the image generation fails.
+    private func loadImageDefault(
+        _ prompt: String,
+        with size: ImageSize,
+        model: DalleModel
+    ) async throws -> Image {
+        try await defaultLoader.load(prompt, with: size, model: model)
     }
-    
-    /// Load image using the provided or default loader
+
+    /// Loads an image using a provided loader, or falls back to the default loader if none is provided.
     /// - Parameters:
-    ///   - prompt: The text prompt for generating the image
-    ///   - size: The desired size of the image
-    /// - Returns: OpenAI image if successful, otherwise nil
-    private func loadImage(_ prompt : String, with size : ImageSize) async -> Image?{
-        do{
-            if let loader = loader{
-                return try await loader.load(prompt, with: size)
+    ///   - prompt: The text prompt describing the desired image content.
+    ///   - size: The dimensions of the generated image, specified as `ImageSize`.
+    ///   - model: The `DalleModel` specifying the AI model to use for image generation.
+    /// - Returns: An `Image` object if successful, or `nil` if the operation fails or is cancelled.
+    private func loadImage(
+        _ prompt: String,
+        with size: ImageSize,
+        model: DalleModel
+    ) async -> Image? {
+        do {
+            if let loader = loader {
+                return try await loader.load(prompt, with: size, model: model)
             }
-            
-            return try await loadImageDefault(prompt, with: size)
-        }catch{
-            if !Task.isCancelled{
+            return try await loadImageDefault(prompt, with: size, model: model)
+        } catch {
+            if !Task.isCancelled {
                 self.error = error
             }
-            
             return nil
         }
     }
@@ -151,7 +171,7 @@ public struct OpenAIAsyncImage<Content: View, T: IOpenAILoader>: View {
     /// - Returns: A task that fetches the OpenAI image
     private func getTask() -> Task<Void, Never>{
         Task{
-            if let image = await loadImage(prompt, with: size){
+            if let image = await loadImage(prompt, with: size, model: model){
                 await setImage(image)
             }
         }
@@ -162,16 +182,20 @@ public struct OpenAIAsyncImage<Content: View, T: IOpenAILoader>: View {
 
 public extension OpenAIAsyncImage where Content == EmptyView, T == OpenAIDefaultLoader{
     
-    /// Convenience initializer for default loader without custom view template
+    /// Convenience initializer for creating an instance with the default loader and no custom view template.
     /// - Parameters:
-    ///   - prompt: The text prompt for generating the image
-    ///   - size: The desired size of the image
+    ///   - prompt: A `Binding` to a `String` containing the text prompt that describes the desired image content.
+    ///   - size: The desired size of the generated image, specified as an `OpenAIImageSize`.
+    ///           Defaults to `.dpi256`.
+    ///   - model: The `DalleModel` specifying the AI model to use for image generation. Defaults to `.dalle2`.
     init(
-        prompt : Binding<String>,
-        size : OpenAIImageSize = .dpi256
-    ){
+        prompt: Binding<String>,
+        size: OpenAIImageSize = .dpi256,
+        model: DalleModel = .dalle2
+    ) {
         self._prompt = prompt
         self.size = size
+        self.model = model
         self.tpl = nil
         self.loader = nil
     }
@@ -179,18 +203,21 @@ public extension OpenAIAsyncImage where Content == EmptyView, T == OpenAIDefault
 
 public extension OpenAIAsyncImage where T == OpenAIDefaultLoader{
     
-    /// Convenience initializer for default loader with custom view template
+    /// Convenience initializer for creating an instance with the default loader and a custom view template.
     /// - Parameters:
-    ///   - prompt: The text prompt for generating the image
-    ///   - size: The desired size of the image
-    ///   - tpl: Custom view template
+    ///   - prompt: A `Binding` to a `String` containing the text prompt that describes the desired image content.
+    ///   - size: The desired size of the generated image, specified as an `OpenAIImageSize`. Defaults to `.dpi256`.
+    ///   - model: The `DalleModel` specifying the AI model to use for image generation. Defaults to `.dalle2`.
+    ///   - tpl: A SwiftUI `@ViewBuilder` closure that provides a custom view template for processing or rendering the generated image.
     init(
-        prompt : Binding<String>,
-        size : OpenAIImageSize = .dpi256,
-        @ViewBuilder tpl : @escaping ImageProcess
-    ){
+        prompt: Binding<String>,
+        size: OpenAIImageSize = .dpi256,
+        model: DalleModel = .dalle2,
+        @ViewBuilder tpl: @escaping ImageProcess
+    ) {
         self._prompt = prompt
         self.size = size
+        self.model = model
         self.tpl = tpl
         self.loader = nil
     }
diff --git a/Sources/openai-async-image-swiftui/enum/DalleModel.swift b/Sources/openai-async-image-swiftui/enum/DalleModel.swift
new file mode 100644
index 0000000..43dbcac
--- /dev/null
+++ b/Sources/openai-async-image-swiftui/enum/DalleModel.swift
@@ -0,0 +1,13 @@
+//
+//  DalleModel.swift
+//  openai-async-image-swiftui
+//
+//  Created by Igor  on 26.11.24.
+//
+
+public enum DalleModel: String{
+    
+    case dalle2 = "dall-e-2"
+    
+    case dalle3 = "dall-e-3"
+}
diff --git a/Sources/openai-async-image-swiftui/enum/OpenAIImageSize.swift b/Sources/openai-async-image-swiftui/enum/OpenAIImageSize.swift
index 8139966..f9b2998 100644
--- a/Sources/openai-async-image-swiftui/enum/OpenAIImageSize.swift
+++ b/Sources/openai-async-image-swiftui/enum/OpenAIImageSize.swift
@@ -16,4 +16,8 @@ public enum OpenAIImageSize: String, Encodable{
     case dpi512 = "512x512"
     
     case dpi1024 = "1024x1024"
+    
+    case dpi1792x1024 = "1792x1024"
+    
+    case dpi1024x1792 = "1024x1792"
 }
diff --git a/Sources/openai-async-image-swiftui/model/Input.swift b/Sources/openai-async-image-swiftui/model/Input.swift
index 42a2aa6..17c1118 100644
--- a/Sources/openai-async-image-swiftui/model/Input.swift
+++ b/Sources/openai-async-image-swiftui/model/Input.swift
@@ -12,6 +12,9 @@ import Foundation
 /// Given a prompt and/or an input image, the model will generate a new image
 @available(iOS 15.0, macOS 12.0, tvOS 15.0, watchOS 8.0, *)
 struct Input: Encodable{
+    
+    /// The DALL-E model identifier used to generate the image
+    let model: String
         
     /// A text description of the desired image(s). The maximum length is 1000 characters
     let prompt: String
diff --git a/Sources/openai-async-image-swiftui/protocol/IOpenAILoader.swift b/Sources/openai-async-image-swiftui/protocol/IOpenAILoader.swift
index a6b0198..f94745e 100644
--- a/Sources/openai-async-image-swiftui/protocol/IOpenAILoader.swift
+++ b/Sources/openai-async-image-swiftui/protocol/IOpenAILoader.swift
@@ -11,10 +11,13 @@ import SwiftUI
 @available(iOS 15.0, macOS 12.0, tvOS 15.0, watchOS 8.0, *)
 public protocol IOpenAILoader {
        
-    /// Asynchronously loads an image based on a provided text prompt and size
+    /// Asynchronously generates an image using a given text prompt, size, and model.
     /// - Parameters:
-    ///   - prompt: The text prompt describing the desired image
-    ///   - size: The size of the generated image
-    /// - Returns: The generated OpenAI image
-    func load(_ prompt: String, with size: OpenAIImageSize) async throws -> Image
+    ///   - prompt: A descriptive text prompt that defines the content of the desired image.
+    ///   - size: The dimensions of the generated image, specified as an `OpenAIImageSize`.
+    ///   - model: The `DalleModel` used for image generation.
+    /// - Returns: A generated `Image` based on the provided prompt and size.
+    /// - Throws: An error if the image generation process fails, such as issues with the prompt, model, or network.
+    func load(_ prompt: String, with size: OpenAIImageSize,
+              model: DalleModel) async throws -> Image
 }
diff --git a/Sources/openai-async-image-swiftui/viewModel/OpenAIDefaultLoader.swift b/Sources/openai-async-image-swiftui/viewModel/OpenAIDefaultLoader.swift
index 8dfa1bf..665b4cc 100644
--- a/Sources/openai-async-image-swiftui/viewModel/OpenAIDefaultLoader.swift
+++ b/Sources/openai-async-image-swiftui/viewModel/OpenAIDefaultLoader.swift
@@ -38,14 +38,18 @@ public final class OpenAIDefaultLoader: IOpenAILoader, Sendable {
         client = Http.Proxy(baseURL: url)
     }
        
-    /// Loads an image from the OpenAI API based on a text prompt
+    /// Asynchronously loads an image from the OpenAI API using a text prompt and specified parameters.
     /// - Parameters:
-    ///   - prompt: The text prompt describing the desired image
-    ///   - size: The size of the generated image
-    /// - Returns: OpenAI Image
+    ///   - prompt: The text prompt describing the desired image content.
+    ///   - size: The dimensions of the generated image, specified as `OpenAIImageSize`.
+    ///   - model: The `DalleModel` used for generating the image.
+    /// - Returns: A generated `Image` object based on the prompt and size.
+    /// - Throws: An `AsyncImageErrors` if the client is undefined, the request fails,
+    ///           or the OpenAI API returns an error.
     public func load(
         _ prompt: String,
-        with size: OpenAIImageSize
+        with size: OpenAIImageSize,
+        model: DalleModel
     ) async throws -> Image {
         
         guard let client = client else {
@@ -53,7 +57,7 @@ public final class OpenAIDefaultLoader: IOpenAILoader, Sendable {
         }
         
         do {
-            let (path, body, headers) = prepareRequest(prompt: prompt, size: size)
+            let (path, body, headers) = prepareRequest(prompt: prompt, size: size, model: model)
             let result: Http.Response<Output> = try await client.post(path: path, body: body, headers: headers)
             return try imageBase64(from: result.value)
             
@@ -61,14 +65,18 @@ public final class OpenAIDefaultLoader: IOpenAILoader, Sendable {
             throw AsyncImageErrors.handleRequest(error)
         }
     }
-    
-    /// Prepares the request with the necessary parameters
+
+    /// Prepares the API request for generating an image with the given parameters.
     /// - Parameters:
-    ///   - prompt: The text prompt describing the desired image
-    ///   - size: The size of the generated image
-    /// - Returns: A tuple containing the path, body, and headers for the request
-    private func prepareRequest(prompt: String, size: OpenAIImageSize) -> (String, Input, [String: String]) {
-        let body = Input(prompt: prompt, size: size, response_format: .b64, n: 1)
+    ///   - prompt: The descriptive text prompt for generating the image.
+    ///   - size: The dimensions of the image to be generated, as `OpenAIImageSize`.
+    ///   - model: The `DalleModel` specifying the AI model to use for generation.
+    /// - Returns: A tuple containing:
+    ///   - `path`: The API endpoint path as a `String`.
+    ///   - `body`: The request payload as an `Input` object, containing model, prompt, size, and other parameters.
+    ///   - `headers`: A dictionary of HTTP headers required for the request.
+    private func prepareRequest(prompt: String, size: OpenAIImageSize, model: DalleModel) -> (String, Input, [String: String]) {
+        let body = Input(model: model.rawValue, prompt: prompt, size: size, response_format: .b64, n: 1)
         let headers = ["Content-Type": "application/json", "Authorization": "Bearer \(endpoint.apiKey)"]
         let path = endpoint.path
         return (path, body, headers)
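Note: the `model` parameter added above also changes the `IOpenAILoader` contract, so any custom loader passed into `OpenAIAsyncImage` has to adopt the new signature. A minimal conforming sketch, relying only on the protocol members shown in this patch (the class name and placeholder return value are hypothetical):

    import SwiftUI

    /// Hypothetical custom loader adopting the updated IOpenAILoader signature.
    final class StubLoader: IOpenAILoader {
        func load(_ prompt: String, with size: OpenAIImageSize,
                  model: DalleModel) async throws -> Image {
            // A real loader would call its own backend here, forwarding
            // model.rawValue ("dall-e-2" or "dall-e-3") together with prompt and size.
            // The placeholder image keeps this sketch self-contained.
            return Image(systemName: "photo")
        }
    }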

From 378228f2285ff93390ce5c52456d202977e96af6 Mon Sep 17 00:00:00 2001
From: Igor <igorshelopaev@gmail.com>
Date: Tue, 26 Nov 2024 14:08:22 +0100
Subject: [PATCH 20/39] update

---
 .../OpenAIAsyncImage.swift                    | 19 +++++++++----------
 .../enum/AsyncImageErrors.swift               |  3 +++
 2 files changed, 12 insertions(+), 10 deletions(-)

diff --git a/Sources/openai-async-image-swiftui/OpenAIAsyncImage.swift b/Sources/openai-async-image-swiftui/OpenAIAsyncImage.swift
index e0d545b..3f9e236 100644
--- a/Sources/openai-async-image-swiftui/OpenAIAsyncImage.swift
+++ b/Sources/openai-async-image-swiftui/OpenAIAsyncImage.swift
@@ -133,18 +133,11 @@ public struct OpenAIAsyncImage<Content: View, T: IOpenAILoader>: View {
         _ prompt: String,
         with size: ImageSize,
         model: DalleModel
-    ) async -> Image? {
-        do {
+    ) async throws -> Image? {
             if let loader = loader {
                 return try await loader.load(prompt, with: size, model: model)
             }
             return try await loadImageDefault(prompt, with: size, model: model)
-        } catch {
-            if !Task.isCancelled {
-                self.error = error
-            }
-            return nil
-        }
     }
     
     /// Sets the image on the main thread
@@ -171,8 +164,14 @@ public struct OpenAIAsyncImage<Content: View, T: IOpenAILoader>: View {
     /// - Returns: A task that fetches the OpenAI image
     private func getTask() -> Task<Void, Never>{
         Task{
-            if let image = await loadImage(prompt, with: size, model: model){
-                await setImage(image)
+            do{
+                if let image = try await loadImage(prompt, with: size, model: model){
+                    setImage(image)
+                }
+            }catch is CancellationError{
+                self.error = AsyncImageErrors.cancellationError
+            }catch{
+                self.error = error
             }
         }
     }
diff --git a/Sources/openai-async-image-swiftui/enum/AsyncImageErrors.swift b/Sources/openai-async-image-swiftui/enum/AsyncImageErrors.swift
index 7546576..140be38 100644
--- a/Sources/openai-async-image-swiftui/enum/AsyncImageErrors.swift
+++ b/Sources/openai-async-image-swiftui/enum/AsyncImageErrors.swift
@@ -16,6 +16,7 @@ enum AsyncImageErrors: Error {
     case returnedNoImages        // No images were returned in the response
     case httpStatus(String)      // HTTP status error with a message
     case responseError(Error)    // Generic response error
+    case cancellationError       // The loading task was cancelled
 }
 
 @available(iOS 15.0, macOS 12.0, tvOS 15.0, watchOS 8.0, *)
@@ -32,6 +33,8 @@ extension AsyncImageErrors: LocalizedError {
             return NSLocalizedString(description, comment: "")
         case .responseError(let error):
             return error.localizedDescription
+        case .cancellationError:
+            return NSLocalizedString("Cancellation error.", comment: "")
         }
     }
 }
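With this change a cancelled load is reported as `AsyncImageErrors.cancellationError` instead of being silently dropped. The underlying pattern — run a throwing async load and translate `CancellationError` into a typed, user-presentable error — can be sketched in isolation like this (`DemoError`, `fetchImage`, and `loadMapped` are hypothetical names, not part of the package):

    import SwiftUI

    enum DemoError: Error { case cancelled, underlying(Error) }

    /// Hypothetical stand-in for an async image request.
    func fetchImage() async throws -> Image { Image(systemName: "photo") }

    /// Runs the throwing load and maps cancellation to a dedicated case,
    /// mirroring the getTask() change in the patch above.
    func loadMapped() async -> Result<Image, DemoError> {
        do {
            return .success(try await fetchImage())
        } catch is CancellationError {
            return .failure(.cancelled)
        } catch {
            return .failure(.underlying(error))
        }
    }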

From cd337d7890a0e05d366558df996130faa912a518 Mon Sep 17 00:00:00 2001
From: Igor <igorshelopaev@gmail.com>
Date: Wed, 27 Nov 2024 12:08:21 +0100
Subject: [PATCH 21/39] Update Package.swift

---
 Package.swift | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)

diff --git a/Package.swift b/Package.swift
index f587b91..fde87d4 100644
--- a/Package.swift
+++ b/Package.swift
@@ -14,7 +14,7 @@ let package = Package(
     ],
     dependencies: [
         // Dependencies declare other packages that this package depends on.
-        .package(url: "https://github.com/The-Igor/async-http-client.git", from: "1.4.6")
+        .package(url: "https://github.com/The-Igor/async-http-client.git", from: "1.4.7")
     ],
     targets: [
         // Targets are the basic building blocks of a package. A target can define a module or a test suite.

From b6dc4bc20b4304c7e970348d70dcd807e143477e Mon Sep 17 00:00:00 2001
From: Igor <igorshelopaev@gmail.com>
Date: Wed, 27 Nov 2024 12:18:55 +0100
Subject: [PATCH 22/39] Update Package.resolved

---
 Package.resolved | 6 +++---
 1 file changed, 3 insertions(+), 3 deletions(-)

diff --git a/Package.resolved b/Package.resolved
index 1111124..bde66f8 100644
--- a/Package.resolved
+++ b/Package.resolved
@@ -5,14 +5,14 @@
       "kind" : "remoteSourceControl",
       "location" : "https://github.com/The-Igor/async-http-client.git",
       "state" : {
-        "revision" : "9a56240b21baff71f5399194a1f2af9ab8c4c076",
-        "version" : "1.4.6"
+        "revision" : "94d2573c14bb6b88110338a0f4f4cd7b0c6f3165",
+        "version" : "1.4.7"
       }
     },
     {
       "identity" : "retry-policy-service",
       "kind" : "remoteSourceControl",
-      "location" : "https://github.com/The-Igor/retry-policy-service.git",
+      "location" : "https://github.com/igor11191708/retry-policy-service.git",
       "state" : {
         "revision" : "2a6a1f057fbf77337dfc73db98bd3d538127b3e2",
         "version" : "1.0.1"

From dbebdad486a9927241a83c690aff077b1fac146f Mon Sep 17 00:00:00 2001
From: Igor <igorshelopaev@gmail.com>
Date: Wed, 27 Nov 2024 12:19:50 +0100
Subject: [PATCH 23/39] update

---
 Package.resolved | 2 +-
 Package.swift    | 2 +-
 2 files changed, 2 insertions(+), 2 deletions(-)

diff --git a/Package.resolved b/Package.resolved
index bde66f8..3b566eb 100644
--- a/Package.resolved
+++ b/Package.resolved
@@ -3,7 +3,7 @@
     {
       "identity" : "async-http-client",
       "kind" : "remoteSourceControl",
-      "location" : "https://github.com/The-Igor/async-http-client.git",
+      "location" : "https://github.com/igor11191708/async-http-client.git",
       "state" : {
         "revision" : "94d2573c14bb6b88110338a0f4f4cd7b0c6f3165",
         "version" : "1.4.7"
diff --git a/Package.swift b/Package.swift
index fde87d4..f01dac1 100644
--- a/Package.swift
+++ b/Package.swift
@@ -14,7 +14,7 @@ let package = Package(
     ],
     dependencies: [
         // Dependencies declare other packages that this package depends on.
-        .package(url: "https://github.com/The-Igor/async-http-client.git", from: "1.4.7")
+        .package(url: "https://github.com/igor11191708/async-http-client.git", from: "1.4.7")
     ],
     targets: [
         // Targets are the basic building blocks of a package. A target can define a module or a test suite.

From 60ce51a20abdc637407db1b283d6343778d40828 Mon Sep 17 00:00:00 2001
From: Igor <igorshelopaev@gmail.com>
Date: Wed, 27 Nov 2024 16:33:05 +0100
Subject: [PATCH 24/39] Update README.md

---
 README.md | 2 ++
 1 file changed, 2 insertions(+)

diff --git a/README.md b/README.md
index 21518ab..842df00 100644
--- a/README.md
+++ b/README.md
@@ -9,6 +9,8 @@ DALL-E and DALL-E 2 are deep learning models developed by OpenAI to generate dig
 [![](https://img.shields.io/endpoint?url=https%3A%2F%2Fswiftpackageindex.com%2Fapi%2Fpackages%2Figor11191708%2Fopenai-async-image-swiftui%2Fbadge%3Ftype%3Dplatforms)](https://swiftpackageindex.com/igor11191708/openai-async-image-swiftui)
 
 ## [Example for the package](https://github.com/The-Igor/openai-async-image-swiftui-example)
+## [Documentation(API)](https://swiftpackageindex.com/igor11191708/openai-async-image-swiftui/1.2.0/documentation/openai_async_image_swiftui)
+
 
 ## Features
 - [x] Multiplatform iOS, macOS, watchOS and tvOS

From 92d3a87fdd74a4b8de519939bc7a5f3e4e5b214c Mon Sep 17 00:00:00 2001
From: Igor <igorshelopaev@gmail.com>
Date: Wed, 27 Nov 2024 16:34:08 +0100
Subject: [PATCH 25/39] Update README.md

---
 README.md | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)

diff --git a/README.md b/README.md
index 842df00..23358e1 100644
--- a/README.md
+++ b/README.md
@@ -9,7 +9,7 @@ DALL-E and DALL-E 2 are deep learning models developed by OpenAI to generate dig
 [![](https://img.shields.io/endpoint?url=https%3A%2F%2Fswiftpackageindex.com%2Fapi%2Fpackages%2Figor11191708%2Fopenai-async-image-swiftui%2Fbadge%3Ftype%3Dplatforms)](https://swiftpackageindex.com/igor11191708/openai-async-image-swiftui)
 
 ## [Example for the package](https://github.com/The-Igor/openai-async-image-swiftui-example)
-## [Documentation(API)](https://swiftpackageindex.com/igor11191708/openai-async-image-swiftui/1.2.0/documentation/openai_async_image_swiftui)
+## [Documentation(API)](https://swiftpackageindex.com/igor11191708/openai-async-image-swiftui/main/documentation/openai_async_image_swiftui)
 
 
 ## Features

From 6c511fc8d2a6e69060f0edaf79abfcc40a1f6de8 Mon Sep 17 00:00:00 2001
From: Igor <igorshelopaev@gmail.com>
Date: Thu, 28 Nov 2024 19:06:14 +0100
Subject: [PATCH 26/39] update

---
 README.md | 20 ++++++++++----------
 1 file changed, 10 insertions(+), 10 deletions(-)

diff --git a/README.md b/README.md
index 23358e1..c2e16ef 100644
--- a/README.md
+++ b/README.md
@@ -6,10 +6,10 @@ You just type in any your idea and AI will give you an art solution
 
 DALL-E and DALL-E 2 are deep learning models developed by OpenAI to generate digital images from natural language descriptions, called "prompts"
 
-[![](https://img.shields.io/endpoint?url=https%3A%2F%2Fswiftpackageindex.com%2Fapi%2Fpackages%2Figor11191708%2Fopenai-async-image-swiftui%2Fbadge%3Ftype%3Dplatforms)](https://swiftpackageindex.com/igor11191708/openai-async-image-swiftui)
+[![](https://img.shields.io/endpoint?url=https%3A%2F%2Fswiftpackageindex.com%2Fapi%2Fpackages%2Fswiftuiux%2Fopenai-async-image-swiftui%2Fbadge%3Ftype%3Dplatforms)](https://swiftpackageindex.com/swiftuiux/openai-async-image-swiftui)
 
-## [Example for the package](https://github.com/The-Igor/openai-async-image-swiftui-example)
-## [Documentation(API)](https://swiftpackageindex.com/igor11191708/openai-async-image-swiftui/main/documentation/openai_async_image_swiftui)
+## [Example for the package](https://github.com/swiftuiux/openai-async-image-swiftui-example)
+## [Documentation(API)](https://swiftpackageindex.com/swiftuiux/openai-async-image-swiftui/main/documentation/openai_async_image_swiftui)
 
 
 ## Features
@@ -18,7 +18,7 @@ DALL-E and DALL-E 2 are deep learning models developed by OpenAI to generate dig
 - [x] Customizable in term of the transport layer [Loader]
 - [x] Based on interfaces not implementations
 
- ![OpenAI AsyncImage SwiftUI](https://github.com/The-Igor/openai-async-image-swiftui/blob/main/image/sun_watch.png) 
+ ![OpenAI AsyncImage SwiftUI](https://github.com/swiftuiux/openai-async-image-swiftui/blob/main/image/sun_watch.png) 
 
 ## How to use
 
@@ -62,14 +62,14 @@ or with custom **ViewBuilder**
 | tpl | Custom view builder tpl |
 | loader | Custom loader if you need something specific|
 
- ![OpenAI AsyncImage SwiftUI](https://github.com/The-Igor/openai-async-image-swiftui/blob/main/image/appletv_art.png) 
+ ![OpenAI AsyncImage SwiftUI](https://github.com/swiftuiux/openai-async-image-swiftui/blob/main/image/appletv_art.png) 
 
 ## Documentation(API)
 - You need to have Xcode 13 installed in order to have access to Documentation Compiler (DocC)
 - Go to Product > Build Documentation or **⌃⇧⌘ D**
 
 
-![OpenAI AsyncImage SwiftUI](https://github.com/The-Igor/openai-async-image-swiftui/blob/main/image/sun_11.png) 
+![OpenAI AsyncImage SwiftUI](https://github.com/swiftuiux/openai-async-image-swiftui/blob/main/image/sun_11.png) 
 
 ## More Stable Diffusion examples 
 
@@ -77,12 +77,12 @@ or with custom **ViewBuilder**
 Announced in 2022, OpenAI's text-to-image model DALL-E 2 is a recent example of diffusion models. It uses diffusion models for both the model's prior (which produces an image embedding given a text caption) and the decoder that generates the final image.
 In machine learning, diffusion models, also known as diffusion probabilistic models, are a class of latent variable models. They are Markov chains trained using variational inference. The goal of diffusion models is to learn the latent structure of a dataset by modeling the way in which data points diffuse through the latent space.
 Diffusion models can be applied to a variety of tasks, including image denoising, inpainting, super-resolution, and image generation. For example, an image generation model would start with a random noise image and then, after having been trained reversing the diffusion process on natural images, the model would be able to generate new natural images. 
-[Replicate kit](https://github.com/The-Igor/replicate-kit-swift)
+[Replicate kit](https://github.com/swiftuiux/replicate-kit-swift)
 
 
-![The concept](https://github.com/The-Igor/replicate-kit-swift/raw/main/img/image_02.png) 
+![The concept](https://github.com/swiftuiux/replicate-kit-swift/raw/main/img/image_02.png) 
 
 ### CoreML Stable Diffusion
-[The example app](https://github.com/The-Igor/coreml-stable-diffusion-swift-example) for running text-to-image or image-to-image models to generate images using Apple's Core ML Stable Diffusion implementation
+[The example app](https://github.com/swiftuiux/coreml-stable-diffusion-swift-example) for running text-to-image or image-to-image models to generate images using Apple's Core ML Stable Diffusion implementation
 
-![The concept](https://github.com/The-Igor/coreml-stable-diffusion-swift-example/blob/main/img/img_01.png) 
+![The concept](https://github.com/swiftuiux/coreml-stable-diffusion-swift-example/blob/main/img/img_01.png) 
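For reference, a usage sketch that combines the renamed package paths with the `model` argument introduced earlier in this series — endpoint/API-key configuration is omitted here and is set up as described elsewhere in the README; the view name and prompt are hypothetical:

    import SwiftUI
    import openai_async_image_swiftui

    struct ArtView: View {
        @State private var prompt = "A lighthouse in a storm, oil painting"

        var body: some View {
            // Custom ViewBuilder template handling the three ImageState cases
            OpenAIAsyncImage(prompt: $prompt, size: .dpi1024, model: .dalle3) { state in
                switch state {
                case .loaded(let image): image.resizable().scaledToFit()
                case .loadError(let error): Text(error.localizedDescription)
                case .loading: ProgressView()
                }
            }
            .frame(width: 256, height: 256)
        }
    }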

From d13ebdb425e2b81cddc4e0b700d548599bdbce5a Mon Sep 17 00:00:00 2001
From: Igor <igorshelopaev@gmail.com>
Date: Fri, 29 Nov 2024 10:35:41 +0100
Subject: [PATCH 27/39] update

---
 Package.resolved | 8 ++++----
 Package.swift    | 2 +-
 2 files changed, 5 insertions(+), 5 deletions(-)

diff --git a/Package.resolved b/Package.resolved
index 3b566eb..eaedb43 100644
--- a/Package.resolved
+++ b/Package.resolved
@@ -3,16 +3,16 @@
     {
       "identity" : "async-http-client",
       "kind" : "remoteSourceControl",
-      "location" : "https://github.com/igor11191708/async-http-client.git",
+      "location" : "https://github.com/swiftuiux/async-http-client.git",
       "state" : {
-        "revision" : "94d2573c14bb6b88110338a0f4f4cd7b0c6f3165",
-        "version" : "1.4.7"
+        "revision" : "936a6e953d3c3e05a14c3d852fea9955e57c9854",
+        "version" : "1.5.0"
       }
     },
     {
       "identity" : "retry-policy-service",
       "kind" : "remoteSourceControl",
-      "location" : "https://github.com/igor11191708/retry-policy-service.git",
+      "location" : "https://github.com/swiftuiux/retry-policy-service.git",
       "state" : {
         "revision" : "2a6a1f057fbf77337dfc73db98bd3d538127b3e2",
         "version" : "1.0.1"
diff --git a/Package.swift b/Package.swift
index f01dac1..6e4af9c 100644
--- a/Package.swift
+++ b/Package.swift
@@ -14,7 +14,7 @@ let package = Package(
     ],
     dependencies: [
         // Dependencies declare other packages that this package depends on.
-        .package(url: "https://github.com/igor11191708/async-http-client.git", from: "1.4.7")
+        .package(url: "https://github.com/swiftuiux/async-http-client.git", from: "1.5.0")
     ],
     targets: [
         // Targets are the basic building blocks of a package. A target can define a module or a test suite.

From 439aba5bf867510a448e19b65b9de863cd1daa4a Mon Sep 17 00:00:00 2001
From: Software Engineer <igorshelopaev@gmail.com>
Date: Sat, 30 Nov 2024 16:08:35 +0300
Subject: [PATCH 28/39] Update LICENSE

---
 LICENSE | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)

diff --git a/LICENSE b/LICENSE
index b2bea10..f148400 100644
--- a/LICENSE
+++ b/LICENSE
@@ -1,6 +1,6 @@
 MIT License
 
-Copyright (c) 2023 Igor
+Copyright (c) 2023  Igor Shelopaev
 
 Permission is hereby granted, free of charge, to any person obtaining a copy
 of this software and associated documentation files (the "Software"), to deal

From f33d70168468b7f81591634b54fe0492765fa646 Mon Sep 17 00:00:00 2001
From: Igor <igorshelopaev@gmail.com>
Date: Sun, 1 Dec 2024 11:20:15 +0100
Subject: [PATCH 29/39] update

---
 Package.resolved                              |  9 +++
 Package.swift                                 |  5 +-
 .../OpenAIAsyncImage.swift                    | 79 +++++++------------
 3 files changed, 40 insertions(+), 53 deletions(-)

diff --git a/Package.resolved b/Package.resolved
index eaedb43..7da7e3b 100644
--- a/Package.resolved
+++ b/Package.resolved
@@ -9,6 +9,15 @@
         "version" : "1.5.0"
       }
     },
+    {
+      "identity" : "async-task",
+      "kind" : "remoteSourceControl",
+      "location" : "https://github.com/swiftuiux/async-task.git",
+      "state" : {
+        "revision" : "0e10077166d3b79d6fdffe4542ffc0196f63954f",
+        "version" : "1.2.3"
+      }
+    },
     {
       "identity" : "retry-policy-service",
       "kind" : "remoteSourceControl",
diff --git a/Package.swift b/Package.swift
index 6e4af9c..2debc36 100644
--- a/Package.swift
+++ b/Package.swift
@@ -14,14 +14,15 @@ let package = Package(
     ],
     dependencies: [
         // Dependencies declare other packages that this package depends on.
-        .package(url: "https://github.com/swiftuiux/async-http-client.git", from: "1.5.0")
+        .package(url: "https://github.com/swiftuiux/async-http-client.git", from: "1.5.0"),
+        .package(url: "https://github.com/swiftuiux/async-task.git", from: "1.2.3")
     ],
     targets: [
         // Targets are the basic building blocks of a package. A target can define a module or a test suite.
         // Targets can depend on other targets in this package, and on products in packages this package depends on.
         .target(
             name: "openai-async-image-swiftui",
-            dependencies: ["async-http-client"]),
+            dependencies: ["async-http-client", "async-task"]),
         .testTarget(
             name: "openai-async-image-swiftuiTests",
             dependencies: ["openai-async-image-swiftui"]),
diff --git a/Sources/openai-async-image-swiftui/OpenAIAsyncImage.swift b/Sources/openai-async-image-swiftui/OpenAIAsyncImage.swift
index 3f9e236..c7be6f4 100644
--- a/Sources/openai-async-image-swiftui/OpenAIAsyncImage.swift
+++ b/Sources/openai-async-image-swiftui/OpenAIAsyncImage.swift
@@ -6,30 +6,23 @@
 //
 
 import SwiftUI
+import async_task
 
 fileprivate typealias ImageSize = OpenAIImageSize
+fileprivate typealias TaskModel = Async.SingleTask<Image, AsyncImageErrors>
 
 /// Async image component to load and show OpenAI image from OpenAI image API
 @available(iOS 15.0, macOS 12.0, tvOS 15.0, watchOS 8.0, *)
 public struct OpenAIAsyncImage<Content: View, T: IOpenAILoader>: View {
     
+    @StateObject private var taskModel = TaskModel(errorMapper: errorMapper)
+    
     /// Custom view builder template type alias
     public typealias ImageProcess = (ImageState) -> Content
         
     /// Default loader, injected from environment
     @Environment(\.openAIDefaultLoader) var defaultLoader : OpenAIDefaultLoader
-    
-    // MARK: - Private properties
-    
-    /// State variable to hold the OpenAI image
-    @State private var image: Image?
-        
-    /// State variable to hold any errors encountered during loading
-    @State private var error: Error?
-        
-    /// State variable to hold the current task responsible for loading the image
-    @State private var task : Task<Void, Never>?
-   
+      
     // MARK: - Config
     
     /// A binding to the text prompt describing the desired image. The maximum length is 1000 characters
@@ -85,15 +78,13 @@ public struct OpenAIAsyncImage<Content: View, T: IOpenAILoader>: View {
             }
         }
         .onChange(of: prompt){ _ in
-            cancelTask()
-            clear()
-            task = getTask()
+            start()
         }
         .onAppear {
-           task = getTask()
+            start()
         }
         .onDisappear{
-            cancelTask()
+            cancel()
         }
     }
     
@@ -102,8 +93,8 @@ public struct OpenAIAsyncImage<Content: View, T: IOpenAILoader>: View {
     /// - Returns: The current image state status
     private func getState () -> ImageState{
         
-        if let image { return .loaded(image) }
-        else if let error { return .loadError(error)}
+        if let image = taskModel.value { return .loaded(image) }
+        else if let error = taskModel.error { return .loadError(error)}
         
         return .loading
     }
@@ -139,42 +130,19 @@ public struct OpenAIAsyncImage<Content: View, T: IOpenAILoader>: View {
             }
             return try await loadImageDefault(prompt, with: size, model: model)
     }
-    
-    /// Sets the image on the main thread
-    /// - Parameter value: The image to be set
-    @MainActor
-    private func setImage(_ value : Image){
-        image = value
-    }
-    
-    /// Clears the image and error state properties
-    @MainActor
-    private func clear(){
-        image = nil
-        error = nil
-    }
-    
-    /// Cancels the current loading task if any
-    private func cancelTask(){
-        task?.cancel()
-        task = nil
-    }
-    
+
     /// Creates and returns a task to fetch the OpenAI image
     /// - Returns: A task that fetches the OpenAI image
-    private func getTask() -> Task<Void, Never>{
-        Task{
-            do{
-                if let image = try await loadImage(prompt, with: size, model: model){
-                    setImage(image)
-                }
-            }catch is CancellationError{
-                self.error = AsyncImageErrors.cancellationError
-            }catch{
-                self.error = error
-            }
+    private func start(){
+        taskModel.start{
+            try await loadImage(prompt, with: size, model: model)
         }
     }
+    
+    /// Cancels the currently running image loading task, if any
+    private func cancel(){
+        taskModel.cancel()
+    }
 }
 
 // MARK: - Public extensions -
@@ -232,3 +200,12 @@ fileprivate func imageTpl(_ state : ImageState) -> some View{
         case .loading : ProgressView()
     }
 }
+
+@Sendable
+fileprivate func errorMapper(_ error : Error?) -> AsyncImageErrors?{
+    if error is CancellationError{
+        return .cancellationError
+    }
+    
+    return nil
+}
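The view now hands its whole load/cancel lifecycle to `Async.SingleTask` from the async-task package. A stripped-down sketch of the same pattern, assuming only the API surface this patch itself relies on (`init(errorMapper:)`, `start { }`, `cancel()`, `value`, `error`); the view, error type, and `loadQuote` function are hypothetical:

    import SwiftUI
    import async_task

    enum QuoteError: Error { case cancelled }

    @Sendable
    fileprivate func mapError(_ error: Error?) -> QuoteError? {
        error is CancellationError ? .cancelled : nil
    }

    struct QuoteView: View {
        @StateObject private var task = Async.SingleTask<String, QuoteError>(errorMapper: mapError)

        var body: some View {
            Group {
                if let value = task.value { Text(value) }
                else if let error = task.error { Text(error.localizedDescription) }
                else { ProgressView() }
            }
            .onAppear { task.start { try await loadQuote() } }
            .onDisappear { task.cancel() }
        }

        private func loadQuote() async throws -> String {
            // Stand-in for a real async request
            try await Task.sleep(nanoseconds: 200_000_000)
            return "Hello"
        }
    }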

From 9726d0ef3b52a58db35977af3bc1089c25bd80fe Mon Sep 17 00:00:00 2001
From: Igor <igorshelopaev@gmail.com>
Date: Sun, 1 Dec 2024 11:24:27 +0100
Subject: [PATCH 30/39] Update OpenAIAsyncImage.swift

---
 Sources/openai-async-image-swiftui/OpenAIAsyncImage.swift | 1 +
 1 file changed, 1 insertion(+)

diff --git a/Sources/openai-async-image-swiftui/OpenAIAsyncImage.swift b/Sources/openai-async-image-swiftui/OpenAIAsyncImage.swift
index c7be6f4..12574e5 100644
--- a/Sources/openai-async-image-swiftui/OpenAIAsyncImage.swift
+++ b/Sources/openai-async-image-swiftui/OpenAIAsyncImage.swift
@@ -15,6 +15,7 @@ fileprivate typealias TaskModel = Async.SingleTask<Image, AsyncImageErrors>
 @available(iOS 15.0, macOS 12.0, tvOS 15.0, watchOS 8.0, *)
 public struct OpenAIAsyncImage<Content: View, T: IOpenAILoader>: View {
     
+    /// Task model for managing image loading cycle
     @StateObject private var taskModel = TaskModel(errorMapper: errorMapper)
     
     /// Custom view builder template type alias

From 5cec6b959315a1cbe760927c5ac03e279e19bed9 Mon Sep 17 00:00:00 2001
From: Igor <igorshelopaev@gmail.com>
Date: Sun, 1 Dec 2024 11:43:46 +0100
Subject: [PATCH 31/39] update

---
 Package.resolved | 4 ++--
 Package.swift    | 2 +-
 2 files changed, 3 insertions(+), 3 deletions(-)

diff --git a/Package.resolved b/Package.resolved
index 7da7e3b..6d6537d 100644
--- a/Package.resolved
+++ b/Package.resolved
@@ -14,8 +14,8 @@
       "kind" : "remoteSourceControl",
       "location" : "https://github.com/swiftuiux/async-task.git",
       "state" : {
-        "revision" : "0e10077166d3b79d6fdffe4542ffc0196f63954f",
-        "version" : "1.2.3"
+        "revision" : "d05dc1ec967813392da38e3501dfe666098baaec",
+        "version" : "1.2.5"
       }
     },
     {
diff --git a/Package.swift b/Package.swift
index 2debc36..2dde9bb 100644
--- a/Package.swift
+++ b/Package.swift
@@ -15,7 +15,7 @@ let package = Package(
     dependencies: [
         // Dependencies declare other packages that this package depends on.
         .package(url: "https://github.com/swiftuiux/async-http-client.git", from: "1.5.0"),
-        .package(url: "https://github.com/swiftuiux/async-task.git", from: "1.2.3")
+        .package(url: "https://github.com/swiftuiux/async-task.git", from: "1.2.5")
     ],
     targets: [
         // Targets are the basic building blocks of a package. A target can define a module or a test suite.

From cc9ddaa2f88d53b988f7f5f7c9d1b37dbb45c1b2 Mon Sep 17 00:00:00 2001
From: Igor <igorshelopaev@gmail.com>
Date: Tue, 10 Dec 2024 10:13:37 +0100
Subject: [PATCH 32/39] Update README.md

---
 README.md | 9 +++++----
 1 file changed, 5 insertions(+), 4 deletions(-)

diff --git a/README.md b/README.md
index c2e16ef..e88f6ed 100644
--- a/README.md
+++ b/README.md
@@ -13,10 +13,11 @@ DALL-E and DALL-E 2 are deep learning models developed by OpenAI to generate dig
 
 
 ## Features
-- [x] Multiplatform iOS, macOS, watchOS and tvOS
-- [x] Customizable in term of SwiftUI Image specs [renderingMode, resizable,  antialiased...]
-- [x] Customizable in term of the transport layer [Loader]
-- [x] Based on interfaces not implementations
+- [x] Supports multiple platforms: iOS, macOS, watchOS, and tvOS  
+- [x] Customizable with SwiftUI Image properties (e.g., `renderingMode`, `resizable`, `antialiased`)  
+- [x] Configurable transport layer via custom `Loader`  
+- [x] Designed with interfaces, not implementations  
+- [x] Fully leverages Swift's new concurrency model  
 
  ![OpenAI AsyncImage SwiftUI](https://github.com/swiftuiux/openai-async-image-swiftui/blob/main/image/sun_watch.png) 
 

From 47bb4c69da1c11edb785a090d4a7df00dbf65b78 Mon Sep 17 00:00:00 2001
From: Igor <igorshelopaev@gmail.com>
Date: Tue, 10 Dec 2024 10:14:00 +0100
Subject: [PATCH 33/39] Update README.md

---
 README.md | 2 --
 1 file changed, 2 deletions(-)

diff --git a/README.md b/README.md
index e88f6ed..b735872 100644
--- a/README.md
+++ b/README.md
@@ -2,8 +2,6 @@
 
 SwiftUI view that asynchronously loads and displays an OpenAI image from open API
 
-You just type in any your idea and AI will give you an art solution
-
 DALL-E and DALL-E 2 are deep learning models developed by OpenAI to generate digital images from natural language descriptions, called "prompts"
 
 [![](https://img.shields.io/endpoint?url=https%3A%2F%2Fswiftpackageindex.com%2Fapi%2Fpackages%2Fswiftuiux%2Fopenai-async-image-swiftui%2Fbadge%3Ftype%3Dplatforms)](https://swiftpackageindex.com/swiftuiux/openai-async-image-swiftui)

From 475b8537e7b4e74fa767f8e3eca6a49980ebb414 Mon Sep 17 00:00:00 2001
From: Igor <igorshelopaev@gmail.com>
Date: Tue, 10 Dec 2024 10:16:11 +0100
Subject: [PATCH 34/39] Update openai_async_image_swiftuiTests.swift

---
 .../openai_async_image_swiftuiTests.swift                    | 5 +----
 1 file changed, 1 insertion(+), 4 deletions(-)

diff --git a/Tests/openai-async-image-swiftuiTests/openai_async_image_swiftuiTests.swift b/Tests/openai-async-image-swiftuiTests/openai_async_image_swiftuiTests.swift
index 44051a3..453239c 100644
--- a/Tests/openai-async-image-swiftuiTests/openai_async_image_swiftuiTests.swift
+++ b/Tests/openai-async-image-swiftuiTests/openai_async_image_swiftuiTests.swift
@@ -3,9 +3,6 @@ import XCTest
 
 final class openai_async_image_swiftuiTests: XCTestCase {
     func testExample() throws {
-        // This is an example of a functional test case.
-        // Use XCTAssert and related functions to verify your tests produce the correct
-        // results.
-        // XCTAssertEqual(openai_async_image_swiftui().text, "Hello, World!")
+
     }
 }
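The emptied template test could later be replaced with something minimal that still exercises the public surface added in this series; a hedged sketch (the test name is hypothetical, and it only checks the DalleModel raw values introduced earlier):

    import XCTest
    @testable import openai_async_image_swiftui

    final class DalleModelTests: XCTestCase {
        func testRawValues() {
            XCTAssertEqual(DalleModel.dalle2.rawValue, "dall-e-2")
            XCTAssertEqual(DalleModel.dalle3.rawValue, "dall-e-3")
        }
    }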

From 2d833aa04229d81fbbd8bfde9d8f707949dd79b6 Mon Sep 17 00:00:00 2001
From: Software Engineer <igorshelopaev@gmail.com>
Date: Tue, 10 Dec 2024 12:17:30 +0300
Subject: [PATCH 35/39] Update .gitignore

---
 .gitignore | 1 +
 1 file changed, 1 insertion(+)

diff --git a/.gitignore b/.gitignore
index 330d167..4f20167 100644
--- a/.gitignore
+++ b/.gitignore
@@ -88,3 +88,4 @@ fastlane/test_output
 # https://github.com/johnno1962/injectionforxcode
 
 iOSInjectionProject/
+.DS_Store

From c5c7d0c6e4bcc5359da72ba1c817742a9471649d Mon Sep 17 00:00:00 2001
From: Software Engineer <igorshelopaev@gmail.com>
Date: Tue, 10 Dec 2024 12:17:56 +0300
Subject: [PATCH 36/39] Delete .DS_Store

---
 .DS_Store | Bin 8196 -> 0 bytes
 1 file changed, 0 insertions(+), 0 deletions(-)
 delete mode 100644 .DS_Store

diff --git a/.DS_Store b/.DS_Store
deleted file mode 100644
index c2caeabe0d368f5dec73f7a5b34191c7773ea08c..0000000000000000000000000000000000000000
GIT binary patch
literal 0
HcmV?d00001

literal 8196
zcmeHMJ#SMn6g@5p4WNpQ9YA^#D~jll{WT;KMSxJl0#j*H0wU>)O4_b(XXy`s1%3b{
z6H*6|7(4JWB7OuQCOFr2Tl?ihOo)nW$v%#KuYG*3^BlhbFpcf*60iuM!Yo)l$6`W~
ze#v8L&bM438v3K(TWWW*!Enq{8&m;RKow90Q~_1sUr~T(wxr6GbKmu9sS2n9|D^)*
zd`OrDQ;)er`{-a{uK>gnn{8sfDivblG4+@`WDk>Zu0-c*{1wAEceW?qF7=o@bnY<z
z@?reU#@|qkot@(+)*L2vsHG~P3XCftclQ!%=;0AY(fxhz+nca&kTsj@{cN3Wm9Njf
zzgQ^0TaNp?Gy8`RgLeZj_vy-Sd<{>q%b6cwgdqme{CK@O$tL9S{MzaL&cR#ZF*ep9
zJLWUXF5=}LE@d(r;663kSm|dSziN4~$V|0gHn9c-mJ#>*^&fF>$olf7v*yER%R2B6
z`A)WonAX<{^Y65`Hv8FTkIk}*dECGXuA+roXronF$17Td>!{*3*W1Bkw`#^^3mc40
zhCX&!>&E_b9}E5qWBx~RjXll9g6A2mlH)R(xJj&0_rhG}X~AQj>&dW9h8^Kp82{&N
z>nKX7-=%e7yf9Mt-k-01`D7zCW_7%~rdvf3!vQDbj=5u%9Tl0+N#w$q1eP&tvlovO
zGHzTcAATbO|8Q?jwuqS?o}rv4VV3=rddwY4leFvkFG6Ti1x}y>ryS=+x&ODOzyCjh
zD^tp<fGY5(3Yb!}-E7d>p6V04<djdC*O(=VUGC7ju!w#L@bl3hhO8$v6`#~&?vO1^
P`Xa!}poJ>%R~7gT4|ffT


From ca6f2123bd8800191b8820b197d96fcc14df86b4 Mon Sep 17 00:00:00 2001
From: Igor <igorshelopaev@gmail.com>
Date: Tue, 10 Dec 2024 10:20:50 +0100
Subject: [PATCH 37/39] Update OpenAIAsyncImage.swift

---
 .../OpenAIAsyncImage.swift                    | 24 +++++++++++++------
 1 file changed, 17 insertions(+), 7 deletions(-)

diff --git a/Sources/openai-async-image-swiftui/OpenAIAsyncImage.swift b/Sources/openai-async-image-swiftui/OpenAIAsyncImage.swift
index 12574e5..dc37f41 100644
--- a/Sources/openai-async-image-swiftui/OpenAIAsyncImage.swift
+++ b/Sources/openai-async-image-swiftui/OpenAIAsyncImage.swift
@@ -193,20 +193,30 @@ public extension OpenAIAsyncImage where T == OpenAIDefaultLoader{
 
 // MARK: - File private functions -
 
+/// A function that builds the appropriate view for a given `ImageState`.
+/// - Parameter state: The current state of the image.
+/// - Returns: A SwiftUI view representing the current state of the image.
 @ViewBuilder
-fileprivate func imageTpl(_ state : ImageState) -> some View{
-    switch state{
-        case .loaded(let image) : image.resizable()
-        case .loadError(let error) : Text(error.localizedDescription)
-        case .loading : ProgressView()
+fileprivate func imageTpl(_ state: ImageState) -> some View {
+    switch state {
+    case .loaded(let image):
+        image.resizable()
+    case .loadError(let error):
+        Text(error.localizedDescription)
+    case .loading:
+        ProgressView()
     }
 }
 
+/// Maps an error to a corresponding `AsyncImageErrors` type.
+/// - Parameter error: The error to map, which may be `nil`.
+/// - Returns: An `AsyncImageErrors` value if the error can be mapped; otherwise, `nil`.
 @Sendable
-fileprivate func errorMapper(_ error : Error?) -> AsyncImageErrors?{
-    if error is CancellationError{
+fileprivate func errorMapper(_ error: Error?) -> AsyncImageErrors? {
+    if error is CancellationError {
         return .cancellationError
     }
     
+    // Return nil for other errors
     return nil
 }
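The mapper above only recognises Swift's `CancellationError`. If transport-level cancellations should be folded into the same case, a hypothetical variant inside the package (both the function and `AsyncImageErrors` are internal to the module) could look like this:

    import Foundation

    /// Hypothetical extension of the mapper above — not part of the package.
    /// It additionally treats URLError.cancelled as a cancellation.
    @Sendable
    fileprivate func errorMapperExtended(_ error: Error?) -> AsyncImageErrors? {
        if error is CancellationError { return .cancellationError }
        if let urlError = error as? URLError, urlError.code == .cancelled {
            return .cancellationError
        }
        return nil
    }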

From 8c31114b916b3421407874d4588aaaf2507a1022 Mon Sep 17 00:00:00 2001
From: Igor <igorshelopaev@gmail.com>
Date: Tue, 10 Dec 2024 15:27:27 +0100
Subject: [PATCH 38/39] Update README.md

---
 README.md | 3 +++
 1 file changed, 3 insertions(+)

diff --git a/README.md b/README.md
index b735872..fa41f91 100644
--- a/README.md
+++ b/README.md
@@ -2,6 +2,9 @@
 
 SwiftUI view that asynchronously loads and displays an OpenAI image from open API
 
+### Please star the repository if you believe continuing the development of this package is worthwhile. This will help me understand which package deserves more effort.
+
+
 DALL-E and DALL-E 2 are deep learning models developed by OpenAI to generate digital images from natural language descriptions, called "prompts"
 
 [![](https://img.shields.io/endpoint?url=https%3A%2F%2Fswiftpackageindex.com%2Fapi%2Fpackages%2Fswiftuiux%2Fopenai-async-image-swiftui%2Fbadge%3Ftype%3Dplatforms)](https://swiftpackageindex.com/swiftuiux/openai-async-image-swiftui)

From 89a9bc357f1a0f634ac453edafb982bfcf90dfce Mon Sep 17 00:00:00 2001
From: Igor <igorshelopaev@gmail.com>
Date: Thu, 12 Dec 2024 17:24:56 +0100
Subject: [PATCH 39/39] Update README.md

---
 README.md | 3 ---
 1 file changed, 3 deletions(-)

diff --git a/README.md b/README.md
index fa41f91..f2fba21 100644
--- a/README.md
+++ b/README.md
@@ -4,9 +4,6 @@ SwiftUI view that asynchronously loads and displays an OpenAI image from open AP
 
 ### Please star the repository if you believe continuing the development of this package is worthwhile. This will help me understand which package deserves more effort.
 
-
-DALL-E and DALL-E 2 are deep learning models developed by OpenAI to generate digital images from natural language descriptions, called "prompts"
-
 [![](https://img.shields.io/endpoint?url=https%3A%2F%2Fswiftpackageindex.com%2Fapi%2Fpackages%2Fswiftuiux%2Fopenai-async-image-swiftui%2Fbadge%3Ftype%3Dplatforms)](https://swiftpackageindex.com/swiftuiux/openai-async-image-swiftui)
 
 ## [Example for the package](https://github.com/swiftuiux/openai-async-image-swiftui-example)