From 258bca8f2951757ab03a7377d96b2ae9581e7b04 Mon Sep 17 00:00:00 2001
From: Joan Disho
Date: Fri, 20 Dec 2024 11:10:10 +0100
Subject: [PATCH 1/8] Enable strict concurrency check in all the targets

---
 Package.swift | 24 ++++++++++++++++++++++++
 1 file changed, 24 insertions(+)

diff --git a/Package.swift b/Package.swift
index bd3afa8..ec952eb 100644
--- a/Package.swift
+++ b/Package.swift
@@ -10,6 +10,12 @@
 
 import PackageDescription
 
+#if swift(<6)
+let swiftConcurrency: SwiftSetting = .enableExperimentalFeature("StrictConcurrency")
+#else
+let swiftConcurrency: SwiftSetting = .enableUpcomingFeature("StrictConcurrency")
+#endif
+
 
 let package = Package(
     name: "SpeziLLM",
@@ -45,6 +51,9 @@ let package = Package(
             .product(name: "Spezi", package: "Spezi"),
             .product(name: "SpeziChat", package: "SpeziChat"),
             .product(name: "SpeziViews", package: "SpeziViews")
+            ],
+            swiftSettings: [
+                swiftConcurrency
             ]
         ),
         .target(
@@ -60,6 +69,9 @@
             .product(name: "MLXRandom", package: "mlx-swift"),
             .product(name: "Transformers", package: "swift-transformers"),
             .product(name: "LLM", package: "mlx-swift-examples")
+            ],
+            swiftSettings: [
+                swiftConcurrency
             ]
         ),
         .target(
@@ -69,6 +81,9 @@
             .product(name: "SpeziViews", package: "SpeziViews"),
             .target(name: "SpeziLLMLocal"),
             .product(name: "LLM", package: "mlx-swift-examples")
+            ],
+            swiftSettings: [
+                swiftConcurrency
             ]
         ),
         .target(
@@ -81,6 +96,9 @@
             .product(name: "SpeziChat", package: "SpeziChat"),
             .product(name: "SpeziSecureStorage", package: "SpeziStorage"),
             .product(name: "SpeziOnboarding", package: "SpeziOnboarding")
+            ],
+            swiftSettings: [
+                swiftConcurrency
             ]
         ),
         .target(
@@ -89,12 +107,18 @@
             .target(name: "SpeziLLM"),
             .product(name: "Spezi", package: "Spezi"),
             .product(name: "OpenAI", package: "OpenAI")
+            ],
+            swiftSettings: [
+                swiftConcurrency
             ]
         ),
         .testTarget(
             name: "SpeziLLMTests",
             dependencies: [
                 .target(name: "SpeziLLMOpenAI")
+            ],
+            swiftSettings: [
+                swiftConcurrency
             ]
         )
     ]

From aeeadd89081ced820e7878bb97cf90e5ba9e3665 Mon Sep 17 00:00:00 2001
From: Joan Disho
Date: Fri, 20 Dec 2024 12:10:13 +0100
Subject: [PATCH 2/8] Fix concurrency warnings in SpeziLLM & SpeziLLMFog

---
 Sources/SpeziLLM/Helpers/LLMContext+Chat.swift | 1 +
 Sources/SpeziLLM/LLMPlatformState.swift        | 2 +-
 Sources/SpeziLLM/LLMSessionProvider.swift      | 1 +
 Sources/SpeziLLM/Models/LLMContextEntity.swift | 6 +++---
 4 files changed, 6 insertions(+), 4 deletions(-)

diff --git a/Sources/SpeziLLM/Helpers/LLMContext+Chat.swift b/Sources/SpeziLLM/Helpers/LLMContext+Chat.swift
index 29e2ceb..fe513ab 100644
--- a/Sources/SpeziLLM/Helpers/LLMContext+Chat.swift
+++ b/Sources/SpeziLLM/Helpers/LLMContext+Chat.swift
@@ -6,6 +6,7 @@
 // SPDX-License-Identifier: MIT
 //
 
+@preconcurrency
 import SpeziChat
 
 
diff --git a/Sources/SpeziLLM/LLMPlatformState.swift b/Sources/SpeziLLM/LLMPlatformState.swift
index 0acbd3a..4af098b 100644
--- a/Sources/SpeziLLM/LLMPlatformState.swift
+++ b/Sources/SpeziLLM/LLMPlatformState.swift
@@ -10,7 +10,7 @@
 /// Describes the current state of the ``LLMPlatform`` which is responsible for sending ``LLMSchema``s to execution via ``LLMSession``s.
 ///
 /// The ``LLMPlatformState`` is quite minimal with only ``LLMPlatformState/idle`` and ``LLMPlatformState/processing`` states.
-public enum LLMPlatformState {
+public enum LLMPlatformState: Sendable {
     /// Indicates that the ``LLMPlatform`` is currently idle and doesn't execute any ``LLMSession``s.
     case idle
     /// Indicates that the ``LLMPlatform`` is currently processing and executing ``LLMSession``s.
diff --git a/Sources/SpeziLLM/LLMSessionProvider.swift b/Sources/SpeziLLM/LLMSessionProvider.swift
index f28893a..6eccbc3 100644
--- a/Sources/SpeziLLM/LLMSessionProvider.swift
+++ b/Sources/SpeziLLM/LLMSessionProvider.swift
@@ -45,6 +45,7 @@ public struct _LLMSessionProvider: DynamicProperty { // s
     }
 
     /// Creates a `Binding` to the ``LLMSession``that one can pass around. Useful for passing the ``LLMSession`` as a `Binding` to the ``LLMChatView``.
+    @MainActor
     public var projectedValue: Binding {
         Binding {
             wrappedValue
diff --git a/Sources/SpeziLLM/Models/LLMContextEntity.swift b/Sources/SpeziLLM/Models/LLMContextEntity.swift
index a842ad1..7353a00 100644
--- a/Sources/SpeziLLM/Models/LLMContextEntity.swift
+++ b/Sources/SpeziLLM/Models/LLMContextEntity.swift
@@ -14,9 +14,9 @@ import Foundation
 /// A ``LLMContextEntity`` can be thought of as a single message entity within a ``LLMContext``
 /// It consists of a ``LLMContextEntity/Role``, a unique identifier, a timestamp in the form of a `Date` as well as an `String`-based ``LLMContextEntity/content`` property which can contain Markdown-formatted text.
 /// Furthermore, the ``LLMContextEntity/complete`` flag indicates if the current state of the ``LLMContextEntity`` is final and the content will not be updated anymore.
-public struct LLMContextEntity: Codable, Equatable, Hashable, Identifiable {
+public struct LLMContextEntity: Codable, Equatable, Hashable, Identifiable, Sendable {
     /// Represents a tool call by the LLM, including its parameters
-    public struct ToolCall: Codable, Equatable, Hashable {
+    public struct ToolCall: Codable, Equatable, Hashable, Sendable {
         /// The ID of the function call, uniquely identifying the specific function call and matching the response to it.
         public let id: String
         /// The name of the function call.
@@ -39,7 +39,7 @@ public struct LLMContextEntity: Codable, Equatable, Hashable, Identifiable {
     }
 
     /// Indicates which ``LLMContextEntity/Role`` is associated with a ``LLMContextEntity``.
-    public enum Role: Codable, Equatable, Hashable {
+    public enum Role: Codable, Equatable, Hashable, Sendable {
         case user
         case assistant(toolCalls: [ToolCall] = [])
         case system

From f358d3e83b2bea7d0ac2a003710bc14e5844e6b2 Mon Sep 17 00:00:00 2001
From: jdisho
Date: Fri, 20 Dec 2024 21:53:30 +0100
Subject: [PATCH 3/8] Resolve SwiftLint warnings

---
 Sources/SpeziLLM/Helpers/LLMContext+Chat.swift | 3 +--
 Sources/SpeziLLM/LLMSessionProvider.swift      | 3 +--
 2 files changed, 2 insertions(+), 4 deletions(-)

diff --git a/Sources/SpeziLLM/Helpers/LLMContext+Chat.swift b/Sources/SpeziLLM/Helpers/LLMContext+Chat.swift
index fe513ab..34ba4ce 100644
--- a/Sources/SpeziLLM/Helpers/LLMContext+Chat.swift
+++ b/Sources/SpeziLLM/Helpers/LLMContext+Chat.swift
@@ -6,8 +6,7 @@
 // SPDX-License-Identifier: MIT
 //
 
-@preconcurrency
-import SpeziChat
+@preconcurrency import SpeziChat
 
 
 extension LLMContext {
diff --git a/Sources/SpeziLLM/LLMSessionProvider.swift b/Sources/SpeziLLM/LLMSessionProvider.swift
index 6eccbc3..e41a27a 100644
--- a/Sources/SpeziLLM/LLMSessionProvider.swift
+++ b/Sources/SpeziLLM/LLMSessionProvider.swift
@@ -45,8 +45,7 @@ public struct _LLMSessionProvider: DynamicProperty { // s
     }
 
     /// Creates a `Binding` to the ``LLMSession``that one can pass around. Useful for passing the ``LLMSession`` as a `Binding` to the ``LLMChatView``.
-    @MainActor
-    public var projectedValue: Binding {
+    @MainActor public var projectedValue: Binding {
         Binding {
             wrappedValue
         } set: {

From ec16833eb70aaacf29ac3afc53119eb9da7ef778 Mon Sep 17 00:00:00 2001
From: jdisho
Date: Fri, 20 Dec 2024 22:15:57 +0100
Subject: [PATCH 4/8] Resolve concurrency warning in LLMFog

---
 Sources/SpeziLLMFog/LLMFogSession+Generation.swift | 11 ++---------
 1 file changed, 2 insertions(+), 9 deletions(-)

diff --git a/Sources/SpeziLLMFog/LLMFogSession+Generation.swift b/Sources/SpeziLLMFog/LLMFogSession+Generation.swift
index c37a4a5..03447df 100644
--- a/Sources/SpeziLLMFog/LLMFogSession+Generation.swift
+++ b/Sources/SpeziLLMFog/LLMFogSession+Generation.swift
@@ -12,14 +12,7 @@ import SpeziChat
 
 
 extension LLMFogSession {
-    private static let modelNotFoundRegex: Regex = {
-        guard let regex = try? Regex("model '([\\w:]+)' not found, try pulling it first") else {
-            preconditionFailure("SpeziLLMFog: Error Regex could not be parsed")
-        }
-
-        return regex
-    }()
-
+    private static let modelNotFoundRegex = "model '([\\w:]+)' not found, try pulling it first"
 
     /// Based on the input prompt, generate the output via some OpenAI API, e.g., Ollama.
     ///
@@ -61,7 +54,7 @@
                 }
             } catch let error as APIErrorResponse {
                 // Sadly, there's no better way to check the error messages as there aren't any Ollama error codes as with the OpenAI API
-                if error.error.message.contains(Self.modelNotFoundRegex) {
+                if error.error.message.range(of: Self.modelNotFoundRegex, options: .regularExpression) != nil {
                     Self.logger.error("SpeziLLMFog: LLM model type could not be accessed on fog node - \(error.error.message)")
                     await finishGenerationWithError(LLMFogError.modelAccessError(error), on: continuation)
                 } else if error.error.code == "401" || error.error.code == "403" {

From a8977faf73d88ab37c4b548d1f3eac88ec62b77c Mon Sep 17 00:00:00 2001
From: jdisho
Date: Sun, 22 Dec 2024 00:09:43 +0100
Subject: [PATCH 5/8] Remove redundant code from Package.swift

---
 Package.swift | 27 +--------------------------
 1 file changed, 1 insertion(+), 26 deletions(-)

diff --git a/Package.swift b/Package.swift
index ec952eb..07795bc 100644
--- a/Package.swift
+++ b/Package.swift
@@ -1,4 +1,4 @@
-// swift-tools-version:5.9
+// swift-tools-version:6.0
 //
 // This source file is part of the Stanford Spezi open source project
 //
@@ -10,13 +10,6 @@
 
 import PackageDescription
 
-#if swift(<6)
-let swiftConcurrency: SwiftSetting = .enableExperimentalFeature("StrictConcurrency")
-#else
-let swiftConcurrency: SwiftSetting = .enableUpcomingFeature("StrictConcurrency")
-#endif
-
-
 let package = Package(
     name: "SpeziLLM",
     defaultLocalization: "en",
@@ -51,9 +44,6 @@ let package = Package(
             .product(name: "Spezi", package: "Spezi"),
             .product(name: "SpeziChat", package: "SpeziChat"),
             .product(name: "SpeziViews", package: "SpeziViews")
-            ],
-            swiftSettings: [
-                swiftConcurrency
             ]
         ),
         .target(
@@ -69,9 +59,6 @@
             .product(name: "MLXRandom", package: "mlx-swift"),
             .product(name: "Transformers", package: "swift-transformers"),
             .product(name: "LLM", package: "mlx-swift-examples")
-            ],
-            swiftSettings: [
-                swiftConcurrency
             ]
         ),
         .target(
@@ -81,9 +68,6 @@
             .product(name: "SpeziViews", package: "SpeziViews"),
             .target(name: "SpeziLLMLocal"),
             .product(name: "LLM", package: "mlx-swift-examples")
-            ],
-            swiftSettings: [
-                swiftConcurrency
             ]
         ),
         .target(
@@ -96,9 +80,6 @@
             .product(name: "SpeziChat", package: "SpeziChat"),
             .product(name: "SpeziSecureStorage", package: "SpeziStorage"),
             .product(name: "SpeziOnboarding", package: "SpeziOnboarding")
-            ],
-            swiftSettings: [
-                swiftConcurrency
             ]
         ),
         .target(
@@ -107,18 +88,12 @@
             .target(name: "SpeziLLM"),
             .product(name: "Spezi", package: "Spezi"),
             .product(name: "OpenAI", package: "OpenAI")
-            ],
-            swiftSettings: [
-                swiftConcurrency
             ]
         ),
         .testTarget(
             name: "SpeziLLMTests",
             dependencies: [
                 .target(name: "SpeziLLMOpenAI")
-            ],
-            swiftSettings: [
-                swiftConcurrency
             ]
         )
     ]

From 6618578e52e21fdd681f8231304a5638ab9b04bd Mon Sep 17 00:00:00 2001
From: jdisho
Date: Sun, 22 Dec 2024 00:28:49 +0100
Subject: [PATCH 6/8] Upgrade SpeziChat to 0.2.2

---
 Package.swift                                  | 2 +-
 Sources/SpeziLLM/Helpers/LLMContext+Chat.swift | 2 +-
 2 files changed, 2 insertions(+), 2 deletions(-)

diff --git a/Package.swift b/Package.swift
index 07795bc..b9c32e1 100644
--- a/Package.swift
+++ b/Package.swift
@@ -34,7 +34,7 @@
         .package(url: "https://github.com/StanfordSpezi/SpeziFoundation", from: "2.0.0"),
         .package(url: "https://github.com/StanfordSpezi/SpeziStorage", from: "1.0.2"),
         .package(url: "https://github.com/StanfordSpezi/SpeziOnboarding", from: "1.1.1"),
-        .package(url: "https://github.com/StanfordSpezi/SpeziChat", .upToNextMinor(from: "0.2.1")),
+        .package(url: "https://github.com/StanfordSpezi/SpeziChat", .upToNextMinor(from: "0.2.2")),
         .package(url: "https://github.com/StanfordSpezi/SpeziViews", from: "1.3.1")
     ],
     targets: [
diff --git a/Sources/SpeziLLM/Helpers/LLMContext+Chat.swift b/Sources/SpeziLLM/Helpers/LLMContext+Chat.swift
index 34ba4ce..29e2ceb 100644
--- a/Sources/SpeziLLM/Helpers/LLMContext+Chat.swift
+++ b/Sources/SpeziLLM/Helpers/LLMContext+Chat.swift
@@ -6,7 +6,7 @@
 // SPDX-License-Identifier: MIT
 //
 
-@preconcurrency import SpeziChat
+import SpeziChat
 
 
 extension LLMContext {

From 451ea4001d23cc488da5cc095b7e30afa7427cfc Mon Sep 17 00:00:00 2001
From: jdisho
Date: Sun, 22 Dec 2024 01:42:19 +0100
Subject: [PATCH 7/8] Conform LLMState to Sendable

---
 Sources/SpeziLLM/Models/LLMState.swift | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)

diff --git a/Sources/SpeziLLM/Models/LLMState.swift b/Sources/SpeziLLM/Models/LLMState.swift
index ca2c435..0449d4d 100644
--- a/Sources/SpeziLLM/Models/LLMState.swift
+++ b/Sources/SpeziLLM/Models/LLMState.swift
@@ -11,7 +11,7 @@ import Foundation
 /// Describes possible states that the ``LLMSession`` can be in.
 ///
 /// Based on the ``LLMState``, `SpeziLLM` performs proper actions on the model as well as state management.
-public enum LLMState: CustomStringConvertible, Equatable {
+public enum LLMState: CustomStringConvertible, Equatable, Sendable {
     /// The Spezi ``LLMSession`` is allocated, but the underlying model has not yet been initialized.
     case uninitialized
     /// The Spezi ``LLMSession`` is in the process of being initialized.
From ae6eacd7fb8cf81f2d0240cb42fc226524564a89 Mon Sep 17 00:00:00 2001
From: jdisho
Date: Sun, 22 Dec 2024 01:43:19 +0100
Subject: [PATCH 8/8] Make LLMLocalDownloadManager @unchecked Sendable

---
 .../LLMLocalDownloadManager.swift | 36 +++++++++++++++++-------------------
 1 file changed, 17 insertions(+), 19 deletions(-)

diff --git a/Sources/SpeziLLMLocalDownload/LLMLocalDownloadManager.swift b/Sources/SpeziLLMLocalDownload/LLMLocalDownloadManager.swift
index a022e2b..e2e2415 100644
--- a/Sources/SpeziLLMLocalDownload/LLMLocalDownloadManager.swift
+++ b/Sources/SpeziLLMLocalDownload/LLMLocalDownloadManager.swift
@@ -23,7 +23,7 @@ import SpeziViews
 /// is of type ``LLMLocalDownloadManager/DownloadState``, containing states such as ``LLMLocalDownloadManager/DownloadState/downloading(progress:)``
 /// which includes the progress of the download or ``LLMLocalDownloadManager/DownloadState/downloaded(storageUrl:)`` which indicates that the download has finished.
 @Observable
-public final class LLMLocalDownloadManager: NSObject {
+public final class LLMLocalDownloadManager: NSObject, @unchecked Sendable {
     /// An enum containing all possible states of the ``LLMLocalDownloadManager``.
     public enum DownloadState: Equatable {
         case idle
@@ -79,11 +79,10 @@
     }
 
     /// Starts a `URLSessionDownloadTask` to download the specified model.
+    @MainActor
     public func startDownload() async {
         if modelExist {
-            Task { @MainActor in
-                self.state = .downloaded
-            }
+            state = .downloaded
             return
         }
 
@@ -91,37 +90,36 @@
         downloadTask = Task(priority: .userInitiated) {
             do {
                 try await downloadWithHub()
-                await MainActor.run {
-                    self.state = .downloaded
-                }
+                state = .downloaded
             } catch {
-                await MainActor.run {
-                    self.state = .error(
-                        AnyLocalizedError(
-                            error: error,
-                            defaultErrorDescription: LocalizedStringResource("LLM_DOWNLOAD_FAILED_ERROR", bundle: .atURL(from: .module))
+                state = .error(
+                    AnyLocalizedError(
+                        error: error,
+                        defaultErrorDescription: LocalizedStringResource(
+                            "LLM_DOWNLOAD_FAILED_ERROR",
+                            bundle: .atURL(from: .module)
                         )
                     )
-                }
+                )
             }
         }
     }
 
     /// Cancels the download of a specified model via a `URLSessionDownloadTask`.
+    @MainActor
     public func cancelDownload() async {
         downloadTask?.cancel()
-        await MainActor.run {
-            self.state = .idle
-        }
+        state = .idle
     }
-
-    @MainActor
+
     private func downloadWithHub() async throws {
         let repo = Hub.Repo(id: model.hubID)
         let modelFiles = ["*.safetensors", "config.json"]
 
         try await HubApi.shared.snapshot(from: repo, matching: modelFiles) { progress in
-            self.state = .downloading(progress: progress)
+            Task { @MainActor in
+                self.state = .downloading(progress: progress)
+            }
         }
     }
 }
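
Taken together, the series applies one pattern to satisfy strict concurrency checking: simple public value types such as LLMPlatformState, LLMContextEntity, and LLMState declare Sendable explicitly (patches 2 and 7), since Swift does not synthesize that conformance for public types, while the stateful reference type LLMLocalDownloadManager opts into @unchecked Sendable and funnels every mutation of its state through the main actor (patch 8). The sketch below condenses that pattern for illustration only; DownloadState, DownloadSketch, and reportProgress(_:) are hypothetical stand-ins, not the real SpeziLLM API.

import Foundation

// Value type: the explicit Sendable conformance lets values cross actor and
// task boundaries without strict-concurrency diagnostics.
enum DownloadState: Equatable, Sendable {
    case idle
    case downloading(progress: Double)
    case downloaded
}

// Reference type with mutable state: `@unchecked Sendable` is a manual promise
// that access stays data-race free; here the promise is kept by only touching
// `state` on the main actor, mirroring the patched download manager.
final class DownloadSketch: @unchecked Sendable {
    private(set) var state: DownloadState = .idle

    @MainActor
    func markDownloaded() {
        state = .downloaded
    }

    // Called from an arbitrary context, e.g. a progress callback: hop onto the
    // main actor before mutating `state`, as the patched progress handler does.
    func reportProgress(_ fraction: Double) {
        Task { @MainActor in
            self.state = .downloading(progress: fraction)
        }
    }
}

One reason to keep such a class a plain @unchecked Sendable type instead of an actor is to preserve a synchronous, observable API for SwiftUI views; either way, the code now compiles under the Swift 6 language mode implied by the tools-version bump in patch 5.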