From 454952b6687867cba729cd399332a149b7b94b85 Mon Sep 17 00:00:00 2001
From: Philipp Zagar
Date: Fri, 3 Nov 2023 01:22:53 -0400
Subject: [PATCH] Linter

---
 Package.swift                                              | 6 +++---
 Sources/SpeziLLMLocal/LLMLocalRunnerSetupTask.swift        | 2 +-
 .../LocalLLMExecutionDemo/LocalLLMChatView.swift           | 5 +----
 .../LocalLLMExecutionDemo/Onboarding/LLMDownloadView.swift | 2 +-
 4 files changed, 6 insertions(+), 9 deletions(-)

diff --git a/Package.swift b/Package.swift
index cab5204..fcc96d7 100644
--- a/Package.swift
+++ b/Package.swift
@@ -27,7 +27,7 @@ let package = Package(
     ],
     dependencies: [
         .package(url: "https://github.com/MacPaw/OpenAI", .upToNextMinor(from: "0.2.4")),
-        //.package(url: "https://github.com/StanfordBDHG/llama.cpp", .upToNextMinor(from: "0.1470.0")),
+        // .package(url: "https://github.com/StanfordBDHG/llama.cpp", .upToNextMinor(from: "0.1470.0")),
         .package(url: "https://github.com/ggerganov/llama.cpp", branch: "b1470"),
         .package(url: "https://github.com/StanfordSpezi/Spezi", .upToNextMinor(from: "0.7.0")),
         .package(url: "https://github.com/StanfordSpezi/SpeziStorage", .upToNextMinor(from: "0.4.0")),
@@ -55,7 +55,7 @@ let package = Package(
             name: "SpeziLLM",
             dependencies: [
                 .target(name: "SpeziOpenAI"),
-                .product(name: "Spezi", package: "Spezi"),
+                .product(name: "Spezi", package: "Spezi")
             ]
         ),
         .target(
@@ -69,7 +69,7 @@ let package = Package(
         .target(
             name: "SpeziLLMLocalDownload",
             dependencies: [
-                .product(name: "SpeziOnboarding", package: "SpeziOnboarding"),
+                .product(name: "SpeziOnboarding", package: "SpeziOnboarding")
             ]
         ),
         .testTarget(
diff --git a/Sources/SpeziLLMLocal/LLMLocalRunnerSetupTask.swift b/Sources/SpeziLLMLocal/LLMLocalRunnerSetupTask.swift
index c24bbe0..2d0f0a5 100644
--- a/Sources/SpeziLLMLocal/LLMLocalRunnerSetupTask.swift
+++ b/Sources/SpeziLLMLocal/LLMLocalRunnerSetupTask.swift
@@ -6,8 +6,8 @@
 // SPDX-License-Identifier: MIT
 //
 
-import SpeziLLM
 import llama
+import SpeziLLM
 
 
 public class LLMLocalRunnerSetupTask: LLMRunnerSetupTask {
diff --git a/Tests/LocalLLMExecutionDemo/LocalLLMExecutionDemo/LocalLLMChatView.swift b/Tests/LocalLLMExecutionDemo/LocalLLMExecutionDemo/LocalLLMChatView.swift
index e32368c..497f4b3 100644
--- a/Tests/LocalLLMExecutionDemo/LocalLLMExecutionDemo/LocalLLMChatView.swift
+++ b/Tests/LocalLLMExecutionDemo/LocalLLMExecutionDemo/LocalLLMChatView.swift
@@ -20,13 +20,10 @@ struct LocalLLMChatView: View {
             contextParameters: .init(nCtx: 512)     /// Sets the context size of the model at 512 tokens
     )
 
-    // TODO
-    private let model2 = LLMMock()
-
 
     var body: some View {
         LLMChatView(
-            model: model2, // TODO
+            model: model,
             initialSystemPrompt: .init(
                 role: .assistant,
                 content: "Hello! I'm a locally executed Llama 2 7B model, enabled by the Spezi ecosystem!"
diff --git a/Tests/LocalLLMExecutionDemo/LocalLLMExecutionDemo/Onboarding/LLMDownloadView.swift b/Tests/LocalLLMExecutionDemo/LocalLLMExecutionDemo/Onboarding/LLMDownloadView.swift
index 43c183c..d27e417 100644
--- a/Tests/LocalLLMExecutionDemo/LocalLLMExecutionDemo/Onboarding/LLMDownloadView.swift
+++ b/Tests/LocalLLMExecutionDemo/LocalLLMExecutionDemo/Onboarding/LLMDownloadView.swift
@@ -6,8 +6,8 @@
 // SPDX-License-Identifier: MIT
 //
 
-import SpeziOnboarding
 import SpeziLLMLocalDownload
+import SpeziOnboarding
 import SpeziViews
 import SwiftUI