diff --git a/Sidekick.xcodeproj/project.pbxproj b/Sidekick.xcodeproj/project.pbxproj
index 193dfea..eed75c4 100644
--- a/Sidekick.xcodeproj/project.pbxproj
+++ b/Sidekick.xcodeproj/project.pbxproj
@@ -581,6 +581,13 @@
 			path = "GPU Info";
 			sourceTree = "<group>";
 		};
+		2048941B2CE76ABD00473107 /* View Modifiers */ = {
+			isa = PBXGroup;
+			children = (
+			);
+			path = "View Modifiers";
+			sourceTree = "<group>";
+		};
 		204C79542CB765C100EED58E /* Profile */ = {
 			isa = PBXGroup;
 			children = (
@@ -762,6 +769,7 @@
 				20571F752CBD5FF4004F3344 /* Settings */,
 				20B9E6132CB1458500BA0AD2 /* Misc */,
 				20E48C272CB560A30085E7D6 /* Styles */,
+				2048941B2CE76ABD00473107 /* View Modifiers */,
 			);
 			path = Views;
 			sourceTree = "<group>";
diff --git a/Sidekick/Localizable.xcstrings b/Sidekick/Localizable.xcstrings
index f7fc21c..ecca418 100644
--- a/Sidekick/Localizable.xcstrings
+++ b/Sidekick/Localizable.xcstrings
@@ -1191,12 +1191,12 @@
         }
       }
     },
-    "Remote Model on Server" : {
+    "Remote Model: " : {
       "localizations" : {
         "zh-Hans" : {
          "stringUnit" : {
            "state" : "translated",
-            "value" : "远程模型"
+            "value" : "远端模型: "
          }
        }
      }
@@ -1301,6 +1301,16 @@
        }
      }
    },
+    "Save as Image" : {
+      "localizations" : {
+        "zh-Hans" : {
+          "stringUnit" : {
+            "state" : "translated",
+            "value" : "导出图片"
+          }
+        }
+      }
+    },
     "Say hi" : {
       "localizations" : {
         "zh-Hans" : {
@@ -1641,12 +1651,12 @@
         }
       }
     },
-    "The endpoint on the server used for inference. This endpoint must be accessible from this device, and provide an OpenAI compatible API. (e.g. http://localhost:11434, where http://localhost:11434/v1/chat/completions is accessible)" : {
+    "The endpoint on the server used for inference. This endpoint must be accessible from this device, and provide an OpenAI compatible API. (e.g. http://localhost:11434, where http://localhost:11434/v1/chat/completions is accessible)\n\nTo ensure privacy and security of your data, host your own server." : {
       "localizations" : {
         "zh-Hans" : {
           "stringUnit" : {
             "state" : "translated",
-            "value" : "用于推理的服务器上的端点。此端点必须可从此设备访问,并提供与 OpenAI 兼容的 API。 (例如 http://localhost:11434,其中 http://localhost:11434/v1/chat/completions 可被访问)"
+            "value" : "用于推理的服务器上的端点。此端点必须可从此设备访问,并提供与 OpenAI 兼容的 API。 (例如 http://localhost:11434,其中 http://localhost:11434/v1/chat/completions 可被访问)\n\n为保障隐私和数据安全,我们建议您托管自己的服务器。"
           }
         }
       }
diff --git a/Sidekick/Logic/Types/Conversation/Message.swift b/Sidekick/Logic/Types/Conversation/Message.swift
index 92b00f3..16fcfb2 100644
--- a/Sidekick/Logic/Types/Conversation/Message.swift
+++ b/Sidekick/Logic/Types/Conversation/Message.swift
@@ -14,7 +14,8 @@ public struct Message: Identifiable, Codable, Hashable {
 	init(
 		text: String,
 		sender: Sender,
-		usedServer: Bool? = nil
+		model: String? = nil,
+		usedServer: Bool = false
 	) {
 		self.id = UUID()
 		self.text = text.replacingOccurrences(
@@ -29,13 +30,13 @@
 		self.startTime = .now
 		self.lastUpdated = .now
 		self.outputEnded = false
+		var modelName: String = model ?? String(
+			localized: "Unknown"
+		)
 		if usedServer == true {
-			self.model = String(localized: "Remote Model on Server")
-		} else {
-			self.model = Settings.modelUrl?.lastPathComponent ?? String(
-				localized: "Unknown"
-			)
+			modelName = String(localized: "Remote Model: ") + modelName
 		}
+		self.model = modelName
 	}
 
 	/// Stored property for `Identifiable` conformance
diff --git a/Sidekick/Logic/llama.cpp/LlamaServer.swift b/Sidekick/Logic/llama.cpp/LlamaServer.swift
index 07a9eda..87b5cae 100644
--- a/Sidekick/Logic/llama.cpp/LlamaServer.swift
+++ b/Sidekick/Logic/llama.cpp/LlamaServer.swift
@@ -45,16 +45,20 @@ public actor LlamaServer {
 	private var process: Process = Process()
 
 	/// Function to get path to llama-server
-	private func url(_ path: String) async -> URL {
+	private func url(_ path: String) async -> (
+		url: URL,
+		usingRemoteServer: Bool
+	) {
 		// Check endpoint
 		let endpoint: String = InferenceSettings.endpoint
 		let urlString: String
-		if await !Self.remoteServerIsReachable() || !InferenceSettings.useServer {
+		let notUsingServer: Bool = await !Self.remoteServerIsReachable() || !InferenceSettings.useServer
+		if notUsingServer {
 			urlString = "\(scheme)://\(host):\(port)\(path)"
 		} else {
 			urlString = "\(endpoint)\(path)"
 		}
-		return URL(string: urlString)!
+		return (URL(string: urlString)!, !notUsingServer)
 	}
 
 	/// Function to check if the remote server is reachable
@@ -65,12 +69,19 @@
 		let endpointUrl: URL = URL(
 			string: "\(InferenceSettings.endpoint)/health"
 		)!
-		let serverHealth = ServerHealth()
-		await serverHealth.updateURL(endpointUrl)
-		await serverHealth.check()
-		let score: Double = await serverHealth.score
-		let serverIsHealthy: Bool = score > 0.25
-		return serverIsHealthy
+		do {
+			let response: (data: Data, URLResponse) = try await URLSession.shared.data(
+				from: endpointUrl
+			)
+			let decoder: JSONDecoder = JSONDecoder()
+			let healthStatus: HealthResponse = try decoder.decode(
+				HealthResponse.self,
+				from: response.data
+			)
+			return healthStatus.isHealthy
+		} catch {
+			return false
+		}
 	}
 
 	/// Function to start a monitor process that will terminate the server when our app dies
@@ -99,10 +110,6 @@
 
 	/// Function to start the `llama-server` process
 	private func startServer() async throws {
-		// If a server is used, exit
-		if await Self.remoteServerIsReachable() && InferenceSettings.useServer {
-			return
-		}
 		// If server is running, exit
 		guard !process.isRunning, let modelPath = self.modelPath else { return }
 		await stopServer()
@@ -179,8 +186,12 @@
 		progressHandler: (@Sendable (String) -> Void)? = nil
 	) async throws -> CompleteResponse {
+		let rawUrl = await self.url("/v1/chat/completions")
+
 		let start: CFAbsoluteTime = CFAbsoluteTimeGetCurrent()
 
-		try await startServer()
+		if !rawUrl.usingRemoteServer {
+			try await startServer()
+		}
 
 		// Hit localhost for completion
 		async let params = ChatParameters(
@@ -188,13 +199,16 @@
 			systemPrompt: systemPrompt,
 			similarityIndex: similarityIndex
 		)
-		var request = await URLRequest(
-			url: url("/v1/chat/completions")
+		var request = URLRequest(
+			url: rawUrl.url
 		)
 		request.httpMethod = "POST"
 		request.setValue("application/json", forHTTPHeaderField: "Content-Type")
 		request.setValue("text/event-stream", forHTTPHeaderField: "Accept")
 		request.setValue("keep-alive", forHTTPHeaderField: "Connection")
+		if rawUrl.usingRemoteServer {
+			request.setValue("nil", forHTTPHeaderField: "ngrok-skip-browser-warning")
+		}
 		request.httpBody = await params.toJSON().data(using: .utf8)
 		// Use EventSource to receive server sent events
 		eventSource = EventSource(request: request)
@@ -265,7 +279,8 @@
 			responseStartSeconds: responseDiff,
 			predictedPerSecond: Double(tokens) / generationTime,
 			modelName: modelName,
-			nPredicted: tokens
+			nPredicted: tokens,
+			usedServer: rawUrl.usingRemoteServer
 		)
 	}
 
@@ -280,7 +295,7 @@
 		serverErrorMessage = ""
 
 		let serverHealth = ServerHealth()
-		await serverHealth.updateURL(url("/health"))
+		await serverHealth.updateURL(url("/health").url)
 		await serverHealth.check()
 
 		var timeout = 30
@@ -302,6 +317,14 @@
 		}
 	}
 
+	struct HealthResponse: Codable {
+
+		var status: String
+
+		var isHealthy: Bool { self.status == "ok" }
+
+	}
+
 	struct StreamMessage: Codable {
 		let content: String?
 	}
@@ -332,6 +355,7 @@
 		var predictedPerSecond: Double?
 		var modelName: String?
 		var nPredicted: Int?
+		var usedServer: Bool
 	}
 
 }
diff --git a/Sidekick/Views/Conversation/Controls/Input Field/PromptInputField.swift b/Sidekick/Views/Conversation/Controls/Input Field/PromptInputField.swift
index ecb7309..422dd91 100644
--- a/Sidekick/Views/Conversation/Controls/Input Field/PromptInputField.swift
+++ b/Sidekick/Views/Conversation/Controls/Input Field/PromptInputField.swift
@@ -145,8 +145,6 @@ struct PromptInputField: View {
 	private func getResponse(
 		tempResources: [TemporaryResource]
 	) async {
-		// Check if remote server was used
-		let usedServer: Bool = await LlamaServer.remoteServerIsReachable() && InferenceSettings.useServer
 		// If processing, use recursion to update
 		if (model.status == .processing || model.status == .coldProcessing) {
 			Task {
@@ -201,7 +199,8 @@
 		var responseMessage: Message = Message(
 			text: "",
 			sender: .assistant,
-			usedServer: usedServer
+			model: response.modelName,
+			usedServer: response.usedServer
 		)
 		responseMessage.update(
 			newText: response.text,
diff --git a/Sidekick/Views/Conversation/Messages/Message/MessageBackgroundView.swift b/Sidekick/Views/Conversation/Messages/Message/MessageBackgroundView.swift
index 12f8bee..7252802 100644
--- a/Sidekick/Views/Conversation/Messages/Message/MessageBackgroundView.swift
+++ b/Sidekick/Views/Conversation/Messages/Message/MessageBackgroundView.swift
@@ -9,34 +9,36 @@ import SwiftUI
 
 struct MessageBackgroundView: View {
 	
-	@Environment(\.colorScheme) private var colorScheme
+	private let cornerRadius: CGFloat = 13
+	private let borderWidth: CGFloat = 0.5
 	
-	var shadowColor: SwiftUI.Color {
-		return colorScheme == .dark ? .white : .black
+	var body: some View {
+		unevenRoundedRectangle(cornerRadius)
+			.fill(
+				Color(nsColor: .textBackgroundColor)
+			)
+			.padding(borderWidth)
+			.background {
+				unevenRoundedRectangle(cornerRadius + borderWidth)
+					.fill(Color.secondary)
+					.opacity(0.5)
+			}
 	}
 	
-	var shadowRadius: CGFloat {
-		return colorScheme == .dark ? 2.5 : 0
-	}
-	
-	var body: some View {
+	private func unevenRoundedRectangle(
+		_ cornerRadius: CGFloat
+	) -> some Shape {
 		UnevenRoundedRectangle(
 			cornerRadii: .init(
 				topLeading: 0,
-				bottomLeading: 12,
-				bottomTrailing: 12,
-				topTrailing: 12
+				bottomLeading: cornerRadius,
+				bottomTrailing: cornerRadius,
+				topTrailing: cornerRadius
 			),
 			style: .circular
 		)
-		.fill(
-			Color(nsColor: .textBackgroundColor)
-		)
-		.shadow(
-			color: shadowColor,
-			radius: shadowRadius
-		)
-	}
+	}
+	
 }
 
 #Preview {
diff --git a/Sidekick/Views/Conversation/Messages/MessageShareMenu.swift b/Sidekick/Views/Conversation/Messages/MessageShareMenu.swift
index b1c62cd..b0e76ed 100644
--- a/Sidekick/Views/Conversation/Messages/MessageShareMenu.swift
+++ b/Sidekick/Views/Conversation/Messages/MessageShareMenu.swift
@@ -64,7 +64,7 @@
 	}
 	
 	var pngButton: some View {
-		Button("Save as Image") {
+		Button {
 			VStack(
 				alignment: .leading,
 				spacing: 15
@@ -75,6 +75,8 @@
 			.background(Color.gray)
 			.frame(width: 1000)
 			.generatePng()
+		} label: {
+			Text("Save as Image")
 		}
 	}
 
diff --git a/Sidekick/Views/Settings/InferenceSettingsView.swift b/Sidekick/Views/Settings/InferenceSettingsView.swift
index eb2830b..30c1cdb 100644
--- a/Sidekick/Views/Settings/InferenceSettingsView.swift
+++ b/Sidekick/Views/Settings/InferenceSettingsView.swift
@@ -219,7 +219,7 @@ struct InferenceSettingsView: View {
 				Text("Endpoint")
 					.font(.title3)
 					.bold()
-				Text("The endpoint on the server used for inference. This endpoint must be accessible from this device, and provide an OpenAI compatible API. (e.g. http://localhost:11434, where http://localhost:11434/v1/chat/completions is accessible)")
+				Text("The endpoint on the server used for inference. This endpoint must be accessible from this device, and provide an OpenAI compatible API. (e.g. http://localhost:11434, where http://localhost:11434/v1/chat/completions is accessible)\n\nTo ensure privacy and security of your data, host your own server.")
 					.font(.caption)
 			}
 			Spacer()