Commit c78102a
fix race condition issue
eastriverlee committed Jan 27, 2024
1 parent: e27f19c
Showing 1 changed file with 8 additions and 0 deletions.
Sources/LLM/LLM.swift (8 additions, 0 deletions)
@@ -5,6 +5,10 @@ public typealias Token = llama_token
 public typealias Model = OpaquePointer
 public typealias Chat = (role: Role, content: String)

+@globalActor public actor InferenceActor {
+    static public let shared = InferenceActor()
+}
+
 open class LLM: ObservableObject {
     public var model: Model
     public var history: [Chat]
@@ -141,6 +145,7 @@ open class LLM: ObservableObject {
         self.template = template
     }

+    @InferenceActor
     private func predictNextToken() async -> Token {
         let logits = llama_get_logits_ith(context.pointer, batch.n_tokens - 1)!
         var candidates: [llama_token_data] = (0..<totalTokenCount).map { token in
@@ -192,6 +197,7 @@ open class LLM: ObservableObject {
         return true
     }

+    @InferenceActor
     private func finishResponse(from response: inout [String], to output: borrowing AsyncStream<String>.Continuation) async {
         multibyteCharacter.removeAll()
         var input = ""
@@ -262,6 +268,7 @@ open class LLM: ObservableObject {
     private var input: String = ""
     private var isAvailable = true

+    @InferenceActor
     public func getCompletion(from input: borrowing String) async -> String {
         guard isAvailable else { fatalError("LLM is being used") }
         isAvailable = false
@@ -274,6 +281,7 @@ open class LLM: ObservableObject {
         return output
     }

+    @InferenceActor
     public func respond(to input: String, with makeOutputFrom: @escaping (AsyncStream<String>) async -> String) async {
         guard isAvailable else { return }
         isAvailable = false
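The whole change is a serialization fix: every entry point that touches the shared llama.cpp state (context, batch, sampling buffers) is hoisted onto one custom global actor, so no two of them can execute concurrently. Below is a minimal, self-contained sketch of the same pattern (Swift 5 language mode; WorkerActor and Counter are illustrative names, not the library's):

// A custom global actor whose single serial executor runs every
// function annotated with it, one at a time.
@globalActor actor WorkerActor {
    static let shared = WorkerActor()
}

final class Counter {
    private var value = 0 // stands in for shared mutable state like the llama.cpp context

    // Isolating individual methods to WorkerActor means their bodies
    // never interleave, even when called from many concurrent tasks.
    @WorkerActor
    func increment() {
        value += 1
    }

    @WorkerActor
    func read() -> Int {
        value
    }
}

// main.swift-style usage: 1_000 concurrent tasks, yet every increment
// hops onto WorkerActor, so the final count is always 1_000.
let counter = Counter()
await withTaskGroup(of: Void.self) { group in
    for _ in 0..<1_000 {
        group.addTask { await counter.increment() }
    }
}
print(await counter.read()) // 1000

Annotating individual methods rather than the whole class keeps LLM an ordinary ObservableObject for SwiftUI while funneling only the inference paths through the actor's serial executor.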

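For context on the race itself: getCompletion(from:) and respond(to:with:) both check isAvailable and then flip it, a check-then-act sequence that is not atomic on its own. A hedged sketch of the hazard and of the actor-isolated fix (UnsafeGate, SafeGate, and GateActor are illustrative, not the library's code):

// Without isolation, two tasks can both read `true` before either
// writes `false`, and both proceed into inference.
final class UnsafeGate {
    private var available = true
    func tryEnter() -> Bool {
        guard available else { return false } // read...
        available = false                     // ...then write: not atomic
        return true
    }
}

// Putting the same flag behind a global actor (mirroring @InferenceActor)
// makes the read-modify-write synchronous actor code, so no other caller
// on that actor can run between the check and the set.
@globalActor actor GateActor {
    static let shared = GateActor()
}

final class SafeGate {
    private var available = true
    @GateActor
    func tryEnter() -> Bool {
        guard available else { return false }
        available = false
        return true
    }
}

Because the isolated tryEnter contains no suspension point, the guard and the write execute as one uninterrupted step on the actor; the same reasoning applies to the isAvailable checks inside the @InferenceActor methods in the diff above.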