add open func recoverFromLengthy
eastriverlee committed Jan 30, 2024
1 parent 94bcc54 commit 8ec80eb
Showing 2 changed files with 17 additions and 5 deletions.
Sources/LLM/LLM.swift (15 changes: 10 additions & 5 deletions)
@@ -205,23 +205,28 @@ open class LLM: ObservableObject {
     private var currentCount: Int32!
     private var decoded = ""
 
+    open func recoverFromLengthy(_ input: borrowing String, to output: borrowing AsyncStream<String>.Continuation) {
+        output.yield("tl;dr")
+    }
+
     private func prepare(from input: borrowing String, to output: borrowing AsyncStream<String>.Continuation) -> Bool {
         guard !input.isEmpty else { return false }
         context = .init(model, params)
         var tokens = encode(input)
         var initialCount = tokens.count
         currentCount = Int32(initialCount)
         if maxTokenCount <= currentCount {
-            if history.isEmpty {
-                isFull = true
-                output.yield("Input is too long.")
-                return false
-            } else {
+            while !history.isEmpty {
                 history.removeFirst(min(2, history.count))
                 tokens = encode(preProcess(self.input, history))
                 initialCount = tokens.count
                 currentCount = Int32(initialCount)
             }
+            if maxTokenCount <= currentCount {
+                isFull = true
+                recoverFromLengthy(input, to: output)
+                return false
+            }
         }
         for (i, token) in tokens.enumerated() {
             batch.n_tokens = Int32(i)
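After this change, prepare drops the chat history two entries at a time and, if the re-encoded prompt still exceeds maxTokenCount, calls recoverFromLengthy, which yields "tl;dr" by default. Because the method is open, an app can subclass LLM and override the fallback. A minimal sketch of such an override; the subclass name and the message are illustrative only, not part of this commit:

import LLM

// Hypothetical subclass, for illustration only.
class SummaryBot: LLM {
    // Called by prepare(from:to:) when the prompt cannot be shrunk below maxTokenCount.
    override func recoverFromLengthy(_ input: borrowing String, to output: borrowing AsyncStream<String>.Continuation) {
        output.yield("That message is too long for this model's context window.")
    }
}

The new test below relies on the default implementation, which is why it expects the output to be exactly "tl;dr".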
Tests/LLMTests/LLMTests.swift (7 changes: 7 additions & 0 deletions)
@@ -212,4 +212,11 @@ final class LLMTests: XCTestCase {
         await bot.respond(to: input)
         #assert(!bot.output.isEmpty)
     }
+
+    func testRecoveryFromLengthyInput() async throws {
+        var bot = try await LLM(from: model, maxTokenCount: 16)
+        let input = "have you heard of this so-called LLM.swift library?"
+        await bot.respond(to: input)
+        #assert(bot.output == "tl;dr")
+    }
 }
