From 8c2a3b8c561b34f62500abc6c17d778af7ab0a95 Mon Sep 17 00:00:00 2001
From: Meng Zhang
Date: Mon, 25 Mar 2024 21:23:40 +0800
Subject: [PATCH] refactor(llama): enhance debug messages for llama requests
 (#1717)

---
 crates/llama-cpp-bindings/src/llama.rs | 17 ++++++++++++++++-
 1 file changed, 16 insertions(+), 1 deletion(-)

diff --git a/crates/llama-cpp-bindings/src/llama.rs b/crates/llama-cpp-bindings/src/llama.rs
index 4b0cdd36ca38..4862d261d46c 100644
--- a/crates/llama-cpp-bindings/src/llama.rs
+++ b/crates/llama-cpp-bindings/src/llama.rs
@@ -5,6 +5,7 @@ use tokio::sync::{
     mpsc::{channel, unbounded_channel, Receiver, Sender, UnboundedReceiver, UnboundedSender},
     RwLock,
 };
+use tracing::debug;
 
 use crate::ffi;
 
@@ -40,7 +41,19 @@ impl LlamaInitRequest {
     }
 
     pub(crate) fn step(&self, token: &str) -> bool {
-        self.tx.send(token.to_owned()).is_err()
+        match self.tx.send(token.to_owned()) {
+            Ok(_) => false,
+            Err(err) => {
+                debug!("Request <{}> is cancelled: `{}`", self.id, err);
+                true
+            }
+        }
+    }
+}
+
+impl Drop for LlamaInitRequest {
+    fn drop(&mut self) {
+        debug!("Request <{}> is done", self.id)
     }
 }
 
@@ -64,9 +77,11 @@ impl LlamaServiceImpl {
         } {
             // Drop canceled requests.
            if req.tx.is_closed() {
+                debug!("Request <{}> is cancelled before it got started", req.id);
                 continue;
             }
 
+            debug!("Request <{}> started", req.id);
             self.engine.as_mut().unwrap().add_request(Box::new(req));
         }
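
Note (not part of the upstream patch): the standalone sketch below reproduces the same pattern the diff introduces, so the logging behavior can be tried in isolation. It detects cancellation when the receiver half of the channel is dropped and logs request lifetime from a Drop impl. The `Request` struct, the `main` harness, and the `tracing_subscriber` setup are illustrative assumptions, not code from the Tabby repository.

// Assumed Cargo dependencies: tokio (features "rt", "macros", "sync"),
// tracing, tracing-subscriber.
use tokio::sync::mpsc::{unbounded_channel, UnboundedSender};
use tracing::debug;

// Hypothetical stand-in for `LlamaInitRequest`: a request id plus the sender
// half used to stream generated tokens back to the caller.
struct Request {
    id: u32,
    tx: UnboundedSender<String>,
}

impl Request {
    // Mirrors the patched `step`: returns `true` when the send fails because
    // the receiver was dropped (request cancelled), and logs why.
    fn step(&self, token: &str) -> bool {
        match self.tx.send(token.to_owned()) {
            Ok(_) => false,
            Err(err) => {
                debug!("Request <{}> is cancelled: `{}`", self.id, err);
                true
            }
        }
    }
}

impl Drop for Request {
    // Fires whether the request completed or was cancelled mid-stream.
    fn drop(&mut self) {
        debug!("Request <{}> is done", self.id)
    }
}

#[tokio::main]
async fn main() {
    // A subscriber must be installed for the debug! output to be visible.
    tracing_subscriber::fmt()
        .with_max_level(tracing::Level::DEBUG)
        .init();

    let (tx, mut rx) = unbounded_channel::<String>();
    let req = Request { id: 1, tx };

    assert!(!req.step("hello")); // receiver alive: send succeeds
    assert_eq!(rx.recv().await.as_deref(), Some("hello"));

    drop(rx); // simulate the client cancelling the request
    assert!(req.step("world")); // send fails; cancellation is logged
} // `req` is dropped here, logging "Request <1> is done"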