From af3963189ebf60d0c064cf58750f590f37914db9 Mon Sep 17 00:00:00 2001
From: boxbeam
Date: Tue, 18 Jun 2024 14:24:08 -0400
Subject: [PATCH] chore(scheduler): remove experimental doc index config (#2438)

---
 crates/tabby-common/src/config.rs |  8 --------
 crates/tabby-scheduler/src/lib.rs | 16 +---------------
 2 files changed, 1 insertion(+), 23 deletions(-)

diff --git a/crates/tabby-common/src/config.rs b/crates/tabby-common/src/config.rs
index 24061241b7aa..7b1c32b7f047 100644
--- a/crates/tabby-common/src/config.rs
+++ b/crates/tabby-common/src/config.rs
@@ -22,9 +22,6 @@ pub struct Config {
 
     #[serde(default)]
     pub model: ModelConfigGroup,
-
-    #[serde(default)]
-    pub experimental: ExperimentalConfig,
 }
 
 impl Config {
@@ -249,11 +246,6 @@ fn default_num_gpu_layers() -> u16 {
     9999
 }
 
-#[derive(Serialize, Deserialize, Default, Debug, Clone)]
-pub struct ExperimentalConfig {
-    pub doc: Option<DocIndexConfig>,
-}
-
 #[derive(Serialize, Deserialize, Default, Debug, Clone)]
 pub struct DocIndexConfig {
     pub start_urls: Vec<String>,
diff --git a/crates/tabby-scheduler/src/lib.rs b/crates/tabby-scheduler/src/lib.rs
index 0eea06b9533c..213a6318f536 100644
--- a/crates/tabby-scheduler/src/lib.rs
+++ b/crates/tabby-scheduler/src/lib.rs
@@ -15,7 +15,7 @@ use indexer::{IndexAttributeBuilder, Indexer};
 use tabby_inference::Embedding;
 
 mod doc;
-use std::{env, sync::Arc};
+use std::sync::Arc;
 
 use tokio_cron_scheduler::{Job, JobScheduler};
 use tracing::{debug, info, warn};
@@ -25,9 +25,6 @@ use crate::doc::SourceDocument;
 pub async fn scheduler(now: bool, config: &tabby_common::config::Config) {
     if now {
         scheduler_pipeline(config).await;
-        if env::var("TABBY_SCHEDULER_EXPERIMENTAL_DOC_INDEX").is_ok() {
-            doc_index_pipeline(config).await;
-        }
     } else {
         let scheduler = JobScheduler::new()
             .await
@@ -78,17 +75,6 @@ async fn scheduler_pipeline(config: &tabby_common::config::Config) {
     code.garbage_collection(repositories);
 }
 
-async fn doc_index_pipeline(config: &tabby_common::config::Config) {
-    let Some(index_config) = &config.experimental.doc else {
-        return;
-    };
-
-    let embedding_config = &config.model.embedding;
-    let embedding = llama_cpp_server::create_embedding(embedding_config).await;
-
-    crawl_index_docs(&index_config.start_urls, embedding).await;
-}
-
 pub async fn crawl_index_docs(urls: &[String], embedding: Arc<dyn Embedding>) {
     for url in urls {
         debug!("Starting doc index pipeline for {url}");