refactor(scheduler): remove useless RepositoryAccess methods (#1827)
* refactor(scheduler): remove useless RepositoryAccess methods

* [autofix.ci] apply automated fixes

* [autofix.ci] apply automated fixes (attempt 2/3)

---------

Co-authored-by: autofix-ci[bot] <114827586+autofix-ci[bot]@users.noreply.github.com>
wsxiaoys and autofix-ci[bot] authored Apr 11, 2024
1 parent aef123d commit 8a26271
Showing 3 changed files with 9 additions and 30 deletions.
4 changes: 0 additions & 4 deletions crates/tabby-common/src/config.rs
@@ -9,7 +9,6 @@ use serde::{Deserialize, Serialize};
 use crate::{
     path::repositories_dir,
     terminal::{HeaderFormat, InfoMessage},
-    SourceFile,
 };
 
 #[derive(Serialize, Deserialize, Default)]
@@ -150,9 +149,6 @@ impl Default for ServerConfig {
 #[async_trait]
 pub trait RepositoryAccess: Send + Sync {
     async fn list_repositories(&self) -> Result<Vec<RepositoryConfig>>;
-    fn start_snapshot(&self, _version: u64) {}
-    fn process_file(&self, _version: u64, _file: SourceFile) {}
-    fn finish_snapshot(&self, _version: u64) {}
 }
 
 pub struct ConfigRepositoryAccess;
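
With the snapshot callbacks gone, RepositoryAccess is reduced to a single read-only method. Below is a minimal sketch of what an implementor looks like after this change; the StaticRepositoryAccess type is hypothetical, only the trait shape and the #[async_trait] attribute come from the hunk above, and Result is assumed to be anyhow::Result.

// Sketch only: StaticRepositoryAccess is a hypothetical implementor for
// illustration; the trait shape matches the hunk above.
use anyhow::Result;
use async_trait::async_trait;
use tabby_common::config::{RepositoryAccess, RepositoryConfig};

struct StaticRepositoryAccess;

#[async_trait]
impl RepositoryAccess for StaticRepositoryAccess {
    // The trait is now read-only: listing repositories is the only obligation;
    // the removed snapshot callbacks no longer need to be stubbed out.
    async fn list_repositories(&self) -> Result<Vec<RepositoryConfig>> {
        // For illustration, report no repositories; a real implementor would
        // load these from configuration or a database.
        Ok(Vec::new())
    }
}
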
27 changes: 5 additions & 22 deletions crates/tabby-scheduler/src/dataset.rs
@@ -15,7 +15,7 @@ use kdam::BarExt;
 use lazy_static::lazy_static;
 use serde_jsonlines::WriteExt;
 use tabby_common::{
-    config::{RepositoryAccess, RepositoryConfig},
+    config::RepositoryConfig,
     path::{dataset_dir, dependency_file},
     DependencyFile, SourceFile,
 };
@@ -25,21 +25,11 @@ use tree_sitter_tags::TagsContext;
 use crate::utils::tqdm;
 
 trait RepositoryExt {
-    fn create_dataset(
-        &self,
-        writer: &mut impl Write,
-        access: &impl RepositoryAccess,
-        snapshot_version: u64,
-    ) -> Result<()>;
+    fn create_dataset(&self, writer: &mut impl Write) -> Result<()>;
 }
 
 impl RepositoryExt for RepositoryConfig {
-    fn create_dataset(
-        &self,
-        writer: &mut impl Write,
-        access: &impl RepositoryAccess,
-        snapshot_version: u64,
-    ) -> Result<()> {
+    fn create_dataset(&self, writer: &mut impl Write) -> Result<()> {
         let dir = self.dir();
 
         let walk_dir_iter = || {
@@ -81,7 +71,6 @@ impl RepositoryExt for RepositoryConfig {
                         language,
                     };
                     writer.write_json_lines([source_file.clone()])?;
-                    access.process_file(snapshot_version, source_file);
                 }
                 Err(e) => {
                     error!("Cannot read '{}': '{e}'", relative_path.display());
@@ -106,7 +95,7 @@ fn is_source_code(entry: &DirEntry) -> bool {
     }
 }
 
-pub fn create_dataset(config: &[RepositoryConfig], access: &impl RepositoryAccess) -> Result<()> {
+pub fn create_dataset(config: &[RepositoryConfig]) -> Result<()> {
     fs::remove_dir_all(dataset_dir()).ok();
     fs::create_dir_all(dataset_dir())?;
 
@@ -119,16 +108,10 @@ pub fn create_dataset(config: &[RepositoryConfig], access: &impl RepositoryAccess) -> Result<()> {
         None,
     );
 
-    let snapshot_version = std::time::SystemTime::now()
-        .duration_since(std::time::UNIX_EPOCH)
-        .expect("Failed to read system clock")
-        .as_millis() as u64;
-    access.start_snapshot(snapshot_version);
-
     let mut deps = DependencyFile::default();
     for repository in config {
         deps::collect(repository.dir().as_path(), &mut deps);
-        repository.create_dataset(&mut writer, access, snapshot_version)?;
+        repository.create_dataset(&mut writer)?;
     }
 
     serdeconv::to_json_file(&deps, dependency_file())?;
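
create_dataset now takes only the repository list and writes each SourceFile straight to the JSON Lines writer, with no process_file callback and no snapshot_version. The sketch below illustrates that serde_jsonlines write pattern in isolation; the Record type and the output path are hypothetical, and only the write_json_lines call mirrors the diff above.

// Standalone sketch of the JSON Lines pattern used by create_dataset after the
// refactor; Record and the output path are hypothetical.
use std::{
    fs::File,
    io::{BufWriter, Write},
};

use anyhow::Result;
use serde::Serialize;
use serde_jsonlines::WriteExt;

#[derive(Serialize)]
struct Record {
    filepath: String,
    language: String,
}

fn main() -> Result<()> {
    let mut writer = BufWriter::new(File::create("dataset.jsonl")?);
    // Each call appends one JSON object per line, as the per-file writes in
    // create_dataset do; no snapshot version or RepositoryAccess callback is
    // involved anymore.
    writer.write_json_lines([Record {
        filepath: "src/main.rs".into(),
        language: "rust".into(),
    }])?;
    writer.flush()?;
    Ok(())
}
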
8 changes: 4 additions & 4 deletions crates/tabby-scheduler/src/lib.rs
@@ -15,7 +15,7 @@ use tracing::{error, info, warn};
 pub async fn scheduler<T: RepositoryAccess + 'static>(now: bool, access: T) -> Result<()> {
     if now {
         let repositories = access.list_repositories().await?;
-        job_sync(&repositories, &access)?;
+        job_sync(&repositories)?;
         job_index(&repositories)?;
     } else {
         let access = Arc::new(access);
@@ -37,7 +37,7 @@ pub async fn scheduler<T: RepositoryAccess + 'static>(now: bool, access: T) -> Result<()> {
                     .list_repositories()
                     .await
                     .expect("Must be able to retrieve repositories for sync");
-                if let Err(e) = job_sync(&repositories, &*access) {
+                if let Err(e) = job_sync(&repositories) {
                     error!("{e}");
                 }
                 if let Err(e) = job_index(&repositories) {
@@ -66,15 +66,15 @@ fn job_index(repositories: &[RepositoryConfig]) -> Result<()> {
     Ok(())
 }
 
-fn job_sync(repositories: &[RepositoryConfig], access: &impl RepositoryAccess) -> Result<()> {
+fn job_sync(repositories: &[RepositoryConfig]) -> Result<()> {
     println!("Syncing {} repositories...", repositories.len());
     let ret = repository::sync_repositories(repositories);
     if let Err(err) = ret {
         return Err(err.context("Failed to sync repositories"));
     }
 
     println!("Building dataset...");
-    let ret = dataset::create_dataset(repositories, access);
+    let ret = dataset::create_dataset(repositories);
     if let Err(err) = ret {
         return Err(err.context("Failed to build dataset"));
     }
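
After this change the access object is consulted only for list_repositories; job_sync and job_index both operate on the plain &[RepositoryConfig] slice. Below is a minimal sketch of driving one immediate sync-and-index pass through the public scheduler entry point, assuming a tokio runtime, that ConfigRepositoryAccess (declared in config.rs above) implements RepositoryAccess, and that the returned Result is anyhow::Result.

// Sketch only: assumes a tokio runtime and that ConfigRepositoryAccess
// implements RepositoryAccess (implied by config.rs but not shown in this diff).
use anyhow::Result;
use tabby_common::config::ConfigRepositoryAccess;

#[tokio::main]
async fn main() -> Result<()> {
    // `now = true` runs job_sync + job_index once; the access object is used
    // only to enumerate repositories before both jobs receive the plain
    // `&[RepositoryConfig]` slice.
    tabby_scheduler::scheduler(true, ConfigRepositoryAccess).await
}
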
