diff --git a/src-tauri/Cargo.toml b/src-tauri/Cargo.toml index 574ebbfc4..7fa9f0094 100644 --- a/src-tauri/Cargo.toml +++ b/src-tauri/Cargo.toml @@ -9,12 +9,12 @@ version = "0.5.46" # See more keys and their definitions at https://doc.rust-lang.org/cargo/reference/manifest.html [build-dependencies] -tauri-build = {version = "1.5.5", features = ["isolation"]} +tauri-build = {version = "1.5.5", features = ["isolation"] } [dependencies] anyhow = "1" async-trait = "0.1.81" -async_zip = {version = "0.0.17", features = ["full"]} +async_zip = {version = "0.0.17", features = ["full"] } auto-launch = "0.5.0" blake2 = "0.10" chrono = "0.4.38" @@ -29,26 +29,26 @@ keyring = {version = "3.0.5", features = [ "windows-native", "apple-native", "linux-native", -]} +] } libsqlite3-sys = {version = "0.25.1", features = [ "bundled", -]}# Required for tari_wallet +] }# Required for tari_wallet log = "0.4.22" log4rs = "1.3.0" minotari_node_grpc_client = {git = "https://github.com/tari-project/tari.git", branch = "development"} minotari_wallet_grpc_client = {git = "https://github.com/tari-project/tari.git", branch = "development"} -nix = {version = "0.29.0", features = ["signal"]} +nix = {version = "0.29.0", features = ["signal"] } nvml-wrapper = "0.10.0" open = "5" phraze = "0.3.15" rand = "0.8.5" regex = "1.10.5" -reqwest = {version = "0.12.5", features = ["stream", "json", "multipart"]} +reqwest = {version = "0.12.5", features = ["stream", "json", "multipart"] } sanitize-filename = "0.5" semver = "1.0.23" -sentry = {version = "0.34.0", features = ["anyhow"]} +sentry = {version = "0.34.0", features = ["anyhow"] } sentry-tauri = "0.3.0" -serde = {version = "1", features = ["derive"]} +serde = {version = "1", features = ["derive"] } serde_json = "1" sha2 = "0.10.8" sys-locale = "0.3.1" @@ -58,7 +58,7 @@ tari_common = {git = "https://github.com/tari-project/tari.git", branch = "devel tari_common_types = {git = "https://github.com/tari-project/tari.git", branch = "development"} tari_core = 
{git = "https://github.com/tari-project/tari.git", branch = "development", features = [ "transactions", -]} +] } tari_crypto = "0.21.0" tari_key_manager = {git = "https://github.com/tari-project/tari.git", branch = "development"} tari_shutdown = {git = "https://github.com/tari-project/tari.git", branch = "development"} @@ -80,12 +80,12 @@ tauri = {version = "1.8.0", features = [ "icon-ico", "icon-png", "process-command-api", -]} +] } tauri-plugin-single-instance = {git = "https://github.com/tauri-apps/plugins-workspace", branch = "v1"} thiserror = "1.0.26" -tokio = {version = "1", features = ["full"]} -tokio-util = {version = "0.7.11", features = ["compat"]} -xz2 = {version = "0.1.7", features = ["static"]}# static bind lzma +tokio = {version = "1", features = ["full"] } +tokio-util = {version = "0.7.11", features = ["compat"] } +xz2 = {version = "0.1.7", features = ["static"] }# static bind lzma zip = "2.2.0" [target.'cfg(windows)'.dependencies] @@ -94,7 +94,7 @@ winreg = "0.52.0" # needed for keymanager. 
TODO: Find a way of creating a keymanager without bundling sqlite chrono = "0.4.38" device_query = "2.1.0" -libsqlite3-sys = {version = "0.25.1", features = ["bundled"]} +libsqlite3-sys = {version = "0.25.1", features = ["bundled"] } log = "0.4.22" nvml-wrapper = "0.10.0" rand = "0.8.5" diff --git a/src-tauri/src/binaries/adapter_github.rs b/src-tauri/src/binaries/adapter_github.rs index bc7f09079..7020b3f65 100644 --- a/src-tauri/src/binaries/adapter_github.rs +++ b/src-tauri/src/binaries/adapter_github.rs @@ -8,7 +8,9 @@ use tari_common::configuration::Network; use tauri::api::path::cache_dir; use crate::{ - download_utils::download_file_with_retries, github, progress_tracker::ProgressTracker, + download_utils::download_file_with_retries, + github::{self, request_client::RequestClient}, + progress_tracker::ProgressTracker, APPLICATION_FOLDER_ID, }; @@ -41,6 +43,12 @@ impl LatestVersionApiAdapter for GithubReleasesAdapter { .join(format!("{}.sha256", asset.name)); let checksum_url = format!("{}.sha256", asset.url); + if asset.source.is_mirror() { + RequestClient::current() + .check_if_cache_hits(checksum_url.as_str()) + .await?; + } + match download_file_with_retries(&checksum_url, &checksum_path, progress_tracker).await { Ok(_) => Ok(checksum_path), Err(e) => { diff --git a/src-tauri/src/binaries/adapter_tor.rs b/src-tauri/src/binaries/adapter_tor.rs index 0cb8ae751..26f9a73b8 100644 --- a/src-tauri/src/binaries/adapter_tor.rs +++ b/src-tauri/src/binaries/adapter_tor.rs @@ -2,6 +2,8 @@ use crate::binaries::binaries_resolver::{ LatestVersionApiAdapter, VersionAsset, VersionDownloadInfo, }; use crate::download_utils::download_file_with_retries; +use crate::github::request_client::RequestClient; +use crate::github::ReleaseSource; use crate::progress_tracker::ProgressTracker; use crate::APPLICATION_FOLDER_ID; use anyhow::Error; @@ -22,20 +24,10 @@ impl LatestVersionApiAdapter for TorReleaseAdapter { 
"https://cdn-universe.tari.com/torbrowser/13.5.7/tor-expert-bundle-{}-13.5.7.tar.gz", platform ); - let mut cdn_responded = false; - - let client = reqwest::Client::new(); - for _ in 0..3 { - let cloned_cdn_tor_bundle_url = cdn_tor_bundle_url.clone(); - let response = client.head(cloned_cdn_tor_bundle_url).send().await; - - if let Ok(resp) = response { - if resp.status().is_success() { - cdn_responded = true; - break; - } - } - } + + let cdn_responded = RequestClient::current() + .check_if_cache_hits(cdn_tor_bundle_url.as_str()) + .await?; if cdn_responded { let version = VersionDownloadInfo { @@ -43,6 +35,7 @@ impl LatestVersionApiAdapter for TorReleaseAdapter { assets: vec![VersionAsset { url: cdn_tor_bundle_url.to_string(), name: format!("tor-expert-bundle-{}-13.5.7.tar.gz", platform), + source: ReleaseSource::Mirror, }], }; return Ok(vec![version]); @@ -54,6 +47,7 @@ impl LatestVersionApiAdapter for TorReleaseAdapter { assets: vec![VersionAsset { url: format!("https://dist.torproject.org/torbrowser/13.5.7/tor-expert-bundle-{}-13.5.7.tar.gz", platform), name: format!("tor-expert-bundle-{}-13.5.7.tar.gz", platform), + source: ReleaseSource::Github }] }; Ok(vec![version]) diff --git a/src-tauri/src/binaries/binaries_manager.rs b/src-tauri/src/binaries/binaries_manager.rs index b7670eb52..2a4a2706f 100644 --- a/src-tauri/src/binaries/binaries_manager.rs +++ b/src-tauri/src/binaries/binaries_manager.rs @@ -6,6 +6,7 @@ use tari_common::configuration::Network; use crate::{ download_utils::{download_file_with_retries, extract, validate_checksum}, + github::request_client::RequestClient, progress_tracker::ProgressTracker, }; @@ -426,6 +427,12 @@ impl BinaryManager { .map_err(|e| anyhow!("Error creating in progress folder. 
Error: {:?}", e))?; let in_progress_file_zip = in_progress_dir.join(asset.name.clone()); + if asset.source.is_mirror() { + RequestClient::current() + .check_if_cache_hits(asset.url.as_str()) + .await?; + } + download_file_with_retries( asset.url.as_str(), &in_progress_file_zip, diff --git a/src-tauri/src/binaries/binaries_resolver.rs b/src-tauri/src/binaries/binaries_resolver.rs index 4502266c4..29738f24b 100644 --- a/src-tauri/src/binaries/binaries_resolver.rs +++ b/src-tauri/src/binaries/binaries_resolver.rs @@ -1,8 +1,11 @@ +use crate::github::ReleaseSource; use crate::ProgressTracker; use anyhow::{anyhow, Error}; use async_trait::async_trait; +use log::info; use regex::Regex; use semver::Version; +use serde::{Deserialize, Serialize}; use std::collections::HashMap; use std::path::PathBuf; use std::sync::LazyLock; @@ -15,19 +18,22 @@ use super::adapter_xmrig::XmrigVersionApiAdapter; use super::binaries_manager::BinaryManager; use super::Binaries; +pub const LOG_TARGET: &str = "tari::universe::binary_resolver"; + static INSTANCE: LazyLock> = LazyLock::new(|| RwLock::new(BinaryResolver::new())); -#[derive(Debug, Clone)] +#[derive(Debug, Clone, Serialize, Deserialize)] pub struct VersionDownloadInfo { pub(crate) version: Version, pub(crate) assets: Vec, } -#[derive(Debug, Clone)] +#[derive(Debug, Clone, Serialize, Deserialize)] pub struct VersionAsset { pub(crate) url: String, pub(crate) name: String, + pub(crate) source: ReleaseSource, } #[async_trait] @@ -208,6 +214,8 @@ impl BinaryResolver { progress_tracker: ProgressTracker, should_check_for_update: bool, ) -> Result<(), Error> { + info!(target: LOG_TARGET, "Initializing binary: {} | should check for update: {}", binary.name(), should_check_for_update); + let manager = self .managers .get_mut(&binary) @@ -215,7 +223,7 @@ impl BinaryResolver { manager.read_local_versions().await; - if should_check_for_update { + if true { // Will populate Vec of downloaded versions that meet the requirements 
manager.check_for_updates().await; } diff --git a/src-tauri/src/download_utils.rs b/src-tauri/src/download_utils.rs index 6ba21eb5d..8558a26e1 100644 --- a/src-tauri/src/download_utils.rs +++ b/src-tauri/src/download_utils.rs @@ -1,3 +1,4 @@ +use crate::github::request_client::RequestClient; use crate::ProgressTracker; use anyhow::{anyhow, Error}; use async_zip::base::read::seek::ZipFileReader; @@ -39,12 +40,12 @@ pub async fn download_file_with_retries( } } -async fn download_file( +pub async fn download_file( url: &str, destination: &Path, progress_tracker: ProgressTracker, ) -> Result<(), anyhow::Error> { - let response = reqwest::get(url).await?; + let response = RequestClient::current().send_get_request(url).await?; // Ensure the directory exists if let Some(parent) = destination.parent() { @@ -57,9 +58,9 @@ async fn download_file( // Stream the response body directly to the file let mut stream = response.bytes_stream(); while let Some(item) = stream.next().await { - let _ = progress_tracker - .update("downloading".to_string(), None, 10) - .await; + let _ = progress_tracker + .update("downloading".to_string(), None, 10) + .await; dest.write_all(&item?).await?; } diff --git a/src-tauri/src/github/cache.rs b/src-tauri/src/github/cache.rs new file mode 100644 index 000000000..b3ee9444b --- /dev/null +++ b/src-tauri/src/github/cache.rs @@ -0,0 +1,197 @@ +use std::{collections::HashMap, path::PathBuf, sync::LazyLock}; + +use crate::{binaries::binaries_resolver::VersionDownloadInfo, APPLICATION_FOLDER_ID}; +use anyhow::{anyhow, Error, Ok}; +use log::info; +use serde::{Deserialize, Serialize}; +use tauri::api::path::cache_dir; +use tokio::sync::RwLock; + +const LOG_TARGET: &str = "tari::universe::github_cache"; + +static INSTANCE: LazyLock> = + LazyLock::new(|| RwLock::new(CacheJsonFile::new())); + +#[derive(Debug, Serialize, Deserialize)] +pub struct CacheEntry { + pub repo_owner: String, + pub repo_name: String, + pub github_etag: Option, + pub
mirror_etag: Option, + pub file_path: PathBuf, +} + +#[derive(Debug, Serialize, Deserialize)] +pub struct CacheJsonFile { + pub cache_entries: HashMap, + pub cache_file_path: PathBuf, + pub versions_cache_folder_path: PathBuf, +} + +impl CacheJsonFile { + fn new() -> Self { + let (cache_file_path, versions_cache_folder_path) = Self::initialize_paths(); + + Self { + cache_entries: HashMap::new(), + cache_file_path, + versions_cache_folder_path, + } + } + + fn initialize_paths() -> (PathBuf, PathBuf) { + let base_path = PathBuf::from(APPLICATION_FOLDER_ID) + .join("cache") + .join("binaries_versions"); + let cache_file_path = base_path.join("versions_releases_responses.json"); + (cache_file_path, base_path) + } + + fn create_cache_entry_identifier(repo_owner: &str, repo_name: &str) -> String { + format!("{}-{}", repo_owner, repo_name) + } + + fn get_version_releases_responses_cache_file_path(&self) -> Result { + let cache_path = cache_dir().ok_or_else(|| anyhow!("Failed to get cache path"))?; + Ok(cache_path.join(self.cache_file_path.clone())) + } + + pub fn read_version_releases_responses_cache_file(&mut self) -> Result<(), Error> { + let cache_file_path = self.get_version_releases_responses_cache_file_path()?; + info!(target: LOG_TARGET, "Reading cache file: {:?}", cache_file_path); + if cache_file_path.exists() { + let json = std::fs::read_to_string(&cache_file_path)?; + self.cache_entries = serde_json::from_str(&json)?; + } + + info!(target: LOG_TARGET, "Version releases cache file read successfully"); + Ok(()) + } + + fn save_version_releases_responses_cache_file(&self) -> Result<(), Error> { + let cache_file_path = self.get_version_releases_responses_cache_file_path()?; + if !cache_file_path.exists() { + std::fs::create_dir_all( + cache_file_path + .parent() + .ok_or_else(|| anyhow!("Failed to create cache directory"))?, + )?; + } + let json = serde_json::to_string_pretty(&self.cache_entries)?; + std::fs::write(&cache_file_path, json)?; + + info!(target: 
LOG_TARGET, "Version releases cache file saved successfully"); + Ok(()) + } + + pub fn get_cache_entry(&self, repo_owner: &str, repo_name: &str) -> Option<&CacheEntry> { + self.cache_entries + .get(&Self::create_cache_entry_identifier(repo_owner, repo_name)) + } + + pub fn update_cache_entry( + &mut self, + repo_owner: &str, + repo_name: &str, + github_etag: Option, + mirror_etag: Option, + ) -> Result<(), Error> { + let cache_entry = self + .cache_entries + .get_mut(&Self::create_cache_entry_identifier(repo_owner, repo_name)) + .ok_or_else(|| anyhow!("Cache entry not found"))?; + if github_etag.is_some() { + cache_entry.github_etag = github_etag; + } + if mirror_etag.is_some() { + cache_entry.mirror_etag = mirror_etag; + } + self.save_version_releases_responses_cache_file()?; + Ok(()) + } + + pub fn create_cache_entry( + &mut self, + repo_owner: &str, + repo_name: &str, + github_etag: Option, + mirror_etag: Option, + ) -> Result<(), Error> { + let identifier = Self::create_cache_entry_identifier(repo_owner, repo_name); + + if self.cache_entries.contains_key(&identifier) { + self.update_cache_entry(repo_owner, repo_name, github_etag, mirror_etag)?; + } else { + let cache_entry = CacheEntry { + repo_owner: repo_owner.to_string(), + repo_name: repo_name.to_string(), + github_etag, + mirror_etag, + file_path: self + .versions_cache_folder_path + .join(format!("{}-{}.json", repo_owner, repo_name)), + }; + self.cache_entries.insert(identifier, cache_entry); + self.save_version_releases_responses_cache_file()?; + }; + + Ok(()) + } + + pub fn chech_if_content_file_exist(&self, repo_owner: &str, repo_name: &str) -> bool { + self.get_cache_entry(repo_owner, repo_name) + .map_or(false, |cache_entry| cache_entry.file_path.exists()) + } + + fn get_file_content_path(&self, repo_owner: &str, repo_name: &str) -> Result { + let cache_path = cache_dir().ok_or_else(|| anyhow!("Failed to get file content path"))?; + let cache_entry = self.get_cache_entry(repo_owner, 
repo_name).ok_or_else(|| { + anyhow!( + "File content not found for repo_owner: {}, repo_name: {}", + repo_owner, + repo_name + ) + })?; + Ok(cache_path.join(cache_entry.file_path.clone())) + } + + pub fn save_file_content( + &self, + repo_owner: &str, + repo_name: &str, + content: Vec, + ) -> Result<(), Error> { + let file_path = self.get_file_content_path(repo_owner, repo_name)?; + + if !file_path.exists() { + std::fs::create_dir_all( + file_path + .parent() + .ok_or_else(|| anyhow!("Failed to create cache directory"))?, + )?; + } + + let json = serde_json::to_string_pretty(&content)?; + std::fs::write(&file_path, json)?; + + info!(target: LOG_TARGET, "File content saved successfully"); + Ok(()) + } + + pub fn get_file_content( + &self, + repo_owner: &str, + repo_name: &str, + ) -> Result, Error> { + let file_path = self.get_file_content_path(repo_owner, repo_name)?; + let json = std::fs::read_to_string(&file_path)?; + let content: Vec = serde_json::from_str(&json)?; + + info!(target: LOG_TARGET, "File content read successfully"); + Ok(content) + } + + pub fn current() -> &'static RwLock { + &INSTANCE + } +} diff --git a/src-tauri/src/github/mod.rs b/src-tauri/src/github/mod.rs index 9309ce779..2a27bcb13 100644 --- a/src-tauri/src/github/mod.rs +++ b/src-tauri/src/github/mod.rs @@ -1,14 +1,18 @@ -use anyhow::anyhow; +mod cache; +pub mod request_client; + +use cache::CacheJsonFile; use log::{debug, info, warn}; -use reqwest::Client; -use serde::Deserialize; +use request_client::RequestClient; +use reqwest::Response; +use serde::{Deserialize, Serialize}; use crate::binaries::binaries_resolver::{VersionAsset, VersionDownloadInfo}; const LOG_TARGET: &str = "tari::universe::github"; #[derive(Deserialize)] -struct Release { +pub struct Release { name: String, tag_name: String, draft: bool, @@ -20,13 +24,22 @@ struct Asset { name: String, browser_download_url: String, } - -#[derive(Debug)] -enum ReleaseSource { +#[derive(Debug, Clone, Serialize, Deserialize)] +pub enum 
ReleaseSource { Github, Mirror, } +impl ReleaseSource { + pub fn is_github(&self) -> bool { + matches!(self, ReleaseSource::Github) + } + + pub fn is_mirror(&self) -> bool { + matches!(self, ReleaseSource::Mirror) + } +} + pub fn get_gh_url(repo_owner: &str, repo_name: &str) -> String { format!( "https://api.github.com/repos/{}/{}/releases", @@ -59,35 +72,41 @@ pub async fn list_releases( repo_owner: &str, repo_name: &str, ) -> Result, anyhow::Error> { - let mut attempts = 0; - let mut releases = loop { - match list_releases_from(ReleaseSource::Mirror, repo_owner, repo_name).await { - Ok(r) => break r, - Err(e) => { - warn!(target: LOG_TARGET, "Failed to fetch releases from mirror: {}", e); - } - }; - attempts += 1; - warn!( - target: LOG_TARGET, - "Failed to fetch releases from mirror, attempt {}", - attempts - ); - }; - // Add any missing releases from github - let github_releases = list_releases_from(ReleaseSource::Github, repo_owner, repo_name) + info!(target: LOG_TARGET, "Reading cache releases for {}/{}", repo_owner, repo_name); + CacheJsonFile::current() + .write() + .await + .read_version_releases_responses_cache_file()?; + + info!(target: LOG_TARGET, "Fetching mirror releases for {}/{}", repo_owner, repo_name); + + let mut mirror_releases = list_mirror_releases(repo_owner, repo_name) + .await + .inspect_err(|e| { + warn!(target: LOG_TARGET, "Failed to fetch releases from Mirror: {}", e); + }) + .unwrap_or_default(); + + info!(target: LOG_TARGET, "Found {} releases from mirror", mirror_releases.len()); + + info!(target: LOG_TARGET, "Fetching github releases for {}/{}", repo_owner, repo_name); + + let github_releases = list_github_releases(repo_owner, repo_name) .await .inspect_err(|e| { warn!(target: LOG_TARGET, "Failed to fetch releases from Github: {}", e); }) .unwrap_or_default(); + info!(target: LOG_TARGET, "Found {} releases from Github", github_releases.len()); + + // Add any missing releases from github for release in &github_releases { - if 
!releases.iter().any(|r| r.version == release.version) { - releases.push(release.clone()); + if !mirror_releases.iter().any(|r| r.version == release.version) { + mirror_releases.push(release.clone()); } } - Ok(releases) + Ok(mirror_releases) // if releases.as_ref().map_or(false, |r| !r.is_empty()) { // releases @@ -96,35 +115,149 @@ pub async fn list_releases( // } } -async fn list_releases_from( - source: ReleaseSource, +async fn list_mirror_releases( + repo_owner: &str, + repo_name: &str, +) -> Result, anyhow::Error> { + let url = get_mirror_url(repo_owner, repo_name); + info!(target: LOG_TARGET, "Mirror releases url: {}", url); + + let (need_to_download, cache_entry_present, response) = + check_if_need_download(repo_owner, repo_name, &url, ReleaseSource::Mirror).await?; + + info!(target: LOG_TARGET, "Mirror releases need to download: {}", need_to_download); + info!(target: LOG_TARGET, "Mirror releases cache entry present: {}", cache_entry_present); + + let mut versions_list: Vec = vec![]; + let mut does_hit = response + .and_then(|res| { + Some( + RequestClient::current() + .get_cf_cache_status_from_head_response(&res) + .is_hit(), + ) + }) + .unwrap_or(false); + + info!(target: LOG_TARGET, "Mirror releases cache hit: {}", does_hit); + + if need_to_download && !does_hit { + does_hit = RequestClient::current().check_if_cache_hits(&url).await?; + } + + let mut cache_json_file_lock = CacheJsonFile::current().write().await; + + if does_hit && need_to_download { + let (response, etag) = RequestClient::current() + .fetch_get_versions_download_info(&url) + .await?; + let remote_versions_list = + extract_versions_from_release(repo_owner, repo_name, response, ReleaseSource::Mirror) + .await?; + + if cache_entry_present { + cache_json_file_lock.update_cache_entry(repo_owner, repo_name, None, Some(etag))?; + } else { + cache_json_file_lock.create_cache_entry(repo_owner, repo_name, None, Some(etag))?; + }; + versions_list.extend(remote_versions_list); + 
cache_json_file_lock.save_file_content(repo_owner, repo_name, versions_list.clone())?; + } else { + let content = cache_json_file_lock.get_file_content(repo_owner, repo_name)?; + versions_list.extend(content); + } + + Ok(versions_list) +} + +async fn list_github_releases( repo_owner: &str, repo_name: &str, ) -> Result, anyhow::Error> { - let client = Client::new(); - let url = match source { - ReleaseSource::Github => get_gh_url(repo_owner, repo_name), - ReleaseSource::Mirror => get_mirror_url(repo_owner, repo_name), - }; - - let response = client - .get(&url) - .header("User-Agent", "request") - .send() - .await?; - if response.status() != 200 { - return Err(anyhow!( - "Failed to fetch releases for {}:{}: {} - ", - repo_owner, - repo_name, - response.status() - )); + let url = get_gh_url(repo_owner, repo_name); + info!(target: LOG_TARGET, "Github releases url: {}", url); + + let (need_to_download, cache_entry_present, _) = + check_if_need_download(repo_owner, repo_name, &url, ReleaseSource::Github).await?; + + info!(target: LOG_TARGET, "Github releases need to download: {}", need_to_download); + info!(target: LOG_TARGET, "Github releases cache entry present: {}", cache_entry_present); + + let mut versions_list: Vec = vec![]; + + let mut cache_json_file_lock = CacheJsonFile::current().write().await; + + if need_to_download { + let (response, etag) = RequestClient::current() + .fetch_get_versions_download_info(&url) + .await?; + let remote_versions_list = + extract_versions_from_release(repo_owner, repo_name, response, ReleaseSource::Github) + .await?; + + if cache_entry_present { + cache_json_file_lock.update_cache_entry(repo_owner, repo_name, Some(etag), None)?; + } else { + cache_json_file_lock.create_cache_entry(repo_owner, repo_name, Some(etag), None)?; + }; + + versions_list.extend(remote_versions_list); + cache_json_file_lock.save_file_content(repo_owner, repo_name, versions_list.clone())?; + } else { + let content = 
cache_json_file_lock.get_file_content(repo_owner, repo_name)?; + versions_list.extend(content); + } + + Ok(versions_list) +} + +async fn check_if_need_download( + repo_owner: &str, + repo_name: &str, + url: &str, + source: ReleaseSource, +) -> Result<(bool, bool, Option), anyhow::Error> { + let cache_json_file_lock = CacheJsonFile::current().write().await; + let cache_entry = cache_json_file_lock.get_cache_entry(repo_owner, repo_name); + let mut need_to_download = false; + let cache_entry_present = cache_entry.is_some(); + + match cache_entry { + Some(cache_entry) => { + if !cache_json_file_lock.chech_if_content_file_exist(repo_owner, repo_name) { + need_to_download = true; + } + + let response = RequestClient::current().send_head_request(url).await?; + let remote_etag = RequestClient::current().get_etag_from_head_response(&response); + let local_etag = match source { + ReleaseSource::Mirror => cache_entry.mirror_etag.clone(), + ReleaseSource::Github => cache_entry.github_etag.clone(), + }; + + info!(target: LOG_TARGET, "Remote etag: {}", remote_etag); + info!(target: LOG_TARGET, "Local etag: {:?}", local_etag); + + if !remote_etag.eq(&local_etag.unwrap_or("".to_string())) { + need_to_download = true + }; + + Ok((need_to_download, cache_entry_present, Some(response))) + } + None => { + need_to_download = true; + Ok((need_to_download, cache_entry_present, None)) + } } - let data = response.text().await?; - let releases: Vec = serde_json::from_str(&data)?; +} - debug!(target: LOG_TARGET, "Releases for {}/{}:", repo_owner, repo_name); - let mut res = vec![]; +async fn extract_versions_from_release( + repo_owner: &str, + repo_name: &str, + releases: Vec, + source: ReleaseSource, +) -> Result, anyhow::Error> { + let mut versions_list = vec![]; for release in releases { if release.draft { continue; @@ -136,7 +269,6 @@ async fn list_releases_from( // Remove any v prefix let release_name = release.tag_name.trim_start_matches('v').to_string(); debug!(target: LOG_TARGET, " 
- release: {}", release_name); - // res.push(semver::Version::parse(&tag_name)?); let mut assets = vec![]; for asset in release.assets { let url = match source { @@ -149,11 +281,12 @@ async fn list_releases_from( assets.push(VersionAsset { url, name: asset.name, + source: source.clone(), }); } match semver::Version::parse(&release_name) { Ok(v) => { - res.push(VersionDownloadInfo { version: v, assets }); + versions_list.push(VersionDownloadInfo { version: v, assets }); } Err(e) => { info!(target: LOG_TARGET, "Failed to parse {:?} version: {}", release_name, e); @@ -162,5 +295,5 @@ async fn list_releases_from( } } - Ok(res) + Ok(versions_list) } diff --git a/src-tauri/src/github/request_client.rs b/src-tauri/src/github/request_client.rs new file mode 100644 index 000000000..66bad259b --- /dev/null +++ b/src-tauri/src/github/request_client.rs @@ -0,0 +1,228 @@ +use std::sync::LazyLock; + +use anyhow::anyhow; +use log::info; +use log::warn; +use reqwest::{Client, Response}; + +use super::Release; +const LOG_TARGET: &str = "tari::universe::request_client"; + +pub enum CloudFlareCacheStatus { + Hit, + Miss, + Unknown, + Expired, + Stale, + Bypass, + Revalidated, + Updating, + Dynamic, + NonExistent, +} + +impl CloudFlareCacheStatus { + pub fn from_str(s: &str) -> Self { + match s { + "HIT" => Self::Hit, + "MISS" => Self::Miss, + "EXPIRED" => Self::Expired, + "STALE" => Self::Stale, + "BYPASS" => Self::Bypass, + "REVALIDATED" => Self::Revalidated, + "UPDATING" => Self::Updating, + "DYNAMIC" => Self::Dynamic, + "UNKNOWN" => Self::Unknown, + "NONE" => Self::Unknown, + "NONE/UNKNOWN" => Self::Unknown, + "" => Self::NonExistent, + _ => Self::Unknown, + } + } + pub fn to_str(&self) -> &str { + match self { + Self::Hit => "HIT", + Self::Miss => "MISS", + Self::Unknown => "UNKNOWN", + Self::Expired => "EXPIRED", + Self::Stale => "STALE", + Self::Bypass => "BYPASS", + Self::Revalidated => "REVALIDATED", + Self::Updating => "UPDATING", + Self::Dynamic => "DYNAMIC", + 
Self::NonExistent => "NONEXISTENT", + } + } + + pub fn is_non_existent(&self) -> bool { + matches!(self, Self::NonExistent) + } + + pub fn is_hit(&self) -> bool { + matches!(self, Self::Hit) || matches!(self, Self::Revalidated) + } + + pub fn is_miss(&self) -> bool { + matches!(self, Self::Miss) + } + + pub fn should_log_warning(&self) -> bool { + matches!(self, Self::Unknown) + || matches!(self, Self::NonExistent) + || matches!(self, Self::Dynamic) + || matches!(self, Self::Bypass) + } + + pub fn log_warning_if_present(&self) { + if self.should_log_warning() { + warn!(target: LOG_TARGET, "Cloudflare cache status: {}", self.to_str()); + } + } +} + +static INSTANCE: LazyLock = LazyLock::new(RequestClient::new); +pub struct RequestClient { + client: Client, + user_agent: String, +} + +impl RequestClient { + pub fn new() -> Self { + let user_agent = format!( + "universe {}({})", + env!("CARGO_PKG_VERSION"), + std::env::consts::OS + ); + + Self { + client: Client::new(), + user_agent, + } + } + + fn convert_content_length_to_mb(&self, content_length: u64) -> f64 { + (content_length as f64) / 1024.0 / 1024.0 + } + + pub async fn send_head_request(&self, url: &str) -> Result { + self.client + .head(url) + .header("User-Agent", self.user_agent.clone()) + .send() + .await + } + + pub async fn send_get_request(&self, url: &str) -> Result { + self.client + .get(url) + .header("User-Agent", self.user_agent.clone()) + .send() + .await + } + + pub fn get_etag_from_head_response(&self, response: &Response) -> String { + if response.status().is_server_error() || response.status().is_client_error() { + return "".to_string(); + }; + response + .headers() + .get("etag") + .map_or("", |v| v.to_str().unwrap_or_default()) + .to_string() + } + + pub fn get_content_length_from_head_response(&self, response: &Response) -> u64 { + if response.status().is_server_error() || response.status().is_client_error() { + return 0; + }; + response + .headers() + .get("content-length") + .map_or(0, 
|v| v.to_str().unwrap_or_default().parse().unwrap_or(0)) + } + + pub fn get_cf_cache_status_from_head_response( + &self, + response: &Response, + ) -> CloudFlareCacheStatus { + info!(target: LOG_TARGET, "get_cf_cache_status_from_head_response, response status: {}, url: {}", response.status(), response.url()); + if response.status().is_server_error() || response.status().is_client_error() { + info!(target: LOG_TARGET, "get_cf_cache_status_from_head_response, error"); + return CloudFlareCacheStatus::Unknown; + }; + let cache_status = CloudFlareCacheStatus::from_str( + response + .headers() + .get("cf-cache-status") + .map_or("", |v| v.to_str().unwrap_or_default()), + ); + + info!(target: LOG_TARGET, "get_cf_cache_status_from_head_response, cache status: {:?}", cache_status.to_str()); + info!(target: LOG_TARGET, "get_cf_cache_status_from_head_response_raw, cache status: {:?}", response.headers().get("cf-cache-status")); + + cache_status.log_warning_if_present(); + cache_status + } + + pub async fn fetch_get_versions_download_info( + &self, + url: &str, + ) -> Result<(Vec, String), anyhow::Error> { + let get_response = self.send_get_request(url).await.map_err(|e| anyhow!(e))?; + let etag = get_response + .headers() + .get("etag") + .map_or("", |v| v.to_str().unwrap_or_default()) + .to_string(); + let body = get_response.text().await.map_err(|e| anyhow!(e))?; + + Ok((serde_json::from_str(&body)?, etag)) + } + + pub async fn check_if_cache_hits(&self, url: &str) -> Result { + const MAX_RETRIES: u8 = 3; + const MAX_WAIT_TIME: u64 = 30; + const MIN_WAIT_TIME: u64 = 2; + let mut retries = 0; + + loop { + if retries >= MAX_RETRIES { + return Ok(false); + } + + let head_response = self.send_head_request(url).await?; + + let cf_cache_status = self.get_cf_cache_status_from_head_response(&head_response); + cf_cache_status.log_warning_if_present(); + + let content_length = self.get_content_length_from_head_response(&head_response); + info!(target: LOG_TARGET, "Content length: 
{}", content_length); + info!(target: LOG_TARGET, "Content length in mb: {}", self.convert_content_length_to_mb(content_length)); + + let mut sleep_time = std::time::Duration::from_secs(MIN_WAIT_TIME); + + if !content_length.eq(&0) { + sleep_time = std::time::Duration::from_secs( + #[allow(clippy::cast_possible_truncation)] + ((self.convert_content_length_to_mb(content_length) / 10.0).trunc() as u64) + .min(MAX_WAIT_TIME) + .max(MIN_WAIT_TIME), + ); + } + + if cf_cache_status.is_hit() { + break; + } + + retries += 1; + warn!(target: LOG_TARGET, "Cache miss. Retrying in {} seconds. Try {}/{}", sleep_time.as_secs().to_string() ,retries, MAX_RETRIES); + tokio::time::sleep(sleep_time).await; + } + + Ok(true) + } + + pub fn current() -> &'static LazyLock { + &INSTANCE + } +} diff --git a/src-tauri/src/node_manager.rs b/src-tauri/src/node_manager.rs index 310b61918..b53577d10 100644 --- a/src-tauri/src/node_manager.rs +++ b/src-tauri/src/node_manager.rs @@ -3,7 +3,7 @@ use std::sync::Arc; use std::time::SystemTime; use chrono::{NaiveDateTime, TimeZone, Utc}; -use log::{debug, error}; +use log::error; use minotari_node_grpc_client::grpc::Peer; use tari_core::transactions::tari_amount::MicroMinotari; use tari_crypto::ristretto::RistrettoPublicKey; diff --git a/src-tauri/src/progress_tracker.rs b/src-tauri/src/progress_tracker.rs index 88daf64d4..416a25cdf 100644 --- a/src-tauri/src/progress_tracker.rs +++ b/src-tauri/src/progress_tracker.rs @@ -1,6 +1,6 @@ use std::{collections::HashMap, sync::Arc}; -use log::{debug, error}; +use log::error; use tokio::sync::RwLock; use crate::setup_status_event::SetupStatusEvent;