diff --git a/.vscode/settings.json b/.vscode/settings.json
index caa48dd01..bf78ed3f2 100644
--- a/.vscode/settings.json
+++ b/.vscode/settings.json
@@ -31,5 +31,6 @@
     "evenBetterToml.formatter.trailingNewline": true,
     "evenBetterToml.formatter.reorderKeys": true,
     "evenBetterToml.formatter.reorderArrays": true,
+    "rust-analyzer.showUnlinkedFileNotification": false,
 }
\ No newline at end of file
diff --git a/Cargo.lock b/Cargo.lock
index e77b5de6d..d61ac26d3 100644
--- a/Cargo.lock
+++ b/Cargo.lock
@@ -3940,7 +3940,6 @@ dependencies = [
  "torrust-tracker-test-helpers",
  "torrust-tracker-torrent-repository",
  "tower-http",
- "trace",
  "tracing",
  "url",
  "uuid",
@@ -4071,17 +4070,6 @@ version = "0.3.2"
 source = "registry+https://github.com/rust-lang/crates.io-index"
 checksum = "b6bc1c9ce2b5135ac7f93c72918fc37feb872bdc6a5533a8b85eb4b86bfdae52"
 
-[[package]]
-name = "trace"
-version = "0.1.7"
-source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "9ad0c048e114d19d1140662762bfdb10682f3bc806d8be18af846600214dd9af"
-dependencies = [
- "proc-macro2",
- "quote",
- "syn 1.0.109",
-]
-
 [[package]]
 name = "tracing"
 version = "0.1.40"
diff --git a/Cargo.toml b/Cargo.toml
index 99b7a334a..623971552 100644
--- a/Cargo.toml
+++ b/Cargo.toml
@@ -70,7 +70,6 @@ torrust-tracker-located-error = { version = "3.0.0-alpha.12-develop", path = "pa
 torrust-tracker-primitives = { version = "3.0.0-alpha.12-develop", path = "packages/primitives" }
 torrust-tracker-torrent-repository = { version = "3.0.0-alpha.12-develop", path = "packages/torrent-repository" }
 tower-http = { version = "0", features = ["compression-full", "cors", "propagate-header", "request-id", "trace"] }
-trace = "0"
 tracing = "0"
 url = "2"
 uuid = { version = "1", features = ["v4"] }
diff --git a/cSpell.json b/cSpell.json
index bbcba98a7..dbe0ca430 100644
--- a/cSpell.json
+++ b/cSpell.json
@@ -29,6 +29,7 @@
     "certbot",
     "chrono",
     "clippy",
+    "Cloneable",
     "codecov",
     "codegen",
     "completei",
@@ -69,8 +70,8 @@
     "infoschema",
     "Intermodal",
     "intervali",
-    "kcachegrind",
     "Joakim",
+    "kcachegrind",
     "keyout",
     "lcov",
     "leecher",
diff --git a/src/console/clients/checker/app.rs b/src/console/clients/checker/app.rs
index 82ea800d0..0c9852557 100644
--- a/src/console/clients/checker/app.rs
+++ b/src/console/clients/checker/app.rs
@@ -53,7 +53,7 @@ pub async fn run() -> Result<Vec<CheckResult>> {
         console: console_printer,
     };
 
-    Ok(service.run_checks().await)
+    service.run_checks().await
 }
 
 fn setup_logging(level: LevelFilter) {
@@ -78,15 +78,17 @@
 }
 
 fn setup_config(args: Args) -> Result<Configuration> {
-    match (args.config_path, args.config_content) {
-        (Some(config_path), _) => load_config_from_file(&config_path),
-        (_, Some(config_content)) => parse_from_json(&config_content).context("invalid config format"),
-        _ => Err(anyhow::anyhow!("no configuration provided")),
+    // If a config is directly supplied, we use it.
+    if let Some(config) = args.config_content {
+        parse_from_json(&config).context("invalid config format")
+    }
+    // or we load it from a file...
+    else if let Some(path) = args.config_path {
+        let file_content = std::fs::read_to_string(path.clone()).with_context(|| format!("can't read config file {path:?}"))?;
+        parse_from_json(&file_content).context("invalid config format")
+    }
+    // but we cannot run without any config...
+    else {
+        Err(anyhow::anyhow!("no configuration provided"))
     }
-}
-
-fn load_config_from_file(path: &PathBuf) -> Result<Configuration> {
-    let file_content = std::fs::read_to_string(path).with_context(|| format!("can't read config file {path:?}"))?;
-
-    parse_from_json(&file_content).context("invalid config format")
 }
diff --git a/src/console/clients/checker/checks/health.rs b/src/console/clients/checker/checks/health.rs
index 9c28da514..a579ffe1e 100644
--- a/src/console/clients/checker/checks/health.rs
+++ b/src/console/clients/checker/checks/health.rs
@@ -1,51 +1,75 @@
+use std::sync::Arc;
 use std::time::Duration;
 
+use anyhow::Result;
 use colored::Colorize;
-use reqwest::{Client as HttpClient, Url, Url as ServiceUrl};
+use hyper::StatusCode;
+use reqwest::{Client as HttpClient, Response};
+use thiserror::Error;
+use url::Url;
 
 use crate::console::clients::checker::console::Console;
 use crate::console::clients::checker::printer::Printer;
 use crate::console::clients::checker::service::{CheckError, CheckResult};
 
-pub async fn run(health_checks: &Vec<ServiceUrl>, console: &Console, check_results: &mut Vec<CheckResult>) {
-    console.println("Health checks ...");
-
-    for health_check_url in health_checks {
-        match run_health_check(health_check_url.clone(), console).await {
-            Ok(()) => check_results.push(Ok(())),
-            Err(err) => check_results.push(Err(err)),
-        }
-    }
+#[derive(Debug, Clone, Error)]
+pub enum Error {
+    #[error("Failed to Build a Http Client: {err:?}")]
+    ClientBuildingError { err: Arc<reqwest::Error> },
+    #[error("Health check failed to get a response: {err:?}")]
+    ResponseError { err: Arc<reqwest::Error> },
+    #[error("Http check returned a non-success code: \"{code}\" with the response: \"{response:?}\"")]
+    UnsuccessfulResponse { code: StatusCode, response: Arc<Response> },
 }
 
-async fn run_health_check(url: Url, console: &Console) -> Result<(), CheckError> {
-    let client = HttpClient::builder().timeout(Duration::from_secs(5)).build().unwrap();
+pub async fn run(health_checks: Vec<Url>, timeout: Duration, console: Console) -> Vec<CheckResult> {
+    let mut check_results = Vec::default();
 
-    let colored_url = url.to_string().yellow();
+    console.println("Health checks ...");
+
+    for url in health_checks {
+        let colored_url = url.to_string().yellow();
 
-    match client.get(url.clone()).send().await {
-        Ok(response) => {
-            if response.status().is_success() {
-                console.println(&format!("{} - Health API at {} is OK", "✓".green(), colored_url));
-                Ok(())
-            } else {
-                console.eprintln(&format!(
-                    "{} - Health API at {} is failing: {:?}",
-                    "✗".red(),
+        match run_health_check(url.clone(), timeout).await {
+            Ok(response) => {
+                console.println(&format!(
+                    "{} - Health API at {} is {}",
+                    "✓".green(),
                     colored_url,
-                    response
+                    response.status()
                 ));
-                Err(CheckError::HealthCheckError { url })
+
+                check_results.push(Ok(()));
+            }
+            Err(err) => {
+                console.eprintln(&format!("{} - Health API at {} is failing: {}", "✗".red(), colored_url, err));
+
+                check_results.push(Err(CheckError::HealthCheckError { url, err }));
             }
         }
-        Err(err) => {
-            console.eprintln(&format!(
-                "{} - Health API at {} is failing: {:?}",
-                "✗".red(),
-                colored_url,
-                err
-            ));
-            Err(CheckError::HealthCheckError { url })
-        }
+    }
+
+    check_results
+}
+
+async fn run_health_check(url: Url, timeout: Duration) -> Result<Response, Error> {
+    let client = HttpClient::builder()
+        .timeout(timeout)
+        .build()
+        .map_err(|e| Error::ClientBuildingError { err: e.into() })?;
+
+    let response = client
+        .get(url.clone())
+        .send()
+        .await
+        .map_err(|e| Error::ResponseError { err: e.into() })?;
+
+    if response.status().is_success() {
+        Ok(response)
+    } else {
+        Err(Error::UnsuccessfulResponse {
+            code: response.status(),
+            response: response.into(),
+        })
+    }
 }
diff --git a/src/console/clients/checker/checks/http.rs b/src/console/clients/checker/checks/http.rs
index 501696df4..046963dd5 100644
--- a/src/console/clients/checker/checks/http.rs
+++ b/src/console/clients/checker/checks/http.rs
@@ -1,8 +1,10 @@
 use std::str::FromStr;
+use std::sync::Arc;
+use std::time::Duration;
 
+use anyhow::Result;
 use colored::Colorize;
-use log::debug;
-use reqwest::Url as ServiceUrl;
+use thiserror::Error;
 use torrust_tracker_primitives::info_hash::InfoHash;
 use url::Url;
 
@@ -14,82 +16,92 @@ use crate::shared::bit_torrent::tracker::http::client::responses::announce::Anno
 use crate::shared::bit_torrent::tracker::http::client::responses::scrape;
 use crate::shared::bit_torrent::tracker::http::client::{requests, Client};
 
-pub async fn run(http_trackers: &Vec<Url>, console: &Console, check_results: &mut Vec<CheckResult>) {
+#[derive(Debug, Clone, Error)]
+pub enum Error {
+    #[error("Http request did not receive a response within the timeout: {err:?}")]
+    HttpClientError {
+        err: crate::shared::bit_torrent::tracker::http::client::Error,
+    },
+    #[error("Http failed to get a response at all: {err:?}")]
+    ResponseError { err: Arc<reqwest::Error> },
+    #[error("Failed to deserialize the serde bencoded response data with the error: \"{err:?}\"")]
+    ParseSerdeBencodeError {
+        data: hyper::body::Bytes,
+        err: Arc<serde_bencode::Error>,
+    },
+
+    #[error("Failed to deserialize the bencoded response data with the error: \"{err:?}\"")]
+    ParseScrapeBencodeError {
+        data: hyper::body::Bytes,
+        err: Arc<scrape::BencodeParseError>,
+    },
+}
+
+pub async fn run(http_trackers: Vec<Url>, timeout: Duration, console: Console) -> Vec<CheckResult> {
+    let mut check_results = Vec::default();
+
     console.println("HTTP trackers ...");
 
-    for http_tracker in http_trackers {
-        let colored_tracker_url = http_tracker.to_string().yellow();
+    for ref url in http_trackers {
+        let colored_url = url.to_string().yellow();
 
-        match check_http_announce(http_tracker).await {
-            Ok(()) => {
-                check_results.push(Ok(()));
-                console.println(&format!("{} - Announce at {} is OK", "✓".green(), colored_tracker_url));
+        check_results.push(match check_http_announce(url, timeout).await {
+            Ok(_) => {
+                console.println(&format!("{} - Announce at {} is OK", "✓".green(), colored_url));
+                Ok(())
             }
             Err(err) => {
-                check_results.push(Err(err));
-                console.println(&format!("{} - Announce at {} is failing", "✗".red(), colored_tracker_url));
+                console.println(&format!("{} - Announce at {} is failing", "✗".red(), colored_url));
+                Err(CheckError::HttpCheckError { url: url.clone(), err })
             }
-        }
+        });
 
-        match check_http_scrape(http_tracker).await {
-            Ok(()) => {
-                check_results.push(Ok(()));
-                console.println(&format!("{} - Scrape at {} is OK", "✓".green(), colored_tracker_url));
+        check_results.push(match check_http_scrape(url, timeout).await {
+            Ok(_) => {
+                console.println(&format!("{} - Scrape at {} is OK", "✓".green(), colored_url));
+                Ok(())
             }
             Err(err) => {
-                check_results.push(Err(err));
-                console.println(&format!("{} - Scrape at {} is failing", "✗".red(), colored_tracker_url));
+                console.println(&format!("{} - Scrape at {} is failing", "✗".red(), colored_url));
+                Err(CheckError::HttpCheckError { url: url.clone(), err })
            }
-        }
+        });
     }
+
+    check_results
 }
 
-async fn check_http_announce(tracker_url: &Url) -> Result<(), CheckError> {
+async fn check_http_announce(url: &Url, timeout: Duration) -> Result<Announce, Error> {
     let info_hash_str = "9c38422213e30bff212b30c360d26f9a02136422".to_string(); // # DevSkim: ignore DS173237
     let info_hash = InfoHash::from_str(&info_hash_str).expect("a valid info-hash is required");
 
-    // todo: HTTP request could panic.For example, if the server is not accessible.
-    // We should change the client to catch that error and return a `CheckError`.
-    // Otherwise the checking process will stop. The idea is to process all checks
-    // and return a final report.
-    let response = Client::new(tracker_url.clone())
+    let client = Client::new(url.clone(), timeout).map_err(|err| Error::HttpClientError { err })?;
+
+    let response = client
         .announce(&QueryBuilder::with_default_values().with_info_hash(&info_hash).query())
-        .await;
-
-    if let Ok(body) = response.bytes().await {
-        if let Ok(_announce_response) = serde_bencode::from_bytes::<Announce>(&body) {
-            Ok(())
-        } else {
-            debug!("announce body {:#?}", body);
-            Err(CheckError::HttpError {
-                url: tracker_url.clone(),
-            })
-        }
-    } else {
-        Err(CheckError::HttpError {
-            url: tracker_url.clone(),
-        })
-    }
+        .await
+        .map_err(|err| Error::HttpClientError { err })?;
+
+    let body = response.bytes().await.map_err(|e| Error::ResponseError { err: e.into() })?;
+
+    serde_bencode::from_bytes::<Announce>(&body).map_err(|e| Error::ParseSerdeBencodeError {
+        data: body,
+        err: e.into(),
+    })
 }
 
-async fn check_http_scrape(url: &Url) -> Result<(), CheckError> {
+async fn check_http_scrape(url: &Url, timeout: Duration) -> Result<scrape::Response, Error> {
     let info_hashes: Vec<String> = vec!["9c38422213e30bff212b30c360d26f9a02136422".to_string()]; // # DevSkim: ignore DS173237
     let query = requests::scrape::Query::try_from(info_hashes).expect("a valid array of info-hashes is required");
 
-    // todo: HTTP request could panic.For example, if the server is not accessible.
-    // We should change the client to catch that error and return a `CheckError`.
-    // Otherwise the checking process will stop. The idea is to process all checks
-    // and return a final report.
-    let response = Client::new(url.clone()).scrape(&query).await;
-
-    if let Ok(body) = response.bytes().await {
-        if let Ok(_scrape_response) = scrape::Response::try_from_bencoded(&body) {
-            Ok(())
-        } else {
-            debug!("scrape body {:#?}", body);
-            Err(CheckError::HttpError { url: url.clone() })
-        }
-    } else {
-        Err(CheckError::HttpError { url: url.clone() })
-    }
+    let client = Client::new(url.clone(), timeout).map_err(|err| Error::HttpClientError { err })?;
+
+    let response = client.scrape(&query).await.map_err(|err| Error::HttpClientError { err })?;
+
+    let body = response.bytes().await.map_err(|e| Error::ResponseError { err: e.into() })?;
+
+    scrape::Response::try_from_bencoded(&body).map_err(|e| Error::ParseScrapeBencodeError {
+        data: body,
+        err: e.into(),
+    })
 }
diff --git a/src/console/clients/checker/checks/udp.rs b/src/console/clients/checker/checks/udp.rs
index 47a2a1a00..6c268f74f 100644
--- a/src/console/clients/checker/checks/udp.rs
+++ b/src/console/clients/checker/checks/udp.rs
@@ -1,4 +1,5 @@
 use std::net::SocketAddr;
+use std::time::Duration;
 
 use aquatic_udp_protocol::{Port, TransactionId};
 use colored::Colorize;
@@ -14,10 +15,12 @@ use crate::console::clients::udp::checker;
 const ASSIGNED_BY_OS: u16 = 0;
 const RANDOM_TRANSACTION_ID: i32 = -888_840_697;
 
-pub async fn run(udp_trackers: &Vec<SocketAddr>, console: &Console, check_results: &mut Vec<CheckResult>) {
+pub async fn run(udp_trackers: Vec<SocketAddr>, _timeout: Duration, console: Console) -> Vec<CheckResult> {
+    let mut check_results = Vec::default();
+
     console.println("UDP trackers ...");
 
-    for udp_tracker in udp_trackers {
+    for ref udp_tracker in udp_trackers {
         debug!("UDP tracker: {:?}", udp_tracker);
 
         let colored_tracker_url = udp_tracker.to_string().yellow();
@@ -84,4 +87,6 @@ pub async fn run(udp_trackers: &Vec<SocketAddr>, console: &Console, check_result
             console.println(&format!("{} - Announce at {} is failing", "✗".red(), colored_tracker_url));
         }
     }
+
+    check_results
 }
diff --git a/src/console/clients/checker/config.rs b/src/console/clients/checker/config.rs
index 6e44d889b..f3be50da1 100644
--- a/src/console/clients/checker/config.rs
+++ b/src/console/clients/checker/config.rs
@@ -1,19 +1,19 @@
 use std::error::Error;
 use std::fmt;
 use std::net::SocketAddr;
+use std::time::Duration;
 
 use reqwest::Url as ServiceUrl;
 use serde::Deserialize;
 
+/// Client Timeout
+const TIMEOUT_SEC: Duration = Duration::from_secs(5);
+
 /// It parses the configuration from a JSON format.
 ///
 /// # Errors
 ///
 /// Will return an error if the configuration is not valid.
-///
-/// # Panics
-///
-/// Will panic if unable to read the configuration file.
 pub fn parse_from_json(json: &str) -> Result<Configuration, ConfigurationError> {
     let plain_config: PlainConfiguration = serde_json::from_str(json).map_err(ConfigurationError::JsonParseError)?;
     Configuration::try_from(plain_config)
@@ -22,7 +22,7 @@
 /// DTO for the configuration to serialize/deserialize configuration.
 ///
 /// Configuration does not need to be valid.
-#[derive(Deserialize)]
+#[derive(Debug, Clone, Deserialize)]
 struct PlainConfiguration {
     pub udp_trackers: Vec<String>,
     pub http_trackers: Vec<String>,
@@ -30,10 +30,12 @@ struct PlainConfiguration {
 }
 
 /// Validated configuration
+#[derive(Debug, Clone)]
 pub struct Configuration {
     pub udp_trackers: Vec<SocketAddr>,
     pub http_trackers: Vec<ServiceUrl>,
     pub health_checks: Vec<ServiceUrl>,
+    pub client_timeout: Duration,
 }
 
 #[derive(Debug)]
@@ -81,6 +83,7 @@ impl TryFrom<PlainConfiguration> for Configuration {
             udp_trackers,
             http_trackers,
             health_checks,
+            client_timeout: TIMEOUT_SEC,
         })
     }
 }
@@ -107,11 +110,11 @@ mod tests {
         );
         assert_eq!(
             config.http_trackers,
-            vec![ServiceUrl::parse("http://127.0.0.1:8080").unwrap()]
+            vec![ServiceUrl::parse("http://127.0.0.1:8080").expect("it should be a valid url")]
         );
         assert_eq!(
             config.health_checks,
-            vec![ServiceUrl::parse("http://127.0.0.1:8080/health").unwrap()]
+            vec![ServiceUrl::parse("http://127.0.0.1:8080/health").expect("it should be a valid url")]
         );
     }
diff --git a/src/console/clients/checker/console.rs b/src/console/clients/checker/console.rs
index b55c559fc..b516c81ce 100644
--- a/src/console/clients/checker/console.rs
+++ b/src/console/clients/checker/console.rs
@@ -1,5 +1,6 @@
 use super::printer::{Printer, CLEAR_SCREEN};
 
+#[derive(Debug, Clone, Copy)]
 pub struct Console {}
 
 impl Default for Console {
diff --git a/src/console/clients/checker/logger.rs b/src/console/clients/checker/logger.rs
index 50e97189f..a1295bacc 100644
--- a/src/console/clients/checker/logger.rs
+++ b/src/console/clients/checker/logger.rs
@@ -2,6 +2,7 @@ use std::cell::RefCell;
 
 use super::printer::{Printer, CLEAR_SCREEN};
 
+#[derive(Debug, Clone)]
 pub struct Logger {
     output: RefCell<String>,
 }
diff --git a/src/console/clients/checker/service.rs b/src/console/clients/checker/service.rs
index 94eff4a88..d484c560a 100644
--- a/src/console/clients/checker/service.rs
+++ b/src/console/clients/checker/service.rs
@@ -1,7 +1,10 @@
 use std::net::SocketAddr;
 use std::sync::Arc;
 
+use anyhow::{Context, Result};
 use reqwest::Url;
+use thiserror::Error;
+use tokio::task::JoinSet;
 
 use super::checks;
 use super::config::Configuration;
@@ -15,28 +18,37 @@ pub struct Service {
 
 pub type CheckResult = Result<(), CheckError>;
 
-#[derive(Debug)]
+#[derive(Debug, Clone, Error)]
 pub enum CheckError {
+    #[error("Error In Udp: socket: {socket_addr:?}")]
     UdpError { socket_addr: SocketAddr },
-    HttpError { url: Url },
-    HealthCheckError { url: Url },
+    #[error("Error In Http: url: {url:?}")]
+    HttpCheckError { url: Url, err: checks::http::Error },
+    #[error("Error In HealthCheck: url: {url:?}")]
+    HealthCheckError { url: Url, err: checks::health::Error },
 }
 
 impl Service {
     /// # Errors
     ///
-    /// Will return OK is all checks pass or an array with the check errors.
-    pub async fn run_checks(&self) -> Vec<CheckResult> {
+    /// It will return an error if some of the tests panic or otherwise fail to run.
+    /// On success it will return a vector of [`CheckResult`] values.
+    pub async fn run_checks(self) -> Result<Vec<CheckResult>> {
         self.console.println("Running checks for trackers ...");
 
-        let mut check_results = vec![];
+        let mut check_results = Vec::<CheckResult>::default();
 
-        checks::udp::run(&self.config.udp_trackers, &self.console, &mut check_results).await;
+        let timeout = self.config.client_timeout;
 
-        checks::http::run(&self.config.http_trackers, &self.console, &mut check_results).await;
+        let mut checks = JoinSet::new();
+        checks.spawn(checks::udp::run(self.config.udp_trackers.clone(), timeout, self.console));
+        checks.spawn(checks::http::run(self.config.http_trackers.clone(), timeout, self.console));
+        checks.spawn(checks::health::run(self.config.health_checks.clone(), timeout, self.console));
 
-        checks::health::run(&self.config.health_checks, &self.console, &mut check_results).await;
+        while let Some(results) = checks.join_next().await {
+            check_results.append(&mut results.context("failed to join check")?);
+        }
 
-        check_results
+        Ok(check_results)
     }
 }
diff --git a/src/console/clients/http/app.rs b/src/console/clients/http/app.rs
index 511fb6628..b183be7ac 100644
--- a/src/console/clients/http/app.rs
+++ b/src/console/clients/http/app.rs
@@ -14,6 +14,7 @@
 //! cargo run --bin http_tracker_client scrape http://127.0.0.1:7070 9c38422213e30bff212b30c360d26f9a02136422 | jq
 //! ```
 use std::str::FromStr;
+use std::time::Duration;
 
 use anyhow::Context;
 use clap::{Parser, Subcommand};
@@ -30,6 +31,7 @@ use crate::shared::bit_torrent::tracker::http::client::{requests, Client};
 struct Args {
     #[command(subcommand)]
     command: Command,
+    timeout: u64,
 }
 
 #[derive(Subcommand, Debug)]
@@ -44,34 +46,38 @@ enum Command {
 
 pub async fn run() -> anyhow::Result<()> {
     let args = Args::parse();
 
+    let timeout = Duration::from_secs(args.timeout);
+
     match args.command {
         Command::Announce { tracker_url, info_hash } => {
-            announce_command(tracker_url, info_hash).await?;
+            announce_command(tracker_url, timeout, info_hash).await?;
         }
         Command::Scrape {
             tracker_url,
             info_hashes,
         } => {
-            scrape_command(&tracker_url, &info_hashes).await?;
+            scrape_command(&tracker_url, timeout, &info_hashes).await?;
         }
     }
 
     Ok(())
 }
 
-async fn announce_command(tracker_url: String, info_hash: String) -> anyhow::Result<()> {
+async fn announce_command(tracker_url: String, timeout: Duration, info_hash: String) -> anyhow::Result<()> {
     let base_url = Url::parse(&tracker_url).context("failed to parse HTTP tracker base URL")?;
     let info_hash =
         InfoHash::from_str(&info_hash).expect("Invalid infohash. Example infohash: `9c38422213e30bff212b30c360d26f9a02136422`");
 
-    let response = Client::new(base_url)
+    let response = Client::new(base_url, timeout)?
         .announce(&QueryBuilder::with_default_values().with_info_hash(&info_hash).query())
-        .await;
+        .await?;
 
-    let body = response.bytes().await.unwrap();
+    let body = response.bytes().await.context("it should get back a valid response")?;
 
-    let announce_response: Announce = serde_bencode::from_bytes(&body)
-        .unwrap_or_else(|_| panic!("response body should be a valid announce response, got: \"{:#?}\"", &body));
+    let announce_response: Announce = serde_bencode::from_bytes(&body).context(format!(
+        "response body should be a valid announce response, got: \"{:#?}\"",
+        &body
+    ))?;
 
     let json = serde_json::to_string(&announce_response).context("failed to serialize scrape response into JSON")?;
 
@@ -80,17 +86,19 @@ async fn announce_command(tracker_url: String, info_hash: String) -> anyhow::Res
     Ok(())
 }
 
-async fn scrape_command(tracker_url: &str, info_hashes: &[String]) -> anyhow::Result<()> {
+async fn scrape_command(tracker_url: &str, timeout: Duration, info_hashes: &[String]) -> anyhow::Result<()> {
     let base_url = Url::parse(tracker_url).context("failed to parse HTTP tracker base URL")?;
 
     let query = requests::scrape::Query::try_from(info_hashes).context("failed to parse infohashes")?;
 
-    let response = Client::new(base_url).scrape(&query).await;
+    let response = Client::new(base_url, timeout)?.scrape(&query).await?;
 
-    let body = response.bytes().await.unwrap();
+    let body = response.bytes().await.context("it should get back a valid response")?;
 
-    let scrape_response = scrape::Response::try_from_bencoded(&body)
-        .unwrap_or_else(|_| panic!("response body should be a valid scrape response, got: \"{:#?}\"", &body));
+    let scrape_response = scrape::Response::try_from_bencoded(&body).context(format!(
+        "response body should be a valid scrape response, got: \"{:#?}\"",
+        &body
+    ))?;
 
     let json = serde_json::to_string(&scrape_response).context("failed to serialize scrape response into JSON")?;
 
diff --git a/src/shared/bit_torrent/tracker/http/client/mod.rs b/src/shared/bit_torrent/tracker/http/client/mod.rs
index a75b0fec3..4cddc98f4 100644
--- a/src/shared/bit_torrent/tracker/http/client/mod.rs
+++ b/src/shared/bit_torrent/tracker/http/client/mod.rs
@@ -2,17 +2,31 @@ pub mod requests;
 pub mod responses;
 
 use std::net::IpAddr;
+use std::sync::Arc;
+use std::time::Duration;
 
+use hyper::StatusCode;
 use requests::announce::{self, Query};
 use requests::scrape;
 use reqwest::{Client as ReqwestClient, Response, Url};
+use thiserror::Error;
 
 use crate::core::auth::Key;
 
+#[derive(Debug, Clone, Error)]
+pub enum Error {
+    #[error("Failed to Build a Http Client: {err:?}")]
+    ClientBuildingError { err: Arc<reqwest::Error> },
+    #[error("Failed to get a response: {err:?}")]
+    ResponseError { err: Arc<reqwest::Error> },
+    #[error("Returned a non-success code: \"{code}\" with the response: \"{response:?}\"")]
+    UnsuccessfulResponse { code: StatusCode, response: Arc<Response> },
+}
+
 /// HTTP Tracker Client
 pub struct Client {
+    client: ReqwestClient,
     base_url: Url,
-    reqwest: ReqwestClient,
     key: Option<Key>,
 }
@@ -25,78 +39,144 @@ pub struct Client {
 /// base url path query
 /// ```
 impl Client {
-    /// # Panics
+    /// # Errors
     ///
     /// This method fails if the client builder fails.
-    #[must_use]
-    pub fn new(base_url: Url) -> Self {
-        Self {
+    pub fn new(base_url: Url, timeout: Duration) -> Result<Self, Error> {
+        let client = reqwest::Client::builder()
+            .timeout(timeout)
+            .build()
+            .map_err(|e| Error::ClientBuildingError { err: e.into() })?;
+
+        Ok(Self {
             base_url,
-            reqwest: reqwest::Client::builder().build().unwrap(),
+            client,
             key: None,
-        }
+        })
     }
 
     /// Creates the new client binding it to an specific local address.
     ///
-    /// # Panics
+    /// # Errors
     ///
     /// This method fails if the client builder fails.
-    #[must_use]
-    pub fn bind(base_url: Url, local_address: IpAddr) -> Self {
-        Self {
+    pub fn bind(base_url: Url, timeout: Duration, local_address: IpAddr) -> Result<Self, Error> {
+        let client = reqwest::Client::builder()
+            .timeout(timeout)
+            .local_address(local_address)
+            .build()
+            .map_err(|e| Error::ClientBuildingError { err: e.into() })?;
+
+        Ok(Self {
             base_url,
-            reqwest: reqwest::Client::builder().local_address(local_address).build().unwrap(),
+            client,
             key: None,
-        }
+        })
     }
 
-    /// # Panics
+    /// # Errors
     ///
     /// This method fails if the client builder fails.
-    #[must_use]
-    pub fn authenticated(base_url: Url, key: Key) -> Self {
-        Self {
+    pub fn authenticated(base_url: Url, key: Key, timeout: Duration) -> Result<Self, Error> {
+        let client = reqwest::Client::builder()
+            .timeout(timeout)
+            .build()
+            .map_err(|e| Error::ClientBuildingError { err: e.into() })?;
+
+        Ok(Self {
             base_url,
-            reqwest: reqwest::Client::builder().build().unwrap(),
+            client,
             key: Some(key),
-        }
+        })
     }
 
-    pub async fn announce(&self, query: &announce::Query) -> Response {
-        self.get(&self.build_announce_path_and_query(query)).await
+    /// # Errors
+    ///
+    /// This method fails if the returned response was not successful
+    pub async fn announce(&self, query: &announce::Query) -> Result<Response, Error> {
+        let response = self.get(&self.build_announce_path_and_query(query)).await?;
+
+        if response.status().is_success() {
+            Ok(response)
+        } else {
+            Err(Error::UnsuccessfulResponse {
+                code: response.status(),
+                response: response.into(),
+            })
+        }
     }
 
-    pub async fn scrape(&self, query: &scrape::Query) -> Response {
-        self.get(&self.build_scrape_path_and_query(query)).await
+    /// # Errors
+    ///
+    /// This method fails if the returned response was not successful
+    pub async fn scrape(&self, query: &scrape::Query) -> Result<Response, Error> {
+        let response = self.get(&self.build_scrape_path_and_query(query)).await?;
+
+        if response.status().is_success() {
+            Ok(response)
+        } else {
+            Err(Error::UnsuccessfulResponse {
+                code: response.status(),
+                response: response.into(),
+            })
+        }
     }
 
-    pub async fn announce_with_header(&self, query: &Query, key: &str, value: &str) -> Response {
-        self.get_with_header(&self.build_announce_path_and_query(query), key, value)
-            .await
+    /// # Errors
+    ///
+    /// This method fails if the returned response was not successful
+    pub async fn announce_with_header(&self, query: &Query, key: &str, value: &str) -> Result<Response, Error> {
+        let response = self
+            .get_with_header(&self.build_announce_path_and_query(query), key, value)
+            .await?;
+
+        if response.status().is_success() {
+            Ok(response)
+        } else {
+            Err(Error::UnsuccessfulResponse {
+                code: response.status(),
+                response: response.into(),
+            })
+        }
     }
 
-    pub async fn health_check(&self) -> Response {
-        self.get(&self.build_path("health_check")).await
+    /// # Errors
+    ///
+    /// This method fails if the returned response was not successful
+    pub async fn health_check(&self) -> Result<Response, Error> {
+        let response = self.get(&self.build_path("health_check")).await?;
+
+        if response.status().is_success() {
+            Ok(response)
+        } else {
+            Err(Error::UnsuccessfulResponse {
+                code: response.status(),
+                response: response.into(),
+            })
+        }
     }
 
-    /// # Panics
+    /// # Errors
     ///
     /// This method fails if there was an error while sending request.
-    pub async fn get(&self, path: &str) -> Response {
-        self.reqwest.get(self.build_url(path)).send().await.unwrap()
+    pub async fn get(&self, path: &str) -> Result<Response, Error> {
+        self.client
+            .get(self.build_url(path))
+            .send()
+            .await
+            .map_err(|e| Error::ResponseError { err: e.into() })
    }
 
-    /// # Panics
+    /// # Errors
     ///
     /// This method fails if there was an error while sending request.
-    pub async fn get_with_header(&self, path: &str, key: &str, value: &str) -> Response {
-        self.reqwest
+    pub async fn get_with_header(&self, path: &str, key: &str, value: &str) -> Result<Response, Error> {
+        self.client
             .get(self.build_url(path))
             .header(key, value)
             .send()
             .await
-            .unwrap()
+            .map_err(|e| Error::ResponseError { err: e.into() })
     }
 
     fn build_announce_path_and_query(&self, query: &announce::Query) -> String {
diff --git a/src/shared/bit_torrent/tracker/http/client/responses/scrape.rs b/src/shared/bit_torrent/tracker/http/client/responses/scrape.rs
index 25a2f0a81..0cb7eeb17 100644
--- a/src/shared/bit_torrent/tracker/http/client/responses/scrape.rs
+++ b/src/shared/bit_torrent/tracker/http/client/responses/scrape.rs
@@ -5,6 +5,7 @@ use std::str;
 use serde::ser::SerializeMap;
 use serde::{Deserialize, Serialize, Serializer};
 use serde_bencode::value::Value;
+use thiserror::Error;
 
 use crate::shared::bit_torrent::tracker::http::{ByteArray20, InfoHash};
 
@@ -105,11 +106,15 @@ impl ResponseBuilder {
     }
 }
 
-#[derive(Debug)]
+#[derive(Debug, Error)]
 pub enum BencodeParseError {
+    #[error("Invalid Value in Dictionary: {value:?}")]
     InvalidValueExpectedDict { value: Value },
+    #[error("Invalid Value in Integer: {value:?}")]
     InvalidValueExpectedInt { value: Value },
+    #[error("Invalid File Field: {value:?}")]
     InvalidFileField { value: Value },
+    #[error("Missing File Field: {field_name}")]
     MissingFileField { field_name: String },
 }
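
A minimal sketch of how a caller drives the now-fallible `Client` API end to end, mirroring `scrape_command` in `src/console/clients/http/app.rs` above. The tracker URL, the five-second timeout, and the `scrape_example` helper name are illustrative assumptions, not part of this patch:

```rust
use std::time::Duration;

use anyhow::Context;
use reqwest::Url;

use crate::shared::bit_torrent::tracker::http::client::responses::scrape;
use crate::shared::bit_torrent::tracker::http::client::{requests, Client};

// Hypothetical helper: every step that previously panicked now bubbles up
// through `anyhow::Result`, so one failing tracker cannot abort a whole run.
async fn scrape_example() -> anyhow::Result<()> {
    let base_url = Url::parse("http://127.0.0.1:7070").context("invalid tracker URL")?; // assumed address
    let info_hashes = vec!["9c38422213e30bff212b30c360d26f9a02136422".to_string()]; // sample infohash reused from the checks above

    let query = requests::scrape::Query::try_from(info_hashes).context("invalid infohashes")?;

    // `Client::new` now requires an explicit timeout and returns a `Result`;
    // `scrape` additionally rejects non-success HTTP status codes.
    let response = Client::new(base_url, Duration::from_secs(5))?.scrape(&query).await?;

    let body = response.bytes().await.context("failed to read response body")?;
    let scrape_response = scrape::Response::try_from_bencoded(&body).context("invalid bencoded scrape response")?;

    println!("{}", serde_json::to_string(&scrape_response)?);

    Ok(())
}
```

The design consequence shows in `checks::http::run`: a request that times out or returns a non-2xx code now surfaces as an `Err` entry in the final report instead of panicking the checker process.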