-
Notifications
You must be signed in to change notification settings - Fork 44
Commit
This commit does not belong to any branch on this repository, and may belong to a fork outside of the repository.
Merge #923: Client Work: Improve Error Enums and Some Basic Cleanup
c202db7 dev: tracker client error enums (Cameron Garnham) 988f1c7 dev: add vscode 'code-workspace' to git ignore file (Cameron Garnham) Pull request description: most work extracted from #765 Top commit has no ACKs. Tree-SHA512: bfa36bb3849b4694adc812c9ec2f401a14c0f4fabfd201abe88c43cfc215493ebbe7a6847fb075456ad85d48bf5d2b7a91b2af65ef5b05805a5f7fc5758ccc75
- Loading branch information
Showing
21 changed files
with
772 additions
and
560 deletions.
There are no files selected for viewing
This file contains bidirectional Unicode text that may be interpreted or compiled differently than what appears below. To review, open the file in an editor that reveals hidden Unicode characters.
Learn more about bidirectional Unicode characters
Original file line number | Diff line number | Diff line change |
---|---|---|
|
@@ -12,4 +12,5 @@ | |
/tracker.* | ||
/tracker.toml | ||
callgrind.out | ||
perf.data* | ||
perf.data* | ||
*.code-workspace |
Some generated files are not rendered by default. Learn more about how customized files appear on GitHub.
Oops, something went wrong.
This file contains bidirectional Unicode text that may be interpreted or compiled differently than what appears below. To review, open the file in an editor that reveals hidden Unicode characters.
Learn more about bidirectional Unicode characters
This file contains bidirectional Unicode text that may be interpreted or compiled differently than what appears below. To review, open the file in an editor that reveals hidden Unicode characters.
Learn more about bidirectional Unicode characters
This file contains bidirectional Unicode text that may be interpreted or compiled differently than what appears below. To review, open the file in an editor that reveals hidden Unicode characters.
Learn more about bidirectional Unicode characters
This file contains bidirectional Unicode text that may be interpreted or compiled differently than what appears below. To review, open the file in an editor that reveals hidden Unicode characters.
Learn more about bidirectional Unicode characters
Original file line number | Diff line number | Diff line change |
---|---|---|
@@ -1,49 +1,77 @@ | ||
use std::sync::Arc; | ||
use std::time::Duration; | ||
|
||
use reqwest::{Client as HttpClient, Url, Url as ServiceUrl}; | ||
use anyhow::Result; | ||
use hyper::StatusCode; | ||
use reqwest::{Client as HttpClient, Response}; | ||
use serde::Serialize; | ||
use thiserror::Error; | ||
use url::Url; | ||
|
||
use super::structs::{CheckerOutput, Status}; | ||
use crate::console::clients::checker::service::{CheckError, CheckResult}; | ||
#[derive(Debug, Clone, Error, Serialize)] | ||
#[serde(into = "String")] | ||
pub enum Error { | ||
#[error("Failed to Build a Http Client: {err:?}")] | ||
ClientBuildingError { err: Arc<reqwest::Error> }, | ||
#[error("Health check failed to get a response: {err:?}")] | ||
ResponseError { err: Arc<reqwest::Error> }, | ||
#[error("Http check returned a non-success code: \"{code}\" with the response: \"{response:?}\"")] | ||
UnsuccessfulResponse { code: StatusCode, response: Arc<Response> }, | ||
} | ||
|
||
impl From<Error> for String { | ||
fn from(value: Error) -> Self { | ||
value.to_string() | ||
} | ||
} | ||
|
||
#[derive(Debug, Clone, Serialize)] | ||
pub struct Checks { | ||
url: Url, | ||
result: Result<String, Error>, | ||
} | ||
|
||
#[allow(clippy::missing_panics_doc)] | ||
pub async fn run(health_checks: &Vec<ServiceUrl>, check_results: &mut Vec<CheckResult>) -> Vec<CheckerOutput> { | ||
let mut health_checkers: Vec<CheckerOutput> = Vec::new(); | ||
pub async fn run(health_checks: Vec<Url>, timeout: Duration) -> Vec<Result<Checks, Checks>> { | ||
let mut results = Vec::default(); | ||
|
||
for health_check_url in health_checks { | ||
let mut health_checker = CheckerOutput { | ||
url: health_check_url.to_string(), | ||
status: Status { | ||
code: String::new(), | ||
message: String::new(), | ||
}, | ||
tracing::debug!("Health checks ..."); | ||
|
||
for url in health_checks { | ||
let result = match run_health_check(url.clone(), timeout).await { | ||
Ok(response) => Ok(response.status().to_string()), | ||
Err(err) => Err(err), | ||
}; | ||
match run_health_check(health_check_url.clone()).await { | ||
Ok(()) => { | ||
check_results.push(Ok(())); | ||
health_checker.status.code = "ok".to_string(); | ||
} | ||
Err(err) => { | ||
check_results.push(Err(err)); | ||
health_checker.status.code = "error".to_string(); | ||
health_checker.status.message = "Health API is failing.".to_string(); | ||
} | ||
|
||
let check = Checks { url, result }; | ||
|
||
if check.result.is_err() { | ||
results.push(Err(check)); | ||
} else { | ||
results.push(Ok(check)); | ||
} | ||
health_checkers.push(health_checker); | ||
} | ||
health_checkers | ||
|
||
results | ||
} | ||
|
||
async fn run_health_check(url: Url) -> Result<(), CheckError> { | ||
let client = HttpClient::builder().timeout(Duration::from_secs(5)).build().unwrap(); | ||
async fn run_health_check(url: Url, timeout: Duration) -> Result<Response, Error> { | ||
let client = HttpClient::builder() | ||
.timeout(timeout) | ||
.build() | ||
.map_err(|e| Error::ClientBuildingError { err: e.into() })?; | ||
|
||
match client.get(url.clone()).send().await { | ||
Ok(response) => { | ||
if response.status().is_success() { | ||
Ok(()) | ||
} else { | ||
Err(CheckError::HealthCheckError { url }) | ||
} | ||
} | ||
Err(_) => Err(CheckError::HealthCheckError { url }), | ||
let response = client | ||
.get(url.clone()) | ||
.send() | ||
.await | ||
.map_err(|e| Error::ResponseError { err: e.into() })?; | ||
|
||
if response.status().is_success() { | ||
Ok(response) | ||
} else { | ||
Err(Error::UnsuccessfulResponse { | ||
code: response.status(), | ||
response: response.into(), | ||
}) | ||
} | ||
} |
This file contains bidirectional Unicode text that may be interpreted or compiled differently than what appears below. To review, open the file in an editor that reveals hidden Unicode characters.
Learn more about bidirectional Unicode characters
Original file line number | Diff line number | Diff line change |
---|---|---|
@@ -1,120 +1,101 @@ | ||
use std::str::FromStr; | ||
use std::str::FromStr as _; | ||
use std::time::Duration; | ||
|
||
use reqwest::Url as ServiceUrl; | ||
use serde::Serialize; | ||
use torrust_tracker_primitives::info_hash::InfoHash; | ||
use tracing::debug; | ||
use url::Url; | ||
|
||
use super::structs::{CheckerOutput, Status}; | ||
use crate::console::clients::checker::service::{CheckError, CheckResult}; | ||
use crate::shared::bit_torrent::tracker::http::client::requests::announce::QueryBuilder; | ||
use crate::console::clients::http::Error; | ||
use crate::shared::bit_torrent::tracker::http::client::responses::announce::Announce; | ||
use crate::shared::bit_torrent::tracker::http::client::responses::scrape; | ||
use crate::shared::bit_torrent::tracker::http::client::{requests, Client}; | ||
|
||
#[allow(clippy::missing_panics_doc)] | ||
pub async fn run(http_trackers: &Vec<ServiceUrl>, check_results: &mut Vec<CheckResult>) -> Vec<CheckerOutput> { | ||
let mut http_checkers: Vec<CheckerOutput> = Vec::new(); | ||
|
||
for http_tracker in http_trackers { | ||
let mut http_checker = CheckerOutput { | ||
url: http_tracker.to_string(), | ||
status: Status { | ||
code: String::new(), | ||
message: String::new(), | ||
}, | ||
#[derive(Debug, Clone, Serialize)] | ||
pub struct Checks { | ||
url: Url, | ||
results: Vec<(Check, Result<(), Error>)>, | ||
} | ||
|
||
#[derive(Debug, Clone, Serialize)] | ||
pub enum Check { | ||
Announce, | ||
Scrape, | ||
} | ||
|
||
pub async fn run(http_trackers: Vec<Url>, timeout: Duration) -> Vec<Result<Checks, Checks>> { | ||
let mut results = Vec::default(); | ||
|
||
tracing::debug!("HTTP trackers ..."); | ||
|
||
for ref url in http_trackers { | ||
let mut checks = Checks { | ||
url: url.clone(), | ||
results: Vec::default(), | ||
}; | ||
|
||
match check_http_announce(http_tracker).await { | ||
Ok(()) => { | ||
check_results.push(Ok(())); | ||
http_checker.status.code = "ok".to_string(); | ||
} | ||
Err(err) => { | ||
check_results.push(Err(err)); | ||
http_checker.status.code = "error".to_string(); | ||
http_checker.status.message = "Announce is failing.".to_string(); | ||
} | ||
// Announce | ||
{ | ||
let check = check_http_announce(url, timeout).await.map(|_| ()); | ||
|
||
checks.results.push((Check::Announce, check)); | ||
} | ||
|
||
match check_http_scrape(http_tracker).await { | ||
Ok(()) => { | ||
check_results.push(Ok(())); | ||
http_checker.status.code = "ok".to_string(); | ||
} | ||
Err(err) => { | ||
check_results.push(Err(err)); | ||
http_checker.status.code = "error".to_string(); | ||
http_checker.status.message = "Scrape is failing.".to_string(); | ||
} | ||
// Scrape | ||
{ | ||
let check = check_http_scrape(url, timeout).await.map(|_| ()); | ||
|
||
checks.results.push((Check::Scrape, check)); | ||
} | ||
|
||
if checks.results.iter().any(|f| f.1.is_err()) { | ||
results.push(Err(checks)); | ||
} else { | ||
results.push(Ok(checks)); | ||
} | ||
http_checkers.push(http_checker); | ||
} | ||
http_checkers | ||
|
||
results | ||
} | ||
|
||
async fn check_http_announce(tracker_url: &Url) -> Result<(), CheckError> { | ||
async fn check_http_announce(url: &Url, timeout: Duration) -> Result<Announce, Error> { | ||
let info_hash_str = "9c38422213e30bff212b30c360d26f9a02136422".to_string(); // # DevSkim: ignore DS173237 | ||
let info_hash = InfoHash::from_str(&info_hash_str).expect("a valid info-hash is required"); | ||
|
||
// todo: HTTP request could panic. For example, if the server is not accessible. | ||
// We should change the client to catch that error and return a `CheckError`. | ||
// Otherwise the checking process will stop. The idea is to process all checks | ||
// and return a final report. | ||
let Ok(client) = Client::new(tracker_url.clone()) else { | ||
return Err(CheckError::HttpError { | ||
url: (tracker_url.to_owned()), | ||
}); | ||
}; | ||
let Ok(response) = client | ||
.announce(&QueryBuilder::with_default_values().with_info_hash(&info_hash).query()) | ||
let client = Client::new(url.clone(), timeout).map_err(|err| Error::HttpClientError { err })?; | ||
|
||
let response = client | ||
.announce( | ||
&requests::announce::QueryBuilder::with_default_values() | ||
.with_info_hash(&info_hash) | ||
.query(), | ||
) | ||
.await | ||
else { | ||
return Err(CheckError::HttpError { | ||
url: (tracker_url.to_owned()), | ||
}); | ||
}; | ||
|
||
if let Ok(body) = response.bytes().await { | ||
if let Ok(_announce_response) = serde_bencode::from_bytes::<Announce>(&body) { | ||
Ok(()) | ||
} else { | ||
debug!("announce body {:#?}", body); | ||
Err(CheckError::HttpError { | ||
url: tracker_url.clone(), | ||
}) | ||
} | ||
} else { | ||
Err(CheckError::HttpError { | ||
url: tracker_url.clone(), | ||
}) | ||
} | ||
.map_err(|err| Error::HttpClientError { err })?; | ||
|
||
let response = response.bytes().await.map_err(|e| Error::ResponseError { err: e.into() })?; | ||
|
||
let response = serde_bencode::from_bytes::<Announce>(&response).map_err(|e| Error::ParseBencodeError { | ||
data: response, | ||
err: e.into(), | ||
})?; | ||
|
||
Ok(response) | ||
} | ||
|
||
async fn check_http_scrape(url: &Url) -> Result<(), CheckError> { | ||
async fn check_http_scrape(url: &Url, timeout: Duration) -> Result<scrape::Response, Error> { | ||
let info_hashes: Vec<String> = vec!["9c38422213e30bff212b30c360d26f9a02136422".to_string()]; // # DevSkim: ignore DS173237 | ||
let query = requests::scrape::Query::try_from(info_hashes).expect("a valid array of info-hashes is required"); | ||
|
||
// todo: HTTP request could panic. For example, if the server is not accessible. | ||
// We should change the client to catch that error and return a `CheckError`. | ||
// Otherwise the checking process will stop. The idea is to process all checks | ||
// and return a final report. | ||
|
||
let Ok(client) = Client::new(url.clone()) else { | ||
return Err(CheckError::HttpError { url: (url.to_owned()) }); | ||
}; | ||
let Ok(response) = client.scrape(&query).await else { | ||
return Err(CheckError::HttpError { url: (url.to_owned()) }); | ||
}; | ||
|
||
if let Ok(body) = response.bytes().await { | ||
if let Ok(_scrape_response) = scrape::Response::try_from_bencoded(&body) { | ||
Ok(()) | ||
} else { | ||
debug!("scrape body {:#?}", body); | ||
Err(CheckError::HttpError { url: url.clone() }) | ||
} | ||
} else { | ||
Err(CheckError::HttpError { url: url.clone() }) | ||
} | ||
let client = Client::new(url.clone(), timeout).map_err(|err| Error::HttpClientError { err })?; | ||
|
||
let response = client.scrape(&query).await.map_err(|err| Error::HttpClientError { err })?; | ||
|
||
let response = response.bytes().await.map_err(|e| Error::ResponseError { err: e.into() })?; | ||
|
||
let response = scrape::Response::try_from_bencoded(&response).map_err(|e| Error::BencodeParseError { | ||
data: response, | ||
err: e.into(), | ||
})?; | ||
|
||
Ok(response) | ||
} |
Oops, something went wrong.