Skip to content

Commit

Permalink
Added an optional `cached` query parameter to the rich-list and token-distribution endpoints
Browse files Browse the repository at this point in the history
  • Loading branch information
shufps committed Jul 8, 2024
1 parent 8248b36 commit ee13b46
Show file tree
Hide file tree
Showing 2 changed files with 60 additions and 36 deletions.
9 changes: 6 additions & 3 deletions src/bin/inx-chronicle/api/explorer/extractors.rs
Original file line number Diff line number Diff line change
Expand Up @@ -265,13 +265,15 @@ const DEFAULT_TOP_RICHLIST: usize = 100;
/// Query parameters for the richest-addresses explorer endpoint.
pub struct RichestAddressesQuery {
    /// Number of top addresses to return; defaults to `DEFAULT_TOP_RICHLIST` (100).
    pub top: usize,
    /// Ledger (milestone) index to query at; resolved by the handler when `None`
    /// (presumably to the latest index — confirm against `resolve_ledger_index`).
    pub ledger_index: Option<MilestoneIndex>,
    /// Whether the daily cache may be used; `None` is treated as `true`
    /// (see `get_cache_bool` in the routes module).
    pub cached: Option<bool>,
}

impl Default for RichestAddressesQuery {
fn default() -> Self {
Self {
top: DEFAULT_TOP_RICHLIST,
ledger_index: None,
cached: None,
}
}
}
Expand All @@ -292,16 +294,17 @@ impl<B: Send> FromRequest<B> for RichestAddressesQuery {

#[derive(Copy, Clone, Deserialize, Default)]
#[serde(default, deny_unknown_fields, rename_all = "camelCase")]
pub struct LedgerIndex {
/// Query parameters for the token-distribution explorer endpoint.
pub struct TokenDistributionQuery {
    /// Ledger (milestone) index to query at; resolved by the handler when `None`.
    pub ledger_index: Option<MilestoneIndex>,
    /// Whether the daily cache may be used; `None` is treated as `true`
    /// (see `get_cache_bool` in the routes module).
    pub cached: Option<bool>,
}

#[async_trait]
impl<B: Send> FromRequest<B> for LedgerIndex {
impl<B: Send> FromRequest<B> for TokenDistributionQuery {
type Rejection = ApiError;

async fn from_request(req: &mut axum::extract::RequestParts<B>) -> Result<Self, Self::Rejection> {
let Query(query) = Query::<LedgerIndex>::from_request(req)
let Query(query) = Query::<TokenDistributionQuery>::from_request(req)
.await
.map_err(RequestError::from)?;
Ok(query)
Expand Down
87 changes: 54 additions & 33 deletions src/bin/inx-chronicle/api/explorer/routes.rs
Original file line number Diff line number Diff line change
Expand Up @@ -26,7 +26,7 @@ use iota_sdk::types::block::address::{Hrp, ToBech32Ext};

use super::{
extractors::{
BlocksByMilestoneCursor, BlocksByMilestoneIdPagination, BlocksByMilestoneIndexPagination, LedgerIndex,
BlocksByMilestoneCursor, BlocksByMilestoneIdPagination, BlocksByMilestoneIndexPagination, TokenDistributionQuery,
LedgerUpdatesByAddressCursor, LedgerUpdatesByAddressPagination, LedgerUpdatesByMilestoneCursor,
LedgerUpdatesByMilestonePagination, MilestonesCursor, MilestonesPagination, RichestAddressesQuery,
},
Expand All @@ -44,7 +44,6 @@ use crate::api::{
};

use once_cell::sync::Lazy;
use std::time::{Duration, Instant};
use tokio::sync::RwLock;

pub fn routes() -> Router {
Expand Down Expand Up @@ -327,42 +326,57 @@ async fn blocks_by_milestone_id(
}

struct RichestCacheData {
last_updated: Instant,
last_updated: u64,
data: RichestAddressesResponse,
}

struct TokenCacheData {
last_updated: Instant,
last_updated: u64,
data: TokenDistributionResponse,
}

/// Seconds remaining until the next UTC midnight, in `1..=86400`.
///
/// Used only for logging when the daily caches will next be refreshed.
/// NOTE(review): the diff residue interleaved the old
/// `calculate_seconds_until_midnight` definition with the renamed one;
/// this keeps the final renamed version only.
fn get_seconds_until_midnight() -> u64 {
    let now = SystemTime::now();
    let since_epoch = now.duration_since(SystemTime::UNIX_EPOCH).expect("Time went backwards");
    // Seconds already elapsed in the current UTC day, subtracted from a full day.
    86400 - (since_epoch.as_secs() % 86400)
}

/// Number of whole days elapsed since the Unix epoch.
///
/// The daily caches compare this value to decide whether a cached entry
/// is still from "today" (UTC).
fn get_days_since_epoch() -> u64 {
    SystemTime::now()
        .duration_since(SystemTime::UNIX_EPOCH)
        .expect("Time went backwards")
        .as_secs()
        / 86400
}

/// Process-wide cache for the richest-addresses response; entries are reused
/// within the same day-since-epoch value (i.e. refreshed at most daily).
static RICHEST_ADDRESSES_CACHE: Lazy<RwLock<Option<RichestCacheData>>> = Lazy::new(|| RwLock::new(None));
/// Process-wide cache for the token-distribution response; same daily
/// refresh policy as the richest-addresses cache.
static TOKEN_DISTRIBUTION_CACHE: Lazy<RwLock<Option<TokenCacheData>>> = Lazy::new(|| RwLock::new(None));

/// Interpret the optional `cached` query flag.
///
/// Callers that omit the flag get cached behaviour by default; only an
/// explicit `cached=false` bypasses the cache.
fn get_cache_bool(cache: Option<bool>) -> bool {
    // Idiomatic replacement for the manual `match` (clippy: manual_unwrap_or).
    cache.unwrap_or(true)
}

async fn richest_addresses_ledger_analytics(
database: Extension<MongoDb>,
RichestAddressesQuery { top, ledger_index }: RichestAddressesQuery,
RichestAddressesQuery { top, ledger_index , cached}: RichestAddressesQuery,
) -> ApiResult<RichestAddressesResponse> {
let ledger_index = resolve_ledger_index(&database, ledger_index).await?;
let mut cache = RICHEST_ADDRESSES_CACHE.write().await;
let seconds_until_midnight = calculate_seconds_until_midnight();

if let Some(cached_data) = &*cache {
if cached_data.last_updated.elapsed() < Duration::from_secs(86400) {
return Ok(cached_data.data.clone());
let cached = get_cache_bool(cached);
let days_since_epoch = get_days_since_epoch();

if cached {
if let Some(cached_data) = &*cache {
if cached_data.last_updated == days_since_epoch {
return Ok(cached_data.data.clone());
}
}
info!("refreshing richest-addresses cache ...");
}

info!("refreshing richest-addresses cache ...");
let refresh_start = SystemTime::now();

let res = database
.collection::<OutputCollection>()
.get_richest_addresses(ledger_index, top)
Expand Down Expand Up @@ -391,33 +405,38 @@ async fn richest_addresses_ledger_analytics(
ledger_index,
};

// Store the response in the cache
*cache = Some(RichestCacheData { last_updated: Instant::now(), data: response.clone() });
if cached {
// Store the response in the cache
*cache = Some(RichestCacheData { last_updated: days_since_epoch, data: response.clone() });

let refresh_elapsed = refresh_start.elapsed().unwrap();
info!("refreshing richest-addresses cache done. Took {:?}", refresh_elapsed);
info!("next refresh in {} seconds", seconds_until_midnight);
let refresh_elapsed = refresh_start.elapsed().unwrap();
info!("refreshing richest-addresses cache done. Took {:?}", refresh_elapsed);
info!("next refresh in {} seconds", get_seconds_until_midnight());
}

Ok(response)
}

async fn token_distribution_ledger_analytics(
database: Extension<MongoDb>,
LedgerIndex { ledger_index }: LedgerIndex,
TokenDistributionQuery { ledger_index, cached}: TokenDistributionQuery,
) -> ApiResult<TokenDistributionResponse> {
let ledger_index = resolve_ledger_index(&database, ledger_index).await?;
let mut cache = TOKEN_DISTRIBUTION_CACHE.write().await;

let seconds_until_midnight = calculate_seconds_until_midnight();
if let Some(cached_data) = &*cache {
if cached_data.last_updated.elapsed() < Duration::from_secs(86400) {
return Ok(cached_data.data.clone());
let cached = get_cache_bool(cached);
let days_since_epoch = get_days_since_epoch();

if cached {
if let Some(cached_data) = &*cache {
if cached_data.last_updated == days_since_epoch {
return Ok(cached_data.data.clone());
}
}

info!("refreshing token-distribution cache ...");
}

info!("refreshing token-distribution cache ...");
let refresh_start = SystemTime::now();

let res = database
.collection::<OutputCollection>()
.get_token_distribution(ledger_index)
Expand All @@ -428,12 +447,14 @@ async fn token_distribution_ledger_analytics(
ledger_index,
};

// Store the response in the cache
*cache = Some(TokenCacheData { last_updated: Instant::now(), data: response.clone() });
if cached {
// Store the response in the cache
*cache = Some(TokenCacheData { last_updated: days_since_epoch, data: response.clone() });

let refresh_elapsed = refresh_start.elapsed().unwrap();
info!("refreshing token-distribution cache done. Took {:?}", refresh_elapsed);
info!("next refresh in {} seconds", seconds_until_midnight);
let refresh_elapsed = refresh_start.elapsed().unwrap();
info!("refreshing token-distribution cache done. Took {:?}", refresh_elapsed);
info!("next refresh in {} seconds", get_seconds_until_midnight());
}

Ok(response)
}
Expand Down

0 comments on commit ee13b46

Please sign in to comment.