
Commit

Merge commit '3c86a8d805d6ac79a3b3770cb7addf987b087bd8' into feat/cache-for-explorer-endpoints
shufps committed Jan 18, 2024
2 parents 1c8af3b + 3c86a8d commit d4a9307
Showing 1 changed file with 30 additions and 14 deletions.
44 changes: 30 additions & 14 deletions src/bin/inx-chronicle/api/explorer/routes.rs
@@ -2,6 +2,9 @@
 // SPDX-License-Identifier: Apache-2.0

 use std::str::FromStr;
+use std::time::SystemTime;
+
+use tracing::info;

 use axum::{extract::Path, routing::get, Extension};
 use chronicle::{
@@ -333,6 +336,13 @@ struct TokenCacheData {
     data: TokenDistributionResponse,
 }

+fn calculate_seconds_until_midnight() -> u64 {
+    let now = SystemTime::now();
+    let since_epoch = now.duration_since(SystemTime::UNIX_EPOCH).expect("Time went backwards");
+    let seconds_today = since_epoch.as_secs() % 86400;
+    86400 - seconds_today
+}
+
 static RICHEST_ADDRESSES_CACHE: Lazy<RwLock<Option<RichestCacheData>>> = Lazy::new(|| RwLock::new(None));
 static TOKEN_DISTRIBUTION_CACHE: Lazy<RwLock<Option<TokenCacheData>>> = Lazy::new(|| RwLock::new(None));

@@ -341,15 +351,17 @@ async fn richest_addresses_ledger_analytics(
     RichestAddressesQuery { top, ledger_index }: RichestAddressesQuery,
 ) -> ApiResult<RichestAddressesResponse> {
     let ledger_index = resolve_ledger_index(&database, ledger_index).await?;
-    let cache = RICHEST_ADDRESSES_CACHE.read().await;
+    let mut cache = RICHEST_ADDRESSES_CACHE.write().await;
+    let seconds_until_midnight = calculate_seconds_until_midnight();

     if let Some(cached_data) = &*cache {
-        if cached_data.last_updated.elapsed() < Duration::from_secs(86400) {
+        if cached_data.last_updated.elapsed() < Duration::from_secs(seconds_until_midnight) {
             return Ok(cached_data.data.clone());
         }
     }

-    drop(cache); // release the read lock
+    info!("refreshing richest-addresses cache ...");
+    let refresh_start = SystemTime::now();

     let res = database
         .collection::<OutputCollection>()
@@ -380,10 +392,11 @@ async fn richest_addresses_ledger_analytics(
     };

     // Store the response in the cache
-    *RICHEST_ADDRESSES_CACHE.write().await = Some(RichestCacheData {
-        last_updated: Instant::now(),
-        data: response.clone(),
-    });
+    *cache = Some(RichestCacheData { last_updated: Instant::now(), data: response.clone() });
+
+    let refresh_elapsed = refresh_start.elapsed().unwrap();
+    info!("refreshing richest-addresses cache done. Took {:?}", refresh_elapsed);
+    info!("next refresh in {} seconds", seconds_until_midnight);

     Ok(response)
 }
@@ -393,15 +406,17 @@ async fn token_distribution_ledger_analytics(
     LedgerIndex { ledger_index }: LedgerIndex,
 ) -> ApiResult<TokenDistributionResponse> {
     let ledger_index = resolve_ledger_index(&database, ledger_index).await?;
-    let cache = TOKEN_DISTRIBUTION_CACHE.read().await;
+    let mut cache = TOKEN_DISTRIBUTION_CACHE.write().await;

+    let seconds_until_midnight = calculate_seconds_until_midnight();
     if let Some(cached_data) = &*cache {
-        if cached_data.last_updated.elapsed() < Duration::from_secs(86400) {
+        if cached_data.last_updated.elapsed() < Duration::from_secs(seconds_until_midnight) {
             return Ok(cached_data.data.clone());
         }
     }

-    drop(cache); // release the read lock
+    info!("refreshing token-distribution cache ...");
+    let refresh_start = SystemTime::now();

     let res = database
         .collection::<OutputCollection>()
@@ -414,10 +429,11 @@
     };

     // Store the response in the cache
-    *TOKEN_DISTRIBUTION_CACHE.write().await = Some(TokenCacheData {
-        last_updated: Instant::now(),
-        data: response.clone(),
-    });
+    *cache = Some(TokenCacheData { last_updated: Instant::now(), data: response.clone() });
+
+    let refresh_elapsed = refresh_start.elapsed().unwrap();
+    info!("refreshing token-distribution cache done. Took {:?}", refresh_elapsed);
+    info!("next refresh in {} seconds", seconds_until_midnight);

     Ok(response)
 }
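For readers who want to try the pattern outside Chronicle, below is a minimal, self-contained sketch of the cache-until-midnight logic this commit introduces. It mirrors the `calculate_seconds_until_midnight` helper and the `Lazy<RwLock<Option<...>>>` cache shown in the diff, but `DemoEntry`, `DEMO_CACHE`, and `get_or_refresh` are illustrative names only (not part of `routes.rs`), the cached payload is a plain `String` instead of an API response, and it assumes the `tokio` (with `rt` and `macros` features) and `once_cell` crates.

```rust
use std::time::{Duration, Instant, SystemTime};

use once_cell::sync::Lazy;
use tokio::sync::RwLock;

// Hypothetical cache entry; routes.rs uses RichestCacheData / TokenCacheData instead.
#[derive(Clone, Debug)]
struct DemoEntry {
    last_updated: Instant,
    data: String,
}

// Process-wide cache slot, like RICHEST_ADDRESSES_CACHE / TOKEN_DISTRIBUTION_CACHE.
static DEMO_CACHE: Lazy<RwLock<Option<DemoEntry>>> = Lazy::new(|| RwLock::new(None));

// Seconds remaining until the next UTC midnight, same arithmetic as the new helper.
fn calculate_seconds_until_midnight() -> u64 {
    let since_epoch = SystemTime::now()
        .duration_since(SystemTime::UNIX_EPOCH)
        .expect("Time went backwards");
    86400 - since_epoch.as_secs() % 86400
}

// Serve the cached value while its age is shorter than the time left until midnight;
// otherwise recompute it under the write lock, as the endpoints now do.
async fn get_or_refresh() -> String {
    let mut cache = DEMO_CACHE.write().await;
    let seconds_until_midnight = calculate_seconds_until_midnight();

    if let Some(entry) = &*cache {
        if entry.last_updated.elapsed() < Duration::from_secs(seconds_until_midnight) {
            return entry.data.clone();
        }
    }

    // Stand-in for the expensive aggregation behind the real endpoints.
    let data = format!("refreshed at {:?}", SystemTime::now());
    *cache = Some(DemoEntry { last_updated: Instant::now(), data: data.clone() });
    data
}

#[tokio::main]
async fn main() {
    println!("{}", get_or_refresh().await); // first call computes and stores
    println!("{}", get_or_refresh().await); // second call is served from the cache
}
```

Note the trade-off the diff makes: holding the write lock across the whole check-and-refresh path (instead of the earlier read-then-write sequence) means that when the cache expires, concurrent requests wait for a single refresh rather than each recomputing the aggregation, at the cost of serializing readers on that lock.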
