fix(cache): address in-memory cache invalidation using global tenant as key_prefix (#6976)

Author: Chethan Rao
Date: 2025-01-03 13:20:45 +05:30
Committed by: GitHub
Parent: 7d00583a80
Commit: fce5ffa4e0

9 changed files with 74 additions and 84 deletions

View File

@@ -211,18 +211,13 @@ impl super::RedisConnectionPool {
     #[instrument(level = "DEBUG", skip(self))]
     pub async fn delete_multiple_keys(
         &self,
-        keys: Vec<String>,
+        keys: &[String],
     ) -> CustomResult<Vec<DelReply>, errors::RedisError> {
-        let mut del_result = Vec::with_capacity(keys.len());
-
-        for key in keys {
-            del_result.push(
-                self.pool
-                    .del(self.add_prefix(&key))
-                    .await
-                    .change_context(errors::RedisError::DeleteFailed)?,
-            );
-        }
+        let futures = keys.iter().map(|key| self.pool.del(self.add_prefix(key)));
+
+        let del_result = futures::future::try_join_all(futures)
+            .await
+            .change_context(errors::RedisError::DeleteFailed)?;

         Ok(del_result)
     }
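
The removed loop awaited each `DEL` sequentially; the replacement builds every delete future up front and drives them together with `futures::future::try_join_all`, failing fast on the first error. A minimal, self-contained sketch of that pattern, where the `delete_key` helper and the `tokio` runtime are illustrative stand-ins for awaiting `self.pool.del(...)` on the connection pool:

```rust
use futures::future::try_join_all;

// Hypothetical stand-in for a single-key DEL; the real code awaits
// `self.pool.del(self.add_prefix(key))` against Redis instead.
async fn delete_key(key: &str) -> Result<u64, String> {
    Ok(u64::from(!key.is_empty()))
}

#[tokio::main]
async fn main() -> Result<(), String> {
    let keys = vec!["routing_key".to_string(), "cgraph_key".to_string()];

    // Build every future first, then await them together; the first error
    // short-circuits, matching the `?` inside the old sequential loop.
    let del_result = try_join_all(keys.iter().map(|key| delete_key(key))).await?;

    assert_eq!(del_result, vec![1, 1]);
    Ok(())
}
```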

View File

@@ -4288,7 +4288,7 @@ impl ProfileWrapper {
             .change_context(errors::ApiErrorResponse::InternalServerError)
             .attach_printable("Failed to update routing algorithm ref in business profile")?;

-        storage_impl::redis::cache::publish_into_redact_channel(
+        storage_impl::redis::cache::redact_from_redis_and_publish(
             db.get_cache_store().as_ref(),
             [routing_cache_key],
         )

View File

@@ -1,6 +1,6 @@
 use common_utils::errors::CustomResult;
 use error_stack::{report, ResultExt};
-use storage_impl::redis::cache::{publish_into_redact_channel, CacheKind};
+use storage_impl::redis::cache::{redact_from_redis_and_publish, CacheKind};

 use super::errors;
 use crate::{routes::SessionState, services};
@@ -10,7 +10,7 @@ pub async fn invalidate(
     key: &str,
 ) -> CustomResult<services::api::ApplicationResponse<serde_json::Value>, errors::ApiErrorResponse> {
     let store = state.store.as_ref();
-    let result = publish_into_redact_channel(
+    let result = redact_from_redis_and_publish(
         store.get_cache_store().as_ref(),
         [CacheKind::All(key.into())],
     )

View File

@@ -1383,7 +1383,7 @@ pub async fn success_based_routing_update_configs(
     let cache_entries_to_redact = vec![cache::CacheKind::SuccessBasedDynamicRoutingCache(
         cache_key.into(),
     )];
-    let _ = cache::publish_into_redact_channel(
+    let _ = cache::redact_from_redis_and_publish(
         state.store.get_cache_store().as_ref(),
         cache_entries_to_redact,
     )

View File

@@ -189,7 +189,7 @@ pub async fn update_merchant_active_algorithm_ref(
         .change_context(errors::ApiErrorResponse::InternalServerError)
         .attach_printable("Failed to update routing algorithm ref in merchant account")?;

-    cache::publish_into_redact_channel(db.get_cache_store().as_ref(), [config_key])
+    cache::redact_from_redis_and_publish(db.get_cache_store().as_ref(), [config_key])
         .await
         .change_context(errors::ApiErrorResponse::InternalServerError)
         .attach_printable("Failed to invalidate the config cache")?;
@@ -256,7 +256,7 @@ pub async fn update_profile_active_algorithm_ref(
         .change_context(errors::ApiErrorResponse::InternalServerError)
         .attach_printable("Failed to update routing algorithm ref in business profile")?;

-    cache::publish_into_redact_channel(db.get_cache_store().as_ref(), [routing_cache_key])
+    cache::redact_from_redis_and_publish(db.get_cache_store().as_ref(), [routing_cache_key])
         .await
         .change_context(errors::ApiErrorResponse::InternalServerError)
         .attach_printable("Failed to invalidate routing cache")?;
@@ -1031,7 +1031,7 @@ pub async fn disable_dynamic_routing_algorithm(
     };

     // redact cache for dynamic routing config
-    let _ = cache::publish_into_redact_channel(
+    let _ = cache::redact_from_redis_and_publish(
         state.store.get_cache_store().as_ref(),
         cache_entries_to_redact,
     )

View File

@@ -1,16 +1,13 @@
 use diesel_models::configs::ConfigUpdateInternal;
 use error_stack::{report, ResultExt};
 use router_env::{instrument, tracing};
-use storage_impl::redis::{
-    cache::{self, CacheKind, CONFIG_CACHE},
-    kv_store::RedisConnInterface,
-    pub_sub::PubSubInterface,
-};
+use storage_impl::redis::cache::{self, CacheKind, CONFIG_CACHE};

 use super::{MockDb, Store};
 use crate::{
     connection,
     core::errors::{self, CustomResult},
-    db::StorageInterface,
     types::storage,
 };
@@ -69,14 +66,11 @@ impl ConfigInterface for Store {
             .await
             .map_err(|error| report!(errors::StorageError::from(error)))?;

-        self.get_redis_conn()
-            .map_err(Into::<errors::StorageError>::into)?
-            .publish(
-                cache::IMC_INVALIDATION_CHANNEL,
-                CacheKind::Config((&inserted.key).into()),
-            )
-            .await
-            .map_err(Into::<errors::StorageError>::into)?;
+        cache::redact_from_redis_and_publish(
+            self.get_cache_store().as_ref(),
+            [CacheKind::Config((&inserted.key).into())],
+        )
+        .await?;

         Ok(inserted)
     }
@@ -177,14 +171,11 @@ impl ConfigInterface for Store {
             .await
             .map_err(|error| report!(errors::StorageError::from(error)))?;

-        self.get_redis_conn()
-            .map_err(Into::<errors::StorageError>::into)?
-            .publish(
-                cache::IMC_INVALIDATION_CHANNEL,
-                CacheKind::Config(key.into()),
-            )
-            .await
-            .map_err(Into::<errors::StorageError>::into)?;
+        cache::redact_from_redis_and_publish(
+            self.get_cache_store().as_ref(),
+            [CacheKind::Config((&deleted.key).into())],
+        )
+        .await?;

         Ok(deleted)
     }

View File

@@ -801,7 +801,7 @@ async fn publish_and_redact_merchant_account_cache(
     cache_keys.extend(publishable_key.into_iter());
     cache_keys.extend(cgraph_key.into_iter());

-    cache::publish_into_redact_channel(store.get_cache_store().as_ref(), cache_keys).await?;
+    cache::redact_from_redis_and_publish(store.get_cache_store().as_ref(), cache_keys).await?;
     Ok(())
 }
@@ -822,6 +822,6 @@ async fn publish_and_redact_all_merchant_account_cache(
         .map(|s| CacheKind::Accounts(s.into()))
         .collect();

-    cache::publish_into_redact_channel(store.get_cache_store().as_ref(), cache_keys).await?;
+    cache::redact_from_redis_and_publish(store.get_cache_store().as_ref(), cache_keys).await?;
     Ok(())
 }

View File

@@ -2,14 +2,17 @@ use std::{any::Any, borrow::Cow, fmt::Debug, sync::Arc};
 use common_utils::{
     errors::{self, CustomResult},
-    ext_traits::{AsyncExt, ByteSliceExt},
+    ext_traits::ByteSliceExt,
 };
 use dyn_clone::DynClone;
 use error_stack::{Report, ResultExt};
 use moka::future::Cache as MokaCache;
 use once_cell::sync::Lazy;
 use redis_interface::{errors::RedisError, RedisConnectionPool, RedisValue};
-use router_env::tracing::{self, instrument};
+use router_env::{
+    logger,
+    tracing::{self, instrument},
+};

 use crate::{
     errors::StorageError,
@@ -100,7 +103,7 @@ pub struct CacheRedact<'a> {
     pub kind: CacheKind<'a>,
 }

-#[derive(serde::Serialize, serde::Deserialize)]
+#[derive(Clone, Debug, serde::Serialize, serde::Deserialize)]
 pub enum CacheKind<'a> {
     Config(Cow<'a, str>),
     Accounts(Cow<'a, str>),
@@ -114,6 +117,23 @@ pub enum CacheKind<'a> {
     All(Cow<'a, str>),
 }

+impl CacheKind<'_> {
+    pub(crate) fn get_key_without_prefix(&self) -> &str {
+        match self {
+            CacheKind::Config(key)
+            | CacheKind::Accounts(key)
+            | CacheKind::Routing(key)
+            | CacheKind::DecisionManager(key)
+            | CacheKind::Surcharge(key)
+            | CacheKind::CGraph(key)
+            | CacheKind::SuccessBasedDynamicRoutingCache(key)
+            | CacheKind::EliminationBasedDynamicRoutingCache(key)
+            | CacheKind::PmFiltersCGraph(key)
+            | CacheKind::All(key) => key,
+        }
+    }
+}
+
 impl<'a> TryFrom<CacheRedact<'a>> for RedisValue {
     type Error = Report<errors::ValidationError>;

     fn try_from(v: CacheRedact<'a>) -> Result<Self, Self::Error> {
@@ -343,39 +363,10 @@
 }

 #[instrument(skip_all)]
-pub async fn redact_cache<T, F, Fut>(
-    store: &(dyn RedisConnInterface + Send + Sync),
-    key: &'static str,
-    fun: F,
-    in_memory: Option<&Cache>,
-) -> CustomResult<T, StorageError>
-where
-    F: FnOnce() -> Fut + Send,
-    Fut: futures::Future<Output = CustomResult<T, StorageError>> + Send,
-{
-    let data = fun().await?;
-
-    let redis_conn = store
-        .get_redis_conn()
-        .change_context(StorageError::RedisError(
-            RedisError::RedisConnectionError.into(),
-        ))
-        .attach_printable("Failed to get redis connection")?;
-
-    let tenant_key = CacheKey {
-        key: key.to_string(),
-        prefix: redis_conn.key_prefix.clone(),
-    };
-    in_memory.async_map(|cache| cache.remove(tenant_key)).await;
-
-    redis_conn
-        .delete_key(key)
-        .await
-        .change_context(StorageError::KVError)?;
-    Ok(data)
-}
-
-#[instrument(skip_all)]
-pub async fn publish_into_redact_channel<'a, K: IntoIterator<Item = CacheKind<'a>> + Send>(
+pub async fn redact_from_redis_and_publish<
+    'a,
+    K: IntoIterator<Item = CacheKind<'a>> + Send + Clone,
+>(
     store: &(dyn RedisConnInterface + Send + Sync),
     keys: K,
 ) -> CustomResult<usize, StorageError> {
@@ -386,6 +377,24 @@ pub async fn publish_into_redact_channel<'a, K: IntoIterator<Item = CacheKind<'a
         ))
         .attach_printable("Failed to get redis connection")?;

+    let redis_keys_to_be_deleted = keys
+        .clone()
+        .into_iter()
+        .map(|val| val.get_key_without_prefix().to_owned())
+        .collect::<Vec<_>>();
+
+    let del_replies = redis_conn
+        .delete_multiple_keys(&redis_keys_to_be_deleted)
+        .await
+        .map_err(StorageError::RedisError)?;
+
+    let deletion_result = redis_keys_to_be_deleted
+        .into_iter()
+        .zip(del_replies)
+        .collect::<Vec<_>>();
+
+    logger::debug!(redis_deletion_result=?deletion_result);
+
     let futures = keys.into_iter().map(|key| async {
         redis_conn
             .clone()
@@ -411,7 +420,7 @@
     Fut: futures::Future<Output = CustomResult<T, StorageError>> + Send,
 {
     let data = fun().await?;
-    publish_into_redact_channel(store, [key]).await?;
+    redact_from_redis_and_publish(store, [key]).await?;
     Ok(data)
 }
@@ -424,10 +433,10 @@ pub async fn publish_and_redact_multiple<'a, T, F, Fut, K>(
 where
     F: FnOnce() -> Fut + Send,
     Fut: futures::Future<Output = CustomResult<T, StorageError>> + Send,
-    K: IntoIterator<Item = CacheKind<'a>> + Send,
+    K: IntoIterator<Item = CacheKind<'a>> + Send + Clone,
 {
     let data = fun().await?;
-    publish_into_redact_channel(store, keys).await?;
+    redact_from_redis_and_publish(store, keys).await?;
     Ok(data)
 }
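
The renamed helper now deletes the Redis keys itself through the caller's connection (so the caller's own `key_prefix` is applied via `add_prefix`) and only then publishes on the invalidation channel so every pod evicts its in-memory entry; the subscriber no longer touches Redis at all (see the last file below). A self-contained toy model of that split of responsibilities, where `Pod` and `redact_and_publish` are illustrative names, not the crate's API:

```rust
use std::collections::{HashMap, HashSet};

/// Toy stand-in for one application pod's in-memory cache,
/// keyed the same way as Redis: "{tenant_prefix}:{key}".
struct Pod {
    in_memory: HashSet<String>,
}

impl Pod {
    /// Subscriber side after the fix: evict only the local entry, using the
    /// tenant carried in the pub/sub message; no Redis access happens here.
    fn on_invalidation_message(&mut self, tenant: &str, key: &str) {
        self.in_memory.remove(&format!("{tenant}:{key}"));
    }
}

/// Publisher side: delete from Redis with the caller's (correctly prefixed)
/// connection first, then broadcast so each pod clears its in-memory copy.
fn redact_and_publish(
    redis: &mut HashMap<String, String>,
    pods: &mut [Pod],
    tenant: &str,
    keys: &[&str],
) {
    for key in keys {
        redis.remove(&format!("{tenant}:{key}"));
    }
    for pod in pods.iter_mut() {
        for key in keys {
            pod.on_invalidation_message(tenant, key);
        }
    }
}

fn main() {
    let mut redis = HashMap::from([("tenant_a:routing".to_string(), "algo_v1".to_string())]);
    let mut pods = vec![Pod {
        in_memory: HashSet::from(["tenant_a:routing".to_string()]),
    }];

    redact_and_publish(&mut redis, &mut pods, "tenant_a", &["routing"]);

    assert!(redis.is_empty());
    assert!(pods[0].in_memory.is_empty());
}
```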

View File

@@ -243,11 +243,6 @@ impl PubSubInterface for std::sync::Arc<redis_interface::RedisConnectionPool> {
             }
         };

-        self.delete_key(key.as_ref())
-            .await
-            .map_err(|err| logger::error!("Error while deleting redis key: {err:?}"))
-            .ok();
-
         logger::debug!(
             key_prefix=?message.tenant.clone(),
             channel_name=?channel_name,
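
With the `delete_key` call removed, the subscriber's remaining job is to evict the in-memory entry, keyed by the tenant carried in the message (logged above as `key_prefix`); the Redis `DEL` has already happened on the publisher side under the correct prefix. A hedged sketch of that eviction against a bare `moka` cache, where the `CacheKey { key, prefix }` shape mirrors the one in `cache.rs` and `handle_invalidation` is an illustrative name, not the crate's handler:

```rust
use moka::future::Cache as MokaCache;

// Mirrors the `CacheKey { key, prefix }` used by the in-memory caches.
#[derive(Clone, Hash, PartialEq, Eq)]
struct CacheKey {
    key: String,
    prefix: String,
}

// Illustrative handler: drop only the local entry for this tenant's key.
async fn handle_invalidation(cache: &MokaCache<CacheKey, String>, tenant: &str, key: &str) {
    cache
        .invalidate(&CacheKey {
            key: key.to_string(),
            prefix: tenant.to_string(),
        })
        .await;
}

#[tokio::main]
async fn main() {
    let cache: MokaCache<CacheKey, String> = MokaCache::new(100);
    let entry = CacheKey {
        key: "routing".to_string(),
        prefix: "tenant_a".to_string(),
    };
    cache.insert(entry.clone(), "algo_v1".to_string()).await;

    handle_invalidation(&cache, "tenant_a", "routing").await;
    assert!(!cache.contains_key(&entry));
}
```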