build(deps): bump opentelemetry crates to 0.27 (#6774)

Author: Sanchith Hegde
Date: 2024-12-10 16:35:34 +05:30
Committed by: GitHub
Parent: a52828296a
Commit: 47a3d2b2ab

86 changed files with 739 additions and 1086 deletions
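
The hunks below all follow the same migration pattern for opentelemetry 0.27: metric calls no longer take an explicit `Context`, so `add(&metrics::CONTEXT, 1, attrs)` becomes `add(1, attrs)`; the `add_attributes` helper and ad-hoc `KeyValue::new` construction are replaced by a `router_env::metric_attributes!` macro; and gauge values are reported with `record` instead of `observe`. For orientation, here is a minimal sketch of the underlying 0.27 API written directly against the `opentelemetry` crate (not the `router_env` wrappers this repo actually uses; the meter name and attribute values are illustrative, and it assumes 0.27's instrument builders are finalized with `build()`):

```rust
use opentelemetry::{global, KeyValue};

fn main() {
    // A no-op meter unless a real provider is installed; the name is illustrative.
    let meter = global::meter("storage_impl_example");

    // Counters: no Context argument in 0.27, attributes are passed as a slice.
    let cache_hit = meter.u64_counter("IN_MEMORY_CACHE_HIT").build();
    cache_hit.add(1, &[KeyValue::new("cache_type", "config")]);

    // Gauges: point-in-time values are reported with `record` rather than `observe`.
    let entry_count = meter.u64_gauge("IN_MEMORY_CACHE_ENTRY_COUNT").build();
    entry_count.record(42, &[KeyValue::new("cache_type", "config")]);
}
```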

View File

@@ -262,9 +262,9 @@ impl<T: DatabaseStore> KVRouterStore<T> {
 .change_context(RedisError::JsonSerializationFailed)?,
 )
 .await
-.map(|_| metrics::KV_PUSHED_TO_DRAINER.add(&metrics::CONTEXT, 1, &[]))
+.map(|_| metrics::KV_PUSHED_TO_DRAINER.add(1, &[]))
 .inspect_err(|error| {
-metrics::KV_FAILED_TO_PUSH_TO_DRAINER.add(&metrics::CONTEXT, 1, &[]);
+metrics::KV_FAILED_TO_PUSH_TO_DRAINER.add(1, &[]);
 logger::error!(?error, "Failed to add entry in drainer stream");
 })
 .change_context(RedisError::StreamAppendFailed)

View File

@@ -1,6 +1,5 @@
-use router_env::{counter_metric, gauge_metric, global_meter, metrics_context};
+use router_env::{counter_metric, gauge_metric, global_meter};
-metrics_context!(CONTEXT);
 global_meter!(GLOBAL_METER, "ROUTER_API");
 counter_metric!(KV_MISS, GLOBAL_METER); // No. of KV misses
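
The removal of `metrics_context!(CONTEXT)` above follows from the same API change: 0.27 instruments record without a `Context`, so the crate no longer needs to define one. As a rough, hand-rolled approximation of what the `global_meter!` / `counter_metric!` macros plausibly expand to after this bump (an assumption about the macro output, not the actual `router_env` implementation):

```rust
use once_cell::sync::Lazy;
use opentelemetry::{
    global,
    metrics::{Counter, Meter},
};

// Assumed equivalents of `global_meter!(GLOBAL_METER, "ROUTER_API")` and
// `counter_metric!(KV_MISS, GLOBAL_METER)`: lazily initialized statics,
// with no metrics Context required anywhere.
static GLOBAL_METER: Lazy<Meter> = Lazy::new(|| global::meter("ROUTER_API"));
static KV_MISS: Lazy<Counter<u64>> =
    Lazy::new(|| GLOBAL_METER.u64_counter("KV_MISS").build());

fn main() {
    // Same call shape as the hunks in this commit: value plus an attribute slice.
    KV_MISS.add(1, &[]);
}
```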

View File

@@ -9,10 +9,7 @@ use error_stack::{Report, ResultExt};
 use moka::future::Cache as MokaCache;
 use once_cell::sync::Lazy;
 use redis_interface::{errors::RedisError, RedisConnectionPool, RedisValue};
-use router_env::{
-metrics::add_attributes,
-tracing::{self, instrument},
-};
+use router_env::tracing::{self, instrument};
 use crate::{
 errors::StorageError,
@@ -193,12 +190,11 @@ impl Cache {
 // Record the metrics of manual invalidation of cache entry by the application
 let eviction_listener = move |_, _, cause| {
 metrics::IN_MEMORY_CACHE_EVICTION_COUNT.add(
-&metrics::CONTEXT,
 1,
-&add_attributes([
+router_env::metric_attributes!(
 ("cache_type", name.to_owned()),
 ("removal_cause", format!("{:?}", cause)),
-]),
+),
 );
 };
 let mut cache_builder = MokaCache::builder()
@@ -225,17 +221,11 @@ impl Cache {
 // Add cache hit and cache miss metrics
 if val.is_some() {
-metrics::IN_MEMORY_CACHE_HIT.add(
-&metrics::CONTEXT,
-1,
-&add_attributes([("cache_type", self.name)]),
-);
+metrics::IN_MEMORY_CACHE_HIT
+.add(1, router_env::metric_attributes!(("cache_type", self.name)));
 } else {
-metrics::IN_MEMORY_CACHE_MISS.add(
-&metrics::CONTEXT,
-1,
-&add_attributes([("cache_type", self.name)]),
-);
+metrics::IN_MEMORY_CACHE_MISS
+.add(1, router_env::metric_attributes!(("cache_type", self.name)));
 }
 let val = (*val?).as_any().downcast_ref::<T>().cloned();
@@ -269,10 +259,9 @@ impl Cache {
 pub async fn record_entry_count_metric(&self) {
 self.run_pending_tasks().await;
-metrics::IN_MEMORY_CACHE_ENTRY_COUNT.observe(
-&metrics::CONTEXT,
+metrics::IN_MEMORY_CACHE_ENTRY_COUNT.record(
 self.get_entry_count(),
-&add_attributes([("cache_type", self.name)]),
+router_env::metric_attributes!(("cache_type", self.name)),
 );
 }
 }

View File

@@ -257,19 +257,16 @@ where
 }
 };
+let attributes = router_env::metric_attributes!(("operation", operation.clone()));
 result
 .await
 .inspect(|_| {
 logger::debug!(kv_operation= %operation, status="success");
-let keyvalue = router_env::opentelemetry::KeyValue::new("operation", operation.clone());
-metrics::KV_OPERATION_SUCCESSFUL.add(&metrics::CONTEXT, 1, &[keyvalue]);
+metrics::KV_OPERATION_SUCCESSFUL.add(1, attributes);
 })
 .inspect_err(|err| {
 logger::error!(kv_operation = %operation, status="error", error = ?err);
-let keyvalue = router_env::opentelemetry::KeyValue::new("operation", operation);
-metrics::KV_OPERATION_FAILED.add(&metrics::CONTEXT, 1, &[keyvalue]);
+metrics::KV_OPERATION_FAILED.add(1, attributes);
 })
 }
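
The hunk above also hoists the attribute construction out of the two `inspect` closures: since `add` now just borrows a slice of `KeyValue`s, a single attribute set can serve both the success and failure counters. A small sketch of that shape against plain opentelemetry 0.27, using the same assumed API shape as the earlier sketch (the function, meter name, and counter wiring are illustrative, not taken from the repo):

```rust
use opentelemetry::{global, KeyValue};

// Illustrative helper (not part of the repo): record the outcome of a KV operation
// with one attribute set shared by both counters.
fn record_kv_outcome(operation: &str, succeeded: bool) {
    let meter = global::meter("kv_store_example");
    let successful = meter.u64_counter("KV_OPERATION_SUCCESSFUL").build();
    let failed = meter.u64_counter("KV_OPERATION_FAILED").build();

    // Built once; `add` only borrows it.
    let attributes = [KeyValue::new("operation", operation.to_owned())];

    if succeeded {
        successful.add(1, &attributes);
    } else {
        failed.add(1, &attributes);
    }
}
```
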
@@ -320,7 +317,7 @@ where
 .await
 {
 Ok(_) => {
-metrics::KV_SOFT_KILL_ACTIVE_UPDATE.add(&metrics::CONTEXT, 1, &[]);
+metrics::KV_SOFT_KILL_ACTIVE_UPDATE.add(1, &[]);
 MerchantStorageScheme::RedisKv
 }
 Err(_) => MerchantStorageScheme::PostgresOnly,

View File

@@ -59,7 +59,7 @@ where
 Ok(output) => Ok(output),
 Err(redis_error) => match redis_error.current_context() {
 redis_interface::errors::RedisError::NotFound => {
-metrics::KV_MISS.add(&metrics::CONTEXT, 1, &[]);
+metrics::KV_MISS.add(1, &[]);
 database_call_closure().await
 }
 // Keeping the key empty here since the error would never go here.