From 774a53ee8935e2e28827b986e5bf0ed5dc55cf33 Mon Sep 17 00:00:00 2001 From: Sandeep Kumar <83278309+tsdk02@users.noreply.github.com> Date: Thu, 5 Dec 2024 15:39:40 +0530 Subject: [PATCH] feat(analytics): Add refund sessionized metrics for Analytics V2 dashboard (#6616) Co-authored-by: hyperswitch-bot[bot] <148525504+hyperswitch-bot[bot]@users.noreply.github.com> --- crates/analytics/src/clickhouse.rs | 15 +- crates/analytics/src/lib.rs | 115 +++++++++- crates/analytics/src/payments/distribution.rs | 6 +- .../distribution/payment_error_message.rs | 4 +- crates/analytics/src/query.rs | 3 +- crates/analytics/src/refunds.rs | 1 + crates/analytics/src/refunds/accumulator.rs | 217 +++++++++++++++--- crates/analytics/src/refunds/core.rs | 192 ++++++++++++---- crates/analytics/src/refunds/distribution.rs | 105 +++++++++ .../distribution/sessionized_distribution.rs | 7 + .../refund_error_message.rs | 177 ++++++++++++++ .../sessionized_distribution/refund_reason.rs | 169 ++++++++++++++ crates/analytics/src/refunds/filters.rs | 2 + crates/analytics/src/refunds/metrics.rs | 12 + .../src/refunds/metrics/refund_count.rs | 2 + .../metrics/refund_processed_amount.rs | 2 + .../refunds/metrics/refund_success_count.rs | 2 + .../refunds/metrics/refund_success_rate.rs | 2 + .../refunds/metrics/sessionized_metrics.rs | 4 + .../sessionized_metrics/refund_count.rs | 2 + .../refund_error_message.rs | 190 +++++++++++++++ .../refund_processed_amount.rs | 8 + .../sessionized_metrics/refund_reason.rs | 182 +++++++++++++++ .../refund_success_count.rs | 2 + .../refund_success_rate.rs | 2 + crates/analytics/src/refunds/types.rs | 15 ++ crates/analytics/src/sqlx.rs | 88 +++++++ crates/api_models/src/analytics.rs | 18 +- crates/api_models/src/analytics/refunds.rs | 61 ++++- 29 files changed, 1527 insertions(+), 78 deletions(-) create mode 100644 crates/analytics/src/refunds/distribution.rs create mode 100644 crates/analytics/src/refunds/distribution/sessionized_distribution.rs create mode 100644 crates/analytics/src/refunds/distribution/sessionized_distribution/refund_error_message.rs create mode 100644 crates/analytics/src/refunds/distribution/sessionized_distribution/refund_reason.rs create mode 100644 crates/analytics/src/refunds/metrics/sessionized_metrics/refund_error_message.rs create mode 100644 crates/analytics/src/refunds/metrics/sessionized_metrics/refund_reason.rs diff --git a/crates/analytics/src/clickhouse.rs b/crates/analytics/src/clickhouse.rs index f56e875f72..cd870c12b2 100644 --- a/crates/analytics/src/clickhouse.rs +++ b/crates/analytics/src/clickhouse.rs @@ -16,7 +16,9 @@ use super::{ distribution::PaymentDistributionRow, filters::PaymentFilterRow, metrics::PaymentMetricRow, }, query::{Aggregate, ToSql, Window}, - refunds::{filters::RefundFilterRow, metrics::RefundMetricRow}, + refunds::{ + distribution::RefundDistributionRow, filters::RefundFilterRow, metrics::RefundMetricRow, + }, sdk_events::{filters::SdkEventFilter, metrics::SdkEventMetricRow}, types::{AnalyticsCollection, AnalyticsDataSource, LoadRow, QueryExecutionError}, }; @@ -170,6 +172,7 @@ impl super::payment_intents::filters::PaymentIntentFilterAnalytics for Clickhous impl super::payment_intents::metrics::PaymentIntentMetricAnalytics for ClickhouseClient {} impl super::refunds::metrics::RefundMetricAnalytics for ClickhouseClient {} impl super::refunds::filters::RefundFilterAnalytics for ClickhouseClient {} +impl super::refunds::distribution::RefundDistributionAnalytics for ClickhouseClient {} impl super::frm::metrics::FrmMetricAnalytics for 
ClickhouseClient {} impl super::frm::filters::FrmFilterAnalytics for ClickhouseClient {} impl super::sdk_events::filters::SdkEventFilterAnalytics for ClickhouseClient {} @@ -300,6 +303,16 @@ impl TryInto for serde_json::Value { } } +impl TryInto for serde_json::Value { + type Error = Report; + + fn try_into(self) -> Result { + serde_json::from_value(self).change_context(ParsingError::StructParseFailure( + "Failed to parse RefundDistributionRow in clickhouse results", + )) + } +} + impl TryInto for serde_json::Value { type Error = Report; diff --git a/crates/analytics/src/lib.rs index 224cd82ccd..13fdefe864 100644 --- a/crates/analytics/src/lib.rs +++ b/crates/analytics/src/lib.rs @@ -29,6 +29,7 @@ use hyperswitch_interfaces::secrets_interface::{ secret_state::{RawSecret, SecretStateContainer, SecuredSecret}, SecretManagementInterface, SecretsManagementError, }; +use refunds::distribution::{RefundDistribution, RefundDistributionRow}; pub use types::AnalyticsDomain; pub mod lambda_utils; pub mod utils; @@ -52,7 +53,7 @@ use api_models::analytics::{ sdk_events::{ SdkEventDimensions, SdkEventFilters, SdkEventMetrics, SdkEventMetricsBucketIdentifier, }, - Distribution, Granularity, TimeRange, + Granularity, PaymentDistributionBody, RefundDistributionBody, TimeRange, }; use clickhouse::ClickhouseClient; pub use clickhouse::ClickhouseConfig; @@ -215,7 +216,7 @@ impl AnalyticsProvider { pub async fn get_payment_distribution( &self, - distribution: &Distribution, + distribution: &PaymentDistributionBody, dimensions: &[PaymentDimensions], auth: &AuthInfo, filters: &PaymentFilters, @@ -528,6 +529,116 @@ impl AnalyticsProvider { .await } + pub async fn get_refund_distribution( + &self, + distribution: &RefundDistributionBody, + dimensions: &[RefundDimensions], + auth: &AuthInfo, + filters: &RefundFilters, + granularity: &Option, + time_range: &TimeRange, + ) -> types::MetricsResult> { + // Metrics to record the fetch time for each refund distribution + metrics::request::record_operation_time( + async { + match self { + Self::Sqlx(pool) => { + distribution.distribution_for + .load_distribution( + distribution, + dimensions, + auth, + filters, + granularity, + time_range, + pool, + ) + .await + } + Self::Clickhouse(pool) => { + distribution.distribution_for + .load_distribution( + distribution, + dimensions, + auth, + filters, + granularity, + time_range, + pool, + ) + .await + } + Self::CombinedCkh(sqlx_pool, ckh_pool) => { + let (ckh_result, sqlx_result) = tokio::join!(distribution.distribution_for + .load_distribution( + distribution, + dimensions, + auth, + filters, + granularity, + time_range, + ckh_pool, + ), + distribution.distribution_for + .load_distribution( + distribution, + dimensions, + auth, + filters, + granularity, + time_range, + sqlx_pool, + )); + match (&sqlx_result, &ckh_result) { + (Ok(ref sqlx_res), Ok(ref ckh_res)) if sqlx_res != ckh_res => { + router_env::logger::error!(clickhouse_result=?ckh_res, postgres_result=?sqlx_res, "Mismatch between clickhouse & postgres refunds analytics distribution") }, + _ => {} + + }; + + ckh_result + } + Self::CombinedSqlx(sqlx_pool, ckh_pool) => { + let (ckh_result, sqlx_result) = tokio::join!(distribution.distribution_for + .load_distribution( + distribution, + dimensions, + auth, + filters, + granularity, + time_range, + ckh_pool, + ), + distribution.distribution_for + .load_distribution( + distribution, + dimensions, + auth, + filters, + granularity, + time_range, + sqlx_pool, + )); + match (&sqlx_result, &ckh_result) {
+ (Ok(ref sqlx_res), Ok(ref ckh_res)) if sqlx_res != ckh_res => { + router_env::logger::error!(clickhouse_result=?ckh_res, postgres_result=?sqlx_res, "Mismatch between clickhouse & postgres refunds analytics distribution") }, + _ => {} + + }; + + sqlx_result + } + } + }, + &metrics::METRIC_FETCH_TIME, + &distribution.distribution_for, + self, + ) + .await + } + pub async fn get_frm_metrics( &self, metric: &FrmMetrics, diff --git a/crates/analytics/src/payments/distribution.rs index 213b624457..055572a080 100644 --- a/crates/analytics/src/payments/distribution.rs +++ b/crates/analytics/src/payments/distribution.rs @@ -2,7 +2,7 @@ use api_models::analytics::{ payments::{ PaymentDimensions, PaymentDistributions, PaymentFilters, PaymentMetricsBucketIdentifier, }, - Distribution, Granularity, TimeRange, + Granularity, PaymentDistributionBody, TimeRange, }; use diesel_models::enums as storage_enums; use time::PrimitiveDateTime; @@ -53,7 +53,7 @@ where #[allow(clippy::too_many_arguments)] async fn load_distribution( &self, - distribution: &Distribution, + distribution: &PaymentDistributionBody, dimensions: &[PaymentDimensions], auth: &AuthInfo, filters: &PaymentFilters, @@ -75,7 +75,7 @@ where { async fn load_distribution( &self, - distribution: &Distribution, + distribution: &PaymentDistributionBody, dimensions: &[PaymentDimensions], auth: &AuthInfo, filters: &PaymentFilters, diff --git a/crates/analytics/src/payments/distribution/payment_error_message.rs index 241754ee04..de5cb3ae5e 100644 --- a/crates/analytics/src/payments/distribution/payment_error_message.rs +++ b/crates/analytics/src/payments/distribution/payment_error_message.rs @@ -1,6 +1,6 @@ use api_models::analytics::{ payments::{PaymentDimensions, PaymentFilters, PaymentMetricsBucketIdentifier}, - Distribution, Granularity, TimeRange, + Granularity, PaymentDistributionBody, TimeRange, }; use common_utils::errors::ReportSwitchExt; use diesel_models::enums as storage_enums; @@ -31,7 +31,7 @@ where { async fn load_distribution( &self, - distribution: &Distribution, + distribution: &PaymentDistributionBody, dimensions: &[PaymentDimensions], auth: &AuthInfo, filters: &PaymentFilters, diff --git a/crates/analytics/src/query.rs index caa112ec17..cbb0cf6737 100644 --- a/crates/analytics/src/query.rs +++ b/crates/analytics/src/query.rs @@ -9,7 +9,7 @@ use api_models::{ frm::{FrmDimensions, FrmTransactionType}, payment_intents::PaymentIntentDimensions, payments::{PaymentDimensions, PaymentDistributions}, - refunds::{RefundDimensions, RefundType}, + refunds::{RefundDimensions, RefundDistributions, RefundType}, sdk_events::{SdkEventDimensions, SdkEventNames}, Granularity, }, @@ -488,6 +488,7 @@ impl_to_sql_for_to_string!( PaymentIntentDimensions, &PaymentDistributions, RefundDimensions, + &RefundDistributions, FrmDimensions, PaymentMethod, PaymentMethodType, diff --git a/crates/analytics/src/refunds.rs index 590dc148eb..ed6f396cce 100644 --- a/crates/analytics/src/refunds.rs +++ b/crates/analytics/src/refunds.rs @@ -1,6 +1,7 @@ pub mod accumulator; mod core; +pub mod distribution; pub mod filters; pub mod metrics; pub mod types; diff --git a/crates/analytics/src/refunds/accumulator.rs index add38c9816..840d46bbab 100644 --- a/crates/analytics/src/refunds/accumulator.rs +++
b/crates/analytics/src/refunds/accumulator.rs @@ -1,19 +1,56 @@ -use api_models::analytics::refunds::RefundMetricsBucketValue; +use api_models::analytics::refunds::{ + ErrorMessagesResult, ReasonsResult, RefundMetricsBucketValue, +}; +use bigdecimal::ToPrimitive; use diesel_models::enums as storage_enums; -use super::metrics::RefundMetricRow; +use super::{distribution::RefundDistributionRow, metrics::RefundMetricRow}; #[derive(Debug, Default)] pub struct RefundMetricsAccumulator { pub refund_success_rate: SuccessRateAccumulator, pub refund_count: CountAccumulator, pub refund_success: CountAccumulator, - pub processed_amount: PaymentProcessedAmountAccumulator, + pub processed_amount: RefundProcessedAmountAccumulator, + pub refund_reason: RefundReasonAccumulator, + pub refund_reason_distribution: RefundReasonDistributionAccumulator, + pub refund_error_message: RefundReasonAccumulator, + pub refund_error_message_distribution: RefundErrorMessageDistributionAccumulator, +} + +#[derive(Debug, Default)] +pub struct RefundReasonDistributionRow { + pub count: i64, + pub total: i64, + pub refund_reason: String, +} + +#[derive(Debug, Default)] +pub struct RefundReasonDistributionAccumulator { + pub refund_reason_vec: Vec, +} + +#[derive(Debug, Default)] +pub struct RefundErrorMessageDistributionRow { + pub count: i64, + pub total: i64, + pub refund_error_message: String, +} + +#[derive(Debug, Default)] +pub struct RefundErrorMessageDistributionAccumulator { + pub refund_error_message_vec: Vec, +} + +#[derive(Debug, Default)] +#[repr(transparent)] +pub struct RefundReasonAccumulator { + pub count: u64, } #[derive(Debug, Default)] pub struct SuccessRateAccumulator { - pub success: i64, - pub total: i64, + pub success: u32, + pub total: u32, } #[derive(Debug, Default)] #[repr(transparent)] @@ -21,8 +58,8 @@ pub struct CountAccumulator { pub count: Option, } #[derive(Debug, Default)] -#[repr(transparent)] -pub struct PaymentProcessedAmountAccumulator { +pub struct RefundProcessedAmountAccumulator { + pub count: Option, pub total: Option, } @@ -34,6 +71,93 @@ pub trait RefundMetricAccumulator { fn collect(self) -> Self::MetricOutput; } +pub trait RefundDistributionAccumulator { + type DistributionOutput; + + fn add_distribution_bucket(&mut self, distribution: &RefundDistributionRow); + + fn collect(self) -> Self::DistributionOutput; +} + +impl RefundDistributionAccumulator for RefundReasonDistributionAccumulator { + type DistributionOutput = Option>; + + fn add_distribution_bucket(&mut self, distribution: &RefundDistributionRow) { + self.refund_reason_vec.push(RefundReasonDistributionRow { + count: distribution.count.unwrap_or_default(), + total: distribution + .total + .clone() + .map(|i| i.to_i64().unwrap_or_default()) + .unwrap_or_default(), + refund_reason: distribution.refund_reason.clone().unwrap_or_default(), + }) + } + + fn collect(mut self) -> Self::DistributionOutput { + if self.refund_reason_vec.is_empty() { + None + } else { + self.refund_reason_vec.sort_by(|a, b| b.count.cmp(&a.count)); + let mut res: Vec = Vec::new(); + for val in self.refund_reason_vec.into_iter() { + let perc = f64::from(u32::try_from(val.count).ok()?) 
* 100.0 + / f64::from(u32::try_from(val.total).ok()?); + + res.push(ReasonsResult { + reason: val.refund_reason, + count: val.count, + percentage: (perc * 100.0).round() / 100.0, + }) + } + + Some(res) + } + } +} + +impl RefundDistributionAccumulator for RefundErrorMessageDistributionAccumulator { + type DistributionOutput = Option>; + + fn add_distribution_bucket(&mut self, distribution: &RefundDistributionRow) { + self.refund_error_message_vec + .push(RefundErrorMessageDistributionRow { + count: distribution.count.unwrap_or_default(), + total: distribution + .total + .clone() + .map(|i| i.to_i64().unwrap_or_default()) + .unwrap_or_default(), + refund_error_message: distribution + .refund_error_message + .clone() + .unwrap_or_default(), + }) + } + + fn collect(mut self) -> Self::DistributionOutput { + if self.refund_error_message_vec.is_empty() { + None + } else { + self.refund_error_message_vec + .sort_by(|a, b| b.count.cmp(&a.count)); + let mut res: Vec = Vec::new(); + for val in self.refund_error_message_vec.into_iter() { + let perc = f64::from(u32::try_from(val.count).ok()?) * 100.0 + / f64::from(u32::try_from(val.total).ok()?); + + res.push(ErrorMessagesResult { + error_message: val.refund_error_message, + count: val.count, + percentage: (perc * 100.0).round() / 100.0, + }) + } + + Some(res) + } + } +} + impl RefundMetricAccumulator for CountAccumulator { type MetricOutput = Option; #[inline] @@ -50,62 +174,103 @@ impl RefundMetricAccumulator for CountAccumulator { } } -impl RefundMetricAccumulator for PaymentProcessedAmountAccumulator { - type MetricOutput = (Option, Option); +impl RefundMetricAccumulator for RefundProcessedAmountAccumulator { + type MetricOutput = (Option, Option, Option); #[inline] fn add_metrics_bucket(&mut self, metrics: &RefundMetricRow) { self.total = match ( self.total, - metrics - .total - .as_ref() - .and_then(bigdecimal::ToPrimitive::to_i64), + metrics.total.as_ref().and_then(ToPrimitive::to_i64), ) { (None, None) => None, (None, i @ Some(_)) | (i @ Some(_), None) => i, (Some(a), Some(b)) => Some(a + b), - } + }; + + self.count = match (self.count, metrics.count) { + (None, None) => None, + (None, i @ Some(_)) | (i @ Some(_), None) => i, + (Some(a), Some(b)) => Some(a + b), + }; } #[inline] fn collect(self) -> Self::MetricOutput { - (self.total.and_then(|i| u64::try_from(i).ok()), Some(0)) + let total = u64::try_from(self.total.unwrap_or_default()).ok(); + let count = self.count.and_then(|i| u64::try_from(i).ok()); + + (total, count, Some(0)) } } impl RefundMetricAccumulator for SuccessRateAccumulator { - type MetricOutput = Option; + type MetricOutput = (Option, Option, Option); fn add_metrics_bucket(&mut self, metrics: &RefundMetricRow) { if let Some(ref refund_status) = metrics.refund_status { if refund_status.as_ref() == &storage_enums::RefundStatus::Success { - self.success += metrics.count.unwrap_or_default(); + if let Some(success) = metrics + .count + .and_then(|success| u32::try_from(success).ok()) + { + self.success += success; + } } }; - self.total += metrics.count.unwrap_or_default(); + if let Some(total) = metrics.count.and_then(|total| u32::try_from(total).ok()) { + self.total += total; + } } fn collect(self) -> Self::MetricOutput { - if self.total <= 0 { - None + if self.total == 0 { + (None, None, None) } else { - Some( - f64::from(u32::try_from(self.success).ok()?) 
* 100.0 - / f64::from(u32::try_from(self.total).ok()?), - ) + let success = Some(self.success); + let total = Some(self.total); + let success_rate = match (success, total) { + (Some(s), Some(t)) if t > 0 => Some(f64::from(s) * 100.0 / f64::from(t)), + _ => None, + }; + (success, total, success_rate) } } } +impl RefundMetricAccumulator for RefundReasonAccumulator { + type MetricOutput = Option; + + fn add_metrics_bucket(&mut self, metrics: &RefundMetricRow) { + if let Some(count) = metrics.count { + if let Ok(count_u64) = u64::try_from(count) { + self.count += count_u64; + } + } + } + + fn collect(self) -> Self::MetricOutput { + Some(self.count) + } +} + impl RefundMetricsAccumulator { pub fn collect(self) -> RefundMetricsBucketValue { - let (refund_processed_amount, refund_processed_amount_in_usd) = + let (successful_refunds, total_refunds, refund_success_rate) = + self.refund_success_rate.collect(); + let (refund_processed_amount, refund_processed_count, refund_processed_amount_in_usd) = self.processed_amount.collect(); RefundMetricsBucketValue { - refund_success_rate: self.refund_success_rate.collect(), + successful_refunds, + total_refunds, + refund_success_rate, refund_count: self.refund_count.collect(), refund_success_count: self.refund_success.collect(), refund_processed_amount, refund_processed_amount_in_usd, + refund_processed_count, + refund_reason_distribution: self.refund_reason_distribution.collect(), + refund_error_message_distribution: self.refund_error_message_distribution.collect(), + refund_reason_count: self.refund_reason.collect(), + refund_error_message_count: self.refund_error_message.collect(), } } } diff --git a/crates/analytics/src/refunds/core.rs b/crates/analytics/src/refunds/core.rs index e3bfa4da9d..205600b925 100644 --- a/crates/analytics/src/refunds/core.rs +++ b/crates/analytics/src/refunds/core.rs @@ -1,15 +1,17 @@ #![allow(dead_code)] -use std::collections::HashMap; +use std::collections::{HashMap, HashSet}; use api_models::analytics::{ refunds::{ - RefundDimensions, RefundMetrics, RefundMetricsBucketIdentifier, RefundMetricsBucketResponse, + RefundDimensions, RefundDistributions, RefundMetrics, RefundMetricsBucketIdentifier, + RefundMetricsBucketResponse, }, GetRefundFilterRequest, GetRefundMetricRequest, RefundFilterValue, RefundFiltersResponse, RefundsAnalyticsMetadata, RefundsMetricsResponse, }; use bigdecimal::ToPrimitive; use common_enums::Currency; +use common_utils::errors::CustomResult; use currency_conversion::{conversion::convert, types::ExchangeRates}; use error_stack::ResultExt; use router_env::{ @@ -19,17 +21,31 @@ use router_env::{ }; use super::{ + distribution::RefundDistributionRow, filters::{get_refund_filter_for_dimension, RefundFilterRow}, + metrics::RefundMetricRow, RefundMetricsAccumulator, }; use crate::{ enums::AuthInfo, errors::{AnalyticsError, AnalyticsResult}, metrics, - refunds::RefundMetricAccumulator, + refunds::{accumulator::RefundDistributionAccumulator, RefundMetricAccumulator}, AnalyticsProvider, }; +#[derive(Debug)] +pub enum TaskType { + MetricTask( + RefundMetrics, + CustomResult, AnalyticsError>, + ), + DistributionTask( + RefundDistributions, + CustomResult, AnalyticsError>, + ), +} + pub async fn get_metrics( pool: &AnalyticsProvider, ex_rates: &ExchangeRates, @@ -62,65 +78,145 @@ pub async fn get_metrics( ) .await .change_context(AnalyticsError::UnknownError); - (metric_type, data) + TaskType::MetricTask(metric_type, data) } .instrument(task_span), ); } - while let Some((metric, data)) = set + if let 
Some(distribution) = req.clone().distribution { + let req = req.clone(); + let pool = pool.clone(); + let task_span = tracing::debug_span!( + "analytics_refunds_distribution_query", + refund_distribution = distribution.distribution_for.as_ref() + ); + + let auth_scoped = auth.to_owned(); + set.spawn( + async move { + let data = pool + .get_refund_distribution( + &distribution, + &req.group_by_names.clone(), + &auth_scoped, + &req.filters, + &req.time_series.map(|t| t.granularity), + &req.time_range, + ) + .await + .change_context(AnalyticsError::UnknownError); + TaskType::DistributionTask(distribution.distribution_for, data) + } + .instrument(task_span), + ); + } + + while let Some(task_type) = set .join_next() .await .transpose() .change_context(AnalyticsError::UnknownError)? { - let data = data?; - let attributes = &add_attributes([ - ("metric_type", metric.to_string()), - ("source", pool.to_string()), - ]); + match task_type { + TaskType::MetricTask(metric, data) => { + let data = data?; + let attributes = &add_attributes([ + ("metric_type", metric.to_string()), + ("source", pool.to_string()), + ]); - let value = u64::try_from(data.len()); - if let Ok(val) = value { - metrics::BUCKETS_FETCHED.record(&metrics::CONTEXT, val, attributes); - logger::debug!("Attributes: {:?}, Buckets fetched: {}", attributes, val); - } + let value = u64::try_from(data.len()); + if let Ok(val) = value { + metrics::BUCKETS_FETCHED.record(&metrics::CONTEXT, val, attributes); + logger::debug!("Attributes: {:?}, Buckets fetched: {}", attributes, val); + } - for (id, value) in data { - logger::debug!(bucket_id=?id, bucket_value=?value, "Bucket row for metric {metric}"); - let metrics_builder = metrics_accumulator.entry(id).or_default(); - match metric { - RefundMetrics::RefundSuccessRate | RefundMetrics::SessionizedRefundSuccessRate => { - metrics_builder - .refund_success_rate - .add_metrics_bucket(&value) + for (id, value) in data { + logger::debug!(bucket_id=?id, bucket_value=?value, "Bucket row for metric {metric}"); + let metrics_builder = metrics_accumulator.entry(id).or_default(); + match metric { + RefundMetrics::RefundSuccessRate + | RefundMetrics::SessionizedRefundSuccessRate => metrics_builder + .refund_success_rate + .add_metrics_bucket(&value), + RefundMetrics::RefundCount | RefundMetrics::SessionizedRefundCount => { + metrics_builder.refund_count.add_metrics_bucket(&value) + } + RefundMetrics::RefundSuccessCount + | RefundMetrics::SessionizedRefundSuccessCount => { + metrics_builder.refund_success.add_metrics_bucket(&value) + } + RefundMetrics::RefundProcessedAmount + | RefundMetrics::SessionizedRefundProcessedAmount => { + metrics_builder.processed_amount.add_metrics_bucket(&value) + } + RefundMetrics::SessionizedRefundReason => { + metrics_builder.refund_reason.add_metrics_bucket(&value) + } + RefundMetrics::SessionizedRefundErrorMessage => metrics_builder + .refund_error_message + .add_metrics_bucket(&value), + } } - RefundMetrics::RefundCount | RefundMetrics::SessionizedRefundCount => { - metrics_builder.refund_count.add_metrics_bucket(&value) + + logger::debug!( + "Analytics Accumulated Results: metric: {}, results: {:#?}", + metric, + metrics_accumulator + ); + } + TaskType::DistributionTask(distribution, data) => { + let data = data?; + let attributes = &add_attributes([ + ("distribution_type", distribution.to_string()), + ("source", pool.to_string()), + ]); + let value = u64::try_from(data.len()); + if let Ok(val) = value { + metrics::BUCKETS_FETCHED.record(&metrics::CONTEXT, val, 
attributes); + logger::debug!("Attributes: {:?}, Buckets fetched: {}", attributes, val); } - RefundMetrics::RefundSuccessCount - | RefundMetrics::SessionizedRefundSuccessCount => { - metrics_builder.refund_success.add_metrics_bucket(&value) - } - RefundMetrics::RefundProcessedAmount - | RefundMetrics::SessionizedRefundProcessedAmount => { - metrics_builder.processed_amount.add_metrics_bucket(&value) + + for (id, value) in data { + logger::debug!(bucket_id=?id, bucket_value=?value, "Bucket row for distribution {distribution}"); + + let metrics_builder = metrics_accumulator.entry(id).or_default(); + match distribution { + RefundDistributions::SessionizedRefundReason => metrics_builder + .refund_reason_distribution + .add_distribution_bucket(&value), + RefundDistributions::SessionizedRefundErrorMessage => metrics_builder + .refund_error_message_distribution + .add_distribution_bucket(&value), + } } + logger::debug!( + "Analytics Accumulated Results: distribution: {}, results: {:#?}", + distribution, + metrics_accumulator + ); } } - - logger::debug!( - "Analytics Accumulated Results: metric: {}, results: {:#?}", - metric, - metrics_accumulator - ); } + + let mut success = 0; + let mut total = 0; let mut total_refund_processed_amount = 0; let mut total_refund_processed_amount_in_usd = 0; + let mut total_refund_processed_count = 0; + let mut total_refund_reason_count = 0; + let mut total_refund_error_message_count = 0; let query_data: Vec = metrics_accumulator .into_iter() .map(|(id, val)| { let mut collected_values = val.collect(); + if let Some(success_count) = collected_values.successful_refunds { + success += success_count; + } + if let Some(total_count) = collected_values.total_refunds { + total += total_count; + } if let Some(amount) = collected_values.refund_processed_amount { let amount_in_usd = id .currency @@ -142,18 +238,34 @@ pub async fn get_metrics( total_refund_processed_amount += amount; total_refund_processed_amount_in_usd += amount_in_usd.unwrap_or(0); } + if let Some(count) = collected_values.refund_processed_count { + total_refund_processed_count += count; + } + if let Some(total_count) = collected_values.refund_reason_count { + total_refund_reason_count += total_count; + } + if let Some(total_count) = collected_values.refund_error_message_count { + total_refund_error_message_count += total_count; + } RefundMetricsBucketResponse { values: collected_values, dimensions: id, } }) .collect(); - + let total_refund_success_rate = match (success, total) { + (s, t) if t > 0 => Some(f64::from(s) * 100.0 / f64::from(t)), + _ => None, + }; Ok(RefundsMetricsResponse { query_data, meta_data: [RefundsAnalyticsMetadata { + total_refund_success_rate, total_refund_processed_amount: Some(total_refund_processed_amount), total_refund_processed_amount_in_usd: Some(total_refund_processed_amount_in_usd), + total_refund_processed_count: Some(total_refund_processed_count), + total_refund_reason_count: Some(total_refund_reason_count), + total_refund_error_message_count: Some(total_refund_error_message_count), }], }) } @@ -229,6 +341,8 @@ pub async fn get_filters( RefundDimensions::Connector => fil.connector, RefundDimensions::RefundType => fil.refund_type.map(|i| i.as_ref().to_string()), RefundDimensions::ProfileId => fil.profile_id, + RefundDimensions::RefundReason => fil.refund_reason, + RefundDimensions::RefundErrorMessage => fil.refund_error_message, }) .collect::>(); res.query_data.push(RefundFilterValue { diff --git a/crates/analytics/src/refunds/distribution.rs 
b/crates/analytics/src/refunds/distribution.rs new file mode 100644 index 0000000000..962f74acd1 --- /dev/null +++ b/crates/analytics/src/refunds/distribution.rs @@ -0,0 +1,105 @@ +use api_models::analytics::{ + refunds::{ + RefundDimensions, RefundDistributions, RefundFilters, RefundMetricsBucketIdentifier, + RefundType, + }, + Granularity, RefundDistributionBody, TimeRange, +}; +use diesel_models::enums as storage_enums; +use time::PrimitiveDateTime; + +use crate::{ + enums::AuthInfo, + query::{Aggregate, GroupByClause, ToSql, Window}, + types::{AnalyticsCollection, AnalyticsDataSource, DBEnumWrapper, LoadRow, MetricsResult}, +}; + +mod sessionized_distribution; + +#[derive(Debug, PartialEq, Eq, serde::Deserialize)] +pub struct RefundDistributionRow { + pub currency: Option>, + pub refund_status: Option>, + pub connector: Option, + pub refund_type: Option>, + pub profile_id: Option, + pub total: Option, + pub count: Option, + pub refund_reason: Option, + pub refund_error_message: Option, + #[serde(with = "common_utils::custom_serde::iso8601::option")] + pub start_bucket: Option, + #[serde(with = "common_utils::custom_serde::iso8601::option")] + pub end_bucket: Option, +} + +pub trait RefundDistributionAnalytics: LoadRow {} + +#[async_trait::async_trait] +pub trait RefundDistribution +where + T: AnalyticsDataSource + RefundDistributionAnalytics, +{ + #[allow(clippy::too_many_arguments)] + async fn load_distribution( + &self, + distribution: &RefundDistributionBody, + dimensions: &[RefundDimensions], + auth: &AuthInfo, + filters: &RefundFilters, + granularity: &Option, + time_range: &TimeRange, + pool: &T, + ) -> MetricsResult>; +} + +#[async_trait::async_trait] +impl RefundDistribution for RefundDistributions +where + T: AnalyticsDataSource + RefundDistributionAnalytics, + PrimitiveDateTime: ToSql, + AnalyticsCollection: ToSql, + Granularity: GroupByClause, + Aggregate<&'static str>: ToSql, + Window<&'static str>: ToSql, +{ + async fn load_distribution( + &self, + distribution: &RefundDistributionBody, + dimensions: &[RefundDimensions], + auth: &AuthInfo, + filters: &RefundFilters, + granularity: &Option, + time_range: &TimeRange, + pool: &T, + ) -> MetricsResult> { + match self { + Self::SessionizedRefundReason => { + sessionized_distribution::RefundReason + .load_distribution( + distribution, + dimensions, + auth, + filters, + granularity, + time_range, + pool, + ) + .await + } + Self::SessionizedRefundErrorMessage => { + sessionized_distribution::RefundErrorMessage + .load_distribution( + distribution, + dimensions, + auth, + filters, + granularity, + time_range, + pool, + ) + .await + } + } + } +} diff --git a/crates/analytics/src/refunds/distribution/sessionized_distribution.rs b/crates/analytics/src/refunds/distribution/sessionized_distribution.rs new file mode 100644 index 0000000000..391b855e96 --- /dev/null +++ b/crates/analytics/src/refunds/distribution/sessionized_distribution.rs @@ -0,0 +1,7 @@ +mod refund_error_message; +mod refund_reason; + +pub(super) use refund_error_message::RefundErrorMessage; +pub(super) use refund_reason::RefundReason; + +pub use super::{RefundDistribution, RefundDistributionAnalytics, RefundDistributionRow}; diff --git a/crates/analytics/src/refunds/distribution/sessionized_distribution/refund_error_message.rs b/crates/analytics/src/refunds/distribution/sessionized_distribution/refund_error_message.rs new file mode 100644 index 0000000000..a4268c86cb --- /dev/null +++ 
b/crates/analytics/src/refunds/distribution/sessionized_distribution/refund_error_message.rs @@ -0,0 +1,177 @@ +use api_models::analytics::{ + refunds::{RefundDimensions, RefundFilters, RefundMetricsBucketIdentifier}, + Granularity, RefundDistributionBody, TimeRange, +}; +use common_utils::errors::ReportSwitchExt; +use diesel_models::enums as storage_enums; +use error_stack::ResultExt; +use time::PrimitiveDateTime; + +use super::{RefundDistribution, RefundDistributionRow}; +use crate::{ + enums::AuthInfo, + query::{ + Aggregate, GroupByClause, Order, QueryBuilder, QueryFilter, SeriesBucket, ToSql, Window, + }, + types::{AnalyticsCollection, AnalyticsDataSource, MetricsError, MetricsResult}, +}; + +#[derive(Default)] +pub(crate) struct RefundErrorMessage; + +#[async_trait::async_trait] +impl RefundDistribution for RefundErrorMessage +where + T: AnalyticsDataSource + super::RefundDistributionAnalytics, + PrimitiveDateTime: ToSql, + AnalyticsCollection: ToSql, + Granularity: GroupByClause, + Aggregate<&'static str>: ToSql, + Window<&'static str>: ToSql, +{ + async fn load_distribution( + &self, + distribution: &RefundDistributionBody, + dimensions: &[RefundDimensions], + auth: &AuthInfo, + filters: &RefundFilters, + granularity: &Option, + time_range: &TimeRange, + pool: &T, + ) -> MetricsResult> { + let mut query_builder: QueryBuilder = + QueryBuilder::new(AnalyticsCollection::RefundSessionized); + + for dim in dimensions.iter() { + query_builder.add_select_column(dim).switch()?; + } + + query_builder + .add_select_column(&distribution.distribution_for) + .switch()?; + + query_builder + .add_select_column(Aggregate::Count { + field: None, + alias: Some("count"), + }) + .switch()?; + query_builder + .add_select_column(Aggregate::Min { + field: "created_at", + alias: Some("start_bucket"), + }) + .switch()?; + query_builder + .add_select_column(Aggregate::Max { + field: "created_at", + alias: Some("end_bucket"), + }) + .switch()?; + + filters.set_filter_clause(&mut query_builder).switch()?; + + auth.set_filter_clause(&mut query_builder).switch()?; + + time_range + .set_filter_clause(&mut query_builder) + .attach_printable("Error filtering time range") + .switch()?; + + query_builder + .add_filter_clause( + RefundDimensions::RefundStatus, + storage_enums::RefundStatus::Failure, + ) + .switch()?; + + for dim in dimensions.iter() { + query_builder + .add_group_by_clause(dim) + .attach_printable("Error grouping by dimensions") + .switch()?; + } + + query_builder + .add_group_by_clause(&distribution.distribution_for) + .attach_printable("Error grouping by distribution_for") + .switch()?; + + if let Some(granularity) = granularity.as_ref() { + granularity + .set_group_by_clause(&mut query_builder) + .attach_printable("Error adding granularity") + .switch()?; + } + + for dim in dimensions.iter() { + query_builder.add_outer_select_column(dim).switch()?; + } + + query_builder + .add_outer_select_column(&distribution.distribution_for) + .switch()?; + query_builder.add_outer_select_column("count").switch()?; + query_builder + .add_outer_select_column("start_bucket") + .switch()?; + query_builder + .add_outer_select_column("end_bucket") + .switch()?; + let sql_dimensions = query_builder.transform_to_sql_values(dimensions).switch()?; + + query_builder + .add_outer_select_column(Window::Sum { + field: "count", + partition_by: Some(sql_dimensions), + order_by: None, + alias: Some("total"), + }) + .switch()?; + + query_builder + .add_top_n_clause( + dimensions, + distribution.distribution_cardinality.into(), + 
"count", + Order::Descending, + ) + .switch()?; + + query_builder + .execute_query::(pool) + .await + .change_context(MetricsError::QueryBuildingError)? + .change_context(MetricsError::QueryExecutionFailure)? + .into_iter() + .map(|i| { + Ok(( + RefundMetricsBucketIdentifier::new( + i.currency.as_ref().map(|i| i.0), + i.refund_status.as_ref().map(|i| i.0.to_string()), + i.connector.clone(), + i.refund_type.as_ref().map(|i| i.0.to_string()), + i.profile_id.clone(), + i.refund_reason.clone(), + i.refund_error_message.clone(), + TimeRange { + start_time: match (granularity, i.start_bucket) { + (Some(g), Some(st)) => g.clip_to_start(st)?, + _ => time_range.start_time, + }, + end_time: granularity.as_ref().map_or_else( + || Ok(time_range.end_time), + |g| i.end_bucket.map(|et| g.clip_to_end(et)).transpose(), + )?, + }, + ), + i, + )) + }) + .collect::, + crate::query::PostProcessingError, + >>() + .change_context(MetricsError::PostProcessingFailure) + } +} diff --git a/crates/analytics/src/refunds/distribution/sessionized_distribution/refund_reason.rs b/crates/analytics/src/refunds/distribution/sessionized_distribution/refund_reason.rs new file mode 100644 index 0000000000..a2a933db8c --- /dev/null +++ b/crates/analytics/src/refunds/distribution/sessionized_distribution/refund_reason.rs @@ -0,0 +1,169 @@ +use api_models::analytics::{ + refunds::{RefundDimensions, RefundFilters, RefundMetricsBucketIdentifier}, + Granularity, RefundDistributionBody, TimeRange, +}; +use common_utils::errors::ReportSwitchExt; +use error_stack::ResultExt; +use time::PrimitiveDateTime; + +use super::{RefundDistribution, RefundDistributionRow}; +use crate::{ + enums::AuthInfo, + query::{ + Aggregate, GroupByClause, Order, QueryBuilder, QueryFilter, SeriesBucket, ToSql, Window, + }, + types::{AnalyticsCollection, AnalyticsDataSource, MetricsError, MetricsResult}, +}; + +#[derive(Default)] +pub(crate) struct RefundReason; + +#[async_trait::async_trait] +impl RefundDistribution for RefundReason +where + T: AnalyticsDataSource + super::RefundDistributionAnalytics, + PrimitiveDateTime: ToSql, + AnalyticsCollection: ToSql, + Granularity: GroupByClause, + Aggregate<&'static str>: ToSql, + Window<&'static str>: ToSql, +{ + async fn load_distribution( + &self, + distribution: &RefundDistributionBody, + dimensions: &[RefundDimensions], + auth: &AuthInfo, + filters: &RefundFilters, + granularity: &Option, + time_range: &TimeRange, + pool: &T, + ) -> MetricsResult> { + let mut query_builder: QueryBuilder = + QueryBuilder::new(AnalyticsCollection::RefundSessionized); + + for dim in dimensions.iter() { + query_builder.add_select_column(dim).switch()?; + } + + query_builder + .add_select_column(&distribution.distribution_for) + .switch()?; + + query_builder + .add_select_column(Aggregate::Count { + field: None, + alias: Some("count"), + }) + .switch()?; + query_builder + .add_select_column(Aggregate::Min { + field: "created_at", + alias: Some("start_bucket"), + }) + .switch()?; + query_builder + .add_select_column(Aggregate::Max { + field: "created_at", + alias: Some("end_bucket"), + }) + .switch()?; + + filters.set_filter_clause(&mut query_builder).switch()?; + + auth.set_filter_clause(&mut query_builder).switch()?; + + time_range + .set_filter_clause(&mut query_builder) + .attach_printable("Error filtering time range") + .switch()?; + + for dim in dimensions.iter() { + query_builder + .add_group_by_clause(dim) + .attach_printable("Error grouping by dimensions") + .switch()?; + } + + query_builder + 
.add_group_by_clause(&distribution.distribution_for) + .attach_printable("Error grouping by distribution_for") + .switch()?; + + if let Some(granularity) = granularity.as_ref() { + granularity + .set_group_by_clause(&mut query_builder) + .attach_printable("Error adding granularity") + .switch()?; + } + + for dim in dimensions.iter() { + query_builder.add_outer_select_column(dim).switch()?; + } + + query_builder + .add_outer_select_column(&distribution.distribution_for) + .switch()?; + query_builder.add_outer_select_column("count").switch()?; + query_builder + .add_outer_select_column("start_bucket") + .switch()?; + query_builder + .add_outer_select_column("end_bucket") + .switch()?; + let sql_dimensions = query_builder.transform_to_sql_values(dimensions).switch()?; + + query_builder + .add_outer_select_column(Window::Sum { + field: "count", + partition_by: Some(sql_dimensions), + order_by: None, + alias: Some("total"), + }) + .switch()?; + + query_builder + .add_top_n_clause( + dimensions, + distribution.distribution_cardinality.into(), + "count", + Order::Descending, + ) + .switch()?; + + query_builder + .execute_query::(pool) + .await + .change_context(MetricsError::QueryBuildingError)? + .change_context(MetricsError::QueryExecutionFailure)? + .into_iter() + .map(|i| { + Ok(( + RefundMetricsBucketIdentifier::new( + i.currency.as_ref().map(|i| i.0), + i.refund_status.as_ref().map(|i| i.0.to_string()), + i.connector.clone(), + i.refund_type.as_ref().map(|i| i.0.to_string()), + i.profile_id.clone(), + i.refund_reason.clone(), + i.refund_error_message.clone(), + TimeRange { + start_time: match (granularity, i.start_bucket) { + (Some(g), Some(st)) => g.clip_to_start(st)?, + _ => time_range.start_time, + }, + end_time: granularity.as_ref().map_or_else( + || Ok(time_range.end_time), + |g| i.end_bucket.map(|et| g.clip_to_end(et)).transpose(), + )?, + }, + ), + i, + )) + }) + .collect::, + crate::query::PostProcessingError, + >>() + .change_context(MetricsError::PostProcessingFailure) + } +} diff --git a/crates/analytics/src/refunds/filters.rs b/crates/analytics/src/refunds/filters.rs index d87a778ebf..b742187c4e 100644 --- a/crates/analytics/src/refunds/filters.rs +++ b/crates/analytics/src/refunds/filters.rs @@ -56,4 +56,6 @@ pub struct RefundFilterRow { pub connector: Option, pub refund_type: Option>, pub profile_id: Option, + pub refund_reason: Option, + pub refund_error_message: Option, } diff --git a/crates/analytics/src/refunds/metrics.rs b/crates/analytics/src/refunds/metrics.rs index c211ea82d7..57e6511d92 100644 --- a/crates/analytics/src/refunds/metrics.rs +++ b/crates/analytics/src/refunds/metrics.rs @@ -31,6 +31,8 @@ pub struct RefundMetricRow { pub connector: Option, pub refund_type: Option>, pub profile_id: Option, + pub refund_reason: Option, + pub refund_error_message: Option, pub total: Option, pub count: Option, #[serde(with = "common_utils::custom_serde::iso8601::option")] @@ -122,6 +124,16 @@ where .load_metrics(dimensions, auth, filters, granularity, time_range, pool) .await } + Self::SessionizedRefundReason => { + sessionized_metrics::RefundReason + .load_metrics(dimensions, auth, filters, granularity, time_range, pool) + .await + } + Self::SessionizedRefundErrorMessage => { + sessionized_metrics::RefundErrorMessage + .load_metrics(dimensions, auth, filters, granularity, time_range, pool) + .await + } } } } diff --git a/crates/analytics/src/refunds/metrics/refund_count.rs b/crates/analytics/src/refunds/metrics/refund_count.rs index 07de04c589..7079993094 100644 --- 
a/crates/analytics/src/refunds/metrics/refund_count.rs +++ b/crates/analytics/src/refunds/metrics/refund_count.rs @@ -99,6 +99,8 @@ where i.connector.clone(), i.refund_type.as_ref().map(|i| i.0.to_string()), i.profile_id.clone(), + i.refund_reason.clone(), + i.refund_error_message.clone(), TimeRange { start_time: match (granularity, i.start_bucket) { (Some(g), Some(st)) => g.clip_to_start(st)?, diff --git a/crates/analytics/src/refunds/metrics/refund_processed_amount.rs b/crates/analytics/src/refunds/metrics/refund_processed_amount.rs index 6cba5f58fe..3890b8be6e 100644 --- a/crates/analytics/src/refunds/metrics/refund_processed_amount.rs +++ b/crates/analytics/src/refunds/metrics/refund_processed_amount.rs @@ -107,6 +107,8 @@ where i.connector.clone(), i.refund_type.as_ref().map(|i| i.0.to_string()), i.profile_id.clone(), + i.refund_reason.clone(), + i.refund_error_message.clone(), TimeRange { start_time: match (granularity, i.start_bucket) { (Some(g), Some(st)) => g.clip_to_start(st)?, diff --git a/crates/analytics/src/refunds/metrics/refund_success_count.rs b/crates/analytics/src/refunds/metrics/refund_success_count.rs index 642cf70580..4c3f600b05 100644 --- a/crates/analytics/src/refunds/metrics/refund_success_count.rs +++ b/crates/analytics/src/refunds/metrics/refund_success_count.rs @@ -102,6 +102,8 @@ where i.connector.clone(), i.refund_type.as_ref().map(|i| i.0.to_string()), i.profile_id.clone(), + i.refund_reason.clone(), + i.refund_error_message.clone(), TimeRange { start_time: match (granularity, i.start_bucket) { (Some(g), Some(st)) => g.clip_to_start(st)?, diff --git a/crates/analytics/src/refunds/metrics/refund_success_rate.rs b/crates/analytics/src/refunds/metrics/refund_success_rate.rs index 7b5716ba41..8ed144999a 100644 --- a/crates/analytics/src/refunds/metrics/refund_success_rate.rs +++ b/crates/analytics/src/refunds/metrics/refund_success_rate.rs @@ -97,6 +97,8 @@ where i.connector.clone(), i.refund_type.as_ref().map(|i| i.0.to_string()), i.profile_id.clone(), + i.refund_reason.clone(), + i.refund_error_message.clone(), TimeRange { start_time: match (granularity, i.start_bucket) { (Some(g), Some(st)) => g.clip_to_start(st)?, diff --git a/crates/analytics/src/refunds/metrics/sessionized_metrics.rs b/crates/analytics/src/refunds/metrics/sessionized_metrics.rs index bb404cd341..3a5195be6c 100644 --- a/crates/analytics/src/refunds/metrics/sessionized_metrics.rs +++ b/crates/analytics/src/refunds/metrics/sessionized_metrics.rs @@ -1,10 +1,14 @@ mod refund_count; +mod refund_error_message; mod refund_processed_amount; +mod refund_reason; mod refund_success_count; mod refund_success_rate; pub(super) use refund_count::RefundCount; +pub(super) use refund_error_message::RefundErrorMessage; pub(super) use refund_processed_amount::RefundProcessedAmount; +pub(super) use refund_reason::RefundReason; pub(super) use refund_success_count::RefundSuccessCount; pub(super) use refund_success_rate::RefundSuccessRate; diff --git a/crates/analytics/src/refunds/metrics/sessionized_metrics/refund_count.rs b/crates/analytics/src/refunds/metrics/sessionized_metrics/refund_count.rs index c77e1f7a52..20989daca7 100644 --- a/crates/analytics/src/refunds/metrics/sessionized_metrics/refund_count.rs +++ b/crates/analytics/src/refunds/metrics/sessionized_metrics/refund_count.rs @@ -100,6 +100,8 @@ where i.connector.clone(), i.refund_type.as_ref().map(|i| i.0.to_string()), i.profile_id.clone(), + i.refund_reason.clone(), + i.refund_error_message.clone(), TimeRange { start_time: match (granularity, 
i.start_bucket) { (Some(g), Some(st)) => g.clip_to_start(st)?, diff --git a/crates/analytics/src/refunds/metrics/sessionized_metrics/refund_error_message.rs b/crates/analytics/src/refunds/metrics/sessionized_metrics/refund_error_message.rs new file mode 100644 index 0000000000..72e32907ef --- /dev/null +++ b/crates/analytics/src/refunds/metrics/sessionized_metrics/refund_error_message.rs @@ -0,0 +1,190 @@ +use std::collections::HashSet; + +use api_models::analytics::{ + refunds::{RefundDimensions, RefundFilters, RefundMetricsBucketIdentifier}, + Granularity, TimeRange, +}; +use common_utils::errors::ReportSwitchExt; +use diesel_models::enums as storage_enums; +use error_stack::ResultExt; +use time::PrimitiveDateTime; + +use super::RefundMetricRow; +use crate::{ + enums::AuthInfo, + query::{ + Aggregate, FilterTypes, GroupByClause, Order, QueryBuilder, QueryFilter, SeriesBucket, + ToSql, Window, + }, + types::{AnalyticsCollection, AnalyticsDataSource, MetricsError, MetricsResult}, +}; + +#[derive(Default)] +pub(crate) struct RefundErrorMessage; + +#[async_trait::async_trait] +impl super::RefundMetric for RefundErrorMessage +where + T: AnalyticsDataSource + super::RefundMetricAnalytics, + PrimitiveDateTime: ToSql, + AnalyticsCollection: ToSql, + Granularity: GroupByClause, + Aggregate<&'static str>: ToSql, + Window<&'static str>: ToSql, +{ + async fn load_metrics( + &self, + dimensions: &[RefundDimensions], + auth: &AuthInfo, + filters: &RefundFilters, + granularity: &Option, + time_range: &TimeRange, + pool: &T, + ) -> MetricsResult> { + let mut inner_query_builder: QueryBuilder = + QueryBuilder::new(AnalyticsCollection::RefundSessionized); + inner_query_builder + .add_select_column("sum(sign_flag)") + .switch()?; + + inner_query_builder + .add_custom_filter_clause( + RefundDimensions::RefundErrorMessage, + "NULL", + FilterTypes::IsNotNull, + ) + .switch()?; + + time_range + .set_filter_clause(&mut inner_query_builder) + .attach_printable("Error filtering time range for inner query") + .switch()?; + + let inner_query_string = inner_query_builder + .build_query() + .attach_printable("Error building inner query") + .change_context(MetricsError::QueryBuildingError)?; + + let mut outer_query_builder: QueryBuilder = + QueryBuilder::new(AnalyticsCollection::RefundSessionized); + + for dim in dimensions.iter() { + outer_query_builder.add_select_column(dim).switch()?; + } + + outer_query_builder + .add_select_column("sum(sign_flag) AS count") + .switch()?; + + outer_query_builder + .add_select_column(format!("({}) AS total", inner_query_string)) + .switch()?; + + outer_query_builder + .add_select_column(Aggregate::Min { + field: "created_at", + alias: Some("start_bucket"), + }) + .switch()?; + + outer_query_builder + .add_select_column(Aggregate::Max { + field: "created_at", + alias: Some("end_bucket"), + }) + .switch()?; + + filters + .set_filter_clause(&mut outer_query_builder) + .switch()?; + + auth.set_filter_clause(&mut outer_query_builder).switch()?; + + time_range + .set_filter_clause(&mut outer_query_builder) + .attach_printable("Error filtering time range for outer query") + .switch()?; + + outer_query_builder + .add_filter_clause( + RefundDimensions::RefundStatus, + storage_enums::RefundStatus::Failure, + ) + .switch()?; + + outer_query_builder + .add_custom_filter_clause( + RefundDimensions::RefundErrorMessage, + "NULL", + FilterTypes::IsNotNull, + ) + .switch()?; + + for dim in dimensions.iter() { + outer_query_builder + .add_group_by_clause(dim) + .attach_printable("Error grouping by 
dimensions") + .switch()?; + } + + if let Some(granularity) = granularity.as_ref() { + granularity + .set_group_by_clause(&mut outer_query_builder) + .attach_printable("Error adding granularity") + .switch()?; + } + + outer_query_builder + .add_order_by_clause("count", Order::Descending) + .attach_printable("Error adding order by clause") + .switch()?; + + let filtered_dimensions: Vec<&RefundDimensions> = dimensions + .iter() + .filter(|&&dim| dim != RefundDimensions::RefundErrorMessage) + .collect(); + + for dim in &filtered_dimensions { + outer_query_builder + .add_order_by_clause(*dim, Order::Ascending) + .attach_printable("Error adding order by clause") + .switch()?; + } + + outer_query_builder + .execute_query::(pool) + .await + .change_context(MetricsError::QueryBuildingError)? + .change_context(MetricsError::QueryExecutionFailure)? + .into_iter() + .map(|i| { + Ok(( + RefundMetricsBucketIdentifier::new( + i.currency.as_ref().map(|i| i.0), + None, + i.connector.clone(), + i.refund_type.as_ref().map(|i| i.0.to_string()), + i.profile_id.clone(), + i.refund_reason.clone(), + i.refund_error_message.clone(), + TimeRange { + start_time: match (granularity, i.start_bucket) { + (Some(g), Some(st)) => g.clip_to_start(st)?, + _ => time_range.start_time, + }, + end_time: granularity.as_ref().map_or_else( + || Ok(time_range.end_time), + |g| i.end_bucket.map(|et| g.clip_to_end(et)).transpose(), + )?, + }, + ), + i, + )) + }) + .collect::, + crate::query::PostProcessingError, + >>() + .change_context(MetricsError::PostProcessingFailure) + } +} diff --git a/crates/analytics/src/refunds/metrics/sessionized_metrics/refund_processed_amount.rs b/crates/analytics/src/refunds/metrics/sessionized_metrics/refund_processed_amount.rs index c91938228a..93880824ef 100644 --- a/crates/analytics/src/refunds/metrics/sessionized_metrics/refund_processed_amount.rs +++ b/crates/analytics/src/refunds/metrics/sessionized_metrics/refund_processed_amount.rs @@ -47,6 +47,12 @@ where query_builder.add_select_column(dim).switch()?; } + query_builder + .add_select_column(Aggregate::Count { + field: None, + alias: Some("count"), + }) + .switch()?; query_builder .add_select_column(Aggregate::Sum { field: "refund_amount", @@ -109,6 +115,8 @@ where i.connector.clone(), i.refund_type.as_ref().map(|i| i.0.to_string()), i.profile_id.clone(), + i.refund_reason.clone(), + i.refund_error_message.clone(), TimeRange { start_time: match (granularity, i.start_bucket) { (Some(g), Some(st)) => g.clip_to_start(st)?, diff --git a/crates/analytics/src/refunds/metrics/sessionized_metrics/refund_reason.rs b/crates/analytics/src/refunds/metrics/sessionized_metrics/refund_reason.rs new file mode 100644 index 0000000000..0df28901e8 --- /dev/null +++ b/crates/analytics/src/refunds/metrics/sessionized_metrics/refund_reason.rs @@ -0,0 +1,182 @@ +use std::collections::HashSet; + +use api_models::analytics::{ + refunds::{RefundDimensions, RefundFilters, RefundMetricsBucketIdentifier}, + Granularity, TimeRange, +}; +use common_utils::errors::ReportSwitchExt; +use error_stack::ResultExt; +use time::PrimitiveDateTime; + +use super::RefundMetricRow; +use crate::{ + enums::AuthInfo, + query::{ + Aggregate, FilterTypes, GroupByClause, Order, QueryBuilder, QueryFilter, SeriesBucket, + ToSql, Window, + }, + types::{AnalyticsCollection, AnalyticsDataSource, MetricsError, MetricsResult}, +}; + +#[derive(Default)] +pub(crate) struct RefundReason; + +#[async_trait::async_trait] +impl super::RefundMetric for RefundReason +where + T: AnalyticsDataSource + 
super::RefundMetricAnalytics, + PrimitiveDateTime: ToSql, + AnalyticsCollection: ToSql, + Granularity: GroupByClause, + Aggregate<&'static str>: ToSql, + Window<&'static str>: ToSql, +{ + async fn load_metrics( + &self, + dimensions: &[RefundDimensions], + auth: &AuthInfo, + filters: &RefundFilters, + granularity: &Option, + time_range: &TimeRange, + pool: &T, + ) -> MetricsResult> { + let mut inner_query_builder: QueryBuilder = + QueryBuilder::new(AnalyticsCollection::RefundSessionized); + inner_query_builder + .add_select_column("sum(sign_flag)") + .switch()?; + + inner_query_builder + .add_custom_filter_clause( + RefundDimensions::RefundReason, + "NULL", + FilterTypes::IsNotNull, + ) + .switch()?; + + time_range + .set_filter_clause(&mut inner_query_builder) + .attach_printable("Error filtering time range for inner query") + .switch()?; + + let inner_query_string = inner_query_builder + .build_query() + .attach_printable("Error building inner query") + .change_context(MetricsError::QueryBuildingError)?; + + let mut outer_query_builder: QueryBuilder = + QueryBuilder::new(AnalyticsCollection::RefundSessionized); + + for dim in dimensions.iter() { + outer_query_builder.add_select_column(dim).switch()?; + } + + outer_query_builder + .add_select_column("sum(sign_flag) AS count") + .switch()?; + + outer_query_builder + .add_select_column(format!("({}) AS total", inner_query_string)) + .switch()?; + + outer_query_builder + .add_select_column(Aggregate::Min { + field: "created_at", + alias: Some("start_bucket"), + }) + .switch()?; + + outer_query_builder + .add_select_column(Aggregate::Max { + field: "created_at", + alias: Some("end_bucket"), + }) + .switch()?; + + filters + .set_filter_clause(&mut outer_query_builder) + .switch()?; + + auth.set_filter_clause(&mut outer_query_builder).switch()?; + + time_range + .set_filter_clause(&mut outer_query_builder) + .attach_printable("Error filtering time range for outer query") + .switch()?; + + outer_query_builder + .add_custom_filter_clause( + RefundDimensions::RefundReason, + "NULL", + FilterTypes::IsNotNull, + ) + .switch()?; + + for dim in dimensions.iter() { + outer_query_builder + .add_group_by_clause(dim) + .attach_printable("Error grouping by dimensions") + .switch()?; + } + + if let Some(granularity) = granularity.as_ref() { + granularity + .set_group_by_clause(&mut outer_query_builder) + .attach_printable("Error adding granularity") + .switch()?; + } + + outer_query_builder + .add_order_by_clause("count", Order::Descending) + .attach_printable("Error adding order by clause") + .switch()?; + + let filtered_dimensions: Vec<&RefundDimensions> = dimensions + .iter() + .filter(|&&dim| dim != RefundDimensions::RefundReason) + .collect(); + + for dim in &filtered_dimensions { + outer_query_builder + .add_order_by_clause(*dim, Order::Ascending) + .attach_printable("Error adding order by clause") + .switch()?; + } + + outer_query_builder + .execute_query::(pool) + .await + .change_context(MetricsError::QueryBuildingError)? + .change_context(MetricsError::QueryExecutionFailure)? 
+ .into_iter() + .map(|i| { + Ok(( + RefundMetricsBucketIdentifier::new( + i.currency.as_ref().map(|i| i.0), + None, + i.connector.clone(), + i.refund_type.as_ref().map(|i| i.0.to_string()), + i.profile_id.clone(), + i.refund_reason.clone(), + i.refund_error_message.clone(), + TimeRange { + start_time: match (granularity, i.start_bucket) { + (Some(g), Some(st)) => g.clip_to_start(st)?, + _ => time_range.start_time, + }, + end_time: granularity.as_ref().map_or_else( + || Ok(time_range.end_time), + |g| i.end_bucket.map(|et| g.clip_to_end(et)).transpose(), + )?, + }, + ), + i, + )) + }) + .collect::, + crate::query::PostProcessingError, + >>() + .change_context(MetricsError::PostProcessingFailure) + } +} diff --git a/crates/analytics/src/refunds/metrics/sessionized_metrics/refund_success_count.rs b/crates/analytics/src/refunds/metrics/sessionized_metrics/refund_success_count.rs index 332261a320..c0bb139c46 100644 --- a/crates/analytics/src/refunds/metrics/sessionized_metrics/refund_success_count.rs +++ b/crates/analytics/src/refunds/metrics/sessionized_metrics/refund_success_count.rs @@ -102,6 +102,8 @@ where i.connector.clone(), i.refund_type.as_ref().map(|i| i.0.to_string()), i.profile_id.clone(), + i.refund_reason.clone(), + i.refund_error_message.clone(), TimeRange { start_time: match (granularity, i.start_bucket) { (Some(g), Some(st)) => g.clip_to_start(st)?, diff --git a/crates/analytics/src/refunds/metrics/sessionized_metrics/refund_success_rate.rs b/crates/analytics/src/refunds/metrics/sessionized_metrics/refund_success_rate.rs index 35ee0d61b5..e2348d51ad 100644 --- a/crates/analytics/src/refunds/metrics/sessionized_metrics/refund_success_rate.rs +++ b/crates/analytics/src/refunds/metrics/sessionized_metrics/refund_success_rate.rs @@ -97,6 +97,8 @@ where i.connector.clone(), i.refund_type.as_ref().map(|i| i.0.to_string()), i.profile_id.clone(), + i.refund_reason.clone(), + i.refund_error_message.clone(), TimeRange { start_time: match (granularity, i.start_bucket) { (Some(g), Some(st)) => g.clip_to_start(st)?, diff --git a/crates/analytics/src/refunds/types.rs b/crates/analytics/src/refunds/types.rs index 3f22081a69..c0735557d1 100644 --- a/crates/analytics/src/refunds/types.rs +++ b/crates/analytics/src/refunds/types.rs @@ -42,6 +42,21 @@ where .attach_printable("Error adding profile id filter")?; } + if !self.refund_reason.is_empty() { + builder + .add_filter_in_range_clause(RefundDimensions::RefundReason, &self.refund_reason) + .attach_printable("Error adding refund reason filter")?; + } + + if !self.refund_error_message.is_empty() { + builder + .add_filter_in_range_clause( + RefundDimensions::RefundErrorMessage, + &self.refund_error_message, + ) + .attach_printable("Error adding refund error message filter")?; + } + Ok(()) } } diff --git a/crates/analytics/src/sqlx.rs b/crates/analytics/src/sqlx.rs index 1d91ce17c6..2a94d52876 100644 --- a/crates/analytics/src/sqlx.rs +++ b/crates/analytics/src/sqlx.rs @@ -154,6 +154,7 @@ impl super::payment_intents::filters::PaymentIntentFilterAnalytics for SqlxClien impl super::payment_intents::metrics::PaymentIntentMetricAnalytics for SqlxClient {} impl super::refunds::metrics::RefundMetricAnalytics for SqlxClient {} impl super::refunds::filters::RefundFilterAnalytics for SqlxClient {} +impl super::refunds::distribution::RefundDistributionAnalytics for SqlxClient {} impl super::disputes::filters::DisputeFilterAnalytics for SqlxClient {} impl super::disputes::metrics::DisputeMetricAnalytics for SqlxClient {} impl 
 impl super::frm::metrics::FrmMetricAnalytics for SqlxClient {}
@@ -214,6 +215,15 @@ impl<'a> FromRow<'a, PgRow> for super::refunds::metrics::RefundMetricRow {
             ColumnNotFound(_) => Ok(Default::default()),
             e => Err(e),
         })?;
+        let refund_reason: Option<String> = row.try_get("refund_reason").or_else(|e| match e {
+            ColumnNotFound(_) => Ok(Default::default()),
+            e => Err(e),
+        })?;
+        let refund_error_message: Option<String> =
+            row.try_get("refund_error_message").or_else(|e| match e {
+                ColumnNotFound(_) => Ok(Default::default()),
+                e => Err(e),
+            })?;
         let total: Option<bigdecimal::BigDecimal> = row.try_get("total").or_else(|e| match e {
             ColumnNotFound(_) => Ok(Default::default()),
             e => Err(e),
@@ -235,6 +245,8 @@ impl<'a> FromRow<'a, PgRow> for super::refunds::metrics::RefundMetricRow {
             connector,
             refund_type,
             profile_id,
+            refund_reason,
+            refund_error_message,
             total,
             count,
             start_bucket,
@@ -791,12 +803,88 @@ impl<'a> FromRow<'a, PgRow> for super::refunds::filters::RefundFilterRow {
             ColumnNotFound(_) => Ok(Default::default()),
             e => Err(e),
         })?;
+        let refund_reason: Option<String> = row.try_get("refund_reason").or_else(|e| match e {
+            ColumnNotFound(_) => Ok(Default::default()),
+            e => Err(e),
+        })?;
+        let refund_error_message: Option<String> =
+            row.try_get("refund_error_message").or_else(|e| match e {
+                ColumnNotFound(_) => Ok(Default::default()),
+                e => Err(e),
+            })?;
         Ok(Self {
             currency,
             refund_status,
             connector,
             refund_type,
             profile_id,
+            refund_reason,
+            refund_error_message,
+        })
+    }
+}
+
+impl<'a> FromRow<'a, PgRow> for super::refunds::distribution::RefundDistributionRow {
+    fn from_row(row: &'a PgRow) -> sqlx::Result<Self> {
+        let currency: Option<DBEnumWrapper<Currency>> =
+            row.try_get("currency").or_else(|e| match e {
+                ColumnNotFound(_) => Ok(Default::default()),
+                e => Err(e),
+            })?;
+        let refund_status: Option<DBEnumWrapper<RefundStatus>> =
+            row.try_get("refund_status").or_else(|e| match e {
+                ColumnNotFound(_) => Ok(Default::default()),
+                e => Err(e),
+            })?;
+        let connector: Option<String> = row.try_get("connector").or_else(|e| match e {
+            ColumnNotFound(_) => Ok(Default::default()),
+            e => Err(e),
+        })?;
+        let refund_type: Option<DBEnumWrapper<RefundType>> =
+            row.try_get("refund_type").or_else(|e| match e {
+                ColumnNotFound(_) => Ok(Default::default()),
+                e => Err(e),
+            })?;
+        let profile_id: Option<String> = row.try_get("profile_id").or_else(|e| match e {
+            ColumnNotFound(_) => Ok(Default::default()),
+            e => Err(e),
+        })?;
+        let total: Option<bigdecimal::BigDecimal> = row.try_get("total").or_else(|e| match e {
+            ColumnNotFound(_) => Ok(Default::default()),
+            e => Err(e),
+        })?;
+        let count: Option<i64> = row.try_get("count").or_else(|e| match e {
+            ColumnNotFound(_) => Ok(Default::default()),
+            e => Err(e),
+        })?;
+        let refund_reason: Option<String> = row.try_get("refund_reason").or_else(|e| match e {
+            ColumnNotFound(_) => Ok(Default::default()),
+            e => Err(e),
+        })?;
+        let refund_error_message: Option<String> =
+            row.try_get("refund_error_message").or_else(|e| match e {
+                ColumnNotFound(_) => Ok(Default::default()),
+                e => Err(e),
+            })?;
+        // Removing millisecond precision to get accurate diffs against clickhouse
+        let start_bucket: Option<PrimitiveDateTime> = row
+            .try_get::<Option<PrimitiveDateTime>, _>("start_bucket")?
+            .and_then(|dt| dt.replace_millisecond(0).ok());
+        let end_bucket: Option<PrimitiveDateTime> = row
+            .try_get::<Option<PrimitiveDateTime>, _>("end_bucket")?
+            .and_then(|dt| dt.replace_millisecond(0).ok());
+        Ok(Self {
+            currency,
+            refund_status,
+            connector,
+            refund_type,
+            profile_id,
+            total,
+            count,
+            refund_reason,
+            refund_error_message,
+            start_bucket,
+            end_bucket,
         })
     }
 }
diff --git a/crates/api_models/src/analytics.rs b/crates/api_models/src/analytics.rs
index b6d4044c5f..806eb4b6e0 100644
--- a/crates/api_models/src/analytics.rs
+++ b/crates/api_models/src/analytics.rs
@@ -12,7 +12,7 @@ use self::{
     frm::{FrmDimensions, FrmMetrics},
     payment_intents::{PaymentIntentDimensions, PaymentIntentMetrics},
     payments::{PaymentDimensions, PaymentDistributions, PaymentMetrics},
-    refunds::{RefundDimensions, RefundMetrics},
+    refunds::{RefundDimensions, RefundDistributions, RefundMetrics},
     sdk_events::{SdkEventDimensions, SdkEventMetrics},
 };
 pub mod active_payments;
@@ -73,7 +73,7 @@ pub struct GetPaymentMetricRequest {
     #[serde(default)]
     pub filters: payments::PaymentFilters,
     pub metrics: HashSet<PaymentMetrics>,
-    pub distribution: Option<Distribution>,
+    pub distribution: Option<PaymentDistributionBody>,
     #[serde(default)]
     pub delta: bool,
 }
@@ -98,11 +98,18 @@ impl Into<u64> for QueryLimit {
 
 #[derive(Clone, Debug, serde::Deserialize, serde::Serialize)]
 #[serde(rename_all = "camelCase")]
-pub struct Distribution {
+pub struct PaymentDistributionBody {
     pub distribution_for: PaymentDistributions,
     pub distribution_cardinality: QueryLimit,
 }
 
+#[derive(Clone, Debug, serde::Deserialize, serde::Serialize)]
+#[serde(rename_all = "camelCase")]
+pub struct RefundDistributionBody {
+    pub distribution_for: RefundDistributions,
+    pub distribution_cardinality: QueryLimit,
+}
+
 #[derive(Clone, Debug, serde::Deserialize, serde::Serialize)]
 #[serde(rename_all = "camelCase")]
 pub struct ReportRequest {
@@ -142,6 +149,7 @@ pub struct GetRefundMetricRequest {
     #[serde(default)]
     pub filters: refunds::RefundFilters,
     pub metrics: HashSet<RefundMetrics>,
+    pub distribution: Option<RefundDistributionBody>,
     #[serde(default)]
     pub delta: bool,
 }
@@ -230,8 +238,12 @@ pub struct PaymentIntentsAnalyticsMetadata {
 
 #[derive(Debug, serde::Serialize)]
 pub struct RefundsAnalyticsMetadata {
+    pub total_refund_success_rate: Option<f64>,
     pub total_refund_processed_amount: Option<u64>,
     pub total_refund_processed_amount_in_usd: Option<u64>,
+    pub total_refund_processed_count: Option<u64>,
+    pub total_refund_reason_count: Option<u64>,
+    pub total_refund_error_message_count: Option<u64>,
 }
 #[derive(Debug, serde::Deserialize, serde::Serialize)]
 #[serde(rename_all = "camelCase")]
diff --git a/crates/api_models/src/analytics/refunds.rs b/crates/api_models/src/analytics/refunds.rs
index d981bd4382..0afca6c1ef 100644
--- a/crates/api_models/src/analytics/refunds.rs
+++ b/crates/api_models/src/analytics/refunds.rs
@@ -43,6 +43,10 @@ pub struct RefundFilters {
     pub refund_type: Vec<RefundType>,
     #[serde(default)]
     pub profile_id: Vec<id_type::ProfileId>,
+    #[serde(default)]
+    pub refund_reason: Vec<String>,
+    #[serde(default)]
+    pub refund_error_message: Vec<String>,
 }
 
 #[derive(
@@ -67,6 +71,8 @@ pub enum RefundDimensions {
     Connector,
     RefundType,
     ProfileId,
+    RefundReason,
+    RefundErrorMessage,
 }
 
 #[derive(
@@ -92,6 +98,44 @@ pub enum RefundMetrics {
     SessionizedRefundCount,
     SessionizedRefundSuccessCount,
     SessionizedRefundProcessedAmount,
+    SessionizedRefundReason,
+    SessionizedRefundErrorMessage,
+}
+
+#[derive(Debug, Default, serde::Serialize)]
+pub struct ReasonsResult {
+    pub reason: String,
+    pub count: i64,
+    pub percentage: f64,
+}
+
+#[derive(Debug, Default, serde::Serialize)]
+pub struct ErrorMessagesResult {
+    pub error_message: String,
+    pub count: i64,
+    pub percentage: f64,
+}
+
+#[derive(
+    Clone,
+    Copy,
+    Debug,
+    Hash,
+    PartialEq,
+    Eq,
+    serde::Serialize,
+    serde::Deserialize,
+    strum::Display,
+    strum::EnumIter,
+    strum::AsRefStr,
+)]
+#[strum(serialize_all = "snake_case")]
+#[serde(rename_all = "snake_case")]
+pub enum RefundDistributions {
+    #[strum(serialize = "refund_reason")]
+    SessionizedRefundReason,
+    #[strum(serialize = "refund_error_message")]
+    SessionizedRefundErrorMessage,
 }
 
 pub mod metric_behaviour {
@@ -124,9 +168,10 @@ pub struct RefundMetricsBucketIdentifier {
     pub currency: Option<Currency>,
     pub refund_status: Option<String>,
     pub connector: Option<String>,
-    pub refund_type: Option<String>,
     pub profile_id: Option<String>,
+    pub refund_reason: Option<String>,
+    pub refund_error_message: Option<String>,
     #[serde(rename = "time_range")]
     pub time_bucket: TimeRange,
     #[serde(rename = "time_bucket")]
@@ -141,6 +186,8 @@ impl Hash for RefundMetricsBucketIdentifier {
         self.connector.hash(state);
         self.refund_type.hash(state);
         self.profile_id.hash(state);
+        self.refund_reason.hash(state);
+        self.refund_error_message.hash(state);
         self.time_bucket.hash(state);
     }
 }
@@ -155,12 +202,15 @@ impl PartialEq for RefundMetricsBucketIdentifier {
 }
 
 impl RefundMetricsBucketIdentifier {
+    #[allow(clippy::too_many_arguments)]
     pub fn new(
         currency: Option<Currency>,
         refund_status: Option<String>,
         connector: Option<String>,
         refund_type: Option<String>,
         profile_id: Option<String>,
+        refund_reason: Option<String>,
+        refund_error_message: Option<String>,
         normalized_time_range: TimeRange,
     ) -> Self {
         Self {
@@ -169,6 +219,8 @@ impl RefundMetricsBucketIdentifier {
             connector,
             refund_type,
             profile_id,
+            refund_reason,
+            refund_error_message,
             time_bucket: normalized_time_range,
             start_time: normalized_time_range.start_time,
         }
@@ -176,11 +228,18 @@ impl RefundMetricsBucketIdentifier {
 }
 #[derive(Debug, serde::Serialize)]
 pub struct RefundMetricsBucketValue {
+    pub successful_refunds: Option<u32>,
+    pub total_refunds: Option<u32>,
     pub refund_success_rate: Option<f64>,
     pub refund_count: Option<u64>,
     pub refund_success_count: Option<u64>,
     pub refund_processed_amount: Option<u64>,
     pub refund_processed_amount_in_usd: Option<u64>,
+    pub refund_processed_count: Option<u64>,
+    pub refund_reason_distribution: Option<Vec<ReasonsResult>>,
+    pub refund_error_message_distribution: Option<Vec<ErrorMessagesResult>>,
+    pub refund_reason_count: Option<u64>,
+    pub refund_error_message_count: Option<u64>,
 }
 #[derive(Debug, serde::Serialize)]
 pub struct RefundMetricsBucketResponse {