feat(analytics): authentication analytics (#4429)

Co-authored-by: Sampras Lopes <lsampras@pm.me>
Co-authored-by: hyperswitch-bot[bot] <148525504+hyperswitch-bot[bot]@users.noreply.github.com>
Author: Vrishab Srivatsa
Date: 2024-05-10 15:38:26 +05:30
Committed by: GitHub
Parent: 86e05501cb
Commit: 24d154248c
28 changed files with 790 additions and 418 deletions

View File

@ -0,0 +1,6 @@
pub mod accumulator;
mod core;
pub mod metrics;
pub use accumulator::{AuthEventMetricAccumulator, AuthEventMetricsAccumulator};
pub use self::core::get_metrics;

View File

@ -0,0 +1,58 @@
use api_models::analytics::auth_events::AuthEventMetricsBucketValue;
use super::metrics::AuthEventMetricRow;
#[derive(Debug, Default)]
pub struct AuthEventMetricsAccumulator {
pub three_ds_sdk_count: CountAccumulator,
pub authentication_attempt_count: CountAccumulator,
pub authentication_success_count: CountAccumulator,
pub challenge_flow_count: CountAccumulator,
pub challenge_attempt_count: CountAccumulator,
pub challenge_success_count: CountAccumulator,
pub frictionless_flow_count: CountAccumulator,
}
#[derive(Debug, Default)]
#[repr(transparent)]
pub struct CountAccumulator {
pub count: Option<i64>,
}
pub trait AuthEventMetricAccumulator {
type MetricOutput;
fn add_metrics_bucket(&mut self, metrics: &AuthEventMetricRow);
fn collect(self) -> Self::MetricOutput;
}
impl AuthEventMetricAccumulator for CountAccumulator {
type MetricOutput = Option<u64>;
#[inline]
fn add_metrics_bucket(&mut self, metrics: &AuthEventMetricRow) {
self.count = match (self.count, metrics.count) {
(None, None) => None,
(None, i @ Some(_)) | (i @ Some(_), None) => i,
(Some(a), Some(b)) => Some(a + b),
}
}
#[inline]
fn collect(self) -> Self::MetricOutput {
self.count.and_then(|i| u64::try_from(i).ok())
}
}
impl AuthEventMetricsAccumulator {
pub fn collect(self) -> AuthEventMetricsBucketValue {
AuthEventMetricsBucketValue {
three_ds_sdk_count: self.three_ds_sdk_count.collect(),
authentication_attempt_count: self.authentication_attempt_count.collect(),
authentication_success_count: self.authentication_success_count.collect(),
challenge_flow_count: self.challenge_flow_count.collect(),
challenge_attempt_count: self.challenge_attempt_count.collect(),
challenge_success_count: self.challenge_success_count.collect(),
frictionless_flow_count: self.frictionless_flow_count.collect(),
}
}
}
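
Note: the accumulator above is a saturating sum over optional per-bucket counts — a missing count leaves the running total untouched, and a total that is negative or does not fit in a u64 collapses to None at collect time. A minimal standalone sketch of that behaviour (simplified to a free-standing struct rather than the trait-based version in this file):

#[derive(Debug, Default)]
struct CountAccumulator {
    count: Option<i64>,
}

impl CountAccumulator {
    // Merge one bucket's optional count into the running total.
    fn add(&mut self, bucket_count: Option<i64>) {
        self.count = match (self.count, bucket_count) {
            (None, None) => None,
            (None, c @ Some(_)) | (c @ Some(_), None) => c,
            (Some(a), Some(b)) => Some(a + b),
        };
    }

    // Negative totals (or anything that cannot convert to u64) become None.
    fn collect(self) -> Option<u64> {
        self.count.and_then(|c| u64::try_from(c).ok())
    }
}

fn main() {
    let mut acc = CountAccumulator::default();
    acc.add(Some(3));
    acc.add(None);
    acc.add(Some(4));
    assert_eq!(acc.collect(), Some(7));
}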

View File

@ -0,0 +1,108 @@
use std::collections::HashMap;
use api_models::analytics::{
auth_events::{AuthEventMetrics, AuthEventMetricsBucketIdentifier, MetricsBucketResponse},
AnalyticsMetadata, GetAuthEventMetricRequest, MetricsResponse,
};
use error_stack::ResultExt;
use router_env::{instrument, logger, tracing};
use super::AuthEventMetricsAccumulator;
use crate::{
auth_events::AuthEventMetricAccumulator,
errors::{AnalyticsError, AnalyticsResult},
AnalyticsProvider,
};
#[instrument(skip_all)]
pub async fn get_metrics(
pool: &AnalyticsProvider,
merchant_id: &String,
publishable_key: Option<&String>,
req: GetAuthEventMetricRequest,
) -> AnalyticsResult<MetricsResponse<MetricsBucketResponse>> {
let mut metrics_accumulator: HashMap<
AuthEventMetricsBucketIdentifier,
AuthEventMetricsAccumulator,
> = HashMap::new();
if let Some(publishable_key) = publishable_key {
let mut set = tokio::task::JoinSet::new();
for metric_type in req.metrics.iter().cloned() {
let req = req.clone();
let merchant_id_scoped = merchant_id.to_owned();
let publishable_key_scoped = publishable_key.to_owned();
let pool = pool.clone();
set.spawn(async move {
let data = pool
.get_auth_event_metrics(
&metric_type,
&merchant_id_scoped,
&publishable_key_scoped,
&req.time_series.map(|t| t.granularity),
&req.time_range,
)
.await
.change_context(AnalyticsError::UnknownError);
(metric_type, data)
});
}
while let Some((metric, data)) = set
.join_next()
.await
.transpose()
.change_context(AnalyticsError::UnknownError)?
{
for (id, value) in data? {
let metrics_builder = metrics_accumulator.entry(id).or_default();
match metric {
AuthEventMetrics::ThreeDsSdkCount => metrics_builder
.three_ds_sdk_count
.add_metrics_bucket(&value),
AuthEventMetrics::AuthenticationAttemptCount => metrics_builder
.authentication_attempt_count
.add_metrics_bucket(&value),
AuthEventMetrics::AuthenticationSuccessCount => metrics_builder
.authentication_success_count
.add_metrics_bucket(&value),
AuthEventMetrics::ChallengeFlowCount => metrics_builder
.challenge_flow_count
.add_metrics_bucket(&value),
AuthEventMetrics::ChallengeAttemptCount => metrics_builder
.challenge_attempt_count
.add_metrics_bucket(&value),
AuthEventMetrics::ChallengeSuccessCount => metrics_builder
.challenge_success_count
.add_metrics_bucket(&value),
AuthEventMetrics::FrictionlessFlowCount => metrics_builder
.frictionless_flow_count
.add_metrics_bucket(&value),
}
}
}
let query_data: Vec<MetricsBucketResponse> = metrics_accumulator
.into_iter()
.map(|(id, val)| MetricsBucketResponse {
values: val.collect(),
dimensions: id,
})
.collect();
Ok(MetricsResponse {
query_data,
meta_data: [AnalyticsMetadata {
current_time_range: req.time_range,
}],
})
} else {
logger::error!("Publishable key not present for merchant ID");
Ok(MetricsResponse {
query_data: vec![],
meta_data: [AnalyticsMetadata {
current_time_range: req.time_range,
}],
})
}
}
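
Note: the core flow above fans out one Tokio task per requested metric through a JoinSet, drains results as they complete, and folds every returned (bucket identifier, row) pair into the accumulator map. A minimal standalone sketch of that fan-out/fan-in pattern (Tokio with the rt-multi-thread and macros features assumed; the closure body stands in for pool.get_auth_event_metrics):

#[tokio::main]
async fn main() {
    let requested_metrics = ["authentication_attempt_count", "authentication_success_count"];

    let mut set = tokio::task::JoinSet::new();
    for metric in requested_metrics {
        set.spawn(async move {
            // Stand-in for the per-metric database query.
            (metric, 42u64)
        });
    }

    // join_next yields tasks in completion order, not spawn order.
    while let Some(joined) = set.join_next().await {
        let (metric, count) = joined.expect("metric task should not panic");
        println!("{metric}: {count}");
    }
}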

View File

@ -0,0 +1,107 @@
use api_models::analytics::{
auth_events::{AuthEventMetrics, AuthEventMetricsBucketIdentifier},
Granularity, TimeRange,
};
use time::PrimitiveDateTime;
use crate::{
query::{Aggregate, GroupByClause, ToSql, Window},
types::{AnalyticsCollection, AnalyticsDataSource, LoadRow, MetricsResult},
};
mod authentication_attempt_count;
mod authentication_success_count;
mod challenge_attempt_count;
mod challenge_flow_count;
mod challenge_success_count;
mod frictionless_flow_count;
mod three_ds_sdk_count;
use authentication_attempt_count::AuthenticationAttemptCount;
use authentication_success_count::AuthenticationSuccessCount;
use challenge_attempt_count::ChallengeAttemptCount;
use challenge_flow_count::ChallengeFlowCount;
use challenge_success_count::ChallengeSuccessCount;
use frictionless_flow_count::FrictionlessFlowCount;
use three_ds_sdk_count::ThreeDsSdkCount;
#[derive(Debug, PartialEq, Eq, serde::Deserialize)]
pub struct AuthEventMetricRow {
pub count: Option<i64>,
pub time_bucket: Option<String>,
}
pub trait AuthEventMetricAnalytics: LoadRow<AuthEventMetricRow> {}
#[async_trait::async_trait]
pub trait AuthEventMetric<T>
where
T: AnalyticsDataSource + AuthEventMetricAnalytics,
{
async fn load_metrics(
&self,
merchant_id: &str,
publishable_key: &str,
granularity: &Option<Granularity>,
time_range: &TimeRange,
pool: &T,
) -> MetricsResult<Vec<(AuthEventMetricsBucketIdentifier, AuthEventMetricRow)>>;
}
#[async_trait::async_trait]
impl<T> AuthEventMetric<T> for AuthEventMetrics
where
T: AnalyticsDataSource + AuthEventMetricAnalytics,
PrimitiveDateTime: ToSql<T>,
AnalyticsCollection: ToSql<T>,
Granularity: GroupByClause<T>,
Aggregate<&'static str>: ToSql<T>,
Window<&'static str>: ToSql<T>,
{
async fn load_metrics(
&self,
merchant_id: &str,
publishable_key: &str,
granularity: &Option<Granularity>,
time_range: &TimeRange,
pool: &T,
) -> MetricsResult<Vec<(AuthEventMetricsBucketIdentifier, AuthEventMetricRow)>> {
match self {
Self::ThreeDsSdkCount => {
ThreeDsSdkCount
.load_metrics(merchant_id, publishable_key, granularity, time_range, pool)
.await
}
Self::AuthenticationAttemptCount => {
AuthenticationAttemptCount
.load_metrics(merchant_id, publishable_key, granularity, time_range, pool)
.await
}
Self::AuthenticationSuccessCount => {
AuthenticationSuccessCount
.load_metrics(merchant_id, publishable_key, granularity, time_range, pool)
.await
}
Self::ChallengeFlowCount => {
ChallengeFlowCount
.load_metrics(merchant_id, publishable_key, granularity, time_range, pool)
.await
}
Self::ChallengeAttemptCount => {
ChallengeAttemptCount
.load_metrics(merchant_id, publishable_key, granularity, time_range, pool)
.await
}
Self::ChallengeSuccessCount => {
ChallengeSuccessCount
.load_metrics(merchant_id, publishable_key, granularity, time_range, pool)
.await
}
Self::FrictionlessFlowCount => {
FrictionlessFlowCount
.load_metrics(merchant_id, publishable_key, granularity, time_range, pool)
.await
}
}
}
}

View File

@ -1,26 +1,24 @@
use api_models::analytics::{
sdk_events::{
SdkEventDimensions, SdkEventFilters, SdkEventMetricsBucketIdentifier, SdkEventNames,
},
Granularity, TimeRange,
auth_events::AuthEventMetricsBucketIdentifier, sdk_events::SdkEventNames, Granularity,
TimeRange,
};
use common_utils::errors::ReportSwitchExt;
use error_stack::ResultExt;
use time::PrimitiveDateTime;
use super::SdkEventMetricRow;
use super::AuthEventMetricRow;
use crate::{
query::{Aggregate, GroupByClause, QueryBuilder, QueryFilter, ToSql, Window},
types::{AnalyticsCollection, AnalyticsDataSource, MetricsError, MetricsResult},
};
#[derive(Default)]
pub(super) struct ThreeDsMethodInvokedCount;
pub(super) struct AuthenticationAttemptCount;
#[async_trait::async_trait]
impl<T> super::SdkEventMetric<T> for ThreeDsMethodInvokedCount
impl<T> super::AuthEventMetric<T> for AuthenticationAttemptCount
where
T: AnalyticsDataSource + super::SdkEventMetricAnalytics,
T: AnalyticsDataSource + super::AuthEventMetricAnalytics,
PrimitiveDateTime: ToSql<T>,
AnalyticsCollection: ToSql<T>,
Granularity: GroupByClause<T>,
@ -29,19 +27,13 @@ where
{
async fn load_metrics(
&self,
dimensions: &[SdkEventDimensions],
_merchant_id: &str,
publishable_key: &str,
filters: &SdkEventFilters,
granularity: &Option<Granularity>,
time_range: &TimeRange,
pool: &T,
) -> MetricsResult<Vec<(SdkEventMetricsBucketIdentifier, SdkEventMetricRow)>> {
) -> MetricsResult<Vec<(AuthEventMetricsBucketIdentifier, AuthEventMetricRow)>> {
let mut query_builder: QueryBuilder<T> = QueryBuilder::new(AnalyticsCollection::SdkEvents);
let dimensions = dimensions.to_vec();
for dim in dimensions.iter() {
query_builder.add_select_column(dim).switch()?;
}
query_builder
.add_select_column(Aggregate::Count {
@ -56,14 +48,16 @@ where
.switch()?;
}
filters.set_filter_clause(&mut query_builder).switch()?;
query_builder
.add_filter_clause("merchant_id", publishable_key)
.switch()?;
query_builder
.add_filter_clause("event_name", SdkEventNames::ThreeDsMethod)
.add_bool_filter_clause("first_event", 1)
.switch()?;
query_builder
.add_filter_clause("event_name", SdkEventNames::AuthenticationCallInit)
.switch()?;
query_builder
@ -71,23 +65,14 @@ where
.switch()?;
query_builder
.add_filter_clause("category", "USER_EVENT")
.add_filter_clause("category", "API")
.switch()?;
query_builder.add_filter_clause("value", "Y").switch()?;
time_range
.set_filter_clause(&mut query_builder)
.attach_printable("Error filtering time range")
.switch()?;
for dim in dimensions.iter() {
query_builder
.add_group_by_clause(dim)
.attach_printable("Error grouping by dimensions")
.switch()?;
}
if let Some(_granularity) = granularity.as_ref() {
query_builder
.add_group_by_clause("time_bucket")
@ -96,27 +81,19 @@ where
}
query_builder
.execute_query::<SdkEventMetricRow, _>(pool)
.execute_query::<AuthEventMetricRow, _>(pool)
.await
.change_context(MetricsError::QueryBuildingError)?
.change_context(MetricsError::QueryExecutionFailure)?
.into_iter()
.map(|i| {
Ok((
SdkEventMetricsBucketIdentifier::new(
i.payment_method.clone(),
i.platform.clone(),
i.browser_name.clone(),
i.source.clone(),
i.component.clone(),
i.payment_experience.clone(),
i.time_bucket.clone(),
),
AuthEventMetricsBucketIdentifier::new(i.time_bucket.clone()),
i,
))
})
.collect::<error_stack::Result<
Vec<(SdkEventMetricsBucketIdentifier, SdkEventMetricRow)>,
Vec<(AuthEventMetricsBucketIdentifier, AuthEventMetricRow)>,
crate::query::PostProcessingError,
>>()
.change_context(MetricsError::PostProcessingFailure)

View File

@ -1,26 +1,24 @@
use api_models::analytics::{
sdk_events::{
SdkEventDimensions, SdkEventFilters, SdkEventMetricsBucketIdentifier, SdkEventNames,
},
Granularity, TimeRange,
auth_events::AuthEventMetricsBucketIdentifier, sdk_events::SdkEventNames, Granularity,
TimeRange,
};
use common_utils::errors::ReportSwitchExt;
use error_stack::ResultExt;
use time::PrimitiveDateTime;
use super::SdkEventMetricRow;
use super::AuthEventMetricRow;
use crate::{
query::{Aggregate, GroupByClause, QueryBuilder, QueryFilter, ToSql, Window},
types::{AnalyticsCollection, AnalyticsDataSource, MetricsError, MetricsResult},
};
#[derive(Default)]
pub(super) struct AuthenticationUnsuccessfulCount;
pub(super) struct AuthenticationSuccessCount;
#[async_trait::async_trait]
impl<T> super::SdkEventMetric<T> for AuthenticationUnsuccessfulCount
impl<T> super::AuthEventMetric<T> for AuthenticationSuccessCount
where
T: AnalyticsDataSource + super::SdkEventMetricAnalytics,
T: AnalyticsDataSource + super::AuthEventMetricAnalytics,
PrimitiveDateTime: ToSql<T>,
AnalyticsCollection: ToSql<T>,
Granularity: GroupByClause<T>,
@ -29,19 +27,13 @@ where
{
async fn load_metrics(
&self,
dimensions: &[SdkEventDimensions],
_merchant_id: &str,
publishable_key: &str,
filters: &SdkEventFilters,
granularity: &Option<Granularity>,
time_range: &TimeRange,
pool: &T,
) -> MetricsResult<Vec<(SdkEventMetricsBucketIdentifier, SdkEventMetricRow)>> {
) -> MetricsResult<Vec<(AuthEventMetricsBucketIdentifier, AuthEventMetricRow)>> {
let mut query_builder: QueryBuilder<T> = QueryBuilder::new(AnalyticsCollection::SdkEvents);
let dimensions = dimensions.to_vec();
for dim in dimensions.iter() {
query_builder.add_select_column(dim).switch()?;
}
query_builder
.add_select_column(Aggregate::Count {
@ -56,22 +48,24 @@ where
.switch()?;
}
filters.set_filter_clause(&mut query_builder).switch()?;
query_builder
.add_filter_clause("merchant_id", publishable_key)
.switch()?;
query_builder
.add_bool_filter_clause("first_event", 1)
.switch()?;
query_builder
.add_filter_clause("event_name", SdkEventNames::AuthenticationCall)
.switch()?;
query_builder
.add_filter_clause("log_type", "ERROR")
.add_filter_clause("log_type", "INFO")
.switch()?;
query_builder
.add_filter_clause("category", "USER_EVENT")
.add_filter_clause("category", "API")
.switch()?;
time_range
@ -79,13 +73,6 @@ where
.attach_printable("Error filtering time range")
.switch()?;
for dim in dimensions.iter() {
query_builder
.add_group_by_clause(dim)
.attach_printable("Error grouping by dimensions")
.switch()?;
}
if let Some(_granularity) = granularity.as_ref() {
query_builder
.add_group_by_clause("time_bucket")
@ -94,27 +81,19 @@ where
}
query_builder
.execute_query::<SdkEventMetricRow, _>(pool)
.execute_query::<AuthEventMetricRow, _>(pool)
.await
.change_context(MetricsError::QueryBuildingError)?
.change_context(MetricsError::QueryExecutionFailure)?
.into_iter()
.map(|i| {
Ok((
SdkEventMetricsBucketIdentifier::new(
i.payment_method.clone(),
i.platform.clone(),
i.browser_name.clone(),
i.source.clone(),
i.component.clone(),
i.payment_experience.clone(),
i.time_bucket.clone(),
),
AuthEventMetricsBucketIdentifier::new(i.time_bucket.clone()),
i,
))
})
.collect::<error_stack::Result<
Vec<(SdkEventMetricsBucketIdentifier, SdkEventMetricRow)>,
Vec<(AuthEventMetricsBucketIdentifier, AuthEventMetricRow)>,
crate::query::PostProcessingError,
>>()
.change_context(MetricsError::PostProcessingFailure)

View File

@ -0,0 +1,90 @@
use api_models::analytics::{
auth_events::{AuthEventFlows, AuthEventMetricsBucketIdentifier},
Granularity, TimeRange,
};
use common_utils::errors::ReportSwitchExt;
use error_stack::ResultExt;
use time::PrimitiveDateTime;
use super::AuthEventMetricRow;
use crate::{
query::{Aggregate, GroupByClause, QueryBuilder, QueryFilter, ToSql, Window},
types::{AnalyticsCollection, AnalyticsDataSource, MetricsError, MetricsResult},
};
#[derive(Default)]
pub(super) struct ChallengeAttemptCount;
#[async_trait::async_trait]
impl<T> super::AuthEventMetric<T> for ChallengeAttemptCount
where
T: AnalyticsDataSource + super::AuthEventMetricAnalytics,
PrimitiveDateTime: ToSql<T>,
AnalyticsCollection: ToSql<T>,
Granularity: GroupByClause<T>,
Aggregate<&'static str>: ToSql<T>,
Window<&'static str>: ToSql<T>,
{
async fn load_metrics(
&self,
merchant_id: &str,
_publishable_key: &str,
granularity: &Option<Granularity>,
time_range: &TimeRange,
pool: &T,
) -> MetricsResult<Vec<(AuthEventMetricsBucketIdentifier, AuthEventMetricRow)>> {
let mut query_builder: QueryBuilder<T> =
QueryBuilder::new(AnalyticsCollection::ConnectorEventsAnalytics);
query_builder
.add_select_column(Aggregate::Count {
field: None,
alias: Some("count"),
})
.switch()?;
if let Some(granularity) = granularity.as_ref() {
query_builder
.add_granularity_in_mins(granularity)
.switch()?;
}
query_builder
.add_filter_clause("merchant_id", merchant_id)
.switch()?;
query_builder
.add_filter_clause("flow", AuthEventFlows::PostAuthentication)
.switch()?;
time_range
.set_filter_clause(&mut query_builder)
.attach_printable("Error filtering time range")
.switch()?;
if let Some(_granularity) = granularity.as_ref() {
query_builder
.add_group_by_clause("time_bucket")
.attach_printable("Error adding granularity")
.switch()?;
}
query_builder
.execute_query::<AuthEventMetricRow, _>(pool)
.await
.change_context(MetricsError::QueryBuildingError)?
.change_context(MetricsError::QueryExecutionFailure)?
.into_iter()
.map(|i| {
Ok((
AuthEventMetricsBucketIdentifier::new(i.time_bucket.clone()),
i,
))
})
.collect::<error_stack::Result<
Vec<(AuthEventMetricsBucketIdentifier, AuthEventMetricRow)>,
crate::query::PostProcessingError,
>>()
.change_context(MetricsError::PostProcessingFailure)
}
}
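
Note: ChallengeAttemptCount therefore counts connector events on the PostAuthentication flow for the merchant, optionally bucketed by time. The exact SQL emitted by QueryBuilder is not part of this diff, so the following is only an approximate sketch of the query shape (column names other than merchant_id, flow and time_bucket are assumptions):

// Approximate shape only; the real text comes from QueryBuilder/ToSql.
const APPROX_CHALLENGE_ATTEMPT_QUERY: &str = "\
    SELECT count(*) AS count, time_bucket \
    FROM connector_events \
    WHERE merchant_id = {merchant_id} \
      AND flow = 'PostAuthentication' \
      AND created_at BETWEEN {start_time} AND {end_time} \
    GROUP BY time_bucket";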

View File

@ -1,26 +1,24 @@
use api_models::analytics::{
sdk_events::{
SdkEventDimensions, SdkEventFilters, SdkEventMetricsBucketIdentifier, SdkEventNames,
},
Granularity, TimeRange,
auth_events::AuthEventMetricsBucketIdentifier, sdk_events::SdkEventNames, Granularity,
TimeRange,
};
use common_utils::errors::ReportSwitchExt;
use error_stack::ResultExt;
use time::PrimitiveDateTime;
use super::SdkEventMetricRow;
use super::AuthEventMetricRow;
use crate::{
query::{Aggregate, GroupByClause, QueryBuilder, QueryFilter, ToSql, Window},
types::{AnalyticsCollection, AnalyticsDataSource, MetricsError, MetricsResult},
};
#[derive(Default)]
pub(super) struct ThreeDsChallengeFlowCount;
pub(super) struct ChallengeFlowCount;
#[async_trait::async_trait]
impl<T> super::SdkEventMetric<T> for ThreeDsChallengeFlowCount
impl<T> super::AuthEventMetric<T> for ChallengeFlowCount
where
T: AnalyticsDataSource + super::SdkEventMetricAnalytics,
T: AnalyticsDataSource + super::AuthEventMetricAnalytics,
PrimitiveDateTime: ToSql<T>,
AnalyticsCollection: ToSql<T>,
Granularity: GroupByClause<T>,
@ -29,19 +27,13 @@ where
{
async fn load_metrics(
&self,
dimensions: &[SdkEventDimensions],
_merchant_id: &str,
publishable_key: &str,
filters: &SdkEventFilters,
granularity: &Option<Granularity>,
time_range: &TimeRange,
pool: &T,
) -> MetricsResult<Vec<(SdkEventMetricsBucketIdentifier, SdkEventMetricRow)>> {
) -> MetricsResult<Vec<(AuthEventMetricsBucketIdentifier, AuthEventMetricRow)>> {
let mut query_builder: QueryBuilder<T> = QueryBuilder::new(AnalyticsCollection::SdkEvents);
let dimensions = dimensions.to_vec();
for dim in dimensions.iter() {
query_builder.add_select_column(dim).switch()?;
}
query_builder
.add_select_column(Aggregate::Count {
@ -56,14 +48,16 @@ where
.switch()?;
}
filters.set_filter_clause(&mut query_builder).switch()?;
query_builder
.add_filter_clause("merchant_id", publishable_key)
.switch()?;
query_builder
.add_filter_clause("event_name", SdkEventNames::DisplayThreeDsSdk)
.add_bool_filter_clause("first_event", 1)
.switch()?;
query_builder
.add_filter_clause("category", "USER_EVENT")
.switch()?;
query_builder
@ -71,7 +65,7 @@ where
.switch()?;
query_builder
.add_filter_clause("category", "USER_EVENT")
.add_filter_clause("event_name", SdkEventNames::DisplayThreeDsSdk)
.switch()?;
query_builder.add_filter_clause("value", "C").switch()?;
@ -81,13 +75,6 @@ where
.attach_printable("Error filtering time range")
.switch()?;
for dim in dimensions.iter() {
query_builder
.add_group_by_clause(dim)
.attach_printable("Error grouping by dimensions")
.switch()?;
}
if let Some(_granularity) = granularity.as_ref() {
query_builder
.add_group_by_clause("time_bucket")
@ -96,27 +83,19 @@ where
}
query_builder
.execute_query::<SdkEventMetricRow, _>(pool)
.execute_query::<AuthEventMetricRow, _>(pool)
.await
.change_context(MetricsError::QueryBuildingError)?
.change_context(MetricsError::QueryExecutionFailure)?
.into_iter()
.map(|i| {
Ok((
SdkEventMetricsBucketIdentifier::new(
i.payment_method.clone(),
i.platform.clone(),
i.browser_name.clone(),
i.source.clone(),
i.component.clone(),
i.payment_experience.clone(),
i.time_bucket.clone(),
),
AuthEventMetricsBucketIdentifier::new(i.time_bucket.clone()),
i,
))
})
.collect::<error_stack::Result<
Vec<(SdkEventMetricsBucketIdentifier, SdkEventMetricRow)>,
Vec<(AuthEventMetricsBucketIdentifier, AuthEventMetricRow)>,
crate::query::PostProcessingError,
>>()
.change_context(MetricsError::PostProcessingFailure)

View File

@ -1,26 +1,24 @@
use api_models::analytics::{
sdk_events::{
SdkEventDimensions, SdkEventFilters, SdkEventMetricsBucketIdentifier, SdkEventNames,
},
auth_events::{AuthEventFlows, AuthEventMetricsBucketIdentifier},
Granularity, TimeRange,
};
use common_utils::errors::ReportSwitchExt;
use error_stack::ResultExt;
use time::PrimitiveDateTime;
use super::SdkEventMetricRow;
use super::AuthEventMetricRow;
use crate::{
query::{Aggregate, GroupByClause, QueryBuilder, QueryFilter, ToSql, Window},
types::{AnalyticsCollection, AnalyticsDataSource, MetricsError, MetricsResult},
};
#[derive(Default)]
pub(super) struct ThreeDsMethodUnsuccessfulCount;
pub(super) struct ChallengeSuccessCount;
#[async_trait::async_trait]
impl<T> super::SdkEventMetric<T> for ThreeDsMethodUnsuccessfulCount
impl<T> super::AuthEventMetric<T> for ChallengeSuccessCount
where
T: AnalyticsDataSource + super::SdkEventMetricAnalytics,
T: AnalyticsDataSource + super::AuthEventMetricAnalytics,
PrimitiveDateTime: ToSql<T>,
AnalyticsCollection: ToSql<T>,
Granularity: GroupByClause<T>,
@ -29,19 +27,14 @@ where
{
async fn load_metrics(
&self,
dimensions: &[SdkEventDimensions],
publishable_key: &str,
filters: &SdkEventFilters,
merchant_id: &str,
_publishable_key: &str,
granularity: &Option<Granularity>,
time_range: &TimeRange,
pool: &T,
) -> MetricsResult<Vec<(SdkEventMetricsBucketIdentifier, SdkEventMetricRow)>> {
let mut query_builder: QueryBuilder<T> = QueryBuilder::new(AnalyticsCollection::SdkEvents);
let dimensions = dimensions.to_vec();
for dim in dimensions.iter() {
query_builder.add_select_column(dim).switch()?;
}
) -> MetricsResult<Vec<(AuthEventMetricsBucketIdentifier, AuthEventMetricRow)>> {
let mut query_builder: QueryBuilder<T> =
QueryBuilder::new(AnalyticsCollection::ConnectorEventsAnalytics);
query_builder
.add_select_column(Aggregate::Count {
@ -56,22 +49,16 @@ where
.switch()?;
}
filters.set_filter_clause(&mut query_builder).switch()?;
query_builder
.add_filter_clause("merchant_id", publishable_key)
.add_filter_clause("merchant_id", merchant_id)
.switch()?;
query_builder
.add_filter_clause("event_name", SdkEventNames::ThreeDsMethodResult)
.add_filter_clause("flow", AuthEventFlows::PostAuthentication)
.switch()?;
query_builder
.add_filter_clause("log_type", "ERROR")
.switch()?;
query_builder
.add_filter_clause("category", "USER_EVENT")
.add_filter_clause("visitParamExtractRaw(response, 'transStatus')", "\"Y\"")
.switch()?;
time_range
@ -79,13 +66,6 @@ where
.attach_printable("Error filtering time range")
.switch()?;
for dim in dimensions.iter() {
query_builder
.add_group_by_clause(dim)
.attach_printable("Error grouping by dimensions")
.switch()?;
}
if let Some(_granularity) = granularity.as_ref() {
query_builder
.add_group_by_clause("time_bucket")
@ -94,27 +74,19 @@ where
}
query_builder
.execute_query::<SdkEventMetricRow, _>(pool)
.execute_query::<AuthEventMetricRow, _>(pool)
.await
.change_context(MetricsError::QueryBuildingError)?
.change_context(MetricsError::QueryExecutionFailure)?
.into_iter()
.map(|i| {
Ok((
SdkEventMetricsBucketIdentifier::new(
i.payment_method.clone(),
i.platform.clone(),
i.browser_name.clone(),
i.source.clone(),
i.component.clone(),
i.payment_experience.clone(),
i.time_bucket.clone(),
),
AuthEventMetricsBucketIdentifier::new(i.time_bucket.clone()),
i,
))
})
.collect::<error_stack::Result<
Vec<(SdkEventMetricsBucketIdentifier, SdkEventMetricRow)>,
Vec<(AuthEventMetricsBucketIdentifier, AuthEventMetricRow)>,
crate::query::PostProcessingError,
>>()
.change_context(MetricsError::PostProcessingFailure)

View File

@ -1,26 +1,24 @@
use api_models::analytics::{
sdk_events::{
SdkEventDimensions, SdkEventFilters, SdkEventMetricsBucketIdentifier, SdkEventNames,
},
Granularity, TimeRange,
auth_events::AuthEventMetricsBucketIdentifier, sdk_events::SdkEventNames, Granularity,
TimeRange,
};
use common_utils::errors::ReportSwitchExt;
use error_stack::ResultExt;
use time::PrimitiveDateTime;
use super::SdkEventMetricRow;
use super::AuthEventMetricRow;
use crate::{
query::{Aggregate, GroupByClause, QueryBuilder, QueryFilter, ToSql, Window},
types::{AnalyticsCollection, AnalyticsDataSource, MetricsError, MetricsResult},
};
#[derive(Default)]
pub(super) struct ThreeDsMethodSkippedCount;
pub(super) struct FrictionlessFlowCount;
#[async_trait::async_trait]
impl<T> super::SdkEventMetric<T> for ThreeDsMethodSkippedCount
impl<T> super::AuthEventMetric<T> for FrictionlessFlowCount
where
T: AnalyticsDataSource + super::SdkEventMetricAnalytics,
T: AnalyticsDataSource + super::AuthEventMetricAnalytics,
PrimitiveDateTime: ToSql<T>,
AnalyticsCollection: ToSql<T>,
Granularity: GroupByClause<T>,
@ -29,19 +27,13 @@ where
{
async fn load_metrics(
&self,
dimensions: &[SdkEventDimensions],
_merchant_id: &str,
publishable_key: &str,
filters: &SdkEventFilters,
granularity: &Option<Granularity>,
time_range: &TimeRange,
pool: &T,
) -> MetricsResult<Vec<(SdkEventMetricsBucketIdentifier, SdkEventMetricRow)>> {
) -> MetricsResult<Vec<(AuthEventMetricsBucketIdentifier, AuthEventMetricRow)>> {
let mut query_builder: QueryBuilder<T> = QueryBuilder::new(AnalyticsCollection::SdkEvents);
let dimensions = dimensions.to_vec();
for dim in dimensions.iter() {
query_builder.add_select_column(dim).switch()?;
}
query_builder
.add_select_column(Aggregate::Count {
@ -56,14 +48,16 @@ where
.switch()?;
}
filters.set_filter_clause(&mut query_builder).switch()?;
query_builder
.add_filter_clause("merchant_id", publishable_key)
.switch()?;
query_builder
.add_filter_clause("event_name", SdkEventNames::ThreeDsMethod)
.add_bool_filter_clause("first_event", 1)
.switch()?;
query_builder
.add_filter_clause("category", "USER_EVENT")
.switch()?;
query_builder
@ -71,23 +65,18 @@ where
.switch()?;
query_builder
.add_filter_clause("category", "USER_EVENT")
.add_filter_clause("event_name", SdkEventNames::DisplayThreeDsSdk)
.switch()?;
query_builder.add_filter_clause("value", "N").switch()?;
query_builder
.add_negative_filter_clause("value", "C")
.switch()?;
time_range
.set_filter_clause(&mut query_builder)
.attach_printable("Error filtering time range")
.switch()?;
for dim in dimensions.iter() {
query_builder
.add_group_by_clause(dim)
.attach_printable("Error grouping by dimensions")
.switch()?;
}
if let Some(_granularity) = granularity.as_ref() {
query_builder
.add_group_by_clause("time_bucket")
@ -96,27 +85,19 @@ where
}
query_builder
.execute_query::<SdkEventMetricRow, _>(pool)
.execute_query::<AuthEventMetricRow, _>(pool)
.await
.change_context(MetricsError::QueryBuildingError)?
.change_context(MetricsError::QueryExecutionFailure)?
.into_iter()
.map(|i| {
Ok((
SdkEventMetricsBucketIdentifier::new(
i.payment_method.clone(),
i.platform.clone(),
i.browser_name.clone(),
i.source.clone(),
i.component.clone(),
i.payment_experience.clone(),
i.time_bucket.clone(),
),
AuthEventMetricsBucketIdentifier::new(i.time_bucket.clone()),
i,
))
})
.collect::<error_stack::Result<
Vec<(SdkEventMetricsBucketIdentifier, SdkEventMetricRow)>,
Vec<(AuthEventMetricsBucketIdentifier, AuthEventMetricRow)>,
crate::query::PostProcessingError,
>>()
.change_context(MetricsError::PostProcessingFailure)

View File

@ -1,26 +1,24 @@
use api_models::analytics::{
sdk_events::{
SdkEventDimensions, SdkEventFilters, SdkEventMetricsBucketIdentifier, SdkEventNames,
},
Granularity, TimeRange,
auth_events::AuthEventMetricsBucketIdentifier, sdk_events::SdkEventNames, Granularity,
TimeRange,
};
use common_utils::errors::ReportSwitchExt;
use error_stack::ResultExt;
use time::PrimitiveDateTime;
use super::SdkEventMetricRow;
use super::AuthEventMetricRow;
use crate::{
query::{Aggregate, GroupByClause, QueryBuilder, QueryFilter, ToSql, Window},
types::{AnalyticsCollection, AnalyticsDataSource, MetricsError, MetricsResult},
};
#[derive(Default)]
pub(super) struct ThreeDsMethodSuccessfulCount;
pub(super) struct ThreeDsSdkCount;
#[async_trait::async_trait]
impl<T> super::SdkEventMetric<T> for ThreeDsMethodSuccessfulCount
impl<T> super::AuthEventMetric<T> for ThreeDsSdkCount
where
T: AnalyticsDataSource + super::SdkEventMetricAnalytics,
T: AnalyticsDataSource + super::AuthEventMetricAnalytics,
PrimitiveDateTime: ToSql<T>,
AnalyticsCollection: ToSql<T>,
Granularity: GroupByClause<T>,
@ -29,19 +27,13 @@ where
{
async fn load_metrics(
&self,
dimensions: &[SdkEventDimensions],
_merchant_id: &str,
publishable_key: &str,
filters: &SdkEventFilters,
granularity: &Option<Granularity>,
time_range: &TimeRange,
pool: &T,
) -> MetricsResult<Vec<(SdkEventMetricsBucketIdentifier, SdkEventMetricRow)>> {
) -> MetricsResult<Vec<(AuthEventMetricsBucketIdentifier, AuthEventMetricRow)>> {
let mut query_builder: QueryBuilder<T> = QueryBuilder::new(AnalyticsCollection::SdkEvents);
let dimensions = dimensions.to_vec();
for dim in dimensions.iter() {
query_builder.add_select_column(dim).switch()?;
}
query_builder
.add_select_column(Aggregate::Count {
@ -56,14 +48,16 @@ where
.switch()?;
}
filters.set_filter_clause(&mut query_builder).switch()?;
query_builder
.add_filter_clause("merchant_id", publishable_key)
.switch()?;
query_builder
.add_filter_clause("event_name", SdkEventNames::ThreeDsMethodResult)
.add_bool_filter_clause("first_event", 1)
.switch()?;
query_builder
.add_filter_clause("category", "USER_EVENT")
.switch()?;
query_builder
@ -71,7 +65,7 @@ where
.switch()?;
query_builder
.add_filter_clause("category", "USER_EVENT")
.add_filter_clause("event_name", SdkEventNames::ThreeDsMethod)
.switch()?;
time_range
@ -79,13 +73,6 @@ where
.attach_printable("Error filtering time range")
.switch()?;
for dim in dimensions.iter() {
query_builder
.add_group_by_clause(dim)
.attach_printable("Error grouping by dimensions")
.switch()?;
}
if let Some(_granularity) = granularity.as_ref() {
query_builder
.add_group_by_clause("time_bucket")
@ -94,27 +81,19 @@ where
}
query_builder
.execute_query::<SdkEventMetricRow, _>(pool)
.execute_query::<AuthEventMetricRow, _>(pool)
.await
.change_context(MetricsError::QueryBuildingError)?
.change_context(MetricsError::QueryExecutionFailure)?
.into_iter()
.map(|i| {
Ok((
SdkEventMetricsBucketIdentifier::new(
i.payment_method.clone(),
i.platform.clone(),
i.browser_name.clone(),
i.source.clone(),
i.component.clone(),
i.payment_experience.clone(),
i.time_bucket.clone(),
),
AuthEventMetricsBucketIdentifier::new(i.time_bucket.clone()),
i,
))
})
.collect::<error_stack::Result<
Vec<(SdkEventMetricsBucketIdentifier, SdkEventMetricRow)>,
Vec<(AuthEventMetricsBucketIdentifier, AuthEventMetricRow)>,
crate::query::PostProcessingError,
>>()
.change_context(MetricsError::PostProcessingFailure)

View File

@ -7,6 +7,7 @@ use router_env::logger;
use time::PrimitiveDateTime;
use super::{
auth_events::metrics::AuthEventMetricRow,
health_check::HealthCheck,
payments::{
distribution::PaymentDistributionRow, filters::FilterRow, metrics::PaymentMetricRow,
@ -132,10 +133,11 @@ impl AnalyticsDataSource for ClickhouseClient {
| AnalyticsCollection::Dispute => {
TableEngine::CollapsingMergeTree { sign: "sign_flag" }
}
AnalyticsCollection::SdkEvents => TableEngine::BasicTree,
AnalyticsCollection::ApiEvents => TableEngine::BasicTree,
AnalyticsCollection::ConnectorEvents => TableEngine::BasicTree,
AnalyticsCollection::OutgoingWebhookEvent => TableEngine::BasicTree,
AnalyticsCollection::SdkEvents
| AnalyticsCollection::ApiEvents
| AnalyticsCollection::ConnectorEvents
| AnalyticsCollection::ConnectorEventsAnalytics
| AnalyticsCollection::OutgoingWebhookEvent => TableEngine::BasicTree,
}
}
}
@ -158,6 +160,7 @@ impl super::refunds::filters::RefundFilterAnalytics for ClickhouseClient {}
impl super::sdk_events::filters::SdkEventFilterAnalytics for ClickhouseClient {}
impl super::sdk_events::metrics::SdkEventMetricAnalytics for ClickhouseClient {}
impl super::sdk_events::events::SdkEventsFilterAnalytics for ClickhouseClient {}
impl super::auth_events::metrics::AuthEventMetricAnalytics for ClickhouseClient {}
impl super::api_event::events::ApiLogsFilterAnalytics for ClickhouseClient {}
impl super::api_event::filters::ApiEventFilterAnalytics for ClickhouseClient {}
impl super::api_event::metrics::ApiEventMetricAnalytics for ClickhouseClient {}
@ -320,6 +323,16 @@ impl TryInto<SdkEventFilter> for serde_json::Value {
}
}
impl TryInto<AuthEventMetricRow> for serde_json::Value {
type Error = Report<ParsingError>;
fn try_into(self) -> Result<AuthEventMetricRow, Self::Error> {
serde_json::from_value(self).change_context(ParsingError::StructParseFailure(
"Failed to parse AuthEventMetricRow in clickhouse results",
))
}
}
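
Note: this conversion is the deserialization glue between raw ClickHouse result rows (arriving as serde_json::Value) and AuthEventMetricRow. A minimal standalone sketch of the same step, using a local copy of the row struct from the metrics module above:

use serde::Deserialize; // assumes serde with the derive feature, plus serde_json

#[derive(Debug, Deserialize)]
struct AuthEventMetricRow {
    count: Option<i64>,
    time_bucket: Option<String>,
}

fn main() {
    // A row as it might come back from the analytics store.
    let raw = serde_json::json!({ "count": 17, "time_bucket": "2024-05-01 00:00:00" });
    let row: AuthEventMetricRow = serde_json::from_value(raw).expect("row should parse");
    assert_eq!(row.count, Some(17));
    assert_eq!(row.time_bucket.as_deref(), Some("2024-05-01 00:00:00"));
}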
impl TryInto<ApiEventFilter> for serde_json::Value {
type Error = Report<ParsingError>;
@ -361,6 +374,7 @@ impl ToSql<ClickhouseClient> for AnalyticsCollection {
Self::Refund => Ok("refunds".to_string()),
Self::SdkEvents => Ok("sdk_events_audit".to_string()),
Self::ApiEvents => Ok("api_events_audit".to_string()),
Self::ConnectorEventsAnalytics => Ok("connector_events".to_string()),
Self::PaymentIntent => Ok("payment_intents".to_string()),
Self::ConnectorEvents => Ok("connector_events_audit".to_string()),
Self::OutgoingWebhookEvent => Ok("outgoing_webhook_events_audit".to_string()),
@ -423,6 +437,20 @@ where
alias.map_or_else(|| "".to_owned(), |alias| format!(" as {}", alias))
)
}
Self::Percentile {
field,
alias,
percentile,
} => {
format!(
"quantilesExact(0.{})({})[1]{}",
percentile.map_or_else(|| "50".to_owned(), |percentile| percentile.to_string()),
field
.to_sql(table_engine)
.attach_printable("Failed to percentile aggregate")?,
alias.map_or_else(|| "".to_owned(), |alias| format!(" as {}", alias))
)
}
})
}
}
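
Note: the new Percentile arm renders a ClickHouse quantilesExact call and indexes its single-element result array. A standalone sketch of the same format! logic (simplified to Option<u8> instead of Option<&'static u8>) shows the fragment it should produce for a 50th-percentile latency, which is how the SDK LoadTime and AveragePaymentTime metrics further below use it:

fn main() {
    let percentile: Option<u8> = Some(50);
    let field = "latency";
    let alias: Option<&str> = Some("count");

    let fragment = format!(
        "quantilesExact(0.{})({})[1]{}",
        percentile.map_or_else(|| "50".to_owned(), |p| p.to_string()),
        field,
        alias.map_or_else(|| "".to_owned(), |a| format!(" as {}", a)),
    );

    assert_eq!(fragment, "quantilesExact(0.50)(latency)[1] as count");
}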

View File

@ -21,6 +21,11 @@ pub async fn get_domain_info(
download_dimensions: None,
dimensions: utils::get_sdk_event_dimensions(),
},
AnalyticsDomain::AuthEvents => GetInfoResponse {
metrics: utils::get_auth_event_metrics_info(),
download_dimensions: None,
dimensions: Vec::new(),
},
AnalyticsDomain::ApiEvents => GetInfoResponse {
metrics: utils::get_api_event_metrics_info(),
download_dimensions: None,

View File

@ -8,6 +8,7 @@ mod query;
pub mod refunds;
pub mod api_event;
pub mod auth_events;
pub mod connector_events;
pub mod health_check;
pub mod opensearch;
@ -34,6 +35,7 @@ use api_models::analytics::{
api_event::{
ApiEventDimensions, ApiEventFilters, ApiEventMetrics, ApiEventMetricsBucketIdentifier,
},
auth_events::{AuthEventMetrics, AuthEventMetricsBucketIdentifier},
disputes::{DisputeDimensions, DisputeFilters, DisputeMetrics, DisputeMetricsBucketIdentifier},
payments::{PaymentDimensions, PaymentFilters, PaymentMetrics, PaymentMetricsBucketIdentifier},
refunds::{RefundDimensions, RefundFilters, RefundMetrics, RefundMetricsBucketIdentifier},
@ -54,6 +56,7 @@ use storage_impl::config::Database;
use strum::Display;
use self::{
auth_events::metrics::{AuthEventMetric, AuthEventMetricRow},
payments::{
distribution::{PaymentDistribution, PaymentDistributionRow},
metrics::{PaymentMetric, PaymentMetricRow},
@ -539,6 +542,36 @@ impl AnalyticsProvider {
}
}
pub async fn get_auth_event_metrics(
&self,
metric: &AuthEventMetrics,
merchant_id: &str,
publishable_key: &str,
granularity: &Option<Granularity>,
time_range: &TimeRange,
) -> types::MetricsResult<Vec<(AuthEventMetricsBucketIdentifier, AuthEventMetricRow)>> {
match self {
Self::Sqlx(_pool) => Err(report!(MetricsError::NotImplemented)),
Self::Clickhouse(pool) => {
metric
.load_metrics(merchant_id, publishable_key, granularity, time_range, pool)
.await
}
Self::CombinedCkh(_sqlx_pool, ckh_pool) | Self::CombinedSqlx(_sqlx_pool, ckh_pool) => {
metric
.load_metrics(
merchant_id,
publishable_key,
granularity,
// Auth event metrics are ClickHouse-only, so use the ckh pool here
time_range,
ckh_pool,
)
.await
}
}
}
pub async fn get_api_event_metrics(
&self,
metric: &ApiEventMetrics,
@ -681,6 +714,7 @@ pub enum AnalyticsFlow {
GetPaymentMetrics,
GetRefundsMetrics,
GetSdkMetrics,
GetAuthMetrics,
GetPaymentFilters,
GetRefundFilters,
GetSdkEventFilters,

View File

@ -4,6 +4,7 @@ use api_models::{
analytics::{
self as analytics_api,
api_event::ApiEventDimensions,
auth_events::AuthEventFlows,
disputes::DisputeDimensions,
payments::{PaymentDimensions, PaymentDistributions},
refunds::{RefundDimensions, RefundType},
@ -247,6 +248,11 @@ pub enum Aggregate<R> {
field: R,
alias: Option<&'static str>,
},
Percentile {
field: R,
alias: Option<&'static str>,
percentile: Option<&'static u8>,
},
}
// Window functions in query
@ -379,11 +385,17 @@ impl_to_sql_for_to_string!(
Order
);
impl_to_sql_for_to_string!(&SdkEventDimensions, SdkEventDimensions, SdkEventNames);
impl_to_sql_for_to_string!(&ApiEventDimensions, ApiEventDimensions);
impl_to_sql_for_to_string!(&DisputeDimensions, DisputeDimensions, DisputeStage);
impl_to_sql_for_to_string!(
&SdkEventDimensions,
SdkEventDimensions,
SdkEventNames,
AuthEventFlows,
&ApiEventDimensions,
ApiEventDimensions,
&DisputeDimensions,
DisputeDimensions,
DisputeStage
);
#[derive(Debug)]
pub enum FilterTypes {
@ -507,6 +519,14 @@ where
self.add_custom_filter_clause(key, value, FilterTypes::EqualBool)
}
pub fn add_negative_filter_clause(
&mut self,
key: impl ToSql<T>,
value: impl ToSql<T>,
) -> QueryResult<()> {
self.add_custom_filter_clause(key, value, FilterTypes::NotEqual)
}
pub fn add_custom_filter_clause(
&mut self,
lhs: impl ToSql<T>,

View File

@ -7,18 +7,12 @@ use super::metrics::SdkEventMetricRow;
pub struct SdkEventMetricsAccumulator {
pub payment_attempts: CountAccumulator,
pub payment_methods_call_count: CountAccumulator,
pub average_payment_time: AverageAccumulator,
pub average_payment_time: CountAccumulator,
pub load_time: CountAccumulator,
pub sdk_initiated_count: CountAccumulator,
pub sdk_rendered_count: CountAccumulator,
pub payment_method_selected_count: CountAccumulator,
pub payment_data_filled_count: CountAccumulator,
pub three_ds_method_invoked_count: CountAccumulator,
pub three_ds_method_skipped_count: CountAccumulator,
pub three_ds_method_successful_count: CountAccumulator,
pub three_ds_method_unsuccessful_count: CountAccumulator,
pub authentication_unsuccessful_count: CountAccumulator,
pub three_ds_challenge_flow_count: CountAccumulator,
pub three_ds_frictionless_flow_count: CountAccumulator,
}
#[derive(Debug, Default)]
@ -94,17 +88,11 @@ impl SdkEventMetricsAccumulator {
payment_attempts: self.payment_attempts.collect(),
payment_methods_call_count: self.payment_methods_call_count.collect(),
average_payment_time: self.average_payment_time.collect(),
load_time: self.load_time.collect(),
sdk_initiated_count: self.sdk_initiated_count.collect(),
sdk_rendered_count: self.sdk_rendered_count.collect(),
payment_method_selected_count: self.payment_method_selected_count.collect(),
payment_data_filled_count: self.payment_data_filled_count.collect(),
three_ds_method_invoked_count: self.three_ds_method_invoked_count.collect(),
three_ds_method_skipped_count: self.three_ds_method_skipped_count.collect(),
three_ds_method_successful_count: self.three_ds_method_successful_count.collect(),
three_ds_method_unsuccessful_count: self.three_ds_method_unsuccessful_count.collect(),
authentication_unsuccessful_count: self.authentication_unsuccessful_count.collect(),
three_ds_challenge_flow_count: self.three_ds_challenge_flow_count.collect(),
three_ds_frictionless_flow_count: self.three_ds_frictionless_flow_count.collect(),
}
}
}

View File

@ -106,27 +106,9 @@ pub async fn get_metrics(
SdkEventMetrics::AveragePaymentTime => metrics_builder
.average_payment_time
.add_metrics_bucket(&value),
SdkEventMetrics::ThreeDsMethodInvokedCount => metrics_builder
.three_ds_method_invoked_count
.add_metrics_bucket(&value),
SdkEventMetrics::ThreeDsMethodSkippedCount => metrics_builder
.three_ds_method_skipped_count
.add_metrics_bucket(&value),
SdkEventMetrics::ThreeDsMethodSuccessfulCount => metrics_builder
.three_ds_method_successful_count
.add_metrics_bucket(&value),
SdkEventMetrics::ThreeDsMethodUnsuccessfulCount => metrics_builder
.three_ds_method_unsuccessful_count
.add_metrics_bucket(&value),
SdkEventMetrics::AuthenticationUnsuccessfulCount => metrics_builder
.authentication_unsuccessful_count
.add_metrics_bucket(&value),
SdkEventMetrics::ThreeDsChallengeFlowCount => metrics_builder
.three_ds_challenge_flow_count
.add_metrics_bucket(&value),
SdkEventMetrics::ThreeDsFrictionlessFlowCount => metrics_builder
.three_ds_frictionless_flow_count
.add_metrics_bucket(&value),
SdkEventMetrics::LoadTime => {
metrics_builder.load_time.add_metrics_bucket(&value)
}
}
}

View File

@ -11,35 +11,23 @@ use crate::{
types::{AnalyticsCollection, AnalyticsDataSource, LoadRow, MetricsResult},
};
mod authentication_unsuccessful_count;
mod average_payment_time;
mod load_time;
mod payment_attempts;
mod payment_data_filled_count;
mod payment_method_selected_count;
mod payment_methods_call_count;
mod sdk_initiated_count;
mod sdk_rendered_count;
mod three_ds_challenge_flow_count;
mod three_ds_frictionless_flow_count;
mod three_ds_method_invoked_count;
mod three_ds_method_skipped_count;
mod three_ds_method_successful_count;
mod three_ds_method_unsuccessful_count;
use authentication_unsuccessful_count::AuthenticationUnsuccessfulCount;
use average_payment_time::AveragePaymentTime;
use load_time::LoadTime;
use payment_attempts::PaymentAttempts;
use payment_data_filled_count::PaymentDataFilledCount;
use payment_method_selected_count::PaymentMethodSelectedCount;
use payment_methods_call_count::PaymentMethodsCallCount;
use sdk_initiated_count::SdkInitiatedCount;
use sdk_rendered_count::SdkRenderedCount;
use three_ds_challenge_flow_count::ThreeDsChallengeFlowCount;
use three_ds_frictionless_flow_count::ThreeDsFrictionlessFlowCount;
use three_ds_method_invoked_count::ThreeDsMethodInvokedCount;
use three_ds_method_skipped_count::ThreeDsMethodSkippedCount;
use three_ds_method_successful_count::ThreeDsMethodSuccessfulCount;
use three_ds_method_unsuccessful_count::ThreeDsMethodUnsuccessfulCount;
#[derive(Debug, PartialEq, Eq, serde::Deserialize)]
pub struct SdkEventMetricRow {
@ -176,80 +164,8 @@ where
)
.await
}
Self::ThreeDsMethodSkippedCount => {
ThreeDsMethodSkippedCount
.load_metrics(
dimensions,
publishable_key,
filters,
granularity,
time_range,
pool,
)
.await
}
Self::ThreeDsMethodInvokedCount => {
ThreeDsMethodInvokedCount
.load_metrics(
dimensions,
publishable_key,
filters,
granularity,
time_range,
pool,
)
.await
}
Self::ThreeDsMethodSuccessfulCount => {
ThreeDsMethodSuccessfulCount
.load_metrics(
dimensions,
publishable_key,
filters,
granularity,
time_range,
pool,
)
.await
}
Self::ThreeDsMethodUnsuccessfulCount => {
ThreeDsMethodUnsuccessfulCount
.load_metrics(
dimensions,
publishable_key,
filters,
granularity,
time_range,
pool,
)
.await
}
Self::AuthenticationUnsuccessfulCount => {
AuthenticationUnsuccessfulCount
.load_metrics(
dimensions,
publishable_key,
filters,
granularity,
time_range,
pool,
)
.await
}
Self::ThreeDsChallengeFlowCount => {
ThreeDsChallengeFlowCount
.load_metrics(
dimensions,
publishable_key,
filters,
granularity,
time_range,
pool,
)
.await
}
Self::ThreeDsFrictionlessFlowCount => {
ThreeDsFrictionlessFlowCount
Self::LoadTime => {
LoadTime
.load_metrics(
dimensions,
publishable_key,

View File

@ -44,16 +44,10 @@ where
}
query_builder
.add_select_column(Aggregate::Count {
field: None,
alias: Some("count"),
})
.switch()?;
query_builder
.add_select_column(Aggregate::Sum {
.add_select_column(Aggregate::Percentile {
field: "latency",
alias: Some("total"),
alias: Some("count"),
percentile: Some(&50),
})
.switch()?;

View File

@ -15,10 +15,10 @@ use crate::{
};
#[derive(Default)]
pub(super) struct ThreeDsFrictionlessFlowCount;
pub(super) struct LoadTime;
#[async_trait::async_trait]
impl<T> super::SdkEventMetric<T> for ThreeDsFrictionlessFlowCount
impl<T> super::SdkEventMetric<T> for LoadTime
where
T: AnalyticsDataSource + super::SdkEventMetricAnalytics,
PrimitiveDateTime: ToSql<T>,
@ -44,9 +44,10 @@ where
}
query_builder
.add_select_column(Aggregate::Count {
field: None,
.add_select_column(Aggregate::Percentile {
field: "latency",
alias: Some("count"),
percentile: Some(&50),
})
.switch()?;
@ -63,19 +64,15 @@ where
.switch()?;
query_builder
.add_filter_clause("event_name", SdkEventNames::DisplayThreeDsSdk)
.add_bool_filter_clause("first_event", 1)
.switch()?;
query_builder
.add_filter_clause("log_type", "INFO")
.add_filter_clause("event_name", SdkEventNames::AppRendered)
.switch()?;
query_builder
.add_filter_clause("category", "USER_EVENT")
.switch()?;
query_builder
.add_custom_filter_clause("value", "C", FilterTypes::NotEqual)
.add_custom_filter_clause("latency", 0, FilterTypes::Gt)
.switch()?;
time_range

View File

@ -515,8 +515,10 @@ impl ToSql<SqlxClient> for AnalyticsCollection {
Self::ApiEvents => Err(error_stack::report!(ParsingError::UnknownError)
.attach_printable("ApiEvents table is not implemented for Sqlx"))?,
Self::PaymentIntent => Ok("payment_intent".to_string()),
Self::ConnectorEvents => Err(error_stack::report!(ParsingError::UnknownError)
.attach_printable("ConnectorEvents table is not implemented for Sqlx"))?,
Self::ConnectorEvents | Self::ConnectorEventsAnalytics => {
Err(error_stack::report!(ParsingError::UnknownError)
.attach_printable("ConnectorEvents table is not implemented for Sqlx"))?
}
Self::OutgoingWebhookEvent => Err(error_stack::report!(ParsingError::UnknownError)
.attach_printable("OutgoingWebhookEvents table is not implemented for Sqlx"))?,
Self::Dispute => Ok("dispute".to_string()),
@ -563,6 +565,20 @@ where
alias.map_or_else(|| "".to_owned(), |alias| format!(" as {}", alias))
)
}
Self::Percentile {
field,
alias,
percentile,
} => {
format!(
"percentile_cont(0.{}) within group (order by {} asc){}",
percentile.map_or_else(|| "50".to_owned(), |percentile| percentile.to_string()),
field
.to_sql(table_engine)
.attach_printable("Failed to percentile aggregate")?,
alias.map_or_else(|| "".to_owned(), |alias| format!(" as {}", alias))
)
}
})
}
}
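
Note: the SQLx (Postgres) counterpart renders the same aggregate with percentile_cont; a quick sketch of the resulting fragment, under the same simplifying assumptions as the ClickHouse example above:

fn main() {
    let percentile: Option<u8> = Some(50);
    let fragment = format!(
        "percentile_cont(0.{}) within group (order by {} asc){}",
        percentile.map_or_else(|| "50".to_owned(), |p| p.to_string()),
        "latency",
        " as count",
    );
    assert_eq!(
        fragment,
        "percentile_cont(0.50) within group (order by latency asc) as count"
    );
}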

View File

@ -15,6 +15,7 @@ use crate::errors::AnalyticsError;
pub enum AnalyticsDomain {
Payments,
Refunds,
AuthEvents,
SdkEvents,
ApiEvents,
Dispute,
@ -30,6 +31,7 @@ pub enum AnalyticsCollection {
ConnectorEvents,
OutgoingWebhookEvent,
Dispute,
ConnectorEventsAnalytics,
}
#[allow(dead_code)]

View File

@ -1,5 +1,6 @@
use api_models::analytics::{
api_event::{ApiEventDimensions, ApiEventMetrics},
auth_events::AuthEventMetrics,
disputes::{DisputeDimensions, DisputeMetrics},
payments::{PaymentDimensions, PaymentMetrics},
refunds::{RefundDimensions, RefundMetrics},
@ -36,6 +37,10 @@ pub fn get_sdk_event_metrics_info() -> Vec<NameDescription> {
SdkEventMetrics::iter().map(Into::into).collect()
}
pub fn get_auth_event_metrics_info() -> Vec<NameDescription> {
AuthEventMetrics::iter().map(Into::into).collect()
}
pub fn get_api_event_metrics_info() -> Vec<NameDescription> {
ApiEventMetrics::iter().map(Into::into).collect()
}

View File

@ -5,6 +5,7 @@ use masking::Secret;
use self::{
api_event::{ApiEventDimensions, ApiEventMetrics},
auth_events::AuthEventMetrics,
disputes::{DisputeDimensions, DisputeMetrics},
payments::{PaymentDimensions, PaymentDistributions, PaymentMetrics},
refunds::{RefundDimensions, RefundMetrics},
@ -13,6 +14,7 @@ use self::{
pub use crate::payments::TimeRange;
pub mod api_event;
pub mod auth_events;
pub mod connector_events;
pub mod disputes;
pub mod outgoing_webhook_event;
@ -138,6 +140,17 @@ pub struct GetSdkEventMetricRequest {
pub delta: bool,
}
#[derive(Clone, Debug, serde::Deserialize, serde::Serialize)]
#[serde(rename_all = "camelCase")]
pub struct GetAuthEventMetricRequest {
pub time_series: Option<TimeSeries>,
pub time_range: TimeRange,
#[serde(default)]
pub metrics: HashSet<AuthEventMetrics>,
#[serde(default)]
pub delta: bool,
}
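
Note: the metrics/auth_events route added later in this commit takes a one-element JSON array of this struct. A hypothetical request body, for illustration only (assumptions: the shared TimeRange type serializes camelCase startTime/endTime as ISO-8601 strings, and metric names follow the snake_case serde attribute on AuthEventMetrics):

// Hypothetical example payload, not taken from the repository's tests.
const EXAMPLE_AUTH_EVENT_METRIC_REQUEST: &str = r#"[
  {
    "timeRange": { "startTime": "2024-05-01T00:00:00Z", "endTime": "2024-05-08T00:00:00Z" },
    "metrics": ["three_ds_sdk_count", "authentication_success_count", "challenge_flow_count"],
    "delta": false
  }
]"#;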
#[derive(Debug, serde::Serialize)]
pub struct AnalyticsMetadata {
pub current_time_range: TimeRange,

View File

@ -0,0 +1,111 @@
use std::{
collections::hash_map::DefaultHasher,
hash::{Hash, Hasher},
};
use super::NameDescription;
#[derive(
Clone,
Debug,
Hash,
PartialEq,
Eq,
serde::Serialize,
serde::Deserialize,
strum::Display,
strum::EnumIter,
strum::AsRefStr,
)]
#[strum(serialize_all = "snake_case")]
#[serde(rename_all = "snake_case")]
pub enum AuthEventMetrics {
ThreeDsSdkCount,
AuthenticationAttemptCount,
AuthenticationSuccessCount,
ChallengeFlowCount,
FrictionlessFlowCount,
ChallengeAttemptCount,
ChallengeSuccessCount,
}
#[derive(
Clone,
Debug,
Hash,
PartialEq,
Eq,
serde::Serialize,
serde::Deserialize,
strum::Display,
strum::EnumIter,
strum::AsRefStr,
)]
pub enum AuthEventFlows {
PostAuthentication,
}
pub mod metric_behaviour {
pub struct ThreeDsSdkCount;
pub struct AuthenticationAttemptCount;
pub struct AuthenticationSuccessCount;
pub struct ChallengeFlowCount;
pub struct FrictionlessFlowCount;
pub struct ChallengeAttemptCount;
pub struct ChallengeSuccessCount;
}
impl From<AuthEventMetrics> for NameDescription {
fn from(value: AuthEventMetrics) -> Self {
Self {
name: value.to_string(),
desc: String::new(),
}
}
}
#[derive(Debug, serde::Serialize, Eq)]
pub struct AuthEventMetricsBucketIdentifier {
pub time_bucket: Option<String>,
}
impl AuthEventMetricsBucketIdentifier {
pub fn new(time_bucket: Option<String>) -> Self {
Self { time_bucket }
}
}
impl Hash for AuthEventMetricsBucketIdentifier {
fn hash<H: Hasher>(&self, state: &mut H) {
self.time_bucket.hash(state);
}
}
impl PartialEq for AuthEventMetricsBucketIdentifier {
fn eq(&self, other: &Self) -> bool {
let mut left = DefaultHasher::new();
self.hash(&mut left);
let mut right = DefaultHasher::new();
other.hash(&mut right);
left.finish() == right.finish()
}
}
#[derive(Debug, serde::Serialize)]
pub struct AuthEventMetricsBucketValue {
pub three_ds_sdk_count: Option<u64>,
pub authentication_attempt_count: Option<u64>,
pub authentication_success_count: Option<u64>,
pub challenge_flow_count: Option<u64>,
pub challenge_attempt_count: Option<u64>,
pub challenge_success_count: Option<u64>,
pub frictionless_flow_count: Option<u64>,
}
#[derive(Debug, serde::Serialize)]
pub struct MetricsBucketResponse {
#[serde(flatten)]
pub values: AuthEventMetricsBucketValue,
#[serde(flatten)]
pub dimensions: AuthEventMetricsBucketIdentifier,
}
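
Note: because both fields of MetricsBucketResponse are marked #[serde(flatten)], each bucket serializes as one flat JSON object containing the counts plus the time_bucket. A minimal standalone sketch (mirroring the structs above with only two of the counts):

use serde::Serialize; // assumes serde with the derive feature, plus serde_json

#[derive(Serialize)]
struct BucketValue {
    three_ds_sdk_count: Option<u64>,
    authentication_success_count: Option<u64>,
}

#[derive(Serialize)]
struct BucketIdentifier {
    time_bucket: Option<String>,
}

#[derive(Serialize)]
struct BucketResponse {
    #[serde(flatten)]
    values: BucketValue,
    #[serde(flatten)]
    dimensions: BucketIdentifier,
}

fn main() {
    let bucket = BucketResponse {
        values: BucketValue {
            three_ds_sdk_count: Some(12),
            authentication_success_count: Some(9),
        },
        dimensions: BucketIdentifier {
            time_bucket: Some("2024-05-01 00:00:00".to_string()),
        },
    };
    // e.g. {"three_ds_sdk_count":12,"authentication_success_count":9,"time_bucket":"2024-05-01 00:00:00"}
    println!("{}", serde_json::to_string(&bucket).unwrap());
}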

View File

@ -72,18 +72,12 @@ pub enum SdkEventDimensions {
pub enum SdkEventMetrics {
PaymentAttempts,
PaymentMethodsCallCount,
ThreeDsMethodInvokedCount,
ThreeDsMethodSkippedCount,
ThreeDsMethodSuccessfulCount,
ThreeDsMethodUnsuccessfulCount,
AuthenticationUnsuccessfulCount,
ThreeDsChallengeFlowCount,
ThreeDsFrictionlessFlowCount,
SdkRenderedCount,
SdkInitiatedCount,
PaymentMethodSelectedCount,
PaymentDataFilledCount,
AveragePaymentTime,
LoadTime,
}
#[derive(
@ -114,6 +108,7 @@ pub enum SdkEventNames {
DisplayBankTransferInfoPage,
DisplayQrCodeInfoPage,
AuthenticationCall,
AuthenticationCallInit,
ThreeDsMethodCall,
ThreeDsMethodResult,
ThreeDsMethod,
@ -124,18 +119,12 @@ pub enum SdkEventNames {
pub mod metric_behaviour {
pub struct PaymentAttempts;
pub struct PaymentMethodsCallCount;
pub struct ThreeDsMethodInvokedCount;
pub struct ThreeDsMethodSkippedCount;
pub struct ThreeDsMethodSuccessfulCount;
pub struct ThreeDsMethodUnsuccessfulCount;
pub struct AuthenticationUnsuccessfulCount;
pub struct ThreeDsChallengeFlowCount;
pub struct ThreeDsFrictionlessFlowCount;
pub struct SdkRenderedCount;
pub struct SdkInitiatedCount;
pub struct PaymentMethodSelectedCount;
pub struct PaymentDataFilledCount;
pub struct AveragePaymentTime;
pub struct LoadTime;
}
impl From<SdkEventMetrics> for NameDescription {
@ -215,18 +204,12 @@ impl PartialEq for SdkEventMetricsBucketIdentifier {
pub struct SdkEventMetricsBucketValue {
pub payment_attempts: Option<u64>,
pub payment_methods_call_count: Option<u64>,
pub average_payment_time: Option<f64>,
pub average_payment_time: Option<u64>,
pub load_time: Option<u64>,
pub sdk_rendered_count: Option<u64>,
pub sdk_initiated_count: Option<u64>,
pub payment_method_selected_count: Option<u64>,
pub payment_data_filled_count: Option<u64>,
pub three_ds_method_invoked_count: Option<u64>,
pub three_ds_method_skipped_count: Option<u64>,
pub three_ds_method_successful_count: Option<u64>,
pub three_ds_method_unsuccessful_count: Option<u64>,
pub authentication_unsuccessful_count: Option<u64>,
pub three_ds_challenge_flow_count: Option<u64>,
pub three_ds_frictionless_flow_count: Option<u64>,
}
#[derive(Debug, serde::Serialize)]

View File

@ -22,7 +22,7 @@ use common_utils::{
use crate::{
admin::*,
analytics::{
api_event::*, connector_events::ConnectorEventsRequest,
api_event::*, auth_events::*, connector_events::ConnectorEventsRequest,
outgoing_webhook_event::OutgoingWebhookLogsRequest, sdk_events::*, search::*, *,
},
api_keys::*,
@ -84,6 +84,7 @@ impl_misc_api_event_type!(
GetPaymentMetricRequest,
GetRefundMetricRequest,
GetSdkEventMetricRequest,
GetAuthEventMetricRequest,
GetPaymentFiltersRequest,
PaymentFiltersResponse,
GetRefundFilterRequest,

View File

@ -13,9 +13,9 @@ pub mod routes {
GetGlobalSearchRequest, GetSearchRequest, GetSearchRequestWithIndex, SearchIndex,
},
GenerateReportRequest, GetApiEventFiltersRequest, GetApiEventMetricRequest,
GetDisputeMetricRequest, GetPaymentFiltersRequest, GetPaymentMetricRequest,
GetRefundFilterRequest, GetRefundMetricRequest, GetSdkEventFiltersRequest,
GetSdkEventMetricRequest, ReportRequest,
GetAuthEventMetricRequest, GetDisputeMetricRequest, GetPaymentFiltersRequest,
GetPaymentMetricRequest, GetRefundFilterRequest, GetRefundMetricRequest,
GetSdkEventFiltersRequest, GetSdkEventMetricRequest, ReportRequest,
};
use error_stack::ResultExt;
@ -74,6 +74,10 @@ pub mod routes {
web::resource("filters/sdk_events")
.route(web::post().to(get_sdk_event_filters)),
)
.service(
web::resource("metrics/auth_events")
.route(web::post().to(get_auth_event_metrics)),
)
.service(web::resource("api_event_logs").route(web::get().to(get_api_events)))
.service(web::resource("sdk_event_logs").route(web::post().to(get_sdk_events)))
.service(
@ -241,6 +245,43 @@ pub mod routes {
.await
}
/// # Panics
///
/// Panics if `json_payload` array does not contain one `GetAuthEventMetricRequest` element.
pub async fn get_auth_event_metrics(
state: web::Data<AppState>,
req: actix_web::HttpRequest,
json_payload: web::Json<[GetAuthEventMetricRequest; 1]>,
) -> impl Responder {
// safety: This shouldn't panic owing to the data type
#[allow(clippy::expect_used)]
let payload = json_payload
.into_inner()
.to_vec()
.pop()
.expect("Couldn't get GetAuthEventMetricRequest");
let flow = AnalyticsFlow::GetAuthMetrics;
Box::pin(api::server_wrap(
flow,
state,
&req,
payload,
|state, auth: AuthenticationData, req, _| async move {
analytics::auth_events::get_metrics(
&state.pool,
&auth.merchant_account.merchant_id,
auth.merchant_account.publishable_key.as_ref(),
req,
)
.await
.map(ApplicationResponse::Json)
},
&auth::JWTAuth(Permission::Analytics),
api_locking::LockAction::NotApplicable,
))
.await
}
pub async fn get_payment_filters(
state: web::Data<AppState>,
req: actix_web::HttpRequest,