feat(merchant_account): add merchant account create v2 route (#5061)
Co-authored-by: hyperswitch-bot[bot] <148525504+hyperswitch-bot[bot]@users.noreply.github.com>
Co-authored-by: Arun Raj M <jarnura47@gmail.com>
Co-authored-by: github-actions <41898282+github-actions[bot]@users.noreply.github.com>
Co-authored-by: Sanchith Hegde <22217505+SanchithHegde@users.noreply.github.com>
Co-authored-by: Chethan Rao <70657455+Chethan-rao@users.noreply.github.com>
Co-authored-by: Sarthak Soni <76486416+Sarthak1799@users.noreply.github.com>
Co-authored-by: Sandeep Kumar <83278309+tsdk02@users.noreply.github.com>
Co-authored-by: Abhitator216 <abhishek.kanojia@juspay.in>
Co-authored-by: Abhishek Kanojia <89402434+Abhitator216@users.noreply.github.com>
Co-authored-by: ivor-juspay <138492857+ivor-juspay@users.noreply.github.com>
Co-authored-by: Sampras Lopes <sampras.lopes@juspay.in>
Co-authored-by: Pa1NarK <69745008+pixincreate@users.noreply.github.com>
Co-authored-by: likhinbopanna <131246334+likhinbopanna@users.noreply.github.com>
Co-authored-by: Sahkal Poddar <sahkalplanet@gmail.com>
Co-authored-by: Hrithikesh <61539176+hrithikesh026@users.noreply.github.com>
Co-authored-by: Shankar Singh C <83439957+ShankarSinghC@users.noreply.github.com>
Co-authored-by: SamraatBansal <55536657+SamraatBansal@users.noreply.github.com>
Co-authored-by: GORAKHNATH YADAV <gorakhcodes@gmail.com>
Co-authored-by: Apoorv Dixit <64925866+apoorvdixit88@users.noreply.github.com>
Co-authored-by: ShivanshMathurJuspay <104988143+ShivanshMathurJuspay@users.noreply.github.com>
Co-authored-by: awasthi21 <107559116+awasthi21@users.noreply.github.com>
Co-authored-by: Prajjwal Kumar <prajjwal.kumar@juspay.in>
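The hunks below span three analytics modules (judging by the types involved: the active_payments, auth_events, and sdk_events cores). The common thread is a signature cleanup: the metrics and filters entry points now take publishable_key (and, for active payments, merchant_id) as required references instead of Option<&String>, sdk_events_core borrows its key as &str instead of consuming an owned String, and each body sheds its if-let scaffolding along with the empty-response fallback branches. Presence checks move up to the callers.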
@@ -19,8 +19,8 @@ use crate::{
 #[instrument(skip_all)]
 pub async fn get_metrics(
     pool: &AnalyticsProvider,
-    publishable_key: Option<&String>,
-    merchant_id: Option<&String>,
+    publishable_key: &String,
+    merchant_id: &String,
     req: GetActivePaymentsMetricRequest,
 ) -> AnalyticsResult<MetricsResponse<MetricsBucketResponse>> {
     let mut metrics_accumulator: HashMap<
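With the Option gone from the signature, the presence check has to happen before the call. A minimal caller-side sketch of the new contract (hypothetical names, not code from this commit):

// Hypothetical route-handler fragment: resolve the Option once, up front,
// then hand get_metrics a plain &String. The error value is a stand-in.
fn require_key(key: Option<&String>) -> Result<&String, &'static str> {
    key.ok_or("publishable key not present for merchant")
}

fn main() {
    let stored = Some(String::from("pk_test_abc"));
    // as_ref() turns &Option<String> into Option<&String> for the check.
    match require_key(stored.as_ref()) {
        Ok(key) => println!("calling get_metrics with {key}"),
        Err(e) => eprintln!("{e}"),
    }
}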
@@ -28,80 +28,60 @@ pub async fn get_metrics(
         ActivePaymentsMetricsAccumulator,
     > = HashMap::new();
 
-    if let Some(publishable_key) = publishable_key {
-        if let Some(merchant_id) = merchant_id {
-            let mut set = tokio::task::JoinSet::new();
-            for metric_type in req.metrics.iter().cloned() {
-                let publishable_key_scoped = publishable_key.to_owned();
-                let merchant_id_scoped = merchant_id.to_owned();
-                let pool = pool.clone();
-                set.spawn(async move {
-                    let data = pool
-                        .get_active_payments_metrics(
-                            &metric_type,
-                            &merchant_id_scoped,
-                            &publishable_key_scoped,
-                            &req.time_range,
-                        )
-                        .await
-                        .change_context(AnalyticsError::UnknownError);
-                    (metric_type, data)
-                });
-            }
-
-            while let Some((metric, data)) = set
-                .join_next()
-                .await
-                .transpose()
-                .change_context(AnalyticsError::UnknownError)?
-            {
-                logger::info!("Logging metric: {metric} Result: {:?}", data);
-                for (id, value) in data? {
-                    let metrics_builder = metrics_accumulator.entry(id).or_default();
-                    match metric {
-                        ActivePaymentsMetrics::ActivePayments => {
-                            metrics_builder.active_payments.add_metrics_bucket(&value)
-                        }
-                    }
-                }
-
-                logger::debug!(
-                    "Analytics Accumulated Results: metric: {}, results: {:#?}",
-                    metric,
-                    metrics_accumulator
-                );
-            }
-
-            let query_data: Vec<MetricsBucketResponse> = metrics_accumulator
-                .into_iter()
-                .map(|(id, val)| MetricsBucketResponse {
-                    values: val.collect(),
-                    dimensions: id,
-                })
-                .collect();
-
-            Ok(MetricsResponse {
-                query_data,
-                meta_data: [AnalyticsMetadata {
-                    current_time_range: req.time_range,
-                }],
-            })
-        } else {
-            logger::error!("Merchant ID not present");
-            Ok(MetricsResponse {
-                query_data: vec![],
-                meta_data: [AnalyticsMetadata {
-                    current_time_range: req.time_range,
-                }],
-            })
-        }
-    } else {
-        logger::error!("Publishable key not present for merchant ID");
-        Ok(MetricsResponse {
-            query_data: vec![],
-            meta_data: [AnalyticsMetadata {
-                current_time_range: req.time_range,
-            }],
-        })
-    }
+    let mut set = tokio::task::JoinSet::new();
+    for metric_type in req.metrics.iter().cloned() {
+        let publishable_key_scoped = publishable_key.to_owned();
+        let merchant_id_scoped = merchant_id.to_owned();
+        let pool = pool.clone();
+        set.spawn(async move {
+            let data = pool
+                .get_active_payments_metrics(
+                    &metric_type,
+                    &merchant_id_scoped,
+                    &publishable_key_scoped,
+                    &req.time_range,
+                )
+                .await
+                .change_context(AnalyticsError::UnknownError);
+            (metric_type, data)
+        });
+    }
+
+    while let Some((metric, data)) = set
+        .join_next()
+        .await
+        .transpose()
+        .change_context(AnalyticsError::UnknownError)?
+    {
+        logger::info!("Logging metric: {metric} Result: {:?}", data);
+        for (id, value) in data? {
+            let metrics_builder = metrics_accumulator.entry(id).or_default();
+            match metric {
+                ActivePaymentsMetrics::ActivePayments => {
+                    metrics_builder.active_payments.add_metrics_bucket(&value)
+                }
+            }
+        }
+
+        logger::debug!(
+            "Analytics Accumulated Results: metric: {}, results: {:#?}",
+            metric,
+            metrics_accumulator
+        );
+    }
+
+    let query_data: Vec<MetricsBucketResponse> = metrics_accumulator
+        .into_iter()
+        .map(|(id, val)| MetricsBucketResponse {
+            values: val.collect(),
+            dimensions: id,
+        })
+        .collect();
+
+    Ok(MetricsResponse {
+        query_data,
+        meta_data: [AnalyticsMetadata {
+            current_time_range: req.time_range,
+        }],
+    })
 }
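Aside from the dropped guards, the body is unchanged: a JoinSet fan-out, one task per requested metric, drained in completion order. The skeleton of that pattern as a self-contained sketch (fake_metric_query stands in for the pool call; nothing here is hyperswitch API, and it assumes tokio with the rt and macros features):

use tokio::task::JoinSet;

// Stand-in for pool.get_active_payments_metrics(..).
async fn fake_metric_query(metric: &'static str) -> (&'static str, u64) {
    (metric, metric.len() as u64)
}

#[tokio::main]
async fn main() {
    let mut set = JoinSet::new();
    // Fan out: one task per metric, all running concurrently.
    for metric in ["active_payments", "load_time"] {
        set.spawn(fake_metric_query(metric));
    }
    // Drain: join_next() yields tasks in completion order, None when empty.
    while let Some(joined) = set.join_next().await {
        let (metric, value) = joined.expect("metric task panicked");
        println!("{metric}: {value}");
    }
}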
@@ -5,7 +5,7 @@ use api_models::analytics::{
     AnalyticsMetadata, GetAuthEventMetricRequest, MetricsResponse,
 };
 use error_stack::ResultExt;
-use router_env::{instrument, logger, tracing};
+use router_env::{instrument, tracing};
 
 use super::AuthEventMetricsAccumulator;
 use crate::{
@@ -18,7 +18,7 @@ use crate::{
 pub async fn get_metrics(
     pool: &AnalyticsProvider,
     merchant_id: &String,
-    publishable_key: Option<&String>,
+    publishable_key: &String,
     req: GetAuthEventMetricRequest,
 ) -> AnalyticsResult<MetricsResponse<MetricsBucketResponse>> {
     let mut metrics_accumulator: HashMap<
@@ -26,86 +26,76 @@ pub async fn get_metrics(
         AuthEventMetricsAccumulator,
     > = HashMap::new();
 
-    if let Some(publishable_key) = publishable_key {
-        let mut set = tokio::task::JoinSet::new();
-        for metric_type in req.metrics.iter().cloned() {
-            let req = req.clone();
-            let merchant_id_scoped = merchant_id.to_owned();
-            let publishable_key_scoped = publishable_key.to_owned();
-            let pool = pool.clone();
-            set.spawn(async move {
-                let data = pool
-                    .get_auth_event_metrics(
-                        &metric_type,
-                        &merchant_id_scoped,
-                        &publishable_key_scoped,
-                        &req.time_series.map(|t| t.granularity),
-                        &req.time_range,
-                    )
-                    .await
-                    .change_context(AnalyticsError::UnknownError);
-                (metric_type, data)
-            });
-        }
-
-        while let Some((metric, data)) = set
-            .join_next()
-            .await
-            .transpose()
-            .change_context(AnalyticsError::UnknownError)?
-        {
-            for (id, value) in data? {
-                let metrics_builder = metrics_accumulator.entry(id).or_default();
-                match metric {
-                    AuthEventMetrics::ThreeDsSdkCount => metrics_builder
-                        .three_ds_sdk_count
-                        .add_metrics_bucket(&value),
-                    AuthEventMetrics::AuthenticationAttemptCount => metrics_builder
-                        .authentication_attempt_count
-                        .add_metrics_bucket(&value),
-                    AuthEventMetrics::AuthenticationSuccessCount => metrics_builder
-                        .authentication_success_count
-                        .add_metrics_bucket(&value),
-                    AuthEventMetrics::ChallengeFlowCount => metrics_builder
-                        .challenge_flow_count
-                        .add_metrics_bucket(&value),
-                    AuthEventMetrics::ChallengeAttemptCount => metrics_builder
-                        .challenge_attempt_count
-                        .add_metrics_bucket(&value),
-                    AuthEventMetrics::ChallengeSuccessCount => metrics_builder
-                        .challenge_success_count
-                        .add_metrics_bucket(&value),
-                    AuthEventMetrics::FrictionlessFlowCount => metrics_builder
-                        .frictionless_flow_count
-                        .add_metrics_bucket(&value),
-                    AuthEventMetrics::FrictionlessSuccessCount => metrics_builder
-                        .frictionless_success_count
-                        .add_metrics_bucket(&value),
-                }
-            }
-        }
-
-        let query_data: Vec<MetricsBucketResponse> = metrics_accumulator
-            .into_iter()
-            .map(|(id, val)| MetricsBucketResponse {
-                values: val.collect(),
-                dimensions: id,
-            })
-            .collect();
-
-        Ok(MetricsResponse {
-            query_data,
-            meta_data: [AnalyticsMetadata {
-                current_time_range: req.time_range,
-            }],
-        })
-    } else {
-        logger::error!("Publishable key not present for merchant ID");
-        Ok(MetricsResponse {
-            query_data: vec![],
-            meta_data: [AnalyticsMetadata {
-                current_time_range: req.time_range,
-            }],
-        })
-    }
+    let mut set = tokio::task::JoinSet::new();
+    for metric_type in req.metrics.iter().cloned() {
+        let req = req.clone();
+        let merchant_id_scoped = merchant_id.to_owned();
+        let publishable_key_scoped = publishable_key.to_owned();
+        let pool = pool.clone();
+        set.spawn(async move {
+            let data = pool
+                .get_auth_event_metrics(
+                    &metric_type,
+                    &merchant_id_scoped,
+                    &publishable_key_scoped,
+                    &req.time_series.map(|t| t.granularity),
+                    &req.time_range,
+                )
+                .await
+                .change_context(AnalyticsError::UnknownError);
+            (metric_type, data)
+        });
+    }
+
+    while let Some((metric, data)) = set
+        .join_next()
+        .await
+        .transpose()
+        .change_context(AnalyticsError::UnknownError)?
+    {
+        for (id, value) in data? {
+            let metrics_builder = metrics_accumulator.entry(id).or_default();
+            match metric {
+                AuthEventMetrics::ThreeDsSdkCount => metrics_builder
+                    .three_ds_sdk_count
+                    .add_metrics_bucket(&value),
+                AuthEventMetrics::AuthenticationAttemptCount => metrics_builder
+                    .authentication_attempt_count
+                    .add_metrics_bucket(&value),
+                AuthEventMetrics::AuthenticationSuccessCount => metrics_builder
+                    .authentication_success_count
+                    .add_metrics_bucket(&value),
+                AuthEventMetrics::ChallengeFlowCount => metrics_builder
+                    .challenge_flow_count
+                    .add_metrics_bucket(&value),
+                AuthEventMetrics::ChallengeAttemptCount => metrics_builder
+                    .challenge_attempt_count
+                    .add_metrics_bucket(&value),
+                AuthEventMetrics::ChallengeSuccessCount => metrics_builder
+                    .challenge_success_count
+                    .add_metrics_bucket(&value),
+                AuthEventMetrics::FrictionlessFlowCount => metrics_builder
+                    .frictionless_flow_count
+                    .add_metrics_bucket(&value),
+                AuthEventMetrics::FrictionlessSuccessCount => metrics_builder
+                    .frictionless_success_count
+                    .add_metrics_bucket(&value),
+            }
+        }
+    }
+
+    let query_data: Vec<MetricsBucketResponse> = metrics_accumulator
+        .into_iter()
+        .map(|(id, val)| MetricsBucketResponse {
+            values: val.collect(),
+            dimensions: id,
+        })
+        .collect();
+
+    Ok(MetricsResponse {
+        query_data,
+        meta_data: [AnalyticsMetadata {
+            current_time_range: req.time_range,
+        }],
+    })
 }
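This is the same rewrite as active_payments, which also explains the import hunk above: with the fallback branch gone, this function no longer logs, so logger drops out of the router_env import. The accumulation step, where every finished task folds its buckets into one accumulator per dimension id via entry().or_default(), looks like this in miniature (simplified stand-in types, not the real accumulator):

use std::collections::HashMap;

#[derive(Debug, Default)]
struct Accumulator {
    challenge_success_count: Option<u64>,
}

fn main() {
    let mut acc: HashMap<String, Accumulator> = HashMap::new();
    // entry().or_default() creates the bucket the first time an id shows up,
    // so each match arm can write to its field without an existence check.
    acc.entry("merchant-1".to_string())
        .or_default()
        .challenge_success_count = Some(42);
    println!("{acc:?}");
}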
@@ -26,17 +26,17 @@ use crate::{
 pub async fn sdk_events_core(
     pool: &AnalyticsProvider,
     req: SdkEventsRequest,
-    publishable_key: String,
+    publishable_key: &str,
 ) -> AnalyticsResult<Vec<SdkEventsResult>> {
     match pool {
         AnalyticsProvider::Sqlx(_) => Err(FiltersError::NotImplemented(
             "SDK Events not implemented for SQLX",
         ))
         .attach_printable("SQL Analytics is not implemented for Sdk Events"),
-        AnalyticsProvider::Clickhouse(pool) => get_sdk_event(&publishable_key, req, pool).await,
+        AnalyticsProvider::Clickhouse(pool) => get_sdk_event(publishable_key, req, pool).await,
         AnalyticsProvider::CombinedSqlx(_sqlx_pool, ckh_pool)
         | AnalyticsProvider::CombinedCkh(_sqlx_pool, ckh_pool) => {
-            get_sdk_event(&publishable_key, req, ckh_pool).await
+            get_sdk_event(publishable_key, req, ckh_pool).await
         }
     }
     .switch()
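Switching the parameter from an owned String to &str is the most permissive borrow: the caller keeps ownership, nothing is moved or cloned at the boundary, and String, &String, and literals all coerce. A small illustration (made-up names):

// Any string-ish argument works against a &str parameter.
fn lookup(publishable_key: &str) -> usize {
    publishable_key.len()
}

fn main() {
    let owned = String::from("pk_test_123");
    assert_eq!(lookup(&owned), 11); // &String auto-derefs to &str
    assert_eq!(lookup("pk_live_x"), 9); // literals work directly
}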
@@ -45,7 +45,7 @@ pub async fn sdk_events_core(
 #[instrument(skip_all)]
 pub async fn get_metrics(
     pool: &AnalyticsProvider,
-    publishable_key: Option<&String>,
+    publishable_key: &String,
     req: GetSdkEventMetricRequest,
 ) -> AnalyticsResult<MetricsResponse<MetricsBucketResponse>> {
     let mut metrics_accumulator: HashMap<
@@ -53,102 +53,90 @@ pub async fn get_metrics(
         SdkEventMetricsAccumulator,
     > = HashMap::new();
 
-    if let Some(publishable_key) = publishable_key {
-        let mut set = tokio::task::JoinSet::new();
-        for metric_type in req.metrics.iter().cloned() {
-            let req = req.clone();
-            let publishable_key_scoped = publishable_key.to_owned();
-            let pool = pool.clone();
-            set.spawn(async move {
-                let data = pool
-                    .get_sdk_event_metrics(
-                        &metric_type,
-                        &req.group_by_names.clone(),
-                        &publishable_key_scoped,
-                        &req.filters,
-                        &req.time_series.map(|t| t.granularity),
-                        &req.time_range,
-                    )
-                    .await
-                    .change_context(AnalyticsError::UnknownError);
-                (metric_type, data)
-            });
-        }
-
-        while let Some((metric, data)) = set
-            .join_next()
-            .await
-            .transpose()
-            .change_context(AnalyticsError::UnknownError)?
-        {
-            logger::info!("Logging Result {:?}", data);
-            for (id, value) in data? {
-                let metrics_builder = metrics_accumulator.entry(id).or_default();
-                match metric {
-                    SdkEventMetrics::PaymentAttempts => {
-                        metrics_builder.payment_attempts.add_metrics_bucket(&value)
-                    }
-                    SdkEventMetrics::PaymentMethodsCallCount => metrics_builder
-                        .payment_methods_call_count
-                        .add_metrics_bucket(&value),
-                    SdkEventMetrics::SdkRenderedCount => metrics_builder
-                        .sdk_rendered_count
-                        .add_metrics_bucket(&value),
-                    SdkEventMetrics::SdkInitiatedCount => metrics_builder
-                        .sdk_initiated_count
-                        .add_metrics_bucket(&value),
-                    SdkEventMetrics::PaymentMethodSelectedCount => metrics_builder
-                        .payment_method_selected_count
-                        .add_metrics_bucket(&value),
-                    SdkEventMetrics::PaymentDataFilledCount => metrics_builder
-                        .payment_data_filled_count
-                        .add_metrics_bucket(&value),
-                    SdkEventMetrics::AveragePaymentTime => metrics_builder
-                        .average_payment_time
-                        .add_metrics_bucket(&value),
-                    SdkEventMetrics::LoadTime => {
-                        metrics_builder.load_time.add_metrics_bucket(&value)
-                    }
-                }
-            }
-
-            logger::debug!(
-                "Analytics Accumulated Results: metric: {}, results: {:#?}",
-                metric,
-                metrics_accumulator
-            );
-        }
-
-        let query_data: Vec<MetricsBucketResponse> = metrics_accumulator
-            .into_iter()
-            .map(|(id, val)| MetricsBucketResponse {
-                values: val.collect(),
-                dimensions: id,
-            })
-            .collect();
-
-        Ok(MetricsResponse {
-            query_data,
-            meta_data: [AnalyticsMetadata {
-                current_time_range: req.time_range,
-            }],
-        })
-    } else {
-        logger::error!("Publishable key not present for merchant ID");
-        Ok(MetricsResponse {
-            query_data: vec![],
-            meta_data: [AnalyticsMetadata {
-                current_time_range: req.time_range,
-            }],
-        })
-    }
+    let mut set = tokio::task::JoinSet::new();
+    for metric_type in req.metrics.iter().cloned() {
+        let req = req.clone();
+        let publishable_key_scoped = publishable_key.to_owned();
+        let pool = pool.clone();
+        set.spawn(async move {
+            let data = pool
+                .get_sdk_event_metrics(
+                    &metric_type,
+                    &req.group_by_names.clone(),
+                    &publishable_key_scoped,
+                    &req.filters,
+                    &req.time_series.map(|t| t.granularity),
+                    &req.time_range,
+                )
+                .await
+                .change_context(AnalyticsError::UnknownError);
+            (metric_type, data)
+        });
+    }
+
+    while let Some((metric, data)) = set
+        .join_next()
+        .await
+        .transpose()
+        .change_context(AnalyticsError::UnknownError)?
+    {
+        logger::info!("Logging Result {:?}", data);
+        for (id, value) in data? {
+            let metrics_builder = metrics_accumulator.entry(id).or_default();
+            match metric {
+                SdkEventMetrics::PaymentAttempts => {
+                    metrics_builder.payment_attempts.add_metrics_bucket(&value)
+                }
+                SdkEventMetrics::PaymentMethodsCallCount => metrics_builder
+                    .payment_methods_call_count
+                    .add_metrics_bucket(&value),
+                SdkEventMetrics::SdkRenderedCount => metrics_builder
+                    .sdk_rendered_count
+                    .add_metrics_bucket(&value),
+                SdkEventMetrics::SdkInitiatedCount => metrics_builder
+                    .sdk_initiated_count
+                    .add_metrics_bucket(&value),
+                SdkEventMetrics::PaymentMethodSelectedCount => metrics_builder
+                    .payment_method_selected_count
+                    .add_metrics_bucket(&value),
+                SdkEventMetrics::PaymentDataFilledCount => metrics_builder
+                    .payment_data_filled_count
+                    .add_metrics_bucket(&value),
+                SdkEventMetrics::AveragePaymentTime => metrics_builder
+                    .average_payment_time
+                    .add_metrics_bucket(&value),
+                SdkEventMetrics::LoadTime => metrics_builder.load_time.add_metrics_bucket(&value),
+            }
+        }
+
+        logger::debug!(
+            "Analytics Accumulated Results: metric: {}, results: {:#?}",
+            metric,
+            metrics_accumulator
+        );
+    }
+
+    let query_data: Vec<MetricsBucketResponse> = metrics_accumulator
+        .into_iter()
+        .map(|(id, val)| MetricsBucketResponse {
+            values: val.collect(),
+            dimensions: id,
+        })
+        .collect();
+
+    Ok(MetricsResponse {
+        query_data,
+        meta_data: [AnalyticsMetadata {
+            current_time_range: req.time_range,
+        }],
+    })
 }
 
 #[allow(dead_code)]
 pub async fn get_filters(
     pool: &AnalyticsProvider,
     req: GetSdkEventFiltersRequest,
-    publishable_key: Option<&String>,
+    publishable_key: &String,
 ) -> AnalyticsResult<SdkEventFiltersResponse> {
     use api_models::analytics::{sdk_events::SdkEventDimensions, SdkEventFilterValue};
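The drain loop in these bodies juggles two layers of fallibility: join_next() can surface a JoinError (a task panicked or was cancelled), which transpose() and ? convert and propagate, while each task's payload carries the query's own Result, unwrapped later as data?. The same shape in miniature (illustrative types only):

use tokio::task::JoinSet;

#[tokio::main]
async fn main() -> Result<(), Box<dyn std::error::Error>> {
    let mut set = JoinSet::new();
    // Each task returns (metric, Result<value, query error>).
    set.spawn(async { ("payment_attempts", Ok::<u64, String>(7)) });

    // Layer 1: `?` after transpose() propagates a JoinError.
    while let Some((metric, data)) = set.join_next().await.transpose()? {
        // Layer 2: `data?` propagates the query's own error.
        println!("{metric}: {}", data?);
    }
    Ok(())
}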
@@ -157,46 +145,37 @@ pub async fn get_filters(
 
     let mut res = SdkEventFiltersResponse::default();
 
-    if let Some(publishable_key) = publishable_key {
-        for dim in req.group_by_names {
-            let values = match pool {
-                AnalyticsProvider::Sqlx(_pool) => Err(FiltersError::NotImplemented(
-                    "SDK Events not implemented for SQLX",
-                ))
-                .attach_printable("SQL Analytics is not implemented for SDK Events"),
-                AnalyticsProvider::Clickhouse(pool) => {
-                    get_sdk_event_filter_for_dimension(dim, publishable_key, &req.time_range, pool)
-                        .await
-                }
-                AnalyticsProvider::CombinedSqlx(_sqlx_pool, ckh_pool)
-                | AnalyticsProvider::CombinedCkh(_sqlx_pool, ckh_pool) => {
-                    get_sdk_event_filter_for_dimension(
-                        dim,
-                        publishable_key,
-                        &req.time_range,
-                        ckh_pool,
-                    )
-                    .await
-                }
-            }
-            .change_context(AnalyticsError::UnknownError)?
-            .into_iter()
-            .filter_map(|fil: SdkEventFilter| match dim {
-                SdkEventDimensions::PaymentMethod => fil.payment_method,
-                SdkEventDimensions::Platform => fil.platform,
-                SdkEventDimensions::BrowserName => fil.browser_name,
-                SdkEventDimensions::Source => fil.source,
-                SdkEventDimensions::Component => fil.component,
-                SdkEventDimensions::PaymentExperience => fil.payment_experience,
-            })
-            .collect::<Vec<String>>();
-            res.query_data.push(SdkEventFilterValue {
-                dimension: dim,
-                values,
-            })
-        }
-    } else {
-        router_env::logger::error!("Publishable key not found for merchant");
+    for dim in req.group_by_names {
+        let values = match pool {
+            AnalyticsProvider::Sqlx(_pool) => Err(FiltersError::NotImplemented(
+                "SDK Events not implemented for SQLX",
+            ))
+            .attach_printable("SQL Analytics is not implemented for SDK Events"),
+            AnalyticsProvider::Clickhouse(pool) => {
+                get_sdk_event_filter_for_dimension(dim, publishable_key, &req.time_range, pool)
+                    .await
+            }
+            AnalyticsProvider::CombinedSqlx(_sqlx_pool, ckh_pool)
+            | AnalyticsProvider::CombinedCkh(_sqlx_pool, ckh_pool) => {
+                get_sdk_event_filter_for_dimension(dim, publishable_key, &req.time_range, ckh_pool)
+                    .await
+            }
+        }
+        .change_context(AnalyticsError::UnknownError)?
+        .into_iter()
+        .filter_map(|fil: SdkEventFilter| match dim {
+            SdkEventDimensions::PaymentMethod => fil.payment_method,
+            SdkEventDimensions::Platform => fil.platform,
+            SdkEventDimensions::BrowserName => fil.browser_name,
+            SdkEventDimensions::Source => fil.source,
+            SdkEventDimensions::Component => fil.component,
+            SdkEventDimensions::PaymentExperience => fil.payment_experience,
+        })
+        .collect::<Vec<String>>();
+        res.query_data.push(SdkEventFilterValue {
+            dimension: dim,
+            values,
+        })
     }
 
     Ok(res)
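One last detail from the filters endpoint: filter_map does double duty, projecting each row onto the requested dimension and silently dropping rows where that dimension is absent. In miniature:

fn main() {
    struct Row {
        payment_method: Option<String>,
    }
    let rows = vec![
        Row { payment_method: Some("card".to_string()) },
        Row { payment_method: None },
        Row { payment_method: Some("wallet".to_string()) },
    ];
    // None rows vanish; Some values land in the filter list.
    let values: Vec<String> = rows
        .into_iter()
        .filter_map(|row| row.payment_method)
        .collect();
    assert_eq!(values, ["card", "wallet"]);
}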