diff --git a/Cargo.lock b/Cargo.lock index 3ad36999ac..665703f3d5 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -19,7 +19,7 @@ dependencies = [ "futures-util", "log", "once_cell", - "parking_lot 0.12.1", + "parking_lot", "pin-project-lite", "smallvec", "tokio", @@ -361,12 +361,6 @@ dependencies = [ "alloc-no-stdlib", ] -[[package]] -name = "allocator-api2" -version = "0.2.16" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "0942ffc6dcaadf03badf6e6a2d0228460359d5e34b57ccdc720b7382dfbd5ec5" - [[package]] name = "android-tzdata" version = "0.1.1" @@ -589,15 +583,6 @@ dependencies = [ "syn 2.0.38", ] -[[package]] -name = "atoi" -version = "1.0.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "d7c57d12312ff59c811c0643f4d80830505833c9ffaebd193d819392b265be8e" -dependencies = [ - "num-traits", -] - [[package]] name = "atomic" version = "0.5.3" @@ -1147,21 +1132,10 @@ dependencies = [ "async-trait", "futures-channel", "futures-util", - "parking_lot 0.12.1", + "parking_lot", "tokio", ] -[[package]] -name = "bigdecimal" -version = "0.3.1" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "a6773ddc0eafc0e509fb60e48dff7f450f8e674a0686ae8605e8d9901bd5eefa" -dependencies = [ - "num-bigint", - "num-integer", - "num-traits", -] - [[package]] name = "bincode" version = "1.3.3" @@ -1617,21 +1591,6 @@ dependencies = [ "libc", ] -[[package]] -name = "crc" -version = "3.0.1" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "86ec7a15cbe22e59248fc7eadb1907dab5ba09372595da4d73dd805ed4417dfe" -dependencies = [ - "crc-catalog", -] - -[[package]] -name = "crc-catalog" -version = "2.2.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "9cace84e55f07e7301bae1c519df89cdad8cc3cd868413d3fdbdeca9ff3db484" - [[package]] name = "crc16" version = "0.4.0" @@ -1690,16 +1649,6 @@ dependencies = [ "scopeguard", ] -[[package]] -name = "crossbeam-queue" -version = "0.3.8" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "d1cfb3ea8a53f37c40dea2c7bedcbd88bdfae54f5e2175d6ecaff1c988353add" -dependencies = [ - "cfg-if", - "crossbeam-utils", -] - [[package]] name = "crossbeam-utils" version = "0.8.16" @@ -1799,7 +1748,7 @@ dependencies = [ "hashbrown 0.14.1", "lock_api", "once_cell", - "parking_lot_core 0.9.8", + "parking_lot_core", ] [[package]] @@ -2022,12 +1971,6 @@ version = "0.3.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "0688c2a7f92e427f44895cd63841bff7b29f8d7a1648b9e7e07a4a365b2e1257" -[[package]] -name = "dotenvy" -version = "0.15.7" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "1aaf95b3e5c8f23aa320147307562d361db0ae0d51242340f558153b4eb2439b" - [[package]] name = "drainer" version = "0.1.0" @@ -2194,12 +2137,6 @@ version = "2.0.1" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "25cbce373ec4653f1a01a31e8a5e5ec0c622dc27ff9c4e6606eefef5cbbed4a5" -[[package]] -name = "finl_unicode" -version = "1.2.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "8fcfdc7a0362c9f4444381a9e697c79d435fe65b52a37466fc2c1184cee9edc6" - [[package]] name = "flate2" version = "1.0.27" @@ -2265,7 +2202,7 @@ dependencies = [ "futures", "lazy_static", "log", - "parking_lot 0.12.1", + "parking_lot", "rand 0.8.5", "redis-protocol", "semver", @@ -2372,17 +2309,6 @@ dependencies = [ "futures-util", ] -[[package]] -name = "futures-intrusive" -version = "0.4.2" 
-source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "a604f7a68fbf8103337523b1fadc8ade7361ee3f112f7c680ad179651616aed5" -dependencies = [ - "futures-core", - "lock_api", - "parking_lot 0.11.2", -] - [[package]] name = "futures-io" version = "0.3.28" @@ -2585,28 +2511,12 @@ name = "hashbrown" version = "0.14.1" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "7dfda62a12f55daeae5015f81b0baea145391cb4520f86c248fc615d72640d12" -dependencies = [ - "ahash 0.8.3", - "allocator-api2", -] - -[[package]] -name = "hashlink" -version = "0.8.4" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "e8094feaf31ff591f651a2664fb9cfd92bba7a60ce3197265e9482ebe753c8f7" -dependencies = [ - "hashbrown 0.14.1", -] [[package]] name = "heck" version = "0.4.1" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "95505c38b4572b2d910cecb0281560f54b440a19336cbbcb27bf6ce6adc6f5a8" -dependencies = [ - "unicode-segmentation", -] [[package]] name = "hermit-abi" @@ -2620,15 +2530,6 @@ version = "0.4.3" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "7f24254aa9a54b5c858eaee2f5bccdb46aaf0e486a595ed5fd8f86ba55232a70" -[[package]] -name = "hkdf" -version = "0.12.3" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "791a029f6b9fc27657f6f188ec6e5e43f6911f6f878e0dc5501396e09809d437" -dependencies = [ - "hmac", -] - [[package]] name = "hmac" version = "0.12.1" @@ -3312,7 +3213,7 @@ dependencies = [ "crossbeam-utils", "futures-util", "once_cell", - "parking_lot 0.12.1", + "parking_lot", "quanta", "rustc_version", "scheduled-thread-pool", @@ -3621,17 +3522,6 @@ version = "2.1.1" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "e52c774a4c39359c1d1c52e43f73dd91a75a614652c825408eec30c95a9b2067" -[[package]] -name = "parking_lot" -version = "0.11.2" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "7d17b78036a60663b797adeaee46f5c9dfebb86948d1255007a1d6be0271ff99" -dependencies = [ - "instant", - "lock_api", - "parking_lot_core 0.8.6", -] - [[package]] name = "parking_lot" version = "0.12.1" @@ -3639,21 +3529,7 @@ source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "3742b2c103b9f06bc9fff0a37ff4912935851bee6d36f3c02bcc755bcfec228f" dependencies = [ "lock_api", - "parking_lot_core 0.9.8", -] - -[[package]] -name = "parking_lot_core" -version = "0.8.6" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "60a2cfe6f0ad2bfc16aefa463b497d5c7a5ecd44a23efa72aa342d90177356dc" -dependencies = [ - "cfg-if", - "instant", - "libc", - "redox_syscall 0.2.16", - "smallvec", - "winapi", + "parking_lot_core", ] [[package]] @@ -4041,7 +3917,7 @@ source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "51de85fb3fb6524929c8a2eb85e6b6d363de4e8c48f9e2c2eac4944abc181c93" dependencies = [ "log", - "parking_lot 0.12.1", + "parking_lot", "scheduled-thread-pool", ] @@ -4358,12 +4234,10 @@ dependencies = [ "aws-sdk-s3", "base64 0.21.4", "bb8", - "bigdecimal", "blake3", "bytes", "cards", "clap", - "common_enums", "common_utils", "config", "data_models", @@ -4412,7 +4286,6 @@ dependencies = [ "sha-1 0.9.8", "signal-hook", "signal-hook-tokio", - "sqlx", "storage_impl", "strum 0.24.1", "tera", @@ -4686,7 +4559,7 @@ version = "0.2.7" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "3cbc66816425a074528352f5789333ecff06ca41b36b0b0efdfbb29edc391a19" dependencies = [ 
- "parking_lot 0.12.1", + "parking_lot", ] [[package]] @@ -4913,7 +4786,7 @@ dependencies = [ "futures", "lazy_static", "log", - "parking_lot 0.12.1", + "parking_lot", "serial_test_derive", ] @@ -5106,111 +4979,6 @@ version = "0.5.2" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "6e63cff320ae2c57904679ba7cb63280a3dc4613885beafb148ee7bf9aa9042d" -[[package]] -name = "sqlformat" -version = "0.2.2" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "6b7b278788e7be4d0d29c0f39497a0eef3fba6bbc8e70d8bf7fde46edeaa9e85" -dependencies = [ - "itertools 0.11.0", - "nom", - "unicode_categories", -] - -[[package]] -name = "sqlx" -version = "0.6.3" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "f8de3b03a925878ed54a954f621e64bf55a3c1bd29652d0d1a17830405350188" -dependencies = [ - "sqlx-core", - "sqlx-macros", -] - -[[package]] -name = "sqlx-core" -version = "0.6.3" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "fa8241483a83a3f33aa5fff7e7d9def398ff9990b2752b6c6112b83c6d246029" -dependencies = [ - "ahash 0.7.6", - "atoi", - "base64 0.13.1", - "bigdecimal", - "bitflags 1.3.2", - "byteorder", - "bytes", - "crc", - "crossbeam-queue", - "dirs", - "dotenvy", - "either", - "event-listener", - "futures-channel", - "futures-core", - "futures-intrusive", - "futures-util", - "hashlink", - "hex", - "hkdf", - "hmac", - "indexmap 1.9.3", - "itoa", - "libc", - "log", - "md-5", - "memchr", - "num-bigint", - "once_cell", - "paste", - "percent-encoding", - "rand 0.8.5", - "serde", - "serde_json", - "sha1", - "sha2", - "smallvec", - "sqlformat", - "sqlx-rt", - "stringprep", - "thiserror", - "time", - "tokio-stream", - "url", - "whoami", -] - -[[package]] -name = "sqlx-macros" -version = "0.6.3" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "9966e64ae989e7e575b19d7265cb79d7fc3cbbdf179835cb0d716f294c2049c9" -dependencies = [ - "dotenvy", - "either", - "heck", - "once_cell", - "proc-macro2", - "quote", - "sha2", - "sqlx-core", - "sqlx-rt", - "syn 1.0.109", - "url", -] - -[[package]] -name = "sqlx-rt" -version = "0.6.3" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "804d3f245f894e61b1e6263c84b23ca675d96753b5abfd5cc8597d86806e8024" -dependencies = [ - "native-tls", - "once_cell", - "tokio", - "tokio-native-tls", -] - [[package]] name = "storage_impl" version = "0.1.0" @@ -5255,17 +5023,6 @@ dependencies = [ "regex", ] -[[package]] -name = "stringprep" -version = "0.1.4" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "bb41d74e231a107a1b4ee36bd1214b11285b77768d2e3824aedafa988fd36ee6" -dependencies = [ - "finl_unicode", - "unicode-bidi", - "unicode-normalization", -] - [[package]] name = "strsim" version = "0.10.0" @@ -5500,7 +5257,7 @@ dependencies = [ "futures", "http", "log", - "parking_lot 0.12.1", + "parking_lot", "serde", "serde_json", "serde_repr", @@ -5618,7 +5375,7 @@ dependencies = [ "libc", "mio", "num_cpus", - "parking_lot 0.12.1", + "parking_lot", "pin-project-lite", "signal-hook-registry", "socket2 0.5.4", @@ -6047,12 +5804,6 @@ version = "0.2.4" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "f962df74c8c05a667b5ee8bcf162993134c104e96440b663c8daa176dc772d8c" -[[package]] -name = "unicode_categories" -version = "0.1.1" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "39ec24b3121d976906ece63c9daad25b85969647682eee313cb5779fdd69e14e" - [[package]] name 
= "unidecode" version = "0.3.0" @@ -6343,16 +6094,6 @@ version = "0.1.7" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "9193164d4de03a926d909d3bc7c30543cecb35400c02114792c2cae20d5e2dbb" -[[package]] -name = "whoami" -version = "1.4.1" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "22fc3756b8a9133049b26c7f61ab35416c130e8c09b660f5b3958b446f52cc50" -dependencies = [ - "wasm-bindgen", - "web-sys", -] - [[package]] name = "winapi" version = "0.3.9" diff --git a/config/config.example.toml b/config/config.example.toml index 5943c05e61..59083d6c71 100644 --- a/config/config.example.toml +++ b/config/config.example.toml @@ -433,22 +433,10 @@ apple_pay_ppc_key = "APPLE_PAY_PAYMENT_PROCESSING_CERTIFICATE_KEY" #Private apple_pay_merchant_cert = "APPLE_PAY_MERCHNAT_CERTIFICATE" #Merchant Certificate provided by Apple Pay (https://developer.apple.com/) Certificates, Identifiers & Profiles > Apple Pay Merchant Identity Certificate apple_pay_merchant_cert_key = "APPLE_PAY_MERCHNAT_CERTIFICATE_KEY" #Private key generate by RSA:2048 algorithm + [payment_link] sdk_url = "http://localhost:9090/dist/HyperLoader.js" -# Analytics configuration. -[analytics] -source = "sqlx" # The Analytics source/strategy to be used - -[analytics.sqlx] -username = "db_user" # Analytics DB Username -password = "db_pass" # Analytics DB Password -host = "localhost" # Analytics DB Host -port = 5432 # Analytics DB Port -dbname = "hyperswitch_db" # Name of Database -pool_size = 5 # Number of connections to keep open -connection_timeout = 10 # Timeout for database connection in seconds - # Config for KV setup [kv_config] # TTL for KV in seconds diff --git a/config/docker_compose.toml b/config/docker_compose.toml index 4e630cd46f..20ca175ceb 100644 --- a/config/docker_compose.toml +++ b/config/docker_compose.toml @@ -317,16 +317,5 @@ supported_connectors = "braintree" redis_lock_expiry_seconds = 180 # 3 * 60 seconds delay_between_retries_in_milliseconds = 500 -[analytics] -source = "sqlx" - -[analytics.sqlx] -username = "db_user" -password = "db_pass" -host = "pg" -port = 5432 -dbname = "hyperswitch_db" -pool_size = 5 - [kv_config] ttl = 900 # 15 * 60 seconds diff --git a/crates/api_models/src/analytics.rs b/crates/api_models/src/analytics.rs deleted file mode 100644 index 3d94cf21fd..0000000000 --- a/crates/api_models/src/analytics.rs +++ /dev/null @@ -1,137 +0,0 @@ -use std::collections::HashSet; - -use time::PrimitiveDateTime; - -use self::{ - payments::{PaymentDimensions, PaymentMetrics}, - refunds::{RefundDimensions, RefundMetrics}, -}; - -pub mod payments; -pub mod refunds; - -#[derive(Debug, serde::Serialize)] -pub struct NameDescription { - pub name: String, - pub desc: String, -} - -#[derive(Debug, serde::Serialize)] -#[serde(rename_all = "camelCase")] -pub struct GetInfoResponse { - pub metrics: Vec, - pub download_dimensions: Option>, - pub dimensions: Vec, -} - -#[derive(Debug, Clone, Copy, serde::Serialize, serde::Deserialize, PartialEq, Eq, Hash)] -#[serde(rename_all = "camelCase")] -pub struct TimeRange { - #[serde(with = "common_utils::custom_serde::iso8601")] - pub start_time: PrimitiveDateTime, - #[serde(default, with = "common_utils::custom_serde::iso8601::option")] - pub end_time: Option, -} - -#[derive(Clone, Copy, Debug, serde::Deserialize, masking::Serialize)] -pub struct TimeSeries { - pub granularity: Granularity, -} - -#[derive(Clone, Copy, Debug, serde::Deserialize, masking::Serialize)] -pub enum Granularity { - #[serde(rename = "G_ONEMIN")] - 
OneMin, - #[serde(rename = "G_FIVEMIN")] - FiveMin, - #[serde(rename = "G_FIFTEENMIN")] - FifteenMin, - #[serde(rename = "G_THIRTYMIN")] - ThirtyMin, - #[serde(rename = "G_ONEHOUR")] - OneHour, - #[serde(rename = "G_ONEDAY")] - OneDay, -} - -#[derive(Clone, Debug, serde::Deserialize, masking::Serialize)] -#[serde(rename_all = "camelCase")] -pub struct GetPaymentMetricRequest { - pub time_series: Option<TimeSeries>, - pub time_range: TimeRange, - #[serde(default)] - pub group_by_names: Vec<PaymentDimensions>, - #[serde(default)] - pub filters: payments::PaymentFilters, - pub metrics: HashSet<PaymentMetrics>, - #[serde(default)] - pub delta: bool, -} - -#[derive(Clone, Debug, serde::Deserialize, masking::Serialize)] -#[serde(rename_all = "camelCase")] -pub struct GetRefundMetricRequest { - pub time_series: Option<TimeSeries>, - pub time_range: TimeRange, - #[serde(default)] - pub group_by_names: Vec<RefundDimensions>, - #[serde(default)] - pub filters: refunds::RefundFilters, - pub metrics: HashSet<RefundMetrics>, - #[serde(default)] - pub delta: bool, -} - -#[derive(Debug, serde::Serialize)] -pub struct AnalyticsMetadata { - pub current_time_range: TimeRange, -} - -#[derive(Debug, serde::Deserialize, masking::Serialize)] -#[serde(rename_all = "camelCase")] -pub struct GetPaymentFiltersRequest { - pub time_range: TimeRange, - #[serde(default)] - pub group_by_names: Vec<PaymentDimensions>, -} - -#[derive(Debug, Default, serde::Serialize)] -#[serde(rename_all = "camelCase")] -pub struct PaymentFiltersResponse { - pub query_data: Vec<FilterValue>, -} - -#[derive(Debug, serde::Serialize)] -#[serde(rename_all = "camelCase")] -pub struct FilterValue { - pub dimension: PaymentDimensions, - pub values: Vec<String>, -} - -#[derive(Debug, serde::Deserialize, masking::Serialize)] -#[serde(rename_all = "camelCase")] -pub struct GetRefundFilterRequest { - pub time_range: TimeRange, - #[serde(default)] - pub group_by_names: Vec<RefundDimensions>, -} - -#[derive(Debug, Default, serde::Serialize, Eq, PartialEq)] -#[serde(rename_all = "camelCase")] -pub struct RefundFiltersResponse { - pub query_data: Vec<RefundFilterValue>, -} - -#[derive(Debug, serde::Serialize, Eq, PartialEq)] -#[serde(rename_all = "camelCase")] -pub struct RefundFilterValue { - pub dimension: RefundDimensions, - pub values: Vec<String>, -} - -#[derive(Debug, serde::Serialize)] -#[serde(rename_all = "camelCase")] -pub struct MetricsResponse<T> { - pub query_data: Vec<T>, - pub meta_data: [AnalyticsMetadata; 1], -} diff --git a/crates/api_models/src/analytics/payments.rs b/crates/api_models/src/analytics/payments.rs deleted file mode 100644 index 5e1d673736..0000000000 --- a/crates/api_models/src/analytics/payments.rs +++ /dev/null @@ -1,176 +0,0 @@ -use std::{ - collections::hash_map::DefaultHasher, - hash::{Hash, Hasher}, -}; - -use common_enums::enums::{AttemptStatus, AuthenticationType, Currency, PaymentMethod}; - -use super::{NameDescription, TimeRange}; -use crate::enums::Connector; - -#[derive(Clone, Debug, Default, serde::Deserialize, masking::Serialize)] -pub struct PaymentFilters { - #[serde(default)] - pub currency: Vec<Currency>, - #[serde(default)] - pub status: Vec<AttemptStatus>, - #[serde(default)] - pub connector: Vec<Connector>, - #[serde(default)] - pub auth_type: Vec<AuthenticationType>, - #[serde(default)] - pub payment_method: Vec<PaymentMethod>, -} - -#[derive( - Debug, - serde::Serialize, - serde::Deserialize, - strum::AsRefStr, - PartialEq, - PartialOrd, - Eq, - Ord, - strum::Display, - strum::EnumIter, - Clone, - Copy, -)] -#[serde(rename_all = "snake_case")] -#[strum(serialize_all = "snake_case")] -pub enum PaymentDimensions { - // Do not change the order of these enums - // Consult the Dashboard FE folks since these also affect the order of metrics on FE
Connector, - PaymentMethod, - Currency, - #[strum(serialize = "authentication_type")] - #[serde(rename = "authentication_type")] - AuthType, - #[strum(serialize = "status")] - #[serde(rename = "status")] - PaymentStatus, -} - -#[derive( - Clone, - Debug, - Hash, - PartialEq, - Eq, - serde::Serialize, - serde::Deserialize, - strum::Display, - strum::EnumIter, - strum::AsRefStr, -)] -#[strum(serialize_all = "snake_case")] -#[serde(rename_all = "snake_case")] -pub enum PaymentMetrics { - PaymentSuccessRate, - PaymentCount, - PaymentSuccessCount, - PaymentProcessedAmount, - AvgTicketSize, -} - -pub mod metric_behaviour { - pub struct PaymentSuccessRate; - pub struct PaymentCount; - pub struct PaymentSuccessCount; - pub struct PaymentProcessedAmount; - pub struct AvgTicketSize; -} - -impl From<PaymentMetrics> for NameDescription { - fn from(value: PaymentMetrics) -> Self { - Self { - name: value.to_string(), - desc: String::new(), - } - } -} - -impl From<PaymentDimensions> for NameDescription { - fn from(value: PaymentDimensions) -> Self { - Self { - name: value.to_string(), - desc: String::new(), - } - } -} - -#[derive(Debug, serde::Serialize, Eq)] -pub struct PaymentMetricsBucketIdentifier { - pub currency: Option<Currency>, - pub status: Option<AttemptStatus>, - pub connector: Option<String>, - #[serde(rename = "authentication_type")] - pub auth_type: Option<AuthenticationType>, - pub payment_method: Option<String>, - #[serde(rename = "time_range")] - pub time_bucket: TimeRange, - // Coz FE sucks - #[serde(rename = "time_bucket")] - #[serde(with = "common_utils::custom_serde::iso8601custom")] - pub start_time: time::PrimitiveDateTime, -} - -impl PaymentMetricsBucketIdentifier { - pub fn new( - currency: Option<Currency>, - status: Option<AttemptStatus>, - connector: Option<String>, - auth_type: Option<AuthenticationType>, - payment_method: Option<String>, - normalized_time_range: TimeRange, - ) -> Self { - Self { - currency, - status, - connector, - auth_type, - payment_method, - time_bucket: normalized_time_range, - start_time: normalized_time_range.start_time, - } - } -} - -impl Hash for PaymentMetricsBucketIdentifier { - fn hash<H: Hasher>(&self, state: &mut H) { - self.currency.hash(state); - self.status.map(|i| i.to_string()).hash(state); - self.connector.hash(state); - self.auth_type.map(|i| i.to_string()).hash(state); - self.payment_method.hash(state); - self.time_bucket.hash(state); - } -} - -impl PartialEq for PaymentMetricsBucketIdentifier { - fn eq(&self, other: &Self) -> bool { - let mut left = DefaultHasher::new(); - self.hash(&mut left); - let mut right = DefaultHasher::new(); - other.hash(&mut right); - left.finish() == right.finish() - } -} - -#[derive(Debug, serde::Serialize)] -pub struct PaymentMetricsBucketValue { - pub payment_success_rate: Option<f64>, - pub payment_count: Option<u64>, - pub payment_success_count: Option<u64>, - pub payment_processed_amount: Option<u64>, - pub avg_ticket_size: Option<f64>, -} - -#[derive(Debug, serde::Serialize)] -pub struct MetricsBucketResponse { - #[serde(flatten)] - pub values: PaymentMetricsBucketValue, - #[serde(flatten)] - pub dimensions: PaymentMetricsBucketIdentifier, -} diff --git a/crates/api_models/src/analytics/refunds.rs b/crates/api_models/src/analytics/refunds.rs deleted file mode 100644 index 1ee05db41f..0000000000 --- a/crates/api_models/src/analytics/refunds.rs +++ /dev/null @@ -1,177 +0,0 @@ -use std::{ - collections::hash_map::DefaultHasher, - hash::{Hash, Hasher}, -}; - -use common_enums::enums::{Currency, RefundStatus}; - -#[derive( - Clone, - Copy, - Debug, - Default, - Eq, - PartialEq, - serde::Serialize, - serde::Deserialize, - strum::Display, - strum::EnumString, -)] -// TODO: RefundType in common_enums
needs to be mapped to storage_model -#[serde(rename_all = "snake_case")] -#[strum(serialize_all = "snake_case")] -pub enum RefundType { - InstantRefund, - #[default] - RegularRefund, - RetryRefund, -} - -use super::{NameDescription, TimeRange}; -#[derive(Clone, Debug, Default, serde::Deserialize, masking::Serialize)] -pub struct RefundFilters { - #[serde(default)] - pub currency: Vec<Currency>, - #[serde(default)] - pub refund_status: Vec<RefundStatus>, - #[serde(default)] - pub connector: Vec<String>, - #[serde(default)] - pub refund_type: Vec<RefundType>, -} - -#[derive( - Debug, - serde::Serialize, - serde::Deserialize, - strum::AsRefStr, - PartialEq, - PartialOrd, - Eq, - Ord, - strum::Display, - strum::EnumIter, - Clone, - Copy, -)] -#[serde(rename_all = "snake_case")] -#[strum(serialize_all = "snake_case")] -pub enum RefundDimensions { - Currency, - RefundStatus, - Connector, - RefundType, -} - -#[derive( - Clone, - Debug, - Hash, - PartialEq, - Eq, - serde::Serialize, - serde::Deserialize, - strum::Display, - strum::EnumIter, - strum::AsRefStr, -)] -#[strum(serialize_all = "snake_case")] -#[serde(rename_all = "snake_case")] -pub enum RefundMetrics { - RefundSuccessRate, - RefundCount, - RefundSuccessCount, - RefundProcessedAmount, -} - -pub mod metric_behaviour { - pub struct RefundSuccessRate; - pub struct RefundCount; - pub struct RefundSuccessCount; - pub struct RefundProcessedAmount; -} - -impl From<RefundMetrics> for NameDescription { - fn from(value: RefundMetrics) -> Self { - Self { - name: value.to_string(), - desc: String::new(), - } - } -} - -impl From<RefundDimensions> for NameDescription { - fn from(value: RefundDimensions) -> Self { - Self { - name: value.to_string(), - desc: String::new(), - } - } -} - -#[derive(Debug, serde::Serialize, Eq)] -pub struct RefundMetricsBucketIdentifier { - pub currency: Option<Currency>, - pub refund_status: Option<RefundStatus>, - pub connector: Option<String>, - pub refund_type: Option<RefundType>, - #[serde(rename = "time_range")] - pub time_bucket: TimeRange, - #[serde(rename = "time_bucket")] - #[serde(with = "common_utils::custom_serde::iso8601custom")] - pub start_time: time::PrimitiveDateTime, -} - -impl Hash for RefundMetricsBucketIdentifier { - fn hash<H: Hasher>(&self, state: &mut H) { - self.currency.hash(state); - self.refund_status.map(|i| i.to_string()).hash(state); - self.connector.hash(state); - self.refund_type.hash(state); - self.time_bucket.hash(state); - } -} -impl PartialEq for RefundMetricsBucketIdentifier { - fn eq(&self, other: &Self) -> bool { - let mut left = DefaultHasher::new(); - self.hash(&mut left); - let mut right = DefaultHasher::new(); - other.hash(&mut right); - left.finish() == right.finish() - } -} - -impl RefundMetricsBucketIdentifier { - pub fn new( - currency: Option<Currency>, - refund_status: Option<RefundStatus>, - connector: Option<String>, - refund_type: Option<RefundType>, - normalized_time_range: TimeRange, - ) -> Self { - Self { - currency, - refund_status, - connector, - refund_type, - time_bucket: normalized_time_range, - start_time: normalized_time_range.start_time, - } - } -} - -#[derive(Debug, serde::Serialize)] -pub struct RefundMetricsBucketValue { - pub refund_success_rate: Option<f64>, - pub refund_count: Option<u64>, - pub refund_success_count: Option<u64>, - pub refund_processed_amount: Option<u64>, -} - -#[derive(Debug, serde::Serialize)] -pub struct RefundMetricsBucketResponse { - #[serde(flatten)] - pub values: RefundMetricsBucketValue, - #[serde(flatten)] - pub dimensions: RefundMetricsBucketIdentifier, -} diff --git a/crates/api_models/src/lib.rs b/crates/api_models/src/lib.rs index b71645e2d1..dab1b46adb 100644 ---  a/crates/api_models/src/lib.rs +++
b/crates/api_models/src/lib.rs @@ -1,6 +1,5 @@ #![forbid(unsafe_code)] pub mod admin; -pub mod analytics; pub mod api_keys; pub mod bank_accounts; pub mod cards_info; diff --git a/crates/common_utils/src/custom_serde.rs b/crates/common_utils/src/custom_serde.rs index edbfa143a6..d64abe38e5 100644 --- a/crates/common_utils/src/custom_serde.rs +++ b/crates/common_utils/src/custom_serde.rs @@ -170,51 +170,3 @@ pub mod json_string { serde_json::from_str(&j).map_err(de::Error::custom) } } - -/// Use a custom ISO 8601 format when serializing and deserializing -/// [`PrimitiveDateTime`][PrimitiveDateTime]. -/// -/// [PrimitiveDateTime]: ::time::PrimitiveDateTime -pub mod iso8601custom { - - use serde::{ser::Error as _, Deserializer, Serialize, Serializer}; - use time::{ - format_description::well_known::{ - iso8601::{Config, EncodedConfig, TimePrecision}, - Iso8601, - }, - serde::iso8601, - PrimitiveDateTime, UtcOffset, - }; - - const FORMAT_CONFIG: EncodedConfig = Config::DEFAULT - .set_time_precision(TimePrecision::Second { - decimal_digits: None, - }) - .encode(); - - /// Serialize a [`PrimitiveDateTime`] using the well-known ISO 8601 format. - pub fn serialize<S>(date_time: &PrimitiveDateTime, serializer: S) -> Result<S::Ok, S::Error> - where - S: Serializer, - { - date_time - .assume_utc() - .format(&Iso8601::<FORMAT_CONFIG>) - .map_err(S::Error::custom)? - .replace('T', " ") - .replace('Z', "") - .serialize(serializer) - } - - /// Deserialize a [`PrimitiveDateTime`] from its ISO 8601 representation. - pub fn deserialize<'a, D>(deserializer: D) -> Result<PrimitiveDateTime, D::Error> - where - D: Deserializer<'a>, - { - iso8601::deserialize(deserializer).map(|offset_date_time| { - let utc_date_time = offset_date_time.to_offset(UtcOffset::UTC); - PrimitiveDateTime::new(utc_date_time.date(), utc_date_time.time()) - }) - } -} diff --git a/crates/router/Cargo.toml b/crates/router/Cargo.toml index 0349a6b9d4..81b23314ff 100644 --- a/crates/router/Cargo.toml +++ b/crates/router/Cargo.toml @@ -41,7 +41,6 @@ aws-config = { version = "0.55.3", optional = true } aws-sdk-s3 = { version = "0.28.0", optional = true } base64 = "0.21.2" bb8 = "0.8" -bigdecimal = "0.3.1" blake3 = "1.3.3" bytes = "1.4.0" clap = { version = "4.3.2", default-features = false, features = ["std", "derive", "help", "usage"] } @@ -79,7 +78,6 @@ serde_urlencoded = "0.7.1" serde_with = "3.0.0" signal-hook = "0.3.15" strum = { version = "0.24.1", features = ["derive"] } -sqlx = { version = "0.6.3", features = ["postgres", "runtime-actix", "runtime-actix-native-tls", "time", "bigdecimal"] } thiserror = "1.0.40" time = { version = "0.3.21", features = ["serde", "serde-well-known", "std"] } tokio = { version = "1.28.2", features = ["macros", "rt-multi-thread"] } @@ -97,7 +95,6 @@ digest = "0.9" api_models = { version = "0.1.0", path = "../api_models", features = ["errors"] } cards = { version = "0.1.0", path = "../cards" } common_utils = { version = "0.1.0", path = "../common_utils", features = ["signals", "async_ext", "logs"] } -common_enums = { version = "0.1.0", path = "../common_enums"} external_services = { version = "0.1.0", path = "../external_services" } masking = { version = "0.1.0", path = "../masking" } redis_interface = { version = "0.1.0", path = "../redis_interface" } diff --git a/crates/router/src/analytics.rs b/crates/router/src/analytics.rs deleted file mode 100644 index fbb848ea96..0000000000 --- a/crates/router/src/analytics.rs +++ /dev/null @@ -1,123 +0,0 @@ -mod core; -mod errors; -pub mod metrics; -mod payments; -mod query; -mod refunds; -pub mod routes; - -mod sqlx; -mod
types; -mod utils; - -use api_models::analytics::{ - payments::{PaymentDimensions, PaymentFilters, PaymentMetrics, PaymentMetricsBucketIdentifier}, - refunds::{RefundDimensions, RefundFilters, RefundMetrics, RefundMetricsBucketIdentifier}, - Granularity, TimeRange, -}; -use router_env::{instrument, tracing}; - -use self::{ - payments::metrics::{PaymentMetric, PaymentMetricRow}, - refunds::metrics::{RefundMetric, RefundMetricRow}, - sqlx::SqlxClient, -}; -use crate::configs::settings::Database; - -#[derive(Clone, Debug)] -pub enum AnalyticsProvider { - Sqlx(SqlxClient), -} - -impl AnalyticsProvider { - #[instrument(skip_all)] - pub async fn get_payment_metrics( - &self, - metric: &PaymentMetrics, - dimensions: &[PaymentDimensions], - merchant_id: &str, - filters: &PaymentFilters, - granularity: &Option<Granularity>, - time_range: &TimeRange, - ) -> types::MetricsResult<Vec<(PaymentMetricsBucketIdentifier, PaymentMetricRow)>> { - // Metrics to get the fetch time for each payment metric - metrics::request::record_operation_time( - async { - match self { - Self::Sqlx(pool) => { - metric - .load_metrics( - dimensions, - merchant_id, - filters, - granularity, - time_range, - pool, - ) - .await - } - } - }, - &metrics::METRIC_FETCH_TIME, - metric, - self, - ) - .await - } - - pub async fn get_refund_metrics( - &self, - metric: &RefundMetrics, - dimensions: &[RefundDimensions], - merchant_id: &str, - filters: &RefundFilters, - granularity: &Option<Granularity>, - time_range: &TimeRange, - ) -> types::MetricsResult<Vec<(RefundMetricsBucketIdentifier, RefundMetricRow)>> { - match self { - Self::Sqlx(pool) => { - metric - .load_metrics( - dimensions, - merchant_id, - filters, - granularity, - time_range, - pool, - ) - .await - } - } - } - - pub async fn from_conf( - config: &AnalyticsConfig, - #[cfg(feature = "kms")] kms_client: &external_services::kms::KmsClient, - ) -> Self { - match config { - AnalyticsConfig::Sqlx { sqlx } => Self::Sqlx( - SqlxClient::from_conf( - sqlx, - #[cfg(feature = "kms")] - kms_client, - ) - .await, - ), - } - } -} - -#[derive(Clone, Debug, serde::Deserialize)] -#[serde(tag = "source")] -#[serde(rename_all = "lowercase")] -pub enum AnalyticsConfig { - Sqlx { sqlx: Database }, -} - -impl Default for AnalyticsConfig { - fn default() -> Self { - Self::Sqlx { - sqlx: Database::default(), - } - } -} diff --git a/crates/router/src/analytics/core.rs b/crates/router/src/analytics/core.rs deleted file mode 100644 index bf124a6c0e..0000000000 --- a/crates/router/src/analytics/core.rs +++ /dev/null @@ -1,96 +0,0 @@ -use api_models::analytics::{ - payments::PaymentDimensions, refunds::RefundDimensions, FilterValue, GetInfoResponse, - GetPaymentFiltersRequest, GetRefundFilterRequest, PaymentFiltersResponse, RefundFilterValue, - RefundFiltersResponse, -}; -use error_stack::ResultExt; - -use super::{ - errors::{self, AnalyticsError}, - payments::filters::{get_payment_filter_for_dimension, FilterRow}, - refunds::filters::{get_refund_filter_for_dimension, RefundFilterRow}, - types::AnalyticsDomain, - utils, AnalyticsProvider, -}; -use crate::{services::ApplicationResponse, types::domain}; - -pub type AnalyticsApiResponse<T> = errors::AnalyticsResult<ApplicationResponse<T>>; - -pub async fn get_domain_info(domain: AnalyticsDomain) -> AnalyticsApiResponse<GetInfoResponse> { - let info = match domain { - AnalyticsDomain::Payments => GetInfoResponse { - metrics: utils::get_payment_metrics_info(), - download_dimensions: None, - dimensions: utils::get_payment_dimensions(), - }, - AnalyticsDomain::Refunds => GetInfoResponse { - metrics: utils::get_refund_metrics_info(), - download_dimensions: None, - dimensions: utils::get_refund_dimensions(), - }, - }; -
Ok(ApplicationResponse::Json(info)) -} - -pub async fn payment_filters_core( - pool: AnalyticsProvider, - req: GetPaymentFiltersRequest, - merchant: domain::MerchantAccount, -) -> AnalyticsApiResponse<PaymentFiltersResponse> { - let mut res = PaymentFiltersResponse::default(); - - for dim in req.group_by_names { - let values = match pool.clone() { - AnalyticsProvider::Sqlx(pool) => { - get_payment_filter_for_dimension(dim, &merchant.merchant_id, &req.time_range, &pool) - .await - } - } - .change_context(AnalyticsError::UnknownError)? - .into_iter() - .filter_map(|fil: FilterRow| match dim { - PaymentDimensions::Currency => fil.currency.map(|i| i.as_ref().to_string()), - PaymentDimensions::PaymentStatus => fil.status.map(|i| i.as_ref().to_string()), - PaymentDimensions::Connector => fil.connector, - PaymentDimensions::AuthType => fil.authentication_type.map(|i| i.as_ref().to_string()), - PaymentDimensions::PaymentMethod => fil.payment_method, - }) - .collect::<Vec<String>>(); - res.query_data.push(FilterValue { - dimension: dim, - values, - }) - } - - Ok(ApplicationResponse::Json(res)) -} - -pub async fn refund_filter_core( - pool: AnalyticsProvider, - req: GetRefundFilterRequest, - merchant: domain::MerchantAccount, -) -> AnalyticsApiResponse<RefundFiltersResponse> { - let mut res = RefundFiltersResponse::default(); - for dim in req.group_by_names { - let values = match pool.clone() { - AnalyticsProvider::Sqlx(pool) => { - get_refund_filter_for_dimension(dim, &merchant.merchant_id, &req.time_range, &pool) - .await - } - } - .change_context(AnalyticsError::UnknownError)? - .into_iter() - .filter_map(|fil: RefundFilterRow| match dim { - RefundDimensions::Currency => fil.currency.map(|i| i.as_ref().to_string()), - RefundDimensions::RefundStatus => fil.refund_status.map(|i| i.as_ref().to_string()), - RefundDimensions::Connector => fil.connector, - RefundDimensions::RefundType => fil.refund_type.map(|i| i.as_ref().to_string()), - }) - .collect::<Vec<String>>(); - res.query_data.push(RefundFilterValue { - dimension: dim, - values, - }) - } - Ok(ApplicationResponse::Json(res)) -} diff --git a/crates/router/src/analytics/errors.rs b/crates/router/src/analytics/errors.rs deleted file mode 100644 index da0b2f239c..0000000000 --- a/crates/router/src/analytics/errors.rs +++ /dev/null @@ -1,32 +0,0 @@ -use api_models::errors::types::{ApiError, ApiErrorResponse}; -use common_utils::errors::{CustomResult, ErrorSwitch}; - -pub type AnalyticsResult<T> = CustomResult<T, AnalyticsError>; - -#[derive(Debug, Clone, serde::Serialize, thiserror::Error)] -pub enum AnalyticsError { - #[allow(dead_code)] - #[error("Not implemented: {0}")] - NotImplemented(&'static str), - #[error("Unknown Analytics Error")] - UnknownError, -} - -impl ErrorSwitch<ApiErrorResponse> for AnalyticsError { - fn switch(&self) -> ApiErrorResponse { - match self { - Self::NotImplemented(feature) => ApiErrorResponse::NotImplemented(ApiError::new( - "IR", - 0, - format!("{feature} is not implemented."), - None, - )), - Self::UnknownError => ApiErrorResponse::InternalServerError(ApiError::new( - "HE", - 0, - "Something went wrong", - None, - )), - } - } -} diff --git a/crates/router/src/analytics/metrics.rs b/crates/router/src/analytics/metrics.rs deleted file mode 100644 index 6222315a8c..0000000000 --- a/crates/router/src/analytics/metrics.rs +++ /dev/null @@ -1,9 +0,0 @@ -use router_env::{global_meter, histogram_metric, histogram_metric_u64, metrics_context}; - -metrics_context!(CONTEXT); -global_meter!(GLOBAL_METER, "ROUTER_API"); - -histogram_metric!(METRIC_FETCH_TIME, GLOBAL_METER); -histogram_metric_u64!(BUCKETS_FETCHED, GLOBAL_METER); - -pub
mod request; diff --git a/crates/router/src/analytics/metrics/request.rs b/crates/router/src/analytics/metrics/request.rs deleted file mode 100644 index b7c202f2db..0000000000 --- a/crates/router/src/analytics/metrics/request.rs +++ /dev/null @@ -1,60 +0,0 @@ -pub fn add_attributes>( - key: &'static str, - value: T, -) -> router_env::opentelemetry::KeyValue { - router_env::opentelemetry::KeyValue::new(key, value) -} - -#[inline] -pub async fn record_operation_time( - future: F, - metric: &once_cell::sync::Lazy>, - metric_name: &api_models::analytics::payments::PaymentMetrics, - source: &crate::analytics::AnalyticsProvider, -) -> R -where - F: futures::Future, -{ - let (result, time) = time_future(future).await; - let attributes = &[ - add_attributes("metric_name", metric_name.to_string()), - add_attributes( - "source", - match source { - crate::analytics::AnalyticsProvider::Sqlx(_) => "Sqlx", - }, - ), - ]; - let value = time.as_secs_f64(); - metric.record(&super::CONTEXT, value, attributes); - - router_env::logger::debug!("Attributes: {:?}, Time: {}", attributes, value); - result -} - -use std::time; - -#[inline] -pub async fn time_future(future: F) -> (R, time::Duration) -where - F: futures::Future, -{ - let start = time::Instant::now(); - let result = future.await; - let time_spent = start.elapsed(); - (result, time_spent) -} - -#[macro_export] -macro_rules! histogram_metric { - ($name:ident, $meter:ident) => { - pub(crate) static $name: once_cell::sync::Lazy< - $crate::opentelemetry::metrics::Histogram, - > = once_cell::sync::Lazy::new(|| $meter.u64_histogram(stringify!($name)).init()); - }; - ($name:ident, $meter:ident, $description:literal) => { - pub(crate) static $name: once_cell::sync::Lazy< - $crate::opentelemetry::metrics::Histogram, - > = once_cell::sync::Lazy::new(|| $meter.u64_histogram($description).init()); - }; -} diff --git a/crates/router/src/analytics/payments.rs b/crates/router/src/analytics/payments.rs deleted file mode 100644 index 527bf75a3c..0000000000 --- a/crates/router/src/analytics/payments.rs +++ /dev/null @@ -1,13 +0,0 @@ -pub mod accumulator; -mod core; -pub mod filters; -pub mod metrics; -pub mod types; -pub use accumulator::{PaymentMetricAccumulator, PaymentMetricsAccumulator}; - -pub trait PaymentAnalytics: - metrics::PaymentMetricAnalytics + filters::PaymentFilterAnalytics -{ -} - -pub use self::core::get_metrics; diff --git a/crates/router/src/analytics/payments/accumulator.rs b/crates/router/src/analytics/payments/accumulator.rs deleted file mode 100644 index 5eebd09746..0000000000 --- a/crates/router/src/analytics/payments/accumulator.rs +++ /dev/null @@ -1,150 +0,0 @@ -use api_models::analytics::payments::PaymentMetricsBucketValue; -use common_enums::enums as storage_enums; -use router_env::logger; - -use super::metrics::PaymentMetricRow; - -#[derive(Debug, Default)] -pub struct PaymentMetricsAccumulator { - pub payment_success_rate: SuccessRateAccumulator, - pub payment_count: CountAccumulator, - pub payment_success: CountAccumulator, - pub processed_amount: SumAccumulator, - pub avg_ticket_size: AverageAccumulator, -} - -#[derive(Debug, Default)] -pub struct SuccessRateAccumulator { - pub success: i64, - pub total: i64, -} - -#[derive(Debug, Default)] -#[repr(transparent)] -pub struct CountAccumulator { - pub count: Option, -} - -#[derive(Debug, Default)] -#[repr(transparent)] -pub struct SumAccumulator { - pub total: Option, -} - -#[derive(Debug, Default)] -pub struct AverageAccumulator { - pub total: u32, - pub count: u32, -} - -pub trait 
PaymentMetricAccumulator { - type MetricOutput; - - fn add_metrics_bucket(&mut self, metrics: &PaymentMetricRow); - - fn collect(self) -> Self::MetricOutput; -} - -impl PaymentMetricAccumulator for SuccessRateAccumulator { - type MetricOutput = Option; - - fn add_metrics_bucket(&mut self, metrics: &PaymentMetricRow) { - if let Some(ref status) = metrics.status { - if status.as_ref() == &storage_enums::AttemptStatus::Charged { - self.success += metrics.count.unwrap_or_default(); - } - }; - self.total += metrics.count.unwrap_or_default(); - } - - fn collect(self) -> Self::MetricOutput { - if self.total <= 0 { - None - } else { - Some( - f64::from(u32::try_from(self.success).ok()?) * 100.0 - / f64::from(u32::try_from(self.total).ok()?), - ) - } - } -} - -impl PaymentMetricAccumulator for CountAccumulator { - type MetricOutput = Option; - #[inline] - fn add_metrics_bucket(&mut self, metrics: &PaymentMetricRow) { - self.count = match (self.count, metrics.count) { - (None, None) => None, - (None, i @ Some(_)) | (i @ Some(_), None) => i, - (Some(a), Some(b)) => Some(a + b), - } - } - #[inline] - fn collect(self) -> Self::MetricOutput { - self.count.and_then(|i| u64::try_from(i).ok()) - } -} - -impl PaymentMetricAccumulator for SumAccumulator { - type MetricOutput = Option; - #[inline] - fn add_metrics_bucket(&mut self, metrics: &PaymentMetricRow) { - self.total = match ( - self.total, - metrics - .total - .as_ref() - .and_then(bigdecimal::ToPrimitive::to_i64), - ) { - (None, None) => None, - (None, i @ Some(_)) | (i @ Some(_), None) => i, - (Some(a), Some(b)) => Some(a + b), - } - } - #[inline] - fn collect(self) -> Self::MetricOutput { - u64::try_from(self.total.unwrap_or(0)).ok() - } -} - -impl PaymentMetricAccumulator for AverageAccumulator { - type MetricOutput = Option; - - fn add_metrics_bucket(&mut self, metrics: &PaymentMetricRow) { - let total = metrics - .total - .as_ref() - .and_then(bigdecimal::ToPrimitive::to_u32); - let count = metrics.count.and_then(|total| u32::try_from(total).ok()); - - match (total, count) { - (Some(total), Some(count)) => { - self.total += total; - self.count += count; - } - _ => { - logger::error!(message="Dropping metrics for average accumulator", metric=?metrics); - } - } - } - - fn collect(self) -> Self::MetricOutput { - if self.count == 0 { - None - } else { - Some(f64::from(self.total) / f64::from(self.count)) - } - } -} - -impl PaymentMetricsAccumulator { - pub fn collect(self) -> PaymentMetricsBucketValue { - PaymentMetricsBucketValue { - payment_success_rate: self.payment_success_rate.collect(), - payment_count: self.payment_count.collect(), - payment_success_count: self.payment_success.collect(), - payment_processed_amount: self.processed_amount.collect(), - avg_ticket_size: self.avg_ticket_size.collect(), - } - } -} diff --git a/crates/router/src/analytics/payments/core.rs b/crates/router/src/analytics/payments/core.rs deleted file mode 100644 index 23eca8879a..0000000000 --- a/crates/router/src/analytics/payments/core.rs +++ /dev/null @@ -1,129 +0,0 @@ -use std::collections::HashMap; - -use api_models::analytics::{ - payments::{MetricsBucketResponse, PaymentMetrics, PaymentMetricsBucketIdentifier}, - AnalyticsMetadata, GetPaymentMetricRequest, MetricsResponse, -}; -use error_stack::{IntoReport, ResultExt}; -use router_env::{ - instrument, logger, - tracing::{self, Instrument}, -}; - -use super::PaymentMetricsAccumulator; -use crate::{ - analytics::{ - core::AnalyticsApiResponse, errors::AnalyticsError, metrics, - payments::PaymentMetricAccumulator, 
AnalyticsProvider, - }, - services::ApplicationResponse, - types::domain, -}; - -#[instrument(skip_all)] -pub async fn get_metrics( - pool: AnalyticsProvider, - merchant_account: domain::MerchantAccount, - req: GetPaymentMetricRequest, -) -> AnalyticsApiResponse> { - let mut metrics_accumulator: HashMap< - PaymentMetricsBucketIdentifier, - PaymentMetricsAccumulator, - > = HashMap::new(); - - let mut set = tokio::task::JoinSet::new(); - for metric_type in req.metrics.iter().cloned() { - let req = req.clone(); - let merchant_id = merchant_account.merchant_id.clone(); - let pool = pool.clone(); - let task_span = tracing::debug_span!( - "analytics_payments_query", - payment_metric = metric_type.as_ref() - ); - set.spawn( - async move { - let data = pool - .get_payment_metrics( - &metric_type, - &req.group_by_names.clone(), - &merchant_id, - &req.filters, - &req.time_series.map(|t| t.granularity), - &req.time_range, - ) - .await - .change_context(AnalyticsError::UnknownError); - (metric_type, data) - } - .instrument(task_span), - ); - } - - while let Some((metric, data)) = set - .join_next() - .await - .transpose() - .into_report() - .change_context(AnalyticsError::UnknownError)? - { - let data = data?; - let attributes = &[ - metrics::request::add_attributes("metric_type", metric.to_string()), - metrics::request::add_attributes( - "source", - match pool { - crate::analytics::AnalyticsProvider::Sqlx(_) => "Sqlx", - }, - ), - ]; - - let value = u64::try_from(data.len()); - if let Ok(val) = value { - metrics::BUCKETS_FETCHED.record(&metrics::CONTEXT, val, attributes); - logger::debug!("Attributes: {:?}, Buckets fetched: {}", attributes, val); - } - - for (id, value) in data { - logger::debug!(bucket_id=?id, bucket_value=?value, "Bucket row for metric {metric}"); - let metrics_builder = metrics_accumulator.entry(id).or_default(); - match metric { - PaymentMetrics::PaymentSuccessRate => metrics_builder - .payment_success_rate - .add_metrics_bucket(&value), - PaymentMetrics::PaymentCount => { - metrics_builder.payment_count.add_metrics_bucket(&value) - } - PaymentMetrics::PaymentSuccessCount => { - metrics_builder.payment_success.add_metrics_bucket(&value) - } - PaymentMetrics::PaymentProcessedAmount => { - metrics_builder.processed_amount.add_metrics_bucket(&value) - } - PaymentMetrics::AvgTicketSize => { - metrics_builder.avg_ticket_size.add_metrics_bucket(&value) - } - } - } - - logger::debug!( - "Analytics Accumulated Results: metric: {}, results: {:#?}", - metric, - metrics_accumulator - ); - } - - let query_data: Vec = metrics_accumulator - .into_iter() - .map(|(id, val)| MetricsBucketResponse { - values: val.collect(), - dimensions: id, - }) - .collect(); - - Ok(ApplicationResponse::Json(MetricsResponse { - query_data, - meta_data: [AnalyticsMetadata { - current_time_range: req.time_range, - }], - })) -} diff --git a/crates/router/src/analytics/payments/filters.rs b/crates/router/src/analytics/payments/filters.rs deleted file mode 100644 index f009aaa763..0000000000 --- a/crates/router/src/analytics/payments/filters.rs +++ /dev/null @@ -1,58 +0,0 @@ -use api_models::analytics::{payments::PaymentDimensions, Granularity, TimeRange}; -use common_enums::enums::{AttemptStatus, AuthenticationType, Currency}; -use common_utils::errors::ReportSwitchExt; -use error_stack::ResultExt; -use time::PrimitiveDateTime; - -use crate::analytics::{ - query::{Aggregate, GroupByClause, QueryBuilder, QueryFilter, ToSql}, - types::{ - AnalyticsCollection, AnalyticsDataSource, DBEnumWrapper, FiltersError, 
FiltersResult, - LoadRow, - }, -}; - -pub trait PaymentFilterAnalytics: LoadRow {} - -pub async fn get_payment_filter_for_dimension( - dimension: PaymentDimensions, - merchant: &String, - time_range: &TimeRange, - pool: &T, -) -> FiltersResult> -where - T: AnalyticsDataSource + PaymentFilterAnalytics, - PrimitiveDateTime: ToSql, - AnalyticsCollection: ToSql, - Granularity: GroupByClause, - Aggregate<&'static str>: ToSql, -{ - let mut query_builder: QueryBuilder = QueryBuilder::new(AnalyticsCollection::Payment); - - query_builder.add_select_column(dimension).switch()?; - time_range - .set_filter_clause(&mut query_builder) - .attach_printable("Error filtering time range") - .switch()?; - - query_builder - .add_filter_clause("merchant_id", merchant) - .switch()?; - - query_builder.set_distinct(); - - query_builder - .execute_query::(pool) - .await - .change_context(FiltersError::QueryBuildingError)? - .change_context(FiltersError::QueryExecutionFailure) -} - -#[derive(Debug, serde::Serialize, Eq, PartialEq)] -pub struct FilterRow { - pub currency: Option>, - pub status: Option>, - pub connector: Option, - pub authentication_type: Option>, - pub payment_method: Option, -} diff --git a/crates/router/src/analytics/payments/metrics.rs b/crates/router/src/analytics/payments/metrics.rs deleted file mode 100644 index f492e5bd4d..0000000000 --- a/crates/router/src/analytics/payments/metrics.rs +++ /dev/null @@ -1,137 +0,0 @@ -use api_models::analytics::{ - payments::{PaymentDimensions, PaymentFilters, PaymentMetrics, PaymentMetricsBucketIdentifier}, - Granularity, TimeRange, -}; -use common_enums::enums as storage_enums; -use time::PrimitiveDateTime; - -use crate::analytics::{ - query::{Aggregate, GroupByClause, ToSql}, - types::{AnalyticsCollection, AnalyticsDataSource, DBEnumWrapper, LoadRow, MetricsResult}, -}; - -mod avg_ticket_size; -mod payment_count; -mod payment_processed_amount; -mod payment_success_count; -mod success_rate; - -use avg_ticket_size::AvgTicketSize; -use payment_count::PaymentCount; -use payment_processed_amount::PaymentProcessedAmount; -use payment_success_count::PaymentSuccessCount; -use success_rate::PaymentSuccessRate; - -#[derive(Debug, PartialEq, Eq)] -pub struct PaymentMetricRow { - pub currency: Option>, - pub status: Option>, - pub connector: Option, - pub authentication_type: Option>, - pub payment_method: Option, - pub total: Option, - pub count: Option, - pub start_bucket: Option, - pub end_bucket: Option, -} - -pub trait PaymentMetricAnalytics: LoadRow {} - -#[async_trait::async_trait] -pub trait PaymentMetric -where - T: AnalyticsDataSource + PaymentMetricAnalytics, -{ - async fn load_metrics( - &self, - dimensions: &[PaymentDimensions], - merchant_id: &str, - filters: &PaymentFilters, - granularity: &Option, - time_range: &TimeRange, - pool: &T, - ) -> MetricsResult>; -} - -#[async_trait::async_trait] -impl PaymentMetric for PaymentMetrics -where - T: AnalyticsDataSource + PaymentMetricAnalytics, - PrimitiveDateTime: ToSql, - AnalyticsCollection: ToSql, - Granularity: GroupByClause, - Aggregate<&'static str>: ToSql, -{ - async fn load_metrics( - &self, - dimensions: &[PaymentDimensions], - merchant_id: &str, - filters: &PaymentFilters, - granularity: &Option, - time_range: &TimeRange, - pool: &T, - ) -> MetricsResult> { - match self { - Self::PaymentSuccessRate => { - PaymentSuccessRate - .load_metrics( - dimensions, - merchant_id, - filters, - granularity, - time_range, - pool, - ) - .await - } - Self::PaymentCount => { - PaymentCount - .load_metrics( - 
dimensions, - merchant_id, - filters, - granularity, - time_range, - pool, - ) - .await - } - Self::PaymentSuccessCount => { - PaymentSuccessCount - .load_metrics( - dimensions, - merchant_id, - filters, - granularity, - time_range, - pool, - ) - .await - } - Self::PaymentProcessedAmount => { - PaymentProcessedAmount - .load_metrics( - dimensions, - merchant_id, - filters, - granularity, - time_range, - pool, - ) - .await - } - Self::AvgTicketSize => { - AvgTicketSize - .load_metrics( - dimensions, - merchant_id, - filters, - granularity, - time_range, - pool, - ) - .await - } - } - } -} diff --git a/crates/router/src/analytics/payments/metrics/avg_ticket_size.rs b/crates/router/src/analytics/payments/metrics/avg_ticket_size.rs deleted file mode 100644 index 2230d870e6..0000000000 --- a/crates/router/src/analytics/payments/metrics/avg_ticket_size.rs +++ /dev/null @@ -1,126 +0,0 @@ -use api_models::analytics::{ - payments::{PaymentDimensions, PaymentFilters, PaymentMetricsBucketIdentifier}, - Granularity, TimeRange, -}; -use common_utils::errors::ReportSwitchExt; -use error_stack::ResultExt; -use time::PrimitiveDateTime; - -use super::{PaymentMetric, PaymentMetricRow}; -use crate::analytics::{ - query::{Aggregate, GroupByClause, QueryBuilder, QueryFilter, SeriesBucket, ToSql}, - types::{AnalyticsCollection, AnalyticsDataSource, MetricsError, MetricsResult}, -}; - -#[derive(Default)] -pub(super) struct AvgTicketSize; - -#[async_trait::async_trait] -impl PaymentMetric for AvgTicketSize -where - T: AnalyticsDataSource + super::PaymentMetricAnalytics, - PrimitiveDateTime: ToSql, - AnalyticsCollection: ToSql, - Granularity: GroupByClause, - Aggregate<&'static str>: ToSql, -{ - async fn load_metrics( - &self, - dimensions: &[PaymentDimensions], - merchant_id: &str, - filters: &PaymentFilters, - granularity: &Option, - time_range: &TimeRange, - pool: &T, - ) -> MetricsResult> { - let mut query_builder: QueryBuilder = QueryBuilder::new(AnalyticsCollection::Payment); - - for dim in dimensions.iter() { - query_builder.add_select_column(dim).switch()?; - } - - query_builder - .add_select_column(Aggregate::Sum { - field: "amount", - alias: Some("total"), - }) - .switch()?; - query_builder - .add_select_column(Aggregate::Count { - field: None, - alias: Some("count"), - }) - .switch()?; - query_builder - .add_select_column(Aggregate::Min { - field: "created_at", - alias: Some("start_bucket"), - }) - .switch()?; - query_builder - .add_select_column(Aggregate::Max { - field: "created_at", - alias: Some("end_bucket"), - }) - .switch()?; - - filters.set_filter_clause(&mut query_builder).switch()?; - - query_builder - .add_filter_clause("merchant_id", merchant_id) - .switch()?; - - time_range - .set_filter_clause(&mut query_builder) - .attach_printable("Error filtering time range") - .switch()?; - - for dim in dimensions.iter() { - query_builder - .add_group_by_clause(dim) - .attach_printable("Error grouping by dimensions") - .switch()?; - } - - if let Some(granularity) = granularity.as_ref() { - granularity - .set_group_by_clause(&mut query_builder) - .attach_printable("Error adding granularity") - .switch()?; - } - - query_builder - .execute_query::(pool) - .await - .change_context(MetricsError::QueryBuildingError)? - .change_context(MetricsError::QueryExecutionFailure)? 
- .into_iter() - .map(|i| { - Ok(( - PaymentMetricsBucketIdentifier::new( - i.currency.as_ref().map(|i| i.0), - i.status.as_ref().map(|i| i.0), - i.connector.clone(), - i.authentication_type.as_ref().map(|i| i.0), - i.payment_method.clone(), - TimeRange { - start_time: match (granularity, i.start_bucket) { - (Some(g), Some(st)) => g.clip_to_start(st)?, - _ => time_range.start_time, - }, - end_time: granularity.as_ref().map_or_else( - || Ok(time_range.end_time), - |g| i.end_bucket.map(|et| g.clip_to_end(et)).transpose(), - )?, - }, - ), - i, - )) - }) - .collect::, - crate::analytics::query::PostProcessingError, - >>() - .change_context(MetricsError::PostProcessingFailure) - } -} diff --git a/crates/router/src/analytics/payments/metrics/payment_count.rs b/crates/router/src/analytics/payments/metrics/payment_count.rs deleted file mode 100644 index 661cec3dac..0000000000 --- a/crates/router/src/analytics/payments/metrics/payment_count.rs +++ /dev/null @@ -1,117 +0,0 @@ -use api_models::analytics::{ - payments::{PaymentDimensions, PaymentFilters, PaymentMetricsBucketIdentifier}, - Granularity, TimeRange, -}; -use common_utils::errors::ReportSwitchExt; -use error_stack::ResultExt; -use time::PrimitiveDateTime; - -use super::PaymentMetricRow; -use crate::analytics::{ - query::{Aggregate, GroupByClause, QueryBuilder, QueryFilter, SeriesBucket, ToSql}, - types::{AnalyticsCollection, AnalyticsDataSource, MetricsError, MetricsResult}, -}; - -#[derive(Default)] -pub(super) struct PaymentCount; - -#[async_trait::async_trait] -impl super::PaymentMetric for PaymentCount -where - T: AnalyticsDataSource + super::PaymentMetricAnalytics, - PrimitiveDateTime: ToSql, - AnalyticsCollection: ToSql, - Granularity: GroupByClause, - Aggregate<&'static str>: ToSql, -{ - async fn load_metrics( - &self, - dimensions: &[PaymentDimensions], - merchant_id: &str, - filters: &PaymentFilters, - granularity: &Option, - time_range: &TimeRange, - pool: &T, - ) -> MetricsResult> { - let mut query_builder: QueryBuilder = QueryBuilder::new(AnalyticsCollection::Payment); - - for dim in dimensions.iter() { - query_builder.add_select_column(dim).switch()?; - } - - query_builder - .add_select_column(Aggregate::Count { - field: None, - alias: Some("count"), - }) - .switch()?; - query_builder - .add_select_column(Aggregate::Min { - field: "created_at", - alias: Some("start_bucket"), - }) - .switch()?; - query_builder - .add_select_column(Aggregate::Max { - field: "created_at", - alias: Some("end_bucket"), - }) - .switch()?; - - filters.set_filter_clause(&mut query_builder).switch()?; - - query_builder - .add_filter_clause("merchant_id", merchant_id) - .switch()?; - - time_range - .set_filter_clause(&mut query_builder) - .attach_printable("Error filtering time range") - .switch()?; - - for dim in dimensions.iter() { - query_builder - .add_group_by_clause(dim) - .attach_printable("Error grouping by dimensions") - .switch()?; - } - - if let Some(granularity) = granularity.as_ref() { - granularity - .set_group_by_clause(&mut query_builder) - .attach_printable("Error adding granularity") - .switch()?; - } - - query_builder - .execute_query::(pool) - .await - .change_context(MetricsError::QueryBuildingError)? - .change_context(MetricsError::QueryExecutionFailure)? 
- .into_iter() - .map(|i| { - Ok(( - PaymentMetricsBucketIdentifier::new( - i.currency.as_ref().map(|i| i.0), - i.status.as_ref().map(|i| i.0), - i.connector.clone(), - i.authentication_type.as_ref().map(|i| i.0), - i.payment_method.clone(), - TimeRange { - start_time: match (granularity, i.start_bucket) { - (Some(g), Some(st)) => g.clip_to_start(st)?, - _ => time_range.start_time, - }, - end_time: granularity.as_ref().map_or_else( - || Ok(time_range.end_time), - |g| i.end_bucket.map(|et| g.clip_to_end(et)).transpose(), - )?, - }, - ), - i, - )) - }) - .collect::, crate::analytics::query::PostProcessingError>>() - .change_context(MetricsError::PostProcessingFailure) - } -} diff --git a/crates/router/src/analytics/payments/metrics/payment_processed_amount.rs b/crates/router/src/analytics/payments/metrics/payment_processed_amount.rs deleted file mode 100644 index 2ec0c6f18f..0000000000 --- a/crates/router/src/analytics/payments/metrics/payment_processed_amount.rs +++ /dev/null @@ -1,128 +0,0 @@ -use api_models::analytics::{ - payments::{PaymentDimensions, PaymentFilters, PaymentMetricsBucketIdentifier}, - Granularity, TimeRange, -}; -use common_enums::enums as storage_enums; -use common_utils::errors::ReportSwitchExt; -use error_stack::ResultExt; -use time::PrimitiveDateTime; - -use super::PaymentMetricRow; -use crate::analytics::{ - query::{Aggregate, GroupByClause, QueryBuilder, QueryFilter, SeriesBucket, ToSql}, - types::{AnalyticsCollection, AnalyticsDataSource, MetricsError, MetricsResult}, -}; - -#[derive(Default)] -pub(super) struct PaymentProcessedAmount; - -#[async_trait::async_trait] -impl super::PaymentMetric for PaymentProcessedAmount -where - T: AnalyticsDataSource + super::PaymentMetricAnalytics, - PrimitiveDateTime: ToSql, - AnalyticsCollection: ToSql, - Granularity: GroupByClause, - Aggregate<&'static str>: ToSql, -{ - async fn load_metrics( - &self, - dimensions: &[PaymentDimensions], - merchant_id: &str, - filters: &PaymentFilters, - granularity: &Option, - time_range: &TimeRange, - pool: &T, - ) -> MetricsResult> { - let mut query_builder: QueryBuilder = QueryBuilder::new(AnalyticsCollection::Payment); - - for dim in dimensions.iter() { - query_builder.add_select_column(dim).switch()?; - } - - query_builder - .add_select_column(Aggregate::Sum { - field: "amount", - alias: Some("total"), - }) - .switch()?; - query_builder - .add_select_column(Aggregate::Min { - field: "created_at", - alias: Some("start_bucket"), - }) - .switch()?; - query_builder - .add_select_column(Aggregate::Max { - field: "created_at", - alias: Some("end_bucket"), - }) - .switch()?; - - filters.set_filter_clause(&mut query_builder).switch()?; - - query_builder - .add_filter_clause("merchant_id", merchant_id) - .switch()?; - - time_range - .set_filter_clause(&mut query_builder) - .attach_printable("Error filtering time range") - .switch()?; - - for dim in dimensions.iter() { - query_builder - .add_group_by_clause(dim) - .attach_printable("Error grouping by dimensions") - .switch()?; - } - - if let Some(granularity) = granularity.as_ref() { - granularity - .set_group_by_clause(&mut query_builder) - .attach_printable("Error adding granularity") - .switch()?; - } - - query_builder - .add_filter_clause( - PaymentDimensions::PaymentStatus, - storage_enums::AttemptStatus::Charged, - ) - .switch()?; - - query_builder - .execute_query::(pool) - .await - .change_context(MetricsError::QueryBuildingError)? - .change_context(MetricsError::QueryExecutionFailure)? 
- .into_iter() - .map(|i| { - Ok(( - PaymentMetricsBucketIdentifier::new( - i.currency.as_ref().map(|i| i.0), - None, - i.connector.clone(), - i.authentication_type.as_ref().map(|i| i.0), - i.payment_method.clone(), - TimeRange { - start_time: match (granularity, i.start_bucket) { - (Some(g), Some(st)) => g.clip_to_start(st)?, - _ => time_range.start_time, - }, - end_time: granularity.as_ref().map_or_else( - || Ok(time_range.end_time), - |g| i.end_bucket.map(|et| g.clip_to_end(et)).transpose(), - )?, - }, - ), - i, - )) - }) - .collect::, - crate::analytics::query::PostProcessingError, - >>() - .change_context(MetricsError::PostProcessingFailure) - } -} diff --git a/crates/router/src/analytics/payments/metrics/payment_success_count.rs b/crates/router/src/analytics/payments/metrics/payment_success_count.rs deleted file mode 100644 index 8245fe7aeb..0000000000 --- a/crates/router/src/analytics/payments/metrics/payment_success_count.rs +++ /dev/null @@ -1,127 +0,0 @@ -use api_models::analytics::{ - payments::{PaymentDimensions, PaymentFilters, PaymentMetricsBucketIdentifier}, - Granularity, TimeRange, -}; -use common_enums::enums as storage_enums; -use common_utils::errors::ReportSwitchExt; -use error_stack::ResultExt; -use time::PrimitiveDateTime; - -use super::PaymentMetricRow; -use crate::analytics::{ - query::{Aggregate, GroupByClause, QueryBuilder, QueryFilter, SeriesBucket, ToSql}, - types::{AnalyticsCollection, AnalyticsDataSource, MetricsError, MetricsResult}, -}; - -#[derive(Default)] -pub(super) struct PaymentSuccessCount; - -#[async_trait::async_trait] -impl super::PaymentMetric for PaymentSuccessCount -where - T: AnalyticsDataSource + super::PaymentMetricAnalytics, - PrimitiveDateTime: ToSql, - AnalyticsCollection: ToSql, - Granularity: GroupByClause, - Aggregate<&'static str>: ToSql, -{ - async fn load_metrics( - &self, - dimensions: &[PaymentDimensions], - merchant_id: &str, - filters: &PaymentFilters, - granularity: &Option, - time_range: &TimeRange, - pool: &T, - ) -> MetricsResult> { - let mut query_builder: QueryBuilder = QueryBuilder::new(AnalyticsCollection::Payment); - - for dim in dimensions.iter() { - query_builder.add_select_column(dim).switch()?; - } - - query_builder - .add_select_column(Aggregate::Count { - field: None, - alias: Some("count"), - }) - .switch()?; - query_builder - .add_select_column(Aggregate::Min { - field: "created_at", - alias: Some("start_bucket"), - }) - .switch()?; - query_builder - .add_select_column(Aggregate::Max { - field: "created_at", - alias: Some("end_bucket"), - }) - .switch()?; - - filters.set_filter_clause(&mut query_builder).switch()?; - - query_builder - .add_filter_clause("merchant_id", merchant_id) - .switch()?; - - time_range - .set_filter_clause(&mut query_builder) - .attach_printable("Error filtering time range") - .switch()?; - - for dim in dimensions.iter() { - query_builder - .add_group_by_clause(dim) - .attach_printable("Error grouping by dimensions") - .switch()?; - } - - if let Some(granularity) = granularity.as_ref() { - granularity - .set_group_by_clause(&mut query_builder) - .attach_printable("Error adding granularity") - .switch()?; - } - - query_builder - .add_filter_clause( - PaymentDimensions::PaymentStatus, - storage_enums::AttemptStatus::Charged, - ) - .switch()?; - query_builder - .execute_query::(pool) - .await - .change_context(MetricsError::QueryBuildingError)? - .change_context(MetricsError::QueryExecutionFailure)? 
- .into_iter() - .map(|i| { - Ok(( - PaymentMetricsBucketIdentifier::new( - i.currency.as_ref().map(|i| i.0), - None, - i.connector.clone(), - i.authentication_type.as_ref().map(|i| i.0), - i.payment_method.clone(), - TimeRange { - start_time: match (granularity, i.start_bucket) { - (Some(g), Some(st)) => g.clip_to_start(st)?, - _ => time_range.start_time, - }, - end_time: granularity.as_ref().map_or_else( - || Ok(time_range.end_time), - |g| i.end_bucket.map(|et| g.clip_to_end(et)).transpose(), - )?, - }, - ), - i, - )) - }) - .collect::, - crate::analytics::query::PostProcessingError, - >>() - .change_context(MetricsError::PostProcessingFailure) - } -} diff --git a/crates/router/src/analytics/payments/metrics/success_rate.rs b/crates/router/src/analytics/payments/metrics/success_rate.rs deleted file mode 100644 index c63956d4b1..0000000000 --- a/crates/router/src/analytics/payments/metrics/success_rate.rs +++ /dev/null @@ -1,123 +0,0 @@ -use api_models::analytics::{ - payments::{PaymentDimensions, PaymentFilters, PaymentMetricsBucketIdentifier}, - Granularity, TimeRange, -}; -use common_utils::errors::ReportSwitchExt; -use error_stack::ResultExt; -use time::PrimitiveDateTime; - -use super::PaymentMetricRow; -use crate::analytics::{ - query::{Aggregate, GroupByClause, QueryBuilder, QueryFilter, SeriesBucket, ToSql}, - types::{AnalyticsCollection, AnalyticsDataSource, MetricsError, MetricsResult}, -}; - -#[derive(Default)] -pub(super) struct PaymentSuccessRate; - -#[async_trait::async_trait] -impl super::PaymentMetric for PaymentSuccessRate -where - T: AnalyticsDataSource + super::PaymentMetricAnalytics, - PrimitiveDateTime: ToSql, - AnalyticsCollection: ToSql, - Granularity: GroupByClause, - Aggregate<&'static str>: ToSql, -{ - async fn load_metrics( - &self, - dimensions: &[PaymentDimensions], - merchant_id: &str, - filters: &PaymentFilters, - granularity: &Option, - time_range: &TimeRange, - pool: &T, - ) -> MetricsResult> { - let mut query_builder: QueryBuilder = QueryBuilder::new(AnalyticsCollection::Payment); - let mut dimensions = dimensions.to_vec(); - - dimensions.push(PaymentDimensions::PaymentStatus); - - for dim in dimensions.iter() { - query_builder.add_select_column(dim).switch()?; - } - - query_builder - .add_select_column(Aggregate::Count { - field: None, - alias: Some("count"), - }) - .switch()?; - query_builder - .add_select_column(Aggregate::Min { - field: "created_at", - alias: Some("start_bucket"), - }) - .switch()?; - query_builder - .add_select_column(Aggregate::Max { - field: "created_at", - alias: Some("end_bucket"), - }) - .switch()?; - - filters.set_filter_clause(&mut query_builder).switch()?; - - query_builder - .add_filter_clause("merchant_id", merchant_id) - .switch()?; - - time_range - .set_filter_clause(&mut query_builder) - .attach_printable("Error filtering time range") - .switch()?; - - for dim in dimensions.iter() { - query_builder - .add_group_by_clause(dim) - .attach_printable("Error grouping by dimensions") - .switch()?; - } - - if let Some(granularity) = granularity.as_ref() { - granularity - .set_group_by_clause(&mut query_builder) - .attach_printable("Error adding granularity") - .switch()?; - } - - query_builder - .execute_query::(pool) - .await - .change_context(MetricsError::QueryBuildingError)? - .change_context(MetricsError::QueryExecutionFailure)? 
- .into_iter() - .map(|i| { - Ok(( - PaymentMetricsBucketIdentifier::new( - i.currency.as_ref().map(|i| i.0), - None, - i.connector.clone(), - i.authentication_type.as_ref().map(|i| i.0), - i.payment_method.clone(), - TimeRange { - start_time: match (granularity, i.start_bucket) { - (Some(g), Some(st)) => g.clip_to_start(st)?, - _ => time_range.start_time, - }, - end_time: granularity.as_ref().map_or_else( - || Ok(time_range.end_time), - |g| i.end_bucket.map(|et| g.clip_to_end(et)).transpose(), - )?, - }, - ), - i, - )) - }) - .collect::, - crate::analytics::query::PostProcessingError, - >>() - .change_context(MetricsError::PostProcessingFailure) - } -} diff --git a/crates/router/src/analytics/payments/types.rs b/crates/router/src/analytics/payments/types.rs deleted file mode 100644 index fdfbedef38..0000000000 --- a/crates/router/src/analytics/payments/types.rs +++ /dev/null @@ -1,46 +0,0 @@ -use api_models::analytics::payments::{PaymentDimensions, PaymentFilters}; -use error_stack::ResultExt; - -use crate::analytics::{ - query::{QueryBuilder, QueryFilter, QueryResult, ToSql}, - types::{AnalyticsCollection, AnalyticsDataSource}, -}; - -impl QueryFilter for PaymentFilters -where - T: AnalyticsDataSource, - AnalyticsCollection: ToSql, -{ - fn set_filter_clause(&self, builder: &mut QueryBuilder) -> QueryResult<()> { - if !self.currency.is_empty() { - builder - .add_filter_in_range_clause(PaymentDimensions::Currency, &self.currency) - .attach_printable("Error adding currency filter")?; - } - - if !self.status.is_empty() { - builder - .add_filter_in_range_clause(PaymentDimensions::PaymentStatus, &self.status) - .attach_printable("Error adding payment status filter")?; - } - - if !self.connector.is_empty() { - builder - .add_filter_in_range_clause(PaymentDimensions::Connector, &self.connector) - .attach_printable("Error adding connector filter")?; - } - - if !self.auth_type.is_empty() { - builder - .add_filter_in_range_clause(PaymentDimensions::AuthType, &self.auth_type) - .attach_printable("Error adding auth type filter")?; - } - - if !self.payment_method.is_empty() { - builder - .add_filter_in_range_clause(PaymentDimensions::PaymentMethod, &self.payment_method) - .attach_printable("Error adding payment method filter")?; - } - Ok(()) - } -} diff --git a/crates/router/src/analytics/query.rs b/crates/router/src/analytics/query.rs deleted file mode 100644 index b1f621d815..0000000000 --- a/crates/router/src/analytics/query.rs +++ /dev/null @@ -1,533 +0,0 @@ -#![allow(dead_code)] -use std::marker::PhantomData; - -use api_models::{ - analytics::{ - self as analytics_api, - payments::PaymentDimensions, - refunds::{RefundDimensions, RefundType}, - Granularity, - }, - enums::Connector, - refunds::RefundStatus, -}; -use common_enums::{ - enums as storage_enums, - enums::{AttemptStatus, AuthenticationType, Currency, PaymentMethod}, -}; -use common_utils::errors::{CustomResult, ParsingError}; -use error_stack::{IntoReport, ResultExt}; -use router_env::logger; - -use super::types::{AnalyticsCollection, AnalyticsDataSource, LoadRow}; -use crate::analytics::types::QueryExecutionError; -pub type QueryResult = error_stack::Result; -pub trait QueryFilter -where - T: AnalyticsDataSource, - AnalyticsCollection: ToSql, -{ - fn set_filter_clause(&self, builder: &mut QueryBuilder) -> QueryResult<()>; -} - -pub trait GroupByClause -where - T: AnalyticsDataSource, - AnalyticsCollection: ToSql, -{ - fn set_group_by_clause(&self, builder: &mut QueryBuilder) -> QueryResult<()>; -} - -pub trait SeriesBucket { - type 
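Note that `PaymentSuccessRate` above is the one metric that adds `PaymentDimensions::PaymentStatus` as an extra group-by instead of filtering on it: the query returns per-status counts, and the ratio is computed afterwards in an accumulator. A minimal self-contained sketch of that client-side step (the row type and status strings are simplified stand-ins):

```rust
// Simplified stand-in for the grouped rows the success-rate query returns:
// one count per payment status. The percentage is computed here, not in SQL.
struct Row {
    status: &'static str,
    count: i64,
}

fn success_rate(rows: &[Row]) -> Option<f64> {
    let total: i64 = rows.iter().map(|r| r.count).sum();
    let charged: i64 = rows
        .iter()
        .filter(|r| r.status == "charged")
        .map(|r| r.count)
        .sum();
    (total > 0).then(|| charged as f64 * 100.0 / total as f64)
}

fn main() {
    let rows = [
        Row { status: "charged", count: 80 },
        Row { status: "failure", count: 20 },
    ];
    assert_eq!(success_rate(&rows), Some(80.0));
}
```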
SeriesType; - type GranularityLevel; - - fn get_lowest_common_granularity_level(&self) -> Self::GranularityLevel; - - fn get_bucket_size(&self) -> u8; - - fn clip_to_start( - &self, - value: Self::SeriesType, - ) -> error_stack::Result; - - fn clip_to_end( - &self, - value: Self::SeriesType, - ) -> error_stack::Result; -} - -impl QueryFilter for analytics_api::TimeRange -where - T: AnalyticsDataSource, - time::PrimitiveDateTime: ToSql, - AnalyticsCollection: ToSql, - Granularity: GroupByClause, -{ - fn set_filter_clause(&self, builder: &mut QueryBuilder) -> QueryResult<()> { - builder.add_custom_filter_clause("created_at", self.start_time, FilterTypes::Gte)?; - if let Some(end) = self.end_time { - builder.add_custom_filter_clause("created_at", end, FilterTypes::Lte)?; - } - Ok(()) - } -} - -impl GroupByClause for Granularity { - fn set_group_by_clause( - &self, - builder: &mut QueryBuilder, - ) -> QueryResult<()> { - let trunc_scale = self.get_lowest_common_granularity_level(); - - let granularity_bucket_scale = match self { - Self::OneMin => None, - Self::FiveMin | Self::FifteenMin | Self::ThirtyMin => Some("minute"), - Self::OneHour | Self::OneDay => None, - }; - - let granularity_divisor = self.get_bucket_size(); - - builder - .add_group_by_clause(format!("DATE_TRUNC('{trunc_scale}', modified_at)")) - .attach_printable("Error adding time prune group by")?; - if let Some(scale) = granularity_bucket_scale { - builder - .add_group_by_clause(format!( - "FLOOR(DATE_PART('{scale}', modified_at)/{granularity_divisor})" - )) - .attach_printable("Error adding time binning group by")?; - } - Ok(()) - } -} - -#[derive(strum::Display)] -#[strum(serialize_all = "lowercase")] -pub enum TimeGranularityLevel { - Minute, - Hour, - Day, -} - -impl SeriesBucket for Granularity { - type SeriesType = time::PrimitiveDateTime; - - type GranularityLevel = TimeGranularityLevel; - - fn get_lowest_common_granularity_level(&self) -> Self::GranularityLevel { - match self { - Self::OneMin => TimeGranularityLevel::Minute, - Self::FiveMin | Self::FifteenMin | Self::ThirtyMin | Self::OneHour => { - TimeGranularityLevel::Hour - } - Self::OneDay => TimeGranularityLevel::Day, - } - } - - fn get_bucket_size(&self) -> u8 { - match self { - Self::OneMin => 60, - Self::FiveMin => 5, - Self::FifteenMin => 15, - Self::ThirtyMin => 30, - Self::OneHour => 60, - Self::OneDay => 24, - } - } - - fn clip_to_start( - &self, - value: Self::SeriesType, - ) -> error_stack::Result { - let clip_start = |value: u8, modulo: u8| -> u8 { value - value % modulo }; - - let clipped_time = match ( - self.get_lowest_common_granularity_level(), - self.get_bucket_size(), - ) { - (TimeGranularityLevel::Minute, i) => time::Time::MIDNIGHT - .replace_second(clip_start(value.second(), i)) - .and_then(|t| t.replace_minute(value.minute())) - .and_then(|t| t.replace_hour(value.hour())), - (TimeGranularityLevel::Hour, i) => time::Time::MIDNIGHT - .replace_minute(clip_start(value.minute(), i)) - .and_then(|t| t.replace_hour(value.hour())), - (TimeGranularityLevel::Day, i) => { - time::Time::MIDNIGHT.replace_hour(clip_start(value.hour(), i)) - } - } - .into_report() - .change_context(PostProcessingError::BucketClipping)?; - - Ok(value.replace_time(clipped_time)) - } - - fn clip_to_end( - &self, - value: Self::SeriesType, - ) -> error_stack::Result { - let clip_end = |value: u8, modulo: u8| -> u8 { value + modulo - 1 - value % modulo }; - - let clipped_time = match ( - self.get_lowest_common_granularity_level(), - self.get_bucket_size(), - ) { - 
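The `GroupByClause` impl above buckets timestamps in two steps: truncate to the lowest common granularity with `DATE_TRUNC`, then sub-divide with `FLOOR(DATE_PART(...)/bucket_size)`. (Note that the removed impl groups on `modified_at` while the time-range filter elsewhere uses `created_at`.) A sketch of the two expressions emitted for a five-minute granularity:

```rust
// Sketch of the two GROUP BY expressions the removed GroupByClause impl
// produces for Granularity::FiveMin: truncate to the hour, then floor the
// minute into 5-minute buckets.
fn five_min_group_by() -> [String; 2] {
    let trunc_scale = "hour"; // lowest common granularity level for FiveMin
    let bucket_scale = "minute";
    let divisor = 5; // bucket size in minutes
    [
        format!("DATE_TRUNC('{trunc_scale}', modified_at)"),
        format!("FLOOR(DATE_PART('{bucket_scale}', modified_at)/{divisor})"),
    ]
}

fn main() {
    for clause in five_min_group_by() {
        println!("{clause}");
    }
}
```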
(TimeGranularityLevel::Minute, i) => time::Time::MIDNIGHT - .replace_second(clip_end(value.second(), i)) - .and_then(|t| t.replace_minute(value.minute())) - .and_then(|t| t.replace_hour(value.hour())), - (TimeGranularityLevel::Hour, i) => time::Time::MIDNIGHT - .replace_minute(clip_end(value.minute(), i)) - .and_then(|t| t.replace_hour(value.hour())), - (TimeGranularityLevel::Day, i) => { - time::Time::MIDNIGHT.replace_hour(clip_end(value.hour(), i)) - } - } - .into_report() - .change_context(PostProcessingError::BucketClipping) - .attach_printable_lazy(|| format!("Bucket Clip Error: {value}"))?; - - Ok(value.replace_time(clipped_time)) - } -} - -#[derive(thiserror::Error, Debug)] -pub enum QueryBuildingError { - #[allow(dead_code)] - #[error("Not Implemented: {0}")] - NotImplemented(String), - #[error("Failed to Serialize to SQL")] - SqlSerializeError, - #[error("Failed to build sql query: {0}")] - InvalidQuery(&'static str), -} - -#[derive(thiserror::Error, Debug)] -pub enum PostProcessingError { - #[error("Error Clipping values to bucket sizes")] - BucketClipping, -} - -#[derive(Debug)] -pub enum Aggregate { - Count { - field: Option, - alias: Option<&'static str>, - }, - Sum { - field: R, - alias: Option<&'static str>, - }, - Min { - field: R, - alias: Option<&'static str>, - }, - Max { - field: R, - alias: Option<&'static str>, - }, -} - -#[derive(Debug)] -pub struct QueryBuilder -where - T: AnalyticsDataSource, - AnalyticsCollection: ToSql, -{ - columns: Vec, - filters: Vec<(String, FilterTypes, String)>, - group_by: Vec, - having: Option>, - table: AnalyticsCollection, - distinct: bool, - db_type: PhantomData, -} - -pub trait ToSql { - fn to_sql(&self) -> error_stack::Result; -} - -/// Implement `ToSql` on arrays of types that impl `ToString`. -macro_rules! 
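`clip_to_start` and `clip_to_end` above align a row's observed timestamps to the boundaries of its series bucket using modulo arithmetic. A self-contained sketch of the same arithmetic, restricted to minutes within an hour for clarity:

```rust
// The bucket-clipping arithmetic from the removed SeriesBucket impl,
// reduced to minutes-within-an-hour.
fn clip_to_start(minute: u8, bucket_size: u8) -> u8 {
    minute - minute % bucket_size
}

fn clip_to_end(minute: u8, bucket_size: u8) -> u8 {
    minute + bucket_size - 1 - minute % bucket_size
}

fn main() {
    // With 15-minute buckets, 09:37 belongs to the [09:30, 09:44] bucket.
    assert_eq!(clip_to_start(37, 15), 30);
    assert_eq!(clip_to_end(37, 15), 44);
}
```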
impl_to_sql_for_to_string { - ($($type:ty),+) => { - $( - impl ToSql for $type { - fn to_sql(&self) -> error_stack::Result { - Ok(self.to_string()) - } - } - )+ - }; -} - -impl_to_sql_for_to_string!( - String, - &str, - &PaymentDimensions, - &RefundDimensions, - PaymentDimensions, - RefundDimensions, - PaymentMethod, - AuthenticationType, - Connector, - AttemptStatus, - RefundStatus, - storage_enums::RefundStatus, - Currency, - RefundType, - &String, - &bool, - &u64 -); - -#[allow(dead_code)] -#[derive(Debug)] -pub enum FilterTypes { - Equal, - EqualBool, - In, - Gte, - Lte, - Gt, -} - -impl QueryBuilder -where - T: AnalyticsDataSource, - AnalyticsCollection: ToSql, -{ - pub fn new(table: AnalyticsCollection) -> Self { - Self { - columns: Default::default(), - filters: Default::default(), - group_by: Default::default(), - having: Default::default(), - table, - distinct: Default::default(), - db_type: Default::default(), - } - } - - pub fn add_select_column(&mut self, column: impl ToSql) -> QueryResult<()> { - self.columns.push( - column - .to_sql() - .change_context(QueryBuildingError::SqlSerializeError) - .attach_printable("Error serializing select column")?, - ); - Ok(()) - } - - pub fn set_distinct(&mut self) { - self.distinct = true - } - - pub fn add_filter_clause( - &mut self, - key: impl ToSql, - value: impl ToSql, - ) -> QueryResult<()> { - self.add_custom_filter_clause(key, value, FilterTypes::Equal) - } - - pub fn add_bool_filter_clause( - &mut self, - key: impl ToSql, - value: impl ToSql, - ) -> QueryResult<()> { - self.add_custom_filter_clause(key, value, FilterTypes::EqualBool) - } - - pub fn add_custom_filter_clause( - &mut self, - lhs: impl ToSql, - rhs: impl ToSql, - comparison: FilterTypes, - ) -> QueryResult<()> { - self.filters.push(( - lhs.to_sql() - .change_context(QueryBuildingError::SqlSerializeError) - .attach_printable("Error serializing filter key")?, - comparison, - rhs.to_sql() - .change_context(QueryBuildingError::SqlSerializeError) - .attach_printable("Error serializing filter value")?, - )); - Ok(()) - } - - pub fn add_filter_in_range_clause( - &mut self, - key: impl ToSql, - values: &[impl ToSql], - ) -> QueryResult<()> { - let list = values - .iter() - .map(|i| { - // trimming whitespaces from the filter values received in request, to prevent a possibility of an SQL injection - i.to_sql().map(|s| { - let trimmed_str = s.replace(' ', ""); - format!("'{trimmed_str}'") - }) - }) - .collect::, ParsingError>>() - .change_context(QueryBuildingError::SqlSerializeError) - .attach_printable("Error serializing range filter value")? 
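The `impl_to_sql_for_to_string!` macro above blanket-implements `ToSql` by delegating to `ToString`, avoiding one hand-written impl per type. A standalone sketch of the same pattern (the trait and macro names here are fresh, to keep the example self-contained):

```rust
// Standalone version of the delegate-to-ToString pattern used by the
// removed impl_to_sql_for_to_string! macro.
trait ToSqlValue {
    fn to_sql(&self) -> String;
}

macro_rules! impl_to_sql_via_to_string {
    ($($ty:ty),+ $(,)?) => {
        $(
            impl ToSqlValue for $ty {
                fn to_sql(&self) -> String {
                    self.to_string()
                }
            }
        )+
    };
}

impl_to_sql_via_to_string!(String, u64, bool);

fn main() {
    assert_eq!(42u64.to_sql(), "42");
    assert_eq!(true.to_sql(), "true");
}
```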
- .join(", "); - self.add_custom_filter_clause(key, list, FilterTypes::In) - } - - pub fn add_group_by_clause(&mut self, column: impl ToSql) -> QueryResult<()> { - self.group_by.push( - column - .to_sql() - .change_context(QueryBuildingError::SqlSerializeError) - .attach_printable("Error serializing group by field")?, - ); - Ok(()) - } - - pub fn add_granularity_in_mins(&mut self, granularity: &Granularity) -> QueryResult<()> { - let interval = match granularity { - Granularity::OneMin => "1", - Granularity::FiveMin => "5", - Granularity::FifteenMin => "15", - Granularity::ThirtyMin => "30", - Granularity::OneHour => "60", - Granularity::OneDay => "1440", - }; - let _ = self.add_select_column(format!( - "toStartOfInterval(created_at, INTERVAL {interval} MINUTE) as time_bucket" - )); - Ok(()) - } - - fn get_filter_clause(&self) -> String { - self.filters - .iter() - .map(|(l, op, r)| match op { - FilterTypes::EqualBool => format!("{l} = {r}"), - FilterTypes::Equal => format!("{l} = '{r}'"), - FilterTypes::In => format!("{l} IN ({r})"), - FilterTypes::Gte => format!("{l} >= '{r}'"), - FilterTypes::Gt => format!("{l} > {r}"), - FilterTypes::Lte => format!("{l} <= '{r}'"), - }) - .collect::>() - .join(" AND ") - } - - fn get_select_clause(&self) -> String { - self.columns.join(", ") - } - - fn get_group_by_clause(&self) -> String { - self.group_by.join(", ") - } - - #[allow(dead_code)] - pub fn add_having_clause( - &mut self, - aggregate: Aggregate, - filter_type: FilterTypes, - value: impl ToSql, - ) -> QueryResult<()> - where - Aggregate: ToSql, - { - let aggregate = aggregate - .to_sql() - .change_context(QueryBuildingError::SqlSerializeError) - .attach_printable("Error serializing having aggregate")?; - let value = value - .to_sql() - .change_context(QueryBuildingError::SqlSerializeError) - .attach_printable("Error serializing having value")?; - let entry = (aggregate, filter_type, value); - if let Some(having) = &mut self.having { - having.push(entry); - } else { - self.having = Some(vec![entry]); - } - Ok(()) - } - - pub fn get_filter_type_clause(&self) -> Option { - self.having.as_ref().map(|vec| { - vec.iter() - .map(|(l, op, r)| match op { - FilterTypes::Equal | FilterTypes::EqualBool => format!("{l} = {r}"), - FilterTypes::In => format!("{l} IN ({r})"), - FilterTypes::Gte => format!("{l} >= {r}"), - FilterTypes::Lte => format!("{l} < {r}"), - FilterTypes::Gt => format!("{l} > {r}"), - }) - .collect::>() - .join(" AND ") - }) - } - - pub fn build_query(&mut self) -> QueryResult - where - Aggregate<&'static str>: ToSql, - { - if self.columns.is_empty() { - Err(QueryBuildingError::InvalidQuery( - "No select fields provided", - )) - .into_report()?; - } - let mut query = String::from("SELECT "); - - if self.distinct { - query.push_str("DISTINCT "); - } - - query.push_str(&self.get_select_clause()); - - query.push_str(" FROM "); - - query.push_str( - &self - .table - .to_sql() - .change_context(QueryBuildingError::SqlSerializeError) - .attach_printable("Error serializing table value")?, - ); - - if !self.filters.is_empty() { - query.push_str(" WHERE "); - query.push_str(&self.get_filter_clause()); - } - - if !self.group_by.is_empty() { - query.push_str(" GROUP BY "); - query.push_str(&self.get_group_by_clause()); - } - - if self.having.is_some() { - if let Some(condition) = self.get_filter_type_clause() { - query.push_str(" HAVING "); - query.push_str(condition.as_str()); - } - } - Ok(query) - } - - pub async fn execute_query( - &mut self, - store: &P, - ) -> CustomResult, 
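`build_query` above concatenates the collected pieces into `SELECT ... FROM ... WHERE ... GROUP BY ... HAVING ...`. A minimal sketch of that assembly step, with error handling and the HAVING clause omitted:

```rust
// Minimal sketch of the removed QueryBuilder's final assembly step.
#[derive(Default)]
struct MiniQueryBuilder {
    table: String,
    columns: Vec<String>,
    filters: Vec<String>,
    group_by: Vec<String>,
}

impl MiniQueryBuilder {
    fn build(&self) -> String {
        let mut query = format!("SELECT {} FROM {}", self.columns.join(", "), self.table);
        if !self.filters.is_empty() {
            query.push_str(" WHERE ");
            query.push_str(&self.filters.join(" AND "));
        }
        if !self.group_by.is_empty() {
            query.push_str(" GROUP BY ");
            query.push_str(&self.group_by.join(", "));
        }
        query
    }
}

fn main() {
    let builder = MiniQueryBuilder {
        table: "payment_attempt".into(),
        columns: vec!["currency".into(), "count(*) as count".into()],
        filters: vec!["merchant_id = 'm1'".into(), "currency IN ('USD', 'EUR')".into()],
        group_by: vec!["currency".into()],
    };
    println!("{}", builder.build());
}
```

Worth noting: the removed builder interpolates serialized values directly into the query string, relying on whitespace-stripping of IN-list values as its injection mitigation, rather than using bound parameters.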
QueryExecutionError>, QueryBuildingError> - where - P: LoadRow, - Aggregate<&'static str>: ToSql, - { - let query = self - .build_query() - .change_context(QueryBuildingError::SqlSerializeError) - .attach_printable("Failed to execute query")?; - logger::debug!(?query); - Ok(store.load_results(query.as_str()).await) - } -} diff --git a/crates/router/src/analytics/refunds.rs b/crates/router/src/analytics/refunds.rs deleted file mode 100644 index a8b52effe7..0000000000 --- a/crates/router/src/analytics/refunds.rs +++ /dev/null @@ -1,10 +0,0 @@ -pub mod accumulator; -mod core; - -pub mod filters; -pub mod metrics; -pub mod types; -pub use accumulator::{RefundMetricAccumulator, RefundMetricsAccumulator}; - -pub trait RefundAnalytics: metrics::RefundMetricAnalytics {} -pub use self::core::get_metrics; diff --git a/crates/router/src/analytics/refunds/accumulator.rs b/crates/router/src/analytics/refunds/accumulator.rs deleted file mode 100644 index 3d0c0e659f..0000000000 --- a/crates/router/src/analytics/refunds/accumulator.rs +++ /dev/null @@ -1,110 +0,0 @@ -use api_models::analytics::refunds::RefundMetricsBucketValue; -use common_enums::enums as storage_enums; - -use super::metrics::RefundMetricRow; -#[derive(Debug, Default)] -pub struct RefundMetricsAccumulator { - pub refund_success_rate: SuccessRateAccumulator, - pub refund_count: CountAccumulator, - pub refund_success: CountAccumulator, - pub processed_amount: SumAccumulator, -} - -#[derive(Debug, Default)] -pub struct SuccessRateAccumulator { - pub success: i64, - pub total: i64, -} - -#[derive(Debug, Default)] -#[repr(transparent)] -pub struct CountAccumulator { - pub count: Option, -} - -#[derive(Debug, Default)] -#[repr(transparent)] -pub struct SumAccumulator { - pub total: Option, -} - -pub trait RefundMetricAccumulator { - type MetricOutput; - - fn add_metrics_bucket(&mut self, metrics: &RefundMetricRow); - - fn collect(self) -> Self::MetricOutput; -} - -impl RefundMetricAccumulator for CountAccumulator { - type MetricOutput = Option; - #[inline] - fn add_metrics_bucket(&mut self, metrics: &RefundMetricRow) { - self.count = match (self.count, metrics.count) { - (None, None) => None, - (None, i @ Some(_)) | (i @ Some(_), None) => i, - (Some(a), Some(b)) => Some(a + b), - } - } - #[inline] - fn collect(self) -> Self::MetricOutput { - self.count.and_then(|i| u64::try_from(i).ok()) - } -} - -impl RefundMetricAccumulator for SumAccumulator { - type MetricOutput = Option; - #[inline] - fn add_metrics_bucket(&mut self, metrics: &RefundMetricRow) { - self.total = match ( - self.total, - metrics - .total - .as_ref() - .and_then(bigdecimal::ToPrimitive::to_i64), - ) { - (None, None) => None, - (None, i @ Some(_)) | (i @ Some(_), None) => i, - (Some(a), Some(b)) => Some(a + b), - } - } - #[inline] - fn collect(self) -> Self::MetricOutput { - self.total.and_then(|i| u64::try_from(i).ok()) - } -} - -impl RefundMetricAccumulator for SuccessRateAccumulator { - type MetricOutput = Option; - - fn add_metrics_bucket(&mut self, metrics: &RefundMetricRow) { - if let Some(ref refund_status) = metrics.refund_status { - if refund_status.as_ref() == &storage_enums::RefundStatus::Success { - self.success += metrics.count.unwrap_or_default(); - } - }; - self.total += metrics.count.unwrap_or_default(); - } - - fn collect(self) -> Self::MetricOutput { - if self.total <= 0 { - None - } else { - Some( - f64::from(u32::try_from(self.success).ok()?) 
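The `CountAccumulator` and `SumAccumulator` above merge per-bucket results while distinguishing "no data" (`None`) from a zero value. The merge rule in isolation:

```rust
// The Option-merging rule from the removed Count/Sum accumulators: two
// missing values stay missing; otherwise sum whatever is present.
fn merge(acc: Option<i64>, next: Option<i64>) -> Option<i64> {
    match (acc, next) {
        (None, None) => None,
        (None, v @ Some(_)) | (v @ Some(_), None) => v,
        (Some(a), Some(b)) => Some(a + b),
    }
}

fn main() {
    assert_eq!(merge(None, None), None);
    assert_eq!(merge(Some(3), None), Some(3));
    assert_eq!(merge(Some(3), Some(4)), Some(7));
}
```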
* 100.0 - / f64::from(u32::try_from(self.total).ok()?), - ) - } - } -} - -impl RefundMetricsAccumulator { - pub fn collect(self) -> RefundMetricsBucketValue { - RefundMetricsBucketValue { - refund_success_rate: self.refund_success_rate.collect(), - refund_count: self.refund_count.collect(), - refund_success_count: self.refund_success.collect(), - refund_processed_amount: self.processed_amount.collect(), - } - } -} diff --git a/crates/router/src/analytics/refunds/core.rs b/crates/router/src/analytics/refunds/core.rs deleted file mode 100644 index 4c2d2c3941..0000000000 --- a/crates/router/src/analytics/refunds/core.rs +++ /dev/null @@ -1,104 +0,0 @@ -use std::collections::HashMap; - -use api_models::analytics::{ - refunds::{RefundMetrics, RefundMetricsBucketIdentifier, RefundMetricsBucketResponse}, - AnalyticsMetadata, GetRefundMetricRequest, MetricsResponse, -}; -use error_stack::{IntoReport, ResultExt}; -use router_env::{ - logger, - tracing::{self, Instrument}, -}; - -use super::RefundMetricsAccumulator; -use crate::{ - analytics::{ - core::AnalyticsApiResponse, errors::AnalyticsError, refunds::RefundMetricAccumulator, - AnalyticsProvider, - }, - services::ApplicationResponse, - types::domain, -}; - -pub async fn get_metrics( - pool: AnalyticsProvider, - merchant_account: domain::MerchantAccount, - req: GetRefundMetricRequest, -) -> AnalyticsApiResponse> { - let mut metrics_accumulator: HashMap = - HashMap::new(); - let mut set = tokio::task::JoinSet::new(); - for metric_type in req.metrics.iter().cloned() { - let req = req.clone(); - let merchant_id = merchant_account.merchant_id.clone(); - let pool = pool.clone(); - let task_span = tracing::debug_span!( - "analytics_refund_query", - refund_metric = metric_type.as_ref() - ); - set.spawn( - async move { - let data = pool - .get_refund_metrics( - &metric_type, - &req.group_by_names.clone(), - &merchant_id, - &req.filters, - &req.time_series.map(|t| t.granularity), - &req.time_range, - ) - .await - .change_context(AnalyticsError::UnknownError); - (metric_type, data) - } - .instrument(task_span), - ); - } - - while let Some((metric, data)) = set - .join_next() - .await - .transpose() - .into_report() - .change_context(AnalyticsError::UnknownError)? - { - for (id, value) in data? 
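`get_metrics` above fans out one task per requested metric with `tokio::task::JoinSet` and folds the results into the accumulator map as tasks complete. A stripped-down sketch of that pattern (assumes the `tokio` crate; the metric names and values here are stand-ins, not the real analytics types):

```rust
// Fan-out/fan-in sketch of the removed get_metrics: one spawned task per
// metric, merged as tasks finish. Assumes tokio with the "rt-multi-thread"
// and "macros" features.
#[tokio::main]
async fn main() {
    let metrics = ["refund_count", "refund_success_rate"];
    let mut set = tokio::task::JoinSet::new();

    for metric in metrics {
        set.spawn(async move {
            // A real implementation would run the analytics query here.
            (metric, 42u64)
        });
    }

    while let Some(joined) = set.join_next().await {
        let (metric, value) = joined.expect("metric task panicked");
        println!("{metric} = {value}");
    }
}
```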
{ - logger::debug!(bucket_id=?id, bucket_value=?value, "Bucket row for metric {metric}"); - let metrics_builder = metrics_accumulator.entry(id).or_default(); - match metric { - RefundMetrics::RefundSuccessRate => metrics_builder - .refund_success_rate - .add_metrics_bucket(&value), - RefundMetrics::RefundCount => { - metrics_builder.refund_count.add_metrics_bucket(&value) - } - RefundMetrics::RefundSuccessCount => { - metrics_builder.refund_success.add_metrics_bucket(&value) - } - RefundMetrics::RefundProcessedAmount => { - metrics_builder.processed_amount.add_metrics_bucket(&value) - } - } - } - - logger::debug!( - "Analytics Accumulated Results: metric: {}, results: {:#?}", - metric, - metrics_accumulator - ); - } - let query_data: Vec = metrics_accumulator - .into_iter() - .map(|(id, val)| RefundMetricsBucketResponse { - values: val.collect(), - dimensions: id, - }) - .collect(); - - Ok(ApplicationResponse::Json(MetricsResponse { - query_data, - meta_data: [AnalyticsMetadata { - current_time_range: req.time_range, - }], - })) -} diff --git a/crates/router/src/analytics/refunds/filters.rs b/crates/router/src/analytics/refunds/filters.rs deleted file mode 100644 index 6b45e9194f..0000000000 --- a/crates/router/src/analytics/refunds/filters.rs +++ /dev/null @@ -1,59 +0,0 @@ -use api_models::analytics::{ - refunds::{RefundDimensions, RefundType}, - Granularity, TimeRange, -}; -use common_enums::enums::{Currency, RefundStatus}; -use common_utils::errors::ReportSwitchExt; -use error_stack::ResultExt; -use time::PrimitiveDateTime; - -use crate::analytics::{ - query::{Aggregate, GroupByClause, QueryBuilder, QueryFilter, ToSql}, - types::{ - AnalyticsCollection, AnalyticsDataSource, DBEnumWrapper, FiltersError, FiltersResult, - LoadRow, - }, -}; -pub trait RefundFilterAnalytics: LoadRow {} - -pub async fn get_refund_filter_for_dimension( - dimension: RefundDimensions, - merchant: &String, - time_range: &TimeRange, - pool: &T, -) -> FiltersResult> -where - T: AnalyticsDataSource + RefundFilterAnalytics, - PrimitiveDateTime: ToSql, - AnalyticsCollection: ToSql, - Granularity: GroupByClause, - Aggregate<&'static str>: ToSql, -{ - let mut query_builder: QueryBuilder = QueryBuilder::new(AnalyticsCollection::Refund); - - query_builder.add_select_column(dimension).switch()?; - time_range - .set_filter_clause(&mut query_builder) - .attach_printable("Error filtering time range") - .switch()?; - - query_builder - .add_filter_clause("merchant_id", merchant) - .switch()?; - - query_builder.set_distinct(); - - query_builder - .execute_query::(pool) - .await - .change_context(FiltersError::QueryBuildingError)? 
- .change_context(FiltersError::QueryExecutionFailure) -} - -#[derive(Debug, serde::Serialize, Eq, PartialEq)] -pub struct RefundFilterRow { - pub currency: Option>, - pub refund_status: Option>, - pub connector: Option, - pub refund_type: Option>, -} diff --git a/crates/router/src/analytics/refunds/metrics.rs b/crates/router/src/analytics/refunds/metrics.rs deleted file mode 100644 index d4f509b4a1..0000000000 --- a/crates/router/src/analytics/refunds/metrics.rs +++ /dev/null @@ -1,126 +0,0 @@ -use api_models::analytics::{ - refunds::{ - RefundDimensions, RefundFilters, RefundMetrics, RefundMetricsBucketIdentifier, RefundType, - }, - Granularity, TimeRange, -}; -use common_enums::enums as storage_enums; -use time::PrimitiveDateTime; -mod refund_count; -mod refund_processed_amount; -mod refund_success_count; -mod refund_success_rate; -use refund_count::RefundCount; -use refund_processed_amount::RefundProcessedAmount; -use refund_success_count::RefundSuccessCount; -use refund_success_rate::RefundSuccessRate; - -use crate::analytics::{ - query::{Aggregate, GroupByClause, ToSql}, - types::{AnalyticsCollection, AnalyticsDataSource, DBEnumWrapper, LoadRow, MetricsResult}, -}; - -#[derive(Debug, Eq, PartialEq)] -pub struct RefundMetricRow { - pub currency: Option>, - pub refund_status: Option>, - pub connector: Option, - pub refund_type: Option>, - pub total: Option, - pub count: Option, - pub start_bucket: Option, - pub end_bucket: Option, -} - -pub trait RefundMetricAnalytics: LoadRow {} - -#[async_trait::async_trait] -pub trait RefundMetric -where - T: AnalyticsDataSource + RefundMetricAnalytics, - PrimitiveDateTime: ToSql, - AnalyticsCollection: ToSql, - Granularity: GroupByClause, - Aggregate<&'static str>: ToSql, -{ - async fn load_metrics( - &self, - dimensions: &[RefundDimensions], - merchant_id: &str, - filters: &RefundFilters, - granularity: &Option, - time_range: &TimeRange, - pool: &T, - ) -> MetricsResult>; -} - -#[async_trait::async_trait] -impl RefundMetric for RefundMetrics -where - T: AnalyticsDataSource + RefundMetricAnalytics, - PrimitiveDateTime: ToSql, - AnalyticsCollection: ToSql, - Granularity: GroupByClause, - Aggregate<&'static str>: ToSql, -{ - async fn load_metrics( - &self, - dimensions: &[RefundDimensions], - merchant_id: &str, - filters: &RefundFilters, - granularity: &Option, - time_range: &TimeRange, - pool: &T, - ) -> MetricsResult> { - match self { - Self::RefundSuccessRate => { - RefundSuccessRate::default() - .load_metrics( - dimensions, - merchant_id, - filters, - granularity, - time_range, - pool, - ) - .await - } - Self::RefundCount => { - RefundCount::default() - .load_metrics( - dimensions, - merchant_id, - filters, - granularity, - time_range, - pool, - ) - .await - } - Self::RefundSuccessCount => { - RefundSuccessCount::default() - .load_metrics( - dimensions, - merchant_id, - filters, - granularity, - time_range, - pool, - ) - .await - } - Self::RefundProcessedAmount => { - RefundProcessedAmount::default() - .load_metrics( - dimensions, - merchant_id, - filters, - granularity, - time_range, - pool, - ) - .await - } - } - } -} diff --git a/crates/router/src/analytics/refunds/metrics/refund_count.rs b/crates/router/src/analytics/refunds/metrics/refund_count.rs deleted file mode 100644 index 4713272350..0000000000 --- a/crates/router/src/analytics/refunds/metrics/refund_count.rs +++ /dev/null @@ -1,116 +0,0 @@ -use api_models::analytics::{ - refunds::{RefundDimensions, RefundFilters, RefundMetricsBucketIdentifier}, - Granularity, TimeRange, -}; -use 
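The `impl RefundMetric for RefundMetrics` block above is a straight enum-to-implementation dispatch: the request enum selects one metric struct, all of which share the `RefundMetric` trait. The pattern in isolation (types simplified to keep the sketch self-contained):

```rust
// Standalone sketch of the enum-dispatch pattern in the removed
// `impl RefundMetric for RefundMetrics`.
trait Metric {
    fn load(&self) -> Vec<u64>;
}

struct RefundCount;
struct RefundSuccessCount;

impl Metric for RefundCount {
    fn load(&self) -> Vec<u64> {
        vec![10, 20]
    }
}

impl Metric for RefundSuccessCount {
    fn load(&self) -> Vec<u64> {
        vec![9, 18]
    }
}

enum RefundMetrics {
    RefundCount,
    RefundSuccessCount,
}

impl Metric for RefundMetrics {
    fn load(&self) -> Vec<u64> {
        match self {
            Self::RefundCount => RefundCount.load(),
            Self::RefundSuccessCount => RefundSuccessCount.load(),
        }
    }
}

fn main() {
    assert_eq!(RefundMetrics::RefundCount.load(), vec![10, 20]);
}
```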
common_utils::errors::ReportSwitchExt;
-use error_stack::ResultExt;
-use time::PrimitiveDateTime;
-
-use super::RefundMetricRow;
-use crate::analytics::{
-    query::{Aggregate, GroupByClause, QueryBuilder, QueryFilter, SeriesBucket, ToSql},
-    types::{AnalyticsCollection, AnalyticsDataSource, MetricsError, MetricsResult},
-};
-
-#[derive(Default)]
-pub(super) struct RefundCount {}
-
-#[async_trait::async_trait]
-impl<T> super::RefundMetric<T> for RefundCount
-where
-    T: AnalyticsDataSource + super::RefundMetricAnalytics,
-    PrimitiveDateTime: ToSql<T>,
-    AnalyticsCollection: ToSql<T>,
-    Granularity: GroupByClause<T>,
-    Aggregate<&'static str>: ToSql<T>,
-{
-    async fn load_metrics(
-        &self,
-        dimensions: &[RefundDimensions],
-        merchant_id: &str,
-        filters: &RefundFilters,
-        granularity: &Option<Granularity>,
-        time_range: &TimeRange,
-        pool: &T,
-    ) -> MetricsResult<Vec<(RefundMetricsBucketIdentifier, RefundMetricRow)>> {
-        let mut query_builder: QueryBuilder<T> = QueryBuilder::new(AnalyticsCollection::Refund);
-
-        for dim in dimensions.iter() {
-            query_builder.add_select_column(dim).switch()?;
-        }
-
-        query_builder
-            .add_select_column(Aggregate::Count {
-                field: None,
-                alias: Some("count"),
-            })
-            .switch()?;
-        query_builder
-            .add_select_column(Aggregate::Min {
-                field: "created_at",
-                alias: Some("start_bucket"),
-            })
-            .switch()?;
-        query_builder
-            .add_select_column(Aggregate::Max {
-                field: "created_at",
-                alias: Some("end_bucket"),
-            })
-            .switch()?;
-
-        filters.set_filter_clause(&mut query_builder).switch()?;
-
-        query_builder
-            .add_filter_clause("merchant_id", merchant_id)
-            .switch()?;
-
-        time_range
-            .set_filter_clause(&mut query_builder)
-            .attach_printable("Error filtering time range")
-            .switch()?;
-
-        for dim in dimensions.iter() {
-            query_builder
-                .add_group_by_clause(dim)
-                .attach_printable("Error grouping by dimensions")
-                .switch()?;
-        }
-
-        if let Some(granularity) = granularity.as_ref() {
-            granularity
-                .set_group_by_clause(&mut query_builder)
-                .attach_printable("Error adding granularity")
-                .switch()?;
-        }
-
-        query_builder
-            .execute_query::<RefundMetricRow, _>(pool)
-            .await
-            .change_context(MetricsError::QueryBuildingError)?
-            .change_context(MetricsError::QueryExecutionFailure)?
- .into_iter() - .map(|i| { - Ok(( - RefundMetricsBucketIdentifier::new( - i.currency.as_ref().map(|i| i.0), - i.refund_status.as_ref().map(|i| i.0), - i.connector.clone(), - i.refund_type.as_ref().map(|i| i.0.to_string()), - TimeRange { - start_time: match (granularity, i.start_bucket) { - (Some(g), Some(st)) => g.clip_to_start(st)?, - _ => time_range.start_time, - }, - end_time: granularity.as_ref().map_or_else( - || Ok(time_range.end_time), - |g| i.end_bucket.map(|et| g.clip_to_end(et)).transpose(), - )?, - }, - ), - i, - )) - }) - .collect::, crate::analytics::query::PostProcessingError>>() - .change_context(MetricsError::PostProcessingFailure) - } -} diff --git a/crates/router/src/analytics/refunds/metrics/refund_processed_amount.rs b/crates/router/src/analytics/refunds/metrics/refund_processed_amount.rs deleted file mode 100644 index c5f3a706aa..0000000000 --- a/crates/router/src/analytics/refunds/metrics/refund_processed_amount.rs +++ /dev/null @@ -1,122 +0,0 @@ -use api_models::analytics::{ - refunds::{RefundDimensions, RefundFilters, RefundMetricsBucketIdentifier}, - Granularity, TimeRange, -}; -use common_enums::enums as storage_enums; -use common_utils::errors::ReportSwitchExt; -use error_stack::ResultExt; -use time::PrimitiveDateTime; - -use super::RefundMetricRow; -use crate::analytics::{ - query::{Aggregate, GroupByClause, QueryBuilder, QueryFilter, SeriesBucket, ToSql}, - types::{AnalyticsCollection, AnalyticsDataSource, MetricsError, MetricsResult}, -}; -#[derive(Default)] -pub(super) struct RefundProcessedAmount {} - -#[async_trait::async_trait] -impl super::RefundMetric for RefundProcessedAmount -where - T: AnalyticsDataSource + super::RefundMetricAnalytics, - PrimitiveDateTime: ToSql, - AnalyticsCollection: ToSql, - Granularity: GroupByClause, - Aggregate<&'static str>: ToSql, -{ - async fn load_metrics( - &self, - dimensions: &[RefundDimensions], - merchant_id: &str, - filters: &RefundFilters, - granularity: &Option, - time_range: &TimeRange, - pool: &T, - ) -> MetricsResult> - where - T: AnalyticsDataSource + super::RefundMetricAnalytics, - { - let mut query_builder: QueryBuilder = QueryBuilder::new(AnalyticsCollection::Refund); - - for dim in dimensions.iter() { - query_builder.add_select_column(dim).switch()?; - } - - query_builder - .add_select_column(Aggregate::Sum { - field: "refund_amount", - alias: Some("total"), - }) - .switch()?; - query_builder - .add_select_column(Aggregate::Min { - field: "created_at", - alias: Some("start_bucket"), - }) - .switch()?; - query_builder - .add_select_column(Aggregate::Max { - field: "created_at", - alias: Some("end_bucket"), - }) - .switch()?; - - filters.set_filter_clause(&mut query_builder).switch()?; - - query_builder - .add_filter_clause("merchant_id", merchant_id) - .switch()?; - - time_range - .set_filter_clause(&mut query_builder) - .attach_printable("Error filtering time range") - .switch()?; - - for dim in dimensions.iter() { - query_builder.add_group_by_clause(dim).switch()?; - } - - if let Some(granularity) = granularity.as_ref() { - granularity - .set_group_by_clause(&mut query_builder) - .switch()?; - } - - query_builder - .add_filter_clause( - RefundDimensions::RefundStatus, - storage_enums::RefundStatus::Success, - ) - .switch()?; - - query_builder - .execute_query::(pool) - .await - .change_context(MetricsError::QueryBuildingError)? - .change_context(MetricsError::QueryExecutionFailure)? 
- .into_iter() - .map(|i| { - Ok(( - RefundMetricsBucketIdentifier::new( - i.currency.as_ref().map(|i| i.0), - None, - i.connector.clone(), - i.refund_type.as_ref().map(|i| i.0.to_string()), - TimeRange { - start_time: match (granularity, i.start_bucket) { - (Some(g), Some(st)) => g.clip_to_start(st)?, - _ => time_range.start_time, - }, - end_time: granularity.as_ref().map_or_else( - || Ok(time_range.end_time), - |g| i.end_bucket.map(|et| g.clip_to_end(et)).transpose(), - )?, - }, - ), - i, - )) - }) - .collect::, crate::analytics::query::PostProcessingError>>() - .change_context(MetricsError::PostProcessingFailure) - } -} diff --git a/crates/router/src/analytics/refunds/metrics/refund_success_count.rs b/crates/router/src/analytics/refunds/metrics/refund_success_count.rs deleted file mode 100644 index 0c8032908f..0000000000 --- a/crates/router/src/analytics/refunds/metrics/refund_success_count.rs +++ /dev/null @@ -1,122 +0,0 @@ -use api_models::analytics::{ - refunds::{RefundDimensions, RefundFilters, RefundMetricsBucketIdentifier}, - Granularity, TimeRange, -}; -use common_enums::enums as storage_enums; -use common_utils::errors::ReportSwitchExt; -use error_stack::ResultExt; -use time::PrimitiveDateTime; - -use super::RefundMetricRow; -use crate::analytics::{ - query::{Aggregate, GroupByClause, QueryBuilder, QueryFilter, SeriesBucket, ToSql}, - types::{AnalyticsCollection, AnalyticsDataSource, MetricsError, MetricsResult}, -}; - -#[derive(Default)] -pub(super) struct RefundSuccessCount {} - -#[async_trait::async_trait] -impl super::RefundMetric for RefundSuccessCount -where - T: AnalyticsDataSource + super::RefundMetricAnalytics, - PrimitiveDateTime: ToSql, - AnalyticsCollection: ToSql, - Granularity: GroupByClause, - Aggregate<&'static str>: ToSql, -{ - async fn load_metrics( - &self, - dimensions: &[RefundDimensions], - merchant_id: &str, - filters: &RefundFilters, - granularity: &Option, - time_range: &TimeRange, - pool: &T, - ) -> MetricsResult> - where - T: AnalyticsDataSource + super::RefundMetricAnalytics, - { - let mut query_builder = QueryBuilder::new(AnalyticsCollection::Refund); - - for dim in dimensions.iter() { - query_builder.add_select_column(dim).switch()?; - } - - query_builder - .add_select_column(Aggregate::Count { - field: None, - alias: Some("count"), - }) - .switch()?; - query_builder - .add_select_column(Aggregate::Min { - field: "created_at", - alias: Some("start_bucket"), - }) - .switch()?; - query_builder - .add_select_column(Aggregate::Max { - field: "created_at", - alias: Some("end_bucket"), - }) - .switch()?; - - filters.set_filter_clause(&mut query_builder).switch()?; - - query_builder - .add_filter_clause("merchant_id", merchant_id) - .switch()?; - - time_range.set_filter_clause(&mut query_builder).switch()?; - - for dim in dimensions.iter() { - query_builder.add_group_by_clause(dim).switch()?; - } - - if let Some(granularity) = granularity.as_ref() { - granularity - .set_group_by_clause(&mut query_builder) - .switch()?; - } - - query_builder - .add_filter_clause( - RefundDimensions::RefundStatus, - storage_enums::RefundStatus::Success, - ) - .switch()?; - query_builder - .execute_query::(pool) - .await - .change_context(MetricsError::QueryBuildingError)? - .change_context(MetricsError::QueryExecutionFailure)? 
- .into_iter() - .map(|i| { - Ok(( - RefundMetricsBucketIdentifier::new( - i.currency.as_ref().map(|i| i.0), - None, - i.connector.clone(), - i.refund_type.as_ref().map(|i| i.0.to_string()), - TimeRange { - start_time: match (granularity, i.start_bucket) { - (Some(g), Some(st)) => g.clip_to_start(st)?, - _ => time_range.start_time, - }, - end_time: granularity.as_ref().map_or_else( - || Ok(time_range.end_time), - |g| i.end_bucket.map(|et| g.clip_to_end(et)).transpose(), - )?, - }, - ), - i, - )) - }) - .collect::, - crate::analytics::query::PostProcessingError, - >>() - .change_context(MetricsError::PostProcessingFailure) - } -} diff --git a/crates/router/src/analytics/refunds/metrics/refund_success_rate.rs b/crates/router/src/analytics/refunds/metrics/refund_success_rate.rs deleted file mode 100644 index 42f9ccf8d3..0000000000 --- a/crates/router/src/analytics/refunds/metrics/refund_success_rate.rs +++ /dev/null @@ -1,117 +0,0 @@ -use api_models::analytics::{ - refunds::{RefundDimensions, RefundFilters, RefundMetricsBucketIdentifier}, - Granularity, TimeRange, -}; -use common_utils::errors::ReportSwitchExt; -use error_stack::ResultExt; -use time::PrimitiveDateTime; - -use super::RefundMetricRow; -use crate::analytics::{ - query::{Aggregate, GroupByClause, QueryBuilder, QueryFilter, SeriesBucket, ToSql}, - types::{AnalyticsCollection, AnalyticsDataSource, MetricsError, MetricsResult}, -}; -#[derive(Default)] -pub(super) struct RefundSuccessRate {} - -#[async_trait::async_trait] -impl super::RefundMetric for RefundSuccessRate -where - T: AnalyticsDataSource + super::RefundMetricAnalytics, - PrimitiveDateTime: ToSql, - AnalyticsCollection: ToSql, - Granularity: GroupByClause, - Aggregate<&'static str>: ToSql, -{ - async fn load_metrics( - &self, - dimensions: &[RefundDimensions], - merchant_id: &str, - filters: &RefundFilters, - granularity: &Option, - time_range: &TimeRange, - pool: &T, - ) -> MetricsResult> - where - T: AnalyticsDataSource + super::RefundMetricAnalytics, - { - let mut query_builder = QueryBuilder::new(AnalyticsCollection::Refund); - let mut dimensions = dimensions.to_vec(); - - dimensions.push(RefundDimensions::RefundStatus); - - for dim in dimensions.iter() { - query_builder.add_select_column(dim).switch()?; - } - - query_builder - .add_select_column(Aggregate::Count { - field: None, - alias: Some("count"), - }) - .switch()?; - query_builder - .add_select_column(Aggregate::Min { - field: "created_at", - alias: Some("start_bucket"), - }) - .switch()?; - query_builder - .add_select_column(Aggregate::Max { - field: "created_at", - alias: Some("end_bucket"), - }) - .switch()?; - - filters.set_filter_clause(&mut query_builder).switch()?; - - query_builder - .add_filter_clause("merchant_id", merchant_id) - .switch()?; - - time_range.set_filter_clause(&mut query_builder).switch()?; - - for dim in dimensions.iter() { - query_builder.add_group_by_clause(dim).switch()?; - } - - if let Some(granularity) = granularity.as_ref() { - granularity - .set_group_by_clause(&mut query_builder) - .switch()?; - } - - query_builder - .execute_query::(pool) - .await - .change_context(MetricsError::QueryBuildingError)? - .change_context(MetricsError::QueryExecutionFailure)? 
- .into_iter() - .map(|i| { - Ok(( - RefundMetricsBucketIdentifier::new( - i.currency.as_ref().map(|i| i.0), - None, - i.connector.clone(), - i.refund_type.as_ref().map(|i| i.0.to_string()), - TimeRange { - start_time: match (granularity, i.start_bucket) { - (Some(g), Some(st)) => g.clip_to_start(st)?, - _ => time_range.start_time, - }, - end_time: granularity.as_ref().map_or_else( - || Ok(time_range.end_time), - |g| i.end_bucket.map(|et| g.clip_to_end(et)).transpose(), - )?, - }, - ), - i, - )) - }) - .collect::, - crate::analytics::query::PostProcessingError, - >>() - .change_context(MetricsError::PostProcessingFailure) - } -} diff --git a/crates/router/src/analytics/refunds/types.rs b/crates/router/src/analytics/refunds/types.rs deleted file mode 100644 index fbfd699726..0000000000 --- a/crates/router/src/analytics/refunds/types.rs +++ /dev/null @@ -1,41 +0,0 @@ -use api_models::analytics::refunds::{RefundDimensions, RefundFilters}; -use error_stack::ResultExt; - -use crate::analytics::{ - query::{QueryBuilder, QueryFilter, QueryResult, ToSql}, - types::{AnalyticsCollection, AnalyticsDataSource}, -}; - -impl QueryFilter for RefundFilters -where - T: AnalyticsDataSource, - AnalyticsCollection: ToSql, -{ - fn set_filter_clause(&self, builder: &mut QueryBuilder) -> QueryResult<()> { - if !self.currency.is_empty() { - builder - .add_filter_in_range_clause(RefundDimensions::Currency, &self.currency) - .attach_printable("Error adding currency filter")?; - } - - if !self.refund_status.is_empty() { - builder - .add_filter_in_range_clause(RefundDimensions::RefundStatus, &self.refund_status) - .attach_printable("Error adding refund status filter")?; - } - - if !self.connector.is_empty() { - builder - .add_filter_in_range_clause(RefundDimensions::Connector, &self.connector) - .attach_printable("Error adding connector filter")?; - } - - if !self.refund_type.is_empty() { - builder - .add_filter_in_range_clause(RefundDimensions::RefundType, &self.refund_type) - .attach_printable("Error adding auth type filter")?; - } - - Ok(()) - } -} diff --git a/crates/router/src/analytics/routes.rs b/crates/router/src/analytics/routes.rs deleted file mode 100644 index 298ec61ec9..0000000000 --- a/crates/router/src/analytics/routes.rs +++ /dev/null @@ -1,145 +0,0 @@ -use actix_web::{web, Responder, Scope}; -use api_models::analytics::{ - GetPaymentFiltersRequest, GetPaymentMetricRequest, GetRefundFilterRequest, - GetRefundMetricRequest, -}; -use router_env::AnalyticsFlow; - -use super::{core::*, payments, refunds, types::AnalyticsDomain}; -use crate::{ - core::api_locking, - services::{api, authentication as auth, authentication::AuthenticationData}, - AppState, -}; - -pub struct Analytics; - -impl Analytics { - pub fn server(state: AppState) -> Scope { - let route = web::scope("/analytics/v1").app_data(web::Data::new(state)); - route - .service(web::resource("metrics/payments").route(web::post().to(get_payment_metrics))) - .service(web::resource("metrics/refunds").route(web::post().to(get_refunds_metrics))) - .service(web::resource("filters/payments").route(web::post().to(get_payment_filters))) - .service(web::resource("filters/refunds").route(web::post().to(get_refund_filters))) - .service(web::resource("{domain}/info").route(web::get().to(get_info))) - } -} - -pub async fn get_info( - state: web::Data, - req: actix_web::HttpRequest, - domain: actix_web::web::Path, -) -> impl Responder { - let flow = AnalyticsFlow::GetInfo; - api::server_wrap( - flow, - state, - &req, - domain.into_inner(), - |_, _, domain| 
get_domain_info(domain), - &auth::NoAuth, - api_locking::LockAction::NotApplicable, - ) - .await -} - -/// # Panics -/// -/// Panics if `json_payload` array does not contain one `GetPaymentMetricRequest` element. -pub async fn get_payment_metrics( - state: web::Data, - req: actix_web::HttpRequest, - json_payload: web::Json<[GetPaymentMetricRequest; 1]>, -) -> impl Responder { - // safety: This shouldn't panic owing to the data type - #[allow(clippy::expect_used)] - let payload = json_payload - .into_inner() - .to_vec() - .pop() - .expect("Couldn't get GetPaymentMetricRequest"); - let flow = AnalyticsFlow::GetPaymentMetrics; - api::server_wrap( - flow, - state, - &req, - payload, - |state, auth: AuthenticationData, req| { - payments::get_metrics(state.pool.clone(), auth.merchant_account, req) - }, - auth::auth_type(&auth::ApiKeyAuth, &auth::JWTAuth, req.headers()), - api_locking::LockAction::NotApplicable, - ) - .await -} - -/// # Panics -/// -/// Panics if `json_payload` array does not contain one `GetRefundMetricRequest` element. -pub async fn get_refunds_metrics( - state: web::Data, - req: actix_web::HttpRequest, - json_payload: web::Json<[GetRefundMetricRequest; 1]>, -) -> impl Responder { - #[allow(clippy::expect_used)] - // safety: This shouldn't panic owing to the data type - let payload = json_payload - .into_inner() - .to_vec() - .pop() - .expect("Couldn't get GetRefundMetricRequest"); - let flow = AnalyticsFlow::GetRefundsMetrics; - api::server_wrap( - flow, - state, - &req, - payload, - |state, auth: AuthenticationData, req| { - refunds::get_metrics(state.pool.clone(), auth.merchant_account, req) - }, - auth::auth_type(&auth::ApiKeyAuth, &auth::JWTAuth, req.headers()), - api_locking::LockAction::NotApplicable, - ) - .await -} - -pub async fn get_payment_filters( - state: web::Data, - req: actix_web::HttpRequest, - json_payload: web::Json, -) -> impl Responder { - let flow = AnalyticsFlow::GetPaymentFilters; - api::server_wrap( - flow, - state, - &req, - json_payload.into_inner(), - |state, auth: AuthenticationData, req| { - payment_filters_core(state.pool.clone(), req, auth.merchant_account) - }, - auth::auth_type(&auth::ApiKeyAuth, &auth::JWTAuth, req.headers()), - api_locking::LockAction::NotApplicable, - ) - .await -} - -pub async fn get_refund_filters( - state: web::Data, - req: actix_web::HttpRequest, - json_payload: web::Json, -) -> impl Responder { - let flow = AnalyticsFlow::GetRefundFilters; - api::server_wrap( - flow, - state, - &req, - json_payload.into_inner(), - |state, auth: AuthenticationData, req: GetRefundFilterRequest| { - refund_filter_core(state.pool.clone(), req, auth.merchant_account) - }, - auth::auth_type(&auth::ApiKeyAuth, &auth::JWTAuth, req.headers()), - api_locking::LockAction::NotApplicable, - ) - .await -} diff --git a/crates/router/src/analytics/sqlx.rs b/crates/router/src/analytics/sqlx.rs deleted file mode 100644 index 5c3060c35a..0000000000 --- a/crates/router/src/analytics/sqlx.rs +++ /dev/null @@ -1,386 +0,0 @@ -use std::{fmt::Display, str::FromStr}; - -use api_models::analytics::refunds::RefundType; -use common_enums::enums::{ - AttemptStatus, AuthenticationType, Currency, PaymentMethod, RefundStatus, -}; -use common_utils::errors::{CustomResult, ParsingError}; -use error_stack::{IntoReport, ResultExt}; -#[cfg(feature = "kms")] -use external_services::{kms, kms::decrypt::KmsDecrypt}; -#[cfg(not(feature = "kms"))] -use masking::PeekInterface; -use sqlx::{ - postgres::{PgArgumentBuffer, PgPoolOptions, PgRow, PgTypeInfo, PgValueRef}, - Decode, 
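The metric handlers above accept a one-element JSON array and extract the payload with `.to_vec().pop().expect(...)`. Since the array length is part of the type, the same extraction can be done infallibly by destructuring, dropping both the `Vec` round-trip and the panic path. This is a suggested alternative, not code from this change; `Request` stands in for the real payload types:

```rust
// Sketch: destructuring a [T; 1] is irrefutable, so no expect() is needed.
// `Request` is a stand-in for GetPaymentMetricRequest / GetRefundMetricRequest.
#[derive(Debug)]
struct Request {
    metric: String,
}

fn take_single(payload: [Request; 1]) -> Request {
    let [request] = payload; // the type guarantees exactly one element
    request
}

fn main() {
    let payload = [Request { metric: "payment_count".into() }];
    println!("{:?}", take_single(payload));
}
```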
Encode, - Error::ColumnNotFound, - FromRow, Pool, Postgres, Row, -}; -use time::PrimitiveDateTime; - -use super::{ - query::{Aggregate, ToSql}, - types::{ - AnalyticsCollection, AnalyticsDataSource, DBEnumWrapper, LoadRow, QueryExecutionError, - }, -}; -use crate::configs::settings::Database; - -#[derive(Debug, Clone)] -pub struct SqlxClient { - pool: Pool, -} - -impl SqlxClient { - pub async fn from_conf( - conf: &Database, - #[cfg(feature = "kms")] kms_client: &kms::KmsClient, - ) -> Self { - #[cfg(feature = "kms")] - #[allow(clippy::expect_used)] - let password = conf - .password - .decrypt_inner(kms_client) - .await - .expect("Failed to KMS decrypt database password"); - - #[cfg(not(feature = "kms"))] - let password = &conf.password.peek(); - let database_url = format!( - "postgres://{}:{}@{}:{}/{}", - conf.username, password, conf.host, conf.port, conf.dbname - ); - #[allow(clippy::expect_used)] - let pool = PgPoolOptions::new() - .max_connections(conf.pool_size) - .acquire_timeout(std::time::Duration::from_secs(conf.connection_timeout)) - .connect_lazy(&database_url) - .expect("SQLX Pool Creation failed"); - Self { pool } - } -} - -pub trait DbType { - fn name() -> &'static str; -} - -macro_rules! db_type { - ($a: ident, $str: tt) => { - impl DbType for $a { - fn name() -> &'static str { - stringify!($str) - } - } - }; - ($a:ident) => { - impl DbType for $a { - fn name() -> &'static str { - stringify!($a) - } - } - }; -} - -db_type!(Currency); -db_type!(AuthenticationType); -db_type!(AttemptStatus); -db_type!(PaymentMethod, TEXT); -db_type!(RefundStatus); -db_type!(RefundType); - -impl<'q, Type> Encode<'q, Postgres> for DBEnumWrapper -where - Type: DbType + FromStr + Display, -{ - fn encode_by_ref(&self, buf: &mut PgArgumentBuffer) -> sqlx::encode::IsNull { - self.0.to_string().encode(buf) - } - fn size_hint(&self) -> usize { - self.0.to_string().size_hint() - } -} - -impl<'r, Type> Decode<'r, Postgres> for DBEnumWrapper -where - Type: DbType + FromStr + Display, -{ - fn decode( - value: PgValueRef<'r>, - ) -> Result> { - let str_value = <&'r str as Decode<'r, Postgres>>::decode(value)?; - Type::from_str(str_value).map(DBEnumWrapper).or(Err(format!( - "invalid value {:?} for enum {}", - str_value, - Type::name() - ) - .into())) - } -} - -impl sqlx::Type for DBEnumWrapper -where - Type: DbType + FromStr + Display, -{ - fn type_info() -> PgTypeInfo { - PgTypeInfo::with_name(Type::name()) - } -} - -impl LoadRow for SqlxClient -where - for<'a> T: FromRow<'a, PgRow>, -{ - fn load_row(row: PgRow) -> CustomResult { - T::from_row(&row) - .into_report() - .change_context(QueryExecutionError::RowExtractionFailure) - } -} - -impl super::payments::filters::PaymentFilterAnalytics for SqlxClient {} -impl super::payments::metrics::PaymentMetricAnalytics for SqlxClient {} -impl super::refunds::metrics::RefundMetricAnalytics for SqlxClient {} -impl super::refunds::filters::RefundFilterAnalytics for SqlxClient {} - -#[async_trait::async_trait] -impl AnalyticsDataSource for SqlxClient { - type Row = PgRow; - - async fn load_results(&self, query: &str) -> CustomResult, QueryExecutionError> - where - Self: LoadRow, - { - sqlx::query(&format!("{query};")) - .fetch_all(&self.pool) - .await - .into_report() - .change_context(QueryExecutionError::DatabaseError) - .attach_printable_lazy(|| format!("Failed to run query {query}"))? 
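The `Encode`/`Decode` impls above round-trip `DBEnumWrapper` through text: the database sees enum values as strings, and `FromStr` on the wrapped type rebuilds them. The decode half, reduced to plain std types:

```rust
// Self-contained sketch of the removed DBEnumWrapper decode path: the driver
// yields text, and FromStr turns it back into a typed enum.
use std::str::FromStr;

#[derive(Debug, PartialEq)]
enum RefundStatus {
    Success,
    Failure,
}

impl FromStr for RefundStatus {
    type Err = String;

    fn from_str(s: &str) -> Result<Self, Self::Err> {
        match s {
            "success" => Ok(Self::Success),
            "failure" => Ok(Self::Failure),
            other => Err(format!("invalid value {other:?} for enum RefundStatus")),
        }
    }
}

#[derive(Debug, PartialEq)]
struct DbEnumWrapper<T>(T);

fn decode<T: FromStr>(raw: &str) -> Result<DbEnumWrapper<T>, T::Err> {
    raw.parse().map(DbEnumWrapper)
}

fn main() {
    let status: DbEnumWrapper<RefundStatus> = decode("success").unwrap();
    assert_eq!(status, DbEnumWrapper(RefundStatus::Success));
}
```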
- .into_iter() - .map(Self::load_row) - .collect::, _>>() - .change_context(QueryExecutionError::RowExtractionFailure) - } -} - -impl<'a> FromRow<'a, PgRow> for super::refunds::metrics::RefundMetricRow { - fn from_row(row: &'a PgRow) -> sqlx::Result { - let currency: Option> = - row.try_get("currency").or_else(|e| match e { - ColumnNotFound(_) => Ok(Default::default()), - e => Err(e), - })?; - let refund_status: Option> = - row.try_get("refund_status").or_else(|e| match e { - ColumnNotFound(_) => Ok(Default::default()), - e => Err(e), - })?; - let connector: Option = row.try_get("connector").or_else(|e| match e { - ColumnNotFound(_) => Ok(Default::default()), - e => Err(e), - })?; - let refund_type: Option> = - row.try_get("refund_type").or_else(|e| match e { - ColumnNotFound(_) => Ok(Default::default()), - e => Err(e), - })?; - let total: Option = row.try_get("total").or_else(|e| match e { - ColumnNotFound(_) => Ok(Default::default()), - e => Err(e), - })?; - let count: Option = row.try_get("count").or_else(|e| match e { - ColumnNotFound(_) => Ok(Default::default()), - e => Err(e), - })?; - - let start_bucket: Option = row - .try_get::, _>("start_bucket")? - .and_then(|dt| dt.replace_millisecond(0).ok()); - let end_bucket: Option = row - .try_get::, _>("end_bucket")? - .and_then(|dt| dt.replace_millisecond(0).ok()); - Ok(Self { - currency, - refund_status, - connector, - refund_type, - total, - count, - start_bucket, - end_bucket, - }) - } -} - -impl<'a> FromRow<'a, PgRow> for super::payments::metrics::PaymentMetricRow { - fn from_row(row: &'a PgRow) -> sqlx::Result { - let currency: Option> = - row.try_get("currency").or_else(|e| match e { - ColumnNotFound(_) => Ok(Default::default()), - e => Err(e), - })?; - let status: Option> = - row.try_get("status").or_else(|e| match e { - ColumnNotFound(_) => Ok(Default::default()), - e => Err(e), - })?; - let connector: Option = row.try_get("connector").or_else(|e| match e { - ColumnNotFound(_) => Ok(Default::default()), - e => Err(e), - })?; - let authentication_type: Option> = - row.try_get("authentication_type").or_else(|e| match e { - ColumnNotFound(_) => Ok(Default::default()), - e => Err(e), - })?; - let payment_method: Option = - row.try_get("payment_method").or_else(|e| match e { - ColumnNotFound(_) => Ok(Default::default()), - e => Err(e), - })?; - let total: Option = row.try_get("total").or_else(|e| match e { - ColumnNotFound(_) => Ok(Default::default()), - e => Err(e), - })?; - let count: Option = row.try_get("count").or_else(|e| match e { - ColumnNotFound(_) => Ok(Default::default()), - e => Err(e), - })?; - - let start_bucket: Option = row - .try_get::, _>("start_bucket")? - .and_then(|dt| dt.replace_millisecond(0).ok()); - let end_bucket: Option = row - .try_get::, _>("end_bucket")? 
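Each `FromRow` impl above repeats the same `try_get(..).or_else(ColumnNotFound => default)` fallback per column. A small generic helper could collapse that repetition; this is a hypothetical refactor sketched against `sqlx` (with the `postgres` feature), not code from the removed module:

```rust
// Hypothetical helper for the repeated "missing column means None" pattern
// in the removed FromRow impls. Assumes sqlx with the "postgres" feature.
use sqlx::{postgres::PgRow, Error::ColumnNotFound, Row};

fn get_optional<'r, T>(row: &'r PgRow, column: &str) -> sqlx::Result<Option<T>>
where
    T: sqlx::Decode<'r, sqlx::Postgres> + sqlx::Type<sqlx::Postgres>,
{
    match row.try_get::<Option<T>, _>(column) {
        // A column absent from this query's projection is treated as no data.
        Err(ColumnNotFound(_)) => Ok(None),
        other => other,
    }
}
```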
-            .and_then(|dt| dt.replace_millisecond(0).ok());
-        Ok(Self {
-            currency,
-            status,
-            connector,
-            authentication_type,
-            payment_method,
-            total,
-            count,
-            start_bucket,
-            end_bucket,
-        })
-    }
-}
-
-impl<'a> FromRow<'a, PgRow> for super::payments::filters::FilterRow {
-    fn from_row(row: &'a PgRow) -> sqlx::Result<Self> {
-        let currency: Option<DBEnumWrapper<Currency>> =
-            row.try_get("currency").or_else(|e| match e {
-                ColumnNotFound(_) => Ok(Default::default()),
-                e => Err(e),
-            })?;
-        let status: Option<DBEnumWrapper<AttemptStatus>> =
-            row.try_get("status").or_else(|e| match e {
-                ColumnNotFound(_) => Ok(Default::default()),
-                e => Err(e),
-            })?;
-        let connector: Option<String> = row.try_get("connector").or_else(|e| match e {
-            ColumnNotFound(_) => Ok(Default::default()),
-            e => Err(e),
-        })?;
-        let authentication_type: Option<DBEnumWrapper<AuthenticationType>> =
-            row.try_get("authentication_type").or_else(|e| match e {
-                ColumnNotFound(_) => Ok(Default::default()),
-                e => Err(e),
-            })?;
-        let payment_method: Option<String> =
-            row.try_get("payment_method").or_else(|e| match e {
-                ColumnNotFound(_) => Ok(Default::default()),
-                e => Err(e),
-            })?;
-        Ok(Self {
-            currency,
-            status,
-            connector,
-            authentication_type,
-            payment_method,
-        })
-    }
-}
-
-impl<'a> FromRow<'a, PgRow> for super::refunds::filters::RefundFilterRow {
-    fn from_row(row: &'a PgRow) -> sqlx::Result<Self> {
-        let currency: Option<DBEnumWrapper<Currency>> =
-            row.try_get("currency").or_else(|e| match e {
-                ColumnNotFound(_) => Ok(Default::default()),
-                e => Err(e),
-            })?;
-        let refund_status: Option<DBEnumWrapper<RefundStatus>> =
-            row.try_get("refund_status").or_else(|e| match e {
-                ColumnNotFound(_) => Ok(Default::default()),
-                e => Err(e),
-            })?;
-        let connector: Option<String> = row.try_get("connector").or_else(|e| match e {
-            ColumnNotFound(_) => Ok(Default::default()),
-            e => Err(e),
-        })?;
-        let refund_type: Option<DBEnumWrapper<RefundType>> =
-            row.try_get("refund_type").or_else(|e| match e {
-                ColumnNotFound(_) => Ok(Default::default()),
-                e => Err(e),
-            })?;
-        Ok(Self {
-            currency,
-            refund_status,
-            connector,
-            refund_type,
-        })
-    }
-}
-
-impl ToSql for PrimitiveDateTime {
-    fn to_sql(&self) -> error_stack::Result<String, ParsingError> {
-        Ok(self.to_string())
-    }
-}
-
-impl ToSql for AnalyticsCollection {
-    fn to_sql(&self) -> error_stack::Result<String, ParsingError> {
-        match self {
-            Self::Payment => Ok("payment_attempt".to_string()),
-            Self::Refund => Ok("refund".to_string()),
-        }
-    }
-}
-
-impl<T> ToSql for Aggregate<T>
-where
-    T: ToSql,
-{
-    fn to_sql(&self) -> error_stack::Result<String, ParsingError> {
-        Ok(match self {
-            Self::Count { field: _, alias } => {
-                format!(
-                    "count(*){}",
-                    alias.map_or_else(|| "".to_owned(), |alias| format!(" as {}", alias))
-                )
-            }
-            Self::Sum { field, alias } => {
-                format!(
-                    "sum({}){}",
-                    field.to_sql().attach_printable("Failed to sum aggregate")?,
-                    alias.map_or_else(|| "".to_owned(), |alias| format!(" as {}", alias))
-                )
-            }
-            Self::Min { field, alias } => {
-                format!(
-                    "min({}){}",
-                    field.to_sql().attach_printable("Failed to min aggregate")?,
-                    alias.map_or_else(|| "".to_owned(), |alias| format!(" as {}", alias))
-                )
-            }
-            Self::Max { field, alias } => {
-                format!(
-                    "max({}){}",
-                    field.to_sql().attach_printable("Failed to max aggregate")?,
-                    alias.map_or_else(|| "".to_owned(), |alias| format!(" as {}", alias))
-                )
-            }
-        })
-    }
-}
diff --git a/crates/router/src/analytics/types.rs b/crates/router/src/analytics/types.rs
deleted file mode 100644
index 5173e6214d..0000000000
--- a/crates/router/src/analytics/types.rs
+++ /dev/null
@@ -1,114 +0,0 @@
-use std::{fmt::Display, str::FromStr};
-
-use common_utils::errors::{CustomResult, ErrorSwitch, ParsingError};
-use error_stack::{report, Report, ResultExt};
-
-use super::query::QueryBuildingError;
-
-#[derive(serde::Deserialize, Debug, masking::Serialize)]
-#[serde(rename_all = "snake_case")]
-pub enum AnalyticsDomain {
-    Payments,
-    Refunds,
-}
-
-#[derive(Debug, strum::AsRefStr, strum::Display, Clone, Copy)]
-pub enum AnalyticsCollection {
-    Payment,
-    Refund,
-}
-
-#[derive(Debug, serde::Serialize, serde::Deserialize, Eq, PartialEq)]
-#[serde(transparent)]
-pub struct DBEnumWrapper<T: FromStr + Display>(pub T);
-
-impl<T: FromStr + Display> AsRef<T> for DBEnumWrapper<T> {
-    fn as_ref(&self) -> &T {
-        &self.0
-    }
-}
-
-impl<T> FromStr for DBEnumWrapper<T>
-where
-    T: FromStr + Display,
-{
-    type Err = Report<ParsingError>;
-
-    fn from_str(s: &str) -> Result<Self, Self::Err> {
-        T::from_str(s)
-            .map_err(|_er| report!(ParsingError::EnumParseFailure(std::any::type_name::<T>())))
-            .map(DBEnumWrapper)
-            .attach_printable_lazy(|| format!("raw_value: {s}"))
-    }
-}
-
-// Analytics Framework
-
-pub trait RefundAnalytics {}
-
-#[async_trait::async_trait]
-pub trait AnalyticsDataSource
-where
-    Self: Sized + Sync + Send,
-{
-    type Row;
-    async fn load_results<T>(&self, query: &str) -> CustomResult<Vec<T>, QueryExecutionError>
-    where
-        Self: LoadRow<T>;
-}
-
-pub trait LoadRow<T>
-where
-    Self: AnalyticsDataSource,
-    T: Sized,
-{
-    fn load_row(row: Self::Row) -> CustomResult<T, QueryExecutionError>;
-}
-
-#[derive(thiserror::Error, Debug)]
-pub enum MetricsError {
-    #[error("Error building query")]
-    QueryBuildingError,
-    #[error("Error running Query")]
-    QueryExecutionFailure,
-    #[error("Error processing query results")]
-    PostProcessingFailure,
-    #[allow(dead_code)]
-    #[error("Not Implemented")]
-    NotImplemented,
-}
-
-#[derive(Debug, thiserror::Error)]
-pub enum QueryExecutionError {
-    #[error("Failed to extract domain rows")]
-    RowExtractionFailure,
-    #[error("Database error")]
-    DatabaseError,
-}
-
-pub type MetricsResult<T> = CustomResult<T, MetricsError>;
-
-impl ErrorSwitch<MetricsError> for QueryBuildingError {
-    fn switch(&self) -> MetricsError {
-        MetricsError::QueryBuildingError
-    }
-}
-
-pub type FiltersResult<T> = CustomResult<T, FiltersError>;
-
-#[derive(thiserror::Error, Debug)]
-pub enum FiltersError {
-    #[error("Error building query")]
-    QueryBuildingError,
-    #[error("Error running Query")]
-    QueryExecutionFailure,
-    #[allow(dead_code)]
-    #[error("Not Implemented")]
-    NotImplemented,
-}
-
-impl ErrorSwitch<FiltersError> for QueryBuildingError {
-    fn switch(&self) -> FiltersError {
-        FiltersError::QueryBuildingError
-    }
-}
diff --git a/crates/router/src/analytics/utils.rs b/crates/router/src/analytics/utils.rs
deleted file mode 100644
index f7e6ea69dc..0000000000
--- a/crates/router/src/analytics/utils.rs
+++ /dev/null
@@ -1,22 +0,0 @@
-use api_models::analytics::{
-    payments::{PaymentDimensions, PaymentMetrics},
-    refunds::{RefundDimensions, RefundMetrics},
-    NameDescription,
-};
-use strum::IntoEnumIterator;
-
-pub fn get_payment_dimensions() -> Vec<NameDescription> {
-    PaymentDimensions::iter().map(Into::into).collect()
-}
-
-pub fn get_refund_dimensions() -> Vec<NameDescription> {
-    RefundDimensions::iter().map(Into::into).collect()
-}
-
-pub fn get_payment_metrics_info() -> Vec<NameDescription> {
-    PaymentMetrics::iter().map(Into::into).collect()
-}
-
-pub fn get_refund_metrics_info() -> Vec<NameDescription> {
-    RefundMetrics::iter().map(Into::into).collect()
-}
diff --git a/crates/router/src/configs/settings.rs b/crates/router/src/configs/settings.rs
index b48a52e53b..204060b37a 100644
--- a/crates/router/src/configs/settings.rs
+++ b/crates/router/src/configs/settings.rs
@@ -16,8 +16,6 @@ pub use router_env::config::{Log, LogConsole, LogFile, LogTelemetry};
 use scheduler::SchedulerSettings;
 use serde::{de::Error, Deserialize, Deserializer};
 
-#[cfg(feature = "olap")]
-use crate::analytics::AnalyticsConfig;
 use crate::{
     core::errors::{ApplicationError, ApplicationResult},
     env::{self, logger, Env},
@@ -103,8 +101,6 @@ pub struct Settings {
     pub lock_settings: LockSettings,
     pub temp_locker_enable_config: TempLockerEnableConfig,
     pub payment_link: PaymentLink,
-    #[cfg(feature = "olap")]
-    pub analytics: AnalyticsConfig,
     #[cfg(feature = "kv_store")]
     pub kv_config: KvConfig,
 }
diff --git a/crates/router/src/lib.rs b/crates/router/src/lib.rs
index b91a79f072..11efec6405 100644
--- a/crates/router/src/lib.rs
+++ b/crates/router/src/lib.rs
@@ -1,8 +1,6 @@
 #![forbid(unsafe_code)]
 #![recursion_limit = "256"]
 
-#[cfg(feature = "olap")]
-mod analytics;
 #[cfg(feature = "stripe")]
 pub mod compatibility;
 pub mod configs;
@@ -143,7 +141,6 @@ pub fn mk_app(
         .service(routes::ApiKeys::server(state.clone()))
         .service(routes::Files::server(state.clone()))
         .service(routes::Disputes::server(state.clone()))
-        .service(routes::Analytics::server(state.clone()))
 }
 
 #[cfg(all(feature = "olap", feature = "kms"))]
diff --git a/crates/router/src/routes.rs b/crates/router/src/routes.rs
index 7bd0541f51..307797e8ac 100644
--- a/crates/router/src/routes.rs
+++ b/crates/router/src/routes.rs
@@ -37,5 +37,3 @@ pub use self::app::{
 };
 #[cfg(feature = "stripe")]
 pub use super::compatibility::stripe::StripeApis;
-#[cfg(feature = "olap")]
-pub use crate::analytics::routes::{self as analytics, Analytics};
diff --git a/crates/router/src/routes/app.rs b/crates/router/src/routes/app.rs
index 5033a91717..5b16e93404 100644
--- a/crates/router/src/routes/app.rs
+++ b/crates/router/src/routes/app.rs
@@ -42,8 +42,6 @@ pub struct AppState {
     #[cfg(feature = "kms")]
     pub kms_secrets: Arc<settings::ActiveKmsSecrets>,
     pub api_client: Box<dyn crate::services::ApiClient>,
-    #[cfg(feature = "olap")]
-    pub pool: crate::analytics::AnalyticsProvider,
 }
 
 impl scheduler::SchedulerAppState for AppState {
@@ -126,14 +124,6 @@ impl AppState {
             ),
         };
 
-        #[cfg(feature = "olap")]
-        let pool = crate::analytics::AnalyticsProvider::from_conf(
-            &conf.analytics,
-            #[cfg(feature = "kms")]
-            kms_client,
-        )
-        .await;
-
        #[cfg(feature = "kms")]
        #[allow(clippy::expect_used)]
        let kms_secrets = settings::ActiveKmsSecrets {
@@ -155,8 +145,6 @@ impl AppState {
             kms_secrets: Arc::new(kms_secrets),
             api_client,
             event_handler: Box::<EventLogger>::default(),
-            #[cfg(feature = "olap")]
-            pool,
         }
     }
diff --git a/crates/router_env/src/lib.rs b/crates/router_env/src/lib.rs
index e75606aa15..d3612767ff 100644
--- a/crates/router_env/src/lib.rs
+++ b/crates/router_env/src/lib.rs
@@ -1,5 +1,5 @@
 #![forbid(unsafe_code)]
-#![warn(missing_debug_implementations)]
+#![warn(missing_docs, missing_debug_implementations)]
 
 //!
 //! Environment of payment router: logger, basic config, its environment awareness.
@@ -22,7 +22,6 @@ pub mod vergen;
 pub use logger::*;
 pub use once_cell;
 pub use opentelemetry;
-use strum::Display;
 pub use tracing;
 #[cfg(feature = "actix_web")]
 pub use tracing_actix_web;
@@ -30,19 +29,3 @@ pub use tracing_appender;
 
 #[doc(inline)]
 pub use self::env::*;
-use crate::types::FlowMetric;
-
-/// Analytics Flow routes Enums
-/// Info - Dimensions and filters available for the domain
-/// Filters - Set of values present for the dimension
-/// Metrics - Analytical data on dimensions and metrics
-#[derive(Debug, Display, Clone, PartialEq, Eq)]
-pub enum AnalyticsFlow {
-    GetInfo,
-    GetPaymentFilters,
-    GetRefundFilters,
-    GetRefundsMetrics,
-    GetPaymentMetrics,
-}
-
-impl FlowMetric for AnalyticsFlow {}
diff --git a/crates/router_env/src/metrics.rs b/crates/router_env/src/metrics.rs
index 14402a7a6e..e4943699ee 100644
--- a/crates/router_env/src/metrics.rs
+++ b/crates/router_env/src/metrics.rs
@@ -63,22 +63,3 @@ macro_rules! histogram_metric {
         > = once_cell::sync::Lazy::new(|| $meter.f64_histogram($description).init());
     };
 }
-
-/// Create a [`Histogram`][Histogram] u64 metric with the specified name and an optional description,
-/// associated with the specified meter. Note that the meter must be to a valid [`Meter`][Meter].
-///
-/// [Histogram]: opentelemetry::metrics::Histogram
-/// [Meter]: opentelemetry::metrics::Meter
-#[macro_export]
-macro_rules! histogram_metric_u64 {
-    ($name:ident, $meter:ident) => {
-        pub(crate) static $name: once_cell::sync::Lazy<
-            $crate::opentelemetry::metrics::Histogram<u64>,
-        > = once_cell::sync::Lazy::new(|| $meter.u64_histogram(stringify!($name)).init());
-    };
-    ($name:ident, $meter:ident, $description:literal) => {
-        pub(crate) static $name: once_cell::sync::Lazy<
-            $crate::opentelemetry::metrics::Histogram<u64>,
-        > = once_cell::sync::Lazy::new(|| $meter.u64_histogram($description).init());
-    };
-}
diff --git a/loadtest/config/development.toml b/loadtest/config/development.toml
index 96f215ab08..7cdbc8dd6f 100644
--- a/loadtest/config/development.toml
+++ b/loadtest/config/development.toml
@@ -235,17 +235,5 @@ bank_debit.ach = { connector_list = "gocardless"}
 bank_debit.becs = { connector_list = "gocardless"}
 bank_debit.sepa = { connector_list = "gocardless"}
 
-[analytics]
-source = "sqlx"
-
-[analytics.sqlx]
-username = "db_user"
-password = "db_pass"
-host = "localhost"
-port = 5432
-dbname = "hyperswitch_db"
-pool_size = 5
-connection_timeout = 10
-
 [kv_config]
 ttl = 300 # 5 * 60 seconds