Mirror of https://github.com/juspay/hyperswitch.git (synced 2025-10-31 10:06:32 +08:00)
	feat(analytics): adding kafka dispute analytic events (#3549)
Co-authored-by: Sampras Lopes <lsampras@pm.me>
Authored by harsh-sharma-juspay, committed by GitHub
Parent: fb254b8924
Commit: 39e2233982

@@ -559,6 +559,7 @@ refund_analytics_topic = "topic"      # Kafka topic to be used for Refund events
 api_logs_topic = "topic"              # Kafka topic to be used for incoming api events
 connector_logs_topic = "topic"        # Kafka topic to be used for connector api events
 outgoing_webhook_logs_topic = "topic" # Kafka topic to be used for outgoing webhook events
+dispute_analytics_topic = "topic"     # Kafka topic to be used for Dispute events

 # File storage configuration
 [file_storage]
@@ -542,6 +542,7 @@ refund_analytics_topic = "hyperswitch-refund-events"
 api_logs_topic = "hyperswitch-api-log-events"
 connector_logs_topic = "hyperswitch-connector-api-events"
 outgoing_webhook_logs_topic = "hyperswitch-outgoing-webhook-events"
+dispute_analytics_topic = "hyperswitch-dispute-events"

 [analytics]
 source = "sqlx"
@@ -383,6 +383,7 @@ refund_analytics_topic = "hyperswitch-refund-events"
 api_logs_topic = "hyperswitch-api-log-events"
 connector_logs_topic = "hyperswitch-connector-api-events"
 outgoing_webhook_logs_topic = "hyperswitch-outgoing-webhook-events"
+dispute_analytics_topic = "hyperswitch-dispute-events"

 [analytics]
 source = "sqlx"
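The new `dispute_analytics_topic` key must match the `kafka_topic_list` consumed by the ClickHouse Kafka engine tables below. A quick way to sanity-check the wiring independently of the router is to push a hand-built JSON record onto the topic with rdkafka; this is a minimal sketch, and the broker address, key, and payload values are assumptions rather than part of the commit:

    use std::time::Duration;

    use rdkafka::config::ClientConfig;
    use rdkafka::producer::{BaseRecord, DefaultProducerContext, Producer, ThreadedProducer};

    fn main() {
        // Broker address is an assumption; point it at whatever the [kafka] brokers setting uses.
        let producer: ThreadedProducer<DefaultProducerContext> = ClientConfig::new()
            .set("bootstrap.servers", "localhost:9092")
            .create()
            .expect("failed to create producer");

        // Hypothetical minimal payload; field names follow the dispute_queue schema below.
        let payload = r#"{"dispute_id":"dp_test","amount":"1000","currency":"USD","sign_flag":1}"#;

        producer
            .send(BaseRecord::to("hyperswitch-dispute-events").key("smoke-test").payload(payload))
            .map_err(|(err, _record)| err)
            .expect("failed to enqueue message");

        // Give the producer's background thread a chance to deliver before exiting.
        let _ = producer.flush(Duration::from_secs(5));
    }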
@@ -0,0 +1,142 @@
CREATE TABLE hyperswitch.dispute_queue on cluster '{cluster}' (
    `dispute_id` String,
    `amount` String,
    `currency` String,
    `dispute_stage` LowCardinality(String),
    `dispute_status` LowCardinality(String),
    `payment_id` String,
    `attempt_id` String,
    `merchant_id` String,
    `connector_status` String,
    `connector_dispute_id` String,
    `connector_reason` Nullable(String),
    `connector_reason_code` Nullable(String),
    `challenge_required_by` Nullable(DateTime) CODEC(T64, LZ4),
    `connector_created_at` Nullable(DateTime) CODEC(T64, LZ4),
    `connector_updated_at` Nullable(DateTime) CODEC(T64, LZ4),
    `created_at` DateTime CODEC(T64, LZ4),
    `modified_at` DateTime CODEC(T64, LZ4),
    `connector` LowCardinality(String),
    `evidence` Nullable(String),
    `profile_id` Nullable(String),
    `merchant_connector_id` Nullable(String),
    `sign_flag` Int8
) ENGINE = Kafka SETTINGS kafka_broker_list = 'kafka0:29092',
kafka_topic_list = 'hyperswitch-dispute-events',
kafka_group_name = 'hyper-c1',
kafka_format = 'JSONEachRow',
kafka_handle_error_mode = 'stream';


CREATE MATERIALIZED VIEW hyperswitch.dispute_mv on cluster '{cluster}' TO hyperswitch.dispute (
    `dispute_id` String,
    `amount` String,
    `currency` String,
    `dispute_stage` LowCardinality(String),
    `dispute_status` LowCardinality(String),
    `payment_id` String,
    `attempt_id` String,
    `merchant_id` String,
    `connector_status` String,
    `connector_dispute_id` String,
    `connector_reason` Nullable(String),
    `connector_reason_code` Nullable(String),
    `challenge_required_by` Nullable(DateTime64(3)),
    `connector_created_at` Nullable(DateTime64(3)),
    `connector_updated_at` Nullable(DateTime64(3)),
    `created_at` DateTime64(3),
    `modified_at` DateTime64(3),
    `connector` LowCardinality(String),
    `evidence` Nullable(String),
    `profile_id` Nullable(String),
    `merchant_connector_id` Nullable(String),
    `inserted_at` DateTime64(3),
    `sign_flag` Int8
) AS
SELECT
    dispute_id,
    amount,
    currency,
    dispute_stage,
    dispute_status,
    payment_id,
    attempt_id,
    merchant_id,
    connector_status,
    connector_dispute_id,
    connector_reason,
    connector_reason_code,
    challenge_required_by,
    connector_created_at,
    connector_updated_at,
    created_at,
    modified_at,
    connector,
    evidence,
    profile_id,
    merchant_connector_id,
    now() as inserted_at,
    sign_flag
FROM
    hyperswitch.dispute_queue
WHERE length(_error) = 0;


CREATE TABLE hyperswitch.dispute_clustered on cluster '{cluster}' (
    `dispute_id` String,
    `amount` String,
    `currency` String,
    `dispute_stage` LowCardinality(String),
    `dispute_status` LowCardinality(String),
    `payment_id` String,
    `attempt_id` String,
    `merchant_id` String,
    `connector_status` String,
    `connector_dispute_id` String,
    `connector_reason` Nullable(String),
    `connector_reason_code` Nullable(String),
    `challenge_required_by` Nullable(DateTime) CODEC(T64, LZ4),
    `connector_created_at` Nullable(DateTime) CODEC(T64, LZ4),
    `connector_updated_at` Nullable(DateTime) CODEC(T64, LZ4),
    `created_at` DateTime DEFAULT now() CODEC(T64, LZ4),
    `modified_at` DateTime DEFAULT now() CODEC(T64, LZ4),
    `connector` LowCardinality(String),
    `evidence` String DEFAULT '{}' CODEC(LZ4),
    `profile_id` Nullable(String),
    `merchant_connector_id` Nullable(String),
    `inserted_at` DateTime DEFAULT now() CODEC(T64, LZ4),
    `sign_flag` Int8,
    INDEX connectorIndex connector TYPE bloom_filter GRANULARITY 1,
    INDEX disputeStatusIndex dispute_status TYPE bloom_filter GRANULARITY 1,
    INDEX disputeStageIndex dispute_stage TYPE bloom_filter GRANULARITY 1
) ENGINE = ReplicatedCollapsingMergeTree(
    '/clickhouse/{installation}/{cluster}/tables/{shard}/hyperswitch/dispute_clustered',
    '{replica}',
    sign_flag
)
PARTITION BY toStartOfDay(created_at)
ORDER BY
    (created_at, merchant_id, dispute_id)
TTL created_at + toIntervalMonth(6);


CREATE MATERIALIZED VIEW hyperswitch.dispute_parse_errors on cluster '{cluster}'
(
    `topic` String,
    `partition` Int64,
    `offset` Int64,
    `raw` String,
    `error` String
)
ENGINE = MergeTree
ORDER BY (topic, partition, offset)
SETTINGS index_granularity = 8192 AS
SELECT
    _topic AS topic,
    _partition AS partition,
    _offset AS offset,
    _raw_message AS raw,
    _error AS error
FROM hyperswitch.dispute_queue
WHERE length(_error) > 0
;
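Taken together, the script builds a three-stage pipeline: `dispute_queue` (Kafka engine) reads JSONEachRow messages from `hyperswitch-dispute-events`, `dispute_mv` copies every cleanly parsed row (`length(_error) = 0`) into `hyperswitch.dispute`, and `dispute_parse_errors` keeps the raw message and error text for anything that failed to parse, while `dispute_clustered` is the replicated CollapsingMergeTree that ultimately stores the rows. The `sign_flag` column is what makes updates work on a collapsing table: the producer emits the previous dispute state with sign -1 and the new state with sign +1, so stale rows cancel out at merge time. The flag is not a field of `KafkaDispute` (defined in `dispute.rs` later in this commit); it presumably comes from the event wrapper behind `KafkaEvent::new` / `KafkaEvent::old`. A sketch of such a wrapper follows; only the constructor names and the `sign_flag` column come from this commit, the struct layout here is an assumption:

    use serde::Serialize;

    #[derive(Serialize)]
    struct KafkaEventSketch<T: Serialize> {
        // The dispute payload is flattened so its fields sit next to sign_flag
        // in the JSON consumed by the dispute_queue table.
        #[serde(flatten)]
        event: T,
        // Serialized as a small integer; ClickHouse stores it as Int8.
        sign_flag: i32,
    }

    impl<T: Serialize> KafkaEventSketch<T> {
        fn new(event: T) -> Self {
            Self { event, sign_flag: 1 }
        }
        fn old(event: T) -> Self {
            Self { event, sign_flag: -1 }
        }
    }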
							
								
								
									
crates/analytics/docs/clickhouse/scripts/disputes.sql (new file, 117 lines)
@@ -0,0 +1,117 @@
CREATE TABLE dispute_queue (
    `dispute_id` String,
    `amount` String,
    `currency` String,
    `dispute_stage` LowCardinality(String),
    `dispute_status` LowCardinality(String),
    `payment_id` String,
    `attempt_id` String,
    `merchant_id` String,
    `connector_status` String,
    `connector_dispute_id` String,
    `connector_reason` Nullable(String),
    `connector_reason_code` Nullable(String),
    `challenge_required_by` Nullable(DateTime) CODEC(T64, LZ4),
    `connector_created_at` Nullable(DateTime) CODEC(T64, LZ4),
    `connector_updated_at` Nullable(DateTime) CODEC(T64, LZ4),
    `created_at` DateTime CODEC(T64, LZ4),
    `modified_at` DateTime CODEC(T64, LZ4),
    `connector` LowCardinality(String),
    `evidence` Nullable(String),
    `profile_id` Nullable(String),
    `merchant_connector_id` Nullable(String),
    `sign_flag` Int8
) ENGINE = Kafka SETTINGS kafka_broker_list = 'kafka0:29092',
kafka_topic_list = 'hyperswitch-dispute-events',
kafka_group_name = 'hyper-c1',
kafka_format = 'JSONEachRow',
kafka_handle_error_mode = 'stream';


CREATE TABLE dispute (
    `dispute_id` String,
    `amount` String,
    `currency` String,
    `dispute_stage` LowCardinality(String),
    `dispute_status` LowCardinality(String),
    `payment_id` String,
    `attempt_id` String,
    `merchant_id` String,
    `connector_status` String,
    `connector_dispute_id` String,
    `connector_reason` Nullable(String),
    `connector_reason_code` Nullable(String),
    `challenge_required_by` Nullable(DateTime) CODEC(T64, LZ4),
    `connector_created_at` Nullable(DateTime) CODEC(T64, LZ4),
    `connector_updated_at` Nullable(DateTime) CODEC(T64, LZ4),
    `created_at` DateTime DEFAULT now() CODEC(T64, LZ4),
    `modified_at` DateTime DEFAULT now() CODEC(T64, LZ4),
    `connector` LowCardinality(String),
    `evidence` String DEFAULT '{}' CODEC(LZ4),
    `profile_id` Nullable(String),
    `merchant_connector_id` Nullable(String),
    `inserted_at` DateTime DEFAULT now() CODEC(T64, LZ4),
    `sign_flag` Int8,
    INDEX connectorIndex connector TYPE bloom_filter GRANULARITY 1,
    INDEX disputeStatusIndex dispute_status TYPE bloom_filter GRANULARITY 1,
    INDEX disputeStageIndex dispute_stage TYPE bloom_filter GRANULARITY 1
) ENGINE = CollapsingMergeTree(
    sign_flag
)
PARTITION BY toStartOfDay(created_at)
ORDER BY
    (created_at, merchant_id, dispute_id)
TTL created_at + toIntervalMonth(6)
;

CREATE MATERIALIZED VIEW kafka_parse_dispute TO dispute (
    `dispute_id` String,
    `amount` String,
    `currency` String,
    `dispute_stage` LowCardinality(String),
    `dispute_status` LowCardinality(String),
    `payment_id` String,
    `attempt_id` String,
    `merchant_id` String,
    `connector_status` String,
    `connector_dispute_id` String,
    `connector_reason` Nullable(String),
    `connector_reason_code` Nullable(String),
    `challenge_required_by` Nullable(DateTime64(3)),
    `connector_created_at` Nullable(DateTime64(3)),
    `connector_updated_at` Nullable(DateTime64(3)),
    `created_at` DateTime64(3),
    `modified_at` DateTime64(3),
    `connector` LowCardinality(String),
    `evidence` Nullable(String),
    `profile_id` Nullable(String),
    `merchant_connector_id` Nullable(String),
    `inserted_at` DateTime64(3),
    `sign_flag` Int8
) AS
SELECT
    dispute_id,
    amount,
    currency,
    dispute_stage,
    dispute_status,
    payment_id,
    attempt_id,
    merchant_id,
    connector_status,
    connector_dispute_id,
    connector_reason,
    connector_reason_code,
    challenge_required_by,
    connector_created_at,
    connector_updated_at,
    created_at,
    modified_at,
    connector,
    evidence,
    profile_id,
    merchant_connector_id,
    now() as inserted_at,
    sign_flag
FROM
    dispute_queue;
@@ -374,9 +374,15 @@ impl CustomerInterface for KafkaStore {
 impl DisputeInterface for KafkaStore {
     async fn insert_dispute(
         &self,
-        dispute: storage::DisputeNew,
+        dispute_new: storage::DisputeNew,
     ) -> CustomResult<storage::Dispute, errors::StorageError> {
-        self.diesel_store.insert_dispute(dispute).await
+        let dispute = self.diesel_store.insert_dispute(dispute_new).await?;
+
+        if let Err(er) = self.kafka_producer.log_dispute(&dispute, None).await {
+            logger::error!(message="Failed to add analytics entry for Dispute {dispute:?}", error_message=?er);
+        };
+
+        Ok(dispute)
     }
 
     async fn find_by_merchant_id_payment_id_connector_dispute_id(
@@ -419,7 +425,19 @@ impl DisputeInterface for KafkaStore {
         this: storage::Dispute,
         dispute: storage::DisputeUpdate,
     ) -> CustomResult<storage::Dispute, errors::StorageError> {
-        self.diesel_store.update_dispute(this, dispute).await
+        let dispute_new = self
+            .diesel_store
+            .update_dispute(this.clone(), dispute)
+            .await?;
+        if let Err(er) = self
+            .kafka_producer
+            .log_dispute(&dispute_new, Some(this))
+            .await
+        {
+            logger::error!(message="Failed to add analytics entry for Dispute {dispute_new:?}", error_message=?er);
+        };
+
+        Ok(dispute_new)
     }
 
     async fn find_disputes_by_merchant_id_payment_id(
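Both methods follow the same pattern: write through the diesel store first, then emit the analytics event on a best-effort basis, so a Kafka failure is only logged and never fails the dispute write. On update, the pre-update state (`this`) is cloned so it can be replayed as the cancelling event for the collapsing table. A minimal sketch of that shape; the helper name and the `sink` closure are hypothetical, not part of the codebase:

    /// Persist-then-log helper: `persisted` has already been written to the database,
    /// and a failure of the analytics sink is only logged, never propagated.
    fn persist_then_log<T: std::fmt::Debug>(
        persisted: T,
        sink: impl FnOnce(&T) -> Result<(), String>,
    ) -> T {
        if let Err(err) = sink(&persisted) {
            eprintln!("failed to add analytics entry for {persisted:?}: {err}");
        }
        persisted
    }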
@@ -8,6 +8,7 @@ use rdkafka::{
 };
 
 use crate::events::EventType;
+mod dispute;
 mod payment_attempt;
 mod payment_intent;
 mod refund;
@@ -17,8 +18,10 @@ use serde::Serialize;
 use time::OffsetDateTime;
 
 use self::{
-    payment_attempt::KafkaPaymentAttempt, payment_intent::KafkaPaymentIntent, refund::KafkaRefund,
+    dispute::KafkaDispute, payment_attempt::KafkaPaymentAttempt,
+    payment_intent::KafkaPaymentIntent, refund::KafkaRefund,
 };
+use crate::types::storage::Dispute;
 // Using message queue result here to avoid confusion with Kafka result provided by library
 pub type MQResult<T> = CustomResult<T, KafkaError>;
 
@@ -82,6 +85,7 @@ pub struct KafkaSettings {
     api_logs_topic: String,
     connector_logs_topic: String,
     outgoing_webhook_logs_topic: String,
+    dispute_analytics_topic: String,
 }
 
 impl KafkaSettings {
@@ -135,6 +139,12 @@ impl KafkaSettings {
             },
         )?;
 
+        common_utils::fp_utils::when(self.dispute_analytics_topic.is_default_or_empty(), || {
+            Err(ApplicationError::InvalidConfigurationValueError(
+                "Kafka Dispute Logs topic must not be empty".into(),
+            ))
+        })?;
+
         Ok(())
     }
 }
@@ -148,6 +158,7 @@ pub struct KafkaProducer {
     api_logs_topic: String,
     connector_logs_topic: String,
     outgoing_webhook_logs_topic: String,
+    dispute_analytics_topic: String,
 }
 
 struct RdKafkaProducer(ThreadedProducer<DefaultProducerContext>);
@@ -186,6 +197,7 @@ impl KafkaProducer {
             api_logs_topic: conf.api_logs_topic.clone(),
             connector_logs_topic: conf.connector_logs_topic.clone(),
             outgoing_webhook_logs_topic: conf.outgoing_webhook_logs_topic.clone(),
+            dispute_analytics_topic: conf.dispute_analytics_topic.clone(),
         })
     }
 
@@ -306,6 +318,27 @@ impl KafkaProducer {
         })
     }
 
+    pub async fn log_dispute(
+        &self,
+        dispute: &Dispute,
+        old_dispute: Option<Dispute>,
+    ) -> MQResult<()> {
+        if let Some(negative_event) = old_dispute {
+            self.log_kafka_event(
+                &self.dispute_analytics_topic,
+                &KafkaEvent::old(&KafkaDispute::from_storage(&negative_event)),
+            )
+            .attach_printable_lazy(|| {
+                format!("Failed to add negative dispute event {negative_event:?}")
+            })?;
+        };
+        self.log_kafka_event(
+            &self.dispute_analytics_topic,
+            &KafkaEvent::new(&KafkaDispute::from_storage(dispute)),
+        )
+        .attach_printable_lazy(|| format!("Failed to add positive dispute event {dispute:?}"))
+    }
+
     pub fn get_topic(&self, event: EventType) -> &str {
         match event {
             EventType::ApiLogs => &self.api_logs_topic,
							
								
								
									
crates/router/src/services/kafka/dispute.rs (new file, 76 lines)
@@ -0,0 +1,76 @@
use diesel_models::enums as storage_enums;
use masking::Secret;
use time::OffsetDateTime;

use crate::types::storage::dispute::Dispute;

#[derive(serde::Serialize, Debug)]
pub struct KafkaDispute<'a> {
    pub dispute_id: &'a String,
    pub amount: &'a String,
    pub currency: &'a String,
    pub dispute_stage: &'a storage_enums::DisputeStage,
    pub dispute_status: &'a storage_enums::DisputeStatus,
    pub payment_id: &'a String,
    pub attempt_id: &'a String,
    pub merchant_id: &'a String,
    pub connector_status: &'a String,
    pub connector_dispute_id: &'a String,
    pub connector_reason: Option<&'a String>,
    pub connector_reason_code: Option<&'a String>,
    #[serde(default, with = "time::serde::timestamp::option")]
    pub challenge_required_by: Option<OffsetDateTime>,
    #[serde(default, with = "time::serde::timestamp::option")]
    pub connector_created_at: Option<OffsetDateTime>,
    #[serde(default, with = "time::serde::timestamp::option")]
    pub connector_updated_at: Option<OffsetDateTime>,
    #[serde(default, with = "time::serde::timestamp")]
    pub created_at: OffsetDateTime,
    #[serde(default, with = "time::serde::timestamp")]
    pub modified_at: OffsetDateTime,
    pub connector: &'a String,
    pub evidence: &'a Secret<serde_json::Value>,
    pub profile_id: Option<&'a String>,
    pub merchant_connector_id: Option<&'a String>,
}

impl<'a> KafkaDispute<'a> {
    pub fn from_storage(dispute: &'a Dispute) -> Self {
        Self {
            dispute_id: &dispute.dispute_id,
            amount: &dispute.amount,
            currency: &dispute.currency,
            dispute_stage: &dispute.dispute_stage,
            dispute_status: &dispute.dispute_status,
            payment_id: &dispute.payment_id,
            attempt_id: &dispute.attempt_id,
            merchant_id: &dispute.merchant_id,
            connector_status: &dispute.connector_status,
            connector_dispute_id: &dispute.connector_dispute_id,
            connector_reason: dispute.connector_reason.as_ref(),
            connector_reason_code: dispute.connector_reason_code.as_ref(),
            challenge_required_by: dispute.challenge_required_by.map(|i| i.assume_utc()),
            connector_created_at: dispute.connector_created_at.map(|i| i.assume_utc()),
            connector_updated_at: dispute.connector_updated_at.map(|i| i.assume_utc()),
            created_at: dispute.created_at.assume_utc(),
            modified_at: dispute.modified_at.assume_utc(),
            connector: &dispute.connector,
            evidence: &dispute.evidence,
            profile_id: dispute.profile_id.as_ref(),
            merchant_connector_id: dispute.merchant_connector_id.as_ref(),
        }
    }
}

impl<'a> super::KafkaMessage for KafkaDispute<'a> {
    fn key(&self) -> String {
        format!(
            "{}_{}_{}",
            self.merchant_id, self.payment_id, self.dispute_id
        )
    }

    fn creation_timestamp(&self) -> Option<i64> {
        Some(self.modified_at.unix_timestamp())
    }
}
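The `time::serde::timestamp` attributes mean every timestamp is serialized as Unix epoch seconds, which is what the `DateTime` columns of the ClickHouse scripts above ingest via JSONEachRow. A self-contained sketch of the resulting JSON shape; the struct name and field values are illustrative, not part of the commit:

    use serde::Serialize;
    use time::OffsetDateTime;

    #[derive(Serialize)]
    struct DisputeEventSample<'a> {
        dispute_id: &'a str,
        dispute_status: &'a str,
        // Serialized as an integer number of seconds since the Unix epoch.
        #[serde(with = "time::serde::timestamp")]
        created_at: OffsetDateTime,
        sign_flag: i8,
    }

    fn main() {
        let sample = DisputeEventSample {
            dispute_id: "dp_123",              // hypothetical id
            dispute_status: "dispute_opened",  // illustrative status string
            created_at: OffsetDateTime::UNIX_EPOCH,
            sign_flag: 1,
        };
        // Prints: {"dispute_id":"dp_123","dispute_status":"dispute_opened","created_at":0,"sign_flag":1}
        println!("{}", serde_json::to_string(&sample).expect("serialization failed"));
    }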