chore: address Rust 1.72 clippy lints (#2011)

Co-authored-by: Sampras Lopes <lsampras@pm.me>
This commit is contained in:
Sanchith Hegde
2023-08-29 00:33:57 +05:30
committed by GitHub
parent 23b8d3412c
commit eaefa6e15c
29 changed files with 178 additions and 106 deletions

View File

@ -1,5 +1,9 @@
pub type StorageResult<T> = error_stack::Result<T, StorageError>;
#[derive(Debug, thiserror::Error)] #[derive(Debug, thiserror::Error)]
pub enum StorageError { pub enum StorageError {
#[error("Initialization Error")]
InitializationError,
// TODO: deprecate this error type to use a domain error instead // TODO: deprecate this error type to use a domain error instead
#[error("DatabaseError: {0:?}")] #[error("DatabaseError: {0:?}")]
DatabaseError(String), DatabaseError(String),

View File

@ -18,6 +18,10 @@ pub async fn redis_connection(
.expect("Failed to create Redis connection Pool") .expect("Failed to create Redis connection Pool")
} }
// TODO: use stores defined in storage_impl instead
/// # Panics
///
/// Will panic if a database connection pool could not be created
#[allow(clippy::expect_used)] #[allow(clippy::expect_used)]
pub async fn diesel_make_pg_pool( pub async fn diesel_make_pg_pool(
database: &Database, database: &Database,

View File

@ -8,6 +8,10 @@ pub type PgPool = bb8::Pool<async_bb8_diesel::ConnectionManager<PgConnection>>;
pub type PgPooledConn = async_bb8_diesel::Connection<PgConnection>; pub type PgPooledConn = async_bb8_diesel::Connection<PgConnection>;
///
/// # Panics
///
/// Panics if a Redis connection pool could not be created
#[allow(clippy::expect_used)] #[allow(clippy::expect_used)]
pub async fn redis_connection( pub async fn redis_connection(
conf: &crate::configs::settings::Settings, conf: &crate::configs::settings::Settings,

View File

@ -848,7 +848,7 @@ impl api::FileUpload for Checkout {
match purpose { match purpose {
api::FilePurpose::DisputeEvidence => { api::FilePurpose::DisputeEvidence => {
let supported_file_types = let supported_file_types =
vec!["image/jpeg", "image/jpg", "image/png", "application/pdf"]; ["image/jpeg", "image/jpg", "image/png", "application/pdf"];
// 4 Megabytes (MB) // 4 Megabytes (MB)
if file_size > 4000000 { if file_size > 4000000 {
Err(errors::ConnectorError::FileValidationFailed { Err(errors::ConnectorError::FileValidationFailed {

View File

@ -1311,7 +1311,7 @@ impl api::FileUpload for Stripe {
) -> CustomResult<(), errors::ConnectorError> { ) -> CustomResult<(), errors::ConnectorError> {
match purpose { match purpose {
api::FilePurpose::DisputeEvidence => { api::FilePurpose::DisputeEvidence => {
let supported_file_types = vec!["image/jpeg", "image/png", "application/pdf"]; let supported_file_types = ["image/jpeg", "image/png", "application/pdf"];
// 5 Megabytes (MB) // 5 Megabytes (MB)
if file_size > 5000000 { if file_size > 5000000 {
Err(errors::ConnectorError::FileValidationFailed { Err(errors::ConnectorError::FileValidationFailed {

View File

@ -604,11 +604,7 @@ pub mod error_stack_parsing {
attachments: current_error.attachments, attachments: current_error.attachments,
}] }]
.into_iter() .into_iter()
.chain( .chain(Into::<VecLinearErrorStack<'a>>::into(current_error.sources).0)
Into::<VecLinearErrorStack<'a>>::into(current_error.sources)
.0
.into_iter(),
)
}) })
.collect(); .collect();
Self(multi_layered_errors) Self(multi_layered_errors)

View File

@ -754,7 +754,7 @@ pub fn get_banks(
fn get_val(str: String, val: &serde_json::Value) -> Option<String> { fn get_val(str: String, val: &serde_json::Value) -> Option<String> {
str.split('.') str.split('.')
.fold(Some(val), |acc, x| acc.and_then(|v| v.get(x))) .try_fold(val, |acc, x| acc.get(x))
.and_then(|v| v.as_str()) .and_then(|v| v.as_str())
.map(|s| s.to_string()) .map(|s| s.to_string())
} }

View File

@ -434,7 +434,7 @@ pub async fn get_payment_intent_payment_attempt(
merchant_id: &str, merchant_id: &str,
storage_scheme: enums::MerchantStorageScheme, storage_scheme: enums::MerchantStorageScheme,
) -> RouterResult<(storage::PaymentIntent, storage::PaymentAttempt)> { ) -> RouterResult<(storage::PaymentIntent, storage::PaymentAttempt)> {
(|| async { let get_pi_pa = || async {
let (pi, pa); let (pi, pa);
match payment_id { match payment_id {
api_models::payments::PaymentIdType::PaymentIntentId(ref id) => { api_models::payments::PaymentIdType::PaymentIntentId(ref id) => {
@ -501,7 +501,9 @@ pub async fn get_payment_intent_payment_attempt(
} }
} }
error_stack::Result::<_, errors::DataStorageError>::Ok((pi, pa)) error_stack::Result::<_, errors::DataStorageError>::Ok((pi, pa))
})() };
.await
.to_not_found_response(errors::ApiErrorResponse::PaymentNotFound) get_pi_pa()
.await
.to_not_found_response(errors::ApiErrorResponse::PaymentNotFound)
} }

View File

@ -842,7 +842,7 @@ pub async fn sync_refund_with_gateway_workflow(
}, },
) )
.await?; .await?;
let terminal_status = vec![ let terminal_status = [
enums::RefundStatus::Success, enums::RefundStatus::Success,
enums::RefundStatus::Failure, enums::RefundStatus::Failure,
enums::RefundStatus::TransactionFailure, enums::RefundStatus::TransactionFailure,

View File

@ -1,5 +1,5 @@
use diesel_models::configs::ConfigUpdateInternal; use diesel_models::configs::ConfigUpdateInternal;
use error_stack::IntoReport; use error_stack::{IntoReport, ResultExt};
use storage_impl::redis::{ use storage_impl::redis::{
cache::{CacheKind, CONFIG_CACHE}, cache::{CacheKind, CONFIG_CACHE},
kv_store::RedisConnInterface, kv_store::RedisConnInterface,
@ -126,8 +126,11 @@ impl ConfigInterface for MockDb {
let mut configs = self.configs.lock().await; let mut configs = self.configs.lock().await;
let config_new = storage::Config { let config_new = storage::Config {
#[allow(clippy::as_conversions)] id: configs
id: configs.len() as i32, .len()
.try_into()
.into_report()
.change_context(errors::StorageError::MockDbError)?,
key: config.key, key: config.key,
config: config.config, config: config.config,
}; };

View File

@ -1,4 +1,4 @@
use error_stack::IntoReport; use error_stack::{IntoReport, ResultExt};
use super::{MockDb, Store}; use super::{MockDb, Store};
use crate::{ use crate::{
@ -88,8 +88,11 @@ impl ConnectorResponseInterface for MockDb {
) -> CustomResult<storage::ConnectorResponse, errors::StorageError> { ) -> CustomResult<storage::ConnectorResponse, errors::StorageError> {
let mut connector_response = self.connector_response.lock().await; let mut connector_response = self.connector_response.lock().await;
let response = storage::ConnectorResponse { let response = storage::ConnectorResponse {
#[allow(clippy::as_conversions)] id: connector_response
id: connector_response.len() as i32, .len()
.try_into()
.into_report()
.change_context(errors::StorageError::MockDbError)?,
payment_id: new.payment_id, payment_id: new.payment_id,
merchant_id: new.merchant_id, merchant_id: new.merchant_id,
attempt_id: new.attempt_id, attempt_id: new.attempt_id,

View File

@ -1,4 +1,4 @@
use error_stack::IntoReport; use error_stack::{IntoReport, ResultExt};
use super::{MockDb, Store}; use super::{MockDb, Store};
use crate::{ use crate::{
@ -147,8 +147,11 @@ impl DisputeInterface for MockDb {
let now = common_utils::date_time::now(); let now = common_utils::date_time::now();
let new_dispute = storage::Dispute { let new_dispute = storage::Dispute {
#[allow(clippy::as_conversions)] id: locked_disputes
id: locked_disputes.len() as i32, .len()
.try_into()
.into_report()
.change_context(errors::StorageError::MockDbError)?,
dispute_id: dispute.dispute_id, dispute_id: dispute.dispute_id,
amount: dispute.amount, amount: dispute.amount,
currency: dispute.currency, currency: dispute.currency,

View File

@ -1,4 +1,4 @@
use error_stack::IntoReport; use error_stack::{IntoReport, ResultExt};
use super::{MockDb, Store}; use super::{MockDb, Store};
use crate::{ use crate::{
@ -52,8 +52,11 @@ impl EventInterface for MockDb {
let now = common_utils::date_time::now(); let now = common_utils::date_time::now();
let stored_event = storage::Event { let stored_event = storage::Event {
#[allow(clippy::as_conversions)] id: locked_events
id: locked_events.len() as i32, .len()
.try_into()
.into_report()
.change_context(errors::StorageError::MockDbError)?,
event_id: event.event_id, event_id: event.event_id,
event_type: event.event_type, event_type: event.event_type,
event_class: event.event_class, event_class: event.event_class,

View File

@ -1,4 +1,4 @@
use error_stack::IntoReport; use error_stack::{IntoReport, ResultExt};
use super::{MockDb, Store}; use super::{MockDb, Store};
use crate::{ use crate::{
@ -84,8 +84,11 @@ impl LockerMockUpInterface for MockDb {
} }
let created_locker = storage::LockerMockUp { let created_locker = storage::LockerMockUp {
#[allow(clippy::as_conversions)] id: locked_lockers
id: locked_lockers.len() as i32, .len()
.try_into()
.into_report()
.change_context(errors::StorageError::MockDbError)?,
card_id: new.card_id, card_id: new.card_id,
external_id: new.external_id, external_id: new.external_id,
card_fingerprint: new.card_fingerprint, card_fingerprint: new.card_fingerprint,

View File

@ -1,4 +1,4 @@
use error_stack::IntoReport; use error_stack::{IntoReport, ResultExt};
use super::{MockDb, Store}; use super::{MockDb, Store};
use crate::{ use crate::{
@ -221,8 +221,11 @@ impl MandateInterface for MockDb {
) -> CustomResult<storage::Mandate, errors::StorageError> { ) -> CustomResult<storage::Mandate, errors::StorageError> {
let mut mandates = self.mandates.lock().await; let mut mandates = self.mandates.lock().await;
let mandate = storage::Mandate { let mandate = storage::Mandate {
#[allow(clippy::as_conversions)] id: mandates
id: mandates.len() as i32, .len()
.try_into()
.into_report()
.change_context(errors::StorageError::MockDbError)?,
mandate_id: mandate_new.mandate_id.clone(), mandate_id: mandate_new.mandate_id.clone(),
customer_id: mandate_new.customer_id, customer_id: mandate_new.customer_id,
merchant_id: mandate_new.merchant_id, merchant_id: mandate_new.merchant_id,

View File

@ -542,8 +542,11 @@ impl MerchantConnectorAccountInterface for MockDb {
) -> CustomResult<domain::MerchantConnectorAccount, errors::StorageError> { ) -> CustomResult<domain::MerchantConnectorAccount, errors::StorageError> {
let mut accounts = self.merchant_connector_accounts.lock().await; let mut accounts = self.merchant_connector_accounts.lock().await;
let account = storage::MerchantConnectorAccount { let account = storage::MerchantConnectorAccount {
#[allow(clippy::as_conversions)] id: accounts
id: accounts.len() as i32, .len()
.try_into()
.into_report()
.change_context(errors::StorageError::MockDbError)?,
merchant_id: t.merchant_id, merchant_id: t.merchant_id,
connector_name: t.connector_name, connector_name: t.connector_name,
connector_account_details: t.connector_account_details.into(), connector_account_details: t.connector_account_details.into(),

View File

@ -5,6 +5,7 @@ use data_models::payments::payment_intent::{
use data_models::payments::{ use data_models::payments::{
payment_attempt::PaymentAttempt, payment_intent::PaymentIntentFetchConstraints, payment_attempt::PaymentAttempt, payment_intent::PaymentIntentFetchConstraints,
}; };
use error_stack::{IntoReport, ResultExt};
use super::MockDb; use super::MockDb;
#[cfg(feature = "olap")] #[cfg(feature = "olap")]
@ -56,8 +57,11 @@ impl PaymentIntentInterface for MockDb {
let mut payment_intents = self.payment_intents.lock().await; let mut payment_intents = self.payment_intents.lock().await;
let time = common_utils::date_time::now(); let time = common_utils::date_time::now();
let payment_intent = PaymentIntent { let payment_intent = PaymentIntent {
#[allow(clippy::as_conversions)] id: payment_intents
id: payment_intents.len() as i32, .len()
.try_into()
.into_report()
.change_context(errors::DataStorageError::MockDbError)?,
payment_id: new.payment_id, payment_id: new.payment_id,
merchant_id: new.merchant_id, merchant_id: new.merchant_id,
status: new.status, status: new.status,

View File

@ -1,5 +1,5 @@
use diesel_models::payment_method::PaymentMethodUpdateInternal; use diesel_models::payment_method::PaymentMethodUpdateInternal;
use error_stack::IntoReport; use error_stack::{IntoReport, ResultExt};
use super::{MockDb, Store}; use super::{MockDb, Store};
use crate::{ use crate::{
@ -134,8 +134,11 @@ impl PaymentMethodInterface for MockDb {
let mut payment_methods = self.payment_methods.lock().await; let mut payment_methods = self.payment_methods.lock().await;
let payment_method = storage::PaymentMethod { let payment_method = storage::PaymentMethod {
#[allow(clippy::as_conversions)] id: payment_methods
id: payment_methods.len() as i32, .len()
.try_into()
.into_report()
.change_context(errors::StorageError::MockDbError)?,
customer_id: payment_method_new.customer_id, customer_id: payment_method_new.customer_id,
merchant_id: payment_method_new.merchant_id, merchant_id: payment_method_new.merchant_id,
payment_method_id: payment_method_new.payment_method_id, payment_method_id: payment_method_new.payment_method_id,

View File

@ -2,6 +2,7 @@
use std::collections::HashSet; use std::collections::HashSet;
use diesel_models::{errors::DatabaseError, refund::RefundUpdateInternal}; use diesel_models::{errors::DatabaseError, refund::RefundUpdateInternal};
use error_stack::{IntoReport, ResultExt};
use super::MockDb; use super::MockDb;
use crate::{ use crate::{
@ -735,8 +736,11 @@ impl RefundInterface for MockDb {
let current_time = common_utils::date_time::now(); let current_time = common_utils::date_time::now();
let refund = storage_types::Refund { let refund = storage_types::Refund {
#[allow(clippy::as_conversions)] id: refunds
id: refunds.len() as i32, .len()
.try_into()
.into_report()
.change_context(errors::StorageError::MockDbError)?,
internal_reference_id: new.internal_reference_id, internal_reference_id: new.internal_reference_id,
refund_id: new.refund_id, refund_id: new.refund_id,
payment_id: new.payment_id, payment_id: new.payment_id,

View File

@ -59,6 +59,9 @@ impl AppStateInfo for AppState {
} }
impl AppState { impl AppState {
/// # Panics
///
/// Panics if Store can't be created or JWE decryption fails
pub async fn with_storage( pub async fn with_storage(
conf: settings::Settings, conf: settings::Settings,
storage_impl: StorageImpl, storage_impl: StorageImpl,
@ -68,9 +71,12 @@ impl AppState {
let kms_client = kms::get_kms_client(&conf.kms).await; let kms_client = kms::get_kms_client(&conf.kms).await;
let testable = storage_impl == StorageImpl::PostgresqlTest; let testable = storage_impl == StorageImpl::PostgresqlTest;
let store: Box<dyn StorageInterface> = match storage_impl { let store: Box<dyn StorageInterface> = match storage_impl {
StorageImpl::Postgresql | StorageImpl::PostgresqlTest => { StorageImpl::Postgresql | StorageImpl::PostgresqlTest => Box::new(
Box::new(get_store(&conf, shut_down_signal, testable).await) #[allow(clippy::expect_used)]
} get_store(&conf, shut_down_signal, testable)
.await
.expect("Failed to create store"),
),
StorageImpl::Mock => Box::new(MockDb::new(&conf).await), StorageImpl::Mock => Box::new(MockDb::new(&conf).await),
}; };
@ -84,7 +90,6 @@ impl AppState {
.expect("Failed while performing KMS decryption"); .expect("Failed while performing KMS decryption");
#[cfg(feature = "email")] #[cfg(feature = "email")]
#[allow(clippy::expect_used)]
let email_client = Box::new(AwsSes::new(&conf.email).await); let email_client = Box::new(AwsSes::new(&conf.email).await);
Self { Self {
flow_name: String::from("default"), flow_name: String::from("default"),

View File

@ -62,7 +62,7 @@ impl ProcessTrackerWorkflow for PaymentsSyncWorkflow {
) )
.await?; .await?;
let terminal_status = vec![ let terminal_status = [
enums::AttemptStatus::RouterDeclined, enums::AttemptStatus::RouterDeclined,
enums::AttemptStatus::Charged, enums::AttemptStatus::Charged,
enums::AttemptStatus::AutoRefunded, enums::AttemptStatus::AutoRefunded,

View File

@ -3,6 +3,9 @@ pub mod authentication;
pub mod encryption; pub mod encryption;
pub mod logger; pub mod logger;
#[cfg(feature = "kms")]
use data_models::errors::StorageError;
use data_models::errors::StorageResult;
use error_stack::{IntoReport, ResultExt}; use error_stack::{IntoReport, ResultExt};
#[cfg(feature = "kms")] #[cfg(feature = "kms")]
use external_services::kms::{self, decrypt::KmsDecrypt}; use external_services::kms::{self, decrypt::KmsDecrypt};
@ -31,29 +34,29 @@ pub async fn get_store(
config: &settings::Settings, config: &settings::Settings,
shut_down_signal: oneshot::Sender<()>, shut_down_signal: oneshot::Sender<()>,
test_transaction: bool, test_transaction: bool,
) -> Store { ) -> StorageResult<Store> {
#[cfg(feature = "kms")] #[cfg(feature = "kms")]
let kms_client = kms::get_kms_client(&config.kms).await; let kms_client = kms::get_kms_client(&config.kms).await;
#[cfg(feature = "kms")] #[cfg(feature = "kms")]
#[allow(clippy::expect_used)]
let master_config = config let master_config = config
.master_database .master_database
.clone() .clone()
.decrypt_inner(kms_client) .decrypt_inner(kms_client)
.await .await
.expect("Failed to decrypt master database config"); .change_context(StorageError::InitializationError)
.attach_printable("Failed to decrypt master database config")?;
#[cfg(not(feature = "kms"))] #[cfg(not(feature = "kms"))]
let master_config = config.master_database.clone().into(); let master_config = config.master_database.clone().into();
#[cfg(all(feature = "olap", feature = "kms"))] #[cfg(all(feature = "olap", feature = "kms"))]
#[allow(clippy::expect_used)]
let replica_config = config let replica_config = config
.replica_database .replica_database
.clone() .clone()
.decrypt_inner(kms_client) .decrypt_inner(kms_client)
.await .await
.expect("Failed to decrypt replica database config"); .change_context(StorageError::InitializationError)
.attach_printable("Failed to decrypt replica database config")?;
#[cfg(all(feature = "olap", not(feature = "kms")))] #[cfg(all(feature = "olap", not(feature = "kms")))]
let replica_config = config.replica_database.clone().into(); let replica_config = config.replica_database.clone().into();
@ -70,7 +73,7 @@ pub async fn get_store(
let conf = (master_config, replica_config); let conf = (master_config, replica_config);
let store: RouterStore<StoreType> = if test_transaction { let store: RouterStore<StoreType> = if test_transaction {
RouterStore::test_store(conf, &config.redis, master_enc_key).await RouterStore::test_store(conf, &config.redis, master_enc_key).await?
} else { } else {
RouterStore::from_config( RouterStore::from_config(
conf, conf,
@ -79,7 +82,7 @@ pub async fn get_store(
shut_down_signal, shut_down_signal,
consts::PUB_SUB_CHANNEL, consts::PUB_SUB_CHANNEL,
) )
.await .await?
}; };
#[cfg(feature = "kv_store")] #[cfg(feature = "kv_store")]
@ -89,7 +92,7 @@ pub async fn get_store(
config.drainer.num_partitions, config.drainer.num_partitions,
); );
store Ok(store)
} }
#[allow(clippy::expect_used)] #[allow(clippy::expect_used)]

View File

@ -275,8 +275,8 @@ impl HeaderExt for Headers {
) -> CustomResult<reqwest::header::HeaderMap, errors::ApiClientError> { ) -> CustomResult<reqwest::header::HeaderMap, errors::ApiClientError> {
use reqwest::header::{HeaderMap, HeaderName, HeaderValue}; use reqwest::header::{HeaderMap, HeaderName, HeaderValue};
self.into_iter().fold( self.into_iter().try_fold(
Ok(HeaderMap::new()), HeaderMap::new(),
|mut header_map, (header_name, header_value)| { |mut header_map, (header_name, header_value)| {
let header_name = HeaderName::from_str(&header_name) let header_name = HeaderName::from_str(&header_name)
.into_report() .into_report()
@ -285,10 +285,8 @@ impl HeaderExt for Headers {
let header_value = HeaderValue::from_str(&header_value) let header_value = HeaderValue::from_str(&header_value)
.into_report() .into_report()
.change_context(errors::ApiClientError::HeaderMapConstructionFailed)?; .change_context(errors::ApiClientError::HeaderMapConstructionFailed)?;
if let Ok(map) = header_map.as_mut() { header_map.append(header_name, header_value);
map.append(header_name, header_value); Ok(header_map)
}
header_map
}, },
) )
} }

View File

@ -477,7 +477,7 @@ async fn should_fail_payment_for_invalid_exp_month() {
) )
.await .await
.unwrap(); .unwrap();
let errors = vec!["The provided Expiry Date is not valid.: Expiry month should be between 1 and 12 inclusive: 20","Refused"]; let errors = ["The provided Expiry Date is not valid.: Expiry month should be between 1 and 12 inclusive: 20","Refused"];
assert!(errors.contains(&response.response.unwrap_err().message.as_str())) assert!(errors.contains(&response.response.unwrap_err().message.as_str()))
} }

View File

@ -226,7 +226,7 @@ async fn payments_todo() {
let client = awc::Client::default(); let client = awc::Client::default();
let mut response; let mut response;
let mut response_body; let mut response_body;
let _post_endpoints = vec!["123/update", "123/confirm", "cancel"]; let _post_endpoints = ["123/update", "123/confirm", "cancel"];
let get_endpoints = vec!["list"]; let get_endpoints = vec!["list"];
for endpoint in get_endpoints { for endpoint in get_endpoints {

View File

@ -1,6 +1,8 @@
use async_bb8_diesel::{AsyncConnection, ConnectionError}; use async_bb8_diesel::{AsyncConnection, ConnectionError};
use bb8::CustomizeConnection; use bb8::CustomizeConnection;
use data_models::errors::{StorageError, StorageResult};
use diesel::PgConnection; use diesel::PgConnection;
use error_stack::{IntoReport, ResultExt};
use masking::PeekInterface; use masking::PeekInterface;
use crate::config::Database; use crate::config::Database;
@ -11,7 +13,7 @@ pub type PgPooledConn = async_bb8_diesel::Connection<PgConnection>;
#[async_trait::async_trait] #[async_trait::async_trait]
pub trait DatabaseStore: Clone + Send + Sync { pub trait DatabaseStore: Clone + Send + Sync {
type Config: Send; type Config: Send;
async fn new(config: Self::Config, test_transaction: bool) -> Self; async fn new(config: Self::Config, test_transaction: bool) -> StorageResult<Self>;
fn get_master_pool(&self) -> &PgPool; fn get_master_pool(&self) -> &PgPool;
fn get_replica_pool(&self) -> &PgPool; fn get_replica_pool(&self) -> &PgPool;
} }
@ -24,10 +26,10 @@ pub struct Store {
#[async_trait::async_trait] #[async_trait::async_trait]
impl DatabaseStore for Store { impl DatabaseStore for Store {
type Config = Database; type Config = Database;
async fn new(config: Database, test_transaction: bool) -> Self { async fn new(config: Database, test_transaction: bool) -> StorageResult<Self> {
Self { Ok(Self {
master_pool: diesel_make_pg_pool(&config, test_transaction).await, master_pool: diesel_make_pg_pool(&config, test_transaction).await?,
} })
} }
fn get_master_pool(&self) -> &PgPool { fn get_master_pool(&self) -> &PgPool {
@ -48,14 +50,18 @@ pub struct ReplicaStore {
#[async_trait::async_trait] #[async_trait::async_trait]
impl DatabaseStore for ReplicaStore { impl DatabaseStore for ReplicaStore {
type Config = (Database, Database); type Config = (Database, Database);
async fn new(config: (Database, Database), test_transaction: bool) -> Self { async fn new(config: (Database, Database), test_transaction: bool) -> StorageResult<Self> {
let (master_config, replica_config) = config; let (master_config, replica_config) = config;
let master_pool = diesel_make_pg_pool(&master_config, test_transaction).await; let master_pool = diesel_make_pg_pool(&master_config, test_transaction)
let replica_pool = diesel_make_pg_pool(&replica_config, test_transaction).await; .await
Self { .attach_printable("failed to create master pool")?;
let replica_pool = diesel_make_pg_pool(&replica_config, test_transaction)
.await
.attach_printable("failed to create replica pool")?;
Ok(Self {
master_pool, master_pool,
replica_pool, replica_pool,
} })
} }
fn get_master_pool(&self) -> &PgPool { fn get_master_pool(&self) -> &PgPool {
@ -67,8 +73,10 @@ impl DatabaseStore for ReplicaStore {
} }
} }
#[allow(clippy::expect_used)] pub async fn diesel_make_pg_pool(
pub async fn diesel_make_pg_pool(database: &Database, test_transaction: bool) -> PgPool { database: &Database,
test_transaction: bool,
) -> StorageResult<PgPool> {
let database_url = format!( let database_url = format!(
"postgres://{}:{}@{}:{}/{}", "postgres://{}:{}@{}:{}/{}",
database.username, database.username,
@ -88,7 +96,9 @@ pub async fn diesel_make_pg_pool(database: &Database, test_transaction: bool) ->
pool.build(manager) pool.build(manager)
.await .await
.expect("Failed to create PostgreSQL connection pool") .into_report()
.change_context(StorageError::InitializationError)
.attach_printable("Failed to create PostgreSQL connection pool")
} }
#[derive(Debug)] #[derive(Debug)]

View File

@ -1,5 +1,6 @@
use std::sync::Arc; use std::sync::Arc;
use data_models::errors::{StorageError, StorageResult};
use error_stack::ResultExt; use error_stack::ResultExt;
use masking::StrongSecret; use masking::StrongSecret;
use redis::{kv_store::RedisConnInterface, RedisStore}; use redis::{kv_store::RedisConnInterface, RedisStore};
@ -35,11 +36,13 @@ where
tokio::sync::oneshot::Sender<()>, tokio::sync::oneshot::Sender<()>,
&'static str, &'static str,
); );
async fn new(config: Self::Config, test_transaction: bool) -> Self { async fn new(config: Self::Config, test_transaction: bool) -> StorageResult<Self> {
let (db_conf, cache_conf, encryption_key, cache_error_signal, inmemory_cache_stream) = let (db_conf, cache_conf, encryption_key, cache_error_signal, inmemory_cache_stream) =
config; config;
if test_transaction { if test_transaction {
Self::test_store(db_conf, &cache_conf, encryption_key).await Self::test_store(db_conf, &cache_conf, encryption_key)
.await
.attach_printable("failed to create test router store")
} else { } else {
Self::from_config( Self::from_config(
db_conf, db_conf,
@ -49,6 +52,7 @@ where
inmemory_cache_stream, inmemory_cache_stream,
) )
.await .await
.attach_printable("failed to create store")
} }
} }
fn get_master_pool(&self) -> &PgPool { fn get_master_pool(&self) -> &PgPool {
@ -74,46 +78,48 @@ impl<T: DatabaseStore> RouterStore<T> {
encryption_key: StrongSecret<Vec<u8>>, encryption_key: StrongSecret<Vec<u8>>,
cache_error_signal: tokio::sync::oneshot::Sender<()>, cache_error_signal: tokio::sync::oneshot::Sender<()>,
inmemory_cache_stream: &str, inmemory_cache_stream: &str,
) -> Self { ) -> StorageResult<Self> {
// TODO: create an error enum and return proper error here let db_store = T::new(db_conf, false).await?;
let db_store = T::new(db_conf, false).await;
#[allow(clippy::expect_used)]
let cache_store = RedisStore::new(cache_conf) let cache_store = RedisStore::new(cache_conf)
.await .await
.expect("Failed to create cache store"); .change_context(StorageError::InitializationError)
.attach_printable("Failed to create cache store")?;
cache_store.set_error_callback(cache_error_signal); cache_store.set_error_callback(cache_error_signal);
#[allow(clippy::expect_used)]
cache_store cache_store
.subscribe_to_channel(inmemory_cache_stream) .subscribe_to_channel(inmemory_cache_stream)
.await .await
.expect("Failed to subscribe to inmemory cache stream"); .change_context(StorageError::InitializationError)
Self { .attach_printable("Failed to subscribe to inmemory cache stream")?;
Ok(Self {
db_store, db_store,
cache_store, cache_store,
master_encryption_key: encryption_key, master_encryption_key: encryption_key,
} })
} }
pub fn master_key(&self) -> &StrongSecret<Vec<u8>> { pub fn master_key(&self) -> &StrongSecret<Vec<u8>> {
&self.master_encryption_key &self.master_encryption_key
} }
/// # Panics
///
/// Will panic if `CONNECTOR_AUTH_FILE_PATH` is not set
pub async fn test_store( pub async fn test_store(
db_conf: T::Config, db_conf: T::Config,
cache_conf: &redis_interface::RedisSettings, cache_conf: &redis_interface::RedisSettings,
encryption_key: StrongSecret<Vec<u8>>, encryption_key: StrongSecret<Vec<u8>>,
) -> Self { ) -> StorageResult<Self> {
// TODO: create an error enum and return proper error here // TODO: create an error enum and return proper error here
let db_store = T::new(db_conf, true).await; let db_store = T::new(db_conf, true).await?;
#[allow(clippy::expect_used)]
let cache_store = RedisStore::new(cache_conf) let cache_store = RedisStore::new(cache_conf)
.await .await
.expect("Failed to create cache store"); .change_context(StorageError::InitializationError)
Self { .attach_printable("failed to create redis cache")?;
Ok(Self {
db_store, db_store,
cache_store, cache_store,
master_encryption_key: encryption_key, master_encryption_key: encryption_key,
} })
} }
} }
@ -131,9 +137,13 @@ where
T: DatabaseStore, T: DatabaseStore,
{ {
type Config = (RouterStore<T>, String, u8); type Config = (RouterStore<T>, String, u8);
async fn new(config: Self::Config, _test_transaction: bool) -> Self { async fn new(config: Self::Config, _test_transaction: bool) -> StorageResult<Self> {
let (router_store, drainer_stream_name, drainer_num_partitions) = config; let (router_store, drainer_stream_name, drainer_num_partitions) = config;
Self::from_store(router_store, drainer_stream_name, drainer_num_partitions) Ok(Self::from_store(
router_store,
drainer_stream_name,
drainer_num_partitions,
))
} }
fn get_master_pool(&self) -> &PgPool { fn get_master_pool(&self) -> &PgPool {
self.router_store.get_master_pool() self.router_store.get_master_pool()
@ -224,28 +234,26 @@ impl DataModelExt for data_models::MerchantStorageScheme {
pub(crate) fn diesel_error_to_data_error( pub(crate) fn diesel_error_to_data_error(
diesel_error: &diesel_models::errors::DatabaseError, diesel_error: &diesel_models::errors::DatabaseError,
) -> data_models::errors::StorageError { ) -> StorageError {
match diesel_error { match diesel_error {
diesel_models::errors::DatabaseError::DatabaseConnectionError => { diesel_models::errors::DatabaseError::DatabaseConnectionError => {
data_models::errors::StorageError::DatabaseConnectionError StorageError::DatabaseConnectionError
} }
diesel_models::errors::DatabaseError::NotFound => { diesel_models::errors::DatabaseError::NotFound => {
data_models::errors::StorageError::ValueNotFound("Value not found".to_string()) StorageError::ValueNotFound("Value not found".to_string())
}
diesel_models::errors::DatabaseError::UniqueViolation => {
data_models::errors::StorageError::DuplicateValue {
entity: "entity ",
key: None,
}
} }
diesel_models::errors::DatabaseError::UniqueViolation => StorageError::DuplicateValue {
entity: "entity ",
key: None,
},
diesel_models::errors::DatabaseError::NoFieldsToUpdate => { diesel_models::errors::DatabaseError::NoFieldsToUpdate => {
data_models::errors::StorageError::DatabaseError("No fields to update".to_string()) StorageError::DatabaseError("No fields to update".to_string())
} }
diesel_models::errors::DatabaseError::QueryGenerationFailed => { diesel_models::errors::DatabaseError::QueryGenerationFailed => {
data_models::errors::StorageError::DatabaseError("Query generation failed".to_string()) StorageError::DatabaseError("Query generation failed".to_string())
} }
diesel_models::errors::DatabaseError::Others => { diesel_models::errors::DatabaseError::Others => {
data_models::errors::StorageError::DatabaseError("Others".to_string()) StorageError::DatabaseError("Others".to_string())
} }
} }
} }

View File

@ -70,6 +70,9 @@ impl Default for ConnectorAuthentication {
#[allow(dead_code)] #[allow(dead_code)]
impl ConnectorAuthentication { impl ConnectorAuthentication {
/// # Panics
///
/// Will panic if `CONNECTOR_AUTH_FILE_PATH` env is not set
#[allow(clippy::expect_used)] #[allow(clippy::expect_used)]
pub fn new() -> Self { pub fn new() -> Self {
// Do `export CONNECTOR_AUTH_FILE_PATH="/hyperswitch/crates/router/tests/connectors/sample_auth.toml"` // Do `export CONNECTOR_AUTH_FILE_PATH="/hyperswitch/crates/router/tests/connectors/sample_auth.toml"`
@ -99,6 +102,9 @@ impl ConnectorAuthenticationMap {
&self.0 &self.0
} }
/// # Panics
///
/// Will panic if `CONNECTOR_AUTH_FILE_PATH` env is not set
#[allow(clippy::expect_used)] #[allow(clippy::expect_used)]
pub fn new() -> Self { pub fn new() -> Self {
// Do `export CONNECTOR_AUTH_FILE_PATH="/hyperswitch/crates/router/tests/connectors/sample_auth.toml"` // Do `export CONNECTOR_AUTH_FILE_PATH="/hyperswitch/crates/router/tests/connectors/sample_auth.toml"`

View File

@ -832,7 +832,7 @@ fn get_chrome_profile_path() -> Result<String, WebDriverError> {
fp.join(&MAIN_SEPARATOR.to_string()) fp.join(&MAIN_SEPARATOR.to_string())
}) })
.unwrap(); .unwrap();
base_path.push_str(r#"/Library/Application\ Support/Google/Chrome/Default"#); //Issue: 1573 base_path.push_str(r"/Library/Application\ Support/Google/Chrome/Default"); //Issue: 1573
Ok(base_path) Ok(base_path)
} }