refactor(api_keys): use a KMS encrypted API key hashing key and remove key ID prefix from plaintext API keys (#639)
Co-authored-by: Arun Raj M <jarnura47@gmail.com>
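As background (not part of the diff): the hashing key introduced below is used to compute a keyed hash of each plaintext API key before it is stored, so only the hash ever reaches the database. A minimal sketch of that idea, assuming a BLAKE3 keyed hash (which the `keyed_hash` calls in this change appear to wrap); the function names here are hypothetical:

```rust
// Illustrative only, not the hyperswitch implementation.
// Hash a plaintext API key with the configured 32-byte hashing key.
fn hash_api_key(hash_key: &[u8; 32], plaintext_api_key: &str) -> String {
    blake3::keyed_hash(hash_key, plaintext_api_key.as_bytes())
        .to_hex()
        .to_string()
}

// Verify a presented key against a stored hash. Comparing hex strings is enough for
// illustration; comparing `blake3::Hash` values directly is constant-time.
fn verify_api_key(hash_key: &[u8; 32], candidate: &str, stored_hash: &str) -> bool {
    hash_api_key(hash_key, candidate) == stored_hash
}
```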
@ -76,6 +76,9 @@ outgoing_enabled = true
[eph_key]
validity = 1

[api_keys]
hash_key = "0123456789abcdef0123456789abcdef0123456789abcdef0123456789abcdef"

[connectors.aci]
base_url = "https://eu-test.oppwa.com/"
@ -107,6 +107,16 @@ outgoing_enabled = true
[eph_key]
validity = 1

[api_keys]
# Key ID for the KMS managed key used to decrypt the API key hashing key
aws_key_id = ""
# The AWS region for the KMS managed key used to decrypt the API key hashing key
aws_region = ""
# Base64-encoded (KMS encrypted) ciphertext of the API key hashing key
kms_encrypted_hash_key = ""
# Hex-encoded 32-byte long (64 characters long when hex-encoded) key used for calculating hashes of API keys
hash_key = "0123456789abcdef0123456789abcdef0123456789abcdef0123456789abcdef"

# Connector configuration, provided attributes will be used to fulfill API requests.
# Examples provided here are sandbox/test base urls, can be replaced by live or mock
# base urls based on your need.
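The `hash_key` above is expected to be 32 random bytes, hex-encoded to 64 characters. Purely for illustration (this helper is not part of the change and assumes the `rand` and `hex` crates), such a value could be generated like this:

```rust
use rand::RngCore;

// Hypothetical helper: produce a 64-character hex string from 32 random bytes,
// suitable as a value for `hash_key`.
fn generate_api_key_hash_key() -> String {
    let mut key = [0u8; 32];
    rand::thread_rng().fill_bytes(&mut key);
    hex::encode(key)
}
```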
@ -63,6 +63,9 @@ cluster_urls = ["redis-queue:6379"]
max_attempts = 10
max_age = 365

[api_keys]
hash_key = "0123456789abcdef0123456789abcdef0123456789abcdef0123456789abcdef"

[connectors.aci]
base_url = "https://eu-test.oppwa.com/"
@ -12,7 +12,7 @@ build = "src/build.rs"
[features]
default = ["kv_store", "stripe", "oltp", "olap", "accounts_cache"]
kms = ["aws-config", "aws-sdk-kms"]
basilisk = ["josekit"]
basilisk = ["josekit", "kms"]
stripe = ["dep:serde_qs"]
sandbox = ["kms", "stripe", "basilisk"]
olap = []
@ -57,6 +57,7 @@ pub struct Settings {
pub webhooks: WebhooksSettings,
pub pm_filters: ConnectorFilters,
pub bank_config: BankRedirectConfig,
pub api_keys: ApiKeys,
}

#[derive(Debug, Deserialize, Clone, Default)]
@ -304,6 +305,26 @@ pub struct WebhooksSettings {
pub outgoing_enabled: bool,
}

#[derive(Debug, Deserialize, Clone, Default)]
#[serde(default)]
pub struct ApiKeys {
#[cfg(feature = "kms")]
pub aws_key_id: String,

#[cfg(feature = "kms")]
pub aws_region: String,

/// Base64-encoded (KMS encrypted) ciphertext of the key used for calculating hashes of API
/// keys
#[cfg(feature = "kms")]
pub kms_encrypted_hash_key: String,

/// Hex-encoded 32-byte long (64 characters long when hex-encoded) key used for calculating
/// hashes of API keys
#[cfg(not(feature = "kms"))]
pub hash_key: String,
}

impl Settings {
pub fn new() -> ApplicationResult<Self> {
Self::with_config_path(None)
@ -378,6 +399,7 @@ impl Settings {
#[cfg(feature = "kv_store")]
self.drainer.validate()?;
self.jwekey.validate()?;
self.api_keys.validate()?;

Ok(())
}
@ -184,3 +184,37 @@ impl super::settings::DrainerSettings {
})
}
}

impl super::settings::ApiKeys {
pub fn validate(&self) -> Result<(), ApplicationError> {
use common_utils::fp_utils::when;

#[cfg(feature = "kms")]
{
when(self.aws_key_id.is_default_or_empty(), || {
Err(ApplicationError::InvalidConfigurationValueError(
"API key AWS key ID must not be empty when KMS feature is enabled".into(),
))
})?;

when(self.aws_region.is_default_or_empty(), || {
Err(ApplicationError::InvalidConfigurationValueError(
"API key AWS region must not be empty when KMS feature is enabled".into(),
))
})?;

when(self.kms_encrypted_hash_key.is_default_or_empty(), || {
Err(ApplicationError::InvalidConfigurationValueError(
"API key hashing key must not be empty when KMS feature is enabled".into(),
))
})
}

#[cfg(not(feature = "kms"))]
when(self.hash_key.is_empty(), || {
Err(ApplicationError::InvalidConfigurationValueError(
"API key hashing key must not be empty".into(),
))
})
}
}
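For readers unfamiliar with `common_utils::fp_utils::when` used in the validation above, a minimal stand-in with the behaviour the code appears to rely on (run the error-producing closure only when the condition holds) might look like the following; this is an assumption about its semantics, not the actual implementation:

```rust
// Hypothetical stand-in for `fp_utils::when`.
fn when<E, F>(condition: bool, err: F) -> Result<(), E>
where
    F: FnOnce() -> Result<(), E>,
{
    if condition {
        err()
    } else {
        Ok(())
    }
}

fn main() {
    // Mirrors the hash_key check above, with a plain &str as a stand-in error type.
    let hash_key = String::new();
    let result = when(hash_key.is_empty(), || Err("API key hashing key must not be empty"));
    assert!(result.is_err());
}
```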
@ -1,9 +1,12 @@
use common_utils::{date_time, errors::CustomResult, fp_utils};
use error_stack::{report, IntoReport, ResultExt};
use masking::{PeekInterface, Secret};
use masking::{PeekInterface, Secret, StrongSecret};
use router_env::{instrument, tracing};

#[cfg(feature = "kms")]
use crate::services::kms;
use crate::{
configs::settings,
consts,
core::errors::{self, RouterResponse, StorageErrorExt},
db::StorageInterface,
@ -12,23 +15,52 @@ use crate::{
utils,
};

pub static HASH_KEY: tokio::sync::OnceCell<StrongSecret<[u8; PlaintextApiKey::HASH_KEY_LEN]>> =
tokio::sync::OnceCell::const_new();

pub async fn get_hash_key(
api_key_config: &settings::ApiKeys,
) -> errors::RouterResult<StrongSecret<[u8; PlaintextApiKey::HASH_KEY_LEN]>> {
#[cfg(feature = "kms")]
let hash_key = kms::KeyHandler::get_kms_decrypted_key(
&api_key_config.aws_region,
&api_key_config.aws_key_id,
api_key_config.kms_encrypted_hash_key.clone(),
)
.await
.change_context(errors::ApiErrorResponse::InternalServerError)
.attach_printable("Failed to KMS decrypt API key hashing key")?;

#[cfg(not(feature = "kms"))]
let hash_key = &api_key_config.hash_key;

<[u8; PlaintextApiKey::HASH_KEY_LEN]>::try_from(
hex::decode(hash_key)
.into_report()
.change_context(errors::ApiErrorResponse::InternalServerError)
.attach_printable("API key hash key has invalid hexadecimal data")?
.as_slice(),
)
.into_report()
.change_context(errors::ApiErrorResponse::InternalServerError)
.attach_printable("The API hashing key has incorrect length")
.map(StrongSecret::new)
}

// Defining new types `PlaintextApiKey` and `HashedApiKey` in the hopes of reducing the possibility
// of plaintext API key being stored in the data store.
pub struct PlaintextApiKey(Secret<String>);
pub struct HashedApiKey(String);

impl PlaintextApiKey {
const HASH_KEY_LEN: usize = 32;
pub const HASH_KEY_LEN: usize = 32;

const PREFIX_LEN: usize = 8;
const PREFIX_LEN: usize = 12;

pub fn new(length: usize) -> Self {
let env = router_env::env::prefix_for_env();
let key = common_utils::crypto::generate_cryptographically_secure_random_string(length);
Self(key.into())
}

pub fn new_hash_key() -> [u8; Self::HASH_KEY_LEN] {
common_utils::crypto::generate_cryptographically_secure_random_bytes()
Self(format!("{env}_{key}").into())
}

pub fn new_key_id() -> String {
@ -96,18 +128,20 @@ impl PlaintextApiKey {
#[instrument(skip_all)]
pub async fn create_api_key(
store: &dyn StorageInterface,
api_key_config: &settings::ApiKeys,
api_key: api::CreateApiKeyRequest,
merchant_id: String,
) -> RouterResponse<api::CreateApiKeyResponse> {
let hash_key = PlaintextApiKey::new_hash_key();
let hash_key = HASH_KEY
.get_or_try_init(|| get_hash_key(api_key_config))
.await?;
let plaintext_api_key = PlaintextApiKey::new(consts::API_KEY_LENGTH);
let api_key = storage::ApiKeyNew {
key_id: PlaintextApiKey::new_key_id(),
merchant_id,
name: api_key.name,
description: api_key.description,
hash_key: Secret::from(hex::encode(hash_key)),
hashed_api_key: plaintext_api_key.keyed_hash(&hash_key).into(),
hashed_api_key: plaintext_api_key.keyed_hash(hash_key.peek()).into(),
prefix: plaintext_api_key.prefix(),
created_at: date_time::now(),
expires_at: api_key.expiration.into(),
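The `HASH_KEY` `OnceCell` used above means the (possibly KMS-decrypted) hashing key is resolved at most once per process and then reused by every request. A self-contained sketch of that caching pattern, with a dummy loader standing in for `get_hash_key` (illustrative only; assumes the `tokio` crate):

```rust
// First caller initializes the cell (paying the decrypt/decode cost once);
// later callers get the cached key.
static HASH_KEY: tokio::sync::OnceCell<[u8; 32]> = tokio::sync::OnceCell::const_new();

// Stand-in for the KMS decrypt + hex decode performed by `get_hash_key`.
async fn load_hash_key() -> Result<[u8; 32], String> {
    Ok([0x42; 32])
}

#[tokio::main]
async fn main() -> Result<(), String> {
    let first = HASH_KEY.get_or_try_init(load_hash_key).await?;
    let second = HASH_KEY.get_or_try_init(load_hash_key).await?;
    // Both calls observe the same cached key; the loader ran only once.
    assert_eq!(first, second);
    Ok(())
}
```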
@ -198,14 +232,19 @@ impl From<HashedApiKey> for storage::HashedApiKey {

#[cfg(test)]
mod tests {
#![allow(clippy::unwrap_used)]
#![allow(clippy::expect_used, clippy::unwrap_used)]
use super::*;

#[test]
fn test_hashing_and_verification() {
#[tokio::test]
async fn test_hashing_and_verification() {
let settings = settings::Settings::new().expect("invalid settings");

let plaintext_api_key = PlaintextApiKey::new(consts::API_KEY_LENGTH);
let hash_key = PlaintextApiKey::new_hash_key();
let hashed_api_key = plaintext_api_key.keyed_hash(&hash_key);
let hash_key = HASH_KEY
.get_or_try_init(|| get_hash_key(&settings.api_keys))
.await
.unwrap();
let hashed_api_key = plaintext_api_key.keyed_hash(hash_key.peek());

assert_ne!(
plaintext_api_key.0.peek().as_bytes(),
@ -213,7 +252,7 @@ mod tests {
);

plaintext_api_key
.verify_hash(&hash_key, &hashed_api_key)
.verify_hash(hash_key.peek(), &hashed_api_key)
.unwrap();
}
}
@ -39,7 +39,13 @@ pub async fn api_key_create(
&req,
payload,
|state, _, payload| async {
api_keys::create_api_key(&*state.store, payload, merchant_id.clone()).await
api_keys::create_api_key(
&*state.store,
&state.conf.api_keys,
payload,
merchant_id.clone(),
)
.await
},
&auth::AdminApiAuth,
)
@ -2,6 +2,7 @@ pub mod api;
pub mod authentication;
#[cfg(feature = "basilisk")]
pub mod encryption;
pub mod kms;
pub mod logger;

use std::sync::{atomic, Arc};
@ -9,6 +9,7 @@ use ring::{aead::*, error::Unspecified};
use crate::{
configs::settings::Jwekey,
core::errors::{self, CustomResult},
services::kms::KeyHandler,
utils,
};
@ -50,72 +51,6 @@ impl NonceSequence for NonceGen {
}
}

pub struct KeyHandler;

#[cfg(feature = "kms")]
mod kms {
use aws_config::meta::region::RegionProviderChain;
use aws_sdk_kms::{types::Blob, Client, Region};
use base64::Engine;

use super::*;
use crate::consts;

impl KeyHandler {
// Fetch the KMS-decrypted key (Amazon KMS decryption).
// This expects a base64-encoded input; the values are set in the environment via the AWS CLI,
// which already base64-encodes them, so no additional encoding is needed here.
pub async fn get_kms_decrypted_key(
aws_keys: &Jwekey,
kms_enc_key: String,
) -> CustomResult<String, errors::EncryptionError> {
let region = aws_keys.aws_region.to_string();
let key_id = aws_keys.aws_key_id.clone();
let region_provider = RegionProviderChain::first_try(Region::new(region));
let shared_config = aws_config::from_env().region(region_provider).load().await;
let client = Client::new(&shared_config);
let data = consts::BASE64_ENGINE
.decode(kms_enc_key)
.into_report()
.change_context(errors::EncryptionError)
.attach_printable("Error decoding from base64")?;
let blob = Blob::new(data);
let resp = client
.decrypt()
.key_id(key_id)
.ciphertext_blob(blob)
.send()
.await
.into_report()
.change_context(errors::EncryptionError)
.attach_printable("Error decrypting kms encrypted data")?;
match resp.plaintext() {
Some(inner) => {
let bytes = inner.as_ref().to_vec();
let res = String::from_utf8(bytes)
.into_report()
.change_context(errors::EncryptionError)
.attach_printable("Could not convert to UTF-8")?;
Ok(res)
}
None => Err(report!(errors::EncryptionError)
.attach_printable("Missing plaintext in response")),
}
}
}
}

#[cfg(not(feature = "kms"))]
impl KeyHandler {
pub async fn get_kms_decrypted_key(
_aws_keys: &Jwekey,
key: String,
) -> CustomResult<String, errors::EncryptionError> {
Ok(key)
}
}

pub fn encrypt(msg: &String, key: &[u8]) -> CustomResult<Vec<u8>, errors::EncryptionError> {
let nonce_seed = rand::random();
let mut sealing_key = {
@ -184,9 +119,19 @@ pub async fn encrypt_jwe(
let alg = jwe::RSA_OAEP_256;
let key_id = get_key_id(keys);
let public_key = if key_id == keys.locker_key_identifier1 {
KeyHandler::get_kms_decrypted_key(keys, keys.locker_encryption_key1.to_string()).await?
KeyHandler::get_kms_decrypted_key(
&keys.aws_region,
&keys.aws_key_id,
keys.locker_encryption_key1.to_string(),
)
.await?
} else {
KeyHandler::get_kms_decrypted_key(keys, keys.locker_encryption_key2.to_string()).await?
KeyHandler::get_kms_decrypted_key(
&keys.aws_region,
&keys.aws_key_id,
keys.locker_encryption_key2.to_string(),
)
.await?
};
let payload = msg.as_bytes();
let enc = "A256GCM";
@ -213,9 +158,19 @@ pub async fn decrypt_jwe(
let alg = jwe::RSA_OAEP_256;
let key_id = get_key_id(keys);
let private_key = if key_id == keys.locker_key_identifier1 {
KeyHandler::get_kms_decrypted_key(keys, keys.locker_decryption_key1.to_string()).await?
KeyHandler::get_kms_decrypted_key(
&keys.aws_region,
&keys.aws_key_id,
keys.locker_decryption_key1.to_string(),
)
.await?
} else {
KeyHandler::get_kms_decrypted_key(keys, keys.locker_decryption_key2.to_string()).await?
KeyHandler::get_kms_decrypted_key(
&keys.aws_region,
&keys.aws_key_id,
keys.locker_decryption_key2.to_string(),
)
.await?
};

let decrypter = alg
crates/router/src/services/kms.rs (new file, 69 lines)
@ -0,0 +1,69 @@
use crate::core::errors::{self, CustomResult};

pub struct KeyHandler;

#[cfg(feature = "kms")]
mod aws_kms {
use aws_config::meta::region::RegionProviderChain;
use aws_sdk_kms::{types::Blob, Client, Region};
use base64::Engine;
use error_stack::{report, IntoReport, ResultExt};

use super::*;
use crate::consts;

impl KeyHandler {
// Fetch the KMS-decrypted key (Amazon KMS decryption).
// This expects a base64-encoded input; the values are set in the environment via the AWS CLI,
// which already base64-encodes them, so no additional encoding is needed here.
pub async fn get_kms_decrypted_key(
aws_region: &str,
aws_key_id: &str,
kms_enc_key: String,
) -> CustomResult<String, errors::EncryptionError> {
let region_provider =
RegionProviderChain::first_try(Region::new(aws_region.to_owned()));
let shared_config = aws_config::from_env().region(region_provider).load().await;
let client = Client::new(&shared_config);
let data = consts::BASE64_ENGINE
.decode(kms_enc_key)
.into_report()
.change_context(errors::EncryptionError)
.attach_printable("Error decoding from base64")?;
let blob = Blob::new(data);
let resp = client
.decrypt()
.key_id(aws_key_id)
.ciphertext_blob(blob)
.send()
.await
.into_report()
.change_context(errors::EncryptionError)
.attach_printable("Error decrypting kms encrypted data")?;
match resp.plaintext() {
Some(inner) => {
let bytes = inner.as_ref().to_vec();
let res = String::from_utf8(bytes)
.into_report()
.change_context(errors::EncryptionError)
.attach_printable("Could not convert to UTF-8")?;
Ok(res)
}
None => Err(report!(errors::EncryptionError)
.attach_printable("Missing plaintext in response")),
}
}
}
}

#[cfg(not(feature = "kms"))]
impl KeyHandler {
pub async fn get_kms_decrypted_key(
_aws_region: &str,
_aws_key_id: &str,
key: String,
) -> CustomResult<String, errors::EncryptionError> {
Ok(key)
}
}
@ -392,15 +392,11 @@ impl

let (api_key, plaintext_api_key) = item;
Self {
key_id: api_key.key_id.clone(),
key_id: api_key.key_id,
merchant_id: api_key.merchant_id,
name: api_key.name,
description: api_key.description,
api_key: StrongSecret::from(format!(
"{}-{}",
api_key.key_id,
plaintext_api_key.peek().to_owned()
)),
api_key: StrongSecret::from(plaintext_api_key.peek().to_owned()),
created: api_key.created_at,
expiration: api_key.expires_at.into(),
}
@ -410,14 +406,13 @@ impl
impl ForeignFrom<storage_models::api_keys::ApiKey>
for api_models::api_keys::RetrieveApiKeyResponse
{
fn foreign_from(item: storage_models::api_keys::ApiKey) -> Self {
let api_key = item;
fn foreign_from(api_key: storage_models::api_keys::ApiKey) -> Self {
Self {
key_id: api_key.key_id.clone(),
key_id: api_key.key_id,
merchant_id: api_key.merchant_id,
name: api_key.name,
description: api_key.description,
prefix: format!("{}-{}", api_key.key_id, api_key.prefix).into(),
prefix: api_key.prefix.into(),
created: api_key.created_at,
expiration: api_key.expires_at.into(),
}
@ -427,8 +422,7 @@ impl ForeignFrom<storage_models::api_keys::ApiKey>
impl ForeignFrom<api_models::api_keys::UpdateApiKeyRequest>
for storage_models::api_keys::ApiKeyUpdate
{
fn foreign_from(item: api_models::api_keys::UpdateApiKeyRequest) -> Self {
let api_key = item;
fn foreign_from(api_key: api_models::api_keys::UpdateApiKeyRequest) -> Self {
Self::Update {
name: api_key.name,
description: api_key.description,
@ -1,5 +1,4 @@
use diesel::{AsChangeset, AsExpression, Identifiable, Insertable, Queryable};
use masking::Secret;
use time::PrimitiveDateTime;

use crate::schema::api_keys;
@ -11,7 +10,6 @@ pub struct ApiKey {
pub merchant_id: String,
pub name: String,
pub description: Option<String>,
pub hash_key: Secret<String>,
pub hashed_api_key: HashedApiKey,
pub prefix: String,
pub created_at: PrimitiveDateTime,
@ -26,7 +24,6 @@ pub struct ApiKeyNew {
pub merchant_id: String,
pub name: String,
pub description: Option<String>,
pub hash_key: Secret<String>,
pub hashed_api_key: HashedApiKey,
pub prefix: String,
pub created_at: PrimitiveDateTime,
@ -34,7 +34,6 @@ diesel::table! {
merchant_id -> Varchar,
name -> Varchar,
description -> Nullable<Varchar>,
hash_key -> Varchar,
hashed_api_key -> Varchar,
prefix -> Varchar,
created_at -> Timestamp,
@ -49,6 +49,9 @@ locker_decryption_key2 = ""
[webhooks]
outgoing_enabled = true

[api_keys]
hash_key = "0123456789abcdef0123456789abcdef0123456789abcdef0123456789abcdef"

[connectors.aci]
base_url = "https://eu-test.oppwa.com/"
@ -0,0 +1,11 @@
/*
We could have added the `hash_key` column with a default of the plaintext key
used for hashing API keys, but we don't do that as it is a hassle to update
this migration with the plaintext hash key.
*/
TRUNCATE TABLE api_keys;

ALTER TABLE api_keys
ADD COLUMN hash_key VARCHAR(64) NOT NULL;

ALTER TABLE api_keys DROP CONSTRAINT api_keys_hashed_api_key_key;
migrations/2023-02-21-094019_api_keys_remove_hash_key/up.sql (new file, 11 lines)
@ -0,0 +1,11 @@
ALTER TABLE api_keys DROP COLUMN hash_key;

/*
Once we've dropped the `hash_key` column, we cannot use the existing API keys
from the `api_keys` table anymore, as the `hash_key` is a random string that
we no longer have.
*/
TRUNCATE TABLE api_keys;

ALTER TABLE api_keys
ADD CONSTRAINT api_keys_hashed_api_key_key UNIQUE (hashed_api_key);