repo stringclasses 4 values | file_path stringlengths 6 193 | extension stringclasses 23 values | content stringlengths 0 1.73M | token_count int64 0 724k | __index_level_0__ int64 0 10.8k |
|---|---|---|---|---|---|
hyperswitch | crates/router/src/core/payment_methods/surcharge_decision_configs.rs | .rs | use api_models::{
payment_methods::SurchargeDetailsResponse,
payments, routing,
surcharge_decision_configs::{self, SurchargeDecisionConfigs, SurchargeDecisionManagerRecord},
};
#[cfg(all(
any(feature = "v1", feature = "v2"),
not(feature = "payment_methods_v2")
))]
use common_utils::{ext_traits::StringExt, types as common_utils_types};
#[cfg(all(feature = "v2", feature = "payment_methods_v2"))]
use common_utils::{
ext_traits::{OptionExt, StringExt},
types as common_utils_types,
};
use error_stack::{self, ResultExt};
use euclid::{
backend,
backend::{inputs as dsl_inputs, EuclidBackend},
};
use router_env::{instrument, logger, tracing};
use serde::{Deserialize, Serialize};
use storage_impl::redis::cache::{self, SURCHARGE_CACHE};
use crate::{
core::{
errors::{self, ConditionalConfigError as ConfigError},
payments::{
conditional_configs::ConditionalConfigResult, routing::make_dsl_input_for_surcharge,
types,
},
},
db::StorageInterface,
types::{
storage::{self, payment_attempt::PaymentAttemptExt},
transformers::ForeignTryFrom,
},
SessionState,
};
/// Cache entry pairing a compiled surcharge DSL interpreter with the
/// merchant-level surcharge configuration it was built from, so both can be
/// fetched from the in-memory cache in one lookup.
#[derive(Debug, Serialize, Deserialize, Clone)]
pub struct VirInterpreterBackendCacheWrapper {
    // Interpreter compiled from the merchant's surcharge decision program.
    cached_algorithm: backend::VirInterpreterBackend<SurchargeDecisionConfigs>,
    // Merchant-wide surcharge configuration carried on the same record.
    merchant_surcharge_configs: surcharge_decision_configs::MerchantSurchargeConfigs,
}
impl TryFrom<SurchargeDecisionManagerRecord> for VirInterpreterBackendCacheWrapper {
type Error = error_stack::Report<ConfigError>;
fn try_from(value: SurchargeDecisionManagerRecord) -> Result<Self, Self::Error> {
let cached_algorithm = backend::VirInterpreterBackend::with_program(value.algorithm)
.change_context(ConfigError::DslBackendInitError)
.attach_printable("Error initializing DSL interpreter backend")?;
let merchant_surcharge_configs = value.merchant_surcharge_configs;
Ok(Self {
cached_algorithm,
merchant_surcharge_configs,
})
}
}
/// Where the surcharge amount for a payment attempt comes from.
enum SurchargeSource {
    /// Surcharge will be generated through the surcharge rules
    Generate(VirInterpreterBackendCacheWrapper),
    /// Surcharge is predefined by the merchant through payment create request
    Predetermined(payments::RequestSurchargeDetails),
}
impl SurchargeSource {
    /// Computes the surcharge details for the given backend input.
    ///
    /// For the `Generate` variant the DSL program is executed and, when it
    /// yields surcharge details, the result is also recorded in the provided
    /// surcharge metadata under the given key. For `Predetermined`, the
    /// merchant-supplied surcharge is converted directly and the metadata is
    /// left untouched.
    pub fn generate_surcharge_details_and_populate_surcharge_metadata(
        &self,
        backend_input: &backend::BackendInput,
        payment_attempt: &storage::PaymentAttempt,
        surcharge_metadata_and_key: (&mut types::SurchargeMetadata, types::SurchargeKey),
    ) -> ConditionalConfigResult<Option<types::SurchargeDetails>> {
        match self {
            Self::Generate(interpreter) => {
                let surcharge_output = execute_dsl_and_get_conditional_config(
                    backend_input.clone(),
                    &interpreter.cached_algorithm,
                )?;
                let computed_details = match surcharge_output.surcharge_details {
                    Some(output) => Some(get_surcharge_details_from_surcharge_output(
                        output,
                        payment_attempt,
                    )?),
                    None => None,
                };
                if let Some(details) = &computed_details {
                    let (surcharge_metadata, surcharge_key) = surcharge_metadata_and_key;
                    surcharge_metadata.insert_surcharge_details(surcharge_key, details.clone());
                }
                Ok(computed_details)
            }
            Self::Predetermined(request_surcharge_details) => Ok(Some(
                types::SurchargeDetails::from((request_surcharge_details, payment_attempt)),
            )),
        }
    }
}
/// v2 counterpart of the surcharge decision flow for the payment method list.
/// Not implemented yet; calling this panics via `todo!`.
#[cfg(feature = "v2")]
pub async fn perform_surcharge_decision_management_for_payment_method_list(
    _state: &SessionState,
    _algorithm_ref: routing::RoutingAlgorithmRef,
    _payment_attempt: &storage::PaymentAttempt,
    _payment_intent: &storage::PaymentIntent,
    _billing_address: Option<payments::Address>,
    _response_payment_method_types: &mut [api_models::payment_methods::ResponsePaymentMethodsEnabled],
) -> ConditionalConfigResult<(
    types::SurchargeMetadata,
    surcharge_decision_configs::MerchantSurchargeConfigs,
)> {
    todo!()
}
/// Computes surcharge details for every payment method (and card network) in
/// the payment-method-list response, populating both the response entries and
/// the shared surcharge metadata.
///
/// The surcharge source is resolved once: a surcharge fixed at payment-create
/// time wins; otherwise the merchant's configured surcharge rules are used;
/// with neither, an empty metadata set is returned.
#[cfg(feature = "v1")]
pub async fn perform_surcharge_decision_management_for_payment_method_list(
    state: &SessionState,
    algorithm_ref: routing::RoutingAlgorithmRef,
    payment_attempt: &storage::PaymentAttempt,
    payment_intent: &storage::PaymentIntent,
    billing_address: Option<hyperswitch_domain_models::address::Address>,
    response_payment_method_types: &mut [api_models::payment_methods::ResponsePaymentMethodsEnabled],
) -> ConditionalConfigResult<(
    types::SurchargeMetadata,
    surcharge_decision_configs::MerchantSurchargeConfigs,
)> {
    // Converts computed surcharge details into the API response representation.
    // Extracted because the same conversion was previously duplicated verbatim
    // in the card-network and non-card-network branches below.
    fn to_surcharge_response(
        surcharge_details: types::SurchargeDetails,
        payment_attempt: &storage::PaymentAttempt,
    ) -> ConditionalConfigResult<SurchargeDetailsResponse> {
        SurchargeDetailsResponse::foreign_try_from((&surcharge_details, payment_attempt))
            .change_context(ConfigError::DslExecutionError)
            .attach_printable("Error while constructing Surcharge response type")
    }
    let mut surcharge_metadata = types::SurchargeMetadata::new(payment_attempt.attempt_id.clone());
    let (surcharge_source, merchant_surcharge_configs) = match (
        payment_attempt.get_surcharge_details(),
        algorithm_ref.surcharge_config_algo_id,
    ) {
        (Some(request_surcharge_details), _) => (
            SurchargeSource::Predetermined(request_surcharge_details),
            surcharge_decision_configs::MerchantSurchargeConfigs::default(),
        ),
        (None, Some(algorithm_id)) => {
            let cached_algo = ensure_algorithm_cached(
                &*state.store,
                &payment_attempt.merchant_id,
                algorithm_id.as_str(),
            )
            .await?;
            let merchant_surcharge_config = cached_algo.merchant_surcharge_configs.clone();
            (
                SurchargeSource::Generate(cached_algo),
                merchant_surcharge_config,
            )
        }
        (None, None) => {
            // Nothing configured: no surcharge to compute for any entry.
            return Ok((
                surcharge_metadata,
                surcharge_decision_configs::MerchantSurchargeConfigs::default(),
            ))
        }
    };
    let surcharge_source_log_message = match &surcharge_source {
        SurchargeSource::Generate(_) => "Surcharge was calculated through surcharge rules",
        SurchargeSource::Predetermined(_) => "Surcharge was sent in payment create request",
    };
    logger::debug!(payment_method_list_surcharge_source = surcharge_source_log_message);
    let mut backend_input =
        make_dsl_input_for_surcharge(payment_attempt, payment_intent, billing_address)
            .change_context(ConfigError::InputConstructionError)?;
    for payment_methods_enabled in response_payment_method_types.iter_mut() {
        for payment_method_type_response in
            &mut payment_methods_enabled.payment_method_types.iter_mut()
        {
            let payment_method_type = payment_method_type_response.payment_method_type;
            backend_input.payment_method.payment_method_type = Some(payment_method_type);
            backend_input.payment_method.payment_method =
                Some(payment_methods_enabled.payment_method);
            if let Some(card_network_list) = &mut payment_method_type_response.card_networks {
                // Card payment methods get one surcharge entry per card network.
                for card_network_type in card_network_list.iter_mut() {
                    backend_input.payment_method.card_network =
                        Some(card_network_type.card_network.clone());
                    let surcharge_details = surcharge_source
                        .generate_surcharge_details_and_populate_surcharge_metadata(
                            &backend_input,
                            payment_attempt,
                            (
                                &mut surcharge_metadata,
                                types::SurchargeKey::PaymentMethodData(
                                    payment_methods_enabled.payment_method,
                                    payment_method_type_response.payment_method_type,
                                    Some(card_network_type.card_network.clone()),
                                ),
                            ),
                        )?;
                    card_network_type.surcharge_details = surcharge_details
                        .map(|details| to_surcharge_response(details, payment_attempt))
                        .transpose()?;
                }
            } else {
                // Non-card payment methods get a single surcharge entry.
                let surcharge_details = surcharge_source
                    .generate_surcharge_details_and_populate_surcharge_metadata(
                        &backend_input,
                        payment_attempt,
                        (
                            &mut surcharge_metadata,
                            types::SurchargeKey::PaymentMethodData(
                                payment_methods_enabled.payment_method,
                                payment_method_type_response.payment_method_type,
                                None,
                            ),
                        ),
                    )?;
                payment_method_type_response.surcharge_details = surcharge_details
                    .map(|details| to_surcharge_response(details, payment_attempt))
                    .transpose()?;
            }
        }
    }
    Ok((surcharge_metadata, merchant_surcharge_configs))
}
/// Computes surcharge details for every payment method type offered in the
/// session flow, recording each result in the returned surcharge metadata.
#[cfg(feature = "v1")]
pub async fn perform_surcharge_decision_management_for_session_flow(
    state: &SessionState,
    algorithm_ref: routing::RoutingAlgorithmRef,
    payment_attempt: &storage::PaymentAttempt,
    payment_intent: &storage::PaymentIntent,
    billing_address: Option<hyperswitch_domain_models::address::Address>,
    payment_method_type_list: &Vec<common_enums::PaymentMethodType>,
) -> ConditionalConfigResult<types::SurchargeMetadata> {
    let mut surcharge_metadata = types::SurchargeMetadata::new(payment_attempt.attempt_id.clone());
    // A surcharge fixed at payment-create time wins; otherwise fall back to the
    // merchant's configured rules; with neither, there is nothing to compute.
    let surcharge_source = if let Some(request_surcharge_details) =
        payment_attempt.get_surcharge_details()
    {
        SurchargeSource::Predetermined(request_surcharge_details)
    } else if let Some(algorithm_id) = algorithm_ref.surcharge_config_algo_id {
        let cached_algo = ensure_algorithm_cached(
            &*state.store,
            &payment_attempt.merchant_id,
            algorithm_id.as_str(),
        )
        .await?;
        SurchargeSource::Generate(cached_algo)
    } else {
        return Ok(surcharge_metadata);
    };
    let mut backend_input =
        make_dsl_input_for_surcharge(payment_attempt, payment_intent, billing_address)
            .change_context(ConfigError::InputConstructionError)?;
    for &payment_method_type in payment_method_type_list {
        backend_input.payment_method.payment_method_type = Some(payment_method_type);
        // in case of session flow, payment_method will always be wallet
        backend_input.payment_method.payment_method = Some(payment_method_type.into());
        surcharge_source.generate_surcharge_details_and_populate_surcharge_metadata(
            &backend_input,
            payment_attempt,
            (
                &mut surcharge_metadata,
                types::SurchargeKey::PaymentMethodData(
                    payment_method_type.into(),
                    payment_method_type,
                    None,
                ),
            ),
        )?;
    }
    Ok(surcharge_metadata)
}
/// Computes surcharge details for each of a customer's saved payment methods,
/// keyed by payment token, and writes the converted result back onto each
/// list entry.
#[cfg(all(
    any(feature = "v1", feature = "v2"),
    not(feature = "payment_methods_v2")
))]
pub async fn perform_surcharge_decision_management_for_saved_cards(
    state: &SessionState,
    algorithm_ref: routing::RoutingAlgorithmRef,
    payment_attempt: &storage::PaymentAttempt,
    payment_intent: &storage::PaymentIntent,
    customer_payment_method_list: &mut [api_models::payment_methods::CustomerPaymentMethod],
) -> ConditionalConfigResult<types::SurchargeMetadata> {
    let mut surcharge_metadata = types::SurchargeMetadata::new(payment_attempt.attempt_id.clone());
    // A surcharge fixed at payment-create time wins; otherwise fall back to the
    // merchant's configured rules; with neither, there is nothing to compute.
    let surcharge_source = if let Some(request_surcharge_details) =
        payment_attempt.get_surcharge_details()
    {
        SurchargeSource::Predetermined(request_surcharge_details)
    } else if let Some(algorithm_id) = algorithm_ref.surcharge_config_algo_id {
        let cached_algo = ensure_algorithm_cached(
            &*state.store,
            &payment_attempt.merchant_id,
            algorithm_id.as_str(),
        )
        .await?;
        SurchargeSource::Generate(cached_algo)
    } else {
        return Ok(surcharge_metadata);
    };
    let surcharge_source_log_message = match &surcharge_source {
        SurchargeSource::Generate(_) => "Surcharge was calculated through surcharge rules",
        SurchargeSource::Predetermined(_) => "Surcharge was sent in payment create request",
    };
    logger::debug!(customer_saved_card_list_surcharge_source = surcharge_source_log_message);
    let mut backend_input = make_dsl_input_for_surcharge(payment_attempt, payment_intent, None)
        .change_context(ConfigError::InputConstructionError)?;
    for customer_payment_method in customer_payment_method_list.iter_mut() {
        let payment_token = customer_payment_method.payment_token.clone();
        backend_input.payment_method.payment_method = Some(customer_payment_method.payment_method);
        backend_input.payment_method.payment_method_type =
            customer_payment_method.payment_method_type;
        // A stored card's scheme string is parsed into a typed card network so
        // the DSL program can match on it.
        backend_input.payment_method.card_network = match customer_payment_method
            .card
            .as_ref()
            .and_then(|card| card.scheme.as_ref())
        {
            Some(scheme) => Some(
                scheme
                    .clone()
                    .parse_enum("CardNetwork")
                    .change_context(ConfigError::DslExecutionError)?,
            ),
            None => None,
        };
        let surcharge_details = surcharge_source
            .generate_surcharge_details_and_populate_surcharge_metadata(
                &backend_input,
                payment_attempt,
                (
                    &mut surcharge_metadata,
                    types::SurchargeKey::Token(payment_token),
                ),
            )?;
        customer_payment_method.surcharge_details = surcharge_details
            .map(|details| {
                SurchargeDetailsResponse::foreign_try_from((&details, payment_attempt))
                    .change_context(ConfigError::DslParsingError)
            })
            .transpose()?;
    }
    Ok(surcharge_metadata)
}
// TODO: uncomment and resolve compiler error when required
// #[cfg(all(feature = "v2", feature = "payment_methods_v2"))]
// pub async fn perform_surcharge_decision_management_for_saved_cards(
// state: &SessionState,
// algorithm_ref: routing::RoutingAlgorithmRef,
// payment_attempt: &storage::PaymentAttempt,
// payment_intent: &storage::PaymentIntent,
// customer_payment_method_list: &mut [api_models::payment_methods::CustomerPaymentMethod],
// ) -> ConditionalConfigResult<types::SurchargeMetadata> {
// // let mut surcharge_metadata = types::SurchargeMetadata::new(payment_attempt.id.clone());
// let mut surcharge_metadata = todo!();
// let surcharge_source = match (
// payment_attempt.get_surcharge_details(),
// algorithm_ref.surcharge_config_algo_id,
// ) {
// (Some(request_surcharge_details), _) => {
// SurchargeSource::Predetermined(request_surcharge_details)
// }
// (None, Some(algorithm_id)) => {
// let cached_algo = ensure_algorithm_cached(
// &*state.store,
// &payment_attempt.merchant_id,
// algorithm_id.as_str(),
// )
// .await?;
// SurchargeSource::Generate(cached_algo)
// }
// (None, None) => return Ok(surcharge_metadata),
// };
// let surcharge_source_log_message = match &surcharge_source {
// SurchargeSource::Generate(_) => "Surcharge was calculated through surcharge rules",
// SurchargeSource::Predetermined(_) => "Surcharge was sent in payment create request",
// };
// logger::debug!(customer_saved_card_list_surcharge_source = surcharge_source_log_message);
// let mut backend_input = make_dsl_input_for_surcharge(payment_attempt, payment_intent, None)
// .change_context(ConfigError::InputConstructionError)?;
// for customer_payment_method in customer_payment_method_list.iter_mut() {
// let payment_token = customer_payment_method
// .payment_token
// .clone()
// .get_required_value("payment_token")
// .change_context(ConfigError::InputConstructionError)?;
// backend_input.payment_method.payment_method =
// Some(customer_payment_method.payment_method_type);
// backend_input.payment_method.payment_method_type =
// customer_payment_method.payment_method_subtype;
// let card_network = match customer_payment_method.payment_method_data.as_ref() {
// Some(api_models::payment_methods::PaymentMethodListData::Card(card)) => {
// card.card_network.clone()
// }
// _ => None,
// };
// backend_input.payment_method.card_network = card_network;
// let surcharge_details = surcharge_source
// .generate_surcharge_details_and_populate_surcharge_metadata(
// &backend_input,
// payment_attempt,
// (
// &mut surcharge_metadata,
// types::SurchargeKey::Token(payment_token),
// ),
// )?;
// customer_payment_method.surcharge_details = surcharge_details
// .map(|surcharge_details| {
// SurchargeDetailsResponse::foreign_try_from((&surcharge_details, payment_attempt))
// .change_context(ConfigError::DslParsingError)
// })
// .transpose()?;
// }
// Ok(surcharge_metadata)
// }
/// v2 counterpart of the v1 surcharge-output conversion below.
/// Not implemented yet; calling this panics via `todo!`.
#[cfg(feature = "v2")]
fn get_surcharge_details_from_surcharge_output(
    _surcharge_details: surcharge_decision_configs::SurchargeDetailsOutput,
    _payment_attempt: &storage::PaymentAttempt,
) -> ConditionalConfigResult<types::SurchargeDetails> {
    todo!()
}
/// Converts a DSL surcharge output into concrete [`types::SurchargeDetails`]
/// for the given payment attempt: fixed surcharges are taken as-is, rate
/// surcharges are applied (ceiled) to the attempt's total amount, and any
/// configured tax is applied on top of the computed surcharge amount.
///
/// # Errors
/// Returns `DslExecutionError` when applying a percentage to the amount fails.
#[cfg(feature = "v1")]
fn get_surcharge_details_from_surcharge_output(
    surcharge_details: surcharge_decision_configs::SurchargeDetailsOutput,
    payment_attempt: &storage::PaymentAttempt,
) -> ConditionalConfigResult<types::SurchargeDetails> {
    // Resolve the surcharge variant once, producing both the concrete amount
    // and the representation stored on the result. (Previously the value was
    // cloned and matched here, then matched a second time when building the
    // struct below — one match avoids the redundant clone.)
    let (surcharge, surcharge_amount) = match surcharge_details.surcharge {
        surcharge_decision_configs::SurchargeOutput::Fixed { amount } => {
            (common_utils_types::Surcharge::Fixed(amount), amount)
        }
        surcharge_decision_configs::SurchargeOutput::Rate(percentage) => {
            let amount = percentage
                .clone()
                .apply_and_ceil_result(payment_attempt.net_amount.get_total_amount())
                .change_context(ConfigError::DslExecutionError)
                .attach_printable("Failed to Calculate surcharge amount by applying percentage")?;
            (common_utils_types::Surcharge::Rate(percentage), amount)
        }
    };
    // Optional tax applied on top of the computed surcharge amount; defaults
    // to zero when no tax percentage is configured.
    let tax_on_surcharge_amount = surcharge_details
        .tax_on_surcharge
        .clone()
        .map(|tax_on_surcharge| {
            tax_on_surcharge
                .apply_and_ceil_result(surcharge_amount)
                .change_context(ConfigError::DslExecutionError)
                .attach_printable("Failed to Calculate tax amount")
        })
        .transpose()?
        .unwrap_or_default();
    Ok(types::SurchargeDetails {
        original_amount: payment_attempt.net_amount.get_order_amount(),
        surcharge,
        tax_on_surcharge: surcharge_details.tax_on_surcharge,
        surcharge_amount,
        tax_on_surcharge_amount,
    })
}
/// Returns the compiled surcharge interpreter for the given algorithm id,
/// loading it from config storage and compiling it on a cache miss.
#[instrument(skip_all)]
pub async fn ensure_algorithm_cached(
    store: &dyn StorageInterface,
    merchant_id: &common_utils::id_type::MerchantId,
    algorithm_id: &str,
) -> ConditionalConfigResult<VirInterpreterBackendCacheWrapper> {
    let cache_key = merchant_id.get_surcharge_dsk_key();
    // Invoked by the cache layer only when the entry is absent: fetch the raw
    // config, deserialize the decision record, and compile the DSL program.
    let build_interpreter = || async {
        let config: diesel_models::Config = store.find_config_by_key(algorithm_id).await?;
        let record: SurchargeDecisionManagerRecord = config
            .config
            .parse_struct("Program")
            .change_context(errors::StorageError::DeserializationFailed)
            .attach_printable("Error parsing routing algorithm from configs")?;
        VirInterpreterBackendCacheWrapper::try_from(record)
            .change_context(errors::StorageError::ValueNotFound("Program".to_string()))
            .attach_printable("Error initializing DSL interpreter backend")
    };
    cache::get_or_populate_in_memory(
        store.get_cache_store().as_ref(),
        &cache_key,
        build_interpreter,
        &SURCHARGE_CACHE,
    )
    .await
    .change_context(ConfigError::CacheMiss)
    .attach_printable("Unable to retrieve cached routing algorithm even after refresh")
}
/// Runs the compiled surcharge DSL program against the given backend input
/// and returns the resulting surcharge configuration.
pub fn execute_dsl_and_get_conditional_config(
    backend_input: dsl_inputs::BackendInput,
    interpreter: &backend::VirInterpreterBackend<SurchargeDecisionConfigs>,
) -> ConditionalConfigResult<SurchargeDecisionConfigs> {
    let execution_output = interpreter
        .execute(backend_input)
        .change_context(ConfigError::DslExecutionError)?;
    Ok(execution_output.connector_selection)
}
| 4,338 | 1,706 |
hyperswitch | crates/router/src/core/payment_methods/migration.rs | .rs | use actix_multipart::form::{bytes::Bytes, text::Text, MultipartForm};
use api_models::payment_methods::{PaymentMethodMigrationResponse, PaymentMethodRecord};
use csv::Reader;
use error_stack::ResultExt;
use masking::PeekInterface;
use rdkafka::message::ToBytes;
use router_env::{instrument, tracing};
use crate::{
core::{errors, payment_methods::cards::migrate_payment_method},
routes, services,
types::{api, domain},
};
/// Migrates a batch of payment method records, producing one response entry
/// per record. Malformed records are reported in the response list rather
/// than aborting the whole batch.
pub async fn migrate_payment_methods(
    state: routes::SessionState,
    payment_methods: Vec<PaymentMethodRecord>,
    merchant_id: &common_utils::id_type::MerchantId,
    merchant_account: &domain::MerchantAccount,
    key_store: &domain::MerchantKeyStore,
    mca_id: Option<common_utils::id_type::MerchantConnectorAccountId>,
) -> errors::RouterResponse<Vec<PaymentMethodMigrationResponse>> {
    let mut result = Vec::with_capacity(payment_methods.len());
    for record in payment_methods {
        // Bind the request once here; the original code matched the Result and
        // then unwrapped it again with an unreachable `req?`.
        let req = match api::PaymentMethodMigrate::try_from((
            record.clone(),
            merchant_id.clone(),
            mca_id.clone(),
        ))
        .map_err(|err| errors::ApiErrorResponse::InvalidRequestData {
            message: format!("error: {:?}", err),
        })
        .attach_printable("record deserialization failed")
        {
            Ok(req) => req,
            Err(e) => {
                result.push(PaymentMethodMigrationResponse::from((
                    Err(e.to_string()),
                    record,
                )));
                continue;
            }
        };
        let res = migrate_payment_method(
            state.clone(),
            req,
            merchant_id,
            merchant_account,
            key_store,
        )
        .await;
        result.push(PaymentMethodMigrationResponse::from((
            match res {
                Ok(services::api::ApplicationResponse::Json(response)) => Ok(response),
                Err(e) => Err(e.to_string()),
                // Any non-JSON success variant is treated as a failure.
                _ => Err("Failed to migrate payment method".to_string()),
            },
            record,
        )));
    }
    Ok(services::api::ApplicationResponse::Json(result))
}
/// Multipart form for the bulk payment-method migration endpoint: a CSV file
/// of records plus the target merchant (and optional connector) identifiers.
#[derive(Debug, MultipartForm)]
pub struct PaymentMethodsMigrateForm {
    // CSV upload; capped at 1MB by the multipart extractor.
    #[multipart(limit = "1MB")]
    pub file: Bytes,
    pub merchant_id: Text<common_utils::id_type::MerchantId>,
    pub merchant_connector_id: Text<Option<common_utils::id_type::MerchantConnectorAccountId>>,
}
/// Deserializes payment method records from raw CSV bytes, tagging each
/// record with its 1-based position in the file.
fn parse_csv(data: &[u8]) -> csv::Result<Vec<PaymentMethodRecord>> {
    let mut reader = Reader::from_reader(data);
    let mut parsed_records = Vec::new();
    let mut line_counter = 0;
    for entry in reader.deserialize() {
        let mut record: PaymentMethodRecord = entry?;
        line_counter += 1;
        record.line_number = Some(line_counter);
        parsed_records.push(record);
    }
    Ok(parsed_records)
}
/// Unpacks the multipart migration form into its merchant id, parsed CSV
/// records, and optional connector id.
///
/// # Errors
/// Returns `PreconditionFailed` when the uploaded CSV cannot be parsed.
pub fn get_payment_method_records(
    form: PaymentMethodsMigrateForm,
) -> Result<
    (
        common_utils::id_type::MerchantId,
        Vec<PaymentMethodRecord>,
        Option<common_utils::id_type::MerchantConnectorAccountId>,
    ),
    errors::ApiErrorResponse,
> {
    match parse_csv(form.file.data.to_bytes()) {
        Ok(records) => {
            // Each form field is cloned exactly once (the original cloned the
            // merchant id a second, redundant time when building the tuple).
            let merchant_id = form.merchant_id.clone();
            let mca_id = form.merchant_connector_id.clone();
            Ok((merchant_id, records, mca_id))
        }
        Err(e) => Err(errors::ApiErrorResponse::PreconditionFailed {
            message: e.to_string(),
        }),
    }
}
/// Validates a card's expiry month (1-12) and year (2- or 4-digit numeric).
///
/// # Errors
/// Returns `InvalidDataValue` when the month is not numeric, and
/// `PreconditionFailed` when the month or year is out of range.
#[instrument(skip_all)]
pub fn validate_card_expiry(
    card_exp_month: &masking::Secret<String>,
    card_exp_year: &masking::Secret<String>,
) -> errors::CustomResult<(), errors::ApiErrorResponse> {
    // Parse directly from the peeked string; the previous `.to_string()`
    // allocated an intermediate copy for no benefit.
    let exp_month = card_exp_month
        .peek()
        .parse::<u8>()
        .change_context(errors::ApiErrorResponse::InvalidDataValue {
            field_name: "card_exp_month",
        })?;
    ::cards::CardExpirationMonth::try_from(exp_month).change_context(
        errors::ApiErrorResponse::PreconditionFailed {
            message: "Invalid Expiry Month".to_string(),
        },
    )?;
    validate_card_exp_year(card_exp_year.peek().clone()).change_context(
        errors::ApiErrorResponse::PreconditionFailed {
            message: "Invalid Expiry Year".to_string(),
        },
    )?;
    Ok(())
}
/// Accepts only two-digit (YY) or four-digit (YYYY) numeric expiry years.
///
/// # Errors
/// Returns `InvalidValue` for any other length or a non-numeric value.
fn validate_card_exp_year(year: String) -> Result<(), errors::ValidationError> {
    // `year` is already a `String`; the previous `year.to_string()` copy
    // was redundant.
    if matches!(year.len(), 2 | 4) {
        year.parse::<u16>()
            .map_err(|_| errors::ValidationError::InvalidValue {
                message: "card_exp_year".to_string(),
            })?;
        Ok(())
    } else {
        Err(errors::ValidationError::InvalidValue {
            message: "invalid card expiration year".to_string(),
        })
    }
}
/// Outcome flags for the individual steps of a single record's migration.
// NOTE(review): `None` appears to mean the step was not attempted or not
// applicable — confirm against the builder's call sites.
#[derive(Debug)]
pub struct RecordMigrationStatus {
    pub card_migrated: Option<bool>,
    pub network_token_migrated: Option<bool>,
    pub connector_mandate_details_migrated: Option<bool>,
    pub network_transaction_migrated: Option<bool>,
}
/// Incrementally collects the per-step outcomes of a record migration;
/// finalized into a [`RecordMigrationStatus`] via `build`.
#[derive(Debug)]
pub struct RecordMigrationStatusBuilder {
    pub card_migrated: Option<bool>,
    pub network_token_migrated: Option<bool>,
    pub connector_mandate_details_migrated: Option<bool>,
    pub network_transaction_migrated: Option<bool>,
}
impl RecordMigrationStatusBuilder {
    /// Creates a builder with every migration flag unset.
    pub fn new() -> Self {
        Self {
            card_migrated: None,
            network_token_migrated: None,
            connector_mandate_details_migrated: None,
            network_transaction_migrated: None,
        }
    }
    /// Records whether the card itself was migrated.
    pub fn card_migrated(&mut self, card_migrated: bool) {
        self.card_migrated = Some(card_migrated);
    }
    /// Records the network token migration outcome, if known.
    pub fn network_token_migrated(&mut self, network_token_migrated: Option<bool>) {
        self.network_token_migrated = network_token_migrated;
    }
    /// Records the connector mandate details migration outcome, if known.
    pub fn connector_mandate_details_migrated(
        &mut self,
        connector_mandate_details_migrated: Option<bool>,
    ) {
        self.connector_mandate_details_migrated = connector_mandate_details_migrated;
    }
    /// Records the network transaction id migration outcome, if known.
    pub fn network_transaction_id_migrated(&mut self, network_transaction_migrated: Option<bool>) {
        self.network_transaction_migrated = network_transaction_migrated;
    }
    /// Consumes the builder, producing the final status snapshot.
    pub fn build(self) -> RecordMigrationStatus {
        let Self {
            card_migrated,
            network_token_migrated,
            connector_mandate_details_migrated,
            network_transaction_migrated,
        } = self;
        RecordMigrationStatus {
            card_migrated,
            network_token_migrated,
            connector_mandate_details_migrated,
            network_transaction_migrated,
        }
    }
}
impl Default for RecordMigrationStatusBuilder {
    /// Equivalent to [`RecordMigrationStatusBuilder::new`]: all flags unset.
    fn default() -> Self {
        Self::new()
    }
}
| 1,470 | 1,707 |
hyperswitch | crates/router/src/core/payment_methods/vault.rs | .rs | use common_enums::PaymentMethodType;
#[cfg(all(feature = "v2", feature = "payment_methods_v2"))]
use common_utils::request;
use common_utils::{
crypto::{DecodeMessage, EncodeMessage, GcmAes256},
ext_traits::{BytesExt, Encode},
generate_id_with_default_len, id_type,
pii::Email,
};
use error_stack::{report, ResultExt};
use masking::PeekInterface;
use router_env::{instrument, tracing};
use scheduler::{types::process_data, utils as process_tracker_utils};
#[cfg(feature = "payouts")]
use crate::types::api::payouts;
use crate::{
consts,
core::errors::{self, CustomResult, RouterResult},
db, logger, routes,
routes::metrics,
types::{
api, domain,
storage::{self, enums},
},
utils::StringExt,
};
#[cfg(all(feature = "v2", feature = "payment_methods_v2"))]
use crate::{
core::payment_methods::transformers as pm_transforms, headers, services, settings,
types::payment_methods as pm_types, utils::ConnectorResponseExt,
};
// Service name tag used for vault (locker) requests in this module.
const VAULT_SERVICE_NAME: &str = "CARD";
/// Extra identifiers recovered alongside a vaulted payment method payload.
pub struct SupplementaryVaultData {
    // Customer the vaulted value belongs to, when recorded at tokenization time.
    pub customer_id: Option<id_type::CustomerId>,
    // Payment method id, when recorded (in this file only the card impl
    // populates it; other payment methods set it to `None`).
    pub payment_method_id: Option<String>,
}
/// Types that can be stored in (and reconstructed from) the vault as a pair
/// of JSON strings: `value1` holds the primary payload and `value2` holds
/// supplementary data such as the customer linkage.
pub trait Vaultable: Sized {
    /// Serializes the primary payload for vault storage.
    fn get_value1(
        &self,
        customer_id: Option<id_type::CustomerId>,
    ) -> CustomResult<String, errors::VaultError>;
    /// Serializes supplementary data; defaults to an empty string for types
    /// that have none.
    fn get_value2(
        &self,
        _customer_id: Option<id_type::CustomerId>,
    ) -> CustomResult<String, errors::VaultError> {
        Ok(String::new())
    }
    /// Reconstructs the value and its supplementary vault data from the two
    /// stored JSON strings.
    fn from_values(
        value1: String,
        value2: String,
    ) -> CustomResult<(Self, SupplementaryVaultData), errors::VaultError>;
}
impl Vaultable for domain::Card {
    /// Serializes the primary card fields (PAN, expiry, holder, nickname)
    /// into the `value1` JSON payload stored in the vault.
    fn get_value1(
        &self,
        _customer_id: Option<id_type::CustomerId>,
    ) -> CustomResult<String, errors::VaultError> {
        let tokenized_card = domain::TokenizedCardValue1 {
            card_number: self.card_number.peek().clone(),
            exp_month: self.card_exp_month.peek().clone(),
            exp_year: self.card_exp_year.peek().clone(),
            card_holder_name: self.card_holder_name.clone(),
            nickname: self.nick_name.as_ref().map(|name| name.peek().clone()),
            card_last_four: None,
            card_token: None,
        };
        tokenized_card
            .encode_to_string_of_json()
            .change_context(errors::VaultError::RequestEncodingFailed)
            .attach_printable("Failed to encode card value1")
    }
    /// Serializes supplementary card data (CVC, customer linkage) as `value2`.
    fn get_value2(
        &self,
        customer_id: Option<id_type::CustomerId>,
    ) -> CustomResult<String, errors::VaultError> {
        let supplementary = domain::TokenizedCardValue2 {
            card_security_code: Some(self.card_cvc.peek().clone()),
            card_fingerprint: None,
            external_id: None,
            customer_id,
            payment_method_id: None,
        };
        supplementary
            .encode_to_string_of_json()
            .change_context(errors::VaultError::RequestEncodingFailed)
            .attach_printable("Failed to encode card value2")
    }
    /// Rebuilds a card (plus supplementary vault data) from the two stored
    /// JSON payloads.
    fn from_values(
        value1: String,
        value2: String,
    ) -> CustomResult<(Self, SupplementaryVaultData), errors::VaultError> {
        let parsed_value1: domain::TokenizedCardValue1 = value1
            .parse_struct("TokenizedCardValue1")
            .change_context(errors::VaultError::ResponseDeserializationFailed)
            .attach_printable("Could not deserialize into card value1")?;
        let parsed_value2: domain::TokenizedCardValue2 = value2
            .parse_struct("TokenizedCardValue2")
            .change_context(errors::VaultError::ResponseDeserializationFailed)
            .attach_printable("Could not deserialize into card value2")?;
        let card = Self {
            card_number: cards::CardNumber::try_from(parsed_value1.card_number)
                .change_context(errors::VaultError::ResponseDeserializationFailed)
                .attach_printable("Invalid card number format from the mock locker")?,
            card_exp_month: parsed_value1.exp_month.into(),
            card_exp_year: parsed_value1.exp_year.into(),
            card_cvc: parsed_value2.card_security_code.unwrap_or_default().into(),
            card_issuer: None,
            card_network: None,
            bank_code: None,
            card_issuing_country: None,
            card_type: None,
            nick_name: parsed_value1.nickname.map(masking::Secret::new),
            card_holder_name: parsed_value1.card_holder_name,
        };
        let supp_data = SupplementaryVaultData {
            customer_id: parsed_value2.customer_id,
            payment_method_id: parsed_value2.payment_method_id,
        };
        Ok((card, supp_data))
    }
}
impl Vaultable for domain::BankTransferData {
    /// Wraps the full bank transfer payload as the vault `value1` JSON.
    fn get_value1(
        &self,
        _customer_id: Option<id_type::CustomerId>,
    ) -> CustomResult<String, errors::VaultError> {
        domain::TokenizedBankTransferValue1 {
            data: self.to_owned(),
        }
        .encode_to_string_of_json()
        .change_context(errors::VaultError::RequestEncodingFailed)
        .attach_printable("Failed to encode bank transfer data")
    }
    /// Stores only the customer linkage as the vault `value2` JSON.
    fn get_value2(
        &self,
        customer_id: Option<id_type::CustomerId>,
    ) -> CustomResult<String, errors::VaultError> {
        domain::TokenizedBankTransferValue2 { customer_id }
            .encode_to_string_of_json()
            .change_context(errors::VaultError::RequestEncodingFailed)
            .attach_printable("Failed to encode bank transfer supplementary data")
    }
    /// Rebuilds the bank transfer data and supplementary info from stored JSON.
    fn from_values(
        value1: String,
        value2: String,
    ) -> CustomResult<(Self, SupplementaryVaultData), errors::VaultError> {
        let parsed_value1: domain::TokenizedBankTransferValue1 = value1
            .parse_struct("TokenizedBankTransferValue1")
            .change_context(errors::VaultError::ResponseDeserializationFailed)
            .attach_printable("Could not deserialize into bank transfer data")?;
        let parsed_value2: domain::TokenizedBankTransferValue2 = value2
            .parse_struct("TokenizedBankTransferValue2")
            .change_context(errors::VaultError::ResponseDeserializationFailed)
            .attach_printable("Could not deserialize into supplementary bank transfer data")?;
        let supp_data = SupplementaryVaultData {
            customer_id: parsed_value2.customer_id,
            payment_method_id: None,
        };
        Ok((parsed_value1.data, supp_data))
    }
}
impl Vaultable for domain::WalletData {
    /// Wraps the full wallet payload as the vault `value1` JSON.
    fn get_value1(
        &self,
        _customer_id: Option<id_type::CustomerId>,
    ) -> CustomResult<String, errors::VaultError> {
        domain::TokenizedWalletValue1 {
            data: self.to_owned(),
        }
        .encode_to_string_of_json()
        .change_context(errors::VaultError::RequestEncodingFailed)
        .attach_printable("Failed to encode wallet data value1")
    }
    /// Stores only the customer linkage as the vault `value2` JSON.
    fn get_value2(
        &self,
        customer_id: Option<id_type::CustomerId>,
    ) -> CustomResult<String, errors::VaultError> {
        domain::TokenizedWalletValue2 { customer_id }
            .encode_to_string_of_json()
            .change_context(errors::VaultError::RequestEncodingFailed)
            .attach_printable("Failed to encode wallet data value2")
    }
    /// Rebuilds the wallet data and supplementary info from stored JSON.
    fn from_values(
        value1: String,
        value2: String,
    ) -> CustomResult<(Self, SupplementaryVaultData), errors::VaultError> {
        let parsed_value1: domain::TokenizedWalletValue1 = value1
            .parse_struct("TokenizedWalletValue1")
            .change_context(errors::VaultError::ResponseDeserializationFailed)
            .attach_printable("Could not deserialize into wallet data value1")?;
        let parsed_value2: domain::TokenizedWalletValue2 = value2
            .parse_struct("TokenizedWalletValue2")
            .change_context(errors::VaultError::ResponseDeserializationFailed)
            .attach_printable("Could not deserialize into wallet data value2")?;
        let supp_data = SupplementaryVaultData {
            customer_id: parsed_value2.customer_id,
            payment_method_id: None,
        };
        Ok((parsed_value1.data, supp_data))
    }
}
impl Vaultable for domain::BankRedirectData {
    /// Wraps the full bank redirect payload as the vault `value1` JSON.
    fn get_value1(
        &self,
        _customer_id: Option<id_type::CustomerId>,
    ) -> CustomResult<String, errors::VaultError> {
        domain::TokenizedBankRedirectValue1 {
            data: self.to_owned(),
        }
        .encode_to_string_of_json()
        .change_context(errors::VaultError::RequestEncodingFailed)
        .attach_printable("Failed to encode bank redirect data")
    }
    /// Stores only the customer linkage as the vault `value2` JSON.
    fn get_value2(
        &self,
        customer_id: Option<id_type::CustomerId>,
    ) -> CustomResult<String, errors::VaultError> {
        domain::TokenizedBankRedirectValue2 { customer_id }
            .encode_to_string_of_json()
            .change_context(errors::VaultError::RequestEncodingFailed)
            .attach_printable("Failed to encode bank redirect supplementary data")
    }
    /// Rebuilds the bank redirect data and supplementary info from stored JSON.
    fn from_values(
        value1: String,
        value2: String,
    ) -> CustomResult<(Self, SupplementaryVaultData), errors::VaultError> {
        let parsed_value1: domain::TokenizedBankRedirectValue1 = value1
            .parse_struct("TokenizedBankRedirectValue1")
            .change_context(errors::VaultError::ResponseDeserializationFailed)
            .attach_printable("Could not deserialize into bank redirect data")?;
        let parsed_value2: domain::TokenizedBankRedirectValue2 = value2
            .parse_struct("TokenizedBankRedirectValue2")
            .change_context(errors::VaultError::ResponseDeserializationFailed)
            .attach_printable("Could not deserialize into supplementary bank redirect data")?;
        let supp_data = SupplementaryVaultData {
            customer_id: parsed_value2.customer_id,
            payment_method_id: None,
        };
        Ok((parsed_value1.data, supp_data))
    }
}
impl Vaultable for domain::BankDebitData {
    /// Serialize the bank debit payment data into the sensitive "value1"
    /// payload stored in the locker. The customer id is not part of value1.
    fn get_value1(
        &self,
        _customer_id: Option<id_type::CustomerId>,
    ) -> CustomResult<String, errors::VaultError> {
        let value1 = domain::TokenizedBankDebitValue1 {
            data: self.to_owned(),
        };
        value1
            .encode_to_string_of_json()
            .change_context(errors::VaultError::RequestEncodingFailed)
            .attach_printable("Failed to encode bank debit data")
    }

    /// Serialize the supplementary "value2" payload, which carries only the
    /// optional customer id.
    fn get_value2(
        &self,
        customer_id: Option<id_type::CustomerId>,
    ) -> CustomResult<String, errors::VaultError> {
        let value2 = domain::TokenizedBankDebitValue2 { customer_id };
        value2
            .encode_to_string_of_json()
            .change_context(errors::VaultError::RequestEncodingFailed)
            .attach_printable("Failed to encode bank debit supplementary data")
    }

    /// Reconstruct the bank debit data and supplementary vault data from the
    /// two serialized locker payloads.
    fn from_values(
        value1: String,
        value2: String,
    ) -> CustomResult<(Self, SupplementaryVaultData), errors::VaultError> {
        let value1: domain::TokenizedBankDebitValue1 = value1
            .parse_struct("TokenizedBankDebitValue1")
            .change_context(errors::VaultError::ResponseDeserializationFailed)
            .attach_printable("Could not deserialize into bank debit data")?;
        let value2: domain::TokenizedBankDebitValue2 = value2
            .parse_struct("TokenizedBankDebitValue2")
            .change_context(errors::VaultError::ResponseDeserializationFailed)
            .attach_printable("Could not deserialize into supplementary bank debit data")?;
        // Renamed from `bank_transfer_data`: this impl handles bank debit
        // data, not bank transfers.
        let bank_debit_data = value1.data;
        let supp_data = SupplementaryVaultData {
            customer_id: value2.customer_id,
            payment_method_id: None,
        };
        Ok((bank_debit_data, supp_data))
    }
}
/// Tagged wrapper distinguishing which payment method kind a serialized
/// locker payload belongs to. Each variant carries the already-serialized
/// value1 or value2 string for that payment method.
/// Serialized as `{"type": "<variant>", "value": "<string>"}` per the serde
/// tag/content attributes below.
#[derive(Debug, Clone, serde::Serialize, serde::Deserialize)]
#[serde(tag = "type", content = "value", rename_all = "snake_case")]
pub enum VaultPaymentMethod {
    Card(String),
    Wallet(String),
    BankTransfer(String),
    BankRedirect(String),
    BankDebit(String),
}
impl Vaultable for domain::PaymentMethodData {
    /// Build the "value1" payload by delegating to the concrete payment
    /// method's `get_value1`, wrapped in a [`VaultPaymentMethod`] tag so the
    /// variant can be recovered when reading back from the locker.
    fn get_value1(
        &self,
        customer_id: Option<id_type::CustomerId>,
    ) -> CustomResult<String, errors::VaultError> {
        let value1 = match self {
            Self::Card(card) => VaultPaymentMethod::Card(card.get_value1(customer_id)?),
            Self::Wallet(wallet) => VaultPaymentMethod::Wallet(wallet.get_value1(customer_id)?),
            Self::BankTransfer(bank_transfer) => {
                VaultPaymentMethod::BankTransfer(bank_transfer.get_value1(customer_id)?)
            }
            Self::BankRedirect(bank_redirect) => {
                VaultPaymentMethod::BankRedirect(bank_redirect.get_value1(customer_id)?)
            }
            Self::BankDebit(bank_debit) => {
                VaultPaymentMethod::BankDebit(bank_debit.get_value1(customer_id)?)
            }
            // Any other payment method variant cannot be vaulted here.
            _ => Err(errors::VaultError::PaymentMethodNotSupported)
                .attach_printable("Payment method not supported")?,
        };
        value1
            .encode_to_string_of_json()
            .change_context(errors::VaultError::RequestEncodingFailed)
            .attach_printable("Failed to encode payment method value1")
    }

    /// Build the supplementary "value2" payload the same way, with the same
    /// variant tag as value1.
    fn get_value2(
        &self,
        customer_id: Option<id_type::CustomerId>,
    ) -> CustomResult<String, errors::VaultError> {
        let value2 = match self {
            Self::Card(card) => VaultPaymentMethod::Card(card.get_value2(customer_id)?),
            Self::Wallet(wallet) => VaultPaymentMethod::Wallet(wallet.get_value2(customer_id)?),
            Self::BankTransfer(bank_transfer) => {
                VaultPaymentMethod::BankTransfer(bank_transfer.get_value2(customer_id)?)
            }
            Self::BankRedirect(bank_redirect) => {
                VaultPaymentMethod::BankRedirect(bank_redirect.get_value2(customer_id)?)
            }
            Self::BankDebit(bank_debit) => {
                VaultPaymentMethod::BankDebit(bank_debit.get_value2(customer_id)?)
            }
            // Any other payment method variant cannot be vaulted here.
            _ => Err(errors::VaultError::PaymentMethodNotSupported)
                .attach_printable("Payment method not supported")?,
        };
        value2
            .encode_to_string_of_json()
            .change_context(errors::VaultError::RequestEncodingFailed)
            .attach_printable("Failed to encode payment method value2")
    }

    /// Decode both tagged payloads and require their variant tags to agree
    /// before delegating to the matching payment method's `from_values`.
    fn from_values(
        value1: String,
        value2: String,
    ) -> CustomResult<(Self, SupplementaryVaultData), errors::VaultError> {
        let value1: VaultPaymentMethod = value1
            .parse_struct("PaymentMethodValue1")
            .change_context(errors::VaultError::ResponseDeserializationFailed)
            .attach_printable("Could not deserialize into payment method value 1")?;
        let value2: VaultPaymentMethod = value2
            .parse_struct("PaymentMethodValue2")
            .change_context(errors::VaultError::ResponseDeserializationFailed)
            .attach_printable("Could not deserialize into payment method value 2")?;
        match (value1, value2) {
            (VaultPaymentMethod::Card(mvalue1), VaultPaymentMethod::Card(mvalue2)) => {
                let (card, supp_data) = domain::Card::from_values(mvalue1, mvalue2)?;
                Ok((Self::Card(card), supp_data))
            }
            (VaultPaymentMethod::Wallet(mvalue1), VaultPaymentMethod::Wallet(mvalue2)) => {
                let (wallet, supp_data) = domain::WalletData::from_values(mvalue1, mvalue2)?;
                Ok((Self::Wallet(wallet), supp_data))
            }
            (
                VaultPaymentMethod::BankTransfer(mvalue1),
                VaultPaymentMethod::BankTransfer(mvalue2),
            ) => {
                let (bank_transfer, supp_data) =
                    domain::BankTransferData::from_values(mvalue1, mvalue2)?;
                Ok((Self::BankTransfer(Box::new(bank_transfer)), supp_data))
            }
            (
                VaultPaymentMethod::BankRedirect(mvalue1),
                VaultPaymentMethod::BankRedirect(mvalue2),
            ) => {
                let (bank_redirect, supp_data) =
                    domain::BankRedirectData::from_values(mvalue1, mvalue2)?;
                Ok((Self::BankRedirect(bank_redirect), supp_data))
            }
            (VaultPaymentMethod::BankDebit(mvalue1), VaultPaymentMethod::BankDebit(mvalue2)) => {
                let (bank_debit, supp_data) = domain::BankDebitData::from_values(mvalue1, mvalue2)?;
                Ok((Self::BankDebit(bank_debit), supp_data))
            }
            // Mismatched variant tags between value1 and value2 are rejected.
            _ => Err(errors::VaultError::PaymentMethodNotSupported)
                .attach_printable("Payment method not supported"),
        }
    }
}
#[cfg(feature = "payouts")]
impl Vaultable for api::CardPayout {
    /// Serialize the payout card's sensitive fields (PAN, expiry, holder
    /// name) into the "value1" locker payload. Nickname/last-four/token are
    /// always stored as `None` here.
    fn get_value1(
        &self,
        _customer_id: Option<id_type::CustomerId>,
    ) -> CustomResult<String, errors::VaultError> {
        let value1 = api::TokenizedCardValue1 {
            card_number: self.card_number.peek().clone(),
            exp_year: self.expiry_year.peek().clone(),
            exp_month: self.expiry_month.peek().clone(),
            name_on_card: self.card_holder_name.clone().map(|n| n.peek().to_string()),
            nickname: None,
            card_last_four: None,
            card_token: None,
        };
        value1
            .encode_to_string_of_json()
            .change_context(errors::VaultError::RequestEncodingFailed)
            .attach_printable("Failed to encode card value1")
    }

    /// Serialize the supplementary "value2" payload. Only the customer id is
    /// populated; the CVC/fingerprint/external-id fields are explicitly left
    /// empty for payout cards.
    fn get_value2(
        &self,
        customer_id: Option<id_type::CustomerId>,
    ) -> CustomResult<String, errors::VaultError> {
        let value2 = api::TokenizedCardValue2 {
            card_security_code: None,
            card_fingerprint: None,
            external_id: None,
            customer_id,
            payment_method_id: None,
        };
        value2
            .encode_to_string_of_json()
            .change_context(errors::VaultError::RequestEncodingFailed)
            .attach_printable("Failed to encode card value2")
    }

    /// Rebuild the payout card and supplementary data from the two locker
    /// payloads; an unparseable card number maps to `FetchCardFailed`.
    fn from_values(
        value1: String,
        value2: String,
    ) -> CustomResult<(Self, SupplementaryVaultData), errors::VaultError> {
        let value1: api::TokenizedCardValue1 = value1
            .parse_struct("TokenizedCardValue1")
            .change_context(errors::VaultError::ResponseDeserializationFailed)
            .attach_printable("Could not deserialize into card value1")?;
        let value2: api::TokenizedCardValue2 = value2
            .parse_struct("TokenizedCardValue2")
            .change_context(errors::VaultError::ResponseDeserializationFailed)
            .attach_printable("Could not deserialize into card value2")?;
        let card = Self {
            card_number: value1
                .card_number
                .parse()
                .map_err(|_| errors::VaultError::FetchCardFailed)?,
            expiry_month: value1.exp_month.into(),
            expiry_year: value1.exp_year.into(),
            card_holder_name: value1.name_on_card.map(masking::Secret::new),
        };
        let supp_data = SupplementaryVaultData {
            customer_id: value2.customer_id,
            payment_method_id: value2.payment_method_id,
        };
        Ok((card, supp_data))
    }
}
/// Sensitive ("value1") fields stored in the locker for a payout wallet.
/// `wallet_type` records which wallet variant the entry belongs to so it can
/// be reconstructed on read.
#[derive(Debug, serde::Serialize, serde::Deserialize)]
pub struct TokenizedWalletSensitiveValues {
    pub email: Option<Email>,
    pub telephone_number: Option<masking::Secret<String>>,
    pub wallet_id: Option<masking::Secret<String>>,
    pub wallet_type: PaymentMethodType,
}
/// Non-sensitive ("value2") fields stored alongside a payout wallet entry:
/// just the optional owning customer id.
#[derive(Debug, serde::Serialize, serde::Deserialize)]
pub struct TokenizedWalletInsensitiveValues {
    pub customer_id: Option<id_type::CustomerId>,
}
#[cfg(feature = "payouts")]
impl Vaultable for api::WalletPayout {
    /// Flatten the wallet variant into [`TokenizedWalletSensitiveValues`],
    /// tagging it with the corresponding `PaymentMethodType` so the variant
    /// can be recovered in `from_values`.
    fn get_value1(
        &self,
        _customer_id: Option<id_type::CustomerId>,
    ) -> CustomResult<String, errors::VaultError> {
        let value1 = match self {
            Self::Paypal(paypal_data) => TokenizedWalletSensitiveValues {
                email: paypal_data.email.clone(),
                telephone_number: paypal_data.telephone_number.clone(),
                wallet_id: paypal_data.paypal_id.clone(),
                wallet_type: PaymentMethodType::Paypal,
            },
            Self::Venmo(venmo_data) => TokenizedWalletSensitiveValues {
                email: None,
                telephone_number: venmo_data.telephone_number.clone(),
                wallet_id: None,
                wallet_type: PaymentMethodType::Venmo,
            },
        };
        value1
            .encode_to_string_of_json()
            .change_context(errors::VaultError::RequestEncodingFailed)
            .attach_printable("Failed to encode wallet data - TokenizedWalletSensitiveValues")
    }

    /// Serialize the non-sensitive "value2" payload (customer id only).
    fn get_value2(
        &self,
        customer_id: Option<id_type::CustomerId>,
    ) -> CustomResult<String, errors::VaultError> {
        let value2 = TokenizedWalletInsensitiveValues { customer_id };
        value2
            .encode_to_string_of_json()
            .change_context(errors::VaultError::RequestEncodingFailed)
            .attach_printable("Failed to encode data - TokenizedWalletInsensitiveValues")
    }

    /// Rebuild the wallet variant using the stored `wallet_type` tag; any
    /// other payment method type is rejected as unsupported.
    fn from_values(
        value1: String,
        value2: String,
    ) -> CustomResult<(Self, SupplementaryVaultData), errors::VaultError> {
        let value1: TokenizedWalletSensitiveValues = value1
            .parse_struct("TokenizedWalletSensitiveValues")
            .change_context(errors::VaultError::ResponseDeserializationFailed)
            .attach_printable("Could not deserialize into wallet data wallet_sensitive_data")?;
        let value2: TokenizedWalletInsensitiveValues = value2
            .parse_struct("TokenizedWalletInsensitiveValues")
            .change_context(errors::VaultError::ResponseDeserializationFailed)
            .attach_printable("Could not deserialize into wallet data wallet_insensitive_data")?;
        let wallet = match value1.wallet_type {
            PaymentMethodType::Paypal => Self::Paypal(api_models::payouts::Paypal {
                email: value1.email,
                telephone_number: value1.telephone_number,
                paypal_id: value1.wallet_id,
            }),
            PaymentMethodType::Venmo => Self::Venmo(api_models::payouts::Venmo {
                telephone_number: value1.telephone_number,
            }),
            _ => Err(errors::VaultError::PayoutMethodNotSupported)?,
        };
        let supp_data = SupplementaryVaultData {
            customer_id: value2.customer_id,
            payment_method_id: None,
        };
        Ok((wallet, supp_data))
    }
}
/// Sensitive ("value1") fields stored in the locker for a payout bank
/// account. Which fields are populated determines the bank transfer variant
/// (ACH, BACS, SEPA, or PIX) — see `BankPayout::from_values`.
#[derive(Debug, serde::Serialize, serde::Deserialize)]
pub struct TokenizedBankSensitiveValues {
    pub bank_account_number: Option<masking::Secret<String>>,
    pub bank_routing_number: Option<masking::Secret<String>>,
    pub bic: Option<masking::Secret<String>>,
    pub bank_sort_code: Option<masking::Secret<String>>,
    pub iban: Option<masking::Secret<String>>,
    pub pix_key: Option<masking::Secret<String>>,
    pub tax_id: Option<masking::Secret<String>>,
}
/// Non-sensitive ("value2") fields stored alongside a payout bank account:
/// customer id plus descriptive bank metadata.
#[derive(Debug, serde::Serialize, serde::Deserialize)]
pub struct TokenizedBankInsensitiveValues {
    pub customer_id: Option<id_type::CustomerId>,
    pub bank_name: Option<String>,
    pub bank_country_code: Option<api::enums::CountryAlpha2>,
    pub bank_city: Option<String>,
    pub bank_branch: Option<String>,
}
#[cfg(feature = "payouts")]
impl Vaultable for api::BankPayout {
    /// Flatten the bank variant into [`TokenizedBankSensitiveValues`]. Fields
    /// not used by the variant are stored as `None`; the populated/empty
    /// pattern is what `from_values` later uses to identify the variant.
    fn get_value1(
        &self,
        _customer_id: Option<id_type::CustomerId>,
    ) -> CustomResult<String, errors::VaultError> {
        let bank_sensitive_data = match self {
            Self::Ach(b) => TokenizedBankSensitiveValues {
                bank_account_number: Some(b.bank_account_number.clone()),
                bank_routing_number: Some(b.bank_routing_number.to_owned()),
                bic: None,
                bank_sort_code: None,
                iban: None,
                pix_key: None,
                tax_id: None,
            },
            Self::Bacs(b) => TokenizedBankSensitiveValues {
                bank_account_number: Some(b.bank_account_number.to_owned()),
                bank_routing_number: None,
                bic: None,
                bank_sort_code: Some(b.bank_sort_code.to_owned()),
                iban: None,
                pix_key: None,
                tax_id: None,
            },
            Self::Sepa(b) => TokenizedBankSensitiveValues {
                bank_account_number: None,
                bank_routing_number: None,
                bic: b.bic.to_owned(),
                bank_sort_code: None,
                iban: Some(b.iban.to_owned()),
                pix_key: None,
                tax_id: None,
            },
            Self::Pix(bank_details) => TokenizedBankSensitiveValues {
                bank_account_number: Some(bank_details.bank_account_number.to_owned()),
                bank_routing_number: None,
                bic: None,
                bank_sort_code: None,
                iban: None,
                pix_key: Some(bank_details.pix_key.to_owned()),
                tax_id: bank_details.tax_id.to_owned(),
            },
        };
        bank_sensitive_data
            .encode_to_string_of_json()
            .change_context(errors::VaultError::RequestEncodingFailed)
            .attach_printable("Failed to encode data - bank_sensitive_data")
    }

    /// Serialize the non-sensitive "value2" payload: customer id plus bank
    /// metadata. PIX carries the branch instead of country code/city.
    fn get_value2(
        &self,
        customer_id: Option<id_type::CustomerId>,
    ) -> CustomResult<String, errors::VaultError> {
        let bank_insensitive_data = match self {
            Self::Ach(b) => TokenizedBankInsensitiveValues {
                customer_id,
                bank_name: b.bank_name.to_owned(),
                bank_country_code: b.bank_country_code.to_owned(),
                bank_city: b.bank_city.to_owned(),
                bank_branch: None,
            },
            Self::Bacs(b) => TokenizedBankInsensitiveValues {
                customer_id,
                bank_name: b.bank_name.to_owned(),
                bank_country_code: b.bank_country_code.to_owned(),
                bank_city: b.bank_city.to_owned(),
                bank_branch: None,
            },
            Self::Sepa(bank_details) => TokenizedBankInsensitiveValues {
                customer_id,
                bank_name: bank_details.bank_name.to_owned(),
                bank_country_code: bank_details.bank_country_code.to_owned(),
                bank_city: bank_details.bank_city.to_owned(),
                bank_branch: None,
            },
            Self::Pix(bank_details) => TokenizedBankInsensitiveValues {
                customer_id,
                bank_name: bank_details.bank_name.to_owned(),
                bank_country_code: None,
                bank_city: None,
                bank_branch: bank_details.bank_branch.to_owned(),
            },
        };
        bank_insensitive_data
            .encode_to_string_of_json()
            .change_context(errors::VaultError::RequestEncodingFailed)
            .attach_printable("Failed to encode wallet data bank_insensitive_data")
    }

    /// Identify the bank variant from which sensitive fields are populated
    /// (mirroring the `get_value1` encoding above) and rebuild it. A field
    /// combination matching no known variant is a deserialization failure.
    fn from_values(
        bank_sensitive_data: String,
        bank_insensitive_data: String,
    ) -> CustomResult<(Self, SupplementaryVaultData), errors::VaultError> {
        let bank_sensitive_data: TokenizedBankSensitiveValues = bank_sensitive_data
            .parse_struct("TokenizedBankValue1")
            .change_context(errors::VaultError::ResponseDeserializationFailed)
            .attach_printable("Could not deserialize into bank data bank_sensitive_data")?;
        let bank_insensitive_data: TokenizedBankInsensitiveValues = bank_insensitive_data
            .parse_struct("TokenizedBankValue2")
            .change_context(errors::VaultError::ResponseDeserializationFailed)
            .attach_printable("Could not deserialize into wallet data bank_insensitive_data")?;
        let bank = match (
            // ACH + BACS + PIX
            bank_sensitive_data.bank_account_number.to_owned(),
            bank_sensitive_data.bank_routing_number.to_owned(), // ACH
            bank_sensitive_data.bank_sort_code.to_owned(),      // BACS
            // SEPA
            bank_sensitive_data.iban.to_owned(),
            bank_sensitive_data.bic,
            // PIX
            bank_sensitive_data.pix_key,
            bank_sensitive_data.tax_id,
        ) {
            // ACH: account number + routing number
            (Some(ban), Some(brn), None, None, None, None, None) => {
                Self::Ach(payouts::AchBankTransfer {
                    bank_account_number: ban,
                    bank_routing_number: brn,
                    bank_name: bank_insensitive_data.bank_name,
                    bank_country_code: bank_insensitive_data.bank_country_code,
                    bank_city: bank_insensitive_data.bank_city,
                })
            }
            // BACS: account number + sort code
            (Some(ban), None, Some(bsc), None, None, None, None) => {
                Self::Bacs(payouts::BacsBankTransfer {
                    bank_account_number: ban,
                    bank_sort_code: bsc,
                    bank_name: bank_insensitive_data.bank_name,
                    bank_country_code: bank_insensitive_data.bank_country_code,
                    bank_city: bank_insensitive_data.bank_city,
                })
            }
            // SEPA: IBAN required, BIC optional
            (None, None, None, Some(iban), bic, None, None) => {
                Self::Sepa(payouts::SepaBankTransfer {
                    iban,
                    bic,
                    bank_name: bank_insensitive_data.bank_name,
                    bank_country_code: bank_insensitive_data.bank_country_code,
                    bank_city: bank_insensitive_data.bank_city,
                })
            }
            // PIX: account number + pix key required, tax id optional
            (Some(ban), None, None, None, None, Some(pix_key), tax_id) => {
                Self::Pix(payouts::PixBankTransfer {
                    bank_account_number: ban,
                    bank_branch: bank_insensitive_data.bank_branch,
                    bank_name: bank_insensitive_data.bank_name,
                    pix_key,
                    tax_id,
                })
            }
            _ => Err(errors::VaultError::ResponseDeserializationFailed)?,
        };
        let supp_data = SupplementaryVaultData {
            customer_id: bank_insensitive_data.customer_id,
            payment_method_id: None,
        };
        Ok((bank, supp_data))
    }
}
/// Tagged wrapper distinguishing which payout method kind a serialized
/// locker payload belongs to; counterpart of [`VaultPaymentMethod`] for
/// payouts. Serialized as `{"type": "...", "value": "..."}`.
#[cfg(feature = "payouts")]
#[derive(Debug, Clone, serde::Serialize, serde::Deserialize)]
#[serde(tag = "type", content = "value", rename_all = "snake_case")]
pub enum VaultPayoutMethod {
    Card(String),
    Bank(String),
    Wallet(String),
}
#[cfg(feature = "payouts")]
impl Vaultable for api::PayoutMethodData {
    /// Build the "value1" payload by delegating to the concrete payout
    /// method's `get_value1`, wrapped in a [`VaultPayoutMethod`] tag.
    fn get_value1(
        &self,
        customer_id: Option<id_type::CustomerId>,
    ) -> CustomResult<String, errors::VaultError> {
        let value1 = match self {
            Self::Card(card) => VaultPayoutMethod::Card(card.get_value1(customer_id)?),
            Self::Bank(bank) => VaultPayoutMethod::Bank(bank.get_value1(customer_id)?),
            Self::Wallet(wallet) => VaultPayoutMethod::Wallet(wallet.get_value1(customer_id)?),
        };
        value1
            .encode_to_string_of_json()
            .change_context(errors::VaultError::RequestEncodingFailed)
            .attach_printable("Failed to encode payout method value1")
    }

    /// Build the supplementary "value2" payload with the same variant tag.
    fn get_value2(
        &self,
        customer_id: Option<id_type::CustomerId>,
    ) -> CustomResult<String, errors::VaultError> {
        let value2 = match self {
            Self::Card(card) => VaultPayoutMethod::Card(card.get_value2(customer_id)?),
            Self::Bank(bank) => VaultPayoutMethod::Bank(bank.get_value2(customer_id)?),
            Self::Wallet(wallet) => VaultPayoutMethod::Wallet(wallet.get_value2(customer_id)?),
        };
        value2
            .encode_to_string_of_json()
            .change_context(errors::VaultError::RequestEncodingFailed)
            .attach_printable("Failed to encode payout method value2")
    }

    /// Decode both tagged payloads and require their variant tags to agree
    /// before delegating to the matching payout method's `from_values`.
    fn from_values(
        value1: String,
        value2: String,
    ) -> CustomResult<(Self, SupplementaryVaultData), errors::VaultError> {
        let value1: VaultPayoutMethod = value1
            .parse_struct("VaultMethodValue1")
            .change_context(errors::VaultError::ResponseDeserializationFailed)
            .attach_printable("Could not deserialize into vault method value 1")?;
        let value2: VaultPayoutMethod = value2
            .parse_struct("VaultMethodValue2")
            .change_context(errors::VaultError::ResponseDeserializationFailed)
            .attach_printable("Could not deserialize into vault method value 2")?;
        match (value1, value2) {
            (VaultPayoutMethod::Card(mvalue1), VaultPayoutMethod::Card(mvalue2)) => {
                let (card, supp_data) = api::CardPayout::from_values(mvalue1, mvalue2)?;
                Ok((Self::Card(card), supp_data))
            }
            (VaultPayoutMethod::Bank(mvalue1), VaultPayoutMethod::Bank(mvalue2)) => {
                let (bank, supp_data) = api::BankPayout::from_values(mvalue1, mvalue2)?;
                Ok((Self::Bank(bank), supp_data))
            }
            (VaultPayoutMethod::Wallet(mvalue1), VaultPayoutMethod::Wallet(mvalue2)) => {
                let (wallet, supp_data) = api::WalletPayout::from_values(mvalue1, mvalue2)?;
                Ok((Self::Wallet(wallet), supp_data))
            }
            // Mismatched variant tags between value1 and value2 are rejected.
            _ => Err(errors::VaultError::PayoutMethodNotSupported)
                .attach_printable("Payout method not supported"),
        }
    }
}
/// Serialized (value1, value2) pair as stored by the mock tokenization
/// store. NOTE(review): presumably consumed by a mock/test locker
/// implementation — confirm at call sites outside this chunk.
#[derive(Debug, Clone, serde::Serialize, serde::Deserialize)]
pub struct MockTokenizeDBValue {
    pub value1: String,
    pub value2: String,
}
/// Namespace type grouping temporary-locker (Redis-backed) storage,
/// retrieval, and deletion of payment and payout method data.
pub struct Vault;

impl Vault {
    /// Fetch the tokenized payload stored under `lookup_key` and parse it
    /// back into payment method data plus its supplementary vault data.
    #[instrument(skip_all)]
    pub async fn get_payment_method_data_from_locker(
        state: &routes::SessionState,
        lookup_key: &str,
        merchant_key_store: &domain::MerchantKeyStore,
    ) -> RouterResult<(Option<domain::PaymentMethodData>, SupplementaryVaultData)> {
        let de_tokenize =
            get_tokenized_data(state, lookup_key, true, merchant_key_store.key.get_inner()).await?;
        let (payment_method, customer_id) =
            domain::PaymentMethodData::from_values(de_tokenize.value1, de_tokenize.value2)
                .change_context(errors::ApiErrorResponse::InternalServerError)
                .attach_printable("Error parsing Payment Method from Values")?;
        Ok((Some(payment_method), customer_id))
    }

    /// Serialize payment method data into its value1/value2 payloads, store
    /// them in the temporary locker, and schedule a process-tracker task to
    /// delete the tokenized data later. Returns the lookup key used.
    #[instrument(skip_all)]
    pub async fn store_payment_method_data_in_locker(
        state: &routes::SessionState,
        token_id: Option<String>,
        payment_method: &domain::PaymentMethodData,
        customer_id: Option<id_type::CustomerId>,
        pm: enums::PaymentMethod,
        merchant_key_store: &domain::MerchantKeyStore,
    ) -> RouterResult<String> {
        let value1 = payment_method
            .get_value1(customer_id.clone())
            .change_context(errors::ApiErrorResponse::InternalServerError)
            .attach_printable("Error getting Value1 for locker")?;
        let value2 = payment_method
            .get_value2(customer_id)
            .change_context(errors::ApiErrorResponse::InternalServerError)
            // Fixed typo in error context: was "Error getting Value12 for locker".
            .attach_printable("Error getting Value2 for locker")?;
        // Reuse the caller-provided token if any, else mint a fresh one.
        let lookup_key = token_id.unwrap_or_else(|| generate_id_with_default_len("token"));
        let lookup_key = create_tokenize(
            state,
            value1,
            Some(value2),
            lookup_key,
            merchant_key_store.key.get_inner(),
        )
        .await?;
        add_delete_tokenized_data_task(&*state.store, &lookup_key, pm).await?;
        metrics::TOKENIZED_DATA_COUNT.add(1, &[]);
        Ok(lookup_key)
    }

    /// Fetch the tokenized payload stored under `lookup_key` and parse it
    /// back into payout method data plus its supplementary vault data.
    #[cfg(feature = "payouts")]
    #[instrument(skip_all)]
    pub async fn get_payout_method_data_from_temporary_locker(
        state: &routes::SessionState,
        lookup_key: &str,
        merchant_key_store: &domain::MerchantKeyStore,
    ) -> RouterResult<(Option<api::PayoutMethodData>, SupplementaryVaultData)> {
        let de_tokenize =
            get_tokenized_data(state, lookup_key, true, merchant_key_store.key.get_inner()).await?;
        let (payout_method, supp_data) =
            api::PayoutMethodData::from_values(de_tokenize.value1, de_tokenize.value2)
                .change_context(errors::ApiErrorResponse::InternalServerError)
                .attach_printable("Error parsing Payout Method from Values")?;
        Ok((Some(payout_method), supp_data))
    }

    /// Serialize payout method data and store it in the temporary locker,
    /// returning the lookup key used. Unlike the payment-method variant, no
    /// deletion task is scheduled (see commented-out lines below).
    #[cfg(feature = "payouts")]
    #[instrument(skip_all)]
    pub async fn store_payout_method_data_in_locker(
        state: &routes::SessionState,
        token_id: Option<String>,
        payout_method: &api::PayoutMethodData,
        customer_id: Option<id_type::CustomerId>,
        merchant_key_store: &domain::MerchantKeyStore,
    ) -> RouterResult<String> {
        let value1 = payout_method
            .get_value1(customer_id.clone())
            .change_context(errors::ApiErrorResponse::InternalServerError)
            .attach_printable("Error getting Value1 for locker")?;
        let value2 = payout_method
            .get_value2(customer_id)
            .change_context(errors::ApiErrorResponse::InternalServerError)
            .attach_printable("Error getting Value2 for locker")?;
        let lookup_key =
            token_id.unwrap_or_else(|| generate_id_with_default_len("temporary_token"));
        let lookup_key = create_tokenize(
            state,
            value1,
            Some(value2),
            lookup_key,
            merchant_key_store.key.get_inner(),
        )
        .await?;
        // add_delete_tokenized_data_task(&*state.store, &lookup_key, pm).await?;
        // scheduler_metrics::TOKENIZED_DATA_COUNT.add(1, &[]);
        Ok(lookup_key)
    }

    /// Best-effort deletion of a locker entry: logs success or failure and
    /// never propagates an error to the caller.
    #[instrument(skip_all)]
    pub async fn delete_locker_payment_method_by_lookup_key(
        state: &routes::SessionState,
        lookup_key: &Option<String>,
    ) {
        if let Some(lookup_key) = lookup_key {
            delete_tokenized_data(state, lookup_key)
                .await
                .map(|_| logger::info!("Card From locker deleted Successfully"))
                .map_err(|err| logger::error!("Error: Deleting Card From Redis Locker : {:?}", err))
                .ok();
        }
    }
}
//------------------------------------------------TokenizeService------------------------------------------------
/// Build the namespaced Redis key under which a locker entry is stored.
#[inline(always)]
fn get_redis_locker_key(lookup_key: &str) -> String {
    let prefix = consts::LOCKER_REDIS_PREFIX;
    format!("{prefix}_{lookup_key}")
}
/// Encrypt the (value1, value2) payload with the merchant's AES-GCM-256 key
/// and store it in Redis under the locker-prefixed `lookup_key`, with an
/// expiry. Returns the `lookup_key` on success.
///
/// The write uses SETNX semantics, so an existing entry under the same key
/// is not overwritten.
#[instrument(skip(state, value1, value2))]
pub async fn create_tokenize(
    state: &routes::SessionState,
    value1: String,
    value2: Option<String>,
    lookup_key: String,
    encryption_key: &masking::Secret<Vec<u8>>,
) -> RouterResult<String> {
    let redis_key = get_redis_locker_key(lookup_key.as_str());
    let func = || async {
        metrics::CREATED_TOKENIZED_CARD.add(1, &[]);
        // value2 is optional; an absent value is stored as an empty string.
        let payload_to_be_encrypted = api::TokenizePayloadRequest {
            value1: value1.clone(),
            value2: value2.clone().unwrap_or_default(),
            lookup_key: lookup_key.clone(),
            service_name: VAULT_SERVICE_NAME.to_string(),
        };
        let payload = payload_to_be_encrypted
            .encode_to_string_of_json()
            .change_context(errors::ApiErrorResponse::InternalServerError)?;
        let encrypted_payload = GcmAes256
            .encode_message(encryption_key.peek().as_ref(), payload.as_bytes())
            .change_context(errors::ApiErrorResponse::InternalServerError)
            .attach_printable("Failed to encode redis temp locker data")?;
        let redis_conn = state
            .store
            .get_redis_conn()
            .change_context(errors::ApiErrorResponse::InternalServerError)
            .attach_printable("Failed to get redis connection")?;
        redis_conn
            .set_key_if_not_exists_with_expiry(
                &redis_key.as_str().into(),
                bytes::Bytes::from(encrypted_payload),
                Some(i64::from(consts::LOCKER_REDIS_EXPIRY_SECONDS)),
            )
            .await
            .map(|_| lookup_key.clone())
            .inspect_err(|error| {
                metrics::TEMP_LOCKER_FAILURES.add(1, &[]);
                logger::error!(?error, "Failed to store tokenized data in Redis");
            })
            .change_context(errors::ApiErrorResponse::InternalServerError)
            .attach_printable("Error from redis locker")
    };
    // Log the outcome either way, then propagate it unchanged.
    match func().await {
        Ok(s) => {
            logger::info!(
                "Insert payload in redis locker successful with lookup key: {:?}",
                redis_key
            );
            Ok(s)
        }
        Err(err) => {
            logger::error!("Redis Temp locker Failed: {:?}", err);
            Err(err)
        }
    }
}
/// Fetch the encrypted locker entry for `lookup_key` from Redis, decrypt it
/// with the merchant's AES-GCM-256 key, and parse it back into a
/// [`api::TokenizePayloadRequest`]. A missing/expired key surfaces as a 422
/// `UnprocessableEntity` response.
///
/// `_should_get_value2` is currently unused; both values are always returned.
#[instrument(skip(state))]
pub async fn get_tokenized_data(
    state: &routes::SessionState,
    lookup_key: &str,
    _should_get_value2: bool,
    encryption_key: &masking::Secret<Vec<u8>>,
) -> RouterResult<api::TokenizePayloadRequest> {
    let redis_key = get_redis_locker_key(lookup_key);
    let func = || async {
        metrics::GET_TOKENIZED_CARD.add(1, &[]);
        let redis_conn = state
            .store
            .get_redis_conn()
            .change_context(errors::ApiErrorResponse::InternalServerError)
            .attach_printable("Failed to get redis connection")?;
        let response = redis_conn
            .get_key::<bytes::Bytes>(&redis_key.as_str().into())
            .await;
        match response {
            Ok(resp) => {
                let decrypted_payload = GcmAes256
                    .decode_message(
                        encryption_key.peek().as_ref(),
                        masking::Secret::new(resp.into()),
                    )
                    .change_context(errors::ApiErrorResponse::InternalServerError)
                    .attach_printable("Failed to decode redis temp locker data")?;
                let get_response: api::TokenizePayloadRequest =
                    bytes::Bytes::from(decrypted_payload)
                        .parse_struct("TokenizePayloadRequest")
                        .change_context(errors::ApiErrorResponse::InternalServerError)
                        .attach_printable(
                            "Error getting TokenizePayloadRequest from tokenize response",
                        )?;
                Ok(get_response)
            }
            Err(err) => {
                // Redis read failure is treated as an invalid/expired token.
                metrics::TEMP_LOCKER_FAILURES.add(1, &[]);
                Err(err).change_context(errors::ApiErrorResponse::UnprocessableEntity {
                    message: "Token is invalid or expired".into(),
                })
            }
        }
    };
    // Log the outcome either way, then propagate it unchanged.
    match func().await {
        Ok(s) => {
            logger::info!(
                "Fetch payload in redis locker successful with lookup key: {:?}",
                redis_key
            );
            Ok(s)
        }
        Err(err) => {
            logger::error!("Redis Temp locker Failed: {:?}", err);
            Err(err)
        }
    }
}
/// Delete the locker entry for `lookup_key` from Redis. A `KeyNotDeleted`
/// reply (key absent, e.g. already expired) is reported as an internal
/// server error rather than treated as success.
#[instrument(skip(state))]
pub async fn delete_tokenized_data(
    state: &routes::SessionState,
    lookup_key: &str,
) -> RouterResult<()> {
    let redis_key = get_redis_locker_key(lookup_key);
    let func = || async {
        metrics::DELETED_TOKENIZED_CARD.add(1, &[]);
        let redis_conn = state
            .store
            .get_redis_conn()
            .change_context(errors::ApiErrorResponse::InternalServerError)
            .attach_printable("Failed to get redis connection")?;
        let response = redis_conn.delete_key(&redis_key.as_str().into()).await;
        match response {
            Ok(redis_interface::DelReply::KeyDeleted) => Ok(()),
            Ok(redis_interface::DelReply::KeyNotDeleted) => {
                Err(errors::ApiErrorResponse::InternalServerError)
                    .attach_printable("Token invalid or expired")
            }
            Err(err) => {
                metrics::TEMP_LOCKER_FAILURES.add(1, &[]);
                Err(errors::ApiErrorResponse::InternalServerError).attach_printable_lazy(|| {
                    format!("Failed to delete from redis locker: {err:?}")
                })
            }
        }
    };
    // Log the outcome either way, then propagate it unchanged.
    match func().await {
        Ok(s) => {
            logger::info!(
                "Delete payload in redis locker successful with lookup key: {:?}",
                redis_key
            );
            Ok(s)
        }
        Err(err) => {
            logger::error!("Redis Temp locker Failed: {:?}", err);
            Err(err)
        }
    }
}
/// Build a signed + encrypted POST request for a v2 vault operation `R`:
/// JWS-sign the payload with the vault private key, wrap it in a JWE body,
/// and target the locker host at `R`'s request URL with content-type and
/// tenant headers set.
#[cfg(all(feature = "v2", feature = "payment_methods_v2"))]
async fn create_vault_request<R: pm_types::VaultingInterface>(
    jwekey: &settings::Jwekey,
    locker: &settings::Locker,
    payload: Vec<u8>,
    tenant_id: id_type::TenantId,
) -> CustomResult<request::Request, errors::VaultError> {
    let private_key = jwekey.vault_private_key.peek().as_bytes();
    let jws = services::encryption::jws_sign_payload(
        &payload,
        &locker.locker_signing_key_id,
        private_key,
    )
    .await
    .change_context(errors::VaultError::RequestEncryptionFailed)?;
    let jwe_payload = pm_transforms::create_jwe_body_for_vault(jwekey, &jws).await?;
    let mut url = locker.host.to_owned();
    url.push_str(R::get_vaulting_request_url());
    let mut request = request::Request::new(services::Method::Post, &url);
    request.add_header(
        headers::CONTENT_TYPE,
        consts::VAULT_HEADER_CONTENT_TYPE.into(),
    );
    request.add_header(
        headers::X_TENANT_ID,
        tenant_id.get_string_repr().to_owned().into(),
    );
    request.set_body(request::RequestContent::Json(Box::new(jwe_payload)));
    Ok(request)
}
/// Execute a v2 vault operation `V`: build the signed/encrypted request,
/// call the vault service, then decrypt the JWE response body and return the
/// decrypted payload string for the caller to parse.
#[cfg(all(feature = "v2", feature = "payment_methods_v2"))]
#[instrument(skip_all)]
pub async fn call_to_vault<V: pm_types::VaultingInterface>(
    state: &routes::SessionState,
    payload: Vec<u8>,
) -> CustomResult<String, errors::VaultError> {
    let locker = &state.conf.locker;
    let jwekey = state.conf.jwekey.get_inner();
    let request =
        create_vault_request::<V>(jwekey, locker, payload, state.tenant.tenant_id.to_owned())
            .await?;
    let response = services::call_connector_api(state, request, V::get_vaulting_flow_name())
        .await
        .change_context(errors::VaultError::VaultAPIError);
    let jwe_body: services::JweBody = response
        .get_response_inner("JweBody")
        .change_context(errors::VaultError::ResponseDeserializationFailed)
        .attach_printable("Failed to get JweBody from vault response")?;
    let decrypted_payload = pm_transforms::get_decrypted_vault_response_payload(
        jwekey,
        jwe_body,
        locker.decryption_scheme.clone(),
    )
    .await
    .change_context(errors::VaultError::ResponseDecryptionFailed)
    .attach_printable("Error getting decrypted vault response payload")?;
    Ok(decrypted_payload)
}
/// Ask the v2 vault service for the fingerprint id of `data` (serialized to
/// a JSON string) under the given `key`, and return the id from the parsed
/// response.
#[cfg(all(feature = "v2", feature = "payment_methods_v2"))]
#[instrument(skip_all)]
pub async fn get_fingerprint_id_from_vault<D: domain::VaultingDataInterface + serde::Serialize>(
    state: &routes::SessionState,
    data: &D,
    key: String,
) -> CustomResult<String, errors::VaultError> {
    let data = serde_json::to_string(data)
        .change_context(errors::VaultError::RequestEncodingFailed)
        .attach_printable("Failed to encode Vaulting data to string")?;
    let payload = pm_types::VaultFingerprintRequest { key, data }
        .encode_to_vec()
        .change_context(errors::VaultError::RequestEncodingFailed)
        .attach_printable("Failed to encode VaultFingerprintRequest")?;
    let resp = call_to_vault::<pm_types::GetVaultFingerprint>(state, payload)
        .await
        .change_context(errors::VaultError::VaultAPIError)
        .attach_printable("Call to vault failed")?;
    let fingerprint_resp: pm_types::VaultFingerprintResponse = resp
        .parse_struct("VaultFingerprintResponse")
        .change_context(errors::VaultError::ResponseDeserializationFailed)
        .attach_printable("Failed to parse data into VaultFingerprintResponse")?;
    Ok(fingerprint_resp.fingerprint_id)
}
/// Store payment method vaulting data in the v2 vault under the merchant's
/// entity id. Reuses `existing_vault_id` when given, otherwise generates a
/// fresh UUIDv7-based vault id; TTL comes from locker configuration.
#[cfg(all(feature = "v2", feature = "payment_methods_v2"))]
#[instrument(skip_all)]
pub async fn add_payment_method_to_vault(
    state: &routes::SessionState,
    merchant_account: &domain::MerchantAccount,
    pmd: &domain::PaymentMethodVaultingData,
    existing_vault_id: Option<domain::VaultId>,
) -> CustomResult<pm_types::AddVaultResponse, errors::VaultError> {
    let payload = pm_types::AddVaultRequest {
        entity_id: merchant_account.get_id().to_owned(),
        vault_id: existing_vault_id
            .unwrap_or(domain::VaultId::generate(uuid::Uuid::now_v7().to_string())),
        data: pmd,
        ttl: state.conf.locker.ttl_for_storage_in_secs,
    }
    .encode_to_vec()
    .change_context(errors::VaultError::RequestEncodingFailed)
    .attach_printable("Failed to encode AddVaultRequest")?;
    let resp = call_to_vault::<pm_types::AddVault>(state, payload)
        .await
        .change_context(errors::VaultError::VaultAPIError)
        .attach_printable("Call to vault failed")?;
    let stored_pm_resp: pm_types::AddVaultResponse = resp
        .parse_struct("AddVaultResponse")
        .change_context(errors::VaultError::ResponseDeserializationFailed)
        .attach_printable("Failed to parse data into AddVaultResponse")?;
    Ok(stored_pm_resp)
}
/// Retrieve a payment method's vaulted data from the v2 vault using the
/// payment method's `locker_id`; a missing `locker_id` is a
/// `MissingRequiredField` error.
#[cfg(all(feature = "v2", feature = "payment_methods_v2"))]
#[instrument(skip_all)]
pub async fn retrieve_payment_method_from_vault(
    state: &routes::SessionState,
    merchant_account: &domain::MerchantAccount,
    pm: &domain::PaymentMethod,
) -> CustomResult<pm_types::VaultRetrieveResponse, errors::VaultError> {
    let payload = pm_types::VaultRetrieveRequest {
        entity_id: merchant_account.get_id().to_owned(),
        vault_id: pm
            .locker_id
            .clone()
            .ok_or(errors::VaultError::MissingRequiredField {
                field_name: "locker_id",
            })
            .attach_printable("Missing locker_id for VaultRetrieveRequest")?,
    }
    .encode_to_vec()
    .change_context(errors::VaultError::RequestEncodingFailed)
    .attach_printable("Failed to encode VaultRetrieveRequest")?;
    let resp = call_to_vault::<pm_types::VaultRetrieve>(state, payload)
        .await
        .change_context(errors::VaultError::VaultAPIError)
        .attach_printable("Call to vault failed")?;
    let stored_pm_resp: pm_types::VaultRetrieveResponse = resp
        .parse_struct("VaultRetrieveResponse")
        .change_context(errors::VaultError::ResponseDeserializationFailed)
        .attach_printable("Failed to parse data into VaultRetrieveResponse")?;
    Ok(stored_pm_resp)
}
/// Deletes the data stored under `vault_id` for this merchant from the
/// vault service and returns the vault's delete acknowledgement.
#[cfg(all(feature = "v2", feature = "payment_methods_v2"))]
#[instrument(skip_all)]
pub async fn delete_payment_method_data_from_vault(
    state: &routes::SessionState,
    merchant_account: &domain::MerchantAccount,
    vault_id: domain::VaultId,
) -> CustomResult<pm_types::VaultDeleteResponse, errors::VaultError> {
    let request = pm_types::VaultDeleteRequest {
        entity_id: merchant_account.get_id().to_owned(),
        vault_id,
    };
    let payload = request
        .encode_to_vec()
        .change_context(errors::VaultError::RequestEncodingFailed)
        .attach_printable("Failed to encode VaultDeleteRequest")?;
    let response = call_to_vault::<pm_types::VaultDelete>(state, payload)
        .await
        .change_context(errors::VaultError::VaultAPIError)
        .attach_printable("Call to vault failed")?;
    let deleted: pm_types::VaultDeleteResponse = response
        .parse_struct("VaultDeleteResponse")
        .change_context(errors::VaultError::ResponseDeserializationFailed)
        .attach_printable("Failed to parse data into VaultDeleteResponse")?;
    Ok(deleted)
}
// ********************************************** PROCESS TRACKER **********************************************
/// Registers a process-tracker task that deletes tokenized data for
/// `lookup_key`. A task that already exists for the same key (unique-key
/// violation on insert) is treated as success.
pub async fn add_delete_tokenized_data_task(
    db: &dyn db::StorageInterface,
    lookup_key: &str,
    pm: enums::PaymentMethod,
) -> RouterResult<()> {
    let runner = storage::ProcessTrackerRunner::DeleteTokenizeDataWorkflow;
    let process_tracker_id = format!("{runner}_{lookup_key}");
    let task = runner.to_string();
    let tag = ["BASILISK-V3"];
    let tracking_data = storage::TokenizeCoreWorkflow {
        lookup_key: lookup_key.to_owned(),
        pm,
    };
    // retry_count 0 yields the first scheduled run time.
    let schedule_time = get_delete_tokenize_schedule_time(db, pm, 0)
        .await
        .ok_or(errors::ApiErrorResponse::InternalServerError)
        .attach_printable("Failed to obtain initial process tracker schedule time")?;
    let process_tracker_entry = storage::ProcessTrackerNew::new(
        process_tracker_id,
        &task,
        runner,
        tag,
        tracking_data,
        None,
        schedule_time,
        common_types::consts::API_VERSION,
    )
    .change_context(errors::ApiErrorResponse::InternalServerError)
    .attach_printable("Failed to construct delete tokenized data process tracker task")?;
    match db.insert_process(process_tracker_entry).await {
        Ok(_) => Ok(()),
        // Duplicate task for the same key — already scheduled, nothing to do.
        Err(err) if err.current_context().is_db_unique_violation() => Ok(()),
        Err(_) => Err(report!(errors::ApiErrorResponse::InternalServerError)),
    }
}
/// Process-tracker workflow entry point: deserializes the tracker's
/// `TokenizeCoreWorkflow` payload, deletes the tokenized data it references,
/// and then either finishes the task or schedules a retry.
pub async fn start_tokenize_data_workflow(
    state: &routes::SessionState,
    tokenize_tracker: &storage::ProcessTracker,
) -> Result<(), errors::ProcessTrackerError> {
    let db = &*state.store;
    let delete_tokenize_data = serde_json::from_value::<storage::TokenizeCoreWorkflow>(
        tokenize_tracker.tracking_data.clone(),
    )
    .change_context(errors::ApiErrorResponse::InternalServerError)
    .attach_printable_lazy(|| {
        // Fixed: the message previously named the wrong type
        // ("DeleteTokenizeByTokenRequest"); the target is TokenizeCoreWorkflow.
        format!(
            "unable to convert into TokenizeCoreWorkflow {:?}",
            tokenize_tracker.tracking_data
        )
    })?;
    match delete_tokenized_data(state, &delete_tokenize_data.lookup_key).await {
        Ok(()) => {
            logger::info!("Card From locker deleted Successfully");
            // Mark task as finished so the scheduler does not pick it up again.
            db.as_scheduler()
                .finish_process_with_business_status(
                    tokenize_tracker.clone(),
                    diesel_models::process_tracker::business_status::COMPLETED_BY_PT,
                )
                .await?;
        }
        Err(err) => {
            logger::error!("Err: Deleting Card From Locker : {:?}", err);
            // Reschedule (or finish with RETRIES_EXCEEDED) and count the retry.
            retry_delete_tokenize(db, delete_tokenize_data.pm, tokenize_tracker.to_owned())
                .await?;
            metrics::RETRIED_DELETE_DATA_COUNT.add(1, &[]);
        }
    }
    Ok(())
}
/// Computes the next run time for the delete-tokenize task from the
/// per-payment-method retry mapping in Redis, falling back to the default
/// mapping when the key is absent or undecodable.
pub async fn get_delete_tokenize_schedule_time(
    db: &dyn db::StorageInterface,
    pm: enums::PaymentMethod,
    retry_count: i32,
) -> Option<time::PrimitiveDateTime> {
    let mapping = db::get_and_deserialize_key(
        db,
        &format!("pt_mapping_delete_{pm}_tokenize_data"),
        "PaymentMethodsPTMapping",
    )
    .await
    .unwrap_or_else(|error| {
        // A missing mapping is expected for most setups; use the defaults.
        logger::info!(?error, "Redis Mapping Error");
        process_data::PaymentMethodsPTMapping::default()
    });
    let time_delta = process_tracker_utils::get_pm_schedule_time(mapping, pm, retry_count + 1);
    process_tracker_utils::get_time_from_delta(time_delta)
}
/// Reschedules the delete-tokenize process-tracker task for its next retry,
/// or finishes it with `RETRIES_EXCEEDED` when the retry budget is exhausted
/// (i.e. no schedule time could be derived for the current retry count).
pub async fn retry_delete_tokenize(
    db: &dyn db::StorageInterface,
    pm: enums::PaymentMethod,
    pt: storage::ProcessTracker,
) -> Result<(), errors::ProcessTrackerError> {
    // `pt.retry_count` selects the next retry delay from the PT mapping.
    let schedule_time = get_delete_tokenize_schedule_time(db, pm, pt.retry_count).await;
    match schedule_time {
        Some(s_time) => {
            let retry_schedule = db
                .as_scheduler()
                .retry_process(pt, s_time)
                .await
                .map_err(Into::into);
            // NOTE(review): the reset counter is incremented even when
            // `retry_process` returned an error — confirm that is intended.
            metrics::TASKS_RESET_COUNT.add(
                1,
                router_env::metric_attributes!(("flow", "DeleteTokenizeData")),
            );
            retry_schedule
        }
        // No further retry slot: terminate the task.
        None => db
            .as_scheduler()
            .finish_process_with_business_status(
                pt,
                diesel_models::process_tracker::business_status::RETRIES_EXCEEDED,
            )
            .await
            .map_err(Into::into),
    }
}
// Fallback logic of old temp locker needs to be removed later
| 12,350 | 1,708 |
use std::str::FromStr;
use api_models::{enums as api_enums, payment_methods as payment_methods_api};
use common_utils::{
consts,
ext_traits::OptionExt,
generate_customer_id_of_default_length, id_type,
pii::Email,
type_name,
types::keymanager::{Identifier, KeyManagerState, ToEncryptable},
};
use error_stack::{report, ResultExt};
use hyperswitch_domain_models::type_encryption::{crypto_operation, CryptoOperation};
use masking::{ExposeInterface, PeekInterface, SwitchStrategy};
use router_env::logger;
use super::{
migration, CardNetworkTokenizeExecutor, NetworkTokenizationBuilder, NetworkTokenizationProcess,
NetworkTokenizationResponse, State, StoreLockerResponse, TransitionTo,
};
use crate::{
core::payment_methods::{
cards::{add_card_to_hs_locker, create_payment_method},
transformers as pm_transformers,
},
errors::{self, RouterResult},
types::{api, domain},
utils,
};
// Available states for card tokenization
// Zero-sized marker types: each names one step of the card-tokenization
// typestate machine driven by `NetworkTokenizationBuilder<'_, State>`.
pub struct TokenizeWithCard;
pub struct CardRequestValidated;
pub struct CardDetailsAssigned;
pub struct CustomerAssigned;
pub struct CardTokenized;
pub struct CardStored;
pub struct CardTokenStored;
pub struct PaymentMethodCreated;
impl State for TokenizeWithCard {}
impl State for CustomerAssigned {}
impl State for CardRequestValidated {}
impl State for CardDetailsAssigned {}
impl State for CardTokenized {}
impl State for CardStored {}
impl State for CardTokenStored {}
impl State for PaymentMethodCreated {}
// State transitions for card tokenization
// `TransitionTo<B> for A` records (at the type level) that state A may move
// to state B; together these pin the only legal order of builder calls.
impl TransitionTo<CardRequestValidated> for TokenizeWithCard {}
impl TransitionTo<CardDetailsAssigned> for CardRequestValidated {}
impl TransitionTo<CustomerAssigned> for CardDetailsAssigned {}
impl TransitionTo<CardTokenized> for CustomerAssigned {}
impl TransitionTo<CardTokenStored> for CardTokenized {}
impl TransitionTo<PaymentMethodCreated> for CardTokenStored {}
impl Default for NetworkTokenizationBuilder<'_, TokenizeWithCard> {
    // The initial state has a natural empty default.
    fn default() -> Self {
        Self::new()
    }
}
impl<'a> NetworkTokenizationBuilder<'a, TokenizeWithCard> {
    /// Starts the card-tokenization builder with every field unset.
    pub fn new() -> Self {
        Self {
            state: std::marker::PhantomData,
            customer: None,
            card: None,
            card_cvc: None,
            network_token: None,
            stored_card: None,
            stored_token: None,
            payment_method_response: None,
            card_tokenized: false,
            error_code: None,
            error_message: None,
        }
    }
    /// Marks request validation as complete; all collected data is carried
    /// forward unchanged — only the typestate advances.
    pub fn set_validate_result(self) -> NetworkTokenizationBuilder<'a, CardRequestValidated> {
        NetworkTokenizationBuilder {
            state: std::marker::PhantomData,
            customer: self.customer,
            card: self.card,
            card_cvc: self.card_cvc,
            network_token: self.network_token,
            stored_card: self.stored_card,
            stored_token: self.stored_token,
            payment_method_response: self.payment_method_response,
            card_tokenized: self.card_tokenized,
            error_code: self.error_code,
            error_message: self.error_message,
        }
    }
}
impl<'a> NetworkTokenizationBuilder<'a, CardRequestValidated> {
    /// Assembles the `CardDetail` from the raw request, enriched with BIN
    /// data when a `CardInfo` row was found.
    ///
    /// NOTE(review): for issuer/network/type/country the `map_or` means the
    /// BIN row takes full precedence — if `optional_card_info` is `Some` but
    /// its field is `None`, the request-supplied value is discarded. Confirm
    /// that is intended rather than a per-field fallback.
    pub fn set_card_details(
        self,
        card_req: &'a domain::TokenizeCardRequest,
        optional_card_info: Option<diesel_models::CardInfo>,
    ) -> NetworkTokenizationBuilder<'a, CardDetailsAssigned> {
        let card = domain::CardDetail {
            card_number: card_req.raw_card_number.clone(),
            card_exp_month: card_req.card_expiry_month.clone(),
            card_exp_year: card_req.card_expiry_year.clone(),
            // Bank code only ever comes from the BIN row, never the request.
            bank_code: optional_card_info
                .as_ref()
                .and_then(|card_info| card_info.bank_code.clone()),
            nick_name: card_req.nick_name.clone(),
            card_holder_name: card_req.card_holder_name.clone(),
            card_issuer: optional_card_info
                .as_ref()
                .map_or(card_req.card_issuer.clone(), |card_info| {
                    card_info.card_issuer.clone()
                }),
            card_network: optional_card_info
                .as_ref()
                .map_or(card_req.card_network.clone(), |card_info| {
                    card_info.card_network.clone()
                }),
            card_type: optional_card_info.as_ref().map_or(
                card_req
                    .card_type
                    .as_ref()
                    .map(|card_type| card_type.to_string()),
                |card_info| card_info.card_type.clone(),
            ),
            card_issuing_country: optional_card_info
                .as_ref()
                .map_or(card_req.card_issuing_country.clone(), |card_info| {
                    card_info.card_issuing_country.clone()
                }),
        };
        NetworkTokenizationBuilder {
            state: std::marker::PhantomData,
            card: Some(card),
            card_cvc: card_req.card_cvc.clone(),
            customer: self.customer,
            network_token: self.network_token,
            stored_card: self.stored_card,
            stored_token: self.stored_token,
            payment_method_response: self.payment_method_response,
            card_tokenized: self.card_tokenized,
            error_code: self.error_code,
            error_message: self.error_message,
        }
    }
}
impl<'a> NetworkTokenizationBuilder<'a, CardDetailsAssigned> {
    /// Attaches the (existing or freshly created) customer to the flow.
    pub fn set_customer(
        self,
        customer: &'a api::CustomerDetails,
    ) -> NetworkTokenizationBuilder<'a, CustomerAssigned> {
        NetworkTokenizationBuilder {
            state: std::marker::PhantomData,
            customer: Some(customer),
            card: self.card,
            card_cvc: self.card_cvc,
            network_token: self.network_token,
            stored_card: self.stored_card,
            stored_token: self.stored_token,
            payment_method_response: self.payment_method_response,
            card_tokenized: self.card_tokenized,
            error_code: self.error_code,
            error_message: self.error_message,
        }
    }
}
impl<'a> NetworkTokenizationBuilder<'a, CustomerAssigned> {
    /// Returns the card and CVC collected so far (cloned), for the
    /// tokenization call.
    pub fn get_optional_card_and_cvc(
        &self,
    ) -> (Option<domain::CardDetail>, Option<masking::Secret<String>>) {
        (self.card.clone(), self.card_cvc.clone())
    }
    /// Records the network token returned by the tokenization service.
    pub fn set_token_details(
        self,
        network_token: &'a NetworkTokenizationResponse,
    ) -> NetworkTokenizationBuilder<'a, CardTokenized> {
        NetworkTokenizationBuilder {
            state: std::marker::PhantomData,
            network_token: Some(&network_token.0),
            customer: self.customer,
            card: self.card,
            card_cvc: self.card_cvc,
            stored_card: self.stored_card,
            stored_token: self.stored_token,
            payment_method_response: self.payment_method_response,
            card_tokenized: self.card_tokenized,
            error_code: self.error_code,
            error_message: self.error_message,
        }
    }
}
impl<'a> NetworkTokenizationBuilder<'a, CardTokenized> {
    /// Records the locker response for the stored raw card.
    pub fn set_stored_card_response(
        self,
        store_card_response: &'a StoreLockerResponse,
    ) -> NetworkTokenizationBuilder<'a, CardStored> {
        NetworkTokenizationBuilder {
            state: std::marker::PhantomData,
            stored_card: Some(&store_card_response.store_card_resp),
            customer: self.customer,
            card: self.card,
            card_cvc: self.card_cvc,
            network_token: self.network_token,
            stored_token: self.stored_token,
            payment_method_response: self.payment_method_response,
            card_tokenized: self.card_tokenized,
            error_code: self.error_code,
            error_message: self.error_message,
        }
    }
}
impl<'a> NetworkTokenizationBuilder<'a, CardStored> {
    /// Records the locker response for the stored network token; only at
    /// this point is `card_tokenized` flipped to `true`.
    pub fn set_stored_token_response(
        self,
        store_token_response: &'a StoreLockerResponse,
    ) -> NetworkTokenizationBuilder<'a, CardTokenStored> {
        NetworkTokenizationBuilder {
            state: std::marker::PhantomData,
            card_tokenized: true,
            stored_token: Some(&store_token_response.store_token_resp),
            customer: self.customer,
            card: self.card,
            card_cvc: self.card_cvc,
            network_token: self.network_token,
            stored_card: self.stored_card,
            payment_method_response: self.payment_method_response,
            error_code: self.error_code,
            error_message: self.error_message,
        }
    }
}
impl<'a> NetworkTokenizationBuilder<'a, CardTokenStored> {
    /// Records the freshly created payment method and derives the masked,
    /// locker-style card summary from the card captured earlier in the flow.
    pub fn set_payment_method_response(
        self,
        payment_method: &'a domain::PaymentMethod,
    ) -> NetworkTokenizationBuilder<'a, PaymentMethodCreated> {
        // Only masked / derivable card fields are surfaced in the response.
        let card_detail_from_locker = self.card.as_ref().map(|card| api::CardDetailFromLocker {
            scheme: None,
            issuer_country: card.card_issuing_country.clone(),
            // `get_last4`/`get_card_isin` borrow the card number and return
            // owned values — the previous pre-clones were redundant.
            last4_digits: Some(card.card_number.get_last4()),
            card_number: None,
            // Fixed: dropped the redundant double `.clone()` on both expiry fields.
            expiry_month: Some(card.card_exp_month.clone()),
            expiry_year: Some(card.card_exp_year.clone()),
            card_token: None,
            card_holder_name: card.card_holder_name.clone(),
            card_fingerprint: None,
            nick_name: card.nick_name.clone(),
            card_network: card.card_network.clone(),
            card_isin: Some(card.card_number.get_card_isin()),
            card_issuer: card.card_issuer.clone(),
            card_type: card.card_type.clone(),
            saved_to_locker: true,
        });
        let payment_method_response = api::PaymentMethodResponse {
            merchant_id: payment_method.merchant_id.clone(),
            customer_id: Some(payment_method.customer_id.clone()),
            payment_method_id: payment_method.payment_method_id.clone(),
            payment_method: payment_method.payment_method,
            payment_method_type: payment_method.payment_method_type,
            card: card_detail_from_locker,
            recurring_enabled: true,
            installment_payment_enabled: false,
            metadata: payment_method.metadata.clone(),
            created: Some(payment_method.created_at),
            last_used_at: Some(payment_method.last_used_at),
            client_secret: payment_method.client_secret.clone(),
            bank_transfer: None,
            payment_experience: None,
        };
        NetworkTokenizationBuilder {
            state: std::marker::PhantomData,
            payment_method_response: Some(payment_method_response),
            customer: self.customer,
            card: self.card,
            card_cvc: self.card_cvc,
            network_token: self.network_token,
            stored_card: self.stored_card,
            stored_token: self.stored_token,
            card_tokenized: self.card_tokenized,
            error_code: self.error_code,
            error_message: self.error_message,
        }
    }
}
impl NetworkTokenizationBuilder<'_, PaymentMethodCreated> {
    /// Terminal state: converts the accumulated builder state into the API
    /// response, cloning the borrowed customer/error fields into owned values.
    pub fn build(self) -> api::CardNetworkTokenizeResponse {
        api::CardNetworkTokenizeResponse {
            payment_method_response: self.payment_method_response,
            customer: self.customer.cloned(),
            card_tokenized: self.card_tokenized,
            error_code: self.error_code.cloned(),
            error_message: self.error_message.cloned(),
            // Below field is mutated by caller functions for batched API operations
            tokenization_data: None,
        }
    }
}
// Specific executor for card tokenization
impl CardNetworkTokenizeExecutor<'_, domain::TokenizeCardRequest> {
    /// Validates the card-tokenization request (expiry, customer id) and
    /// looks up the customer; returns `Some(details)` when found, `None` when
    /// the customer does not exist but enough data was supplied to create one.
    ///
    /// NOTE(review): the `map_or` on the fetch result means an actual DB
    /// error (not just "not found") is also routed into the customer-creation
    /// branch rather than propagated — confirm that is intentional.
    pub async fn validate_request_and_fetch_optional_customer(
        &self,
    ) -> RouterResult<Option<api::CustomerDetails>> {
        // Validate card's expiry
        migration::validate_card_expiry(&self.data.card_expiry_month, &self.data.card_expiry_year)?;
        // Validate customer ID
        let customer_id = self
            .customer
            .customer_id
            .as_ref()
            .get_required_value("customer_id")
            .change_context(errors::ApiErrorResponse::MissingRequiredField {
                field_name: "customer.customer_id",
            })?;
        // Fetch customer details if present
        let db = &*self.state.store;
        let key_manager_state: &KeyManagerState = &self.state.into();
        db.find_customer_optional_by_customer_id_merchant_id(
            key_manager_state,
            customer_id,
            self.merchant_account.get_id(),
            self.key_store,
            self.merchant_account.storage_scheme,
        )
        .await
        .inspect_err(|err| logger::info!("Error fetching customer: {:?}", err))
        .change_context(errors::ApiErrorResponse::InternalServerError)
        .map_or(
            // Validate if customer creation is feasible: at least one of
            // name / email / phone must be present in the request.
            if self.customer.name.is_some()
                || self.customer.email.is_some()
                || self.customer.phone.is_some()
            {
                Ok(None)
            } else {
                Err(report!(errors::ApiErrorResponse::MissingRequiredFields {
                    field_names: vec!["customer.name", "customer.email", "customer.phone"],
                }))
            },
            // If found, send back CustomerDetails from DB
            |optional_customer| {
                Ok(optional_customer.map(|customer| api::CustomerDetails {
                    id: customer.customer_id.clone(),
                    name: customer.name.clone().map(|name| name.into_inner()),
                    email: customer.email.clone().map(Email::from),
                    phone: customer.phone.clone().map(|phone| phone.into_inner()),
                    phone_country_code: customer.phone_country_code.clone(),
                }))
            },
        )
    }
    /// Creates a customer from the request's customer details: encrypts
    /// name/email/phone via the key manager, inserts the record, and returns
    /// the plain-text details with the newly generated customer id.
    ///
    /// NOTE(review): the request's `customer_id` is required and validated,
    /// but the inserted record uses a freshly generated id — the requested id
    /// only appears in the failure log. Confirm this is intended.
    pub async fn create_customer(&self) -> RouterResult<api::CustomerDetails> {
        let db = &*self.state.store;
        let customer_id = self
            .customer
            .customer_id
            .as_ref()
            .get_required_value("customer_id")
            .change_context(errors::ApiErrorResponse::MissingRequiredField {
                field_name: "customer_id",
            })?;
        let key_manager_state: &KeyManagerState = &self.state.into();
        // Batch-encrypt the PII fields under the merchant's key.
        let encrypted_data = crypto_operation(
            key_manager_state,
            type_name!(domain::Customer),
            CryptoOperation::BatchEncrypt(domain::FromRequestEncryptableCustomer::to_encryptable(
                domain::FromRequestEncryptableCustomer {
                    name: self.customer.name.clone(),
                    email: self
                        .customer
                        .email
                        .clone()
                        .map(|email| email.expose().switch_strategy()),
                    phone: self.customer.phone.clone(),
                },
            )),
            Identifier::Merchant(self.merchant_account.get_id().clone()),
            self.key_store.key.get_inner().peek(),
        )
        .await
        .inspect_err(|err| logger::info!("Error encrypting customer: {:?}", err))
        .and_then(|val| val.try_into_batchoperation())
        .change_context(errors::ApiErrorResponse::InternalServerError)
        .attach_printable("Failed to encrypt customer")?;
        let encryptable_customer =
            domain::FromRequestEncryptableCustomer::from_encryptable(encrypted_data)
                .change_context(errors::ApiErrorResponse::InternalServerError)
                .attach_printable("Failed to form EncryptableCustomer")?;
        let new_customer_id = generate_customer_id_of_default_length();
        let domain_customer = domain::Customer {
            customer_id: new_customer_id.clone(),
            merchant_id: self.merchant_account.get_id().clone(),
            name: encryptable_customer.name,
            email: encryptable_customer.email.map(|email| {
                utils::Encryptable::new(
                    email.clone().into_inner().switch_strategy(),
                    email.into_encrypted(),
                )
            }),
            phone: encryptable_customer.phone,
            description: None,
            phone_country_code: self.customer.phone_country_code.to_owned(),
            metadata: None,
            connector_customer: None,
            created_at: common_utils::date_time::now(),
            modified_at: common_utils::date_time::now(),
            address_id: None,
            default_payment_method_id: None,
            updated_by: None,
            version: common_types::consts::API_VERSION,
        };
        db.insert_customer(
            domain_customer,
            key_manager_state,
            self.key_store,
            self.merchant_account.storage_scheme,
        )
        .await
        .inspect_err(|err| logger::info!("Error creating a customer: {:?}", err))
        .change_context(errors::ApiErrorResponse::InternalServerError)
        .attach_printable_lazy(|| {
            format!(
                "Failed to insert customer [id - {:?}] for merchant [id - {:?}]",
                customer_id,
                self.merchant_account.get_id()
            )
        })?;
        // Echo back the plain-text details under the new id.
        Ok(api::CustomerDetails {
            id: new_customer_id,
            name: self.customer.name.clone(),
            email: self.customer.email.clone(),
            phone: self.customer.phone.clone(),
            phone_country_code: self.customer.phone_country_code.clone(),
        })
    }
pub async fn store_card_and_token_in_locker(
&self,
network_token: &NetworkTokenizationResponse,
card: &domain::CardDetail,
customer_id: &id_type::CustomerId,
) -> RouterResult<StoreLockerResponse> {
let stored_card_resp = self.store_card_in_locker(card, customer_id).await?;
let stored_token_resp = self
.store_network_token_in_locker(
network_token,
customer_id,
card.card_holder_name.clone(),
card.nick_name.clone(),
)
.await?;
let store_locker_response = StoreLockerResponse {
store_card_resp: stored_card_resp,
store_token_resp: stored_token_resp,
};
Ok(store_locker_response)
}
pub async fn store_card_in_locker(
&self,
card: &domain::CardDetail,
customer_id: &id_type::CustomerId,
) -> RouterResult<pm_transformers::StoreCardRespPayload> {
let merchant_id = self.merchant_account.get_id();
let locker_req =
pm_transformers::StoreLockerReq::LockerCard(pm_transformers::StoreCardReq {
merchant_id: merchant_id.clone(),
merchant_customer_id: customer_id.clone(),
card: payment_methods_api::Card {
card_number: card.card_number.clone(),
card_exp_month: card.card_exp_month.clone(),
card_exp_year: card.card_exp_year.clone(),
card_isin: Some(card.card_number.get_card_isin().clone()),
name_on_card: card.card_holder_name.clone(),
nick_name: card
.nick_name
.as_ref()
.map(|nick_name| nick_name.clone().expose()),
card_brand: None,
},
requestor_card_reference: None,
ttl: self.state.conf.locker.ttl_for_storage_in_secs,
});
let stored_resp = add_card_to_hs_locker(
self.state,
&locker_req,
customer_id,
api_enums::LockerChoice::HyperswitchCardVault,
)
.await
.inspect_err(|err| logger::info!("Error adding card in locker: {:?}", err))
.change_context(errors::ApiErrorResponse::InternalServerError)?;
Ok(stored_resp)
}
    /// Creates the payment-method record for a tokenized card: encrypts both
    /// the original card and the network token data, then persists the record
    /// with references to the stored card and stored token in the locker.
    pub async fn create_payment_method(
        &self,
        stored_locker_resp: &StoreLockerResponse,
        network_token_details: &NetworkTokenizationResponse,
        card_details: &domain::CardDetail,
        customer_id: &id_type::CustomerId,
    ) -> RouterResult<domain::PaymentMethod> {
        let payment_method_id = common_utils::generate_id(consts::ID_LENGTH, "pm");
        // Form encrypted PM data (original card)
        let enc_pm_data = self.encrypt_card(card_details, true).await?;
        // Form encrypted network token data
        let enc_token_data = self
            .encrypt_network_token(network_token_details, card_details, true)
            .await?;
        // Form PM create entry
        let payment_method_create = api::PaymentMethodCreate {
            payment_method: Some(api_enums::PaymentMethod::Card),
            // A card type string that doesn't parse to a known subtype is dropped.
            payment_method_type: card_details
                .card_type
                .as_ref()
                .and_then(|card_type| api_enums::PaymentMethodType::from_str(card_type).ok()),
            payment_method_issuer: card_details.card_issuer.clone(),
            payment_method_issuer_code: None,
            card: Some(api::CardDetail {
                card_number: card_details.card_number.clone(),
                card_exp_month: card_details.card_exp_month.clone(),
                card_exp_year: card_details.card_exp_year.clone(),
                card_holder_name: card_details.card_holder_name.clone(),
                nick_name: card_details.nick_name.clone(),
                card_issuing_country: card_details.card_issuing_country.clone(),
                card_network: card_details.card_network.clone(),
                card_issuer: card_details.card_issuer.clone(),
                card_type: card_details.card_type.clone(),
            }),
            metadata: None,
            customer_id: Some(customer_id.clone()),
            card_network: card_details
                .card_network
                .as_ref()
                .map(|network| network.to_string()),
            bank_transfer: None,
            wallet: None,
            client_secret: None,
            payment_method_data: None,
            billing: None,
            connector_mandate_details: None,
            network_transaction_id: None,
        };
        // Persist with both locker references: raw card and network token.
        create_payment_method(
            self.state,
            &payment_method_create,
            customer_id,
            &payment_method_id,
            Some(stored_locker_resp.store_card_resp.card_reference.clone()),
            self.merchant_account.get_id(),
            None,
            None,
            Some(enc_pm_data),
            self.key_store,
            None,
            None,
            None,
            self.merchant_account.storage_scheme,
            None,
            None,
            network_token_details.1.clone(),
            Some(stored_locker_resp.store_token_resp.card_reference.clone()),
            Some(enc_token_data),
        )
        .await
    }
}
| 4,543 | 1,709 |
use api_models::enums as api_enums;
use common_utils::{
ext_traits::OptionExt, fp_utils::when, pii::Email, types::keymanager::KeyManagerState,
};
use error_stack::{report, ResultExt};
use masking::Secret;
use router_env::logger;
use super::{
CardNetworkTokenizeExecutor, NetworkTokenizationBuilder, NetworkTokenizationProcess,
NetworkTokenizationResponse, State, TransitionTo,
};
use crate::{
core::payment_methods::transformers as pm_transformers,
errors::{self, RouterResult},
types::{api, domain},
};
// Available states for payment method tokenization
// Zero-sized marker types for the payment-method-id driven tokenization
// typestate machine (`NetworkTokenizationBuilder<'_, State>`).
pub struct TokenizeWithPmId;
pub struct PmValidated;
pub struct PmFetched;
pub struct PmAssigned;
pub struct PmTokenized;
pub struct PmTokenStored;
pub struct PmTokenUpdated;
impl State for TokenizeWithPmId {}
impl State for PmValidated {}
impl State for PmFetched {}
impl State for PmAssigned {}
impl State for PmTokenized {}
impl State for PmTokenStored {}
impl State for PmTokenUpdated {}
// State transitions for payment method tokenization
// Each `TransitionTo` impl pins one legal step in the flow's order.
impl TransitionTo<PmFetched> for TokenizeWithPmId {}
impl TransitionTo<PmValidated> for PmFetched {}
impl TransitionTo<PmAssigned> for PmValidated {}
impl TransitionTo<PmTokenized> for PmAssigned {}
impl TransitionTo<PmTokenStored> for PmTokenized {}
impl TransitionTo<PmTokenUpdated> for PmTokenStored {}
impl Default for NetworkTokenizationBuilder<'_, TokenizeWithPmId> {
    // The initial state has a natural empty default.
    fn default() -> Self {
        Self::new()
    }
}
impl<'a> NetworkTokenizationBuilder<'a, TokenizeWithPmId> {
    /// Starts the payment-method tokenization builder with all fields unset.
    pub fn new() -> Self {
        Self {
            state: std::marker::PhantomData,
            customer: None,
            card: None,
            card_cvc: None,
            network_token: None,
            stored_card: None,
            stored_token: None,
            payment_method_response: None,
            card_tokenized: false,
            error_code: None,
            error_message: None,
        }
    }
    /// Records the fetched payment method; card details are left `None` here
    /// since they are populated later from the locker.
    pub fn set_payment_method(
        self,
        payment_method: &domain::PaymentMethod,
    ) -> NetworkTokenizationBuilder<'a, PmFetched> {
        let payment_method_response = api::PaymentMethodResponse {
            merchant_id: payment_method.merchant_id.clone(),
            customer_id: Some(payment_method.customer_id.clone()),
            payment_method_id: payment_method.payment_method_id.clone(),
            payment_method: payment_method.payment_method,
            payment_method_type: payment_method.payment_method_type,
            recurring_enabled: true,
            installment_payment_enabled: false,
            metadata: payment_method.metadata.clone(),
            created: Some(payment_method.created_at),
            last_used_at: Some(payment_method.last_used_at),
            client_secret: payment_method.client_secret.clone(),
            card: None,
            bank_transfer: None,
            payment_experience: None,
        };
        NetworkTokenizationBuilder {
            state: std::marker::PhantomData,
            payment_method_response: Some(payment_method_response),
            customer: self.customer,
            card: self.card,
            card_cvc: self.card_cvc,
            network_token: self.network_token,
            stored_card: self.stored_card,
            stored_token: self.stored_token,
            card_tokenized: self.card_tokenized,
            error_code: self.error_code,
            error_message: self.error_message,
        }
    }
}
impl<'a> NetworkTokenizationBuilder<'a, PmFetched> {
    /// Marks validation as done and attaches the customer fetched from the DB.
    pub fn set_validate_result(
        self,
        customer: &'a api::CustomerDetails,
    ) -> NetworkTokenizationBuilder<'a, PmValidated> {
        NetworkTokenizationBuilder {
            state: std::marker::PhantomData,
            customer: Some(customer),
            card: self.card,
            card_cvc: self.card_cvc,
            network_token: self.network_token,
            stored_card: self.stored_card,
            stored_token: self.stored_token,
            payment_method_response: self.payment_method_response,
            card_tokenized: self.card_tokenized,
            error_code: self.error_code,
            error_message: self.error_message,
        }
    }
}
impl<'a> NetworkTokenizationBuilder<'a, PmValidated> {
    /// Builds the `CardDetail` from the card retrieved from the locker,
    /// enriched with BIN (`CardInfo`) metadata where available.
    pub fn set_card_details(
        self,
        card_from_locker: &'a api_models::payment_methods::Card,
        optional_card_info: Option<diesel_models::CardInfo>,
        card_cvc: Option<Secret<String>>,
    ) -> NetworkTokenizationBuilder<'a, PmAssigned> {
        let card = domain::CardDetail {
            card_number: card_from_locker.card_number.clone(),
            card_exp_month: card_from_locker.card_exp_month.clone(),
            card_exp_year: card_from_locker.card_exp_year.clone(),
            // All issuer/network/type metadata comes solely from the BIN row.
            bank_code: optional_card_info
                .as_ref()
                .and_then(|card_info| card_info.bank_code.clone()),
            nick_name: card_from_locker
                .nick_name
                .as_ref()
                .map(|nick_name| Secret::new(nick_name.clone())),
            card_holder_name: card_from_locker.name_on_card.clone(),
            card_issuer: optional_card_info
                .as_ref()
                .and_then(|card_info| card_info.card_issuer.clone()),
            card_network: optional_card_info
                .as_ref()
                .and_then(|card_info| card_info.card_network.clone()),
            card_type: optional_card_info
                .as_ref()
                .and_then(|card_info| card_info.card_type.clone()),
            card_issuing_country: optional_card_info
                .as_ref()
                .and_then(|card_info| card_info.card_issuing_country.clone()),
        };
        NetworkTokenizationBuilder {
            state: std::marker::PhantomData,
            card: Some(card),
            card_cvc,
            customer: self.customer,
            network_token: self.network_token,
            stored_card: self.stored_card,
            stored_token: self.stored_token,
            payment_method_response: self.payment_method_response,
            card_tokenized: self.card_tokenized,
            error_code: self.error_code,
            error_message: self.error_message,
        }
    }
}
impl<'a> NetworkTokenizationBuilder<'a, PmAssigned> {
    /// Returns the card and CVC collected so far (cloned), for the
    /// tokenization call.
    pub fn get_optional_card_and_cvc(
        &self,
    ) -> (Option<domain::CardDetail>, Option<Secret<String>>) {
        (self.card.clone(), self.card_cvc.clone())
    }
    /// Records the network token.
    ///
    /// NOTE(review): here `card_tokenized` is set as soon as the token is
    /// issued, while the card flow only sets it after the token is stored in
    /// the locker — confirm the difference is intended.
    pub fn set_token_details(
        self,
        network_token: &'a NetworkTokenizationResponse,
    ) -> NetworkTokenizationBuilder<'a, PmTokenized> {
        NetworkTokenizationBuilder {
            state: std::marker::PhantomData,
            network_token: Some(&network_token.0),
            card_tokenized: true,
            customer: self.customer,
            card: self.card,
            card_cvc: self.card_cvc,
            stored_card: self.stored_card,
            stored_token: self.stored_token,
            payment_method_response: self.payment_method_response,
            error_code: self.error_code,
            error_message: self.error_message,
        }
    }
}
impl<'a> NetworkTokenizationBuilder<'a, PmTokenized> {
    /// Records the locker response for the stored network token.
    pub fn set_stored_token_response(
        self,
        store_token_response: &'a pm_transformers::StoreCardRespPayload,
    ) -> NetworkTokenizationBuilder<'a, PmTokenStored> {
        NetworkTokenizationBuilder {
            state: std::marker::PhantomData,
            stored_token: Some(store_token_response),
            customer: self.customer,
            card: self.card,
            card_cvc: self.card_cvc,
            network_token: self.network_token,
            stored_card: self.stored_card,
            payment_method_response: self.payment_method_response,
            card_tokenized: self.card_tokenized,
            error_code: self.error_code,
            error_message: self.error_message,
        }
    }
}
impl<'a> NetworkTokenizationBuilder<'a, PmTokenStored> {
    /// Records the updated payment method after the token reference has been
    /// persisted; no card summary is attached in this flow.
    pub fn set_payment_method(
        self,
        payment_method: &'a domain::PaymentMethod,
    ) -> NetworkTokenizationBuilder<'a, PmTokenUpdated> {
        let payment_method_response = api::PaymentMethodResponse {
            merchant_id: payment_method.merchant_id.clone(),
            customer_id: Some(payment_method.customer_id.clone()),
            payment_method_id: payment_method.payment_method_id.clone(),
            payment_method: payment_method.payment_method,
            payment_method_type: payment_method.payment_method_type,
            recurring_enabled: true,
            installment_payment_enabled: false,
            metadata: payment_method.metadata.clone(),
            created: Some(payment_method.created_at),
            last_used_at: Some(payment_method.last_used_at),
            client_secret: payment_method.client_secret.clone(),
            card: None,
            bank_transfer: None,
            payment_experience: None,
        };
        NetworkTokenizationBuilder {
            state: std::marker::PhantomData,
            payment_method_response: Some(payment_method_response),
            customer: self.customer,
            card: self.card,
            card_cvc: self.card_cvc,
            stored_token: self.stored_token,
            network_token: self.network_token,
            stored_card: self.stored_card,
            card_tokenized: self.card_tokenized,
            error_code: self.error_code,
            error_message: self.error_message,
        }
    }
}
impl NetworkTokenizationBuilder<'_, PmTokenUpdated> {
    /// Terminal state: converts the accumulated builder state into the API
    /// response, cloning the borrowed customer/error fields into owned values.
    pub fn build(self) -> api::CardNetworkTokenizeResponse {
        api::CardNetworkTokenizeResponse {
            payment_method_response: self.payment_method_response,
            customer: self.customer.cloned(),
            card_tokenized: self.card_tokenized,
            error_code: self.error_code.cloned(),
            error_message: self.error_message.cloned(),
            // Below field is mutated by caller functions for batched API operations
            tokenization_data: None,
        }
    }
}
// Specific executor for payment method tokenization
impl CardNetworkTokenizeExecutor<'_, domain::TokenizePaymentMethodRequest> {
    /// Looks up the payment method by id. Both "not found" shapes (a diesel
    /// `NotFound` wrapped in `DatabaseError`, or `ValueNotFound`) surface as
    /// `InvalidRequestData`; any other storage error becomes a 500.
    pub async fn fetch_payment_method(
        &self,
        payment_method_id: &str,
    ) -> RouterResult<domain::PaymentMethod> {
        self.state
            .store
            .find_payment_method(
                &self.state.into(),
                self.key_store,
                payment_method_id,
                self.merchant_account.storage_scheme,
            )
            .await
            .map_err(|err| match err.current_context() {
                errors::StorageError::DatabaseError(err)
                    if matches!(
                        err.current_context(),
                        diesel_models::errors::DatabaseError::NotFound
                    ) =>
                {
                    report!(errors::ApiErrorResponse::InvalidRequestData {
                        message: "Invalid payment_method_id".into(),
                    })
                }
                errors::StorageError::ValueNotFound(_) => {
                    report!(errors::ApiErrorResponse::InvalidRequestData {
                        message: "Invalid payment_method_id".to_string(),
                    })
                }
                // Anything else is unexpected — log and hide behind a 500.
                err => {
                    logger::info!("Error fetching payment_method: {:?}", err);
                    report!(errors::ApiErrorResponse::InternalServerError)
                }
            })
    }
pub async fn validate_request_and_locker_reference_and_customer(
&self,
payment_method: &domain::PaymentMethod,
) -> RouterResult<(String, api::CustomerDetails)> {
// Ensure customer ID matches
let customer_id_in_req = self
.customer
.customer_id
.clone()
.get_required_value("customer_id")
.change_context(errors::ApiErrorResponse::MissingRequiredField {
field_name: "customer",
})?;
when(payment_method.customer_id != customer_id_in_req, || {
Err(report!(errors::ApiErrorResponse::InvalidRequestData {
message: "Payment method does not belong to the customer".to_string()
}))
})?;
// Ensure payment method is card
match payment_method.payment_method {
Some(api_enums::PaymentMethod::Card) => Ok(()),
Some(_) => Err(report!(errors::ApiErrorResponse::InvalidRequestData {
message: "Payment method is not card".to_string()
})),
None => Err(report!(errors::ApiErrorResponse::InvalidRequestData {
message: "Payment method is empty".to_string()
})),
}?;
// Ensure card is not tokenized already
when(
payment_method
.network_token_requestor_reference_id
.is_some(),
|| {
Err(report!(errors::ApiErrorResponse::InvalidRequestData {
message: "Card is already tokenized".to_string()
}))
},
)?;
// Ensure locker reference is present
let locker_id = payment_method.locker_id.clone().ok_or(report!(
errors::ApiErrorResponse::InvalidRequestData {
message: "locker_id not found for given payment_method_id".to_string()
}
))?;
// Fetch customer
let db = &*self.state.store;
let key_manager_state: &KeyManagerState = &self.state.into();
let customer = db
.find_customer_by_customer_id_merchant_id(
key_manager_state,
&payment_method.customer_id,
self.merchant_account.get_id(),
self.key_store,
self.merchant_account.storage_scheme,
)
.await
.inspect_err(|err| logger::info!("Error fetching customer: {:?}", err))
.change_context(errors::ApiErrorResponse::InternalServerError)?;
let customer_details = api::CustomerDetails {
id: customer.customer_id.clone(),
name: customer.name.clone().map(|name| name.into_inner()),
email: customer.email.clone().map(Email::from),
phone: customer.phone.clone().map(|phone| phone.into_inner()),
phone_country_code: customer.phone_country_code.clone(),
};
Ok((locker_id, customer_details))
}
pub async fn update_payment_method(
&self,
store_token_response: &pm_transformers::StoreCardRespPayload,
payment_method: domain::PaymentMethod,
network_token_details: &NetworkTokenizationResponse,
card_details: &domain::CardDetail,
) -> RouterResult<domain::PaymentMethod> {
// Form encrypted network token data
let enc_token_data = self
.encrypt_network_token(network_token_details, card_details, true)
.await?;
// Update payment method
let payment_method_update = diesel_models::PaymentMethodUpdate::NetworkTokenDataUpdate {
network_token_requestor_reference_id: network_token_details.1.clone(),
network_token_locker_id: Some(store_token_response.card_reference.clone()),
network_token_payment_method_data: Some(enc_token_data.into()),
};
self.state
.store
.update_payment_method(
&self.state.into(),
self.key_store,
payment_method,
payment_method_update,
self.merchant_account.storage_scheme,
)
.await
.inspect_err(|err| logger::info!("Error updating payment method: {:?}", err))
.change_context(errors::ApiErrorResponse::InternalServerError)
}
}
| 3,182 | 1,710 |
hyperswitch | crates/router/src/core/blocklist/utils.rs | .rs | use api_models::blocklist as api_blocklist;
use common_enums::MerchantDecision;
use common_utils::errors::CustomResult;
use diesel_models::configs;
use error_stack::ResultExt;
use masking::StrongSecret;
use super::{errors, transformers::generate_fingerprint, SessionState};
use crate::{
consts,
core::{
errors::{RouterResult, StorageErrorExt},
payments::PaymentData,
},
logger,
types::{domain, storage, transformers::ForeignInto},
utils,
};
/// Removes a single entry from the merchant's blocklist, keyed either by a
/// (possibly extended) card BIN or by a fingerprint id, and returns the
/// deleted record in API form.
pub async fn delete_entry_from_blocklist(
    state: &SessionState,
    merchant_id: &common_utils::id_type::MerchantId,
    request: api_blocklist::DeleteFromBlocklistRequest,
) -> RouterResult<api_blocklist::DeleteFromBlocklistResponse> {
    let deleted_entry = match request {
        // Both BIN variants are keyed by the bin string itself, so they share one path.
        api_blocklist::DeleteFromBlocklistRequest::CardBin(bin)
        | api_blocklist::DeleteFromBlocklistRequest::ExtendedCardBin(bin) => {
            delete_card_bin_blocklist_entry(state, &bin, merchant_id).await?
        }
        api_blocklist::DeleteFromBlocklistRequest::Fingerprint(fingerprint_id) => state
            .store
            .delete_blocklist_entry_by_merchant_id_fingerprint_id(merchant_id, &fingerprint_id)
            .await
            .to_not_found_response(errors::ApiErrorResponse::GenericNotFoundError {
                message: "no blocklist record for the given fingerprint id was found".to_string(),
            })?,
    };
    Ok(deleted_entry.foreign_into())
}
/// Enables or disables the blocklist guard for a merchant by upserting the
/// merchant's guard config row, returning the resulting guard state.
pub async fn toggle_blocklist_guard_for_merchant(
    state: &SessionState,
    merchant_id: &common_utils::id_type::MerchantId,
    query: api_blocklist::ToggleBlocklistQuery,
) -> CustomResult<api_blocklist::ToggleBlocklistResponse, errors::ApiErrorResponse> {
    let key = merchant_id.get_blocklist_guard_key();
    let status_value = query.status.to_string();
    match state.store.find_config_by_key(&key).await {
        Ok(_existing) => {
            // A guard config already exists; overwrite its value.
            let update = configs::ConfigUpdate::Update {
                config: Some(status_value.clone()),
            };
            state
                .store
                .update_config_by_key(&key, update)
                .await
                .change_context(errors::ApiErrorResponse::InternalServerError)
                .attach_printable("Error enabling the blocklist guard")?;
        }
        Err(err) if err.current_context().is_db_not_found() => {
            // First toggle for this merchant; create the config row.
            state
                .store
                .insert_config(configs::ConfigNew {
                    key: key.clone(),
                    config: status_value.clone(),
                })
                .await
                .change_context(errors::ApiErrorResponse::InternalServerError)
                .attach_printable("Error enabling the blocklist guard")?;
        }
        Err(error) => {
            logger::error!(?error);
            Err(error)
                .change_context(errors::ApiErrorResponse::InternalServerError)
                .attach_printable("Error enabling the blocklist guard")?;
        }
    };
    let blocklist_guard_status = if query.status { "enabled" } else { "disabled" }.to_string();
    Ok(api_blocklist::ToggleBlocklistResponse {
        blocklist_guard_status,
    })
}
/// Lists blocklist entries of a given data kind for a merchant, paginated by
/// the query's limit/offset, mapped into their API representation.
pub async fn list_blocklist_entries_for_merchant(
    state: &SessionState,
    merchant_id: &common_utils::id_type::MerchantId,
    query: api_blocklist::ListBlocklistQuery,
) -> RouterResult<Vec<api_blocklist::BlocklistResponse>> {
    let records = state
        .store
        .list_blocklist_entries_by_merchant_id_data_kind(
            merchant_id,
            query.data_kind,
            query.limit.into(),
            query.offset.into(),
        )
        .await
        .to_not_found_response(errors::ApiErrorResponse::GenericNotFoundError {
            message: "no blocklist records found".to_string(),
        })?;
    Ok(records.into_iter().map(ForeignInto::foreign_into).collect())
}
/// Shared validator: accepts only an all-ASCII-digit string of exactly
/// `expected_len` characters; otherwise returns `InvalidDataFormat` with the
/// caller-supplied human-readable format description.
fn validate_bin_of_length(
    bin: &str,
    expected_len: usize,
    expected_format: &str,
) -> RouterResult<()> {
    if bin.len() == expected_len && bin.chars().all(|c| c.is_ascii_digit()) {
        Ok(())
    } else {
        Err(errors::ApiErrorResponse::InvalidDataFormat {
            field_name: "data".to_string(),
            expected_format: expected_format.to_string(),
        }
        .into())
    }
}
/// Validates a standard 6-digit card BIN.
fn validate_card_bin(bin: &str) -> RouterResult<()> {
    validate_bin_of_length(bin, 6, "a 6 digit number")
}
/// Validates an 8-digit extended card BIN.
fn validate_extended_card_bin(bin: &str) -> RouterResult<()> {
    validate_bin_of_length(bin, 8, "an 8 digit number")
}
/// Adds an entry (card BIN, extended BIN, or fingerprint) to the merchant's
/// blocklist.
///
/// BIN variants are syntax-validated first; all variants reject duplicates
/// with a `PreconditionFailed` error. Returns the inserted record in API form.
pub async fn insert_entry_into_blocklist(
    state: &SessionState,
    merchant_id: &common_utils::id_type::MerchantId,
    to_block: api_blocklist::AddToBlocklistRequest,
) -> RouterResult<api_blocklist::AddToBlocklistResponse> {
    let blocklist_entry = match &to_block {
        api_blocklist::AddToBlocklistRequest::CardBin(bin) => {
            validate_card_bin(bin)?;
            duplicate_check_insert_bin(
                bin,
                state,
                merchant_id,
                common_enums::BlocklistDataKind::CardBin,
            )
            .await?
        }
        api_blocklist::AddToBlocklistRequest::ExtendedCardBin(bin) => {
            validate_extended_card_bin(bin)?;
            duplicate_check_insert_bin(
                bin,
                state,
                merchant_id,
                common_enums::BlocklistDataKind::ExtendedCardBin,
            )
            .await?
        }
        api_blocklist::AddToBlocklistRequest::Fingerprint(fingerprint_id) => {
            // Fingerprints are pre-computed by the caller; only duplicate-check here.
            let blocklist_entry_result = state
                .store
                .find_blocklist_entry_by_merchant_id_fingerprint_id(merchant_id, fingerprint_id)
                .await;
            match blocklist_entry_result {
                Ok(_) => {
                    return Err(errors::ApiErrorResponse::PreconditionFailed {
                        message: "data associated with the given fingerprint is already blocked"
                            .to_string(),
                    }
                    .into());
                }
                // if it is a db not found error, we can proceed as normal
                Err(inner) if inner.current_context().is_db_not_found() => {}
                err @ Err(_) => {
                    err.change_context(errors::ApiErrorResponse::InternalServerError)
                        .attach_printable("error fetching blocklist entry from table")?;
                }
            }
            state
                .store
                .insert_blocklist_entry(storage::BlocklistNew {
                    merchant_id: merchant_id.to_owned(),
                    fingerprint_id: fingerprint_id.clone(),
                    // Use the same enum path as the BIN arms above for consistency
                    // (was `api_models::enums::enums::BlocklistDataKind`, which
                    // resolves to the same re-exported type).
                    data_kind: common_enums::BlocklistDataKind::PaymentMethod,
                    metadata: None,
                    created_at: common_utils::date_time::now(),
                })
                .await
                .change_context(errors::ApiErrorResponse::InternalServerError)
                .attach_printable("failed to add fingerprint to blocklist")?
        }
    };
    Ok(blocklist_entry.foreign_into())
}
/// Fetches the merchant's fingerprint-hash secret from config storage,
/// generating and persisting a fresh one on first use.
pub async fn get_merchant_fingerprint_secret(
    state: &SessionState,
    merchant_id: &common_utils::id_type::MerchantId,
) -> RouterResult<String> {
    let key = merchant_id.get_merchant_fingerprint_secret_key();
    match state.store.find_config_by_key(&key).await {
        Ok(config) => Ok(config.config),
        Err(err) if err.current_context().is_db_not_found() => {
            // No secret exists yet: mint one and store it before returning.
            let fresh_secret = utils::generate_id(consts::FINGERPRINT_SECRET_LENGTH, "fs");
            state
                .store
                .insert_config(storage::ConfigNew {
                    key,
                    config: fresh_secret.clone(),
                })
                .await
                .change_context(errors::ApiErrorResponse::InternalServerError)
                .attach_printable("unable to create new fingerprint secret for merchant")?;
            Ok(fresh_secret)
        }
        Err(err) => Err(err)
            .change_context(errors::ApiErrorResponse::InternalServerError)
            .attach_printable("error fetching merchant fingerprint secret"),
    }
}
/// Inserts a BIN-based blocklist row after verifying no entry already exists
/// for the same merchant and BIN.
async fn duplicate_check_insert_bin(
    bin: &str,
    state: &SessionState,
    merchant_id: &common_utils::id_type::MerchantId,
    data_kind: common_enums::BlocklistDataKind,
) -> RouterResult<storage::Blocklist> {
    let lookup = state
        .store
        .find_blocklist_entry_by_merchant_id_fingerprint_id(merchant_id, bin)
        .await;
    match lookup {
        // An existing row means this BIN is already blocked; surface that to the caller.
        Ok(_) => {
            return Err(errors::ApiErrorResponse::PreconditionFailed {
                message: "provided bin is already blocked".to_string(),
            }
            .into());
        }
        // Not-found is the happy path: the BIN is free to insert.
        Err(e) if e.current_context().is_db_not_found() => {}
        err @ Err(_) => {
            return err
                .change_context(errors::ApiErrorResponse::InternalServerError)
                .attach_printable("unable to fetch blocklist entry");
        }
    }
    let new_entry = storage::BlocklistNew {
        merchant_id: merchant_id.to_owned(),
        fingerprint_id: bin.to_string(),
        data_kind,
        metadata: None,
        created_at: common_utils::date_time::now(),
    };
    state
        .store
        .insert_blocklist_entry(new_entry)
        .await
        .change_context(errors::ApiErrorResponse::InternalServerError)
        .attach_printable("error inserting pm blocklist item")
}
/// Deletes the blocklist entry keyed by the given card BIN for the merchant.
async fn delete_card_bin_blocklist_entry(
    state: &SessionState,
    bin: &str,
    merchant_id: &common_utils::id_type::MerchantId,
) -> RouterResult<storage::Blocklist> {
    let not_found_error = errors::ApiErrorResponse::GenericNotFoundError {
        message: "could not find a blocklist entry for the given bin".to_string(),
    };
    state
        .store
        .delete_blocklist_entry_by_merchant_id_fingerprint_id(merchant_id, bin)
        .await
        .to_not_found_response(not_found_error)
}
/// Checks the payment's card data against the merchant blocklist and, when a
/// match is found, fails the intent and attempt and rejects the payment with
/// `PaymentBlockedError`.
///
/// Three lookup keys are considered for card payments: the hashed full card
/// number (computed via the locker), the 6-digit BIN, and the 8-digit
/// extended BIN. If nothing matches, the attempt is stamped with a freshly
/// generated payment fingerprint and `Ok(false)` is returned.
pub async fn validate_data_for_blocklist<F>(
    state: &SessionState,
    merchant_account: &domain::MerchantAccount,
    key_store: &domain::MerchantKeyStore,
    payment_data: &mut PaymentData<F>,
) -> CustomResult<bool, errors::ApiErrorResponse>
where
    F: Send + Clone,
{
    let db = &state.store;
    let merchant_id = merchant_account.get_id();
    let merchant_fingerprint_secret = get_merchant_fingerprint_secret(state, merchant_id).await?;
    // Hashed Fingerprint to check whether or not this payment should be blocked.
    let card_number_fingerprint = if let Some(domain::PaymentMethodData::Card(card)) =
        payment_data.payment_method_data.as_ref()
    {
        generate_fingerprint(
            state,
            StrongSecret::new(card.card_number.get_card_no()),
            StrongSecret::new(merchant_fingerprint_secret.clone()),
            api_models::enums::LockerChoice::HyperswitchCardVault,
        )
        .await
        .attach_printable("error in pm fingerprint creation")
        .map_or_else(
            // Fingerprinting failures are logged and treated as "no fingerprint"
            // rather than a hard error, so payments are not blocked on locker issues.
            |error| {
                logger::error!(?error);
                None
            },
            Some,
        )
        .map(|payload| payload.card_fingerprint)
    } else {
        None
    };
    // Hashed Cardbin to check whether or not this payment should be blocked.
    let card_bin_fingerprint = payment_data
        .payment_method_data
        .as_ref()
        .and_then(|pm_data| match pm_data {
            domain::PaymentMethodData::Card(card) => Some(card.card_number.get_card_isin()),
            _ => None,
        });
    // Hashed Extended Cardbin to check whether or not this payment should be blocked.
    let extended_card_bin_fingerprint =
        payment_data
            .payment_method_data
            .as_ref()
            .and_then(|pm_data| match pm_data {
                domain::PaymentMethodData::Card(card) => {
                    Some(card.card_number.get_extended_card_bin())
                }
                _ => None,
            });
    //validating the payment method.
    // Collect one lookup future per available key, then run them together.
    let mut blocklist_futures = Vec::new();
    if let Some(card_number_fingerprint) = card_number_fingerprint.as_ref() {
        blocklist_futures.push(db.find_blocklist_entry_by_merchant_id_fingerprint_id(
            merchant_id,
            card_number_fingerprint,
        ));
    }
    if let Some(card_bin_fingerprint) = card_bin_fingerprint.as_ref() {
        blocklist_futures.push(
            db.find_blocklist_entry_by_merchant_id_fingerprint_id(
                merchant_id,
                card_bin_fingerprint,
            ),
        );
    }
    if let Some(extended_card_bin_fingerprint) = extended_card_bin_fingerprint.as_ref() {
        blocklist_futures.push(db.find_blocklist_entry_by_merchant_id_fingerprint_id(
            merchant_id,
            extended_card_bin_fingerprint,
        ));
    }
    let blocklist_lookups = futures::future::join_all(blocklist_futures).await;
    // Any successful lookup means a matching blocklist row exists; lookup
    // errors (including not-found) are logged and ignored.
    let mut should_payment_be_blocked = false;
    for lookup in blocklist_lookups {
        match lookup {
            Ok(_) => {
                should_payment_be_blocked = true;
            }
            Err(e) => {
                logger::error!(blocklist_db_error=?e, "failed db operations for blocklist");
            }
        }
    }
    if should_payment_be_blocked {
        // Update db for attempt and intent status.
        db.update_payment_intent(
            &state.into(),
            payment_data.payment_intent.clone(),
            storage::PaymentIntentUpdate::RejectUpdate {
                status: common_enums::IntentStatus::Failed,
                merchant_decision: Some(MerchantDecision::Rejected.to_string()),
                updated_by: merchant_account.storage_scheme.to_string(),
            },
            key_store,
            merchant_account.storage_scheme,
        )
        .await
        .to_not_found_response(errors::ApiErrorResponse::PaymentNotFound)
        .attach_printable(
            "Failed to update status in Payment Intent to failed due to it being blocklisted",
        )?;
        // If payment is blocked not showing connector details
        let attempt_update = storage::PaymentAttemptUpdate::BlocklistUpdate {
            status: common_enums::AttemptStatus::Failure,
            error_code: Some(Some("HE-03".to_string())),
            error_message: Some(Some("This payment method is blocked".to_string())),
            updated_by: merchant_account.storage_scheme.to_string(),
        };
        db.update_payment_attempt_with_attempt_id(
            payment_data.payment_attempt.clone(),
            attempt_update,
            merchant_account.storage_scheme,
        )
        .await
        .to_not_found_response(errors::ApiErrorResponse::PaymentNotFound)
        .attach_printable(
            "Failed to update status in Payment Attempt to failed, due to it being blocklisted",
        )?;
        Err(errors::ApiErrorResponse::PaymentBlockedError {
            code: 200,
            message: "This payment method is blocked".to_string(),
            status: "Failed".to_string(),
            reason: "Blocked".to_string(),
        }
        .into())
    } else {
        // Not blocked: tag the attempt with its fingerprint for future lookups.
        payment_data.payment_attempt.fingerprint_id = generate_payment_fingerprint(
            state,
            payment_data.payment_attempt.merchant_id.clone(),
            payment_data.payment_method_data.clone(),
        )
        .await?;
        Ok(false)
    }
}
/// Computes the card-number fingerprint used to tag a payment attempt.
///
/// Returns `Ok(None)` (after logging) when the payment method is not a card
/// or when the locker fingerprint call fails; only secret retrieval errors
/// are propagated.
pub async fn generate_payment_fingerprint(
    state: &SessionState,
    merchant_id: common_utils::id_type::MerchantId,
    payment_method_data: Option<domain::PaymentMethodData>,
) -> CustomResult<Option<String>, errors::ApiErrorResponse> {
    let merchant_fingerprint_secret = get_merchant_fingerprint_secret(state, &merchant_id).await?;
    let card = match payment_method_data.as_ref() {
        Some(domain::PaymentMethodData::Card(card)) => card,
        _ => {
            logger::error!("failed to retrieve card fingerprint");
            return Ok(None);
        }
    };
    let fingerprint = generate_fingerprint(
        state,
        StrongSecret::new(card.card_number.get_card_no()),
        StrongSecret::new(merchant_fingerprint_secret),
        api_models::enums::LockerChoice::HyperswitchCardVault,
    )
    .await
    .attach_printable("error in pm fingerprint creation")
    .map_or_else(
        // Best-effort: log the failure and fall back to no fingerprint.
        |error| {
            logger::error!(?error);
            None
        },
        |payload| Some(payload.card_fingerprint),
    );
    Ok(fingerprint)
}
| 3,486 | 1,711 |
hyperswitch | crates/router/src/core/blocklist/transformers.rs | .rs | use api_models::{blocklist, enums as api_enums};
use common_utils::{
ext_traits::{Encode, StringExt},
request::RequestContent,
};
use error_stack::ResultExt;
use josekit::jwe;
use masking::{PeekInterface, StrongSecret};
use router_env::{instrument, tracing};
use crate::{
configs::settings,
core::{
errors::{self, CustomResult},
payment_methods::transformers as payment_methods,
},
headers, routes,
services::{api as services, encryption, EncryptionAlgorithm},
types::{storage, transformers::ForeignFrom},
utils::ConnectorResponseExt,
};
/// Locker endpoint used to derive a card fingerprint.
const LOCKER_FINGERPRINT_PATH: &str = "/cards/fingerprint";
impl ForeignFrom<storage::Blocklist> for blocklist::AddToBlocklistResponse {
    /// Maps a stored blocklist row onto its API response shape.
    fn foreign_from(from: storage::Blocklist) -> Self {
        let storage::Blocklist {
            fingerprint_id,
            data_kind,
            created_at,
            ..
        } = from;
        Self {
            fingerprint_id,
            data_kind,
            created_at,
        }
    }
}
/// Builds the signed-then-encrypted (JWS-in-JWE) HTTP request used to ask the
/// locker for a card fingerprint.
async fn generate_fingerprint_request(
    jwekey: &settings::Jwekey,
    locker: &settings::Locker,
    payload: &blocklist::GenerateFingerprintRequest,
    locker_choice: api_enums::LockerChoice,
) -> CustomResult<services::Request, errors::VaultError> {
    // Serialize and sign the payload with the vault private key.
    let serialized = payload
        .encode_to_vec()
        .change_context(errors::VaultError::RequestEncodingFailed)?;
    let signing_key = jwekey.vault_private_key.peek().as_bytes();
    let jws = encryption::jws_sign_payload(&serialized, &locker.locker_signing_key_id, signing_key)
        .await
        .change_context(errors::VaultError::RequestEncodingFailed)?;
    // Wrap the signature in a JWE envelope addressed to the chosen locker.
    let jwe_payload = generate_jwe_payload_for_request(jwekey, &jws, locker_choice).await?;
    let url = {
        let mut base = match locker_choice {
            api_enums::LockerChoice::HyperswitchCardVault => locker.host.to_owned(),
        };
        base.push_str(LOCKER_FINGERPRINT_PATH);
        base
    };
    let mut request = services::Request::new(services::Method::Post, &url);
    request.add_header(headers::CONTENT_TYPE, "application/json".into());
    request.set_body(RequestContent::Json(Box::new(jwe_payload)));
    Ok(request)
}
/// Wraps a compact-serialization JWS into a JWE envelope for the locker.
///
/// The JWS string is split on '.' into (header, payload, signature), encoded
/// as a `JwsBody`, encrypted with the locker's public key (A256GCM). The
/// resulting compact JWE string is then split back into its five parts:
/// index 0 = protected header, 1 = encrypted key, 2 = IV, 3 = ciphertext,
/// 4 = authentication tag.
async fn generate_jwe_payload_for_request(
    jwekey: &settings::Jwekey,
    jws: &str,
    locker_choice: api_enums::LockerChoice,
) -> CustomResult<encryption::JweBody, errors::VaultError> {
    let jws_payload: Vec<&str> = jws.split('.').collect();
    // A well-formed compact JWS has exactly three dot-separated segments;
    // missing segments surface as None and become GenerateFingerprintFailed.
    let generate_jws_body = |payload: Vec<&str>| -> Option<encryption::JwsBody> {
        Some(encryption::JwsBody {
            header: payload.first()?.to_string(),
            payload: payload.get(1)?.to_string(),
            signature: payload.get(2)?.to_string(),
        })
    };
    let jws_body =
        generate_jws_body(jws_payload).ok_or(errors::VaultError::GenerateFingerprintFailed)?;
    let payload = jws_body
        .encode_to_vec()
        .change_context(errors::VaultError::GenerateFingerprintFailed)?;
    let public_key = match locker_choice {
        api_enums::LockerChoice::HyperswitchCardVault => {
            jwekey.vault_encryption_key.peek().as_bytes()
        }
    };
    let jwe_encrypted =
        encryption::encrypt_jwe(&payload, public_key, EncryptionAlgorithm::A256GCM, None)
            .await
            .change_context(errors::VaultError::SaveCardFailed)
            .attach_printable("Error on jwe encrypt")?;
    let jwe_payload: Vec<&str> = jwe_encrypted.split('.').collect();
    // Compact JWE segment order is header.encrypted_key.iv.ciphertext.tag;
    // the struct fields below are filled from those fixed positions.
    let generate_jwe_body = |payload: Vec<&str>| -> Option<encryption::JweBody> {
        Some(encryption::JweBody {
            header: payload.first()?.to_string(),
            iv: payload.get(2)?.to_string(),
            encrypted_payload: payload.get(3)?.to_string(),
            tag: payload.get(4)?.to_string(),
            encrypted_key: payload.get(1)?.to_string(),
        })
    };
    let jwe_body =
        generate_jwe_body(jwe_payload).ok_or(errors::VaultError::GenerateFingerprintFailed)?;
    Ok(jwe_body)
}
/// Asks the locker to compute a keyed fingerprint for the given card number.
#[instrument(skip_all)]
pub async fn generate_fingerprint(
    state: &routes::SessionState,
    card_number: StrongSecret<String>,
    hash_key: StrongSecret<String>,
    locker_choice: api_enums::LockerChoice,
) -> CustomResult<blocklist::GenerateFingerprintResponsePayload, errors::VaultError> {
    let request_payload = blocklist::GenerateFingerprintRequest {
        card: blocklist::Card { card_number },
        hash_key,
    };
    call_to_locker_for_fingerprint(state, &request_payload, locker_choice).await
}
/// Fires the fingerprint request at the locker service, then decrypts and
/// verifies the JWE-wrapped response into its typed payload.
#[instrument(skip_all)]
async fn call_to_locker_for_fingerprint(
    state: &routes::SessionState,
    payload: &blocklist::GenerateFingerprintRequest,
    locker_choice: api_enums::LockerChoice,
) -> CustomResult<blocklist::GenerateFingerprintResponsePayload, errors::VaultError> {
    let locker = &state.conf.locker;
    let jwekey = state.conf.jwekey.get_inner();
    let request = generate_fingerprint_request(jwekey, locker, payload, locker_choice).await?;
    let raw_response =
        services::call_connector_api(state, request, "call_locker_to_get_fingerprint")
            .await
            .change_context(errors::VaultError::GenerateFingerprintFailed);
    let jwe_body: encryption::JweBody = raw_response
        .get_response_inner("JweBody")
        .change_context(errors::VaultError::GenerateFingerprintFailed)?;
    // The locker responds with an encrypted envelope; unwrap it before parsing.
    let decrypted_payload = decrypt_generate_fingerprint_response_payload(
        jwekey,
        jwe_body,
        Some(locker_choice),
        locker.decryption_scheme.clone(),
    )
    .await
    .change_context(errors::VaultError::GenerateFingerprintFailed)
    .attach_printable("Error getting decrypted fingerprint response payload")?;
    decrypted_payload
        .parse_struct::<blocklist::GenerateFingerprintResponsePayload>(
            "GenerateFingerprintResponse",
        )
        .change_context(errors::VaultError::ResponseDeserializationFailed)
}
/// Decrypts and signature-verifies the locker's JWE response, returning the
/// inner JSON payload as a string.
///
/// Defaults to the Hyperswitch card vault when no locker choice is given.
async fn decrypt_generate_fingerprint_response_payload(
    jwekey: &settings::Jwekey,
    jwe_body: encryption::JweBody,
    locker_choice: Option<api_enums::LockerChoice>,
    decryption_scheme: settings::DecryptionScheme,
) -> CustomResult<String, errors::VaultError> {
    let target_locker = locker_choice.unwrap_or(api_enums::LockerChoice::HyperswitchCardVault);
    // Public key of the target locker, used to verify the inner JWS below.
    let public_key = match target_locker {
        api_enums::LockerChoice::HyperswitchCardVault => {
            jwekey.vault_encryption_key.peek().as_bytes()
        }
    };
    let private_key = jwekey.vault_private_key.peek().as_bytes();
    let jwt = payment_methods::get_dotted_jwe(jwe_body);
    let alg = match decryption_scheme {
        settings::DecryptionScheme::RsaOaep => jwe::RSA_OAEP,
        settings::DecryptionScheme::RsaOaep256 => jwe::RSA_OAEP_256,
    };
    let jwe_decrypted = encryption::decrypt_jwe(
        &jwt,
        encryption::KeyIdCheck::SkipKeyIdCheck,
        private_key,
        alg,
    )
    .await
    .change_context(errors::VaultError::SaveCardFailed)
    .attach_printable("Jwe Decryption failed for JweBody for vault")?;
    // The decrypted body is itself a JWS; verify the signature before
    // trusting the payload.
    let jws = jwe_decrypted
        .parse_struct("JwsBody")
        .change_context(errors::VaultError::ResponseDeserializationFailed)?;
    let jws_body = payment_methods::get_dotted_jws(jws);
    encryption::verify_sign(jws_body, public_key)
        .change_context(errors::VaultError::SaveCardFailed)
        .attach_printable("Jws Decryption failed for JwsBody for vault")
}
| 1,769 | 1,712 |
hyperswitch | crates/router/src/core/revenue_recovery/transformers.rs | .rs | use common_enums::AttemptStatus;
use crate::{core::revenue_recovery::types::PcrAttemptStatus, types::transformers::ForeignFrom};
impl ForeignFrom<AttemptStatus> for PcrAttemptStatus {
fn foreign_from(s: AttemptStatus) -> Self {
match s {
AttemptStatus::Authorized | AttemptStatus::Charged | AttemptStatus::AutoRefunded => {
Self::Succeeded
}
AttemptStatus::Started
| AttemptStatus::AuthenticationSuccessful
| AttemptStatus::Authorizing
| AttemptStatus::CodInitiated
| AttemptStatus::VoidInitiated
| AttemptStatus::CaptureInitiated
| AttemptStatus::Pending => Self::Processing,
AttemptStatus::AuthenticationFailed
| AttemptStatus::AuthorizationFailed
| AttemptStatus::VoidFailed
| AttemptStatus::RouterDeclined
| AttemptStatus::CaptureFailed
| AttemptStatus::Failure => Self::Failed,
AttemptStatus::Voided
| AttemptStatus::ConfirmationAwaited
| AttemptStatus::PartialCharged
| AttemptStatus::PartialChargedAndChargeable
| AttemptStatus::PaymentMethodAwaited
| AttemptStatus::AuthenticationPending
| AttemptStatus::DeviceDataCollectionPending
| AttemptStatus::Unresolved => Self::InvalidStatus(s.to_string()),
}
}
}
| 286 | 1,713 |
hyperswitch | crates/router/src/core/revenue_recovery/types.rs | .rs | use std::marker::PhantomData;
use api_models::{
enums as api_enums,
mandates::RecurringDetails,
payments::{
AmountDetails, FeatureMetadata, PaymentRevenueRecoveryMetadata,
PaymentsUpdateIntentRequest, ProxyPaymentsRequest,
},
};
use common_utils::{
self,
ext_traits::{OptionExt, ValueExt},
id_type,
};
use diesel_models::{enums, process_tracker::business_status, types as diesel_types};
use error_stack::{self, ResultExt};
use hyperswitch_domain_models::{
business_profile, merchant_connector_account,
payments::{
self as domain_payments, payment_attempt, PaymentConfirmData, PaymentIntent,
PaymentIntentData,
},
router_data_v2::{self, flow_common_types},
router_flow_types,
router_request_types::revenue_recovery as revenue_recovery_request,
router_response_types::revenue_recovery as revenue_recovery_response,
ApiModelToDieselModelConvertor,
};
use time::PrimitiveDateTime;
use crate::{
core::{
errors::{self, RouterResult},
payments::{self, helpers, operations::Operation},
revenue_recovery::{self as core_pcr},
},
db::StorageInterface,
logger,
routes::SessionState,
services::{self, connector_integration_interface::RouterDataConversion},
types::{
self, api as api_types, api::payments as payments_types, storage, transformers::ForeignInto,
},
workflows::revenue_recovery::get_schedule_time_to_retry_mit_payments,
};
/// Convenience alias for results in the revenue-recovery flows.
type RecoveryResult<T> = error_stack::Result<T, errors::RecoveryError>;
/// The status of Passive Churn Payments
#[derive(Clone, Debug, serde::Deserialize, serde::Serialize)]
pub enum PcrAttemptStatus {
    /// Attempt reached a terminal success state.
    Succeeded,
    /// Attempt reached a terminal failure state.
    Failed,
    /// Attempt is still in flight with the connector.
    Processing,
    /// Attempt status has no recovery semantics; carries the raw status string.
    InvalidStatus(String),
    // Cancelled,
}
impl PcrAttemptStatus {
    /// Moves the execute-workflow process-tracker entry forward based on the
    /// attempt status: mapped statuses finish the execute task (a psync task
    /// takes over), while an unmapped status parks the task for review.
    pub(crate) async fn update_pt_status_based_on_attempt_status_for_execute_payment(
        &self,
        db: &dyn StorageInterface,
        execute_task_process: &storage::ProcessTracker,
    ) -> Result<(), errors::ProcessTrackerError> {
        if let Self::InvalidStatus(action) = self {
            logger::debug!(
                "Invalid Attempt Status for the Recovery Payment : {}",
                action
            );
            // Park the task in Review so an operator can inspect it.
            let pt_update = storage::ProcessTrackerUpdate::StatusUpdate {
                status: enums::ProcessTrackerStatus::Review,
                business_status: Some(String::from(business_status::EXECUTE_WORKFLOW_COMPLETE)),
            };
            db.update_process(execute_task_process.clone(), pt_update)
                .await?;
        } else {
            // Succeeded / Failed / Processing: finish the current execute task.
            db.finish_process_with_business_status(
                execute_task_process.clone(),
                business_status::EXECUTE_WORKFLOW_COMPLETE_FOR_PSYNC,
            )
            .await?;
        }
        Ok(())
    }
}
#[derive(Debug, Clone)]
pub enum Decision {
    /// Run the execute (payment retry) workflow.
    Execute,
    /// Run a payment sync against the given attempt, last seen in the given status.
    Psync(enums::AttemptStatus, id_type::GlobalAttemptId),
    /// The intent/connector/attempt combination does not map to a known flow.
    InvalidDecision,
}
impl Decision {
    /// Derives which workflow to run from the intent status, whether the
    /// connector call was transmitted, and the presence of an active attempt:
    ///
    /// * Failed intent + unsuccessful transmission + no active attempt =>
    ///   `Execute` (retry the payment).
    /// * Processing intent + successful transmission + an active attempt =>
    ///   psync the payment and return the attempt's current status.
    /// * Any other combination => `InvalidDecision`.
    pub async fn get_decision_based_on_params(
        state: &SessionState,
        intent_status: enums::IntentStatus,
        called_connector: enums::PaymentConnectorTransmission,
        active_attempt_id: Option<id_type::GlobalAttemptId>,
        pcr_data: &storage::revenue_recovery::PcrPaymentData,
        payment_id: &id_type::GlobalPaymentId,
    ) -> RecoveryResult<Self> {
        Ok(match (intent_status, called_connector, active_attempt_id) {
            (
                enums::IntentStatus::Failed,
                enums::PaymentConnectorTransmission::ConnectorCallUnsuccessful,
                None,
            ) => Self::Execute,
            (
                enums::IntentStatus::Processing,
                enums::PaymentConnectorTransmission::ConnectorCallSucceeded,
                Some(_),
            ) => {
                // Sync with the connector to learn the attempt's latest status.
                let psync_data = core_pcr::call_psync_api(state, payment_id, pcr_data)
                    .await
                    .change_context(errors::RecoveryError::PaymentCallFailed)
                    .attach_printable("Error while executing the Psync call")?;
                let payment_attempt = psync_data
                    .payment_attempt
                    .get_required_value("Payment Attempt")
                    .change_context(errors::RecoveryError::ValueNotFound)
                    .attach_printable("Error while executing the Psync call")?;
                Self::Psync(payment_attempt.status, payment_attempt.get_id().clone())
            }
            _ => Self::InvalidDecision,
        })
    }
}
#[derive(Debug, Clone)]
pub enum Action {
    /// Follow up with a payment sync on the given attempt.
    SyncPayment(id_type::GlobalAttemptId),
    /// Schedule another payment retry at the given time.
    RetryPayment(PrimitiveDateTime),
    /// Attempt failed with no further retry scheduled; treat as terminal.
    TerminalFailure(payment_attempt::PaymentAttempt),
    /// Attempt succeeded; record back to the billing connector.
    SuccessfulPayment(payment_attempt::PaymentAttempt),
    /// Could not proceed; leave the payment for review.
    ReviewPayment,
    /// Unmapped attempt status; requires manual review.
    ManualReviewAction,
}
impl Action {
    /// Calls the proxy payments API for the intent and translates the outcome
    /// into the next [`Action`]: success -> `SuccessfulPayment`, failure ->
    /// retry decision, processing -> `SyncPayment`, unmapped status ->
    /// `ManualReviewAction`. If the proxy call itself errors, falls back to
    /// syncing any active attempt, or `ReviewPayment` when none exists.
    pub async fn execute_payment(
        state: &SessionState,
        merchant_id: &id_type::MerchantId,
        payment_intent: &PaymentIntent,
        process: &storage::ProcessTracker,
        pcr_data: &storage::revenue_recovery::PcrPaymentData,
        revenue_recovery_metadata: &PaymentRevenueRecoveryMetadata,
    ) -> RecoveryResult<Self> {
        let db = &*state.store;
        let response =
            call_proxy_api(state, payment_intent, pcr_data, revenue_recovery_metadata).await;
        // handle proxy api's response
        match response {
            Ok(payment_data) => match payment_data.payment_attempt.status.foreign_into() {
                PcrAttemptStatus::Succeeded => Ok(Self::SuccessfulPayment(
                    payment_data.payment_attempt.clone(),
                )),
                PcrAttemptStatus::Failed => {
                    // Decide between scheduling another retry and terminal failure.
                    Self::decide_retry_failure_action(
                        db,
                        merchant_id,
                        process.clone(),
                        &payment_data.payment_attempt,
                    )
                    .await
                }
                PcrAttemptStatus::Processing => {
                    Ok(Self::SyncPayment(payment_data.payment_attempt.id.clone()))
                }
                PcrAttemptStatus::InvalidStatus(action) => {
                    logger::info!(?action, "Invalid Payment Status For PCR Payment");
                    Ok(Self::ManualReviewAction)
                }
            },
            Err(err) =>
            // check for an active attempt being constructed or not
            {
                logger::error!(execute_payment_res=?err);
                match payment_intent.active_attempt_id.clone() {
                    Some(attempt_id) => Ok(Self::SyncPayment(attempt_id)),
                    None => Ok(Self::ReviewPayment),
                }
            }
        }
    }
    /// Applies the outcome of [`Self::execute_payment`] to the process
    /// tracker and, where needed, to the payment intent and billing connector:
    ///
    /// * `SyncPayment` — spawn a psync task and finish the execute task,
    /// * `RetryPayment` — reschedule the execute task and reset the intent's
    ///   connector-transmission metadata / active attempt,
    /// * `TerminalFailure` / `SuccessfulPayment` — finish the task and record
    ///   the terminal state back to the billing connector,
    /// * `ReviewPayment` — no-op,
    /// * `ManualReviewAction` — park the task in Review.
    pub async fn execute_payment_task_response_handler(
        &self,
        state: &SessionState,
        payment_intent: &PaymentIntent,
        execute_task_process: &storage::ProcessTracker,
        pcr_data: &storage::revenue_recovery::PcrPaymentData,
        revenue_recovery_metadata: &mut PaymentRevenueRecoveryMetadata,
        billing_mca: &merchant_connector_account::MerchantConnectorAccount,
    ) -> Result<(), errors::ProcessTrackerError> {
        let db = &*state.store;
        match self {
            Self::SyncPayment(attempt_id) => {
                core_pcr::insert_psync_pcr_task(
                    db,
                    pcr_data.merchant_account.get_id().to_owned(),
                    payment_intent.id.clone(),
                    pcr_data.profile.get_id().to_owned(),
                    attempt_id.clone(),
                    storage::ProcessTrackerRunner::PassiveRecoveryWorkflow,
                )
                .await
                .change_context(errors::RecoveryError::ProcessTrackerFailure)
                .attach_printable("Failed to create a psync workflow in the process tracker")?;
                db.as_scheduler()
                    .finish_process_with_business_status(
                        execute_task_process.clone(),
                        business_status::EXECUTE_WORKFLOW_COMPLETE_FOR_PSYNC,
                    )
                    .await
                    .change_context(errors::RecoveryError::ProcessTrackerFailure)
                    .attach_printable("Failed to update the process tracker")?;
                Ok(())
            }
            Self::RetryPayment(schedule_time) => {
                db.as_scheduler()
                    .retry_process(execute_task_process.clone(), *schedule_time)
                    .await?;
                // update the connector payment transmission field to Unsuccessful and unset active attempt id
                revenue_recovery_metadata.set_payment_transmission_field_for_api_request(
                    enums::PaymentConnectorTransmission::ConnectorCallUnsuccessful,
                );
                let payment_update_req = PaymentsUpdateIntentRequest::update_feature_metadata_and_active_attempt_with_api(
                    payment_intent.feature_metadata.clone().unwrap_or_default().convert_back().set_payment_revenue_recovery_metadata_using_api(
                        revenue_recovery_metadata.clone()
                    ),
                    api_enums::UpdateActiveAttempt::Unset,
                );
                logger::info!(
                    "Call made to payments update intent api , with the request body {:?}",
                    payment_update_req
                );
                update_payment_intent_api(
                    state,
                    payment_intent.id.clone(),
                    pcr_data,
                    payment_update_req,
                )
                .await
                .change_context(errors::RecoveryError::PaymentCallFailed)?;
                Ok(())
            }
            Self::TerminalFailure(payment_attempt) => {
                db.as_scheduler()
                    .finish_process_with_business_status(
                        execute_task_process.clone(),
                        business_status::EXECUTE_WORKFLOW_COMPLETE,
                    )
                    .await
                    .change_context(errors::RecoveryError::ProcessTrackerFailure)
                    .attach_printable("Failed to update the process tracker")?;
                // Record back to billing connector for terminal status
                // TODO: Add support for retrying failed outgoing recordback webhooks
                self.record_back_to_billing_connector(
                    state,
                    payment_attempt,
                    payment_intent,
                    billing_mca,
                )
                .await
                .change_context(errors::RecoveryError::RecordBackToBillingConnectorFailed)
                .attach_printable("Failed to record back the billing connector")?;
                Ok(())
            }
            Self::SuccessfulPayment(payment_attempt) => {
                db.as_scheduler()
                    .finish_process_with_business_status(
                        execute_task_process.clone(),
                        business_status::EXECUTE_WORKFLOW_COMPLETE,
                    )
                    .await
                    .change_context(errors::RecoveryError::ProcessTrackerFailure)
                    .attach_printable("Failed to update the process tracker")?;
                // Record back to billing connector for terminal status
                // TODO: Add support for retrying failed outgoing recordback webhooks
                self.record_back_to_billing_connector(
                    state,
                    payment_attempt,
                    payment_intent,
                    billing_mca,
                )
                .await
                .change_context(errors::RecoveryError::RecordBackToBillingConnectorFailed)
                .attach_printable("Failed to update the process tracker")?;
                Ok(())
            }
            Self::ReviewPayment => Ok(()),
            Self::ManualReviewAction => {
                logger::debug!("Invalid Payment Status For PCR Payment");
                let pt_update = storage::ProcessTrackerUpdate::StatusUpdate {
                    status: enums::ProcessTrackerStatus::Review,
                    business_status: Some(String::from(business_status::EXECUTE_WORKFLOW_COMPLETE)),
                };
                // update the process tracker status as Review
                db.as_scheduler()
                    .update_process(execute_task_process.clone(), pt_update)
                    .await?;
                Ok(())
            }
        }
    }
/// Reports the terminal payment outcome back to the billing connector.
///
/// Resolves the connector implementation from the billing merchant
/// connector account, constructs the `RecoveryRecordBack` router data and
/// triggers the flow. A connector-level error response is logged and
/// surfaced as `RecoveryError::RecordBackToBillingConnectorFailed`.
async fn record_back_to_billing_connector(
    &self,
    state: &SessionState,
    payment_attempt: &payment_attempt::PaymentAttempt,
    payment_intent: &PaymentIntent,
    billing_mca: &merchant_connector_account::MerchantConnectorAccount,
) -> RecoveryResult<()> {
    let connector_name = billing_mca.connector_name.to_string();
    let connector_data = api_types::ConnectorData::get_connector_by_name(
        &state.conf.connectors,
        &connector_name,
        api_types::GetToken::Connector,
        Some(billing_mca.get_id()),
    )
    .change_context(errors::RecoveryError::RecordBackToBillingConnectorFailed)
    .attach_printable(
        "invalid connector name received in billing merchant connector account",
    )?;
    let connector_integration: services::BoxedRevenueRecoveryRecordBackInterface<
        router_flow_types::RecoveryRecordBack,
        revenue_recovery_request::RevenueRecoveryRecordBackRequest,
        revenue_recovery_response::RevenueRecoveryRecordBackResponse,
    > = connector_data.connector.get_connector_integration();
    let router_data = self.construct_recovery_record_back_router_data(
        state,
        billing_mca,
        payment_attempt,
        payment_intent,
    )?;
    let response = services::execute_connector_processing_step(
        state,
        connector_integration,
        &router_data,
        payments::CallConnectorAction::Trigger,
        None,
    )
    .await
    .change_context(errors::RecoveryError::RecordBackToBillingConnectorFailed)
    .attach_printable("Failed while handling response of record back to billing connector")?;
    // Only the error path carries actionable information here; the success
    // payload is not used. The previous code bound it to an unused local
    // (`record_back_response`), which was dead code.
    match response.response {
        Ok(_) => Ok(()),
        error @ Err(_) => {
            router_env::logger::error!(?error);
            Err(errors::RecoveryError::RecordBackToBillingConnectorFailed)
                .attach_printable("Failed while recording back to billing connector")
        }
    }
}
/// Builds the router data required by the `RecoveryRecordBack` flow
/// towards the billing connector, converted to the legacy router data
/// shape expected by the connector integration layer.
pub fn construct_recovery_record_back_router_data(
    &self,
    state: &SessionState,
    billing_mca: &merchant_connector_account::MerchantConnectorAccount,
    payment_attempt: &payment_attempt::PaymentAttempt,
    payment_intent: &PaymentIntent,
) -> RecoveryResult<hyperswitch_domain_models::types::RevenueRecoveryRecordBackRouterData> {
    // Decode the connector credentials stored on the billing MCA.
    let connector_auth: types::ConnectorAuthType =
        helpers::MerchantConnectorAccountType::DbVal(Box::new(billing_mca.clone()))
            .get_connector_account_details()
            .parse_value("ConnectorAuthType")
            .change_context(errors::RecoveryError::RecordBackToBillingConnectorFailed)?;
    // The record-back call is keyed on the merchant reference id; without
    // it the billing connector cannot associate the outcome.
    let merchant_reference_id = payment_intent
        .merchant_reference_id
        .clone()
        .ok_or(errors::RecoveryError::RecordBackToBillingConnectorFailed)
        .attach_printable(
            "Merchant reference id not found while recording back to billing connector",
        )?;
    let record_back_request = revenue_recovery_request::RevenueRecoveryRecordBackRequest {
        merchant_reference_id,
        amount: payment_attempt.get_total_amount(),
        currency: payment_intent.amount_details.currency,
        payment_method_type: Some(payment_attempt.payment_method_subtype),
        attempt_status: payment_attempt.status,
        connector_transaction_id: payment_attempt
            .connector_payment_id
            .as_ref()
            .map(|id| common_utils::types::ConnectorTransactionId::TxnId(id.clone())),
    };
    let record_back_router_data = router_data_v2::RouterDataV2 {
        flow: PhantomData::<router_flow_types::RecoveryRecordBack>,
        tenant_id: state.tenant.tenant_id.clone(),
        resource_common_data: flow_common_types::RevenueRecoveryRecordBackData,
        connector_auth_type: connector_auth,
        request: record_back_request,
        response: Err(types::ErrorResponse::default()),
    };
    flow_common_types::RevenueRecoveryRecordBackData::to_old_router_data(record_back_router_data)
        .change_context(errors::RecoveryError::RecordBackToBillingConnectorFailed)
        .attach_printable("Cannot construct record back router data")
}
/// Decides the follow-up action after a failed attempt: schedule another
/// retry if the retry policy yields a next run time, otherwise mark the
/// payment as a terminal failure.
pub(crate) async fn decide_retry_failure_action(
    db: &dyn StorageInterface,
    merchant_id: &id_type::MerchantId,
    pt: storage::ProcessTracker,
    payment_attempt: &payment_attempt::PaymentAttempt,
) -> RecoveryResult<Self> {
    let next_retry_at =
        get_schedule_time_to_retry_mit_payments(db, merchant_id, pt.retry_count + 1).await;
    Ok(match next_retry_at {
        Some(retry_at) => Self::RetryPayment(retry_at),
        None => Self::TerminalFailure(payment_attempt.clone()),
    })
}
}
/// Triggers a merchant-initiated retry through the payments proxy flow.
///
/// Builds a `ProxyPaymentsRequest` from the intent's amount details and the
/// revenue-recovery metadata (payment token + connector), fetches the
/// trackers for the intent, then drives the `Authorize` proxy operation
/// core directly.
// NOTE(review): bypasses the HTTP API handler layer on purpose — see the
// TODO below about switching to the api handler.
async fn call_proxy_api(
    state: &SessionState,
    payment_intent: &PaymentIntent,
    pcr_data: &storage::revenue_recovery::PcrPaymentData,
    revenue_recovery: &PaymentRevenueRecoveryMetadata,
) -> RouterResult<PaymentConfirmData<payments_types::Authorize>> {
    let operation = payments::operations::proxy_payments_intent::PaymentProxyIntent;
    let req = ProxyPaymentsRequest {
        return_url: None,
        amount: AmountDetails::new(payment_intent.amount_details.clone().into()),
        recurring_details: revenue_recovery.get_payment_token_for_api_request(),
        shipping: None,
        browser_info: None,
        connector: revenue_recovery.connector.to_string(),
        merchant_connector_id: revenue_recovery.get_merchant_connector_id_for_api_request(),
    };
    logger::info!(
        "Call made to payments proxy api , with the request body {:?}",
        req
    );
    // TODO : Use api handler instead of calling get_tracker and payments_operation_core
    // Get the tracker related information. This includes payment intent and payment attempt
    let get_tracker_response = operation
        .to_get_tracker()?
        .get_trackers(
            state,
            payment_intent.get_id(),
            &req,
            &pcr_data.merchant_account,
            &pcr_data.profile,
            &pcr_data.key_store,
            &hyperswitch_domain_models::payments::HeaderPayload::default(),
            None,
        )
        .await?;
    // Run the proxy operation core for the Authorize flow; the connector
    // call is triggered immediately (CallConnectorAction::Trigger).
    let (payment_data, _req, _, _) = Box::pin(payments::proxy_for_payments_operation_core::<
        payments_types::Authorize,
        _,
        _,
        _,
        PaymentConfirmData<payments_types::Authorize>,
    >(
        state,
        state.get_req_state(),
        pcr_data.merchant_account.clone(),
        pcr_data.key_store.clone(),
        pcr_data.profile.clone(),
        operation,
        req,
        get_tracker_response,
        payments::CallConnectorAction::Trigger,
        hyperswitch_domain_models::payments::HeaderPayload::default(),
    ))
    .await?;
    Ok(payment_data)
}
/// Applies a `PaymentsUpdateIntentRequest` to the given global payment
/// intent by invoking the `PaymentUpdateIntent` operation core directly
/// (no customer is attached; the returned customer is discarded).
pub async fn update_payment_intent_api(
    state: &SessionState,
    global_payment_id: id_type::GlobalPaymentId,
    pcr_data: &storage::revenue_recovery::PcrPaymentData,
    update_req: PaymentsUpdateIntentRequest,
) -> RouterResult<PaymentIntentData<payments_types::PaymentUpdateIntent>> {
    // TODO : Use api handler instead of calling payments_intent_operation_core
    let operation = payments::operations::PaymentUpdateIntent;
    let (payment_data, _req, customer) = payments::payments_intent_operation_core::<
        payments_types::PaymentUpdateIntent,
        _,
        _,
        PaymentIntentData<payments_types::PaymentUpdateIntent>,
    >(
        state,
        state.get_req_state(),
        pcr_data.merchant_account.clone(),
        pcr_data.profile.clone(),
        pcr_data.key_store.clone(),
        operation,
        update_req,
        global_payment_id,
        hyperswitch_domain_models::payments::HeaderPayload::default(),
        None,
    )
    .await?;
    Ok(payment_data)
}
| 3,941 | 1,714 |
hyperswitch | crates/router/src/core/payment_link/validator.rs | .rs | use actix_http::header;
use api_models::admin::PaymentLinkConfig;
use common_utils::validation::validate_domain_against_allowed_domains;
use error_stack::{report, ResultExt};
use url::Url;
use crate::{
core::errors::{self, RouterResult},
types::storage::PaymentLink,
};
pub fn validate_secure_payment_link_render_request(
request_headers: &header::HeaderMap,
payment_link: &PaymentLink,
payment_link_config: &PaymentLinkConfig,
) -> RouterResult<()> {
let link_id = payment_link.payment_link_id.clone();
let allowed_domains = payment_link_config
.allowed_domains
.clone()
.ok_or(report!(errors::ApiErrorResponse::InvalidRequestUrl))
.attach_printable_lazy(|| {
format!(
"Secure payment link was not generated for {}\nmissing allowed_domains",
link_id
)
})?;
// Validate secure_link was generated
if payment_link.secure_link.clone().is_none() {
return Err(report!(errors::ApiErrorResponse::InvalidRequestUrl)).attach_printable_lazy(
|| {
format!(
"Secure payment link was not generated for {}\nmissing secure_link",
link_id
)
},
);
}
// Fetch destination is "iframe"
match request_headers.get("sec-fetch-dest").and_then(|v| v.to_str().ok()) {
Some("iframe") => Ok(()),
Some(requestor) => Err(report!(errors::ApiErrorResponse::AccessForbidden {
resource: "payment_link".to_string(),
}))
.attach_printable_lazy(|| {
format!(
"Access to payment_link [{}] is forbidden when requested through {}",
link_id, requestor
)
}),
None => Err(report!(errors::ApiErrorResponse::AccessForbidden {
resource: "payment_link".to_string(),
}))
.attach_printable_lazy(|| {
format!(
"Access to payment_link [{}] is forbidden when sec-fetch-dest is not present in request headers",
link_id
)
}),
}?;
// Validate origin / referer
let domain_in_req = {
let origin_or_referer = request_headers
.get("origin")
.or_else(|| request_headers.get("referer"))
.and_then(|v| v.to_str().ok())
.ok_or_else(|| {
report!(errors::ApiErrorResponse::AccessForbidden {
resource: "payment_link".to_string(),
})
})
.attach_printable_lazy(|| {
format!(
"Access to payment_link [{}] is forbidden when origin or referer is not present in request headers",
link_id
)
})?;
let url = Url::parse(origin_or_referer)
.map_err(|_| {
report!(errors::ApiErrorResponse::AccessForbidden {
resource: "payment_link".to_string(),
})
})
.attach_printable_lazy(|| {
format!("Invalid URL found in request headers {}", origin_or_referer)
})?;
url.host_str()
.and_then(|host| url.port().map(|port| format!("{}:{}", host, port)))
.or_else(|| url.host_str().map(String::from))
.ok_or_else(|| {
report!(errors::ApiErrorResponse::AccessForbidden {
resource: "payment_link".to_string(),
})
})
.attach_printable_lazy(|| {
format!("host or port not found in request headers {:?}", url)
})?
};
if validate_domain_against_allowed_domains(&domain_in_req, allowed_domains) {
Ok(())
} else {
Err(report!(errors::ApiErrorResponse::AccessForbidden {
resource: "payment_link".to_string(),
}))
.attach_printable_lazy(|| {
format!(
"Access to payment_link [{}] is forbidden from requestor - {}",
link_id, domain_in_req
)
})
}
}
| 828 | 1,715 |
hyperswitch | crates/router/src/core/payment_link/locale.js | .js | /*
The languages supported by locale.js are:
1) English (en)
2) Hebrew (he)
3) French (fr)
4) British English (en_gb)
5) Arabic (ar)
6) Japanese (ja)
7) German (de)
8) Belgian French (fr_be)
9) Spanish (es)
10) Catalan (ca)
11) Portuguese (pt)
12) Italian (it)
13) Polish (pl)
14) Dutch (nl)
15) Swedish (sv)
16) Russian (ru)
17) Chinese (zh)
18) Traditional Chinese (zh_hant)
*/
const locales = {
en: {
expiresOn: "Link expires on: ",
refId: "Ref Id: ",
requestedBy: "Requested by ",
payNow: "Pay now",
yourCart: "Your Cart",
quantity: "Quantity",
showLess: "Show Less",
showMore: "Show More",
miscellaneousCharges: "Miscellaneous charges",
miscellaneousChargesDetail: "(includes taxes, shipping, discounts, offers etc.)",
paymentTakingLonger: "Sorry! Your payment is taking longer than expected. Please check back again in sometime.",
paymentLinkExpired: "Payment Link Expired",
paymentReceived: "We have successfully received your payment",
paymentLinkExpiredMessage: "Sorry, this payment link has expired. Please use below reference for further investigation.",
paidSuccessfully: "Paid successfully",
paymentPending: "Payment Pending",
paymentFailed: "Payment Failed!",
paymentCancelled: "Payment Cancelled",
paymentUnderReview: "Payment under review",
paymentSuccess: "Payment Success",
partialPaymentCaptured: "Partial payment was captured.",
somethingWentWrong: "Something went wrong",
redirecting: "Redirecting ...",
redirectingIn: "Redirecting in ",
seconds: " seconds ...",
unexpectedError: "An unexpected error occurred.",
notAllowed: "You are not allowed to view this content.",
errorCode: "Error code",
errorMessage: "Error Message"
},
he: {
expiresOn: "הקישור יפוג ב: ",
refId: "מזהה הפניה: ",
requestedBy: "ביקש על ידי ",
payNow: "שלם עכשיו",
yourCart: "העגלה שלך",
quantity: "כמות",
showLess: "הצג פחות",
showMore: "הצג עוד",
miscellaneousCharges: "חיובים נוספים",
miscellaneousChargesDetail: "(כולל מיסים, משלוח, הנחות, הצעות וכו')",
paymentTakingLonger: "מצטערים! התשלום שלך לוקח יותר זמן מהצפוי. אנא בדוק שוב בעוד זמן מה.",
paymentLinkExpired: "הקישור לתשלום פג",
paymentReceived: "קיבלנו את התשלום שלך בהצלחה",
paymentLinkExpiredMessage: "מצטערים, הקישור הזה לתשלום פג. השתמש בהפניה למטה להמשך החקירה.",
paidSuccessfully: "שולם בהצלחה",
paymentPending: "התשלום ממתין",
paymentFailed: "התשלום נכשל!",
paymentCancelled: "התשלום בוטל",
paymentUnderReview: "התשלום בבדיקה",
paymentSuccess: "התשלום הצליח",
partialPaymentCaptured: "תשלום חלקי נלכד.",
somethingWentWrong: "משהו השתבש",
redirecting: "מעביר...",
redirectingIn: "מעביר בעוד ",
seconds: " שניות ...",
unexpectedError: "אירעה שגיאה בלתי צפויה.",
notAllowed: "אינך מורשה לצפות בתוכן זה.",
errorCode: "קוד שגיאה",
errorMessage: "הודעת שגיאה"
},
fr: {
expiresOn: "Le lien expire le: ",
refId: "ID de référence: ",
requestedBy: "Demandé par ",
payNow: "Payer maintenant",
yourCart: "Votre panier",
quantity: "Quantité",
showLess: "Afficher moins",
showMore: "Afficher plus",
miscellaneousCharges: "Frais divers",
miscellaneousChargesDetail: "(comprend les taxes, les frais d'expédition, les remises, les offres, etc.)",
paymentTakingLonger: "Désolé! Votre paiement prend plus de temps que prévu. Veuillez réessayer plus tard.",
paymentLinkExpired: "Lien de paiement expiré",
paymentReceived: "Nous avons bien reçu votre paiement",
paymentLinkExpiredMessage: "Désolé, ce lien de paiement a expiré. Veuillez utiliser la référence ci-dessous pour une enquête plus approfondie.",
paidSuccessfully: "Payé avec succès",
paymentPending: "Paiement en attente",
paymentFailed: "Échec du paiement!",
paymentCancelled: "Paiement annulé",
paymentUnderReview: "Paiement en cours de révision",
paymentSuccess: "Paiement réussi",
partialPaymentCaptured: "Paiement partiel capturé.",
somethingWentWrong: "Quelque chose a mal tourné",
redirecting: "Redirection...",
redirectingIn: "Redirection dans ",
seconds: " secondes...",
unexpectedError: "Une erreur inattendue est survenue.",
notAllowed: "Vous n'êtes pas autorisé à voir ce contenu.",
errorCode: "Code d'erreur",
errorMessage: "Message d'erreur"
},
en_gb: {
expiresOn: "Link expires on: ",
refId: "Ref Id: ",
requestedBy: "Requested by ",
payNow: "Pay now",
yourCart: "Your Basket",
quantity: "Quantity",
showLess: "Show Less",
showMore: "Show More",
miscellaneousCharges: "Miscellaneous charges",
miscellaneousChargesDetail: "(includes taxes, shipping, discounts, offers etc.)",
paymentTakingLonger: "Sorry! Your payment is taking longer than expected. Please check back again in sometime.",
paymentLinkExpired: "Payment Link Expired",
paymentReceived: "We have successfully received your payment",
paymentLinkExpiredMessage: "Sorry, this payment link has expired. Please use below reference for further investigation.",
paidSuccessfully: "Paid successfully",
paymentPending: "Payment Pending",
paymentFailed: "Payment Failed!",
paymentCancelled: "Payment Cancelled",
paymentUnderReview: "Payment under review",
paymentSuccess: "Payment Success",
partialPaymentCaptured: "Partial payment was captured.",
somethingWentWrong: "Something went wrong",
redirecting: "Redirecting ...",
redirectingIn: "Redirecting in ",
seconds: " seconds ...",
unexpectedError: "An unexpected error occurred.",
notAllowed: "You are not allowed to view this content.",
errorCode: "Error code",
errorMessage: "Error Message"
},
ar: {
expiresOn: "الرابط ينتهي في: ",
refId: "معرف المرجع: ",
requestedBy: "طلب بواسطة ",
payNow: "ادفع الآن",
yourCart: "سلة التسوق الخاصة بك",
quantity: "الكمية",
showLess: "عرض أقل",
showMore: "عرض المزيد",
miscellaneousCharges: "رسوم متنوعة",
miscellaneousChargesDetail: "(يشمل الضرائب والشحن والخصومات والعروض وما إلى ذلك)",
paymentTakingLonger: "عذرًا! يستغرق الدفع الخاص بك وقتًا أطول من المتوقع. يرجى التحقق مرة أخرى بعد فترة.",
paymentLinkExpired: "انتهاء صلاحية رابط الدفع",
paymentReceived: "لقد تلقينا دفعتك بنجاح",
paymentLinkExpiredMessage: "عذرًا، انتهت صلاحية رابط الدفع هذا. يرجى استخدام المرجع أدناه لمزيد من التحقيق.",
paidSuccessfully: "تم الدفع بنجاح",
paymentPending: "الدفع معلق",
paymentFailed: "فشل الدفع!",
paymentCancelled: "تم إلغاء الدفع",
paymentUnderReview: "الدفع قيد المراجعة",
paymentSuccess: "نجاح الدفع",
partialPaymentCaptured: "تم تحصيل دفعة جزئية.",
somethingWentWrong: "حدث خطأ ما",
redirecting: "إعادة توجيه ...",
redirectingIn: "إعادة توجيه في ",
seconds: " ثوانٍ ...",
unexpectedError: "حدث خطأ غير متوقع.",
notAllowed: "أنت غير مسموح لك بعرض هذا المحتوى.",
errorCode: "رمز الخطأ",
errorMessage: "رسالة الخطأ"
},
ja: {
expiresOn: "リンクの有効期限は: ",
refId: "参照 ID: ",
requestedBy: "リクエスト者 ",
payNow: "今すぐ支払う",
yourCart: "あなたのカート",
quantity: "数量",
showLess: "表示を減らす",
showMore: "もっと見る",
miscellaneousCharges: "その他の料金",
miscellaneousChargesDetail: "(税金、送料、割引、特典などが含まれます)",
paymentTakingLonger: "申し訳ありません! お支払いに予想以上の時間がかかっています。 しばらくしてから再度確認してください。",
paymentLinkExpired: "支払いリンクの有効期限が切れました",
paymentReceived: "お支払いが正常に完了しました",
paymentLinkExpiredMessage: "申し訳ありませんが、この支払いリンクの有効期限が切れています。 詳細な調査については、以下の参照をご利用ください。",
paidSuccessfully: "支払い完了",
paymentPending: "保留中の支払い",
paymentFailed: "支払い失敗!",
paymentCancelled: "支払いがキャンセルされました",
paymentUnderReview: "支払いの審査中",
paymentSuccess: "支払い成功",
partialPaymentCaptured: "部分的な支払いが取得されました。",
somethingWentWrong: "何かがうまくいかなかった",
redirecting: "リダイレクト中...",
redirectingIn: "リダイレクト中 ",
seconds: " 秒 ...",
unexpectedError: "予期しないエラーが発生しました。",
notAllowed: "このコンテンツを表示する権限がありません。",
errorCode: "エラーコード",
errorMessage: "エラーメッセージ"
},
de: {
expiresOn: "Link läuft ab am: ",
refId: "Referenz-ID: ",
requestedBy: "Angefordert von ",
payNow: "Jetzt bezahlen",
yourCart: "Ihr Warenkorb",
quantity: "Menge",
showLess: "Weniger anzeigen",
showMore: "Mehr anzeigen",
miscellaneousCharges: "Sonstige Gebühren",
miscellaneousChargesDetail: "(einschließlich Steuern, Versand, Rabatte, Angebote usw.)",
paymentTakingLonger: "Entschuldigung! Ihre Zahlung dauert länger als erwartet. Bitte prüfen Sie später erneut.",
paymentLinkExpired: "Zahlungslink abgelaufen",
paymentReceived: "Wir haben Ihre Zahlung erfolgreich erhalten",
paymentLinkExpiredMessage: "Entschuldigung, dieser Zahlungslink ist abgelaufen. Bitte verwenden Sie die folgende Referenz für weitere Untersuchungen.",
paidSuccessfully: "Erfolgreich bezahlt",
paymentPending: "Zahlung ausstehend",
paymentFailed: "Zahlung fehlgeschlagen!",
paymentCancelled: "Zahlung storniert",
paymentUnderReview: "Zahlung wird überprüft",
paymentSuccess: "Zahlung erfolgreich",
partialPaymentCaptured: "Teilzahlung wurde erfasst.",
somethingWentWrong: "Etwas ist schiefgelaufen",
redirecting: "Weiterleiten ...",
redirectingIn: "Weiterleiten in ",
seconds: " Sekunden ...",
unexpectedError: "Ein unerwarteter Fehler ist aufgetreten.",
notAllowed: "Sie dürfen diesen Inhalt nicht ansehen.",
errorCode: "Fehlercode",
errorMessage: "Fehlermeldung"
},
fr_be: {
expiresOn: "Le lien expire le: ",
refId: "ID de référence: ",
requestedBy: "Demandé par ",
payNow: "Payer maintenant",
yourCart: "Votre panier",
quantity: "Quantité",
showLess: "Afficher moins",
showMore: "Afficher plus",
miscellaneousCharges: "Frais divers",
miscellaneousChargesDetail: "(comprend les taxes, les frais d'expédition, les remises, les offres, etc.)",
paymentTakingLonger: "Désolé! Votre paiement prend plus de temps que prévu. Veuillez réessayer plus tard.",
paymentLinkExpired: "Lien de paiement expiré",
paymentReceived: "Nous avons bien reçu votre paiement",
paymentLinkExpiredMessage: "Désolé, ce lien de paiement a expiré. Veuillez utiliser la référence ci-dessous pour une enquête plus approfondie.",
paidSuccessfully: "Payé avec succès",
paymentPending: "Paiement en attente",
paymentFailed: "Échec du paiement!",
paymentCancelled: "Paiement annulé",
paymentUnderReview: "Paiement en cours de révision",
paymentSuccess: "Paiement réussi",
partialPaymentCaptured: "Paiement partiel capturé.",
somethingWentWrong: "Quelque chose a mal tourné",
redirecting: "Redirection...",
redirectingIn: "Redirection dans ",
seconds: " secondes...",
unexpectedError: "Une erreur inattendue est survenue.",
notAllowed: "Vous n'êtes pas autorisé à voir ce contenu.",
errorCode: "Code d'erreur",
errorMessage: "Message d'erreur"
},
es: {
expiresOn: "El enlace expira el: ",
refId: "ID de referencia: ",
requestedBy: "Solicitado por ",
payNow: "Pagar ahora",
yourCart: "Tu carrito",
quantity: "Cantidad",
showLess: "Mostrar menos",
showMore: "Mostrar más",
miscellaneousCharges: "Cargos varios",
miscellaneousChargesDetail: "(incluye impuestos, envío, descuentos, ofertas, etc.)",
paymentTakingLonger: "¡Lo siento! Tu pago está tardando más de lo esperado. Por favor, vuelve a verificarlo más tarde.",
paymentLinkExpired: "Enlace de pago expirado",
paymentReceived: "Hemos recibido tu pago con éxito",
paymentLinkExpiredMessage: "Lo siento, este enlace de pago ha expirado. Por favor, usa la referencia a continuación para una investigación adicional.",
paidSuccessfully: "Pagado exitosamente",
paymentPending: "Pago Pendiente",
paymentFailed: "¡Pago Fallido!",
paymentCancelled: "Pago Cancelado",
paymentUnderReview: "Pago en revisión",
paymentSuccess: "Éxito en el pago",
partialPaymentCaptured: "Pago parcial capturado.",
somethingWentWrong: "Algo salió mal",
redirecting: "Redirigiendo ...",
redirectingIn: "Redirigiendo en ",
seconds: " segundos ...",
unexpectedError: "Ocurrió un error inesperado.",
notAllowed: "No tienes permiso para ver este contenido.",
errorCode: "Código de error",
errorMessage: "Mensaje de error"
},
ca: {
expiresOn: "L'enllaç caduca el: ",
refId: "ID de referència: ",
requestedBy: "Sol·licitat per ",
payNow: "Paga ara",
yourCart: "El teu carret",
quantity: "Quantitat",
showLess: "Mostrar menys",
showMore: "Mostrar més",
miscellaneousCharges: "Càrrecs diversos",
miscellaneousChargesDetail: "(inclou impostos, enviaments, descomptes, ofertes, etc.)",
paymentTakingLonger: "Ho sentim! El teu pagament està trigant més del que s'esperava. Si us plau, torna-ho a comprovar d'aquí a una estona.",
paymentLinkExpired: "Enllaç de pagament caducat",
paymentReceived: "Hem rebut el teu pagament amb èxit",
paymentLinkExpiredMessage: "Ho sentim, aquest enllaç de pagament ha caducat. Si us plau, utilitza la referència següent per a més investigació.",
paidSuccessfully: "Pagat amb èxit",
paymentPending: "Pagament pendent",
paymentFailed: "Pagament fallit!",
paymentCancelled: "Pagament cancel·lat",
paymentUnderReview: "Pagament en revisió",
paymentSuccess: "Pagament amb èxit",
partialPaymentCaptured: "Pagament parcial capturat.",
somethingWentWrong: "Alguna cosa ha anat malament",
redirecting: "Redirigint ...",
redirectingIn: "Redirigint en ",
seconds: " segons ...",
unexpectedError: "S'ha produït un error inesperat.",
notAllowed: "No tens permís per veure aquest contingut.",
errorCode: "Codi d'error",
errorMessage: "Missatge d'error"
},
pt: {
expiresOn: "Link expira em: ",
refId: "ID de referência: ",
requestedBy: "Solicitado por ",
payNow: "Pagar agora",
yourCart: "Seu Carrinho",
quantity: "Quantidade",
showLess: "Mostrar menos",
showMore: "Mostrar mais",
miscellaneousCharges: "Encargos diversos",
miscellaneousChargesDetail: "(inclui impostos, frete, descontos, ofertas, etc.)",
paymentTakingLonger: "Desculpe! Seu pagamento está demorando mais do que o esperado. Por favor, volte novamente em algum momento.",
paymentLinkExpired: "Link de Pagamento Expirado",
paymentReceived: "Recebemos seu pagamento com sucesso",
paymentLinkExpiredMessage: "Desculpe, este link de pagamento expirou. Por favor, use a referência abaixo para investigação adicional.",
paidSuccessfully: "Pago com sucesso",
paymentPending: "Pagamento Pendente",
paymentFailed: "Pagamento Falhou!",
paymentCancelled: "Pagamento Cancelado",
paymentUnderReview: "Pagamento em análise",
paymentSuccess: "Sucesso no pagamento",
partialPaymentCaptured: "Pagamento parcial capturado.",
somethingWentWrong: "Algo deu errado",
redirecting: "Redirecionando ...",
redirectingIn: "Redirecionando em ",
seconds: " segundos ...",
unexpectedError: "Ocorreu um erro inesperado.",
notAllowed: "Você não tem permissão para ver este conteúdo.",
errorCode: "Código de erro",
errorMessage: "Mensagem de erro"
},
it: {
expiresOn: "Link scade il: ",
refId: "ID di riferimento: ",
requestedBy: "Richiesto da ",
payNow: "Paga ora",
yourCart: "Il tuo carrello",
quantity: "Quantità",
showLess: "Mostra meno",
showMore: "Mostra di più",
miscellaneousCharges: "Spese varie",
miscellaneousChargesDetail: "(inclusi tasse, spedizione, sconti, offerte, ecc.)",
paymentTakingLonger: "Spiacenti! Il tuo pagamento sta impiegando più tempo del previsto. Controlla di nuovo tra un po'.",
paymentLinkExpired: "Link di pagamento scaduto",
paymentReceived: "Abbiamo ricevuto il tuo pagamento con successo",
paymentLinkExpiredMessage: "Spiacenti, questo link di pagamento è scaduto. Utilizza il riferimento sottostante per ulteriori indagini.",
paidSuccessfully: "Pagato con successo",
paymentPending: "Pagamento in sospeso",
paymentFailed: "Pagamento fallito!",
paymentCancelled: "Pagamento annullato",
paymentUnderReview: "Pagamento in revisione",
paymentSuccess: "Pagamento riuscito",
partialPaymentCaptured: "Pagamento parziale catturato.",
somethingWentWrong: "Qualcosa è andato storto",
redirecting: "Reindirizzando ...",
redirectingIn: "Reindirizzando in ",
seconds: " secondi ...",
unexpectedError: "Si è verificato un errore imprevisto.",
notAllowed: "Non sei autorizzato a vedere questo contenuto.",
errorCode: "Codice di errore",
errorMessage: "Messaggio di errore"
},
pl: {
expiresOn: "Link wygasa w dniu: ",
refId: "Identyfikator referencyjny: ",
requestedBy: "Zażądane przez ",
payNow: "Zapłać teraz",
yourCart: "Twój koszyk",
quantity: "Ilość",
showLess: "Pokaż mniej",
showMore: "Pokaż więcej",
miscellaneousCharges: "Różne opłaty",
miscellaneousChargesDetail: "(obejmuje podatki, wysyłkę, rabaty, oferty itp.)",
paymentTakingLonger: "Przepraszamy! Twoja płatność trwa dłużej niż oczekiwano. Sprawdź ponownie później.",
paymentLinkExpired: "Link do płatności wygasł",
paymentReceived: "Otrzymaliśmy twoją płatność pomyślnie",
paymentLinkExpiredMessage: "Przepraszamy, ten link do płatności wygasł. Skorzystaj z poniższego odniesienia do dalszego dochodzenia.",
paidSuccessfully: "Zapłacono pomyślnie",
paymentPending: "Oczekująca płatność",
paymentFailed: "Płatność nie powiodła się!",
paymentCancelled: "Płatność anulowana",
paymentUnderReview: "Płatność w trakcie przeglądu",
paymentSuccess: "Sukces płatności",
partialPaymentCaptured: "Częściowa płatność została przechwycona.",
somethingWentWrong: "Coś poszło nie tak",
redirecting: "Przekierowanie ...",
redirectingIn: "Przekierowanie w ",
seconds: " sekund ...",
unexpectedError: "Wystąpił nieoczekiwany błąd.",
notAllowed: "Nie masz uprawnień do przeglądania tej zawartości.",
errorCode: "Kod błędu",
errorMessage: "Komunikat o błędzie"
},
nl: {
expiresOn: "Link verloopt op: ",
refId: "Ref Id: ",
requestedBy: "Aangevraagd door ",
payNow: "Nu betalen",
yourCart: "Je winkelwagen",
quantity: "Hoeveelheid",
showLess: "Toon minder",
showMore: "Toon meer",
miscellaneousCharges: "Diverse kosten",
miscellaneousChargesDetail: "(inclusief belastingen, verzending, kortingen, aanbiedingen, enz.)",
paymentTakingLonger: "Sorry! Je betaling duurt langer dan verwacht. Controleer het later opnieuw.",
paymentLinkExpired: "Betaallink verlopen",
paymentReceived: "We hebben je betaling succesvol ontvangen",
paymentLinkExpiredMessage: "Sorry, deze betaallink is verlopen. Gebruik de onderstaande referentie voor verder onderzoek.",
paidSuccessfully: "Succesvol betaald",
paymentPending: "Betaling in behandeling",
paymentFailed: "Betaling mislukt!",
paymentCancelled: "Betaling geannuleerd",
paymentUnderReview: "Betaling in behandeling",
paymentSuccess: "Betaling geslaagd",
partialPaymentCaptured: "Deelbetaling is vastgelegd.",
somethingWentWrong: "Er is iets misgegaan",
redirecting: "Doorverwijzen ...",
redirectingIn: "Doorverwijzen in ",
seconds: " seconden ...",
unexpectedError: "Er is een onverwachte fout opgetreden.",
notAllowed: "Je mag deze inhoud niet bekijken.",
errorCode: "Foutcode",
errorMessage: "Foutmelding"
},
sv: {
expiresOn: "Länken upphör att gälla den: ",
refId: "Referens-ID: ",
requestedBy: "Begärd av ",
payNow: "Betala nu",
yourCart: "Din varukorg",
quantity: "Antal",
showLess: "Visa mindre",
showMore: "Visa mer",
miscellaneousCharges: "Diverse avgifter",
miscellaneousChargesDetail: "(inklusive skatter, frakt, rabatter, erbjudanden osv.)",
paymentTakingLonger: "Ledsen! Din betalning tar längre tid än väntat. Vänligen kontrollera igen senare.",
paymentLinkExpired: "Betalningslänk har upphört att gälla",
paymentReceived: "Vi har mottagit din betalning",
paymentLinkExpiredMessage: "Tyvärr, denna betalningslänk har upphört att gälla. Använd nedanstående referens för ytterligare utredning.",
paidSuccessfully: "Betalad framgångsrikt",
paymentPending: "Betalning väntar",
paymentFailed: "Betalning misslyckades!",
paymentCancelled: "Betalning avbruten",
paymentUnderReview: "Betalning under granskning",
paymentSuccess: "Betalning lyckades",
partialPaymentCaptured: "Delbetalning har fångats.",
somethingWentWrong: "Något gick fel",
redirecting: "Omdirigerar ...",
redirectingIn: "Omdirigerar om ",
seconds: " sekunder ...",
unexpectedError: "Ett oväntat fel inträffade.",
notAllowed: "Du har inte behörighet att se detta innehåll.",
errorCode: "Felkod",
errorMessage: "Felmeddelande"
},
ru: {
expiresOn: "Ссылка истекает: ",
refId: "ID ссылки: ",
requestedBy: "Запрошено ",
payNow: "Оплатить сейчас",
yourCart: "Ваша корзина",
quantity: "Количество",
showLess: "Показать меньше",
showMore: "Показать больше",
miscellaneousCharges: "Прочие сборы",
miscellaneousChargesDetail: "(включает налоги, доставку, скидки, предложения и т. д.)",
paymentTakingLonger: "Извините! Ваш платеж занимает больше времени, чем ожидалось. Пожалуйста, проверьте позже.",
paymentLinkExpired: "Срок действия ссылки оплаты истек",
paymentReceived: "Мы успешно получили ваш платеж",
paymentLinkExpiredMessage: "Извините, срок действия этой ссылки на оплату истек. Пожалуйста, используйте ссылку ниже для дальнейшего расследования.",
paidSuccessfully: "Оплачено успешно",
paymentPending: "Ожидание оплаты",
paymentFailed: "Платеж не прошел!",
paymentCancelled: "Платеж отменен",
paymentUnderReview: "Платеж на рассмотрении",
paymentSuccess: "Платеж успешен",
partialPaymentCaptured: "Частичный платеж захвачен.",
somethingWentWrong: "Что-то пошло не так",
redirecting: "Перенаправление...",
redirectingIn: "Перенаправление через ",
seconds: " секунд ...",
unexpectedError: "Произошла неожиданная ошибка.",
notAllowed: "Вам не разрешено просматривать этот контент.",
errorCode: "Код ошибки",
errorMessage: "Сообщение об ошибке"
},
zh: {
expiresOn: "链接将于以下时间过期: ",
refId: "参考编号: ",
requestedBy: "请求者: ",
payNow: "立即付款",
yourCart: "您的购物车",
quantity: "数量",
showLess: "显示较少",
showMore: "显示更多",
miscellaneousCharges: "其他费用",
miscellaneousChargesDetail: "(包括税费、运费、折扣、优惠等)",
paymentTakingLonger: "抱歉! 您的付款比预期的时间更长。 请稍后再回来查看。",
paymentLinkExpired: "支付链接已过期",
paymentReceived: "我们已成功收到您的付款",
paymentLinkExpiredMessage: "抱歉,此支付链接已过期。 请使用以下参考进行进一步调查。",
paidSuccessfully: "支付成功",
paymentPending: "付款待定",
paymentFailed: "付款失败!",
paymentCancelled: "付款取消",
paymentUnderReview: "付款审查中",
paymentSuccess: "付款成功",
partialPaymentCaptured: "已捕获部分付款。",
somethingWentWrong: "发生了一些错误",
redirecting: "重定向中...",
redirectingIn: "将在 ",
seconds: " 秒内重定向...",
unexpectedError: "发生意外错误。",
notAllowed: "您没有权限查看此内容。",
errorCode: "错误代码",
errorMessage: "错误信息"
},
zh_hant: {
expiresOn: "連結到期日期:",
refId: "參考編號:",
requestedBy: "請求者 ",
payNow: "立即付款",
yourCart: "你的購物車",
quantity: "數量",
showLess: "顯示較少",
showMore: "顯示更多",
miscellaneousCharges: "雜項費用",
miscellaneousChargesDetail: "(包括稅金、運費、折扣、優惠等)",
paymentTakingLonger: "抱歉!您的付款處理時間比預期長。請稍後再查看。",
paymentLinkExpired: "付款連結已過期",
paymentReceived: "我們已成功收到您的付款",
paymentLinkExpiredMessage: "抱歉,此付款連結已過期。請使用以下參考進行進一步調查。",
paidSuccessfully: "付款成功",
paymentPending: "付款待處理",
paymentFailed: "付款失敗!",
paymentCancelled: "付款已取消",
paymentUnderReview: "付款正在審核中",
paymentSuccess: "支付成功",
partialPaymentCaptured: "部分付款已被捕獲。",
somethingWentWrong: "出了點問題",
redirecting: "重定向...",
redirectingIn: "重定向到",
seconds: " 秒...",
unexpectedError: "發生了意外錯誤。",
notAllowed: "您無權查看此內容。",
errorCode: "錯誤代碼",
errorMessage: "錯誤訊息"
},
};
/**
 * Resolves the translation table for a locale string.
 * Fix: the previous version called `locale_str.toLowerCase()` unconditionally,
 * throwing a TypeError when the locale was null/undefined (e.g. missing from
 * payment details). Now any non-string / empty value falls back to English.
 * @param {String} locale_str - e.g. "en", "zh-Hant" (case/dash insensitive)
 * @returns {Object} the matching entry of `locales`, defaulting to `locales.en`
 */
function getTranslations(locale_str) {
  var fallbackLocale = "en";
  // Normalize e.g. "zh-Hant" -> "zh_hant" to match the keys of `locales`.
  var locale =
    typeof locale_str === "string" && locale_str
      ? locale_str.toLowerCase().replace(/-/g, "_")
      : fallbackLocale;
  return locales[locale] || locales[fallbackLocale]; // defaults if locale is not implemented in locales.
}
hyperswitch | crates/router/src/core/payment_link/payment_link_status/status.html | .html | <html lang="en">
<head>
<meta charset="UTF-8" />
<meta name="viewport" content="width=device-width, initial-scale=1.0" />
<title>Payment Status</title>
<style>
{{rendered_css}}
</style>
<link
rel="stylesheet"
href="https://fonts.googleapis.com/css2?family=Montserrat:wght@400;500;600;700;800"
/>
<script>
{{logging_template}}
{{locale_template}}
{{ rendered_js }}
</script>
</head>
<body onload="boot()" class="hidden">
<div>
<div class="hyper-checkout-status-wrap">
<div id="hyper-checkout-status-header"></div>
<div id="hyper-checkout-status-content"></div>
</div>
<div id="hyper-checkout-status-redirect-message"></div>
</div>
</body>
</html>
| 219 | 1,717 |
hyperswitch | crates/router/src/core/payment_link/payment_link_status/status.js | .js | // @ts-check
/**
* UTIL FUNCTIONS
*/
/**
* Ref - https://github.com/onury/invert-color/blob/master/lib/cjs/invert.js
*/
/**
 * Left-pads (or truncates) a string with zeros to a total width of `len`.
 * Mirrors the padding helper from the invert-color library.
 * @param {String} str - value to pad
 * @param {Number} [len=2] - target width
 * @returns {String} the last `len` characters of "0…0" + str
 */
function padz(str, len) {
  var width = len === void 0 ? 2 : len;
  var padded = new Array(width).join("0") + str;
  return padded.slice(-width);
}
/**
 * Parses a 3- or 6-digit hex color (leading "#" optional) into [r, g, b].
 * @param {String} hex - e.g. "#ff0000", "fff"
 * @returns {Number[]} channel values 0-255
 * @throws {Error} when the string is not a valid hex color
 */
function hexToRgbArray(hex) {
  var value = hex.slice(0, 1) === "#" ? hex.slice(1) : hex;
  if (!/^(?:[0-9a-f]{3}){1,2}$/i.test(value)) {
    throw new Error('Invalid HEX color: "' + value + '"');
  }
  // Expand shorthand: "abc" -> "aabbcc"
  if (value.length === 3) {
    value = value
      .split("")
      .map(function (ch) {
        return ch + ch;
      })
      .join("");
  }
  return [0, 2, 4].map(function (i) {
    return parseInt(value.slice(i, i + 2), 16);
  });
}
/**
 * Normalizes a color value (array, hex string, or {r,g,b} object) to [r, g, b].
 * @param {Number[]|String|{r:Number,g:Number,b:Number}} c
 * @returns {Number[]}
 * @throws {Error} on falsy input (or an invalid hex string, via hexToRgbArray)
 */
function toRgbArray(c) {
  if (!c) throw new Error("Invalid color value");
  if (Array.isArray(c)) {
    return c;
  }
  if (typeof c === "string") {
    return hexToRgbArray(c);
  }
  return [c.r, c.g, c.b];
}
/**
 * Computes WCAG relative luminance of an [r, g, b] color (channels 0-255).
 * Each channel is linearized (sRGB gamma) then weighted 0.2126/0.7152/0.0722.
 * @param {Number[]} c
 * @returns {Number} luminance in [0, 1]
 */
function getLuminance(c) {
  var linear = Array.prototype.map.call(c, function (v) {
    var x = v / 255;
    // sRGB -> linear-light conversion
    return x <= 0.03928 ? x / 12.92 : Math.pow((x + 0.055) / 1.055, 2.4);
  });
  return 0.2126 * linear[0] + 0.7152 * linear[1] + 0.0722 * linear[2];
}
/**
 * Picks near-black or near-white, whichever contrasts with `color`.
 * @param {Number[]} color - [r, g, b] array
 * @param {Boolean|Object} bw - `true` for defaults, or an override object
 *   with optional `black`, `white`, `threshold` keys
 * @param {Boolean} [asArr] - return [r, g, b] instead of a hex string
 * @returns {String|Number[]}
 */
function invertToBW(color, bw, asArr) {
  var DEFAULT_BW = {
    black: "#090302",
    white: "#FFFFFC",
    threshold: Math.sqrt(1.05 * 0.05) - 0.05, // WCAG contrast midpoint
  };
  var options = bw === true ? DEFAULT_BW : Object.assign({}, DEFAULT_BW, bw);
  // Bright colors get black text, dark colors get white text.
  var picked =
    getLuminance(color) > options.threshold ? options.black : options.white;
  return asArr ? hexToRgbArray(picked) : picked;
}
/**
 * Inverts a color. With `bw` truthy, returns black/white for best contrast
 * instead of the literal RGB inverse.
 * Ref - https://github.com/onury/invert-color/blob/master/lib/cjs/invert.js
 * @param {Number[]|String|{r,g,b}} color
 * @param {Boolean|Object} [bw=false]
 * @returns {String} hex color string (or array, via invertToBW's asArr path)
 */
function invert(color, bw) {
  var mode = bw === void 0 ? false : bw;
  var rgb = toRgbArray(color);
  if (mode) return invertToBW(rgb, mode);
  var hex = "";
  for (var i = 0; i < rgb.length; i++) {
    // 255 - channel, rendered as a zero-padded hex pair
    hex += padz((255 - rgb[i]).toString(16));
  }
  return "#" + hex;
}
/**
* UTIL FUNCTIONS END HERE
*/
// @ts-ignore
{{ payment_details_js_script }}
// @ts-ignore
// Viewport snapshot used for responsive rendering (mobile breakpoint: 1400px).
window.state = {
  prevHeight: window.innerHeight,
  prevWidth: window.innerWidth,
  isMobileView: window.innerWidth <= 1400,
};
// Localized UI strings; getTranslations comes from the injected locale template
// and __PAYMENT_DETAILS from the payment-details script above.
const translations = getTranslations(window.__PAYMENT_DETAILS.locale);
// Detect whether this page is embedded in an iframe.
var isFramed = false;
try {
  isFramed = window.parent.location !== window.location;
  // If parent's window object is restricted, DOMException is
  // thrown which concludes that the webpage is iframed
} catch (err) {
  isFramed = true;
}
/**
* Trigger - on boot
* Use - emit latest payment status to parent window
*/
/**
 * Trigger - on boot
 * Use - emit latest payment status to the embedding parent window.
 * @param {Object} paymentDetails - server-injected payment data (reads `status`)
 */
function emitPaymentStatus(paymentDetails) {
  // NOTE(review): targetOrigin "*" delivers the status to any embedding
  // origin — consider pinning this to the merchant's expected origin.
  window.parent.postMessage(
    { payment: { status: paymentDetails.status } },
    "*"
  );
}
/**
* Trigger - init function invoked once the script tag is loaded
* Use
* - Update document's title
* - Update document's icon
* - Render and populate document with payment details and cart
* - Initialize event listeners for updating UI on screen size changes
* - Initialize SDK
**/
// Entry point, invoked from <body onload="boot()">. Emits status to the parent
// frame (if iframed), then renders the status screen unless suppressed.
function boot() {
  // @ts-ignore
  var paymentDetails = window.__PAYMENT_DETAILS; // injected by the server-rendered template
  // Emit latest payment status
  if (isFramed) {
    emitPaymentStatus(paymentDetails);
  }
  if (shouldRenderUI(paymentDetails)) {
    // Body starts with class "hidden"; reveal it only when rendering.
    removeClass("body", "hidden");
    // Attach document icon (favicon) from the merchant logo, when provided
    if (paymentDetails.merchant_logo) {
      var link = document.createElement("link");
      link.rel = "icon";
      link.href = paymentDetails.merchant_logo;
      link.type = "image/x-icon";
      document.head.appendChild(link);
    }
    // Render status details
    renderStatusDetails(paymentDetails);
    // Add event listeners
    initializeEventListeners(paymentDetails);
  }
}
/**
* Trigger - on boot
* Use - Check if UI should be rendered based on some conditions
* @returns {Boolean}
*/
/**
 * Trigger - on boot
 * Use - decide whether the status UI should be rendered.
 * When iframed and the payment still requires customer action, the page stays
 * hidden (the parent frame reacts to the status emitted by emitPaymentStatus).
 * @param {Object} paymentDetails
 * @returns {Boolean}
 */
function shouldRenderUI(paymentDetails) {
  return !(isFramed && paymentDetails.status === "requires_customer_action");
}
/**
* Trigger - on boot
* Uses
* - Render status details
* - Header - (amount, merchant name, merchant logo)
* - Body - status with image
* - Footer - payment details (id | error code and msg, if any)
* @param {PaymentDetails} paymentDetails
**/
// Builds the entire status screen from paymentDetails: header (amount + logo),
// body (status image/text/message), key-value detail rows, and an optional
// redirect countdown. Mutates the DOM under the ids queried below.
function renderStatusDetails(paymentDetails) {
  var status = paymentDetails.status;
  // Accumulator for everything the render below needs.
  var statusDetails = {
    imageSource: "", // status illustration URL
    message: "", // secondary explanatory text (may stay empty)
    status: status, // primary, localized headline (overwritten per case)
    amountText: "",
    items: [], // key-value rows (reference id, error details)
  };
  // Payment details
  var paymentId = createItem(translations.refId, paymentDetails.payment_id);
  // @ts-ignore
  statusDetails.items.push(paymentId);
  // Status specific information
  switch (status) {
    case "expired":
      statusDetails.imageSource = "https://live.hyperswitch.io/payment-link-assets/failed.png";
      statusDetails.status = translations.paymentLinkExpired;
      statusDetails.message = translations.paymentLinkExpiredMessage;
      break;
    case "succeeded":
      statusDetails.imageSource = "https://live.hyperswitch.io/payment-link-assets/success.png";
      statusDetails.message = translations.paymentReceived;
      statusDetails.status = translations.paidSuccessfully;
      // NOTE(review): amountText is assigned the payment's creation *time*
      // (toTimeString) and is never rendered below — confirm whether this is
      // intentional or leftover.
      statusDetails.amountText = new Date(
        paymentDetails.created
      ).toTimeString();
      break;
    case "requires_customer_action":
    case "processing":
      statusDetails.imageSource = "https://live.hyperswitch.io/payment-link-assets/pending.png";
      statusDetails.message = translations.paymentTakingLonger;
      statusDetails.status = translations.paymentPending;
      break;
    case "failed":
      statusDetails.imageSource = "https://live.hyperswitch.io/payment-link-assets/failed.png";
      statusDetails.status = translations.paymentFailed;
      // Prefer normalized (unified) error code/message, falling back to the
      // raw connector-provided values.
      var unifiedErrorCode = paymentDetails.unified_code || paymentDetails.error_code;
      var unifiedErrorMessage = paymentDetails.unified_message || paymentDetails.error_message;
      var errorCodeNode = createItem(translations.errorCode, unifiedErrorCode);
      var errorMessageNode = createItem(
        translations.errorMessage,
        unifiedErrorMessage
      );
      // @ts-ignore
      statusDetails.items.push(errorMessageNode, errorCodeNode);
      break;
    case "cancelled":
      statusDetails.imageSource = "https://live.hyperswitch.io/payment-link-assets/failed.png";
      statusDetails.status = translations.paymentCancelled;
      break;
    case "requires_merchant_action":
      statusDetails.imageSource = "https://live.hyperswitch.io/payment-link-assets/pending.png";
      statusDetails.status = translations.paymentUnderReview;
      break;
    case "requires_capture":
      statusDetails.imageSource = "https://live.hyperswitch.io/payment-link-assets/success.png";
      statusDetails.message = translations.paymentReceived;
      statusDetails.status = translations.paymentSuccess;
      break;
    case "partially_captured":
      statusDetails.imageSource = "https://live.hyperswitch.io/payment-link-assets/success.png";
      statusDetails.message = translations.partialPaymentCaptured;
      statusDetails.status = translations.paymentSuccess;
      break;
    default:
      // Unknown status: show a generic failure plus any error payload present.
      statusDetails.imageSource = "https://live.hyperswitch.io/payment-link-assets/failed.png";
      statusDetails.status = translations.somethingWentWrong;
      // Error details
      if (typeof paymentDetails.error === "object") {
        var errorCodeNode = createItem(translations.errorCode, paymentDetails.error.code);
        var errorMessageNode = createItem(
          translations.errorMessage,
          paymentDetails.error.message
        );
        // @ts-ignore
        statusDetails.items.push(errorMessageNode, errorCodeNode);
      }
      break;
  }
  // Form header items
  var amountNode = document.createElement("div");
  amountNode.className = "hyper-checkout-status-amount";
  amountNode.innerText = paymentDetails.currency + " " + paymentDetails.amount;
  var merchantLogoNode = document.createElement("img");
  merchantLogoNode.className = "hyper-checkout-status-merchant-logo";
  // @ts-ignore
  merchantLogoNode.src = window.__PAYMENT_DETAILS.merchant_logo;
  merchantLogoNode.alt = "";
  // Form content items
  var statusImageNode = document.createElement("img");
  statusImageNode.className = "hyper-checkout-status-image";
  statusImageNode.src = statusDetails.imageSource;
  var statusTextNode = document.createElement("div");
  statusTextNode.className = "hyper-checkout-status-text";
  statusTextNode.innerText = statusDetails.status;
  var statusMessageNode = document.createElement("div");
  statusMessageNode.className = "hyper-checkout-status-message";
  statusMessageNode.innerText = statusDetails.message;
  var statusDetailsNode = document.createElement("div");
  statusDetailsNode.className = "hyper-checkout-status-details";
  // Append items
  if (statusDetailsNode instanceof HTMLDivElement) {
    statusDetails.items.map(function (item) {
      statusDetailsNode.append(item);
    });
  }
  var statusHeaderNode = document.getElementById(
    "hyper-checkout-status-header"
  );
  if (statusHeaderNode instanceof HTMLDivElement) {
    statusHeaderNode.append(amountNode, merchantLogoNode);
  }
  var statusContentNode = document.getElementById(
    "hyper-checkout-status-content"
  );
  if (statusContentNode instanceof HTMLDivElement) {
    statusContentNode.append(statusImageNode, statusTextNode);
    if (statusMessageNode instanceof HTMLDivElement) {
      statusContentNode.append(statusMessageNode);
    }
    statusContentNode.append(statusDetailsNode);
  }
  if (paymentDetails.redirect === true) {
    // Form redirect text
    var statusRedirectTextNode = document.getElementById(
      "hyper-checkout-status-redirect-message"
    );
    if (
      statusRedirectTextNode instanceof HTMLDivElement &&
      typeof paymentDetails.return_url === "string"
    ) {
      // Countdown: schedules one tick per second up-front. The loop variable
      // `i` only staggers the setTimeout delays; `j` counts ticks fired.
      var timeout = 5,
        j = 0;
      for (var i = 0; i <= timeout; i++) {
        setTimeout(function () {
          var secondsLeft = timeout - j++;
          var innerText =
            secondsLeft === 0
              ? translations.redirecting
              : translations.redirectingIn + secondsLeft + " " + translations.seconds;
          // @ts-ignore
          statusRedirectTextNode.innerText = innerText;
          if (secondsLeft === 0) {
            // Form query params
            var queryParams = {
              payment_id: paymentDetails.payment_id,
              status: paymentDetails.status,
            };
            // Attach query params to return_url, preserving any it already had
            var url = new URL(paymentDetails.return_url);
            var params = new URLSearchParams(url.search);
            // Attach query params to return_url
            for (var key in queryParams) {
              if (queryParams.hasOwnProperty(key)) {
                params.set(key, queryParams[key]);
              }
            }
            url.search = params.toString();
            setTimeout(function () {
              // Finally redirect
              window.location.href = url.toString();
            }, 1000);
          }
        }, i * 1000);
      }
    }
  }
}
/**
* Use - create an item which is a key-value pair of some information related to a payment
* @param {String} heading
* @param {String} value
**/
/**
 * Use - create a key-value row for payment info (ref id, error code, etc.).
 * @param {String} heading
 * @param {String} value
 * @returns {HTMLDivElement}
 **/
function createItem(heading, value) {
  var row = document.createElement("div");
  row.className = "hyper-checkout-status-item";
  var headingEl = document.createElement("div");
  headingEl.className = "hyper-checkout-item-header";
  headingEl.innerText = heading;
  var valueEl = document.createElement("div");
  valueEl.className = "hyper-checkout-item-value";
  valueEl.innerText = value;
  row.append(headingEl, valueEl);
  return row;
}
/**
* Use - add event listeners for changing UI on screen resize
* @param {PaymentDetails} paymentDetails
*/
/**
 * Use - apply theme-dependent styling (black/white contrast text for the
 * redirect message against the merchant's theme color).
 * @param {PaymentDetails} paymentDetails
 */
function initializeEventListeners(paymentDetails) {
  // Compute the contrast color first so an invalid theme fails the same way
  // regardless of whether the node exists.
  var contrastBWColor = invert(paymentDetails.theme, true);
  var redirectMessageNode = document.getElementById(
    "hyper-checkout-status-redirect-message"
  );
  if (redirectMessageNode instanceof HTMLDivElement) {
    redirectMessageNode.style.color = contrastBWColor;
  }
}
/**
 * Adds a CSS class to the first element matching `id` (a CSS selector).
 * Silently does nothing when no matching HTMLElement exists.
 */
function addClass(id, className) {
  var el = document.querySelector(id);
  if (el instanceof HTMLElement) {
    el.classList.add(className);
  }
}
/**
 * Removes a CSS class from the first element matching `id` (a CSS selector).
 * Silently does nothing when no matching HTMLElement exists.
 */
function removeClass(id, className) {
  var el = document.querySelector(id);
  if (el instanceof HTMLElement) {
    el.classList.remove(className);
  }
}
hyperswitch | crates/router/src/core/payment_link/payment_link_status/status.css | .css | {{ css_color_scheme }}
body,
body > div {
height: 100vh;
width: 100vw;
}
body {
font-family: "Montserrat";
background-color: var(--primary-color);
color: #333;
text-align: center;
margin: 0;
padding: 0;
overflow: hidden;
}
body > div {
height: 100vh;
width: 100vw;
overflow: scroll;
display: flex;
flex-flow: column;
align-items: center;
justify-content: center;
}
.hyper-checkout-status-wrap {
display: flex;
flex-flow: column;
font-family: "Montserrat";
width: auto;
min-width: 400px;
max-width: 800px;
background-color: white;
border-radius: 5px;
}
#hyper-checkout-status-header {
max-width: 1200px;
border-radius: 3px;
border-bottom: 1px solid #e6e6e6;
}
#hyper-checkout-status-header,
#hyper-checkout-status-content {
display: flex;
align-items: center;
justify-content: space-between;
font-size: 24px;
font-weight: 600;
padding: 15px 20px;
}
.hyper-checkout-status-amount {
font-family: "Montserrat";
font-size: 35px;
font-weight: 700;
}
.hyper-checkout-status-merchant-logo {
border: 1px solid #e6e6e6;
border-radius: 5px;
padding: 9px;
height: 48px;
width: 48px;
}
#hyper-checkout-status-content {
height: 100%;
flex-flow: column;
min-height: 500px;
align-items: center;
justify-content: center;
}
.hyper-checkout-status-image {
height: 200px;
width: 200px;
}
.hyper-checkout-status-text {
text-align: center;
font-size: 21px;
font-weight: 600;
margin-top: 20px;
}
.hyper-checkout-status-message {
  text-align: center;
  /* The !important declaration always won over the later duplicate
     `font-size: 14px`, which was dead code and has been removed. */
  font-size: 12px !important;
  margin-top: 10px;
  font-weight: 500;
  max-width: 400px;
}
.hyper-checkout-status-details {
display: flex;
flex-flow: column;
margin-top: 20px;
border-radius: 3px;
border: 1px solid #e6e6e6;
max-width: calc(100vw - 40px);
}
.hyper-checkout-status-item {
display: flex;
align-items: center;
padding: 5px 10px;
border-bottom: 1px solid #e6e6e6;
word-wrap: break-word;
}
.hyper-checkout-status-item:last-child {
border-bottom: 0;
}
.hyper-checkout-item-header {
min-width: 13ch;
font-size: 12px;
}
.hyper-checkout-item-value {
font-size: 12px;
overflow-x: hidden;
overflow-y: auto;
word-wrap: break-word;
font-weight: 400;
text-align: center;
}
#hyper-checkout-status-redirect-message {
margin-top: 20px;
font-family: "Montserrat";
font-size: 13px;
}
.hidden {
display: none;
}
.ellipsis-container-2 {
height: 2.5em;
overflow: hidden;
display: -webkit-box;
-webkit-line-clamp: 2;
-webkit-box-orient: vertical;
text-overflow: ellipsis;
white-space: normal;
}
@media only screen and (max-width: 1136px) {
.info {
flex-flow: column;
align-self: flex-start;
align-items: flex-start;
min-width: auto;
}
.value {
margin: 0;
}
} | 944 | 1,719 |
hyperswitch | crates/router/src/core/payment_link/payment_link_initiate/payment_link.css | .css | {{ css_color_scheme }}
html,
body {
height: 100%;
overflow: hidden;
}
body {
display: flex;
flex-flow: column;
align-items: center;
justify-content: flex-start;
margin: 0;
color: #333333;
}
/* Hide scrollbar for Chrome, Safari and Opera */
.hide-scrollbar::-webkit-scrollbar {
display: none;
}
/* Hide scrollbar for IE, Edge and Firefox */
.hide-scrollbar {
-ms-overflow-style: none;
/* IE and Edge */
scrollbar-width: none;
/* Firefox */
}
/* For ellipsis on text lines */
.ellipsis-container-3 {
height: 4em;
overflow: hidden;
display: -webkit-box;
-webkit-line-clamp: 3;
-webkit-box-orient: vertical;
text-overflow: ellipsis;
white-space: normal;
}
.hidden {
display: none !important;
}
.hyper-checkout {
display: flex;
background-color: #f8f9fb;
color: #333333;
width: 100%;
height: 100%;
overflow: scroll;
}
#hyper-footer {
width: 100vw;
display: flex;
justify-content: center;
padding: 20px 0;
}
.main {
display: flex;
flex-flow: column;
justify-content: center;
align-items: center;
min-width: 600px;
width: 50vw;
}
#hyper-checkout-details {
font-family: "Montserrat";
background-repeat: no-repeat;
}
.hyper-checkout-payment {
min-width: 600px;
box-shadow: 0px 0px 5px #d1d1d1;
border-radius: 8px;
background-color: #fefefe;
}
.hyper-checkout-payment-content-details {
display: flex;
flex-flow: column;
justify-content: space-between;
align-content: space-between;
}
.content-details-wrap {
display: flex;
flex-flow: row;
margin: 20px 20px 10px 20px;
justify-content: space-between;
}
#hyper-checkout-payment-merchant-dynamic-details {
margin: 20px 20px 10px 20px;
overflow-y: scroll;
max-width: 35vw;
max-height: 10vh;
}
.hyper-checkout-payment-horizontal-line {
margin: 0px 20px;
height: 2px;
background-color: #e5e5e5;
border: none;
}
.hyper-checkout-payment-price {
font-weight: 700;
font-size: 40px;
height: 64px;
display: flex;
align-items: center;
}
#hyper-checkout-payment-merchant-details {
margin-top: 5px;
}
.hyper-checkout-payment-merchant-name {
font-weight: 600;
font-size: 19px;
}
.hyper-checkout-payment-merchant-dynamic-data {
font-size: 12px;
margin-top: 5px;
}
.hyper-checkout-payment-ref {
font-size: 12px;
margin-top: 5px;
}
.hyper-checkout-image-header {
display: flex;
justify-content: space-between;
align-items: center;
}
#hyper-checkout-merchant-image,
#hyper-checkout-cart-image {
height: 64px;
padding: 0 10px;
border-radius: 4px;
display: flex;
align-self: flex-start;
align-items: center;
justify-content: center;
}
#hyper-checkout-merchant-image > img {
height: 40px;
}
#hyper-checkout-cart-image {
display: none;
cursor: pointer;
height: 60px;
width: 60px;
border-radius: 100px;
background-color: #f5f5f5;
}
#hyper-checkout-payment-footer {
margin-top: 20px;
background-color: #f5f5f5;
font-size: 13px;
font-weight: 500;
padding: 12px 20px;
border-radius: 0 0 8px 8px;
}
#hyper-checkout-cart {
display: flex;
flex-flow: column;
min-width: 600px;
margin-top: 40px;
max-height: 60vh;
}
#hyper-checkout-cart-items {
max-height: 291px;
overflow: scroll;
transition: all 0.3s ease;
}
.hyper-checkout-cart-header {
font-size: 15px;
display: flex;
flex-flow: row;
align-items: center;
}
.hyper-checkout-cart-header > span {
margin-left: 5px;
font-weight: 500;
}
.cart-close {
display: none;
cursor: pointer;
}
.hyper-checkout-cart-item {
display: flex;
flex-flow: row;
padding: 20px 0;
font-size: 15px;
}
.hyper-checkout-cart-product-image {
height: 56px;
width: 56px;
border-radius: 4px;
}
.hyper-checkout-card-item-name {
font-weight: 500;
overflow: hidden;
text-overflow: ellipsis;
-webkit-line-clamp: 2;
display: -webkit-box;
-webkit-box-orient: vertical;
}
.hyper-checkout-card-item-quantity {
border: 1px solid #e6e6e6;
border-radius: 3px;
width: max-content;
padding: 5px 12px;
background-color: #fafafa;
font-size: 13px;
font-weight: 500;
}
.hyper-checkout-cart-product-details {
display: flex;
flex-flow: column;
margin-left: 15px;
justify-content: space-between;
width: 100%;
}
.hyper-checkout-card-item-price {
justify-self: flex-end;
font-weight: 600;
font-size: 16px;
padding-left: 30px;
text-align: end;
min-width: max-content;
}
.hyper-checkout-cart-item-divider {
height: 1px;
background-color: #e6e6e6;
}
.hyper-checkout-cart-button {
font-size: 12px;
font-weight: 500;
cursor: pointer;
align-self: flex-start;
display: flex;
align-content: flex-end;
gap: 3px;
text-decoration: none;
transition: text-decoration 0.3s;
margin-top: 10px;
}
.hyper-checkout-cart-button:hover {
text-decoration: underline;
}
#hyper-checkout-merchant-description {
font-size: 13px;
margin: 10px 0 20px 0;
}
.powered-by-hyper {
margin-top: 40px;
align-self: flex-start;
}
.hyper-checkout-sdk {
width: 50vw;
min-width: 584px;
z-index: 2;
background-color: var(--primary-color);
display: flex;
flex-flow: column;
align-items: center;
justify-content: center;
}
#payment-form-wrap {
min-width: 300px;
width: 30vw;
padding: 20px;
background-color: white;
border-radius: 3px;
}
#hyper-checkout-sdk-header {
padding: 10px 10px 10px 22px;
display: flex;
align-items: flex-start;
justify-content: flex-start;
border-bottom: 1px solid #f2f2f2;
}
.hyper-checkout-sdk-header-logo {
height: 60px;
width: 60px;
background-color: white;
border-radius: 2px;
}
.hyper-checkout-sdk-header-logo > img {
height: 56px;
width: 56px;
margin: 2px;
}
.hyper-checkout-sdk-header-items {
display: flex;
flex-flow: column;
color: white;
font-size: 20px;
font-weight: 700;
}
.hyper-checkout-sdk-items {
margin-left: 10px;
}
.hyper-checkout-sdk-header-brand-name,
.hyper-checkout-sdk-header-amount {
font-size: 18px;
font-weight: 600;
display: flex;
align-items: center;
font-family: "Montserrat";
justify-self: flex-start;
}
.hyper-checkout-sdk-header-amount {
font-weight: 800;
font-size: 25px;
}
.page-spinner {
position: absolute;
width: 100vw;
height: 100vh;
z-index: 3;
background-color: #fff;
display: flex;
align-items: center;
justify-content: center;
}
.sdk-spinner {
width: 100%;
height: 100%;
z-index: 3;
background-color: #fff;
display: flex;
align-items: center;
justify-content: center;
}
.spinner {
width: 60px;
height: 60px;
}
.spinner div {
transform-origin: 30px 30px;
animation: spinner 1.2s linear infinite;
}
.spinner div:after {
content: " ";
display: block;
position: absolute;
top: 3px;
left: 28px;
width: 4px;
height: 15px;
border-radius: 20%;
background: var(--primary-color);
}
.spinner div:nth-child(1) {
transform: rotate(0deg);
animation-delay: -1.1s;
}
.spinner div:nth-child(2) {
transform: rotate(30deg);
animation-delay: -1s;
}
.spinner div:nth-child(3) {
transform: rotate(60deg);
animation-delay: -0.9s;
}
.spinner div:nth-child(4) {
transform: rotate(90deg);
animation-delay: -0.8s;
}
.spinner div:nth-child(5) {
transform: rotate(120deg);
animation-delay: -0.7s;
}
.spinner div:nth-child(6) {
transform: rotate(150deg);
animation-delay: -0.6s;
}
.spinner div:nth-child(7) {
transform: rotate(180deg);
animation-delay: -0.5s;
}
.spinner div:nth-child(8) {
transform: rotate(210deg);
animation-delay: -0.4s;
}
.spinner div:nth-child(9) {
transform: rotate(240deg);
animation-delay: -0.3s;
}
.spinner div:nth-child(10) {
transform: rotate(270deg);
animation-delay: -0.2s;
}
.spinner div:nth-child(11) {
transform: rotate(300deg);
animation-delay: -0.1s;
}
.spinner div:nth-child(12) {
transform: rotate(330deg);
animation-delay: 0s;
}
@keyframes spinner {
0% {
opacity: 1;
}
100% {
opacity: 0;
}
}
#hyper-checkout-status-canvas {
width: 100%;
height: 100%;
justify-content: center;
align-items: center;
background-color: var(--primary-color);
}
.hyper-checkout-status-wrap {
display: flex;
flex-flow: column;
font-family: "Montserrat";
width: auto;
min-width: 400px;
background-color: white;
border-radius: 5px;
}
#hyper-checkout-status-header {
max-width: 1200px;
border-radius: 3px;
border-bottom: 1px solid #e6e6e6;
}
#hyper-checkout-status-header,
#hyper-checkout-status-content {
display: flex;
align-items: center;
justify-content: space-between;
font-size: 24px;
font-weight: 600;
padding: 15px 20px;
}
.hyper-checkout-status-amount {
font-family: "Montserrat";
font-size: 35px;
font-weight: 700;
}
.hyper-checkout-status-merchant-logo {
border: 1px solid #e6e6e6;
border-radius: 5px;
padding: 9px;
height: 48px;
width: 48px;
}
#hyper-checkout-status-content {
height: 100%;
flex-flow: column;
min-height: 500px;
align-items: center;
justify-content: center;
}
.hyper-checkout-status-image {
height: 200px;
width: 200px;
}
.hyper-checkout-status-text {
text-align: center;
font-size: 21px;
font-weight: 600;
margin-top: 20px;
}
.hyper-checkout-status-message {
  text-align: center;
  /* The !important declaration always won over the later duplicate
     `font-size: 14px`, which was dead code and has been removed. */
  font-size: 12px !important;
  margin-top: 10px;
  font-weight: 500;
  max-width: 400px;
}
.hyper-checkout-status-details {
display: flex;
flex-flow: column;
margin-top: 20px;
border-radius: 3px;
border: 1px solid #e6e6e6;
max-width: calc(100vw - 40px);
}
.hyper-checkout-status-item {
display: flex;
align-items: center;
padding: 5px 10px;
border-bottom: 1px solid #e6e6e6;
word-wrap: break-word;
}
.hyper-checkout-status-item:last-child {
border-bottom: 0;
}
.hyper-checkout-item-header {
min-width: 13ch;
font-size: 12px;
}
.hyper-checkout-item-value {
font-size: 12px;
overflow-x: hidden;
overflow-y: auto;
word-wrap: break-word;
font-weight: 400;
}
#hyper-checkout-status-redirect-message {
margin-top: 20px;
font-family: "Montserrat";
font-size: 13px;
}
@keyframes loading {
0% {
-webkit-transform: rotate(0deg);
transform: rotate(0deg);
}
100% {
-webkit-transform: rotate(360deg);
transform: rotate(360deg);
}
}
@keyframes slide-from-right {
from {
right: -582px;
}
to {
right: 0;
}
}
@keyframes slide-to-right {
from {
right: 0;
}
to {
right: -582px;
}
}
#payment-message {
font-size: 12px;
font-weight: 500;
padding: 2%;
color: #ff0000;
font-family: "Montserrat";
}
#payment-form {
max-width: 560px;
width: 100%;
/* min-height: 500px; */
max-height: 90vh;
height: 100%;
overflow: scroll;
margin: 0 auto;
text-align: center;
}
#submit {
cursor: pointer;
margin-top: 20px;
width: 100%;
height: 38px;
border: 0;
border-radius: 4px;
font-size: 18px;
display: flex;
justify-content: center;
align-items: center;
}
#submit.disabled {
cursor: not-allowed;
}
#submit.not-ready {
background-color: #C2C2C2 !important;
}
#submit-spinner {
width: 28px;
height: 28px;
border: 4px solid #fff;
border-bottom-color: #ff3d00;
border-radius: 50%;
display: inline-block;
box-sizing: border-box;
animation: loading 1s linear infinite;
}
@media only screen and (min-width: 1199px) {
#hyper-checkout-merchant-description {
color: #808080;
}
}
@media only screen and (max-width: 1199px) {
body {
overflow-y: scroll;
}
.hyper-checkout {
flex-flow: column;
margin: 0;
height: auto;
overflow: visible;
}
#hyper-checkout-payment-merchant-details {
margin-top: 20px;
}
.main {
width: auto;
min-width: 300px;
}
.hyper-checkout-payment {
min-width: 300px;
width: calc(100vw - 50px);
margin: 0;
padding: 25px;
border: 0;
border-radius: 0;
background-color: var(--primary-color);
display: flex;
flex-flow: column;
justify-self: flex-start;
align-self: flex-start;
}
.hyper-checkout-payment-content-details {
max-width: 520px;
width: 100%;
align-self: center;
margin-bottom: 0;
}
.content-details-wrap {
flex-flow: column;
flex-direction: column-reverse;
margin: 0;
}
#hyper-checkout-payment-merchant-dynamic-details {
flex-flow: column;
flex-direction: column-reverse;
margin: 0;
max-width: 100%;
overflow-y: scroll;
max-height: 10vh;
}
.hyper-checkout-payment-horizontal-line {
margin: 10px 0px;
height: 2px;
border: none;
}
#hyper-checkout-merchant-image {
background-color: white;
}
#hyper-checkout-cart-image {
display: flex;
}
.hyper-checkout-payment-price {
font-size: 48px;
margin-top: 20px;
}
.hyper-checkout-payment-merchant-name {
font-size: 18px;
}
#hyper-checkout-payment-footer {
border-radius: 50px;
width: max-content;
padding: 10px 20px;
}
/* Mobile cart drawer: fills the viewport and slides in from the right.
   (A redundant second `right: 0px` declaration was removed.) */
#hyper-checkout-cart {
  position: absolute;
  top: 0;
  right: 0;
  z-index: 100;
  margin: 0;
  min-width: 300px;
  max-width: 582px;
  max-height: 100vh;
  width: 100vw;
  height: 100vh;
  background-color: #f5f5f5;
  box-shadow: 0px 10px 10px #aeaeae;
  animation: slide-from-right 0.3s linear;
}
.hyper-checkout-cart-header {
margin: 10px 0 0 10px;
}
.cart-close {
margin: 0 10px 0 auto;
display: inline;
}
#hyper-checkout-cart-items {
margin: 20px 20px 0 20px;
padding: 0;
}
.hyper-checkout-cart-button {
margin: 10px;
text-align: right;
}
.powered-by-hyper {
display: none;
}
#hyper-checkout-sdk {
background-color: transparent;
width: auto;
min-width: 300px;
box-shadow: none;
}
#payment-form-wrap {
min-width: 300px;
width: calc(100vw - 40px);
margin: 0;
padding: 25px 20px;
}
#hyper-checkout-status-canvas {
background-color: #fefefe;
}
.hyper-checkout-status-wrap {
min-width: 100vw;
width: 100vw;
}
#hyper-checkout-status-header {
max-width: calc(100% - 40px);
}
}
| 4,697 | 1,720 |
hyperswitch | crates/router/src/core/payment_link/payment_link_initiate/payment_link_initiator.js | .js | // @ts-check
/**
* Trigger - post downloading SDK
* Uses
* - Instantiate SDK
* - Create a payment widget
* - Decide whether or not to show SDK (based on status)
**/
/**
 * Trigger - post downloading SDK.
 * Uses
 * - Instantiate the Hyper SDK and its widgets
 * - Create and mount the unified-checkout payment widget
 * - Decide whether or not to show the SDK (based on status)
 * - Fade out and remove the loading shimmer
 * Fix: the shimmer element is now null-checked — the previous code threw a
 * TypeError on `shimmer.classList` when "payment-details-shimmer" was absent,
 * and `document.body.removeChild` could throw if the node had been detached.
 **/
function initializeSDK() {
  // @ts-ignore
  var paymentDetails = window.__PAYMENT_DETAILS;
  var clientSecret = paymentDetails.client_secret;
  var sdkUiRules = paymentDetails.sdk_ui_rules;
  var appearance = {
    variables: {
      colorPrimary: paymentDetails.theme || "rgb(0, 109, 249)",
      fontFamily: "Work Sans, sans-serif",
      fontSizeBase: "16px",
      colorText: "rgb(51, 65, 85)",
      colorTextSecondary: "#334155B3",
      colorPrimaryText: "rgb(51, 65, 85)",
      colorTextPlaceholder: "#33415550",
      borderColor: "#33415550",
      colorBackground: "rgb(255, 255, 255)",
    },
  };
  // Only accept a plain object for custom UI rules (rejects arrays/instances).
  if (sdkUiRules !== null && typeof sdkUiRules === "object" && Object.getPrototypeOf(sdkUiRules) === Object.prototype) {
    appearance.rules = sdkUiRules;
  }
  // @ts-ignore
  hyper = window.Hyper(pub_key, {
    isPreloadEnabled: false,
    // TODO: Remove in next deployment
    shouldUseTopRedirection: true,
    redirectionFlags: {
      shouldRemoveBeforeUnloadEvents: true,
      shouldUseTopRedirection: true,
    },
  });
  // @ts-ignore
  widgets = hyper.widgets({
    appearance: appearance,
    clientSecret: clientSecret,
    locale: paymentDetails.locale,
  });
  // "spaced_accordion" is an accordion variant; everything else passes through.
  var type =
    paymentDetails.sdk_layout === "spaced_accordion" ||
    paymentDetails.sdk_layout === "accordion"
      ? "accordion"
      : paymentDetails.sdk_layout;
  var hideCardNicknameField = paymentDetails.hide_card_nickname_field;
  var unifiedCheckoutOptions = {
    displaySavedPaymentMethodsCheckbox: false,
    displaySavedPaymentMethods: false,
    layout: {
      type: type, //accordion , tabs, spaced accordion
      spacedAccordionItems: paymentDetails.sdk_layout === "spaced_accordion",
    },
    branding: "never",
    wallets: {
      walletReturnUrl: paymentDetails.return_url,
      style: {
        theme: "dark",
        type: "default",
        height: 55,
      },
    },
    showCardFormByDefault: paymentDetails.show_card_form_by_default,
    hideCardNicknameField: hideCardNicknameField,
    customMessageForCardTerms: paymentDetails.custom_message_for_card_terms,
  };
  // @ts-ignore
  unifiedCheckout = widgets.create("payment", unifiedCheckoutOptions);
  // @ts-ignore
  mountUnifiedCheckout("#unified-checkout");
  // @ts-ignore
  showSDK(paymentDetails.display_sdk_only, paymentDetails.enable_button_only_on_form_ready);
  // Fade out the loading shimmer, then detach it. Guarded: the element may
  // not be present in every rendering of the template.
  var shimmer = document.getElementById("payment-details-shimmer");
  if (shimmer) {
    shimmer.classList.add("reduce-opacity");
    setTimeout(function () {
      if (shimmer.parentNode) {
        shimmer.parentNode.removeChild(shimmer);
      }
    }, 500);
  }
}
/**
* Use - redirect to /payment_link/status
*/
/**
 * Use - redirect to /payment_link/status
 * Rebuilds the current URL as a payment-link status URL and forwards the
 * locale as a query parameter.
 */
function redirectToStatus() {
  // @ts-ignore
  var paymentDetails = window.__PAYMENT_DETAILS;
  var segments = window.location.pathname.split("/");
  // NOTE - This code preserves '/api' in url for integ and sbx
  // e.g. url for integ/sbx - https://integ.hyperswitch.io/api/payment_link/merchant_1234/pay_1234?locale=en
  // e.g. url for others - https://abc.dev.com/payment_link/merchant_1234/pay_1234?locale=en
  var hasApiInPath = segments.includes("api");
  var prefix = hasApiInPath
    ? ["api", "payment_link", "status"]
    : ["payment_link", "status"];
  // Drop the original prefix ("", ["api",] "payment_link"), keep merchant/payment ids.
  var tail = segments.slice(hasApiInPath ? 3 : 2);
  window.location.href =
    window.location.origin +
    "/" +
    prefix.concat(tail).join("/") +
    "?locale=" +
    paymentDetails.locale;
}
| 916 | 1,721 |
hyperswitch | crates/router/src/core/payment_link/payment_link_initiate/secure_payment_link_initiator.js | .js | // @ts-check
// Top level checks
// Detect whether this page is embedded in an iframe. Secure payment links are
// only allowed to run framed; the branch below replaces initializeSDK with a
// "not allowed" message when the page is opened standalone.
var isFramed = false;
try {
  isFramed = window.parent.location !== window.location;
  // If parent's window object is restricted, DOMException is
  // thrown which concludes that the webpage is iframed
} catch (err) {
  isFramed = true;
}
if (!isFramed) {
function initializeSDK() {
var contentElement = document.getElementById("payment-link");
if (contentElement instanceof HTMLDivElement) {
contentElement.innerHTML = translations.notAllowed;
} else {
document.body.innerHTML = translations.notAllowed;
}
}
} else {
/**
* Trigger - post downloading SDK
* Uses
* - Instantiate SDK
* - Create a payment widget
* - Decide whether or not to show SDK (based on status)
**/
function initializeSDK() {
// @ts-ignore
var paymentDetails = window.__PAYMENT_DETAILS;
var clientSecret = paymentDetails.client_secret;
var sdkUiRules = paymentDetails.sdk_ui_rules;
var appearance = {
variables: {
colorPrimary: paymentDetails.theme || "rgb(0, 109, 249)",
fontFamily: "Work Sans, sans-serif",
fontSizeBase: "16px",
colorText: "rgb(51, 65, 85)",
colorTextSecondary: "#334155B3",
colorPrimaryText: "rgb(51, 65, 85)",
colorTextPlaceholder: "#33415550",
borderColor: "#33415550",
colorBackground: "rgb(255, 255, 255)",
},
};
if (sdkUiRules !== null && typeof sdkUiRules === "object" && Object.getPrototypeOf(sdkUiRules) === Object.prototype) {
appearance.rules = sdkUiRules;
}
// @ts-ignore
hyper = window.Hyper(pub_key, {
isPreloadEnabled: false,
// TODO: Remove in next deployment
shouldUseTopRedirection: true,
redirectionFlags: {
shouldRemoveBeforeUnloadEvents: true,
shouldUseTopRedirection: true,
},
});
// @ts-ignore
widgets = hyper.widgets({
appearance: appearance,
clientSecret: clientSecret,
locale: paymentDetails.locale,
});
var type =
paymentDetails.sdk_layout === "spaced_accordion" ||
paymentDetails.sdk_layout === "accordion"
? "accordion"
: paymentDetails.sdk_layout;
var enableSavedPaymentMethod = paymentDetails.enabled_saved_payment_method;
var hideCardNicknameField = paymentDetails.hide_card_nickname_field;
var unifiedCheckoutOptions = {
displaySavedPaymentMethodsCheckbox: enableSavedPaymentMethod,
displaySavedPaymentMethods: enableSavedPaymentMethod,
layout: {
type: type, //accordion , tabs, spaced accordion
spacedAccordionItems: paymentDetails.sdk_layout === "spaced_accordion",
},
branding: "never",
wallets: {
walletReturnUrl: paymentDetails.return_url,
style: {
theme: "dark",
type: "default",
height: 55,
},
},
hideCardNicknameField: hideCardNicknameField,
showCardFormByDefault: paymentDetails.show_card_form_by_default,
customMessageForCardTerms: paymentDetails.custom_message_for_card_terms,
};
// @ts-ignore
unifiedCheckout = widgets.create("payment", unifiedCheckoutOptions);
// @ts-ignore
mountUnifiedCheckout("#unified-checkout");
// @ts-ignore
showSDK(paymentDetails.display_sdk_only, paymentDetails.enable_button_only_on_form_ready);
let shimmer = document.getElementById("payment-details-shimmer");
shimmer.classList.add("reduce-opacity");
setTimeout(() => {
document.body.removeChild(shimmer);
}, 500);
}
/**
* Use - redirect to /payment_link/status
*/
function redirectToStatus() {
var paymentDetails = window.__PAYMENT_DETAILS;
var arr = window.location.pathname.split("/");
// NOTE - This code preserves '/api' in url for integ and sbx envs
// e.g. url for integ/sbx - https://integ.hyperswitch.io/api/payment_link/s/merchant_1234/pay_1234?locale=en
// e.g. url for others - https://abc.dev.com/payment_link/s/merchant_1234/pay_1234?locale=en
var hasApiInPath = arr.includes("api");
if (hasApiInPath) {
arr.splice(0, 4);
arr.unshift("api", "payment_link", "status");
} else {
arr.splice(0, 3);
arr.unshift("payment_link", "status");
}
let returnUrl =
window.location.origin +
"/" +
arr.join("/") +
"?locale=" +
paymentDetails.locale;
try {
window.top.location.href = returnUrl;
// Push logs to logs endpoint
} catch (error) {
var url = window.location.href;
var { paymentId, merchantId, attemptId, connector } = parseRoute(url);
var urlToPost = getEnvRoute(url);
var message = {
message:
"CRITICAL ERROR - Failed to redirect top document. Falling back to redirecting using window.location",
reason: error.message,
};
var log = {
message,
url,
paymentId,
merchantId,
attemptId,
connector,
};
postLog(log, urlToPost);
window.location.href = returnUrl;
}
}
}
| 1,235 | 1,722 |
hyperswitch | crates/router/src/core/payment_link/payment_link_initiate/payment_link.html | .html | <!DOCTYPE html>
<html lang="en">
<head>
<meta charset="UTF-8" />
<meta name="viewport" content="width=device-width, initial-scale=1.0" />
{{rendered_meta_tag_html}}
<title>Payments requested by HyperSwitch</title>
{{ preload_link_tags }}
<link rel="preconnect" href="https://fonts.gstatic.com">
<link rel="stylesheet" href="https://fonts.googleapis.com/css2?family=Montserrat:wght@400;500;600;700;800">
    <style>
      /* Full-viewport loading shimmer overlay shown while payment details render;
         faded out via .reduce-opacity and removed by the initiator script. */
      #payment-details-shimmer{
        min-width: 300px;
        width: 100%;
        margin: 0;
        border: 0;
        border-radius: 0;
        height: 100%;
        position: fixed;
        top: 0;
        left: 0;
        background: linear-gradient(to right, #f6f7f8 0%, #edeef1 50%, #f6f7f8 100%);
        background-size: 300%;
        animation: shimmer 1.5s infinite linear;
        background-position-x: -100%;
        justify-content: center;
        display: flex;
        align-items: center;
        flex-direction: column;
        z-index: 3;
        transition: opacity 0.5s;
        opacity: 1;
      }
      /* Added by JS to fade the shimmer out before removing it from the DOM */
      .reduce-opacity {
        opacity: 0 !important;
      }
      .shine {
        background: #f6f7f8;
        background-image: linear-gradient(to right, #f6f7f8 0%, #edeef1 20%, #f6f7f8 40%, #f6f7f8 100%);
        background-repeat: no-repeat;
        background-size: 800px 104px;
        display: inline-block;
        position: relative;
        justify-content: center;
        display: flex;
        -webkit-animation-duration: 2s;
        -webkit-animation-fill-mode: forwards;
        -webkit-animation-iteration-count: infinite;
        -webkit-animation-name: placeholderShimmer;
        -webkit-animation-timing-function: linear;
      }
      .wrap {
        display: inline-flex;
        margin: 10px;
      }
      /* 'box' and 'lines' are non-standard elements used purely as shimmer
         placeholders in the markup below */
      box {
        height: 104px;
        width: 100px;
        border-radius: 25px;
      }
      lines {
        height: 15px;
        width: 200px;
        border-radius: 25px;
        margin: 7px;
      }
      .line-shimmer > :first-child{
        width: 140px;
      }
      .line-shimmer {
        display: inline-flex;
        flex-direction: column;
        margin-left: 25px;
        margin-top: 15px;
        vertical-align: top;
      }
      @-webkit-keyframes placeholderShimmer {
        0% {
          background-position: -468px 0;
        }
        100% {
          background-position: 468px 0;
        }
      }
    </style>
</head>
<body id="payment-link" class="hide-scrollbar">
<div id="payment-details-shimmer">
<div class = "wrap">
<box class="shine"></box>
<div class="line-shimmer">
<lines class="shine"></lines>
<lines class="shine"></lines>
<lines class="shine"></lines>
</div>
</div>
<div class = "wrap">
<box class="shine"></box>
<div class="line-shimmer">
<lines class="shine"></lines>
<lines class="shine"></lines>
<lines class="shine"></lines>
</div>
</div>
<div class = "wrap">
<box class="shine"></box>
<div class="line-shimmer">
<lines class="shine"></lines>
<lines class="shine"></lines>
<lines class="shine"></lines>
</div>
</div>
</div>
<style>
{{rendered_css}}
</style>
<!-- SVG ICONS -->
<svg xmlns="http://www.w3.org/2000/svg" display="none">
<defs>
<symbol id="cart-icon-small">
<image href="https://live.hyperswitch.io/payment-link-assets/icons/cart-small.svg"/>
</symbol>
<symbol id="cart-icon-big">
<image href="https://live.hyperswitch.io/payment-link-assets/icons/cart-big.svg"/>
</symbol>
<symbol id="cart-close">
<image href="https://live.hyperswitch.io/payment-link-assets/icons/close.svg"/>
</symbol>
<symbol id="hyperswitch-brand">
<image href="https://live.hyperswitch.io/payment-link-assets/icons/powered-by-hyperswitch.svg" opacity="0.4"/>
</symbol>
<symbol id="arrow-down">
<svg
width="16"
height="16"
viewBox="0 0 16 16"
fill="none"
xmlns="http://www.w3.org/2000/svg"
>
<path
d="M8.20573 11.0351C8.33593 11.0351 8.45573 10.9831 8.54948 10.8841L12.5911 6.75911C12.6797 6.66536 12.7266 6.55078 12.7266 6.41536C12.7266 6.14974 12.5234 5.94141 12.2526 5.94141C12.1224 5.94141 12.0026 5.99349 11.9141 6.07682L8.20573 9.86848L4.4974 6.07682C4.40887 5.99349 4.29429 5.94141 4.15882 5.94141C3.88798 5.94141 3.68486 6.14974 3.68486 6.41536C3.68486 6.55078 3.7318 6.66536 3.82549 6.75911L7.86198 10.8841C7.96094 10.9831 8.07551 11.0351 8.20573 11.0351Z"
fill="#333333"
></path>
</svg>
</symbol>
<symbol id="arrow-up">
<svg
width="16"
height="16"
viewBox="0 0 16 16"
fill="none"
xmlns="http://www.w3.org/2000/svg"
>
<path
d="M7.79427 4.96485C7.66407 4.96485 7.54427 5.01694 7.45052 5.11587L3.40886 9.24089C3.32032 9.33464 3.27344 9.44922 3.27344 9.58464C3.27344 9.85026 3.47657 10.0586 3.7474 10.0586C3.87761 10.0586 3.9974 10.0065 4.08594 9.92318L7.79427 6.13152L11.5026 9.92318C11.5911 10.0065 11.7057 10.0586 11.8412 10.0586C12.112 10.0586 12.3151 9.85026 12.3151 9.58464C12.3151 9.44922 12.2682 9.33464 12.1745 9.24089L8.13802 5.11587C8.03906 5.01694 7.92449 4.96485 7.79427 4.96485Z"
fill="#333333"
></path>
</svg>
</symbol>
</defs>
</svg>
<div id="page-spinner" class="page-spinner hidden">
<div class="spinner">
<div></div>
<div></div>
<div></div>
<div></div>
<div></div>
<div></div>
<div></div>
<div></div>
<div></div>
<div></div>
<div></div>
<div></div>
</div>
</div>
<div id="hyper-checkout" class="hyper-checkout hide-scrollbar">
<div class="main hidden" id="hyper-checkout-status-canvas">
<div class="hyper-checkout-status-wrap">
<div id="hyper-checkout-status-header"></div>
<div id="hyper-checkout-status-content"></div>
</div>
<div id="hyper-checkout-status-redirect-message"></div>
</div>
<div class="main checkout-page" id="hyper-checkout-details">
<div id="hyper-checkout-payment" class="hyper-checkout-payment">
<div class="hyper-checkout-payment-content-details">
<div class="content-details-wrap">
<div id="hyper-checkout-payment-context">
<div id="hyper-checkout-payment-merchant-details"></div>
</div>
<div class="hyper-checkout-image-header">
<div id="hyper-checkout-merchant-image"></div>
<div
id="hyper-checkout-cart-image"
onclick="viewCartInMobileView()"
>
<svg
width="30"
height="30"
viewBox="0 0 30 30"
fill="none"
xmlns="http://www.w3.org/2000/svg"
class="cart-icon"
>
<use
xlink:href="#cart-icon-big"
x="0"
y="0"
width="30"
height="30"
></use>
</svg>
</div>
</div>
</div>
<div id="hyper-checkout-payment-horizontal-line-container"></div>
<div id="hyper-checkout-payment-merchant-dynamic-details" class="hidden"></div>
<div id="hyper-checkout-payment-footer"></div>
</div>
</div>
<div id="hyper-checkout-cart" class="">
<div
id="hyper-checkout-cart-header"
class="hyper-checkout-cart-header"
>
<svg
width="16"
height="16"
viewBox="0 0 16 16"
fill="none"
xmlns="http://www.w3.org/2000/svg"
class="cart-icon"
>
<use
xlink:href="#cart-icon-small"
x="0"
y="0"
width="16"
height="16"
></use>
</svg>
<span id="your-cart-text"></span>
<svg
xmlns="http://www.w3.org/2000/svg"
viewBox="0 0 50 50"
width="25"
height="25"
class="cart-close"
onclick="hideCartInMobileView()"
>
<use
xlink:href="#cart-close"
x="0"
y="0"
width="50"
height="50"
></use>
</svg>
</div>
<div id="hyper-checkout-cart-items" class="hide-scrollbar"></div>
</div>
</div>
<div class="hyper-checkout-sdk" id="hyper-checkout-sdk">
<div id="payment-form-wrap">
<div id="sdk-spinner" class="sdk-spinner">
<div class="spinner">
<div></div>
<div></div>
<div></div>
<div></div>
<div></div>
<div></div>
<div></div>
<div></div>
<div></div>
<div></div>
<div></div>
<div></div>
</div>
</div>
<form id="payment-form">
<div id="unified-checkout"></div>
<button type="submit" id="submit" class="hidden">
<span id="submit-spinner" class="hidden"></span>
<span id="submit-button-text"></span>
</button>
<div id="payment-message" class="hidden"></div>
</form>
</div>
</div>
</div>
<script>
{{logging_template}}
{{locale_template}}
{{rendered_js}}
{{payment_link_initiator}}
</script>
{{ hyperloader_sdk_link }}
</body>
</html>
| 3,277 | 1,723 |
hyperswitch | crates/router/src/core/payment_link/payment_link_initiate/payment_link.js | .js | // @ts-nocheck
/**
* UTIL FUNCTIONS
*/
/**
 * Scales the lightness of a "#rrggbb" color by `factor`, clamping the
 * resulting lightness to [0, 100].
 * Round-trips hex -> RGB -> HSL -> (scaled L) -> RGB -> hex.
 */
function adjustLightness(hexColor, factor) {
  var red = parseInt(hexColor.slice(1, 3), 16);
  var green = parseInt(hexColor.slice(3, 5), 16);
  var blue = parseInt(hexColor.slice(5, 7), 16);
  var hsl = rgbToHsl(red, green, blue);
  var scaledLightness = Math.max(0, Math.min(100, hsl[2] * factor));
  var rgb = hslToRgb(hsl[0], hsl[1], scaledLightness);
  return rgbToHex(rgb[0], rgb[1], rgb[2]);
}
/**
 * Converts RGB (0-255 per channel) to HSL as [hue 0-360, sat 0-100, light 0-100].
 * Achromatic inputs (max == min) yield hue 0 and saturation 0.
 */
function rgbToHsl(r, g, b) {
  var rn = r / 255;
  var gn = g / 255;
  var bn = b / 255;
  var max = Math.max(rn, gn, bn);
  var min = Math.min(rn, gn, bn);
  var l = (max + min) / 2;
  var h = 0;
  var s = 0;
  if (max !== min) {
    var d = max - min;
    s = l > 0.5 ? d / (2 - max - min) : d / (max + min);
    if (max === rn) {
      h = (gn - bn) / d + (gn < bn ? 6 : 0);
    } else if (max === gn) {
      h = (bn - rn) / d + 2;
    } else {
      h = (rn - gn) / d + 4;
    }
    h /= 6;
  }
  return [h * 360, s * 100, l * 100];
}
/**
 * Converts HSL (hue 0-360, sat 0-100, light 0-100) back to RGB, returning
 * [r, g, b] with each channel in 0-255 (possibly fractional; callers round).
 */
function hslToRgb(h, s, l) {
  var hue = h / 360;
  var sat = s / 100;
  var light = l / 100;
  if (sat === 0) {
    // Achromatic: all channels equal the lightness
    var gray = light * 255;
    return [gray, gray, gray];
  }
  var channel = function (p, q, t) {
    if (t < 0) t += 1;
    if (t > 1) t -= 1;
    if (t < 1 / 6) return p + (q - p) * 6 * t;
    if (t < 1 / 2) return q;
    if (t < 2 / 3) return p + (q - p) * (2 / 3 - t) * 6;
    return p;
  };
  var q = light < 0.5 ? light * (1 + sat) : light + sat - light * sat;
  var p = 2 * light - q;
  return [
    channel(p, q, hue + 1 / 3) * 255,
    channel(p, q, hue) * 255,
    channel(p, q, hue - 1 / 3) * 255,
  ];
}
/**
 * Formats RGB channels (rounded to nearest integer) as a "#rrggbb" hex string.
 */
function rgbToHex(r, g, b) {
  var parts = [r, g, b].map(function (channel) {
    var hex = Math.round(channel).toString(16);
    return hex.length < 2 ? "0" + hex : hex;
  });
  return "#" + parts.join("");
}
/**
* Ref - https://github.com/onury/invert-color/blob/master/lib/cjs/invert.js
*/
/**
 * Left-pads `str` with zeros to width `len` (default 2). Note: also truncates
 * strings longer than `len` to their last `len` characters — callers only
 * pass 1-2 character hex digits.
 */
function padz(str, len) {
  var width = len === void 0 ? 2 : len;
  var padded = new Array(width).join("0") + str;
  return padded.slice(-width);
}
/**
 * Parses a 3- or 6-digit hex color (with or without "#") into [r, g, b].
 * @throws {Error} when the string is not a valid hex color
 */
function hexToRgbArray(hex) {
  var value = hex.slice(0, 1) === "#" ? hex.slice(1) : hex;
  var RE_HEX = /^(?:[0-9a-f]{3}){1,2}$/i;
  if (!RE_HEX.test(value)) throw new Error('Invalid HEX color: "' + value + '"');
  if (value.length === 3) {
    // Expand shorthand "abc" -> "aabbcc"
    value = value
      .split("")
      .map(function (ch) {
        return ch + ch;
      })
      .join("");
  }
  return [0, 2, 4].map(function (i) {
    return parseInt(value.slice(i, i + 2), 16);
  });
}
/**
 * Normalizes a color (array, hex string, or {r,g,b} object) into an RGB array.
 * @throws {Error} on falsy input
 */
function toRgbArray(c) {
  if (!c) throw new Error("Invalid color value");
  if (Array.isArray(c)) return c;
  if (typeof c === "string") return hexToRgbArray(c);
  return [c.r, c.g, c.b];
}
/**
 * Computes relative luminance (0-1) of an [r, g, b] array using the WCAG
 * sRGB linearization and Rec. 709 channel weights.
 */
function getLuminance(c) {
  var linear = [];
  for (var i = 0; i < c.length; i++) {
    var x = c[i] / 255;
    linear[i] = x <= 0.03928 ? x / 12.92 : Math.pow((x + 0.055) / 1.055, 2.4);
  }
  return 0.2126 * linear[0] + 0.7152 * linear[1] + 0.0722 * linear[2];
}
/**
 * Picks near-black or near-white for maximum contrast against `color`.
 * `bw` may be `true` (use defaults) or an options object overriding
 * black/white/threshold; `asArr` returns the pick as an RGB array.
 */
function invertToBW(color, bw, asArr) {
  var DEFAULT_BW = {
    black: "#090302",
    white: "#FFFFFC",
    threshold: Math.sqrt(1.05 * 0.05) - 0.05,
  };
  var options = bw === true ? DEFAULT_BW : Object.assign({}, DEFAULT_BW, bw);
  // Bright backgrounds get the dark text color, and vice versa
  var pick =
    getLuminance(color) > options.threshold ? options.black : options.white;
  return asArr ? hexToRgbArray(pick) : pick;
}
/**
 * Inverts a color. With `bw` truthy, returns a black/white contrast pick via
 * invertToBW; otherwise returns the per-channel complement as "#rrggbb".
 */
function invert(color, bw) {
  if (bw === void 0) {
    bw = false;
  }
  var rgb = toRgbArray(color);
  if (bw) return invertToBW(rgb, bw);
  var hex = "";
  for (var i = 0; i < rgb.length; i++) {
    hex += padz((255 - rgb[i]).toString(16));
  }
  return "#" + hex;
}
/**
* UTIL FUNCTIONS END HERE
*/
// @ts-ignore
{{ payment_details_js_script }}
// @ts-ignore
// Viewport/screen state used by the resize handler to detect transitions
// across the 1199px web <-> mobile breakpoint.
window.state = {
  prevHeight: window.innerHeight,
  prevWidth: window.innerWidth,
  isMobileView: window.innerWidth <= 1199,
  currentScreen: "payment_link",
};
// SDK handles, populated by initializeSDK()
var widgets = null;
var unifiedCheckout = null;
// @ts-ignore
var pub_key = window.__PAYMENT_DETAILS.pub_key;
var hyper = null;
const translations = getTranslations(window.__PAYMENT_DETAILS.locale);
// Detect whether this page is embedded in an iframe
var isFramed = false;
try {
  isFramed = window.parent.location !== window.location;
  // If parent's window object is restricted, DOMException is
  // thrown which concludes that the webpage is iframed
} catch (err) {
  isFramed = true;
}
/**
* Trigger - on boot
* Use - emit latest payment status to parent window
*/
function emitPaymentStatus(paymentDetails) {
  var message = {
    payment: {
      status: paymentDetails.status,
    }
  };
  // NOTE(review): targetOrigin is "*" — any embedding origin can receive the
  // payment status; confirm this is intended before restricting.
  window.parent.postMessage(message, "*");
}
/**
* Trigger - init function invoked once the script tag is loaded
* Use
* - Update document's title
* - Update document's icon
* - Render and populate document with payment details and cart
* - Initialize event listeners for updating UI on screen size changes
* - Initialize SDK
**/
function boot() {
  // @ts-ignore
  var paymentDetails = window.__PAYMENT_DETAILS;
  // Emit latest payment status
  if (isFramed) {
    emitPaymentStatus(paymentDetails);
  }
  if (paymentDetails.display_sdk_only) {
    // SDK-only mode: hide the checkout page and let the SDK fill the viewport
    hide(".checkout-page")
    var sdkDisplayWidth = document.querySelector('.hyper-checkout-sdk');
    sdkDisplayWidth.style.width = '100vw';
  }
  else {
    var orderDetails = paymentDetails.order_details;
    if (orderDetails !== null) {
      // Sum per-item totals; any remainder vs. the payment amount is appended
      // as a synthetic "miscellaneous charges" cart entry
      var charges = 0;
      for (var i = 0; i < orderDetails.length; i++) {
        charges += parseFloat(orderDetails[i].amount * orderDetails[i].quantity);
      }
      orderDetails.push({
        "amount": (paymentDetails.amount - charges).toFixed(2),
        "product_img_link": "https://live.hyperswitch.io/payment-link-assets/cart_placeholder.png",
        "product_name": translations.miscellaneousCharges + "\n" +
          translations.miscellaneousChargesDetail,
        "quantity": null
      });
    }
  }
  if (paymentDetails.merchant_name) {
    document.title = "Payment requested by " + paymentDetails.merchant_name;
  }
  if (paymentDetails.merchant_logo) {
    // Use the merchant logo as the page favicon
    var link = document.createElement("link");
    link.rel = "icon";
    link.href = paymentDetails.merchant_logo;
    link.type = "image/x-icon";
    document.head.appendChild(link);
  }
  // Render UI
  if (paymentDetails.display_sdk_only) {
    renderSDKHeader(paymentDetails);
    renderBranding(paymentDetails);
  }
  else {
    renderBackgroundImage(paymentDetails);
    renderPaymentDetails(paymentDetails);
    renderDynamicMerchantDetails(paymentDetails);
    renderCart(paymentDetails);
    renderDescription(paymentDetails);
    renderBranding(paymentDetails);
    renderSDKHeader(paymentDetails);
  }
  // Deal w loaders
  show("#sdk-spinner");
  hide("#page-spinner");
  hide("#unified-checkout");
  // Add event listeners
  initializeEventListeners(paymentDetails);
  // Update payment link styles
  var paymentLinkUiRules = paymentDetails.payment_link_ui_rules;
  if (isObject(paymentLinkUiRules)) {
    updatePaymentLinkUi(paymentLinkUiRules);
  }
  // Initialize SDK
  // @ts-ignore
  if (window.Hyper) {
    initializeSDK();
  }
  // State specific functions
  // @ts-ignore
  if (window.state.isMobileView) {
    show("#hyper-footer");
    hide("#hyper-checkout-cart");
  } else {
    show("#hyper-checkout-cart");
  }
}
boot();
/**
* Use - checks if a given value is an object
* @param {any} val
* @returns {boolean}
*/
/**
 * Use - checks if a given value is a plain object (direct Object.prototype);
 * arrays, null, class instances and null-prototype objects all return false.
 * @param {any} val
 * @returns {boolean}
 */
function isObject(val) {
  if (val === null || typeof val !== "object") return false;
  return Object.getPrototypeOf(val) === Object.prototype;
}
/**
* Use - add event listeners for changing UI on
* - Screen resize
* - Form inputs
* @param {PaymentDetails} paymentDetails
*/
/**
 * Wires up UI event handling and applies theme-derived colors:
 * - styles the pay button, cart icon and payment footer from the merchant theme
 * - switches web <-> mobile styling across the 1199px breakpoint on resize
 * - intercepts the payment form submit
 * Fix: removed a dead `document.createElement("div")` assignment that was
 * immediately shadowed by the `getElementById('submit-button-text')`
 * redeclaration and never attached to the DOM.
 * @param {PaymentDetails} paymentDetails
 */
function initializeEventListeners(paymentDetails) {
  var primaryColor = paymentDetails.theme;
  var lighterColor = adjustLightness(primaryColor, 1.4);
  var darkerColor = adjustLightness(primaryColor, 0.8);
  var contrastBWColor = invert(primaryColor, true);
  // If lightening saturated towards white (many f's), use the darker tone
  var a = lighterColor.match(/[fF]/gi);
  var contrastingTone =
    Array.isArray(a) && a.length > 4 ? darkerColor : lighterColor;
  var hyperCheckoutNode = document.getElementById("hyper-checkout-payment");
  var hyperCheckoutCartImageNode = document.getElementById(
    "hyper-checkout-cart-image"
  );
  var hyperCheckoutFooterNode = document.getElementById(
    "hyper-checkout-payment-footer"
  );
  var submitButtonNode = document.getElementById("submit");
  var submitButtonLoaderNode = document.getElementById("submit-spinner");
  if (submitButtonLoaderNode instanceof HTMLSpanElement) {
    submitButtonLoaderNode.style.borderBottomColor = contrastingTone;
  }
  // Get locale for pay now
  var payNowButtonText = document.getElementById('submit-button-text');
  if (payNowButtonText) {
    payNowButtonText.textContent = paymentDetails.payment_button_text || translations.payNow;
  }
  if (submitButtonNode instanceof HTMLButtonElement) {
    var chosenColor = paymentDetails.payment_button_colour || primaryColor;
    submitButtonNode.style.color = paymentDetails.payment_button_text_colour || invert(chosenColor, true);
    submitButtonNode.style.backgroundColor = chosenColor;
  }
  if (hyperCheckoutCartImageNode instanceof HTMLDivElement) {
    hyperCheckoutCartImageNode.style.backgroundColor = contrastingTone;
  }
  // Initial styling based on the current viewport width
  if (window.innerWidth <= 1199) {
    if (hyperCheckoutNode instanceof HTMLDivElement) {
      hyperCheckoutNode.style.color = contrastBWColor;
    }
    if (hyperCheckoutFooterNode instanceof HTMLDivElement) {
      hyperCheckoutFooterNode.style.backgroundColor = contrastingTone;
    }
  } else if (window.innerWidth > 1199) {
    if (hyperCheckoutNode instanceof HTMLDivElement) {
      hyperCheckoutNode.style.color = "#333333";
    }
    if (hyperCheckoutFooterNode instanceof HTMLDivElement) {
      hyperCheckoutFooterNode.style.backgroundColor = "#F5F5F5";
    }
  }
  // @ts-ignore
  window.addEventListener("resize", function (event) {
    var currentHeight = window.innerHeight;
    var currentWidth = window.innerWidth;
    // @ts-ignore
    if (currentWidth <= 1199 && window.state.prevWidth > 1199) {
      hide("#hyper-checkout-cart");
      // @ts-ignore
      if (window.state.currentScreen === "payment_link") {
        show("#hyper-footer");
      }
      try {
        if (hyperCheckoutNode instanceof HTMLDivElement) {
          hyperCheckoutNode.style.color = contrastBWColor;
        }
        if (hyperCheckoutFooterNode instanceof HTMLDivElement) {
          // NOTE(review): initial mobile styling uses contrastingTone but this
          // resize path uses lighterColor — confirm which is intended
          hyperCheckoutFooterNode.style.backgroundColor = lighterColor;
        }
      } catch (error) {
        console.error("Failed to fetch primary-color, using default", error);
      }
      // @ts-ignore
    } else if (currentWidth > 1199 && window.state.prevWidth <= 1199) {
      // @ts-ignore
      if (window.state.currentScreen === "payment_link") {
        hide("#hyper-footer");
      }
      show("#hyper-checkout-cart");
      try {
        if (hyperCheckoutNode instanceof HTMLDivElement) {
          hyperCheckoutNode.style.color = "#333333";
        }
        if (hyperCheckoutFooterNode instanceof HTMLDivElement) {
          hyperCheckoutFooterNode.style.backgroundColor = "#F5F5F5";
        }
      } catch (error) {
        console.error("Failed to revert back to default colors", error);
      }
    }
    // @ts-ignore
    window.state.prevHeight = currentHeight;
    // @ts-ignore
    window.state.prevWidth = currentWidth;
    // @ts-ignore
    window.state.isMobileView = currentWidth <= 1199;
  });
  var paymentForm = document.getElementById("payment-form");
  if (paymentForm instanceof HTMLFormElement) {
    paymentForm.addEventListener("submit", function (event) {
      event.preventDefault();
      handleSubmit(event);
    })
  }
  if (paymentDetails.enable_button_only_on_form_ready) {
    handleFormReadyForSubmission();
  }
}
/**
 * Listens for `isFormReadyForSubmission` messages (posted into this window)
 * and enables/disables the pay button accordingly. Only strict true/false
 * values change the button; anything else is ignored.
 */
function handleFormReadyForSubmission() {
  window.addEventListener("message", function (event) {
    // Event listener for updating the button rules
    // NOTE(review): `!== null` lets `undefined` (key absent) through; the
    // strict === checks below make that a no-op, but `!== undefined` may be
    // what was intended — confirm.
    if (isObject(event.data) && event.data["isFormReadyForSubmission"] !== null) {
      let isFormReadyForSubmission = event.data["isFormReadyForSubmission"];
      var submitButtonNode = document.getElementById("submit");
      if (submitButtonNode instanceof HTMLButtonElement) {
        if (isFormReadyForSubmission === false) {
          submitButtonNode.disabled = true;
          addClass("#submit", "not-ready");
          addClass("#submit", "disabled");
        } else if (isFormReadyForSubmission === true) {
          submitButtonNode.disabled = false;
          removeClass("#submit", "not-ready");
          removeClass("#submit", "disabled");
        }
      }
    }
  });
}
/**
* Trigger - post mounting SDK
* Use - set relevant classes to elements in the doc for showing SDK
**/
/**
 * Trigger - post mounting SDK
 * Use - reveal the SDK area (and, unless SDK-only, the details pane), swap the
 * spinner for the checkout widget, and optionally start the pay button in a
 * disabled "not-ready" state until the form reports readiness.
 **/
function showSDK(display_sdk_only, enable_button_only_on_form_ready) {
  if (!display_sdk_only) {
    show("#hyper-checkout-details");
  }
  show("#hyper-checkout-sdk");
  if (enable_button_only_on_form_ready) {
    var submitButtonNode = document.getElementById("submit");
    addClass("#submit", "not-ready");
    addClass("#submit", "disabled");
    if (submitButtonNode instanceof HTMLButtonElement) {
      submitButtonNode.disabled = true;
    }
  }
  show("#submit");
  show("#unified-checkout");
  hide("#sdk-spinner");
}
/**
* Use - mount payment widget on the passed element
* @param {String} id
**/
/**
 * Use - mount payment widget on the passed element
 * No-op until the unified checkout widget has been created.
 * @param {String} id
 **/
function mountUnifiedCheckout(id) {
  if (unifiedCheckout === null) return;
  unifiedCheckout.mount(id);
}
/**
* Trigger - on clicking submit button
* Uses
* - Trigger /payment/confirm through SDK
* - Toggle UI loaders appropriately
* - Handle errors and redirect to status page
* @param {Event} e
*/
// @ts-ignore
function handleSubmit(e) {
  // @ts-ignore
  var paymentDetails = window.__PAYMENT_DETAILS;
  // Update button loader
  hide("#submit-button-text");
  show("#submit-spinner");
  addClass("#submit", "processing");
  addClass("#submit", "disabled");
  var submitButtonNode = document.getElementById("submit");
  if (submitButtonNode instanceof HTMLButtonElement) {
    submitButtonNode.disabled = true;
  }
  hyper
    .confirmPayment({
      widgets: widgets,
      confirmParams: {
        // Make sure to change this to your payment completion page
        return_url: paymentDetails.return_url,
      },
    })
    .then(function (result) {
      var error = result.error;
      if (error) {
        // Validation errors are user-fixable; everything else gets a generic message
        if (error.type === "validation_error") {
          showMessage(error.message);
        } else {
          showMessage(translations.unexpectedError);
        }
      } else if (paymentDetails.skip_status_screen) {
        // Form query params
        var queryParams = {
          payment_id: paymentDetails.payment_id,
          status: result.status
        };
        var url = new URL(paymentDetails.return_url);
        var params = new URLSearchParams(url.search);
        // Attach query params to return_url
        for (var key in queryParams) {
          if (queryParams.hasOwnProperty(key)) {
            params.set(key, queryParams[key]);
          }
        }
        url.search = params.toString();
        // Redirect the top document straight to the merchant's return_url
        window.top.location.href = url.toString();
      } else {
        redirectToStatus();
      }
    })
    .catch(function (error) {
      console.error("Error confirming payment_intent", error);
    })
    .finally(() => {
      // Restore the pay button regardless of outcome
      removeClass("#submit", "processing");
      hide("#submit-spinner");
      show("#submit-button-text");
      removeClass("#submit", "disabled");
      if (submitButtonNode instanceof HTMLButtonElement) {
        submitButtonNode.disabled = false;
      }
    });
}
// Reveals the element(s) matching the selector by removing the "hidden" class.
function show(id) {
  removeClass(id, "hidden");
}
// Hides the element(s) matching the selector by adding the "hidden" class.
function hide(id) {
  addClass(id, "hidden");
}
// Reveals the payment message banner and sets its text.
function showMessage(msg) {
  show("#payment-message");
  addText("#payment-message", msg);
}
/**
 * Sets the inner text of the element matching the given selector.
 * Fix: guard against a null querySelector result (consistent with addClass /
 * removeClass), which previously threw a TypeError when the element was absent.
 * @param {String} id - CSS selector
 * @param {String} msg
 */
function addText(id, msg) {
  var element = document.querySelector(id);
  if (element instanceof HTMLElement) {
    element.innerText = msg;
  }
}
// Adds a CSS class to the element matching the selector, if it exists.
function addClass(id, className) {
  var node = document.querySelector(id);
  if (!(node instanceof HTMLElement)) return;
  node.classList.add(className);
}
// Removes a CSS class from the element matching the selector, if it exists.
function removeClass(id, className) {
  var node = document.querySelector(id);
  if (!(node instanceof HTMLElement)) return;
  node.classList.remove(className);
}
/**
 * Use - format date in "hh:mm AM/PM timezone MM DD, YYYY"
 * All components are taken from the LOCAL timezone.
 * Fix: year now uses getFullYear (local) — previously getUTCFullYear was mixed
 * with local day/month, rendering the wrong year around New Year in non-UTC
 * timezones.
 * @param {Date} date
 **/
function formatDate(date) {
  var months = [
    "January",
    "February",
    "March",
    "April",
    "May",
    "June",
    "July",
    "August",
    "September",
    "October",
    "November",
    "December",
  ];
  var hours = date.getHours();
  var minutes = date.getMinutes();
  // Zero-pad minutes (value becomes a string)
  // @ts-ignore
  minutes = minutes < 10 ? "0" + minutes : minutes;
  var suffix = hours > 11 ? "PM" : "AM";
  hours = hours % 12;
  hours = hours ? hours : 12; // 0 and 12 both display as 12
  var day = date.getDate();
  var month = months[date.getMonth()];
  // Local year, consistent with the local day/month above
  var year = date.getFullYear();
  // @ts-ignore
  var locale = navigator.language || navigator.userLanguage;
  // Initials of the long timezone name, e.g. "India Standard Time" -> "IST"
  var timezoneShorthand = date
    .toLocaleDateString(locale, {
      day: "2-digit",
      timeZoneName: "long",
    })
    .substring(4)
    .split(" ")
    .reduce(function (tz, c) {
      return tz + c.charAt(0).toUpperCase();
    }, "");
  var formatted =
    hours +
    ":" +
    minutes +
    " " +
    suffix +
    " " +
    timezoneShorthand +
    " " +
    month +
    " " +
    day +
    ", " +
    year;
  return formatted;
}
/**
* Trigger - on boot
* Uses
* - Render payment related details (header bit)
* - Amount
* - Merchant's name
* - Expiry
* @param {PaymentDetails} paymentDetails
**/
function renderPaymentDetails(paymentDetails) {
  // Create price node
  var priceNode = document.createElement("div");
  priceNode.className = "hyper-checkout-payment-price";
  priceNode.innerText = paymentDetails.currency + " " + paymentDetails.amount;
  // Create merchant name's node
  var merchantNameNode = document.createElement("div");
  merchantNameNode.className = "hyper-checkout-payment-merchant-name";
  merchantNameNode.innerText = translations.requestedBy + paymentDetails.merchant_name;
  // Create payment ID node
  var paymentIdNode = document.createElement("div");
  paymentIdNode.className = "hyper-checkout-payment-ref";
  paymentIdNode.innerText = translations.refId + paymentDetails.payment_id;
  // Create merchant logo's node
  var merchantLogoNode = document.createElement("img");
  merchantLogoNode.src = paymentDetails.merchant_logo;
  merchantLogoNode.setAttribute("height", "48");
  // Create expiry node (formatted in the viewer's local timezone)
  var paymentExpiryNode = document.createElement("div");
  paymentExpiryNode.className = "hyper-checkout-payment-footer-expiry";
  var expiryDate = new Date(paymentDetails.session_expiry);
  var formattedDate = formatDate(expiryDate);
  paymentExpiryNode.innerText = translations.expiresOn + formattedDate;
  // Append information to DOM; each container is guarded since the layout
  // may omit sections
  var paymentContextNode = document.getElementById(
    "hyper-checkout-payment-context"
  );
  if (paymentContextNode instanceof HTMLDivElement) {
    paymentContextNode.prepend(priceNode);
  }
  var paymentMerchantDetails = document.getElementById(
    "hyper-checkout-payment-merchant-details"
  );
  if (paymentMerchantDetails instanceof HTMLDivElement) {
    paymentMerchantDetails.append(merchantNameNode);
    paymentMerchantDetails.append(paymentIdNode);
  }
  var merchantImageNode = document.getElementById(
    "hyper-checkout-merchant-image"
  );
  if (merchantImageNode instanceof HTMLDivElement) {
    merchantImageNode.prepend(merchantLogoNode);
  }
  var footerNode = document.getElementById("hyper-checkout-payment-footer");
  if (footerNode instanceof HTMLDivElement) {
    footerNode.append(paymentExpiryNode);
  }
}
/**
 * Renders merchant-configured transaction details into the dynamic details
 * section, if that container exists in the current layout.
 * @param {PaymentDetails} paymentDetails
 */
function renderDynamicMerchantDetails(paymentDetails) {
  var container = document.getElementById(
    "hyper-checkout-payment-merchant-dynamic-details"
  );
  if (!(container instanceof HTMLDivElement)) return;
  // add dynamic merchant details in the payment details section if present
  appendMerchantDetails(paymentDetails, container);
}
/**
 * Sorts and renders `transaction_details` key/value pairs into the container.
 * Items are ordered by `ui_configuration.position` (nulls last); at most 50
 * items are rendered. No-op when transaction_details is absent or empty; any
 * rendering error is logged and swallowed.
 * @param {PaymentDetails} paymentDetails
 * @param {HTMLDivElement} merchantDynamicDetails
 */
function appendMerchantDetails(paymentDetails, merchantDynamicDetails) {
  if (
    !(
      Array.isArray(paymentDetails.transaction_details) &&
      paymentDetails.transaction_details.length > 0
    )
  ) {
    return;
  }
  try {
    let merchantDetailsObject = paymentDetails.transaction_details;
    // sort the merchant details based on the position
    // if position is null, then it will be shown at the end
    merchantDetailsObject.sort((a, b) => {
      if (a.ui_configuration === null || a.ui_configuration.position === null)
        return 1;
      if (b.ui_configuration === null || b.ui_configuration.position === null)
        return -1;
      if (typeof a.ui_configuration.position === "number" && typeof b.ui_configuration.position === "number") {
        return a.ui_configuration.position - b.ui_configuration.position;
      }
      else return 0;
    });
    if (merchantDetailsObject.length > 0) {
      // show the dynamic merchant details container
      show("#hyper-checkout-payment-merchant-dynamic-details");
      // set min-height for the dynamic merchant details container
      merchantDynamicDetails.style.minHeight = "80px";
      // render a horizontal line above dynamic merchant details
      var horizontalLineContainer = document.getElementById(
        "hyper-checkout-payment-horizontal-line-container",
      );
      var horizontalLine = document.createElement("hr");
      horizontalLine.className = "hyper-checkout-payment-horizontal-line";
      horizontalLineContainer.append(horizontalLine);
      // max number of items to show in the merchant details
      let maxItemsInDetails = 50;
      for (var item of merchantDetailsObject) {
        var merchantData = document.createElement("div");
        merchantData.className = "hyper-checkout-payment-merchant-dynamic-data";
        // make the key and value bold if specified in the ui_configuration
        var key = item.ui_configuration
          ? item.ui_configuration.is_key_bold
            ? item.key.bold()
            : item.key
          : item.key;
        var value = item.ui_configuration
          ? item.ui_configuration.is_value_bold
            ? item.value.bold()
            : item.value
          : item.value;
        // NOTE(review): key/value are injected via innerHTML — presumably
        // merchant-configured (trusted) data; confirm it cannot carry
        // untrusted markup before relying on this.
        merchantData.innerHTML = key + " : " + value;
        merchantDynamicDetails.append(merchantData);
        if (--maxItemsInDetails === 0) {
          break;
        }
      }
    }
  }
  catch (error) {
    console.error("Error parsing merchant details", error);
  }
}
/**
* Uses
* - Creates and appends description below the cart section (LAYOUT 1 / DEFAULT LAYOUT specification)
* @param {String} merchantDescription
*/
/**
 * Uses
 * - Creates and appends description below the cart section (LAYOUT 1 / DEFAULT LAYOUT specification)
 * @param {String} merchantDescription
 */
function renderDefaultLayout(merchantDescription) {
  var cartNode = document.getElementById("hyper-checkout-cart");
  if (!(cartNode instanceof HTMLDivElement)) return;
  var descriptionNode = document.createElement("div");
  descriptionNode.id = "hyper-checkout-merchant-description";
  descriptionNode.innerText = merchantDescription;
  cartNode.appendChild(descriptionNode);
  show("#hyper-checkout-merchant-description");
}
/**
* Uses
* - Renders description in the appropriate section based on the specified layout
* @param {PaymentDetails} paymentDetails
*/
/**
 * Uses
 * - Renders description in the appropriate section based on the specified layout.
 *   "layout2" places it above the merchant details; "layout1" and any unknown
 *   value fall back to the default (below-cart) layout.
 * @param {PaymentDetails} paymentDetails
 */
function renderDescription(paymentDetails) {
  var description = paymentDetails.merchant_description;
  if (typeof description !== "string" || description.length === 0) return;
  if (paymentDetails.details_layout === "layout2") {
    var contextNode = document.getElementById("hyper-checkout-payment-context");
    if (contextNode instanceof HTMLDivElement) {
      var descriptionNode = document.createElement("div");
      descriptionNode.id = "hyper-checkout-merchant-description";
      descriptionNode.innerText = description;
      var merchantDetailsNode = document.getElementById("hyper-checkout-payment-merchant-details");
      if (merchantDetailsNode instanceof HTMLDivElement) {
        contextNode.insertBefore(descriptionNode, merchantDetailsNode);
        show("#hyper-checkout-merchant-description");
      }
    }
  } else {
    renderDefaultLayout(description);
  }
}
/**
 * Uses
 * - Builds a wrapper div containing the HyperSwitch branding SVG
 * @param {String} wrapperId id assigned to the wrapper element
 * @returns {HTMLDivElement} the branding wrapper node
 */
function createHyperSwitchBrandingSVGElement(wrapperId) {
  var wrapper = document.createElement("div");
  wrapper.id = wrapperId;
  // References the inline #hyperswitch-brand symbol defined elsewhere in the page
  wrapper.innerHTML =
    '<svg class="fill-current" height="18" width="130"><use xlink:href="#hyperswitch-brand" x="0" y="0" height="18" width="130"></use></svg>';
  return wrapper;
}
/**
 * Uses
 * - Appends HyperSwitch branding for web (below cart) and mobile
 *   (document body) views, unless branding visibility is disabled
 * @param {PaymentDetails} paymentDetails
 */
function renderBranding(paymentDetails) {
  // Branding is shown unless explicitly disabled
  if (paymentDetails.branding_visibility === false) {
    return;
  }
  // Web view: branding sits below the cart section
  var cartNode = document.getElementById("hyper-checkout-cart");
  if (cartNode instanceof HTMLDivElement) {
    cartNode.appendChild(createHyperSwitchBrandingSVGElement("powered-by-hyper"));
  }
  // Mobile view: branding is appended to the document body
  document.body.appendChild(createHyperSwitchBrandingSVGElement("hyper-footer"));
  if (!window.state.isMobileView) {
    hide("#hyper-footer");
  }
}
/**
 * Uses
 * - Applies the configured background image (url, size, position)
 *   to the payment details section
 * @param {PaymentDetails} paymentDetails
 */
function renderBackgroundImage(paymentDetails) {
  var bg = paymentDetails.background_image;
  if (typeof bg !== "object" || bg === null) {
    return;
  }
  var detailsNode = document.getElementById("hyper-checkout-details");
  if (!(detailsNode instanceof HTMLDivElement)) {
    return;
  }
  detailsNode.style.backgroundImage = "url(" + bg.url + ")";
  // size and position are optional overrides
  if (typeof bg.size === "string") {
    detailsNode.style.backgroundSize = bg.size;
  }
  if (typeof bg.position === "string") {
    detailsNode.style.backgroundPosition = bg.position;
  }
}
/**
 * Trigger - on boot
 * Uses
 * - Render cart wrapper and items
 * - Attaches an onclick event for toggling expand on the items list
 * @param {PaymentDetails} paymentDetails
 **/
function renderCart(paymentDetails) {
  var orderDetails = paymentDetails.order_details;
  // Cart items
  if (Array.isArray(orderDetails) && orderDetails.length > 0) {
    var cartNode = document.getElementById("hyper-checkout-cart");
    var cartItemsNode = document.getElementById("hyper-checkout-cart-items");
    var MAX_ITEMS_VISIBLE_AFTER_COLLAPSE =
      paymentDetails.max_items_visible_after_collapse;
    // Localize the "Your cart" header text.
    // Fix: a detached <span> used to be created here and was immediately
    // shadowed by this lookup — that dead createElement has been removed.
    var yourCartText = document.getElementById("your-cart-text");
    if (yourCartText) {
      yourCartText.textContent = translations.yourCart;
    }
    // Render only the items visible before the list is expanded
    orderDetails.map(function (item, index) {
      if (index >= MAX_ITEMS_VISIBLE_AFTER_COLLAPSE) {
        return;
      }
      renderCartItem(
        item,
        paymentDetails,
        // divider before every visible item except the first
        index !== 0 && index < MAX_ITEMS_VISIBLE_AFTER_COLLAPSE,
        // @ts-ignore
        cartItemsNode
      );
    });
    // Expand / collapse button — only needed when some items are hidden
    var totalItems = orderDetails.length;
    if (totalItems > MAX_ITEMS_VISIBLE_AFTER_COLLAPSE) {
      var expandButtonNode = document.createElement("div");
      expandButtonNode.className = "hyper-checkout-cart-button";
      expandButtonNode.onclick = () => {
        handleCartView(paymentDetails);
      };
      var buttonImageNode = document.createElement("svg");
      buttonImageNode.id = "hyper-checkout-cart-button-arrow";
      var arrowDownImage = document.getElementById("arrow-down");
      if (arrowDownImage instanceof Object) {
        buttonImageNode.innerHTML = arrowDownImage.innerHTML;
      }
      var buttonTextNode = document.createElement("span");
      buttonTextNode.id = "hyper-checkout-cart-button-text";
      var hiddenItemsCount =
        orderDetails.length - MAX_ITEMS_VISIBLE_AFTER_COLLAPSE;
      buttonTextNode.innerText = translations.showMore + " (" + hiddenItemsCount + ")";
      expandButtonNode.append(buttonTextNode, buttonImageNode);
      if (cartNode instanceof HTMLDivElement) {
        cartNode.insertBefore(expandButtonNode, cartNode.lastElementChild);
      }
    }
  } else {
    // No order details — hide the whole cart section
    hide("#hyper-checkout-cart-header");
    hide("#hyper-checkout-cart-items");
    hide("#hyper-checkout-cart-image");
  }
}
/**
 * Trigger - on cart render
 * Uses
 * - Renders a single cart item which includes
 *   - Product image
 *   - Product name
 *   - Quantity (only when present)
 *   - Single item amount
 * @param {OrderDetailsWithAmount} item
 * @param {PaymentDetails} paymentDetails
 * @param {boolean} shouldAddDividerNode
 * @param {HTMLDivElement} cartItemsNode
 **/
function renderCartItem(
  item,
  paymentDetails,
  shouldAddDividerNode,
  cartItemsNode,
) {
  // Wrappers
  var itemWrapper = document.createElement("div");
  itemWrapper.className = "hyper-checkout-cart-item";
  var productDetailsWrapper = document.createElement("div");
  productDetailsWrapper.className = "hyper-checkout-cart-product-details";
  // Product image
  var imageNode = document.createElement("img");
  imageNode.setAttribute("width", 56);
  imageNode.setAttribute("height", 56);
  imageNode.className = "hyper-checkout-cart-product-image";
  imageNode.src = item.product_img_link;
  // Product title
  var nameNode = document.createElement("div");
  nameNode.className = "hyper-checkout-card-item-name";
  nameNode.innerText = item.product_name;
  // Product quantity — rendered only when the item carries one
  var quantityNode = null;
  if (item.quantity !== null) {
    quantityNode = document.createElement("div");
    quantityNode.className = "hyper-checkout-card-item-quantity";
    quantityNode.innerText = translations.quantity + ": " + item.quantity;
  }
  // Product price
  var priceNode = document.createElement("div");
  priceNode.className = "hyper-checkout-card-item-price";
  priceNode.innerText = paymentDetails.currency + " " + item.amount;
  // Assemble the item
  productDetailsWrapper.append(nameNode);
  if (quantityNode) {
    productDetailsWrapper.append(quantityNode);
  }
  itemWrapper.append(imageNode, productDetailsWrapper, priceNode);
  // Optional divider between items
  if (shouldAddDividerNode) {
    var dividerNode = document.createElement("div");
    dividerNode.className = "hyper-checkout-cart-item-divider";
    cartItemsNode.append(dividerNode);
  }
  cartItemsNode.append(itemWrapper);
}
/**
 * Trigger - on toggling expansion of cart list
 * Uses
 * - Render or delete items based on current state of the rendered cart list
 * - Collapse is animated: DOM removal is deferred until the height
 *   transition has had time to run (see the setTimeout calls below)
 * @param {PaymentDetails} paymentDetails
 **/
function handleCartView(paymentDetails) {
  var orderDetails = paymentDetails.order_details;
  var MAX_ITEMS_VISIBLE_AFTER_COLLAPSE =
    paymentDetails.max_items_visible_after_collapse;
  var itemsHTMLCollection = document.getElementsByClassName(
    "hyper-checkout-cart-item"
  );
  var dividerHTMLCollection = document.getElementsByClassName(
    "hyper-checkout-cart-item-divider"
  );
  // Snapshot the live HTMLCollections into plain arrays so that the
  // removeChild calls below don't mutate them while iterating
  var cartItems = [].slice.call(itemsHTMLCollection);
  var dividerItems = [].slice.call(dividerHTMLCollection);
  // Fewer rendered items than order items => the list is currently collapsed
  var isHidden = cartItems.length < orderDetails.length;
  var cartItemsNode = document.getElementById("hyper-checkout-cart-items");
  var cartButtonTextNode = document.getElementById(
    "hyper-checkout-cart-button-text"
  );
  var cartButtonImageNode = document.getElementById(
    "hyper-checkout-cart-button-arrow"
  );
  if (isHidden) {
    // Expand: render the items that were hidden beyond the collapse limit
    if (Array.isArray(orderDetails)) {
      orderDetails.map(function (item, index) {
        if (index < MAX_ITEMS_VISIBLE_AFTER_COLLAPSE) {
          return;
        }
        renderCartItem(
          item,
          paymentDetails,
          index >= MAX_ITEMS_VISIBLE_AFTER_COLLAPSE,
          cartItemsNode
        );
      });
    }
    // Grow the container to fit all items (drives the expand animation)
    if (cartItemsNode instanceof HTMLDivElement) {
      cartItemsNode.style.maxHeight = cartItemsNode.scrollHeight + "px";
      cartItemsNode.style.height = cartItemsNode.scrollHeight + "px";
    }
    if (cartButtonTextNode instanceof HTMLSpanElement) {
      cartButtonTextNode.innerText = translations.showLess;
    }
    // Flip the toggle arrow to point up
    var arrowUpImage = document.getElementById("arrow-up");
    if (
      cartButtonImageNode instanceof Object &&
      arrowUpImage instanceof Object
    ) {
      cartButtonImageNode.innerHTML = arrowUpImage.innerHTML;
    }
  } else {
    // Collapse: shrink the container first, then remove the overflow
    // items once the animation has run (300ms)
    if (cartItemsNode instanceof HTMLDivElement) {
      cartItemsNode.style.maxHeight = "300px";
      cartItemsNode.style.height = "290px";
      cartItemsNode.scrollTo({ top: 0, behavior: "smooth" });
      setTimeout(function () {
        // Remove items past the collapse limit
        cartItems.map(function (item, index) {
          if (index < MAX_ITEMS_VISIBLE_AFTER_COLLAPSE) {
            return;
          }
          if (cartItemsNode instanceof HTMLDivElement) {
            cartItemsNode.removeChild(item);
          }
        });
        // One fewer divider than items remains after collapse
        dividerItems.map(function (item, index) {
          if (index < MAX_ITEMS_VISIBLE_AFTER_COLLAPSE - 1) {
            return;
          }
          if (cartItemsNode instanceof HTMLDivElement) {
            cartItemsNode.removeChild(item);
          }
        });
      }, 300);
    }
    // Update the toggle button label and arrow slightly before the
    // item removal above fires (250ms vs 300ms)
    setTimeout(function () {
      var hiddenItemsCount =
        orderDetails.length - MAX_ITEMS_VISIBLE_AFTER_COLLAPSE;
      if (cartButtonTextNode instanceof HTMLSpanElement) {
        cartButtonTextNode.innerText = translations.showMore + " (" + hiddenItemsCount + ")";
      }
      var arrowDownImage = document.getElementById("arrow-down");
      if (
        cartButtonImageNode instanceof Object &&
        arrowDownImage instanceof Object
      ) {
        cartButtonImageNode.innerHTML = arrowDownImage.innerHTML;
      }
    }, 250);
  }
}
/**
 * Use - hide cart when in mobile view
 **/
function hideCartInMobileView() {
  // Pop the "view-cart" history entry pushed when the cart was opened
  window.history.back();
  var cart = document.getElementById("hyper-checkout-cart");
  if (cart instanceof HTMLDivElement) {
    // Slide the cart off-screen to the right
    cart.style.animation = "slide-to-right 0.3s linear";
    cart.style.right = "-582px";
  }
  // Hide once the slide animation has finished
  setTimeout(() => {
    hide("#hyper-checkout-cart");
  }, 300);
}
/**
 * Use - show cart when in mobile view
 **/
function viewCartInMobileView() {
  // Push a history entry so the device back button closes the cart
  window.history.pushState("view-cart", "");
  var cart = document.getElementById("hyper-checkout-cart");
  if (cart instanceof HTMLDivElement) {
    // Slide the cart in from the right
    cart.style.animation = "slide-from-right 0.3s linear";
    cart.style.right = "0px";
  }
  show("#hyper-checkout-cart");
}
/**
 * Trigger - on boot
 * Uses
 * - Render SDK header node
 *   - merchant's name
 *   - currency + amount
 * @param {PaymentDetails} paymentDetails
 **/
function renderSDKHeader(paymentDetails) {
  // SDK headers' items
  var itemsNode = document.createElement("div");
  itemsNode.className = "hyper-checkout-sdk-items";
  var merchantNameNode = document.createElement("div");
  merchantNameNode.className = "hyper-checkout-sdk-header-brand-name";
  merchantNameNode.innerText = paymentDetails.merchant_name;
  var amountNode = document.createElement("div");
  amountNode.className = "hyper-checkout-sdk-header-amount";
  amountNode.innerText = paymentDetails.currency + " " + paymentDetails.amount;
  itemsNode.append(merchantNameNode);
  itemsNode.append(amountNode);
  // Append to SDK header's node
  var headerNode = document.getElementById("hyper-checkout-sdk-header");
  if (headerNode instanceof HTMLDivElement) {
    headerNode.append(itemsNode);
  }
}
/**
 * Trigger - post UI render
 * Use - applies merchant-supplied CSS rules to the payment link DOM
 * @param {Object} paymentLinkUiRules map of CSS selector -> style map
 */
function updatePaymentLinkUi(paymentLinkUiRules) {
  for (var selector of Object.keys(paymentLinkUiRules)) {
    try {
      var node = document.querySelector(selector);
      if (!(node instanceof HTMLElement)) {
        continue;
      }
      var styles = paymentLinkUiRules[selector];
      for (var property of Object.keys(styles)) {
        node.style[property] = styles[property];
      }
    } catch (error) {
      // querySelector throws on malformed selectors; keep applying the rest
      console.error("Failed to apply styles to selector", selector, error);
    }
  }
}
hyperswitch | crates/router/src/core/user/sample_data.rs | .rs | use api_models::user::sample_data::SampleDataRequest;
use common_utils::errors::ReportSwitchExt;
use diesel_models::{DisputeNew, RefundNew};
use error_stack::ResultExt;
use hyperswitch_domain_models::payments::PaymentIntent;
pub type SampleDataApiResponse<T> = SampleDataResult<ApplicationResponse<T>>;
use crate::{
core::errors::sample_data::{SampleDataError, SampleDataResult},
routes::{app::ReqState, SessionState},
services::{authentication::UserFromToken, ApplicationResponse},
utils,
};
/// Generates sample payment data (payment intents, payment attempts,
/// refunds and disputes) for the merchant in the token and persists it
/// in batches.
#[cfg(feature = "v1")]
pub async fn generate_sample_data_for_user(
    state: SessionState,
    user_from_token: UserFromToken,
    req: SampleDataRequest,
    _req_state: ReqState,
) -> SampleDataApiResponse<()> {
    let sample_data = utils::user::sample_data::generate_sample_data(
        &state,
        req,
        &user_from_token.merchant_id,
        &user_from_token.org_id,
    )
    .await?;

    // A missing merchant key store is unexpected at this point, so any
    // failure here is surfaced as an internal server error.
    let key_store = state
        .store
        .get_merchant_key_store_by_merchant_id(
            &(&state).into(),
            &user_from_token.merchant_id,
            &state.store.get_master_key().to_vec().into(),
        )
        .await
        .change_context(SampleDataError::InternalServerError)
        .attach_printable("Not able to fetch merchant key store")?;

    // Split the generated tuples into per-entity batches; refunds and
    // disputes are optional per record.
    let mut payment_intents: Vec<PaymentIntent> = Vec::new();
    let mut payment_attempts: Vec<diesel_models::user::sample_data::PaymentAttemptBatchNew> =
        Vec::new();
    let mut refunds: Vec<RefundNew> = Vec::new();
    let mut disputes: Vec<DisputeNew> = Vec::new();
    for (payment_intent, payment_attempt, refund, dispute) in sample_data {
        payment_intents.push(payment_intent);
        payment_attempts.push(payment_attempt);
        if let Some(refund) = refund {
            refunds.push(refund);
        }
        if let Some(dispute) = dispute {
            disputes.push(dispute);
        }
    }

    // Persist each batch; intents first since the other entities refer to them.
    state
        .store
        .insert_payment_intents_batch_for_sample_data(&(&state).into(), payment_intents, &key_store)
        .await
        .switch()?;
    state
        .store
        .insert_payment_attempts_batch_for_sample_data(payment_attempts)
        .await
        .switch()?;
    state
        .store
        .insert_refunds_batch_for_sample_data(refunds)
        .await
        .switch()?;
    state
        .store
        .insert_disputes_batch_for_sample_data(disputes)
        .await
        .switch()?;

    Ok(ApplicationResponse::StatusOk)
}
/// Deletes all previously generated sample data (payment intents,
/// payment attempts, refunds and disputes) for the merchant in the token.
#[cfg(feature = "v1")]
pub async fn delete_sample_data_for_user(
    state: SessionState,
    user_from_token: UserFromToken,
    _req: SampleDataRequest,
    _req_state: ReqState,
) -> SampleDataApiResponse<()> {
    let merchant_id_del = user_from_token.merchant_id;
    let key_manager_state = &(&state).into();

    // A missing merchant key store is unexpected at this point, so any
    // failure here is surfaced as an internal server error.
    let key_store = state
        .store
        .get_merchant_key_store_by_merchant_id(
            key_manager_state,
            &merchant_id_del,
            &state.store.get_master_key().to_vec().into(),
        )
        .await
        .change_context(SampleDataError::InternalServerError)
        .attach_printable("Not able to fetch merchant key store")?;

    // Remove each entity's sample rows for this merchant.
    state
        .store
        .delete_payment_intents_for_sample_data(key_manager_state, &merchant_id_del, &key_store)
        .await
        .switch()?;
    state
        .store
        .delete_payment_attempts_for_sample_data(&merchant_id_del)
        .await
        .switch()?;
    state
        .store
        .delete_refunds_for_sample_data(&merchant_id_del)
        .await
        .switch()?;
    state
        .store
        .delete_disputes_for_sample_data(&merchant_id_del)
        .await
        .switch()?;

    Ok(ApplicationResponse::StatusOk)
}
| 934 | 1,725 |
hyperswitch | crates/router/src/core/user/dashboard_metadata.rs | .rs | use std::str::FromStr;
use api_models::user::dashboard_metadata::{self as api, GetMultipleMetaDataPayload};
#[cfg(feature = "email")]
use common_enums::EntityType;
use common_utils::pii;
use diesel_models::{
enums::DashboardMetadata as DBEnum, user::dashboard_metadata::DashboardMetadata,
};
use error_stack::{report, ResultExt};
#[cfg(feature = "email")]
use masking::ExposeInterface;
use masking::PeekInterface;
#[cfg(feature = "email")]
use router_env::logger;
use crate::{
core::errors::{UserErrors, UserResponse, UserResult},
routes::{app::ReqState, SessionState},
services::{authentication::UserFromToken, ApplicationResponse},
types::domain::{self, user::dashboard_metadata as types, MerchantKeyStore},
utils::user::dashboard_metadata as utils,
};
#[cfg(feature = "email")]
use crate::{services::email::types as email_types, utils::user::theme as theme_utils};
/// Parses the incoming request into a typed metadata value and persists
/// it under the corresponding dashboard-metadata key for this user.
pub async fn set_metadata(
    state: SessionState,
    user: UserFromToken,
    request: api::SetMetaDataRequest,
    _req_state: ReqState,
) -> UserResponse<()> {
    let value = parse_set_request(request)?;
    // The DB key is derived directly from the metadata variant.
    let key = DBEnum::from(&value);
    insert_metadata(&state, user, key, value).await?;
    Ok(ApplicationResponse::StatusOk)
}
/// Fetches the requested dashboard metadata entries for the user.
///
/// Entries that are missing from the database but derivable from other
/// state (see `utils::is_backfill_required`) are backfilled on the fly;
/// everything else is served from the fetched rows.
pub async fn get_multiple_metadata(
    state: SessionState,
    user: UserFromToken,
    request: GetMultipleMetaDataPayload,
    _req_state: ReqState,
) -> UserResponse<Vec<api::GetMetaDataResponse>> {
    let metadata_keys: Vec<DBEnum> = request.results.into_iter().map(parse_get_request).collect();
    let metadata = fetch_metadata(&state, &user, metadata_keys.clone()).await?;
    let mut response = Vec::with_capacity(metadata_keys.len());
    for key in metadata_keys {
        let data = metadata.iter().find(|ele| ele.data_key == key);
        // Idiomatic `let … = if … else …` expression instead of a
        // deferred `let resp;` plus branch assignments.
        let resp = if data.is_none() && utils::is_backfill_required(key) {
            let backfill_data = backfill_metadata(&state, &user, &key).await?;
            into_response(backfill_data.as_ref(), key)?
        } else {
            into_response(data, key)?
        };
        response.push(resp);
    }
    Ok(ApplicationResponse::Json(response))
}
/// Converts an incoming `SetMetaDataRequest` into the typed `MetaData`
/// value that is stored in the dashboard-metadata table.
///
/// Flag-style variants (e.g. `SetupComplete`) carry no payload and are
/// stored as `true`; payload-carrying variants pass their request data
/// through unchanged.
///
/// # Errors
/// Returns `UserErrors::InternalServerError` when a production agreement
/// request arrives without an IP address.
fn parse_set_request(data_enum: api::SetMetaDataRequest) -> UserResult<types::MetaData> {
    match data_enum {
        api::SetMetaDataRequest::ProductionAgreement(req) => {
            // The IP address is recorded alongside the agreement; its
            // absence indicates an upstream extraction failure.
            let ip_address = req
                .ip_address
                .ok_or(report!(UserErrors::InternalServerError))
                .attach_printable("Error Getting Ip Address")?;
            Ok(types::MetaData::ProductionAgreement(
                types::ProductionAgreementValue {
                    version: req.version,
                    ip_address,
                    // Agreement acceptance time is stamped server-side.
                    timestamp: common_utils::date_time::now(),
                },
            ))
        }
        api::SetMetaDataRequest::SetupProcessor(req) => Ok(types::MetaData::SetupProcessor(req)),
        api::SetMetaDataRequest::ConfigureEndpoint => Ok(types::MetaData::ConfigureEndpoint(true)),
        api::SetMetaDataRequest::SetupComplete => Ok(types::MetaData::SetupComplete(true)),
        api::SetMetaDataRequest::FirstProcessorConnected(req) => {
            Ok(types::MetaData::FirstProcessorConnected(req))
        }
        api::SetMetaDataRequest::SecondProcessorConnected(req) => {
            Ok(types::MetaData::SecondProcessorConnected(req))
        }
        api::SetMetaDataRequest::ConfiguredRouting(req) => {
            Ok(types::MetaData::ConfiguredRouting(req))
        }
        api::SetMetaDataRequest::TestPayment(req) => Ok(types::MetaData::TestPayment(req)),
        api::SetMetaDataRequest::IntegrationMethod(req) => {
            Ok(types::MetaData::IntegrationMethod(req))
        }
        api::SetMetaDataRequest::ConfigurationType(req) => {
            Ok(types::MetaData::ConfigurationType(req))
        }
        api::SetMetaDataRequest::IntegrationCompleted => {
            Ok(types::MetaData::IntegrationCompleted(true))
        }
        api::SetMetaDataRequest::SPRoutingConfigured(req) => {
            Ok(types::MetaData::SPRoutingConfigured(req))
        }
        api::SetMetaDataRequest::Feedback(req) => Ok(types::MetaData::Feedback(req)),
        api::SetMetaDataRequest::ProdIntent(req) => Ok(types::MetaData::ProdIntent(req)),
        api::SetMetaDataRequest::SPTestPayment => Ok(types::MetaData::SPTestPayment(true)),
        api::SetMetaDataRequest::DownloadWoocom => Ok(types::MetaData::DownloadWoocom(true)),
        api::SetMetaDataRequest::ConfigureWoocom => Ok(types::MetaData::ConfigureWoocom(true)),
        api::SetMetaDataRequest::SetupWoocomWebhook => {
            Ok(types::MetaData::SetupWoocomWebhook(true))
        }
        api::SetMetaDataRequest::IsMultipleConfiguration => {
            Ok(types::MetaData::IsMultipleConfiguration(true))
        }
        api::SetMetaDataRequest::IsChangePasswordRequired => {
            Ok(types::MetaData::IsChangePasswordRequired(true))
        }
        api::SetMetaDataRequest::OnboardingSurvey(req) => {
            Ok(types::MetaData::OnboardingSurvey(req))
        }
        api::SetMetaDataRequest::ReconStatus(req) => Ok(types::MetaData::ReconStatus(req)),
    }
}
/// Maps an incoming `GetMetaDataRequest` variant to the database enum
/// used as the lookup key in the dashboard-metadata table (1:1 mapping,
/// infallible).
fn parse_get_request(data_enum: api::GetMetaDataRequest) -> DBEnum {
    match data_enum {
        api::GetMetaDataRequest::ProductionAgreement => DBEnum::ProductionAgreement,
        api::GetMetaDataRequest::SetupProcessor => DBEnum::SetupProcessor,
        api::GetMetaDataRequest::ConfigureEndpoint => DBEnum::ConfigureEndpoint,
        api::GetMetaDataRequest::SetupComplete => DBEnum::SetupComplete,
        api::GetMetaDataRequest::FirstProcessorConnected => DBEnum::FirstProcessorConnected,
        api::GetMetaDataRequest::SecondProcessorConnected => DBEnum::SecondProcessorConnected,
        api::GetMetaDataRequest::ConfiguredRouting => DBEnum::ConfiguredRouting,
        api::GetMetaDataRequest::TestPayment => DBEnum::TestPayment,
        api::GetMetaDataRequest::IntegrationMethod => DBEnum::IntegrationMethod,
        api::GetMetaDataRequest::ConfigurationType => DBEnum::ConfigurationType,
        api::GetMetaDataRequest::IntegrationCompleted => DBEnum::IntegrationCompleted,
        api::GetMetaDataRequest::StripeConnected => DBEnum::StripeConnected,
        api::GetMetaDataRequest::PaypalConnected => DBEnum::PaypalConnected,
        api::GetMetaDataRequest::SPRoutingConfigured => DBEnum::SpRoutingConfigured,
        api::GetMetaDataRequest::Feedback => DBEnum::Feedback,
        api::GetMetaDataRequest::ProdIntent => DBEnum::ProdIntent,
        api::GetMetaDataRequest::SPTestPayment => DBEnum::SpTestPayment,
        api::GetMetaDataRequest::DownloadWoocom => DBEnum::DownloadWoocom,
        api::GetMetaDataRequest::ConfigureWoocom => DBEnum::ConfigureWoocom,
        api::GetMetaDataRequest::SetupWoocomWebhook => DBEnum::SetupWoocomWebhook,
        api::GetMetaDataRequest::IsMultipleConfiguration => DBEnum::IsMultipleConfiguration,
        api::GetMetaDataRequest::IsChangePasswordRequired => DBEnum::IsChangePasswordRequired,
        api::GetMetaDataRequest::OnboardingSurvey => DBEnum::OnboardingSurvey,
        api::GetMetaDataRequest::ReconStatus => DBEnum::ReconStatus,
    }
}
/// Converts an optional stored metadata row into the API response for
/// the given key.
///
/// Two shapes exist:
/// - presence-flag keys (e.g. `SetupComplete`) respond with
///   `data.is_some()` — the mere existence of a row means "done";
/// - payload keys (e.g. `Feedback`) deserialize the stored JSON via
///   `utils::deserialize_to_response`.
///
/// # Errors
/// Propagates deserialization failures from
/// `utils::deserialize_to_response` for payload-carrying keys.
fn into_response(
    data: Option<&DashboardMetadata>,
    data_type: DBEnum,
) -> UserResult<api::GetMetaDataResponse> {
    match data_type {
        DBEnum::ProductionAgreement => Ok(api::GetMetaDataResponse::ProductionAgreement(
            data.is_some(),
        )),
        DBEnum::SetupProcessor => {
            let resp = utils::deserialize_to_response(data)?;
            Ok(api::GetMetaDataResponse::SetupProcessor(resp))
        }
        DBEnum::ConfigureEndpoint => {
            Ok(api::GetMetaDataResponse::ConfigureEndpoint(data.is_some()))
        }
        DBEnum::SetupComplete => Ok(api::GetMetaDataResponse::SetupComplete(data.is_some())),
        DBEnum::FirstProcessorConnected => {
            let resp = utils::deserialize_to_response(data)?;
            Ok(api::GetMetaDataResponse::FirstProcessorConnected(resp))
        }
        DBEnum::SecondProcessorConnected => {
            let resp = utils::deserialize_to_response(data)?;
            Ok(api::GetMetaDataResponse::SecondProcessorConnected(resp))
        }
        DBEnum::ConfiguredRouting => {
            let resp = utils::deserialize_to_response(data)?;
            Ok(api::GetMetaDataResponse::ConfiguredRouting(resp))
        }
        DBEnum::TestPayment => {
            let resp = utils::deserialize_to_response(data)?;
            Ok(api::GetMetaDataResponse::TestPayment(resp))
        }
        DBEnum::IntegrationMethod => {
            let resp = utils::deserialize_to_response(data)?;
            Ok(api::GetMetaDataResponse::IntegrationMethod(resp))
        }
        DBEnum::ConfigurationType => {
            let resp = utils::deserialize_to_response(data)?;
            Ok(api::GetMetaDataResponse::ConfigurationType(resp))
        }
        DBEnum::IntegrationCompleted => Ok(api::GetMetaDataResponse::IntegrationCompleted(
            data.is_some(),
        )),
        DBEnum::StripeConnected => {
            let resp = utils::deserialize_to_response(data)?;
            Ok(api::GetMetaDataResponse::StripeConnected(resp))
        }
        DBEnum::PaypalConnected => {
            let resp = utils::deserialize_to_response(data)?;
            Ok(api::GetMetaDataResponse::PaypalConnected(resp))
        }
        DBEnum::SpRoutingConfigured => {
            let resp = utils::deserialize_to_response(data)?;
            Ok(api::GetMetaDataResponse::SPRoutingConfigured(resp))
        }
        DBEnum::Feedback => {
            let resp = utils::deserialize_to_response(data)?;
            Ok(api::GetMetaDataResponse::Feedback(resp))
        }
        DBEnum::ProdIntent => {
            let resp = utils::deserialize_to_response(data)?;
            Ok(api::GetMetaDataResponse::ProdIntent(resp))
        }
        DBEnum::SpTestPayment => Ok(api::GetMetaDataResponse::SPTestPayment(data.is_some())),
        DBEnum::DownloadWoocom => Ok(api::GetMetaDataResponse::DownloadWoocom(data.is_some())),
        DBEnum::ConfigureWoocom => Ok(api::GetMetaDataResponse::ConfigureWoocom(data.is_some())),
        DBEnum::SetupWoocomWebhook => {
            Ok(api::GetMetaDataResponse::SetupWoocomWebhook(data.is_some()))
        }
        DBEnum::IsMultipleConfiguration => Ok(api::GetMetaDataResponse::IsMultipleConfiguration(
            data.is_some(),
        )),
        DBEnum::IsChangePasswordRequired => Ok(api::GetMetaDataResponse::IsChangePasswordRequired(
            data.is_some(),
        )),
        DBEnum::OnboardingSurvey => {
            let resp = utils::deserialize_to_response(data)?;
            Ok(api::GetMetaDataResponse::OnboardingSurvey(resp))
        }
        DBEnum::ReconStatus => {
            let resp = utils::deserialize_to_response(data)?;
            Ok(api::GetMetaDataResponse::ReconStatus(resp))
        }
    }
}
/// Persists a typed metadata value under `metadata_key`.
///
/// Persistence strategy varies by variant:
/// - most variants are merchant-scoped inserts;
/// - `Feedback` and `IsChangePasswordRequired` are user-scoped;
/// - upsert-like variants (`IntegrationMethod`, `ConfigurationType`,
///   `Feedback`, `ProdIntent`, `ReconStatus`) retry with an update when
///   the insert reports a conflict (`utils::is_update_required`);
/// - `ProdIntent` additionally validates the PoC email and, with the
///   `email` feature, fires a best-effort notification email (the send
///   result is only logged, never propagated).
async fn insert_metadata(
    state: &SessionState,
    user: UserFromToken,
    metadata_key: DBEnum,
    metadata_value: types::MetaData,
) -> UserResult<DashboardMetadata> {
    match metadata_value {
        types::MetaData::ProductionAgreement(data) => {
            utils::insert_merchant_scoped_metadata_to_db(
                state,
                user.user_id,
                user.merchant_id,
                user.org_id,
                metadata_key,
                data,
            )
            .await
        }
        types::MetaData::SetupProcessor(data) => {
            utils::insert_merchant_scoped_metadata_to_db(
                state,
                user.user_id,
                user.merchant_id,
                user.org_id,
                metadata_key,
                data,
            )
            .await
        }
        types::MetaData::ConfigureEndpoint(data) => {
            utils::insert_merchant_scoped_metadata_to_db(
                state,
                user.user_id,
                user.merchant_id,
                user.org_id,
                metadata_key,
                data,
            )
            .await
        }
        types::MetaData::SetupComplete(data) => {
            utils::insert_merchant_scoped_metadata_to_db(
                state,
                user.user_id,
                user.merchant_id,
                user.org_id,
                metadata_key,
                data,
            )
            .await
        }
        types::MetaData::FirstProcessorConnected(data) => {
            utils::insert_merchant_scoped_metadata_to_db(
                state,
                user.user_id,
                user.merchant_id,
                user.org_id,
                metadata_key,
                data,
            )
            .await
        }
        types::MetaData::SecondProcessorConnected(data) => {
            utils::insert_merchant_scoped_metadata_to_db(
                state,
                user.user_id,
                user.merchant_id,
                user.org_id,
                metadata_key,
                data,
            )
            .await
        }
        types::MetaData::ConfiguredRouting(data) => {
            utils::insert_merchant_scoped_metadata_to_db(
                state,
                user.user_id,
                user.merchant_id,
                user.org_id,
                metadata_key,
                data,
            )
            .await
        }
        types::MetaData::TestPayment(data) => {
            utils::insert_merchant_scoped_metadata_to_db(
                state,
                user.user_id,
                user.merchant_id,
                user.org_id,
                metadata_key,
                data,
            )
            .await
        }
        // Upsert: try insert first, fall back to update on conflict.
        types::MetaData::IntegrationMethod(data) => {
            let mut metadata = utils::insert_merchant_scoped_metadata_to_db(
                state,
                user.user_id.clone(),
                user.merchant_id.clone(),
                user.org_id.clone(),
                metadata_key,
                data.clone(),
            )
            .await;
            if utils::is_update_required(&metadata) {
                metadata = utils::update_merchant_scoped_metadata(
                    state,
                    user.user_id,
                    user.merchant_id,
                    user.org_id,
                    metadata_key,
                    data,
                )
                .await
                .change_context(UserErrors::InternalServerError);
            }
            metadata
        }
        // Upsert: try insert first, fall back to update on conflict.
        types::MetaData::ConfigurationType(data) => {
            let mut metadata = utils::insert_merchant_scoped_metadata_to_db(
                state,
                user.user_id.clone(),
                user.merchant_id.clone(),
                user.org_id.clone(),
                metadata_key,
                data.clone(),
            )
            .await;
            if utils::is_update_required(&metadata) {
                metadata = utils::update_merchant_scoped_metadata(
                    state,
                    user.user_id,
                    user.merchant_id,
                    user.org_id,
                    metadata_key,
                    data,
                )
                .await
                .change_context(UserErrors::InternalServerError);
            }
            metadata
        }
        types::MetaData::IntegrationCompleted(data) => {
            utils::insert_merchant_scoped_metadata_to_db(
                state,
                user.user_id,
                user.merchant_id,
                user.org_id,
                metadata_key,
                data,
            )
            .await
        }
        types::MetaData::StripeConnected(data) => {
            utils::insert_merchant_scoped_metadata_to_db(
                state,
                user.user_id,
                user.merchant_id,
                user.org_id,
                metadata_key,
                data,
            )
            .await
        }
        types::MetaData::PaypalConnected(data) => {
            utils::insert_merchant_scoped_metadata_to_db(
                state,
                user.user_id,
                user.merchant_id,
                user.org_id,
                metadata_key,
                data,
            )
            .await
        }
        types::MetaData::SPRoutingConfigured(data) => {
            utils::insert_merchant_scoped_metadata_to_db(
                state,
                user.user_id,
                user.merchant_id,
                user.org_id,
                metadata_key,
                data,
            )
            .await
        }
        // User-scoped upsert: feedback belongs to the individual user.
        types::MetaData::Feedback(data) => {
            let mut metadata = utils::insert_user_scoped_metadata_to_db(
                state,
                user.user_id.clone(),
                user.merchant_id.clone(),
                user.org_id.clone(),
                metadata_key,
                data.clone(),
            )
            .await;
            if utils::is_update_required(&metadata) {
                metadata = utils::update_user_scoped_metadata(
                    state,
                    user.user_id,
                    user.merchant_id,
                    user.org_id,
                    metadata_key,
                    data,
                )
                .await
                .change_context(UserErrors::InternalServerError);
            }
            metadata
        }
        types::MetaData::ProdIntent(data) => {
            // Validate the point-of-contact email before persisting.
            if let Some(poc_email) = &data.poc_email {
                let inner_poc_email = poc_email.peek().as_str();
                pii::Email::from_str(inner_poc_email)
                    .change_context(UserErrors::EmailParsingError)?;
            }
            let mut metadata = utils::insert_merchant_scoped_metadata_to_db(
                state,
                user.user_id.clone(),
                user.merchant_id.clone(),
                user.org_id.clone(),
                metadata_key,
                data.clone(),
            )
            .await;
            if utils::is_update_required(&metadata) {
                metadata = utils::update_merchant_scoped_metadata(
                    state,
                    user.user_id.clone(),
                    user.merchant_id.clone(),
                    user.org_id.clone(),
                    metadata_key,
                    data.clone(),
                )
                .await
                .change_context(UserErrors::InternalServerError);
            }
            #[cfg(feature = "email")]
            {
                // Best-effort notification email: the send result is
                // logged but never fails the request.
                let user_data = user.get_user_from_db(state).await?;
                let user_email = domain::UserEmail::from_pii_email(user_data.get_email())
                    .change_context(UserErrors::InternalServerError)?
                    .get_secret()
                    .expose();
                if utils::is_prod_email_required(&data, user_email) {
                    // Theme falls back to the global config when no
                    // merchant-specific theme is found.
                    let theme = theme_utils::get_most_specific_theme_using_token_and_min_entity(
                        state,
                        &user,
                        EntityType::Merchant,
                    )
                    .await?;
                    let email_contents = email_types::BizEmailProd::new(
                        state,
                        data,
                        theme.as_ref().map(|theme| theme.theme_id.clone()),
                        theme
                            .map(|theme| theme.email_config())
                            .unwrap_or(state.conf.theme.email_config.clone()),
                    )?;
                    let send_email_result = state
                        .email_client
                        .compose_and_send_email(
                            email_types::get_base_url(state),
                            Box::new(email_contents),
                            state.conf.proxy.https_url.as_ref(),
                        )
                        .await;
                    logger::info!(prod_intent_email=?send_email_result);
                }
            }
            metadata
        }
        types::MetaData::SPTestPayment(data) => {
            utils::insert_merchant_scoped_metadata_to_db(
                state,
                user.user_id,
                user.merchant_id,
                user.org_id,
                metadata_key,
                data,
            )
            .await
        }
        types::MetaData::DownloadWoocom(data) => {
            utils::insert_merchant_scoped_metadata_to_db(
                state,
                user.user_id,
                user.merchant_id,
                user.org_id,
                metadata_key,
                data,
            )
            .await
        }
        types::MetaData::ConfigureWoocom(data) => {
            utils::insert_merchant_scoped_metadata_to_db(
                state,
                user.user_id,
                user.merchant_id,
                user.org_id,
                metadata_key,
                data,
            )
            .await
        }
        types::MetaData::SetupWoocomWebhook(data) => {
            utils::insert_merchant_scoped_metadata_to_db(
                state,
                user.user_id,
                user.merchant_id,
                user.org_id,
                metadata_key,
                data,
            )
            .await
        }
        types::MetaData::IsMultipleConfiguration(data) => {
            utils::insert_merchant_scoped_metadata_to_db(
                state,
                user.user_id,
                user.merchant_id,
                user.org_id,
                metadata_key,
                data,
            )
            .await
        }
        // User-scoped: password-change requirement tracks the user, not the merchant.
        types::MetaData::IsChangePasswordRequired(data) => {
            utils::insert_user_scoped_metadata_to_db(
                state,
                user.user_id,
                user.merchant_id,
                user.org_id,
                metadata_key,
                data,
            )
            .await
        }
        types::MetaData::OnboardingSurvey(data) => {
            utils::insert_merchant_scoped_metadata_to_db(
                state,
                user.user_id,
                user.merchant_id,
                user.org_id,
                metadata_key,
                data,
            )
            .await
        }
        types::MetaData::ReconStatus(data) => {
            let mut metadata = utils::insert_merchant_scoped_metadata_to_db(
                state,
                user.user_id.clone(),
                user.merchant_id.clone(),
                user.org_id.clone(),
                metadata_key,
                data.clone(),
            )
            .await;
            if utils::is_update_required(&metadata) {
                // NOTE(review): unlike the other upsert arms, this update
                // is not followed by `.change_context(UserErrors::InternalServerError)`
                // — confirm whether that omission is intentional.
                metadata = utils::update_merchant_scoped_metadata(
                    state,
                    user.user_id,
                    user.merchant_id,
                    user.org_id,
                    metadata_key,
                    data,
                )
                .await;
            }
            metadata
        }
    }
}
/// Loads dashboard metadata rows for the given keys, splitting them by
/// scope and querying the merchant-scoped and user-scoped tables
/// separately. Scopes with no requested keys are skipped entirely.
async fn fetch_metadata(
    state: &SessionState,
    user: &UserFromToken,
    metadata_keys: Vec<DBEnum>,
) -> UserResult<Vec<DashboardMetadata>> {
    let (merchant_scoped_enums, user_scoped_enums) =
        utils::separate_metadata_type_based_on_scope(metadata_keys);
    let mut dashboard_metadata = Vec::new();
    if !merchant_scoped_enums.is_empty() {
        dashboard_metadata.extend(
            utils::get_merchant_scoped_metadata_from_db(
                state,
                user.merchant_id.to_owned(),
                user.org_id.to_owned(),
                merchant_scoped_enums,
            )
            .await?,
        );
    }
    if !user_scoped_enums.is_empty() {
        dashboard_metadata.extend(
            utils::get_user_scoped_metadata_from_db(
                state,
                user.user_id.to_owned(),
                user.merchant_id.to_owned(),
                user.org_id.to_owned(),
                user_scoped_enums,
            )
            .await?,
        );
    }
    Ok(dashboard_metadata)
}
/// Lazily backfills dashboard metadata entries that can be derived from data
/// already present in the system.
///
/// Only `StripeConnected` and `PaypalConnected` are handled: if a matching
/// live (or, failing that, test) connector account exists for the merchant, a
/// `ProcessorConnected` metadata entry is inserted and returned. Every other
/// key resolves to `Ok(None)`.
pub async fn backfill_metadata(
    state: &SessionState,
    user: &UserFromToken,
    key: &DBEnum,
) -> UserResult<Option<DashboardMetadata>> {
    // The merchant key store is required to decrypt connector account data.
    let key_store = state
        .store
        .get_merchant_key_store_by_merchant_id(
            &state.into(),
            &user.merchant_id,
            &state.store.get_master_key().to_vec().into(),
        )
        .await
        .change_context(UserErrors::InternalServerError)?;
    match key {
        DBEnum::StripeConnected => {
            // Prefer the live Stripe connector account; fall back to the
            // "stripe_test" account when no live one exists.
            let mca = if let Some(stripe_connected) = get_merchant_connector_account_by_name(
                state,
                &user.merchant_id,
                api_models::enums::RoutableConnectors::Stripe
                    .to_string()
                    .as_str(),
                &key_store,
            )
            .await?
            {
                stripe_connected
            } else if let Some(stripe_test_connected) = get_merchant_connector_account_by_name(
                state,
                &user.merchant_id,
                //TODO: Use Enum with proper feature flag
                "stripe_test",
                &key_store,
            )
            .await?
            {
                stripe_test_connected
            } else {
                // No Stripe account at all: nothing to backfill.
                return Ok(None);
            };
            // v2 stores the connector name as an enum; normalize to a String.
            #[cfg(feature = "v1")]
            let processor_name = mca.connector_name.clone();
            #[cfg(feature = "v2")]
            let processor_name = mca.connector_name.to_string().clone();
            Some(
                insert_metadata(
                    state,
                    user.to_owned(),
                    DBEnum::StripeConnected,
                    types::MetaData::StripeConnected(api::ProcessorConnected {
                        processor_id: mca.get_id(),
                        processor_name,
                    }),
                )
                .await,
            )
            .transpose()
        }
        DBEnum::PaypalConnected => {
            // Same live-then-test lookup as for Stripe above.
            let mca = if let Some(paypal_connected) = get_merchant_connector_account_by_name(
                state,
                &user.merchant_id,
                api_models::enums::RoutableConnectors::Paypal
                    .to_string()
                    .as_str(),
                &key_store,
            )
            .await?
            {
                paypal_connected
            } else if let Some(paypal_test_connected) = get_merchant_connector_account_by_name(
                state,
                &user.merchant_id,
                //TODO: Use Enum with proper feature flag
                "paypal_test",
                &key_store,
            )
            .await?
            {
                paypal_test_connected
            } else {
                return Ok(None);
            };
            #[cfg(feature = "v1")]
            let processor_name = mca.connector_name.clone();
            #[cfg(feature = "v2")]
            let processor_name = mca.connector_name.to_string().clone();
            Some(
                insert_metadata(
                    state,
                    user.to_owned(),
                    DBEnum::PaypalConnected,
                    types::MetaData::PaypalConnected(api::ProcessorConnected {
                        processor_id: mca.get_id(),
                        processor_name,
                    }),
                )
                .await,
            )
            .transpose()
        }
        // All other metadata keys cannot be derived and are not backfilled.
        _ => Ok(None),
    }
}
/// Looks up a merchant connector account by connector name for the given
/// merchant, returning the first match (if any).
///
/// # Errors
/// Maps any storage error to `UserErrors::InternalServerError`.
///
/// NOTE: the `v2` path is unimplemented and will panic via `todo!()`.
pub async fn get_merchant_connector_account_by_name(
    state: &SessionState,
    merchant_id: &common_utils::id_type::MerchantId,
    connector_name: &str,
    key_store: &MerchantKeyStore,
) -> UserResult<Option<domain::MerchantConnectorAccount>> {
    #[cfg(feature = "v1")]
    {
        state
            .store
            .find_merchant_connector_account_by_merchant_id_connector_name(
                &state.into(),
                merchant_id,
                connector_name,
                key_store,
            )
            .await
            .map_err(|e| {
                e.change_context(UserErrors::InternalServerError)
                    .attach_printable("DB Error Fetching DashboardMetaData")
            })
            // Several accounts may exist for the same connector; only the
            // first is returned.
            .map(|data| data.first().cloned())
    }
    #[cfg(feature = "v2")]
    {
        // Silence unused-parameter warnings until the v2 lookup is written.
        let _ = state;
        let _ = merchant_id;
        let _ = connector_name;
        let _ = key_store;
        todo!()
    }
}
| 5,523 | 1,726 |
hyperswitch | crates/router/src/core/user/theme.rs | .rs | use api_models::user::theme as theme_api;
use common_utils::{
ext_traits::{ByteSliceExt, Encode},
types::theme::ThemeLineage,
};
use diesel_models::user::theme::ThemeNew;
use error_stack::ResultExt;
use hyperswitch_domain_models::api::ApplicationResponse;
use masking::ExposeInterface;
use rdkafka::message::ToBytes;
use uuid::Uuid;
use crate::{
core::errors::{StorageErrorExt, UserErrors, UserResponse},
routes::SessionState,
utils::user::theme as theme_utils,
};
/// Fetches the theme attached to the given lineage and returns it together
/// with the parsed contents of its stored theme data file.
///
/// # Errors
/// Returns `ThemeNotFound` when no theme exists for the lineage, and
/// `InternalServerError` when the stored file cannot be parsed.
pub async fn get_theme_using_lineage(
    state: SessionState,
    lineage: ThemeLineage,
) -> UserResponse<theme_api::GetThemeResponse> {
    let theme = state
        .store
        .find_theme_by_lineage(lineage)
        .await
        .to_not_found_response(UserErrors::ThemeNotFound)?;

    let stored_file = theme_utils::retrieve_file_from_theme_bucket(
        &state,
        &theme_utils::get_theme_file_key(&theme.theme_id),
    )
    .await?;

    let theme_data = stored_file
        .to_bytes()
        .parse_struct("ThemeData")
        .change_context(UserErrors::InternalServerError)?;

    Ok(ApplicationResponse::Json(theme_api::GetThemeResponse {
        email_config: theme.email_config(),
        theme_id: theme.theme_id,
        theme_name: theme.theme_name,
        entity_type: theme.entity_type,
        tenant_id: theme.tenant_id,
        org_id: theme.org_id,
        merchant_id: theme.merchant_id,
        profile_id: theme.profile_id,
        theme_data,
    }))
}
/// Fetches a theme by its identifier and returns it together with the parsed
/// contents of its stored theme data file.
///
/// # Errors
/// Returns `ThemeNotFound` when no theme with this id exists, and
/// `InternalServerError` when the stored file cannot be parsed.
pub async fn get_theme_using_theme_id(
    state: SessionState,
    theme_id: String,
) -> UserResponse<theme_api::GetThemeResponse> {
    let theme = state
        .store
        .find_theme_by_theme_id(theme_id.clone())
        .await
        .to_not_found_response(UserErrors::ThemeNotFound)?;

    let stored_file = theme_utils::retrieve_file_from_theme_bucket(
        &state,
        &theme_utils::get_theme_file_key(&theme_id),
    )
    .await?;

    let theme_data = stored_file
        .to_bytes()
        .parse_struct("ThemeData")
        .change_context(UserErrors::InternalServerError)?;

    Ok(ApplicationResponse::Json(theme_api::GetThemeResponse {
        email_config: theme.email_config(),
        theme_id: theme.theme_id,
        theme_name: theme.theme_name,
        entity_type: theme.entity_type,
        tenant_id: theme.tenant_id,
        org_id: theme.org_id,
        merchant_id: theme.merchant_id,
        profile_id: theme.profile_id,
        theme_data,
    }))
}
/// Uploads a single asset file into the storage folder of the given theme.
///
/// The lineage in the request must resolve to the same `theme_id` as supplied
/// in the path; otherwise the theme is treated as not found.
pub async fn upload_file_to_theme_storage(
    state: SessionState,
    theme_id: String,
    request: theme_api::UploadFileRequest,
) -> UserResponse<()> {
    let theme = state
        .store
        .find_theme_by_lineage(request.lineage)
        .await
        .to_not_found_response(UserErrors::ThemeNotFound)?;

    // Reject mismatches between the path id and the lineage-resolved theme.
    if theme.theme_id != theme_id {
        return Err(UserErrors::ThemeNotFound.into());
    }

    let file_key = theme_utils::get_specific_file_key(&theme_id, &request.asset_name);
    theme_utils::upload_file_to_theme_bucket(&state, &file_key, request.asset_data.expose())
        .await?;

    Ok(ApplicationResponse::StatusOk)
}
/// Creates a new theme under the given lineage and uploads its theme data
/// file to the theme storage bucket.
///
/// When the `email` feature is enabled an email configuration is mandatory;
/// otherwise the configured default email config is used as a fallback. The
/// uploaded file is read back and parsed so the response reflects exactly
/// what was persisted.
///
/// # Errors
/// Returns `ThemeAlreadyExists` on duplicate insertion, and
/// `InternalServerError` for encode/parse failures.
pub async fn create_theme(
    state: SessionState,
    request: theme_api::CreateThemeRequest,
) -> UserResponse<theme_api::GetThemeResponse> {
    // Ensure the lineage (tenant/org/merchant/profile) actually exists.
    theme_utils::validate_lineage(&state, &request.lineage).await?;
    let email_config = if cfg!(feature = "email") {
        request.email_config.ok_or(UserErrors::MissingEmailConfig)?
    } else {
        request
            .email_config
            .unwrap_or(state.conf.theme.email_config.clone())
    };
    let new_theme = ThemeNew::new(
        Uuid::new_v4().to_string(),
        request.theme_name,
        request.lineage,
        email_config,
    );
    let db_theme = state
        .store
        .insert_theme(new_theme)
        .await
        .to_duplicate_response(UserErrors::ThemeAlreadyExists)?;
    theme_utils::upload_file_to_theme_bucket(
        &state,
        &theme_utils::get_theme_file_key(&db_theme.theme_id),
        request
            .theme_data
            .encode_to_vec()
            .change_context(UserErrors::InternalServerError)?,
    )
    .await?;
    // Read the file back from the bucket so the response mirrors the stored
    // state rather than the request payload.
    let file = theme_utils::retrieve_file_from_theme_bucket(
        &state,
        &theme_utils::get_theme_file_key(&db_theme.theme_id),
    )
    .await?;
    let parsed_data = file
        .to_bytes()
        .parse_struct("ThemeData")
        .change_context(UserErrors::InternalServerError)?;
    Ok(ApplicationResponse::Json(theme_api::GetThemeResponse {
        email_config: db_theme.email_config(),
        theme_id: db_theme.theme_id,
        entity_type: db_theme.entity_type,
        tenant_id: db_theme.tenant_id,
        org_id: db_theme.org_id,
        merchant_id: db_theme.merchant_id,
        profile_id: db_theme.profile_id,
        theme_name: db_theme.theme_name,
        theme_data: parsed_data,
    }))
}
/// Replaces the stored theme data file for an existing theme.
///
/// The lineage in the request must resolve to the same `theme_id` as supplied
/// in the path. The uploaded file is read back and parsed so the response
/// reflects exactly what was persisted.
pub async fn update_theme(
    state: SessionState,
    theme_id: String,
    request: theme_api::UpdateThemeRequest,
) -> UserResponse<theme_api::GetThemeResponse> {
    let theme = state
        .store
        .find_theme_by_lineage(request.lineage)
        .await
        .to_not_found_response(UserErrors::ThemeNotFound)?;

    // Reject mismatches between the path id and the lineage-resolved theme.
    if theme.theme_id != theme_id {
        return Err(UserErrors::ThemeNotFound.into());
    }

    let theme_file_key = theme_utils::get_theme_file_key(&theme.theme_id);

    let encoded_theme_data = request
        .theme_data
        .encode_to_vec()
        .change_context(UserErrors::InternalServerError)?;
    theme_utils::upload_file_to_theme_bucket(&state, &theme_file_key, encoded_theme_data)
        .await?;

    // Read the file back so the response mirrors the persisted state.
    let stored_file =
        theme_utils::retrieve_file_from_theme_bucket(&state, &theme_file_key).await?;
    let theme_data = stored_file
        .to_bytes()
        .parse_struct("ThemeData")
        .change_context(UserErrors::InternalServerError)?;

    Ok(ApplicationResponse::Json(theme_api::GetThemeResponse {
        email_config: theme.email_config(),
        theme_id: theme.theme_id,
        entity_type: theme.entity_type,
        tenant_id: theme.tenant_id,
        org_id: theme.org_id,
        merchant_id: theme.merchant_id,
        profile_id: theme.profile_id,
        theme_name: theme.theme_name,
        theme_data,
    }))
}
/// Deletes the theme record identified by `theme_id` within the given lineage.
///
/// # Errors
/// Returns `ThemeNotFound` when no matching theme exists for the lineage.
pub async fn delete_theme(
    state: SessionState,
    theme_id: String,
    lineage: ThemeLineage,
) -> UserResponse<()> {
    state
        .store
        // `theme_id` is not used after this call, so move it instead of the
        // previous redundant clone.
        .delete_theme_by_lineage_and_theme_id(theme_id, lineage)
        .await
        .to_not_found_response(UserErrors::ThemeNotFound)?;
    // TODO (#6717): Delete theme folder from the theme storage.
    // Currently there is no simple or easy way to delete a whole folder from S3.
    // So, we are not deleting the theme folder from the theme storage.
    Ok(ApplicationResponse::StatusOk)
}
| 1,575 | 1,727 |
hyperswitch | crates/router/src/consts/user.rs | .rs | use common_enums;
use common_utils::consts::MAX_ALLOWED_MERCHANT_NAME_LENGTH;
/// Maximum length accepted for a user's name.
pub const MAX_NAME_LENGTH: usize = 70;
/// The max length of company name and merchant should be same
/// because we are deriving the merchant name from company name
pub const MAX_COMPANY_NAME_LENGTH: usize = MAX_ALLOWED_MERCHANT_NAME_LENGTH;
/// Number of recovery codes generated for a user.
pub const RECOVERY_CODES_COUNT: usize = 8;
/// Length of a single recovery code.
pub const RECOVERY_CODE_LENGTH: usize = 8; // This is without counting the hyphen in between
/// The number of digits composing the auth code.
pub const TOTP_DIGITS: usize = 6;
/// Duration in seconds of a step.
pub const TOTP_VALIDITY_DURATION_IN_SECONDS: u64 = 30;
/// Number of totps allowed as network delay. 1 would mean one totp before current totp and one totp after are valids.
pub const TOTP_TOLERANCE: u8 = 1;
/// Number of maximum attempts user has for totp
pub const TOTP_MAX_ATTEMPTS: u8 = 4;
/// Number of maximum attempts user has for recovery code
pub const RECOVERY_CODE_MAX_ATTEMPTS: u8 = 4;
/// The default number of organizations to fetch for a tenant-level user
pub const ORG_LIST_LIMIT_FOR_TENANT: u32 = 20;
/// Maximum accepted password length.
pub const MAX_PASSWORD_LENGTH: usize = 70;
/// Minimum accepted password length.
pub const MIN_PASSWORD_LENGTH: usize = 8;
// Redis key prefixes and TTLs used by the TOTP / recovery-code / SSO flows.
pub const REDIS_TOTP_PREFIX: &str = "TOTP_";
pub const REDIS_RECOVERY_CODE_PREFIX: &str = "RC_";
pub const REDIS_TOTP_SECRET_PREFIX: &str = "TOTP_SEC_";
pub const REDIS_TOTP_SECRET_TTL_IN_SECS: i64 = 15 * 60; // 15 minutes
pub const REDIS_TOTP_ATTEMPTS_PREFIX: &str = "TOTP_ATTEMPTS_";
pub const REDIS_RECOVERY_CODE_ATTEMPTS_PREFIX: &str = "RC_ATTEMPTS_";
pub const REDIS_TOTP_ATTEMPTS_TTL_IN_SECS: i64 = 5 * 60; // 5 mins
pub const REDIS_RECOVERY_CODE_ATTEMPTS_TTL_IN_SECS: i64 = 10 * 60; // 10 mins
pub const REDIS_SSO_PREFIX: &str = "SSO_";
pub const REDIS_SSO_TTL: i64 = 5 * 60; // 5 minutes
/// Name used for the default business profile.
pub const DEFAULT_PROFILE_NAME: &str = "default";
/// Product type assigned when none is chosen explicitly.
pub const DEFAULT_PRODUCT_TYPE: common_enums::MerchantProductType =
    common_enums::MerchantProductType::Orchestration;
| 577 | 1,728 |
hyperswitch | crates/router/src/consts/opensearch.rs | .rs | use api_models::analytics::search::SearchIndex;
/// Returns the full set of OpenSearch indexes covered by analytics search:
/// the four primary indexes plus their sessionizer counterparts.
pub const fn get_search_indexes() -> [SearchIndex; 8] {
    [
        SearchIndex::PaymentAttempts,
        SearchIndex::PaymentIntents,
        SearchIndex::Refunds,
        SearchIndex::Disputes,
        SearchIndex::SessionizerPaymentAttempts,
        SearchIndex::SessionizerPaymentIntents,
        SearchIndex::SessionizerRefunds,
        SearchIndex::SessionizerDisputes,
    ]
}
/// Compile-time list of all searchable indexes, built via
/// [`get_search_indexes`].
pub const SEARCH_INDEXES: [SearchIndex; 8] = get_search_indexes();
| 118 | 1,729 |
hyperswitch | crates/router/src/consts/user_role.rs | .rs | // User Roles
// Identifiers of the predefined merchant-scoped roles.
pub const ROLE_ID_MERCHANT_ADMIN: &str = "merchant_admin";
pub const ROLE_ID_MERCHANT_VIEW_ONLY: &str = "merchant_view_only";
pub const ROLE_ID_MERCHANT_IAM_ADMIN: &str = "merchant_iam_admin";
pub const ROLE_ID_MERCHANT_DEVELOPER: &str = "merchant_developer";
pub const ROLE_ID_MERCHANT_OPERATOR: &str = "merchant_operator";
pub const ROLE_ID_MERCHANT_CUSTOMER_SUPPORT: &str = "merchant_customer_support";
// Identifiers of the predefined profile-scoped roles.
pub const ROLE_ID_PROFILE_ADMIN: &str = "profile_admin";
pub const ROLE_ID_PROFILE_VIEW_ONLY: &str = "profile_view_only";
pub const ROLE_ID_PROFILE_IAM_ADMIN: &str = "profile_iam_admin";
pub const ROLE_ID_PROFILE_DEVELOPER: &str = "profile_developer";
pub const ROLE_ID_PROFILE_OPERATOR: &str = "profile_operator";
pub const ROLE_ID_PROFILE_CUSTOMER_SUPPORT: &str = "profile_customer_support";
/// Merchant id under which internal users are grouped.
pub const INTERNAL_USER_MERCHANT_ID: &str = "juspay000";
/// Maximum length accepted for a role name.
pub const MAX_ROLE_NAME_LENGTH: usize = 64;
| 232 | 1,730 |
hyperswitch | crates/router/src/bin/router.rs | .rs | use error_stack::ResultExt;
use router::{
configs::settings::{CmdLineConf, Settings},
core::errors::{ApplicationError, ApplicationResult},
logger,
routes::metrics,
};
/// Router entry point: loads and validates configuration, initializes
/// logging/telemetry, spawns the background metrics collector, and runs the
/// HTTP server until it shuts down.
///
/// Always returns `Err` once the server stops, so process supervisors see a
/// non-zero exit.
#[tokio::main]
async fn main() -> ApplicationResult<()> {
    // get commandline config before initializing config
    let cmd_line = <CmdLineConf as clap::Parser>::parse();
    #[allow(clippy::expect_used)]
    let conf = Settings::with_config_path(cmd_line.config_path)
        .expect("Unable to construct application configuration");
    #[allow(clippy::expect_used)]
    conf.validate()
        .expect("Failed to validate router configuration");
    #[allow(clippy::print_stdout)] // The logger has not yet been initialized
    #[cfg(feature = "vergen")]
    {
        println!("Starting router (Version: {})", router_env::git_tag!());
    }
    // `_guard` must stay alive for the duration of the process so the
    // logging/telemetry pipeline keeps flushing.
    let _guard = router_env::setup(
        &conf.log,
        router_env::service_name!(),
        [router_env::service_name!(), "actix_server"],
    )
    .change_context(ApplicationError::ConfigurationError)?;
    logger::info!("Application started [{:?}] [{:?}]", conf.server, conf.log);
    // Spawn a thread for collecting metrics at fixed intervals
    metrics::bg_metrics_collector::spawn_metrics_collector(
        conf.log.telemetry.bg_metrics_collection_interval_in_secs,
    );
    #[allow(clippy::expect_used)]
    let server = Box::pin(router::start_server(conf))
        .await
        .expect("Failed to create the server");
    let _ = server.await;
    // Reaching this point means the server terminated; surface it as an error.
    Err(error_stack::Report::from(ApplicationError::from(
        std::io::Error::new(std::io::ErrorKind::Other, "Server shut down"),
    )))
}
| 376 | 1,731 |
hyperswitch | crates/router/src/bin/scheduler.rs | .rs | use std::{collections::HashMap, str::FromStr, sync::Arc};
use actix_web::{dev::Server, web, Scope};
use api_models::health_check::SchedulerHealthCheckResponse;
use common_utils::ext_traits::{OptionExt, StringExt};
use diesel_models::process_tracker::{self as storage, business_status};
use error_stack::ResultExt;
use router::{
configs::settings::{CmdLineConf, Settings},
core::{
errors::{self, CustomResult},
health_check::HealthCheckInterface,
},
logger, routes,
services::{self, api},
workflows,
};
use router_env::{
instrument,
tracing::{self, Instrument},
};
use scheduler::{
consumer::workflows::ProcessTrackerWorkflow, errors::ProcessTrackerError,
workflows::ProcessTrackerWorkflows, SchedulerSessionState,
};
use storage_impl::errors::ApplicationError;
use tokio::sync::{mpsc, oneshot};
// Environment variable that selects the scheduler flow (e.g. consumer/producer).
const SCHEDULER_FLOW: &str = "SCHEDULER_FLOW";
/// Scheduler entry point: builds the application state, wires the shutdown
/// channels, starts the health-check web server, and runs the process
/// tracker for the flow selected via the `SCHEDULER_FLOW` environment
/// variable.
#[tokio::main]
async fn main() -> CustomResult<(), ProcessTrackerError> {
    let cmd_line = <CmdLineConf as clap::Parser>::parse();
    #[allow(clippy::expect_used)]
    let conf = Settings::with_config_path(cmd_line.config_path)
        .expect("Unable to construct application configuration");
    let api_client = Box::new(
        services::ProxyClient::new(&conf.proxy)
            .change_context(ProcessTrackerError::ConfigurationError)?,
    );
    // channel for listening to redis disconnect events
    let (redis_shutdown_signal_tx, redis_shutdown_signal_rx) = oneshot::channel();
    let state = Box::pin(routes::AppState::new(
        conf,
        redis_shutdown_signal_tx,
        api_client,
    ))
    .await;
    // channel to shutdown scheduler gracefully
    let (tx, rx) = mpsc::channel(1);
    // Forward redis disconnects into the scheduler shutdown channel.
    let _task_handle = tokio::spawn(
        router::receiver_for_error(redis_shutdown_signal_rx, tx.clone()).in_current_span(),
    );
    #[allow(clippy::expect_used)]
    let scheduler_flow_str =
        std::env::var(SCHEDULER_FLOW).expect("SCHEDULER_FLOW environment variable not set");
    #[allow(clippy::expect_used)]
    let scheduler_flow = scheduler::SchedulerFlow::from_str(&scheduler_flow_str)
        .expect("Unable to parse SchedulerFlow from environment variable");
    #[allow(clippy::print_stdout)] // The logger has not yet been initialized
    #[cfg(feature = "vergen")]
    {
        println!(
            "Starting {scheduler_flow} (Version: {})",
            router_env::git_tag!()
        );
    }
    // `_guard` must stay alive so the logging pipeline keeps flushing.
    let _guard = router_env::setup(
        &state.conf.log,
        &scheduler_flow_str,
        [router_env::service_name!()],
    );
    #[allow(clippy::expect_used)]
    let web_server = Box::pin(start_web_server(
        state.clone(),
        scheduler_flow_str.to_string(),
    ))
    .await
    .expect("Failed to create the server");
    // Run the health-check server in the background; it should outlive the
    // scheduler loop, so its termination is logged as an error.
    let _task_handle = tokio::spawn(
        async move {
            let _ = web_server.await;
            logger::error!("The health check probe stopped working!");
        }
        .in_current_span(),
    );
    logger::debug!(startup_config=?state.conf);
    start_scheduler(&state, scheduler_flow, (tx, rx)).await?;
    logger::error!("Scheduler shut down");
    Ok(())
}
/// Builds and starts the HTTP server that serves the scheduler's
/// health-check endpoints, returning the running [`Server`] to be awaited.
///
/// # Errors
/// Fails when the scheduler section is missing from the configuration or
/// when the listen address cannot be bound.
pub async fn start_web_server(
    state: routes::AppState,
    service: String,
) -> errors::ApplicationResult<Server> {
    let server = state
        .conf
        .scheduler
        .as_ref()
        .ok_or(ApplicationError::InvalidConfigurationValueError(
            "Scheduler server is invalidly configured".into(),
        ))?
        .server
        .clone();
    let web_server = actix_web::HttpServer::new(move || {
        actix_web::App::new().service(Health::server(state.clone(), service.clone()))
    })
    .bind((server.host.as_str(), server.port))
    .change_context(ApplicationError::ConfigurationError)?
    .workers(server.workers)
    .run();
    // Removed dead code: the previous `let _ = web_server.handle();` obtained
    // a `ServerHandle` and immediately dropped it, which has no effect.
    Ok(web_server)
}
/// Zero-sized type grouping the scheduler health-check routes.
pub struct Health;
impl Health {
    /// Builds the `health` scope: `GET /health` for liveness and
    /// `GET /health/ready` for the deep readiness check, sharing the app
    /// state and service name with both handlers.
    pub fn server(state: routes::AppState, service: String) -> Scope {
        web::scope("health")
            .app_data(web::Data::new(state))
            .app_data(web::Data::new(service))
            .service(web::resource("").route(web::get().to(health)))
            .service(web::resource("/ready").route(web::get().to(deep_health_check)))
    }
}
/// Liveness probe: always responds 200 OK while the process is running.
#[instrument(skip_all)]
pub async fn health() -> impl actix_web::Responder {
    logger::info!("Scheduler health was called");
    actix_web::HttpResponse::Ok().body("Scheduler health is good")
}
/// Readiness probe: runs the deep health check for every tenant store and
/// responds with a JSON map of tenant -> serialized health report.
///
/// Any failure to resolve a tenant's session state or to run its health
/// check aborts the handler and returns the corresponding error response.
#[instrument(skip_all)]
pub async fn deep_health_check(
    state: web::Data<routes::AppState>,
    service: web::Data<String>,
) -> impl actix_web::Responder {
    let mut checks = HashMap::new();
    let stores = state.stores.clone();
    let app_state = Arc::clone(&state.into_inner());
    let service_name = service.into_inner();
    for (tenant, _) in stores {
        // Build a tenant-scoped session state for this check.
        let session_state_res = app_state.clone().get_session_state(&tenant, None, || {
            errors::ApiErrorResponse::MissingRequiredField {
                field_name: "tenant_id",
            }
            .into()
        });
        let session_state = match session_state_res {
            Ok(state) => state,
            Err(err) => {
                return api::log_and_return_error_response(err);
            }
        };
        let report = deep_health_check_func(session_state, &service_name).await;
        match report {
            Ok(response) => {
                // Serialization failures are logged and reported as an empty
                // string rather than failing the whole probe.
                checks.insert(
                    tenant,
                    serde_json::to_string(&response)
                        .map_err(|err| {
                            logger::error!(serialization_error=?err);
                        })
                        .unwrap_or_default(),
                );
            }
            Err(err) => {
                return api::log_and_return_error_response(err);
            }
        }
    }
    services::http_response_json(
        serde_json::to_string(&checks)
            .map_err(|err| {
                logger::error!(serialization_error=?err);
            })
            .unwrap_or_default(),
    )
}
/// Runs the scheduler's deep health check for one tenant: database, Redis,
/// and outgoing HTTP connectivity.
///
/// # Errors
/// Returns a `HealthCheckError` naming the first failing component.
#[instrument(skip_all)]
pub async fn deep_health_check_func(
    state: routes::SessionState,
    service: &str,
) -> errors::RouterResult<SchedulerHealthCheckResponse> {
    logger::info!("{} deep health check was called", service);
    logger::debug!("Database health check begin");
    let db_status = state
        .health_check_db()
        .await
        .map(|_| true)
        .map_err(|error| {
            let message = error.to_string();
            error.change_context(errors::ApiErrorResponse::HealthCheckError {
                component: "Database",
                message,
            })
        })?;
    logger::debug!("Database health check end");
    logger::debug!("Redis health check begin");
    let redis_status = state
        .health_check_redis()
        .await
        .map(|_| true)
        .map_err(|error| {
            let message = error.to_string();
            error.change_context(errors::ApiErrorResponse::HealthCheckError {
                component: "Redis",
                message,
            })
        })?;
    // Fix: this log previously ran after the outgoing-request check, so the
    // "Redis" span in the logs wrongly covered unrelated work.
    logger::debug!("Redis health check end");
    logger::debug!("Outgoing request health check begin");
    let outgoing_req_check =
        state
            .health_check_outgoing()
            .await
            .map(|_| true)
            .map_err(|error| {
                let message = error.to_string();
                error.change_context(errors::ApiErrorResponse::HealthCheckError {
                    component: "Outgoing Request",
                    message,
                })
            })?;
    logger::debug!("Outgoing request health check end");
    let response = SchedulerHealthCheckResponse {
        database: db_status,
        redis: redis_status,
        outgoing_request: outgoing_req_check,
    };
    Ok(response)
}
/// Dispatcher that maps a `ProcessTracker` task to its concrete workflow
/// implementation and runs it, including success/error handling.
#[derive(Debug, Copy, Clone)]
pub struct WorkflowRunner;
#[async_trait::async_trait]
impl ProcessTrackerWorkflows<routes::SessionState> for WorkflowRunner {
    async fn trigger_workflow<'a>(
        &'a self,
        state: &'a routes::SessionState,
        process: storage::ProcessTracker,
    ) -> CustomResult<(), ProcessTrackerError> {
        // The runner name stored on the task decides which workflow runs.
        let runner = process
            .runner
            .clone()
            .get_required_value("runner")
            .change_context(ProcessTrackerError::MissingRequiredField)
            .attach_printable("Missing runner field in process information")?;
        let runner: storage::ProcessTrackerRunner = runner
            .parse_enum("ProcessTrackerRunner")
            .change_context(ProcessTrackerError::UnexpectedFlow)
            .attach_printable("Failed to parse workflow runner name")?;
        // Feature-gated workflows (email, payouts) report `UnexpectedFlow`
        // when the corresponding feature is compiled out.
        let get_operation = |runner: storage::ProcessTrackerRunner| -> CustomResult<
            Box<dyn ProcessTrackerWorkflow<routes::SessionState>>,
            ProcessTrackerError,
        > {
            match runner {
                storage::ProcessTrackerRunner::PaymentsSyncWorkflow => {
                    Ok(Box::new(workflows::payment_sync::PaymentsSyncWorkflow))
                }
                storage::ProcessTrackerRunner::RefundWorkflowRouter => {
                    Ok(Box::new(workflows::refund_router::RefundWorkflowRouter))
                }
                storage::ProcessTrackerRunner::DeleteTokenizeDataWorkflow => Ok(Box::new(
                    workflows::tokenized_data::DeleteTokenizeDataWorkflow,
                )),
                storage::ProcessTrackerRunner::ApiKeyExpiryWorkflow => {
                    #[cfg(feature = "email")]
                    {
                        Ok(Box::new(workflows::api_key_expiry::ApiKeyExpiryWorkflow))
                    }
                    #[cfg(not(feature = "email"))]
                    {
                        Err(error_stack::report!(ProcessTrackerError::UnexpectedFlow))
                            .attach_printable(
                                "Cannot run API key expiry workflow when email feature is disabled",
                            )
                    }
                }
                storage::ProcessTrackerRunner::OutgoingWebhookRetryWorkflow => Ok(Box::new(
                    workflows::outgoing_webhook_retry::OutgoingWebhookRetryWorkflow,
                )),
                storage::ProcessTrackerRunner::AttachPayoutAccountWorkflow => {
                    #[cfg(feature = "payouts")]
                    {
                        Ok(Box::new(
                            workflows::attach_payout_account_workflow::AttachPayoutAccountWorkflow,
                        ))
                    }
                    #[cfg(not(feature = "payouts"))]
                    {
                        Err(
                            error_stack::report!(ProcessTrackerError::UnexpectedFlow),
                        )
                        .attach_printable(
                            "Cannot run Stripe external account workflow when payouts feature is disabled",
                        )
                    }
                }
                storage::ProcessTrackerRunner::PaymentMethodStatusUpdateWorkflow => Ok(Box::new(
                    workflows::payment_method_status_update::PaymentMethodStatusUpdateWorkflow,
                )),
                storage::ProcessTrackerRunner::PassiveRecoveryWorkflow => {
                    Ok(Box::new(workflows::revenue_recovery::ExecutePcrWorkflow))
                }
            }
        };
        let operation = get_operation(runner)?;
        let app_state = &state.clone();
        let output = operation.execute_workflow(state, process.clone()).await;
        match output {
            Ok(_) => operation.success_handler(app_state, process).await,
            Err(error) => match operation
                .error_handler(app_state, process.clone(), error)
                .await
            {
                Ok(_) => (),
                Err(error) => {
                    logger::error!(?error, "Failed while handling error");
                    // Last resort: mark the task as globally failed so it is
                    // not picked up again.
                    let status = state
                        .get_db()
                        .as_scheduler()
                        .finish_process_with_business_status(
                            process,
                            business_status::GLOBAL_FAILURE,
                        )
                        .await;
                    if let Err(error) = status {
                        logger::error!(
                            ?error,
                            "Failed while performing database operation: {}",
                            business_status::GLOBAL_FAILURE
                        );
                    }
                }
            },
        };
        Ok(())
    }
}
/// Launches the process-tracker loop for the configured scheduler flow.
///
/// # Errors
/// Returns `ConfigurationError` when the scheduler section is absent from
/// the application configuration.
async fn start_scheduler(
    state: &routes::AppState,
    scheduler_flow: scheduler::SchedulerFlow,
    channel: (mpsc::Sender<()>, mpsc::Receiver<()>),
) -> CustomResult<(), ProcessTrackerError> {
    let Some(scheduler_settings) = state.conf.scheduler.clone() else {
        return Err(ProcessTrackerError::ConfigurationError.into());
    };
    scheduler::start_process_tracker(
        state,
        scheduler_flow,
        Arc::new(scheduler_settings),
        channel,
        WorkflowRunner {},
        // Resolve a tenant-scoped session state for each scheduled task.
        |app_state, tenant| {
            Arc::new(app_state.clone())
                .get_session_state(tenant, None, || ProcessTrackerError::TenantNotFound.into())
        },
    )
    .await
}
| 2,639 | 1,732 |
hyperswitch | crates/router/src/workflows/attach_payout_account_workflow.rs | .rs | use common_utils::{
consts::DEFAULT_LOCALE,
ext_traits::{OptionExt, ValueExt},
};
use scheduler::{
consumer::{self, workflows::ProcessTrackerWorkflow},
errors,
};
use crate::{
core::payouts,
errors as core_errors,
routes::SessionState,
types::{api, storage},
};
/// Process-tracker workflow that rebuilds the payout context from the task's
/// tracking data and re-runs the payouts core flow for it.
pub struct AttachPayoutAccountWorkflow;
#[async_trait::async_trait]
impl ProcessTrackerWorkflow<SessionState> for AttachPayoutAccountWorkflow {
    /// Re-drives the payout identified by the task's `PayoutRetrieveRequest`
    /// tracking data.
    async fn execute_workflow<'a>(
        &'a self,
        state: &'a SessionState,
        process: storage::ProcessTracker,
    ) -> Result<(), errors::ProcessTrackerError> {
        // Gather context
        let db = &*state.store;
        let tracking_data: api::PayoutRetrieveRequest = process
            .tracking_data
            .clone()
            .parse_value("PayoutRetrieveRequest")?;
        let merchant_id = tracking_data
            .merchant_id
            .clone()
            .get_required_value("merchant_id")?;
        let key_manager_state = &state.into();
        // Key store and merchant account are needed to decrypt and run the
        // payout flow on behalf of this merchant.
        let key_store = db
            .get_merchant_key_store_by_merchant_id(
                key_manager_state,
                &merchant_id,
                &db.get_master_key().to_vec().into(),
            )
            .await?;
        let merchant_account = db
            .find_merchant_account_by_merchant_id(key_manager_state, &merchant_id, &key_store)
            .await?;
        let request = api::payouts::PayoutRequest::PayoutRetrieveRequest(tracking_data);
        // Rebuild the full payout data before re-running the core flow.
        let mut payout_data = payouts::make_payout_data(
            state,
            &merchant_account,
            None,
            &key_store,
            &request,
            DEFAULT_LOCALE,
        )
        .await?;
        payouts::payouts_core(
            state,
            &merchant_account,
            &key_store,
            &mut payout_data,
            None,
            None,
        )
        .await?;
        Ok(())
    }
    /// Routes failures to the shared consumer error handler.
    async fn error_handler<'a>(
        &'a self,
        state: &'a SessionState,
        process: storage::ProcessTracker,
        error: errors::ProcessTrackerError,
    ) -> core_errors::CustomResult<(), errors::ProcessTrackerError> {
        consumer::consumer_error_handler(state.store.as_scheduler(), process, error).await
    }
}
| 490 | 1,733 |
hyperswitch | crates/router/src/workflows/payment_sync.rs | .rs | use common_utils::ext_traits::{OptionExt, StringExt, ValueExt};
use diesel_models::process_tracker::business_status;
use error_stack::ResultExt;
use router_env::logger;
use scheduler::{
consumer::{self, types::process_data, workflows::ProcessTrackerWorkflow},
errors as sch_errors, utils as scheduler_utils,
};
use crate::{
consts,
core::{
errors::StorageErrorExt,
payments::{self as payment_flows, operations},
},
db::StorageInterface,
errors,
routes::SessionState,
services,
types::{
api,
storage::{self, enums},
},
utils,
};
/// Process-tracker workflow that re-polls (PSync) a payment's status with the
/// connector until it reaches a terminal state or retries are exhausted.
pub struct PaymentsSyncWorkflow;
#[async_trait::async_trait]
impl ProcessTrackerWorkflow<SessionState> for PaymentsSyncWorkflow {
    // The v2 flow is not implemented yet and will panic if scheduled.
    #[cfg(feature = "v2")]
    async fn execute_workflow<'a>(
        &'a self,
        _state: &'a SessionState,
        _process: storage::ProcessTracker,
    ) -> Result<(), sch_errors::ProcessTrackerError> {
        todo!()
    }
    /// v1 flow: runs a PSync operation, completes the task on a terminal
    /// attempt status, otherwise schedules a retry; on the final retry of a
    /// still-pending payment without a connector transaction id, marks the
    /// payment failed and triggers the merchant's outgoing webhook.
    #[cfg(feature = "v1")]
    async fn execute_workflow<'a>(
        &'a self,
        state: &'a SessionState,
        process: storage::ProcessTracker,
    ) -> Result<(), sch_errors::ProcessTrackerError> {
        let db: &dyn StorageInterface = &*state.store;
        let tracking_data: api::PaymentsRetrieveRequest = process
            .tracking_data
            .clone()
            .parse_value("PaymentsRetrieveRequest")?;
        let key_manager_state = &state.into();
        let key_store = db
            .get_merchant_key_store_by_merchant_id(
                key_manager_state,
                tracking_data
                    .merchant_id
                    .as_ref()
                    .get_required_value("merchant_id")?,
                &db.get_master_key().to_vec().into(),
            )
            .await?;
        let merchant_account = db
            .find_merchant_account_by_merchant_id(
                key_manager_state,
                tracking_data
                    .merchant_id
                    .as_ref()
                    .get_required_value("merchant_id")?,
                &key_store,
            )
            .await?;
        // TODO: Add support for ReqState in PT flows
        // Run the payment-status (PSync) operation against the connector.
        let (mut payment_data, _, customer, _, _) =
            Box::pin(payment_flows::payments_operation_core::<
                api::PSync,
                _,
                _,
                _,
                payment_flows::PaymentData<api::PSync>,
            >(
                state,
                state.get_req_state(),
                merchant_account.clone(),
                None,
                key_store.clone(),
                operations::PaymentStatus,
                tracking_data.clone(),
                payment_flows::CallConnectorAction::Trigger,
                services::AuthFlow::Client,
                None,
                hyperswitch_domain_models::payments::HeaderPayload::default(),
                None, //Platform merchant account
            ))
            .await?;
        // Attempt statuses that need no further syncing.
        let terminal_status = [
            enums::AttemptStatus::RouterDeclined,
            enums::AttemptStatus::Charged,
            enums::AttemptStatus::AutoRefunded,
            enums::AttemptStatus::Voided,
            enums::AttemptStatus::VoidFailed,
            enums::AttemptStatus::CaptureFailed,
            enums::AttemptStatus::Failure,
        ];
        match &payment_data.payment_attempt.status {
            status if terminal_status.contains(status) => {
                state
                    .store
                    .as_scheduler()
                    .finish_process_with_business_status(process, business_status::COMPLETED_BY_PT)
                    .await?
            }
            _ => {
                let connector = payment_data
                    .payment_attempt
                    .connector
                    .clone()
                    .ok_or(sch_errors::ProcessTrackerError::MissingRequiredField)?;
                let is_last_retry = retry_sync_task(
                    db,
                    connector,
                    payment_data.payment_attempt.merchant_id.clone(),
                    process,
                )
                .await?;
                // If the payment status is still processing and there is no connector transaction_id
                // then change the payment status to failed if all retries exceeded
                if is_last_retry
                    && payment_data.payment_attempt.status == enums::AttemptStatus::Pending
                    && payment_data
                        .payment_attempt
                        .connector_transaction_id
                        .as_ref()
                        .is_none()
                {
                    let payment_intent_update = hyperswitch_domain_models::payments::payment_intent::PaymentIntentUpdate::PGStatusUpdate { status: api_models::enums::IntentStatus::Failed,updated_by: merchant_account.storage_scheme.to_string(), incremental_authorization_allowed: Some(false) };
                    let payment_attempt_update =
                        hyperswitch_domain_models::payments::payment_attempt::PaymentAttemptUpdate::ErrorUpdate {
                            connector: None,
                            status: api_models::enums::AttemptStatus::Failure,
                            error_code: None,
                            error_message: None,
                            error_reason: Some(Some(
                                consts::REQUEST_TIMEOUT_ERROR_MESSAGE_FROM_PSYNC.to_string(),
                            )),
                            amount_capturable: Some(common_utils::types::MinorUnit::new(0)),
                            updated_by: merchant_account.storage_scheme.to_string(),
                            unified_code: None,
                            unified_message: None,
                            connector_transaction_id: None,
                            payment_method_data: None,
                            authentication_type: None,
                            issuer_error_code: None,
                            issuer_error_message: None,
                        };
                    // Persist the failure on both the attempt and the intent.
                    payment_data.payment_attempt = db
                        .update_payment_attempt_with_attempt_id(
                            payment_data.payment_attempt,
                            payment_attempt_update,
                            merchant_account.storage_scheme,
                        )
                        .await
                        .to_not_found_response(errors::ApiErrorResponse::PaymentNotFound)?;
                    payment_data.payment_intent = db
                        .update_payment_intent(
                            &state.into(),
                            payment_data.payment_intent,
                            payment_intent_update,
                            &key_store,
                            merchant_account.storage_scheme,
                        )
                        .await
                        .to_not_found_response(errors::ApiErrorResponse::PaymentNotFound)?;
                    let profile_id = payment_data
                        .payment_intent
                        .profile_id
                        .as_ref()
                        .get_required_value("profile_id")
                        .change_context(errors::ApiErrorResponse::InternalServerError)
                        .attach_printable("Could not find profile_id in payment intent")?;
                    let business_profile = db
                        .find_business_profile_by_profile_id(
                            key_manager_state,
                            &key_store,
                            profile_id,
                        )
                        .await
                        .to_not_found_response(errors::ApiErrorResponse::ProfileNotFound {
                            id: profile_id.get_string_repr().to_owned(),
                        })?;
                    // Trigger the outgoing webhook to notify the merchant about failed payment
                    let operation = operations::PaymentStatus;
                    // Webhook delivery failures are logged but do not fail
                    // the workflow itself.
                    Box::pin(utils::trigger_payments_webhook(
                        merchant_account,
                        business_profile,
                        &key_store,
                        payment_data,
                        customer,
                        state,
                        operation,
                    ))
                    .await
                    .map_err(|error| logger::warn!(payments_outgoing_webhook_error=?error))
                    .ok();
                }
            }
        };
        Ok(())
    }
    /// Routes failures to the shared consumer error handler.
    async fn error_handler<'a>(
        &'a self,
        state: &'a SessionState,
        process: storage::ProcessTracker,
        error: sch_errors::ProcessTrackerError,
    ) -> errors::CustomResult<(), sch_errors::ProcessTrackerError> {
        consumer::consumer_error_handler(state.store.as_scheduler(), process, error).await
    }
}
/// Compute when the next payment sync attempt should run.
///
/// Retry timing is merchant-configurable through the `pt_mapping_{connector}` config
/// key (e.g. `pt_mapping_trustpay`):
/// ```json
/// {
///     "default_mapping": {
///         "start_after": 60,
///         "frequency": [300],
///         "count": [5]
///     },
///     "max_retries_count": 5
/// }
/// ```
///
/// With this config the first psync happens 60 seconds after scheduling
/// (`start_after`), and the next 5 retries (`count`) are spaced 300 seconds apart
/// (`frequency`). Returns `Ok(None)` once the retry schedule is exhausted.
pub async fn get_sync_process_schedule_time(
    db: &dyn StorageInterface,
    connector: &str,
    merchant_id: &common_utils::id_type::MerchantId,
    retry_count: i32,
) -> Result<Option<time::PrimitiveDateTime>, errors::ProcessTrackerError> {
    // Attempt to load a connector-specific mapping from the config store.
    let custom_mapping: common_utils::errors::CustomResult<
        process_data::ConnectorPTMapping,
        errors::StorageError,
    > = db
        .find_config_by_key(&format!("pt_mapping_{connector}"))
        .await
        .map(|value| value.config)
        .and_then(|config| {
            config
                .parse_struct("ConnectorPTMapping")
                .change_context(errors::StorageError::DeserializationFailed)
        });
    // Missing or malformed config is non-fatal: log and fall back to the defaults.
    let mapping = custom_mapping.unwrap_or_else(|error| {
        logger::info!(?error, "Redis Mapping Error");
        process_data::ConnectorPTMapping::default()
    });
    let time_delta = scheduler_utils::get_schedule_time(mapping, merchant_id, retry_count);
    Ok(scheduler_utils::get_time_from_delta(time_delta))
}
/// Reschedule the sync task for one more attempt.
///
/// Returns `true` when the retry schedule is exhausted (the task is finished with
/// `RETRIES_EXCEEDED`), and `false` when a further attempt was scheduled.
pub async fn retry_sync_task(
    db: &dyn StorageInterface,
    connector: String,
    merchant_id: common_utils::id_type::MerchantId,
    pt: storage::ProcessTracker,
) -> Result<bool, sch_errors::ProcessTrackerError> {
    let next_attempt_at =
        get_sync_process_schedule_time(db, &connector, &merchant_id, pt.retry_count + 1).await?;
    if let Some(schedule_time) = next_attempt_at {
        db.as_scheduler().retry_process(pt, schedule_time).await?;
        Ok(false)
    } else {
        // No slot left in the retry schedule: close the task out.
        db.as_scheduler()
            .finish_process_with_business_status(pt, business_status::RETRIES_EXCEEDED)
            .await?;
        Ok(true)
    }
}
#[cfg(test)]
mod tests {
    #![allow(clippy::expect_used, clippy::unwrap_used)]
    use super::*;

    /// With the default connector mapping, retry 0 must be scheduled `start_after`
    /// seconds out, and retry 1 after the first configured frequency.
    #[test]
    fn test_get_default_schedule_time() {
        let merchant_id =
            common_utils::id_type::MerchantId::try_from(std::borrow::Cow::from("-")).unwrap();
        let deltas: Vec<_> = (0..=1)
            .map(|retry_count| {
                scheduler_utils::get_schedule_time(
                    process_data::ConnectorPTMapping::default(),
                    &merchant_id,
                    retry_count,
                )
                .unwrap()
            })
            .collect();
        let cpt_default = process_data::ConnectorPTMapping::default().default_mapping;
        assert_eq!(
            deltas,
            vec![
                cpt_default.start_after,
                cpt_default.frequencies.first().unwrap().0
            ]
        );
    }
}
| 2,263 | 1,734 |
hyperswitch | crates/router/src/workflows/payment_method_status_update.rs | .rs | use common_utils::ext_traits::ValueExt;
use error_stack::ResultExt;
use scheduler::{
consumer::types::process_data, utils as pt_utils, workflows::ProcessTrackerWorkflow,
};
use crate::{
errors,
logger::error,
routes::SessionState,
types::storage::{self, PaymentMethodStatusTrackingData},
};
pub struct PaymentMethodStatusUpdateWorkflow;
#[async_trait::async_trait]
impl ProcessTrackerWorkflow<SessionState> for PaymentMethodStatusUpdateWorkflow {
    /// Transition a payment method from `prev_status` to `curr_status` as captured in
    /// the task's tracking data.
    ///
    /// - If the stored status no longer equals `prev_status`, some other flow already
    ///   updated the payment method and the task finishes as already completed.
    /// - On a successful update the task finishes with `COMPLETED_BY_PT`.
    /// - On an update failure the task is rescheduled using the default
    ///   payment-methods retry mapping until the schedule is exhausted.
    #[cfg(all(
        any(feature = "v2", feature = "v1"),
        not(feature = "payment_methods_v2")
    ))]
    async fn execute_workflow<'a>(
        &'a self,
        state: &'a SessionState,
        process: storage::ProcessTracker,
    ) -> Result<(), errors::ProcessTrackerError> {
        let db = &*state.store;
        let tracking_data: PaymentMethodStatusTrackingData = process
            .tracking_data
            .clone()
            .parse_value("PaymentMethodStatusTrackingData")?;
        let retry_count = process.retry_count;
        let pm_id = tracking_data.payment_method_id;
        let prev_pm_status = tracking_data.prev_status;
        let curr_pm_status = tracking_data.curr_status;
        let merchant_id = tracking_data.merchant_id;
        let key_manager_state = &state.into();
        let key_store = state
            .store
            .get_merchant_key_store_by_merchant_id(
                key_manager_state,
                &merchant_id,
                &state.store.get_master_key().to_vec().into(),
            )
            .await?;
        let merchant_account = db
            .find_merchant_account_by_merchant_id(key_manager_state, &merchant_id, &key_store)
            .await?;
        let payment_method = db
            .find_payment_method(
                &(state.into()),
                &key_store,
                &pm_id,
                merchant_account.storage_scheme,
            )
            .await
            .change_context(errors::ApiErrorResponse::InternalServerError)
            // Fixed error context: previously said "Unable to decode billing address",
            // which was copied from an unrelated flow and misleading in logs.
            .attach_printable("Unable to find payment method")?;
        // Someone else already moved the payment method past `prev_status`; nothing to do.
        if payment_method.status != prev_pm_status {
            return db
                .as_scheduler()
                .finish_process_with_business_status(process, "PROCESS_ALREADY_COMPLETED")
                .await
                .map_err(Into::<errors::ProcessTrackerError>::into);
        }
        let pm_update = storage::PaymentMethodUpdate::StatusUpdate {
            status: Some(curr_pm_status),
        };
        let res = db
            .update_payment_method(
                &(state.into()),
                &key_store,
                payment_method,
                pm_update,
                merchant_account.storage_scheme,
            )
            .await
            .change_context(errors::ApiErrorResponse::InternalServerError)
            .attach_printable("Unable to update payment method");
        if let Ok(_pm) = res {
            db.as_scheduler()
                .finish_process_with_business_status(process, "COMPLETED_BY_PT")
                .await?;
        } else {
            // Update failed: pick the next retry slot from the default mapping.
            let mapping = process_data::PaymentMethodsPTMapping::default();
            let time_delta = if retry_count == 0 {
                Some(mapping.default_mapping.start_after)
            } else {
                pt_utils::get_delay(retry_count + 1, &mapping.default_mapping.frequencies)
            };
            let schedule_time = pt_utils::get_time_from_delta(time_delta);
            match schedule_time {
                Some(s_time) => db
                    .as_scheduler()
                    .retry_process(process, s_time)
                    .await
                    .map_err(Into::<errors::ProcessTrackerError>::into)?,
                // `None` means the retry schedule ran out.
                None => db
                    .as_scheduler()
                    .finish_process_with_business_status(process, "RETRIES_EXCEEDED")
                    .await
                    .map_err(Into::<errors::ProcessTrackerError>::into)?,
            };
        };
        Ok(())
    }
    /// v2 payment-method status transitions are not implemented yet.
    #[cfg(feature = "v2")]
    async fn execute_workflow<'a>(
        &'a self,
        _state: &'a SessionState,
        _process: storage::ProcessTracker,
    ) -> Result<(), errors::ProcessTrackerError> {
        todo!()
    }
    /// Log the failure; the process-tracker entry itself is left untouched.
    async fn error_handler<'a>(
        &'a self,
        _state: &'a SessionState,
        process: storage::ProcessTracker,
        _error: errors::ProcessTrackerError,
    ) -> errors::CustomResult<(), errors::ProcessTrackerError> {
        error!(%process.id, "Failed while executing workflow");
        Ok(())
    }
}
| 918 | 1,735 |
hyperswitch | crates/router/src/workflows/api_key_expiry.rs | .rs | use common_utils::{errors::ValidationError, ext_traits::ValueExt, types::theme::ThemeLineage};
use diesel_models::{
enums as storage_enums, process_tracker::business_status, ApiKeyExpiryTrackingData,
};
use router_env::logger;
use scheduler::{workflows::ProcessTrackerWorkflow, SchedulerSessionState};
use crate::{
consts, errors,
logger::error,
routes::{metrics, SessionState},
services::email::types::{self as email_types, ApiKeyExpiryReminder},
types::{api, domain::UserEmail, storage},
utils::{user::theme as theme_utils, OptionExt},
};
pub struct ApiKeyExpiryWorkflow;
#[async_trait::async_trait]
impl ProcessTrackerWorkflow<SessionState> for ApiKeyExpiryWorkflow {
async fn execute_workflow<'a>(
&'a self,
state: &'a SessionState,
process: storage::ProcessTracker,
) -> Result<(), errors::ProcessTrackerError> {
let db = &*state.store;
let tracking_data: ApiKeyExpiryTrackingData = process
.tracking_data
.clone()
.parse_value("ApiKeyExpiryTrackingData")?;
let key_manager_satte = &state.into();
let key_store = state
.store
.get_merchant_key_store_by_merchant_id(
key_manager_satte,
&tracking_data.merchant_id,
&state.store.get_master_key().to_vec().into(),
)
.await?;
let merchant_account = db
.find_merchant_account_by_merchant_id(
key_manager_satte,
&tracking_data.merchant_id,
&key_store,
)
.await?;
let email_id = merchant_account
.merchant_details
.clone()
.parse_value::<api::MerchantDetails>("MerchantDetails")?
.primary_email
.ok_or(errors::ProcessTrackerError::EValidationError(
ValidationError::MissingRequiredField {
field_name: "email".to_string(),
}
.into(),
))?;
let task_id = process.id.clone();
let retry_count = process.retry_count;
let api_key_name = tracking_data.api_key_name.clone();
let prefix = tracking_data.prefix.clone();
let expires_in = tracking_data
.expiry_reminder_days
.get(
usize::try_from(retry_count)
.map_err(|_| errors::ProcessTrackerError::TypeConversionError)?,
)
.ok_or(errors::ProcessTrackerError::EApiErrorResponse)?;
let theme = theme_utils::get_most_specific_theme_using_lineage(
state,
ThemeLineage::Merchant {
tenant_id: state.tenant.tenant_id.clone(),
org_id: merchant_account.get_org_id().clone(),
merchant_id: merchant_account.get_id().clone(),
},
)
.await
.map_err(|err| {
logger::error!(?err, "Failed to get theme");
errors::ProcessTrackerError::EApiErrorResponse
})?;
let email_contents = ApiKeyExpiryReminder {
recipient_email: UserEmail::from_pii_email(email_id).map_err(|error| {
logger::error!(
?error,
"Failed to convert recipient's email to UserEmail from pii::Email"
);
errors::ProcessTrackerError::EApiErrorResponse
})?,
subject: consts::EMAIL_SUBJECT_API_KEY_EXPIRY,
expires_in: *expires_in,
api_key_name,
prefix,
theme_id: theme.as_ref().map(|theme| theme.theme_id.clone()),
theme_config: theme
.map(|theme| theme.email_config())
.unwrap_or(state.conf.theme.email_config.clone()),
};
state
.email_client
.clone()
.compose_and_send_email(
email_types::get_base_url(state),
Box::new(email_contents),
state.conf.proxy.https_url.as_ref(),
)
.await
.map_err(errors::ProcessTrackerError::EEmailError)?;
// If all the mails have been sent, then retry_count would be equal to length of the expiry_reminder_days vector
if retry_count
== i32::try_from(tracking_data.expiry_reminder_days.len() - 1)
.map_err(|_| errors::ProcessTrackerError::TypeConversionError)?
{
state
.get_db()
.as_scheduler()
.finish_process_with_business_status(process, business_status::COMPLETED_BY_PT)
.await?
}
// If tasks are remaining that has to be scheduled
else {
let expiry_reminder_day = tracking_data
.expiry_reminder_days
.get(
usize::try_from(retry_count + 1)
.map_err(|_| errors::ProcessTrackerError::TypeConversionError)?,
)
.ok_or(errors::ProcessTrackerError::EApiErrorResponse)?;
let updated_schedule_time = tracking_data.api_key_expiry.map(|api_key_expiry| {
api_key_expiry.saturating_sub(time::Duration::days(i64::from(*expiry_reminder_day)))
});
let updated_process_tracker_data = storage::ProcessTrackerUpdate::Update {
name: None,
retry_count: Some(retry_count + 1),
schedule_time: updated_schedule_time,
tracking_data: None,
business_status: None,
status: Some(storage_enums::ProcessTrackerStatus::New),
updated_at: Some(common_utils::date_time::now()),
};
let task_ids = vec![task_id];
db.process_tracker_update_process_status_by_ids(task_ids, updated_process_tracker_data)
.await?;
// Remaining tasks are re-scheduled, so will be resetting the added count
metrics::TASKS_RESET_COUNT
.add(1, router_env::metric_attributes!(("flow", "ApiKeyExpiry")));
}
Ok(())
}
async fn error_handler<'a>(
&'a self,
_state: &'a SessionState,
process: storage::ProcessTracker,
_error: errors::ProcessTrackerError,
) -> errors::CustomResult<(), errors::ProcessTrackerError> {
error!(%process.id, "Failed while executing workflow");
Ok(())
}
}
| 1,318 | 1,736 |
hyperswitch | crates/router/src/workflows/revenue_recovery.rs | .rs | #[cfg(feature = "v2")]
use api_models::payments::PaymentsGetIntentRequest;
#[cfg(feature = "v2")]
use common_utils::{
ext_traits::{StringExt, ValueExt},
id_type,
};
#[cfg(feature = "v2")]
use error_stack::ResultExt;
#[cfg(feature = "v2")]
use hyperswitch_domain_models::payments::PaymentIntentData;
#[cfg(feature = "v2")]
use router_env::logger;
use scheduler::{consumer::workflows::ProcessTrackerWorkflow, errors};
#[cfg(feature = "v2")]
use scheduler::{types::process_data, utils as scheduler_utils};
#[cfg(feature = "v2")]
use storage_impl::errors as storage_errors;
#[cfg(feature = "v2")]
use crate::{
core::{
admin, payments,
revenue_recovery::{self as pcr, types},
},
db::StorageInterface,
errors::StorageError,
types::{
api::{self as api_types},
storage::revenue_recovery as pcr_storage_types,
},
};
use crate::{routes::SessionState, types::storage};
pub struct ExecutePcrWorkflow;
#[async_trait::async_trait]
impl ProcessTrackerWorkflow<SessionState> for ExecutePcrWorkflow {
    /// PCR (passive churn recovery) is a v2-only flow; under v1 this task is a no-op.
    #[cfg(feature = "v1")]
    async fn execute_workflow<'a>(
        &'a self,
        _state: &'a SessionState,
        _process: storage::ProcessTracker,
    ) -> Result<(), errors::ProcessTrackerError> {
        Ok(())
    }
    /// Drive one step of the revenue-recovery flow for the payment referenced by the
    /// task's tracking data.
    ///
    /// Loads the merchant/profile/key-store context, fetches the payment intent,
    /// resolves the billing merchant connector account, then dispatches on the task
    /// name. Only `EXECUTE_WORKFLOW` is implemented; `PSYNC_WORKFLOW` and
    /// `REVIEW_WORKFLOW` are `todo!()` placeholders, and any other name is treated
    /// as an unknown job.
    #[cfg(feature = "v2")]
    async fn execute_workflow<'a>(
        &'a self,
        state: &'a SessionState,
        process: storage::ProcessTracker,
    ) -> Result<(), errors::ProcessTrackerError> {
        let tracking_data = process
            .tracking_data
            .clone()
            .parse_value::<pcr_storage_types::PcrWorkflowTrackingData>(
                "PCRWorkflowTrackingData",
            )?;
        let request = PaymentsGetIntentRequest {
            id: tracking_data.global_payment_id.clone(),
        };
        let key_manager_state = &state.into();
        // Merchant account, business profile and key store for this task.
        let pcr_data = extract_data_and_perform_action(state, &tracking_data).await?;
        // Fetch the current payment intent state for the global payment ID.
        let (payment_data, _, _) = payments::payments_intent_operation_core::<
            api_types::PaymentGetIntent,
            _,
            _,
            PaymentIntentData<api_types::PaymentGetIntent>,
        >(
            state,
            state.get_req_state(),
            pcr_data.merchant_account.clone(),
            pcr_data.profile.clone(),
            pcr_data.key_store.clone(),
            payments::operations::PaymentGetIntent,
            request,
            tracking_data.global_payment_id.clone(),
            hyperswitch_domain_models::payments::HeaderPayload::default(),
            None,
        )
        .await?;
        let store = state.store.as_ref();
        // The recovery flow cannot proceed without a billing connector account on
        // the intent; surface that as a recovery error.
        let billing_merchant_connector_account_id: id_type::MerchantConnectorAccountId =
            payment_data
                .payment_intent
                .get_billing_merchant_connector_account_id()
                .ok_or(errors::ProcessTrackerError::ERecoveryError(
                    storage_errors::RecoveryError::BillingMerchantConnectorAccountIdNotFound.into(),
                ))?;
        let billing_mca = store
            .find_merchant_connector_account_by_id(
                key_manager_state,
                &billing_merchant_connector_account_id,
                &pcr_data.key_store,
            )
            .await?;
        // Dispatch on the scheduled task's name.
        match process.name.as_deref() {
            Some("EXECUTE_WORKFLOW") => {
                Box::pin(pcr::perform_execute_payment(
                    state,
                    &process,
                    &tracking_data,
                    &pcr_data,
                    key_manager_state,
                    &payment_data.payment_intent,
                    &billing_mca,
                ))
                .await
            }
            Some("PSYNC_WORKFLOW") => todo!(),
            Some("REVIEW_WORKFLOW") => todo!(),
            _ => Err(errors::ProcessTrackerError::JobNotFound),
        }
    }
}
/// Load the merchant account, business profile and merchant key store referenced by
/// the task's tracking data, bundled as a [`pcr_storage_types::PcrPaymentData`].
#[cfg(feature = "v2")]
pub(crate) async fn extract_data_and_perform_action(
    state: &SessionState,
    tracking_data: &pcr_storage_types::PcrWorkflowTrackingData,
) -> Result<pcr_storage_types::PcrPaymentData, errors::ProcessTrackerError> {
    let store = &state.store;
    let key_manager_state = &state.into();
    // The key store is needed first: both lookups below decrypt with it.
    let key_store = store
        .get_merchant_key_store_by_merchant_id(
            key_manager_state,
            &tracking_data.merchant_id,
            &store.get_master_key().to_vec().into(),
        )
        .await?;
    let merchant_account = store
        .find_merchant_account_by_merchant_id(
            key_manager_state,
            &tracking_data.merchant_id,
            &key_store,
        )
        .await?;
    let profile = store
        .find_business_profile_by_profile_id(
            key_manager_state,
            &key_store,
            &tracking_data.profile_id,
        )
        .await?;
    Ok(pcr_storage_types::PcrPaymentData {
        merchant_account,
        profile,
        key_store,
    })
}
/// Determine when the next MIT payment retry should be scheduled.
///
/// Reads the merchant-configurable `pt_mapping_pcr_retries` config key and falls back
/// to the default mapping when the key is missing or fails to deserialize. Returns
/// `None` once the retry schedule is exhausted.
#[cfg(feature = "v2")]
pub(crate) async fn get_schedule_time_to_retry_mit_payments(
    db: &dyn StorageInterface,
    merchant_id: &id_type::MerchantId,
    retry_count: i32,
) -> Option<time::PrimitiveDateTime> {
    let key = "pt_mapping_pcr_retries";
    let lookup = db
        .find_config_by_key(key)
        .await
        .map(|value| value.config)
        .and_then(|config| {
            config
                .parse_struct("RevenueRecoveryPaymentProcessTrackerMapping")
                .change_context(StorageError::DeserializationFailed)
        });
    let mapping = match lookup {
        Ok(mapping) => {
            logger::debug!(?mapping, "Using custom pcr payments retry config");
            mapping
        }
        Err(error) => {
            // A missing key is expected (defaults apply); anything else is logged
            // loudly before defaulting.
            if error.current_context().is_db_not_found() {
                logger::debug!("Revenue Recovery retry config `{key}` not found, ignoring");
            } else {
                logger::error!(
                    ?error,
                    "Failed to read Revenue Recovery retry config `{key}`"
                );
            }
            process_data::RevenueRecoveryPaymentProcessTrackerMapping::default()
        }
    };
    let time_delta =
        scheduler_utils::get_pcr_payments_retry_schedule_time(mapping, merchant_id, retry_count);
    scheduler_utils::get_time_from_delta(time_delta)
}
| 1,357 | 1,737 |
hyperswitch | crates/router/src/workflows/outgoing_webhook_retry.rs | .rs | #[cfg(feature = "payouts")]
use api_models::payouts as payout_models;
use api_models::{
enums::EventType,
webhook_events::OutgoingWebhookRequestContent,
webhooks::{OutgoingWebhook, OutgoingWebhookContent},
};
use common_utils::{
consts::DEFAULT_LOCALE,
ext_traits::{StringExt, ValueExt},
};
use diesel_models::process_tracker::business_status;
use error_stack::ResultExt;
use masking::PeekInterface;
use router_env::tracing::{self, instrument};
use scheduler::{
consumer::{self, workflows::ProcessTrackerWorkflow},
types::process_data,
utils as scheduler_utils,
};
#[cfg(feature = "payouts")]
use crate::core::payouts;
use crate::{
core::{
payments,
webhooks::{self as webhooks_core, types::OutgoingWebhookTrackingData},
},
db::StorageInterface,
errors, logger,
routes::{app::ReqState, SessionState},
types::{domain, storage},
};
pub struct OutgoingWebhookRetryWorkflow;
#[async_trait::async_trait]
impl ProcessTrackerWorkflow<SessionState> for OutgoingWebhookRetryWorkflow {
    /// Re-deliver an outgoing webhook as an automatic retry attempt.
    ///
    /// A new event row is inserted for this delivery attempt, linked back to the
    /// initial attempt. If the stored event carries the original request payload it
    /// is replayed as-is; otherwise (events written by an older application version)
    /// the resource's current state is fetched and a fresh payload is built — but
    /// only when the resource's current status still maps to the event type that was
    /// scheduled. When the statuses diverge the task is finished with
    /// `RESOURCE_STATUS_MISMATCH`.
    #[cfg(feature = "v1")]
    #[instrument(skip_all)]
    async fn execute_workflow<'a>(
        &'a self,
        state: &'a SessionState,
        process: storage::ProcessTracker,
    ) -> Result<(), errors::ProcessTrackerError> {
        let delivery_attempt = storage::enums::WebhookDeliveryAttempt::AutomaticRetry;
        let tracking_data: OutgoingWebhookTrackingData = process
            .tracking_data
            .clone()
            .parse_value("OutgoingWebhookTrackingData")?;
        let db = &*state.store;
        let key_manager_state = &state.into();
        let key_store = db
            .get_merchant_key_store_by_merchant_id(
                key_manager_state,
                &tracking_data.merchant_id,
                &db.get_master_key().to_vec().into(),
            )
            .await?;
        let business_profile = db
            .find_business_profile_by_profile_id(
                key_manager_state,
                &key_store,
                &tracking_data.business_profile_id,
            )
            .await?;
        // Fresh event ID for this attempt; the idempotent ID de-duplicates retries
        // for the same object/event-type/attempt combination.
        let event_id = webhooks_core::utils::generate_event_id();
        let idempotent_event_id = webhooks_core::utils::get_idempotent_event_id(
            &tracking_data.primary_object_id,
            tracking_data.event_type,
            delivery_attempt,
        );
        let initial_event = match &tracking_data.initial_attempt_id {
            Some(initial_attempt_id) => {
                db.find_event_by_merchant_id_event_id(
                    key_manager_state,
                    &business_profile.merchant_id,
                    initial_attempt_id,
                    &key_store,
                )
                .await?
            }
            // Tracking data inserted by old version of application, fetch event using old event ID
            // format
            None => {
                let old_event_id = format!(
                    "{}_{}",
                    tracking_data.primary_object_id, tracking_data.event_type
                );
                db.find_event_by_merchant_id_event_id(
                    key_manager_state,
                    &business_profile.merchant_id,
                    &old_event_id,
                    &key_store,
                )
                .await?
            }
        };
        let now = common_utils::date_time::now();
        // Clone the initial event into a new attempt row, marked not-yet-notified.
        let new_event = domain::Event {
            event_id,
            event_type: initial_event.event_type,
            event_class: initial_event.event_class,
            is_webhook_notified: false,
            primary_object_id: initial_event.primary_object_id,
            primary_object_type: initial_event.primary_object_type,
            created_at: now,
            merchant_id: Some(business_profile.merchant_id.clone()),
            business_profile_id: Some(business_profile.get_id().to_owned()),
            primary_object_created_at: initial_event.primary_object_created_at,
            idempotent_event_id: Some(idempotent_event_id),
            initial_attempt_id: Some(initial_event.event_id.clone()),
            request: initial_event.request,
            response: None,
            delivery_attempt: Some(delivery_attempt),
            metadata: initial_event.metadata,
            is_overall_delivery_successful: Some(false),
        };
        let event = db
            .insert_event(key_manager_state, new_event, &key_store)
            .await
            .inspect_err(|error| {
                logger::error!(?error, "Failed to insert event in events table");
            })?;
        match &event.request {
            // Stored request payload available: replay it verbatim.
            Some(request) => {
                let request_content: OutgoingWebhookRequestContent = request
                    .get_inner()
                    .peek()
                    .parse_struct("OutgoingWebhookRequestContent")?;
                Box::pin(webhooks_core::trigger_webhook_and_raise_event(
                    state.clone(),
                    business_profile,
                    &key_store,
                    event,
                    request_content,
                    delivery_attempt,
                    None,
                    Some(process),
                ))
                .await;
            }
            // Event inserted by old version of application, fetch current information about
            // resource
            None => {
                let merchant_account = db
                    .find_merchant_account_by_merchant_id(
                        key_manager_state,
                        &tracking_data.merchant_id,
                        &key_store,
                    )
                    .await?;
                // TODO: Add request state for the PT flows as well
                let (content, event_type) = Box::pin(get_outgoing_webhook_content_and_event_type(
                    state.clone(),
                    state.get_req_state(),
                    merchant_account.clone(),
                    key_store.clone(),
                    &tracking_data,
                ))
                .await?;
                match event_type {
                    // Resource status is same as the event type of the current event
                    Some(event_type) if event_type == tracking_data.event_type => {
                        let outgoing_webhook = OutgoingWebhook {
                            merchant_id: tracking_data.merchant_id.clone(),
                            event_id: event.event_id.clone(),
                            event_type,
                            content: content.clone(),
                            timestamp: event.created_at,
                        };
                        let request_content = webhooks_core::get_outgoing_webhook_request(
                            &merchant_account,
                            outgoing_webhook,
                            &business_profile,
                        )
                        .map_err(|error| {
                            logger::error!(
                                ?error,
                                "Failed to obtain outgoing webhook request content"
                            );
                            errors::ProcessTrackerError::EApiErrorResponse
                        })?;
                        Box::pin(webhooks_core::trigger_webhook_and_raise_event(
                            state.clone(),
                            business_profile,
                            &key_store,
                            event,
                            request_content,
                            delivery_attempt,
                            Some(content),
                            Some(process),
                        ))
                        .await;
                    }
                    // Resource status has changed since the event was created, finish task
                    _ => {
                        logger::warn!(
                            %event.event_id,
                            "The current status of the resource `{:?}` (event type: {:?}) and the status of \
                            the resource when the event was created (event type: {:?}) differ, finishing task",
                            tracking_data.primary_object_id,
                            event_type,
                            tracking_data.event_type
                        );
                        db.as_scheduler()
                            .finish_process_with_business_status(
                                process.clone(),
                                business_status::RESOURCE_STATUS_MISMATCH,
                            )
                            .await?;
                    }
                }
            }
        };
        Ok(())
    }
    /// Outgoing-webhook retries are not implemented for v2 yet.
    #[cfg(feature = "v2")]
    async fn execute_workflow<'a>(
        &'a self,
        _state: &'a SessionState,
        _process: storage::ProcessTracker,
    ) -> Result<(), errors::ProcessTrackerError> {
        todo!()
    }
    /// Delegates failures to the shared scheduler consumer error handler.
    #[instrument(skip_all)]
    async fn error_handler<'a>(
        &'a self,
        state: &'a SessionState,
        process: storage::ProcessTracker,
        error: errors::ProcessTrackerError,
    ) -> errors::CustomResult<(), errors::ProcessTrackerError> {
        consumer::consumer_error_handler(state.store.as_scheduler(), process, error).await
    }
}
/// Get the schedule time for the specified retry count.
///
/// The schedule time can be configured in configs with this key: `pt_mapping_outgoing_webhooks`.
///
/// ```json
/// {
/// "default_mapping": {
/// "start_after": 60,
/// "frequency": [300],
/// "count": [5]
/// },
/// "custom_merchant_mapping": {
/// "merchant_id1": {
/// "start_after": 30,
/// "frequency": [300],
/// "count": [2]
/// }
/// }
/// }
/// ```
///
/// This configuration value represents:
/// - `default_mapping.start_after`: The first retry attempt should happen after 60 seconds by
/// default.
/// - `default_mapping.frequency` and `count`: The next 5 retries should have an interval of 300
/// seconds between them by default.
/// - `custom_merchant_mapping.merchant_id1`: Merchant-specific retry configuration for merchant
/// with merchant ID `merchant_id1`.
#[cfg(feature = "v1")]
#[instrument(skip_all)]
pub(crate) async fn get_webhook_delivery_retry_schedule_time(
    db: &dyn StorageInterface,
    merchant_id: &common_utils::id_type::MerchantId,
    retry_count: i32,
) -> Option<time::PrimitiveDateTime> {
    let key = "pt_mapping_outgoing_webhooks";
    // Try the merchant-configurable mapping first.
    let lookup = db
        .find_config_by_key(key)
        .await
        .map(|value| value.config)
        .and_then(|config| {
            config
                .parse_struct("OutgoingWebhookRetryProcessTrackerMapping")
                .change_context(errors::StorageError::DeserializationFailed)
        });
    let mapping = match lookup {
        Ok(mapping) => {
            logger::debug!(?mapping, "Using custom outgoing webhooks retry config");
            mapping
        }
        Err(error) => {
            // A missing key is expected (defaults apply); other failures are logged
            // as errors before falling back.
            if error.current_context().is_db_not_found() {
                logger::debug!("Outgoing webhooks retry config `{key}` not found, ignoring");
            } else {
                logger::error!(
                    ?error,
                    "Failed to read outgoing webhooks retry config `{key}`"
                );
            }
            process_data::OutgoingWebhookRetryProcessTrackerMapping::default()
        }
    };
    let time_delta = scheduler_utils::get_outgoing_webhook_retry_schedule_time(
        mapping,
        merchant_id,
        retry_count,
    );
    scheduler_utils::get_time_from_delta(time_delta)
}
/// Schedule the webhook delivery task for another attempt.
///
/// When no further schedule time is available the task is finished with
/// `RETRIES_EXCEEDED`.
#[cfg(feature = "v1")]
#[instrument(skip_all)]
pub(crate) async fn retry_webhook_delivery_task(
    db: &dyn StorageInterface,
    merchant_id: &common_utils::id_type::MerchantId,
    process: storage::ProcessTracker,
) -> errors::CustomResult<(), errors::StorageError> {
    let next_attempt_at =
        get_webhook_delivery_retry_schedule_time(db, merchant_id, process.retry_count + 1).await;
    if let Some(schedule_time) = next_attempt_at {
        db.as_scheduler()
            .retry_process(process, schedule_time)
            .await
    } else {
        db.as_scheduler()
            .finish_process_with_business_status(process, business_status::RETRIES_EXCEEDED)
            .await
    }
}
/// Fetch the current state of the resource referenced by `tracking_data` and build
/// outgoing webhook content for it, along with the event type implied by the
/// resource's current status.
///
/// The returned event type is `None` when the resource's status does not map to a
/// webhook event; callers compare it against the originally scheduled event type to
/// decide whether the webhook should still be delivered.
#[cfg(feature = "v1")]
#[instrument(skip_all)]
async fn get_outgoing_webhook_content_and_event_type(
    state: SessionState,
    req_state: ReqState,
    merchant_account: domain::MerchantAccount,
    key_store: domain::MerchantKeyStore,
    tracking_data: &OutgoingWebhookTrackingData,
) -> Result<(OutgoingWebhookContent, Option<EventType>), errors::ProcessTrackerError> {
    use api_models::{
        mandates::MandateId,
        payments::{PaymentIdType, PaymentsResponse, PaymentsRetrieveRequest},
        refunds::{RefundResponse, RefundsRetrieveRequest},
    };

    use crate::{
        core::{
            disputes::retrieve_dispute,
            mandate::get_mandate,
            payments::{payments_core, CallConnectorAction, PaymentStatus},
            refunds::refund_retrieve_core_with_refund_id,
        },
        services::{ApplicationResponse, AuthFlow},
        types::{
            api::{DisputeId, PSync},
            transformers::ForeignFrom,
        },
    };
    // Dispatch on the class of resource the event was raised for; each arm performs
    // a local (non-connector) sync and maps the current status to an event type.
    match tracking_data.event_class {
        diesel_models::enums::EventClass::Payments => {
            let payment_id = tracking_data.primary_object_id.clone();
            let payment_id =
                common_utils::id_type::PaymentId::try_from(std::borrow::Cow::Owned(payment_id))
                    .map_err(|payment_id_parsing_error| {
                        logger::error!(
                            ?payment_id_parsing_error,
                            "Failed to parse payment ID from tracking data"
                        );
                        errors::ProcessTrackerError::DeserializationFailed
                    })?;
            // force_sync is false: read the stored state, no connector call.
            let request = PaymentsRetrieveRequest {
                resource_id: PaymentIdType::PaymentIntentId(payment_id),
                merchant_id: Some(tracking_data.merchant_id.clone()),
                force_sync: false,
                ..Default::default()
            };
            let payments_response = match Box::pin(payments_core::<
                PSync,
                PaymentsResponse,
                _,
                _,
                _,
                payments::PaymentData<PSync>,
            >(
                state,
                req_state,
                merchant_account,
                None,
                key_store,
                PaymentStatus,
                request,
                AuthFlow::Client,
                CallConnectorAction::Avoid,
                None,
                hyperswitch_domain_models::payments::HeaderPayload::default(),
                None, //Platform merchant account
            ))
            .await?
            {
                ApplicationResponse::Json(payments_response)
                | ApplicationResponse::JsonWithHeaders((payments_response, _)) => {
                    Ok(payments_response)
                }
                // Any non-JSON response shape means we could not obtain the resource.
                ApplicationResponse::StatusOk
                | ApplicationResponse::TextPlain(_)
                | ApplicationResponse::JsonForRedirection(_)
                | ApplicationResponse::Form(_)
                | ApplicationResponse::GenericLinkForm(_)
                | ApplicationResponse::PaymentLinkForm(_)
                | ApplicationResponse::FileData(_) => {
                    Err(errors::ProcessTrackerError::ResourceFetchingFailed {
                        resource_name: tracking_data.primary_object_id.clone(),
                    })
                }
            }?;
            let event_type = Option::<EventType>::foreign_from(payments_response.status);
            logger::debug!(current_resource_status=%payments_response.status);
            Ok((
                OutgoingWebhookContent::PaymentDetails(Box::new(payments_response)),
                event_type,
            ))
        }
        diesel_models::enums::EventClass::Refunds => {
            let refund_id = tracking_data.primary_object_id.clone();
            let request = RefundsRetrieveRequest {
                refund_id,
                force_sync: Some(false),
                merchant_connector_details: None,
            };
            let refund = Box::pin(refund_retrieve_core_with_refund_id(
                state,
                merchant_account,
                None,
                key_store,
                request,
            ))
            .await?;
            let event_type = Option::<EventType>::foreign_from(refund.refund_status);
            logger::debug!(current_resource_status=%refund.refund_status);
            let refund_response = RefundResponse::foreign_from(refund);
            Ok((
                OutgoingWebhookContent::RefundDetails(Box::new(refund_response)),
                event_type,
            ))
        }
        diesel_models::enums::EventClass::Disputes => {
            let dispute_id = tracking_data.primary_object_id.clone();
            let request = DisputeId { dispute_id };
            let dispute_response =
                match retrieve_dispute(state, merchant_account, None, request).await? {
                    ApplicationResponse::Json(dispute_response)
                    | ApplicationResponse::JsonWithHeaders((dispute_response, _)) => {
                        Ok(dispute_response)
                    }
                    ApplicationResponse::StatusOk
                    | ApplicationResponse::TextPlain(_)
                    | ApplicationResponse::JsonForRedirection(_)
                    | ApplicationResponse::Form(_)
                    | ApplicationResponse::GenericLinkForm(_)
                    | ApplicationResponse::PaymentLinkForm(_)
                    | ApplicationResponse::FileData(_) => {
                        Err(errors::ProcessTrackerError::ResourceFetchingFailed {
                            resource_name: tracking_data.primary_object_id.clone(),
                        })
                    }
                }
                .map(Box::new)?;
            // Dispute statuses always map to an event type, hence `Some(..)` here.
            let event_type = Some(EventType::foreign_from(dispute_response.dispute_status));
            logger::debug!(current_resource_status=%dispute_response.dispute_status);
            Ok((
                OutgoingWebhookContent::DisputeDetails(dispute_response),
                event_type,
            ))
        }
        diesel_models::enums::EventClass::Mandates => {
            let mandate_id = tracking_data.primary_object_id.clone();
            let request = MandateId { mandate_id };
            let mandate_response =
                match get_mandate(state, merchant_account, key_store, request).await? {
                    ApplicationResponse::Json(mandate_response)
                    | ApplicationResponse::JsonWithHeaders((mandate_response, _)) => {
                        Ok(mandate_response)
                    }
                    ApplicationResponse::StatusOk
                    | ApplicationResponse::TextPlain(_)
                    | ApplicationResponse::JsonForRedirection(_)
                    | ApplicationResponse::Form(_)
                    | ApplicationResponse::GenericLinkForm(_)
                    | ApplicationResponse::PaymentLinkForm(_)
                    | ApplicationResponse::FileData(_) => {
                        Err(errors::ProcessTrackerError::ResourceFetchingFailed {
                            resource_name: tracking_data.primary_object_id.clone(),
                        })
                    }
                }
                .map(Box::new)?;
            let event_type = Option::<EventType>::foreign_from(mandate_response.status);
            logger::debug!(current_resource_status=%mandate_response.status);
            Ok((
                OutgoingWebhookContent::MandateDetails(mandate_response),
                event_type,
            ))
        }
        #[cfg(feature = "payouts")]
        diesel_models::enums::EventClass::Payouts => {
            let payout_id = tracking_data.primary_object_id.clone();
            let request = payout_models::PayoutRequest::PayoutActionRequest(
                payout_models::PayoutActionRequest { payout_id },
            );
            let payout_data = payouts::make_payout_data(
                &state,
                &merchant_account,
                None,
                &key_store,
                &request,
                DEFAULT_LOCALE,
            )
            .await?;
            let router_response =
                payouts::response_handler(&state, &merchant_account, &payout_data).await?;
            let payout_create_response: payout_models::PayoutCreateResponse = match router_response
            {
                ApplicationResponse::Json(response) => response,
                _ => Err(errors::ApiErrorResponse::WebhookResourceNotFound)
                    .attach_printable("Failed to fetch the payout create response")?,
            };
            let event_type = Option::<EventType>::foreign_from(payout_data.payout_attempt.status);
            logger::debug!(current_resource_status=%payout_data.payout_attempt.status);
            Ok((
                OutgoingWebhookContent::PayoutDetails(Box::new(payout_create_response)),
                event_type,
            ))
        }
    }
}
| 3,970 | 1,738 |
hyperswitch | crates/router/src/workflows/refund_router.rs | .rs | use scheduler::consumer::workflows::ProcessTrackerWorkflow;
#[cfg(feature = "v1")]
use crate::core::refunds as refund_flow;
use crate::{errors, logger::error, routes::SessionState, types::storage};
pub struct RefundWorkflowRouter;
#[async_trait::async_trait]
impl ProcessTrackerWorkflow<SessionState> for RefundWorkflowRouter {
    /// Hand the process-tracker entry over to the refund workflow driver, mapping
    /// its error type into `ProcessTrackerError`.
    #[cfg(feature = "v1")]
    async fn execute_workflow<'a>(
        &'a self,
        state: &'a SessionState,
        process: storage::ProcessTracker,
    ) -> Result<(), errors::ProcessTrackerError> {
        Box::pin(refund_flow::start_refund_workflow(state, &process))
            .await
            .map_err(Into::into)
    }
    /// Refund workflows are not implemented for v2 yet.
    #[cfg(feature = "v2")]
    async fn execute_workflow<'a>(
        &'a self,
        _state: &'a SessionState,
        _process: storage::ProcessTracker,
    ) -> Result<(), errors::ProcessTrackerError> {
        todo!()
    }
    /// Log the failure; the process-tracker entry itself is left untouched.
    async fn error_handler<'a>(
        &'a self,
        _state: &'a SessionState,
        process: storage::ProcessTracker,
        _error: errors::ProcessTrackerError,
    ) -> errors::CustomResult<(), errors::ProcessTrackerError> {
        error!(%process.id, "Failed while executing workflow");
        Ok(())
    }
}
| 281 | 1,739 |
hyperswitch | crates/router/src/workflows/tokenized_data.rs | .rs | use scheduler::consumer::workflows::ProcessTrackerWorkflow;
#[cfg(feature = "v1")]
use crate::core::payment_methods::vault;
use crate::{errors, logger::error, routes::SessionState, types::storage};
/// Scheduler workflow handler that triggers the vault's tokenized-data
/// deletion flow for due process-tracker tasks.
pub struct DeleteTokenizeDataWorkflow;

#[async_trait::async_trait]
impl ProcessTrackerWorkflow<SessionState> for DeleteTokenizeDataWorkflow {
    /// v1: delegates to the vault tokenize-data workflow for this process
    /// entry; `?` converts the inner error into `ProcessTrackerError`.
    #[cfg(feature = "v1")]
    async fn execute_workflow<'a>(
        &'a self,
        state: &'a SessionState,
        process: storage::ProcessTracker,
    ) -> Result<(), errors::ProcessTrackerError> {
        Ok(vault::start_tokenize_data_workflow(state, &process).await?)
    }
    /// v2: intentionally unimplemented — panics via `todo!()` if invoked.
    #[cfg(feature = "v2")]
    async fn execute_workflow<'a>(
        &'a self,
        _state: &'a SessionState,
        _process: storage::ProcessTracker,
    ) -> Result<(), errors::ProcessTrackerError> {
        todo!()
    }
    /// Logs the failing process id and returns `Ok(())`, swallowing the error
    /// so the scheduler can continue with other tasks.
    async fn error_handler<'a>(
        &'a self,
        _state: &'a SessionState,
        process: storage::ProcessTracker,
        _error: errors::ProcessTrackerError,
    ) -> errors::CustomResult<(), errors::ProcessTrackerError> {
        error!(%process.id, "Failed while executing workflow");
        Ok(())
    }
}
| 278 | 1,740 |
hyperswitch | crates/hsdev/Cargo.toml | .toml | [package]
name = "hsdev"
version = "0.1.0"
license.workspace = true
edition.workspace = true
rust-version.workspace = true
description = "A simple diesel postgres migrator that uses TOML files"
repository = "https://github.com/juspay/hyperswitch.git"
readme = "README.md"
[dependencies]
clap = { version = "4.1.8", features = ["derive"] }
diesel = { version = "2.2.3", features = ["postgres"] }
diesel_migrations = "2.1.0"
serde = { version = "1.0", features = ["derive"] }
toml = "0.5"
[lints]
workspace = true
| 157 | 1,741 |
hyperswitch | crates/hsdev/README.md | .md | # HSDEV
`hsdev` is a simple diesel Postgres migration tool. It is designed to simplify running Postgres database migrations with diesel.
## Installing hsdev
`hsdev` can be installed using `cargo`
```shell
cargo install --force --path crates/hsdev
```
## Using hsdev
Using `hsdev` is simple. All you need to do is run the following command.
```shell
hsdev --toml-file [path/to/TOML/file]
```
provide `hsdev` with a TOML file containing the following keys:
```toml
username = "your_username"
password = "your_password"
dbname = "your_db_name"
host = "your_db_host"
port = 5432
```
Simply run the command and let `hsdev` handle the rest.
| 160 | 1,742 |
hyperswitch | crates/hsdev/src/input_file.rs | .rs | use std::string::String;
use serde::Deserialize;
use toml::Value;
/// Postgres connection settings deserialized from a TOML table.
///
/// All fields are required: deserialization fails with a `toml::de::Error`
/// if any key is missing (no serde defaults are declared).
#[derive(Deserialize)]
pub struct InputData {
    username: String,
    password: String,
    dbname: String,
    host: String,
    port: u16,
}
impl InputData {
    /// Deserializes connection settings out of a TOML value.
    ///
    /// The value is cloned because TOML deserialization consumes it.
    pub fn read(db_table: &Value) -> Result<Self, toml::de::Error> {
        let owned = db_table.clone();
        owned.try_into()
    }

    /// Renders the settings as a `postgres://user:password@host:port/dbname`
    /// connection URL.
    pub fn postgres_url(&self) -> String {
        let mut url = String::from("postgres://");
        url.push_str(&self.username);
        url.push(':');
        url.push_str(&self.password);
        url.push('@');
        url.push_str(&self.host);
        url.push(':');
        url.push_str(&self.port.to_string());
        url.push('/');
        url.push_str(&self.dbname);
        url
    }
}
| 140 | 1,743 |
hyperswitch | crates/hsdev/src/main.rs | .rs | #![allow(clippy::print_stdout, clippy::print_stderr)]
use clap::{Parser, ValueHint};
use diesel::{pg::PgConnection, Connection};
use diesel_migrations::{FileBasedMigrations, HarnessWithOutput, MigrationHarness};
use toml::Value;
mod input_file;
// CLI arguments for the migrator. Note: plain `//` comments are used on
// purpose — `///` doc comments on a clap derive would change the generated
// `--help` text.
#[derive(Parser, Debug)]
#[command(author, version, about, long_about = None)]
struct Args {
    // Path to the TOML file holding the database connection settings.
    #[arg(short, long, value_hint = ValueHint::FilePath)]
    toml_file: std::path::PathBuf,
    // Optional table name inside the TOML file; empty string (the default)
    // means the settings live at the document root.
    #[arg(long, default_value_t = String::from(""))]
    toml_table: String,
}
/// Entry point: reads DB settings from a TOML file and runs all pending
/// diesel migrations against that Postgres database. Every failure prints a
/// message and returns early (exit code 0 in all cases, matching the
/// original behavior).
fn main() {
    let args = Args::parse();

    // Load the raw configuration text from disk.
    let raw_toml = match std::fs::read_to_string(&args.toml_file) {
        Ok(text) => text,
        Err(e) => {
            eprintln!("Error reading TOML file: {}", e);
            return;
        }
    };

    // Parse it as a TOML document.
    let parsed: Value = match raw_toml.parse() {
        Ok(value) => value,
        Err(e) => {
            eprintln!("Error parsing TOML file: {}", e);
            return;
        }
    };

    // Narrow down to the requested table (or keep the whole document).
    let section = get_toml_table(&args.toml_table, &parsed);

    // Deserialize the connection settings.
    let config = match input_file::InputData::read(section) {
        Ok(data) => data,
        Err(e) => {
            eprintln!("Error loading TOML file: {}", e);
            return;
        }
    };

    let db_url = config.postgres_url();
    println!("Attempting to connect to {}", db_url);

    // Open the Postgres connection.
    let mut connection = match PgConnection::establish(&db_url) {
        Ok(conn) => conn,
        Err(_) => {
            eprintln!("Unable to establish database connection");
            return;
        }
    };

    // Locate the diesel migrations directory.
    let migration_source = match FileBasedMigrations::find_migrations_directory() {
        Ok(found) => found,
        Err(_) => {
            eprintln!("Could not find migrations directory");
            return;
        }
    };

    // Apply all pending migrations, echoing progress to stdout.
    match HarnessWithOutput::write_to_stdout(&mut connection)
        .run_pending_migrations(migration_source)
    {
        Ok(_) => println!("Successfully ran migrations"),
        Err(_) => eprintln!("Couldn't run migrations"),
    };
}
/// Resolves which TOML value the connection settings are read from.
///
/// With a non-empty `table_name`, returns that sub-table of `toml_data`; if
/// the table is missing, prints an error and terminates the process with
/// exit code 1. With an empty `table_name`, the whole document is returned.
pub fn get_toml_table<'a>(table_name: &'a str, toml_data: &'a Value) -> &'a Value {
    if !table_name.is_empty() {
        match toml_data.get(table_name) {
            Some(value) => value,
            None => {
                eprintln!("Unable to find toml table: \"{}\"", &table_name);
                // A missing table is an ordinary user error, not a program
                // bug: exit with a conventional failure status instead of
                // `abort()`, which raises SIGABRT and may dump core.
                std::process::exit(1)
            }
        }
    } else {
        toml_data
    }
}
#[cfg(test)]
mod tests {
    #![allow(clippy::unwrap_used)]
    use std::str::FromStr;

    use toml::Value;

    use crate::{get_toml_table, input_file::InputData};

    /// Verifies that a flat TOML document deserializes into `InputData` and
    /// renders the expected Postgres connection URL.
    #[test]
    fn test_input_file() {
        let toml_str = r#"username = "db_user"
        password = "db_pass"
        dbname = "db_name"
        host = "localhost"
        port = 5432"#;
        let toml_value = Value::from_str(toml_str);
        assert!(toml_value.is_ok());
        let toml_value = toml_value.unwrap();
        let toml_table = InputData::read(&toml_value);
        assert!(toml_table.is_ok());
        let toml_table = toml_table.unwrap();
        let db_url = toml_table.postgres_url();
        assert_eq!("postgres://db_user:db_pass@localhost:5432/db_name", db_url);
    }

    /// Verifies table resolution: a named table returns that sub-table, and
    /// an empty name returns the whole document (also a table).
    #[test]
    fn test_given_toml() {
        let toml_str_table = r#"[database]
        username = "db_user"
        password = "db_pass"
        dbname = "db_name"
        host = "localhost"
        port = 5432"#;
        let table_name = "database";
        let toml_value = Value::from_str(toml_str_table).unwrap();
        let table = get_toml_table(table_name, &toml_value);
        assert!(table.is_table());
        let table_name = "";
        let table = get_toml_table(table_name, &toml_value);
        assert!(table.is_table());
    }
}
| 951 | 1,744 |
hyperswitch | crates/analytics/Cargo.toml | .toml | [package]
name = "analytics"
version = "0.1.0"
description = "Analytics / Reports / Search related functionality"
edition.workspace = true
rust-version.workspace = true
license.workspace = true
[features]
v1 = ["api_models/v1", "diesel_models/v1", "hyperswitch_domain_models/v1", "storage_impl/v1", "common_utils/v1"]
v2 = ["api_models/v2", "diesel_models/v2", "hyperswitch_domain_models/v2", "storage_impl/v2", "common_utils/v2"]
[dependencies]
# First party crates
api_models = { version = "0.1.0", path = "../api_models", features = ["errors"] }
common_enums = { version = "0.1.0", path = "../common_enums" }
common_utils = { version = "0.1.0", path = "../common_utils" }
diesel_models = { version = "0.1.0", path = "../diesel_models", features = ["kv_store"], default-features = false }
hyperswitch_domain_models = { version = "0.1.0", path = "../hyperswitch_domain_models", default-features = false }
hyperswitch_interfaces = { version = "0.1.0", path = "../hyperswitch_interfaces", default-features = false }
masking = { version = "0.1.0", path = "../masking" }
router_env = { version = "0.1.0", path = "../router_env", features = ["log_extra_implicit_fields", "log_custom_entries_to_extra"] }
storage_impl = { version = "0.1.0", path = "../storage_impl", default-features = false }
currency_conversion = { version = "0.1.0", path = "../currency_conversion" }
#Third Party dependencies
actix-web = "4.5.1"
async-trait = "0.1.79"
aws-config = { version = "1.5.10", features = ["behavior-version-latest"] }
aws-sdk-lambda = { version = "1.60.0" }
aws-smithy-types = { version = "1.3.0" }
bigdecimal = { version = "0.4.5", features = ["serde"] }
error-stack = "0.4.1"
futures = "0.3.30"
once_cell = "1.19.0"
opensearch = { version = "2.2.0", features = ["aws-auth"] }
reqwest = { version = "0.11.27", features = ["serde_json"] }
rust_decimal = "1.35"
serde = { version = "1.0.197", features = ["derive", "rc"] }
serde_json = "1.0.115"
sqlx = { version = "0.8.3", features = ["postgres", "runtime-tokio", "runtime-tokio-native-tls", "time", "bigdecimal"] }
strum = { version = "0.26.2", features = ["derive"] }
thiserror = "1.0.58"
time = { version = "0.3.35", features = ["serde", "serde-well-known", "std"] }
tokio = { version = "1.37.0", features = ["macros", "rt-multi-thread"] }
[lints]
workspace = true
| 746 | 1,745 |
hyperswitch | crates/analytics/docs/README.md | .md | # Running Kafka & Clickhouse with Analytics and Events Source Configuration
This document provides instructions on how to run Kafka and Clickhouse using Docker Compose, and how to configure the analytics and events source.
## Architecture
+------------------------+
| Hyperswitch |
+------------------------+
|
|
v
+------------------------+
| Kafka |
| (Event Stream Broker) |
+------------------------+
|
|
v
+------------------------+
| ClickHouse |
| +------------------+ |
| | Kafka Engine | |
| | Table | |
| +------------------+ |
| | |
| v |
| +------------------+ |
| | Materialized | |
| | View (MV) | |
| +------------------+ |
| | |
| v |
| +------------------+ |
| | Storage Table | |
| +------------------+ |
+------------------------+
## Starting the Containers
Docker Compose can be used to start all the components.
Run the following command:
```bash
docker compose --profile olap up -d
```
This will spawn up the following services
1. kafka
2. clickhouse
3. opensearch
## Setting up Kafka
Kafka-UI is a visual tool for inspecting Kafka and it can be accessed at `localhost:8090` to view topics, partitions, consumers & generated events.
## Setting up Clickhouse
Once Clickhouse is up and running, you can interact with it via web.
You can either visit the URL (`http://localhost:8123/play`) where the Clickhouse server is running to get a playground, or you can bash into the Clickhouse container and execute commands manually.
Run the following commands:
```bash
# On your local terminal
docker compose exec clickhouse-server bash
# Inside the clickhouse-server container shell
clickhouse-client --user default
# Inside the clickhouse-client shell
SHOW TABLES;
```
## Configuring Analytics and Events Source
To use Clickhouse and Kafka, you need to enable the `analytics.source` and update the `events.source` in the configuration file.
You can do this in either the `config/development.toml` or `config/docker_compose.toml` file.
Here's an example of how to do this:
```toml
[analytics]
source = "clickhouse"
[events]
source = "kafka"
```
After making this change, save the file and restart your application for the changes to take effect.
## Setting up Forex APIs
To use Forex services, you need to sign up and get your API keys from the following providers:
1. Primary Service
- Sign up for a free account and get your Primary API key [here](https://openexchangerates.org/).
- It will be in dashboard, labeled as `app_id`.
2. Fallback Service
- Sign up for a free account and get your Fallback API key [here](https://apilayer.com/marketplace/exchangerate_host-api).
- It will be in dashboard, labeled as `access key`.
### Configuring Forex APIs
To enable Forex functionality, update the `config/development.toml` or `config/docker_compose.toml` file:
```toml
[analytics]
forex_enabled = true # default set to false
```
To configure the Forex APIs, update the `config/development.toml` or `config/docker_compose.toml` file with your API keys:
```toml
[forex_api]
api_key = ""
fallback_api_key = ""
```
### Important Note
```bash
ERROR router::services::api: error: {"error":{"type":"api","message":"Failed to fetch currency exchange rate","code":"HE_00"}}
│
├─▶ Failed to fetch currency exchange rate
│
╰─▶ Could not acquire the lock for cache entry
```
_If you get the above error after setting up, simply remove the `redis` key `"{forex_cache}_lock"` by running this in shell_
```bash
redis-cli del "{forex_cache}_lock"
```
After making these changes, save the file and restart your application for the changes to take effect.
## Enabling Data Features in Dashboard
To check the data features in the dashboard, you need to enable them in the `config/dashboard.toml` configuration file.
Here's an example of how to do this:
```toml
[default.features]
audit_trail=true
system_metrics=true
global_search=true
```
## Viewing the data on OpenSearch Dashboard
To view the data on the OpenSearch dashboard, perform the following steps:
- Go to the OpenSearch Dashboard home and click on `Dashboards Management` under the Management tab
- Select `Index Patterns`
- Click on `Create index pattern`
- Define an index pattern with the same name that matches your indices and click on `Next Step`
- Select a time field that will be used for time-based queries
- Save the index pattern
Now, head on to `Discover` under the `OpenSearch Dashboards` tab, to select the newly created index pattern and query the data
| 1,129 | 1,746 |
hyperswitch | crates/analytics/docs/clickhouse/scripts/outgoing_webhook_events.sql | .sql | CREATE TABLE outgoing_webhook_events_queue (
`merchant_id` String,
`event_id` String,
`event_type` LowCardinality(String),
`outgoing_webhook_event_type` LowCardinality(String),
`payment_id` Nullable(String),
`refund_id` Nullable(String),
`attempt_id` Nullable(String),
`dispute_id` Nullable(String),
`payment_method_id` Nullable(String),
`mandate_id` Nullable(String),
`content` Nullable(String),
`is_error` Bool,
`error` Nullable(String),
`initial_attempt_id` Nullable(String),
`status_code` Nullable(UInt16),
`delivery_attempt` LowCardinality(String),
`created_at_timestamp` DateTime64(3)
) ENGINE = Kafka SETTINGS kafka_broker_list = 'kafka0:29092',
kafka_topic_list = 'hyperswitch-outgoing-webhook-events',
kafka_group_name = 'hyper',
kafka_format = 'JSONEachRow',
kafka_handle_error_mode = 'stream';
CREATE TABLE outgoing_webhook_events (
`merchant_id` LowCardinality(String),
`event_id` String,
`event_type` LowCardinality(String),
`outgoing_webhook_event_type` LowCardinality(String),
`payment_id` Nullable(String),
`refund_id` Nullable(String),
`attempt_id` Nullable(String),
`dispute_id` Nullable(String),
`payment_method_id` Nullable(String),
`mandate_id` Nullable(String),
`content` Nullable(String),
`is_error` Bool,
`error` Nullable(String),
`initial_attempt_id` Nullable(String),
`status_code` Nullable(UInt16),
`delivery_attempt` LowCardinality(String),
`created_at` DateTime64(3),
`inserted_at` DateTime DEFAULT now() CODEC(T64, LZ4),
INDEX eventIndex event_type TYPE bloom_filter GRANULARITY 1,
INDEX webhookeventIndex outgoing_webhook_event_type TYPE bloom_filter GRANULARITY 1
) ENGINE = MergeTree PARTITION BY toStartOfDay(created_at)
ORDER BY
(
created_at,
merchant_id,
event_id,
event_type,
outgoing_webhook_event_type
) TTL inserted_at + toIntervalMonth(18) SETTINGS index_granularity = 8192;
CREATE TABLE outgoing_webhook_events_audit (
`merchant_id` LowCardinality(String),
`event_id` String,
`event_type` LowCardinality(String),
`outgoing_webhook_event_type` LowCardinality(String),
`payment_id` String,
`refund_id` Nullable(String),
`attempt_id` Nullable(String),
`dispute_id` Nullable(String),
`payment_method_id` Nullable(String),
`mandate_id` Nullable(String),
`content` Nullable(String),
`is_error` Bool,
`error` Nullable(String),
`initial_attempt_id` Nullable(String),
`status_code` Nullable(UInt16),
`delivery_attempt` LowCardinality(String),
`created_at` DateTime64(3),
`inserted_at` DateTime DEFAULT now() CODEC(T64, LZ4)
) ENGINE = MergeTree PARTITION BY merchant_id
ORDER BY
(merchant_id, payment_id) TTL inserted_at + toIntervalMonth(18) SETTINGS index_granularity = 8192;
CREATE MATERIALIZED VIEW outgoing_webhook_events_mv TO outgoing_webhook_events (
`merchant_id` String,
`event_id` String,
`event_type` LowCardinality(String),
`outgoing_webhook_event_type` LowCardinality(String),
`payment_id` Nullable(String),
`refund_id` Nullable(String),
`attempt_id` Nullable(String),
`dispute_id` Nullable(String),
`payment_method_id` Nullable(String),
`mandate_id` Nullable(String),
`content` Nullable(String),
`is_error` Bool,
`error` Nullable(String),
`initial_attempt_id` Nullable(String),
`status_code` Nullable(UInt16),
`delivery_attempt` LowCardinality(String),
`created_at` DateTime64(3),
`inserted_at` DateTime DEFAULT now() CODEC(T64, LZ4)
) AS
SELECT
merchant_id,
event_id,
event_type,
outgoing_webhook_event_type,
payment_id,
refund_id,
attempt_id,
dispute_id,
payment_method_id,
mandate_id,
content,
is_error,
error,
initial_attempt_id,
status_code,
delivery_attempt,
created_at_timestamp AS created_at,
now() AS inserted_at
FROM
outgoing_webhook_events_queue
WHERE
length(_error) = 0;
CREATE MATERIALIZED VIEW outgoing_webhook_events_audit_mv TO outgoing_webhook_events_audit (
`merchant_id` String,
`event_id` String,
`event_type` LowCardinality(String),
`outgoing_webhook_event_type` LowCardinality(String),
`payment_id` String,
`refund_id` Nullable(String),
`attempt_id` Nullable(String),
`dispute_id` Nullable(String),
`payment_method_id` Nullable(String),
`mandate_id` Nullable(String),
`content` Nullable(String),
`is_error` Bool,
`error` Nullable(String),
`initial_attempt_id` Nullable(String),
`status_code` Nullable(UInt16),
`delivery_attempt` LowCardinality(String),
`created_at` DateTime64(3),
`inserted_at` DateTime DEFAULT now() CODEC(T64, LZ4)
) AS
SELECT
merchant_id,
event_id,
event_type,
outgoing_webhook_event_type,
payment_id,
refund_id,
attempt_id,
dispute_id,
payment_method_id,
mandate_id,
content,
is_error,
error,
initial_attempt_id,
status_code,
delivery_attempt,
created_at_timestamp AS created_at,
now() AS inserted_at
FROM
outgoing_webhook_events_queue
WHERE
(length(_error) = 0)
AND (payment_id IS NOT NULL);
CREATE MATERIALIZED VIEW outgoing_webhook_parse_errors (
`topic` String,
`partition` Int64,
`offset` Int64,
`raw` String,
`error` String
) ENGINE = MergeTree
ORDER BY
(topic, partition, offset) SETTINGS index_granularity = 8192 AS
SELECT
_topic AS topic,
_partition AS partition,
_offset AS offset,
_raw_message AS raw,
_error AS error
FROM
outgoing_webhook_events_queue
WHERE
length(_error) > 0; | 1,452 | 1,747 |
hyperswitch | crates/analytics/docs/clickhouse/scripts/authentications.sql | .sql | CREATE TABLE authentication_queue (
`authentication_id` String,
`merchant_id` String,
`authentication_connector` LowCardinality(String),
`connector_authentication_id` Nullable(String),
`authentication_data` Nullable(String),
`payment_method_id` Nullable(String),
`authentication_type` LowCardinality(Nullable(String)),
`authentication_status` LowCardinality(String),
`authentication_lifecycle_status` LowCardinality(String),
`created_at` DateTime64(3),
`modified_at` DateTime64(3),
`error_message` Nullable(String),
`error_code` Nullable(String),
`connector_metadata` Nullable(String),
`maximum_supported_version` LowCardinality(Nullable(String)),
`threeds_server_transaction_id` Nullable(String),
`cavv` Nullable(String),
`authentication_flow_type` Nullable(String),
`message_version` LowCardinality(Nullable(String)),
`eci` Nullable(String),
`trans_status` LowCardinality(Nullable(String)),
`acquirer_bin` Nullable(String),
`acquirer_merchant_id` Nullable(String),
`three_ds_method_data` Nullable(String),
`three_ds_method_url` Nullable(String),
`acs_url` Nullable(String),
`challenge_request` Nullable(String),
`acs_reference_number` Nullable(String),
`acs_trans_id` Nullable(String),
`acs_signed_content` Nullable(String),
`profile_id` String,
`payment_id` Nullable(String),
`merchant_connector_id` Nullable(String),
`ds_trans_id` Nullable(String),
`directory_server_id` Nullable(String),
`acquirer_country_code` Nullable(String),
`organization_id` String,
`sign_flag` Int8
) ENGINE = Kafka SETTINGS kafka_broker_list = 'kafka0:29092',
kafka_topic_list = 'hyperswitch-authentication-events',
kafka_group_name = 'hyper',
kafka_format = 'JSONEachRow',
kafka_handle_error_mode = 'stream';
CREATE TABLE authentications (
`authentication_id` String,
`merchant_id` String,
`authentication_connector` LowCardinality(String),
`connector_authentication_id` Nullable(String),
`authentication_data` Nullable(String),
`payment_method_id` Nullable(String),
`authentication_type` LowCardinality(Nullable(String)),
`authentication_status` LowCardinality(String),
`authentication_lifecycle_status` LowCardinality(String),
`created_at` DateTime64(3) DEFAULT now64(),
`inserted_at` DateTime64(3) DEFAULT now64(),
`modified_at` DateTime64(3) DEFAULT now64(),
`error_message` Nullable(String),
`error_code` Nullable(String),
`connector_metadata` Nullable(String),
`maximum_supported_version` LowCardinality(Nullable(String)),
`threeds_server_transaction_id` Nullable(String),
`cavv` Nullable(String),
`authentication_flow_type` Nullable(String),
`message_version` LowCardinality(Nullable(String)),
`eci` Nullable(String),
`trans_status` LowCardinality(Nullable(String)),
`acquirer_bin` Nullable(String),
`acquirer_merchant_id` Nullable(String),
`three_ds_method_data` Nullable(String),
`three_ds_method_url` Nullable(String),
`acs_url` Nullable(String),
`challenge_request` Nullable(String),
`acs_reference_number` Nullable(String),
`acs_trans_id` Nullable(String),
`acs_signed_content` Nullable(String),
`profile_id` String,
`payment_id` Nullable(String),
`merchant_connector_id` Nullable(String),
`ds_trans_id` Nullable(String),
`directory_server_id` Nullable(String),
`acquirer_country_code` Nullable(String),
`organization_id` String,
`sign_flag` Int8,
INDEX authenticationConnectorIndex authentication_connector TYPE bloom_filter GRANULARITY 1,
INDEX transStatusIndex trans_status TYPE bloom_filter GRANULARITY 1,
INDEX authenticationTypeIndex authentication_type TYPE bloom_filter GRANULARITY 1,
INDEX authenticationStatusIndex authentication_status TYPE bloom_filter GRANULARITY 1
) ENGINE = CollapsingMergeTree(sign_flag) PARTITION BY toStartOfDay(created_at)
ORDER BY
(created_at, merchant_id, authentication_id) TTL toStartOfDay(created_at) + toIntervalMonth(18) SETTINGS index_granularity = 8192;
CREATE MATERIALIZED VIEW authentication_mv TO authentications (
`authentication_id` String,
`merchant_id` String,
`authentication_connector` LowCardinality(String),
`connector_authentication_id` Nullable(String),
`authentication_data` Nullable(String),
`payment_method_id` Nullable(String),
`authentication_type` LowCardinality(Nullable(String)),
`authentication_status` LowCardinality(String),
`authentication_lifecycle_status` LowCardinality(String),
`created_at` DateTime64(3) DEFAULT now64(),
`inserted_at` DateTime64(3) DEFAULT now64(),
`modified_at` DateTime64(3) DEFAULT now64(),
`error_message` Nullable(String),
`error_code` Nullable(String),
`connector_metadata` Nullable(String),
`maximum_supported_version` LowCardinality(Nullable(String)),
`threeds_server_transaction_id` Nullable(String),
`cavv` Nullable(String),
`authentication_flow_type` Nullable(String),
`message_version` LowCardinality(Nullable(String)),
`eci` Nullable(String),
`trans_status` LowCardinality(Nullable(String)),
`acquirer_bin` Nullable(String),
`acquirer_merchant_id` Nullable(String),
`three_ds_method_data` Nullable(String),
`three_ds_method_url` Nullable(String),
`acs_url` Nullable(String),
`challenge_request` Nullable(String),
`acs_reference_number` Nullable(String),
`acs_trans_id` Nullable(String),
`acs_signed_content` Nullable(String),
`profile_id` String,
`payment_id` Nullable(String),
`merchant_connector_id` Nullable(String),
`ds_trans_id` Nullable(String),
`directory_server_id` Nullable(String),
`acquirer_country_code` Nullable(String),
`organization_id` String,
`sign_flag` Int8
) AS
SELECT
authentication_id,
merchant_id,
authentication_connector,
connector_authentication_id,
authentication_data,
payment_method_id,
authentication_type,
authentication_status,
authentication_lifecycle_status,
created_at,
now64() as inserted_at,
modified_at,
error_message,
error_code,
connector_metadata,
maximum_supported_version,
threeds_server_transaction_id,
cavv,
authentication_flow_type,
message_version,
eci,
trans_status,
acquirer_bin,
acquirer_merchant_id,
three_ds_method_data,
three_ds_method_url,
acs_url,
challenge_request,
acs_reference_number,
acs_trans_id,
acs_signed_content,
profile_id,
payment_id,
merchant_connector_id,
ds_trans_id,
directory_server_id,
acquirer_country_code,
organization_id,
sign_flag
FROM
authentication_queue
WHERE
length(_error) = 0;
CREATE MATERIALIZED VIEW authentication_parse_errors (
`topic` String,
`partition` Int64,
`offset` Int64,
`raw` String,
`error` String
) ENGINE = MergeTree
ORDER BY
(topic, partition, offset) SETTINGS index_granularity = 8192 AS
SELECT
_topic AS topic,
_partition AS partition,
_offset AS offset,
_raw_message AS raw,
_error AS error
FROM
authentication_queue
WHERE
length(_error) > 0; | 1,662 | 1,748 |
hyperswitch | crates/analytics/docs/clickhouse/scripts/disputes.sql | .sql | CREATE TABLE dispute_queue (
`dispute_id` String,
`dispute_amount` UInt32,
`currency` String,
`dispute_stage` LowCardinality(String),
`dispute_status` LowCardinality(String),
`payment_id` String,
`attempt_id` String,
`merchant_id` String,
`connector_status` String,
`connector_dispute_id` String,
`connector_reason` Nullable(String),
`connector_reason_code` Nullable(String),
`challenge_required_by` Nullable(DateTime) CODEC(T64, LZ4),
`connector_created_at` Nullable(DateTime) CODEC(T64, LZ4),
`connector_updated_at` Nullable(DateTime) CODEC(T64, LZ4),
`created_at` DateTime CODEC(T64, LZ4),
`modified_at` DateTime CODEC(T64, LZ4),
`connector` LowCardinality(String),
`evidence` Nullable(String),
`profile_id` Nullable(String),
`merchant_connector_id` Nullable(String),
`organization_id` String,
`sign_flag` Int8
) ENGINE = Kafka SETTINGS kafka_broker_list = 'kafka0:29092',
kafka_topic_list = 'hyperswitch-dispute-events',
kafka_group_name = 'hyper',
kafka_format = 'JSONEachRow',
kafka_handle_error_mode = 'stream';
CREATE TABLE dispute (
`dispute_id` String,
`dispute_amount` UInt32,
`currency` String,
`dispute_stage` LowCardinality(String),
`dispute_status` LowCardinality(String),
`payment_id` String,
`attempt_id` String,
`merchant_id` LowCardinality(String),
`connector_status` String,
`connector_dispute_id` String,
`connector_reason` Nullable(String),
`connector_reason_code` Nullable(String),
`challenge_required_by` Nullable(DateTime) CODEC(T64, LZ4),
`connector_created_at` Nullable(DateTime) CODEC(T64, LZ4),
`connector_updated_at` Nullable(DateTime) CODEC(T64, LZ4),
`created_at` DateTime DEFAULT now() CODEC(T64, LZ4),
`modified_at` DateTime DEFAULT now() CODEC(T64, LZ4),
`connector` LowCardinality(String),
`evidence` String DEFAULT '{}',
`profile_id` Nullable(String),
`merchant_connector_id` Nullable(String),
`inserted_at` DateTime DEFAULT now() CODEC(T64, LZ4),
`organization_id` String,
`sign_flag` Int8,
INDEX connectorIndex connector TYPE bloom_filter GRANULARITY 1,
INDEX disputeStatusIndex dispute_status TYPE bloom_filter GRANULARITY 1,
INDEX disputeStageIndex dispute_stage TYPE bloom_filter GRANULARITY 1
) ENGINE = CollapsingMergeTree(sign_flag) PARTITION BY toStartOfDay(created_at)
ORDER BY
(created_at, merchant_id, dispute_id) TTL inserted_at + toIntervalMonth(18) SETTINGS index_granularity = 8192;
CREATE MATERIALIZED VIEW dispute_mv TO dispute (
`dispute_id` String,
`dispute_amount` UInt32,
`currency` String,
`dispute_stage` LowCardinality(String),
`dispute_status` LowCardinality(String),
`payment_id` String,
`attempt_id` String,
`merchant_id` String,
`connector_status` String,
`connector_dispute_id` String,
`connector_reason` Nullable(String),
`connector_reason_code` Nullable(String),
`challenge_required_by` Nullable(DateTime64(3)),
`connector_created_at` Nullable(DateTime64(3)),
`connector_updated_at` Nullable(DateTime64(3)),
`created_at` DateTime64(3),
`modified_at` DateTime64(3),
`connector` LowCardinality(String),
`evidence` Nullable(String),
`profile_id` Nullable(String),
`merchant_connector_id` Nullable(String),
`organization_id` String,
`inserted_at` DateTime64(3),
`sign_flag` Int8
) AS
SELECT
dispute_id,
dispute_amount,
currency,
dispute_stage,
dispute_status,
payment_id,
attempt_id,
merchant_id,
connector_status,
connector_dispute_id,
connector_reason,
connector_reason_code,
challenge_required_by,
connector_created_at,
connector_updated_at,
created_at,
modified_at,
connector,
evidence,
profile_id,
merchant_connector_id,
organization_id,
now() AS inserted_at,
sign_flag
FROM
dispute_queue
WHERE
length(_error) = 0;
CREATE MATERIALIZED VIEW dispute_parse_errors (
`topic` String,
`partition` Int64,
`offset` Int64,
`raw` String,
`error` String
) ENGINE = MergeTree
ORDER BY
(topic, partition, offset) SETTINGS index_granularity = 8192 AS
SELECT
_topic AS topic,
_partition AS partition,
_offset AS offset,
_raw_message AS raw,
_error AS error
FROM
dispute_queue
WHERE
length(_error) > 0; | 1,150 | 1,749 |
hyperswitch | crates/analytics/docs/clickhouse/scripts/payment_attempts.sql | .sql | CREATE TABLE payment_attempt_queue (
`payment_id` String,
`merchant_id` String,
`attempt_id` String,
`status` LowCardinality(String),
`amount` Nullable(UInt32),
`currency` LowCardinality(Nullable(String)),
`connector` LowCardinality(Nullable(String)),
`save_to_locker` Nullable(Bool),
`error_message` Nullable(String),
`offer_amount` Nullable(UInt32),
`surcharge_amount` Nullable(UInt32),
`tax_amount` Nullable(UInt32),
`payment_method_id` Nullable(String),
`payment_method` LowCardinality(Nullable(String)),
`payment_method_type` LowCardinality(Nullable(String)),
`connector_transaction_id` Nullable(String),
`capture_method` LowCardinality(Nullable(String)),
`capture_on` Nullable(DateTime) CODEC(T64, LZ4),
`confirm` Bool,
`authentication_type` LowCardinality(Nullable(String)),
`cancellation_reason` Nullable(String),
`amount_to_capture` Nullable(UInt32),
`mandate_id` Nullable(String),
`browser_info` Nullable(String),
`error_code` Nullable(String),
`connector_metadata` Nullable(String),
`payment_experience` Nullable(String),
`created_at` DateTime CODEC(T64, LZ4),
`last_synced` Nullable(DateTime) CODEC(T64, LZ4),
`modified_at` DateTime CODEC(T64, LZ4),
`payment_method_data` Nullable(String),
`error_reason` Nullable(String),
`multiple_capture_count` Nullable(Int16),
`amount_capturable` Nullable(UInt64),
`merchant_connector_id` Nullable(String),
`net_amount` Nullable(UInt64),
`unified_code` Nullable(String),
`unified_message` Nullable(String),
`mandate_data` Nullable(String),
`client_source` LowCardinality(Nullable(String)),
`client_version` LowCardinality(Nullable(String)),
`organization_id` String,
`profile_id` String,
`card_network` Nullable(String),
`sign_flag` Int8
) ENGINE = Kafka SETTINGS kafka_broker_list = 'kafka0:29092',
kafka_topic_list = 'hyperswitch-payment-attempt-events',
kafka_group_name = 'hyper',
kafka_format = 'JSONEachRow',
kafka_handle_error_mode = 'stream';
CREATE TABLE payment_attempts (
`payment_id` String,
`merchant_id` LowCardinality(String),
`attempt_id` String,
`status` LowCardinality(String),
`amount` Nullable(UInt32),
`currency` LowCardinality(Nullable(String)),
`connector` LowCardinality(Nullable(String)),
`save_to_locker` Nullable(Bool),
`error_message` Nullable(String),
`offer_amount` Nullable(UInt32),
`surcharge_amount` Nullable(UInt32),
`tax_amount` Nullable(UInt32),
`payment_method_id` Nullable(String),
`payment_method` LowCardinality(Nullable(String)),
`payment_method_type` LowCardinality(Nullable(String)),
`connector_transaction_id` Nullable(String),
`capture_method` Nullable(String),
`capture_on` Nullable(DateTime) CODEC(T64, LZ4),
`confirm` Bool,
`authentication_type` LowCardinality(Nullable(String)),
`cancellation_reason` Nullable(String),
`amount_to_capture` Nullable(UInt32),
`mandate_id` Nullable(String),
`browser_info` Nullable(String),
`error_code` Nullable(String),
`connector_metadata` Nullable(String),
`payment_experience` Nullable(String),
`created_at` DateTime DEFAULT now() CODEC(T64, LZ4),
`last_synced` Nullable(DateTime) CODEC(T64, LZ4),
`modified_at` DateTime DEFAULT now() CODEC(T64, LZ4),
`payment_method_data` Nullable(String),
`error_reason` Nullable(String),
`multiple_capture_count` Nullable(Int16),
`amount_capturable` Nullable(UInt64),
`merchant_connector_id` Nullable(String),
`net_amount` Nullable(UInt64),
`unified_code` Nullable(String),
`unified_message` Nullable(String),
`mandate_data` Nullable(String),
`inserted_at` DateTime DEFAULT now() CODEC(T64, LZ4),
`client_source` LowCardinality(Nullable(String)),
`client_version` LowCardinality(Nullable(String)),
`organization_id` String,
`profile_id` String,
`card_network` Nullable(String),
`sign_flag` Int8,
INDEX connectorIndex connector TYPE bloom_filter GRANULARITY 1,
INDEX paymentMethodIndex payment_method TYPE bloom_filter GRANULARITY 1,
INDEX authenticationTypeIndex authentication_type TYPE bloom_filter GRANULARITY 1,
INDEX currencyIndex currency TYPE bloom_filter GRANULARITY 1,
INDEX statusIndex status TYPE bloom_filter GRANULARITY 1
) ENGINE = CollapsingMergeTree(sign_flag) PARTITION BY toStartOfDay(created_at)
ORDER BY
(created_at, merchant_id, attempt_id) TTL created_at + toIntervalMonth(18) SETTINGS index_granularity = 8192;
CREATE MATERIALIZED VIEW payment_attempt_mv TO payment_attempts (
`payment_id` String,
`merchant_id` String,
`attempt_id` String,
`status` LowCardinality(String),
`amount` Nullable(UInt32),
`currency` LowCardinality(Nullable(String)),
`connector` LowCardinality(Nullable(String)),
`save_to_locker` Nullable(Bool),
`error_message` Nullable(String),
`offer_amount` Nullable(UInt32),
`surcharge_amount` Nullable(UInt32),
`tax_amount` Nullable(UInt32),
`payment_method_id` Nullable(String),
`payment_method` LowCardinality(Nullable(String)),
`payment_method_type` LowCardinality(Nullable(String)),
`connector_transaction_id` Nullable(String),
`capture_method` Nullable(String),
`confirm` Bool,
`authentication_type` LowCardinality(Nullable(String)),
`cancellation_reason` Nullable(String),
`amount_to_capture` Nullable(UInt32),
`mandate_id` Nullable(String),
`browser_info` Nullable(String),
`error_code` Nullable(String),
`connector_metadata` Nullable(String),
`payment_experience` Nullable(String),
`created_at` DateTime64(3),
`capture_on` Nullable(DateTime64(3)),
`last_synced` Nullable(DateTime64(3)),
`modified_at` DateTime64(3),
`payment_method_data` Nullable(String),
`error_reason` Nullable(String),
`multiple_capture_count` Nullable(Int16),
`amount_capturable` Nullable(UInt64),
`merchant_connector_id` Nullable(String),
`net_amount` Nullable(UInt64),
`unified_code` Nullable(String),
`unified_message` Nullable(String),
`mandate_data` Nullable(String),
`inserted_at` DateTime64(3),
`client_source` LowCardinality(Nullable(String)),
`client_version` LowCardinality(Nullable(String)),
`organization_id` String,
`profile_id` String,
`card_network` Nullable(String),
`sign_flag` Int8
) AS
SELECT
payment_id,
merchant_id,
attempt_id,
status,
amount,
currency,
connector,
save_to_locker,
error_message,
offer_amount,
surcharge_amount,
tax_amount,
payment_method_id,
payment_method,
payment_method_type,
connector_transaction_id,
capture_method,
confirm,
authentication_type,
cancellation_reason,
amount_to_capture,
mandate_id,
browser_info,
error_code,
connector_metadata,
payment_experience,
created_at,
capture_on,
last_synced,
modified_at,
payment_method_data,
error_reason,
multiple_capture_count,
amount_capturable,
merchant_connector_id,
net_amount,
unified_code,
unified_message,
mandate_data,
now() AS inserted_at,
client_source,
client_version,
organization_id,
profile_id,
card_network,
sign_flag
FROM
payment_attempt_queue
WHERE
length(_error) = 0;
| 1,802 | 1,750 |
CREATE TABLE api_events_queue (
`merchant_id` String,
`payment_id` Nullable(String),
`refund_id` Nullable(String),
`payment_method_id` Nullable(String),
`payment_method` Nullable(String),
`payment_method_type` Nullable(String),
`customer_id` Nullable(String),
`user_id` Nullable(String),
`connector` Nullable(String),
`request_id` String,
`flow_type` LowCardinality(String),
`api_flow` LowCardinality(String),
`api_auth_type` LowCardinality(String),
`request` String,
`response` Nullable(String),
`error` Nullable(String),
`authentication_data` Nullable(String),
`status_code` UInt32,
`created_at_timestamp` DateTime64(3),
`latency` UInt128,
`user_agent` String,
`ip_addr` String,
`hs_latency` Nullable(UInt128),
`http_method` LowCardinality(String),
`url_path` Nullable(String),
`dispute_id` Nullable(String)
) ENGINE = Kafka SETTINGS kafka_broker_list = 'kafka0:29092',
kafka_topic_list = 'hyperswitch-api-log-events',
kafka_group_name = 'hyper',
kafka_format = 'JSONEachRow',
kafka_handle_error_mode = 'stream';
CREATE TABLE api_events (
`merchant_id` LowCardinality(String),
`payment_id` Nullable(String),
`refund_id` Nullable(String),
`payment_method_id` Nullable(String),
`payment_method` Nullable(String),
`payment_method_type` Nullable(String),
`customer_id` Nullable(String),
`user_id` Nullable(String),
`connector` Nullable(String),
`request_id` String,
`flow_type` LowCardinality(String),
`api_flow` LowCardinality(String),
`api_auth_type` LowCardinality(String),
`request` String,
`response` Nullable(String),
`error` Nullable(String),
`authentication_data` Nullable(String),
`status_code` UInt32,
`created_at` DateTime64(3),
`inserted_at` DateTime DEFAULT now() CODEC(T64, LZ4),
`latency` UInt128,
`user_agent` String,
`ip_addr` String,
`hs_latency` Nullable(UInt128),
`http_method` LowCardinality(String),
`url_path` Nullable(String),
`dispute_id` Nullable(String),
`masked_response` Nullable(String),
INDEX flowIndex flow_type TYPE bloom_filter GRANULARITY 1,
INDEX apiIndex api_flow TYPE bloom_filter GRANULARITY 1,
INDEX statusIndex status_code TYPE bloom_filter GRANULARITY 1
) ENGINE = MergeTree PARTITION BY toStartOfDay(created_at)
ORDER BY
(
created_at,
merchant_id,
flow_type,
status_code,
api_flow
) TTL inserted_at + toIntervalMonth(18) SETTINGS index_granularity = 8192;
CREATE TABLE api_events_audit (
`merchant_id` LowCardinality(String),
`payment_id` String,
`refund_id` Nullable(String),
`payment_method_id` Nullable(String),
`payment_method` Nullable(String),
`payment_method_type` Nullable(String),
`customer_id` Nullable(String),
`user_id` Nullable(String),
`connector` Nullable(String),
`request_id` String,
`flow_type` LowCardinality(String),
`api_flow` LowCardinality(String),
`api_auth_type` LowCardinality(String),
`request` String,
`response` Nullable(String),
`error` Nullable(String),
`authentication_data` Nullable(String),
`status_code` UInt32,
`created_at` DateTime64(3),
`inserted_at` DateTime DEFAULT now() CODEC(T64, LZ4),
`latency` UInt128,
`user_agent` String,
`ip_addr` String,
`hs_latency` Nullable(UInt128),
`http_method` LowCardinality(Nullable(String)),
`url_path` Nullable(String),
`dispute_id` Nullable(String),
`masked_response` Nullable(String)
) ENGINE = MergeTree PARTITION BY merchant_id
ORDER BY
(merchant_id, payment_id) TTL inserted_at + toIntervalMonth(18) SETTINGS index_granularity = 8192;
CREATE MATERIALIZED VIEW api_events_parse_errors (
`topic` String,
`partition` Int64,
`offset` Int64,
`raw` String,
`error` String
) ENGINE = MergeTree
ORDER BY
(topic, partition, offset) SETTINGS index_granularity = 8192 AS
SELECT
_topic AS topic,
_partition AS partition,
_offset AS offset,
_raw_message AS raw,
_error AS error
FROM
api_events_queue
WHERE
length(_error) > 0;
CREATE MATERIALIZED VIEW api_events_audit_mv TO api_events_audit (
`merchant_id` String,
`payment_id` String,
`refund_id` Nullable(String),
`payment_method_id` Nullable(String),
`payment_method` Nullable(String),
`payment_method_type` Nullable(String),
`customer_id` Nullable(String),
`user_id` Nullable(String),
`connector` Nullable(String),
`request_id` String,
`flow_type` LowCardinality(String),
`api_flow` LowCardinality(String),
`api_auth_type` LowCardinality(String),
`request` String,
`response` Nullable(String),
`error` Nullable(String),
`authentication_data` Nullable(String),
`status_code` UInt32,
`created_at` DateTime64(3),
`inserted_at` DateTime DEFAULT now() CODEC(T64, LZ4),
`latency` UInt128,
`user_agent` String,
`ip_addr` String,
`hs_latency` Nullable(UInt128),
`http_method` LowCardinality(Nullable(String)),
`url_path` Nullable(String),
`dispute_id` Nullable(String),
`masked_response` Nullable(String)
) AS
SELECT
merchant_id,
payment_id,
refund_id,
payment_method_id,
payment_method,
payment_method_type,
customer_id,
user_id,
connector,
request_id,
flow_type,
api_flow,
api_auth_type,
request,
response,
error,
authentication_data,
status_code,
created_at_timestamp AS created_at,
now() AS inserted_at,
latency,
user_agent,
ip_addr,
hs_latency,
http_method,
url_path,
dispute_id,
response AS masked_response
FROM
api_events_queue
WHERE
(length(_error) = 0)
AND (payment_id IS NOT NULL);
CREATE MATERIALIZED VIEW api_events_mv TO api_events (
`merchant_id` String,
`payment_id` Nullable(String),
`refund_id` Nullable(String),
`payment_method_id` Nullable(String),
`payment_method` Nullable(String),
`payment_method_type` Nullable(String),
`customer_id` Nullable(String),
`user_id` Nullable(String),
`connector` Nullable(String),
`request_id` String,
`flow_type` LowCardinality(String),
`api_flow` LowCardinality(String),
`api_auth_type` LowCardinality(String),
`request` String,
`response` Nullable(String),
`error` Nullable(String),
`authentication_data` Nullable(String),
`status_code` UInt32,
`created_at` DateTime64(3),
`inserted_at` DateTime DEFAULT now() CODEC(T64, LZ4),
`latency` UInt128,
`user_agent` String,
`ip_addr` String,
`hs_latency` Nullable(UInt128),
`http_method` LowCardinality(Nullable(String)),
`url_path` Nullable(String),
`dispute_id` Nullable(String),
`masked_response` Nullable(String)
) AS
SELECT
merchant_id,
payment_id,
refund_id,
payment_method_id,
payment_method,
payment_method_type,
customer_id,
user_id,
connector,
request_id,
flow_type,
api_flow,
api_auth_type,
request,
response,
error,
authentication_data,
status_code,
created_at_timestamp AS created_at,
now() AS inserted_at,
latency,
user_agent,
ip_addr,
hs_latency,
http_method,
url_path,
dispute_id,
response AS masked_response
FROM
api_events_queue
WHERE
length(_error) = 0; | 1,898 | 1,751 |
CREATE TABLE connector_events_queue (
`merchant_id` String,
`payment_id` Nullable(String),
`connector_name` LowCardinality(String),
`request_id` String,
`flow` LowCardinality(String),
`request` String,
`masked_response` Nullable(String),
`error` Nullable(String),
`status_code` UInt32,
`created_at` DateTime64(3),
`latency` UInt128,
`method` LowCardinality(String),
`dispute_id` Nullable(String),
`refund_id` Nullable(String)
) ENGINE = Kafka SETTINGS kafka_broker_list = 'kafka0:29092',
kafka_topic_list = 'hyperswitch-outgoing-connector-events',
kafka_group_name = 'hyper',
kafka_format = 'JSONEachRow',
kafka_handle_error_mode = 'stream';
CREATE MATERIALIZED VIEW connector_events_parse_errors (
`topic` String,
`partition` Int64,
`offset` Int64,
`raw` String,
`error` String
) ENGINE = MergeTree
ORDER BY
(topic, partition, offset) SETTINGS index_granularity = 8192 AS
SELECT
_topic AS topic,
_partition AS partition,
_offset AS offset,
_raw_message AS raw,
_error AS error
FROM
connector_events_queue
WHERE
length(_error) > 0;
CREATE TABLE connector_events (
`merchant_id` LowCardinality(String),
`payment_id` Nullable(String),
`connector_name` LowCardinality(String),
`request_id` String,
`flow` LowCardinality(String),
`request` String,
`response` Nullable(String),
`masked_response` Nullable(String),
`error` Nullable(String),
`status_code` UInt32,
`created_at` DateTime64(3),
`inserted_at` DateTime DEFAULT now() CODEC(T64, LZ4),
`latency` UInt128,
`method` LowCardinality(String),
`dispute_id` Nullable(String),
`refund_id` Nullable(String),
INDEX flowIndex flow TYPE bloom_filter GRANULARITY 1,
INDEX connectorIndex connector_name TYPE bloom_filter GRANULARITY 1,
INDEX statusIndex status_code TYPE bloom_filter GRANULARITY 1
) ENGINE = MergeTree PARTITION BY toStartOfDay(created_at)
ORDER BY
(
created_at,
merchant_id,
connector_name,
flow,
status_code
) TTL inserted_at + toIntervalMonth(18) SETTINGS index_granularity = 8192;
CREATE TABLE connector_events_audit (
`merchant_id` LowCardinality(String),
`payment_id` String,
`connector_name` LowCardinality(String),
`request_id` String,
`flow` LowCardinality(String),
`request` String,
`response` Nullable(String),
`masked_response` Nullable(String),
`error` Nullable(String),
`status_code` UInt32,
`created_at` DateTime64(3),
`inserted_at` DateTime DEFAULT now() CODEC(T64, LZ4),
`latency` UInt128,
`method` LowCardinality(String),
`dispute_id` Nullable(String),
`refund_id` Nullable(String),
INDEX flowIndex flow TYPE bloom_filter GRANULARITY 1,
INDEX connectorIndex connector_name TYPE bloom_filter GRANULARITY 1,
INDEX statusIndex status_code TYPE bloom_filter GRANULARITY 1
) ENGINE = MergeTree PARTITION BY merchant_id
ORDER BY
(merchant_id, payment_id) TTL inserted_at + toIntervalMonth(18) SETTINGS index_granularity = 8192;
CREATE MATERIALIZED VIEW connector_events_audit_mv TO connector_events_audit (
`merchant_id` String,
`payment_id` Nullable(String),
`connector_name` LowCardinality(String),
`request_id` String,
`flow` LowCardinality(String),
`request` String,
`response` Nullable(String),
`masked_response` Nullable(String),
`error` Nullable(String),
`status_code` UInt32,
`created_at` DateTime64(3),
`inserted_at` DateTime DEFAULT now() CODEC(T64, LZ4),
`latency` UInt128,
`method` LowCardinality(String),
`refund_id` Nullable(String),
`dispute_id` Nullable(String)
) AS
SELECT
merchant_id,
payment_id,
connector_name,
request_id,
flow,
request,
masked_response AS response,
masked_response,
error,
status_code,
created_at,
now64() AS inserted_at,
latency,
method,
refund_id,
dispute_id
FROM
connector_events_queue
WHERE
(length(_error) = 0)
AND (payment_id IS NOT NULL);
CREATE MATERIALIZED VIEW connector_events_mv TO connector_events (
`merchant_id` String,
`payment_id` Nullable(String),
`connector_name` LowCardinality(String),
`request_id` String,
`flow` LowCardinality(String),
`request` String,
`response` Nullable(String),
`masked_response` Nullable(String),
`error` Nullable(String),
`status_code` UInt32,
`created_at` DateTime64(3),
`inserted_at` DateTime DEFAULT now() CODEC(T64, LZ4),
`latency` UInt128,
`method` LowCardinality(String),
`refund_id` Nullable(String),
`dispute_id` Nullable(String)
) AS
SELECT
merchant_id,
payment_id,
connector_name,
request_id,
flow,
request,
masked_response AS response,
masked_response,
error,
status_code,
created_at,
now64() AS inserted_at,
latency,
method,
refund_id,
dispute_id
FROM
connector_events_queue
WHERE
length(_error) = 0; | 1,319 | 1,752 |
CREATE TABLE payment_intents_queue
(
`payment_id` String,
`merchant_id` String,
`status` LowCardinality(String),
`amount` UInt32,
`currency` LowCardinality(Nullable(String)),
`amount_captured` Nullable(UInt32),
`customer_id` Nullable(String),
`description` Nullable(String),
`return_url` Nullable(String),
`connector_id` LowCardinality(Nullable(String)),
`statement_descriptor_name` Nullable(String),
`statement_descriptor_suffix` Nullable(String),
`setup_future_usage` LowCardinality(Nullable(String)),
`off_session` Nullable(Bool),
`client_secret` Nullable(String),
`active_attempt_id` String,
`business_country` LowCardinality(String),
`business_label` String,
`attempt_count` UInt8,
`profile_id` Nullable(String),
`modified_at` DateTime CODEC(T64, LZ4),
`created_at` DateTime CODEC(T64, LZ4),
`last_synced` Nullable(DateTime) CODEC(T64, LZ4),
`organization_id` String,
`sign_flag` Int8
) ENGINE = Kafka SETTINGS kafka_broker_list = 'kafka0:29092',
kafka_topic_list = 'hyperswitch-payment-intent-events',
kafka_group_name = 'hyper',
kafka_format = 'JSONEachRow',
kafka_handle_error_mode = 'stream';
CREATE TABLE payment_intents
(
`payment_id` String,
`merchant_id` LowCardinality(String),
`status` LowCardinality(String),
`amount` UInt32,
`currency` LowCardinality(Nullable(String)),
`amount_captured` Nullable(UInt32),
`customer_id` Nullable(String),
`description` Nullable(String),
`return_url` Nullable(String),
`connector_id` LowCardinality(Nullable(String)),
`statement_descriptor_name` Nullable(String),
`statement_descriptor_suffix` Nullable(String),
`setup_future_usage` LowCardinality(Nullable(String)),
`off_session` Nullable(Bool),
`client_secret` Nullable(String),
`active_attempt_id` String,
`business_country` LowCardinality(String),
`business_label` String,
`attempt_count` UInt8,
`profile_id` Nullable(String),
`modified_at` DateTime DEFAULT now() CODEC(T64, LZ4),
`created_at` DateTime DEFAULT now() CODEC(T64, LZ4),
`last_synced` Nullable(DateTime) CODEC(T64, LZ4),
`inserted_at` DateTime DEFAULT now() CODEC(T64, LZ4),
`organization_id` String,
`sign_flag` Int8,
INDEX connectorIndex connector_id TYPE bloom_filter GRANULARITY 1,
INDEX currencyIndex currency TYPE bloom_filter GRANULARITY 1,
INDEX statusIndex status TYPE bloom_filter GRANULARITY 1
)
ENGINE = CollapsingMergeTree(sign_flag)
PARTITION BY toStartOfDay(created_at)
ORDER BY (created_at, merchant_id, payment_id)
TTL created_at + toIntervalMonth(18)
SETTINGS index_granularity = 8192;
CREATE MATERIALIZED VIEW payment_intents_mv TO payment_intents
(
`payment_id` String,
`merchant_id` String,
`status` LowCardinality(String),
`amount` UInt32,
`currency` LowCardinality(Nullable(String)),
`amount_captured` Nullable(UInt32),
`customer_id` Nullable(String),
`description` Nullable(String),
`return_url` Nullable(String),
`connector_id` LowCardinality(Nullable(String)),
`statement_descriptor_name` Nullable(String),
`statement_descriptor_suffix` Nullable(String),
`setup_future_usage` LowCardinality(Nullable(String)),
`off_session` Nullable(Bool),
`client_secret` Nullable(String),
`active_attempt_id` String,
`business_country` LowCardinality(String),
`business_label` String,
`attempt_count` UInt8,
`profile_id` Nullable(String),
`modified_at` DateTime64(3),
`created_at` DateTime64(3),
`last_synced` Nullable(DateTime64(3)),
`inserted_at` DateTime64(3),
`organization_id` String,
`sign_flag` Int8
) AS
SELECT
payment_id,
merchant_id,
status,
amount,
currency,
amount_captured,
customer_id,
description,
return_url,
connector_id,
statement_descriptor_name,
statement_descriptor_suffix,
setup_future_usage,
off_session,
client_secret,
active_attempt_id,
business_country,
business_label,
attempt_count,
profile_id,
modified_at,
created_at,
last_synced,
now() AS inserted_at,
organization_id,
sign_flag
FROM payment_intents_queue; | 1,060 | 1,753 |
CREATE TABLE fraud_check_queue (
`frm_id` String,
`payment_id` String,
`merchant_id` String,
`attempt_id` String,
`created_at` DateTime CODEC(T64, LZ4),
`frm_name` LowCardinality(String),
`frm_transaction_id` String,
`frm_transaction_type` LowCardinality(String),
`frm_status` LowCardinality(String),
`frm_score` Int32,
`frm_reason` LowCardinality(String),
`frm_error` Nullable(String),
`amount` UInt32,
`currency` LowCardinality(String),
`payment_method` LowCardinality(String),
`payment_method_type` LowCardinality(String),
`refund_transaction_id` Nullable(String),
`metadata` Nullable(String),
`modified_at` DateTime CODEC(T64, LZ4),
`last_step` LowCardinality(String),
`payment_capture_method` LowCardinality(String),
`sign_flag` Int8
) ENGINE = Kafka SETTINGS kafka_broker_list = 'kafka0:29092',
kafka_topic_list = 'hyperswitch-fraud-check-events',
kafka_group_name = 'hyper',
kafka_format = 'JSONEachRow',
kafka_handle_error_mode = 'stream';
CREATE TABLE fraud_check (
`frm_id` String,
`payment_id` String,
`merchant_id` LowCardinality(String),
`attempt_id` String,
`created_at` DateTime DEFAULT now() CODEC(T64, LZ4),
`frm_name` LowCardinality(String),
`frm_transaction_id` String,
`frm_transaction_type` LowCardinality(String),
`frm_status` LowCardinality(String),
`frm_score` Int32,
`frm_reason` LowCardinality(String),
`frm_error` Nullable(String),
`amount` UInt32,
`currency` LowCardinality(String),
`payment_method` LowCardinality(String),
`payment_method_type` LowCardinality(String),
`refund_transaction_id` Nullable(String),
`metadata` Nullable(String),
`modified_at` DateTime DEFAULT now() CODEC(T64, LZ4),
`last_step` LowCardinality(String),
`payment_capture_method` LowCardinality(String),
`sign_flag` Int8,
INDEX frmNameIndex frm_name TYPE bloom_filter GRANULARITY 1,
INDEX frmStatusIndex frm_status TYPE bloom_filter GRANULARITY 1,
INDEX paymentMethodIndex payment_method TYPE bloom_filter GRANULARITY 1,
INDEX paymentMethodTypeIndex payment_method_type TYPE bloom_filter GRANULARITY 1,
INDEX currencyIndex currency TYPE bloom_filter GRANULARITY 1
) ENGINE = CollapsingMergeTree(sign_flag) PARTITION BY toStartOfDay(created_at)
ORDER BY
(created_at, merchant_id, attempt_id, frm_id) TTL created_at + toIntervalMonth(18) SETTINGS index_granularity = 8192;
CREATE MATERIALIZED VIEW fraud_check_mv TO fraud_check (
`frm_id` String,
`payment_id` String,
`merchant_id` String,
`attempt_id` String,
`created_at` DateTime64(3),
`frm_name` LowCardinality(String),
`frm_transaction_id` String,
`frm_transaction_type` LowCardinality(String),
`frm_status` LowCardinality(String),
`frm_score` Int32,
`frm_reason` LowCardinality(String),
`frm_error` Nullable(String),
`amount` UInt32,
`currency` LowCardinality(String),
`payment_method` LowCardinality(String),
`payment_method_type` LowCardinality(String),
`refund_transaction_id` Nullable(String),
`metadata` Nullable(String),
`modified_at` DateTime64(3),
`last_step` LowCardinality(String),
`payment_capture_method` LowCardinality(String),
`sign_flag` Int8
) AS
SELECT
frm_id,
payment_id,
merchant_id,
attempt_id,
created_at,
frm_name,
frm_transaction_id,
frm_transaction_type,
frm_status,
frm_score,
frm_reason,
frm_error,
amount,
currency,
payment_method,
payment_method_type,
refund_transaction_id,
metadata,
modified_at,
last_step,
payment_capture_method,
sign_flag
FROM
fraud_check_queue
WHERE
length(_error) = 0;
CREATE MATERIALIZED VIEW fraud_check_parse_errors (
`topic` String,
`partition` Int64,
`offset` Int64,
`raw` String,
`error` String
) ENGINE = MergeTree
ORDER BY
(topic, partition, offset) SETTINGS index_granularity = 8192 AS
SELECT
_topic AS topic,
_partition AS partition,
_offset AS offset,
_raw_message AS raw,
_error AS error
FROM
fraud_check_queue
WHERE
length(_error) > 0; | 1,089 | 1,754 |
CREATE TABLE sdk_events_queue (
`payment_id` Nullable(String),
`merchant_id` String,
`remote_ip` Nullable(String),
`log_type` LowCardinality(Nullable(String)),
`event_name` LowCardinality(Nullable(String)),
`first_event` LowCardinality(Nullable(String)),
`latency` Nullable(UInt32),
`timestamp` DateTime64(3),
`browser_name` LowCardinality(Nullable(String)),
`browser_version` Nullable(String),
`platform` LowCardinality(Nullable(String)),
`source` LowCardinality(Nullable(String)),
`category` LowCardinality(Nullable(String)),
`version` LowCardinality(Nullable(String)),
`value` Nullable(String),
`component` LowCardinality(Nullable(String)),
`payment_method` LowCardinality(Nullable(String)),
`payment_experience` LowCardinality(Nullable(String))
) ENGINE = Kafka SETTINGS
kafka_broker_list = 'kafka0:29092',
kafka_topic_list = 'hyper-sdk-logs',
kafka_group_name = 'hyper-ckh',
kafka_format = 'JSONEachRow',
kafka_handle_error_mode = 'stream';
CREATE TABLE sdk_events (
`payment_id` Nullable(String),
`merchant_id` String,
`remote_ip` Nullable(String),
`log_type` LowCardinality(Nullable(String)),
`event_name` LowCardinality(Nullable(String)),
`first_event` Bool DEFAULT 1,
`browser_name` LowCardinality(Nullable(String)),
`browser_version` Nullable(String),
`platform` LowCardinality(Nullable(String)),
`source` LowCardinality(Nullable(String)),
`category` LowCardinality(Nullable(String)),
`version` LowCardinality(Nullable(String)),
`component` LowCardinality(Nullable(String)),
`payment_method` LowCardinality(Nullable(String)),
`payment_experience` LowCardinality(Nullable(String)) DEFAULT '',
`created_at` DateTime DEFAULT now() CODEC(T64, LZ4),
`inserted_at` DateTime DEFAULT now() CODEC(T64, LZ4),
`latency` Nullable(UInt32) DEFAULT 0,
`value` Nullable(String),
`created_at_precise` DateTime64(3),
INDEX paymentMethodIndex payment_method TYPE bloom_filter GRANULARITY 1,
INDEX eventIndex event_name TYPE bloom_filter GRANULARITY 1,
INDEX platformIndex platform TYPE bloom_filter GRANULARITY 1,
INDEX logTypeIndex log_type TYPE bloom_filter GRANULARITY 1,
INDEX categoryIndex category TYPE bloom_filter GRANULARITY 1,
INDEX sourceIndex source TYPE bloom_filter GRANULARITY 1,
INDEX componentIndex component TYPE bloom_filter GRANULARITY 1,
INDEX firstEventIndex first_event TYPE bloom_filter GRANULARITY 1
) ENGINE = MergeTree
PARTITION BY
toStartOfDay(created_at)
ORDER BY
(created_at, merchant_id)
TTL
toDateTime(created_at) + toIntervalMonth(6)
SETTINGS
index_granularity = 8192
;
CREATE MATERIALIZED VIEW sdk_events_mv TO sdk_events (
`payment_id` Nullable(String),
`merchant_id` String,
`remote_ip` Nullable(String),
`log_type` LowCardinality(Nullable(String)),
`event_name` LowCardinality(Nullable(String)),
`first_event` Bool,
`latency` Nullable(UInt32),
`browser_name` LowCardinality(Nullable(String)),
`browser_version` Nullable(String),
`platform` LowCardinality(Nullable(String)),
`source` LowCardinality(Nullable(String)),
`category` LowCardinality(Nullable(String)),
`version` LowCardinality(Nullable(String)),
`value` Nullable(String),
`component` LowCardinality(Nullable(String)),
`payment_method` LowCardinality(Nullable(String)),
`payment_experience` LowCardinality(Nullable(String)),
`created_at` DateTime64(3),
`created_at_precise` DateTime64(3)
) AS
SELECT
payment_id,
merchant_id,
remote_ip,
log_type,
event_name,
multiIf(first_event = 'true', 1, 0) AS first_event,
latency,
browser_name,
browser_version,
platform,
source,
category,
version,
value,
component,
payment_method,
payment_experience,
toDateTime64(timestamp, 3) AS created_at,
toDateTime64(timestamp, 3) AS created_at_precise
FROM
sdk_events_queue
WHERE length(_error) = 0;
CREATE TABLE sdk_events_audit (
`payment_id` String,
`merchant_id` String,
`remote_ip` Nullable(String),
`log_type` LowCardinality(Nullable(String)),
`event_name` LowCardinality(Nullable(String)),
`first_event` Bool,
`latency` Nullable(UInt32),
`browser_name` LowCardinality(Nullable(String)),
`browser_version` Nullable(String),
`platform` LowCardinality(Nullable(String)),
`source` LowCardinality(Nullable(String)),
`category` LowCardinality(Nullable(String)),
`version` LowCardinality(Nullable(String)),
`value` Nullable(String),
`component` LowCardinality(Nullable(String)),
`payment_method` LowCardinality(Nullable(String)),
`payment_experience` LowCardinality(Nullable(String)),
`created_at` DateTime DEFAULT now() CODEC(T64, LZ4),
`created_at_precise` DateTime64(3),
`inserted_at` DateTime DEFAULT now() CODEC(T64, LZ4)
) ENGINE = MergeTree PARTITION BY merchant_id
ORDER BY
(merchant_id, payment_id)
TTL inserted_at + toIntervalMonth(18)
SETTINGS index_granularity = 8192;
CREATE MATERIALIZED VIEW sdk_events_parse_errors (
`topic` String,
`partition` Int64,
`offset` Int64,
`raw` String,
`error` String
) ENGINE = MergeTree
ORDER BY
(topic, partition, offset) SETTINGS index_granularity = 8192 AS
SELECT
_topic AS topic,
_partition AS partition,
_offset AS offset,
_raw_message AS raw,
_error AS error
FROM
sdk_events_queue
WHERE
length(_error) > 0;
CREATE MATERIALIZED VIEW sdk_events_audit_mv TO sdk_events_audit (
`payment_id` String,
`merchant_id` String,
`remote_ip` Nullable(String),
`log_type` LowCardinality(Nullable(String)),
`event_name` LowCardinality(Nullable(String)),
`first_event` Bool,
`latency` Nullable(UInt32),
`browser_name` LowCardinality(Nullable(String)),
`browser_version` Nullable(String),
`platform` LowCardinality(Nullable(String)),
`source` LowCardinality(Nullable(String)),
`category` LowCardinality(Nullable(String)),
`version` LowCardinality(Nullable(String)),
`value` Nullable(String),
`component` LowCardinality(Nullable(String)),
`payment_method` LowCardinality(Nullable(String)),
`payment_experience` LowCardinality(Nullable(String)),
`created_at` DateTime64(3),
`created_at_precise` DateTime64(3),
`inserted_at` DateTime DEFAULT now() CODEC(T64, LZ4)
) AS
SELECT
payment_id,
merchant_id,
remote_ip,
log_type,
event_name,
multiIf(first_event = 'true', 1, 0) AS first_event,
latency,
browser_name,
browser_version,
platform,
source,
category,
version,
value,
component,
payment_method,
payment_experience,
toDateTime64(timestamp, 3) AS created_at,
toDateTime64(timestamp, 3) AS created_at_precise,
now() AS inserted_at
FROM
sdk_events_queue
WHERE
(length(_error) = 0)
AND (payment_id IS NOT NULL);
CREATE TABLE active_payments (
`payment_id` Nullable(String),
`merchant_id` String,
`created_at` DateTime64,
`flow_type` LowCardinality(Nullable(String)),
INDEX merchantIndex merchant_id TYPE bloom_filter GRANULARITY 1,
INDEX flowTypeIndex flow_type TYPE bloom_filter GRANULARITY 1
) ENGINE = MergeTree
PARTITION BY toStartOfSecond(created_at)
ORDER BY
merchant_id
TTL
toDateTime(created_at) + INTERVAL 60 SECOND
SETTINGS
index_granularity = 8192;
CREATE MATERIALIZED VIEW sdk_active_payments_mv TO active_payments (
`payment_id` Nullable(String),
`merchant_id` String,
`created_at` DateTime64,
`flow_type` LowCardinality(Nullable(String))
) AS
SELECT
payment_id,
merchant_id,
toDateTime64(timestamp, 3) AS created_at,
'sdk' AS flow_type
FROM
sdk_events_queue
WHERE length(_error) = 0;
CREATE MATERIALIZED VIEW api_active_payments_mv TO active_payments (
`payment_id` Nullable(String),
`merchant_id` String,
`created_at` DateTime64,
`flow_type` LowCardinality(Nullable(String))
) AS
SELECT
payment_id,
merchant_id,
created_at_timestamp AS created_at,
flow_type
FROM
api_events_queue
WHERE length(_error) = 0; | 2,105 | 1,755 |
CREATE TABLE payout_queue (
`payout_id` String,
`payout_attempt_id` String,
`merchant_id` String,
`customer_id` String,
`address_id` String,
`profile_id` String,
`payout_method_id` Nullable(String),
`payout_type` LowCardinality(String),
`amount` UInt64,
`destination_currency` LowCardinality(String),
`source_currency` LowCardinality(String),
`description` Nullable(String),
`recurring` Bool,
`auto_fulfill` Bool,
`return_url` Nullable(String),
`entity_type` LowCardinality(String),
`metadata` Nullable(String),
`created_at` DateTime CODEC(T64, LZ4),
`last_modified_at` DateTime CODEC(T64, LZ4),
`attempt_count` UInt16,
`status` LowCardinality(String),
`connector` Nullable(String),
`connector_payout_id` String,
`is_eligible` Nullable(Bool),
`error_message` Nullable(String),
`error_code` Nullable(String),
`business_country` LowCardinality(Nullable(String)),
`business_label` Nullable(String),
`merchant_connector_id` Nullable(String),
`sign_flag` Int8
) ENGINE = Kafka SETTINGS kafka_broker_list = 'kafka0:29092',
kafka_topic_list = 'hyperswitch-payout-events',
kafka_group_name = 'hyper',
kafka_format = 'JSONEachRow',
kafka_handle_error_mode = 'stream';
-- Persistent payout fact table; rows arrive from the Kafka-backed
-- payout_queue through the payout_mv materialized view.
-- sign_flag (+1/-1) is the CollapsingMergeTree sign column, letting updates
-- collapse stale row versions.
CREATE TABLE payout (
    `payout_id` String,
    `payout_attempt_id` String,
    `merchant_id` String,
    `customer_id` String,
    `address_id` String,
    `profile_id` String,
    `payout_method_id` Nullable(String),
    `payout_type` LowCardinality(String),
    `amount` UInt64,
    `destination_currency` LowCardinality(String),
    `source_currency` LowCardinality(String),
    `description` Nullable(String),
    `recurring` Bool,
    `auto_fulfill` Bool,
    `return_url` Nullable(String),
    `entity_type` LowCardinality(String),
    `metadata` Nullable(String),
    `created_at` DateTime DEFAULT now() CODEC(T64, LZ4),
    `last_modified_at` DateTime DEFAULT now() CODEC(T64, LZ4),
    `attempt_count` UInt16,
    `status` LowCardinality(String),
    `connector` Nullable(String),
    `connector_payout_id` String,
    `is_eligible` Nullable(Bool),
    `error_message` Nullable(String),
    `error_code` Nullable(String),
    `business_country` LowCardinality(Nullable(String)),
    `business_label` Nullable(String),
    `merchant_connector_id` Nullable(String),
    -- Stamped by the materialized view at consume time.
    `inserted_at` DateTime DEFAULT now() CODEC(T64, LZ4),
    `sign_flag` Int8,
    -- Bloom-filter skip indexes for common low-cardinality filter columns.
    INDEX payoutTypeIndex payout_type TYPE bloom_filter GRANULARITY 1,
    INDEX destinationCurrencyIndex destination_currency TYPE bloom_filter GRANULARITY 1,
    INDEX sourceCurrencyIndex source_currency TYPE bloom_filter GRANULARITY 1,
    INDEX entityTypeIndex entity_type TYPE bloom_filter GRANULARITY 1,
    INDEX statusIndex status TYPE bloom_filter GRANULARITY 1,
    INDEX businessCountryIndex business_country TYPE bloom_filter GRANULARITY 1
-- Daily partitions; rows expire 6 months after created_at.
) ENGINE = CollapsingMergeTree(sign_flag) PARTITION BY toStartOfDay(created_at)
ORDER BY
    (created_at, merchant_id, payout_id) TTL created_at + toIntervalMonth(6) SETTINGS index_granularity = 8192;
-- Materialized view that moves rows from the Kafka-backed payout_queue into
-- the persistent payout table, stamping inserted_at at consume time.
CREATE MATERIALIZED VIEW payout_mv TO payout (
    `payout_id` String,
    `payout_attempt_id` String,
    `merchant_id` String,
    `customer_id` String,
    `address_id` String,
    `profile_id` String,
    `payout_method_id` Nullable(String),
    `payout_type` LowCardinality(String),
    `amount` UInt64,
    `destination_currency` LowCardinality(String),
    `source_currency` LowCardinality(String),
    `description` Nullable(String),
    `recurring` Bool,
    `auto_fulfill` Bool,
    `return_url` Nullable(String),
    `entity_type` LowCardinality(String),
    `metadata` Nullable(String),
    `created_at` DateTime DEFAULT now() CODEC(T64, LZ4),
    `last_modified_at` DateTime DEFAULT now() CODEC(T64, LZ4),
    `attempt_count` UInt16,
    `status` LowCardinality(String),
    `connector` Nullable(String),
    `connector_payout_id` String,
    `is_eligible` Nullable(Bool),
    `error_message` Nullable(String),
    `error_code` Nullable(String),
    `business_country` LowCardinality(Nullable(String)),
    `business_label` Nullable(String),
    `merchant_connector_id` Nullable(String),
    `inserted_at` DateTime DEFAULT now() CODEC(T64, LZ4),
    -- Fixed: dropped the stray trailing comma that previously followed this
    -- last column (a syntax error on ClickHouse versions without
    -- trailing-comma tolerance).
    `sign_flag` Int8
) AS
SELECT
    payout_id,
    payout_attempt_id,
    merchant_id,
    customer_id,
    address_id,
    profile_id,
    payout_method_id,
    payout_type,
    amount,
    destination_currency,
    source_currency,
    description,
    recurring,
    auto_fulfill,
    return_url,
    entity_type,
    metadata,
    created_at,
    last_modified_at,
    attempt_count,
    status,
    connector,
    connector_payout_id,
    is_eligible,
    error_message,
    error_code,
    business_country,
    business_label,
    merchant_connector_id,
    now() as inserted_at,
    sign_flag
FROM
    payout_queue
-- payout_queue uses kafka_handle_error_mode = 'stream', so malformed Kafka
-- messages surface via the virtual _error column. Skip them here — matching
-- refund_mv in the sibling refunds script — instead of inserting
-- default-valued garbage rows.
WHERE
    length(_error) = 0;
hyperswitch | crates/analytics/docs/clickhouse/scripts/refunds.sql | .sql | CREATE TABLE refund_queue (
`internal_reference_id` String,
`refund_id` String,
`payment_id` String,
`merchant_id` String,
`connector_transaction_id` String,
`connector` LowCardinality(Nullable(String)),
`connector_refund_id` Nullable(String),
`external_reference_id` Nullable(String),
`refund_type` LowCardinality(String),
`total_amount` Nullable(UInt32),
`currency` LowCardinality(String),
`refund_amount` Nullable(UInt32),
`refund_status` LowCardinality(String),
`sent_to_gateway` Bool,
`refund_error_message` Nullable(String),
`refund_arn` Nullable(String),
`attempt_id` String,
`description` Nullable(String),
`refund_reason` Nullable(String),
`refund_error_code` Nullable(String),
`created_at` DateTime,
`modified_at` DateTime,
`organization_id` String,
`profile_id` String,
`sign_flag` Int8
) ENGINE = Kafka SETTINGS kafka_broker_list = 'kafka0:29092',
kafka_topic_list = 'hyperswitch-refund-events',
kafka_group_name = 'hyper',
kafka_format = 'JSONEachRow',
kafka_handle_error_mode = 'stream';
-- Persistent refund fact table; rows arrive from refund_queue via the
-- refund_mv materialized view. sign_flag (+1/-1) is the CollapsingMergeTree
-- sign column, letting updates collapse stale row versions.
CREATE TABLE refunds (
    `internal_reference_id` String,
    `refund_id` String,
    `payment_id` String,
    `merchant_id` LowCardinality(String),
    `connector_transaction_id` String,
    `connector` LowCardinality(Nullable(String)),
    `connector_refund_id` Nullable(String),
    `external_reference_id` Nullable(String),
    `refund_type` LowCardinality(String),
    `total_amount` Nullable(UInt32),
    `currency` LowCardinality(String),
    `refund_amount` Nullable(UInt32),
    `refund_status` LowCardinality(String),
    `sent_to_gateway` Bool,
    `refund_error_message` Nullable(String),
    `refund_arn` Nullable(String),
    `attempt_id` String,
    `description` Nullable(String),
    `refund_reason` Nullable(String),
    `refund_error_code` Nullable(String),
    `created_at` DateTime DEFAULT now() CODEC(T64, LZ4),
    `modified_at` DateTime DEFAULT now() CODEC(T64, LZ4),
    -- Stamped by the materialized view at consume time.
    `inserted_at` DateTime DEFAULT now() CODEC(T64, LZ4),
    `organization_id` String,
    `profile_id` String,
    `sign_flag` Int8,
    -- Bloom-filter skip indexes for common low-cardinality filter columns.
    INDEX connectorIndex connector TYPE bloom_filter GRANULARITY 1,
    INDEX refundTypeIndex refund_type TYPE bloom_filter GRANULARITY 1,
    INDEX currencyIndex currency TYPE bloom_filter GRANULARITY 1,
    INDEX statusIndex refund_status TYPE bloom_filter GRANULARITY 1
-- Daily partitions; rows expire 18 months after created_at.
) ENGINE = CollapsingMergeTree(sign_flag) PARTITION BY toStartOfDay(created_at)
ORDER BY
    (created_at, merchant_id, refund_id) TTL created_at + toIntervalMonth(18) SETTINGS index_granularity = 8192;
-- Materialized view that moves rows from the Kafka-backed refund_queue into
-- the persistent refunds table, stamping inserted_at at consume time.
-- NOTE(review): the timestamp columns are declared DateTime64(3) here while
-- the target table stores DateTime — presumably relying on implicit
-- conversion; confirm precision loss is acceptable.
CREATE MATERIALIZED VIEW refund_mv TO refunds (
    `internal_reference_id` String,
    `refund_id` String,
    `payment_id` String,
    `merchant_id` String,
    `connector_transaction_id` String,
    `connector` LowCardinality(Nullable(String)),
    `connector_refund_id` Nullable(String),
    `external_reference_id` Nullable(String),
    `refund_type` LowCardinality(String),
    `total_amount` Nullable(UInt32),
    `currency` LowCardinality(String),
    `refund_amount` Nullable(UInt32),
    `refund_status` LowCardinality(String),
    `sent_to_gateway` Bool,
    `refund_error_message` Nullable(String),
    `refund_arn` Nullable(String),
    `attempt_id` String,
    `description` Nullable(String),
    `refund_reason` Nullable(String),
    `refund_error_code` Nullable(String),
    `created_at` DateTime64(3),
    `modified_at` DateTime64(3),
    `inserted_at` DateTime64(3),
    `organization_id` String,
    `profile_id` String,
    `sign_flag` Int8
) AS
SELECT
    internal_reference_id,
    refund_id,
    payment_id,
    merchant_id,
    connector_transaction_id,
    connector,
    connector_refund_id,
    external_reference_id,
    refund_type,
    total_amount,
    currency,
    refund_amount,
    refund_status,
    sent_to_gateway,
    refund_error_message,
    refund_arn,
    attempt_id,
    description,
    refund_reason,
    refund_error_code,
    created_at,
    modified_at,
    now() AS inserted_at,
    organization_id,
    profile_id,
    sign_flag
FROM
    refund_queue
-- Skip messages the Kafka engine failed to parse (kafka_handle_error_mode =
-- 'stream' surfaces them through the virtual _error column).
WHERE
    length(_error) = 0;
hyperswitch | crates/analytics/src/core.rs | .rs | use api_models::analytics::GetInfoResponse;
use crate::{types::AnalyticsDomain, utils};
pub async fn get_domain_info(
domain: AnalyticsDomain,
) -> crate::errors::AnalyticsResult<GetInfoResponse> {
let info = match domain {
AnalyticsDomain::Payments => GetInfoResponse {
metrics: utils::get_payment_metrics_info(),
download_dimensions: None,
dimensions: utils::get_payment_dimensions(),
},
AnalyticsDomain::PaymentIntents => GetInfoResponse {
metrics: utils::get_payment_intent_metrics_info(),
download_dimensions: None,
dimensions: utils::get_payment_intent_dimensions(),
},
AnalyticsDomain::Refunds => GetInfoResponse {
metrics: utils::get_refund_metrics_info(),
download_dimensions: None,
dimensions: utils::get_refund_dimensions(),
},
AnalyticsDomain::Frm => GetInfoResponse {
metrics: utils::get_frm_metrics_info(),
download_dimensions: None,
dimensions: utils::get_frm_dimensions(),
},
AnalyticsDomain::SdkEvents => GetInfoResponse {
metrics: utils::get_sdk_event_metrics_info(),
download_dimensions: None,
dimensions: utils::get_sdk_event_dimensions(),
},
AnalyticsDomain::AuthEvents => GetInfoResponse {
metrics: utils::get_auth_event_metrics_info(),
download_dimensions: None,
dimensions: utils::get_auth_event_dimensions(),
},
AnalyticsDomain::ApiEvents => GetInfoResponse {
metrics: utils::get_api_event_metrics_info(),
download_dimensions: None,
dimensions: utils::get_api_event_dimensions(),
},
AnalyticsDomain::Dispute => GetInfoResponse {
metrics: utils::get_dispute_metrics_info(),
download_dimensions: None,
dimensions: utils::get_dispute_dimensions(),
},
};
Ok(info)
}
| 377 | 1,758 |
hyperswitch | crates/analytics/src/sqlx.rs | .rs | use std::{fmt::Display, str::FromStr};
use api_models::{
analytics::{frm::FrmTransactionType, refunds::RefundType},
enums::{DisputeStage, DisputeStatus},
};
use common_enums::{
AuthenticationConnectors, AuthenticationStatus, DecoupledAuthenticationType, TransactionStatus,
};
use common_utils::{
errors::{CustomResult, ParsingError},
DbConnectionParams,
};
use diesel_models::enums::{
AttemptStatus, AuthenticationType, Currency, FraudCheckStatus, IntentStatus, PaymentMethod,
RefundStatus,
};
use error_stack::ResultExt;
use sqlx::{
postgres::{PgArgumentBuffer, PgPoolOptions, PgRow, PgTypeInfo, PgValueRef},
Decode, Encode,
Error::ColumnNotFound,
FromRow, Pool, Postgres, Row,
};
use storage_impl::config::Database;
use time::PrimitiveDateTime;
use super::{
health_check::HealthCheck,
query::{Aggregate, ToSql, Window},
types::{
AnalyticsCollection, AnalyticsDataSource, DBEnumWrapper, LoadRow, QueryExecutionError,
TableEngine,
},
};
/// Analytics data source backed by a PostgreSQL connection pool (`sqlx`).
#[derive(Debug, Clone)]
pub struct SqlxClient {
    // Shared, cloneable connection pool; cloning the client clones the handle.
    pool: Pool<Postgres>,
}
impl Default for SqlxClient {
    /// Builds a client from hard-coded connection values.
    /// NOTE(review): these look like local-development placeholders; real
    /// deployments should construct the client via `from_conf` — confirm.
    fn default() -> Self {
        let database_url = format!(
            "postgres://{}:{}@{}:{}/{}",
            "db_user", "db_pass", "localhost", 5432, "hyperswitch_db"
        );
        Self {
            // `connect_lazy` does not open a connection here, so the `expect`
            // only guards URL parsing / pool construction.
            #[allow(clippy::expect_used)]
            pool: PgPoolOptions::new()
                .connect_lazy(&database_url)
                .expect("SQLX Pool Creation failed"),
        }
    }
}
impl SqlxClient {
    /// Creates a client from database configuration, targeting `schema`.
    ///
    /// Pool size and acquire timeout come from `conf`; `connect_lazy` defers
    /// establishing connections until first use, so the `expect` only guards
    /// URL parsing / pool construction.
    pub async fn from_conf(conf: &Database, schema: &str) -> Self {
        let database_url = conf.get_database_url(schema);
        #[allow(clippy::expect_used)]
        let pool = PgPoolOptions::new()
            .max_connections(conf.pool_size)
            .acquire_timeout(std::time::Duration::from_secs(conf.connection_timeout))
            .connect_lazy(&database_url)
            .expect("SQLX Pool Creation failed");
        Self { pool }
    }
}
/// Maps a Rust enum to the Postgres type name it is stored as; used by the
/// `DBEnumWrapper` sqlx codec implementations below.
pub trait DbType {
    fn name() -> &'static str;
}
// Implements `DbType` for an enum. The optional second argument overrides the
// reported Postgres type name (e.g. `db_type!(PaymentMethod, TEXT)` reports
// `"TEXT"`); with one argument the Rust type name itself is used.
macro_rules! db_type {
    ($a: ident, $str: tt) => {
        impl DbType for $a {
            fn name() -> &'static str {
                stringify!($str)
            }
        }
    };
    ($a:ident) => {
        impl DbType for $a {
            fn name() -> &'static str {
                stringify!($a)
            }
        }
    };
}
// Register a Postgres type name for every enum that can appear in analytics
// query results; `PaymentMethod` is reported as plain `TEXT`.
db_type!(Currency);
db_type!(AuthenticationType);
db_type!(AttemptStatus);
db_type!(IntentStatus);
db_type!(PaymentMethod, TEXT);
db_type!(RefundStatus);
db_type!(RefundType);
db_type!(FraudCheckStatus);
db_type!(FrmTransactionType);
db_type!(DisputeStage);
db_type!(DisputeStatus);
db_type!(AuthenticationStatus);
db_type!(TransactionStatus);
db_type!(AuthenticationConnectors);
db_type!(DecoupledAuthenticationType);
impl<'q, Type> Encode<'q, Postgres> for DBEnumWrapper<Type>
where
    Type: DbType + FromStr + Display,
{
    /// Encodes the wrapped enum as its `Display` string representation,
    /// delegating to the `String` encoder.
    fn encode_by_ref(
        &self,
        buf: &mut PgArgumentBuffer,
    ) -> Result<sqlx::encode::IsNull, Box<(dyn std::error::Error + Send + Sync + 'static)>> {
        let rendered = self.0.to_string();
        <String as Encode<'q, Postgres>>::encode(rendered, buf)
    }

    fn size_hint(&self) -> usize {
        let rendered = self.0.to_string();
        <String as Encode<'q, Postgres>>::size_hint(&rendered)
    }
}
impl<'r, Type> Decode<'r, Postgres> for DBEnumWrapper<Type>
where
    Type: DbType + FromStr + Display,
{
    /// Decodes a Postgres text value into the wrapped enum via `FromStr`,
    /// reporting both the offending value and the enum name on failure.
    fn decode(
        value: PgValueRef<'r>,
    ) -> Result<Self, Box<dyn std::error::Error + 'static + Send + Sync>> {
        let raw = <&'r str as Decode<'r, Postgres>>::decode(value)?;
        match Type::from_str(raw) {
            Ok(inner) => Ok(Self(inner)),
            Err(_) => Err(format!("invalid value {raw:?} for enum {}", Type::name()).into()),
        }
    }
}
impl<Type> sqlx::Type<Postgres> for DBEnumWrapper<Type>
where
    Type: DbType + FromStr + Display,
{
    /// Reports the Postgres type name registered via the `db_type!` macro.
    fn type_info() -> PgTypeInfo {
        PgTypeInfo::with_name(Type::name())
    }
}
impl<T> LoadRow<T> for SqlxClient
where
    for<'a> T: FromRow<'a, PgRow>,
{
    /// Converts a raw Postgres row into `T` via its `FromRow` impl, mapping
    /// failures to `RowExtractionFailure`.
    fn load_row(row: PgRow) -> CustomResult<T, QueryExecutionError> {
        T::from_row(&row).change_context(QueryExecutionError::RowExtractionFailure)
    }
}
// Marker-trait implementations registering SqlxClient as a backend for each
// analytics domain's metrics / filters / distribution queries.
impl super::payments::filters::PaymentFilterAnalytics for SqlxClient {}
impl super::payments::metrics::PaymentMetricAnalytics for SqlxClient {}
impl super::payments::distribution::PaymentDistributionAnalytics for SqlxClient {}
impl super::payment_intents::filters::PaymentIntentFilterAnalytics for SqlxClient {}
impl super::payment_intents::metrics::PaymentIntentMetricAnalytics for SqlxClient {}
impl super::refunds::metrics::RefundMetricAnalytics for SqlxClient {}
impl super::refunds::filters::RefundFilterAnalytics for SqlxClient {}
impl super::refunds::distribution::RefundDistributionAnalytics for SqlxClient {}
impl super::disputes::filters::DisputeFilterAnalytics for SqlxClient {}
impl super::disputes::metrics::DisputeMetricAnalytics for SqlxClient {}
impl super::frm::metrics::FrmMetricAnalytics for SqlxClient {}
impl super::frm::filters::FrmFilterAnalytics for SqlxClient {}
impl super::auth_events::metrics::AuthEventMetricAnalytics for SqlxClient {}
impl super::auth_events::filters::AuthEventFilterAnalytics for SqlxClient {}
#[async_trait::async_trait]
impl AnalyticsDataSource for SqlxClient {
    type Row = PgRow;

    /// Executes `query` against the pool and maps every returned row into `T`.
    ///
    /// A trailing `;` is appended before execution. Execution failures map to
    /// `QueryExecutionError::DatabaseError` (with the query attached for
    /// diagnostics); per-row mapping failures map to `RowExtractionFailure`.
    async fn load_results<T>(&self, query: &str) -> CustomResult<Vec<T>, QueryExecutionError>
    where
        Self: LoadRow<T>,
    {
        sqlx::query(&format!("{query};"))
            .fetch_all(&self.pool)
            .await
            .change_context(QueryExecutionError::DatabaseError)
            .attach_printable_lazy(|| format!("Failed to run query {query}"))?
            .into_iter()
            .map(Self::load_row)
            .collect::<Result<Vec<_>, _>>()
            .change_context(QueryExecutionError::RowExtractionFailure)
    }
}
#[async_trait::async_trait]
impl HealthCheck for SqlxClient {
    /// Verifies database connectivity by running a trivial `SELECT 1`.
    async fn deep_health_check(&self) -> CustomResult<(), QueryExecutionError> {
        let _rows = sqlx::query("SELECT 1")
            .fetch_all(&self.pool)
            .await
            .change_context(QueryExecutionError::DatabaseError)?;
        Ok(())
    }
}
impl<'a> FromRow<'a, PgRow> for super::auth_events::metrics::AuthEventMetricRow {
fn from_row(row: &'a PgRow) -> sqlx::Result<Self> {
let authentication_status: Option<DBEnumWrapper<AuthenticationStatus>> =
row.try_get("authentication_status").or_else(|e| match e {
ColumnNotFound(_) => Ok(Default::default()),
e => Err(e),
})?;
let trans_status: Option<DBEnumWrapper<TransactionStatus>> =
row.try_get("trans_status").or_else(|e| match e {
ColumnNotFound(_) => Ok(Default::default()),
e => Err(e),
})?;
let authentication_type: Option<DBEnumWrapper<DecoupledAuthenticationType>> =
row.try_get("authentication_type").or_else(|e| match e {
ColumnNotFound(_) => Ok(Default::default()),
e => Err(e),
})?;
let error_message: Option<String> = row.try_get("error_message").or_else(|e| match e {
ColumnNotFound(_) => Ok(Default::default()),
e => Err(e),
})?;
let authentication_connector: Option<DBEnumWrapper<AuthenticationConnectors>> = row
.try_get("authentication_connector")
.or_else(|e| match e {
ColumnNotFound(_) => Ok(Default::default()),
e => Err(e),
})?;
let message_version: Option<String> =
row.try_get("message_version").or_else(|e| match e {
ColumnNotFound(_) => Ok(Default::default()),
e => Err(e),
})?;
let acs_reference_number: Option<String> =
row.try_get("acs_reference_number").or_else(|e| match e {
ColumnNotFound(_) => Ok(Default::default()),
e => Err(e),
})?;
let count: Option<i64> = row.try_get("count").or_else(|e| match e {
ColumnNotFound(_) => Ok(Default::default()),
e => Err(e),
})?;
// Removing millisecond precision to get accurate diffs against clickhouse
let start_bucket: Option<PrimitiveDateTime> = row
.try_get::<Option<PrimitiveDateTime>, _>("start_bucket")?
.and_then(|dt| dt.replace_millisecond(0).ok());
let end_bucket: Option<PrimitiveDateTime> = row
.try_get::<Option<PrimitiveDateTime>, _>("end_bucket")?
.and_then(|dt| dt.replace_millisecond(0).ok());
Ok(Self {
authentication_status,
trans_status,
authentication_type,
error_message,
authentication_connector,
message_version,
acs_reference_number,
count,
start_bucket,
end_bucket,
})
}
}
impl<'a> FromRow<'a, PgRow> for super::auth_events::filters::AuthEventFilterRow {
fn from_row(row: &'a PgRow) -> sqlx::Result<Self> {
let authentication_status: Option<DBEnumWrapper<AuthenticationStatus>> =
row.try_get("authentication_status").or_else(|e| match e {
ColumnNotFound(_) => Ok(Default::default()),
e => Err(e),
})?;
let trans_status: Option<DBEnumWrapper<TransactionStatus>> =
row.try_get("trans_status").or_else(|e| match e {
ColumnNotFound(_) => Ok(Default::default()),
e => Err(e),
})?;
let authentication_type: Option<DBEnumWrapper<DecoupledAuthenticationType>> =
row.try_get("authentication_type").or_else(|e| match e {
ColumnNotFound(_) => Ok(Default::default()),
e => Err(e),
})?;
let error_message: Option<String> = row.try_get("error_message").or_else(|e| match e {
ColumnNotFound(_) => Ok(Default::default()),
e => Err(e),
})?;
let authentication_connector: Option<DBEnumWrapper<AuthenticationConnectors>> = row
.try_get("authentication_connector")
.or_else(|e| match e {
ColumnNotFound(_) => Ok(Default::default()),
e => Err(e),
})?;
let message_version: Option<String> =
row.try_get("message_version").or_else(|e| match e {
ColumnNotFound(_) => Ok(Default::default()),
e => Err(e),
})?;
let acs_reference_number: Option<String> =
row.try_get("acs_reference_number").or_else(|e| match e {
ColumnNotFound(_) => Ok(Default::default()),
e => Err(e),
})?;
Ok(Self {
authentication_status,
trans_status,
authentication_type,
error_message,
authentication_connector,
message_version,
acs_reference_number,
})
}
}
impl<'a> FromRow<'a, PgRow> for super::refunds::metrics::RefundMetricRow {
fn from_row(row: &'a PgRow) -> sqlx::Result<Self> {
let currency: Option<DBEnumWrapper<Currency>> =
row.try_get("currency").or_else(|e| match e {
ColumnNotFound(_) => Ok(Default::default()),
e => Err(e),
})?;
let refund_status: Option<DBEnumWrapper<RefundStatus>> =
row.try_get("refund_status").or_else(|e| match e {
ColumnNotFound(_) => Ok(Default::default()),
e => Err(e),
})?;
let connector: Option<String> = row.try_get("connector").or_else(|e| match e {
ColumnNotFound(_) => Ok(Default::default()),
e => Err(e),
})?;
let refund_type: Option<DBEnumWrapper<RefundType>> =
row.try_get("refund_type").or_else(|e| match e {
ColumnNotFound(_) => Ok(Default::default()),
e => Err(e),
})?;
let profile_id: Option<String> = row.try_get("profile_id").or_else(|e| match e {
ColumnNotFound(_) => Ok(Default::default()),
e => Err(e),
})?;
let refund_reason: Option<String> = row.try_get("refund_reason").or_else(|e| match e {
ColumnNotFound(_) => Ok(Default::default()),
e => Err(e),
})?;
let refund_error_message: Option<String> =
row.try_get("refund_error_message").or_else(|e| match e {
ColumnNotFound(_) => Ok(Default::default()),
e => Err(e),
})?;
let total: Option<bigdecimal::BigDecimal> = row.try_get("total").or_else(|e| match e {
ColumnNotFound(_) => Ok(Default::default()),
e => Err(e),
})?;
let count: Option<i64> = row.try_get("count").or_else(|e| match e {
ColumnNotFound(_) => Ok(Default::default()),
e => Err(e),
})?;
// Removing millisecond precision to get accurate diffs against clickhouse
let start_bucket: Option<PrimitiveDateTime> = row
.try_get::<Option<PrimitiveDateTime>, _>("start_bucket")?
.and_then(|dt| dt.replace_millisecond(0).ok());
let end_bucket: Option<PrimitiveDateTime> = row
.try_get::<Option<PrimitiveDateTime>, _>("end_bucket")?
.and_then(|dt| dt.replace_millisecond(0).ok());
Ok(Self {
currency,
refund_status,
connector,
refund_type,
profile_id,
refund_reason,
refund_error_message,
total,
count,
start_bucket,
end_bucket,
})
}
}
impl<'a> FromRow<'a, PgRow> for super::frm::metrics::FrmMetricRow {
fn from_row(row: &'a PgRow) -> sqlx::Result<Self> {
let frm_name: Option<String> = row.try_get("frm_name").or_else(|e| match e {
ColumnNotFound(_) => Ok(Default::default()),
e => Err(e),
})?;
let frm_status: Option<DBEnumWrapper<FraudCheckStatus>> =
row.try_get("frm_status").or_else(|e| match e {
ColumnNotFound(_) => Ok(Default::default()),
e => Err(e),
})?;
let frm_transaction_type: Option<DBEnumWrapper<FrmTransactionType>> =
row.try_get("frm_transaction_type").or_else(|e| match e {
ColumnNotFound(_) => Ok(Default::default()),
e => Err(e),
})?;
let total: Option<bigdecimal::BigDecimal> = row.try_get("total").or_else(|e| match e {
ColumnNotFound(_) => Ok(Default::default()),
e => Err(e),
})?;
let count: Option<i64> = row.try_get("count").or_else(|e| match e {
ColumnNotFound(_) => Ok(Default::default()),
e => Err(e),
})?;
// Removing millisecond precision to get accurate diffs against clickhouse
let start_bucket: Option<PrimitiveDateTime> = row
.try_get::<Option<PrimitiveDateTime>, _>("start_bucket")?
.and_then(|dt| dt.replace_millisecond(0).ok());
let end_bucket: Option<PrimitiveDateTime> = row
.try_get::<Option<PrimitiveDateTime>, _>("end_bucket")?
.and_then(|dt| dt.replace_millisecond(0).ok());
Ok(Self {
frm_name,
frm_status,
frm_transaction_type,
total,
count,
start_bucket,
end_bucket,
})
}
}
impl<'a> FromRow<'a, PgRow> for super::payments::metrics::PaymentMetricRow {
fn from_row(row: &'a PgRow) -> sqlx::Result<Self> {
let currency: Option<DBEnumWrapper<Currency>> =
row.try_get("currency").or_else(|e| match e {
ColumnNotFound(_) => Ok(Default::default()),
e => Err(e),
})?;
let status: Option<DBEnumWrapper<AttemptStatus>> =
row.try_get("status").or_else(|e| match e {
ColumnNotFound(_) => Ok(Default::default()),
e => Err(e),
})?;
let connector: Option<String> = row.try_get("connector").or_else(|e| match e {
ColumnNotFound(_) => Ok(Default::default()),
e => Err(e),
})?;
let authentication_type: Option<DBEnumWrapper<AuthenticationType>> =
row.try_get("authentication_type").or_else(|e| match e {
ColumnNotFound(_) => Ok(Default::default()),
e => Err(e),
})?;
let payment_method: Option<String> =
row.try_get("payment_method").or_else(|e| match e {
ColumnNotFound(_) => Ok(Default::default()),
e => Err(e),
})?;
let payment_method_type: Option<String> =
row.try_get("payment_method_type").or_else(|e| match e {
ColumnNotFound(_) => Ok(Default::default()),
e => Err(e),
})?;
let client_source: Option<String> = row.try_get("client_source").or_else(|e| match e {
ColumnNotFound(_) => Ok(Default::default()),
e => Err(e),
})?;
let client_version: Option<String> =
row.try_get("client_version").or_else(|e| match e {
ColumnNotFound(_) => Ok(Default::default()),
e => Err(e),
})?;
let profile_id: Option<String> = row.try_get("profile_id").or_else(|e| match e {
ColumnNotFound(_) => Ok(Default::default()),
e => Err(e),
})?;
let card_network: Option<String> = row.try_get("card_network").or_else(|e| match e {
ColumnNotFound(_) => Ok(Default::default()),
e => Err(e),
})?;
let merchant_id: Option<String> = row.try_get("merchant_id").or_else(|e| match e {
ColumnNotFound(_) => Ok(Default::default()),
e => Err(e),
})?;
let card_last_4: Option<String> = row.try_get("card_last_4").or_else(|e| match e {
ColumnNotFound(_) => Ok(Default::default()),
e => Err(e),
})?;
let card_issuer: Option<String> = row.try_get("card_issuer").or_else(|e| match e {
ColumnNotFound(_) => Ok(Default::default()),
e => Err(e),
})?;
let error_reason: Option<String> = row.try_get("error_reason").or_else(|e| match e {
ColumnNotFound(_) => Ok(Default::default()),
e => Err(e),
})?;
let first_attempt: Option<bool> = row.try_get("first_attempt").or_else(|e| match e {
ColumnNotFound(_) => Ok(Default::default()),
e => Err(e),
})?;
let total: Option<bigdecimal::BigDecimal> = row.try_get("total").or_else(|e| match e {
ColumnNotFound(_) => Ok(Default::default()),
e => Err(e),
})?;
let count: Option<i64> = row.try_get("count").or_else(|e| match e {
ColumnNotFound(_) => Ok(Default::default()),
e => Err(e),
})?;
// Removing millisecond precision to get accurate diffs against clickhouse
let start_bucket: Option<PrimitiveDateTime> = row
.try_get::<Option<PrimitiveDateTime>, _>("start_bucket")?
.and_then(|dt| dt.replace_millisecond(0).ok());
let end_bucket: Option<PrimitiveDateTime> = row
.try_get::<Option<PrimitiveDateTime>, _>("end_bucket")?
.and_then(|dt| dt.replace_millisecond(0).ok());
Ok(Self {
currency,
status,
connector,
authentication_type,
payment_method,
payment_method_type,
client_source,
client_version,
profile_id,
card_network,
merchant_id,
card_last_4,
card_issuer,
error_reason,
first_attempt,
total,
count,
start_bucket,
end_bucket,
})
}
}
impl<'a> FromRow<'a, PgRow> for super::payments::distribution::PaymentDistributionRow {
fn from_row(row: &'a PgRow) -> sqlx::Result<Self> {
let currency: Option<DBEnumWrapper<Currency>> =
row.try_get("currency").or_else(|e| match e {
ColumnNotFound(_) => Ok(Default::default()),
e => Err(e),
})?;
let status: Option<DBEnumWrapper<AttemptStatus>> =
row.try_get("status").or_else(|e| match e {
ColumnNotFound(_) => Ok(Default::default()),
e => Err(e),
})?;
let connector: Option<String> = row.try_get("connector").or_else(|e| match e {
ColumnNotFound(_) => Ok(Default::default()),
e => Err(e),
})?;
let authentication_type: Option<DBEnumWrapper<AuthenticationType>> =
row.try_get("authentication_type").or_else(|e| match e {
ColumnNotFound(_) => Ok(Default::default()),
e => Err(e),
})?;
let payment_method: Option<String> =
row.try_get("payment_method").or_else(|e| match e {
ColumnNotFound(_) => Ok(Default::default()),
e => Err(e),
})?;
let payment_method_type: Option<String> =
row.try_get("payment_method_type").or_else(|e| match e {
ColumnNotFound(_) => Ok(Default::default()),
e => Err(e),
})?;
let client_source: Option<String> = row.try_get("client_source").or_else(|e| match e {
ColumnNotFound(_) => Ok(Default::default()),
e => Err(e),
})?;
let client_version: Option<String> =
row.try_get("client_version").or_else(|e| match e {
ColumnNotFound(_) => Ok(Default::default()),
e => Err(e),
})?;
let profile_id: Option<String> = row.try_get("profile_id").or_else(|e| match e {
ColumnNotFound(_) => Ok(Default::default()),
e => Err(e),
})?;
let card_network: Option<String> = row.try_get("card_network").or_else(|e| match e {
ColumnNotFound(_) => Ok(Default::default()),
e => Err(e),
})?;
let merchant_id: Option<String> = row.try_get("merchant_id").or_else(|e| match e {
ColumnNotFound(_) => Ok(Default::default()),
e => Err(e),
})?;
let card_last_4: Option<String> = row.try_get("card_last_4").or_else(|e| match e {
ColumnNotFound(_) => Ok(Default::default()),
e => Err(e),
})?;
let card_issuer: Option<String> = row.try_get("card_issuer").or_else(|e| match e {
ColumnNotFound(_) => Ok(Default::default()),
e => Err(e),
})?;
let error_reason: Option<String> = row.try_get("error_reason").or_else(|e| match e {
ColumnNotFound(_) => Ok(Default::default()),
e => Err(e),
})?;
let total: Option<bigdecimal::BigDecimal> = row.try_get("total").or_else(|e| match e {
ColumnNotFound(_) => Ok(Default::default()),
e => Err(e),
})?;
let count: Option<i64> = row.try_get("count").or_else(|e| match e {
ColumnNotFound(_) => Ok(Default::default()),
e => Err(e),
})?;
let error_message: Option<String> = row.try_get("error_message").or_else(|e| match e {
ColumnNotFound(_) => Ok(Default::default()),
e => Err(e),
})?;
let first_attempt: Option<bool> = row.try_get("first_attempt").or_else(|e| match e {
ColumnNotFound(_) => Ok(Default::default()),
e => Err(e),
})?;
// Removing millisecond precision to get accurate diffs against clickhouse
let start_bucket: Option<PrimitiveDateTime> = row
.try_get::<Option<PrimitiveDateTime>, _>("start_bucket")?
.and_then(|dt| dt.replace_millisecond(0).ok());
let end_bucket: Option<PrimitiveDateTime> = row
.try_get::<Option<PrimitiveDateTime>, _>("end_bucket")?
.and_then(|dt| dt.replace_millisecond(0).ok());
Ok(Self {
currency,
status,
connector,
authentication_type,
payment_method,
payment_method_type,
client_source,
client_version,
profile_id,
card_network,
merchant_id,
card_last_4,
card_issuer,
error_reason,
first_attempt,
total,
count,
error_message,
start_bucket,
end_bucket,
})
}
}
impl<'a> FromRow<'a, PgRow> for super::payments::filters::PaymentFilterRow {
fn from_row(row: &'a PgRow) -> sqlx::Result<Self> {
let currency: Option<DBEnumWrapper<Currency>> =
row.try_get("currency").or_else(|e| match e {
ColumnNotFound(_) => Ok(Default::default()),
e => Err(e),
})?;
let status: Option<DBEnumWrapper<AttemptStatus>> =
row.try_get("status").or_else(|e| match e {
ColumnNotFound(_) => Ok(Default::default()),
e => Err(e),
})?;
let connector: Option<String> = row.try_get("connector").or_else(|e| match e {
ColumnNotFound(_) => Ok(Default::default()),
e => Err(e),
})?;
let authentication_type: Option<DBEnumWrapper<AuthenticationType>> =
row.try_get("authentication_type").or_else(|e| match e {
ColumnNotFound(_) => Ok(Default::default()),
e => Err(e),
})?;
let payment_method: Option<String> =
row.try_get("payment_method").or_else(|e| match e {
ColumnNotFound(_) => Ok(Default::default()),
e => Err(e),
})?;
let payment_method_type: Option<String> =
row.try_get("payment_method_type").or_else(|e| match e {
ColumnNotFound(_) => Ok(Default::default()),
e => Err(e),
})?;
let client_source: Option<String> = row.try_get("client_source").or_else(|e| match e {
ColumnNotFound(_) => Ok(Default::default()),
e => Err(e),
})?;
let client_version: Option<String> =
row.try_get("client_version").or_else(|e| match e {
ColumnNotFound(_) => Ok(Default::default()),
e => Err(e),
})?;
let profile_id: Option<String> = row.try_get("profile_id").or_else(|e| match e {
ColumnNotFound(_) => Ok(Default::default()),
e => Err(e),
})?;
let card_network: Option<String> = row.try_get("card_network").or_else(|e| match e {
ColumnNotFound(_) => Ok(Default::default()),
e => Err(e),
})?;
let merchant_id: Option<String> = row.try_get("merchant_id").or_else(|e| match e {
ColumnNotFound(_) => Ok(Default::default()),
e => Err(e),
})?;
let card_last_4: Option<String> = row.try_get("card_last_4").or_else(|e| match e {
ColumnNotFound(_) => Ok(Default::default()),
e => Err(e),
})?;
let card_issuer: Option<String> = row.try_get("card_issuer").or_else(|e| match e {
ColumnNotFound(_) => Ok(Default::default()),
e => Err(e),
})?;
let error_reason: Option<String> = row.try_get("error_reason").or_else(|e| match e {
ColumnNotFound(_) => Ok(Default::default()),
e => Err(e),
})?;
let first_attempt: Option<bool> = row.try_get("first_attempt").or_else(|e| match e {
ColumnNotFound(_) => Ok(Default::default()),
e => Err(e),
})?;
Ok(Self {
currency,
status,
connector,
authentication_type,
payment_method,
payment_method_type,
client_source,
client_version,
profile_id,
card_network,
merchant_id,
card_last_4,
card_issuer,
error_reason,
first_attempt,
})
}
}
impl<'a> FromRow<'a, PgRow> for super::payment_intents::metrics::PaymentIntentMetricRow {
fn from_row(row: &'a PgRow) -> sqlx::Result<Self> {
let status: Option<DBEnumWrapper<IntentStatus>> =
row.try_get("status").or_else(|e| match e {
ColumnNotFound(_) => Ok(Default::default()),
e => Err(e),
})?;
let currency: Option<DBEnumWrapper<Currency>> =
row.try_get("currency").or_else(|e| match e {
ColumnNotFound(_) => Ok(Default::default()),
e => Err(e),
})?;
let profile_id: Option<String> = row.try_get("profile_id").or_else(|e| match e {
ColumnNotFound(_) => Ok(Default::default()),
e => Err(e),
})?;
let connector: Option<String> = row.try_get("connector").or_else(|e| match e {
ColumnNotFound(_) => Ok(Default::default()),
e => Err(e),
})?;
let authentication_type: Option<DBEnumWrapper<AuthenticationType>> =
row.try_get("authentication_type").or_else(|e| match e {
ColumnNotFound(_) => Ok(Default::default()),
e => Err(e),
})?;
let payment_method: Option<String> =
row.try_get("payment_method").or_else(|e| match e {
ColumnNotFound(_) => Ok(Default::default()),
e => Err(e),
})?;
let payment_method_type: Option<String> =
row.try_get("payment_method_type").or_else(|e| match e {
ColumnNotFound(_) => Ok(Default::default()),
e => Err(e),
})?;
let card_network: Option<String> = row.try_get("card_network").or_else(|e| match e {
ColumnNotFound(_) => Ok(Default::default()),
e => Err(e),
})?;
let merchant_id: Option<String> = row.try_get("merchant_id").or_else(|e| match e {
ColumnNotFound(_) => Ok(Default::default()),
e => Err(e),
})?;
let card_last_4: Option<String> = row.try_get("card_last_4").or_else(|e| match e {
ColumnNotFound(_) => Ok(Default::default()),
e => Err(e),
})?;
let card_issuer: Option<String> = row.try_get("card_issuer").or_else(|e| match e {
ColumnNotFound(_) => Ok(Default::default()),
e => Err(e),
})?;
let error_reason: Option<String> = row.try_get("error_reason").or_else(|e| match e {
ColumnNotFound(_) => Ok(Default::default()),
e => Err(e),
})?;
let total: Option<bigdecimal::BigDecimal> = row.try_get("total").or_else(|e| match e {
ColumnNotFound(_) => Ok(Default::default()),
e => Err(e),
})?;
let count: Option<i64> = row.try_get("count").or_else(|e| match e {
ColumnNotFound(_) => Ok(Default::default()),
e => Err(e),
})?;
let first_attempt: Option<i64> = row.try_get("first_attempt").or_else(|e| match e {
ColumnNotFound(_) => Ok(Default::default()),
e => Err(e),
})?;
// Removing millisecond precision to get accurate diffs against clickhouse
let start_bucket: Option<PrimitiveDateTime> = row
.try_get::<Option<PrimitiveDateTime>, _>("start_bucket")?
.and_then(|dt| dt.replace_millisecond(0).ok());
let end_bucket: Option<PrimitiveDateTime> = row
.try_get::<Option<PrimitiveDateTime>, _>("end_bucket")?
.and_then(|dt| dt.replace_millisecond(0).ok());
Ok(Self {
status,
currency,
profile_id,
connector,
authentication_type,
payment_method,
payment_method_type,
card_network,
merchant_id,
card_last_4,
card_issuer,
error_reason,
first_attempt,
total,
count,
start_bucket,
end_bucket,
})
}
}
/// Hydrates a `PaymentIntentFilterRow` from a Postgres row.
///
/// Every column is optional: a `ColumnNotFound` error is mapped to the
/// field's default (`None`) so the same row type can back queries that
/// select different subsets of columns. Any other decode error is propagated.
impl<'a> FromRow<'a, PgRow> for super::payment_intents::filters::PaymentIntentFilterRow {
    fn from_row(row: &'a PgRow) -> sqlx::Result<Self> {
        // Enum-backed columns decode through `DBEnumWrapper`, which maps the
        // database's textual representation onto the Rust enum.
        let status: Option<DBEnumWrapper<IntentStatus>> =
            row.try_get("status").or_else(|e| match e {
                ColumnNotFound(_) => Ok(Default::default()),
                e => Err(e),
            })?;
        let currency: Option<DBEnumWrapper<Currency>> =
            row.try_get("currency").or_else(|e| match e {
                ColumnNotFound(_) => Ok(Default::default()),
                e => Err(e),
            })?;
        let profile_id: Option<String> = row.try_get("profile_id").or_else(|e| match e {
            ColumnNotFound(_) => Ok(Default::default()),
            e => Err(e),
        })?;
        let connector: Option<String> = row.try_get("connector").or_else(|e| match e {
            ColumnNotFound(_) => Ok(Default::default()),
            e => Err(e),
        })?;
        let authentication_type: Option<DBEnumWrapper<AuthenticationType>> =
            row.try_get("authentication_type").or_else(|e| match e {
                ColumnNotFound(_) => Ok(Default::default()),
                e => Err(e),
            })?;
        let payment_method: Option<String> =
            row.try_get("payment_method").or_else(|e| match e {
                ColumnNotFound(_) => Ok(Default::default()),
                e => Err(e),
            })?;
        let payment_method_type: Option<String> =
            row.try_get("payment_method_type").or_else(|e| match e {
                ColumnNotFound(_) => Ok(Default::default()),
                e => Err(e),
            })?;
        let card_network: Option<String> = row.try_get("card_network").or_else(|e| match e {
            ColumnNotFound(_) => Ok(Default::default()),
            e => Err(e),
        })?;
        let merchant_id: Option<String> = row.try_get("merchant_id").or_else(|e| match e {
            ColumnNotFound(_) => Ok(Default::default()),
            e => Err(e),
        })?;
        let card_last_4: Option<String> = row.try_get("card_last_4").or_else(|e| match e {
            ColumnNotFound(_) => Ok(Default::default()),
            e => Err(e),
        })?;
        let card_issuer: Option<String> = row.try_get("card_issuer").or_else(|e| match e {
            ColumnNotFound(_) => Ok(Default::default()),
            e => Err(e),
        })?;
        let error_reason: Option<String> = row.try_get("error_reason").or_else(|e| match e {
            ColumnNotFound(_) => Ok(Default::default()),
            e => Err(e),
        })?;
        let customer_id: Option<String> = row.try_get("customer_id").or_else(|e| match e {
            ColumnNotFound(_) => Ok(Default::default()),
            e => Err(e),
        })?;
        Ok(Self {
            status,
            currency,
            profile_id,
            connector,
            authentication_type,
            payment_method,
            payment_method_type,
            card_network,
            merchant_id,
            card_last_4,
            card_issuer,
            error_reason,
            customer_id,
        })
    }
}
/// Builds a `RefundFilterRow` from a Postgres row, treating any absent
/// column as `None` rather than a hard failure so partial selects still
/// decode; every other error is bubbled up to the caller.
impl<'a> FromRow<'a, PgRow> for super::refunds::filters::RefundFilterRow {
    fn from_row(row: &'a PgRow) -> sqlx::Result<Self> {
        let currency: Option<DBEnumWrapper<Currency>> = row
            .try_get("currency")
            .or_else(|err| if let ColumnNotFound(_) = err { Ok(None) } else { Err(err) })?;
        let refund_status: Option<DBEnumWrapper<RefundStatus>> = row
            .try_get("refund_status")
            .or_else(|err| if let ColumnNotFound(_) = err { Ok(None) } else { Err(err) })?;
        let connector: Option<String> = row
            .try_get("connector")
            .or_else(|err| if let ColumnNotFound(_) = err { Ok(None) } else { Err(err) })?;
        let refund_type: Option<DBEnumWrapper<RefundType>> = row
            .try_get("refund_type")
            .or_else(|err| if let ColumnNotFound(_) = err { Ok(None) } else { Err(err) })?;
        let profile_id: Option<String> = row
            .try_get("profile_id")
            .or_else(|err| if let ColumnNotFound(_) = err { Ok(None) } else { Err(err) })?;
        let refund_reason: Option<String> = row
            .try_get("refund_reason")
            .or_else(|err| if let ColumnNotFound(_) = err { Ok(None) } else { Err(err) })?;
        let refund_error_message: Option<String> = row
            .try_get("refund_error_message")
            .or_else(|err| if let ColumnNotFound(_) = err { Ok(None) } else { Err(err) })?;
        Ok(Self {
            currency,
            refund_status,
            connector,
            refund_type,
            profile_id,
            refund_reason,
            refund_error_message,
        })
    }
}
/// Hydrates a `RefundDistributionRow` (grouped refund metrics plus their
/// time bucket) from a Postgres row. Missing columns decode as `None`;
/// other decode errors are propagated.
impl<'a> FromRow<'a, PgRow> for super::refunds::distribution::RefundDistributionRow {
    fn from_row(row: &'a PgRow) -> sqlx::Result<Self> {
        let currency: Option<DBEnumWrapper<Currency>> =
            row.try_get("currency").or_else(|e| match e {
                ColumnNotFound(_) => Ok(Default::default()),
                e => Err(e),
            })?;
        let refund_status: Option<DBEnumWrapper<RefundStatus>> =
            row.try_get("refund_status").or_else(|e| match e {
                ColumnNotFound(_) => Ok(Default::default()),
                e => Err(e),
            })?;
        let connector: Option<String> = row.try_get("connector").or_else(|e| match e {
            ColumnNotFound(_) => Ok(Default::default()),
            e => Err(e),
        })?;
        let refund_type: Option<DBEnumWrapper<RefundType>> =
            row.try_get("refund_type").or_else(|e| match e {
                ColumnNotFound(_) => Ok(Default::default()),
                e => Err(e),
            })?;
        let profile_id: Option<String> = row.try_get("profile_id").or_else(|e| match e {
            ColumnNotFound(_) => Ok(Default::default()),
            e => Err(e),
        })?;
        // Aggregate outputs: `total` is a summed amount, `count` a row count.
        let total: Option<bigdecimal::BigDecimal> = row.try_get("total").or_else(|e| match e {
            ColumnNotFound(_) => Ok(Default::default()),
            e => Err(e),
        })?;
        let count: Option<i64> = row.try_get("count").or_else(|e| match e {
            ColumnNotFound(_) => Ok(Default::default()),
            e => Err(e),
        })?;
        let refund_reason: Option<String> = row.try_get("refund_reason").or_else(|e| match e {
            ColumnNotFound(_) => Ok(Default::default()),
            e => Err(e),
        })?;
        let refund_error_message: Option<String> =
            row.try_get("refund_error_message").or_else(|e| match e {
                ColumnNotFound(_) => Ok(Default::default()),
                e => Err(e),
            })?;
        // Removing millisecond precision to get accurate diffs against clickhouse
        let start_bucket: Option<PrimitiveDateTime> = row
            .try_get::<Option<PrimitiveDateTime>, _>("start_bucket")?
            .and_then(|dt| dt.replace_millisecond(0).ok());
        let end_bucket: Option<PrimitiveDateTime> = row
            .try_get::<Option<PrimitiveDateTime>, _>("end_bucket")?
            .and_then(|dt| dt.replace_millisecond(0).ok());
        Ok(Self {
            currency,
            refund_status,
            connector,
            refund_type,
            profile_id,
            total,
            count,
            refund_reason,
            refund_error_message,
            start_bucket,
            end_bucket,
        })
    }
}
/// Decodes an `FrmFilterRow` from a Postgres row; an absent column yields
/// `None` while any other decode failure is returned to the caller.
impl<'a> FromRow<'a, PgRow> for super::frm::filters::FrmFilterRow {
    fn from_row(row: &'a PgRow) -> sqlx::Result<Self> {
        let frm_name: Option<String> = row
            .try_get("frm_name")
            .or_else(|err| if let ColumnNotFound(_) = err { Ok(None) } else { Err(err) })?;
        let frm_status: Option<DBEnumWrapper<FraudCheckStatus>> = row
            .try_get("frm_status")
            .or_else(|err| if let ColumnNotFound(_) = err { Ok(None) } else { Err(err) })?;
        let frm_transaction_type: Option<DBEnumWrapper<FrmTransactionType>> = row
            .try_get("frm_transaction_type")
            .or_else(|err| if let ColumnNotFound(_) = err { Ok(None) } else { Err(err) })?;
        Ok(Self {
            frm_name,
            frm_status,
            frm_transaction_type,
        })
    }
}
/// Decodes a `DisputeFilterRow` from a Postgres row; an absent column
/// yields `None` while any other decode failure is returned to the caller.
impl<'a> FromRow<'a, PgRow> for super::disputes::filters::DisputeFilterRow {
    fn from_row(row: &'a PgRow) -> sqlx::Result<Self> {
        let dispute_stage: Option<String> = row
            .try_get("dispute_stage")
            .or_else(|err| if let ColumnNotFound(_) = err { Ok(None) } else { Err(err) })?;
        let dispute_status: Option<String> = row
            .try_get("dispute_status")
            .or_else(|err| if let ColumnNotFound(_) = err { Ok(None) } else { Err(err) })?;
        let connector: Option<String> = row
            .try_get("connector")
            .or_else(|err| if let ColumnNotFound(_) = err { Ok(None) } else { Err(err) })?;
        let connector_status: Option<String> = row
            .try_get("connector_status")
            .or_else(|err| if let ColumnNotFound(_) = err { Ok(None) } else { Err(err) })?;
        let currency: Option<DBEnumWrapper<Currency>> = row
            .try_get("currency")
            .or_else(|err| if let ColumnNotFound(_) = err { Ok(None) } else { Err(err) })?;
        Ok(Self {
            dispute_stage,
            dispute_status,
            connector,
            connector_status,
            currency,
        })
    }
}
/// Hydrates a `DisputeMetricRow` (grouped dispute metrics plus their time
/// bucket) from a Postgres row. Missing columns decode as `None`; other
/// decode errors are propagated.
impl<'a> FromRow<'a, PgRow> for super::disputes::metrics::DisputeMetricRow {
    fn from_row(row: &'a PgRow) -> sqlx::Result<Self> {
        let dispute_stage: Option<DBEnumWrapper<DisputeStage>> =
            row.try_get("dispute_stage").or_else(|e| match e {
                ColumnNotFound(_) => Ok(Default::default()),
                e => Err(e),
            })?;
        let dispute_status: Option<DBEnumWrapper<DisputeStatus>> =
            row.try_get("dispute_status").or_else(|e| match e {
                ColumnNotFound(_) => Ok(Default::default()),
                e => Err(e),
            })?;
        let connector: Option<String> = row.try_get("connector").or_else(|e| match e {
            ColumnNotFound(_) => Ok(Default::default()),
            e => Err(e),
        })?;
        let currency: Option<DBEnumWrapper<Currency>> =
            row.try_get("currency").or_else(|e| match e {
                ColumnNotFound(_) => Ok(Default::default()),
                e => Err(e),
            })?;
        // Aggregate outputs: `total` is a summed amount, `count` a row count.
        let total: Option<bigdecimal::BigDecimal> = row.try_get("total").or_else(|e| match e {
            ColumnNotFound(_) => Ok(Default::default()),
            e => Err(e),
        })?;
        let count: Option<i64> = row.try_get("count").or_else(|e| match e {
            ColumnNotFound(_) => Ok(Default::default()),
            e => Err(e),
        })?;
        // Removing millisecond precision to get accurate diffs against clickhouse
        let start_bucket: Option<PrimitiveDateTime> = row
            .try_get::<Option<PrimitiveDateTime>, _>("start_bucket")?
            .and_then(|dt| dt.replace_millisecond(0).ok());
        let end_bucket: Option<PrimitiveDateTime> = row
            .try_get::<Option<PrimitiveDateTime>, _>("end_bucket")?
            .and_then(|dt| dt.replace_millisecond(0).ok());
        Ok(Self {
            dispute_stage,
            dispute_status,
            connector,
            currency,
            total,
            count,
            start_bucket,
            end_bucket,
        })
    }
}
/// Serializes a timestamp into SQL text via its `Display` implementation.
impl ToSql<SqlxClient> for PrimitiveDateTime {
    fn to_sql(&self, _table_engine: &TableEngine) -> error_stack::Result<String, ParsingError> {
        Ok(format!("{self}"))
    }
}
/// Maps each analytics collection onto its Postgres table name.
///
/// Only a subset of collections is backed by Postgres; the remaining
/// (Clickhouse-only) collections produce an `UnknownError` report stating
/// that the table is not implemented for Sqlx.
impl ToSql<SqlxClient> for AnalyticsCollection {
    fn to_sql(&self, _table_engine: &TableEngine) -> error_stack::Result<String, ParsingError> {
        // Uniform error constructor for the unsupported-table arms. The
        // printable text is kept byte-identical to the historical messages.
        let unsupported = |table: &str| -> error_stack::Result<String, ParsingError> {
            Err(error_stack::report!(ParsingError::UnknownError)
                .attach_printable(format!("{table} table is not implemented for Sqlx")))
        };
        match self {
            Self::Payment => Ok("payment_attempt".to_string()),
            Self::Refund => Ok("refund".to_string()),
            Self::FraudCheck => Ok("fraud_check".to_string()),
            Self::PaymentIntent => Ok("payment_intent".to_string()),
            Self::Dispute => Ok("dispute".to_string()),
            Self::PaymentSessionized => unsupported("PaymentSessionized"),
            Self::RefundSessionized => unsupported("RefundSessionized"),
            Self::SdkEvents => unsupported("SdkEventsAudit"),
            Self::SdkEventsAnalytics => unsupported("SdkEvents"),
            Self::ApiEvents => unsupported("ApiEvents"),
            Self::PaymentIntentSessionized => unsupported("PaymentIntentSessionized"),
            Self::ConnectorEvents => unsupported("ConnectorEvents"),
            Self::ApiEventsAnalytics => unsupported("ApiEvents"),
            Self::ActivePaymentsAnalytics => unsupported("ActivePaymentsAnalytics"),
            Self::OutgoingWebhookEvent => unsupported("OutgoingWebhookEvents"),
            Self::DisputeSessionized => unsupported("DisputeSessionized"),
            Self::Authentications => unsupported("Authentications"),
        }
    }
}
impl<T> ToSql<SqlxClient> for Aggregate<T>
where
    T: ToSql<SqlxClient>,
{
    /// Renders the aggregate as a Postgres SQL expression, appending an
    /// optional `as <alias>` suffix when an alias is configured.
    fn to_sql(&self, table_engine: &TableEngine) -> error_stack::Result<String, ParsingError> {
        // Shared rendering of the optional alias suffix.
        let alias_sql =
            |alias: &Option<&'static str>| alias.map(|a| format!(" as {a}")).unwrap_or_default();
        Ok(match self {
            Self::Count { field: _, alias } => {
                format!("count(*){}", alias_sql(alias))
            }
            Self::Sum { field, alias } => format!(
                "sum({}){}",
                field
                    .to_sql(table_engine)
                    .attach_printable("Failed to sum aggregate")?,
                alias_sql(alias)
            ),
            Self::Min { field, alias } => format!(
                "min({}){}",
                field
                    .to_sql(table_engine)
                    .attach_printable("Failed to min aggregate")?,
                alias_sql(alias)
            ),
            Self::Max { field, alias } => format!(
                "max({}){}",
                field
                    .to_sql(table_engine)
                    .attach_printable("Failed to max aggregate")?,
                alias_sql(alias)
            ),
            Self::Percentile {
                field,
                alias,
                percentile,
            } => format!(
                // A missing percentile defaults to the median (0.50).
                "percentile_cont(0.{}) within group (order by {} asc){}",
                percentile
                    .map(|p| p.to_string())
                    .unwrap_or_else(|| "50".to_owned()),
                field
                    .to_sql(table_engine)
                    .attach_printable("Failed to percentile aggregate")?,
                alias_sql(alias)
            ),
            Self::DistinctCount { field, alias } => format!(
                "count(distinct {}){}",
                field
                    .to_sql(table_engine)
                    .attach_printable("Failed to distinct count aggregate")?,
                alias_sql(alias)
            ),
        })
    }
}
impl<T> ToSql<SqlxClient> for Window<T>
where
T: ToSql<SqlxClient>,
{
fn to_sql(&self, table_engine: &TableEngine) -> error_stack::Result<String, ParsingError> {
Ok(match self {
Self::Sum {
field,
partition_by,
order_by,
alias,
} => {
format!(
"sum({}) over ({}{}){}",
field
.to_sql(table_engine)
.attach_printable("Failed to sum window")?,
partition_by.as_ref().map_or_else(
|| "".to_owned(),
|partition_by| format!("partition by {}", partition_by.to_owned())
),
order_by.as_ref().map_or_else(
|| "".to_owned(),
|(order_column, order)| format!(
" order by {} {}",
order_column.to_owned(),
order
)
),
alias.map_or_else(|| "".to_owned(), |alias| format!(" as {}", alias))
)
}
Self::RowNumber {
field: _,
partition_by,
order_by,
alias,
} => {
format!(
"row_number() over ({}{}){}",
partition_by.as_ref().map_or_else(
|| "".to_owned(),
|partition_by| format!("partition by {}", partition_by.to_owned())
),
order_by.as_ref().map_or_else(
|| "".to_owned(),
|(order_column, order)| format!(
" order by {} {}",
order_column.to_owned(),
order
)
),
alias.map_or_else(|| "".to_owned(), |alias| format!(" as {}", alias))
)
}
})
}
}
| 11,378 | 1,759 |
hyperswitch | crates/analytics/src/query.rs | .rs | use std::{fmt, marker::PhantomData};
use api_models::{
analytics::{
self as analytics_api,
api_event::ApiEventDimensions,
auth_events::{AuthEventDimensions, AuthEventFlows},
disputes::DisputeDimensions,
frm::{FrmDimensions, FrmTransactionType},
payment_intents::PaymentIntentDimensions,
payments::{PaymentDimensions, PaymentDistributions},
refunds::{RefundDimensions, RefundDistributions, RefundType},
sdk_events::{SdkEventDimensions, SdkEventNames},
Granularity,
},
enums::{
AttemptStatus, AuthenticationType, Connector, Currency, DisputeStage, IntentStatus,
PaymentMethod, PaymentMethodType,
},
refunds::RefundStatus,
};
use common_enums::{
AuthenticationConnectors, AuthenticationStatus, DecoupledAuthenticationType, TransactionStatus,
};
use common_utils::{
errors::{CustomResult, ParsingError},
id_type::{MerchantId, OrganizationId, ProfileId},
};
use diesel_models::{enums as storage_enums, enums::FraudCheckStatus};
use error_stack::ResultExt;
use router_env::{logger, Flow};
use super::types::{AnalyticsCollection, AnalyticsDataSource, LoadRow, TableEngine};
use crate::{enums::AuthInfo, types::QueryExecutionError};
pub type QueryResult<T> = error_stack::Result<T, QueryBuildingError>;
/// Implemented by filter types that can append their WHERE-clause
/// restrictions onto a [`QueryBuilder`] for data source `T`.
pub trait QueryFilter<T>
where
    T: AnalyticsDataSource,
    AnalyticsCollection: ToSql<T>,
{
    /// Adds this filter's clauses to `builder`.
    fn set_filter_clause(&self, builder: &mut QueryBuilder<T>) -> QueryResult<()>;
}
/// Implemented by types that can append GROUP BY expressions onto a
/// [`QueryBuilder`] for data source `T`.
pub trait GroupByClause<T>
where
    T: AnalyticsDataSource,
    AnalyticsCollection: ToSql<T>,
{
    /// Adds this grouping's GROUP BY expressions to `builder`.
    fn set_group_by_clause(&self, builder: &mut QueryBuilder<T>) -> QueryResult<()>;
}
/// Describes how series values (timestamps, in practice) are grouped into
/// fixed-size buckets, and how a value is clipped to its bucket boundaries.
pub trait SeriesBucket {
    /// The value type being bucketed.
    type SeriesType;
    /// The coarsest unit shared by all buckets of a granularity.
    type GranularityLevel;
    /// Returns the truncation unit common to every bucket of this granularity.
    fn get_lowest_common_granularity_level(&self) -> Self::GranularityLevel;
    /// Number of sub-units that make up one bucket.
    fn get_bucket_size(&self) -> u8;
    /// Clips `value` down to the start of the bucket containing it.
    fn clip_to_start(
        &self,
        value: Self::SeriesType,
    ) -> error_stack::Result<Self::SeriesType, PostProcessingError>;
    /// Clips `value` up to the end of the bucket containing it.
    fn clip_to_end(
        &self,
        value: Self::SeriesType,
    ) -> error_stack::Result<Self::SeriesType, PostProcessingError>;
}
impl<T> QueryFilter<T> for analytics_api::TimeRange
where
    T: AnalyticsDataSource,
    time::PrimitiveDateTime: ToSql<T>,
    AnalyticsCollection: ToSql<T>,
{
    /// Restricts `created_at` to the range: start is always applied (`>=`),
    /// the end bound (`<=`) only when present.
    fn set_filter_clause(&self, builder: &mut QueryBuilder<T>) -> QueryResult<()> {
        builder.add_custom_filter_clause("created_at", self.start_time, FilterTypes::Gte)?;
        match self.end_time {
            Some(end_time) => {
                builder.add_custom_filter_clause("created_at", end_time, FilterTypes::Lte)
            }
            None => Ok(()),
        }
    }
}
impl GroupByClause<super::SqlxClient> for Granularity {
    /// Groups rows into time buckets for Postgres.
    ///
    /// Postgres lacks a direct bucketing helper, so this first truncates
    /// `created_at` to the lowest common unit and then, for 5/15/30-minute
    /// granularities only, adds a second GROUP BY that bins the minute part.
    fn set_group_by_clause(
        &self,
        builder: &mut QueryBuilder<super::SqlxClient>,
    ) -> QueryResult<()> {
        let trunc_scale = self.get_lowest_common_granularity_level();
        // One-minute/hour/day buckets are fully described by DATE_TRUNC
        // alone, so no extra binning expression is needed for them.
        let granularity_bucket_scale = match self {
            Self::OneMin => None,
            Self::FiveMin | Self::FifteenMin | Self::ThirtyMin => Some("minute"),
            Self::OneHour | Self::OneDay => None,
        };
        let granularity_divisor = self.get_bucket_size();
        builder
            .add_group_by_clause(format!("DATE_TRUNC('{trunc_scale}', created_at)"))
            .attach_printable("Error adding time prune group by")?;
        if let Some(scale) = granularity_bucket_scale {
            builder
                .add_group_by_clause(format!(
                    "FLOOR(DATE_PART('{scale}', created_at)/{granularity_divisor})"
                ))
                .attach_printable("Error adding time binning group by")?;
        }
        Ok(())
    }
}
impl GroupByClause<super::ClickhouseClient> for Granularity {
    /// Groups rows into Clickhouse time buckets of this granularity.
    ///
    /// Clickhouse ships a dedicated bucketing helper for every granularity
    /// except thirty minutes, which falls back to the generic interval form.
    fn set_group_by_clause(
        &self,
        builder: &mut QueryBuilder<super::ClickhouseClient>,
    ) -> QueryResult<()> {
        builder
            .add_group_by_clause(match self {
                Self::OneMin => "toStartOfMinute(created_at)",
                Self::FiveMin => "toStartOfFiveMinutes(created_at)",
                Self::FifteenMin => "toStartOfFifteenMinutes(created_at)",
                Self::ThirtyMin => "toStartOfInterval(created_at, INTERVAL 30 minute)",
                Self::OneHour => "toStartOfHour(created_at)",
                Self::OneDay => "toStartOfDay(created_at)",
            })
            .attach_printable("Error adding interval group by")
    }
}
/// Truncation units for time bucketing; `Display` renders the lowercase
/// unit name used inside SQL expressions (e.g. `DATE_TRUNC('hour', ..)`).
#[derive(strum::Display)]
#[strum(serialize_all = "lowercase")]
pub enum TimeGranularityLevel {
    Minute,
    Hour,
    Day,
}
impl SeriesBucket for Granularity {
    type SeriesType = time::PrimitiveDateTime;
    type GranularityLevel = TimeGranularityLevel;
    /// Every sub-hour (and one-hour) granularity truncates at the hour;
    /// one-minute truncates at the minute and one-day at the day.
    fn get_lowest_common_granularity_level(&self) -> Self::GranularityLevel {
        match self {
            Self::OneMin => TimeGranularityLevel::Minute,
            Self::FiveMin | Self::FifteenMin | Self::ThirtyMin | Self::OneHour => {
                TimeGranularityLevel::Hour
            }
            Self::OneDay => TimeGranularityLevel::Day,
        }
    }
    /// Bucket size in sub-units of the truncation level: seconds for
    /// `OneMin` (60), minutes for the hour-level buckets, hours for `OneDay`.
    fn get_bucket_size(&self) -> u8 {
        match self {
            Self::OneMin => 60,
            Self::FiveMin => 5,
            Self::FifteenMin => 15,
            Self::ThirtyMin => 30,
            Self::OneHour => 60,
            Self::OneDay => 24,
        }
    }
    /// Rounds `value` down to the start of its bucket by zeroing the
    /// sub-unit remainder (e.g. 10:37 in a 15-minute bucket becomes 10:30).
    fn clip_to_start(
        &self,
        value: Self::SeriesType,
    ) -> error_stack::Result<Self::SeriesType, PostProcessingError> {
        // Round down to the nearest multiple of `modulo`.
        let clip_start = |value: u8, modulo: u8| -> u8 { value - value % modulo };
        let clipped_time = match (
            self.get_lowest_common_granularity_level(),
            self.get_bucket_size(),
        ) {
            (TimeGranularityLevel::Minute, i) => time::Time::MIDNIGHT
                .replace_second(clip_start(value.second(), i))
                .and_then(|t| t.replace_minute(value.minute()))
                .and_then(|t| t.replace_hour(value.hour())),
            (TimeGranularityLevel::Hour, i) => time::Time::MIDNIGHT
                .replace_minute(clip_start(value.minute(), i))
                .and_then(|t| t.replace_hour(value.hour())),
            (TimeGranularityLevel::Day, i) => {
                time::Time::MIDNIGHT.replace_hour(clip_start(value.hour(), i))
            }
        }
        .change_context(PostProcessingError::BucketClipping)?;
        Ok(value.replace_time(clipped_time))
    }
    /// Rounds `value` up to the last sub-unit of its bucket (e.g. 10:37 in a
    /// 15-minute bucket becomes 10:44).
    fn clip_to_end(
        &self,
        value: Self::SeriesType,
    ) -> error_stack::Result<Self::SeriesType, PostProcessingError> {
        // Round up to one sub-unit before the next multiple of `modulo`.
        let clip_end = |value: u8, modulo: u8| -> u8 { value + modulo - 1 - value % modulo };
        let clipped_time = match (
            self.get_lowest_common_granularity_level(),
            self.get_bucket_size(),
        ) {
            (TimeGranularityLevel::Minute, i) => time::Time::MIDNIGHT
                .replace_second(clip_end(value.second(), i))
                .and_then(|t| t.replace_minute(value.minute()))
                .and_then(|t| t.replace_hour(value.hour())),
            (TimeGranularityLevel::Hour, i) => time::Time::MIDNIGHT
                .replace_minute(clip_end(value.minute(), i))
                .and_then(|t| t.replace_hour(value.hour())),
            (TimeGranularityLevel::Day, i) => {
                time::Time::MIDNIGHT.replace_hour(clip_end(value.hour(), i))
            }
        }
        .change_context(PostProcessingError::BucketClipping)
        .attach_printable_lazy(|| format!("Bucket Clip Error: {value}"))?;
        Ok(value.replace_time(clipped_time))
    }
}
/// Errors raised while assembling a SQL query with [`QueryBuilder`].
#[derive(thiserror::Error, Debug)]
pub enum QueryBuildingError {
    /// The requested construct is not supported for the target data source.
    #[allow(dead_code)]
    #[error("Not Implemented: {0}")]
    NotImplemented(String),
    /// A value could not be serialized into its SQL representation.
    #[error("Failed to Serialize to SQL")]
    SqlSerializeError,
    /// The builder state cannot produce a valid query.
    #[error("Failed to build sql query: {0}")]
    InvalidQuery(&'static str),
}
/// Errors raised while post-processing query results (e.g. time-bucket
/// clipping in [`SeriesBucket`]).
#[derive(thiserror::Error, Debug)]
pub enum PostProcessingError {
    #[error("Error Clipping values to bucket sizes")]
    BucketClipping,
}
/// SQL aggregate expressions over a field of type `R`, each with an
/// optional output alias.
#[derive(Debug)]
pub enum Aggregate<R> {
    /// `count(*)`; the field is ignored.
    Count {
        field: Option<R>,
        alias: Option<&'static str>,
    },
    /// `sum(field)`.
    Sum {
        field: R,
        alias: Option<&'static str>,
    },
    /// `min(field)`.
    Min {
        field: R,
        alias: Option<&'static str>,
    },
    /// `max(field)`.
    Max {
        field: R,
        alias: Option<&'static str>,
    },
    /// `percentile_cont` over the field; `percentile` is the two-digit
    /// fraction (defaults to the median when `None`).
    Percentile {
        field: R,
        alias: Option<&'static str>,
        percentile: Option<&'static u8>,
    },
    /// `count(distinct field)`.
    DistinctCount {
        field: R,
        alias: Option<&'static str>,
    },
}
// Window functions in query
// ---
// Description -
// field: to_sql type value used as expr in aggregation
// partition_by: partition by fields in window
// order_by: order by fields and order (Ascending / Descending) in window
// alias: alias of window expr in query
// ---
// Usage -
// Window::Sum {
// field: "count",
// partition_by: Some(query_builder.transform_to_sql_values(&dimensions).switch()?),
// order_by: Some(("value", Descending)),
// alias: Some("total"),
// }
/// SQL window-function expressions; see the usage notes above for the
/// meaning of `field`, `partition_by`, `order_by` and `alias`.
#[derive(Debug)]
pub enum Window<R> {
    /// `sum(field) over (partition by .. order by ..)`.
    Sum {
        field: R,
        partition_by: Option<String>,
        order_by: Option<(String, Order)>,
        alias: Option<&'static str>,
    },
    /// `row_number() over (partition by .. order by ..)`; `field` is unused.
    RowNumber {
        field: R,
        partition_by: Option<String>,
        order_by: Option<(String, Order)>,
        alias: Option<&'static str>,
    },
}
/// Sort direction for ORDER BY clauses; renders as the SQL keywords
/// `asc` / `desc`.
#[derive(Debug, Clone, Copy)]
pub enum Order {
    Ascending,
    Descending,
}
impl fmt::Display for Order {
    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
        let keyword = match self {
            Self::Ascending => "asc",
            Self::Descending => "desc",
        };
        f.write_str(keyword)
    }
}
// Select TopN values for a group based on a metric
// ---
// Description -
// columns: Columns in group to select TopN values for
// count: N in TopN
// order_column: metric used to sort and limit TopN
// order: sort order of metric (Ascending / Descending)
// ---
// Usage -
// Use via add_top_n_clause fn of query_builder
// add_top_n_clause(
// &dimensions,
// distribution.distribution_cardinality.into(),
// "count",
// Order::Descending,
// )
#[allow(dead_code)]
#[derive(Debug)]
pub struct TopN {
    /// Comma-joined group columns the TopN is computed within.
    pub columns: String,
    /// N: how many rows to keep per group.
    pub count: u64,
    /// Metric column used to rank rows inside each group.
    pub order_column: String,
    /// Ranking direction.
    pub order: Order,
}
/// Clickhouse-style `LIMIT <n> BY <columns>` clause.
#[derive(Debug, Clone)]
pub struct LimitByClause {
    limit: u64,
    columns: Vec<String>,
}
impl fmt::Display for LimitByClause {
    /// Renders as `LIMIT <n> BY <col>, <col>, ...`.
    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
        let column_list = self.columns.join(", ");
        write!(f, "LIMIT {} BY {}", self.limit, column_list)
    }
}
/// Boolean operator used to join sibling filters in a [`Filter`] tree;
/// defaults to `And`.
#[derive(Debug, Default, Clone, Copy)]
pub enum FilterCombinator {
    #[default]
    And,
    Or,
}
/// Serializes the combinator as the SQL keyword, padded with spaces so it
/// can be used directly as a join separator.
impl<T: AnalyticsDataSource> ToSql<T> for FilterCombinator {
    fn to_sql(&self, _table_engine: &TableEngine) -> error_stack::Result<String, ParsingError> {
        let separator = match self {
            Self::And => " AND ",
            Self::Or => " OR ",
        };
        Ok(separator.to_owned())
    }
}
/// WHERE-clause tree: a leaf comparison (`lhs op rhs`, already serialized
/// to SQL strings) or a parenthesised group of sub-filters joined by a
/// [`FilterCombinator`].
#[derive(Debug, Clone)]
pub enum Filter {
    Plain(String, FilterTypes, String),
    NestedFilter(FilterCombinator, Vec<Filter>),
}
/// The default filter is an empty AND group, i.e. no restrictions.
impl Default for Filter {
    fn default() -> Self {
        Self::NestedFilter(FilterCombinator::default(), vec![])
    }
}
impl<T: AnalyticsDataSource> ToSql<T> for Filter {
    /// Recursively serializes the filter tree: leaves via
    /// `filter_type_to_sql`, groups as a parenthesised list of children
    /// joined by the combinator keyword. An empty group renders as `(  )`.
    fn to_sql(&self, table_engine: &TableEngine) -> error_stack::Result<String, ParsingError> {
        Ok(match self {
            Self::Plain(l, op, r) => filter_type_to_sql(l, *op, r),
            Self::NestedFilter(operator, filters) => {
                format!(
                    "( {} )",
                    filters
                        .iter()
                        .map(|f| <Self as ToSql<T>>::to_sql(f, table_engine))
                        .collect::<Result<Vec<String>, _>>()?
                        .join(
                            <FilterCombinator as ToSql<T>>::to_sql(operator, table_engine)?
                                .as_ref()
                        )
                )
            }
        })
    }
}
/// Incrementally assembles a SQL query for the analytics data source `T`.
#[derive(Debug)]
pub struct QueryBuilder<T>
where
    T: AnalyticsDataSource,
    AnalyticsCollection: ToSql<T>,
{
    /// SELECT expressions, already serialized to SQL.
    columns: Vec<String>,
    /// WHERE-clause tree.
    filters: Filter,
    /// GROUP BY expressions.
    group_by: Vec<String>,
    /// ORDER BY expressions (column plus direction).
    order_by: Vec<String>,
    /// HAVING conditions, if any.
    having: Option<Vec<(String, FilterTypes, String)>>,
    /// Optional Clickhouse-style `LIMIT n BY ..` clause.
    limit_by: Option<LimitByClause>,
    /// SELECT expressions of the wrapping (outer) query, if one is used.
    outer_select: Vec<String>,
    /// Optional per-group TopN restriction.
    top_n: Option<TopN>,
    /// Table the query reads from.
    table: AnalyticsCollection,
    /// Whether to emit SELECT DISTINCT.
    distinct: bool,
    /// Marker tying the builder to its data source type.
    db_type: PhantomData<T>,
    /// Engine resolved from `T` for the chosen table.
    table_engine: TableEngine,
}
/// Serializes a value into its SQL text representation for data source `T`.
pub trait ToSql<T: AnalyticsDataSource> {
    fn to_sql(&self, table_engine: &TableEngine) -> error_stack::Result<String, ParsingError>;
}
/// ID newtypes serialize as their underlying string representation.
impl<T: AnalyticsDataSource> ToSql<T> for &MerchantId {
    fn to_sql(&self, _table_engine: &TableEngine) -> error_stack::Result<String, ParsingError> {
        Ok(String::from(self.get_string_repr()))
    }
}
impl<T: AnalyticsDataSource> ToSql<T> for MerchantId {
    fn to_sql(&self, _table_engine: &TableEngine) -> error_stack::Result<String, ParsingError> {
        Ok(String::from(self.get_string_repr()))
    }
}
impl<T: AnalyticsDataSource> ToSql<T> for &OrganizationId {
    fn to_sql(&self, _table_engine: &TableEngine) -> error_stack::Result<String, ParsingError> {
        Ok(String::from(self.get_string_repr()))
    }
}
impl<T: AnalyticsDataSource> ToSql<T> for ProfileId {
    fn to_sql(&self, _table_engine: &TableEngine) -> error_stack::Result<String, ParsingError> {
        Ok(String::from(self.get_string_repr()))
    }
}
impl<T: AnalyticsDataSource> ToSql<T> for &common_utils::id_type::PaymentId {
    fn to_sql(&self, _table_engine: &TableEngine) -> error_stack::Result<String, ParsingError> {
        Ok(String::from(self.get_string_repr()))
    }
}
impl<T: AnalyticsDataSource> ToSql<T> for common_utils::id_type::CustomerId {
    fn to_sql(&self, _table_engine: &TableEngine) -> error_stack::Result<String, ParsingError> {
        Ok(String::from(self.get_string_repr()))
    }
}
/// Booleans serialize as the SQL integer literals `1` / `0`.
impl<T: AnalyticsDataSource> ToSql<T> for bool {
    fn to_sql(&self, _table_engine: &TableEngine) -> error_stack::Result<String, ParsingError> {
        Ok(if *self { "1" } else { "0" }.to_string())
    }
}
/// Implements `ToSql` for each listed type by delegating to its `ToString`
/// (i.e. `Display`) implementation.
macro_rules! impl_to_sql_for_to_string {
    ($($type:ty),+) => {
        $(
            impl<T: AnalyticsDataSource> ToSql<T> for $type {
                fn to_sql(&self, _table_engine: &TableEngine) -> error_stack::Result<String, ParsingError> {
                    Ok(self.to_string())
                }
            }
        )+
    };
}
// Dimension, enum, and scalar types used by payment/refund/FRM queries.
impl_to_sql_for_to_string!(
    String,
    &str,
    &PaymentDimensions,
    &PaymentIntentDimensions,
    &RefundDimensions,
    &FrmDimensions,
    PaymentDimensions,
    PaymentIntentDimensions,
    &PaymentDistributions,
    RefundDimensions,
    &RefundDistributions,
    FrmDimensions,
    PaymentMethod,
    PaymentMethodType,
    AuthenticationType,
    Connector,
    AttemptStatus,
    IntentStatus,
    RefundStatus,
    FraudCheckStatus,
    storage_enums::RefundStatus,
    Currency,
    RefundType,
    FrmTransactionType,
    TransactionStatus,
    AuthenticationStatus,
    AuthenticationConnectors,
    DecoupledAuthenticationType,
    Flow,
    &String,
    &bool,
    &u64,
    u64,
    Order
);
// SDK/API event, dispute, and authentication dimension types.
impl_to_sql_for_to_string!(
    &SdkEventDimensions,
    SdkEventDimensions,
    SdkEventNames,
    AuthEventFlows,
    &ApiEventDimensions,
    ApiEventDimensions,
    &DisputeDimensions,
    DisputeDimensions,
    DisputeStage,
    AuthEventDimensions,
    &AuthEventDimensions
);
/// Comparison operators available to [`Filter::Plain`] leaves.
#[derive(Debug, Clone, Copy)]
pub enum FilterTypes {
    Equal,
    NotEqual,
    EqualBool,
    In,
    Gte,
    Lte,
    Gt,
    Like,
    NotLike,
    IsNotNull,
}
/// Renders a single `lhs op rhs` SQL predicate.
///
/// String-valued comparisons quote the right-hand side; `EqualBool` and `Gt`
/// emit it raw (numeric), `In` expects an already-formatted list, and
/// `IsNotNull` ignores `r` entirely.
pub fn filter_type_to_sql(l: &str, op: FilterTypes, r: &str) -> String {
    use FilterTypes::*;
    match op {
        Equal => format!("{l} = '{r}'"),
        NotEqual => format!("{l} != '{r}'"),
        EqualBool => format!("{l} = {r}"),
        In => format!("{l} IN ({r})"),
        Gte => format!("{l} >= '{r}'"),
        Lte => format!("{l} <= '{r}'"),
        Gt => format!("{l} > {r}"),
        Like => format!("{l} LIKE '%{r}%'"),
        NotLike => format!("{l} NOT LIKE '%{r}%'"),
        IsNotNull => format!("{l} IS NOT NULL"),
    }
}
impl<T> QueryBuilder<T>
where
T: AnalyticsDataSource,
AnalyticsCollection: ToSql<T>,
{
pub fn new(table: AnalyticsCollection) -> Self {
Self {
columns: Default::default(),
filters: Default::default(),
group_by: Default::default(),
order_by: Default::default(),
having: Default::default(),
limit_by: Default::default(),
outer_select: Default::default(),
top_n: Default::default(),
table,
distinct: Default::default(),
db_type: Default::default(),
table_engine: T::get_table_engine(table),
}
}
pub fn add_select_column(&mut self, column: impl ToSql<T>) -> QueryResult<()> {
self.columns.push(
column
.to_sql(&self.table_engine)
.change_context(QueryBuildingError::SqlSerializeError)
.attach_printable("Error serializing select column")?,
);
Ok(())
}
pub fn transform_to_sql_values(&mut self, values: &[impl ToSql<T>]) -> QueryResult<String> {
let res = values
.iter()
.map(|i| i.to_sql(&self.table_engine))
.collect::<error_stack::Result<Vec<String>, ParsingError>>()
.change_context(QueryBuildingError::SqlSerializeError)
.attach_printable("Error serializing range filter value")?
.join(", ");
Ok(res)
}
    /// Restricts the query to the top `count` rows per group.
    ///
    /// Adds a `row_number()` window (aliased `top_n`) partitioned by
    /// `columns` and ordered by `order_column`, and records the [`TopN`]
    /// restriction so the outer query can filter on it.
    pub fn add_top_n_clause(
        &mut self,
        columns: &[impl ToSql<T>],
        count: u64,
        order_column: impl ToSql<T>,
        order: Order,
    ) -> QueryResult<()>
    where
        Window<&'static str>: ToSql<T>,
    {
        let partition_by_columns = self.transform_to_sql_values(columns)?;
        let order_by_column = order_column
            .to_sql(&self.table_engine)
            .change_context(QueryBuildingError::SqlSerializeError)
            .attach_printable("Error serializing select column")?;
        self.add_outer_select_column(Window::RowNumber {
            field: "",
            partition_by: Some(partition_by_columns.clone()),
            order_by: Some((order_by_column.clone(), order)),
            alias: Some("top_n"),
        })?;
        self.top_n = Some(TopN {
            columns: partition_by_columns,
            count,
            order_column: order_by_column,
            order,
        });
        Ok(())
    }
    /// Marks the query as `SELECT DISTINCT`.
    pub fn set_distinct(&mut self) {
        self.distinct = true
    }
    /// Adds a quoted equality filter (`key = 'value'`).
    pub fn add_filter_clause(
        &mut self,
        key: impl ToSql<T>,
        value: impl ToSql<T>,
    ) -> QueryResult<()> {
        self.add_custom_filter_clause(key, value, FilterTypes::Equal)
    }
    /// Adds an unquoted equality filter (`key = value`) for boolean/numeric
    /// right-hand sides.
    pub fn add_bool_filter_clause(
        &mut self,
        key: impl ToSql<T>,
        value: impl ToSql<T>,
    ) -> QueryResult<()> {
        self.add_custom_filter_clause(key, value, FilterTypes::EqualBool)
    }
    /// Adds a quoted inequality filter (`key != 'value'`).
    pub fn add_negative_filter_clause(
        &mut self,
        key: impl ToSql<T>,
        value: impl ToSql<T>,
    ) -> QueryResult<()> {
        self.add_custom_filter_clause(key, value, FilterTypes::NotEqual)
    }
pub fn add_custom_filter_clause(
&mut self,
lhs: impl ToSql<T>,
rhs: impl ToSql<T>,
comparison: FilterTypes,
) -> QueryResult<()> {
let filter = Filter::Plain(
lhs.to_sql(&self.table_engine)
.change_context(QueryBuildingError::SqlSerializeError)
.attach_printable("Error serializing filter key")?,
comparison,
rhs.to_sql(&self.table_engine)
.change_context(QueryBuildingError::SqlSerializeError)
.attach_printable("Error serializing filter value")?,
);
self.add_nested_filter_clause(filter);
Ok(())
}
    /// Appends `filter` to the filter tree.
    ///
    /// If the current root is already a group, the filter is pushed into it;
    /// if the root is a single plain filter, both are wrapped in a new
    /// AND group.
    pub fn add_nested_filter_clause(&mut self, filter: Filter) {
        match &mut self.filters {
            Filter::NestedFilter(_, ref mut filters) => filters.push(filter),
            f @ Filter::Plain(_, _, _) => {
                self.filters = Filter::NestedFilter(FilterCombinator::And, vec![f.clone(), filter]);
            }
        }
    }
    /// Adds a `key IN ('v1', 'v2', ...)` filter over the given values.
    ///
    /// Each value is serialized, stripped of whitespace, and single-quoted
    /// before being joined into the IN list.
    pub fn add_filter_in_range_clause(
        &mut self,
        key: impl ToSql<T>,
        values: &[impl ToSql<T>],
    ) -> QueryResult<()> {
        let list = values
            .iter()
            .map(|i| {
                // trimming whitespaces from the filter values received in request, to prevent a possibility of an SQL injection
                i.to_sql(&self.table_engine).map(|s| {
                    let trimmed_str = s.replace(' ', "");
                    format!("'{trimmed_str}'")
                })
            })
            .collect::<error_stack::Result<Vec<String>, ParsingError>>()
            .change_context(QueryBuildingError::SqlSerializeError)
            .attach_printable("Error serializing range filter value")?
            .join(", ");
        self.add_custom_filter_clause(key, list, FilterTypes::In)
    }
/// Registers `column` in the GROUP BY clause.
pub fn add_group_by_clause(&mut self, column: impl ToSql<T>) -> QueryResult<()> {
    let column_sql = column
        .to_sql(&self.table_engine)
        .change_context(QueryBuildingError::SqlSerializeError)
        .attach_printable("Error serializing group by field")?;
    self.group_by.push(column_sql);
    Ok(())
}
/// Appends an `ORDER BY <column> <direction>` entry.
pub fn add_order_by_clause(
    &mut self,
    column: impl ToSql<T>,
    order: impl ToSql<T>,
) -> QueryResult<()> {
    let column_part = column
        .to_sql(&self.table_engine)
        .change_context(QueryBuildingError::SqlSerializeError)
        .attach_printable("Error serializing order by column")?;
    let direction_part = order
        .to_sql(&self.table_engine)
        .change_context(QueryBuildingError::SqlSerializeError)
        .attach_printable("Error serializing order direction")?;
    self.order_by.push(format!("{column_part} {direction_part}"));
    Ok(())
}
/// Configures a `LIMIT <limit> BY <columns...>` clause.
pub fn set_limit_by(&mut self, limit: u64, columns: &[impl ToSql<T>]) -> QueryResult<()> {
    let mut serialized = Vec::with_capacity(columns.len());
    for column in columns {
        serialized.push(
            column
                .to_sql(&self.table_engine)
                .change_context(QueryBuildingError::SqlSerializeError)
                .attach_printable("Error serializing LIMIT BY columns")?,
        );
    }
    self.limit_by = Some(LimitByClause {
        limit,
        columns: serialized,
    });
    Ok(())
}
/// Adds a `time_bucket` select column that buckets `created_at` into fixed
/// minute-sized intervals matching the requested granularity.
pub fn add_granularity_in_mins(&mut self, granularity: Granularity) -> QueryResult<()> {
    // Interval width in minutes for each supported granularity.
    let interval = match granularity {
        Granularity::OneMin => "1",
        Granularity::FiveMin => "5",
        Granularity::FifteenMin => "15",
        Granularity::ThirtyMin => "30",
        Granularity::OneHour => "60",
        Granularity::OneDay => "1440",
    };
    // Propagate serialization failures instead of silently discarding the
    // result (the previous `let _ =` swallowed any error).
    self.add_select_column(format!(
        "toStartOfInterval(created_at, INTERVAL {interval} MINUTE) as time_bucket"
    ))
}
/// Serializes the accumulated filter tree into a WHERE-clause fragment.
fn get_filter_clause(&self) -> QueryResult<String> {
    <Filter as ToSql<T>>::to_sql(&self.filters, &self.table_engine)
        .change_context(QueryBuildingError::SqlSerializeError)
}
/// Joins the registered select columns into a SELECT-list fragment.
fn get_select_clause(&self) -> String {
    self.columns.join(", ")
}
/// Joins the registered group-by columns into a GROUP BY fragment.
fn get_group_by_clause(&self) -> String {
    self.group_by.join(", ")
}
/// Joins the outer-select columns (used when wrapping the query) into a
/// SELECT-list fragment.
fn get_outer_select_clause(&self) -> String {
    self.outer_select.join(", ")
}
/// Adds a HAVING condition comparing `aggregate` against `value`.
///
/// Both sides are serialized for the current table engine; the condition is
/// appended to the (lazily created) list of HAVING entries.
pub fn add_having_clause<R>(
    &mut self,
    aggregate: Aggregate<R>,
    filter_type: FilterTypes,
    value: impl ToSql<T>,
) -> QueryResult<()>
where
    Aggregate<R>: ToSql<T>,
{
    let aggregate_sql = aggregate
        .to_sql(&self.table_engine)
        .change_context(QueryBuildingError::SqlSerializeError)
        .attach_printable("Error serializing having aggregate")?;
    let value_sql = value
        .to_sql(&self.table_engine)
        .change_context(QueryBuildingError::SqlSerializeError)
        .attach_printable("Error serializing having value")?;
    self.having
        .get_or_insert_with(Vec::new)
        .push((aggregate_sql, filter_type, value_sql));
    Ok(())
}
/// Registers a column for the wrapping outer SELECT (used by window queries).
pub fn add_outer_select_column(&mut self, column: impl ToSql<T>) -> QueryResult<()> {
    let column_sql = column
        .to_sql(&self.table_engine)
        .change_context(QueryBuildingError::SqlSerializeError)
        .attach_printable("Error serializing outer select column")?;
    self.outer_select.push(column_sql);
    Ok(())
}
/// Renders the accumulated HAVING conditions joined with `AND`, if any.
pub fn get_filter_type_clause(&self) -> Option<String> {
    self.having.as_ref().map(|conditions| {
        let rendered: Vec<String> = conditions
            .iter()
            .map(|(lhs, op, rhs)| filter_type_to_sql(lhs, *op, rhs))
            .collect();
        rendered.join(" AND ")
    })
}
/// Assembles the final SQL string from every clause registered on the builder.
///
/// Clause order: `SELECT [DISTINCT] ... FROM ... WHERE ... GROUP BY ...
/// HAVING ... ORDER BY ... LIMIT BY`, followed by optional outer-select and
/// top-n wrappers that enclose the inner statement as a subselect.
pub fn build_query(&mut self) -> QueryResult<String>
where
    Aggregate<&'static str>: ToSql<T>,
    Window<&'static str>: ToSql<T>,
{
    // A query with no select columns is malformed; fail early.
    if self.columns.is_empty() {
        Err(QueryBuildingError::InvalidQuery(
            "No select fields provided",
        ))?;
    }
    let mut query = String::from("SELECT ");
    if self.distinct {
        query.push_str("DISTINCT ");
    }
    query.push_str(&self.get_select_clause());
    query.push_str(" FROM ");
    query.push_str(
        &self
            .table
            .to_sql(&self.table_engine)
            .change_context(QueryBuildingError::SqlSerializeError)
            .attach_printable("Error serializing table value")?,
    );
    let filter_clause = self.get_filter_clause()?;
    if !filter_clause.is_empty() {
        query.push_str(" WHERE ");
        query.push_str(filter_clause.as_str());
    }
    if !self.group_by.is_empty() {
        query.push_str(" GROUP BY ");
        query.push_str(&self.get_group_by_clause());
        // For collapsing tables, require the summed sign per group to be
        // >= 1 so fully collapsed (cancelled) groups are excluded. Note this
        // mutates `self.having` mid-build, before HAVING is rendered below.
        if let TableEngine::CollapsingMergeTree { sign } = self.table_engine {
            self.add_having_clause(
                Aggregate::Count {
                    field: Some(sign),
                    alias: None,
                },
                FilterTypes::Gte,
                "1",
            )?;
        }
    }
    if self.having.is_some() {
        if let Some(condition) = self.get_filter_type_clause() {
            query.push_str(" HAVING ");
            query.push_str(condition.as_str());
        }
    }
    if !self.order_by.is_empty() {
        query.push_str(" ORDER BY ");
        query.push_str(&self.order_by.join(", "));
    }
    if let Some(limit_by) = &self.limit_by {
        query.push_str(&format!(" {}", limit_by));
    }
    // Wrap the statement in an outer SELECT when outer/window columns exist.
    if !self.outer_select.is_empty() {
        query.insert_str(
            0,
            format!("SELECT {} FROM (", &self.get_outer_select_clause()).as_str(),
        );
        query.push_str(") _");
    }
    // Top-N filtering relies on the `top_n` row-number alias registered when
    // the TopN configuration was set.
    if let Some(top_n) = &self.top_n {
        query.insert_str(0, "SELECT * FROM (");
        query.push_str(format!(") _ WHERE top_n <= {}", top_n.count).as_str());
    }
    logger::debug!(%query);
    Ok(query)
}
/// Builds the SQL for this builder and executes it against `store`.
///
/// The outer `Result` carries query-building failures; the inner `Result`
/// carries execution failures from the data source.
pub async fn execute_query<R, P>(
    &mut self,
    store: &P,
) -> CustomResult<CustomResult<Vec<R>, QueryExecutionError>, QueryBuildingError>
where
    P: LoadRow<R> + AnalyticsDataSource,
    Aggregate<&'static str>: ToSql<T>,
    Window<&'static str>: ToSql<T>,
{
    // Keep the original error variant from `build_query` (previously it was
    // re-labelled as `SqlSerializeError`, masking e.g. `InvalidQuery`), and
    // describe the failure accurately: at this point the query has only been
    // built, not executed.
    let query = self.build_query().attach_printable("Failed to build query")?;
    Ok(store.load_results(query.as_str()).await)
}
}
impl<T> QueryFilter<T> for AuthInfo
where
    T: AnalyticsDataSource,
    AnalyticsCollection: ToSql<T>,
{
    /// Scopes the query to the caller's access level by adding the
    /// appropriate org / merchant / profile filters.
    fn set_filter_clause(&self, builder: &mut QueryBuilder<T>) -> QueryResult<()> {
        // Every access level is at least organization-scoped.
        let org_id = match self {
            Self::OrgLevel { org_id }
            | Self::MerchantLevel { org_id, .. }
            | Self::ProfileLevel { org_id, .. } => org_id,
        };
        builder
            .add_filter_clause("organization_id", org_id)
            .attach_printable("Error adding organization_id filter")?;
        match self {
            Self::OrgLevel { .. } => {}
            Self::MerchantLevel { merchant_ids, .. } => {
                builder
                    .add_filter_in_range_clause("merchant_id", merchant_ids)
                    .attach_printable("Error adding merchant_id filter")?;
            }
            Self::ProfileLevel {
                merchant_id,
                profile_ids,
                ..
            } => {
                builder
                    .add_filter_clause("merchant_id", merchant_id)
                    .attach_printable("Error adding merchant_id filter")?;
                builder
                    .add_filter_in_range_clause("profile_id", profile_ids)
                    .attach_printable("Error adding profile_id filter")?;
            }
        }
        Ok(())
    }
}
| 6,913 | 1,760 |
hyperswitch | crates/analytics/src/lambda_utils.rs | .rs | use aws_config::{self, meta::region::RegionProviderChain, Region};
use aws_sdk_lambda::{types::InvocationType::Event, Client};
use aws_smithy_types::Blob;
use common_utils::errors::CustomResult;
use error_stack::{report, ResultExt};
use crate::errors::AnalyticsError;
/// Builds an AWS Lambda client for the given `region`, picking up the rest
/// of the configuration (credentials etc.) from the environment.
async fn get_aws_client(region: String) -> Client {
    let region_provider = RegionProviderChain::first_try(Region::new(region));
    let sdk_config = aws_config::from_env().region(region_provider).load().await;
    Client::new(&sdk_config)
}
pub async fn invoke_lambda(
function_name: &str,
region: &str,
json_bytes: &[u8],
) -> CustomResult<(), AnalyticsError> {
get_aws_client(region.to_string())
.await
.invoke()
.function_name(function_name)
.invocation_type(Event)
.payload(Blob::new(json_bytes.to_owned()))
.send()
.await
.map_err(|er| {
let er_rep = format!("{er:?}");
report!(er).attach_printable(er_rep)
})
.change_context(AnalyticsError::UnknownError)
.attach_printable("Lambda invocation failed")?;
Ok(())
}
| 267 | 1,761 |
hyperswitch | crates/analytics/src/frm.rs | .rs | pub mod accumulator;
mod core;
pub mod filters;
pub mod metrics;
pub mod types;
pub use accumulator::{FrmMetricAccumulator, FrmMetricsAccumulator};
pub use self::core::{get_filters, get_metrics};
| 48 | 1,762 |
hyperswitch | crates/analytics/src/metrics.rs | .rs | use router_env::{global_meter, histogram_metric_f64, histogram_metric_u64};
// Meter under which all analytics metrics below are registered.
global_meter!(GLOBAL_METER, "ROUTER_API");
// Time taken to fetch analytics data from the backing data source.
histogram_metric_f64!(METRIC_FETCH_TIME, GLOBAL_METER);
// Number of time-series buckets fetched per analytics request.
histogram_metric_u64!(BUCKETS_FETCHED, GLOBAL_METER);
pub mod request;
| 68 | 1,763 |
hyperswitch | crates/analytics/src/enums.rs | .rs | pub use common_utils::types::authentication::AuthInfo;
| 12 | 1,764 |
hyperswitch | crates/analytics/src/disputes.rs | .rs | pub mod accumulators;
mod core;
pub mod filters;
pub mod metrics;
pub mod types;
pub use accumulators::{DisputeMetricAccumulator, DisputeMetricsAccumulator};
pub trait DisputeAnalytics: metrics::DisputeMetricAnalytics {}
pub use self::core::{get_filters, get_metrics};
| 64 | 1,765 |
hyperswitch | crates/analytics/src/search.rs | .rs | use api_models::analytics::search::{
GetGlobalSearchRequest, GetSearchRequestWithIndex, GetSearchResponse, OpenMsearchOutput,
OpensearchOutput, SearchIndex, SearchStatus,
};
use common_utils::errors::{CustomResult, ReportSwitchExt};
use error_stack::ResultExt;
use router_env::tracing;
use serde_json::Value;
use crate::{
enums::AuthInfo,
opensearch::{OpenSearchClient, OpenSearchError, OpenSearchQuery, OpenSearchQueryBuilder},
};
/// Converts a homogeneous list of items into `serde_json::Value`s.
pub fn convert_to_value<T: Into<Value>>(items: Vec<T>) -> Vec<Value> {
    items.into_iter().map(Into::into).collect()
}
pub async fn msearch_results(
client: &OpenSearchClient,
req: GetGlobalSearchRequest,
search_params: Vec<AuthInfo>,
indexes: Vec<SearchIndex>,
) -> CustomResult<Vec<GetSearchResponse>, OpenSearchError> {
if req.query.trim().is_empty()
&& req
.filters
.as_ref()
.map_or(true, |filters| filters.is_all_none())
{
return Err(OpenSearchError::BadRequestError(
"Both query and filters are empty".to_string(),
)
.into());
}
let mut query_builder = OpenSearchQueryBuilder::new(
OpenSearchQuery::Msearch(indexes.clone()),
req.query,
search_params,
);
if let Some(filters) = req.filters {
if let Some(currency) = filters.currency {
if !currency.is_empty() {
query_builder
.add_filter_clause("currency.keyword".to_string(), convert_to_value(currency))
.switch()?;
}
};
if let Some(status) = filters.status {
if !status.is_empty() {
query_builder
.add_filter_clause("status.keyword".to_string(), convert_to_value(status))
.switch()?;
}
};
if let Some(payment_method) = filters.payment_method {
if !payment_method.is_empty() {
query_builder
.add_filter_clause(
"payment_method.keyword".to_string(),
convert_to_value(payment_method),
)
.switch()?;
}
};
if let Some(customer_email) = filters.customer_email {
if !customer_email.is_empty() {
query_builder
.add_filter_clause(
"customer_email.keyword".to_string(),
convert_to_value(
customer_email
.iter()
.filter_map(|email| {
// TODO: Add trait based inputs instead of converting this to strings
serde_json::to_value(email)
.ok()
.and_then(|a| a.as_str().map(|a| a.to_string()))
})
.collect(),
),
)
.switch()?;
}
};
if let Some(search_tags) = filters.search_tags {
if !search_tags.is_empty() {
query_builder
.add_filter_clause(
"feature_metadata.search_tags.keyword".to_string(),
convert_to_value(
search_tags
.iter()
.filter_map(|search_tag| {
// TODO: Add trait based inputs instead of converting this to strings
serde_json::to_value(search_tag)
.ok()
.and_then(|a| a.as_str().map(|a| a.to_string()))
})
.collect(),
),
)
.switch()?;
}
};
if let Some(connector) = filters.connector {
if !connector.is_empty() {
query_builder
.add_filter_clause("connector.keyword".to_string(), convert_to_value(connector))
.switch()?;
}
};
if let Some(payment_method_type) = filters.payment_method_type {
if !payment_method_type.is_empty() {
query_builder
.add_filter_clause(
"payment_method_type.keyword".to_string(),
convert_to_value(payment_method_type),
)
.switch()?;
}
};
if let Some(card_network) = filters.card_network {
if !card_network.is_empty() {
query_builder
.add_filter_clause(
"card_network.keyword".to_string(),
convert_to_value(card_network),
)
.switch()?;
}
};
if let Some(card_last_4) = filters.card_last_4 {
if !card_last_4.is_empty() {
query_builder
.add_filter_clause(
"card_last_4.keyword".to_string(),
convert_to_value(card_last_4),
)
.switch()?;
}
};
if let Some(payment_id) = filters.payment_id {
if !payment_id.is_empty() {
query_builder
.add_filter_clause(
"payment_id.keyword".to_string(),
convert_to_value(payment_id),
)
.switch()?;
}
};
if let Some(amount) = filters.amount {
if !amount.is_empty() {
query_builder
.add_filter_clause("amount".to_string(), convert_to_value(amount))
.switch()?;
}
};
if let Some(customer_id) = filters.customer_id {
if !customer_id.is_empty() {
query_builder
.add_filter_clause(
"customer_id.keyword".to_string(),
convert_to_value(customer_id),
)
.switch()?;
}
};
};
if let Some(time_range) = req.time_range {
query_builder.set_time_range(time_range.into()).switch()?;
};
let response_text: OpenMsearchOutput = client
.execute(query_builder)
.await
.change_context(OpenSearchError::ConnectionError)?
.text()
.await
.change_context(OpenSearchError::ResponseError)
.and_then(|body: String| {
serde_json::from_str::<OpenMsearchOutput>(&body)
.change_context(OpenSearchError::DeserialisationError)
.attach_printable(body.clone())
})?;
let response_body: OpenMsearchOutput = response_text;
Ok(response_body
.responses
.into_iter()
.zip(indexes)
.map(|(index_hit, index)| match index_hit {
OpensearchOutput::Success(success) => GetSearchResponse {
count: success.hits.total.value,
index,
hits: success
.hits
.hits
.into_iter()
.map(|hit| hit.source)
.collect(),
status: SearchStatus::Success,
},
OpensearchOutput::Error(error) => {
tracing::error!(
index = ?index,
error_response = ?error,
"Search error"
);
GetSearchResponse {
count: 0,
index,
hits: Vec::new(),
status: SearchStatus::Failure,
}
}
})
.collect())
}
pub async fn search_results(
client: &OpenSearchClient,
req: GetSearchRequestWithIndex,
search_params: Vec<AuthInfo>,
) -> CustomResult<GetSearchResponse, OpenSearchError> {
let search_req = req.search_req;
if search_req.query.trim().is_empty()
&& search_req
.filters
.as_ref()
.map_or(true, |filters| filters.is_all_none())
{
return Err(OpenSearchError::BadRequestError(
"Both query and filters are empty".to_string(),
)
.into());
}
let mut query_builder = OpenSearchQueryBuilder::new(
OpenSearchQuery::Search(req.index),
search_req.query,
search_params,
);
if let Some(filters) = search_req.filters {
if let Some(currency) = filters.currency {
if !currency.is_empty() {
query_builder
.add_filter_clause("currency.keyword".to_string(), convert_to_value(currency))
.switch()?;
}
};
if let Some(status) = filters.status {
if !status.is_empty() {
query_builder
.add_filter_clause("status.keyword".to_string(), convert_to_value(status))
.switch()?;
}
};
if let Some(payment_method) = filters.payment_method {
if !payment_method.is_empty() {
query_builder
.add_filter_clause(
"payment_method.keyword".to_string(),
convert_to_value(payment_method),
)
.switch()?;
}
};
if let Some(customer_email) = filters.customer_email {
if !customer_email.is_empty() {
query_builder
.add_filter_clause(
"customer_email.keyword".to_string(),
convert_to_value(
customer_email
.iter()
.filter_map(|email| {
// TODO: Add trait based inputs instead of converting this to strings
serde_json::to_value(email)
.ok()
.and_then(|a| a.as_str().map(|a| a.to_string()))
})
.collect(),
),
)
.switch()?;
}
};
if let Some(search_tags) = filters.search_tags {
if !search_tags.is_empty() {
query_builder
.add_filter_clause(
"feature_metadata.search_tags.keyword".to_string(),
convert_to_value(
search_tags
.iter()
.filter_map(|search_tag| {
// TODO: Add trait based inputs instead of converting this to strings
serde_json::to_value(search_tag)
.ok()
.and_then(|a| a.as_str().map(|a| a.to_string()))
})
.collect(),
),
)
.switch()?;
}
};
if let Some(connector) = filters.connector {
if !connector.is_empty() {
query_builder
.add_filter_clause("connector.keyword".to_string(), convert_to_value(connector))
.switch()?;
}
};
if let Some(payment_method_type) = filters.payment_method_type {
if !payment_method_type.is_empty() {
query_builder
.add_filter_clause(
"payment_method_type.keyword".to_string(),
convert_to_value(payment_method_type),
)
.switch()?;
}
};
if let Some(card_network) = filters.card_network {
if !card_network.is_empty() {
query_builder
.add_filter_clause(
"card_network.keyword".to_string(),
convert_to_value(card_network),
)
.switch()?;
}
};
if let Some(card_last_4) = filters.card_last_4 {
if !card_last_4.is_empty() {
query_builder
.add_filter_clause(
"card_last_4.keyword".to_string(),
convert_to_value(card_last_4),
)
.switch()?;
}
};
if let Some(payment_id) = filters.payment_id {
if !payment_id.is_empty() {
query_builder
.add_filter_clause(
"payment_id.keyword".to_string(),
convert_to_value(payment_id),
)
.switch()?;
}
};
if let Some(amount) = filters.amount {
if !amount.is_empty() {
query_builder
.add_filter_clause("amount".to_string(), convert_to_value(amount))
.switch()?;
}
};
if let Some(customer_id) = filters.customer_id {
if !customer_id.is_empty() {
query_builder
.add_filter_clause(
"customer_id.keyword".to_string(),
convert_to_value(customer_id),
)
.switch()?;
}
};
};
if let Some(time_range) = search_req.time_range {
query_builder.set_time_range(time_range.into()).switch()?;
};
query_builder
.set_offset_n_count(search_req.offset, search_req.count)
.switch()?;
let response_text: OpensearchOutput = client
.execute(query_builder)
.await
.change_context(OpenSearchError::ConnectionError)?
.text()
.await
.change_context(OpenSearchError::ResponseError)
.and_then(|body: String| {
serde_json::from_str::<OpensearchOutput>(&body)
.change_context(OpenSearchError::DeserialisationError)
.attach_printable(body.clone())
})?;
let response_body: OpensearchOutput = response_text;
match response_body {
OpensearchOutput::Success(success) => Ok(GetSearchResponse {
count: success.hits.total.value,
index: req.index,
hits: success
.hits
.hits
.into_iter()
.map(|hit| hit.source)
.collect(),
status: SearchStatus::Success,
}),
OpensearchOutput::Error(error) => {
tracing::error!(
index = ?req.index,
error_response = ?error,
"Search error"
);
Ok(GetSearchResponse {
count: 0,
index: req.index,
hits: Vec::new(),
status: SearchStatus::Failure,
})
}
}
}
| 2,717 | 1,766 |
hyperswitch | crates/analytics/src/clickhouse.rs | .rs | use std::sync::Arc;
use actix_web::http::StatusCode;
use common_utils::errors::ParsingError;
use error_stack::{report, Report, ResultExt};
use router_env::logger;
use time::PrimitiveDateTime;
use super::{
active_payments::metrics::ActivePaymentsMetricRow,
auth_events::metrics::AuthEventMetricRow,
frm::{filters::FrmFilterRow, metrics::FrmMetricRow},
health_check::HealthCheck,
payment_intents::{filters::PaymentIntentFilterRow, metrics::PaymentIntentMetricRow},
payments::{
distribution::PaymentDistributionRow, filters::PaymentFilterRow, metrics::PaymentMetricRow,
},
query::{Aggregate, ToSql, Window},
refunds::{
distribution::RefundDistributionRow, filters::RefundFilterRow, metrics::RefundMetricRow,
},
sdk_events::{filters::SdkEventFilter, metrics::SdkEventMetricRow},
types::{AnalyticsCollection, AnalyticsDataSource, LoadRow, QueryExecutionError},
};
use crate::{
api_event::{
events::ApiLogsResult,
filters::ApiEventFilter,
metrics::{latency::LatencyAvg, ApiEventMetricRow},
},
auth_events::filters::AuthEventFilterRow,
connector_events::events::ConnectorEventsResult,
disputes::{filters::DisputeFilterRow, metrics::DisputeMetricRow},
outgoing_webhook_event::events::OutgoingWebhookLogsResult,
sdk_events::events::SdkEventsResult,
types::TableEngine,
};
pub type ClickhouseResult<T> = error_stack::Result<T, ClickhouseError>;
/// Thin HTTP client for issuing analytics queries to a Clickhouse server.
#[derive(Clone, Debug)]
pub struct ClickhouseClient {
    // Connection settings (host, credentials); shared cheaply across clones.
    pub config: Arc<ClickhouseConfig>,
    // Database name sent with every query.
    pub database: String,
}
/// Connection configuration for [`ClickhouseClient`].
#[derive(Clone, Debug, serde::Deserialize)]
pub struct ClickhouseConfig {
    // Basic-auth user name.
    username: String,
    // Optional basic-auth password; `None` sends no password.
    password: Option<String>,
    // Full HTTP endpoint of the Clickhouse server.
    host: String,
}
impl Default for ClickhouseConfig {
    /// Defaults target a local, password-less Clickhouse instance.
    fn default() -> Self {
        Self {
            username: String::from("default"),
            password: None,
            host: String::from("http://localhost:8123"),
        }
    }
}
impl ClickhouseClient {
    /// Sends `query` to the configured Clickhouse host (requesting JSON
    /// output) and returns the parsed result rows.
    async fn execute_query(&self, query: &str) -> ClickhouseResult<Vec<serde_json::Value>> {
        logger::debug!("Executing query: {query}");
        let http_client = reqwest::Client::new();
        let params = CkhQuery {
            date_time_output_format: String::from("iso"),
            output_format_json_quote_64bit_integers: 0,
            database: self.database.clone(),
        };
        let response = http_client
            .post(&self.config.host)
            .query(&params)
            .basic_auth(self.config.username.clone(), self.config.password.clone())
            .body(format!("{query}\nFORMAT JSON"))
            .send()
            .await
            .change_context(ClickhouseError::ConnectionError)?;
        logger::debug!(clickhouse_response=?response, query=?query, "Clickhouse response");
        if response.status() == StatusCode::OK {
            let output = response
                .json::<CkhOutput<serde_json::Value>>()
                .await
                .change_context(ClickhouseError::ResponseError)?;
            Ok(output.data)
        } else {
            // Surface Clickhouse's error body when the query is rejected;
            // fall back to the transport error if the body cannot be read.
            match response.text().await {
                Ok(body) => Err(report!(ClickhouseError::ResponseNotOK(body))),
                Err(err) => Err(ClickhouseError::ResponseError)
                    .attach_printable_lazy(|| format!("Error: {err:?}")),
            }
        }
    }
}
#[async_trait::async_trait]
impl HealthCheck for ClickhouseClient {
    /// Verifies end-to-end connectivity by running a trivial `SELECT 1`.
    async fn deep_health_check(
        &self,
    ) -> common_utils::errors::CustomResult<(), QueryExecutionError> {
        let _rows = self
            .execute_query("SELECT 1")
            .await
            .change_context(QueryExecutionError::DatabaseError)?;
        Ok(())
    }
}
#[async_trait::async_trait]
impl AnalyticsDataSource for ClickhouseClient {
    type Row = serde_json::Value;

    /// Executes `query` and converts each returned JSON row into `T`.
    async fn load_results<T>(
        &self,
        query: &str,
    ) -> common_utils::errors::CustomResult<Vec<T>, QueryExecutionError>
    where
        Self: LoadRow<T>,
    {
        let rows = self
            .execute_query(query)
            .await
            .change_context(QueryExecutionError::DatabaseError)?;
        rows.into_iter()
            .map(Self::load_row)
            .collect::<Result<Vec<_>, _>>()
            .change_context(QueryExecutionError::RowExtractionFailure)
    }

    /// Collections stored in sign-collapsing tables get a
    /// `CollapsingMergeTree` engine descriptor; everything else is basic.
    fn get_table_engine(table: AnalyticsCollection) -> TableEngine {
        match table {
            AnalyticsCollection::Payment
            | AnalyticsCollection::PaymentSessionized
            | AnalyticsCollection::Refund
            | AnalyticsCollection::RefundSessionized
            | AnalyticsCollection::FraudCheck
            | AnalyticsCollection::PaymentIntent
            | AnalyticsCollection::PaymentIntentSessionized
            | AnalyticsCollection::Authentications
            | AnalyticsCollection::Dispute
            | AnalyticsCollection::DisputeSessionized => {
                TableEngine::CollapsingMergeTree { sign: "sign_flag" }
            }
            AnalyticsCollection::SdkEvents
            | AnalyticsCollection::SdkEventsAnalytics
            | AnalyticsCollection::ApiEvents
            | AnalyticsCollection::ConnectorEvents
            | AnalyticsCollection::ApiEventsAnalytics
            | AnalyticsCollection::OutgoingWebhookEvent
            | AnalyticsCollection::ActivePaymentsAnalytics => TableEngine::BasicTree,
        }
    }
}
/// Blanket row loader: any target type reachable via `TryInto` from a JSON
/// row can be materialized; conversion failures are normalized to
/// `RowExtractionFailure`.
impl<T, E> LoadRow<T> for ClickhouseClient
where
    Self::Row: TryInto<T, Error = Report<E>>,
{
    fn load_row(row: Self::Row) -> common_utils::errors::CustomResult<T, QueryExecutionError> {
        row.try_into()
            .map_err(|error| error.change_context(QueryExecutionError::RowExtractionFailure))
    }
}
// Marker-trait implementations wiring every analytics domain (payments,
// refunds, FRM, SDK/API events, auth events, webhooks, disputes) to the
// Clickhouse backend. Each trait is an empty capability marker.
impl super::payments::filters::PaymentFilterAnalytics for ClickhouseClient {}
impl super::payments::metrics::PaymentMetricAnalytics for ClickhouseClient {}
impl super::payments::distribution::PaymentDistributionAnalytics for ClickhouseClient {}
impl super::payment_intents::filters::PaymentIntentFilterAnalytics for ClickhouseClient {}
impl super::payment_intents::metrics::PaymentIntentMetricAnalytics for ClickhouseClient {}
impl super::refunds::metrics::RefundMetricAnalytics for ClickhouseClient {}
impl super::refunds::filters::RefundFilterAnalytics for ClickhouseClient {}
impl super::refunds::distribution::RefundDistributionAnalytics for ClickhouseClient {}
impl super::frm::metrics::FrmMetricAnalytics for ClickhouseClient {}
impl super::frm::filters::FrmFilterAnalytics for ClickhouseClient {}
impl super::sdk_events::filters::SdkEventFilterAnalytics for ClickhouseClient {}
impl super::sdk_events::metrics::SdkEventMetricAnalytics for ClickhouseClient {}
impl super::sdk_events::events::SdkEventsFilterAnalytics for ClickhouseClient {}
impl super::active_payments::metrics::ActivePaymentsMetricAnalytics for ClickhouseClient {}
impl super::auth_events::metrics::AuthEventMetricAnalytics for ClickhouseClient {}
impl super::auth_events::filters::AuthEventFilterAnalytics for ClickhouseClient {}
impl super::api_event::events::ApiLogsFilterAnalytics for ClickhouseClient {}
impl super::api_event::filters::ApiEventFilterAnalytics for ClickhouseClient {}
impl super::api_event::metrics::ApiEventMetricAnalytics for ClickhouseClient {}
impl super::connector_events::events::ConnectorEventLogAnalytics for ClickhouseClient {}
impl super::outgoing_webhook_event::events::OutgoingWebhookLogsFilterAnalytics
    for ClickhouseClient
{
}
impl super::disputes::filters::DisputeFilterAnalytics for ClickhouseClient {}
impl super::disputes::metrics::DisputeMetricAnalytics for ClickhouseClient {}
/// URL query parameters sent with every Clickhouse HTTP request.
#[derive(Debug, serde::Serialize)]
struct CkhQuery {
    // "iso" so timestamps come back in ISO-8601 form.
    date_time_output_format: String,
    // 0 disables quoting of 64-bit integers in the JSON output.
    output_format_json_quote_64bit_integers: u8,
    // Target database for the query.
    database: String,
}
/// Envelope of a Clickhouse `FORMAT JSON` response; only the rows are kept.
#[derive(Debug, serde::Deserialize)]
struct CkhOutput<T> {
    data: Vec<T>,
}
impl TryInto<ApiLogsResult> for serde_json::Value {
type Error = Report<ParsingError>;
fn try_into(self) -> Result<ApiLogsResult, Self::Error> {
serde_json::from_value(self).change_context(ParsingError::StructParseFailure(
"Failed to parse ApiLogsResult in clickhouse results",
))
}
}
impl TryInto<SdkEventsResult> for serde_json::Value {
type Error = Report<ParsingError>;
fn try_into(self) -> Result<SdkEventsResult, Self::Error> {
serde_json::from_value(self).change_context(ParsingError::StructParseFailure(
"Failed to parse SdkEventsResult in clickhouse results",
))
}
}
impl TryInto<ConnectorEventsResult> for serde_json::Value {
type Error = Report<ParsingError>;
fn try_into(self) -> Result<ConnectorEventsResult, Self::Error> {
serde_json::from_value(self).change_context(ParsingError::StructParseFailure(
"Failed to parse ConnectorEventsResult in clickhouse results",
))
}
}
impl TryInto<PaymentMetricRow> for serde_json::Value {
type Error = Report<ParsingError>;
fn try_into(self) -> Result<PaymentMetricRow, Self::Error> {
serde_json::from_value(self).change_context(ParsingError::StructParseFailure(
"Failed to parse PaymentMetricRow in clickhouse results",
))
}
}
impl TryInto<PaymentDistributionRow> for serde_json::Value {
type Error = Report<ParsingError>;
fn try_into(self) -> Result<PaymentDistributionRow, Self::Error> {
serde_json::from_value(self).change_context(ParsingError::StructParseFailure(
"Failed to parse PaymentDistributionRow in clickhouse results",
))
}
}
impl TryInto<PaymentFilterRow> for serde_json::Value {
type Error = Report<ParsingError>;
fn try_into(self) -> Result<PaymentFilterRow, Self::Error> {
serde_json::from_value(self).change_context(ParsingError::StructParseFailure(
"Failed to parse FilterRow in clickhouse results",
))
}
}
impl TryInto<PaymentIntentMetricRow> for serde_json::Value {
type Error = Report<ParsingError>;
fn try_into(self) -> Result<PaymentIntentMetricRow, Self::Error> {
serde_json::from_value(self).change_context(ParsingError::StructParseFailure(
"Failed to parse PaymentIntentMetricRow in clickhouse results",
))
}
}
impl TryInto<PaymentIntentFilterRow> for serde_json::Value {
type Error = Report<ParsingError>;
fn try_into(self) -> Result<PaymentIntentFilterRow, Self::Error> {
serde_json::from_value(self).change_context(ParsingError::StructParseFailure(
"Failed to parse PaymentIntentFilterRow in clickhouse results",
))
}
}
impl TryInto<RefundMetricRow> for serde_json::Value {
type Error = Report<ParsingError>;
fn try_into(self) -> Result<RefundMetricRow, Self::Error> {
serde_json::from_value(self).change_context(ParsingError::StructParseFailure(
"Failed to parse RefundMetricRow in clickhouse results",
))
}
}
impl TryInto<RefundFilterRow> for serde_json::Value {
type Error = Report<ParsingError>;
fn try_into(self) -> Result<RefundFilterRow, Self::Error> {
serde_json::from_value(self).change_context(ParsingError::StructParseFailure(
"Failed to parse RefundFilterRow in clickhouse results",
))
}
}
impl TryInto<RefundDistributionRow> for serde_json::Value {
type Error = Report<ParsingError>;
fn try_into(self) -> Result<RefundDistributionRow, Self::Error> {
serde_json::from_value(self).change_context(ParsingError::StructParseFailure(
"Failed to parse RefundDistributionRow in clickhouse results",
))
}
}
impl TryInto<FrmMetricRow> for serde_json::Value {
type Error = Report<ParsingError>;
fn try_into(self) -> Result<FrmMetricRow, Self::Error> {
serde_json::from_value(self).change_context(ParsingError::StructParseFailure(
"Failed to parse FrmMetricRow in clickhouse results",
))
}
}
impl TryInto<FrmFilterRow> for serde_json::Value {
type Error = Report<ParsingError>;
fn try_into(self) -> Result<FrmFilterRow, Self::Error> {
serde_json::from_value(self).change_context(ParsingError::StructParseFailure(
"Failed to parse FrmFilterRow in clickhouse results",
))
}
}
impl TryInto<DisputeMetricRow> for serde_json::Value {
type Error = Report<ParsingError>;
fn try_into(self) -> Result<DisputeMetricRow, Self::Error> {
serde_json::from_value(self).change_context(ParsingError::StructParseFailure(
"Failed to parse DisputeMetricRow in clickhouse results",
))
}
}
impl TryInto<DisputeFilterRow> for serde_json::Value {
type Error = Report<ParsingError>;
fn try_into(self) -> Result<DisputeFilterRow, Self::Error> {
serde_json::from_value(self).change_context(ParsingError::StructParseFailure(
"Failed to parse DisputeFilterRow in clickhouse results",
))
}
}
impl TryInto<ApiEventMetricRow> for serde_json::Value {
type Error = Report<ParsingError>;
fn try_into(self) -> Result<ApiEventMetricRow, Self::Error> {
serde_json::from_value(self).change_context(ParsingError::StructParseFailure(
"Failed to parse ApiEventMetricRow in clickhouse results",
))
}
}
impl TryInto<LatencyAvg> for serde_json::Value {
type Error = Report<ParsingError>;
fn try_into(self) -> Result<LatencyAvg, Self::Error> {
serde_json::from_value(self).change_context(ParsingError::StructParseFailure(
"Failed to parse LatencyAvg in clickhouse results",
))
}
}
/// Decodes a raw clickhouse JSON row into an [`SdkEventMetricRow`].
impl TryInto<SdkEventMetricRow> for serde_json::Value {
    type Error = Report<ParsingError>;

    fn try_into(self) -> Result<SdkEventMetricRow, Self::Error> {
        serde_json::from_value::<SdkEventMetricRow>(self).change_context(
            ParsingError::StructParseFailure("Failed to parse SdkEventMetricRow in clickhouse results"),
        )
    }
}
/// Decodes a raw clickhouse JSON row into an [`SdkEventFilter`].
impl TryInto<SdkEventFilter> for serde_json::Value {
    type Error = Report<ParsingError>;

    fn try_into(self) -> Result<SdkEventFilter, Self::Error> {
        serde_json::from_value::<SdkEventFilter>(self).change_context(
            ParsingError::StructParseFailure("Failed to parse SdkEventFilter in clickhouse results"),
        )
    }
}
/// Decodes a raw clickhouse JSON row into an [`AuthEventMetricRow`].
impl TryInto<AuthEventMetricRow> for serde_json::Value {
    type Error = Report<ParsingError>;

    fn try_into(self) -> Result<AuthEventMetricRow, Self::Error> {
        serde_json::from_value::<AuthEventMetricRow>(self).change_context(
            ParsingError::StructParseFailure("Failed to parse AuthEventMetricRow in clickhouse results"),
        )
    }
}
/// Decodes a raw clickhouse JSON row into an [`AuthEventFilterRow`].
impl TryInto<AuthEventFilterRow> for serde_json::Value {
    type Error = Report<ParsingError>;

    fn try_into(self) -> Result<AuthEventFilterRow, Self::Error> {
        serde_json::from_value::<AuthEventFilterRow>(self).change_context(
            ParsingError::StructParseFailure("Failed to parse AuthEventFilterRow in clickhouse results"),
        )
    }
}
/// Decodes a raw clickhouse JSON row into an [`ApiEventFilter`].
impl TryInto<ApiEventFilter> for serde_json::Value {
    type Error = Report<ParsingError>;

    fn try_into(self) -> Result<ApiEventFilter, Self::Error> {
        serde_json::from_value::<ApiEventFilter>(self).change_context(
            ParsingError::StructParseFailure("Failed to parse ApiEventFilter in clickhouse results"),
        )
    }
}
/// Decodes a raw clickhouse JSON row into an [`OutgoingWebhookLogsResult`].
impl TryInto<OutgoingWebhookLogsResult> for serde_json::Value {
    type Error = Report<ParsingError>;

    fn try_into(self) -> Result<OutgoingWebhookLogsResult, Self::Error> {
        serde_json::from_value::<OutgoingWebhookLogsResult>(self).change_context(
            ParsingError::StructParseFailure("Failed to parse OutgoingWebhookLogsResult in clickhouse results"),
        )
    }
}
/// Decodes a raw clickhouse JSON row into an [`ActivePaymentsMetricRow`].
impl TryInto<ActivePaymentsMetricRow> for serde_json::Value {
    type Error = Report<ParsingError>;

    fn try_into(self) -> Result<ActivePaymentsMetricRow, Self::Error> {
        serde_json::from_value::<ActivePaymentsMetricRow>(self).change_context(
            ParsingError::StructParseFailure("Failed to parse ActivePaymentsMetricRow in clickhouse results"),
        )
    }
}
impl ToSql<ClickhouseClient> for PrimitiveDateTime {
    /// Renders the timestamp as a unix-epoch-seconds literal (treated as UTC),
    /// which clickhouse accepts for DateTime comparisons.
    fn to_sql(&self, _table_engine: &TableEngine) -> error_stack::Result<String, ParsingError> {
        let epoch_seconds = self.assume_utc().unix_timestamp();
        Ok(epoch_seconds.to_string())
    }
}
impl ToSql<ClickhouseClient> for AnalyticsCollection {
    /// Resolves each logical analytics collection to the clickhouse table backing it.
    /// Plain event variants point at the raw `*_audit` tables, while the
    /// `*Analytics` variants point at the processed tables.
    fn to_sql(&self, _table_engine: &TableEngine) -> error_stack::Result<String, ParsingError> {
        let table_name = match self {
            Self::Payment => "payment_attempts",
            Self::PaymentSessionized => "sessionizer_payment_attempts",
            Self::Refund => "refunds",
            Self::RefundSessionized => "sessionizer_refunds",
            Self::FraudCheck => "fraud_check",
            Self::SdkEvents => "sdk_events_audit",
            Self::SdkEventsAnalytics => "sdk_events",
            Self::ApiEvents => "api_events_audit",
            Self::ApiEventsAnalytics => "api_events",
            Self::PaymentIntent => "payment_intents",
            Self::PaymentIntentSessionized => "sessionizer_payment_intents",
            Self::ConnectorEvents => "connector_events_audit",
            Self::OutgoingWebhookEvent => "outgoing_webhook_events_audit",
            Self::Dispute => "dispute",
            Self::DisputeSessionized => "sessionizer_dispute",
            Self::ActivePaymentsAnalytics => "active_payments",
            Self::Authentications => "authentications",
        };
        Ok(table_name.to_owned())
    }
}
impl<T> ToSql<ClickhouseClient> for Aggregate<T>
where
    T: ToSql<ClickhouseClient>,
{
    /// Renders this aggregate as a clickhouse SQL expression, with an optional
    /// `as <alias>` suffix.
    ///
    /// For `CollapsingMergeTree` tables, `count`/`sum` are rewritten in terms of
    /// the engine's sign column so cancelled (sign = -1) rows net out of the result.
    fn to_sql(&self, table_engine: &TableEngine) -> error_stack::Result<String, ParsingError> {
        Ok(match self {
            Self::Count { field: _, alias } => {
                // sum(sign) counts live rows on a collapsing table; count(*) otherwise.
                let query = match table_engine {
                    TableEngine::CollapsingMergeTree { sign } => format!("sum({sign})"),
                    TableEngine::BasicTree => "count(*)".to_string(),
                };
                format!(
                    "{query}{}",
                    alias.map_or_else(|| "".to_owned(), |alias| format!(" as {}", alias))
                )
            }
            Self::Sum { field, alias } => {
                let query = match table_engine {
                    TableEngine::CollapsingMergeTree { sign } => format!(
                        "sum({sign} * {})",
                        field
                            .to_sql(table_engine)
                            .attach_printable("Failed to sum aggregate")?
                    ),
                    TableEngine::BasicTree => format!(
                        "sum({})",
                        field
                            .to_sql(table_engine)
                            .attach_printable("Failed to sum aggregate")?
                    ),
                };
                format!(
                    "{query}{}",
                    alias.map_or_else(|| "".to_owned(), |alias| format!(" as {}", alias))
                )
            }
            Self::Min { field, alias } => {
                format!(
                    "min({}){}",
                    field
                        .to_sql(table_engine)
                        .attach_printable("Failed to min aggregate")?,
                    alias.map_or_else(|| "".to_owned(), |alias| format!(" as {}", alias))
                )
            }
            Self::Max { field, alias } => {
                format!(
                    "max({}){}",
                    field
                        .to_sql(table_engine)
                        .attach_printable("Failed to max aggregate")?,
                    alias.map_or_else(|| "".to_owned(), |alias| format!(" as {}", alias))
                )
            }
            Self::Percentile {
                field,
                alias,
                percentile,
            } => {
                // NOTE(review): the percentile is rendered as "0.<value>" (95 -> 0.95),
                // defaulting to the median when unspecified. quantilesExact returns an
                // array; "[1]" selects its single element.
                format!(
                    "quantilesExact(0.{})({})[1]{}",
                    percentile.map_or_else(|| "50".to_owned(), |percentile| percentile.to_string()),
                    field
                        .to_sql(table_engine)
                        .attach_printable("Failed to percentile aggregate")?,
                    alias.map_or_else(|| "".to_owned(), |alias| format!(" as {}", alias))
                )
            }
            Self::DistinctCount { field, alias } => {
                format!(
                    "count(distinct {}){}",
                    field
                        .to_sql(table_engine)
                        // Fixed copy-pasted context: this is the distinct-count branch,
                        // not the percentile branch.
                        .attach_printable("Failed to distinct count aggregate")?,
                    alias.map_or_else(|| "".to_owned(), |alias| format!(" as {}", alias))
                )
            }
        })
    }
}
impl<T> ToSql<ClickhouseClient> for Window<T>
where
    T: ToSql<ClickhouseClient>,
{
    /// Renders this window function as a clickhouse SQL expression of the form
    /// `<fn>(...) over (<partition><order>)[ as <alias>]`; each optional clause
    /// is omitted entirely when not configured.
    fn to_sql(&self, table_engine: &TableEngine) -> error_stack::Result<String, ParsingError> {
        Ok(match self {
            Self::Sum {
                field,
                partition_by,
                order_by,
                alias,
            } => {
                format!(
                    "sum({}) over ({}{}){}",
                    field
                        .to_sql(table_engine)
                        .attach_printable("Failed to sum window")?,
                    // first token inside "over (" — no leading space required
                    partition_by.as_ref().map_or_else(
                        || "".to_owned(),
                        |partition_by| format!("partition by {}", partition_by.to_owned())
                    ),
                    // leading space separates the order clause from the partition clause
                    order_by.as_ref().map_or_else(
                        || "".to_owned(),
                        |(order_column, order)| format!(
                            " order by {} {}",
                            order_column.to_owned(),
                            order
                        )
                    ),
                    alias.map_or_else(|| "".to_owned(), |alias| format!(" as {}", alias))
                )
            }
            Self::RowNumber {
                field: _, // row_number() takes no argument, so the field is ignored
                partition_by,
                order_by,
                alias,
            } => {
                format!(
                    "row_number() over ({}{}){}",
                    partition_by.as_ref().map_or_else(
                        || "".to_owned(),
                        |partition_by| format!("partition by {}", partition_by.to_owned())
                    ),
                    order_by.as_ref().map_or_else(
                        || "".to_owned(),
                        |(order_column, order)| format!(
                            " order by {} {}",
                            order_column.to_owned(),
                            order
                        )
                    ),
                    alias.map_or_else(|| "".to_owned(), |alias| format!(" as {}", alias))
                )
            }
        })
    }
}
/// Failure modes when talking to clickhouse over its HTTP interface.
#[derive(Debug, thiserror::Error)]
pub enum ClickhouseError {
    /// The HTTP request to clickhouse could not be completed.
    #[error("Clickhouse connection error")]
    ConnectionError,
    /// Clickhouse answered with a non-200 status; payload carries the response body.
    #[error("Clickhouse NON-200 response content: '{0}'")]
    ResponseNotOK(String),
    /// The response body could not be read/decoded.
    #[error("Clickhouse response error")]
    ResponseError,
}
| 5,006 | 1,767 |
hyperswitch | crates/analytics/src/health_check.rs | .rs | use common_utils::errors::CustomResult;
use crate::types::QueryExecutionError;
/// Deep health probe for an analytics backend (e.g. clickhouse, opensearch).
#[async_trait::async_trait]
pub trait HealthCheck {
    /// Actively queries the backing datastore; an `Err` means the store is
    /// unreachable or unhealthy.
    async fn deep_health_check(&self) -> CustomResult<(), QueryExecutionError>;
}
| 48 | 1,768 |
hyperswitch | crates/analytics/src/payments.rs | .rs | pub mod accumulator;
mod core;
pub mod distribution;
pub mod filters;
pub mod metrics;
pub mod types;
pub use accumulator::{
PaymentDistributionAccumulator, PaymentMetricAccumulator, PaymentMetricsAccumulator,
};
/// Umbrella trait for data sources that support both payment metric queries and
/// payment filter queries.
pub trait PaymentAnalytics:
    metrics::PaymentMetricAnalytics + filters::PaymentFilterAnalytics
{
}
pub use self::core::{get_filters, get_metrics};
| 79 | 1,769 |
hyperswitch | crates/analytics/src/utils.rs | .rs | use api_models::analytics::{
api_event::{ApiEventDimensions, ApiEventMetrics},
auth_events::{AuthEventDimensions, AuthEventMetrics},
disputes::{DisputeDimensions, DisputeMetrics},
frm::{FrmDimensions, FrmMetrics},
payment_intents::{PaymentIntentDimensions, PaymentIntentMetrics},
payments::{PaymentDimensions, PaymentMetrics},
refunds::{RefundDimensions, RefundMetrics},
sdk_events::{SdkEventDimensions, SdkEventMetrics},
NameDescription,
};
use strum::IntoEnumIterator;
/// Lists the dimensions exposed for payment analytics, in display order.
pub fn get_payment_dimensions() -> Vec<NameDescription> {
    [
        PaymentDimensions::Connector,
        PaymentDimensions::PaymentMethod,
        PaymentDimensions::PaymentMethodType,
        PaymentDimensions::Currency,
        PaymentDimensions::AuthType,
        PaymentDimensions::PaymentStatus,
        PaymentDimensions::ClientSource,
        PaymentDimensions::ClientVersion,
        PaymentDimensions::ProfileId,
        PaymentDimensions::CardNetwork,
        PaymentDimensions::MerchantId,
    ]
    .into_iter()
    .map(|dimension| dimension.into())
    .collect()
}
/// Lists the dimensions exposed for payment-intent analytics, in display order.
pub fn get_payment_intent_dimensions() -> Vec<NameDescription> {
    [
        PaymentIntentDimensions::PaymentIntentStatus,
        PaymentIntentDimensions::Currency,
        PaymentIntentDimensions::ProfileId,
        PaymentIntentDimensions::Connector,
        PaymentIntentDimensions::AuthType,
        PaymentIntentDimensions::PaymentMethod,
        PaymentIntentDimensions::PaymentMethodType,
        PaymentIntentDimensions::CardNetwork,
        PaymentIntentDimensions::MerchantId,
    ]
    .into_iter()
    .map(|dimension| dimension.into())
    .collect()
}
/// Lists the subset of auth-event dimensions exposed through this endpoint.
pub fn get_auth_event_dimensions() -> Vec<NameDescription> {
    [
        AuthEventDimensions::AuthenticationConnector,
        AuthEventDimensions::MessageVersion,
        AuthEventDimensions::AcsReferenceNumber,
    ]
    .into_iter()
    .map(|dimension| dimension.into())
    .collect()
}
pub fn get_refund_dimensions() -> Vec<NameDescription> {
RefundDimensions::iter().map(Into::into).collect()
}
pub fn get_frm_dimensions() -> Vec<NameDescription> {
FrmDimensions::iter().map(Into::into).collect()
}
pub fn get_sdk_event_dimensions() -> Vec<NameDescription> {
SdkEventDimensions::iter().map(Into::into).collect()
}
pub fn get_api_event_dimensions() -> Vec<NameDescription> {
ApiEventDimensions::iter().map(Into::into).collect()
}
pub fn get_payment_metrics_info() -> Vec<NameDescription> {
PaymentMetrics::iter().map(Into::into).collect()
}
pub fn get_payment_intent_metrics_info() -> Vec<NameDescription> {
PaymentIntentMetrics::iter().map(Into::into).collect()
}
pub fn get_refund_metrics_info() -> Vec<NameDescription> {
RefundMetrics::iter().map(Into::into).collect()
}
pub fn get_frm_metrics_info() -> Vec<NameDescription> {
FrmMetrics::iter().map(Into::into).collect()
}
pub fn get_sdk_event_metrics_info() -> Vec<NameDescription> {
SdkEventMetrics::iter().map(Into::into).collect()
}
pub fn get_auth_event_metrics_info() -> Vec<NameDescription> {
AuthEventMetrics::iter().map(Into::into).collect()
}
pub fn get_api_event_metrics_info() -> Vec<NameDescription> {
ApiEventMetrics::iter().map(Into::into).collect()
}
pub fn get_dispute_metrics_info() -> Vec<NameDescription> {
DisputeMetrics::iter().map(Into::into).collect()
}
pub fn get_dispute_dimensions() -> Vec<NameDescription> {
DisputeDimensions::iter().map(Into::into).collect()
}
| 788 | 1,770 |
hyperswitch | crates/analytics/src/connector_events.rs | .rs | mod core;
pub mod events;
/// Marker trait for data sources that can serve connector event logs.
pub trait ConnectorEventAnalytics: events::ConnectorEventLogAnalytics {}
pub use self::core::connector_events_core;
| 30 | 1,771 |
hyperswitch | crates/analytics/src/auth_events.rs | .rs | pub mod accumulator;
mod core;
pub mod filters;
pub mod metrics;
pub mod types;
pub use accumulator::{AuthEventMetricAccumulator, AuthEventMetricsAccumulator};
pub use self::core::{get_filters, get_metrics};
| 49 | 1,772 |
hyperswitch | crates/analytics/src/opensearch.rs | .rs | use std::collections::HashSet;
use api_models::{
analytics::search::SearchIndex,
errors::types::{ApiError, ApiErrorResponse},
};
use aws_config::{self, meta::region::RegionProviderChain, Region};
use common_utils::{
errors::{CustomResult, ErrorSwitch},
types::TimeRange,
};
use error_stack::ResultExt;
use opensearch::{
auth::Credentials,
cert::CertificateValidation,
cluster::{Cluster, ClusterHealthParts},
http::{
request::JsonBody,
response::Response,
transport::{SingleNodeConnectionPool, Transport, TransportBuilder},
Url,
},
MsearchParts, OpenSearch, SearchParts,
};
use serde_json::{json, Map, Value};
use storage_impl::errors::{ApplicationError, StorageError, StorageResult};
use time::PrimitiveDateTime;
use super::{health_check::HealthCheck, query::QueryResult, types::QueryExecutionError};
use crate::{enums::AuthInfo, query::QueryBuildingError};
/// Authentication mechanism for the opensearch cluster, selected by the
/// `auth` tag in configuration.
#[derive(Clone, Debug, serde::Deserialize)]
#[serde(tag = "auth")]
#[serde(rename_all = "lowercase")]
pub enum OpenSearchAuth {
    /// HTTP basic auth with a static username/password.
    Basic { username: String, password: String },
    /// AWS request signing for the given region; credentials come from the
    /// environment (see `OpenSearchClient::create`).
    Aws { region: String },
}
/// Configured opensearch index names, one per searchable dataset.
#[derive(Clone, Debug, serde::Deserialize)]
pub struct OpenSearchIndexes {
    pub payment_attempts: String,
    pub payment_intents: String,
    pub refunds: String,
    pub disputes: String,
    // "sessionizer" variants hold the sessionized copies of the same datasets
    pub sessionizer_payment_attempts: String,
    pub sessionizer_payment_intents: String,
    pub sessionizer_refunds: String,
    pub sessionizer_disputes: String,
}
/// Time window in the shape opensearch range queries expect (`gte`/`lte`,
/// ISO-8601 serialized); an absent `lte` leaves the range open-ended.
#[derive(Debug, Clone, Copy, serde::Serialize, serde::Deserialize, PartialEq, Eq, Hash)]
pub struct OpensearchTimeRange {
    #[serde(with = "common_utils::custom_serde::iso8601")]
    pub gte: PrimitiveDateTime,
    #[serde(default, with = "common_utils::custom_serde::iso8601::option")]
    pub lte: Option<PrimitiveDateTime>,
}
impl From<TimeRange> for OpensearchTimeRange {
    /// Maps an analytics [`TimeRange`] onto the opensearch `gte`/`lte` form.
    fn from(range: TimeRange) -> Self {
        Self {
            gte: range.start_time,
            lte: range.end_time,
        }
    }
}
/// Opensearch integration settings; the whole feature is gated on `enabled`
/// (off by default).
#[derive(Clone, Debug, serde::Deserialize)]
pub struct OpenSearchConfig {
    host: String,
    auth: OpenSearchAuth,
    indexes: OpenSearchIndexes,
    #[serde(default)]
    enabled: bool,
}
impl Default for OpenSearchConfig {
    /// Local-development defaults: basic auth `admin`/`admin` against localhost,
    /// the standard index names, and the integration disabled.
    fn default() -> Self {
        Self {
            host: String::from("https://localhost:9200"),
            auth: OpenSearchAuth::Basic {
                username: String::from("admin"),
                password: String::from("admin"),
            },
            indexes: OpenSearchIndexes {
                payment_attempts: String::from("hyperswitch-payment-attempt-events"),
                payment_intents: String::from("hyperswitch-payment-intent-events"),
                refunds: String::from("hyperswitch-refund-events"),
                disputes: String::from("hyperswitch-dispute-events"),
                sessionizer_payment_attempts: String::from("sessionizer-payment-attempt-events"),
                sessionizer_payment_intents: String::from("sessionizer-payment-intent-events"),
                sessionizer_refunds: String::from("sessionizer-refund-events"),
                sessionizer_disputes: String::from("sessionizer-dispute-events"),
            },
            enabled: false,
        }
    }
}
/// Failure modes of the opensearch integration, mapped to API error responses
/// by the `ErrorSwitch<ApiErrorResponse>` impl below.
#[derive(Debug, thiserror::Error)]
pub enum OpenSearchError {
    #[error("Opensearch is not enabled")]
    NotEnabled,
    #[error("Opensearch connection error")]
    ConnectionError,
    #[error("Opensearch NON-200 response content: '{0}'")]
    ResponseNotOK(String),
    #[error("Opensearch bad request error")]
    BadRequestError(String),
    #[error("Opensearch response error")]
    ResponseError,
    #[error("Opensearch query building error")]
    QueryBuildingError,
    #[error("Opensearch deserialisation error")]
    DeserialisationError,
    #[error("Opensearch index access not present error: {0:?}")]
    IndexAccessNotPermittedError(SearchIndex),
    #[error("Opensearch unknown error")]
    UnknownError,
    #[error("Opensearch access forbidden error")]
    AccessForbiddenError,
}
/// Any query-construction failure surfaces as a generic opensearch
/// query-building error.
impl ErrorSwitch<OpenSearchError> for QueryBuildingError {
    fn switch(&self) -> OpenSearchError {
        OpenSearchError::QueryBuildingError
    }
}
/// Maps internal opensearch errors to the "IR" family of API error responses.
///
/// NOTE(review): `BadRequestError` and `ResponseNotOK` both use sub-code 1 —
/// possibly intentional since they differ in HTTP status; confirm.
impl ErrorSwitch<ApiErrorResponse> for OpenSearchError {
    fn switch(&self) -> ApiErrorResponse {
        match self {
            Self::ConnectionError => ApiErrorResponse::InternalServerError(ApiError::new(
                "IR",
                0,
                "Connection error",
                None,
            )),
            Self::BadRequestError(response) => {
                ApiErrorResponse::BadRequest(ApiError::new("IR", 1, response.to_string(), None))
            }
            Self::ResponseNotOK(response) => ApiErrorResponse::InternalServerError(ApiError::new(
                "IR",
                1,
                format!("Something went wrong {}", response),
                None,
            )),
            Self::ResponseError => ApiErrorResponse::InternalServerError(ApiError::new(
                "IR",
                2,
                "Something went wrong",
                None,
            )),
            Self::QueryBuildingError => ApiErrorResponse::InternalServerError(ApiError::new(
                "IR",
                3,
                "Query building error",
                None,
            )),
            Self::DeserialisationError => ApiErrorResponse::InternalServerError(ApiError::new(
                "IR",
                4,
                "Deserialisation error",
                None,
            )),
            Self::IndexAccessNotPermittedError(index) => {
                ApiErrorResponse::ForbiddenCommonResource(ApiError::new(
                    "IR",
                    5,
                    format!("Index access not permitted: {index:?}"),
                    None,
                ))
            }
            Self::UnknownError => {
                ApiErrorResponse::InternalServerError(ApiError::new("IR", 6, "Unknown error", None))
            }
            Self::AccessForbiddenError => ApiErrorResponse::ForbiddenCommonResource(ApiError::new(
                "IR",
                7,
                "Access Forbidden error",
                None,
            )),
            Self::NotEnabled => ApiErrorResponse::InternalServerError(ApiError::new(
                "IR",
                8,
                "Opensearch is not enabled",
                None,
            )),
        }
    }
}
/// Connected opensearch handle plus the configured index names.
/// The transport is kept separately for cluster-level calls (health checks).
#[derive(Clone, Debug)]
pub struct OpenSearchClient {
    pub client: OpenSearch,
    pub transport: Transport,
    pub indexes: OpenSearchIndexes,
}
impl OpenSearchClient {
    /// Builds a client from configuration, wiring either basic auth or AWS
    /// request-signing auth (credentials resolved from the environment).
    ///
    /// All construction failures are collapsed into `ConnectionError`.
    pub async fn create(conf: &OpenSearchConfig) -> CustomResult<Self, OpenSearchError> {
        let url = Url::parse(&conf.host).map_err(|_| OpenSearchError::ConnectionError)?;
        let transport = match &conf.auth {
            OpenSearchAuth::Basic { username, password } => {
                let credentials = Credentials::Basic(username.clone(), password.clone());
                // NOTE(review): TLS certificate validation is disabled on the basic-auth
                // path — confirm this is intended outside local development.
                TransportBuilder::new(SingleNodeConnectionPool::new(url))
                    .cert_validation(CertificateValidation::None)
                    .auth(credentials)
                    .build()
                    .map_err(|_| OpenSearchError::ConnectionError)?
            }
            OpenSearchAuth::Aws { region } => {
                let region_provider = RegionProviderChain::first_try(Region::new(region.clone()));
                let sdk_config = aws_config::from_env().region(region_provider).load().await;
                let conn_pool = SingleNodeConnectionPool::new(url);
                TransportBuilder::new(conn_pool)
                    .auth(
                        sdk_config
                            .clone()
                            .try_into()
                            .map_err(|_| OpenSearchError::ConnectionError)?,
                    )
                    // AWS service name used when signing requests
                    .service_name("es")
                    .build()
                    .map_err(|_| OpenSearchError::ConnectionError)?
            }
        };
        Ok(Self {
            transport: transport.clone(),
            client: OpenSearch::new(transport),
            indexes: conf.indexes.clone(),
        })
    }
pub fn search_index_to_opensearch_index(&self, index: SearchIndex) -> String {
match index {
SearchIndex::PaymentAttempts => self.indexes.payment_attempts.clone(),
SearchIndex::PaymentIntents => self.indexes.payment_intents.clone(),
SearchIndex::Refunds => self.indexes.refunds.clone(),
SearchIndex::Disputes => self.indexes.disputes.clone(),
SearchIndex::SessionizerPaymentAttempts => {
self.indexes.sessionizer_payment_attempts.clone()
}
SearchIndex::SessionizerPaymentIntents => {
self.indexes.sessionizer_payment_intents.clone()
}
SearchIndex::SessionizerRefunds => self.indexes.sessionizer_refunds.clone(),
SearchIndex::SessionizerDisputes => self.indexes.sessionizer_disputes.clone(),
}
}
    /// Executes the built query: a multi-search (`_msearch`) across all requested
    /// indexes, or a single paginated `_search` against one index.
    pub async fn execute(
        &self,
        query_builder: OpenSearchQueryBuilder,
    ) -> CustomResult<Response, OpenSearchError> {
        match query_builder.query_type {
            OpenSearchQuery::Msearch(ref indexes) => {
                let payload = query_builder
                    .construct_payload(indexes)
                    .change_context(OpenSearchError::QueryBuildingError)?;
                // msearch bodies alternate a header line ({"index": ...}) with the
                // query body for that index
                let payload_with_indexes = payload.into_iter().zip(indexes).fold(
                    Vec::new(),
                    |mut payload_with_indexes, (index_hit, index)| {
                        payload_with_indexes.push(
                            json!({"index": self.search_index_to_opensearch_index(*index)}).into(),
                        );
                        payload_with_indexes.push(JsonBody::new(index_hit.clone()));
                        payload_with_indexes
                    },
                );
                self.client
                    .msearch(MsearchParts::None)
                    .body(payload_with_indexes)
                    .send()
                    .await
                    .change_context(OpenSearchError::ResponseError)
            }
            OpenSearchQuery::Search(index) => {
                let payload = query_builder
                    .clone()
                    .construct_payload(&[index])
                    .change_context(OpenSearchError::QueryBuildingError)?;
                let final_payload = payload.first().unwrap_or(&Value::Null);
                // pagination defaults: offset 0, page size 10
                self.client
                    .search(SearchParts::Index(&[
                        &self.search_index_to_opensearch_index(index)
                    ]))
                    .from(query_builder.offset.unwrap_or(0))
                    .size(query_builder.count.unwrap_or(10))
                    .body(final_payload)
                    .send()
                    .await
                    .change_context(OpenSearchError::ResponseError)
            }
        }
    }
}
#[async_trait::async_trait]
impl HealthCheck for OpenSearchClient {
    /// Queries the cluster health API; healthy unless the cluster reports `red`.
    async fn deep_health_check(&self) -> CustomResult<(), QueryExecutionError> {
        let health = Cluster::new(&self.transport)
            .health(ClusterHealthParts::None)
            .send()
            .await
            .change_context(QueryExecutionError::DatabaseError)?
            .json::<OpenSearchHealth>()
            .await
            .change_context(QueryExecutionError::DatabaseError)?;
        // yellow is tolerated (degraded but serving); only red counts as failure
        if health.status != OpenSearchHealthStatus::Red {
            Ok(())
        } else {
            Err::<(), error_stack::Report<QueryExecutionError>>(
                QueryExecutionError::DatabaseError.into(),
            )
            .attach_printable_lazy(|| format!("Opensearch cluster health is red: {health:?}"))
        }
    }
}
impl OpenSearchIndexes {
    /// Validates that every configured opensearch index name is set and
    /// non-empty, returning the first configuration error encountered.
    pub fn validate(&self) -> Result<(), ApplicationError> {
        use common_utils::ext_traits::ConfigExt;

        // Maps an "is default/empty" check to a descriptive configuration error;
        // replaces eight hand-rolled copies of the same `when(...)` construction.
        fn ensure_set(is_unset: bool, label: &str) -> Result<(), ApplicationError> {
            if is_unset {
                Err(ApplicationError::InvalidConfigurationValueError(
                    format!("Opensearch {label} index must not be empty").into(),
                ))
            } else {
                Ok(())
            }
        }

        ensure_set(self.payment_attempts.is_default_or_empty(), "Payment Attempts")?;
        ensure_set(self.payment_intents.is_default_or_empty(), "Payment Intents")?;
        ensure_set(self.refunds.is_default_or_empty(), "Refunds")?;
        ensure_set(self.disputes.is_default_or_empty(), "Disputes")?;
        ensure_set(
            self.sessionizer_payment_attempts.is_default_or_empty(),
            "Sessionizer Payment Attempts",
        )?;
        ensure_set(
            self.sessionizer_payment_intents.is_default_or_empty(),
            "Sessionizer Payment Intents",
        )?;
        ensure_set(
            self.sessionizer_refunds.is_default_or_empty(),
            "Sessionizer Refunds",
        )?;
        ensure_set(
            self.sessionizer_disputes.is_default_or_empty(),
            "Sessionizer Disputes",
        )?;
        Ok(())
    }
}
impl OpenSearchAuth {
    /// Validates that the selected auth mechanism has all of its required
    /// fields set to non-default, non-empty values.
    pub fn validate(&self) -> Result<(), ApplicationError> {
        use common_utils::{ext_traits::ConfigExt, fp_utils::when};
        match self {
            Self::Basic { username, password } => {
                when(username.is_default_or_empty(), || {
                    Err(ApplicationError::InvalidConfigurationValueError(
                        "Opensearch Basic auth username must not be empty".into(),
                    ))
                })?;
                when(password.is_default_or_empty(), || {
                    Err(ApplicationError::InvalidConfigurationValueError(
                        "Opensearch Basic auth password must not be empty".into(),
                    ))
                })?;
            }
            Self::Aws { region } => {
                when(region.is_default_or_empty(), || {
                    Err(ApplicationError::InvalidConfigurationValueError(
                        "Opensearch Aws auth region must not be empty".into(),
                    ))
                })?;
            }
        };
        Ok(())
    }
}
impl OpenSearchConfig {
    /// Returns a connected client, or `None` when the integration is disabled.
    pub async fn get_opensearch_client(&self) -> StorageResult<Option<OpenSearchClient>> {
        if !self.enabled {
            return Ok(None);
        }
        Ok(Some(
            OpenSearchClient::create(self)
                .await
                .change_context(StorageError::InitializationError)?,
        ))
    }
    /// Validates host, index, and auth configuration.
    /// Skipped entirely when the integration is disabled.
    pub fn validate(&self) -> Result<(), ApplicationError> {
        use common_utils::{ext_traits::ConfigExt, fp_utils::when};
        if !self.enabled {
            return Ok(());
        }
        when(self.host.is_default_or_empty(), || {
            Err(ApplicationError::InvalidConfigurationValueError(
                "Opensearch host must not be empty".into(),
            ))
        })?;
        self.indexes.validate()?;
        self.auth.validate()?;
        Ok(())
    }
}
/// Cluster status levels reported by opensearch's `_cluster/health` API.
#[derive(Debug, serde::Deserialize, PartialEq)]
#[serde(rename_all = "lowercase")]
pub enum OpenSearchHealthStatus {
    Red,
    Green,
    Yellow,
}
/// Minimal projection of the cluster-health response body.
#[derive(Debug, serde::Deserialize)]
pub struct OpenSearchHealth {
    pub status: OpenSearchHealthStatus,
}
/// Which opensearch endpoint a built query targets: a multi-search across
/// several indexes, or a single-index search.
#[derive(Debug, Clone)]
pub enum OpenSearchQuery {
    Msearch(Vec<SearchIndex>),
    Search(SearchIndex),
}
/// Accumulates the pieces of an opensearch query (free-text query, field
/// filters, time range, pagination, access-scope restrictions) before it is
/// rendered by `construct_payload`.
#[derive(Debug, Clone)]
pub struct OpenSearchQueryBuilder {
    pub query_type: OpenSearchQuery,
    pub query: String,
    pub offset: Option<i64>,
    pub count: Option<i64>,
    // (field name, accepted values) pairs added via add_filter_clause
    pub filters: Vec<(String, Vec<Value>)>,
    pub time_range: Option<OpensearchTimeRange>,
    // access scopes (org/merchant/profile) the caller is allowed to query
    search_params: Vec<AuthInfo>,
    // fields matched exactly; all other filters get case-insensitive matching
    case_sensitive_fields: HashSet<&'static str>,
}
impl OpenSearchQueryBuilder {
    /// Creates a builder with no pagination, filters, or time range set.
    ///
    /// `search_params` carries the caller's access scopes; every generated query
    /// is restricted to them (see `build_auth_array`).
    pub fn new(query_type: OpenSearchQuery, query: String, search_params: Vec<AuthInfo>) -> Self {
        Self {
            query_type,
            query,
            search_params,
            offset: Default::default(),
            count: Default::default(),
            filters: Default::default(),
            time_range: Default::default(),
            // filters on these fields match exactly; everything else is matched
            // case-insensitively (see construct_payload)
            case_sensitive_fields: HashSet::from([
                "customer_email.keyword",
                "search_tags.keyword",
                "card_last_4.keyword",
                "payment_id.keyword",
                "amount",
                "customer_id.keyword",
            ]),
        }
    }
    /// Sets pagination (`from`/`size`) for a single-index search.
    pub fn set_offset_n_count(&mut self, offset: i64, count: i64) -> QueryResult<()> {
        self.offset = Some(offset);
        self.count = Some(count);
        Ok(())
    }
    /// Restricts results to the given `@timestamp` range.
    pub fn set_time_range(&mut self, time_range: OpensearchTimeRange) -> QueryResult<()> {
        self.time_range = Some(time_range);
        Ok(())
    }
    /// Adds a field filter: `lhs` must match one of the values in `rhs`.
    pub fn add_filter_clause(&mut self, lhs: String, rhs: Vec<Value>) -> QueryResult<()> {
        self.filters.push((lhs, rhs));
        Ok(())
    }
pub fn get_status_field(&self, index: SearchIndex) -> &str {
match index {
SearchIndex::Refunds | SearchIndex::SessionizerRefunds => "refund_status.keyword",
SearchIndex::Disputes | SearchIndex::SessionizerDisputes => "dispute_status.keyword",
_ => "status.keyword",
}
}
pub fn get_amount_field(&self, index: SearchIndex) -> &str {
match index {
SearchIndex::Refunds | SearchIndex::SessionizerRefunds => "refund_amount",
SearchIndex::Disputes | SearchIndex::SessionizerDisputes => "dispute_amount",
_ => "amount",
}
}
    /// Builds the query's `filter` clauses: an optional phrase match on the
    /// free-text query, exact `terms` filters for the case-sensitive fields,
    /// and the `@timestamp` range bound when a time range is set.
    pub fn build_filter_array(
        &self,
        case_sensitive_filters: Vec<&(String, Vec<Value>)>,
        index: SearchIndex,
    ) -> Vec<Value> {
        let mut filter_array = Vec::new();
        if !self.query.is_empty() {
            filter_array.push(json!({
                "multi_match": {
                    "type": "phrase",
                    "query": self.query,
                    "lenient": true
                }
            }));
        }
        let case_sensitive_json_filters = case_sensitive_filters
            .into_iter()
            .map(|(k, v)| {
                // "amount" is generic; remap it to the index-specific amount column
                let key = if *k == "amount" {
                    self.get_amount_field(index).to_string()
                } else {
                    k.clone()
                };
                json!({"terms": {key: v}})
            })
            .collect::<Vec<Value>>();
        filter_array.extend(case_sensitive_json_filters);
        if let Some(ref time_range) = self.time_range {
            let range = json!(time_range);
            filter_array.push(json!({
                "range": {
                    "@timestamp": range
                }
            }));
        }
        filter_array
    }
    /// Adds case-insensitive `must` clauses for the remaining filters plus the
    /// caller's access-scope restrictions, mutating and returning `payload`.
    /// Each filter becomes a `should` list (any value may match); the auth
    /// scopes become one clause where at least one scope must match.
    pub fn build_case_insensitive_filters(
        &self,
        mut payload: Value,
        case_insensitive_filters: &[&(String, Vec<Value>)],
        auth_array: Vec<Value>,
        index: SearchIndex,
    ) -> Value {
        let mut must_array = case_insensitive_filters
            .iter()
            .map(|(k, v)| {
                // "status.keyword" is generic; remap to the index-specific status field
                let key = if *k == "status.keyword" {
                    self.get_status_field(index).to_string()
                } else {
                    k.clone()
                };
                json!({
                    "bool": {
                        "must": [
                            {
                                "bool": {
                                    "should": v.iter().map(|value| {
                                        json!({
                                            "term": {
                                                format!("{}", key): {
                                                    "value": value,
                                                    "case_insensitive": true
                                                }
                                            }
                                        })
                                    }).collect::<Vec<Value>>(),
                                    "minimum_should_match": 1
                                }
                            }
                        ]
                    }
                })
            })
            .collect::<Vec<Value>>();
        // access restriction: at least one org/merchant/profile scope must match
        must_array.push(json!({ "bool": {
            "must": [
                {
                    "bool": {
                        "should": auth_array,
                        "minimum_should_match": 1
                    }
                }
            ]
        }}));
        if let Some(query) = payload.get_mut("query") {
            if let Some(bool_obj) = query.get_mut("bool") {
                if let Some(bool_map) = bool_obj.as_object_mut() {
                    bool_map.insert("must".to_string(), Value::Array(must_array));
                }
            }
        }
        payload
    }
    /// Builds the access-scope clauses from `search_params`: one `bool`/`must`
    /// clause per scope, later combined by the caller so a hit must belong to
    /// at least one of the caller's org/merchant/profile scopes.
    pub fn build_auth_array(&self) -> Vec<Value> {
        self.search_params
            .iter()
            .map(|user_level| match user_level {
                // org scope: restrict on organization_id only
                AuthInfo::OrgLevel { org_id } => {
                    let must_clauses = vec![json!({
                        "term": {
                            "organization_id.keyword": {
                                "value": org_id
                            }
                        }
                    })];
                    json!({
                        "bool": {
                            "must": must_clauses
                        }
                    })
                }
                // merchant scope: the org plus an allow-list of merchant ids
                AuthInfo::MerchantLevel {
                    org_id,
                    merchant_ids,
                } => {
                    let must_clauses = vec![
                        json!({
                            "term": {
                                "organization_id.keyword": {
                                    "value": org_id
                                }
                            }
                        }),
                        json!({
                            "terms": {
                                "merchant_id.keyword": merchant_ids
                            }
                        }),
                    ];
                    json!({
                        "bool": {
                            "must": must_clauses
                        }
                    })
                }
                // profile scope: org + merchant plus an allow-list of profile ids
                AuthInfo::ProfileLevel {
                    org_id,
                    merchant_id,
                    profile_ids,
                } => {
                    let must_clauses = vec![
                        json!({
                            "term": {
                                "organization_id.keyword": {
                                    "value": org_id
                                }
                            }
                        }),
                        json!({
                            "term": {
                                "merchant_id.keyword": {
                                    "value": merchant_id
                                }
                            }
                        }),
                        json!({
                            "terms": {
                                "profile_id.keyword": profile_ids
                            }
                        }),
                    ];
                    json!({
                        "bool": {
                            "must": must_clauses
                        }
                    })
                }
            })
            .collect::<Vec<Value>>()
    }
    /// Renders one query body per requested index, combining the filters, the
    /// caller's auth scopes, and a descending `@timestamp` sort.
    ///
    /// # Panics
    ///
    /// This function will panic if:
    ///
    /// * The structure of the JSON query is not as expected (e.g., missing keys or incorrect types).
    ///
    /// Ensure that the input data and the structure of the query are valid and correctly handled.
    pub fn construct_payload(&self, indexes: &[SearchIndex]) -> QueryResult<Vec<Value>> {
        let mut query_obj = Map::new();
        let bool_obj = Map::new();
        // split filters: exact-match fields vs case-insensitive fields
        let (case_sensitive_filters, case_insensitive_filters): (Vec<_>, Vec<_>) = self
            .filters
            .iter()
            .partition(|(k, _)| self.case_sensitive_fields.contains(k.as_str()));
        let should_array = self.build_auth_array();
        query_obj.insert("bool".to_string(), Value::Object(bool_obj.clone()));
        let mut sort_obj = Map::new();
        sort_obj.insert(
            "@timestamp".to_string(),
            json!({
                "order": "desc"
            }),
        );
        Ok(indexes
            .iter()
            .map(|index| {
                let mut payload = json!({
                    "query": query_obj.clone(),
                    "sort": [
                        Value::Object(sort_obj.clone())
                    ]
                });
                let filter_array = self.build_filter_array(case_sensitive_filters.clone(), *index);
                if !filter_array.is_empty() {
                    payload
                        .get_mut("query")
                        .and_then(|query| query.get_mut("bool"))
                        .and_then(|bool_obj| bool_obj.as_object_mut())
                        .map(|bool_map| {
                            bool_map.insert("filter".to_string(), Value::Array(filter_array));
                        });
                }
                payload = self.build_case_insensitive_filters(
                    payload,
                    &case_insensitive_filters,
                    should_array.clone(),
                    *index,
                );
                payload
            })
            .collect::<Vec<Value>>())
    }
}
| 5,053 | 1,773 |
hyperswitch | crates/analytics/src/payment_intents.rs | .rs | pub mod accumulator;
mod core;
pub mod filters;
pub mod metrics;
pub mod sankey;
pub mod types;
pub use accumulator::{PaymentIntentMetricAccumulator, PaymentIntentMetricsAccumulator};
/// Umbrella trait for data sources that support both payment-intent metric
/// queries and payment-intent filter queries.
pub trait PaymentIntentAnalytics:
    metrics::PaymentIntentMetricAnalytics + filters::PaymentIntentFilterAnalytics
{
}
pub use self::core::{get_filters, get_metrics, get_sankey};
| 81 | 1,774 |
hyperswitch | crates/analytics/src/outgoing_webhook_event.rs | .rs | mod core;
pub mod events;
/// Marker trait for data sources that can serve outgoing-webhook event logs.
pub trait OutgoingWebhookEventAnalytics: events::OutgoingWebhookLogsFilterAnalytics {}
pub use self::core::outgoing_webhook_events_core;
| 39 | 1,775 |
hyperswitch | crates/analytics/src/refunds.rs | .rs | pub mod accumulator;
mod core;
pub mod distribution;
pub mod filters;
pub mod metrics;
pub mod types;
pub use accumulator::{RefundMetricAccumulator, RefundMetricsAccumulator};
pub use self::core::{get_filters, get_metrics};
| 53 | 1,776 |
hyperswitch | crates/analytics/src/types.rs | .rs | use std::{fmt::Display, str::FromStr};
use common_utils::{
errors::{CustomResult, ErrorSwitch, ParsingError},
events::{ApiEventMetric, ApiEventsType},
impl_api_event_type,
};
use error_stack::{report, Report, ResultExt};
use super::query::QueryBuildingError;
use crate::errors::AnalyticsError;
/// Top-level analytics domains exposed by the API; used to route metric and
/// filter requests to the matching provider.
#[derive(serde::Deserialize, Debug, serde::Serialize)]
#[serde(rename_all = "snake_case")]
pub enum AnalyticsDomain {
    Payments,
    Refunds,
    Frm,
    PaymentIntents,
    AuthEvents,
    SdkEvents,
    ApiEvents,
    Dispute,
}
/// Logical datasets backing analytics queries. Each variant is mapped to a
/// concrete table name per data source by that source's `ToSql` impl.
#[derive(Debug, strum::AsRefStr, strum::Display, Clone, Copy)]
pub enum AnalyticsCollection {
    Payment,
    PaymentSessionized,
    Refund,
    RefundSessionized,
    FraudCheck,
    // plain event variants resolve to raw audit tables; the "*Analytics"
    // variants resolve to the processed tables (see the clickhouse ToSql impl)
    SdkEvents,
    SdkEventsAnalytics,
    ApiEvents,
    PaymentIntent,
    PaymentIntentSessionized,
    ConnectorEvents,
    OutgoingWebhookEvent,
    Authentications,
    Dispute,
    DisputeSessionized,
    ApiEventsAnalytics,
    ActivePaymentsAnalytics,
}
/// Clickhouse table engine, which affects how aggregates must be rendered:
/// collapsing tables need sign-aware sums (see `Aggregate::to_sql`).
#[allow(dead_code)]
#[derive(Debug)]
pub enum TableEngine {
    /// CollapsingMergeTree with the named sign column.
    CollapsingMergeTree { sign: &'static str },
    /// Any engine without collapsing semantics.
    BasicTree,
}
/// Transparent serde wrapper that parses database string columns into the
/// enum `T` via its `FromStr` impl.
#[derive(Debug, serde::Serialize, serde::Deserialize, Eq, PartialEq, Hash)]
#[serde(transparent)]
pub struct DBEnumWrapper<T: FromStr + Display>(pub T);
impl<T: FromStr + Display> AsRef<T> for DBEnumWrapper<T> {
    fn as_ref(&self) -> &T {
        &self.0
    }
}
impl<T: FromStr + Display + Default> Default for DBEnumWrapper<T> {
    fn default() -> Self {
        Self(T::default())
    }
}
impl<T> FromStr for DBEnumWrapper<T>
where
    T: FromStr + Display,
{
    type Err = Report<ParsingError>;
    /// Parses the raw DB value; on failure the report carries the target enum's
    /// type name and the offending raw string for diagnostics.
    fn from_str(s: &str) -> Result<Self, Self::Err> {
        T::from_str(s)
            .map_err(|_er| report!(ParsingError::EnumParseFailure(std::any::type_name::<T>())))
            .map(DBEnumWrapper)
            .attach_printable_lazy(|| format!("raw_value: {s}"))
    }
}
/// A backend (e.g. clickhouse, sqlx) capable of executing analytics queries.
#[async_trait::async_trait]
pub trait AnalyticsDataSource
where
    Self: Sized + Sync + Send,
{
    /// Native row type returned by this backend.
    type Row;
    /// Runs `query` and parses every returned row into `T`.
    async fn load_results<T>(&self, query: &str) -> CustomResult<Vec<T>, QueryExecutionError>
    where
        Self: LoadRow<T>;
    /// Engine for the given table; defaults to a non-collapsing engine.
    fn get_table_engine(_table: AnalyticsCollection) -> TableEngine {
        TableEngine::BasicTree
    }
}
/// Conversion from a data source's native row type into a typed result `T`.
pub trait LoadRow<T>
where
    Self: AnalyticsDataSource,
    T: Sized,
{
    /// Parses a single native row into `T`.
    fn load_row(row: Self::Row) -> CustomResult<T, QueryExecutionError>;
}
/// Failure modes of a metrics request, covering the build / execute /
/// post-process pipeline stages.
#[derive(thiserror::Error, Debug)]
pub enum MetricsError {
    #[error("Error building query")]
    QueryBuildingError,
    #[error("Error running Query")]
    QueryExecutionFailure,
    #[error("Error processing query results")]
    PostProcessingFailure,
    #[allow(dead_code)]
    #[error("Not Implemented")]
    NotImplemented,
}
/// Low-level query execution failures reported by data sources.
#[derive(Debug, thiserror::Error)]
pub enum QueryExecutionError {
    /// Rows came back but could not be parsed into the domain type.
    #[error("Failed to extract domain rows")]
    RowExtractionFailure,
    /// The datastore itself failed or was unreachable.
    #[error("Database error")]
    DatabaseError,
}
/// Convenience alias for metric-query results.
pub type MetricsResult<T> = CustomResult<T, MetricsError>;
/// Query-construction failures surface as metric query-building errors.
impl ErrorSwitch<MetricsError> for QueryBuildingError {
    fn switch(&self) -> MetricsError {
        MetricsError::QueryBuildingError
    }
}
/// Convenience alias for filter-query results.
pub type FiltersResult<T> = CustomResult<T, FiltersError>;
/// Failure modes of a filter request.
#[derive(thiserror::Error, Debug)]
pub enum FiltersError {
    #[error("Error building query")]
    QueryBuildingError,
    #[error("Error running Query")]
    QueryExecutionFailure,
    #[allow(dead_code)]
    #[error("Not Implemented: {0}")]
    NotImplemented(&'static str),
}
/// Query-construction failures surface as filter query-building errors.
impl ErrorSwitch<FiltersError> for QueryBuildingError {
    fn switch(&self) -> FiltersError {
        FiltersError::QueryBuildingError
    }
}
/// Maps filter failures to top-level analytics errors; internal build/execute
/// failures are deliberately collapsed into an opaque unknown error.
impl ErrorSwitch<AnalyticsError> for FiltersError {
    fn switch(&self) -> AnalyticsError {
        match self {
            Self::QueryBuildingError | Self::QueryExecutionFailure => AnalyticsError::UnknownError,
            Self::NotImplemented(a) => AnalyticsError::NotImplemented(a),
        }
    }
}
impl_api_event_type!(Miscellaneous, (AnalyticsDomain));
| 968 | 1,777 |
hyperswitch | crates/analytics/src/lib.rs | .rs | pub mod active_payments;
pub mod api_event;
pub mod auth_events;
mod clickhouse;
pub mod connector_events;
pub mod core;
pub mod disputes;
pub mod enums;
pub mod errors;
pub mod frm;
pub mod health_check;
pub mod metrics;
pub mod opensearch;
pub mod outgoing_webhook_event;
pub mod payment_intents;
pub mod payments;
mod query;
pub mod refunds;
pub mod sdk_events;
pub mod search;
mod sqlx;
mod types;
use api_event::metrics::{ApiEventMetric, ApiEventMetricRow};
use common_utils::errors::CustomResult;
use disputes::metrics::{DisputeMetric, DisputeMetricRow};
use enums::AuthInfo;
use hyperswitch_interfaces::secrets_interface::{
secret_handler::SecretsHandler,
secret_state::{RawSecret, SecretStateContainer, SecuredSecret},
SecretManagementInterface, SecretsManagementError,
};
use refunds::distribution::{RefundDistribution, RefundDistributionRow};
pub use types::AnalyticsDomain;
pub mod lambda_utils;
pub mod utils;
use std::{collections::HashSet, sync::Arc};
use api_models::analytics::{
active_payments::{ActivePaymentsMetrics, ActivePaymentsMetricsBucketIdentifier},
api_event::{
ApiEventDimensions, ApiEventFilters, ApiEventMetrics, ApiEventMetricsBucketIdentifier,
},
auth_events::{
AuthEventDimensions, AuthEventFilters, AuthEventMetrics, AuthEventMetricsBucketIdentifier,
},
disputes::{DisputeDimensions, DisputeFilters, DisputeMetrics, DisputeMetricsBucketIdentifier},
frm::{FrmDimensions, FrmFilters, FrmMetrics, FrmMetricsBucketIdentifier},
payment_intents::{
PaymentIntentDimensions, PaymentIntentFilters, PaymentIntentMetrics,
PaymentIntentMetricsBucketIdentifier,
},
payments::{PaymentDimensions, PaymentFilters, PaymentMetrics, PaymentMetricsBucketIdentifier},
refunds::{RefundDimensions, RefundFilters, RefundMetrics, RefundMetricsBucketIdentifier},
sdk_events::{
SdkEventDimensions, SdkEventFilters, SdkEventMetrics, SdkEventMetricsBucketIdentifier,
},
Granularity, PaymentDistributionBody, RefundDistributionBody, TimeRange,
};
use clickhouse::ClickhouseClient;
pub use clickhouse::ClickhouseConfig;
use error_stack::report;
use router_env::{
logger,
tracing::{self, instrument},
types::FlowMetric,
};
use storage_impl::config::Database;
use strum::Display;
use self::{
active_payments::metrics::{ActivePaymentsMetric, ActivePaymentsMetricRow},
auth_events::metrics::{AuthEventMetric, AuthEventMetricRow},
frm::metrics::{FrmMetric, FrmMetricRow},
payment_intents::metrics::{PaymentIntentMetric, PaymentIntentMetricRow},
payments::{
distribution::{PaymentDistribution, PaymentDistributionRow},
metrics::{PaymentMetric, PaymentMetricRow},
},
refunds::metrics::{RefundMetric, RefundMetricRow},
sdk_events::metrics::{SdkEventMetric, SdkEventMetricRow},
sqlx::SqlxClient,
types::MetricsError,
};
/// Backing data source(s) used to serve analytics queries.
#[derive(Clone, Debug)]
pub enum AnalyticsProvider {
    /// Postgres (via sqlx) only.
    Sqlx(SqlxClient),
    /// Clickhouse only.
    Clickhouse(ClickhouseClient),
    /// Queries both stores, logs mismatches, and answers from Clickhouse.
    CombinedCkh(SqlxClient, ClickhouseClient),
    /// Queries both stores, logs mismatches, and answers from Postgres.
    CombinedSqlx(SqlxClient, ClickhouseClient),
}
impl Default for AnalyticsProvider {
    /// Defaults to a Postgres-only provider with a default sqlx client.
    fn default() -> Self {
        Self::Sqlx(SqlxClient::default())
    }
}
impl std::fmt::Display for AnalyticsProvider {
    /// Writes the provider variant's name, e.g. `"Sqlx"` or `"CombinedCkh"`,
    /// used as a label in operation-time metrics.
    fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
        f.write_str(match self {
            Self::Sqlx(_) => "Sqlx",
            Self::Clickhouse(_) => "Clickhouse",
            Self::CombinedCkh(_, _) => "CombinedCkh",
            Self::CombinedSqlx(_, _) => "CombinedSqlx",
        })
    }
}
impl AnalyticsProvider {
#[instrument(skip_all)]
pub async fn get_payment_metrics(
&self,
metric: &PaymentMetrics,
dimensions: &[PaymentDimensions],
auth: &AuthInfo,
filters: &PaymentFilters,
granularity: Option<Granularity>,
time_range: &TimeRange,
) -> types::MetricsResult<HashSet<(PaymentMetricsBucketIdentifier, PaymentMetricRow)>> {
// Metrics to get the fetch time for each payment metric
metrics::request::record_operation_time(
async {
match self {
Self::Sqlx(pool) => {
metric
.load_metrics(
dimensions,
auth,
filters,
granularity,
time_range,
pool,
)
.await
}
Self::Clickhouse(pool) => {
metric
.load_metrics(
dimensions,
auth,
filters,
granularity,
time_range,
pool,
)
.await
}
Self::CombinedCkh(sqlx_pool, ckh_pool) => {
let (ckh_result, sqlx_result) = tokio::join!(metric
.load_metrics(
dimensions,
auth,
filters,
granularity,
time_range,
ckh_pool,
),
metric
.load_metrics(
dimensions,
auth,
filters,
granularity,
time_range,
sqlx_pool,
));
match (&sqlx_result, &ckh_result) {
(Ok(ref sqlx_res), Ok(ref ckh_res)) if sqlx_res != ckh_res => {
router_env::logger::error!(clickhouse_result=?ckh_res, postgres_result=?sqlx_res, "Mismatch between clickhouse & postgres payments analytics metrics")
},
_ => {}
};
ckh_result
}
Self::CombinedSqlx(sqlx_pool, ckh_pool) => {
let (ckh_result, sqlx_result) = tokio::join!(metric
.load_metrics(
dimensions,
auth,
filters,
granularity,
time_range,
ckh_pool,
),
metric
.load_metrics(
dimensions,
auth,
filters,
granularity,
time_range,
sqlx_pool,
));
match (&sqlx_result, &ckh_result) {
(Ok(ref sqlx_res), Ok(ref ckh_res)) if sqlx_res != ckh_res => {
router_env::logger::error!(clickhouse_result=?ckh_res, postgres_result=?sqlx_res, "Mismatch between clickhouse & postgres payments analytics metrics")
},
_ => {}
};
sqlx_result
}
}
},
&metrics::METRIC_FETCH_TIME,
metric,
self,
)
.await
}
pub async fn get_payment_distribution(
&self,
distribution: &PaymentDistributionBody,
dimensions: &[PaymentDimensions],
auth: &AuthInfo,
filters: &PaymentFilters,
granularity: Option<Granularity>,
time_range: &TimeRange,
) -> types::MetricsResult<Vec<(PaymentMetricsBucketIdentifier, PaymentDistributionRow)>> {
// Metrics to get the fetch time for each payment metric
metrics::request::record_operation_time(
async {
match self {
Self::Sqlx(pool) => {
distribution.distribution_for
.load_distribution(
distribution,
dimensions,
auth,
filters,
granularity,
time_range,
pool,
)
.await
}
Self::Clickhouse(pool) => {
distribution.distribution_for
.load_distribution(
distribution,
dimensions,
auth,
filters,
granularity,
time_range,
pool,
)
.await
}
Self::CombinedCkh(sqlx_pool, ckh_pool) => {
let (ckh_result, sqlx_result) = tokio::join!(distribution.distribution_for
.load_distribution(
distribution,
dimensions,
auth,
filters,
granularity,
time_range,
ckh_pool,
),
distribution.distribution_for
.load_distribution(
distribution,
dimensions,
auth,
filters,
granularity,
time_range,
sqlx_pool,
));
match (&sqlx_result, &ckh_result) {
(Ok(ref sqlx_res), Ok(ref ckh_res)) if sqlx_res != ckh_res => {
router_env::logger::error!(clickhouse_result=?ckh_res, postgres_result=?sqlx_res, "Mismatch between clickhouse & postgres payments analytics distribution")
},
_ => {}
};
ckh_result
}
Self::CombinedSqlx(sqlx_pool, ckh_pool) => {
let (ckh_result, sqlx_result) = tokio::join!(distribution.distribution_for
.load_distribution(
distribution,
dimensions,
auth,
filters,
granularity,
time_range,
ckh_pool,
),
distribution.distribution_for
.load_distribution(
distribution,
dimensions,
auth,
filters,
granularity,
time_range,
sqlx_pool,
));
match (&sqlx_result, &ckh_result) {
(Ok(ref sqlx_res), Ok(ref ckh_res)) if sqlx_res != ckh_res => {
router_env::logger::error!(clickhouse_result=?ckh_res, postgres_result=?sqlx_res, "Mismatch between clickhouse & postgres payments analytics distribution")
},
_ => {}
};
sqlx_result
}
}
},
&metrics::METRIC_FETCH_TIME,
&distribution.distribution_for,
self,
)
.await
}
pub async fn get_payment_intent_metrics(
&self,
metric: &PaymentIntentMetrics,
dimensions: &[PaymentIntentDimensions],
auth: &AuthInfo,
filters: &PaymentIntentFilters,
granularity: Option<Granularity>,
time_range: &TimeRange,
) -> types::MetricsResult<HashSet<(PaymentIntentMetricsBucketIdentifier, PaymentIntentMetricRow)>>
{
// Metrics to get the fetch time for each payment intent metric
metrics::request::record_operation_time(
async {
match self {
Self::Sqlx(pool) => {
metric
.load_metrics(
dimensions,
auth,
filters,
granularity,
time_range,
pool,
)
.await
}
Self::Clickhouse(pool) => {
metric
.load_metrics(
dimensions,
auth,
filters,
granularity,
time_range,
pool,
)
.await
}
Self::CombinedCkh(sqlx_pool, ckh_pool) => {
let (ckh_result, sqlx_result) = tokio::join!(metric
.load_metrics(
dimensions,
auth,
filters,
granularity,
time_range,
ckh_pool,
),
metric
.load_metrics(
dimensions,
auth,
filters,
granularity,
time_range,
sqlx_pool,
));
match (&sqlx_result, &ckh_result) {
(Ok(ref sqlx_res), Ok(ref ckh_res)) if sqlx_res != ckh_res => {
router_env::logger::error!(clickhouse_result=?ckh_res, postgres_result=?sqlx_res, "Mismatch between clickhouse & postgres payment intents analytics metrics")
},
_ => {}
};
ckh_result
}
Self::CombinedSqlx(sqlx_pool, ckh_pool) => {
let (ckh_result, sqlx_result) = tokio::join!(metric
.load_metrics(
dimensions,
auth,
filters,
granularity,
time_range,
ckh_pool,
),
metric
.load_metrics(
dimensions,
auth,
filters,
granularity,
time_range,
sqlx_pool,
));
match (&sqlx_result, &ckh_result) {
(Ok(ref sqlx_res), Ok(ref ckh_res)) if sqlx_res != ckh_res => {
router_env::logger::error!(clickhouse_result=?ckh_res, postgres_result=?sqlx_res, "Mismatch between clickhouse & postgres payment intents analytics metrics")
},
_ => {}
};
sqlx_result
}
}
},
&metrics::METRIC_FETCH_TIME,
metric,
self,
)
.await
}
pub async fn get_refund_metrics(
&self,
metric: &RefundMetrics,
dimensions: &[RefundDimensions],
auth: &AuthInfo,
filters: &RefundFilters,
granularity: Option<Granularity>,
time_range: &TimeRange,
) -> types::MetricsResult<HashSet<(RefundMetricsBucketIdentifier, RefundMetricRow)>> {
// Metrics to get the fetch time for each refund metric
metrics::request::record_operation_time(
async {
match self {
Self::Sqlx(pool) => {
metric
.load_metrics(
dimensions,
auth,
filters,
granularity,
time_range,
pool,
)
.await
}
Self::Clickhouse(pool) => {
metric
.load_metrics(
dimensions,
auth,
filters,
granularity,
time_range,
pool,
)
.await
}
Self::CombinedCkh(sqlx_pool, ckh_pool) => {
let (ckh_result, sqlx_result) = tokio::join!(
metric.load_metrics(
dimensions,
auth,
filters,
granularity,
time_range,
ckh_pool,
),
metric.load_metrics(
dimensions,
auth,
filters,
granularity,
time_range,
sqlx_pool,
)
);
match (&sqlx_result, &ckh_result) {
(Ok(ref sqlx_res), Ok(ref ckh_res)) if sqlx_res != ckh_res => {
logger::error!(clickhouse_result=?ckh_res, postgres_result=?sqlx_res, "Mismatch between clickhouse & postgres refunds analytics metrics")
}
_ => {}
};
ckh_result
}
Self::CombinedSqlx(sqlx_pool, ckh_pool) => {
let (ckh_result, sqlx_result) = tokio::join!(
metric.load_metrics(
dimensions,
auth,
filters,
granularity,
time_range,
ckh_pool,
),
metric.load_metrics(
dimensions,
auth,
filters,
granularity,
time_range,
sqlx_pool,
)
);
match (&sqlx_result, &ckh_result) {
(Ok(ref sqlx_res), Ok(ref ckh_res)) if sqlx_res != ckh_res => {
logger::error!(clickhouse_result=?ckh_res, postgres_result=?sqlx_res, "Mismatch between clickhouse & postgres refunds analytics metrics")
}
_ => {}
};
sqlx_result
}
}
},
&metrics::METRIC_FETCH_TIME,
metric,
self,
)
.await
}
pub async fn get_refund_distribution(
&self,
distribution: &RefundDistributionBody,
dimensions: &[RefundDimensions],
auth: &AuthInfo,
filters: &RefundFilters,
granularity: &Option<Granularity>,
time_range: &TimeRange,
) -> types::MetricsResult<Vec<(RefundMetricsBucketIdentifier, RefundDistributionRow)>> {
// Metrics to get the fetch time for each payment metric
metrics::request::record_operation_time(
async {
match self {
Self::Sqlx(pool) => {
distribution.distribution_for
.load_distribution(
distribution,
dimensions,
auth,
filters,
granularity,
time_range,
pool,
)
.await
}
Self::Clickhouse(pool) => {
distribution.distribution_for
.load_distribution(
distribution,
dimensions,
auth,
filters,
granularity,
time_range,
pool,
)
.await
}
Self::CombinedCkh(sqlx_pool, ckh_pool) => {
let (ckh_result, sqlx_result) = tokio::join!(distribution.distribution_for
.load_distribution(
distribution,
dimensions,
auth,
filters,
granularity,
time_range,
ckh_pool,
),
distribution.distribution_for
.load_distribution(
distribution,
dimensions,
auth,
filters,
granularity,
time_range,
sqlx_pool,
));
match (&sqlx_result, &ckh_result) {
(Ok(ref sqlx_res), Ok(ref ckh_res)) if sqlx_res != ckh_res => {
router_env::logger::error!(clickhouse_result=?ckh_res, postgres_result=?sqlx_res, "Mismatch between clickhouse & postgres payments analytics distribution")
},
_ => {}
};
ckh_result
}
Self::CombinedSqlx(sqlx_pool, ckh_pool) => {
let (ckh_result, sqlx_result) = tokio::join!(distribution.distribution_for
.load_distribution(
distribution,
dimensions,
auth,
filters,
granularity,
time_range,
ckh_pool,
),
distribution.distribution_for
.load_distribution(
distribution,
dimensions,
auth,
filters,
granularity,
time_range,
sqlx_pool,
));
match (&sqlx_result, &ckh_result) {
(Ok(ref sqlx_res), Ok(ref ckh_res)) if sqlx_res != ckh_res => {
router_env::logger::error!(clickhouse_result=?ckh_res, postgres_result=?sqlx_res, "Mismatch between clickhouse & postgres payments analytics distribution")
},
_ => {}
};
sqlx_result
}
}
},
&metrics::METRIC_FETCH_TIME,
&distribution.distribution_for,
self,
)
.await
}
pub async fn get_frm_metrics(
&self,
metric: &FrmMetrics,
dimensions: &[FrmDimensions],
merchant_id: &common_utils::id_type::MerchantId,
filters: &FrmFilters,
granularity: Option<Granularity>,
time_range: &TimeRange,
) -> types::MetricsResult<Vec<(FrmMetricsBucketIdentifier, FrmMetricRow)>> {
// Metrics to get the fetch time for each refund metric
metrics::request::record_operation_time(
async {
match self {
Self::Sqlx(pool) => {
metric
.load_metrics(
dimensions,
merchant_id,
filters,
granularity,
time_range,
pool,
)
.await
}
Self::Clickhouse(pool) => {
metric
.load_metrics(
dimensions,
merchant_id,
filters,
granularity,
time_range,
pool,
)
.await
}
Self::CombinedCkh(sqlx_pool, ckh_pool) => {
let (ckh_result, sqlx_result) = tokio::join!(
metric.load_metrics(
dimensions,
merchant_id,
filters,
granularity,
time_range,
ckh_pool,
),
metric.load_metrics(
dimensions,
merchant_id,
filters,
granularity,
time_range,
sqlx_pool,
)
);
match (&sqlx_result, &ckh_result) {
(Ok(ref sqlx_res), Ok(ref ckh_res)) if sqlx_res != ckh_res => {
logger::error!(clickhouse_result=?ckh_res, postgres_result=?sqlx_res, "Mismatch between clickhouse & postgres frm analytics metrics")
}
_ => {}
};
ckh_result
}
Self::CombinedSqlx(sqlx_pool, ckh_pool) => {
let (ckh_result, sqlx_result) = tokio::join!(
metric.load_metrics(
dimensions,
merchant_id,
filters,
granularity,
time_range,
ckh_pool,
),
metric.load_metrics(
dimensions,
merchant_id,
filters,
granularity,
time_range,
sqlx_pool,
)
);
match (&sqlx_result, &ckh_result) {
(Ok(ref sqlx_res), Ok(ref ckh_res)) if sqlx_res != ckh_res => {
logger::error!(clickhouse_result=?ckh_res, postgres_result=?sqlx_res, "Mismatch between clickhouse & postgres frm analytics metrics")
}
_ => {}
};
sqlx_result
}
}
},
&metrics::METRIC_FETCH_TIME,
metric,
self,
)
.await
}
pub async fn get_dispute_metrics(
&self,
metric: &DisputeMetrics,
dimensions: &[DisputeDimensions],
auth: &AuthInfo,
filters: &DisputeFilters,
granularity: Option<Granularity>,
time_range: &TimeRange,
) -> types::MetricsResult<HashSet<(DisputeMetricsBucketIdentifier, DisputeMetricRow)>> {
// Metrics to get the fetch time for each refund metric
metrics::request::record_operation_time(
async {
match self {
Self::Sqlx(pool) => {
metric
.load_metrics(
dimensions,
auth,
filters,
granularity,
time_range,
pool,
)
.await
}
Self::Clickhouse(pool) => {
metric
.load_metrics(
dimensions,
auth,
filters,
granularity,
time_range,
pool,
)
.await
}
Self::CombinedCkh(sqlx_pool, ckh_pool) => {
let (ckh_result, sqlx_result) = tokio::join!(
metric.load_metrics(
dimensions,
auth,
filters,
granularity,
time_range,
ckh_pool,
),
metric.load_metrics(
dimensions,
auth,
filters,
granularity,
time_range,
sqlx_pool,
)
);
match (&sqlx_result, &ckh_result) {
(Ok(ref sqlx_res), Ok(ref ckh_res)) if sqlx_res != ckh_res => {
logger::error!(clickhouse_result=?ckh_res, postgres_result=?sqlx_res, "Mismatch between clickhouse & postgres disputes analytics metrics")
}
_ => {}
};
ckh_result
}
Self::CombinedSqlx(sqlx_pool, ckh_pool) => {
let (ckh_result, sqlx_result) = tokio::join!(
metric.load_metrics(
dimensions,
auth,
filters,
granularity,
time_range,
ckh_pool,
),
metric.load_metrics(
dimensions,
auth,
filters,
granularity,
time_range,
sqlx_pool,
)
);
match (&sqlx_result, &ckh_result) {
(Ok(ref sqlx_res), Ok(ref ckh_res)) if sqlx_res != ckh_res => {
logger::error!(clickhouse_result=?ckh_res, postgres_result=?sqlx_res, "Mismatch between clickhouse & postgres disputes analytics metrics")
}
_ => {}
};
sqlx_result
}
}
},
&metrics::METRIC_FETCH_TIME,
metric,
self,
)
.await
}
    /// Loads SDK event metrics. These events live only in Clickhouse, so a
    /// sqlx-only provider returns `NotImplemented` and the combined variants
    /// always answer from the Clickhouse pool.
    pub async fn get_sdk_event_metrics(
        &self,
        metric: &SdkEventMetrics,
        dimensions: &[SdkEventDimensions],
        publishable_key: &str,
        filters: &SdkEventFilters,
        granularity: Option<Granularity>,
        time_range: &TimeRange,
    ) -> types::MetricsResult<HashSet<(SdkEventMetricsBucketIdentifier, SdkEventMetricRow)>> {
        match self {
            Self::Sqlx(_pool) => Err(report!(MetricsError::NotImplemented)),
            Self::Clickhouse(pool) => {
                metric
                    .load_metrics(
                        dimensions,
                        publishable_key,
                        filters,
                        granularity,
                        time_range,
                        pool,
                    )
                    .await
            }
            Self::CombinedCkh(_sqlx_pool, ckh_pool) | Self::CombinedSqlx(_sqlx_pool, ckh_pool) => {
                metric
                    .load_metrics(
                        dimensions,
                        publishable_key,
                        filters,
                        granularity,
                        // Since SDK events are ckh only use ckh here
                        time_range,
                        ckh_pool,
                    )
                    .await
            }
        }
    }
    /// Loads active-payment metrics. Clickhouse-only: a sqlx-only provider
    /// returns `NotImplemented`; combined variants use the Clickhouse pool.
    pub async fn get_active_payments_metrics(
        &self,
        metric: &ActivePaymentsMetrics,
        merchant_id: &common_utils::id_type::MerchantId,
        publishable_key: &str,
        time_range: &TimeRange,
    ) -> types::MetricsResult<
        HashSet<(
            ActivePaymentsMetricsBucketIdentifier,
            ActivePaymentsMetricRow,
        )>,
    > {
        match self {
            Self::Sqlx(_pool) => Err(report!(MetricsError::NotImplemented)),
            Self::Clickhouse(pool) => {
                metric
                    .load_metrics(merchant_id, publishable_key, time_range, pool)
                    .await
            }
            Self::CombinedCkh(_sqlx_pool, ckh_pool) | Self::CombinedSqlx(_sqlx_pool, ckh_pool) => {
                metric
                    .load_metrics(merchant_id, publishable_key, time_range, ckh_pool)
                    .await
            }
        }
    }
    /// Loads authentication-event metrics. Clickhouse-only: a sqlx-only
    /// provider returns `NotImplemented`; combined variants use Clickhouse.
    pub async fn get_auth_event_metrics(
        &self,
        metric: &AuthEventMetrics,
        dimensions: &[AuthEventDimensions],
        merchant_id: &common_utils::id_type::MerchantId,
        filters: &AuthEventFilters,
        granularity: Option<Granularity>,
        time_range: &TimeRange,
    ) -> types::MetricsResult<HashSet<(AuthEventMetricsBucketIdentifier, AuthEventMetricRow)>> {
        match self {
            Self::Sqlx(_pool) => Err(report!(MetricsError::NotImplemented)),
            Self::Clickhouse(pool) => {
                metric
                    .load_metrics(
                        merchant_id,
                        dimensions,
                        filters,
                        granularity,
                        time_range,
                        pool,
                    )
                    .await
            }
            Self::CombinedCkh(_sqlx_pool, ckh_pool) | Self::CombinedSqlx(_sqlx_pool, ckh_pool) => {
                metric
                    .load_metrics(
                        merchant_id,
                        dimensions,
                        filters,
                        granularity,
                        // Since API events are ckh only use ckh here
                        time_range,
                        ckh_pool,
                    )
                    .await
            }
        }
    }
    /// Loads API-event metrics. Clickhouse-only: a sqlx-only provider
    /// returns `NotImplemented`; all other variants use the Clickhouse pool.
    pub async fn get_api_event_metrics(
        &self,
        metric: &ApiEventMetrics,
        dimensions: &[ApiEventDimensions],
        merchant_id: &common_utils::id_type::MerchantId,
        filters: &ApiEventFilters,
        granularity: Option<Granularity>,
        time_range: &TimeRange,
    ) -> types::MetricsResult<HashSet<(ApiEventMetricsBucketIdentifier, ApiEventMetricRow)>> {
        match self {
            Self::Sqlx(_pool) => Err(report!(MetricsError::NotImplemented)),
            Self::Clickhouse(ckh_pool)
            | Self::CombinedCkh(_, ckh_pool)
            | Self::CombinedSqlx(_, ckh_pool) => {
                // Since API events are ckh only use ckh here
                metric
                    .load_metrics(
                        dimensions,
                        merchant_id,
                        filters,
                        granularity,
                        time_range,
                        ckh_pool,
                    )
                    .await
            }
        }
    }
    /// Builds an [`AnalyticsProvider`] from configuration, scoping the sqlx
    /// client to the tenant's schema and the Clickhouse client to the
    /// tenant's Clickhouse database.
    pub async fn from_conf(
        config: &AnalyticsConfig,
        tenant: &dyn storage_impl::config::TenantConfig,
    ) -> Self {
        match config {
            AnalyticsConfig::Sqlx { sqlx, .. } => {
                Self::Sqlx(SqlxClient::from_conf(sqlx, tenant.get_schema()).await)
            }
            AnalyticsConfig::Clickhouse { clickhouse, .. } => Self::Clickhouse(ClickhouseClient {
                config: Arc::new(clickhouse.clone()),
                database: tenant.get_clickhouse_database().to_string(),
            }),
            AnalyticsConfig::CombinedCkh {
                sqlx, clickhouse, ..
            } => Self::CombinedCkh(
                SqlxClient::from_conf(sqlx, tenant.get_schema()).await,
                ClickhouseClient {
                    config: Arc::new(clickhouse.clone()),
                    database: tenant.get_clickhouse_database().to_string(),
                },
            ),
            AnalyticsConfig::CombinedSqlx {
                sqlx, clickhouse, ..
            } => Self::CombinedSqlx(
                SqlxClient::from_conf(sqlx, tenant.get_schema()).await,
                ClickhouseClient {
                    config: Arc::new(clickhouse.clone()),
                    database: tenant.get_clickhouse_database().to_string(),
                },
            ),
        }
    }
}
/// Deserialized analytics configuration; the `source` tag selects which
/// backing store(s) the provider uses.
#[derive(Clone, Debug, serde::Deserialize)]
#[serde(tag = "source", rename_all = "lowercase")]
pub enum AnalyticsConfig {
    /// Postgres (sqlx) only.
    Sqlx {
        sqlx: Database,
        #[serde(default)]
        forex_enabled: bool,
    },
    /// Clickhouse only.
    Clickhouse {
        clickhouse: ClickhouseConfig,
        #[serde(default)]
        forex_enabled: bool,
    },
    /// Both stores; Clickhouse is authoritative.
    CombinedCkh {
        sqlx: Database,
        clickhouse: ClickhouseConfig,
        #[serde(default)]
        forex_enabled: bool,
    },
    /// Both stores; Postgres is authoritative.
    CombinedSqlx {
        sqlx: Database,
        clickhouse: ClickhouseConfig,
        #[serde(default)]
        forex_enabled: bool,
    },
}
impl AnalyticsConfig {
    /// Returns whether forex (currency conversion) support is enabled,
    /// regardless of which backing-store variant is configured.
    pub fn get_forex_enabled(&self) -> bool {
        match self {
            Self::Sqlx { forex_enabled, .. }
            | Self::Clickhouse { forex_enabled, .. }
            | Self::CombinedCkh { forex_enabled, .. }
            | Self::CombinedSqlx { forex_enabled, .. } => *forex_enabled,
        }
    }
}
#[async_trait::async_trait]
impl SecretsHandler for AnalyticsConfig {
    /// Resolves the secured sqlx database password through the secret
    /// management client and rebuilds the config with the raw secret.
    /// Clickhouse-only configs currently use an empty placeholder password.
    async fn convert_to_raw_secret(
        value: SecretStateContainer<Self, SecuredSecret>,
        secret_management_client: &dyn SecretManagementInterface,
    ) -> CustomResult<SecretStateContainer<Self, RawSecret>, SecretsManagementError> {
        let analytics_config = value.get_inner();
        let decrypted_password = match analytics_config {
            // Todo: Perform kms decryption of clickhouse password
            Self::Clickhouse { .. } => masking::Secret::new(String::default()),
            Self::Sqlx { sqlx, .. }
            | Self::CombinedCkh { sqlx, .. }
            | Self::CombinedSqlx { sqlx, .. } => {
                secret_management_client
                    .get_secret(sqlx.password.clone())
                    .await?
            }
        };
        // Rebuild the same variant, swapping in the decrypted password where
        // a sqlx database config is present.
        Ok(value.transition_state(|conf| match conf {
            Self::Sqlx {
                sqlx,
                forex_enabled,
            } => Self::Sqlx {
                sqlx: Database {
                    password: decrypted_password,
                    ..sqlx
                },
                forex_enabled,
            },
            Self::Clickhouse {
                clickhouse,
                forex_enabled,
            } => Self::Clickhouse {
                clickhouse,
                forex_enabled,
            },
            Self::CombinedCkh {
                sqlx,
                clickhouse,
                forex_enabled,
            } => Self::CombinedCkh {
                sqlx: Database {
                    password: decrypted_password,
                    ..sqlx
                },
                clickhouse,
                forex_enabled,
            },
            Self::CombinedSqlx {
                sqlx,
                clickhouse,
                forex_enabled,
            } => Self::CombinedSqlx {
                sqlx: Database {
                    password: decrypted_password,
                    ..sqlx
                },
                clickhouse,
                forex_enabled,
            },
        }))
    }
}
impl Default for AnalyticsConfig {
    /// Defaults to a sqlx-only config with forex support disabled.
    fn default() -> Self {
        Self::Sqlx {
            sqlx: Database::default(),
            forex_enabled: false,
        }
    }
}
/// Configuration for report generation: lambda/function names per report
/// type and the region they run in.
#[derive(Clone, Debug, serde::Deserialize, Default, serde::Serialize)]
pub struct ReportConfig {
    // Function identifier used for payment reports.
    pub payment_function: String,
    // Function identifier used for refund reports.
    pub refund_function: String,
    // Function identifier used for dispute reports.
    pub dispute_function: String,
    // Function identifier used for authentication reports.
    pub authentication_function: String,
    // Region in which the report functions are invoked.
    pub region: String,
}
/// Analytics Flow routes Enums
/// Info - Dimensions and filters available for the domain
/// Filters - Set of values present for the dimension
/// Metrics - Analytical data on dimensions and metrics
#[derive(Debug, Display, Clone, PartialEq, Eq)]
pub enum AnalyticsFlow {
    GetInfo,
    // Metrics flows
    GetPaymentMetrics,
    GetPaymentIntentMetrics,
    GetRefundsMetrics,
    GetFrmMetrics,
    GetSdkMetrics,
    GetAuthMetrics,
    GetAuthEventFilters,
    GetActivePaymentsMetrics,
    // Filter flows
    GetPaymentFilters,
    GetPaymentIntentFilters,
    GetRefundFilters,
    GetFrmFilters,
    GetSdkEventFilters,
    // Event listing flows
    GetApiEvents,
    GetSdkEvents,
    // Report generation flows
    GeneratePaymentReport,
    GenerateDisputeReport,
    GenerateRefundReport,
    GenerateAuthenticationReport,
    GetApiEventMetrics,
    GetApiEventFilters,
    GetConnectorEvents,
    GetOutgoingWebhookEvents,
    // Search flows
    GetGlobalSearchResults,
    GetSearchResults,
    GetDisputeFilters,
    GetDisputeMetrics,
    GetSankey,
}
/// Marker impl so `AnalyticsFlow` can be used as a flow-metric label.
impl FlowMetric for AnalyticsFlow {}
| 7,066 | 1,778 |
hyperswitch | crates/analytics/src/api_event.rs | .rs | mod core;
pub mod events;
pub mod filters;
pub mod metrics;
pub mod types;
/// Marker trait for data sources that support API-event analytics.
pub trait APIEventAnalytics: events::ApiLogsFilterAnalytics {}
pub use self::core::{api_events_core, get_api_event_metrics, get_filters};
| 50 | 1,779 |
hyperswitch | crates/analytics/src/errors.rs | .rs | use api_models::errors::types::{ApiError, ApiErrorResponse};
use common_utils::errors::{CustomResult, ErrorSwitch};
/// Convenience alias for results of top-level analytics operations.
pub type AnalyticsResult<T> = CustomResult<T, AnalyticsError>;
/// Top-level, API-facing analytics errors.
#[derive(Debug, Clone, serde::Serialize, thiserror::Error)]
pub enum AnalyticsError {
    /// The requested feature is not implemented; payload names the feature.
    #[allow(dead_code)]
    #[error("Not implemented: {0}")]
    NotImplemented(&'static str),
    /// Opaque internal failure.
    #[error("Unknown Analytics Error")]
    UnknownError,
    /// Caller is not authorized for the requested data.
    #[error("Access Forbidden Analytics Error")]
    AccessForbiddenError,
    /// Currency exchange rates could not be fetched.
    #[error("Failed to fetch currency exchange rate")]
    ForexFetchFailed,
}
/// Maps analytics errors to HTTP API error responses.
impl ErrorSwitch<ApiErrorResponse> for AnalyticsError {
    fn switch(&self) -> ApiErrorResponse {
        match self {
            Self::NotImplemented(feature) => ApiErrorResponse::NotImplemented(ApiError::new(
                "IR",
                0,
                format!("{feature} is not implemented."),
                None,
            )),
            Self::UnknownError => ApiErrorResponse::InternalServerError(ApiError::new(
                "HE",
                0,
                "Something went wrong",
                None,
            )),
            Self::AccessForbiddenError => {
                ApiErrorResponse::Unauthorized(ApiError::new("IR", 0, "Access Forbidden", None))
            }
            Self::ForexFetchFailed => ApiErrorResponse::InternalServerError(ApiError::new(
                "HE",
                0,
                "Failed to fetch currency exchange rate",
                None,
            )),
        }
    }
}
| 301 | 1,780 |
hyperswitch | crates/analytics/src/sdk_events.rs | .rs | pub mod accumulator;
mod core;
pub mod events;
pub mod filters;
pub mod metrics;
pub mod types;
pub use accumulator::{SdkEventMetricAccumulator, SdkEventMetricsAccumulator};
pub use self::core::{get_filters, get_metrics, sdk_events_core};
| 58 | 1,781 |
hyperswitch | crates/analytics/src/active_payments.rs | .rs | pub mod accumulator;
mod core;
pub mod metrics;
pub use accumulator::{ActivePaymentsMetricAccumulator, ActivePaymentsMetricsAccumulator};
pub use self::core::get_metrics;
| 38 | 1,782 |
hyperswitch | crates/analytics/src/connector_events/core.rs | .rs | use api_models::analytics::connector_events::ConnectorEventsRequest;
use common_utils::errors::ReportSwitchExt;
use error_stack::ResultExt;
use super::events::{get_connector_events, ConnectorEventsResult};
use crate::{errors::AnalyticsResult, types::FiltersError, AnalyticsProvider};
/// Fetches connector events for a merchant. Connector events are stored only
/// in Clickhouse, so a sqlx-only provider returns `NotImplemented`; combined
/// variants always query the Clickhouse pool.
pub async fn connector_events_core(
    pool: &AnalyticsProvider,
    req: ConnectorEventsRequest,
    merchant_id: &common_utils::id_type::MerchantId,
) -> AnalyticsResult<Vec<ConnectorEventsResult>> {
    let data = match pool {
        AnalyticsProvider::Sqlx(_) => Err(FiltersError::NotImplemented(
            "Connector Events not implemented for SQLX",
        ))
        .attach_printable("SQL Analytics is not implemented for Connector Events"),
        AnalyticsProvider::Clickhouse(ckh_pool)
        | AnalyticsProvider::CombinedSqlx(_, ckh_pool)
        | AnalyticsProvider::CombinedCkh(_, ckh_pool) => {
            get_connector_events(merchant_id, req, ckh_pool).await
        }
    }
    .switch()?;
    Ok(data)
}
| 223 | 1,783 |
hyperswitch | crates/analytics/src/connector_events/events.rs | .rs | use api_models::analytics::{connector_events::ConnectorEventsRequest, Granularity};
use common_utils::errors::ReportSwitchExt;
use error_stack::ResultExt;
use time::PrimitiveDateTime;
use crate::{
query::{Aggregate, GroupByClause, QueryBuilder, ToSql, Window},
types::{AnalyticsCollection, AnalyticsDataSource, FiltersError, FiltersResult, LoadRow},
};
/// Marker trait for data sources that can load connector event rows.
pub trait ConnectorEventLogAnalytics: LoadRow<ConnectorEventsResult> {}
/// Queries connector events for a merchant and payment, optionally narrowed
/// to a specific refund and/or dispute.
pub async fn get_connector_events<T>(
    merchant_id: &common_utils::id_type::MerchantId,
    query_param: ConnectorEventsRequest,
    pool: &T,
) -> FiltersResult<Vec<ConnectorEventsResult>>
where
    T: AnalyticsDataSource + ConnectorEventLogAnalytics,
    PrimitiveDateTime: ToSql<T>,
    AnalyticsCollection: ToSql<T>,
    Granularity: GroupByClause<T>,
    Aggregate<&'static str>: ToSql<T>,
    Window<&'static str>: ToSql<T>,
{
    let mut query_builder: QueryBuilder<T> =
        QueryBuilder::new(AnalyticsCollection::ConnectorEvents);
    query_builder.add_select_column("*").switch()?;
    query_builder
        .add_filter_clause("merchant_id", merchant_id)
        .switch()?;
    query_builder
        .add_filter_clause("payment_id", &query_param.payment_id)
        .switch()?;
    // Optional narrowing filters.
    if let Some(refund_id) = query_param.refund_id {
        query_builder
            .add_filter_clause("refund_id", &refund_id)
            .switch()?;
    }
    if let Some(dispute_id) = query_param.dispute_id {
        query_builder
            .add_filter_clause("dispute_id", &dispute_id)
            .switch()?;
    }
    //TODO!: update the execute_query function to return reports instead of plain errors...
    // Outer error: query building; inner error: execution.
    query_builder
        .execute_query::<ConnectorEventsResult, _>(pool)
        .await
        .change_context(FiltersError::QueryBuildingError)?
        .change_context(FiltersError::QueryExecutionFailure)
}
/// A single connector event row as returned by the analytics store.
#[derive(Debug, serde::Serialize, serde::Deserialize)]
pub struct ConnectorEventsResult {
    pub merchant_id: common_utils::id_type::MerchantId,
    pub payment_id: String,
    pub connector_name: Option<String>,
    pub request_id: Option<String>,
    // Name of the connector flow that produced the event.
    pub flow: String,
    pub request: String,
    // Serialized as "masked_response": sensitive fields are redacted upstream
    // (assumption based on the field name — confirm with the event producer).
    #[serde(rename = "masked_response")]
    pub response: Option<String>,
    pub error: Option<String>,
    pub status_code: u16,
    // Request latency; units not established here — confirm with producer.
    pub latency: Option<u128>,
    #[serde(with = "common_utils::custom_serde::iso8601")]
    pub created_at: PrimitiveDateTime,
    pub method: Option<String>,
}
| 564 | 1,784 |
hyperswitch | crates/analytics/src/payment_intents/core.rs | .rs | #![allow(dead_code)]
use std::collections::{HashMap, HashSet};
use api_models::analytics::{
payment_intents::{
MetricsBucketResponse, PaymentIntentDimensions, PaymentIntentMetrics,
PaymentIntentMetricsBucketIdentifier,
},
GetPaymentIntentFiltersRequest, GetPaymentIntentMetricRequest, PaymentIntentFilterValue,
PaymentIntentFiltersResponse, PaymentIntentsAnalyticsMetadata, PaymentIntentsMetricsResponse,
};
use bigdecimal::ToPrimitive;
use common_enums::Currency;
use common_utils::{errors::CustomResult, types::TimeRange};
use currency_conversion::{conversion::convert, types::ExchangeRates};
use error_stack::ResultExt;
use router_env::{
instrument, logger,
tracing::{self, Instrument},
};
use super::{
filters::{get_payment_intent_filter_for_dimension, PaymentIntentFilterRow},
metrics::PaymentIntentMetricRow,
sankey::{get_sankey_data, SankeyRow},
PaymentIntentMetricsAccumulator,
};
use crate::{
enums::AuthInfo,
errors::{AnalyticsError, AnalyticsResult},
metrics,
payment_intents::PaymentIntentMetricAccumulator,
AnalyticsProvider,
};
/// Result payload of a spawned metric-fetch task, tagging the rows with the
/// metric they belong to so they can be routed to the right accumulator.
#[derive(Debug)]
pub enum TaskType {
    MetricTask(
        PaymentIntentMetrics,
        CustomResult<
            HashSet<(PaymentIntentMetricsBucketIdentifier, PaymentIntentMetricRow)>,
            AnalyticsError,
        >,
    ),
}
/// Fetches sankey-chart rows for the given time range. Clickhouse-only: a
/// sqlx-only provider returns `NotImplemented`; combined variants query the
/// Clickhouse pool.
#[instrument(skip_all)]
pub async fn get_sankey(
    pool: &AnalyticsProvider,
    auth: &AuthInfo,
    req: TimeRange,
) -> AnalyticsResult<Vec<SankeyRow>> {
    match pool {
        AnalyticsProvider::Sqlx(_) => Err(AnalyticsError::NotImplemented(
            "Sankey not implemented for sqlx",
        ))?,
        AnalyticsProvider::Clickhouse(ckh_pool)
        | AnalyticsProvider::CombinedCkh(_, ckh_pool)
        | AnalyticsProvider::CombinedSqlx(_, ckh_pool) => {
            let sankey_rows = get_sankey_data(ckh_pool, auth, &req)
                .await
                .change_context(AnalyticsError::UnknownError)?;
            Ok(sankey_rows)
        }
    }
}
#[instrument(skip_all)]
pub async fn get_metrics(
pool: &AnalyticsProvider,
ex_rates: &Option<ExchangeRates>,
auth: &AuthInfo,
req: GetPaymentIntentMetricRequest,
) -> AnalyticsResult<PaymentIntentsMetricsResponse<MetricsBucketResponse>> {
let mut metrics_accumulator: HashMap<
PaymentIntentMetricsBucketIdentifier,
PaymentIntentMetricsAccumulator,
> = HashMap::new();
let mut set = tokio::task::JoinSet::new();
for metric_type in req.metrics.iter().cloned() {
let req = req.clone();
let pool = pool.clone();
let task_span = tracing::debug_span!(
"analytics_payment_intents_metrics_query",
payment_metric = metric_type.as_ref()
);
// TODO: lifetime issues with joinset,
// can be optimized away if joinset lifetime requirements are relaxed
let auth_scoped = auth.to_owned();
set.spawn(
async move {
let data = pool
.get_payment_intent_metrics(
&metric_type,
&req.group_by_names.clone(),
&auth_scoped,
&req.filters,
req.time_series.map(|t| t.granularity),
&req.time_range,
)
.await
.change_context(AnalyticsError::UnknownError);
TaskType::MetricTask(metric_type, data)
}
.instrument(task_span),
);
}
while let Some(task_type) = set
.join_next()
.await
.transpose()
.change_context(AnalyticsError::UnknownError)?
{
match task_type {
TaskType::MetricTask(metric, data) => {
let data = data?;
let attributes = router_env::metric_attributes!(
("metric_type", metric.to_string()),
("source", pool.to_string()),
);
let value = u64::try_from(data.len());
if let Ok(val) = value {
metrics::BUCKETS_FETCHED.record(val, attributes);
logger::debug!("Attributes: {:?}, Buckets fetched: {}", attributes, val);
}
for (id, value) in data {
logger::debug!(bucket_id=?id, bucket_value=?value, "Bucket row for metric {metric}");
let metrics_builder = metrics_accumulator.entry(id).or_default();
match metric {
PaymentIntentMetrics::SuccessfulSmartRetries
| PaymentIntentMetrics::SessionizedSuccessfulSmartRetries => {
metrics_builder
.successful_smart_retries
.add_metrics_bucket(&value)
}
PaymentIntentMetrics::TotalSmartRetries
| PaymentIntentMetrics::SessionizedTotalSmartRetries => metrics_builder
.total_smart_retries
.add_metrics_bucket(&value),
PaymentIntentMetrics::SmartRetriedAmount
| PaymentIntentMetrics::SessionizedSmartRetriedAmount => metrics_builder
.smart_retried_amount
.add_metrics_bucket(&value),
PaymentIntentMetrics::PaymentIntentCount
| PaymentIntentMetrics::SessionizedPaymentIntentCount => metrics_builder
.payment_intent_count
.add_metrics_bucket(&value),
PaymentIntentMetrics::PaymentsSuccessRate
| PaymentIntentMetrics::SessionizedPaymentsSuccessRate => metrics_builder
.payments_success_rate
.add_metrics_bucket(&value),
PaymentIntentMetrics::SessionizedPaymentProcessedAmount
| PaymentIntentMetrics::PaymentProcessedAmount => metrics_builder
.payment_processed_amount
.add_metrics_bucket(&value),
PaymentIntentMetrics::SessionizedPaymentsDistribution => metrics_builder
.payments_distribution
.add_metrics_bucket(&value),
}
}
logger::debug!(
"Analytics Accumulated Results: metric: {}, results: {:#?}",
metric,
metrics_accumulator
);
}
}
}
let mut success = 0;
let mut success_without_smart_retries = 0;
let mut total_smart_retried_amount = 0;
let mut total_smart_retried_amount_in_usd = 0;
let mut total_smart_retried_amount_without_smart_retries = 0;
let mut total_smart_retried_amount_without_smart_retries_in_usd = 0;
let mut total = 0;
let mut total_payment_processed_amount = 0;
let mut total_payment_processed_amount_in_usd = 0;
let mut total_payment_processed_count = 0;
let mut total_payment_processed_amount_without_smart_retries = 0;
let mut total_payment_processed_amount_without_smart_retries_in_usd = 0;
let mut total_payment_processed_count_without_smart_retries = 0;
let query_data: Vec<MetricsBucketResponse> = metrics_accumulator
.into_iter()
.map(|(id, val)| {
let mut collected_values = val.collect();
if let Some(success_count) = collected_values.successful_payments {
success += success_count;
}
if let Some(success_count) = collected_values.successful_payments_without_smart_retries
{
success_without_smart_retries += success_count;
}
if let Some(total_count) = collected_values.total_payments {
total += total_count;
}
if let Some(retried_amount) = collected_values.smart_retried_amount {
let amount_in_usd = if let Some(ex_rates) = ex_rates {
id.currency
.and_then(|currency| {
i64::try_from(retried_amount)
.inspect_err(|e| logger::error!("Amount conversion error: {:?}", e))
.ok()
.and_then(|amount_i64| {
convert(ex_rates, currency, Currency::USD, amount_i64)
.inspect_err(|e| {
logger::error!("Currency conversion error: {:?}", e)
})
.ok()
})
})
.map(|amount| (amount * rust_decimal::Decimal::new(100, 0)).to_u64())
.unwrap_or_default()
} else {
None
};
collected_values.smart_retried_amount_in_usd = amount_in_usd;
total_smart_retried_amount += retried_amount;
total_smart_retried_amount_in_usd += amount_in_usd.unwrap_or(0);
}
if let Some(retried_amount) =
collected_values.smart_retried_amount_without_smart_retries
{
let amount_in_usd = if let Some(ex_rates) = ex_rates {
id.currency
.and_then(|currency| {
i64::try_from(retried_amount)
.inspect_err(|e| logger::error!("Amount conversion error: {:?}", e))
.ok()
.and_then(|amount_i64| {
convert(ex_rates, currency, Currency::USD, amount_i64)
.inspect_err(|e| {
logger::error!("Currency conversion error: {:?}", e)
})
.ok()
})
})
.map(|amount| (amount * rust_decimal::Decimal::new(100, 0)).to_u64())
.unwrap_or_default()
} else {
None
};
collected_values.smart_retried_amount_without_smart_retries_in_usd = amount_in_usd;
total_smart_retried_amount_without_smart_retries += retried_amount;
total_smart_retried_amount_without_smart_retries_in_usd +=
amount_in_usd.unwrap_or(0);
}
if let Some(amount) = collected_values.payment_processed_amount {
let amount_in_usd = if let Some(ex_rates) = ex_rates {
id.currency
.and_then(|currency| {
i64::try_from(amount)
.inspect_err(|e| logger::error!("Amount conversion error: {:?}", e))
.ok()
.and_then(|amount_i64| {
convert(ex_rates, currency, Currency::USD, amount_i64)
.inspect_err(|e| {
logger::error!("Currency conversion error: {:?}", e)
})
.ok()
})
})
.map(|amount| (amount * rust_decimal::Decimal::new(100, 0)).to_u64())
.unwrap_or_default()
} else {
None
};
collected_values.payment_processed_amount_in_usd = amount_in_usd;
total_payment_processed_amount_in_usd += amount_in_usd.unwrap_or(0);
total_payment_processed_amount += amount;
}
if let Some(count) = collected_values.payment_processed_count {
total_payment_processed_count += count;
}
if let Some(amount) = collected_values.payment_processed_amount_without_smart_retries {
let amount_in_usd = if let Some(ex_rates) = ex_rates {
id.currency
.and_then(|currency| {
i64::try_from(amount)
.inspect_err(|e| logger::error!("Amount conversion error: {:?}", e))
.ok()
.and_then(|amount_i64| {
convert(ex_rates, currency, Currency::USD, amount_i64)
.inspect_err(|e| {
logger::error!("Currency conversion error: {:?}", e)
})
.ok()
})
})
.map(|amount| (amount * rust_decimal::Decimal::new(100, 0)).to_u64())
.unwrap_or_default()
} else {
None
};
collected_values.payment_processed_amount_without_smart_retries_in_usd =
amount_in_usd;
total_payment_processed_amount_without_smart_retries_in_usd +=
amount_in_usd.unwrap_or(0);
total_payment_processed_amount_without_smart_retries += amount;
}
if let Some(count) = collected_values.payment_processed_count_without_smart_retries {
total_payment_processed_count_without_smart_retries += count;
}
MetricsBucketResponse {
values: collected_values,
dimensions: id,
}
})
.collect();
let total_success_rate = match (success, total) {
(s, t) if t > 0 => Some(f64::from(s) * 100.0 / f64::from(t)),
_ => None,
};
let total_success_rate_without_smart_retries = match (success_without_smart_retries, total) {
(s, t) if t > 0 => Some(f64::from(s) * 100.0 / f64::from(t)),
_ => None,
};
Ok(PaymentIntentsMetricsResponse {
query_data,
meta_data: [PaymentIntentsAnalyticsMetadata {
total_success_rate,
total_success_rate_without_smart_retries,
total_smart_retried_amount: Some(total_smart_retried_amount),
total_smart_retried_amount_without_smart_retries: Some(
total_smart_retried_amount_without_smart_retries,
),
total_payment_processed_amount: Some(total_payment_processed_amount),
total_payment_processed_amount_without_smart_retries: Some(
total_payment_processed_amount_without_smart_retries,
),
total_smart_retried_amount_in_usd: if ex_rates.is_some() {
Some(total_smart_retried_amount_in_usd)
} else {
None
},
total_smart_retried_amount_without_smart_retries_in_usd: if ex_rates.is_some() {
Some(total_smart_retried_amount_without_smart_retries_in_usd)
} else {
None
},
total_payment_processed_amount_in_usd: if ex_rates.is_some() {
Some(total_payment_processed_amount_in_usd)
} else {
None
},
total_payment_processed_amount_without_smart_retries_in_usd: if ex_rates.is_some() {
Some(total_payment_processed_amount_without_smart_retries_in_usd)
} else {
None
},
total_payment_processed_count: Some(total_payment_processed_count),
total_payment_processed_count_without_smart_retries: Some(
total_payment_processed_count_without_smart_retries,
),
}],
})
}
pub async fn get_filters(
pool: &AnalyticsProvider,
req: GetPaymentIntentFiltersRequest,
merchant_id: &common_utils::id_type::MerchantId,
) -> AnalyticsResult<PaymentIntentFiltersResponse> {
let mut res = PaymentIntentFiltersResponse::default();
for dim in req.group_by_names {
let values = match pool {
AnalyticsProvider::Sqlx(pool) => {
get_payment_intent_filter_for_dimension(dim, merchant_id, &req.time_range, pool)
.await
}
AnalyticsProvider::Clickhouse(pool) => {
get_payment_intent_filter_for_dimension(dim, merchant_id, &req.time_range, pool)
.await
}
AnalyticsProvider::CombinedCkh(sqlx_poll, ckh_pool) => {
let ckh_result = get_payment_intent_filter_for_dimension(
dim,
merchant_id,
&req.time_range,
ckh_pool,
)
.await;
let sqlx_result = get_payment_intent_filter_for_dimension(
dim,
merchant_id,
&req.time_range,
sqlx_poll,
)
.await;
match (&sqlx_result, &ckh_result) {
(Ok(ref sqlx_res), Ok(ref ckh_res)) if sqlx_res != ckh_res => {
router_env::logger::error!(clickhouse_result=?ckh_res, postgres_result=?sqlx_res, "Mismatch between clickhouse & postgres payment intents analytics filters")
},
_ => {}
};
ckh_result
}
AnalyticsProvider::CombinedSqlx(sqlx_poll, ckh_pool) => {
let ckh_result = get_payment_intent_filter_for_dimension(
dim,
merchant_id,
&req.time_range,
ckh_pool,
)
.await;
let sqlx_result = get_payment_intent_filter_for_dimension(
dim,
merchant_id,
&req.time_range,
sqlx_poll,
)
.await;
match (&sqlx_result, &ckh_result) {
(Ok(ref sqlx_res), Ok(ref ckh_res)) if sqlx_res != ckh_res => {
router_env::logger::error!(clickhouse_result=?ckh_res, postgres_result=?sqlx_res, "Mismatch between clickhouse & postgres payment intents analytics filters")
},
_ => {}
};
sqlx_result
}
}
.change_context(AnalyticsError::UnknownError)?
.into_iter()
.filter_map(|fil: PaymentIntentFilterRow| match dim {
PaymentIntentDimensions::PaymentIntentStatus => fil.status.map(|i| i.as_ref().to_string()),
PaymentIntentDimensions::Currency => fil.currency.map(|i| i.as_ref().to_string()),
PaymentIntentDimensions::ProfileId => fil.profile_id,
PaymentIntentDimensions::Connector => fil.connector,
PaymentIntentDimensions::AuthType => fil.authentication_type.map(|i| i.as_ref().to_string()),
PaymentIntentDimensions::PaymentMethod => fil.payment_method,
PaymentIntentDimensions::PaymentMethodType => fil.payment_method_type,
PaymentIntentDimensions::CardNetwork => fil.card_network,
PaymentIntentDimensions::MerchantId => fil.merchant_id,
PaymentIntentDimensions::CardLast4 => fil.card_last_4,
PaymentIntentDimensions::CardIssuer => fil.card_issuer,
PaymentIntentDimensions::ErrorReason => fil.error_reason,
})
.collect::<Vec<String>>();
res.query_data.push(PaymentIntentFilterValue {
dimension: dim,
values,
})
}
Ok(res)
}
| 3,743 | 1,785 |
hyperswitch | crates/analytics/src/payment_intents/metrics.rs | .rs | use std::collections::HashSet;
use api_models::analytics::{
payment_intents::{
PaymentIntentDimensions, PaymentIntentFilters, PaymentIntentMetrics,
PaymentIntentMetricsBucketIdentifier,
},
Granularity, TimeRange,
};
use diesel_models::enums as storage_enums;
use time::PrimitiveDateTime;
use crate::{
enums::AuthInfo,
query::{Aggregate, GroupByClause, ToSql, Window},
types::{AnalyticsCollection, AnalyticsDataSource, DBEnumWrapper, LoadRow, MetricsResult},
};
mod payment_intent_count;
mod payment_processed_amount;
mod payments_success_rate;
mod sessionized_metrics;
mod smart_retried_amount;
mod successful_smart_retries;
mod total_smart_retries;
use payment_intent_count::PaymentIntentCount;
use payment_processed_amount::PaymentProcessedAmount;
use payments_success_rate::PaymentsSuccessRate;
use smart_retried_amount::SmartRetriedAmount;
use successful_smart_retries::SuccessfulSmartRetries;
use total_smart_retries::TotalSmartRetries;
/// A single aggregated row returned by a payment-intent metric query.
///
/// Dimension columns are `Option`s because each query only selects the
/// dimensions it groups by; the remaining fields deserialize as `None`.
#[derive(Debug, PartialEq, Eq, serde::Deserialize, Hash)]
pub struct PaymentIntentMetricRow {
    pub status: Option<DBEnumWrapper<storage_enums::IntentStatus>>,
    pub currency: Option<DBEnumWrapper<storage_enums::Currency>>,
    pub profile_id: Option<String>,
    pub connector: Option<String>,
    pub authentication_type: Option<DBEnumWrapper<storage_enums::AuthenticationType>>,
    pub payment_method: Option<String>,
    pub payment_method_type: Option<String>,
    pub card_network: Option<String>,
    pub merchant_id: Option<String>,
    pub card_last_4: Option<String>,
    pub card_issuer: Option<String>,
    pub error_reason: Option<String>,
    // Presumably 1 when the row aggregates first-attempt intents and 0
    // otherwise (cf. the `(attempt_count = 1) as first_attempt` select used by
    // the sankey query) — TODO confirm against the metric queries.
    pub first_attempt: Option<i64>,
    // Aggregated amount for the bucket; exact semantics depend on the metric
    // that produced the row.
    pub total: Option<bigdecimal::BigDecimal>,
    // Aggregated row count for the bucket.
    pub count: Option<i64>,
    // Time-bucket boundaries, populated when a granularity was requested.
    #[serde(with = "common_utils::custom_serde::iso8601::option")]
    pub start_bucket: Option<PrimitiveDateTime>,
    #[serde(with = "common_utils::custom_serde::iso8601::option")]
    pub end_bucket: Option<PrimitiveDateTime>,
}
/// Marker trait for data sources able to deserialize [`PaymentIntentMetricRow`]s.
pub trait PaymentIntentMetricAnalytics: LoadRow<PaymentIntentMetricRow> {}
/// A payment-intent metric that can load its aggregated rows from a data
/// source `T`.
#[async_trait::async_trait]
pub trait PaymentIntentMetric<T>
where
    T: AnalyticsDataSource + PaymentIntentMetricAnalytics,
{
    /// Runs the metric's query against `pool` and returns one
    /// `(bucket identifier, row)` pair per result bucket.
    async fn load_metrics(
        &self,
        dimensions: &[PaymentIntentDimensions],
        auth: &AuthInfo,
        filters: &PaymentIntentFilters,
        granularity: Option<Granularity>,
        time_range: &TimeRange,
        pool: &T,
    ) -> MetricsResult<HashSet<(PaymentIntentMetricsBucketIdentifier, PaymentIntentMetricRow)>>;
}
#[async_trait::async_trait]
// Pure dispatch: maps each `PaymentIntentMetrics` variant to the struct that
// builds and runs its query. The `Sessionized*` variants delegate to the
// `sessionized_metrics` implementations (presumably backed by the sessionized
// tables — confirm in that module).
impl<T> PaymentIntentMetric<T> for PaymentIntentMetrics
where
    T: AnalyticsDataSource + PaymentIntentMetricAnalytics,
    PrimitiveDateTime: ToSql<T>,
    AnalyticsCollection: ToSql<T>,
    Granularity: GroupByClause<T>,
    Aggregate<&'static str>: ToSql<T>,
    Window<&'static str>: ToSql<T>,
{
    async fn load_metrics(
        &self,
        dimensions: &[PaymentIntentDimensions],
        auth: &AuthInfo,
        filters: &PaymentIntentFilters,
        granularity: Option<Granularity>,
        time_range: &TimeRange,
        pool: &T,
    ) -> MetricsResult<HashSet<(PaymentIntentMetricsBucketIdentifier, PaymentIntentMetricRow)>>
    {
        match self {
            Self::SuccessfulSmartRetries => {
                SuccessfulSmartRetries
                    .load_metrics(dimensions, auth, filters, granularity, time_range, pool)
                    .await
            }
            Self::TotalSmartRetries => {
                TotalSmartRetries
                    .load_metrics(dimensions, auth, filters, granularity, time_range, pool)
                    .await
            }
            Self::SmartRetriedAmount => {
                SmartRetriedAmount
                    .load_metrics(dimensions, auth, filters, granularity, time_range, pool)
                    .await
            }
            Self::PaymentIntentCount => {
                PaymentIntentCount
                    .load_metrics(dimensions, auth, filters, granularity, time_range, pool)
                    .await
            }
            Self::PaymentsSuccessRate => {
                PaymentsSuccessRate
                    .load_metrics(dimensions, auth, filters, granularity, time_range, pool)
                    .await
            }
            Self::PaymentProcessedAmount => {
                PaymentProcessedAmount
                    .load_metrics(dimensions, auth, filters, granularity, time_range, pool)
                    .await
            }
            Self::SessionizedSuccessfulSmartRetries => {
                sessionized_metrics::SuccessfulSmartRetries
                    .load_metrics(dimensions, auth, filters, granularity, time_range, pool)
                    .await
            }
            Self::SessionizedTotalSmartRetries => {
                sessionized_metrics::TotalSmartRetries
                    .load_metrics(dimensions, auth, filters, granularity, time_range, pool)
                    .await
            }
            Self::SessionizedSmartRetriedAmount => {
                sessionized_metrics::SmartRetriedAmount
                    .load_metrics(dimensions, auth, filters, granularity, time_range, pool)
                    .await
            }
            Self::SessionizedPaymentIntentCount => {
                sessionized_metrics::PaymentIntentCount
                    .load_metrics(dimensions, auth, filters, granularity, time_range, pool)
                    .await
            }
            Self::SessionizedPaymentsSuccessRate => {
                sessionized_metrics::PaymentsSuccessRate
                    .load_metrics(dimensions, auth, filters, granularity, time_range, pool)
                    .await
            }
            Self::SessionizedPaymentProcessedAmount => {
                sessionized_metrics::PaymentProcessedAmount
                    .load_metrics(dimensions, auth, filters, granularity, time_range, pool)
                    .await
            }
            Self::SessionizedPaymentsDistribution => {
                sessionized_metrics::PaymentsDistribution
                    .load_metrics(dimensions, auth, filters, granularity, time_range, pool)
                    .await
            }
        }
    }
}
| 1,267 | 1,786 |
hyperswitch | crates/analytics/src/payment_intents/sankey.rs | .rs | use common_enums::enums;
use common_utils::{
errors::ParsingError,
types::{authentication::AuthInfo, TimeRange},
};
use error_stack::ResultExt;
use router_env::logger;
use crate::{
clickhouse::ClickhouseClient,
query::{Aggregate, QueryBuilder, QueryFilter},
types::{AnalyticsCollection, DBEnumWrapper, MetricsError, MetricsResult},
};
/// Refund state of a sessionized payment intent, used as a Sankey dimension.
#[derive(
    Clone,
    Copy,
    Debug,
    Default,
    Eq,
    Hash,
    PartialEq,
    serde::Deserialize,
    serde::Serialize,
    strum::Display,
    strum::EnumIter,
    strum::EnumString,
)]
#[serde(rename_all = "snake_case")]
pub enum SessionizerRefundStatus {
    FullRefunded,
    #[default]
    NotRefunded,
    PartialRefunded,
}
/// Dispute state of a sessionized payment intent, used as a Sankey dimension.
#[derive(
    Clone,
    Copy,
    Debug,
    Default,
    Eq,
    Hash,
    PartialEq,
    serde::Deserialize,
    serde::Serialize,
    strum::Display,
    strum::EnumIter,
    strum::EnumString,
)]
#[serde(rename_all = "snake_case")]
pub enum SessionizerDisputeStatus {
    DisputePresent,
    #[default]
    NotDisputed,
}
/// One aggregated row of the Sankey query: how many intents share a given
/// (status, refund status, dispute status, first-attempt) combination.
#[derive(Debug, serde::Deserialize, serde::Serialize)]
pub struct SankeyRow {
    pub count: i64,
    pub status: DBEnumWrapper<enums::IntentStatus>,
    #[serde(default)]
    pub refunds_status: Option<DBEnumWrapper<SessionizerRefundStatus>>,
    #[serde(default)]
    pub dispute_status: Option<DBEnumWrapper<SessionizerDisputeStatus>>,
    // 1 when the intent completed on its first attempt, 0 otherwise; selected
    // as `(attempt_count = 1) as first_attempt` in `get_sankey_data`.
    pub first_attempt: i64,
}
impl TryInto<SankeyRow> for serde_json::Value {
type Error = error_stack::Report<ParsingError>;
fn try_into(self) -> Result<SankeyRow, Self::Error> {
logger::debug!("Parsing SankeyRow from {:?}", self);
serde_json::from_value(self).change_context(ParsingError::StructParseFailure(
"Failed to parse Sankey in clickhouse results",
))
}
}
/// Counts sessionized payment intents grouped by status, refund status,
/// dispute status, and whether the intent completed on its first attempt —
/// the node/edge data for the payments Sankey diagram.
pub async fn get_sankey_data(
    clickhouse_client: &ClickhouseClient,
    auth: &AuthInfo,
    time_range: &TimeRange,
) -> MetricsResult<Vec<SankeyRow>> {
    let mut builder =
        QueryBuilder::<ClickhouseClient>::new(AnalyticsCollection::PaymentIntentSessionized);
    builder
        .add_select_column(Aggregate::<String>::Count {
            field: None,
            alias: Some("count"),
        })
        .change_context(MetricsError::QueryBuildingError)?;
    // Dimension columns; the last one derives the first-attempt flag.
    for column in [
        "status",
        "refunds_status",
        "dispute_status",
        "(attempt_count = 1) as first_attempt",
    ] {
        builder
            .add_select_column(column)
            .attach_printable("Error adding select clause")
            .change_context(MetricsError::QueryBuildingError)?;
    }
    // Scope the query to the caller's merchant/profile and the time window.
    auth.set_filter_clause(&mut builder)
        .change_context(MetricsError::QueryBuildingError)?;
    time_range
        .set_filter_clause(&mut builder)
        .change_context(MetricsError::QueryBuildingError)?;
    for column in ["status", "refunds_status", "dispute_status", "first_attempt"] {
        builder
            .add_group_by_clause(column)
            .attach_printable("Error adding group by clause")
            .change_context(MetricsError::QueryBuildingError)?;
    }
    builder
        .execute_query::<SankeyRow, _>(clickhouse_client)
        .await
        .change_context(MetricsError::QueryBuildingError)?
        .change_context(MetricsError::QueryExecutionFailure)?
        .into_iter()
        .map(Ok)
        .collect()
}
| 972 | 1,787 |
hyperswitch | crates/analytics/src/payment_intents/accumulator.rs | .rs | use api_models::analytics::payment_intents::PaymentIntentMetricsBucketValue;
use bigdecimal::ToPrimitive;
use diesel_models::enums as storage_enums;
use super::metrics::PaymentIntentMetricRow;
/// Bundle of every per-metric accumulator for one metrics bucket identifier.
#[derive(Debug, Default)]
pub struct PaymentIntentMetricsAccumulator {
    pub successful_smart_retries: CountAccumulator,
    pub total_smart_retries: CountAccumulator,
    pub smart_retried_amount: SmartRetriedAmountAccumulator,
    pub payment_intent_count: CountAccumulator,
    pub payments_success_rate: PaymentsSuccessRateAccumulator,
    pub payment_processed_amount: ProcessedAmountAccumulator,
    pub payments_distribution: PaymentsDistributionAccumulator,
}
/// A single error-message bucket: `count` occurrences out of `total` rows.
#[derive(Debug, Default)]
pub struct ErrorDistributionRow {
    pub count: i64,
    pub total: i64,
    pub error_message: String,
}
/// Accumulates error-distribution rows. No usages are visible in this module
/// chunk — confirm callers before removing.
#[derive(Debug, Default)]
pub struct ErrorDistributionAccumulator {
    pub error_vec: Vec<ErrorDistributionRow>,
}
/// Sums row counts across buckets; stays `None` until the first counted row.
#[derive(Debug, Default)]
#[repr(transparent)]
pub struct CountAccumulator {
    pub count: Option<i64>,
}
/// Folds metric rows into an aggregate and emits the final bucket value(s).
pub trait PaymentIntentMetricAccumulator {
    /// Shape of the collected output; varies per accumulator.
    type MetricOutput;
    fn add_metrics_bucket(&mut self, metrics: &PaymentIntentMetricRow);
    fn collect(self) -> Self::MetricOutput;
}
/// Sums smart-retried amounts, both overall and restricted to first attempts.
#[derive(Debug, Default)]
pub struct SmartRetriedAmountAccumulator {
    pub amount: Option<i64>,
    pub amount_without_retries: Option<i64>,
}
/// Success/total counters used to derive payment success rates.
#[derive(Debug, Default)]
pub struct PaymentsSuccessRateAccumulator {
    pub success: u32,
    pub success_without_retries: u32,
    pub total: u32,
}
/// Processed amount/count totals, with and without smart retries.
#[derive(Debug, Default)]
pub struct ProcessedAmountAccumulator {
    pub count_with_retries: Option<i64>,
    pub total_with_retries: Option<i64>,
    pub count_without_retries: Option<i64>,
    pub total_without_retries: Option<i64>,
}
/// Counters for the success/failure distribution excluding smart retries.
#[derive(Debug, Default)]
pub struct PaymentsDistributionAccumulator {
    pub success_without_retries: u32,
    pub failed_without_retries: u32,
    pub total: u32,
}
impl PaymentIntentMetricAccumulator for CountAccumulator {
    type MetricOutput = Option<u64>;
    /// Folds the bucket's row count into the running total.
    #[inline]
    fn add_metrics_bucket(&mut self, metrics: &PaymentIntentMetricRow) {
        // Sum when both sides are present; otherwise keep whichever side is
        // populated (or remain `None` when neither is).
        self.count = match (self.count, metrics.count) {
            (Some(current), Some(incoming)) => Some(current + incoming),
            (current, incoming) => current.or(incoming),
        }
    }
    /// Emits the total as `u64`; negative totals cannot be represented and
    /// collapse to `None` instead of panicking.
    #[inline]
    fn collect(self) -> Self::MetricOutput {
        self.count.and_then(|total| u64::try_from(total).ok())
    }
}
impl PaymentIntentMetricAccumulator for SmartRetriedAmountAccumulator {
    // (with_retries, without_retries, with_retries_in_usd, without_retries_in_usd);
    // the USD slots are placeholders that the caller fills in after conversion.
    type MetricOutput = (Option<u64>, Option<u64>, Option<u64>, Option<u64>);
    #[inline]
    fn add_metrics_bucket(&mut self, metrics: &PaymentIntentMetricRow) {
        // Total retried amount across all attempts.
        self.amount = match (
            self.amount,
            metrics.total.as_ref().and_then(ToPrimitive::to_i64),
        ) {
            (None, None) => None,
            (None, i @ Some(_)) | (i @ Some(_), None) => i,
            (Some(a), Some(b)) => Some(a + b),
        };
        // Only first-attempt buckets contribute to the "without smart retries"
        // amount; other buckets are simply skipped (mirroring
        // `ProcessedAmountAccumulator`). The previous `else` branch reset the
        // accumulated value to `Some(0)`, so a retry bucket arriving after a
        // first-attempt bucket discarded the earlier contribution — and since
        // rows are folded in HashSet order, the result was nondeterministic.
        // `collect` already maps `None` to zero, so no `else` is needed.
        if metrics.first_attempt.unwrap_or(0) == 1 {
            self.amount_without_retries = match (
                self.amount_without_retries,
                metrics.total.as_ref().and_then(ToPrimitive::to_i64),
            ) {
                (None, None) => None,
                (None, i @ Some(_)) | (i @ Some(_), None) => i,
                (Some(a), Some(b)) => Some(a + b),
            }
        }
    }
    #[inline]
    fn collect(self) -> Self::MetricOutput {
        // `None` collapses to zero so downstream consumers always see a value;
        // negative sums cannot be represented and also become the default.
        let with_retries = self.amount.and_then(|i| u64::try_from(i).ok()).or(Some(0));
        let without_retries = self
            .amount_without_retries
            .and_then(|i| u64::try_from(i).ok())
            .or(Some(0));
        (with_retries, without_retries, Some(0), Some(0))
    }
}
impl PaymentIntentMetricAccumulator for PaymentsSuccessRateAccumulator {
    // (success, success_without_retries, total, success_rate,
    //  success_rate_without_retries)
    type MetricOutput = (
        Option<u32>,
        Option<u32>,
        Option<u32>,
        Option<f64>,
        Option<f64>,
    );
    fn add_metrics_bucket(&mut self, metrics: &PaymentIntentMetricRow) {
        // Rows without a status carry no success/total information.
        let Some(ref status) = metrics.status else {
            return;
        };
        let bucket_count = metrics.count.and_then(|c| u32::try_from(c).ok());
        if status.as_ref() == &storage_enums::IntentStatus::Succeeded {
            if let Some(success) = bucket_count {
                self.success += success;
                if metrics.first_attempt.unwrap_or(0) == 1 {
                    self.success_without_retries += success;
                }
            }
        }
        // Intents still awaiting customer/merchant input are not terminal and
        // are excluded from the denominator.
        let awaiting_input = matches!(
            *status.as_ref(),
            storage_enums::IntentStatus::RequiresCustomerAction
                | storage_enums::IntentStatus::RequiresPaymentMethod
                | storage_enums::IntentStatus::RequiresMerchantAction
                | storage_enums::IntentStatus::RequiresConfirmation
        );
        if !awaiting_input {
            if let Some(total) = bucket_count {
                self.total += total;
            }
        }
    }
    fn collect(self) -> Self::MetricOutput {
        // With no terminal payments there is nothing to report.
        if self.total == 0 {
            return (None, None, None, None, None);
        }
        let rate = |count: u32| Some(f64::from(count) * 100.0 / f64::from(self.total));
        (
            Some(self.success),
            Some(self.success_without_retries),
            Some(self.total),
            rate(self.success),
            rate(self.success_without_retries),
        )
    }
}
impl PaymentIntentMetricAccumulator for ProcessedAmountAccumulator {
    // (total_with_retries, count_with_retries, total_without_retries,
    //  count_without_retries, amount_in_usd, amount_without_retries_in_usd);
    // the USD slots are placeholders that the caller fills in after conversion.
    type MetricOutput = (
        Option<u64>,
        Option<u64>,
        Option<u64>,
        Option<u64>,
        Option<u64>,
        Option<u64>,
    );
    #[inline]
    fn add_metrics_bucket(&mut self, metrics: &PaymentIntentMetricRow) {
        // Optional addition: sum when both sides exist, otherwise keep the
        // populated side (or stay `None` when neither is).
        fn accumulate(current: Option<i64>, incoming: Option<i64>) -> Option<i64> {
            match (current, incoming) {
                (Some(a), Some(b)) => Some(a + b),
                (a, b) => a.or(b),
            }
        }
        let bucket_total = metrics.total.as_ref().and_then(ToPrimitive::to_i64);
        self.total_with_retries = accumulate(self.total_with_retries, bucket_total);
        self.count_with_retries = accumulate(self.count_with_retries, metrics.count);
        // First-attempt buckets additionally feed the "without retries" view.
        if metrics.first_attempt.unwrap_or(0) == 1 {
            self.total_without_retries = accumulate(self.total_without_retries, bucket_total);
            self.count_without_retries = accumulate(self.count_without_retries, metrics.count);
        }
    }
    #[inline]
    fn collect(self) -> Self::MetricOutput {
        // Totals default to zero; counts stay `None` when never observed.
        let total_with_retries = u64::try_from(self.total_with_retries.unwrap_or(0)).ok();
        let total_without_retries = u64::try_from(self.total_without_retries.unwrap_or(0)).ok();
        let count_with_retries = self.count_with_retries.and_then(|c| u64::try_from(c).ok());
        let count_without_retries = self
            .count_without_retries
            .and_then(|c| u64::try_from(c).ok());
        (
            total_with_retries,
            count_with_retries,
            total_without_retries,
            count_without_retries,
            Some(0),
            Some(0),
        )
    }
}
impl PaymentIntentMetricAccumulator for PaymentsDistributionAccumulator {
    // (success_rate_without_retries, failed_rate_without_retries), percentages.
    type MetricOutput = (Option<f64>, Option<f64>);
    fn add_metrics_bucket(&mut self, metrics: &PaymentIntentMetricRow) {
        let first_attempt = metrics.first_attempt.unwrap_or(0);
        if let Some(ref status) = metrics.status {
            if status.as_ref() == &storage_enums::IntentStatus::Succeeded {
                if let Some(success) = metrics
                    .count
                    .and_then(|success| u32::try_from(success).ok())
                {
                    // Only first-attempt successes count as "success without
                    // smart retries".
                    if first_attempt == 1 {
                        self.success_without_retries += success;
                    }
                }
            }
            if let Some(failed) = metrics.count.and_then(|failed| u32::try_from(failed).ok()) {
                // Counted as a failure-without-retries when the intent either
                // needed retries (first_attempt == 0, regardless of its final
                // status) or failed outright on the first attempt — presumably
                // because a retried intent would have failed without smart
                // retries; confirm against the metric definition.
                if first_attempt == 0
                    || (first_attempt == 1
                        && status.as_ref() == &storage_enums::IntentStatus::Failed)
                {
                    self.failed_without_retries += failed;
                }
            }
            // Intents still awaiting customer/merchant input are not terminal
            // and are excluded from the denominator.
            if status.as_ref() != &storage_enums::IntentStatus::RequiresCustomerAction
                && status.as_ref() != &storage_enums::IntentStatus::RequiresPaymentMethod
                && status.as_ref() != &storage_enums::IntentStatus::RequiresMerchantAction
                && status.as_ref() != &storage_enums::IntentStatus::RequiresConfirmation
            {
                if let Some(total) = metrics.count.and_then(|total| u32::try_from(total).ok()) {
                    self.total += total;
                }
            }
        }
    }
    fn collect(self) -> Self::MetricOutput {
        // No terminal buckets observed -> no distribution to report.
        if self.total == 0 {
            (None, None)
        } else {
            let success_without_retries = Some(self.success_without_retries);
            let failed_without_retries = Some(self.failed_without_retries);
            let total = Some(self.total);
            let success_rate_without_retries = match (success_without_retries, total) {
                (Some(s), Some(t)) if t > 0 => Some(f64::from(s) * 100.0 / f64::from(t)),
                _ => None,
            };
            let failed_rate_without_retries = match (failed_without_retries, total) {
                (Some(s), Some(t)) if t > 0 => Some(f64::from(s) * 100.0 / f64::from(t)),
                _ => None,
            };
            (success_rate_without_retries, failed_rate_without_retries)
        }
    }
}
impl PaymentIntentMetricsAccumulator {
    /// Drains every sub-accumulator into a single bucket value.
    ///
    /// The `*_in_usd` components come back as placeholders from the
    /// sub-accumulators; the caller overwrites them after currency conversion.
    pub fn collect(self) -> PaymentIntentMetricsBucketValue {
        let (
            successful_payments,
            successful_payments_without_smart_retries,
            total_payments,
            payments_success_rate,
            payments_success_rate_without_smart_retries,
        ) = self.payments_success_rate.collect();
        let (
            smart_retried_amount,
            smart_retried_amount_without_smart_retries,
            smart_retried_amount_in_usd,
            smart_retried_amount_without_smart_retries_in_usd,
        ) = self.smart_retried_amount.collect();
        let (
            payment_processed_amount,
            payment_processed_count,
            payment_processed_amount_without_smart_retries,
            payment_processed_count_without_smart_retries,
            payment_processed_amount_in_usd,
            payment_processed_amount_without_smart_retries_in_usd,
        ) = self.payment_processed_amount.collect();
        let (
            payments_success_rate_distribution_without_smart_retries,
            payments_failure_rate_distribution_without_smart_retries,
        ) = self.payments_distribution.collect();
        PaymentIntentMetricsBucketValue {
            successful_smart_retries: self.successful_smart_retries.collect(),
            total_smart_retries: self.total_smart_retries.collect(),
            smart_retried_amount,
            smart_retried_amount_in_usd,
            smart_retried_amount_without_smart_retries,
            smart_retried_amount_without_smart_retries_in_usd,
            payment_intent_count: self.payment_intent_count.collect(),
            successful_payments,
            successful_payments_without_smart_retries,
            total_payments,
            payments_success_rate,
            payments_success_rate_without_smart_retries,
            payment_processed_amount,
            payment_processed_count,
            payment_processed_amount_without_smart_retries,
            payment_processed_count_without_smart_retries,
            payments_success_rate_distribution_without_smart_retries,
            payments_failure_rate_distribution_without_smart_retries,
            payment_processed_amount_in_usd,
            payment_processed_amount_without_smart_retries_in_usd,
        }
    }
}
| 2,975 | 1,788 |
hyperswitch | crates/analytics/src/payment_intents/filters.rs | .rs | use api_models::analytics::{payment_intents::PaymentIntentDimensions, Granularity, TimeRange};
use common_utils::errors::ReportSwitchExt;
use diesel_models::enums::{AuthenticationType, Currency, IntentStatus};
use error_stack::ResultExt;
use time::PrimitiveDateTime;
use crate::{
query::{Aggregate, GroupByClause, QueryBuilder, QueryFilter, ToSql, Window},
types::{
AnalyticsCollection, AnalyticsDataSource, DBEnumWrapper, FiltersError, FiltersResult,
LoadRow,
},
};
/// Marker trait for data sources able to deserialize [`PaymentIntentFilterRow`]s.
pub trait PaymentIntentFilterAnalytics: LoadRow<PaymentIntentFilterRow> {}
/// Fetches the distinct values of a single `dimension` for `merchant_id`
/// within `time_range`, from any supported data source.
///
/// Builds a `SELECT DISTINCT <dimension>` over the payment-intent collection;
/// only the selected dimension's column is populated in the returned rows.
pub async fn get_payment_intent_filter_for_dimension<T>(
    dimension: PaymentIntentDimensions,
    merchant_id: &common_utils::id_type::MerchantId,
    time_range: &TimeRange,
    pool: &T,
) -> FiltersResult<Vec<PaymentIntentFilterRow>>
where
    T: AnalyticsDataSource + PaymentIntentFilterAnalytics,
    PrimitiveDateTime: ToSql<T>,
    AnalyticsCollection: ToSql<T>,
    Granularity: GroupByClause<T>,
    Aggregate<&'static str>: ToSql<T>,
    Window<&'static str>: ToSql<T>,
{
    let mut query_builder: QueryBuilder<T> = QueryBuilder::new(AnalyticsCollection::PaymentIntent);
    query_builder.add_select_column(dimension).switch()?;
    time_range
        .set_filter_clause(&mut query_builder)
        .attach_printable("Error filtering time range")
        .switch()?;
    query_builder
        .add_filter_clause("merchant_id", merchant_id)
        .switch()?;
    query_builder.set_distinct();
    query_builder
        .execute_query::<PaymentIntentFilterRow, _>(pool)
        .await
        .change_context(FiltersError::QueryBuildingError)?
        .change_context(FiltersError::QueryExecutionFailure)
}
/// One row of a distinct-values filter query.
///
/// Since each query selects a single dimension, only the column matching the
/// queried dimension is populated; every other field deserializes as `None`.
#[derive(Debug, serde::Serialize, Eq, PartialEq, serde::Deserialize)]
pub struct PaymentIntentFilterRow {
    pub status: Option<DBEnumWrapper<IntentStatus>>,
    pub currency: Option<DBEnumWrapper<Currency>>,
    pub profile_id: Option<String>,
    pub connector: Option<String>,
    pub authentication_type: Option<DBEnumWrapper<AuthenticationType>>,
    pub payment_method: Option<String>,
    pub payment_method_type: Option<String>,
    pub card_network: Option<String>,
    pub merchant_id: Option<String>,
    pub card_last_4: Option<String>,
    pub card_issuer: Option<String>,
    pub error_reason: Option<String>,
    pub customer_id: Option<String>,
}
hyperswitch | crates/analytics/src/payment_intents/types.rs | .rs | use api_models::analytics::payment_intents::{PaymentIntentDimensions, PaymentIntentFilters};
use error_stack::ResultExt;
use crate::{
query::{QueryBuilder, QueryFilter, QueryResult, ToSql},
types::{AnalyticsCollection, AnalyticsDataSource},
};
impl<T> QueryFilter<T> for PaymentIntentFilters
where
    T: AnalyticsDataSource,
    AnalyticsCollection: ToSql<T>,
{
    /// Translates each user-supplied filter list into an `IN (...)` clause.
    ///
    /// Empty lists are skipped entirely, so an unset filter places no
    /// restriction on the query.
    fn set_filter_clause(&self, builder: &mut QueryBuilder<T>) -> QueryResult<()> {
        if !self.status.is_empty() {
            builder
                .add_filter_in_range_clause(
                    PaymentIntentDimensions::PaymentIntentStatus,
                    &self.status,
                )
                .attach_printable("Error adding payment intent status filter")?;
        }
        if !self.currency.is_empty() {
            builder
                .add_filter_in_range_clause(PaymentIntentDimensions::Currency, &self.currency)
                .attach_printable("Error adding currency filter")?;
        }
        if !self.profile_id.is_empty() {
            builder
                .add_filter_in_range_clause(PaymentIntentDimensions::ProfileId, &self.profile_id)
                .attach_printable("Error adding profile id filter")?;
        }
        if !self.connector.is_empty() {
            builder
                .add_filter_in_range_clause(PaymentIntentDimensions::Connector, &self.connector)
                .attach_printable("Error adding connector filter")?;
        }
        if !self.auth_type.is_empty() {
            builder
                .add_filter_in_range_clause(PaymentIntentDimensions::AuthType, &self.auth_type)
                .attach_printable("Error adding auth type filter")?;
        }
        if !self.payment_method.is_empty() {
            builder
                .add_filter_in_range_clause(
                    PaymentIntentDimensions::PaymentMethod,
                    &self.payment_method,
                )
                .attach_printable("Error adding payment method filter")?;
        }
        if !self.payment_method_type.is_empty() {
            builder
                .add_filter_in_range_clause(
                    PaymentIntentDimensions::PaymentMethodType,
                    &self.payment_method_type,
                )
                .attach_printable("Error adding payment method type filter")?;
        }
        if !self.card_network.is_empty() {
            builder
                .add_filter_in_range_clause(
                    PaymentIntentDimensions::CardNetwork,
                    &self.card_network,
                )
                .attach_printable("Error adding card network filter")?;
        }
        if !self.merchant_id.is_empty() {
            builder
                .add_filter_in_range_clause(PaymentIntentDimensions::MerchantId, &self.merchant_id)
                .attach_printable("Error adding merchant id filter")?;
        }
        if !self.card_last_4.is_empty() {
            builder
                .add_filter_in_range_clause(PaymentIntentDimensions::CardLast4, &self.card_last_4)
                .attach_printable("Error adding card last 4 filter")?;
        }
        if !self.card_issuer.is_empty() {
            builder
                .add_filter_in_range_clause(PaymentIntentDimensions::CardIssuer, &self.card_issuer)
                .attach_printable("Error adding card issuer filter")?;
        }
        if !self.error_reason.is_empty() {
            builder
                .add_filter_in_range_clause(
                    PaymentIntentDimensions::ErrorReason,
                    &self.error_reason,
                )
                .attach_printable("Error adding error reason filter")?;
        }
        if !self.customer_id.is_empty() {
            builder
                // NOTE(review): keyed by the raw column name instead of a
                // `PaymentIntentDimensions` variant — presumably no customer_id
                // dimension exists; confirm before unifying with the others.
                .add_filter_in_range_clause("customer_id", &self.customer_id)
                .attach_printable("Error adding customer id filter")?;
        }
        Ok(())
    }
}
| 755 | 1,790 |
hyperswitch | crates/analytics/src/payment_intents/metrics/payment_processed_amount.rs | .rs | use std::collections::HashSet;
use api_models::analytics::{
payment_intents::{
PaymentIntentDimensions, PaymentIntentFilters, PaymentIntentMetricsBucketIdentifier,
},
Granularity, TimeRange,
};
use common_utils::errors::ReportSwitchExt;
use diesel_models::enums as storage_enums;
use error_stack::ResultExt;
use time::PrimitiveDateTime;
use super::PaymentIntentMetricRow;
use crate::{
enums::AuthInfo,
query::{Aggregate, GroupByClause, QueryBuilder, QueryFilter, SeriesBucket, ToSql, Window},
types::{AnalyticsCollection, AnalyticsDataSource, MetricsError, MetricsResult},
};
/// Amount-processed metric over the `payment_intent` collection: per currency,
/// sums `amount` across intents whose status is `Succeeded`.
#[derive(Default)]
pub(super) struct PaymentProcessedAmount;
#[async_trait::async_trait]
impl<T> super::PaymentIntentMetric<T> for PaymentProcessedAmount
where
    T: AnalyticsDataSource + super::PaymentIntentMetricAnalytics,
    PrimitiveDateTime: ToSql<T>,
    AnalyticsCollection: ToSql<T>,
    Granularity: GroupByClause<T>,
    Aggregate<&'static str>: ToSql<T>,
    Window<&'static str>: ToSql<T>,
{
    /// Builds and runs the aggregation, returning one
    /// (bucket-identifier, row) pair per result group.
    ///
    /// Query shape: SELECT <dims>, status, COUNT(*), currency, SUM(amount),
    /// MIN(created_at), MAX(created_at) ... GROUP BY <dims>, status, currency
    /// [, granularity], with `status = succeeded` enforced as a filter.
    async fn load_metrics(
        &self,
        dimensions: &[PaymentIntentDimensions],
        auth: &AuthInfo,
        filters: &PaymentIntentFilters,
        granularity: Option<Granularity>,
        time_range: &TimeRange,
        pool: &T,
    ) -> MetricsResult<HashSet<(PaymentIntentMetricsBucketIdentifier, PaymentIntentMetricRow)>>
    {
        let mut query_builder: QueryBuilder<T> =
            QueryBuilder::new(AnalyticsCollection::PaymentIntent);
        // Status is always selected and grouped so the Succeeded-only filter
        // below lines up with a concrete column in the result set.
        let mut dimensions = dimensions.to_vec();
        dimensions.push(PaymentIntentDimensions::PaymentIntentStatus);
        for dim in dimensions.iter() {
            query_builder.add_select_column(dim).switch()?;
        }
        query_builder
            .add_select_column(Aggregate::Count {
                field: None,
                alias: Some("count"),
            })
            .switch()?;
        query_builder.add_select_column("currency").switch()?;
        query_builder
            .add_select_column(Aggregate::Sum {
                field: "amount",
                alias: Some("total"),
            })
            .switch()?;
        // start_bucket / end_bucket bound the created_at range of each group;
        // they are clipped to the granularity window during post-processing.
        query_builder
            .add_select_column(Aggregate::Min {
                field: "created_at",
                alias: Some("start_bucket"),
            })
            .switch()?;
        query_builder
            .add_select_column(Aggregate::Max {
                field: "created_at",
                alias: Some("end_bucket"),
            })
            .switch()?;
        filters.set_filter_clause(&mut query_builder).switch()?;
        auth.set_filter_clause(&mut query_builder).switch()?;
        time_range
            .set_filter_clause(&mut query_builder)
            .attach_printable("Error filtering time range")
            .switch()?;
        for dim in dimensions.iter() {
            query_builder
                .add_group_by_clause(dim)
                .attach_printable("Error grouping by dimensions")
                .switch()?;
        }
        query_builder
            .add_group_by_clause("currency")
            .attach_printable("Error grouping by currency")
            .switch()?;
        if let Some(granularity) = granularity {
            granularity
                .set_group_by_clause(&mut query_builder)
                .attach_printable("Error adding granularity")
                .switch()?;
        }
        // Only successfully completed intents contribute to processed amount.
        query_builder
            .add_filter_clause(
                PaymentIntentDimensions::PaymentIntentStatus,
                storage_enums::IntentStatus::Succeeded,
            )
            .switch()?;
        query_builder
            .execute_query::<PaymentIntentMetricRow, _>(pool)
            .await
            .change_context(MetricsError::QueryBuildingError)?
            .change_context(MetricsError::QueryExecutionFailure)?
            .into_iter()
            .map(|i| {
                Ok((
                    PaymentIntentMetricsBucketIdentifier::new(
                        // Status is None in the identifier: every row is
                        // Succeeded by construction, so it adds no information.
                        None,
                        i.currency.as_ref().map(|i| i.0),
                        i.profile_id.clone(),
                        i.connector.clone(),
                        i.authentication_type.as_ref().map(|i| i.0),
                        i.payment_method.clone(),
                        i.payment_method_type.clone(),
                        i.card_network.clone(),
                        i.merchant_id.clone(),
                        i.card_last_4.clone(),
                        i.card_issuer.clone(),
                        i.error_reason.clone(),
                        TimeRange {
                            // Clip bucket bounds to the granularity window,
                            // defaulting to the caller-supplied range.
                            start_time: match (granularity, i.start_bucket) {
                                (Some(g), Some(st)) => g.clip_to_start(st)?,
                                _ => time_range.start_time,
                            },
                            end_time: granularity.as_ref().map_or_else(
                                || Ok(time_range.end_time),
                                |g| i.end_bucket.map(|et| g.clip_to_end(et)).transpose(),
                            )?,
                        },
                    ),
                    i,
                ))
            })
            .collect::<error_stack::Result<
                HashSet<(PaymentIntentMetricsBucketIdentifier, PaymentIntentMetricRow)>,
                crate::query::PostProcessingError,
            >>()
            .change_context(MetricsError::PostProcessingFailure)
    }
}
| 1,033 | 1,791 |
hyperswitch | crates/analytics/src/payment_intents/metrics/successful_smart_retries.rs | .rs | use std::collections::HashSet;
use api_models::{
analytics::{
payment_intents::{
PaymentIntentDimensions, PaymentIntentFilters, PaymentIntentMetricsBucketIdentifier,
},
Granularity, TimeRange,
},
enums::IntentStatus,
};
use common_utils::errors::ReportSwitchExt;
use error_stack::ResultExt;
use time::PrimitiveDateTime;
use super::PaymentIntentMetricRow;
use crate::{
enums::AuthInfo,
query::{
Aggregate, FilterTypes, GroupByClause, QueryBuilder, QueryFilter, SeriesBucket, ToSql,
Window,
},
types::{AnalyticsCollection, AnalyticsDataSource, MetricsError, MetricsResult},
};
/// Count of payment intents that were retried (`attempt_count > 1`) and
/// ultimately reached `Succeeded`, over the `payment_intent` collection.
#[derive(Default)]
pub(super) struct SuccessfulSmartRetries;
#[async_trait::async_trait]
impl<T> super::PaymentIntentMetric<T> for SuccessfulSmartRetries
where
    T: AnalyticsDataSource + super::PaymentIntentMetricAnalytics,
    PrimitiveDateTime: ToSql<T>,
    AnalyticsCollection: ToSql<T>,
    Granularity: GroupByClause<T>,
    Aggregate<&'static str>: ToSql<T>,
    Window<&'static str>: ToSql<T>,
{
    /// Builds and runs the aggregation, returning one
    /// (bucket-identifier, row) pair per result group.
    async fn load_metrics(
        &self,
        dimensions: &[PaymentIntentDimensions],
        auth: &AuthInfo,
        filters: &PaymentIntentFilters,
        granularity: Option<Granularity>,
        time_range: &TimeRange,
        pool: &T,
    ) -> MetricsResult<HashSet<(PaymentIntentMetricsBucketIdentifier, PaymentIntentMetricRow)>>
    {
        let mut query_builder: QueryBuilder<T> =
            QueryBuilder::new(AnalyticsCollection::PaymentIntent);
        for dim in dimensions.iter() {
            query_builder.add_select_column(dim).switch()?;
        }
        query_builder
            .add_select_column(Aggregate::Count {
                field: None,
                alias: Some("count"),
            })
            .switch()?;
        // Per-group created_at bounds, clipped to granularity below.
        query_builder
            .add_select_column(Aggregate::Min {
                field: "created_at",
                alias: Some("start_bucket"),
            })
            .switch()?;
        query_builder
            .add_select_column(Aggregate::Max {
                field: "created_at",
                alias: Some("end_bucket"),
            })
            .switch()?;
        filters.set_filter_clause(&mut query_builder).switch()?;
        auth.set_filter_clause(&mut query_builder).switch()?;
        // attempt_count > 1 marks intents that went through at least one retry.
        query_builder
            .add_custom_filter_clause("attempt_count", "1", FilterTypes::Gt)
            .switch()?;
        query_builder
            .add_custom_filter_clause("status", IntentStatus::Succeeded, FilterTypes::Equal)
            .switch()?;
        time_range
            .set_filter_clause(&mut query_builder)
            .attach_printable("Error filtering time range")
            .switch()?;
        for dim in dimensions.iter() {
            query_builder
                .add_group_by_clause(dim)
                .attach_printable("Error grouping by dimensions")
                .switch()?;
        }
        if let Some(granularity) = granularity {
            granularity
                .set_group_by_clause(&mut query_builder)
                .attach_printable("Error adding granularity")
                .switch()?;
        }
        query_builder
            .execute_query::<PaymentIntentMetricRow, _>(pool)
            .await
            .change_context(MetricsError::QueryBuildingError)?
            .change_context(MetricsError::QueryExecutionFailure)?
            .into_iter()
            .map(|i| {
                Ok((
                    PaymentIntentMetricsBucketIdentifier::new(
                        i.status.as_ref().map(|i| i.0),
                        i.currency.as_ref().map(|i| i.0),
                        i.profile_id.clone(),
                        i.connector.clone(),
                        i.authentication_type.as_ref().map(|i| i.0),
                        i.payment_method.clone(),
                        i.payment_method_type.clone(),
                        i.card_network.clone(),
                        i.merchant_id.clone(),
                        i.card_last_4.clone(),
                        i.card_issuer.clone(),
                        i.error_reason.clone(),
                        TimeRange {
                            // Clip bucket bounds to the granularity window,
                            // defaulting to the caller-supplied range.
                            start_time: match (granularity, i.start_bucket) {
                                (Some(g), Some(st)) => g.clip_to_start(st)?,
                                _ => time_range.start_time,
                            },
                            end_time: granularity.as_ref().map_or_else(
                                || Ok(time_range.end_time),
                                |g| i.end_bucket.map(|et| g.clip_to_end(et)).transpose(),
                            )?,
                        },
                    ),
                    i,
                ))
            })
            .collect::<error_stack::Result<
                HashSet<(PaymentIntentMetricsBucketIdentifier, PaymentIntentMetricRow)>,
                crate::query::PostProcessingError,
            >>()
            .change_context(MetricsError::PostProcessingFailure)
    }
}
| 971 | 1,792 |
hyperswitch | crates/analytics/src/payment_intents/metrics/sessionized_metrics.rs | .rs | mod payment_intent_count;
mod payment_processed_amount;
mod payments_distribution;
mod payments_success_rate;
mod smart_retried_amount;
mod successful_smart_retries;
mod total_smart_retries;
pub(super) use payment_intent_count::PaymentIntentCount;
pub(super) use payment_processed_amount::PaymentProcessedAmount;
pub(super) use payments_distribution::PaymentsDistribution;
pub(super) use payments_success_rate::PaymentsSuccessRate;
pub(super) use smart_retried_amount::SmartRetriedAmount;
pub(super) use successful_smart_retries::SuccessfulSmartRetries;
pub(super) use total_smart_retries::TotalSmartRetries;
pub use super::{PaymentIntentMetric, PaymentIntentMetricAnalytics, PaymentIntentMetricRow};
| 145 | 1,793 |
hyperswitch | crates/analytics/src/payment_intents/metrics/payment_intent_count.rs | .rs | use std::collections::HashSet;
use api_models::analytics::{
payment_intents::{
PaymentIntentDimensions, PaymentIntentFilters, PaymentIntentMetricsBucketIdentifier,
},
Granularity, TimeRange,
};
use common_utils::errors::ReportSwitchExt;
use error_stack::ResultExt;
use time::PrimitiveDateTime;
use super::PaymentIntentMetricRow;
use crate::{
enums::AuthInfo,
query::{Aggregate, GroupByClause, QueryBuilder, QueryFilter, SeriesBucket, ToSql, Window},
types::{AnalyticsCollection, AnalyticsDataSource, MetricsError, MetricsResult},
};
/// Plain count of payment intents per requested dimension group, over the
/// `payment_intent` collection (no status restriction).
#[derive(Default)]
pub(super) struct PaymentIntentCount;
#[async_trait::async_trait]
impl<T> super::PaymentIntentMetric<T> for PaymentIntentCount
where
    T: AnalyticsDataSource + super::PaymentIntentMetricAnalytics,
    PrimitiveDateTime: ToSql<T>,
    AnalyticsCollection: ToSql<T>,
    Granularity: GroupByClause<T>,
    Aggregate<&'static str>: ToSql<T>,
    Window<&'static str>: ToSql<T>,
{
    /// Builds and runs the aggregation, returning one
    /// (bucket-identifier, row) pair per result group.
    async fn load_metrics(
        &self,
        dimensions: &[PaymentIntentDimensions],
        auth: &AuthInfo,
        filters: &PaymentIntentFilters,
        granularity: Option<Granularity>,
        time_range: &TimeRange,
        pool: &T,
    ) -> MetricsResult<HashSet<(PaymentIntentMetricsBucketIdentifier, PaymentIntentMetricRow)>>
    {
        let mut query_builder: QueryBuilder<T> =
            QueryBuilder::new(AnalyticsCollection::PaymentIntent);
        for dim in dimensions.iter() {
            query_builder.add_select_column(dim).switch()?;
        }
        query_builder
            .add_select_column(Aggregate::Count {
                field: None,
                alias: Some("count"),
            })
            .switch()?;
        // Per-group created_at bounds, clipped to granularity below.
        query_builder
            .add_select_column(Aggregate::Min {
                field: "created_at",
                alias: Some("start_bucket"),
            })
            .switch()?;
        query_builder
            .add_select_column(Aggregate::Max {
                field: "created_at",
                alias: Some("end_bucket"),
            })
            .switch()?;
        filters.set_filter_clause(&mut query_builder).switch()?;
        auth.set_filter_clause(&mut query_builder).switch()?;
        time_range
            .set_filter_clause(&mut query_builder)
            .attach_printable("Error filtering time range")
            .switch()?;
        for dim in dimensions.iter() {
            query_builder
                .add_group_by_clause(dim)
                .attach_printable("Error grouping by dimensions")
                .switch()?;
        }
        if let Some(granularity) = granularity {
            granularity
                .set_group_by_clause(&mut query_builder)
                .attach_printable("Error adding granularity")
                .switch()?;
        }
        query_builder
            .execute_query::<PaymentIntentMetricRow, _>(pool)
            .await
            .change_context(MetricsError::QueryBuildingError)?
            .change_context(MetricsError::QueryExecutionFailure)?
            .into_iter()
            .map(|i| {
                Ok((
                    PaymentIntentMetricsBucketIdentifier::new(
                        i.status.as_ref().map(|i| i.0),
                        i.currency.as_ref().map(|i| i.0),
                        i.profile_id.clone(),
                        i.connector.clone(),
                        i.authentication_type.as_ref().map(|i| i.0),
                        i.payment_method.clone(),
                        i.payment_method_type.clone(),
                        i.card_network.clone(),
                        i.merchant_id.clone(),
                        i.card_last_4.clone(),
                        i.card_issuer.clone(),
                        i.error_reason.clone(),
                        TimeRange {
                            // Clip bucket bounds to the granularity window,
                            // defaulting to the caller-supplied range.
                            start_time: match (granularity, i.start_bucket) {
                                (Some(g), Some(st)) => g.clip_to_start(st)?,
                                _ => time_range.start_time,
                            },
                            end_time: granularity.as_ref().map_or_else(
                                || Ok(time_range.end_time),
                                |g| i.end_bucket.map(|et| g.clip_to_end(et)).transpose(),
                            )?,
                        },
                    ),
                    i,
                ))
            })
            .collect::<error_stack::Result<
                HashSet<(PaymentIntentMetricsBucketIdentifier, PaymentIntentMetricRow)>,
                crate::query::PostProcessingError,
            >>()
            .change_context(MetricsError::PostProcessingFailure)
    }
}
| 898 | 1,794 |
hyperswitch | crates/analytics/src/payment_intents/metrics/payments_success_rate.rs | .rs | use std::collections::HashSet;
use api_models::analytics::{
payment_intents::{
PaymentIntentDimensions, PaymentIntentFilters, PaymentIntentMetricsBucketIdentifier,
},
Granularity, TimeRange,
};
use common_utils::errors::ReportSwitchExt;
use error_stack::ResultExt;
use time::PrimitiveDateTime;
use super::PaymentIntentMetricRow;
use crate::{
enums::AuthInfo,
query::{Aggregate, GroupByClause, QueryBuilder, QueryFilter, SeriesBucket, ToSql, Window},
types::{AnalyticsCollection, AnalyticsDataSource, MetricsError, MetricsResult},
};
/// Per-status intent counts used to derive payment success rate.
///
/// The query groups by status but the bucket identifier's status is `None`,
/// so rows for all statuses of a logical bucket share one identifier —
/// presumably the rate is computed downstream from the per-status counts
/// (TODO confirm against the aggregation layer).
#[derive(Default)]
pub(super) struct PaymentsSuccessRate;
#[async_trait::async_trait]
impl<T> super::PaymentIntentMetric<T> for PaymentsSuccessRate
where
    T: AnalyticsDataSource + super::PaymentIntentMetricAnalytics,
    PrimitiveDateTime: ToSql<T>,
    AnalyticsCollection: ToSql<T>,
    Granularity: GroupByClause<T>,
    Aggregate<&'static str>: ToSql<T>,
    Window<&'static str>: ToSql<T>,
{
    /// Builds and runs the aggregation, returning one
    /// (bucket-identifier, row) pair per result group.
    async fn load_metrics(
        &self,
        dimensions: &[PaymentIntentDimensions],
        auth: &AuthInfo,
        filters: &PaymentIntentFilters,
        granularity: Option<Granularity>,
        time_range: &TimeRange,
        pool: &T,
    ) -> MetricsResult<HashSet<(PaymentIntentMetricsBucketIdentifier, PaymentIntentMetricRow)>>
    {
        let mut query_builder: QueryBuilder<T> =
            QueryBuilder::new(AnalyticsCollection::PaymentIntent);
        // Always select/group by status so counts are split per status.
        let mut dimensions = dimensions.to_vec();
        dimensions.push(PaymentIntentDimensions::PaymentIntentStatus);
        for dim in dimensions.iter() {
            query_builder.add_select_column(dim).switch()?;
        }
        query_builder
            .add_select_column(Aggregate::Count {
                field: None,
                alias: Some("count"),
            })
            .switch()?;
        // Per-group created_at bounds, clipped to granularity below.
        query_builder
            .add_select_column(Aggregate::Min {
                field: "created_at",
                alias: Some("start_bucket"),
            })
            .switch()?;
        query_builder
            .add_select_column(Aggregate::Max {
                field: "created_at",
                alias: Some("end_bucket"),
            })
            .switch()?;
        filters.set_filter_clause(&mut query_builder).switch()?;
        auth.set_filter_clause(&mut query_builder).switch()?;
        time_range
            .set_filter_clause(&mut query_builder)
            .attach_printable("Error filtering time range")
            .switch()?;
        for dim in dimensions.iter() {
            query_builder
                .add_group_by_clause(dim)
                .attach_printable("Error grouping by dimensions")
                .switch()?;
        }
        if let Some(granularity) = granularity {
            granularity
                .set_group_by_clause(&mut query_builder)
                .attach_printable("Error adding granularity")
                .switch()?;
        }
        query_builder
            .execute_query::<PaymentIntentMetricRow, _>(pool)
            .await
            .change_context(MetricsError::QueryBuildingError)?
            .change_context(MetricsError::QueryExecutionFailure)?
            .into_iter()
            .map(|i| {
                Ok((
                    PaymentIntentMetricsBucketIdentifier::new(
                        // Status deliberately omitted from the identifier (see
                        // type-level docs above).
                        None,
                        i.currency.as_ref().map(|i| i.0),
                        i.profile_id.clone(),
                        i.connector.clone(),
                        i.authentication_type.as_ref().map(|i| i.0),
                        i.payment_method.clone(),
                        i.payment_method_type.clone(),
                        i.card_network.clone(),
                        i.merchant_id.clone(),
                        i.card_last_4.clone(),
                        i.card_issuer.clone(),
                        i.error_reason.clone(),
                        TimeRange {
                            // Clip bucket bounds to the granularity window,
                            // defaulting to the caller-supplied range.
                            start_time: match (granularity, i.start_bucket) {
                                (Some(g), Some(st)) => g.clip_to_start(st)?,
                                _ => time_range.start_time,
                            },
                            end_time: granularity.as_ref().map_or_else(
                                || Ok(time_range.end_time),
                                |g| i.end_bucket.map(|et| g.clip_to_end(et)).transpose(),
                            )?,
                        },
                    ),
                    i,
                ))
            })
            .collect::<error_stack::Result<
                HashSet<(PaymentIntentMetricsBucketIdentifier, PaymentIntentMetricRow)>,
                crate::query::PostProcessingError,
            >>()
            .change_context(MetricsError::PostProcessingFailure)
    }
}
| 909 | 1,795 |
hyperswitch | crates/analytics/src/payment_intents/metrics/smart_retried_amount.rs | .rs | use std::collections::HashSet;
use api_models::{
analytics::{
payment_intents::{
PaymentIntentDimensions, PaymentIntentFilters, PaymentIntentMetricsBucketIdentifier,
},
Granularity, TimeRange,
},
enums::IntentStatus,
};
use common_utils::errors::ReportSwitchExt;
use error_stack::ResultExt;
use time::PrimitiveDateTime;
use super::PaymentIntentMetricRow;
use crate::{
enums::AuthInfo,
query::{
Aggregate, FilterTypes, GroupByClause, QueryBuilder, QueryFilter, SeriesBucket, ToSql,
Window,
},
types::{AnalyticsCollection, AnalyticsDataSource, MetricsError, MetricsResult},
};
/// Amount recovered via smart retries: sums `amount` per currency for intents
/// with `attempt_count > 1` that ended `Succeeded`.
#[derive(Default)]
pub(super) struct SmartRetriedAmount;
#[async_trait::async_trait]
impl<T> super::PaymentIntentMetric<T> for SmartRetriedAmount
where
    T: AnalyticsDataSource + super::PaymentIntentMetricAnalytics,
    PrimitiveDateTime: ToSql<T>,
    AnalyticsCollection: ToSql<T>,
    Granularity: GroupByClause<T>,
    Aggregate<&'static str>: ToSql<T>,
    Window<&'static str>: ToSql<T>,
{
    /// Builds and runs the aggregation, returning one
    /// (bucket-identifier, row) pair per result group.
    async fn load_metrics(
        &self,
        dimensions: &[PaymentIntentDimensions],
        auth: &AuthInfo,
        filters: &PaymentIntentFilters,
        granularity: Option<Granularity>,
        time_range: &TimeRange,
        pool: &T,
    ) -> MetricsResult<HashSet<(PaymentIntentMetricsBucketIdentifier, PaymentIntentMetricRow)>>
    {
        let mut query_builder: QueryBuilder<T> =
            QueryBuilder::new(AnalyticsCollection::PaymentIntent);
        for dim in dimensions.iter() {
            query_builder.add_select_column(dim).switch()?;
        }
        query_builder
            .add_select_column(Aggregate::Sum {
                field: "amount",
                alias: Some("total"),
            })
            .switch()?;
        // Currency is selected and grouped so sums are not mixed across currencies.
        query_builder.add_select_column("currency").switch()?;
        query_builder
            .add_select_column(Aggregate::Min {
                field: "created_at",
                alias: Some("start_bucket"),
            })
            .switch()?;
        query_builder
            .add_select_column(Aggregate::Max {
                field: "created_at",
                alias: Some("end_bucket"),
            })
            .switch()?;
        filters.set_filter_clause(&mut query_builder).switch()?;
        auth.set_filter_clause(&mut query_builder).switch()?;
        // attempt_count > 1 marks intents that went through at least one retry.
        query_builder
            .add_custom_filter_clause("attempt_count", "1", FilterTypes::Gt)
            .switch()?;
        query_builder
            .add_custom_filter_clause("status", IntentStatus::Succeeded, FilterTypes::Equal)
            .switch()?;
        time_range
            .set_filter_clause(&mut query_builder)
            .attach_printable("Error filtering time range")
            .switch()?;
        for dim in dimensions.iter() {
            query_builder
                .add_group_by_clause(dim)
                .attach_printable("Error grouping by dimensions")
                .switch()?;
        }
        query_builder
            .add_group_by_clause("currency")
            .attach_printable("Error grouping by currency")
            .switch()?;
        if let Some(granularity) = granularity {
            granularity
                .set_group_by_clause(&mut query_builder)
                .attach_printable("Error adding granularity")
                .switch()?;
        }
        query_builder
            .execute_query::<PaymentIntentMetricRow, _>(pool)
            .await
            .change_context(MetricsError::QueryBuildingError)?
            .change_context(MetricsError::QueryExecutionFailure)?
            .into_iter()
            .map(|i| {
                Ok((
                    PaymentIntentMetricsBucketIdentifier::new(
                        i.status.as_ref().map(|i| i.0),
                        i.currency.as_ref().map(|i| i.0),
                        i.profile_id.clone(),
                        i.connector.clone(),
                        i.authentication_type.as_ref().map(|i| i.0),
                        i.payment_method.clone(),
                        i.payment_method_type.clone(),
                        i.card_network.clone(),
                        i.merchant_id.clone(),
                        i.card_last_4.clone(),
                        i.card_issuer.clone(),
                        i.error_reason.clone(),
                        TimeRange {
                            // Clip bucket bounds to the granularity window,
                            // defaulting to the caller-supplied range.
                            start_time: match (granularity, i.start_bucket) {
                                (Some(g), Some(st)) => g.clip_to_start(st)?,
                                _ => time_range.start_time,
                            },
                            end_time: granularity.as_ref().map_or_else(
                                || Ok(time_range.end_time),
                                |g| i.end_bucket.map(|et| g.clip_to_end(et)).transpose(),
                            )?,
                        },
                    ),
                    i,
                ))
            })
            .collect::<error_stack::Result<
                HashSet<(PaymentIntentMetricsBucketIdentifier, PaymentIntentMetricRow)>,
                crate::query::PostProcessingError,
            >>()
            .change_context(MetricsError::PostProcessingFailure)
    }
}
| 1,012 | 1,796 |
hyperswitch | crates/analytics/src/payment_intents/metrics/total_smart_retries.rs | .rs | use std::collections::HashSet;
use api_models::analytics::{
payment_intents::{
PaymentIntentDimensions, PaymentIntentFilters, PaymentIntentMetricsBucketIdentifier,
},
Granularity, TimeRange,
};
use common_utils::errors::ReportSwitchExt;
use error_stack::ResultExt;
use time::PrimitiveDateTime;
use super::PaymentIntentMetricRow;
use crate::{
enums::AuthInfo,
query::{
Aggregate, FilterTypes, GroupByClause, QueryBuilder, QueryFilter, SeriesBucket, ToSql,
Window,
},
types::{AnalyticsCollection, AnalyticsDataSource, MetricsError, MetricsResult},
};
/// Count of all smart-retried payment intents (`attempt_count > 1`),
/// regardless of final status.
#[derive(Default)]
pub(super) struct TotalSmartRetries;
#[async_trait::async_trait]
impl<T> super::PaymentIntentMetric<T> for TotalSmartRetries
where
    T: AnalyticsDataSource + super::PaymentIntentMetricAnalytics,
    PrimitiveDateTime: ToSql<T>,
    AnalyticsCollection: ToSql<T>,
    Granularity: GroupByClause<T>,
    Aggregate<&'static str>: ToSql<T>,
    Window<&'static str>: ToSql<T>,
{
    /// Builds and runs the aggregation, returning one
    /// (bucket-identifier, row) pair per result group.
    async fn load_metrics(
        &self,
        dimensions: &[PaymentIntentDimensions],
        auth: &AuthInfo,
        filters: &PaymentIntentFilters,
        granularity: Option<Granularity>,
        time_range: &TimeRange,
        pool: &T,
    ) -> MetricsResult<HashSet<(PaymentIntentMetricsBucketIdentifier, PaymentIntentMetricRow)>>
    {
        let mut query_builder: QueryBuilder<T> =
            QueryBuilder::new(AnalyticsCollection::PaymentIntent);
        for dim in dimensions.iter() {
            query_builder.add_select_column(dim).switch()?;
        }
        query_builder
            .add_select_column(Aggregate::Count {
                field: None,
                alias: Some("count"),
            })
            .switch()?;
        // Per-group created_at bounds, clipped to granularity below.
        query_builder
            .add_select_column(Aggregate::Min {
                field: "created_at",
                alias: Some("start_bucket"),
            })
            .switch()?;
        query_builder
            .add_select_column(Aggregate::Max {
                field: "created_at",
                alias: Some("end_bucket"),
            })
            .switch()?;
        filters.set_filter_clause(&mut query_builder).switch()?;
        auth.set_filter_clause(&mut query_builder).switch()?;
        // attempt_count > 1 marks intents that went through at least one retry;
        // no status filter here — both successful and failed retries count.
        query_builder
            .add_custom_filter_clause("attempt_count", "1", FilterTypes::Gt)
            .switch()?;
        time_range
            .set_filter_clause(&mut query_builder)
            .attach_printable("Error filtering time range")
            .switch()?;
        for dim in dimensions.iter() {
            query_builder
                .add_group_by_clause(dim)
                .attach_printable("Error grouping by dimensions")
                .switch()?;
        }
        if let Some(granularity) = granularity {
            granularity
                .set_group_by_clause(&mut query_builder)
                .attach_printable("Error adding granularity")
                .switch()?;
        }
        query_builder
            .execute_query::<PaymentIntentMetricRow, _>(pool)
            .await
            .change_context(MetricsError::QueryBuildingError)?
            .change_context(MetricsError::QueryExecutionFailure)?
            .into_iter()
            .map(|i| {
                Ok((
                    PaymentIntentMetricsBucketIdentifier::new(
                        i.status.as_ref().map(|i| i.0),
                        i.currency.as_ref().map(|i| i.0),
                        i.profile_id.clone(),
                        i.connector.clone(),
                        i.authentication_type.as_ref().map(|i| i.0),
                        i.payment_method.clone(),
                        i.payment_method_type.clone(),
                        i.card_network.clone(),
                        i.merchant_id.clone(),
                        i.card_last_4.clone(),
                        i.card_issuer.clone(),
                        i.error_reason.clone(),
                        TimeRange {
                            // Clip bucket bounds to the granularity window,
                            // defaulting to the caller-supplied range.
                            start_time: match (granularity, i.start_bucket) {
                                (Some(g), Some(st)) => g.clip_to_start(st)?,
                                _ => time_range.start_time,
                            },
                            end_time: granularity.as_ref().map_or_else(
                                || Ok(time_range.end_time),
                                |g| i.end_bucket.map(|et| g.clip_to_end(et)).transpose(),
                            )?,
                        },
                    ),
                    i,
                ))
            })
            .collect::<error_stack::Result<
                HashSet<(PaymentIntentMetricsBucketIdentifier, PaymentIntentMetricRow)>,
                crate::query::PostProcessingError,
            >>()
            .change_context(MetricsError::PostProcessingFailure)
    }
}
| 934 | 1,797 |
hyperswitch | crates/analytics/src/payment_intents/metrics/sessionized_metrics/payment_processed_amount.rs | .rs | use std::collections::HashSet;
use api_models::analytics::{
payment_intents::{
PaymentIntentDimensions, PaymentIntentFilters, PaymentIntentMetricsBucketIdentifier,
},
Granularity, TimeRange,
};
use common_utils::errors::ReportSwitchExt;
use diesel_models::enums as storage_enums;
use error_stack::ResultExt;
use time::PrimitiveDateTime;
use super::PaymentIntentMetricRow;
use crate::{
enums::AuthInfo,
query::{Aggregate, GroupByClause, QueryBuilder, QueryFilter, SeriesBucket, ToSql, Window},
types::{AnalyticsCollection, AnalyticsDataSource, MetricsError, MetricsResult},
};
/// Sessionized variant of the amount-processed metric: reads from the
/// sessionized payment-intent collection and additionally splits groups by
/// whether the intent succeeded on its first attempt.
#[derive(Default)]
pub(crate) struct PaymentProcessedAmount;
#[async_trait::async_trait]
impl<T> super::PaymentIntentMetric<T> for PaymentProcessedAmount
where
    T: AnalyticsDataSource + super::PaymentIntentMetricAnalytics,
    PrimitiveDateTime: ToSql<T>,
    AnalyticsCollection: ToSql<T>,
    Granularity: GroupByClause<T>,
    Aggregate<&'static str>: ToSql<T>,
    Window<&'static str>: ToSql<T>,
{
    /// Builds and runs the aggregation, returning one
    /// (bucket-identifier, row) pair per result group.
    async fn load_metrics(
        &self,
        dimensions: &[PaymentIntentDimensions],
        auth: &AuthInfo,
        filters: &PaymentIntentFilters,
        granularity: Option<Granularity>,
        time_range: &TimeRange,
        pool: &T,
    ) -> MetricsResult<HashSet<(PaymentIntentMetricsBucketIdentifier, PaymentIntentMetricRow)>>
    {
        let mut query_builder: QueryBuilder<T> =
            QueryBuilder::new(AnalyticsCollection::PaymentIntentSessionized)
        // Status is always selected and grouped so the Succeeded-only filter
        // below lines up with a concrete column in the result set.
        let mut dimensions = dimensions.to_vec();
        dimensions.push(PaymentIntentDimensions::PaymentIntentStatus);
        for dim in dimensions.iter() {
            query_builder.add_select_column(dim).switch()?;
        }
        query_builder
            .add_select_column(Aggregate::Count {
                field: None,
                alias: Some("count"),
            })
            .switch()?;
        // Raw SQL expression: distinguishes first-attempt successes from
        // successes that required retries.
        query_builder
            .add_select_column("(attempt_count = 1) as first_attempt")
            .switch()?;
        query_builder.add_select_column("currency").switch()?;
        query_builder
            .add_select_column(Aggregate::Sum {
                field: "amount",
                alias: Some("total"),
            })
            .switch()?;
        // Per-group created_at bounds, clipped to granularity below.
        query_builder
            .add_select_column(Aggregate::Min {
                field: "created_at",
                alias: Some("start_bucket"),
            })
            .switch()?;
        query_builder
            .add_select_column(Aggregate::Max {
                field: "created_at",
                alias: Some("end_bucket"),
            })
            .switch()?;
        filters.set_filter_clause(&mut query_builder).switch()?;
        auth.set_filter_clause(&mut query_builder).switch()?;
        time_range
            .set_filter_clause(&mut query_builder)
            .attach_printable("Error filtering time range")
            .switch()?;
        for dim in dimensions.iter() {
            query_builder
                .add_group_by_clause(dim)
                .attach_printable("Error grouping by dimensions")
                .switch()?;
        }
        query_builder
            .add_group_by_clause("first_attempt")
            .attach_printable("Error grouping by first_attempt")
            .switch()?;
        query_builder
            .add_group_by_clause("currency")
            .attach_printable("Error grouping by currency")
            .switch()?;
        if let Some(granularity) = granularity {
            granularity
                .set_group_by_clause(&mut query_builder)
                .attach_printable("Error adding granularity")
                .switch()?;
        }
        // Only successfully completed intents contribute to processed amount.
        query_builder
            .add_filter_clause(
                PaymentIntentDimensions::PaymentIntentStatus,
                storage_enums::IntentStatus::Succeeded,
            )
            .switch()?;
        query_builder
            .execute_query::<PaymentIntentMetricRow, _>(pool)
            .await
            .change_context(MetricsError::QueryBuildingError)?
            .change_context(MetricsError::QueryExecutionFailure)?
            .into_iter()
            .map(|i| {
                Ok((
                    PaymentIntentMetricsBucketIdentifier::new(
                        // Status is None in the identifier: every row is
                        // Succeeded by construction, so it adds no information.
                        None,
                        i.currency.as_ref().map(|i| i.0),
                        i.profile_id.clone(),
                        i.connector.clone(),
                        i.authentication_type.as_ref().map(|i| i.0),
                        i.payment_method.clone(),
                        i.payment_method_type.clone(),
                        i.card_network.clone(),
                        i.merchant_id.clone(),
                        i.card_last_4.clone(),
                        i.card_issuer.clone(),
                        i.error_reason.clone(),
                        TimeRange {
                            // Clip bucket bounds to the granularity window,
                            // defaulting to the caller-supplied range.
                            start_time: match (granularity, i.start_bucket) {
                                (Some(g), Some(st)) => g.clip_to_start(st)?,
                                _ => time_range.start_time,
                            },
                            end_time: granularity.as_ref().map_or_else(
                                || Ok(time_range.end_time),
                                |g| i.end_bucket.map(|et| g.clip_to_end(et)).transpose(),
                            )?,
                        },
                    ),
                    i,
                ))
            })
            .collect::<error_stack::Result<
                HashSet<(PaymentIntentMetricsBucketIdentifier, PaymentIntentMetricRow)>,
                crate::query::PostProcessingError,
            >>()
            .change_context(MetricsError::PostProcessingFailure)
    }
}
| 1,085 | 1,798 |
hyperswitch | crates/analytics/src/payment_intents/metrics/sessionized_metrics/successful_smart_retries.rs | .rs | use std::collections::HashSet;
use api_models::{
analytics::{
payment_intents::{
PaymentIntentDimensions, PaymentIntentFilters, PaymentIntentMetricsBucketIdentifier,
},
Granularity, TimeRange,
},
enums::IntentStatus,
};
use common_utils::errors::ReportSwitchExt;
use error_stack::ResultExt;
use time::PrimitiveDateTime;
use super::PaymentIntentMetricRow;
use crate::{
enums::AuthInfo,
query::{
Aggregate, FilterTypes, GroupByClause, QueryBuilder, QueryFilter, SeriesBucket, ToSql,
Window,
},
types::{AnalyticsCollection, AnalyticsDataSource, MetricsError, MetricsResult},
};
/// Sessionized variant of the successful-smart-retries count: intents with
/// `attempt_count > 1` that ended `Succeeded`, read from the sessionized
/// payment-intent collection.
#[derive(Default)]
pub(crate) struct SuccessfulSmartRetries;
#[async_trait::async_trait]
impl<T> super::PaymentIntentMetric<T> for SuccessfulSmartRetries
where
    T: AnalyticsDataSource + super::PaymentIntentMetricAnalytics,
    PrimitiveDateTime: ToSql<T>,
    AnalyticsCollection: ToSql<T>,
    Granularity: GroupByClause<T>,
    Aggregate<&'static str>: ToSql<T>,
    Window<&'static str>: ToSql<T>,
{
    /// Builds and runs the aggregation, returning one
    /// (bucket-identifier, row) pair per result group.
    async fn load_metrics(
        &self,
        dimensions: &[PaymentIntentDimensions],
        auth: &AuthInfo,
        filters: &PaymentIntentFilters,
        granularity: Option<Granularity>,
        time_range: &TimeRange,
        pool: &T,
    ) -> MetricsResult<HashSet<(PaymentIntentMetricsBucketIdentifier, PaymentIntentMetricRow)>>
    {
        let mut query_builder: QueryBuilder<T> =
            QueryBuilder::new(AnalyticsCollection::PaymentIntentSessionized);
        for dim in dimensions.iter() {
            query_builder.add_select_column(dim).switch()?;
        }
        query_builder
            .add_select_column(Aggregate::Count {
                field: None,
                alias: Some("count"),
            })
            .switch()?;
        // Per-group created_at bounds, clipped to granularity below.
        query_builder
            .add_select_column(Aggregate::Min {
                field: "created_at",
                alias: Some("start_bucket"),
            })
            .switch()?;
        query_builder
            .add_select_column(Aggregate::Max {
                field: "created_at",
                alias: Some("end_bucket"),
            })
            .switch()?;
        filters.set_filter_clause(&mut query_builder).switch()?;
        auth.set_filter_clause(&mut query_builder).switch()?;
        // attempt_count > 1 marks intents that went through at least one retry.
        query_builder
            .add_custom_filter_clause("attempt_count", "1", FilterTypes::Gt)
            .switch()?;
        query_builder
            .add_custom_filter_clause("status", IntentStatus::Succeeded, FilterTypes::Equal)
            .switch()?;
        time_range
            .set_filter_clause(&mut query_builder)
            .attach_printable("Error filtering time range")
            .switch()?;
        for dim in dimensions.iter() {
            query_builder
                .add_group_by_clause(dim)
                .attach_printable("Error grouping by dimensions")
                .switch()?;
        }
        if let Some(granularity) = granularity {
            granularity
                .set_group_by_clause(&mut query_builder)
                .attach_printable("Error adding granularity")
                .switch()?;
        }
        query_builder
            .execute_query::<PaymentIntentMetricRow, _>(pool)
            .await
            .change_context(MetricsError::QueryBuildingError)?
            .change_context(MetricsError::QueryExecutionFailure)?
            .into_iter()
            .map(|i| {
                Ok((
                    PaymentIntentMetricsBucketIdentifier::new(
                        i.status.as_ref().map(|i| i.0),
                        i.currency.as_ref().map(|i| i.0),
                        i.profile_id.clone(),
                        i.connector.clone(),
                        i.authentication_type.as_ref().map(|i| i.0),
                        i.payment_method.clone(),
                        i.payment_method_type.clone(),
                        i.card_network.clone(),
                        i.merchant_id.clone(),
                        i.card_last_4.clone(),
                        i.card_issuer.clone(),
                        i.error_reason.clone(),
                        TimeRange {
                            // Clip bucket bounds to the granularity window,
                            // defaulting to the caller-supplied range.
                            start_time: match (granularity, i.start_bucket) {
                                (Some(g), Some(st)) => g.clip_to_start(st)?,
                                _ => time_range.start_time,
                            },
                            end_time: granularity.as_ref().map_or_else(
                                || Ok(time_range.end_time),
                                |g| i.end_bucket.map(|et| g.clip_to_end(et)).transpose(),
                            )?,
                        },
                    ),
                    i,
                ))
            })
            .collect::<error_stack::Result<
                HashSet<(PaymentIntentMetricsBucketIdentifier, PaymentIntentMetricRow)>,
                crate::query::PostProcessingError,
            >>()
            .change_context(MetricsError::PostProcessingFailure)
    }
}
| 973 | 1,799 |
hyperswitch | crates/analytics/src/payment_intents/metrics/sessionized_metrics/payment_intent_count.rs | .rs | use std::collections::HashSet;
use api_models::analytics::{
payment_intents::{
PaymentIntentDimensions, PaymentIntentFilters, PaymentIntentMetricsBucketIdentifier,
},
Granularity, TimeRange,
};
use common_utils::errors::ReportSwitchExt;
use error_stack::ResultExt;
use time::PrimitiveDateTime;
use super::PaymentIntentMetricRow;
use crate::{
enums::AuthInfo,
query::{Aggregate, GroupByClause, QueryBuilder, QueryFilter, SeriesBucket, ToSql, Window},
types::{AnalyticsCollection, AnalyticsDataSource, MetricsError, MetricsResult},
};
#[derive(Default)]
pub(crate) struct PaymentIntentCount;

#[async_trait::async_trait]
impl<T> super::PaymentIntentMetric<T> for PaymentIntentCount
where
    T: AnalyticsDataSource + super::PaymentIntentMetricAnalytics,
    PrimitiveDateTime: ToSql<T>,
    AnalyticsCollection: ToSql<T>,
    Granularity: GroupByClause<T>,
    Aggregate<&'static str>: ToSql<T>,
    Window<&'static str>: ToSql<T>,
{
    /// Counts sessionized payment intents per bucket, where a bucket is the
    /// combination of the requested dimensions and (optionally) a
    /// time-series granularity window.
    async fn load_metrics(
        &self,
        dimensions: &[PaymentIntentDimensions],
        auth: &AuthInfo,
        filters: &PaymentIntentFilters,
        granularity: Option<Granularity>,
        time_range: &TimeRange,
        pool: &T,
    ) -> MetricsResult<HashSet<(PaymentIntentMetricsBucketIdentifier, PaymentIntentMetricRow)>>
    {
        // Build the aggregation query over the sessionized payment-intent table.
        let mut builder: QueryBuilder<T> =
            QueryBuilder::new(AnalyticsCollection::PaymentIntentSessionized);

        // Project every requested dimension.
        for dimension in dimensions {
            builder.add_select_column(dimension).switch()?;
        }

        // COUNT(*) plus the time bounds of each bucket.
        builder
            .add_select_column(Aggregate::Count {
                field: None,
                alias: Some("count"),
            })
            .switch()?;
        builder
            .add_select_column(Aggregate::Min {
                field: "created_at",
                alias: Some("start_bucket"),
            })
            .switch()?;
        builder
            .add_select_column(Aggregate::Max {
                field: "created_at",
                alias: Some("end_bucket"),
            })
            .switch()?;

        // Scope the query: caller filters, auth restrictions, and the
        // requested time window.
        filters.set_filter_clause(&mut builder).switch()?;
        auth.set_filter_clause(&mut builder).switch()?;
        time_range
            .set_filter_clause(&mut builder)
            .attach_printable("Error filtering time range")
            .switch()?;

        // Group by each dimension, then by the granularity window if any.
        for dimension in dimensions {
            builder
                .add_group_by_clause(dimension)
                .attach_printable("Error grouping by dimensions")
                .switch()?;
        }
        if let Some(granularity) = granularity {
            granularity
                .set_group_by_clause(&mut builder)
                .attach_printable("Error adding granularity")
                .switch()?;
        }

        // Execute and convert every result row into (bucket identifier, row).
        builder
            .execute_query::<PaymentIntentMetricRow, _>(pool)
            .await
            .change_context(MetricsError::QueryBuildingError)?
            .change_context(MetricsError::QueryExecutionFailure)?
            .into_iter()
            .map(|row| {
                // Clip the bucket's time bounds to the granularity grid when a
                // granularity was supplied; otherwise fall back to the overall
                // requested range.
                let start_time = match (granularity, row.start_bucket) {
                    (Some(g), Some(bucket)) => g.clip_to_start(bucket)?,
                    _ => time_range.start_time,
                };
                let end_time = granularity.as_ref().map_or_else(
                    || Ok(time_range.end_time),
                    |g| row.end_bucket.map(|et| g.clip_to_end(et)).transpose(),
                )?;
                Ok((
                    PaymentIntentMetricsBucketIdentifier::new(
                        row.status.as_ref().map(|s| s.0),
                        row.currency.as_ref().map(|c| c.0),
                        row.profile_id.clone(),
                        row.connector.clone(),
                        row.authentication_type.as_ref().map(|a| a.0),
                        row.payment_method.clone(),
                        row.payment_method_type.clone(),
                        row.card_network.clone(),
                        row.merchant_id.clone(),
                        row.card_last_4.clone(),
                        row.card_issuer.clone(),
                        row.error_reason.clone(),
                        TimeRange {
                            start_time,
                            end_time,
                        },
                    ),
                    row,
                ))
            })
            .collect::<error_stack::Result<
                HashSet<(PaymentIntentMetricsBucketIdentifier, PaymentIntentMetricRow)>,
                crate::query::PostProcessingError,
            >>()
            .change_context(MetricsError::PostProcessingFailure)
    }
}
| 900 | 1,800 |
hyperswitch | crates/analytics/src/payment_intents/metrics/sessionized_metrics/payments_success_rate.rs | .rs | use std::collections::HashSet;
use api_models::analytics::{
payment_intents::{
PaymentIntentDimensions, PaymentIntentFilters, PaymentIntentMetricsBucketIdentifier,
},
Granularity, TimeRange,
};
use common_utils::errors::ReportSwitchExt;
use error_stack::ResultExt;
use time::PrimitiveDateTime;
use super::PaymentIntentMetricRow;
use crate::{
enums::AuthInfo,
query::{Aggregate, GroupByClause, QueryBuilder, QueryFilter, SeriesBucket, ToSql, Window},
types::{AnalyticsCollection, AnalyticsDataSource, MetricsError, MetricsResult},
};
#[derive(Default)]
pub(crate) struct PaymentsSuccessRate;

#[async_trait::async_trait]
impl<T> super::PaymentIntentMetric<T> for PaymentsSuccessRate
where
    T: AnalyticsDataSource + super::PaymentIntentMetricAnalytics,
    PrimitiveDateTime: ToSql<T>,
    AnalyticsCollection: ToSql<T>,
    Granularity: GroupByClause<T>,
    Aggregate<&'static str>: ToSql<T>,
    Window<&'static str>: ToSql<T>,
{
    /// Loads per-bucket intent counts split by intent status and by whether
    /// the intent succeeded on its first attempt; downstream accumulation
    /// turns these counts into the payments success rate.
    async fn load_metrics(
        &self,
        dimensions: &[PaymentIntentDimensions],
        auth: &AuthInfo,
        filters: &PaymentIntentFilters,
        granularity: Option<Granularity>,
        time_range: &TimeRange,
        pool: &T,
    ) -> MetricsResult<HashSet<(PaymentIntentMetricsBucketIdentifier, PaymentIntentMetricRow)>>
    {
        let mut query_builder: QueryBuilder<T> =
            QueryBuilder::new(AnalyticsCollection::PaymentIntentSessionized);
        // The success rate is always split by intent status, so append that
        // dimension to whatever the caller requested.
        let mut dimensions = dimensions.to_vec();
        dimensions.push(PaymentIntentDimensions::PaymentIntentStatus);
        for dim in dimensions.iter() {
            query_builder.add_select_column(dim).switch()?;
        }
        query_builder
            .add_select_column(Aggregate::Count {
                field: None,
                alias: Some("count"),
            })
            .switch()?;
        // Flag first-attempt rows. Pass the static column expression directly
        // (no `String` allocation) for consistency with the sibling
        // sessionized metrics (PaymentsDistribution, SmartRetriedAmount).
        query_builder
            .add_select_column("(attempt_count = 1) as first_attempt")
            .switch()?;
        query_builder
            .add_select_column(Aggregate::Min {
                field: "created_at",
                alias: Some("start_bucket"),
            })
            .switch()?;
        query_builder
            .add_select_column(Aggregate::Max {
                field: "created_at",
                alias: Some("end_bucket"),
            })
            .switch()?;
        // Scope the query: caller filters, auth restrictions, time window.
        filters.set_filter_clause(&mut query_builder).switch()?;
        auth.set_filter_clause(&mut query_builder).switch()?;
        time_range
            .set_filter_clause(&mut query_builder)
            .attach_printable("Error filtering time range")
            .switch()?;
        for dim in dimensions.iter() {
            query_builder
                .add_group_by_clause(dim)
                .attach_printable("Error grouping by dimensions")
                .switch()?;
        }
        query_builder
            .add_group_by_clause("first_attempt")
            .attach_printable("Error grouping by first_attempt")
            .switch()?;
        if let Some(granularity) = granularity {
            granularity
                .set_group_by_clause(&mut query_builder)
                .attach_printable("Error adding granularity")
                .switch()?;
        }
        query_builder
            .execute_query::<PaymentIntentMetricRow, _>(pool)
            .await
            .change_context(MetricsError::QueryBuildingError)?
            .change_context(MetricsError::QueryExecutionFailure)?
            .into_iter()
            .map(|i| {
                Ok((
                    PaymentIntentMetricsBucketIdentifier::new(
                        // Status is passed as None here (unlike PaymentIntentCount),
                        // presumably so rows with different statuses collapse into
                        // one bucket for rate computation — confirm with callers.
                        None,
                        i.currency.as_ref().map(|i| i.0),
                        i.profile_id.clone(),
                        i.connector.clone(),
                        i.authentication_type.as_ref().map(|i| i.0),
                        i.payment_method.clone(),
                        i.payment_method_type.clone(),
                        i.card_network.clone(),
                        i.merchant_id.clone(),
                        i.card_last_4.clone(),
                        i.card_issuer.clone(),
                        i.error_reason.clone(),
                        TimeRange {
                            // Clip bucket bounds to the granularity grid, or use
                            // the overall requested range when none was given.
                            start_time: match (granularity, i.start_bucket) {
                                (Some(g), Some(st)) => g.clip_to_start(st)?,
                                _ => time_range.start_time,
                            },
                            end_time: granularity.as_ref().map_or_else(
                                || Ok(time_range.end_time),
                                |g| i.end_bucket.map(|et| g.clip_to_end(et)).transpose(),
                            )?,
                        },
                    ),
                    i,
                ))
            })
            .collect::<error_stack::Result<
                HashSet<(PaymentIntentMetricsBucketIdentifier, PaymentIntentMetricRow)>,
                crate::query::PostProcessingError,
            >>()
            .change_context(MetricsError::PostProcessingFailure)
    }
}
| 970 | 1,801 |
hyperswitch | crates/analytics/src/payment_intents/metrics/sessionized_metrics/payments_distribution.rs | .rs | use std::collections::HashSet;
use api_models::analytics::{
payment_intents::{
PaymentIntentDimensions, PaymentIntentFilters, PaymentIntentMetricsBucketIdentifier,
},
Granularity, TimeRange,
};
use common_utils::errors::ReportSwitchExt;
use error_stack::ResultExt;
use time::PrimitiveDateTime;
use super::PaymentIntentMetricRow;
use crate::{
enums::AuthInfo,
query::{Aggregate, GroupByClause, QueryBuilder, QueryFilter, SeriesBucket, ToSql, Window},
types::{AnalyticsCollection, AnalyticsDataSource, MetricsError, MetricsResult},
};
#[derive(Default)]
pub(crate) struct PaymentsDistribution;

#[async_trait::async_trait]
impl<T> super::PaymentIntentMetric<T> for PaymentsDistribution
where
    T: AnalyticsDataSource + super::PaymentIntentMetricAnalytics,
    PrimitiveDateTime: ToSql<T>,
    AnalyticsCollection: ToSql<T>,
    Granularity: GroupByClause<T>,
    Aggregate<&'static str>: ToSql<T>,
    Window<&'static str>: ToSql<T>,
{
    /// Loads per-bucket intent counts split by intent status and by whether
    /// the intent succeeded on its first attempt, from the sessionized
    /// payment-intent table.
    async fn load_metrics(
        &self,
        dimensions: &[PaymentIntentDimensions],
        auth: &AuthInfo,
        filters: &PaymentIntentFilters,
        granularity: Option<Granularity>,
        time_range: &TimeRange,
        pool: &T,
    ) -> MetricsResult<HashSet<(PaymentIntentMetricsBucketIdentifier, PaymentIntentMetricRow)>>
    {
        let mut query_builder: QueryBuilder<T> =
            QueryBuilder::new(AnalyticsCollection::PaymentIntentSessionized);
        // The distribution is always split by intent status, so append that
        // dimension to whatever the caller requested.
        let mut dimensions = dimensions.to_vec();
        dimensions.push(PaymentIntentDimensions::PaymentIntentStatus);
        for dim in dimensions.iter() {
            query_builder.add_select_column(dim).switch()?;
        }
        query_builder
            .add_select_column(Aggregate::Count {
                field: None,
                alias: Some("count"),
            })
            .switch()?;
        // Flag rows where the payment went through on the first attempt.
        query_builder
            .add_select_column("(attempt_count = 1) as first_attempt")
            .switch()?;
        // Time bounds of each bucket.
        query_builder
            .add_select_column(Aggregate::Min {
                field: "created_at",
                alias: Some("start_bucket"),
            })
            .switch()?;
        query_builder
            .add_select_column(Aggregate::Max {
                field: "created_at",
                alias: Some("end_bucket"),
            })
            .switch()?;
        // Scope the query: caller filters, auth restrictions, time window.
        filters.set_filter_clause(&mut query_builder).switch()?;
        auth.set_filter_clause(&mut query_builder).switch()?;
        time_range
            .set_filter_clause(&mut query_builder)
            .attach_printable("Error filtering time range")
            .switch()?;
        // Group by each dimension (including the appended status), the
        // first-attempt flag, and the optional granularity window.
        for dim in dimensions.iter() {
            query_builder
                .add_group_by_clause(dim)
                .attach_printable("Error grouping by dimensions")
                .switch()?;
        }
        query_builder
            .add_group_by_clause("first_attempt")
            .attach_printable("Error grouping by first_attempt")
            .switch()?;
        if let Some(granularity) = granularity {
            granularity
                .set_group_by_clause(&mut query_builder)
                .attach_printable("Error adding granularity")
                .switch()?;
        }
        query_builder
            .execute_query::<PaymentIntentMetricRow, _>(pool)
            .await
            .change_context(MetricsError::QueryBuildingError)?
            .change_context(MetricsError::QueryExecutionFailure)?
            .into_iter()
            .map(|i| {
                Ok((
                    PaymentIntentMetricsBucketIdentifier::new(
                        // Status is passed as None (it is only used for grouping
                        // above), presumably so differing statuses share a bucket
                        // id — confirm with the accumulator.
                        None,
                        i.currency.as_ref().map(|i| i.0),
                        i.profile_id.clone(),
                        i.connector.clone(),
                        i.authentication_type.as_ref().map(|i| i.0),
                        i.payment_method.clone(),
                        i.payment_method_type.clone(),
                        i.card_network.clone(),
                        i.merchant_id.clone(),
                        i.card_last_4.clone(),
                        i.card_issuer.clone(),
                        i.error_reason.clone(),
                        TimeRange {
                            // Clip bucket bounds to the granularity grid, or use
                            // the overall requested range when none was given.
                            start_time: match (granularity, i.start_bucket) {
                                (Some(g), Some(st)) => g.clip_to_start(st)?,
                                _ => time_range.start_time,
                            },
                            end_time: granularity.as_ref().map_or_else(
                                || Ok(time_range.end_time),
                                |g| i.end_bucket.map(|et| g.clip_to_end(et)).transpose(),
                            )?,
                        },
                    ),
                    i,
                ))
            })
            .collect::<error_stack::Result<
                HashSet<(PaymentIntentMetricsBucketIdentifier, PaymentIntentMetricRow)>,
                crate::query::PostProcessingError,
            >>()
            .change_context(MetricsError::PostProcessingFailure)
    }
}
| 964 | 1,802 |
hyperswitch | crates/analytics/src/payment_intents/metrics/sessionized_metrics/smart_retried_amount.rs | .rs | use std::collections::HashSet;
use api_models::{
analytics::{
payment_intents::{
PaymentIntentDimensions, PaymentIntentFilters, PaymentIntentMetricsBucketIdentifier,
},
Granularity, TimeRange,
},
enums::IntentStatus,
};
use common_utils::errors::ReportSwitchExt;
use error_stack::ResultExt;
use time::PrimitiveDateTime;
use super::PaymentIntentMetricRow;
use crate::{
enums::AuthInfo,
query::{
Aggregate, FilterTypes, GroupByClause, QueryBuilder, QueryFilter, SeriesBucket, ToSql,
Window,
},
types::{AnalyticsCollection, AnalyticsDataSource, MetricsError, MetricsResult},
};
#[derive(Default)]
pub(crate) struct SmartRetriedAmount;

#[async_trait::async_trait]
impl<T> super::PaymentIntentMetric<T> for SmartRetriedAmount
where
    T: AnalyticsDataSource + super::PaymentIntentMetricAnalytics,
    PrimitiveDateTime: ToSql<T>,
    AnalyticsCollection: ToSql<T>,
    Granularity: GroupByClause<T>,
    Aggregate<&'static str>: ToSql<T>,
    Window<&'static str>: ToSql<T>,
{
    /// Sums the amounts of sessionized payment intents that succeeded after
    /// more than one attempt (i.e. were recovered via retries), bucketed by
    /// the requested dimensions, currency, and optional granularity.
    async fn load_metrics(
        &self,
        dimensions: &[PaymentIntentDimensions],
        auth: &AuthInfo,
        filters: &PaymentIntentFilters,
        granularity: Option<Granularity>,
        time_range: &TimeRange,
        pool: &T,
    ) -> MetricsResult<HashSet<(PaymentIntentMetricsBucketIdentifier, PaymentIntentMetricRow)>>
    {
        let mut query_builder: QueryBuilder<T> =
            QueryBuilder::new(AnalyticsCollection::PaymentIntentSessionized);
        for dim in dimensions.iter() {
            query_builder.add_select_column(dim).switch()?;
        }
        // SUM(amount) is the metric value for each bucket.
        query_builder
            .add_select_column(Aggregate::Sum {
                field: "amount",
                alias: Some("total"),
            })
            .switch()?;
        // NOTE(review): under the `attempt_count > 1` filter below this flag is
        // always false; presumably kept for row-shape parity with the other
        // sessionized metrics — confirm.
        query_builder
            .add_select_column("(attempt_count = 1) as first_attempt")
            .switch()?;
        // Currency is selected (and grouped on) so sums are never mixed-currency.
        query_builder.add_select_column("currency").switch()?;
        // Time bounds of each bucket.
        query_builder
            .add_select_column(Aggregate::Min {
                field: "created_at",
                alias: Some("start_bucket"),
            })
            .switch()?;
        query_builder
            .add_select_column(Aggregate::Max {
                field: "created_at",
                alias: Some("end_bucket"),
            })
            .switch()?;
        // Scope the query: caller filters, auth restrictions, then restrict to
        // retried (attempt_count > 1) intents that ultimately succeeded.
        filters.set_filter_clause(&mut query_builder).switch()?;
        auth.set_filter_clause(&mut query_builder).switch()?;
        query_builder
            .add_custom_filter_clause("attempt_count", "1", FilterTypes::Gt)
            .switch()?;
        query_builder
            .add_custom_filter_clause("status", IntentStatus::Succeeded, FilterTypes::Equal)
            .switch()?;
        time_range
            .set_filter_clause(&mut query_builder)
            .attach_printable("Error filtering time range")
            .switch()?;
        // Group by each dimension, the first-attempt flag, currency, and the
        // optional granularity window.
        for dim in dimensions.iter() {
            query_builder
                .add_group_by_clause(dim)
                .attach_printable("Error grouping by dimensions")
                .switch()?;
        }
        query_builder
            .add_group_by_clause("first_attempt")
            .attach_printable("Error grouping by first_attempt")
            .switch()?;
        query_builder
            .add_group_by_clause("currency")
            .attach_printable("Error grouping by currency")
            .switch()?;
        if let Some(granularity) = granularity {
            granularity
                .set_group_by_clause(&mut query_builder)
                .attach_printable("Error adding granularity")
                .switch()?;
        }
        query_builder
            .execute_query::<PaymentIntentMetricRow, _>(pool)
            .await
            .change_context(MetricsError::QueryBuildingError)?
            .change_context(MetricsError::QueryExecutionFailure)?
            .into_iter()
            .map(|i| {
                Ok((
                    PaymentIntentMetricsBucketIdentifier::new(
                        i.status.as_ref().map(|i| i.0),
                        i.currency.as_ref().map(|i| i.0),
                        i.profile_id.clone(),
                        i.connector.clone(),
                        i.authentication_type.as_ref().map(|i| i.0),
                        i.payment_method.clone(),
                        i.payment_method_type.clone(),
                        i.card_network.clone(),
                        i.merchant_id.clone(),
                        i.card_last_4.clone(),
                        i.card_issuer.clone(),
                        i.error_reason.clone(),
                        TimeRange {
                            // Clip bucket bounds to the granularity grid, or use
                            // the overall requested range when none was given.
                            start_time: match (granularity, i.start_bucket) {
                                (Some(g), Some(st)) => g.clip_to_start(st)?,
                                _ => time_range.start_time,
                            },
                            end_time: granularity.as_ref().map_or_else(
                                || Ok(time_range.end_time),
                                |g| i.end_bucket.map(|et| g.clip_to_end(et)).transpose(),
                            )?,
                        },
                    ),
                    i,
                ))
            })
            .collect::<error_stack::Result<
                HashSet<(PaymentIntentMetricsBucketIdentifier, PaymentIntentMetricRow)>,
                crate::query::PostProcessingError,
            >>()
            .change_context(MetricsError::PostProcessingFailure)
    }
}
| 1,069 | 1,803 |
hyperswitch | crates/analytics/src/payment_intents/metrics/sessionized_metrics/total_smart_retries.rs | .rs | use std::collections::HashSet;
use api_models::analytics::{
payment_intents::{
PaymentIntentDimensions, PaymentIntentFilters, PaymentIntentMetricsBucketIdentifier,
},
Granularity, TimeRange,
};
use common_utils::errors::ReportSwitchExt;
use error_stack::ResultExt;
use time::PrimitiveDateTime;
use super::PaymentIntentMetricRow;
use crate::{
enums::AuthInfo,
query::{
Aggregate, FilterTypes, GroupByClause, QueryBuilder, QueryFilter, SeriesBucket, ToSql,
Window,
},
types::{AnalyticsCollection, AnalyticsDataSource, MetricsError, MetricsResult},
};
#[derive(Default)]
pub(crate) struct TotalSmartRetries;
#[async_trait::async_trait]
impl<T> super::PaymentIntentMetric<T> for TotalSmartRetries
where
T: AnalyticsDataSource + super::PaymentIntentMetricAnalytics,
PrimitiveDateTime: ToSql<T>,
AnalyticsCollection: ToSql<T>,
Granularity: GroupByClause<T>,
Aggregate<&'static str>: ToSql<T>,
Window<&'static str>: ToSql<T>,
{
async fn load_metrics(
&self,
dimensions: &[PaymentIntentDimensions],
auth: &AuthInfo,
filters: &PaymentIntentFilters,
granularity: Option<Granularity>,
time_range: &TimeRange,
pool: &T,
) -> MetricsResult<HashSet<(PaymentIntentMetricsBucketIdentifier, PaymentIntentMetricRow)>>
{
let mut query_builder: QueryBuilder<T> =
QueryBuilder::new(AnalyticsCollection::PaymentIntentSessionized);
for dim in dimensions.iter() {
query_builder.add_select_column(dim).switch()?;
}
query_builder
.add_select_column(Aggregate::Count {
field: None,
alias: Some("count"),
})
.switch()?;
query_builder
.add_select_column(Aggregate::Min {
field: "created_at",
alias: Some("start_bucket"),
})
.switch()?;
query_builder
.add_select_column(Aggregate::Max {
field: "created_at",
alias: Some("end_bucket"),
})
.switch()?;
filters.set_filter_clause(&mut query_builder).switch()?;
auth.set_filter_clause(&mut query_builder).switch()?;
query_builder
.add_custom_filter_clause("attempt_count", "1", FilterTypes::Gt)
.switch()?;
time_range
.set_filter_clause(&mut query_builder)
.attach_printable("Error filtering time range")
.switch()?;
for dim in dimensions.iter() {
query_builder
.add_group_by_clause(dim)
.attach_printable("Error grouping by dimensions")
.switch()?;
}
if let Some(granularity) = granularity {
granularity
.set_group_by_clause(&mut query_builder)
.attach_printable("Error adding granularity")
.switch()?;
}
query_builder
.execute_query::<PaymentIntentMetricRow, _>(pool)
.await
.change_context(MetricsError::QueryBuildingError)?
.change_context(MetricsError::QueryExecutionFailure)?
.into_iter()
.map(|i| {
Ok((
PaymentIntentMetricsBucketIdentifier::new(
i.status.as_ref().map(|i| i.0),
i.currency.as_ref().map(|i| i.0),
i.profile_id.clone(),
i.connector.clone(),
i.authentication_type.as_ref().map(|i| i.0),
i.payment_method.clone(),
i.payment_method_type.clone(),
i.card_network.clone(),
i.merchant_id.clone(),
i.card_last_4.clone(),
i.card_issuer.clone(),
i.error_reason.clone(),
TimeRange {
start_time: match (granularity, i.start_bucket) {
(Some(g), Some(st)) => g.clip_to_start(st)?,
_ => time_range.start_time,
},
end_time: granularity.as_ref().map_or_else(
|| Ok(time_range.end_time),
|g| i.end_bucket.map(|et| g.clip_to_end(et)).transpose(),
)?,
},
),
i,
))
})
.collect::<error_stack::Result<
HashSet<(PaymentIntentMetricsBucketIdentifier, PaymentIntentMetricRow)>,
crate::query::PostProcessingError,
>>()
.change_context(MetricsError::PostProcessingFailure)
}
}
| 936 | 1,804 |
hyperswitch | crates/analytics/src/refunds/core.rs | .rs | #![allow(dead_code)]
use std::collections::{HashMap, HashSet};
use api_models::analytics::{
refunds::{
RefundDimensions, RefundDistributions, RefundMetrics, RefundMetricsBucketIdentifier,
RefundMetricsBucketResponse,
},
GetRefundFilterRequest, GetRefundMetricRequest, RefundFilterValue, RefundFiltersResponse,
RefundsAnalyticsMetadata, RefundsMetricsResponse,
};
use bigdecimal::ToPrimitive;
use common_enums::Currency;
use common_utils::errors::CustomResult;
use currency_conversion::{conversion::convert, types::ExchangeRates};
use error_stack::ResultExt;
use router_env::{
logger,
tracing::{self, Instrument},
};
use super::{
distribution::RefundDistributionRow,
filters::{get_refund_filter_for_dimension, RefundFilterRow},
metrics::RefundMetricRow,
RefundMetricsAccumulator,
};
use crate::{
enums::AuthInfo,
errors::{AnalyticsError, AnalyticsResult},
metrics,
refunds::{accumulator::RefundDistributionAccumulator, RefundMetricAccumulator},
AnalyticsProvider,
};
/// Result payload of one spawned analytics query task: either a metric query
/// or a distribution query, paired with the outcome of its fetch.
#[derive(Debug)]
pub enum TaskType {
    /// Which refund metric was queried, and the (bucket id, metric row) pairs
    /// it produced.
    MetricTask(
        RefundMetrics,
        CustomResult<HashSet<(RefundMetricsBucketIdentifier, RefundMetricRow)>, AnalyticsError>,
    ),
    /// Which refund distribution was queried, and the (bucket id,
    /// distribution row) pairs it produced, as a `Vec`.
    DistributionTask(
        RefundDistributions,
        CustomResult<Vec<(RefundMetricsBucketIdentifier, RefundDistributionRow)>, AnalyticsError>,
    ),
}
/// Fans out one concurrent query per requested refund metric (plus an
/// optional distribution query), merges all per-bucket results into a single
/// accumulator, and folds them into the final `RefundsMetricsResponse`
/// including overall totals and the aggregate success rate.
pub async fn get_metrics(
    pool: &AnalyticsProvider,
    ex_rates: &Option<ExchangeRates>,
    auth: &AuthInfo,
    req: GetRefundMetricRequest,
) -> AnalyticsResult<RefundsMetricsResponse<RefundMetricsBucketResponse>> {
    // One accumulator entry per bucket identifier; each finished task merges
    // its rows into the matching entry.
    let mut metrics_accumulator: HashMap<RefundMetricsBucketIdentifier, RefundMetricsAccumulator> =
        HashMap::new();
    let mut set = tokio::task::JoinSet::new();
    // Spawn one task per metric so the backend queries run concurrently.
    for metric_type in req.metrics.iter().cloned() {
        let req = req.clone();
        let pool = pool.clone();
        let task_span = tracing::debug_span!(
            "analytics_refund_query",
            refund_metric = metric_type.as_ref()
        );
        // Currently JoinSet works with only static lifetime references even if the task pool does not outlive the given reference
        // We can optimize away this clone once that is fixed
        let auth_scoped = auth.to_owned();
        set.spawn(
            async move {
                let data = pool
                    .get_refund_metrics(
                        &metric_type,
                        &req.group_by_names.clone(),
                        &auth_scoped,
                        &req.filters,
                        req.time_series.map(|t| t.granularity),
                        &req.time_range,
                    )
                    .await
                    .change_context(AnalyticsError::UnknownError);
                TaskType::MetricTask(metric_type, data)
            }
            .instrument(task_span),
        );
    }
    // Spawn the (single, optional) distribution query alongside the metrics.
    if let Some(distribution) = req.clone().distribution {
        let req = req.clone();
        let pool = pool.clone();
        let task_span = tracing::debug_span!(
            "analytics_refunds_distribution_query",
            refund_distribution = distribution.distribution_for.as_ref()
        );
        let auth_scoped = auth.to_owned();
        set.spawn(
            async move {
                let data = pool
                    .get_refund_distribution(
                        &distribution,
                        &req.group_by_names.clone(),
                        &auth_scoped,
                        &req.filters,
                        &req.time_series.map(|t| t.granularity),
                        &req.time_range,
                    )
                    .await
                    .change_context(AnalyticsError::UnknownError);
                TaskType::DistributionTask(distribution.distribution_for, data)
            }
            .instrument(task_span),
        );
    }
    // Drain tasks as they complete and merge their rows into the accumulator.
    while let Some(task_type) = set
        .join_next()
        .await
        .transpose()
        .change_context(AnalyticsError::UnknownError)?
    {
        match task_type {
            TaskType::MetricTask(metric, data) => {
                let data = data?;
                let attributes = router_env::metric_attributes!(
                    ("metric_type", metric.to_string()),
                    ("source", pool.to_string()),
                );
                // Record how many buckets this metric query returned.
                let value = u64::try_from(data.len());
                if let Ok(val) = value {
                    metrics::BUCKETS_FETCHED.record(val, attributes);
                    logger::debug!("Attributes: {:?}, Buckets fetched: {}", attributes, val);
                }
                for (id, value) in data {
                    logger::debug!(bucket_id=?id, bucket_value=?value, "Bucket row for metric {metric}");
                    let metrics_builder = metrics_accumulator.entry(id).or_default();
                    // Route the row to the accumulator field matching the
                    // metric; sessionized and non-sessionized variants share
                    // the same accumulator.
                    match metric {
                        RefundMetrics::RefundSuccessRate
                        | RefundMetrics::SessionizedRefundSuccessRate => metrics_builder
                            .refund_success_rate
                            .add_metrics_bucket(&value),
                        RefundMetrics::RefundCount | RefundMetrics::SessionizedRefundCount => {
                            metrics_builder.refund_count.add_metrics_bucket(&value)
                        }
                        RefundMetrics::RefundSuccessCount
                        | RefundMetrics::SessionizedRefundSuccessCount => {
                            metrics_builder.refund_success.add_metrics_bucket(&value)
                        }
                        RefundMetrics::RefundProcessedAmount
                        | RefundMetrics::SessionizedRefundProcessedAmount => {
                            metrics_builder.processed_amount.add_metrics_bucket(&value)
                        }
                        RefundMetrics::SessionizedRefundReason => {
                            metrics_builder.refund_reason.add_metrics_bucket(&value)
                        }
                        RefundMetrics::SessionizedRefundErrorMessage => metrics_builder
                            .refund_error_message
                            .add_metrics_bucket(&value),
                    }
                }
                logger::debug!(
                    "Analytics Accumulated Results: metric: {}, results: {:#?}",
                    metric,
                    metrics_accumulator
                );
            }
            TaskType::DistributionTask(distribution, data) => {
                let data = data?;
                let attributes = router_env::metric_attributes!(
                    ("distribution_type", distribution.to_string()),
                    ("source", pool.to_string()),
                );
                let value = u64::try_from(data.len());
                if let Ok(val) = value {
                    metrics::BUCKETS_FETCHED.record(val, attributes);
                    logger::debug!("Attributes: {:?}, Buckets fetched: {}", attributes, val);
                }
                for (id, value) in data {
                    logger::debug!(bucket_id=?id, bucket_value=?value, "Bucket row for distribution {distribution}");
                    let metrics_builder = metrics_accumulator.entry(id).or_default();
                    match distribution {
                        RefundDistributions::SessionizedRefundReason => metrics_builder
                            .refund_reason_distribution
                            .add_distribution_bucket(&value),
                        RefundDistributions::SessionizedRefundErrorMessage => metrics_builder
                            .refund_error_message_distribution
                            .add_distribution_bucket(&value),
                    }
                }
                logger::debug!(
                    "Analytics Accumulated Results: distribution: {}, results: {:#?}",
                    distribution,
                    metrics_accumulator
                );
            }
        }
    }
    // Fold the per-bucket accumulators into response rows while summing
    // file-level totals along the way.
    let mut success = 0;
    let mut total = 0;
    let mut total_refund_processed_amount = 0;
    let mut total_refund_processed_amount_in_usd = 0;
    let mut total_refund_processed_count = 0;
    let mut total_refund_reason_count = 0;
    let mut total_refund_error_message_count = 0;
    let query_data: Vec<RefundMetricsBucketResponse> = metrics_accumulator
        .into_iter()
        .map(|(id, val)| {
            let mut collected_values = val.collect();
            if let Some(success_count) = collected_values.successful_refunds {
                success += success_count;
            }
            if let Some(total_count) = collected_values.total_refunds {
                total += total_count;
            }
            if let Some(amount) = collected_values.refund_processed_amount {
                // Convert the bucket amount to USD when exchange rates are
                // available; conversion failures are logged and treated as
                // "no USD value" rather than failing the whole response.
                let amount_in_usd = if let Some(ex_rates) = ex_rates {
                    id.currency
                        .and_then(|currency| {
                            i64::try_from(amount)
                                .inspect_err(|e| logger::error!("Amount conversion error: {:?}", e))
                                .ok()
                                .and_then(|amount_i64| {
                                    convert(ex_rates, currency, Currency::USD, amount_i64)
                                        .inspect_err(|e| {
                                            logger::error!("Currency conversion error: {:?}", e)
                                        })
                                        .ok()
                                })
                        })
                        // Scale by 100 before truncating to u64 — presumably
                        // to express USD in minor units (cents); confirm.
                        .map(|amount| (amount * rust_decimal::Decimal::new(100, 0)).to_u64())
                        .unwrap_or_default()
                } else {
                    None
                };
                collected_values.refund_processed_amount_in_usd = amount_in_usd;
                total_refund_processed_amount += amount;
                total_refund_processed_amount_in_usd += amount_in_usd.unwrap_or(0);
            }
            if let Some(count) = collected_values.refund_processed_count {
                total_refund_processed_count += count;
            }
            if let Some(total_count) = collected_values.refund_reason_count {
                total_refund_reason_count += total_count;
            }
            if let Some(total_count) = collected_values.refund_error_message_count {
                total_refund_error_message_count += total_count;
            }
            RefundMetricsBucketResponse {
                values: collected_values,
                dimensions: id,
            }
        })
        .collect();
    // Overall success rate as a percentage; None when there were no refunds.
    let total_refund_success_rate = match (success, total) {
        (s, t) if t > 0 => Some(f64::from(s) * 100.0 / f64::from(t)),
        _ => None,
    };
    Ok(RefundsMetricsResponse {
        query_data,
        meta_data: [RefundsAnalyticsMetadata {
            total_refund_success_rate,
            total_refund_processed_amount: Some(total_refund_processed_amount),
            // Only report a USD total when exchange rates were supplied.
            total_refund_processed_amount_in_usd: if ex_rates.is_some() {
                Some(total_refund_processed_amount_in_usd)
            } else {
                None
            },
            total_refund_processed_count: Some(total_refund_processed_count),
            total_refund_reason_count: Some(total_refund_reason_count),
            total_refund_error_message_count: Some(total_refund_error_message_count),
        }],
    })
}
/// Returns, for each requested refund dimension, the distinct values seen in
/// the given time range (e.g. for populating filter dropdowns).
///
/// For the combined providers, both stores are queried and any mismatch is
/// logged; `CombinedCkh` then returns the ClickHouse result while
/// `CombinedSqlx` returns the Postgres result.
pub async fn get_filters(
    pool: &AnalyticsProvider,
    req: GetRefundFilterRequest,
    auth: &AuthInfo,
) -> AnalyticsResult<RefundFiltersResponse> {
    let mut res = RefundFiltersResponse::default();
    for dim in req.group_by_names {
        let values = match pool {
            AnalyticsProvider::Sqlx(pool) => {
                get_refund_filter_for_dimension(dim, auth, &req.time_range, pool)
                    .await
            }
            AnalyticsProvider::Clickhouse(pool) => {
                get_refund_filter_for_dimension(dim, auth, &req.time_range, pool)
                    .await
            }
            AnalyticsProvider::CombinedCkh(sqlx_pool, ckh_pool) => {
                let ckh_result = get_refund_filter_for_dimension(
                    dim,
                    auth,
                    &req.time_range,
                    ckh_pool,
                )
                .await;
                let sqlx_result = get_refund_filter_for_dimension(
                    dim,
                    auth,
                    &req.time_range,
                    sqlx_pool,
                )
                .await;
                // Cross-check the two stores; a mismatch is logged, not fatal.
                match (&sqlx_result, &ckh_result) {
                    (Ok(ref sqlx_res), Ok(ref ckh_res)) if sqlx_res != ckh_res => {
                        router_env::logger::error!(clickhouse_result=?ckh_res, postgres_result=?sqlx_res, "Mismatch between clickhouse & postgres refunds analytics filters")
                    },
                    _ => {}
                };
                ckh_result
            }
            AnalyticsProvider::CombinedSqlx(sqlx_pool, ckh_pool) => {
                let ckh_result = get_refund_filter_for_dimension(
                    dim,
                    auth,
                    &req.time_range,
                    ckh_pool,
                )
                .await;
                let sqlx_result = get_refund_filter_for_dimension(
                    dim,
                    auth,
                    &req.time_range,
                    sqlx_pool,
                )
                .await;
                // Cross-check the two stores; a mismatch is logged, not fatal.
                match (&sqlx_result, &ckh_result) {
                    (Ok(ref sqlx_res), Ok(ref ckh_res)) if sqlx_res != ckh_res => {
                        router_env::logger::error!(clickhouse_result=?ckh_res, postgres_result=?sqlx_res, "Mismatch between clickhouse & postgres refunds analytics filters")
                    },
                    _ => {}
                };
                sqlx_result
            }
        }
        .change_context(AnalyticsError::UnknownError)?
        .into_iter()
        // Project the row field that corresponds to the dimension; rows with
        // a NULL value for that field are dropped.
        .filter_map(|fil: RefundFilterRow| match dim {
            RefundDimensions::Currency => fil.currency.map(|i| i.as_ref().to_string()),
            RefundDimensions::RefundStatus => fil.refund_status.map(|i| i.as_ref().to_string()),
            RefundDimensions::Connector => fil.connector,
            RefundDimensions::RefundType => fil.refund_type.map(|i| i.as_ref().to_string()),
            RefundDimensions::ProfileId => fil.profile_id,
            RefundDimensions::RefundReason => fil.refund_reason,
            RefundDimensions::RefundErrorMessage => fil.refund_error_message,
        })
        .collect::<Vec<String>>();
        res.query_data.push(RefundFilterValue {
            dimension: dim,
            values,
        })
    }
    Ok(res)
}
| 2,833 | 1,805 |
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.