text
stringlengths
81
477k
file_path
stringlengths
22
92
module
stringlengths
13
87
token_count
int64
24
94.8k
has_source_code
bool
1 class
// File: crates/external_services/src/crm.rs
// Module: external_services::src::crm

use std::sync::Arc;

use common_utils::{
    errors::CustomResult,
    ext_traits::ConfigExt,
    request::{Method, Request, RequestBuilder, RequestContent},
};
use error_stack::ResultExt;
use http::header;
use hyperswitch_interfaces::{
    crm::{CrmInterface, CrmPayload},
    errors::HttpClientError,
    types::Proxy,
};
use reqwest;
use router_env::logger;

use crate::{http_client, hubspot_proxy::HubspotRequest};

/// Hubspot Crm configuration
#[derive(Debug, Clone, serde::Deserialize)]
pub struct HubspotProxyConfig {
    /// The ID of the Hubspot form to be submitted.
    pub form_id: String,
    /// The URL to which the Hubspot form data will be sent.
    pub request_url: String,
}

impl HubspotProxyConfig {
    /// Validates Hubspot configuration
    ///
    /// Both `request_url` and `form_id` must be non-empty (and non-default);
    /// the first failing check short-circuits via `?`.
    pub(super) fn validate(&self) -> Result<(), InvalidCrmConfig> {
        use common_utils::fp_utils::when;

        when(self.request_url.is_default_or_empty(), || {
            Err(InvalidCrmConfig("request url must not be empty"))
        })?;

        when(self.form_id.is_default_or_empty(), || {
            Err(InvalidCrmConfig("form_id must not be empty"))
        })
    }
}

/// Error thrown when the crm config is invalid
#[derive(Debug, Clone)]
pub struct InvalidCrmConfig(pub &'static str);

impl std::error::Error for InvalidCrmConfig {}

impl std::fmt::Display for InvalidCrmConfig {
    fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
        write!(f, "crm: {}", self.0)
    }
}

#[derive(Debug, Clone, Copy)]
/// NoCrm struct
///
/// Placeholder CRM client used when no CRM backend is configured; its
/// `CrmInterface` impl below builds empty requests and refuses to send.
pub struct NoCrm;

/// Enum representing different Crm configurations
// Deserialized from config with the `crm_manager` field selecting the variant.
#[derive(Debug, Clone, Default, serde::Deserialize)]
#[serde(tag = "crm_manager")]
#[serde(rename_all = "snake_case")]
pub enum CrmManagerConfig {
    /// Hubspot Crm configuration
    HubspotProxy {
        /// Hubspot Crm configuration
        hubspot_proxy: HubspotProxyConfig,
    },

    /// No Crm configuration
    #[default]
    NoCrm,
}

impl CrmManagerConfig {
    /// Verifies that the client configuration is usable
    pub fn validate(&self) -> Result<(), InvalidCrmConfig> {
        match self {
            Self::HubspotProxy { hubspot_proxy } => hubspot_proxy.validate(),
            Self::NoCrm => Ok(()),
        }
    }

    /// Retrieves the appropriate Crm client based on the configuration.
    pub async fn get_crm_client(&self) -> Arc<dyn CrmInterface> {
        match self {
            Self::HubspotProxy { hubspot_proxy } => Arc::new(hubspot_proxy.clone()),
            Self::NoCrm => Arc::new(NoCrm),
        }
    }
}

#[async_trait::async_trait]
impl CrmInterface for NoCrm {
    // No CRM: an empty JSON body is returned as a placeholder.
    async fn make_body(&self, _details: CrmPayload) -> RequestContent {
        RequestContent::Json(Box::new(()))
    }

    // No CRM: an empty default request is returned; it is never sent.
    async fn make_request(&self, _body: RequestContent, _origin_base_url: String) -> Request {
        RequestBuilder::default().build()
    }

    // Always errors: sending is not possible without a configured CRM.
    async fn send_request(
        &self,
        _proxy: &Proxy,
        _request: Request,
    ) -> CustomResult<reqwest::Response, HttpClientError> {
        logger::info!("No CRM configured!");
        Err(HttpClientError::UnexpectedState).attach_printable("No CRM configured!")
    }
}

#[async_trait::async_trait]
impl CrmInterface for HubspotProxyConfig {
    // Builds the Hubspot form submission as URL-encoded form data; any missing
    // payload fields fall back to their `Default` value (empty string).
    async fn make_body(&self, details: CrmPayload) -> RequestContent {
        RequestContent::FormUrlEncoded(Box::new(HubspotRequest::new(
            details.business_country_name.unwrap_or_default(),
            self.form_id.clone(),
            details.poc_name.unwrap_or_default(),
            details.poc_email.clone().unwrap_or_default(),
            details.legal_business_name.unwrap_or_default(),
            details.business_website.unwrap_or_default(),
        )))
    }

    // POSTs to the configured Hubspot URL; the Origin header points at the
    // dashboard path of the caller's base URL.
    async fn make_request(&self, body: RequestContent, origin_base_url: String) -> Request {
        RequestBuilder::new()
            .method(Method::Post)
            .url(self.request_url.as_str())
            .set_body(body)
            .attach_default_headers()
            .headers(vec![(
                header::ORIGIN.to_string(),
                format!("{origin_base_url}/dashboard").into(),
            )])
            .build()
    }

    // Delegates to the shared HTTP client with no explicit timeout override.
    async fn send_request(
        &self,
        proxy: &Proxy,
        request: Request,
    ) -> CustomResult<reqwest::Response, HttpClientError> {
        http_client::send_request(proxy, request, None).await
    }
}
crates/external_services/src/crm.rs
external_services::src::crm
1,048
true
// File: crates/external_services/src/grpc_client.rs
// Module: external_services::src::grpc_client

/// Dynamic Routing Client interface implementation
#[cfg(feature = "dynamic_routing")]
pub mod dynamic_routing;
/// gRPC based Health Check Client interface implementation
#[cfg(feature = "dynamic_routing")]
pub mod health_check_client;
/// gRPC based Recovery Trainer Client interface implementation
#[cfg(feature = "revenue_recovery")]
pub mod revenue_recovery;
/// gRPC based Unified Connector Service Client interface implementation
pub mod unified_connector_service;

use std::{fmt::Debug, sync::Arc};

#[cfg(feature = "dynamic_routing")]
use common_utils::consts;
use common_utils::{id_type, ucs_types};
#[cfg(feature = "dynamic_routing")]
use dynamic_routing::{DynamicRoutingClientConfig, RoutingStrategy};
#[cfg(feature = "dynamic_routing")]
use health_check_client::HealthCheckClient;
#[cfg(any(feature = "dynamic_routing", feature = "revenue_recovery"))]
use hyper_util::client::legacy::connect::HttpConnector;
#[cfg(any(feature = "dynamic_routing", feature = "revenue_recovery"))]
use router_env::logger;
use serde_urlencoded;
#[cfg(any(feature = "dynamic_routing", feature = "revenue_recovery"))]
use tonic::body::Body;
use typed_builder::TypedBuilder;

#[cfg(feature = "revenue_recovery")]
pub use self::revenue_recovery::{
    recovery_decider_client::{
        DeciderRequest, DeciderResponse, RecoveryDeciderClientConfig,
        RecoveryDeciderClientInterface, RecoveryDeciderError, RecoveryDeciderResult,
    },
    GrpcRecoveryHeaders,
};
use crate::grpc_client::unified_connector_service::{
    UnifiedConnectorServiceClient, UnifiedConnectorServiceClientConfig,
};

#[cfg(any(feature = "dynamic_routing", feature = "revenue_recovery"))]
/// Hyper based Client type for maintaining connection pool for all gRPC services
pub type Client = hyper_util::client::legacy::Client<HttpConnector, Body>;

/// Struct contains all the gRPC Clients
#[derive(Debug, Clone)]
pub struct GrpcClients {
    /// The routing client
    #[cfg(feature = "dynamic_routing")]
    pub dynamic_routing: Option<RoutingStrategy>,
    /// Health Check client for all gRPC services
    #[cfg(feature = "dynamic_routing")]
    pub health_client: HealthCheckClient,
    /// Recovery Decider Client
    #[cfg(feature = "revenue_recovery")]
    pub recovery_decider_client: Option<Box<dyn RecoveryDeciderClientInterface>>,
    /// Unified Connector Service client
    pub unified_connector_service_client: Option<UnifiedConnectorServiceClient>,
}

/// Type that contains the configs required to construct a gRPC client with its respective services.
#[derive(Debug, Clone, serde::Deserialize, serde::Serialize, Default)]
pub struct GrpcClientSettings {
    #[cfg(feature = "dynamic_routing")]
    /// Configs for Dynamic Routing Client
    pub dynamic_routing_client: Option<DynamicRoutingClientConfig>,
    #[cfg(feature = "revenue_recovery")]
    /// Configs for Recovery Decider Client
    pub recovery_decider_client: Option<RecoveryDeciderClientConfig>,
    /// Configs for Unified Connector Service client
    pub unified_connector_service: Option<UnifiedConnectorServiceClientConfig>,
}

impl GrpcClientSettings {
    /// # Panics
    ///
    /// This function will panic if it fails to establish a connection with the gRPC server.
    /// This function will be called at service startup.
    // Panicking on misconfiguration is intentional here: this runs once at
    // startup, where failing fast is preferable to running half-configured.
    #[allow(clippy::expect_used)]
    pub async fn get_grpc_client_interface(&self) -> Arc<GrpcClients> {
        // One shared hyper client (HTTP/2-only) backs every gRPC service, so
        // they all draw from the same connection pool.
        #[cfg(any(feature = "dynamic_routing", feature = "revenue_recovery"))]
        let client =
            hyper_util::client::legacy::Client::builder(hyper_util::rt::TokioExecutor::new())
                .http2_only(true)
                .build_http();

        // Absent config means "feature disabled" (None), not an error; only a
        // present-but-broken config panics.
        #[cfg(feature = "dynamic_routing")]
        let dynamic_routing_connection = self
            .dynamic_routing_client
            .clone()
            .map(|config| config.get_dynamic_routing_connection(client.clone()))
            .transpose()
            .expect("Failed to establish a connection with the Dynamic Routing Server")
            .flatten();

        #[cfg(feature = "dynamic_routing")]
        let health_client = HealthCheckClient::build_connections(self, client.clone())
            .await
            .expect("Failed to build gRPC connections");

        let unified_connector_service_client =
            UnifiedConnectorServiceClient::build_connections(self).await;

        #[cfg(feature = "revenue_recovery")]
        let recovery_decider_client = {
            match &self.recovery_decider_client {
                Some(config) => {
                    // Validate the config first
                    config
                        .validate()
                        .expect("Recovery Decider configuration validation failed");

                    // Create the client
                    let client = config
                        .get_recovery_decider_connection(client.clone())
                        .expect(
                            "Failed to establish a connection with the Recovery Decider Server",
                        );

                    logger::info!("Recovery Decider gRPC client successfully initialized");

                    let boxed_client: Box<dyn RecoveryDeciderClientInterface> = Box::new(client);
                    Some(boxed_client)
                }
                None => {
                    logger::debug!("Recovery Decider client configuration not provided, client will be disabled");
                    None
                }
            }
        };

        Arc::new(GrpcClients {
            #[cfg(feature = "dynamic_routing")]
            dynamic_routing: dynamic_routing_connection,
            #[cfg(feature = "dynamic_routing")]
            health_client,
            #[cfg(feature = "revenue_recovery")]
            recovery_decider_client,
            unified_connector_service_client,
        })
    }
}

/// Contains grpc headers
#[derive(Debug)]
pub struct GrpcHeaders {
    /// Tenant id
    pub tenant_id: String,
    /// Request id
    pub request_id: Option<String>,
}

/// Contains grpc headers for Ucs
#[derive(Debug, TypedBuilder)]
pub struct GrpcHeadersUcs {
    /// Tenant id
    tenant_id: String,
    /// Lineage ids
    lineage_ids: LineageIds,
    /// External vault proxy metadata
    external_vault_proxy_metadata: Option<String>,
    /// Merchant Reference Id
    merchant_reference_id: Option<ucs_types::UcsReferenceId>,
    // Request id forwarded for tracing, if available.
    request_id: Option<String>,
    // Optional shadow-mode flag.
    shadow_mode: Option<bool>,
}

/// Type alias for GrpcHeaders builder in initial stage
// Encodes the typed-builder state where only tenant_id, request_id and
// shadow_mode have been set; the remaining fields are still pending.
pub type GrpcHeadersUcsBuilderInitial =
    GrpcHeadersUcsBuilder<((String,), (), (), (), (Option<String>,), (Option<bool>,))>;

/// Type alias for GrpcHeaders builder in intermediate stage
// Builder state with every field populated, ready for `.build()`.
pub type GrpcHeadersUcsBuilderFinal = GrpcHeadersUcsBuilder<(
    (String,),
    (LineageIds,),
    (Option<String>,),
    (Option<ucs_types::UcsReferenceId>,),
    (Option<String>,),
    (Option<bool>,),
)>;

/// struct to represent set of Lineage ids
#[derive(Debug, serde::Serialize)]
pub struct LineageIds {
    merchant_id: id_type::MerchantId,
    profile_id: id_type::ProfileId,
}

impl LineageIds {
    /// constructor for LineageIds
    pub fn new(merchant_id: id_type::MerchantId, profile_id: id_type::ProfileId) -> Self {
        Self {
            merchant_id,
            profile_id,
        }
    }

    /// get url encoded string representation of LineageIds
    pub fn get_url_encoded_string(self) -> Result<String, serde_urlencoded::ser::Error> {
        serde_urlencoded::to_string(&self)
    }
}

#[cfg(feature = "dynamic_routing")]
/// Trait to add necessary headers to the tonic Request
pub(crate) trait AddHeaders {
    /// Add necessary header fields to the tonic Request
    fn add_headers_to_grpc_request(&mut self, headers: GrpcHeaders);
}

#[cfg(feature = "dynamic_routing")]
impl<T> AddHeaders for tonic::Request<T> {
    #[track_caller]
    fn add_headers_to_grpc_request(&mut self, headers: GrpcHeaders) {
        // Headers that fail to parse as valid metadata values are logged and
        // skipped (`.ok()`), never aborting the request.
        headers.tenant_id
            .parse()
            .map(|tenant_id| {
                self
                    .metadata_mut()
                    .append(consts::TENANT_HEADER, tenant_id)
            })
            .inspect_err(
                |err| logger::warn!(header_parse_error=?err,"invalid {} received",consts::TENANT_HEADER),
            )
            .ok();

        // Request id is optional; only appended when present and parseable.
        headers.request_id.map(|request_id| {
            request_id
                .parse()
                .map(|request_id| {
                    self
                        .metadata_mut()
                        .append(consts::X_REQUEST_ID, request_id)
                })
                .inspect_err(
                    |err| logger::warn!(header_parse_error=?err,"invalid {} received",consts::X_REQUEST_ID),
                )
                .ok();
        });
    }
}

#[cfg(feature = "dynamic_routing")]
/// Wraps `message` in a tonic Request with tenant/request-id headers attached,
/// logging the resulting request for traceability.
pub(crate) fn create_grpc_request<T: Debug>(message: T, headers: GrpcHeaders) -> tonic::Request<T> {
    let mut request = tonic::Request::new(message);
    request.add_headers_to_grpc_request(headers);

    logger::info!(?request);

    request
}
crates/external_services/src/grpc_client.rs
external_services::src::grpc_client
1,922
true
// File: crates/external_services/src/no_encryption.rs // Module: external_services::src::no_encryption //! No encryption functionalities pub mod core; pub mod implementers;
crates/external_services/src/no_encryption.rs
external_services::src::no_encryption
39
true
// File: crates/external_services/src/lib.rs
// Module: external_services::src::lib

//! Interactions with external systems.

#![warn(missing_docs, missing_debug_implementations)]

#[cfg(feature = "aws_kms")]
pub mod aws_kms;
/// crm module
pub mod crm;
#[cfg(feature = "email")]
pub mod email;
pub mod file_storage;
/// Building grpc clients to communicate with the server
pub mod grpc_client;
#[cfg(feature = "hashicorp-vault")]
pub mod hashicorp_vault;
/// http_client module
pub mod http_client;
/// hubspot_proxy module
pub mod hubspot_proxy;
pub mod managers;
pub mod no_encryption;
#[cfg(feature = "superposition")]
pub mod superposition;
/// deserializers module
pub mod utils;

#[cfg(feature = "revenue_recovery")]
/// date_time module
pub mod date_time {
    use error_stack::ResultExt;

    /// Errors in time conversion
    #[derive(Debug, thiserror::Error)]
    pub enum DateTimeConversionError {
        #[error("Invalid timestamp value from prost Timestamp: out of representable range")]
        /// Error for out of range
        TimestampOutOfRange,
    }

    /// Converts a `time::PrimitiveDateTime` to a `prost_types::Timestamp`.
    // The input is assumed to be UTC; `assume_utc` attaches that offset
    // without shifting the wall-clock value.
    pub fn convert_to_prost_timestamp(dt: time::PrimitiveDateTime) -> prost_types::Timestamp {
        let odt = dt.assume_utc();
        prost_types::Timestamp {
            seconds: odt.unix_timestamp(),
            // This conversion is safe as nanoseconds (0..999_999_999) always fit within an i32.
            #[allow(clippy::as_conversions)]
            nanos: odt.nanosecond() as i32,
        }
    }

    /// Converts a `prost_types::Timestamp` to an `time::PrimitiveDateTime`.
    // Widening to i128 first prevents overflow while combining seconds and
    // nanoseconds; out-of-range values surface as `TimestampOutOfRange`.
    pub fn convert_from_prost_timestamp(
        ts: &prost_types::Timestamp,
    ) -> error_stack::Result<time::PrimitiveDateTime, DateTimeConversionError> {
        let timestamp_nanos = i128::from(ts.seconds) * 1_000_000_000 + i128::from(ts.nanos);
        time::OffsetDateTime::from_unix_timestamp_nanos(timestamp_nanos)
            .map(|offset_dt| time::PrimitiveDateTime::new(offset_dt.date(), offset_dt.time()))
            .change_context(DateTimeConversionError::TimestampOutOfRange)
    }
}

/// Crate specific constants
pub mod consts {
    /// General purpose base64 engine
    #[cfg(feature = "aws_kms")]
    pub(crate) const BASE64_ENGINE: base64::engine::GeneralPurpose =
        base64::engine::general_purpose::STANDARD;

    /// Header key used to specify the connector name in UCS requests.
    pub(crate) const UCS_HEADER_CONNECTOR: &str = "x-connector";

    /// Header key used to indicate the authentication type being used.
    pub(crate) const UCS_HEADER_AUTH_TYPE: &str = "x-auth";

    /// Header key for sending the API key used for authentication.
    pub(crate) const UCS_HEADER_API_KEY: &str = "x-api-key";

    /// Header key for sending an additional secret key used in some auth types.
    pub(crate) const UCS_HEADER_KEY1: &str = "x-key1";

    /// Header key for sending the API secret in signature-based authentication.
    pub(crate) const UCS_HEADER_API_SECRET: &str = "x-api-secret";

    /// Header key for sending the AUTH KEY MAP in currency-based authentication.
    pub(crate) const UCS_HEADER_AUTH_KEY_MAP: &str = "x-auth-key-map";

    /// Header key for sending the EXTERNAL VAULT METADATA in proxy payments
    pub(crate) const UCS_HEADER_EXTERNAL_VAULT_METADATA: &str = "x-external-vault-metadata";

    /// Header key for sending the list of lineage ids
    pub(crate) const UCS_LINEAGE_IDS: &str = "x-lineage-ids";

    /// Header key for sending the merchant reference id to UCS
    pub(crate) const UCS_HEADER_REFERENCE_ID: &str = "x-reference-id";
}

/// Metrics for interactions with external systems.
// NOTE(review): the whole module is gated on `aws_kms`, so the inner
// `#[cfg(feature = "aws_kms")]` attributes are redundant but harmless.
#[cfg(feature = "aws_kms")]
pub mod metrics {
    use router_env::{counter_metric, global_meter, histogram_metric_f64};

    global_meter!(GLOBAL_METER, "EXTERNAL_SERVICES");

    #[cfg(feature = "aws_kms")]
    counter_metric!(AWS_KMS_DECRYPTION_FAILURES, GLOBAL_METER); // No. of AWS KMS Decryption failures

    #[cfg(feature = "aws_kms")]
    counter_metric!(AWS_KMS_ENCRYPTION_FAILURES, GLOBAL_METER); // No. of AWS KMS Encryption failures

    #[cfg(feature = "aws_kms")]
    histogram_metric_f64!(AWS_KMS_DECRYPT_TIME, GLOBAL_METER); // Histogram for AWS KMS decryption time (in sec)

    #[cfg(feature = "aws_kms")]
    histogram_metric_f64!(AWS_KMS_ENCRYPT_TIME, GLOBAL_METER); // Histogram for AWS KMS encryption time (in sec)
}
crates/external_services/src/lib.rs
external_services::src::lib
1,093
true
// File: crates/external_services/src/http_client.rs // Module: external_services::src::http_client use common_utils::{consts, errors::CustomResult, request::Request}; use hyperswitch_interfaces::{errors::HttpClientError, types::Proxy}; use request::{HeaderExt, RequestBuilderExt}; use router_env::{instrument, logger, tracing}; /// client module pub mod client; /// metrics module pub mod metrics; /// request module pub mod request; use std::{error::Error, time::Duration}; use common_utils::request::RequestContent; pub use common_utils::request::{ContentType, Method, RequestBuilder}; use error_stack::ResultExt; #[allow(missing_docs)] #[instrument(skip_all)] pub async fn send_request( client_proxy: &Proxy, request: Request, option_timeout_secs: Option<u64>, ) -> CustomResult<reqwest::Response, HttpClientError> { logger::info!(method=?request.method, headers=?request.headers, payload=?request.body, ?request); let url = url::Url::parse(&request.url).change_context(HttpClientError::UrlParsingFailed)?; let client = client::create_client( client_proxy, request.certificate, request.certificate_key, request.ca_certificate, )?; let headers = request.headers.construct_header_map()?; let metrics_tag = router_env::metric_attributes!(( consts::METRICS_HOST_TAG_NAME, url.host_str().unwrap_or_default().to_owned() )); let request = { match request.method { Method::Get => client.get(url), Method::Post => { let client = client.post(url); match request.body { Some(RequestContent::Json(payload)) => client.json(&payload), Some(RequestContent::FormData((form, _))) => client.multipart(form), Some(RequestContent::FormUrlEncoded(payload)) => client.form(&payload), Some(RequestContent::Xml(payload)) => { let body = quick_xml::se::to_string(&payload) .change_context(HttpClientError::BodySerializationFailed)?; client.body(body).header("Content-Type", "application/xml") } Some(RequestContent::RawBytes(payload)) => client.body(payload), None => client, } } Method::Put => { let client = 
client.put(url); match request.body { Some(RequestContent::Json(payload)) => client.json(&payload), Some(RequestContent::FormData((form, _))) => client.multipart(form), Some(RequestContent::FormUrlEncoded(payload)) => client.form(&payload), Some(RequestContent::Xml(payload)) => { let body = quick_xml::se::to_string(&payload) .change_context(HttpClientError::BodySerializationFailed)?; client.body(body).header("Content-Type", "application/xml") } Some(RequestContent::RawBytes(payload)) => client.body(payload), None => client, } } Method::Patch => { let client = client.patch(url); match request.body { Some(RequestContent::Json(payload)) => client.json(&payload), Some(RequestContent::FormData((form, _))) => client.multipart(form), Some(RequestContent::FormUrlEncoded(payload)) => client.form(&payload), Some(RequestContent::Xml(payload)) => { let body = quick_xml::se::to_string(&payload) .change_context(HttpClientError::BodySerializationFailed)?; client.body(body).header("Content-Type", "application/xml") } Some(RequestContent::RawBytes(payload)) => client.body(payload), None => client, } } Method::Delete => client.delete(url), } .add_headers(headers) .timeout(Duration::from_secs( option_timeout_secs.unwrap_or(consts::REQUEST_TIME_OUT), )) }; // We cannot clone the request type, because it has Form trait which is not cloneable. So we are cloning the request builder here. 
let cloned_send_request = request.try_clone().map(|cloned_request| async { cloned_request .send() .await .map_err(|error| match error { error if error.is_timeout() => { metrics::REQUEST_BUILD_FAILURE.add(1, metrics_tag); HttpClientError::RequestTimeoutReceived } error if is_connection_closed_before_message_could_complete(&error) => { metrics::REQUEST_BUILD_FAILURE.add(1, metrics_tag); HttpClientError::ConnectionClosedIncompleteMessage } _ => HttpClientError::RequestNotSent(error.to_string()), }) .attach_printable("Unable to send request to connector") }); let send_request = async { request .send() .await .map_err(|error| match error { error if error.is_timeout() => { metrics::REQUEST_BUILD_FAILURE.add(1, metrics_tag); HttpClientError::RequestTimeoutReceived } error if is_connection_closed_before_message_could_complete(&error) => { metrics::REQUEST_BUILD_FAILURE.add(1, metrics_tag); HttpClientError::ConnectionClosedIncompleteMessage } _ => HttpClientError::RequestNotSent(error.to_string()), }) .attach_printable("Unable to send request to connector") }; let response = common_utils::metrics::utils::record_operation_time( send_request, &metrics::EXTERNAL_REQUEST_TIME, metrics_tag, ) .await; // Retry once if the response is connection closed. // // This is just due to the racy nature of networking. // hyper has a connection pool of idle connections, and it selected one to send your request. // Most of the time, hyper will receive the server’s FIN and drop the dead connection from its pool. // But occasionally, a connection will be selected from the pool // and written to at the same time the server is deciding to close the connection. 
// Since hyper already wrote some of the request, // it can’t really retry it automatically on a new connection, since the server may have acted already match response { Ok(response) => Ok(response), Err(error) if error.current_context() == &HttpClientError::ConnectionClosedIncompleteMessage => { metrics::AUTO_RETRY_CONNECTION_CLOSED.add(1, metrics_tag); match cloned_send_request { Some(cloned_request) => { logger::info!( "Retrying request due to connection closed before message could complete" ); common_utils::metrics::utils::record_operation_time( cloned_request, &metrics::EXTERNAL_REQUEST_TIME, metrics_tag, ) .await } None => { logger::info!("Retrying request due to connection closed before message could complete failed as request is not cloneable"); Err(error) } } } err @ Err(_) => err, } } fn is_connection_closed_before_message_could_complete(error: &reqwest::Error) -> bool { let mut source = error.source(); while let Some(err) = source { if let Some(hyper_err) = err.downcast_ref::<hyper::Error>() { if hyper_err.is_incomplete_message() { return true; } } source = err.source(); } false }
crates/external_services/src/http_client.rs
external_services::src::http_client
1,521
true
// File: crates/external_services/src/managers.rs // Module: external_services::src::managers //! Config and client managers pub mod encryption_management; pub mod secrets_management;
crates/external_services/src/managers.rs
external_services::src::managers
39
true
// File: crates/external_services/src/aws_kms.rs // Module: external_services::src::aws_kms //! Interactions with the AWS KMS SDK pub mod core; pub mod implementers;
crates/external_services/src/aws_kms.rs
external_services::src::aws_kms
44
true
// File: crates/external_services/src/email.rs
// Module: external_services::src::email

//! Interactions with the AWS SES SDK

use aws_sdk_sesv2::types::Body;
use common_utils::{errors::CustomResult, pii};
use serde::Deserialize;

/// Implementation of aws ses client
pub mod ses;

/// Implementation of SMTP server client
pub mod smtp;

/// Implementation of Email client when email support is disabled
pub mod no_email;

/// Custom Result type alias for Email operations.
pub type EmailResult<T> = CustomResult<T, EmailError>;

/// A trait that defines the methods that must be implemented to send email.
#[async_trait::async_trait]
pub trait EmailClient: Sync + Send + dyn_clone::DynClone {
    /// The rich text type of the email client
    type RichText;

    /// Sends an email to the specified recipient with the given subject and body.
    async fn send_email(
        &self,
        recipient: pii::Email,
        subject: String,
        body: Self::RichText,
        proxy_url: Option<&String>,
    ) -> EmailResult<()>;

    /// Convert Stringified HTML to client native rich text format
    /// This has to be done because not all clients may format html as the same
    fn convert_to_rich_text(
        &self,
        intermediate_string: IntermediateString,
    ) -> CustomResult<Self::RichText, EmailError>
    where
        Self::RichText: Send;
}

/// A super trait which is automatically implemented for all EmailClients
#[async_trait::async_trait]
pub trait EmailService: Sync + Send + dyn_clone::DynClone {
    /// Compose and send email using the email data
    async fn compose_and_send_email(
        &self,
        base_url: &str,
        email_data: Box<dyn EmailData + Send>,
        proxy_url: Option<&String>,
    ) -> EmailResult<()>;
}

// Blanket impl: any EmailClient gains EmailService for free by composing
// get_email_data -> convert_to_rich_text -> send_email.
#[async_trait::async_trait]
impl<T> EmailService for T
where
    T: EmailClient,
    <Self as EmailClient>::RichText: Send,
{
    async fn compose_and_send_email(
        &self,
        base_url: &str,
        email_data: Box<dyn EmailData + Send>,
        proxy_url: Option<&String>,
    ) -> EmailResult<()> {
        let email_data = email_data.get_email_data(base_url);
        let email_data = email_data.await?;

        let EmailContents {
            subject,
            body,
            recipient,
        } = email_data;

        let rich_text_string = self.convert_to_rich_text(body)?;

        self.send_email(recipient, subject, rich_text_string, proxy_url)
            .await
    }
}

/// This is a struct used to create Intermediate String for rich text ( html )
#[derive(Debug)]
pub struct IntermediateString(String);

impl IntermediateString {
    /// Create a new Instance of IntermediateString using a string
    pub fn new(inner: String) -> Self {
        Self(inner)
    }

    /// Get the inner String
    pub fn into_inner(self) -> String {
        self.0
    }
}

/// Temporary output for the email subject
#[derive(Debug)]
pub struct EmailContents {
    /// The subject of email
    pub subject: String,
    /// This will be the intermediate representation of the email body in a generic format.
    /// The email clients can convert this intermediate representation to their client specific rich text format
    pub body: IntermediateString,
    /// The email of the recipient to whom the email has to be sent
    pub recipient: pii::Email,
}

/// A trait which will contain the logic of generating the email subject and body
#[async_trait::async_trait]
pub trait EmailData {
    /// Get the email contents
    async fn get_email_data(&self, base_url: &str) -> CustomResult<EmailContents, EmailError>;
}

dyn_clone::clone_trait_object!(EmailClient<RichText = Body>);

/// List of available email clients to choose from
#[derive(Debug, Clone, Default, Deserialize)]
#[serde(tag = "active_email_client")]
#[serde(rename_all = "SCREAMING_SNAKE_CASE")]
pub enum EmailClientConfigs {
    #[default]
    /// Default Email client to use when no client is specified
    NoEmailClient,
    /// AWS ses email client
    Ses {
        /// AWS SES client configuration
        aws_ses: ses::SESConfig,
    },
    /// Other Simple SMTP server
    Smtp {
        /// SMTP server configuration
        smtp: smtp::SmtpServerConfig,
    },
}

/// Struct that contains the settings required to construct an EmailClient.
#[derive(Debug, Clone, Default, Deserialize)]
#[serde(default)]
pub struct EmailSettings {
    /// The AWS region to send SES requests to.
    pub aws_region: String,

    /// Number of days for verification of the email
    pub allowed_unverified_days: i64,

    /// Sender email
    pub sender_email: String,

    #[serde(flatten)]
    /// The client specific configurations
    pub client_config: EmailClientConfigs,

    /// Recipient email for recon emails
    pub recon_recipient_email: pii::Email,

    /// Recipient email for prod intent emails
    pub prod_intent_recipient_email: pii::Email,
}

impl EmailSettings {
    /// Validation for the Email client specific configurations
    pub fn validate(&self) -> Result<(), &'static str> {
        match &self.client_config {
            EmailClientConfigs::Ses { ref aws_ses } => aws_ses.validate(),
            EmailClientConfigs::Smtp { ref smtp } => smtp.validate(),
            EmailClientConfigs::NoEmailClient => Ok(()),
        }
    }
}

/// Errors that could occur from EmailClient.
#[derive(Debug, thiserror::Error)]
pub enum EmailError {
    /// An error occurred when building email client.
    #[error("Error building email client")]
    ClientBuildingFailure,

    /// An error occurred when sending email
    #[error("Error sending email to recipient")]
    EmailSendingFailure,

    /// Failed to generate the email token
    #[error("Failed to generate email token")]
    TokenGenerationFailure,

    /// The expected feature is not implemented
    #[error("Feature not implemented")]
    NotImplemented,

    /// An error occurred when building email content.
    #[error("Error building email content")]
    ContentBuildFailure,
}
crates/external_services/src/email.rs
external_services::src::email
1,327
true
// File: crates/external_services/src/file_storage.rs
// Module: external_services::src::file_storage

//! Module for managing file storage operations with support for multiple storage schemes.

use std::{
    fmt::{Display, Formatter},
    sync::Arc,
};

use common_utils::errors::CustomResult;

/// Includes functionality for AWS S3 storage operations.
#[cfg(feature = "aws_s3")]
mod aws_s3;

mod file_system;

/// Enum representing different file storage configurations, allowing for multiple storage schemes.
// Deserialized from config with the `file_storage_backend` field selecting the variant.
#[derive(Debug, Clone, Default, serde::Deserialize)]
#[serde(tag = "file_storage_backend")]
#[serde(rename_all = "snake_case")]
pub enum FileStorageConfig {
    /// AWS S3 storage configuration.
    #[cfg(feature = "aws_s3")]
    AwsS3 {
        /// Configuration for AWS S3 file storage.
        aws_s3: aws_s3::AwsFileStorageConfig,
    },

    /// Local file system storage configuration.
    #[default]
    FileSystem,
}

impl FileStorageConfig {
    /// Validates the file storage configuration.
    // The file-system backend needs no configuration, so it is always valid.
    pub fn validate(&self) -> Result<(), InvalidFileStorageConfig> {
        match self {
            #[cfg(feature = "aws_s3")]
            Self::AwsS3 { aws_s3 } => aws_s3.validate(),
            Self::FileSystem => Ok(()),
        }
    }

    /// Retrieves the appropriate file storage client based on the file storage configuration.
    pub async fn get_file_storage_client(&self) -> Arc<dyn FileStorageInterface> {
        match self {
            #[cfg(feature = "aws_s3")]
            Self::AwsS3 { aws_s3 } => Arc::new(aws_s3::AwsFileStorageClient::new(aws_s3).await),
            Self::FileSystem => Arc::new(file_system::FileSystem),
        }
    }
}

/// Trait for file storage operations
#[async_trait::async_trait]
pub trait FileStorageInterface: dyn_clone::DynClone + Sync + Send {
    /// Uploads a file to the selected storage scheme.
    async fn upload_file(
        &self,
        file_key: &str,
        file: Vec<u8>,
    ) -> CustomResult<(), FileStorageError>;

    /// Deletes a file from the selected storage scheme.
    async fn delete_file(&self, file_key: &str) -> CustomResult<(), FileStorageError>;

    /// Retrieves a file from the selected storage scheme.
    async fn retrieve_file(&self, file_key: &str) -> CustomResult<Vec<u8>, FileStorageError>;
}
dyn_clone::clone_trait_object!(FileStorageInterface);

/// Error thrown when the file storage config is invalid
#[derive(Debug, Clone)]
pub struct InvalidFileStorageConfig(&'static str);

impl std::error::Error for InvalidFileStorageConfig {}

impl Display for InvalidFileStorageConfig {
    fn fmt(&self, f: &mut Formatter<'_>) -> std::fmt::Result {
        write!(f, "file_storage: {}", self.0)
    }
}

/// Represents errors that can occur during file storage operations.
#[derive(Debug, thiserror::Error, PartialEq)]
pub enum FileStorageError {
    /// Indicates that the file upload operation failed.
    #[error("Failed to upload file")]
    UploadFailed,

    /// Indicates that the file retrieval operation failed.
    #[error("Failed to retrieve file")]
    RetrieveFailed,

    /// Indicates that the file deletion operation failed.
    #[error("Failed to delete file")]
    DeleteFailed,
}
crates/external_services/src/file_storage.rs
external_services::src::file_storage
717
true
// File: crates/external_services/src/hubspot_proxy.rs // Module: external_services::src::hubspot_proxy use masking::Secret; /// Lead source constant for Hubspot pub const HUBSPOT_LEAD_SOURCE: &str = "Hyperswitch Dashboard"; /// Struct representing a request to Hubspot #[derive(Clone, Debug, serde::Serialize, Default)] pub struct HubspotRequest { /// Indicates whether Hubspot should be used. #[serde(rename = "useHubspot")] pub use_hubspot: bool, /// The country of the user or company. pub country: String, /// The ID of the Hubspot form being submitted. #[serde(rename = "hubspotFormId")] pub hubspot_form_id: String, /// The first name of the user. pub firstname: Secret<String>, /// The last name of the user. pub lastname: Secret<String>, /// The email address of the user. pub email: Secret<String>, /// The name of the company. #[serde(rename = "companyName")] pub company_name: String, /// The source of the lead, typically set to "Hyperswitch Dashboard". pub lead_source: String, /// The website URL of the company. pub website: String, /// The phone number of the user. pub phone: Secret<String>, /// The role or designation of the user. pub role: String, /// The monthly GMV (Gross Merchandise Value) of the company. #[serde(rename = "monthlyGMV")] pub monthly_gmv: String, /// Notes from the business development team. pub bd_notes: String, /// Additional message or comments. pub message: String, } #[allow(missing_docs)] impl HubspotRequest { pub fn new( country: String, hubspot_form_id: String, firstname: Secret<String>, email: Secret<String>, company_name: String, website: String, ) -> Self { Self { use_hubspot: true, country, hubspot_form_id, firstname, email, company_name, lead_source: HUBSPOT_LEAD_SOURCE.to_string(), website, ..Default::default() } } }
crates/external_services/src/hubspot_proxy.rs
external_services::src::hubspot_proxy
489
true
// File: crates/external_services/src/utils.rs // Module: external_services::src::utils //! Custom deserializers for external services configuration use std::collections::HashSet; use serde::Deserialize; /// Parses a comma-separated string into a HashSet of typed values. /// /// # Arguments /// /// * `value` - String or string reference containing comma-separated values /// /// # Returns /// /// * `Ok(HashSet<T>)` - Successfully parsed HashSet /// * `Err(String)` - Error message if any value parsing fails /// /// # Type Parameters /// /// * `T` - Target type that implements `FromStr`, `Eq`, and `Hash` /// /// # Examples /// /// ``` /// use std::collections::HashSet; /// /// let result: Result<HashSet<i32>, String> = /// deserialize_hashset_inner("1,2,3"); /// assert!(result.is_ok()); /// /// if let Ok(hashset) = result { /// assert!(hashset.contains(&1)); /// assert!(hashset.contains(&2)); /// assert!(hashset.contains(&3)); /// } /// ``` fn deserialize_hashset_inner<T>(value: impl AsRef<str>) -> Result<HashSet<T>, String> where T: Eq + std::str::FromStr + std::hash::Hash, <T as std::str::FromStr>::Err: std::fmt::Display, { let (values, errors) = value .as_ref() .trim() .split(',') .map(|s| { T::from_str(s.trim()).map_err(|error| { format!( "Unable to deserialize `{}` as `{}`: {error}", s.trim(), std::any::type_name::<T>() ) }) }) .fold( (HashSet::new(), Vec::new()), |(mut values, mut errors), result| match result { Ok(t) => { values.insert(t); (values, errors) } Err(error) => { errors.push(error); (values, errors) } }, ); if !errors.is_empty() { Err(format!("Some errors occurred:\n{}", errors.join("\n"))) } else { Ok(values) } } /// Serde deserializer function for converting comma-separated strings into typed HashSets. /// /// This function is designed to be used with serde's `#[serde(deserialize_with = "deserialize_hashset")]` /// attribute to customize deserialization of HashSet fields. 
/// /// # Arguments /// /// * `deserializer` - Serde deserializer instance /// /// # Returns /// /// * `Ok(HashSet<T>)` - Successfully deserialized HashSet /// * `Err(D::Error)` - Serde deserialization error /// /// # Type Parameters /// /// * `D` - Serde deserializer type /// * `T` - Target type that implements `FromStr`, `Eq`, and `Hash` pub(crate) fn deserialize_hashset<'a, D, T>(deserializer: D) -> Result<HashSet<T>, D::Error> where D: serde::Deserializer<'a>, T: Eq + std::str::FromStr + std::hash::Hash, <T as std::str::FromStr>::Err: std::fmt::Display, { use serde::de::Error; deserialize_hashset_inner(<String>::deserialize(deserializer)?).map_err(D::Error::custom) } #[cfg(test)] mod tests { use std::collections::HashSet; use super::*; #[test] fn test_deserialize_hashset_inner_success() { let result: Result<HashSet<i32>, String> = deserialize_hashset_inner("1,2,3"); assert!(result.is_ok()); if let Ok(hashset) = result { assert_eq!(hashset.len(), 3); assert!(hashset.contains(&1)); assert!(hashset.contains(&2)); assert!(hashset.contains(&3)); } } #[test] fn test_deserialize_hashset_inner_with_whitespace() { let result: Result<HashSet<String>, String> = deserialize_hashset_inner(" a , b , c "); assert!(result.is_ok()); if let Ok(hashset) = result { assert_eq!(hashset.len(), 3); assert!(hashset.contains("a")); assert!(hashset.contains("b")); assert!(hashset.contains("c")); } } #[test] fn test_deserialize_hashset_inner_empty_string() { let result: Result<HashSet<String>, String> = deserialize_hashset_inner(""); assert!(result.is_ok()); if let Ok(hashset) = result { assert_eq!(hashset.len(), 0); } } #[test] fn test_deserialize_hashset_inner_single_value() { let result: Result<HashSet<String>, String> = deserialize_hashset_inner("single"); assert!(result.is_ok()); if let Ok(hashset) = result { assert_eq!(hashset.len(), 1); assert!(hashset.contains("single")); } } #[test] fn test_deserialize_hashset_inner_invalid_int() { let result: Result<HashSet<i32>, String> = 
deserialize_hashset_inner("1,invalid,3"); assert!(result.is_err()); if let Err(error) = result { assert!(error.contains("Unable to deserialize `invalid` as `i32`")); } } #[test] fn test_deserialize_hashset_inner_duplicates() { let result: Result<HashSet<String>, String> = deserialize_hashset_inner("a,b,a,c,b"); assert!(result.is_ok()); if let Ok(hashset) = result { assert_eq!(hashset.len(), 3); // Duplicates should be removed assert!(hashset.contains("a")); assert!(hashset.contains("b")); assert!(hashset.contains("c")); } } }
crates/external_services/src/utils.rs
external_services::src::utils
1,280
true
// File: crates/external_services/src/hashicorp_vault.rs // Module: external_services::src::hashicorp_vault //! Interactions with the HashiCorp Vault pub mod core; pub mod implementers;
crates/external_services/src/hashicorp_vault.rs
external_services::src::hashicorp_vault
48
true
// File: crates/external_services/src/http_client/request.rs // Module: external_services::src::http_client::request use std::str::FromStr; use common_utils::request::Headers; pub use common_utils::{errors::CustomResult, request::ContentType}; use error_stack::ResultExt; use hyperswitch_interfaces::errors::HttpClientError; pub use masking::{Mask, Maskable}; use router_env::{instrument, tracing}; #[allow(missing_docs)] pub trait HeaderExt { fn construct_header_map(self) -> CustomResult<reqwest::header::HeaderMap, HttpClientError>; } impl HeaderExt for Headers { fn construct_header_map(self) -> CustomResult<reqwest::header::HeaderMap, HttpClientError> { use reqwest::header::{HeaderMap, HeaderName, HeaderValue}; self.into_iter().try_fold( HeaderMap::new(), |mut header_map, (header_name, header_value)| { let header_name = HeaderName::from_str(&header_name) .change_context(HttpClientError::HeaderMapConstructionFailed)?; let header_value = header_value.into_inner(); let header_value = HeaderValue::from_str(&header_value) .change_context(HttpClientError::HeaderMapConstructionFailed)?; header_map.append(header_name, header_value); Ok(header_map) }, ) } } #[allow(missing_docs)] pub trait RequestBuilderExt { fn add_headers(self, headers: reqwest::header::HeaderMap) -> Self; } impl RequestBuilderExt for reqwest::RequestBuilder { #[instrument(skip_all)] fn add_headers(mut self, headers: reqwest::header::HeaderMap) -> Self { self = self.headers(headers); self } }
crates/external_services/src/http_client/request.rs
external_services::src::http_client::request
365
true
// File: crates/external_services/src/http_client/client.rs
// Module: external_services::src::http_client::client

use std::time::Duration;

use base64::Engine;
use common_utils::consts::BASE64_ENGINE;
pub use common_utils::errors::CustomResult;
use error_stack::ResultExt;
use hyperswitch_interfaces::{errors::HttpClientError, types::Proxy};
use masking::ExposeInterface;
use once_cell::sync::OnceCell;

// Memoized default (no-certificate) client. NOTE(review): the proxy config
// passed on the *first* `get_base_client` call wins; later calls with a
// different config reuse the cached client — confirm this is intended.
static DEFAULT_CLIENT: OnceCell<reqwest::Client> = OnceCell::new();
use router_env::logger;

// We may need to use outbound proxy to connect to external world.
// Precedence will be the environment variables, followed by the config.
/// Builds an HTTP client, choosing the TLS mode from the supplied certificates:
/// mutual TLS (client cert + key) takes precedence, then one-way TLS with a
/// custom CA, then a cached default client with no extra certificates.
#[allow(missing_docs)]
pub fn create_client(
    proxy_config: &Proxy,
    client_certificate: Option<masking::Secret<String>>,
    client_certificate_key: Option<masking::Secret<String>>,
    ca_certificate: Option<masking::Secret<String>>,
) -> CustomResult<reqwest::Client, HttpClientError> {
    // Case 1: Mutual TLS with client certificate and key
    if let (Some(encoded_certificate), Some(encoded_certificate_key)) =
        (client_certificate.clone(), client_certificate_key.clone())
    {
        if ca_certificate.is_some() {
            logger::warn!("All of client certificate, client key, and CA certificate are provided. 
CA certificate will be ignored in mutual TLS setup.");
        }
        logger::debug!("Creating HTTP client with mutual TLS (client cert + key)");
        let client_builder =
            apply_mitm_certificate(get_client_builder(proxy_config)?, proxy_config);

        let identity = create_identity_from_certificate_and_key(
            encoded_certificate.clone(),
            encoded_certificate_key,
        )?;
        // The client certificate bundle is also trusted as root certificate(s),
        // one `add_root_certificate` call per certificate in the bundle.
        let certificate_list = create_certificate(encoded_certificate)?;
        let client_builder = certificate_list
            .into_iter()
            .fold(client_builder, |client_builder, certificate| {
                client_builder.add_root_certificate(certificate)
            });
        return client_builder
            .identity(identity)
            .use_rustls_tls()
            .build()
            .change_context(HttpClientError::ClientConstructionFailed)
            .attach_printable("Failed to construct client with certificate and certificate key");
    }

    // Case 2: Use provided CA certificate for server authentication only (one-way TLS)
    if let Some(ca_pem) = ca_certificate {
        logger::debug!("Creating HTTP client with one-way TLS (CA certificate)");
        // The CA PEM may arrive with literal "\r\n" escape sequences from the
        // config source; normalize them to real newlines before parsing.
        let pem = ca_pem.expose().replace("\\r\\n", "\n"); // Fix escaped newlines
        let cert = reqwest::Certificate::from_pem(pem.as_bytes())
            .change_context(HttpClientError::ClientConstructionFailed)
            .attach_printable("Failed to parse CA certificate PEM block")?;
        let client_builder =
            apply_mitm_certificate(get_client_builder(proxy_config)?, proxy_config)
                .add_root_certificate(cert);
        return client_builder
            .use_rustls_tls()
            .build()
            .change_context(HttpClientError::ClientConstructionFailed)
            .attach_printable("Failed to construct client with CA certificate");
    }

    // Case 3: Default client (no certs)
    logger::debug!("Creating default HTTP client (no client or CA certificates)");
    get_base_client(proxy_config)
}

/// Builds a `reqwest::ClientBuilder` with redirects disabled, the configured
/// idle-pool timeout, and the HTTP/HTTPS proxies (with bypass hosts) applied.
#[allow(missing_docs)]
pub fn get_client_builder(
    proxy_config: &Proxy,
) -> CustomResult<reqwest::ClientBuilder, HttpClientError> {
    let mut client_builder = reqwest::Client::builder()
        .redirect(reqwest::redirect::Policy::none())
        // NOTE(review): a missing `idle_pool_connection_timeout` becomes
        // `Duration::from_secs(0)` via `unwrap_or_default` — confirm a zero
        // idle timeout is the intended fallback.
        .pool_idle_timeout(Duration::from_secs(
            proxy_config.idle_pool_connection_timeout.unwrap_or_default(),
        ));

    // Hosts listed in `bypass_proxy_hosts` are excluded from both proxies.
    let proxy_exclusion_config =
        reqwest::NoProxy::from_string(&proxy_config.bypass_proxy_hosts.clone().unwrap_or_default());

    // Proxy all HTTPS traffic through the configured HTTPS proxy
    if let Some(url) = proxy_config.https_url.as_ref() {
        client_builder = client_builder.proxy(
            reqwest::Proxy::https(url)
                .change_context(HttpClientError::InvalidProxyConfiguration)
                .attach_printable("HTTPS proxy configuration error")?
                .no_proxy(proxy_exclusion_config.clone()),
        );
    }

    // Proxy all HTTP traffic through the configured HTTP proxy
    if let Some(url) = proxy_config.http_url.as_ref() {
        client_builder = client_builder.proxy(
            reqwest::Proxy::http(url)
                .change_context(HttpClientError::InvalidProxyConfiguration)
                .attach_printable("HTTP proxy configuration error")?
                .no_proxy(proxy_exclusion_config),
        );
    }

    Ok(client_builder)
}

/// Decodes the base64-encoded client certificate and key and combines them
/// into a single PEM chain for `reqwest::Identity`.
#[allow(missing_docs)]
pub fn create_identity_from_certificate_and_key(
    encoded_certificate: masking::Secret<String>,
    encoded_certificate_key: masking::Secret<String>,
) -> Result<reqwest::Identity, error_stack::Report<HttpClientError>> {
    let decoded_certificate = BASE64_ENGINE
        .decode(encoded_certificate.expose())
        .change_context(HttpClientError::CertificateDecodeFailed)?;

    let decoded_certificate_key = BASE64_ENGINE
        .decode(encoded_certificate_key.expose())
        .change_context(HttpClientError::CertificateDecodeFailed)?;

    let certificate = String::from_utf8(decoded_certificate)
        .change_context(HttpClientError::CertificateDecodeFailed)?;

    let certificate_key = String::from_utf8(decoded_certificate_key)
        .change_context(HttpClientError::CertificateDecodeFailed)?;

    // Concatenate key first, then certificate, into one PEM block for
    // `Identity::from_pem`.
    let key_chain = format!("{certificate_key}{certificate}");
    reqwest::Identity::from_pem(key_chain.as_bytes())
        .change_context(HttpClientError::CertificateDecodeFailed)
}

/// Decodes a base64-encoded PEM bundle into the certificates it contains.
#[allow(missing_docs)]
pub fn create_certificate(
    encoded_certificate: masking::Secret<String>,
) -> Result<Vec<reqwest::Certificate>, error_stack::Report<HttpClientError>> {
    let decoded_certificate = BASE64_ENGINE
        .decode(encoded_certificate.expose())
        .change_context(HttpClientError::CertificateDecodeFailed)?;

    let certificate = String::from_utf8(decoded_certificate)
        .change_context(HttpClientError::CertificateDecodeFailed)?;
    reqwest::Certificate::from_pem_bundle(certificate.as_bytes())
        .change_context(HttpClientError::CertificateDecodeFailed)
}

/// Adds the MITM proxy's CA certificate (if configured) as a trusted root.
/// Best-effort: a malformed certificate is logged and skipped rather than
/// failing client construction.
fn apply_mitm_certificate(
    mut client_builder: reqwest::ClientBuilder,
    proxy_config: &Proxy,
) -> reqwest::ClientBuilder {
    if let Some(mitm_ca_cert) = &proxy_config.mitm_ca_certificate {
        // Same escaped-newline normalization as for the one-way TLS CA above.
        let pem = mitm_ca_cert.clone().expose().replace("\\r\\n", "\n");
        match reqwest::Certificate::from_pem(pem.as_bytes()) {
            Ok(cert) => {
                logger::debug!("Successfully added MITM CA certificate");
                client_builder = client_builder.add_root_certificate(cert);
            }
            Err(err) => {
                logger::error!(
                    "Failed to parse MITM CA certificate: {}, continuing without MITM support",
                    err
                );
            }
        }
    }
    client_builder
}

/// Returns the process-wide cached default client, constructing it on first use.
fn get_base_client(proxy_config: &Proxy) -> CustomResult<reqwest::Client, HttpClientError> {
    Ok(DEFAULT_CLIENT
        .get_or_try_init(|| {
            apply_mitm_certificate(get_client_builder(proxy_config)?, proxy_config)
                .build()
                .change_context(HttpClientError::ClientConstructionFailed)
                .attach_printable("Failed to construct base client")
        })?
        .clone())
}
crates/external_services/src/http_client/client.rs
external_services::src::http_client::client
1,544
true
// File: crates/external_services/src/http_client/metrics.rs // Module: external_services::src::http_client::metrics use router_env::{counter_metric, global_meter, histogram_metric_f64}; global_meter!(GLOBAL_METER, "ROUTER_API"); counter_metric!(REQUEST_BUILD_FAILURE, GLOBAL_METER); histogram_metric_f64!(EXTERNAL_REQUEST_TIME, GLOBAL_METER); counter_metric!(AUTO_RETRY_CONNECTION_CLOSED, GLOBAL_METER);
crates/external_services/src/http_client/metrics.rs
external_services::src::http_client::metrics
93
true
// File: crates/external_services/src/hashicorp_vault/core.rs
// Module: external_services::src::hashicorp_vault::core

//! Interactions with the HashiCorp Vault

use std::{collections::HashMap, future::Future, pin::Pin};

use common_utils::{ext_traits::ConfigExt, fp_utils::when};
use error_stack::{Report, ResultExt};
use masking::{PeekInterface, Secret};
use vaultrs::client::{VaultClient, VaultClientSettingsBuilder};

// Process-wide memoized vault client: the config passed to the first
// successful `get_hashicorp_client` call wins; later calls reuse it.
static HC_CLIENT: tokio::sync::OnceCell<HashiCorpVault> = tokio::sync::OnceCell::const_new();

#[allow(missing_debug_implementations)]
/// A struct representing a connection to HashiCorp Vault.
pub struct HashiCorpVault {
    /// The underlying client used for interacting with HashiCorp Vault.
    client: VaultClient,
}

/// Configuration for connecting to HashiCorp Vault.
#[derive(Clone, Debug, Default, serde::Deserialize)]
#[serde(default)]
pub struct HashiCorpVaultConfig {
    /// The URL of the HashiCorp Vault server.
    pub url: String,

    /// The authentication token used to access HashiCorp Vault.
    pub token: Secret<String>,
}

impl HashiCorpVaultConfig {
    /// Verifies that the [`HashiCorpVault`] configuration is usable.
    ///
    /// Both the server URL and the token must be non-empty and non-default.
    pub fn validate(&self) -> Result<(), &'static str> {
        when(self.url.is_default_or_empty(), || {
            Err("HashiCorp vault url must not be empty")
        })?;

        when(self.token.is_default_or_empty(), || {
            Err("HashiCorp vault token must not be empty")
        })
    }
}

/// Asynchronously retrieves a HashiCorp Vault client based on the provided configuration.
///
/// The client is created once and cached in [`HC_CLIENT`]; subsequent calls
/// return the cached instance regardless of the `config` argument.
///
/// # Parameters
///
/// - `config`: A reference to a `HashiCorpVaultConfig` containing the configuration details.
pub async fn get_hashicorp_client(
    config: &HashiCorpVaultConfig,
) -> error_stack::Result<&'static HashiCorpVault, HashiCorpError> {
    HC_CLIENT
        .get_or_try_init(|| async { HashiCorpVault::new(config) })
        .await
}

/// A trait defining an engine for interacting with HashiCorp Vault.
pub trait Engine: Sized {
    /// The associated type representing the return type of the engine's operations.
    // Generic associated type: lets each engine choose its own (possibly
    // borrowing) future type for `read`.
    type ReturnType<'b, T>
    where
        T: 'b,
        Self: 'b;

    /// Reads data from HashiCorp Vault at the specified location.
    ///
    /// # Parameters
    ///
    /// - `client`: A reference to the HashiCorpVault client.
    /// - `location`: The location in HashiCorp Vault to read data from.
    ///
    /// # Returns
    ///
    /// A future representing the result of the read operation.
    fn read(client: &HashiCorpVault, location: String) -> Self::ReturnType<'_, String>;
}

/// An implementation of the `Engine` trait for the Key-Value version 2 (Kv2) engine.
// Uninhabited enum: `Kv2` is a type-level marker only and is never instantiated.
#[derive(Debug)]
pub enum Kv2 {}

impl Engine for Kv2 {
    type ReturnType<'b, T: 'b> =
        Pin<Box<dyn Future<Output = error_stack::Result<T, HashiCorpError>> + Send + 'b>>;

    // `location` format is "mount:path" or "mount:path:key"; when the key
    // segment is absent, the field named "value" is read from the secret.
    fn read(client: &HashiCorpVault, location: String) -> Self::ReturnType<'_, String> {
        Box::pin(async move {
            let mut split = location.split(':');
            let mount = split.next().ok_or(HashiCorpError::IncompleteData)?;
            let path = split.next().ok_or(HashiCorpError::IncompleteData)?;
            let key = split.next().unwrap_or("value");

            let mut output =
                vaultrs::kv2::read::<HashMap<String, String>>(&client.client, mount, path)
                    .await
                    .map_err(Into::<Report<_>>::into)
                    .change_context(HashiCorpError::FetchFailed)?;

            // A secret that exists but lacks the requested field is a parse error.
            Ok(output.remove(key).ok_or(HashiCorpError::ParseError)?)
        })
    }
}

impl HashiCorpVault {
    /// Creates a new instance of HashiCorpVault based on the provided configuration.
    ///
    /// # Parameters
    ///
    /// - `config`: A reference to a `HashiCorpVaultConfig` containing the configuration details.
    pub fn new(config: &HashiCorpVaultConfig) -> error_stack::Result<Self, HashiCorpError> {
        VaultClient::new(
            VaultClientSettingsBuilder::default()
                .address(&config.url)
                .token(config.token.peek())
                .build()
                .map_err(Into::<Report<_>>::into)
                .change_context(HashiCorpError::ClientCreationFailed)
                .attach_printable("Failed while building vault settings")?,
        )
        .map_err(Into::<Report<_>>::into)
        .change_context(HashiCorpError::ClientCreationFailed)
        .map(|client| Self { client })
    }

    /// Asynchronously fetches data from HashiCorp Vault using the specified engine.
    ///
    /// # Parameters
    ///
    /// - `data`: A String representing the location or identifier of the data in HashiCorp Vault.
    ///
    /// # Type Parameters
    ///
    /// - `En`: The engine type that implements the `Engine` trait.
    /// - `I`: The type that can be constructed from the retrieved encoded data.
    // The higher-ranked bound pins `En::ReturnType` to the boxed-future shape
    // used by `Kv2`, so `read` can be awaited for any borrow lifetime.
    pub async fn fetch<En, I>(&self, data: String) -> error_stack::Result<I, HashiCorpError>
    where
        for<'a> En: Engine<
                ReturnType<'a, String> = Pin<
                    Box<
                        dyn Future<Output = error_stack::Result<String, HashiCorpError>>
                            + Send
                            + 'a,
                    >,
                >,
            > + 'a,
        I: FromEncoded,
    {
        let output = En::read(self, data).await?;
        I::from_encoded(output).ok_or(error_stack::report!(HashiCorpError::HexDecodingFailed))
    }
}

/// A trait for types that can be constructed from encoded data in the form of a String.
pub trait FromEncoded: Sized {
    /// Constructs an instance of the type from the provided encoded input.
    ///
    /// # Parameters
    ///
    /// - `input`: A String containing the encoded data.
    ///
    /// # Returns
    ///
    /// An `Option<Self>` representing the constructed instance if successful, or `None` otherwise.
    ///
    /// # Example
    ///
    /// ```rust
    /// use external_services::hashicorp_vault::core::FromEncoded;
    /// use masking::Secret;
    /// let secret_instance = Secret::<String>::from_encoded("encoded_secret_string".to_string());
    /// let vec_instance = Vec::<u8>::from_encoded("68656c6c6f".to_string());
    /// ```
    fn from_encoded(input: String) -> Option<Self>;
}

impl FromEncoded for Secret<String> {
    // Plain passthrough: the fetched string is wrapped as-is, never fails.
    fn from_encoded(input: String) -> Option<Self> {
        Some(input.into())
    }
}

impl FromEncoded for Vec<u8> {
    // Interprets the fetched string as hex; `None` on invalid hex input.
    fn from_encoded(input: String) -> Option<Self> {
        hex::decode(input).ok()
    }
}

/// An enumeration representing various errors that can occur in interactions with HashiCorp Vault.
#[derive(Debug, thiserror::Error)]
pub enum HashiCorpError {
    /// Failed while creating hashicorp client
    #[error("Failed while creating a new client")]
    ClientCreationFailed,

    /// Failed while building configurations for hashicorp client
    #[error("Failed while building configuration")]
    ConfigurationBuildFailed,

    /// Failed while decoding data to hex format
    #[error("Failed while decoding hex data")]
    HexDecodingFailed,

    /// An error occurred when base64 decoding input data.
    #[error("Failed to base64 decode input data")]
    Base64DecodingFailed,

    /// An error occurred when KMS decrypting input data.
    #[error("Failed to KMS decrypt input data")]
    DecryptionFailed,

    /// The KMS decrypted output does not include a plaintext output.
    #[error("Missing plaintext KMS decryption output")]
    MissingPlaintextDecryptionOutput,

    /// An error occurred UTF-8 decoding KMS decrypted output.
    #[error("Failed to UTF-8 decode decryption output")]
    Utf8DecodingFailed,

    /// Incomplete data provided to fetch data from HashiCorp
    #[error("Provided information about the value is incomplete")]
    IncompleteData,

    /// Failed while fetching data from vault
    #[error("Failed while fetching data from the server")]
    FetchFailed,

    /// Failed while parsing received data
    #[error("Failed while parsing the response")]
    ParseError,
}
crates/external_services/src/hashicorp_vault/core.rs
external_services::src::hashicorp_vault::core
1,864
true
// File: crates/external_services/src/hashicorp_vault/implementers.rs // Module: external_services::src::hashicorp_vault::implementers //! Trait implementations for Hashicorp vault client use common_utils::errors::CustomResult; use error_stack::ResultExt; use hyperswitch_interfaces::secrets_interface::{ SecretManagementInterface, SecretsManagementError, }; use masking::{ExposeInterface, Secret}; use crate::hashicorp_vault::core::{HashiCorpVault, Kv2}; #[async_trait::async_trait] impl SecretManagementInterface for HashiCorpVault { async fn get_secret( &self, input: Secret<String>, ) -> CustomResult<Secret<String>, SecretsManagementError> { self.fetch::<Kv2, Secret<String>>(input.expose()) .await .map(|val| val.expose().to_owned()) .change_context(SecretsManagementError::FetchSecretFailed) .map(Into::into) } }
crates/external_services/src/hashicorp_vault/implementers.rs
external_services::src::hashicorp_vault::implementers
211
true
// File: crates/external_services/src/file_storage/file_system.rs // Module: external_services::src::file_storage::file_system //! Module for local file system storage operations use std::{ fs::{remove_file, File}, io::{Read, Write}, path::PathBuf, }; use common_utils::errors::CustomResult; use error_stack::ResultExt; use crate::file_storage::{FileStorageError, FileStorageInterface}; /// Constructs the file path for a given file key within the file system. /// The file path is generated based on the workspace path and the provided file key. fn get_file_path(file_key: impl AsRef<str>) -> PathBuf { let mut file_path = PathBuf::new(); file_path.push(std::env::current_dir().unwrap_or(".".into())); file_path.push("files"); file_path.push(file_key.as_ref()); file_path } /// Represents a file system for storing and managing files locally. #[derive(Debug, Clone)] pub(super) struct FileSystem; impl FileSystem { /// Saves the provided file data to the file system under the specified file key. async fn upload_file( &self, file_key: &str, file: Vec<u8>, ) -> CustomResult<(), FileSystemStorageError> { let file_path = get_file_path(file_key); // Ignore the file name and create directories in the `file_path` if not exists std::fs::create_dir_all( file_path .parent() .ok_or(FileSystemStorageError::CreateDirFailed) .attach_printable("Failed to obtain parent directory")?, ) .change_context(FileSystemStorageError::CreateDirFailed)?; let mut file_handler = File::create(file_path).change_context(FileSystemStorageError::CreateFailure)?; file_handler .write_all(&file) .change_context(FileSystemStorageError::WriteFailure)?; Ok(()) } /// Deletes the file associated with the specified file key from the file system. 
async fn delete_file(&self, file_key: &str) -> CustomResult<(), FileSystemStorageError> { let file_path = get_file_path(file_key); remove_file(file_path).change_context(FileSystemStorageError::DeleteFailure)?; Ok(()) } /// Retrieves the file content associated with the specified file key from the file system. async fn retrieve_file(&self, file_key: &str) -> CustomResult<Vec<u8>, FileSystemStorageError> { let mut received_data: Vec<u8> = Vec::new(); let file_path = get_file_path(file_key); let mut file = File::open(file_path).change_context(FileSystemStorageError::FileOpenFailure)?; file.read_to_end(&mut received_data) .change_context(FileSystemStorageError::ReadFailure)?; Ok(received_data) } } #[async_trait::async_trait] impl FileStorageInterface for FileSystem { /// Saves the provided file data to the file system under the specified file key. async fn upload_file( &self, file_key: &str, file: Vec<u8>, ) -> CustomResult<(), FileStorageError> { self.upload_file(file_key, file) .await .change_context(FileStorageError::UploadFailed)?; Ok(()) } /// Deletes the file associated with the specified file key from the file system. async fn delete_file(&self, file_key: &str) -> CustomResult<(), FileStorageError> { self.delete_file(file_key) .await .change_context(FileStorageError::DeleteFailed)?; Ok(()) } /// Retrieves the file content associated with the specified file key from the file system. async fn retrieve_file(&self, file_key: &str) -> CustomResult<Vec<u8>, FileStorageError> { Ok(self .retrieve_file(file_key) .await .change_context(FileStorageError::RetrieveFailed)?) } } /// Represents an error that can occur during local file system storage operations. #[derive(Debug, thiserror::Error)] enum FileSystemStorageError { /// Error indicating opening a file failed #[error("Failed while opening the file")] FileOpenFailure, /// Error indicating file creation failed. #[error("Failed to create file")] CreateFailure, /// Error indicating reading a file failed. 
#[error("Failed while reading the file")] ReadFailure, /// Error indicating writing to a file failed. #[error("Failed while writing into file")] WriteFailure, /// Error indicating file deletion failed. #[error("Failed while deleting the file")] DeleteFailure, /// Error indicating directory creation failed #[error("Failed while creating a directory")] CreateDirFailed, }
crates/external_services/src/file_storage/file_system.rs
external_services::src::file_storage::file_system
1,002
true
// File: crates/external_services/src/file_storage/aws_s3.rs
// Module: external_services::src::file_storage::aws_s3

use aws_config::meta::region::RegionProviderChain;
use aws_sdk_s3::{
    operation::{
        delete_object::DeleteObjectError, get_object::GetObjectError, put_object::PutObjectError,
    },
    Client,
};
use aws_sdk_sts::config::Region;
use common_utils::{errors::CustomResult, ext_traits::ConfigExt};
use error_stack::ResultExt;

use super::InvalidFileStorageConfig;
use crate::file_storage::{FileStorageError, FileStorageInterface};

/// Configuration for AWS S3 file storage.
#[derive(Debug, serde::Deserialize, Clone, Default)]
#[serde(default)]
pub struct AwsFileStorageConfig {
    /// The AWS region to send file uploads
    region: String,
    /// The AWS s3 bucket to send file uploads
    bucket_name: String,
}

impl AwsFileStorageConfig {
    /// Validates the AWS S3 file storage configuration.
    ///
    /// Both `region` and `bucket_name` must be non-empty and non-default.
    pub(super) fn validate(&self) -> Result<(), InvalidFileStorageConfig> {
        use common_utils::fp_utils::when;

        when(self.region.is_default_or_empty(), || {
            Err(InvalidFileStorageConfig("aws s3 region must not be empty"))
        })?;

        when(self.bucket_name.is_default_or_empty(), || {
            Err(InvalidFileStorageConfig(
                "aws s3 bucket name must not be empty",
            ))
        })
    }
}

/// AWS S3 file storage client.
#[derive(Debug, Clone)]
pub(super) struct AwsFileStorageClient {
    /// AWS S3 client
    inner_client: Client,
    /// The name of the AWS S3 bucket.
    bucket_name: String,
}

impl AwsFileStorageClient {
    /// Creates a new AWS S3 file storage client.
    ///
    /// Credentials and any remaining settings are resolved from the process
    /// environment via `aws_config::from_env`; only the region comes from the
    /// supplied `config`.
    pub(super) async fn new(config: &AwsFileStorageConfig) -> Self {
        let region_provider = RegionProviderChain::first_try(Region::new(config.region.clone()));
        let sdk_config = aws_config::from_env().region(region_provider).load().await;
        Self {
            inner_client: Client::new(&sdk_config),
            bucket_name: config.bucket_name.clone(),
        }
    }

    /// Uploads a file to AWS S3.
    async fn upload_file(
        &self,
        file_key: &str,
        file: Vec<u8>,
    ) -> CustomResult<(), AwsS3StorageError> {
        self.inner_client
            .put_object()
            .bucket(&self.bucket_name)
            .key(file_key)
            .body(file.into())
            .send()
            .await
            .map_err(AwsS3StorageError::UploadFailure)?;
        Ok(())
    }

    /// Deletes a file from AWS S3.
    async fn delete_file(&self, file_key: &str) -> CustomResult<(), AwsS3StorageError> {
        self.inner_client
            .delete_object()
            .bucket(&self.bucket_name)
            .key(file_key)
            .send()
            .await
            .map_err(AwsS3StorageError::DeleteFailure)?;
        Ok(())
    }

    /// Retrieves a file from AWS S3.
    ///
    /// The response body is streamed and collected into memory in full before
    /// being returned as a byte vector.
    async fn retrieve_file(&self, file_key: &str) -> CustomResult<Vec<u8>, AwsS3StorageError> {
        Ok(self
            .inner_client
            .get_object()
            .bucket(&self.bucket_name)
            .key(file_key)
            .send()
            .await
            .map_err(AwsS3StorageError::RetrieveFailure)?
            .body
            .collect()
            .await
            // Failure while draining the body stream (after a successful
            // GetObject response) is surfaced as `UnknownError`.
            .map_err(AwsS3StorageError::UnknownError)?
            .to_vec())
    }
}

// Bridges the S3 backend into the storage-agnostic interface, mapping the
// detailed SDK errors onto the coarse `FileStorageError` variants.
#[async_trait::async_trait]
impl FileStorageInterface for AwsFileStorageClient {
    /// Uploads a file to AWS S3.
    async fn upload_file(
        &self,
        file_key: &str,
        file: Vec<u8>,
    ) -> CustomResult<(), FileStorageError> {
        self.upload_file(file_key, file)
            .await
            .change_context(FileStorageError::UploadFailed)?;
        Ok(())
    }

    /// Deletes a file from AWS S3.
    async fn delete_file(&self, file_key: &str) -> CustomResult<(), FileStorageError> {
        self.delete_file(file_key)
            .await
            .change_context(FileStorageError::DeleteFailed)?;
        Ok(())
    }

    /// Retrieves a file from AWS S3.
    async fn retrieve_file(&self, file_key: &str) -> CustomResult<Vec<u8>, FileStorageError> {
        Ok(self
            .retrieve_file(file_key)
            .await
            .change_context(FileStorageError::RetrieveFailed)?)
    }
}

/// Enum representing errors that can occur during AWS S3 file storage operations.
#[derive(Debug, thiserror::Error)]
enum AwsS3StorageError {
    /// Error indicating that file upload to S3 failed.
    #[error("File upload to S3 failed: {0:?}")]
    UploadFailure(aws_sdk_s3::error::SdkError<PutObjectError>),

    /// Error indicating that file retrieval from S3 failed.
    #[error("File retrieve from S3 failed: {0:?}")]
    RetrieveFailure(aws_sdk_s3::error::SdkError<GetObjectError>),

    /// Error indicating that file deletion from S3 failed.
    #[error("File delete from S3 failed: {0:?}")]
    DeleteFailure(aws_sdk_s3::error::SdkError<DeleteObjectError>),

    /// Unknown error occurred.
    #[error("Unknown error occurred: {0:?}")]
    UnknownError(aws_sdk_s3::primitives::ByteStreamError),
}
crates/external_services/src/file_storage/aws_s3.rs
external_services::src::file_storage::aws_s3
1,202
true
// File: crates/external_services/src/managers/secrets_management.rs // Module: external_services::src::managers::secrets_management //! Secrets management util module use common_utils::errors::CustomResult; #[cfg(feature = "hashicorp-vault")] use error_stack::ResultExt; use hyperswitch_interfaces::secrets_interface::{ SecretManagementInterface, SecretsManagementError, }; #[cfg(feature = "aws_kms")] use crate::aws_kms; #[cfg(feature = "hashicorp-vault")] use crate::hashicorp_vault; use crate::no_encryption::core::NoEncryption; /// Enum representing configuration options for secrets management. #[derive(Debug, Clone, Default, serde::Deserialize)] #[serde(tag = "secrets_manager")] #[serde(rename_all = "snake_case")] pub enum SecretsManagementConfig { /// AWS KMS configuration #[cfg(feature = "aws_kms")] AwsKms { /// AWS KMS config aws_kms: aws_kms::core::AwsKmsConfig, }, /// HashiCorp-Vault configuration #[cfg(feature = "hashicorp-vault")] HashiCorpVault { /// HC-Vault config hc_vault: hashicorp_vault::core::HashiCorpVaultConfig, }, /// Variant representing no encryption #[default] NoEncryption, } impl SecretsManagementConfig { /// Verifies that the client configuration is usable pub fn validate(&self) -> Result<(), &'static str> { match self { #[cfg(feature = "aws_kms")] Self::AwsKms { aws_kms } => aws_kms.validate(), #[cfg(feature = "hashicorp-vault")] Self::HashiCorpVault { hc_vault } => hc_vault.validate(), Self::NoEncryption => Ok(()), } } /// Retrieves the appropriate secret management client based on the configuration. 
pub async fn get_secret_management_client( &self, ) -> CustomResult<Box<dyn SecretManagementInterface>, SecretsManagementError> { match self { #[cfg(feature = "aws_kms")] Self::AwsKms { aws_kms } => { Ok(Box::new(aws_kms::core::AwsKmsClient::new(aws_kms).await)) } #[cfg(feature = "hashicorp-vault")] Self::HashiCorpVault { hc_vault } => { hashicorp_vault::core::HashiCorpVault::new(hc_vault) .change_context(SecretsManagementError::ClientCreationFailed) .map(|inner| -> Box<dyn SecretManagementInterface> { Box::new(inner) }) } Self::NoEncryption => Ok(Box::new(NoEncryption)), } } }
crates/external_services/src/managers/secrets_management.rs
external_services::src::managers::secrets_management
595
true
// File: crates/external_services/src/managers/encryption_management.rs // Module: external_services::src::managers::encryption_management //! Encryption management util module use std::sync::Arc; use common_utils::errors::CustomResult; use hyperswitch_interfaces::encryption_interface::{ EncryptionError, EncryptionManagementInterface, }; #[cfg(feature = "aws_kms")] use crate::aws_kms; use crate::no_encryption::core::NoEncryption; /// Enum representing configuration options for encryption management. #[derive(Debug, Clone, Default, serde::Deserialize)] #[serde(tag = "encryption_manager")] #[serde(rename_all = "snake_case")] pub enum EncryptionManagementConfig { /// AWS KMS configuration #[cfg(feature = "aws_kms")] AwsKms { /// AWS KMS config aws_kms: aws_kms::core::AwsKmsConfig, }, /// Variant representing no encryption #[default] NoEncryption, } impl EncryptionManagementConfig { /// Verifies that the client configuration is usable pub fn validate(&self) -> Result<(), &'static str> { match self { #[cfg(feature = "aws_kms")] Self::AwsKms { aws_kms } => aws_kms.validate(), Self::NoEncryption => Ok(()), } } /// Retrieves the appropriate encryption client based on the configuration. pub async fn get_encryption_management_client( &self, ) -> CustomResult<Arc<dyn EncryptionManagementInterface>, EncryptionError> { Ok(match self { #[cfg(feature = "aws_kms")] Self::AwsKms { aws_kms } => Arc::new(aws_kms::core::AwsKmsClient::new(aws_kms).await), Self::NoEncryption => Arc::new(NoEncryption), }) } }
crates/external_services/src/managers/encryption_management.rs
external_services::src::managers::encryption_management
388
true
// File: crates/external_services/src/no_encryption/core.rs // Module: external_services::src::no_encryption::core //! No encryption core functionalities /// No encryption type #[derive(Debug, Clone)] pub struct NoEncryption; impl NoEncryption { /// Encryption functionality pub fn encrypt(&self, data: impl AsRef<[u8]>) -> Vec<u8> { data.as_ref().into() } /// Decryption functionality pub fn decrypt(&self, data: impl AsRef<[u8]>) -> Vec<u8> { data.as_ref().into() } }
crates/external_services/src/no_encryption/core.rs
external_services::src::no_encryption::core
130
true
// File: crates/external_services/src/no_encryption/implementers.rs // Module: external_services::src::no_encryption::implementers //! Trait implementations for No encryption client use common_utils::errors::CustomResult; use error_stack::ResultExt; use hyperswitch_interfaces::{ encryption_interface::{EncryptionError, EncryptionManagementInterface}, secrets_interface::{SecretManagementInterface, SecretsManagementError}, }; use masking::{ExposeInterface, Secret}; use crate::no_encryption::core::NoEncryption; #[async_trait::async_trait] impl EncryptionManagementInterface for NoEncryption { async fn encrypt(&self, input: &[u8]) -> CustomResult<Vec<u8>, EncryptionError> { Ok(self.encrypt(input)) } async fn decrypt(&self, input: &[u8]) -> CustomResult<Vec<u8>, EncryptionError> { Ok(self.decrypt(input)) } } #[async_trait::async_trait] impl SecretManagementInterface for NoEncryption { async fn get_secret( &self, input: Secret<String>, ) -> CustomResult<Secret<String>, SecretsManagementError> { String::from_utf8(self.decrypt(input.expose())) .map(Into::into) .change_context(SecretsManagementError::FetchSecretFailed) .attach_printable("Failed to convert decrypted value to UTF-8") } }
crates/external_services/src/no_encryption/implementers.rs
external_services::src::no_encryption::implementers
282
true
// File: crates/external_services/src/aws_kms/core.rs // Module: external_services::src::aws_kms::core //! Interactions with the AWS KMS SDK use std::time::Instant; use aws_config::meta::region::RegionProviderChain; use aws_sdk_kms::{config::Region, primitives::Blob, Client}; use base64::Engine; use common_utils::errors::CustomResult; use error_stack::{report, ResultExt}; use router_env::logger; use crate::{consts, metrics}; /// Configuration parameters required for constructing a [`AwsKmsClient`]. #[derive(Clone, Debug, Default, serde::Deserialize)] #[serde(default)] pub struct AwsKmsConfig { /// The AWS key identifier of the KMS key used to encrypt or decrypt data. pub key_id: String, /// The AWS region to send KMS requests to. pub region: String, } /// Client for AWS KMS operations. #[derive(Debug, Clone)] pub struct AwsKmsClient { inner_client: Client, key_id: String, } impl AwsKmsClient { /// Constructs a new AWS KMS client. pub async fn new(config: &AwsKmsConfig) -> Self { let region_provider = RegionProviderChain::first_try(Region::new(config.region.clone())); let sdk_config = aws_config::from_env().region(region_provider).load().await; Self { inner_client: Client::new(&sdk_config), key_id: config.key_id.clone(), } } /// Decrypts the provided base64-encoded encrypted data using the AWS KMS SDK. We assume that /// the SDK has the values required to interact with the AWS KMS APIs (`AWS_ACCESS_KEY_ID` and /// `AWS_SECRET_ACCESS_KEY`) either set in environment variables, or that the SDK is running in /// a machine that is able to assume an IAM role. 
pub async fn decrypt(&self, data: impl AsRef<[u8]>) -> CustomResult<String, AwsKmsError> { let start = Instant::now(); let data = consts::BASE64_ENGINE .decode(data) .change_context(AwsKmsError::Base64DecodingFailed)?; let ciphertext_blob = Blob::new(data); let decrypt_output = self .inner_client .decrypt() .key_id(&self.key_id) .ciphertext_blob(ciphertext_blob) .send() .await .inspect_err(|error| { // Logging using `Debug` representation of the error as the `Display` // representation does not hold sufficient information. logger::error!(aws_kms_sdk_error=?error, "Failed to AWS KMS decrypt data"); metrics::AWS_KMS_DECRYPTION_FAILURES.add(1, &[]); }) .change_context(AwsKmsError::DecryptionFailed)?; let output = decrypt_output .plaintext .ok_or(report!(AwsKmsError::MissingPlaintextDecryptionOutput)) .and_then(|blob| { String::from_utf8(blob.into_inner()).change_context(AwsKmsError::Utf8DecodingFailed) })?; let time_taken = start.elapsed(); metrics::AWS_KMS_DECRYPT_TIME.record(time_taken.as_secs_f64(), &[]); Ok(output) } /// Encrypts the provided String data using the AWS KMS SDK. We assume that /// the SDK has the values required to interact with the AWS KMS APIs (`AWS_ACCESS_KEY_ID` and /// `AWS_SECRET_ACCESS_KEY`) either set in environment variables, or that the SDK is running in /// a machine that is able to assume an IAM role. pub async fn encrypt(&self, data: impl AsRef<[u8]>) -> CustomResult<String, AwsKmsError> { let start = Instant::now(); let plaintext_blob = Blob::new(data.as_ref()); let encrypted_output = self .inner_client .encrypt() .key_id(&self.key_id) .plaintext(plaintext_blob) .send() .await .inspect_err(|error| { // Logging using `Debug` representation of the error as the `Display` // representation does not hold sufficient information. 
logger::error!(aws_kms_sdk_error=?error, "Failed to AWS KMS encrypt data"); metrics::AWS_KMS_ENCRYPTION_FAILURES.add(1, &[]); }) .change_context(AwsKmsError::EncryptionFailed)?; let output = encrypted_output .ciphertext_blob .ok_or(AwsKmsError::MissingCiphertextEncryptionOutput) .map(|blob| consts::BASE64_ENGINE.encode(blob.into_inner()))?; let time_taken = start.elapsed(); metrics::AWS_KMS_ENCRYPT_TIME.record(time_taken.as_secs_f64(), &[]); Ok(output) } } /// Errors that could occur during KMS operations. #[derive(Debug, thiserror::Error)] pub enum AwsKmsError { /// An error occurred when base64 encoding input data. #[error("Failed to base64 encode input data")] Base64EncodingFailed, /// An error occurred when base64 decoding input data. #[error("Failed to base64 decode input data")] Base64DecodingFailed, /// An error occurred when AWS KMS decrypting input data. #[error("Failed to AWS KMS decrypt input data")] DecryptionFailed, /// An error occurred when AWS KMS encrypting input data. #[error("Failed to AWS KMS encrypt input data")] EncryptionFailed, /// The AWS KMS decrypted output does not include a plaintext output. #[error("Missing plaintext AWS KMS decryption output")] MissingPlaintextDecryptionOutput, /// The AWS KMS encrypted output does not include a ciphertext output. #[error("Missing ciphertext AWS KMS encryption output")] MissingCiphertextEncryptionOutput, /// An error occurred UTF-8 decoding AWS KMS decrypted output. #[error("Failed to UTF-8 decode decryption output")] Utf8DecodingFailed, /// The AWS KMS client has not been initialized. #[error("The AWS KMS client has not been initialized")] AwsKmsClientNotInitialized, } impl AwsKmsConfig { /// Verifies that the [`AwsKmsClient`] configuration is usable. 
pub fn validate(&self) -> Result<(), &'static str> { use common_utils::{ext_traits::ConfigExt, fp_utils::when}; when(self.key_id.is_default_or_empty(), || { Err("KMS AWS key ID must not be empty") })?; when(self.region.is_default_or_empty(), || { Err("KMS AWS region must not be empty") }) } } #[cfg(test)] mod tests { #![allow(clippy::expect_used, clippy::print_stdout)] #[tokio::test] async fn check_aws_kms_encryption() { std::env::set_var("AWS_SECRET_ACCESS_KEY", "YOUR SECRET ACCESS KEY"); std::env::set_var("AWS_ACCESS_KEY_ID", "YOUR AWS ACCESS KEY ID"); use super::*; let config = AwsKmsConfig { key_id: "YOUR AWS KMS KEY ID".to_string(), region: "AWS REGION".to_string(), }; let data = "hello".to_string(); let binding = data.as_bytes(); let kms_encrypted_fingerprint = AwsKmsClient::new(&config) .await .encrypt(binding) .await .expect("aws kms encryption failed"); println!("{kms_encrypted_fingerprint}"); } #[tokio::test] async fn check_aws_kms_decrypt() { std::env::set_var("AWS_SECRET_ACCESS_KEY", "YOUR SECRET ACCESS KEY"); std::env::set_var("AWS_ACCESS_KEY_ID", "YOUR AWS ACCESS KEY ID"); use super::*; let config = AwsKmsConfig { key_id: "YOUR AWS KMS KEY ID".to_string(), region: "AWS REGION".to_string(), }; // Should decrypt to hello let data = "AWS KMS ENCRYPTED CIPHER".to_string(); let binding = data.as_bytes(); let kms_encrypted_fingerprint = AwsKmsClient::new(&config) .await .decrypt(binding) .await .expect("aws kms decryption failed"); println!("{kms_encrypted_fingerprint}"); } }
crates/external_services/src/aws_kms/core.rs
external_services::src::aws_kms::core
1,828
true
// File: crates/external_services/src/aws_kms/implementers.rs // Module: external_services::src::aws_kms::implementers //! Trait implementations for aws kms client use common_utils::errors::CustomResult; use error_stack::ResultExt; use hyperswitch_interfaces::{ encryption_interface::{EncryptionError, EncryptionManagementInterface}, secrets_interface::{SecretManagementInterface, SecretsManagementError}, }; use masking::{PeekInterface, Secret}; use crate::aws_kms::core::AwsKmsClient; #[async_trait::async_trait] impl EncryptionManagementInterface for AwsKmsClient { async fn encrypt(&self, input: &[u8]) -> CustomResult<Vec<u8>, EncryptionError> { self.encrypt(input) .await .change_context(EncryptionError::EncryptionFailed) .map(|val| val.into_bytes()) } async fn decrypt(&self, input: &[u8]) -> CustomResult<Vec<u8>, EncryptionError> { self.decrypt(input) .await .change_context(EncryptionError::DecryptionFailed) .map(|val| val.into_bytes()) } } #[async_trait::async_trait] impl SecretManagementInterface for AwsKmsClient { async fn get_secret( &self, input: Secret<String>, ) -> CustomResult<Secret<String>, SecretsManagementError> { self.decrypt(input.peek()) .await .change_context(SecretsManagementError::FetchSecretFailed) .map(Into::into) } }
crates/external_services/src/aws_kms/implementers.rs
external_services::src::aws_kms::implementers
320
true
// File: crates/external_services/src/superposition/types.rs // Module: external_services::src::superposition::types //! Type definitions for Superposition integration use std::collections::HashMap; use common_utils::{errors::CustomResult, fp_utils::when}; use masking::{ExposeInterface, Secret}; /// Wrapper type for JSON values from Superposition #[derive(Debug, Clone)] pub struct JsonValue(serde_json::Value); impl JsonValue { /// Consume the wrapper and return the inner JSON value pub(super) fn into_inner(self) -> serde_json::Value { self.0 } } impl TryFrom<open_feature::StructValue> for JsonValue { type Error = String; fn try_from(sv: open_feature::StructValue) -> Result<Self, Self::Error> { let capacity = sv.fields.len(); sv.fields .into_iter() .try_fold( serde_json::Map::with_capacity(capacity), |mut map, (k, v)| { let value = super::convert_open_feature_value(v)?; map.insert(k, value); Ok(map) }, ) .map(|map| Self(serde_json::Value::Object(map))) } } /// Configuration for Superposition integration #[derive(Debug, Clone, serde::Deserialize)] #[serde(default)] pub struct SuperpositionClientConfig { /// Whether Superposition is enabled pub enabled: bool, /// Superposition API endpoint pub endpoint: String, /// Authentication token for Superposition pub token: Secret<String>, /// Organization ID in Superposition pub org_id: String, /// Workspace ID in Superposition pub workspace_id: String, /// Polling interval in seconds for configuration updates pub polling_interval: u64, /// Request timeout in seconds for Superposition API calls (None = no timeout) pub request_timeout: Option<u64>, } impl Default for SuperpositionClientConfig { fn default() -> Self { Self { enabled: false, endpoint: String::new(), token: Secret::new(String::new()), org_id: String::new(), workspace_id: String::new(), polling_interval: 15, request_timeout: None, } } } /// Errors that can occur when using Superposition #[derive(Debug, thiserror::Error)] pub enum SuperpositionError { /// Error 
initializing the Superposition client #[error("Failed to initialize Superposition client: {0}")] ClientInitError(String), /// Error from the Superposition client #[error("Superposition client error: {0}")] ClientError(String), /// Invalid configuration provided #[error("Invalid configuration: {0}")] InvalidConfiguration(String), } /// Context for configuration requests #[derive(Debug, Clone, Default)] pub struct ConfigContext { /// Key-value pairs for configuration context pub(super) values: HashMap<String, String>, } impl SuperpositionClientConfig { /// Validate the Superposition configuration pub fn validate(&self) -> Result<(), SuperpositionError> { if !self.enabled { return Ok(()); } when(self.endpoint.is_empty(), || { Err(SuperpositionError::InvalidConfiguration( "Superposition endpoint cannot be empty".to_string(), )) })?; when(url::Url::parse(&self.endpoint).is_err(), || { Err(SuperpositionError::InvalidConfiguration( "Superposition endpoint must be a valid URL".to_string(), )) })?; when(self.token.clone().expose().is_empty(), || { Err(SuperpositionError::InvalidConfiguration( "Superposition token cannot be empty".to_string(), )) })?; when(self.org_id.is_empty(), || { Err(SuperpositionError::InvalidConfiguration( "Superposition org_id cannot be empty".to_string(), )) })?; when(self.workspace_id.is_empty(), || { Err(SuperpositionError::InvalidConfiguration( "Superposition workspace_id cannot be empty".to_string(), )) })?; Ok(()) } } impl ConfigContext { /// Create a new empty context pub fn new() -> Self { Self::default() } /// Add a key-value pair to the context. Replaces existing value if key exists. 
pub fn with(mut self, key: &str, value: &str) -> Self { self.values.insert(key.to_string(), value.to_string()); self } } #[cfg(feature = "superposition")] #[async_trait::async_trait] impl hyperswitch_interfaces::secrets_interface::secret_handler::SecretsHandler for SuperpositionClientConfig { async fn convert_to_raw_secret( value: hyperswitch_interfaces::secrets_interface::secret_state::SecretStateContainer< Self, hyperswitch_interfaces::secrets_interface::secret_state::SecuredSecret, >, secret_management_client: &dyn hyperswitch_interfaces::secrets_interface::SecretManagementInterface, ) -> CustomResult< hyperswitch_interfaces::secrets_interface::secret_state::SecretStateContainer< Self, hyperswitch_interfaces::secrets_interface::secret_state::RawSecret, >, hyperswitch_interfaces::secrets_interface::SecretsManagementError, > { let superposition_config = value.get_inner(); let token = if superposition_config.enabled { secret_management_client .get_secret(superposition_config.token.clone()) .await? } else { superposition_config.token.clone() }; Ok(value.transition_state(|superposition_config| Self { token, ..superposition_config })) } }
crates/external_services/src/superposition/types.rs
external_services::src::superposition::types
1,200
true
// File: crates/external_services/src/grpc_client/health_check_client.rs // Module: external_services::src::grpc_client::health_check_client use std::{collections::HashMap, fmt::Debug}; use api_models::health_check::{HealthCheckMap, HealthCheckServices}; use common_utils::{errors::CustomResult, ext_traits::AsyncExt}; use error_stack::ResultExt; pub use health_check::{ health_check_response::ServingStatus, health_client::HealthClient, HealthCheckRequest, HealthCheckResponse, }; use router_env::logger; #[allow( missing_docs, unused_qualifications, clippy::unwrap_used, clippy::as_conversions, clippy::use_self )] pub mod health_check { tonic::include_proto!("grpc.health.v1"); } use super::{Client, DynamicRoutingClientConfig, GrpcClientSettings}; /// Result type for Dynamic Routing pub type HealthCheckResult<T> = CustomResult<T, HealthCheckError>; /// Dynamic Routing Errors #[derive(Debug, Clone, thiserror::Error)] pub enum HealthCheckError { /// The required input is missing #[error("Missing fields: {0} for building the Health check connection")] MissingFields(String), /// Error from gRPC Server #[error("Error from gRPC Server : {0}")] ConnectionError(String), /// status is invalid #[error("Invalid Status from server")] InvalidStatus, } /// Health Check Client type #[derive(Debug, Clone)] pub struct HealthCheckClient { /// Health clients for all gRPC based services pub clients: HashMap<HealthCheckServices, HealthClient<Client>>, } impl HealthCheckClient { /// Build connections to all gRPC services pub async fn build_connections( config: &GrpcClientSettings, client: Client, ) -> Result<Self, Box<dyn std::error::Error>> { let dynamic_routing_config = &config.dynamic_routing_client; let connection = match dynamic_routing_config { Some(DynamicRoutingClientConfig::Enabled { host, port, service, }) => Some((host.clone(), *port, service.clone())), _ => None, }; let mut client_map = HashMap::new(); if let Some(conn) = connection { let uri = format!("http://{}:{}", conn.0, 
conn.1).parse::<tonic::transport::Uri>()?; let health_client = HealthClient::with_origin(client, uri); client_map.insert(HealthCheckServices::DynamicRoutingService, health_client); } Ok(Self { clients: client_map, }) } /// Perform health check for all services involved pub async fn perform_health_check( &self, config: &GrpcClientSettings, ) -> HealthCheckResult<HealthCheckMap> { let dynamic_routing_config = &config.dynamic_routing_client; let connection = match dynamic_routing_config { Some(DynamicRoutingClientConfig::Enabled { host, port, service, }) => Some((host.clone(), *port, service.clone())), _ => None, }; let health_client = self .clients .get(&HealthCheckServices::DynamicRoutingService); // SAFETY : This is a safe cast as there exists a valid // integer value for this variant #[allow(clippy::as_conversions)] let expected_status = ServingStatus::Serving as i32; let mut service_map = HealthCheckMap::new(); let health_check_succeed = connection .as_ref() .async_map(|conn| self.get_response_from_grpc_service(conn.2.clone(), health_client)) .await .transpose() .change_context(HealthCheckError::ConnectionError( "error calling dynamic routing service".to_string(), )) .map_err(|err| logger::error!(error=?err)) .ok() .flatten() .is_some_and(|resp| resp.status == expected_status); connection.and_then(|_conn| { service_map.insert( HealthCheckServices::DynamicRoutingService, health_check_succeed, ) }); Ok(service_map) } async fn get_response_from_grpc_service( &self, service: String, client: Option<&HealthClient<Client>>, ) -> HealthCheckResult<HealthCheckResponse> { let request = tonic::Request::new(HealthCheckRequest { service }); let mut client = client .ok_or(HealthCheckError::MissingFields( "[health_client]".to_string(), ))? .clone(); let response = client .check(request) .await .change_context(HealthCheckError::ConnectionError( "Failed to call dynamic routing service".to_string(), ))? .into_inner(); Ok(response) } }
crates/external_services/src/grpc_client/health_check_client.rs
external_services::src::grpc_client::health_check_client
1,032
true
// File: crates/external_services/src/grpc_client/unified_connector_service.rs // Module: external_services::src::grpc_client::unified_connector_service use std::collections::{HashMap, HashSet}; use common_enums::connector_enums::Connector; use common_utils::{consts as common_utils_consts, errors::CustomResult, types::Url}; use error_stack::ResultExt; pub use hyperswitch_interfaces::unified_connector_service::transformers::UnifiedConnectorServiceError; use masking::{PeekInterface, Secret}; use router_env::logger; use tokio::time::{timeout, Duration}; use tonic::{ metadata::{MetadataMap, MetadataValue}, transport::Uri, }; use unified_connector_service_client::payments::{ self as payments_grpc, payment_service_client::PaymentServiceClient, PaymentServiceAuthorizeResponse, PaymentServiceTransformRequest, PaymentServiceTransformResponse, }; use crate::{ consts, grpc_client::{GrpcClientSettings, GrpcHeadersUcs}, utils::deserialize_hashset, }; /// Result type for Dynamic Routing pub type UnifiedConnectorServiceResult<T> = CustomResult<T, UnifiedConnectorServiceError>; /// Contains the Unified Connector Service client #[derive(Debug, Clone)] pub struct UnifiedConnectorServiceClient { /// The Unified Connector Service Client pub client: PaymentServiceClient<tonic::transport::Channel>, } /// Contains the Unified Connector Service Client config #[derive(Debug, Clone, serde::Deserialize, serde::Serialize)] pub struct UnifiedConnectorServiceClientConfig { /// Base URL of the gRPC Server pub base_url: Url, /// Contains the connection timeout duration in seconds pub connection_timeout: u64, /// Set of external services/connectors available for the unified connector service #[serde(default, deserialize_with = "deserialize_hashset")] pub ucs_only_connectors: HashSet<Connector>, /// Set of connectors for which psync is disabled in unified connector service #[serde(default, deserialize_with = "deserialize_hashset")] pub ucs_psync_disabled_connectors: HashSet<Connector>, } /// 
Contains the Connector Auth Type and related authentication data. #[derive(Debug, Clone, serde::Deserialize, serde::Serialize)] pub struct ConnectorAuthMetadata { /// Name of the connector (e.g., "stripe", "paypal"). pub connector_name: String, /// Type of authentication used (e.g., "HeaderKey", "BodyKey", "SignatureKey"). pub auth_type: String, /// Optional API key used for authentication. pub api_key: Option<Secret<String>>, /// Optional additional key used by some authentication types. pub key1: Option<Secret<String>>, /// Optional API secret used for signature or secure authentication. pub api_secret: Option<Secret<String>>, /// Optional auth_key_map used for authentication. pub auth_key_map: Option<HashMap<common_enums::enums::Currency, common_utils::pii::SecretSerdeValue>>, /// Id of the merchant. pub merchant_id: Secret<String>, } /// External Vault Proxy Related Metadata #[derive(Debug, Clone, serde::Deserialize, serde::Serialize)] #[serde(untagged)] pub enum ExternalVaultProxyMetadata { /// VGS proxy data variant VgsMetadata(VgsMetadata), } /// VGS proxy data #[derive(Debug, Clone, serde::Deserialize, serde::Serialize)] pub struct VgsMetadata { /// External vault url pub proxy_url: Url, /// CA certificates to verify the vault server pub certificate: Secret<String>, } impl UnifiedConnectorServiceClient { /// Builds the connection to the gRPC service pub async fn build_connections(config: &GrpcClientSettings) -> Option<Self> { match &config.unified_connector_service { Some(unified_connector_service_client_config) => { let uri: Uri = match unified_connector_service_client_config .base_url .get_string_repr() .parse() { Ok(parsed_uri) => parsed_uri, Err(err) => { logger::error!(error = ?err, "Failed to parse URI for Unified Connector Service"); return None; } }; let connect_result = timeout( Duration::from_secs(unified_connector_service_client_config.connection_timeout), PaymentServiceClient::connect(uri), ) .await; match connect_result { Ok(Ok(client)) => { 
logger::info!("Successfully connected to Unified Connector Service"); Some(Self { client }) } Ok(Err(err)) => { logger::error!(error = ?err, "Failed to connect to Unified Connector Service"); None } Err(err) => { logger::error!(error = ?err, "Connection to Unified Connector Service timed out"); None } } } None => { router_env::logger::error!(?config.unified_connector_service, "Unified Connector Service config is missing"); None } } } /// Performs Payment Authorize pub async fn payment_authorize( &self, payment_authorize_request: payments_grpc::PaymentServiceAuthorizeRequest, connector_auth_metadata: ConnectorAuthMetadata, grpc_headers: GrpcHeadersUcs, ) -> UnifiedConnectorServiceResult<tonic::Response<PaymentServiceAuthorizeResponse>> { let mut request = tonic::Request::new(payment_authorize_request); let connector_name = connector_auth_metadata.connector_name.clone(); let metadata = build_unified_connector_service_grpc_headers(connector_auth_metadata, grpc_headers)?; *request.metadata_mut() = metadata; self.client .clone() .authorize(request) .await .change_context(UnifiedConnectorServiceError::PaymentAuthorizeFailure) .inspect_err(|error| { logger::error!( grpc_error=?error, method="payment_authorize", connector_name=?connector_name, "UCS payment authorize gRPC call failed" ) }) } /// Performs Payment Sync/Get pub async fn payment_get( &self, payment_get_request: payments_grpc::PaymentServiceGetRequest, connector_auth_metadata: ConnectorAuthMetadata, grpc_headers: GrpcHeadersUcs, ) -> UnifiedConnectorServiceResult<tonic::Response<payments_grpc::PaymentServiceGetResponse>> { let mut request = tonic::Request::new(payment_get_request); let connector_name = connector_auth_metadata.connector_name.clone(); let metadata = build_unified_connector_service_grpc_headers(connector_auth_metadata, grpc_headers)?; *request.metadata_mut() = metadata; self.client .clone() .get(request) .await .change_context(UnifiedConnectorServiceError::PaymentGetFailure) .inspect_err(|error| { 
logger::error!( grpc_error=?error, method="payment_get", connector_name=?connector_name, "UCS payment get/sync gRPC call failed" ) }) } /// Performs Payment Setup Mandate pub async fn payment_setup_mandate( &self, payment_register_request: payments_grpc::PaymentServiceRegisterRequest, connector_auth_metadata: ConnectorAuthMetadata, grpc_headers: GrpcHeadersUcs, ) -> UnifiedConnectorServiceResult<tonic::Response<payments_grpc::PaymentServiceRegisterResponse>> { let mut request = tonic::Request::new(payment_register_request); let connector_name = connector_auth_metadata.connector_name.clone(); let metadata = build_unified_connector_service_grpc_headers(connector_auth_metadata, grpc_headers)?; *request.metadata_mut() = metadata; self.client .clone() .register(request) .await .change_context(UnifiedConnectorServiceError::PaymentRegisterFailure) .inspect_err(|error| { logger::error!( grpc_error=?error, method="payment_setup_mandate", connector_name=?connector_name, "UCS payment setup mandate gRPC call failed" ) }) } /// Performs Payment repeat (MIT - Merchant Initiated Transaction). 
pub async fn payment_repeat( &self, payment_repeat_request: payments_grpc::PaymentServiceRepeatEverythingRequest, connector_auth_metadata: ConnectorAuthMetadata, grpc_headers: GrpcHeadersUcs, ) -> UnifiedConnectorServiceResult< tonic::Response<payments_grpc::PaymentServiceRepeatEverythingResponse>, > { let mut request = tonic::Request::new(payment_repeat_request); let connector_name = connector_auth_metadata.connector_name.clone(); let metadata = build_unified_connector_service_grpc_headers(connector_auth_metadata, grpc_headers)?; *request.metadata_mut() = metadata; self.client .clone() .repeat_everything(request) .await .change_context(UnifiedConnectorServiceError::PaymentRepeatEverythingFailure) .inspect_err(|error| { logger::error!( grpc_error=?error, method="payment_repeat", connector_name=?connector_name, "UCS payment repeat gRPC call failed" ) }) } /// Transforms incoming webhook through UCS pub async fn transform_incoming_webhook( &self, webhook_transform_request: PaymentServiceTransformRequest, connector_auth_metadata: ConnectorAuthMetadata, grpc_headers: GrpcHeadersUcs, ) -> UnifiedConnectorServiceResult<tonic::Response<PaymentServiceTransformResponse>> { let mut request = tonic::Request::new(webhook_transform_request); let connector_name = connector_auth_metadata.connector_name.clone(); let metadata = build_unified_connector_service_grpc_headers(connector_auth_metadata, grpc_headers)?; *request.metadata_mut() = metadata; self.client .clone() .transform(request) .await .change_context(UnifiedConnectorServiceError::WebhookTransformFailure) .inspect_err(|error| { logger::error!( grpc_error=?error, method="transform_incoming_webhook", connector_name=?connector_name, "UCS webhook transform gRPC call failed" ) }) } } /// Build the gRPC Headers for Unified Connector Service Request pub fn build_unified_connector_service_grpc_headers( meta: ConnectorAuthMetadata, grpc_headers: GrpcHeadersUcs, ) -> Result<MetadataMap, UnifiedConnectorServiceError> { let mut 
metadata = MetadataMap::new(); let parse = |key: &str, value: &str| -> Result<MetadataValue<_>, UnifiedConnectorServiceError> { value.parse::<MetadataValue<_>>().map_err(|error| { logger::error!(?error); UnifiedConnectorServiceError::HeaderInjectionFailed(key.to_string()) }) }; metadata.append( consts::UCS_HEADER_CONNECTOR, parse("connector", &meta.connector_name)?, ); metadata.append( consts::UCS_HEADER_AUTH_TYPE, parse("auth_type", &meta.auth_type)?, ); if let Some(api_key) = meta.api_key { metadata.append( consts::UCS_HEADER_API_KEY, parse("api_key", api_key.peek())?, ); } if let Some(key1) = meta.key1 { metadata.append(consts::UCS_HEADER_KEY1, parse("key1", key1.peek())?); } if let Some(api_secret) = meta.api_secret { metadata.append( consts::UCS_HEADER_API_SECRET, parse("api_secret", api_secret.peek())?, ); } if let Some(auth_key_map) = meta.auth_key_map { let auth_key_map_str = serde_json::to_string(&auth_key_map).map_err(|error| { logger::error!(?error); UnifiedConnectorServiceError::ParsingFailed })?; metadata.append( consts::UCS_HEADER_AUTH_KEY_MAP, parse("auth_key_map", &auth_key_map_str)?, ); } metadata.append( common_utils_consts::X_MERCHANT_ID, parse(common_utils_consts::X_MERCHANT_ID, meta.merchant_id.peek())?, ); if let Some(external_vault_proxy_metadata) = grpc_headers.external_vault_proxy_metadata { metadata.append( consts::UCS_HEADER_EXTERNAL_VAULT_METADATA, parse("external_vault_metadata", &external_vault_proxy_metadata)?, ); }; let lineage_ids_str = grpc_headers .lineage_ids .get_url_encoded_string() .map_err(|err| { logger::error!(?err); UnifiedConnectorServiceError::HeaderInjectionFailed(consts::UCS_LINEAGE_IDS.to_string()) })?; metadata.append( consts::UCS_LINEAGE_IDS, parse(consts::UCS_LINEAGE_IDS, &lineage_ids_str)?, ); if let Some(reference_id) = grpc_headers.merchant_reference_id { metadata.append( consts::UCS_HEADER_REFERENCE_ID, parse( consts::UCS_HEADER_REFERENCE_ID, reference_id.get_string_repr(), )?, ); }; if let Some(request_id) = 
grpc_headers.request_id { metadata.append( common_utils_consts::X_REQUEST_ID, parse(common_utils_consts::X_REQUEST_ID, &request_id)?, ); }; if let Some(shadow_mode) = grpc_headers.shadow_mode { metadata.append( common_utils_consts::X_UNIFIED_CONNECTOR_SERVICE_MODE, parse( common_utils_consts::X_UNIFIED_CONNECTOR_SERVICE_MODE, &shadow_mode.to_string(), )?, ); } if let Err(err) = grpc_headers .tenant_id .parse() .map(|tenant_id| metadata.append(common_utils_consts::TENANT_HEADER, tenant_id)) { logger::error!( header_parse_error=?err, tenant_id=?grpc_headers.tenant_id, "Failed to parse tenant_id header for UCS gRPC request: {}", common_utils_consts::TENANT_HEADER ); } Ok(metadata) }
crates/external_services/src/grpc_client/unified_connector_service.rs
external_services::src::grpc_client::unified_connector_service
2,959
true
// File: crates/external_services/src/grpc_client/dynamic_routing.rs // Module: external_services::src::grpc_client::dynamic_routing /// Module for Contract based routing pub mod contract_routing_client; use std::fmt::Debug; use common_utils::errors::CustomResult; use router_env::logger; use serde; /// Elimination Routing Client Interface Implementation pub mod elimination_based_client; /// Success Routing Client Interface Implementation pub mod success_rate_client; pub use contract_routing_client::ContractScoreCalculatorClient; pub use elimination_based_client::EliminationAnalyserClient; pub use success_rate_client::SuccessRateCalculatorClient; use super::Client; /// Result type for Dynamic Routing pub type DynamicRoutingResult<T> = CustomResult<T, DynamicRoutingError>; /// Dynamic Routing Errors #[derive(Debug, Clone, thiserror::Error)] pub enum DynamicRoutingError { /// The required input is missing #[error("Missing Required Field : {field} for building the Dynamic Routing Request")] MissingRequiredField { /// The required field name field: String, }, /// Error from Dynamic Routing Server while performing success_rate analysis #[error("Error from Dynamic Routing Server while perfrming success_rate analysis : {0}")] SuccessRateBasedRoutingFailure(String), /// Generic Error from Dynamic Routing Server while performing contract based routing #[error("Error from Dynamic Routing Server while performing contract based routing: {0}")] ContractBasedRoutingFailure(String), /// Generic Error from Dynamic Routing Server while performing contract based routing #[error("Contract not found in the dynamic routing service")] ContractNotFound, /// Error from Dynamic Routing Server while perfrming elimination #[error("Error from Dynamic Routing Server while perfrming elimination : {0}")] EliminationRateRoutingFailure(String), } /// Type that consists of all the services provided by the client #[derive(Debug, Clone)] pub struct RoutingStrategy { /// success rate service for 
Dynamic Routing pub success_rate_client: SuccessRateCalculatorClient<Client>, /// contract based routing service for Dynamic Routing pub contract_based_client: ContractScoreCalculatorClient<Client>, /// elimination service for Dynamic Routing pub elimination_based_client: EliminationAnalyserClient<Client>, } /// Contains the Dynamic Routing Client Config #[derive(Debug, Clone, serde::Deserialize, serde::Serialize, Default)] #[serde(untagged)] pub enum DynamicRoutingClientConfig { /// If the dynamic routing client config has been enabled Enabled { /// The host for the client host: String, /// The port of the client port: u16, /// Service name service: String, }, #[default] /// If the dynamic routing client config has been disabled Disabled, } impl DynamicRoutingClientConfig { /// establish connection with the server pub fn get_dynamic_routing_connection( self, client: Client, ) -> Result<Option<RoutingStrategy>, Box<dyn std::error::Error>> { match self { Self::Enabled { host, port, .. } => { let uri = format!("http://{host}:{port}").parse::<tonic::transport::Uri>()?; logger::info!("Connection established with dynamic routing gRPC Server"); let (success_rate_client, contract_based_client, elimination_based_client) = ( SuccessRateCalculatorClient::with_origin(client.clone(), uri.clone()), ContractScoreCalculatorClient::with_origin(client.clone(), uri.clone()), EliminationAnalyserClient::with_origin(client, uri), ); Ok(Some(RoutingStrategy { success_rate_client, contract_based_client, elimination_based_client, })) } Self::Disabled => Ok(None), } } }
crates/external_services/src/grpc_client/dynamic_routing.rs
external_services::src::grpc_client::dynamic_routing
776
true
// File: crates/external_services/src/grpc_client/revenue_recovery.rs // Module: external_services::src::grpc_client::revenue_recovery /// Recovery Decider client pub mod recovery_decider_client; use std::fmt::Debug; use common_utils::consts; use router_env::logger; /// Contains recovery grpc headers #[derive(Debug)] pub struct GrpcRecoveryHeaders { /// Request id pub request_id: Option<String>, } /// Trait to add necessary recovery headers to the tonic Request pub(crate) trait AddRecoveryHeaders { /// Add necessary recovery header fields to the tonic Request fn add_recovery_headers(&mut self, headers: GrpcRecoveryHeaders); } impl<T> AddRecoveryHeaders for tonic::Request<T> { #[track_caller] fn add_recovery_headers(&mut self, headers: GrpcRecoveryHeaders) { headers.request_id.map(|request_id| { request_id .parse() .map(|request_id_val| { self .metadata_mut() .append(consts::X_REQUEST_ID, request_id_val) }) .inspect_err( |err| logger::warn!(header_parse_error=?err,"invalid {} received",consts::X_REQUEST_ID), ) .ok(); }); } } /// Creates a tonic::Request with recovery headers added. pub(crate) fn create_revenue_recovery_grpc_request<T: Debug>( message: T, recovery_headers: GrpcRecoveryHeaders, ) -> tonic::Request<T> { let mut request = tonic::Request::new(message); request.add_recovery_headers(recovery_headers); request }
crates/external_services/src/grpc_client/revenue_recovery.rs
external_services::src::grpc_client::revenue_recovery
348
true
// File: crates/external_services/src/grpc_client/dynamic_routing/elimination_based_client.rs // Module: external_services::src::grpc_client::dynamic_routing::elimination_based_client use api_models::routing::{ EliminationAnalyserConfig as EliminationConfig, RoutableConnectorChoice, RoutableConnectorChoiceWithBucketName, }; use common_utils::{ext_traits::OptionExt, transformers::ForeignTryFrom}; pub use elimination_rate::{ elimination_analyser_client::EliminationAnalyserClient, EliminationBucketConfig, EliminationRequest, EliminationResponse, InvalidateBucketRequest, InvalidateBucketResponse, LabelWithBucketName, UpdateEliminationBucketRequest, UpdateEliminationBucketResponse, }; use error_stack::ResultExt; use router_env::{instrument, logger, tracing}; #[allow( missing_docs, unused_qualifications, clippy::unwrap_used, clippy::as_conversions, clippy::use_self )] pub mod elimination_rate { tonic::include_proto!("elimination"); } use super::{Client, DynamicRoutingError, DynamicRoutingResult}; use crate::grpc_client::{self, GrpcHeaders}; /// The trait Elimination Based Routing would have the functions required to support performance, calculation and invalidation bucket #[async_trait::async_trait] pub trait EliminationBasedRouting: dyn_clone::DynClone + Send + Sync { /// To perform the elimination based routing for the list of connectors async fn perform_elimination_routing( &self, id: String, params: String, labels: Vec<RoutableConnectorChoice>, configs: Option<EliminationConfig>, headers: GrpcHeaders, ) -> DynamicRoutingResult<EliminationResponse>; /// To update the bucket size and ttl for list of connectors with its respective bucket name async fn update_elimination_bucket_config( &self, id: String, params: String, report: Vec<RoutableConnectorChoiceWithBucketName>, config: Option<EliminationConfig>, headers: GrpcHeaders, ) -> DynamicRoutingResult<UpdateEliminationBucketResponse>; /// To invalidate the previous id's bucket async fn invalidate_elimination_bucket( 
&self, id: String, headers: GrpcHeaders, ) -> DynamicRoutingResult<InvalidateBucketResponse>; } #[async_trait::async_trait] impl EliminationBasedRouting for EliminationAnalyserClient<Client> { #[instrument(skip_all)] async fn perform_elimination_routing( &self, id: String, params: String, label_input: Vec<RoutableConnectorChoice>, configs: Option<EliminationConfig>, headers: GrpcHeaders, ) -> DynamicRoutingResult<EliminationResponse> { let labels = label_input .into_iter() .map(|conn_choice| conn_choice.to_string()) .collect::<Vec<_>>(); let config = configs.map(ForeignTryFrom::foreign_try_from).transpose()?; let request = grpc_client::create_grpc_request( EliminationRequest { id, params, labels, config, }, headers, ); let response = self .clone() .get_elimination_status(request) .await .change_context(DynamicRoutingError::EliminationRateRoutingFailure( "Failed to perform the elimination analysis".to_string(), ))? .into_inner(); logger::info!(dynamic_routing_response=?response); Ok(response) } #[instrument(skip_all)] async fn update_elimination_bucket_config( &self, id: String, params: String, report: Vec<RoutableConnectorChoiceWithBucketName>, configs: Option<EliminationConfig>, headers: GrpcHeaders, ) -> DynamicRoutingResult<UpdateEliminationBucketResponse> { let config = configs.map(ForeignTryFrom::foreign_try_from).transpose()?; let labels_with_bucket_name = report .into_iter() .map(|conn_choice_with_bucket| LabelWithBucketName { label: conn_choice_with_bucket .routable_connector_choice .to_string(), bucket_name: conn_choice_with_bucket.bucket_name, }) .collect::<Vec<_>>(); let request = grpc_client::create_grpc_request( UpdateEliminationBucketRequest { id, params, labels_with_bucket_name, config, }, headers, ); let response = self .clone() .update_elimination_bucket(request) .await .change_context(DynamicRoutingError::EliminationRateRoutingFailure( "Failed to update the elimination bucket".to_string(), ))? 
.into_inner(); logger::info!(dynamic_routing_response=?response); Ok(response) } #[instrument(skip_all)] async fn invalidate_elimination_bucket( &self, id: String, headers: GrpcHeaders, ) -> DynamicRoutingResult<InvalidateBucketResponse> { let request = grpc_client::create_grpc_request(InvalidateBucketRequest { id }, headers); let response = self .clone() .invalidate_bucket(request) .await .change_context(DynamicRoutingError::EliminationRateRoutingFailure( "Failed to invalidate the elimination bucket".to_string(), ))? .into_inner(); logger::info!(dynamic_routing_response=?response); Ok(response) } } impl ForeignTryFrom<EliminationConfig> for EliminationBucketConfig { type Error = error_stack::Report<DynamicRoutingError>; fn foreign_try_from(config: EliminationConfig) -> Result<Self, Self::Error> { Ok(Self { bucket_size: config .bucket_size .get_required_value("bucket_size") .change_context(DynamicRoutingError::MissingRequiredField { field: "bucket_size".to_string(), })?, bucket_leak_interval_in_secs: config .bucket_leak_interval_in_secs .get_required_value("bucket_leak_interval_in_secs") .change_context(DynamicRoutingError::MissingRequiredField { field: "bucket_leak_interval_in_secs".to_string(), })?, }) } }
crates/external_services/src/grpc_client/dynamic_routing/elimination_based_client.rs
external_services::src::grpc_client::dynamic_routing::elimination_based_client
1,312
true
// File: crates/external_services/src/grpc_client/dynamic_routing/success_rate_client.rs // Module: external_services::src::grpc_client::dynamic_routing::success_rate_client use api_models::routing::{ CurrentBlockThreshold, RoutableConnectorChoice, RoutableConnectorChoiceWithStatus, SuccessBasedRoutingConfig, SuccessBasedRoutingConfigBody, SuccessRateSpecificityLevel, }; use common_utils::{ext_traits::OptionExt, transformers::ForeignTryFrom}; use error_stack::ResultExt; use router_env::{instrument, logger, tracing}; pub use success_rate::{ success_rate_calculator_client::SuccessRateCalculatorClient, CalGlobalSuccessRateConfig, CalGlobalSuccessRateRequest, CalGlobalSuccessRateResponse, CalSuccessRateConfig, CalSuccessRateRequest, CalSuccessRateResponse, CurrentBlockThreshold as DynamicCurrentThreshold, InvalidateWindowsRequest, InvalidateWindowsResponse, LabelWithStatus, SuccessRateSpecificityLevel as ProtoSpecificityLevel, UpdateSuccessRateWindowConfig, UpdateSuccessRateWindowRequest, UpdateSuccessRateWindowResponse, }; #[allow( missing_docs, unused_qualifications, clippy::unwrap_used, clippy::as_conversions, clippy::use_self )] pub mod success_rate { tonic::include_proto!("success_rate"); } use super::{Client, DynamicRoutingError, DynamicRoutingResult}; use crate::grpc_client::{self, GrpcHeaders}; /// The trait Success Based Dynamic Routing would have the functions required to support the calculation and updation window #[async_trait::async_trait] pub trait SuccessBasedDynamicRouting: dyn_clone::DynClone + Send + Sync { /// To calculate the success rate for the list of chosen connectors async fn calculate_success_rate( &self, id: String, success_rate_based_config: SuccessBasedRoutingConfig, params: String, label_input: Vec<RoutableConnectorChoice>, headers: GrpcHeaders, ) -> DynamicRoutingResult<CalSuccessRateResponse>; /// To update the success rate with the given label async fn update_success_rate( &self, id: String, success_rate_based_config: 
SuccessBasedRoutingConfig, params: String, response: Vec<RoutableConnectorChoiceWithStatus>, headers: GrpcHeaders, ) -> DynamicRoutingResult<UpdateSuccessRateWindowResponse>; /// To invalidates the success rate routing keys async fn invalidate_success_rate_routing_keys( &self, id: String, headers: GrpcHeaders, ) -> DynamicRoutingResult<InvalidateWindowsResponse>; /// To calculate both global and merchant specific success rate for the list of chosen connectors async fn calculate_entity_and_global_success_rate( &self, id: String, success_rate_based_config: SuccessBasedRoutingConfig, params: String, label_input: Vec<RoutableConnectorChoice>, headers: GrpcHeaders, ) -> DynamicRoutingResult<CalGlobalSuccessRateResponse>; } #[async_trait::async_trait] impl SuccessBasedDynamicRouting for SuccessRateCalculatorClient<Client> { #[instrument(skip_all)] async fn calculate_success_rate( &self, id: String, success_rate_based_config: SuccessBasedRoutingConfig, params: String, label_input: Vec<RoutableConnectorChoice>, headers: GrpcHeaders, ) -> DynamicRoutingResult<CalSuccessRateResponse> { let labels = label_input .into_iter() .map(|conn_choice| conn_choice.to_string()) .collect::<Vec<_>>(); let config = success_rate_based_config .config .map(ForeignTryFrom::foreign_try_from) .transpose()?; let request = grpc_client::create_grpc_request( CalSuccessRateRequest { id, params, labels, config, }, headers, ); let response = self .clone() .fetch_success_rate(request) .await .change_context(DynamicRoutingError::SuccessRateBasedRoutingFailure( "Failed to fetch the success rate".to_string(), ))? 
.into_inner(); logger::info!(dynamic_routing_response=?response); Ok(response) } #[instrument(skip_all)] async fn update_success_rate( &self, id: String, success_rate_based_config: SuccessBasedRoutingConfig, params: String, label_input: Vec<RoutableConnectorChoiceWithStatus>, headers: GrpcHeaders, ) -> DynamicRoutingResult<UpdateSuccessRateWindowResponse> { let config = success_rate_based_config .config .map(ForeignTryFrom::foreign_try_from) .transpose()?; let labels_with_status = label_input .clone() .into_iter() .map(|conn_choice| LabelWithStatus { label: conn_choice.routable_connector_choice.to_string(), status: conn_choice.status, }) .collect(); let global_labels_with_status = label_input .into_iter() .map(|conn_choice| LabelWithStatus { label: conn_choice.routable_connector_choice.connector.to_string(), status: conn_choice.status, }) .collect(); let request = grpc_client::create_grpc_request( UpdateSuccessRateWindowRequest { id, params, labels_with_status, config, global_labels_with_status, }, headers, ); let response = self .clone() .update_success_rate_window(request) .await .change_context(DynamicRoutingError::SuccessRateBasedRoutingFailure( "Failed to update the success rate window".to_string(), ))? .into_inner(); logger::info!(dynamic_routing_response=?response); Ok(response) } #[instrument(skip_all)] async fn invalidate_success_rate_routing_keys( &self, id: String, headers: GrpcHeaders, ) -> DynamicRoutingResult<InvalidateWindowsResponse> { let request = grpc_client::create_grpc_request(InvalidateWindowsRequest { id }, headers); let response = self .clone() .invalidate_windows(request) .await .change_context(DynamicRoutingError::SuccessRateBasedRoutingFailure( "Failed to invalidate the success rate routing keys".to_string(), ))? 
.into_inner(); logger::info!(dynamic_routing_response=?response); Ok(response) } async fn calculate_entity_and_global_success_rate( &self, id: String, success_rate_based_config: SuccessBasedRoutingConfig, params: String, label_input: Vec<RoutableConnectorChoice>, headers: GrpcHeaders, ) -> DynamicRoutingResult<CalGlobalSuccessRateResponse> { let labels = label_input .clone() .into_iter() .map(|conn_choice| conn_choice.to_string()) .collect::<Vec<_>>(); let global_labels = label_input .into_iter() .map(|conn_choice| conn_choice.connector.to_string()) .collect::<Vec<_>>(); let config = success_rate_based_config .config .map(ForeignTryFrom::foreign_try_from) .transpose()?; let request = grpc_client::create_grpc_request( CalGlobalSuccessRateRequest { entity_id: id, entity_params: params, entity_labels: labels, global_labels, config, }, headers, ); let response = self .clone() .fetch_entity_and_global_success_rate(request) .await .change_context(DynamicRoutingError::SuccessRateBasedRoutingFailure( "Failed to fetch the entity and global success rate".to_string(), ))? 
.into_inner(); logger::info!(dynamic_routing_response=?response); Ok(response) } } impl ForeignTryFrom<CurrentBlockThreshold> for DynamicCurrentThreshold { type Error = error_stack::Report<DynamicRoutingError>; fn foreign_try_from(current_threshold: CurrentBlockThreshold) -> Result<Self, Self::Error> { Ok(Self { duration_in_mins: current_threshold.duration_in_mins, max_total_count: current_threshold .max_total_count .get_required_value("max_total_count") .change_context(DynamicRoutingError::MissingRequiredField { field: "max_total_count".to_string(), })?, }) } } impl ForeignTryFrom<SuccessBasedRoutingConfigBody> for UpdateSuccessRateWindowConfig { type Error = error_stack::Report<DynamicRoutingError>; fn foreign_try_from(config: SuccessBasedRoutingConfigBody) -> Result<Self, Self::Error> { Ok(Self { max_aggregates_size: config .max_aggregates_size .get_required_value("max_aggregate_size") .change_context(DynamicRoutingError::MissingRequiredField { field: "max_aggregates_size".to_string(), })?, current_block_threshold: config .current_block_threshold .map(ForeignTryFrom::foreign_try_from) .transpose()?, }) } } impl ForeignTryFrom<SuccessBasedRoutingConfigBody> for CalSuccessRateConfig { type Error = error_stack::Report<DynamicRoutingError>; fn foreign_try_from(config: SuccessBasedRoutingConfigBody) -> Result<Self, Self::Error> { Ok(Self { min_aggregates_size: config .min_aggregates_size .get_required_value("min_aggregate_size") .change_context(DynamicRoutingError::MissingRequiredField { field: "min_aggregates_size".to_string(), })?, default_success_rate: config .default_success_rate .get_required_value("default_success_rate") .change_context(DynamicRoutingError::MissingRequiredField { field: "default_success_rate".to_string(), })?, specificity_level: match config.specificity_level { SuccessRateSpecificityLevel::Merchant => Some(ProtoSpecificityLevel::Entity.into()), SuccessRateSpecificityLevel::Global => Some(ProtoSpecificityLevel::Global.into()), }, 
exploration_percent: config.exploration_percent, shuffle_on_tie_during_exploitation: config.shuffle_on_tie_during_exploitation, }) } } impl ForeignTryFrom<SuccessBasedRoutingConfigBody> for CalGlobalSuccessRateConfig { type Error = error_stack::Report<DynamicRoutingError>; fn foreign_try_from(config: SuccessBasedRoutingConfigBody) -> Result<Self, Self::Error> { Ok(Self { entity_min_aggregates_size: config .min_aggregates_size .get_required_value("min_aggregate_size") .change_context(DynamicRoutingError::MissingRequiredField { field: "min_aggregates_size".to_string(), })?, entity_default_success_rate: config .default_success_rate .get_required_value("default_success_rate") .change_context(DynamicRoutingError::MissingRequiredField { field: "default_success_rate".to_string(), })?, }) } }
crates/external_services/src/grpc_client/dynamic_routing/success_rate_client.rs
external_services::src::grpc_client::dynamic_routing::success_rate_client
2,344
true
// File: crates/external_services/src/grpc_client/dynamic_routing/contract_routing_client.rs // Module: external_services::src::grpc_client::dynamic_routing::contract_routing_client use api_models::routing::{ ContractBasedRoutingConfig, ContractBasedRoutingConfigBody, ContractBasedTimeScale, LabelInformation, RoutableConnectorChoice, RoutableConnectorChoiceWithStatus, }; use common_utils::{ ext_traits::OptionExt, transformers::{ForeignFrom, ForeignTryFrom}, }; pub use contract_routing::{ contract_score_calculator_client::ContractScoreCalculatorClient, CalContractScoreConfig, CalContractScoreRequest, CalContractScoreResponse, InvalidateContractRequest, InvalidateContractResponse, LabelInformation as ProtoLabelInfo, TimeScale, UpdateContractRequest, UpdateContractResponse, }; use error_stack::ResultExt; use router_env::logger; use crate::grpc_client::{self, GrpcHeaders}; #[allow( missing_docs, unused_qualifications, clippy::unwrap_used, clippy::as_conversions, clippy::use_self )] pub mod contract_routing { tonic::include_proto!("contract_routing"); } pub use tonic::Code; use super::{Client, DynamicRoutingError, DynamicRoutingResult}; /// The trait ContractBasedDynamicRouting would have the functions required to support the calculation and updation window #[async_trait::async_trait] pub trait ContractBasedDynamicRouting: dyn_clone::DynClone + Send + Sync { /// To calculate the contract scores for the list of chosen connectors async fn calculate_contract_score( &self, id: String, config: ContractBasedRoutingConfig, params: String, label_input: Vec<RoutableConnectorChoice>, headers: GrpcHeaders, ) -> DynamicRoutingResult<CalContractScoreResponse>; /// To update the contract scores with the given labels async fn update_contracts( &self, id: String, label_info: Vec<LabelInformation>, params: String, response: Vec<RoutableConnectorChoiceWithStatus>, incr_count: u64, headers: GrpcHeaders, ) -> DynamicRoutingResult<UpdateContractResponse>; /// To invalidates the contract 
scores against the id async fn invalidate_contracts( &self, id: String, headers: GrpcHeaders, ) -> DynamicRoutingResult<InvalidateContractResponse>; } #[async_trait::async_trait] impl ContractBasedDynamicRouting for ContractScoreCalculatorClient<Client> { async fn calculate_contract_score( &self, id: String, config: ContractBasedRoutingConfig, params: String, label_input: Vec<RoutableConnectorChoice>, headers: GrpcHeaders, ) -> DynamicRoutingResult<CalContractScoreResponse> { let labels = label_input .into_iter() .map(|conn_choice| conn_choice.to_string()) .collect::<Vec<_>>(); let config = config .config .map(ForeignTryFrom::foreign_try_from) .transpose()?; let request = grpc_client::create_grpc_request( CalContractScoreRequest { id, params, labels, config, }, headers, ); let response = self .clone() .fetch_contract_score(request) .await .map_err(|err| match err.code() { Code::NotFound => DynamicRoutingError::ContractNotFound, _ => DynamicRoutingError::ContractBasedRoutingFailure(err.to_string()), })? .into_inner(); logger::info!(dynamic_routing_response=?response); Ok(response) } async fn update_contracts( &self, id: String, label_info: Vec<LabelInformation>, params: String, _response: Vec<RoutableConnectorChoiceWithStatus>, incr_count: u64, headers: GrpcHeaders, ) -> DynamicRoutingResult<UpdateContractResponse> { let mut labels_information = label_info .into_iter() .map(ProtoLabelInfo::foreign_from) .collect::<Vec<_>>(); labels_information .iter_mut() .for_each(|info| info.current_count += incr_count); let request = grpc_client::create_grpc_request( UpdateContractRequest { id, params, labels_information, }, headers, ); let response = self .clone() .update_contract(request) .await .change_context(DynamicRoutingError::ContractBasedRoutingFailure( "Failed to update the contracts".to_string(), ))? 
.into_inner(); logger::info!(dynamic_routing_response=?response); Ok(response) } async fn invalidate_contracts( &self, id: String, headers: GrpcHeaders, ) -> DynamicRoutingResult<InvalidateContractResponse> { let request = grpc_client::create_grpc_request(InvalidateContractRequest { id }, headers); let response = self .clone() .invalidate_contract(request) .await .change_context(DynamicRoutingError::ContractBasedRoutingFailure( "Failed to invalidate the contracts".to_string(), ))? .into_inner(); Ok(response) } } impl ForeignFrom<ContractBasedTimeScale> for TimeScale { fn foreign_from(scale: ContractBasedTimeScale) -> Self { Self { time_scale: match scale { ContractBasedTimeScale::Day => 0, _ => 1, }, } } } impl ForeignTryFrom<ContractBasedRoutingConfigBody> for CalContractScoreConfig { type Error = error_stack::Report<DynamicRoutingError>; fn foreign_try_from(config: ContractBasedRoutingConfigBody) -> Result<Self, Self::Error> { Ok(Self { constants: config .constants .get_required_value("constants") .change_context(DynamicRoutingError::MissingRequiredField { field: "constants".to_string(), })?, time_scale: config.time_scale.clone().map(TimeScale::foreign_from), }) } } impl ForeignFrom<LabelInformation> for ProtoLabelInfo { fn foreign_from(config: LabelInformation) -> Self { Self { label: format!( "{}:{}", config.label.clone(), config.mca_id.get_string_repr() ), target_count: config.target_count, target_time: config.target_time, current_count: u64::default(), } } }
crates/external_services/src/grpc_client/dynamic_routing/contract_routing_client.rs
external_services::src::grpc_client::dynamic_routing::contract_routing_client
1,370
true
// File: crates/external_services/src/grpc_client/revenue_recovery/recovery_decider_client.rs // Module: external_services::src::grpc_client::revenue_recovery::recovery_decider_client use std::fmt::Debug; use common_utils::errors::CustomResult; use error_stack::{Report, ResultExt}; use router_env::logger; use crate::grpc_client::Client; #[allow( missing_docs, unused_qualifications, clippy::unwrap_used, clippy::as_conversions, clippy::use_self )] pub mod decider { tonic::include_proto!("decider"); } use decider::decider_client::DeciderClient; pub use decider::{DeciderRequest, DeciderResponse}; /// Recovery Decider result pub type RecoveryDeciderResult<T> = CustomResult<T, RecoveryDeciderError>; /// Recovery Decider Error #[derive(Debug, Clone, thiserror::Error)] pub enum RecoveryDeciderError { /// Error establishing gRPC connection #[error("Failed to establish connection with Recovery Decider service: {0}")] ConnectionError(String), /// Error received from the gRPC service #[error("Recovery Decider service returned an error: {0}")] ServiceError(String), /// Missing configuration for the client #[error("Recovery Decider client configuration is missing or invalid")] ConfigError(String), } /// Recovery Decider Client type #[async_trait::async_trait] pub trait RecoveryDeciderClientInterface: dyn_clone::DynClone + Send + Sync + Debug { /// fn to call gRPC service async fn decide_on_retry( &mut self, request_payload: DeciderRequest, recovery_headers: super::GrpcRecoveryHeaders, ) -> RecoveryDeciderResult<DeciderResponse>; } dyn_clone::clone_trait_object!(RecoveryDeciderClientInterface); /// Configuration for the Recovery Decider gRPC client. 
#[derive(Debug, Default, Clone, serde::Deserialize, serde::Serialize)] pub struct RecoveryDeciderClientConfig { /// Base URL of the Recovery Decider service pub base_url: String, } impl RecoveryDeciderClientConfig { /// Validate the configuration pub fn validate(&self) -> Result<(), RecoveryDeciderError> { use common_utils::fp_utils::when; when(self.base_url.is_empty(), || { Err(RecoveryDeciderError::ConfigError( "Recovery Decider base URL cannot be empty when configuration is provided" .to_string(), )) }) } /// create a connection pub fn get_recovery_decider_connection( &self, hyper_client: Client, ) -> Result<DeciderClient<Client>, Report<RecoveryDeciderError>> { let uri = self .base_url .parse::<tonic::transport::Uri>() .map_err(Report::from) .change_context(RecoveryDeciderError::ConfigError(format!( "Invalid URI: {}", self.base_url )))?; let service_client = DeciderClient::with_origin(hyper_client, uri); Ok(service_client) } } #[async_trait::async_trait] impl RecoveryDeciderClientInterface for DeciderClient<Client> { async fn decide_on_retry( &mut self, request_payload: DeciderRequest, recovery_headers: super::GrpcRecoveryHeaders, ) -> RecoveryDeciderResult<DeciderResponse> { let request = super::create_revenue_recovery_grpc_request(request_payload, recovery_headers); logger::debug!(decider_request =?request); let grpc_response = self .decide(request) .await .change_context(RecoveryDeciderError::ServiceError( "Decider service call failed".to_string(), ))? .into_inner(); logger::debug!(grpc_decider_response =?grpc_response); Ok(grpc_response) } }
crates/external_services/src/grpc_client/revenue_recovery/recovery_decider_client.rs
external_services::src::grpc_client::revenue_recovery::recovery_decider_client
837
true
// File: crates/external_services/src/email/ses.rs
// Module: external_services::src::email::ses

use std::time::{Duration, SystemTime};

use aws_sdk_sesv2::{
    config::Region,
    operation::send_email::SendEmailError,
    types::{Body, Content, Destination, EmailContent, Message},
    Client,
};
use aws_sdk_sts::config::Credentials;
use aws_smithy_runtime::client::http::hyper_014::HyperClientBuilder;
use common_utils::{errors::CustomResult, pii};
use error_stack::{report, ResultExt};
use hyper::Uri;
use masking::PeekInterface;
use router_env::logger;

use crate::email::{EmailClient, EmailError, EmailResult, EmailSettings, IntermediateString};

/// Client for AWS SES operation
#[derive(Debug, Clone)]
pub struct AwsSes {
    // Sender address used as the `from` address on every outgoing email.
    sender: String,
    // SES/STS specific configuration (role to assume, session name).
    ses_config: SESConfig,
    // Full email settings, retained so a fresh SES client can be built per send.
    settings: EmailSettings,
}

/// Struct that contains the AWS ses specific configs required to construct an SES email client
#[derive(Debug, Clone, Default, serde::Deserialize)]
pub struct SESConfig {
    /// The arn of email role
    pub email_role_arn: String,
    /// The name of sts_session role
    pub sts_role_session_name: String,
}

impl SESConfig {
    /// Validation for the SES client specific configs.
    ///
    /// Returns a static message naming the first missing/empty field.
    pub fn validate(&self) -> Result<(), &'static str> {
        use common_utils::{ext_traits::ConfigExt, fp_utils::when};

        when(self.email_role_arn.is_default_or_empty(), || {
            Err("email.aws_ses.email_role_arn must not be empty")
        })?;

        when(self.sts_role_session_name.is_default_or_empty(), || {
            Err("email.aws_ses.sts_role_session_name must not be empty")
        })
    }
}

/// Errors that could occur during SES operations.
#[derive(Debug, thiserror::Error)]
pub enum AwsSesError {
    /// An error occurred in the SDK while sending email.
    #[error("Failed to Send Email {0:?}")]
    SendingFailure(Box<aws_sdk_sesv2::error::SdkError<SendEmailError>>),
    /// Configuration variable is missing to construct the email client
    #[error("Missing configuration variable {0}")]
    MissingConfigurationVariable(&'static str),
    /// Failed to assume the given STS role
    #[error("Failed to STS assume role: Role ARN: {role_arn}, Session name: {session_name}, Region: {region}")]
    AssumeRoleFailure {
        /// Aws region
        region: String,
        /// arn of email role
        role_arn: String,
        /// The name of sts_session role
        session_name: String,
    },
    /// Temporary credentials are missing
    #[error("Assumed role does not contain credentials for role user: {0:?}")]
    TemporaryCredentialsMissing(String),
    /// The proxy Connector cannot be built
    #[error("The proxy build cannot be built")]
    BuildingProxyConnectorFailed,
}

impl AwsSes {
    /// Constructs a new AwsSes client.
    ///
    /// Never fails: a client-build failure at startup is only logged (see note
    /// below) and the struct is returned regardless.
    pub async fn create(
        conf: &EmailSettings,
        ses_config: &SESConfig,
        proxy_url: Option<impl AsRef<str>>,
    ) -> Self {
        // Build the client initially which will help us know if the email configuration is correct
        // NOTE: the build result is deliberately discarded — this is a
        // best-effort configuration check; errors are logged, not propagated.
        Self::create_client(conf, ses_config, proxy_url)
            .await
            .map_err(|error| logger::error!(?error, "Failed to initialize SES Client"))
            .ok();

        Self {
            sender: conf.sender_email.clone(),
            ses_config: ses_config.clone(),
            settings: conf.clone(),
        }
    }

    /// A helper function to create ses client.
    ///
    /// Flow: load AWS config (optionally via proxy) -> STS `assume_role` with
    /// the configured email role -> build an SES client from the resulting
    /// temporary credentials.
    pub async fn create_client(
        conf: &EmailSettings,
        ses_config: &SESConfig,
        proxy_url: Option<impl AsRef<str>>,
    ) -> CustomResult<Client, AwsSesError> {
        let sts_config = Self::get_shared_config(conf.aws_region.to_owned(), proxy_url.as_ref())?
            .load()
            .await;

        // Assume the configured email role to obtain temporary credentials.
        let role = aws_sdk_sts::Client::new(&sts_config)
            .assume_role()
            .role_arn(&ses_config.email_role_arn)
            .role_session_name(&ses_config.sts_role_session_name)
            .send()
            .await
            .change_context(AwsSesError::AssumeRoleFailure {
                region: conf.aws_region.to_owned(),
                role_arn: ses_config.email_role_arn.to_owned(),
                session_name: ses_config.sts_role_session_name.to_owned(),
            })?;

        let creds = role.credentials().ok_or(
            report!(AwsSesError::TemporaryCredentialsMissing(format!(
                "{role:?}"
            )))
            .attach_printable("Credentials object not available"),
        )?;

        let credentials = Credentials::new(
            creds.access_key_id(),
            creds.secret_access_key(),
            Some(creds.session_token().to_owned()),
            // Convert the expiration (nanoseconds since epoch) to a
            // `SystemTime`; if any conversion step fails the credentials are
            // created without an expiry instead of erroring out.
            u64::try_from(creds.expiration().as_nanos())
                .ok()
                .map(Duration::from_nanos)
                .and_then(|val| SystemTime::UNIX_EPOCH.checked_add(val)),
            "custom_provider",
        );

        logger::debug!(
            "Obtained SES temporary credentials with expiry {:?}",
            credentials.expiry()
        );

        // Second config load: same region/proxy, but now carrying the
        // temporary credentials obtained above.
        let ses_config = Self::get_shared_config(conf.aws_region.to_owned(), proxy_url)?
            .credentials_provider(credentials)
            .load()
            .await;

        Ok(Client::new(&ses_config))
    }

    // Builds the base AWS config loader for the given region, routing HTTP
    // through a proxy connector when a proxy URL is supplied.
    fn get_shared_config(
        region: String,
        proxy_url: Option<impl AsRef<str>>,
    ) -> CustomResult<aws_config::ConfigLoader, AwsSesError> {
        let region_provider = Region::new(region);
        let mut config = aws_config::from_env().region(region_provider);
        if let Some(proxy_url) = proxy_url {
            let proxy_connector = Self::get_proxy_connector(proxy_url)?;
            let http_client = HyperClientBuilder::new().build(proxy_connector);
            config = config.http_client(http_client);
        };
        Ok(config)
    }

    // Builds a hyper proxy connector that intercepts all outgoing requests.
    fn get_proxy_connector(
        proxy_url: impl AsRef<str>,
    ) -> CustomResult<hyper_proxy::ProxyConnector<hyper::client::HttpConnector>, AwsSesError> {
        let proxy_uri = proxy_url
            .as_ref()
            .parse::<Uri>()
            // NOTE(review): "{proxy_url}" here is inside a plain string literal,
            // not a format string, so it is NOT interpolated — the attached
            // message prints the placeholder verbatim. Confirm whether
            // `format!` was intended.
            .attach_printable("Unable to parse the proxy url {proxy_url}")
            .change_context(AwsSesError::BuildingProxyConnectorFailed)?;
        let proxy = hyper_proxy::Proxy::new(hyper_proxy::Intercept::All, proxy_uri);
        hyper_proxy::ProxyConnector::from_proxy(hyper::client::HttpConnector::new(), proxy)
            .change_context(AwsSesError::BuildingProxyConnectorFailed)
    }
}

#[async_trait::async_trait]
impl EmailClient for AwsSes {
    type RichText = Body;

    // Wraps the rendered template text into an SES HTML body (UTF-8).
    fn convert_to_rich_text(
        &self,
        intermediate_string: IntermediateString,
    ) -> CustomResult<Self::RichText, EmailError> {
        let email_body = Body::builder()
            .html(
                Content::builder()
                    .data(intermediate_string.into_inner())
                    .charset("UTF-8")
                    .build()
                    .change_context(EmailError::ContentBuildFailure)?,
            )
            .build();

        Ok(email_body)
    }

    async fn send_email(
        &self,
        recipient: pii::Email,
        subject: String,
        body: Self::RichText,
        proxy_url: Option<&String>,
    ) -> EmailResult<()> {
        // Not using the same email client which was created at startup as the role session would expire
        // Create a client every time when the email is being sent
        let email_client = Self::create_client(&self.settings, &self.ses_config, proxy_url)
            .await
            .change_context(EmailError::ClientBuildingFailure)?;

        email_client
            .send_email()
            .from_email_address(self.sender.to_owned())
            .destination(
                Destination::builder()
                    .to_addresses(recipient.peek())
                    .build(),
            )
            .content(
                EmailContent::builder()
                    .simple(
                        Message::builder()
                            .subject(
                                Content::builder()
                                    .data(subject)
                                    .build()
                                    .change_context(EmailError::ContentBuildFailure)?,
                            )
                            .body(body)
                            .build(),
                    )
                    .build(),
            )
            .send()
            .await
            // Box the (large) SDK error before wrapping it, keeping the
            // variant — and every `Result` carrying it — small.
            .map_err(|e| AwsSesError::SendingFailure(Box::new(e)))
            .change_context(EmailError::EmailSendingFailure)?;

        Ok(())
    }
}
crates/external_services/src/email/ses.rs
external_services::src::email::ses
1,844
true
// File: crates/external_services/src/email/no_email.rs // Module: external_services::src::email::no_email use common_utils::{errors::CustomResult, pii}; use router_env::logger; use crate::email::{EmailClient, EmailError, EmailResult, IntermediateString}; /// Client when email support is disabled #[derive(Debug, Clone, Default, serde::Deserialize)] pub struct NoEmailClient {} impl NoEmailClient { /// Constructs a new client when email is disabled pub async fn create() -> Self { Self {} } } #[async_trait::async_trait] impl EmailClient for NoEmailClient { type RichText = String; fn convert_to_rich_text( &self, intermediate_string: IntermediateString, ) -> CustomResult<Self::RichText, EmailError> { Ok(intermediate_string.into_inner()) } async fn send_email( &self, _recipient: pii::Email, _subject: String, _body: Self::RichText, _proxy_url: Option<&String>, ) -> EmailResult<()> { logger::info!("Email not sent as email support is disabled, please enable any of the supported email clients to send emails"); Ok(()) } }
crates/external_services/src/email/no_email.rs
external_services::src::email::no_email
263
true
// File: crates/external_services/src/email/smtp.rs
// Module: external_services::src::email::smtp

use std::time::Duration;

use common_utils::{errors::CustomResult, pii};
use error_stack::ResultExt;
use lettre::{
    address::AddressError,
    error,
    message::{header::ContentType, Mailbox},
    transport::smtp::{self, authentication::Credentials},
    Message, SmtpTransport, Transport,
};
use masking::{PeekInterface, Secret};

use crate::email::{EmailClient, EmailError, EmailResult, EmailSettings, IntermediateString};

/// Client for SMTP server operation
#[derive(Debug, Clone, Default, serde::Deserialize)]
pub struct SmtpServer {
    /// sender email id
    pub sender: String,
    /// SMTP server specific configs
    pub smtp_config: SmtpServerConfig,
}

impl SmtpServer {
    /// A helper function to create SMTP server client.
    ///
    /// Builds a `lettre::SmtpTransport` for the configured connection type
    /// (STARTTLS or plaintext), attaching credentials only when both username
    /// and password are configured.
    pub fn create_client(&self) -> Result<SmtpTransport, SmtpError> {
        let host = self.smtp_config.host.clone();
        let port = self.smtp_config.port;
        let timeout = Some(Duration::from_secs(self.smtp_config.timeout));
        // `zip` means credentials exist only when BOTH username and password
        // are set; a single configured field is silently ignored here.
        let credentials = self
            .smtp_config
            .username
            .clone()
            .zip(self.smtp_config.password.clone())
            .map(|(username, password)| {
                Credentials::new(username.peek().to_owned(), password.peek().to_owned())
            });
        match &self.smtp_config.connection {
            SmtpConnection::StartTls => match credentials {
                Some(credentials) => Ok(SmtpTransport::starttls_relay(&host)
                    .map_err(SmtpError::ConnectionFailure)?
                    .port(port)
                    .timeout(timeout)
                    .credentials(credentials)
                    .build()),
                None => Ok(SmtpTransport::starttls_relay(&host)
                    .map_err(SmtpError::ConnectionFailure)?
                    .port(port)
                    .timeout(timeout)
                    .build()),
            },
            SmtpConnection::Plaintext => match credentials {
                Some(credentials) => Ok(SmtpTransport::builder_dangerous(&host)
                    .port(port)
                    .timeout(timeout)
                    .credentials(credentials)
                    .build()),
                None => Ok(SmtpTransport::builder_dangerous(&host)
                    .port(port)
                    .timeout(timeout)
                    .build()),
            },
        }
    }

    /// Constructs a new SMTP client
    pub async fn create(conf: &EmailSettings, smtp_config: SmtpServerConfig) -> Self {
        // NOTE(review): `smtp_config` is owned here, so the `.clone()` below is
        // redundant — the value could be moved instead.
        Self {
            sender: conf.sender_email.clone(),
            smtp_config: smtp_config.clone(),
        }
    }

    /// helper function to convert email id into Mailbox
    fn to_mail_box(email: String) -> EmailResult<Mailbox> {
        Ok(Mailbox::new(
            None,
            email
                .parse()
                .map_err(SmtpError::EmailParsingFailed)
                .change_context(EmailError::EmailSendingFailure)?,
        ))
    }
}

/// Struct that contains the SMTP server specific configs required
#[derive(Debug, Clone, Default, serde::Serialize, serde::Deserialize)]
#[serde(rename_all = "snake_case")]
pub struct SmtpServerConfig {
    /// hostname of the SMTP server eg: smtp.gmail.com
    pub host: String,
    /// portname of the SMTP server eg: 25
    pub port: u16,
    /// timeout for the SMTP server connection in seconds eg: 10
    pub timeout: u64,
    /// Username name of the SMTP server
    pub username: Option<Secret<String>>,
    /// Password of the SMTP server
    pub password: Option<Secret<String>>,
    /// Connection type of the SMTP server
    #[serde(default)]
    pub connection: SmtpConnection,
}

/// Enum that contains the connection types of the SMTP server
#[derive(Debug, Clone, Default, serde::Serialize, serde::Deserialize)]
#[serde(rename_all = "snake_case")]
pub enum SmtpConnection {
    #[default]
    /// Plaintext connection which MUST then successfully upgrade to TLS via STARTTLS
    StartTls,
    /// Plaintext connection (very insecure)
    Plaintext,
}

impl SmtpServerConfig {
    /// Validation for the SMTP server client specific configs.
    ///
    /// Returns a static message naming the first invalid field.
    pub fn validate(&self) -> Result<(), &'static str> {
        use common_utils::{ext_traits::ConfigExt, fp_utils::when};
        when(self.host.is_default_or_empty(), || {
            Err("email.smtp.host must not be empty")
        })?;
        // NOTE(review): credentials are validated only when BOTH username and
        // password are present; a config with exactly one of them set passes
        // validation and is later treated as unauthenticated (see
        // `create_client`) — confirm this is intended.
        self.username.clone().zip(self.password.clone()).map_or(
            Ok(()),
            |(username, password)| {
                when(username.peek().is_default_or_empty(), || {
                    Err("email.smtp.username must not be empty")
                })?;
                when(password.peek().is_default_or_empty(), || {
                    Err("email.smtp.password must not be empty")
                })
            },
        )?;
        Ok(())
    }
}

#[async_trait::async_trait]
impl EmailClient for SmtpServer {
    type RichText = String;

    // The rendered template text is used directly as the HTML body.
    fn convert_to_rich_text(
        &self,
        intermediate_string: IntermediateString,
    ) -> CustomResult<Self::RichText, EmailError> {
        Ok(intermediate_string.into_inner())
    }

    async fn send_email(
        &self,
        recipient: pii::Email,
        subject: String,
        body: Self::RichText,
        _proxy_url: Option<&String>,
    ) -> EmailResult<()> {
        // Create a client every time when the email is being sent
        let email_client =
            Self::create_client(self).change_context(EmailError::EmailSendingFailure)?;
        let email = Message::builder()
            .to(Self::to_mail_box(recipient.peek().to_string())?)
            .from(Self::to_mail_box(self.sender.clone())?)
            .subject(subject)
            .header(ContentType::TEXT_HTML)
            .body(body)
            .map_err(SmtpError::MessageBuildingFailed)
            .change_context(EmailError::EmailSendingFailure)?;

        email_client
            .send(&email)
            .map_err(SmtpError::SendingFailure)
            .change_context(EmailError::EmailSendingFailure)?;
        Ok(())
    }
}

/// Errors that could occur during SMTP operations.
#[derive(Debug, thiserror::Error)]
pub enum SmtpError {
    /// An error occurred in the SMTP while sending email.
    #[error("Failed to Send Email {0:?}")]
    SendingFailure(smtp::Error),
    /// An error occurred while establishing the SMTP relay connection.
    #[error("Failed to create connection {0:?}")]
    ConnectionFailure(smtp::Error),
    /// An error occurred in the SMTP while building the message content.
    #[error("Failed to Build Email content {0:?}")]
    MessageBuildingFailed(error::Error),
    /// An error occurred while parsing an email address.
    #[error("Failed to parse given email {0:?}")]
    EmailParsingFailed(AddressError),
}
crates/external_services/src/email/smtp.rs
external_services::src::email::smtp
1,467
true
// File: crates/euclid_macros/src/lib.rs // Module: euclid_macros::src::lib mod inner; use proc_macro::TokenStream; #[proc_macro_derive(EnumNums)] pub fn enum_nums(ts: TokenStream) -> TokenStream { inner::enum_nums_inner(ts) } #[proc_macro] pub fn knowledge(ts: TokenStream) -> TokenStream { match inner::knowledge_inner(ts.into()) { Ok(ts) => ts.into(), Err(e) => e.into_compile_error().into(), } }
crates/euclid_macros/src/lib.rs
euclid_macros::src::lib
113
true
// File: crates/euclid_macros/src/inner.rs // Module: euclid_macros::src::inner mod enum_nums; mod knowledge; pub(crate) use enum_nums::enum_nums_inner; pub(crate) use knowledge::knowledge_inner;
crates/euclid_macros/src/inner.rs
euclid_macros::src::inner
51
true
// File: crates/euclid_macros/src/inner/enum_nums.rs // Module: euclid_macros::src::inner::enum_nums use proc_macro::TokenStream; use proc_macro2::{Span, TokenStream as TokenStream2}; use quote::quote; fn error() -> TokenStream2 { syn::Error::new( Span::call_site(), "'EnumNums' can only be derived on enums with unit variants".to_string(), ) .to_compile_error() } pub(crate) fn enum_nums_inner(ts: TokenStream) -> TokenStream { let derive_input = syn::parse_macro_input!(ts as syn::DeriveInput); let enum_obj = match derive_input.data { syn::Data::Enum(e) => e, _ => return error().into(), }; let enum_name = derive_input.ident; let mut match_arms = Vec::<TokenStream2>::with_capacity(enum_obj.variants.len()); for (i, variant) in enum_obj.variants.iter().enumerate() { match variant.fields { syn::Fields::Unit => {} _ => return error().into(), } let var_ident = &variant.ident; match_arms.push(quote! { Self::#var_ident => #i }); } let impl_block = quote! { impl #enum_name { pub fn to_num(&self) -> usize { match self { #(#match_arms),* } } } }; impl_block.into() }
crates/euclid_macros/src/inner/enum_nums.rs
euclid_macros::src::inner::enum_nums
321
true
// File: crates/euclid_macros/src/inner/knowledge.rs
// Module: euclid_macros::src::inner::knowledge

// Parser and code generator for the `knowledge!` rule DSL: rules of the form
// `lhs_atom & lhs_atom ... -> rhs_atom;` (or `->>` for strong edges) are
// parsed, checked for duplicate edges and cycles, and compiled into a
// `cgraph::ConstraintGraphBuilder` construction expression.

use std::{
    fmt::{Display, Formatter},
    hash::Hash,
    rc::Rc,
};

use proc_macro2::{Span, TokenStream};
use quote::{format_ident, quote};
use rustc_hash::{FxHashMap, FxHashSet};
use syn::{parse::Parse, Token};

// Custom punctuation for edge strength: `->` (normal) and `->>` (strong).
mod strength {
    syn::custom_punctuation!(Normal, ->);
    syn::custom_punctuation!(Strong, ->>);
}

// Custom keywords used inside atoms.
mod kw {
    syn::custom_keyword!(any);
    syn::custom_keyword!(not);
}

// Numeric comparison operator attached to a number atom; `Equal` is the
// default when no operator is written.
#[derive(Clone, PartialEq, Eq, Hash)]
enum Comparison {
    LessThan,
    Equal,
    GreaterThan,
    GreaterThanEqual,
    LessThanEqual,
}

impl Display for Comparison {
    fn fmt(&self, f: &mut Formatter<'_>) -> std::fmt::Result {
        let symbol = match self {
            Self::LessThan => "< ",
            // Equality is rendered as no operator at all.
            Self::Equal => return Ok(()),
            Self::GreaterThanEqual => ">= ",
            Self::LessThanEqual => "<= ",
            Self::GreaterThan => "> ",
        };
        write!(f, "{symbol}")
    }
}

impl Parse for Comparison {
    // NOTE(review): `Token![>]` / `Token![<]` are peeked before `Token![<=]` /
    // `Token![>=]`; syn's single-char peeks also match the first character of
    // a compound punct, which would make the `<=`/`>=` arms unreachable.
    // Confirm the DSL inputs actually exercised, and whether the compound
    // checks should come first.
    fn parse(input: syn::parse::ParseStream<'_>) -> syn::Result<Self> {
        if input.peek(Token![>]) {
            input.parse::<Token![>]>()?;
            Ok(Self::GreaterThan)
        } else if input.peek(Token![<]) {
            input.parse::<Token![<]>()?;
            Ok(Self::LessThan)
        } else if input.peek(Token!(<=)) {
            input.parse::<Token![<=]>()?;
            Ok(Self::LessThanEqual)
        } else if input.peek(Token!(>=)) {
            input.parse::<Token![>=]>()?;
            Ok(Self::GreaterThanEqual)
        } else {
            Ok(Self::Equal)
        }
    }
}

// The value half of an atom: wildcard, enum variant name, or compared number.
#[derive(Clone, PartialEq, Eq, Hash)]
enum ValueType {
    Any,
    EnumVariant(String),
    Number { number: i64, comparison: Comparison },
}

impl ValueType {
    // Human-readable rendering used in cycle-error messages, e.g. `key(any)`.
    fn to_string(&self, key: &str) -> String {
        match self {
            Self::Any => format!("{key}(any)"),
            Self::EnumVariant(s) => format!("{key}({s})"),
            Self::Number { number, comparison } => {
                format!("{key}({comparison}{number})")
            }
        }
    }
}

impl Parse for ValueType {
    fn parse(input: syn::parse::ParseStream<'_>) -> syn::Result<Self> {
        let lookahead = input.lookahead1();
        if lookahead.peek(syn::Ident) {
            let ident: syn::Ident = input.parse()?;
            Ok(Self::EnumVariant(ident.to_string()))
        } else if lookahead.peek(Token![>])
            || lookahead.peek(Token![<])
            || lookahead.peek(syn::LitInt)
        {
            // Optional comparison operator followed by an i64 literal.
            let comparison: Comparison = input.parse()?;
            let number: syn::LitInt = input.parse()?;
            let num_val = number.base10_parse::<i64>()?;
            Ok(Self::Number {
                number: num_val,
                comparison,
            })
        } else {
            Err(lookahead.error())
        }
    }
}

// A single `key(value)` pair; `Rc`-shared so identical atoms from different
// rules map to the same graph node.
#[derive(Clone, PartialEq, Eq, Hash)]
struct Atom {
    key: String,
    value: ValueType,
}

impl Display for Atom {
    fn fmt(&self, f: &mut Formatter<'_>) -> std::fmt::Result {
        write!(f, "{}", self.value.to_string(&self.key))
    }
}

impl Parse for Atom {
    // Accepts either `any key` (wildcard form) or `key(value)`.
    fn parse(input: syn::parse::ParseStream<'_>) -> syn::Result<Self> {
        let maybe_any: syn::Ident = input.parse()?;
        if maybe_any == "any" {
            let actual_key: syn::Ident = input.parse()?;
            Ok(Self {
                key: actual_key.to_string(),
                value: ValueType::Any,
            })
        } else {
            let content;
            syn::parenthesized!(content in input);
            let value: ValueType = content.parse()?;
            Ok(Self {
                key: maybe_any.to_string(),
                value,
            })
        }
    }
}

// Edge strength corresponding to `->` / `->>`.
#[derive(Clone, PartialEq, Eq, Hash, strum::Display)]
enum Strength {
    Normal,
    Strong,
}

impl Parse for Strength {
    fn parse(input: syn::parse::ParseStream<'_>) -> syn::Result<Self> {
        let lookahead = input.lookahead1();
        // `->>` must be checked before `->` since the former starts with the latter.
        if lookahead.peek(strength::Strong) {
            input.parse::<strength::Strong>()?;
            Ok(Self::Strong)
        } else if lookahead.peek(strength::Normal) {
            input.parse::<strength::Normal>()?;
            Ok(Self::Normal)
        } else {
            Err(lookahead.error())
        }
    }
}

// Whether an atom asserts a value (`Positive`) or its negation via `not` (`Negative`).
#[derive(Clone, PartialEq, Eq, Hash, strum::Display)]
enum Relation {
    Positive,
    Negative,
}

// Left-hand-side atom: either a plain value atom or an `in [A, B, ...]`
// aggregator over enum variants.
enum AtomType {
    Value {
        relation: Relation,
        atom: Rc<Atom>,
    },
    InAggregator {
        key: String,
        values: Vec<String>,
        relation: Relation,
    },
}

// Parses the body of an LHS atom after the key and optional `not` have been
// consumed: `in [..]`, `any`, or a plain value.
fn parse_atom_type_inner(
    input: syn::parse::ParseStream<'_>,
    key: syn::Ident,
    relation: Relation,
) -> syn::Result<AtomType> {
    let result = if input.peek(Token![in]) {
        input.parse::<Token![in]>()?;
        let bracketed;
        syn::bracketed!(bracketed in input);
        // At least one identifier is required; the rest are comma-separated.
        let mut values = Vec::<String>::new();
        let first: syn::Ident = bracketed.parse()?;
        values.push(first.to_string());
        while !bracketed.is_empty() {
            bracketed.parse::<Token![,]>()?;
            let next: syn::Ident = bracketed.parse()?;
            values.push(next.to_string());
        }
        AtomType::InAggregator {
            key: key.to_string(),
            values,
            relation,
        }
    } else if input.peek(kw::any) {
        input.parse::<kw::any>()?;
        AtomType::Value {
            relation,
            atom: Rc::new(Atom {
                key: key.to_string(),
                value: ValueType::Any,
            }),
        }
    } else {
        let value: ValueType = input.parse()?;
        AtomType::Value {
            relation,
            atom: Rc::new(Atom {
                key: key.to_string(),
                value,
            }),
        }
    };
    Ok(result)
}

impl Parse for AtomType {
    // Full LHS atom: `key(not? <body>)`, rejecting trailing tokens.
    fn parse(input: syn::parse::ParseStream<'_>) -> syn::Result<Self> {
        let key: syn::Ident = input.parse()?;
        let content;
        syn::parenthesized!(content in input);
        let relation = if content.peek(kw::not) {
            content.parse::<kw::not>()?;
            Relation::Negative
        } else {
            Relation::Positive
        };
        let result = parse_atom_type_inner(&content, key, relation)?;
        if !content.is_empty() {
            Err(content.error("Unexpected input received after atom value"))
        } else {
            Ok(result)
        }
    }
}

// RHS atoms are more restricted: only `key(any)` or `key(Variant)` — no
// negation, numbers, or aggregators.
fn parse_rhs_atom(input: syn::parse::ParseStream<'_>) -> syn::Result<Atom> {
    let key: syn::Ident = input.parse()?;
    let content;
    syn::parenthesized!(content in input);
    let lookahead = content.lookahead1();
    let value_type = if lookahead.peek(kw::any) {
        content.parse::<kw::any>()?;
        ValueType::Any
    } else if lookahead.peek(syn::Ident) {
        let variant = content.parse::<syn::Ident>()?;
        ValueType::EnumVariant(variant.to_string())
    } else {
        return Err(lookahead.error());
    };
    if !content.is_empty() {
        Err(content.error("Unexpected input received after atom value"))
    } else {
        Ok(Atom {
            key: key.to_string(),
            value: value_type,
        })
    }
}

// One rule: `lhs (& lhs)* (-> | ->>) rhs ;`
struct Rule {
    lhs: Vec<AtomType>,
    strength: Strength,
    rhs: Rc<Atom>,
}

impl Parse for Rule {
    fn parse(input: syn::parse::ParseStream<'_>) -> syn::Result<Self> {
        let first_atom: AtomType = input.parse()?;
        let mut lhs: Vec<AtomType> = vec![first_atom];
        while input.peek(Token![&]) {
            input.parse::<Token![&]>()?;
            let and_atom: AtomType = input.parse()?;
            lhs.push(and_atom);
        }
        let strength: Strength = input.parse()?;
        let rhs: Rc<Atom> = Rc::new(parse_rhs_atom(input)?);
        input.parse::<Token![;]>()?;
        Ok(Self { lhs, strength, rhs })
    }
}

// The whole macro input: a sequence of rules until end of input.
#[derive(Clone)]
struct Program {
    rules: Vec<Rc<Rule>>,
}

impl Parse for Program {
    fn parse(input: syn::parse::ParseStream<'_>) -> syn::Result<Self> {
        let mut rules: Vec<Rc<Rule>> = Vec::new();
        while !input.is_empty() {
            rules.push(Rc::new(input.parse::<Rule>()?));
        }
        Ok(Self { rules })
    }
}

// Code-generation state: atom <-> index interning, the rule dependency graph
// (for duplicate-edge and cycle detection), and the idents of already-emitted
// value nodes so identical atoms are compiled once.
struct GenContext {
    next_idx: usize,
    next_node_idx: usize,
    idx2atom: FxHashMap<usize, Rc<Atom>>,
    atom2idx: FxHashMap<Rc<Atom>, usize>,
    edges: FxHashMap<usize, FxHashSet<usize>>,
    compiled_atoms: FxHashMap<Rc<Atom>, proc_macro2::Ident>,
}

impl GenContext {
    fn new() -> Self {
        Self {
            next_idx: 1,
            next_node_idx: 1,
            idx2atom: FxHashMap::default(),
            atom2idx: FxHashMap::default(),
            edges: FxHashMap::default(),
            compiled_atoms: FxHashMap::default(),
        }
    }

    // Interns an atom, returning its stable index.
    fn register_node(&mut self, atom: Rc<Atom>) -> usize {
        if let Some(idx) = self.atom2idx.get(&atom) {
            *idx
        } else {
            let this_idx = self.next_idx;
            self.next_idx += 1;
            self.idx2atom.insert(this_idx, Rc::clone(&atom));
            self.atom2idx.insert(atom, this_idx);
            this_idx
        }
    }

    // Records a dependency edge; a repeated identical edge is a user error.
    fn register_edge(&mut self, from: usize, to: usize) -> Result<(), String> {
        let node_children = self.edges.entry(from).or_default();
        if node_children.contains(&to) {
            Err("Duplicate edge detected".to_string())
        } else {
            node_children.insert(to);
            // Ensure the target also exists in the edge map (possibly with no
            // outgoing edges) so later traversals find an entry.
            self.edges.entry(to).or_default();
            Ok(())
        }
    }

    // Registers one rule's LHS-value-atom -> RHS-atom edges. InAggregator
    // atoms are not graph nodes and are skipped here.
    fn register_rule(&mut self, rule: &Rule) -> Result<(), String> {
        let to_idx = self.register_node(Rc::clone(&rule.rhs));
        for atom_type in &rule.lhs {
            if let AtomType::Value { atom, .. } = atom_type {
                let from_idx = self.register_node(Rc::clone(atom));
                self.register_edge(from_idx, to_idx)?;
            }
        }
        Ok(())
    }

    // DFS returning the node sequence of a cycle if one is reachable from
    // `node_id`. `explored`/`order` track the current traversal, `visited`
    // persists across roots to avoid re-exploring finished nodes.
    //
    // NOTE(review): nodes are never removed from `explored` when they are
    // popped from `order`, so a node reached twice via two converging acyclic
    // paths within one traversal takes the `explored` branch and then fails
    // the `order` position lookup ("Error deciding cycle order"). Confirm
    // whether diamond-shaped rule graphs are possible/intended here.
    fn cycle_dfs(
        &self,
        node_id: usize,
        explored: &mut FxHashSet<usize>,
        visited: &mut FxHashSet<usize>,
        order: &mut Vec<usize>,
    ) -> Result<Option<Vec<usize>>, String> {
        if explored.contains(&node_id) {
            let position = order
                .iter()
                .position(|v| *v == node_id)
                .ok_or_else(|| "Error deciding cycle order".to_string())?;
            let cycle_order = order
                .get(position..)
                .ok_or_else(|| "Error getting cycle order".to_string())?
                .to_vec();
            Ok(Some(cycle_order))
        } else if visited.contains(&node_id) {
            Ok(None)
        } else {
            visited.insert(node_id);
            explored.insert(node_id);
            order.push(node_id);
            let dests = self
                .edges
                .get(&node_id)
                .ok_or_else(|| "Error getting edges of node".to_string())?;
            for dest in dests.iter().copied() {
                if let Some(cycle) = self.cycle_dfs(dest, explored, visited, order)? {
                    return Ok(Some(cycle));
                }
            }
            order.pop();
            Ok(None)
        }
    }

    // Runs the cycle check from every node, formatting any discovered cycle
    // as `atom -> atom -> ... -> atom` for the compile error.
    fn detect_graph_cycles(&self) -> Result<(), String> {
        let start_nodes = self.edges.keys().copied().collect::<Vec<usize>>();
        let mut total_visited = FxHashSet::<usize>::default();
        for node_id in start_nodes.iter().copied() {
            let mut explored = FxHashSet::<usize>::default();
            let mut order = Vec::<usize>::new();
            match self.cycle_dfs(node_id, &mut explored, &mut total_visited, &mut order)? {
                None => {}
                Some(order) => {
                    let mut display_strings = Vec::<String>::with_capacity(order.len() + 1);
                    for cycle_node_id in order {
                        let node = self.idx2atom.get(&cycle_node_id).ok_or_else(|| {
                            "Failed to find node during cycle display creation".to_string()
                        })?;
                        display_strings.push(node.to_string());
                    }
                    // Repeat the first node at the end to close the loop visually.
                    let first = display_strings
                        .first()
                        .cloned()
                        .ok_or("Unable to fill cycle display array")?;
                    display_strings.push(first);
                    return Err(format!("Found cycle: {}", display_strings.join(" -> ")));
                }
            }
        }
        Ok(())
    }

    // Fresh `_node_N` identifier for a generated local binding.
    fn next_node_ident(&mut self) -> (proc_macro2::Ident, usize) {
        let this_idx = self.next_node_idx;
        self.next_node_idx += 1;
        (format_ident!("_node_{this_idx}"), this_idx)
    }

    // Emits (once per distinct atom) a `make_value_node` statement and returns
    // the local ident bound to the node.
    fn compile_atom(
        &mut self,
        atom: &Rc<Atom>,
        tokens: &mut TokenStream,
    ) -> Result<proc_macro2::Ident, String> {
        let maybe_ident = self.compiled_atoms.get(atom);
        if let Some(ident) = maybe_ident {
            // Already compiled: reuse the existing node binding.
            Ok(ident.clone())
        } else {
            let (identifier, _) = self.next_node_ident();
            let key = format_ident!("{}", &atom.key);
            let the_value = match &atom.value {
                ValueType::Any => quote! {
                    cgraph::NodeValue::Key(DirKey::new(DirKeyKind::#key,None))
                },
                ValueType::EnumVariant(variant) => {
                    let variant = format_ident!("{}", variant);
                    quote! {
                        cgraph::NodeValue::Value(DirValue::#key(#key::#variant))
                    }
                }
                ValueType::Number { number, comparison } => {
                    let comp_type = match comparison {
                        Comparison::Equal => quote! { None },
                        Comparison::LessThan => quote! { Some(NumValueRefinement::LessThan) },
                        Comparison::GreaterThan => quote! { Some(NumValueRefinement::GreaterThan) },
                        Comparison::GreaterThanEqual => {
                            quote! { Some(NumValueRefinement::GreaterThanEqual) }
                        }
                        Comparison::LessThanEqual => {
                            quote! { Some(NumValueRefinement::LessThanEqual) }
                        }
                    };
                    quote! {
                        cgraph::NodeValue::Value(DirValue::#key(NumValue {
                            number: #number,
                            refinement: #comp_type,
                        }))
                    }
                }
            };
            let compiled = quote! {
                let #identifier = graph.make_value_node(#the_value, None, None::<()>);
            };
            tokens.extend(compiled);
            self.compiled_atoms
                .insert(Rc::clone(atom), identifier.clone());
            Ok(identifier)
        }
    }

    // Compiles one LHS atom, emitting an in-aggregator node when needed, and
    // returns the node ident together with its relation.
    fn compile_atom_type(
        &mut self,
        atom_type: &AtomType,
        tokens: &mut TokenStream,
    ) -> Result<(proc_macro2::Ident, Relation), String> {
        match atom_type {
            AtomType::Value { relation, atom } => {
                let node_ident = self.compile_atom(atom, tokens)?;
                Ok((node_ident, relation.clone()))
            }
            AtomType::InAggregator {
                key,
                values,
                relation,
            } => {
                let key_ident = format_ident!("{key}");
                let mut values_tokens: Vec<TokenStream> = Vec::new();
                for value in values {
                    let value_ident = format_ident!("{value}");
                    values_tokens.push(quote! {
                        DirValue::#key_ident(#key_ident::#value_ident)
                    });
                }
                let (node_ident, _) = self.next_node_ident();
                let node_code = quote! {
                    let #node_ident = graph.make_in_aggregator(
                        Vec::from_iter([#(#values_tokens),*]),
                        None,
                        None::<()>,
                    ).expect("Failed to make In aggregator");
                };
                tokens.extend(node_code);
                Ok((node_ident, relation.clone()))
            }
        }
    }

    // Compiles one rule: a single LHS atom becomes a direct edge to the RHS;
    // multiple LHS atoms are combined through an all-aggregator node first.
    fn compile_rule(&mut self, rule: &Rule, tokens: &mut TokenStream) -> Result<(), String> {
        let rhs_ident = self.compile_atom(&rule.rhs, tokens)?;
        let mut node_details: Vec<(proc_macro2::Ident, Relation)> =
            Vec::with_capacity(rule.lhs.len());
        for lhs_atom_type in &rule.lhs {
            let details = self.compile_atom_type(lhs_atom_type, tokens)?;
            node_details.push(details);
        }
        if node_details.len() <= 1 {
            let strength = format_ident!("{}", rule.strength.to_string());
            for (from_node, relation) in &node_details {
                let relation = format_ident!("{}", relation.to_string());
                tokens.extend(quote! {
                    graph.make_edge(#from_node, #rhs_ident, cgraph::Strength::#strength, cgraph::Relation::#relation, None::<cgraph::DomainId>)
                        .expect("Failed to make edge");
                });
            }
        } else {
            let mut all_agg_nodes: Vec<TokenStream> = Vec::with_capacity(node_details.len());
            for (from_node, relation) in &node_details {
                let relation = format_ident!("{}", relation.to_string());
                all_agg_nodes.push(quote! {
                    (#from_node, cgraph::Relation::#relation, cgraph::Strength::Strong)
                });
            }
            let strength = format_ident!("{}", rule.strength.to_string());
            let (agg_node_ident, _) = self.next_node_ident();
            tokens.extend(quote! {
                let #agg_node_ident = graph.make_all_aggregator(&[#(#all_agg_nodes),*], None, None::<()>, None)
                    .expect("Failed to make all aggregator node");
                graph.make_edge(#agg_node_ident, #rhs_ident, cgraph::Strength::#strength, cgraph::Relation::Positive, None::<cgraph::DomainId>)
                    .expect("Failed to create all aggregator edge");
            });
        }
        Ok(())
    }

    // Wraps the per-rule statements in a block expression that builds and
    // returns the finished constraint graph.
    fn compile(&mut self, program: Program) -> Result<TokenStream, String> {
        let mut tokens = TokenStream::new();
        for rule in &program.rules {
            self.compile_rule(rule, &mut tokens)?;
        }
        let compiled = quote! {{
            use euclid_graph_prelude::*;
            let mut graph = cgraph::ConstraintGraphBuilder::new();
            #tokens
            graph.build()
        }};
        Ok(compiled)
    }
}

// Macro entry point: parse -> register (duplicate-edge check) -> cycle check
// -> compile; all string-typed failures become errors at the call site.
pub(crate) fn knowledge_inner(ts: TokenStream) -> syn::Result<TokenStream> {
    let program = syn::parse::<Program>(ts.into())?;
    let mut gen_context = GenContext::new();
    for rule in &program.rules {
        gen_context
            .register_rule(rule)
            .map_err(|msg| syn::Error::new(Span::call_site(), msg))?;
    }
    gen_context
        .detect_graph_cycles()
        .map_err(|msg| syn::Error::new(Span::call_site(), msg))?;
    gen_context
        .compile(program)
        .map_err(|msg| syn::Error::new(Span::call_site(), msg))
}
crates/euclid_macros/src/inner/knowledge.rs
euclid_macros::src::inner::knowledge
4,265
true
// File: crates/masking/src/strategy.rs // Module: masking::src::strategy use core::fmt; /// Debugging trait which is specialized for handling secret values pub trait Strategy<T> { /// Format information about the secret's type. fn fmt(value: &T, fmt: &mut fmt::Formatter<'_>) -> fmt::Result; } /// Debug with type #[cfg_attr(feature = "serde", derive(serde::Deserialize))] #[derive(Debug, Copy, Clone, PartialEq)] pub enum WithType {} impl<T> Strategy<T> for WithType { fn fmt(_: &T, fmt: &mut fmt::Formatter<'_>) -> fmt::Result { fmt.write_str("*** ")?; fmt.write_str(std::any::type_name::<T>())?; fmt.write_str(" ***") } } /// Debug without type pub enum WithoutType {} impl<T> Strategy<T> for WithoutType { fn fmt(_: &T, fmt: &mut fmt::Formatter<'_>) -> fmt::Result { fmt.write_str("*** ***") } }
crates/masking/src/strategy.rs
masking::src::strategy
226
true
// File: crates/masking/src/abs.rs // Module: masking::src::abs //! Abstract data types. use crate::Secret; /// Interface to expose a reference to an inner secret pub trait PeekInterface<S> { /// Only method providing access to the secret value. fn peek(&self) -> &S; /// Provide a mutable reference to the inner value. fn peek_mut(&mut self) -> &mut S; } /// Interface that consumes a option secret and returns the value. pub trait ExposeOptionInterface<S> { /// Expose option. fn expose_option(self) -> S; } /// Interface that consumes a secret and returns the inner value. pub trait ExposeInterface<S> { /// Consume the secret and return the inner value fn expose(self) -> S; } impl<S, I> ExposeOptionInterface<Option<S>> for Option<Secret<S, I>> where S: Clone, I: crate::Strategy<S>, { fn expose_option(self) -> Option<S> { self.map(ExposeInterface::expose) } } impl<S, I> ExposeInterface<S> for Secret<S, I> where I: crate::Strategy<S>, { fn expose(self) -> S { self.inner_secret } } /// Interface that consumes a secret and converts it to a secret with a different masking strategy. pub trait SwitchStrategy<FromStrategy, ToStrategy> { /// The type returned by `switch_strategy()`. type Output; /// Consumes the secret and converts it to a secret with a different masking strategy. fn switch_strategy(self) -> Self::Output; } impl<S, FromStrategy, ToStrategy> SwitchStrategy<FromStrategy, ToStrategy> for Secret<S, FromStrategy> where FromStrategy: crate::Strategy<S>, ToStrategy: crate::Strategy<S>, { type Output = Secret<S, ToStrategy>; fn switch_strategy(self) -> Self::Output { Secret::new(self.inner_secret) } }
crates/masking/src/abs.rs
masking::src::abs
429
true
// File: crates/masking/src/strong_secret.rs // Module: masking::src::strong_secret //! Structure describing secret. use std::{fmt, marker::PhantomData}; use subtle::ConstantTimeEq; use zeroize::{self, Zeroize as ZeroizableSecret}; use crate::{strategy::Strategy, PeekInterface}; /// Secret thing. /// /// To get access to value use method `expose()` of trait [`crate::ExposeInterface`]. pub struct StrongSecret<Secret: ZeroizableSecret, MaskingStrategy = crate::WithType> { /// Inner secret value pub(crate) inner_secret: Secret, pub(crate) masking_strategy: PhantomData<MaskingStrategy>, } impl<Secret: ZeroizableSecret, MaskingStrategy> StrongSecret<Secret, MaskingStrategy> { /// Take ownership of a secret value pub fn new(secret: Secret) -> Self { Self { inner_secret: secret, masking_strategy: PhantomData, } } } impl<Secret: ZeroizableSecret, MaskingStrategy> PeekInterface<Secret> for StrongSecret<Secret, MaskingStrategy> { fn peek(&self) -> &Secret { &self.inner_secret } fn peek_mut(&mut self) -> &mut Secret { &mut self.inner_secret } } impl<Secret: ZeroizableSecret, MaskingStrategy> From<Secret> for StrongSecret<Secret, MaskingStrategy> { fn from(secret: Secret) -> Self { Self::new(secret) } } impl<Secret: Clone + ZeroizableSecret, MaskingStrategy> Clone for StrongSecret<Secret, MaskingStrategy> { fn clone(&self) -> Self { Self { inner_secret: self.inner_secret.clone(), masking_strategy: PhantomData, } } } impl<Secret, MaskingStrategy> PartialEq for StrongSecret<Secret, MaskingStrategy> where Self: PeekInterface<Secret>, Secret: ZeroizableSecret + StrongEq, { fn eq(&self, other: &Self) -> bool { StrongEq::strong_eq(self.peek(), other.peek()) } } impl<Secret, MaskingStrategy> Eq for StrongSecret<Secret, MaskingStrategy> where Self: PeekInterface<Secret>, Secret: ZeroizableSecret + StrongEq, { } impl<Secret: ZeroizableSecret, MaskingStrategy: Strategy<Secret>> fmt::Debug for StrongSecret<Secret, MaskingStrategy> { fn fmt(&self, f: &mut fmt::Formatter<'_>) -> 
fmt::Result { MaskingStrategy::fmt(&self.inner_secret, f) } } impl<Secret: ZeroizableSecret, MaskingStrategy: Strategy<Secret>> fmt::Display for StrongSecret<Secret, MaskingStrategy> { fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { MaskingStrategy::fmt(&self.inner_secret, f) } } impl<Secret: ZeroizableSecret, MaskingStrategy> Default for StrongSecret<Secret, MaskingStrategy> where Secret: ZeroizableSecret + Default, { fn default() -> Self { Secret::default().into() } } impl<Secret: ZeroizableSecret, MaskingStrategy> Drop for StrongSecret<Secret, MaskingStrategy> { fn drop(&mut self) { self.inner_secret.zeroize(); } } trait StrongEq { fn strong_eq(&self, other: &Self) -> bool; } impl StrongEq for String { fn strong_eq(&self, other: &Self) -> bool { let lhs = self.as_bytes(); let rhs = other.as_bytes(); bool::from(lhs.ct_eq(rhs)) } } impl StrongEq for Vec<u8> { fn strong_eq(&self, other: &Self) -> bool { let lhs = &self; let rhs = &other; bool::from(lhs.ct_eq(rhs)) } } #[cfg(feature = "proto_tonic")] impl<T> prost::Message for StrongSecret<T, crate::WithType> where T: prost::Message + Default + Clone + ZeroizableSecret, { fn encode_raw(&self, buf: &mut impl bytes::BufMut) { self.peek().encode_raw(buf); } fn merge_field( &mut self, tag: u32, wire_type: prost::encoding::WireType, buf: &mut impl bytes::Buf, ctx: prost::encoding::DecodeContext, ) -> Result<(), prost::DecodeError> { if tag == 1 { self.peek_mut().merge_field(tag, wire_type, buf, ctx) } else { prost::encoding::skip_field(wire_type, tag, buf, ctx) } } fn encoded_len(&self) -> usize { self.peek().encoded_len() } fn clear(&mut self) { self.peek_mut().clear(); } }
crates/masking/src/strong_secret.rs
masking::src::strong_secret
1,060
true
// File: crates/masking/src/serde.rs // Module: masking::src::serde //! Serde-related. pub use erased_serde::Serialize as ErasedSerialize; pub use serde::{de, Deserialize, Serialize, Serializer}; use serde_json::{value::Serializer as JsonValueSerializer, Value}; use crate::{Secret, Strategy, StrongSecret, ZeroizableSecret}; /// Marker trait for secret types which can be [`Serialize`]-d by [`serde`]. /// /// When the `serde` feature of this crate is enabled and types are marked with /// this trait, they receive a [`Serialize` impl] for `Secret<T>`. /// (NOTE: all types which impl `DeserializeOwned` receive a [`Deserialize`] /// impl) /// /// This is done deliberately to prevent accidental exfiltration of secrets /// via `serde` serialization. #[cfg_attr(docsrs, cfg(feature = "serde"))] pub trait SerializableSecret: Serialize {} // #[cfg_attr(docsrs, doc(cfg(feature = "serde")))] // pub trait NonSerializableSecret: Serialize {} impl SerializableSecret for Value {} impl SerializableSecret for u8 {} impl SerializableSecret for u16 {} impl SerializableSecret for i8 {} impl SerializableSecret for i32 {} impl SerializableSecret for i64 {} impl SerializableSecret for url::Url {} #[cfg(feature = "time")] impl SerializableSecret for time::Date {} impl<T: SerializableSecret> SerializableSecret for &T {} impl<'de, T, I> Deserialize<'de> for Secret<T, I> where T: Clone + de::DeserializeOwned + Sized, I: Strategy<T>, { fn deserialize<D>(deserializer: D) -> Result<Self, D::Error> where D: de::Deserializer<'de>, { T::deserialize(deserializer).map(Self::new) } } impl<T, I> Serialize for Secret<T, I> where T: SerializableSecret + Serialize + Sized, I: Strategy<T>, { fn serialize<S>(&self, serializer: S) -> Result<S::Ok, S::Error> where S: Serializer, { pii_serializer::pii_serialize(self, serializer) } } impl<'de, T, I> Deserialize<'de> for StrongSecret<T, I> where T: Clone + de::DeserializeOwned + Sized + ZeroizableSecret, I: Strategy<T>, { fn deserialize<D>(deserializer: D) -> 
Result<Self, D::Error> where D: serde::Deserializer<'de>, { T::deserialize(deserializer).map(Self::new) } } impl<T, I> Serialize for StrongSecret<T, I> where T: SerializableSecret + Serialize + ZeroizableSecret + Sized, I: Strategy<T>, { fn serialize<S>(&self, serializer: S) -> Result<S::Ok, S::Error> where S: Serializer, { pii_serializer::pii_serialize(self, serializer) } } /// Masked serialization. /// /// the default behaviour for secrets is to serialize in exposed format since the common use cases /// for storing the secret to database or sending it over the network requires the secret to be exposed /// This method allows to serialize the secret in masked format if needed for logs or other insecure exposures pub fn masked_serialize<T: Serialize>(value: &T) -> Result<Value, serde_json::Error> { value.serialize(PIISerializer { inner: JsonValueSerializer, }) } /// Masked serialization. /// /// Trait object for supporting serialization to Value while accounting for masking /// The usual Serde Serialize trait cannot be used as trait objects /// like &dyn Serialize or boxed trait objects like Box<dyn Serialize> because of Rust's "object safety" rules. /// In particular, the trait contains generic methods which cannot be made into a trait object. /// In this case we remove the generic for assuming the serialization to be of 2 types only raw json or masked json pub trait ErasedMaskSerialize: ErasedSerialize { /// Masked serialization. 
fn masked_serialize(&self) -> Result<Value, serde_json::Error>; } impl<T: Serialize + ErasedSerialize> ErasedMaskSerialize for T { fn masked_serialize(&self) -> Result<Value, serde_json::Error> { masked_serialize(self) } } impl Serialize for dyn ErasedMaskSerialize + '_ { fn serialize<S>(&self, serializer: S) -> Result<S::Ok, S::Error> where S: Serializer, { erased_serde::serialize(self, serializer) } } impl Serialize for dyn ErasedMaskSerialize + '_ + Send { fn serialize<S>(&self, serializer: S) -> Result<S::Ok, S::Error> where S: Serializer, { erased_serde::serialize(self, serializer) } } use pii_serializer::PIISerializer; mod pii_serializer { use std::fmt::Display; pub(super) fn pii_serialize< V: Serialize, T: std::fmt::Debug + PeekInterface<V>, S: Serializer, >( value: &T, serializer: S, ) -> Result<S::Ok, S::Error> { // Mask the value if the serializer is of type PIISerializer // or send empty map if the serializer is of type FlatMapSerializer over PiiSerializer if std::any::type_name::<S>() == std::any::type_name::<PIISerializer>() { format!("{value:?}").serialize(serializer) } else if std::any::type_name::<S>() == std::any::type_name::< serde::__private::ser::FlatMapSerializer<'_, SerializeMap<PIISerializer>>, >() { std::collections::HashMap::<String, String>::from([]).serialize(serializer) } else { value.peek().serialize(serializer) } } use serde::{Serialize, Serializer}; use serde_json::{value::Serializer as JsonValueSerializer, Map, Value}; use crate::PeekInterface; pub(super) struct PIISerializer { pub inner: JsonValueSerializer, } impl Clone for PIISerializer { fn clone(&self) -> Self { Self { inner: JsonValueSerializer, } } } impl Serializer for PIISerializer { type Ok = Value; type Error = serde_json::Error; type SerializeSeq = SerializeVec<Self>; type SerializeTuple = SerializeVec<Self>; type SerializeTupleStruct = SerializeVec<Self>; type SerializeTupleVariant = SerializeTupleVariant<Self>; type SerializeMap = SerializeMap<Self>; type 
SerializeStruct = SerializeMap<Self>; type SerializeStructVariant = SerializeStructVariant<Self>; #[inline] fn serialize_bool(self, value: bool) -> Result<Self::Ok, Self::Error> { self.inner.serialize_bool(value) } #[inline] fn serialize_i8(self, value: i8) -> Result<Self::Ok, Self::Error> { self.serialize_i64(value.into()) } #[inline] fn serialize_i16(self, value: i16) -> Result<Self::Ok, Self::Error> { self.serialize_i64(value.into()) } #[inline] fn serialize_i32(self, value: i32) -> Result<Self::Ok, Self::Error> { self.serialize_i64(value.into()) } fn serialize_i64(self, value: i64) -> Result<Self::Ok, Self::Error> { self.inner.serialize_i64(value) } fn serialize_i128(self, value: i128) -> Result<Self::Ok, Self::Error> { self.inner.serialize_i128(value) } #[inline] fn serialize_u8(self, value: u8) -> Result<Self::Ok, Self::Error> { self.serialize_u64(value.into()) } #[inline] fn serialize_u16(self, value: u16) -> Result<Self::Ok, Self::Error> { self.serialize_u64(value.into()) } #[inline] fn serialize_u32(self, value: u32) -> Result<Self::Ok, Self::Error> { self.serialize_u64(value.into()) } #[inline] fn serialize_u64(self, value: u64) -> Result<Self::Ok, Self::Error> { Ok(Value::Number(value.into())) } fn serialize_u128(self, value: u128) -> Result<Self::Ok, Self::Error> { self.inner.serialize_u128(value) } #[inline] fn serialize_f32(self, float: f32) -> Result<Self::Ok, Self::Error> { Ok(Value::from(float)) } #[inline] fn serialize_f64(self, float: f64) -> Result<Self::Ok, Self::Error> { Ok(Value::from(float)) } #[inline] fn serialize_char(self, value: char) -> Result<Self::Ok, Self::Error> { let mut s = String::new(); s.push(value); Ok(Value::String(s)) } #[inline] fn serialize_str(self, value: &str) -> Result<Self::Ok, Self::Error> { Ok(Value::String(value.to_owned())) } fn serialize_bytes(self, value: &[u8]) -> Result<Self::Ok, Self::Error> { let vec = value.iter().map(|&b| Value::Number(b.into())).collect(); Ok(Value::Array(vec)) } #[inline] fn 
serialize_unit(self) -> Result<Self::Ok, Self::Error> { Ok(Value::Null) } #[inline] fn serialize_unit_struct(self, _name: &'static str) -> Result<Self::Ok, Self::Error> { self.serialize_unit() } #[inline] fn serialize_unit_variant( self, _name: &'static str, _variant_index: u32, variant: &'static str, ) -> Result<Self::Ok, Self::Error> { self.serialize_str(variant) } #[inline] fn serialize_newtype_struct<T>( self, _name: &'static str, value: &T, ) -> Result<Self::Ok, Self::Error> where T: ?Sized + Serialize, { value.serialize(self) } fn serialize_newtype_variant<T>( self, _name: &'static str, _variant_index: u32, variant: &'static str, value: &T, ) -> Result<Self::Ok, Self::Error> where T: ?Sized + Serialize, { let mut values = Map::new(); values.insert(String::from(variant), value.serialize(self)?); Ok(Value::Object(values)) } #[inline] fn serialize_none(self) -> Result<Self::Ok, Self::Error> { self.serialize_unit() } #[inline] fn serialize_some<T>(self, value: &T) -> Result<Self::Ok, Self::Error> where T: ?Sized + Serialize, { value.serialize(self) } fn serialize_seq(self, len: Option<usize>) -> Result<Self::SerializeSeq, Self::Error> { Ok(SerializeVec { vec: Vec::with_capacity(len.unwrap_or(0)), ser: self, }) } fn serialize_tuple(self, len: usize) -> Result<Self::SerializeTuple, Self::Error> { self.serialize_seq(Some(len)) } fn serialize_tuple_struct( self, _name: &'static str, len: usize, ) -> Result<Self::SerializeTupleStruct, Self::Error> { self.serialize_seq(Some(len)) } fn serialize_tuple_variant( self, _name: &'static str, _variant_index: u32, variant: &'static str, len: usize, ) -> Result<Self::SerializeTupleVariant, Self::Error> { Ok(SerializeTupleVariant { name: String::from(variant), vec: Vec::with_capacity(len), ser: self, }) } fn serialize_map(self, len: Option<usize>) -> Result<Self::SerializeMap, Self::Error> { Ok(SerializeMap { inner: self.clone().inner.serialize_map(len)?, ser: self, }) } fn serialize_struct( self, _name: &'static str, len: 
usize, ) -> Result<Self::SerializeStruct, Self::Error> { self.serialize_map(Some(len)) } fn serialize_struct_variant( self, _name: &'static str, _variant_index: u32, variant: &'static str, _len: usize, ) -> Result<Self::SerializeStructVariant, Self::Error> { Ok(SerializeStructVariant { name: String::from(variant), map: Map::new(), ser: self, }) } fn collect_str<T>(self, value: &T) -> Result<Self::Ok, Self::Error> where T: ?Sized + Display, { self.inner.collect_str(value) } } pub(super) struct SerializeVec<T: Serializer> { vec: Vec<Value>, ser: T, } impl<T: Serializer<Ok = Value> + Clone> serde::ser::SerializeSeq for SerializeVec<T> { type Ok = Value; type Error = T::Error; fn serialize_element<V>(&mut self, value: &V) -> Result<(), Self::Error> where V: ?Sized + Serialize, { self.vec.push(value.serialize(self.ser.clone())?); Ok(()) } fn end(self) -> Result<Self::Ok, Self::Error> { Ok(Value::Array(self.vec)) } } impl<T: Serializer<Ok = Value> + Clone> serde::ser::SerializeTuple for SerializeVec<T> { type Ok = Value; type Error = T::Error; fn serialize_element<V>(&mut self, value: &V) -> Result<(), Self::Error> where V: ?Sized + Serialize, { serde::ser::SerializeSeq::serialize_element(self, value) } fn end(self) -> Result<Self::Ok, Self::Error> { serde::ser::SerializeSeq::end(self) } } impl<T: Serializer<Ok = Value> + Clone> serde::ser::SerializeTupleStruct for SerializeVec<T> { type Ok = Value; type Error = T::Error; fn serialize_field<V>(&mut self, value: &V) -> Result<(), Self::Error> where V: ?Sized + Serialize, { serde::ser::SerializeSeq::serialize_element(self, value) } fn end(self) -> Result<Self::Ok, Self::Error> { serde::ser::SerializeSeq::end(self) } } pub(super) struct SerializeStructVariant<T: Serializer> { name: String, map: Map<String, Value>, ser: T, } impl<T: Serializer<Ok = Value> + Clone> serde::ser::SerializeStructVariant for SerializeStructVariant<T> { type Ok = Value; type Error = T::Error; fn serialize_field<V>(&mut self, key: &'static str, 
value: &V) -> Result<(), Self::Error> where V: ?Sized + Serialize, { self.map .insert(String::from(key), value.serialize(self.ser.clone())?); Ok(()) } fn end(self) -> Result<Self::Ok, Self::Error> { let mut object = Map::new(); object.insert(self.name, Value::Object(self.map)); Ok(Value::Object(object)) } } pub(super) struct SerializeTupleVariant<T: Serializer> { name: String, vec: Vec<Value>, ser: T, } impl<T: Serializer<Ok = Value> + Clone> serde::ser::SerializeTupleVariant for SerializeTupleVariant<T> { type Ok = Value; type Error = T::Error; fn serialize_field<V>(&mut self, value: &V) -> Result<(), Self::Error> where V: ?Sized + Serialize, { self.vec.push(value.serialize(self.ser.clone())?); Ok(()) } fn end(self) -> Result<Value, Self::Error> { let mut object = Map::new(); object.insert(self.name, Value::Array(self.vec)); Ok(Value::Object(object)) } } pub(super) struct SerializeMap<T: Serializer> { inner: <serde_json::value::Serializer as Serializer>::SerializeMap, ser: T, } impl<T: Serializer<Ok = Value, Error = serde_json::Error> + Clone> serde::ser::SerializeMap for SerializeMap<T> { type Ok = Value; type Error = T::Error; fn serialize_key<V>(&mut self, key: &V) -> Result<(), Self::Error> where V: ?Sized + Serialize, { self.inner.serialize_key(key)?; Ok(()) } fn serialize_value<V>(&mut self, value: &V) -> Result<(), Self::Error> where V: ?Sized + Serialize, { let value = value.serialize(self.ser.clone())?; self.inner.serialize_value(&value)?; Ok(()) } fn end(self) -> Result<Value, Self::Error> { self.inner.end() } } impl<T: Serializer<Ok = Value, Error = serde_json::Error> + Clone> serde::ser::SerializeStruct for SerializeMap<T> { type Ok = Value; type Error = T::Error; fn serialize_field<V>(&mut self, key: &'static str, value: &V) -> Result<(), Self::Error> where V: ?Sized + Serialize, { serde::ser::SerializeMap::serialize_entry(self, key, value) } fn end(self) -> Result<Value, Self::Error> { serde::ser::SerializeMap::end(self) } } }
crates/masking/src/serde.rs
masking::src::serde
3,967
true
// File: crates/masking/src/maskable.rs // Module: masking::src::maskable //! This module contains Masking objects and traits use crate::{ExposeInterface, Secret}; /// An Enum that allows us to optionally mask data, based on which enum variant that data is stored /// in. #[derive(Clone, Eq, PartialEq)] pub enum Maskable<T: Eq + PartialEq + Clone> { /// Variant which masks the data by wrapping in a Secret Masked(Secret<T>), /// Varant which doesn't mask the data Normal(T), } impl<T: std::fmt::Debug + Clone + Eq + PartialEq> std::fmt::Debug for Maskable<T> { fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result { match self { Self::Masked(secret_value) => std::fmt::Debug::fmt(secret_value, f), Self::Normal(value) => std::fmt::Debug::fmt(value, f), } } } impl<T: Eq + PartialEq + Clone + std::hash::Hash> std::hash::Hash for Maskable<T> { fn hash<H: std::hash::Hasher>(&self, state: &mut H) { match self { Self::Masked(value) => crate::PeekInterface::peek(value).hash(state), Self::Normal(value) => value.hash(state), } } } impl<T: Eq + PartialEq + Clone> Maskable<T> { /// Get the inner data while consuming self pub fn into_inner(self) -> T { match self { Self::Masked(inner_secret) => inner_secret.expose(), Self::Normal(inner) => inner, } } /// Create a new Masked data pub fn new_masked(item: Secret<T>) -> Self { Self::Masked(item) } /// Create a new non-masked data pub fn new_normal(item: T) -> Self { Self::Normal(item) } /// Checks whether the data is masked. /// Returns `true` if the data is wrapped in the `Masked` variant, /// returns `false` otherwise. pub fn is_masked(&self) -> bool { matches!(self, Self::Masked(_)) } /// Checks whether the data is normal (not masked). /// Returns `true` if the data is wrapped in the `Normal` variant, /// returns `false` otherwise. 
pub fn is_normal(&self) -> bool { matches!(self, Self::Normal(_)) } } /// Trait for providing a method on custom types for constructing `Maskable` pub trait Mask { /// The type returned by the `into_masked()` method. Must implement `PartialEq`, `Eq` and `Clone` type Output: Eq + Clone + PartialEq; /// Construct a `Maskable` instance that wraps `Self::Output` by consuming `self` fn into_masked(self) -> Maskable<Self::Output>; } impl Mask for String { type Output = Self; fn into_masked(self) -> Maskable<Self::Output> { Maskable::new_masked(self.into()) } } impl Mask for Secret<String> { type Output = String; fn into_masked(self) -> Maskable<Self::Output> { Maskable::new_masked(self) } } impl<T: Eq + PartialEq + Clone> From<T> for Maskable<T> { fn from(value: T) -> Self { Self::new_normal(value) } } impl From<&str> for Maskable<String> { fn from(value: &str) -> Self { Self::new_normal(value.to_string()) } }
crates/masking/src/maskable.rs
masking::src::maskable
801
true
// File: crates/masking/src/lib.rs // Module: masking::src::lib #![cfg_attr(docsrs, feature(doc_auto_cfg, doc_cfg_hide))] #![cfg_attr(docsrs, doc(cfg_hide(doc)))] #![warn(missing_docs)] //! Personal Identifiable Information protection. Wrapper types and traits for secret management which help ensure they aren't accidentally copied, logged, or otherwise exposed (as much as possible), and also ensure secrets are securely wiped from memory when dropped. //! Secret-keeping library inspired by secrecy. #![doc = include_str!(concat!(env!("CARGO_MANIFEST_DIR" ), "/", "README.md"))] pub use zeroize::{self, DefaultIsZeroes, Zeroize as ZeroizableSecret}; mod strategy; pub use strategy::{Strategy, WithType, WithoutType}; mod abs; pub use abs::{ExposeInterface, ExposeOptionInterface, PeekInterface, SwitchStrategy}; mod secret; mod strong_secret; #[cfg(feature = "serde")] pub use secret::JsonMaskStrategy; pub use secret::Secret; pub use strong_secret::StrongSecret; #[cfg(feature = "alloc")] extern crate alloc; #[cfg(feature = "alloc")] mod boxed; #[cfg(feature = "bytes")] mod bytes; #[cfg(feature = "bytes")] pub use self::bytes::SecretBytesMut; #[cfg(feature = "alloc")] mod string; #[cfg(feature = "alloc")] mod vec; #[cfg(feature = "serde")] mod serde; #[cfg(feature = "serde")] pub use crate::serde::{ masked_serialize, Deserialize, ErasedMaskSerialize, SerializableSecret, Serialize, }; /// This module should be included with asterisk. /// /// `use masking::prelude::*;` pub mod prelude { pub use super::{ExposeInterface, ExposeOptionInterface, PeekInterface}; } #[cfg(feature = "diesel")] mod diesel; #[cfg(feature = "cassandra")] mod cassandra; pub mod maskable; pub use maskable::*;
crates/masking/src/lib.rs
masking::src::lib
406
true
// File: crates/masking/src/boxed.rs // Module: masking::src::boxed //! `Box` types containing secrets //! //! There is not alias type by design. #[cfg(feature = "serde")] use super::{SerializableSecret, Serialize}; #[cfg(feature = "serde")] impl<S: Serialize> SerializableSecret for Box<S> {}
crates/masking/src/boxed.rs
masking::src::boxed
72
true
// File: crates/masking/src/cassandra.rs // Module: masking::src::cassandra use scylla::{ deserialize::DeserializeValue, frame::response::result::ColumnType, serialize::{ value::SerializeValue, writers::{CellWriter, WrittenCellProof}, SerializationError, }, }; use crate::{abs::PeekInterface, StrongSecret}; impl<T> SerializeValue for StrongSecret<T> where T: SerializeValue + zeroize::Zeroize + Clone, { fn serialize<'b>( &self, column_type: &ColumnType<'_>, writer: CellWriter<'b>, ) -> Result<WrittenCellProof<'b>, SerializationError> { self.peek().serialize(column_type, writer) } } impl<'frame, 'metadata, T> DeserializeValue<'frame, 'metadata> for StrongSecret<T> where T: DeserializeValue<'frame, 'metadata> + zeroize::Zeroize + Clone, { fn type_check(column_type: &ColumnType<'_>) -> Result<(), scylla::deserialize::TypeCheckError> { T::type_check(column_type) } fn deserialize( column_type: &'metadata ColumnType<'metadata>, v: Option<scylla::deserialize::FrameSlice<'frame>>, ) -> Result<Self, scylla::deserialize::DeserializationError> { Ok(Self::new(T::deserialize(column_type, v)?)) } }
crates/masking/src/cassandra.rs
masking::src::cassandra
307
true
// File: crates/masking/src/vec.rs // Module: masking::src::vec //! Secret `Vec` types //! //! There is not alias type by design. #[cfg(feature = "serde")] use super::{SerializableSecret, Serialize}; #[cfg(feature = "serde")] impl<S: Serialize> SerializableSecret for Vec<S> {}
crates/masking/src/vec.rs
masking::src::vec
71
true
// File: crates/masking/src/diesel.rs // Module: masking::src::diesel //! Diesel-related. use diesel::{ backend::Backend, deserialize::{self, FromSql, Queryable}, expression::AsExpression, internal::derives::as_expression::Bound, serialize::{self, Output, ToSql}, sql_types, }; use crate::{Secret, Strategy, StrongSecret, ZeroizableSecret}; impl<S, I, T> AsExpression<T> for &Secret<S, I> where T: sql_types::SingleValue, I: Strategy<S>, { type Expression = Bound<T, Self>; fn as_expression(self) -> Self::Expression { Bound::new(self) } } impl<S, I, T> AsExpression<T> for &&Secret<S, I> where T: sql_types::SingleValue, I: Strategy<S>, { type Expression = Bound<T, Self>; fn as_expression(self) -> Self::Expression { Bound::new(self) } } impl<S, I, T, DB> ToSql<T, DB> for Secret<S, I> where DB: Backend, S: ToSql<T, DB>, I: Strategy<S>, { fn to_sql<'b>(&'b self, out: &mut Output<'b, '_, DB>) -> serialize::Result { ToSql::<T, DB>::to_sql(&self.inner_secret, out) } } impl<DB, S, T, I> FromSql<T, DB> for Secret<S, I> where DB: Backend, S: FromSql<T, DB>, I: Strategy<S>, { fn from_sql(bytes: DB::RawValue<'_>) -> deserialize::Result<Self> { S::from_sql(bytes).map(|raw| raw.into()) } } impl<S, I, T> AsExpression<T> for Secret<S, I> where T: sql_types::SingleValue, I: Strategy<S>, { type Expression = Bound<T, Self>; fn as_expression(self) -> Self::Expression { Bound::new(self) } } impl<ST, DB, S, I> Queryable<ST, DB> for Secret<S, I> where DB: Backend, I: Strategy<S>, ST: sql_types::SingleValue, Self: FromSql<ST, DB>, { type Row = Self; fn build(row: Self::Row) -> deserialize::Result<Self> { Ok(row) } } impl<S, I, T> AsExpression<T> for &StrongSecret<S, I> where T: sql_types::SingleValue, S: ZeroizableSecret, I: Strategy<S>, { type Expression = Bound<T, Self>; fn as_expression(self) -> Self::Expression { Bound::new(self) } } impl<S, I, T> AsExpression<T> for &&StrongSecret<S, I> where T: sql_types::SingleValue, S: ZeroizableSecret, I: Strategy<S>, { type Expression = Bound<T, 
Self>; fn as_expression(self) -> Self::Expression { Bound::new(self) } } impl<S, I, DB, T> ToSql<T, DB> for StrongSecret<S, I> where DB: Backend, S: ToSql<T, DB> + ZeroizableSecret, I: Strategy<S>, { fn to_sql<'b>(&'b self, out: &mut Output<'b, '_, DB>) -> serialize::Result { ToSql::<T, DB>::to_sql(&self.inner_secret, out) } } impl<DB, S, I, T> FromSql<T, DB> for StrongSecret<S, I> where DB: Backend, S: FromSql<T, DB> + ZeroizableSecret, I: Strategy<S>, { fn from_sql(bytes: DB::RawValue<'_>) -> deserialize::Result<Self> { S::from_sql(bytes).map(|raw| raw.into()) } } impl<S, I, T> AsExpression<T> for StrongSecret<S, I> where T: sql_types::SingleValue, S: ZeroizableSecret, I: Strategy<S>, { type Expression = Bound<T, Self>; fn as_expression(self) -> Self::Expression { Bound::new(self) } } impl<ST, DB, S, I> Queryable<ST, DB> for StrongSecret<S, I> where I: Strategy<S>, DB: Backend, S: ZeroizableSecret, ST: sql_types::SingleValue, Self: FromSql<ST, DB>, { type Row = Self; fn build(row: Self::Row) -> deserialize::Result<Self> { Ok(row) } }
crates/masking/src/diesel.rs
masking::src::diesel
1,040
true
// File: crates/masking/src/string.rs // Module: masking::src::string //! Secret strings //! //! There is not alias type by design. use alloc::{ str::FromStr, string::{String, ToString}, }; #[cfg(feature = "serde")] use super::SerializableSecret; use super::{Secret, Strategy}; use crate::StrongSecret; #[cfg(feature = "serde")] impl SerializableSecret for String {} impl<I> FromStr for Secret<String, I> where I: Strategy<String>, { type Err = core::convert::Infallible; fn from_str(src: &str) -> Result<Self, Self::Err> { Ok(Self::new(src.to_string())) } } impl<I> FromStr for StrongSecret<String, I> where I: Strategy<String>, { type Err = core::convert::Infallible; fn from_str(src: &str) -> Result<Self, Self::Err> { Ok(Self::new(src.to_string())) } }
crates/masking/src/string.rs
masking::src::string
214
true
// File: crates/masking/src/bytes.rs // Module: masking::src::bytes //! Optional `Secret` wrapper type for the `bytes::BytesMut` crate. use core::fmt; use bytes::BytesMut; #[cfg(all(feature = "bytes", feature = "serde"))] use serde::de::{self, Deserialize}; use super::{PeekInterface, ZeroizableSecret}; /// Instance of [`BytesMut`] protected by a type that impls the [`ExposeInterface`] /// trait like `Secret<T>`. /// /// Because of the nature of how the `BytesMut` type works, it needs some special /// care in order to have a proper zeroizing drop handler. #[derive(Clone)] #[cfg_attr(docsrs, cfg(feature = "bytes"))] pub struct SecretBytesMut(BytesMut); impl SecretBytesMut { /// Wrap bytes in `SecretBytesMut` pub fn new(bytes: impl Into<BytesMut>) -> Self { Self(bytes.into()) } } impl PeekInterface<BytesMut> for SecretBytesMut { fn peek(&self) -> &BytesMut { &self.0 } fn peek_mut(&mut self) -> &mut BytesMut { &mut self.0 } } impl fmt::Debug for SecretBytesMut { fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { write!(f, "SecretBytesMut([REDACTED])") } } impl From<BytesMut> for SecretBytesMut { fn from(bytes: BytesMut) -> Self { Self::new(bytes) } } impl Drop for SecretBytesMut { fn drop(&mut self) { self.0.resize(self.0.capacity(), 0); self.0.as_mut().zeroize(); debug_assert!(self.0.as_ref().iter().all(|b| *b == 0)); } } #[cfg(all(feature = "bytes", feature = "serde"))] impl<'de> Deserialize<'de> for SecretBytesMut { fn deserialize<D: de::Deserializer<'de>>(deserializer: D) -> Result<Self, D::Error> { struct SecretBytesVisitor; impl<'de> de::Visitor<'de> for SecretBytesVisitor { type Value = SecretBytesMut; fn expecting(&self, formatter: &mut fmt::Formatter<'_>) -> fmt::Result { formatter.write_str("byte array") } #[inline] fn visit_bytes<E>(self, v: &[u8]) -> Result<Self::Value, E> where E: de::Error, { let mut bytes = BytesMut::with_capacity(v.len()); bytes.extend_from_slice(v); Ok(SecretBytesMut(bytes)) } #[inline] fn visit_seq<V>(self, mut seq: V) -> 
Result<Self::Value, V::Error> where V: de::SeqAccess<'de>, { // 4096 is cargo culted from upstream let len = core::cmp::min(seq.size_hint().unwrap_or(0), 4096); let mut bytes = BytesMut::with_capacity(len); use bytes::BufMut; while let Some(value) = seq.next_element()? { bytes.put_u8(value); } Ok(SecretBytesMut(bytes)) } } deserializer.deserialize_bytes(SecretBytesVisitor) } }
crates/masking/src/bytes.rs
masking::src::bytes
731
true
// File: crates/masking/src/secret.rs
// Module: masking::src::secret

//! Structure describing secret.

use std::{fmt, marker::PhantomData};

use crate::{strategy::Strategy, PeekInterface, StrongSecret};

/// Secret thing.
///
/// To get access to value use method `expose()` of trait [`crate::ExposeInterface`].
///
/// ## Masking
/// Use the [`crate::strategy::Strategy`] trait to implement a masking strategy on a zero-variant
/// enum and pass this enum as a second generic parameter to [`Secret`] while defining it.
/// [`Secret`] will take care of applying the masking strategy on the inner secret when being
/// displayed.
///
/// ## Masking Example
///
/// ```
/// use masking::Strategy;
/// use masking::Secret;
/// use std::fmt;
///
/// enum MyStrategy {}
///
/// impl<T> Strategy<T> for MyStrategy
/// where
///     T: fmt::Display
/// {
///     fn fmt(val: &T, f: &mut fmt::Formatter<'_>) -> fmt::Result {
///         write!(f, "{}", val.to_string().to_ascii_lowercase())
///     }
/// }
///
/// let my_secret: Secret<String, MyStrategy> = Secret::new("HELLO".to_string());
///
/// assert_eq!("hello", &format!("{:?}", my_secret));
/// ```
pub struct Secret<Secret, MaskingStrategy = crate::WithType>
where
    MaskingStrategy: Strategy<Secret>,
{
    // The wrapped value; only reachable via PeekInterface / ExposeInterface.
    pub(crate) inner_secret: Secret,
    // Zero-sized marker carrying the masking strategy type.
    pub(crate) masking_strategy: PhantomData<MaskingStrategy>,
}

impl<SecretValue, MaskingStrategy> Secret<SecretValue, MaskingStrategy>
where
    MaskingStrategy: Strategy<SecretValue>,
{
    /// Take ownership of a secret value
    pub fn new(secret: SecretValue) -> Self {
        Self {
            inner_secret: secret,
            masking_strategy: PhantomData,
        }
    }

    /// Zip 2 secrets with the same masking strategy into one
    pub fn zip<OtherSecretValue>(
        self,
        other: Secret<OtherSecretValue, MaskingStrategy>,
    ) -> Secret<(SecretValue, OtherSecretValue), MaskingStrategy>
    where
        MaskingStrategy: Strategy<OtherSecretValue> + Strategy<(SecretValue, OtherSecretValue)>,
    {
        (self.inner_secret, other.inner_secret).into()
    }

    /// consume self and modify the inner value
    pub fn map<OtherSecretValue>(
        self,
        f: impl FnOnce(SecretValue) -> OtherSecretValue,
    ) -> Secret<OtherSecretValue, MaskingStrategy>
    where
        MaskingStrategy: Strategy<OtherSecretValue>,
    {
        f(self.inner_secret).into()
    }

    /// Convert to [`StrongSecret`]
    pub fn into_strong(self) -> StrongSecret<SecretValue, MaskingStrategy>
    where
        SecretValue: zeroize::DefaultIsZeroes,
    {
        StrongSecret::new(self.inner_secret)
    }

    /// Convert to [`Secret`] with a reference to the inner secret
    pub fn as_ref(&self) -> Secret<&SecretValue, MaskingStrategy>
    where
        MaskingStrategy: for<'a> Strategy<&'a SecretValue>,
    {
        Secret::new(self.peek())
    }
}

impl<SecretValue, MaskingStrategy> PeekInterface<SecretValue>
    for Secret<SecretValue, MaskingStrategy>
where
    MaskingStrategy: Strategy<SecretValue>,
{
    fn peek(&self) -> &SecretValue {
        &self.inner_secret
    }

    fn peek_mut(&mut self) -> &mut SecretValue {
        &mut self.inner_secret
    }
}

impl<SecretValue, MaskingStrategy> From<SecretValue> for Secret<SecretValue, MaskingStrategy>
where
    MaskingStrategy: Strategy<SecretValue>,
{
    fn from(secret: SecretValue) -> Self {
        Self::new(secret)
    }
}

// Manual Clone impl: a derive would (incorrectly) also require MaskingStrategy: Clone.
impl<SecretValue, MaskingStrategy> Clone for Secret<SecretValue, MaskingStrategy>
where
    SecretValue: Clone,
    MaskingStrategy: Strategy<SecretValue>,
{
    fn clone(&self) -> Self {
        Self {
            inner_secret: self.inner_secret.clone(),
            masking_strategy: PhantomData,
        }
    }
}

// Equality compares the inner values; it does not mask them.
impl<SecretValue, MaskingStrategy> PartialEq for Secret<SecretValue, MaskingStrategy>
where
    Self: PeekInterface<SecretValue>,
    SecretValue: PartialEq,
    MaskingStrategy: Strategy<SecretValue>,
{
    fn eq(&self, other: &Self) -> bool {
        self.peek().eq(other.peek())
    }
}

impl<SecretValue, MaskingStrategy> Eq for Secret<SecretValue, MaskingStrategy>
where
    Self: PeekInterface<SecretValue>,
    SecretValue: Eq,
    MaskingStrategy: Strategy<SecretValue>,
{
}

// Debug delegates to the masking strategy, so the raw value is never printed.
impl<SecretValue, MaskingStrategy> fmt::Debug for Secret<SecretValue, MaskingStrategy>
where
    MaskingStrategy: Strategy<SecretValue>,
{
    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
        MaskingStrategy::fmt(&self.inner_secret, f)
    }
}

impl<SecretValue, MaskingStrategy> Default for Secret<SecretValue, MaskingStrategy>
where
    SecretValue: Default,
    MaskingStrategy: Strategy<SecretValue>,
{
    fn default() -> Self {
        SecretValue::default().into()
    }
}

// Required by base64-serde to serialize Secret of Vec<u8> which contains the base64 decoded value
impl AsRef<[u8]> for Secret<Vec<u8>> {
    fn as_ref(&self) -> &[u8] {
        self.peek().as_slice()
    }
}

/// Strategy for masking JSON values
#[cfg(feature = "serde")]
pub enum JsonMaskStrategy {}

#[cfg(feature = "serde")]
impl Strategy<serde_json::Value> for JsonMaskStrategy {
    // Recursively walks the JSON value, preserving structure (object keys, array
    // shape, null) while masking every leaf scalar.
    fn fmt(value: &serde_json::Value, f: &mut fmt::Formatter<'_>) -> fmt::Result {
        match value {
            serde_json::Value::Object(map) => {
                write!(f, "{{")?;
                let mut first = true;
                for (key, val) in map {
                    if !first {
                        write!(f, ", ")?;
                    }
                    first = false;
                    // Keys are printed in the clear; only values are masked.
                    write!(f, "\"{key}\":")?;
                    Self::fmt(val, f)?;
                }
                write!(f, "}}")
            }
            serde_json::Value::Array(arr) => {
                write!(f, "[")?;
                let mut first = true;
                for val in arr {
                    if !first {
                        write!(f, ", ")?;
                    }
                    first = false;
                    Self::fmt(val, f)?;
                }
                write!(f, "]")
            }
            serde_json::Value::String(s) => {
                // For strings, we show a masked version that gives a hint about the content
                // NOTE(review): `s.len()` and `&s[0..1]` / `&s[s.len() - 1..]` are
                // byte-based; if the first or last character is multi-byte UTF-8,
                // the slice is not on a char boundary and this will panic — TODO
                // confirm inputs are ASCII or switch to char-boundary-safe slicing.
                let masked = if s.len() <= 2 {
                    "**".to_string()
                } else if s.len() <= 6 {
                    format!("{}**", &s[0..1])
                } else {
                    // For longer strings, show first and last character with length in between
                    format!(
                        "{}**{}**{}",
                        &s[0..1],
                        s.len() - 2,
                        &s[s.len() - 1..s.len()]
                    )
                };
                write!(f, "\"{masked}\"")
            }
            serde_json::Value::Number(n) => {
                // For numbers, we can show the order of magnitude
                if n.is_i64() || n.is_u64() {
                    // One '*' per digit (includes the '-' sign for negatives).
                    let num_str = n.to_string();
                    let masked_num = "*".repeat(num_str.len());
                    write!(f, "{masked_num}")
                } else if n.is_f64() {
                    // For floats, just use a generic mask
                    write!(f, "**.**")
                } else {
                    write!(f, "0")
                }
            }
            serde_json::Value::Bool(b) => {
                // For booleans, we can show a hint about which one it is
                write!(f, "{}", if *b { "**true" } else { "**false" })
            }
            serde_json::Value::Null => write!(f, "null"),
        }
    }
}

// Allow a Secret-wrapped prost message to be used directly as a protobuf field
// by delegating the Message impl to the inner value.
#[cfg(feature = "proto_tonic")]
impl<T> prost::Message for Secret<T, crate::WithType>
where
    T: prost::Message + Default + Clone,
{
    fn encode_raw(&self, buf: &mut impl bytes::BufMut) {
        self.peek().encode_raw(buf);
    }

    fn merge_field(
        &mut self,
        tag: u32,
        wire_type: prost::encoding::WireType,
        buf: &mut impl bytes::Buf,
        ctx: prost::encoding::DecodeContext,
    ) -> Result<(), prost::DecodeError> {
        // NOTE(review): only field tag 1 is merged into the inner message; every
        // other tag is skipped. Presumably the wrapped message has a single
        // field — verify against the proto definitions this is used with.
        if tag == 1 {
            self.peek_mut().merge_field(tag, wire_type, buf, ctx)
        } else {
            prost::encoding::skip_field(wire_type, tag, buf, ctx)
        }
    }

    fn encoded_len(&self) -> usize {
        self.peek().encoded_len()
    }

    fn clear(&mut self) {
        self.peek_mut().clear();
    }
}

#[cfg(test)]
#[cfg(feature = "serde")]
mod tests {
    use serde_json::json;

    use super::*;

    // End-to-end check of JsonMaskStrategy: every scalar leaf is masked with the
    // documented pattern, structure is preserved, and no raw value leaks.
    #[test]
    #[allow(clippy::expect_used)]
    fn test_json_mask_strategy() {
        // Create a sample JSON with different types for testing
        let original = json!({
            "user": {
                "name": "John Doe",
                "email": "john@example.com",
                "age": 35,
                "verified": true
            },
            "card": {
                "number": "4242424242424242",
                "cvv": 123,
                "amount": 99.99
            },
            "tags": ["personal", "premium"],
            "null_value": null,
            "short": "hi"
        });

        // Apply the JsonMaskStrategy
        let secret = Secret::<_, JsonMaskStrategy>::new(original.clone());
        let masked_str = format!("{secret:?}");

        // Get specific values from original
        let original_obj = original.as_object().expect("Original should be an object");
        let user_obj = original_obj["user"]
            .as_object()
            .expect("User should be an object");
        let name = user_obj["name"].as_str().expect("Name should be a string");
        let email = user_obj["email"]
            .as_str()
            .expect("Email should be a string");
        let age = user_obj["age"].as_i64().expect("Age should be a number");
        let verified = user_obj["verified"]
            .as_bool()
            .expect("Verified should be a boolean");
        let card_obj = original_obj["card"]
            .as_object()
            .expect("Card should be an object");
        let card_number = card_obj["number"]
            .as_str()
            .expect("Card number should be a string");
        let cvv = card_obj["cvv"].as_i64().expect("CVV should be a number");
        let tags = original_obj["tags"]
            .as_array()
            .expect("Tags should be an array");
        let tag1 = tags
            .first()
            .and_then(|v| v.as_str())
            .expect("First tag should be a string");

        // Now explicitly verify the masking patterns for each value type

        // 1. String masking - pattern: first char + ** + length - 2 + ** + last char
        let expected_name_mask = format!(
            "\"{}**{}**{}\"",
            &name[0..1],
            name.len() - 2,
            &name[name.len() - 1..]
        );
        let expected_email_mask = format!(
            "\"{}**{}**{}\"",
            &email[0..1],
            email.len() - 2,
            &email[email.len() - 1..]
        );
        let expected_card_mask = format!(
            "\"{}**{}**{}\"",
            &card_number[0..1],
            card_number.len() - 2,
            &card_number[card_number.len() - 1..]
        );
        let expected_tag1_mask = if tag1.len() <= 2 {
            "\"**\"".to_string()
        } else if tag1.len() <= 6 {
            format!("\"{}**\"", &tag1[0..1])
        } else {
            format!(
                "\"{}**{}**{}\"",
                &tag1[0..1],
                tag1.len() - 2,
                &tag1[tag1.len() - 1..]
            )
        };
        let expected_short_mask = "\"**\"".to_string(); // For "hi"

        // 2. Number masking
        let expected_age_mask = "*".repeat(age.to_string().len()); // Repeat * for the number of digits
        let expected_cvv_mask = "*".repeat(cvv.to_string().len());

        // 3. Boolean masking
        let expected_verified_mask = if verified { "**true" } else { "**false" };

        // Check that the masked output includes the expected masked patterns
        assert!(
            masked_str.contains(&expected_name_mask),
            "Name not masked correctly. Expected: {expected_name_mask}"
        );
        assert!(
            masked_str.contains(&expected_email_mask),
            "Email not masked correctly. Expected: {expected_email_mask}",
        );
        assert!(
            masked_str.contains(&expected_card_mask),
            "Card number not masked correctly. Expected: {expected_card_mask}",
        );
        assert!(
            masked_str.contains(&expected_tag1_mask),
            "Tag not masked correctly. Expected: {expected_tag1_mask}",
        );
        assert!(
            masked_str.contains(&expected_short_mask),
            "Short string not masked correctly. Expected: {expected_short_mask}",
        );
        assert!(
            masked_str.contains(&expected_age_mask),
            "Age not masked correctly. Expected: {expected_age_mask}",
        );
        assert!(
            masked_str.contains(&expected_cvv_mask),
            "CVV not masked correctly. Expected: {expected_cvv_mask}",
        );
        assert!(
            masked_str.contains(expected_verified_mask),
            "Boolean not masked correctly. Expected: {expected_verified_mask}",
        );

        // Check structure preservation
        assert!(
            masked_str.contains("\"user\""),
            "Structure not preserved - missing user object"
        );
        assert!(
            masked_str.contains("\"card\""),
            "Structure not preserved - missing card object"
        );
        assert!(
            masked_str.contains("\"tags\""),
            "Structure not preserved - missing tags array"
        );
        assert!(
            masked_str.contains("\"null_value\":null"),
            "Null value not preserved correctly"
        );

        // Additional security checks to ensure no original values are exposed
        assert!(
            !masked_str.contains(name),
            "Original name value exposed in masked output"
        );
        assert!(
            !masked_str.contains(email),
            "Original email value exposed in masked output"
        );
        assert!(
            !masked_str.contains(card_number),
            "Original card number exposed in masked output"
        );
        assert!(
            !masked_str.contains(&age.to_string()),
            "Original age value exposed in masked output"
        );
        assert!(
            !masked_str.contains(&cvv.to_string()),
            "Original CVV value exposed in masked output"
        );
        assert!(
            !masked_str.contains(tag1),
            "Original tag value exposed in masked output"
        );
        assert!(
            !masked_str.contains("hi"),
            "Original short string value exposed in masked output"
        );
    }
}
crates/masking/src/secret.rs
masking::src::secret
3,362
true
// File: crates/injector/src/consts.rs // Module: injector::src::consts /// Header name for external vault metadata pub const EXTERNAL_VAULT_METADATA_HEADER: &str = "x-external-vault-metadata";
crates/injector/src/consts.rs
injector::src::consts
50
true
// File: crates/injector/src/injector.rs // Module: injector::src::injector pub mod core { use std::collections::HashMap; use async_trait::async_trait; use common_utils::request::{Method, RequestBuilder, RequestContent}; use error_stack::{self, ResultExt}; use masking::{self, ExposeInterface}; use nom::{ bytes::complete::{tag, take_while1}, character::complete::{char, multispace0}, sequence::{delimited, preceded, terminated}, IResult, }; use router_env::{instrument, logger, tracing}; use serde_json::Value; use thiserror::Error; use crate as injector_types; use crate::{ types::{ContentType, InjectorRequest, InjectorResponse, IntoInjectorResponse}, vault_metadata::VaultMetadataExtractorExt, }; impl From<injector_types::HttpMethod> for Method { fn from(method: injector_types::HttpMethod) -> Self { match method { injector_types::HttpMethod::GET => Self::Get, injector_types::HttpMethod::POST => Self::Post, injector_types::HttpMethod::PUT => Self::Put, injector_types::HttpMethod::PATCH => Self::Patch, injector_types::HttpMethod::DELETE => Self::Delete, } } } /// Proxy configuration structure (copied from hyperswitch_interfaces to make injector standalone) #[derive(Debug, serde::Deserialize, Clone)] #[serde(default)] pub struct Proxy { /// The URL of the HTTP proxy server. pub http_url: Option<String>, /// The URL of the HTTPS proxy server. pub https_url: Option<String>, /// The timeout duration (in seconds) for idle connections in the proxy pool. pub idle_pool_connection_timeout: Option<u64>, /// A comma-separated list of hosts that should bypass the proxy. 
pub bypass_proxy_hosts: Option<String>, } impl Default for Proxy { fn default() -> Self { Self { http_url: Default::default(), https_url: Default::default(), idle_pool_connection_timeout: Some(90), bypass_proxy_hosts: Default::default(), } } } /// Create HTTP client using the proven external_services create_client logic fn create_client( proxy_config: &Proxy, client_certificate: Option<masking::Secret<String>>, client_certificate_key: Option<masking::Secret<String>>, ca_certificate: Option<masking::Secret<String>>, ) -> error_stack::Result<reqwest::Client, InjectorError> { logger::debug!( has_client_cert = client_certificate.is_some(), has_client_key = client_certificate_key.is_some(), has_ca_cert = ca_certificate.is_some(), "Creating HTTP client" ); // Case 1: Mutual TLS with client certificate and key if let (Some(encoded_certificate), Some(encoded_certificate_key)) = (client_certificate.clone(), client_certificate_key.clone()) { if ca_certificate.is_some() { logger::warn!("All of client certificate, client key, and CA certificate are provided. 
CA certificate will be ignored in mutual TLS setup."); } let client_builder = get_client_builder(proxy_config)?; let identity = create_identity_from_certificate_and_key( encoded_certificate.clone(), encoded_certificate_key, )?; let certificate_list = create_certificate(encoded_certificate)?; let client_builder = certificate_list .into_iter() .fold(client_builder, |client_builder, certificate| { client_builder.add_root_certificate(certificate) }); return client_builder .identity(identity) .use_rustls_tls() .build() .change_context(InjectorError::HttpRequestFailed) .inspect_err(|e| { logger::error!( "Failed to construct client with certificate and certificate key: {:?}", e ); }); } // Case 2: Use provided CA certificate for server authentication only (one-way TLS) if let Some(ca_pem) = ca_certificate { let pem = ca_pem.expose().replace("\\r\\n", "\n"); // Fix escaped newlines let cert = reqwest::Certificate::from_pem(pem.as_bytes()) .change_context(InjectorError::HttpRequestFailed) .inspect_err(|e| { logger::error!("Failed to parse CA certificate PEM block: {:?}", e) })?; let client_builder = get_client_builder(proxy_config)?.add_root_certificate(cert); return client_builder .use_rustls_tls() .build() .change_context(InjectorError::HttpRequestFailed) .inspect_err(|e| { logger::error!("Failed to construct client with CA certificate: {:?}", e); }); } // Case 3: Default client (no certs) get_base_client(proxy_config) } /// Helper functions from external_services fn get_client_builder( proxy_config: &Proxy, ) -> error_stack::Result<reqwest::ClientBuilder, InjectorError> { let mut client_builder = reqwest::Client::builder(); // Configure proxy if provided if let Some(proxy_url) = &proxy_config.https_url { let proxy = reqwest::Proxy::https(proxy_url) .change_context(InjectorError::HttpRequestFailed) .inspect_err(|e| { logger::error!("Failed to configure HTTPS proxy: {:?}", e); })?; client_builder = client_builder.proxy(proxy); } if let Some(proxy_url) = 
&proxy_config.http_url { let proxy = reqwest::Proxy::http(proxy_url) .change_context(InjectorError::HttpRequestFailed) .inspect_err(|e| { logger::error!("Failed to configure HTTP proxy: {:?}", e); })?; client_builder = client_builder.proxy(proxy); } Ok(client_builder) } fn get_base_client( proxy_config: &Proxy, ) -> error_stack::Result<reqwest::Client, InjectorError> { let client_builder = get_client_builder(proxy_config)?; client_builder .build() .change_context(InjectorError::HttpRequestFailed) .inspect_err(|e| { logger::error!("Failed to build default HTTP client: {:?}", e); }) } fn create_identity_from_certificate_and_key( encoded_certificate: masking::Secret<String>, encoded_certificate_key: masking::Secret<String>, ) -> error_stack::Result<reqwest::Identity, InjectorError> { let cert_str = encoded_certificate.expose(); let key_str = encoded_certificate_key.expose(); let combined_pem = format!("{cert_str}\n{key_str}"); reqwest::Identity::from_pem(combined_pem.as_bytes()) .change_context(InjectorError::HttpRequestFailed) .inspect_err(|e| { logger::error!( "Failed to create identity from certificate and key: {:?}", e ); }) } fn create_certificate( encoded_certificate: masking::Secret<String>, ) -> error_stack::Result<Vec<reqwest::Certificate>, InjectorError> { let cert_str = encoded_certificate.expose(); let cert = reqwest::Certificate::from_pem(cert_str.as_bytes()) .change_context(InjectorError::HttpRequestFailed) .inspect_err(|e| { logger::error!("Failed to create certificate from PEM: {:?}", e); })?; Ok(vec![cert]) } /// Generic function to log HTTP request errors with detailed error type information fn log_and_convert_http_error(e: reqwest::Error, context: &str) -> InjectorError { let error_msg = e.to_string(); logger::error!("HTTP request failed in {}: {}", context, error_msg); // Log specific error types for debugging if e.is_timeout() { logger::error!("Request timed out in {}", context); } if e.is_connect() { logger::error!("Connection error occurred in 
{}", context); } if e.is_request() { logger::error!("Request construction error in {}", context); } if e.is_decode() { logger::error!("Response decoding error in {}", context); } InjectorError::HttpRequestFailed } /// Apply certificate configuration to request builder and return built request fn build_request_with_certificates( mut request_builder: RequestBuilder, config: &injector_types::ConnectionConfig, ) -> common_utils::request::Request { // Add certificate configuration if provided if let Some(cert_content) = &config.client_cert { request_builder = request_builder.add_certificate(Some(cert_content.clone())); } if let Some(key_content) = &config.client_key { request_builder = request_builder.add_certificate_key(Some(key_content.clone())); } if let Some(ca_content) = &config.ca_cert { request_builder = request_builder.add_ca_certificate_pem(Some(ca_content.clone())); } request_builder.build() } /// Simplified HTTP client for injector using the proven external_services create_client logic #[instrument(skip_all)] pub async fn send_request( client_proxy: &Proxy, request: common_utils::request::Request, _option_timeout_secs: Option<u64>, ) -> error_stack::Result<reqwest::Response, InjectorError> { logger::info!( has_client_cert = request.certificate.is_some(), has_client_key = request.certificate_key.is_some(), has_ca_cert = request.ca_certificate.is_some(), "Making HTTP request using standalone injector HTTP client with configuration" ); // Create reqwest client using the proven create_client function let client = create_client( client_proxy, request.certificate.clone(), request.certificate_key.clone(), request.ca_certificate.clone(), )?; // Build the request let method = match request.method { Method::Get => reqwest::Method::GET, Method::Post => reqwest::Method::POST, Method::Put => reqwest::Method::PUT, Method::Patch => reqwest::Method::PATCH, Method::Delete => reqwest::Method::DELETE, }; let mut req_builder = client.request(method, &request.url); // Add headers 
for (key, value) in &request.headers { let header_value = match value { masking::Maskable::Masked(secret) => secret.clone().expose(), masking::Maskable::Normal(normal) => normal.clone(), }; req_builder = req_builder.header(key, header_value); } // Add body if present if let Some(body) = request.body { match body { RequestContent::Json(payload) => { req_builder = req_builder.json(&payload); } RequestContent::FormUrlEncoded(payload) => { req_builder = req_builder.form(&payload); } RequestContent::RawBytes(payload) => { req_builder = req_builder.body(payload); } _ => { logger::warn!("Unsupported request content type, using raw bytes"); } } } // Send the request let response = req_builder .send() .await .map_err(|e| log_and_convert_http_error(e, "send_request"))?; logger::info!( status_code = response.status().as_u16(), "HTTP request completed successfully" ); Ok(response) } #[derive(Error, Debug)] pub enum InjectorError { #[error("Token replacement failed: {0}")] TokenReplacementFailed(String), #[error("HTTP request failed")] HttpRequestFailed, #[error("Serialization error: {0}")] SerializationError(String), #[error("Invalid template: {0}")] InvalidTemplate(String), } #[instrument(skip_all)] pub async fn injector_core( request: InjectorRequest, ) -> error_stack::Result<InjectorResponse, InjectorError> { logger::info!("Starting injector_core processing"); let injector = Injector::new(); injector.injector_core(request).await } /// Represents a token reference found in a template string #[derive(Debug)] struct TokenReference { /// The field name to be replaced (without the {{$}} wrapper) pub field: String, } /// Parses a single token reference from a string using nom parser combinators /// /// Expects tokens in the format `{{$field_name}}` where field_name contains /// only alphanumeric characters and underscores. 
fn parse_token(input: &str) -> IResult<&str, TokenReference> { let (input, field) = delimited( tag("{{"), preceded( multispace0, preceded( char('$'), terminated( take_while1(|c: char| c.is_alphanumeric() || c == '_'), multispace0, ), ), ), tag("}}"), )(input)?; Ok(( input, TokenReference { field: field.to_string(), }, )) } /// Finds all token references in a string using nom parser /// /// Scans through the entire input string and extracts all valid token references. /// Returns a vector of TokenReference structs containing the field names. fn find_all_tokens(input: &str) -> Vec<TokenReference> { let mut tokens = Vec::new(); let mut current_input = input; while !current_input.is_empty() { if let Ok((remaining, token_ref)) = parse_token(current_input) { tokens.push(token_ref); current_input = remaining; } else { // Move forward one character if no token found if let Some((_, rest)) = current_input.split_at_checked(1) { current_input = rest; } else { break; } } } tokens } /// Recursively searches for a field in vault data JSON structure /// /// Performs a depth-first search through the JSON object hierarchy to find /// a field with the specified name. Returns the first matching value found. 
/// Depth-first lookup of `field_name` in a JSON object tree: the top-level key
/// wins, otherwise the first match found while descending nested objects.
/// Values inside arrays are NOT searched (only `Value::Object` children are).
fn find_field_recursively_in_vault_data(
    obj: &serde_json::Map<String, Value>,
    field_name: &str,
) -> Option<Value> {
    obj.get(field_name).cloned().or_else(|| {
        obj.values()
            .filter_map(|val| {
                if let Value::Object(inner_obj) = val {
                    find_field_recursively_in_vault_data(inner_obj, field_name)
                } else {
                    None
                }
            })
            .next()
    })
}

/// Internal seam for the injector pipeline; implemented by [`Injector`].
#[async_trait]
trait TokenInjector {
    async fn injector_core(
        &self,
        request: InjectorRequest,
    ) -> error_stack::Result<InjectorResponse, InjectorError>;
}

/// Stateless unit struct carrying the injection pipeline methods.
pub struct Injector;

impl Injector {
    pub fn new() -> Self {
        Self
    }

    /// Processes a string template and replaces token references with vault data
    ///
    /// Non-string vault values are serialized to their JSON text form before
    /// substitution. NOTE(review): replacement rebuilds the exact pattern
    /// `{{$field}}`, so tokens written with inner whitespace (which
    /// `parse_token` accepts) will be extracted but not replaced — verify
    /// whether whitespace-padded tokens are expected in templates.
    #[instrument(skip_all)]
    fn interpolate_string_template_with_vault_data(
        &self,
        template: String,
        vault_data: &Value,
        vault_connector: &injector_types::VaultConnectors,
    ) -> error_stack::Result<String, InjectorError> {
        // Find all tokens using nom parser
        let tokens = find_all_tokens(&template);
        let mut result = template;
        for token_ref in tokens.into_iter() {
            let extracted_field_value = self.extract_field_from_vault_data(
                vault_data,
                &token_ref.field,
                vault_connector,
            )?;
            let token_str = match extracted_field_value {
                Value::String(token_value) => token_value,
                _ => serde_json::to_string(&extracted_field_value).unwrap_or_default(),
            };
            // Replace the token in the result string
            let token_pattern = format!("{{{{${}}}}}", token_ref.field);
            result = result.replace(&token_pattern, &token_str);
        }
        Ok(result)
    }

    /// Walks an arbitrary JSON value, interpolating every string leaf through
    /// `interpolate_string_template_with_vault_data`. Objects are rebuilt
    /// recursively; all other variants pass through untouched.
    #[instrument(skip_all)]
    fn interpolate_token_references_with_vault_data(
        &self,
        value: Value,
        vault_data: &Value,
        vault_connector: &injector_types::VaultConnectors,
    ) -> error_stack::Result<Value, InjectorError> {
        match value {
            Value::Object(obj) => {
                let new_obj = obj
                    .into_iter()
                    .map(|(key, val)| {
                        self.interpolate_token_references_with_vault_data(
                            val,
                            vault_data,
                            vault_connector,
                        )
                        .map(|processed| (key, processed))
                    })
                    .collect::<error_stack::Result<serde_json::Map<_, _>, InjectorError>>()?;
                Ok(Value::Object(new_obj))
            }
            Value::String(s) => {
                let processed_string = self.interpolate_string_template_with_vault_data(
                    s,
                    vault_data,
                    vault_connector,
                )?;
                Ok(Value::String(processed_string))
            }
            // Numbers, booleans, arrays and null are returned unchanged.
            // NOTE(review): array elements are not interpolated — confirm
            // whether templates can contain arrays of strings.
            _ => Ok(value),
        }
    }

    /// Looks up `field_name` in the vault payload and applies any
    /// vault-specific transformation before returning it.
    ///
    /// Errors with `TokenReplacementFailed` when the field is missing or the
    /// vault data is not a JSON object.
    #[instrument(skip_all)]
    fn extract_field_from_vault_data(
        &self,
        vault_data: &Value,
        field_name: &str,
        vault_connector: &injector_types::VaultConnectors,
    ) -> error_stack::Result<Value, InjectorError> {
        logger::debug!(
            "Extracting field '{}' from vault data using vault type {:?}",
            field_name,
            vault_connector
        );
        match vault_data {
            Value::Object(obj) => {
                let raw_value = find_field_recursively_in_vault_data(obj, field_name)
                    .ok_or_else(|| {
                        error_stack::Report::new(InjectorError::TokenReplacementFailed(
                            format!("Field '{field_name}' not found"),
                        ))
                    })?;
                // Apply vault-specific token transformation
                self.apply_vault_specific_transformation(raw_value, vault_connector, field_name)
            }
            _ => Err(error_stack::Report::new(
                InjectorError::TokenReplacementFailed(
                    "Vault data is not a valid JSON object".to_string(),
                ),
            )),
        }
    }

    /// Vault-specific post-processing of an extracted value. VGS uses direct
    /// token replacement, so the value passes through unchanged.
    #[instrument(skip_all)]
    fn apply_vault_specific_transformation(
        &self,
        extracted_field_value: Value,
        vault_connector: &injector_types::VaultConnectors,
        field_name: &str,
    ) -> error_stack::Result<Value, InjectorError> {
        match vault_connector {
            injector_types::VaultConnectors::VGS => {
                logger::debug!(
                    "VGS vault: Using direct token replacement for field '{}'",
                    field_name
                );
                Ok(extracted_field_value)
            }
        }
    }

    /// Assembles and sends the outbound connector request.
    ///
    /// Steps: validate/parse the endpoint URL, convert headers, pick the body
    /// encoding from `content_type`, pull proxy/CA settings out of the vault
    /// metadata header when present (falling back to `backup_proxy_url`),
    /// attach certificates, send via the local `send_request` client and
    /// convert the response.
    #[instrument(skip_all)]
    async fn make_http_request(
        &self,
        config: &injector_types::ConnectionConfig,
        payload: &str,
        content_type: &ContentType,
    ) -> error_stack::Result<InjectorResponse, InjectorError> {
        logger::info!(
            method = ?config.http_method,
            endpoint = %config.endpoint,
            content_type = ?content_type,
            payload_length = payload.len(),
            headers_count = config.headers.len(),
            "Making HTTP request to connector"
        );
        // Validate inputs first
        if config.endpoint.is_empty() {
            logger::error!("Endpoint URL is empty");
            Err(error_stack::Report::new(InjectorError::InvalidTemplate(
                "Endpoint URL cannot be empty".to_string(),
            )))?;
        }
        // Parse and validate the complete endpoint URL
        let url = reqwest::Url::parse(&config.endpoint).map_err(|e| {
            logger::error!("Failed to parse endpoint URL: {}", e);
            error_stack::Report::new(InjectorError::InvalidTemplate(format!(
                "Invalid endpoint URL: {e}"
            )))
        })?;
        logger::debug!("Constructed URL: {}", url);
        // Convert headers to common_utils Headers format safely
        let headers: Vec<(String, masking::Maskable<String>)> = config
            .headers
            .clone()
            .into_iter()
            .map(|(k, v)| (k, masking::Maskable::new_normal(v.expose().clone())))
            .collect();
        // Determine method and request content
        let method = Method::from(config.http_method);
        // Determine request content based on content type with error handling
        let request_content = match content_type {
            ContentType::ApplicationJson => {
                // Try to parse as JSON, fallback to raw string
                match serde_json::from_str::<Value>(payload) {
                    Ok(json) => Some(RequestContent::Json(Box::new(json))),
                    Err(e) => {
                        logger::debug!(
                            "Failed to parse payload as JSON: {}, falling back to raw bytes",
                            e
                        );
                        Some(RequestContent::RawBytes(payload.as_bytes().to_vec()))
                    }
                }
            }
            ContentType::ApplicationXWwwFormUrlencoded => {
                // Parse form data safely
                let form_data: HashMap<String, String> =
                    url::form_urlencoded::parse(payload.as_bytes())
                        .into_owned()
                        .collect();
                Some(RequestContent::FormUrlEncoded(Box::new(form_data)))
            }
            ContentType::ApplicationXml | ContentType::TextXml => {
                Some(RequestContent::RawBytes(payload.as_bytes().to_vec()))
            }
            ContentType::TextPlain => {
                Some(RequestContent::RawBytes(payload.as_bytes().to_vec()))
            }
        };
        // Extract vault metadata directly from headers using existing functions.
        // A throwaway ConnectionConfig receives the decoded proxy URL and CA
        // cert so the caller's config is not mutated.
        let (vault_proxy_url, vault_ca_cert) = if config
            .headers
            .contains_key(crate::consts::EXTERNAL_VAULT_METADATA_HEADER)
        {
            let mut temp_config = injector_types::ConnectionConfig::new(
                config.endpoint.clone(),
                config.http_method,
            );
            // Use existing vault metadata extraction with fallback
            if temp_config.extract_and_apply_vault_metadata_with_fallback(&config.headers) {
                (temp_config.proxy_url, temp_config.ca_cert)
            } else {
                (None, None)
            }
        } else {
            (None, None)
        };
        // Build request safely with certificate configuration
        let mut request_builder = RequestBuilder::new()
            .method(method)
            .url(url.as_str())
            .headers(headers);
        if let Some(content) = request_content {
            request_builder = request_builder.set_body(content);
        }
        // Create final config with vault CA certificate if available
        let mut final_config = config.clone();
        let has_vault_ca_cert = vault_ca_cert.is_some();
        if has_vault_ca_cert {
            final_config.ca_cert = vault_ca_cert;
        }
        // Log certificate configuration (but not the actual content)
        logger::info!(
            has_client_cert = final_config.client_cert.is_some(),
            has_client_key = final_config.client_key.is_some(),
            has_ca_cert = final_config.ca_cert.is_some(),
            has_vault_ca_cert = has_vault_ca_cert,
            insecure = final_config.insecure.unwrap_or(false),
            cert_format = ?final_config.cert_format,
            "Certificate configuration applied"
        );
        // Build request with certificate configuration applied
        let request = build_request_with_certificates(request_builder, &final_config);
        // Determine which proxy to use: vault metadata > backup > none
        let final_proxy_url = vault_proxy_url.or_else(|| config.backup_proxy_url.clone());
        let proxy = if let Some(proxy_url) = final_proxy_url {
            let proxy_url_str = proxy_url.expose();
            // Set proxy URL for both HTTP and HTTPS traffic
            Proxy {
                http_url: Some(proxy_url_str.clone()),
                https_url: Some(proxy_url_str),
                idle_pool_connection_timeout: Some(90),
                bypass_proxy_hosts: None,
            }
        } else {
            Proxy::default()
        };
        // Send request using local standalone http client
        let response = send_request(&proxy, request, None).await?;
        // Convert reqwest::Response to InjectorResponse using trait
        response
            .into_injector_response()
            .await
            .map_err(|e| error_stack::Report::new(e))
    }
}

impl Default for Injector {
    fn default() -> Self {
        Self::new()
    }
}

#[async_trait]
impl TokenInjector for Injector {
    /// Full pipeline: expose vault data, interpolate the template, infer the
    /// content type from the `Content-Type` header (defaulting to
    /// form-urlencoded), send the request and log timing/size metrics.
    #[instrument(skip_all)]
    async fn injector_core(
        &self,
        request: InjectorRequest,
    ) -> error_stack::Result<InjectorResponse, InjectorError> {
        let start_time = std::time::Instant::now();
        // Extract token data from SecretSerdeValue for vault data lookup
        let vault_data = request.token_data.specific_token_data.expose().clone();
        logger::debug!(
            template_length = request.connector_payload.template.len(),
            vault_connector = ?request.token_data.vault_connector,
            "Processing token injection request"
        );
        // Process template string directly with vault-specific logic
        let processed_payload = self.interpolate_string_template_with_vault_data(
            request.connector_payload.template,
            &vault_data,
            &request.token_data.vault_connector,
        )?;
        logger::debug!(
            processed_payload_length = processed_payload.len(),
            "Token replacement completed"
        );
        // Determine content type from headers or default to form-urlencoded.
        // NOTE(review): lookup is case-sensitive ("Content-Type") and exact —
        // values with charset suffixes (e.g. "application/json; charset=utf-8")
        // fall back to the default; confirm callers normalize the header.
        let content_type = request
            .connection_config
            .headers
            .get("Content-Type")
            .and_then(|ct| match ct.clone().expose().as_str() {
                "application/json" => Some(ContentType::ApplicationJson),
                "application/x-www-form-urlencoded" => {
                    Some(ContentType::ApplicationXWwwFormUrlencoded)
                }
                "application/xml" => Some(ContentType::ApplicationXml),
                "text/xml" => Some(ContentType::TextXml),
                "text/plain" => Some(ContentType::TextPlain),
                _ => None,
            })
            .unwrap_or(ContentType::ApplicationXWwwFormUrlencoded);
        // Make HTTP request to connector and return enhanced response
        let response = self
            .make_http_request(
                &request.connection_config,
                &processed_payload,
                &content_type,
            )
            .await?;
        let elapsed = start_time.elapsed();
        logger::info!(
            duration_ms = elapsed.as_millis(),
            status_code = response.status_code,
            response_size = serde_json::to_string(&response.response)
                .map(|s| s.len())
                .unwrap_or(0),
            headers_count = response.headers.as_ref().map(|h| h.len()).unwrap_or(0),
            "Token injection completed successfully"
        );
        Ok(response)
    }
}
}

// Re-export all items
pub use core::*;

#[cfg(test)]
#[allow(clippy::unwrap_used)] mod tests { use std::collections::HashMap; use router_env::logger; use crate::*; #[tokio::test] #[ignore = "Integration test that requires network access"] async fn test_injector_core_integration() { // Create test request let mut headers = HashMap::new(); headers.insert( "Content-Type".to_string(), masking::Secret::new("application/x-www-form-urlencoded".to_string()), ); headers.insert( "Authorization".to_string(), masking::Secret::new("Bearer Test".to_string()), ); let specific_token_data = common_utils::pii::SecretSerdeValue::new(serde_json::json!({ "card_number": "TEST_123", "cvv": "123", "exp_month": "12", "exp_year": "25" })); let request = InjectorRequest { connector_payload: ConnectorPayload { template: "card_number={{$card_number}}&cvv={{$cvv}}&expiry={{$exp_month}}/{{$exp_year}}&amount=50&currency=USD&transaction_type=purchase".to_string(), }, token_data: TokenData { vault_connector: VaultConnectors::VGS, specific_token_data, }, connection_config: ConnectionConfig { endpoint: "https://api.stripe.com/v1/payment_intents".to_string(), http_method: HttpMethod::POST, headers, proxy_url: None, // Remove proxy that was causing issues backup_proxy_url: None, // Certificate fields (None for basic test) client_cert: None, client_key: None, ca_cert: None, // Empty CA cert for testing insecure: None, cert_password: None, cert_format: None, max_response_size: None, // Use default }, }; // Test the core function - this will make a real HTTP request to httpbin.org let result = injector_core(request).await; // The request should succeed (httpbin.org should be accessible) if let Err(ref e) = result { logger::info!("Error: {e:?}"); } assert!( result.is_ok(), "injector_core should succeed with valid request: {result:?}" ); let response = result.unwrap(); // Print the actual response for demonstration logger::info!("=== HTTP RESPONSE FROM HTTPBIN.ORG ==="); logger::info!( "{}", serde_json::to_string_pretty(&response).unwrap_or_default() ); 
logger::info!("======================================="); // Response should have a proper status code and response data assert!( response.status_code >= 200 && response.status_code < 300, "Response should have successful status code: {}", response.status_code ); assert!( response.response.is_object() || response.response.is_string(), "Response data should be JSON object or string" ); } #[tokio::test] async fn test_certificate_configuration() { let mut headers = HashMap::new(); headers.insert( "Content-Type".to_string(), masking::Secret::new("application/x-www-form-urlencoded".to_string()), ); headers.insert( "Authorization".to_string(), masking::Secret::new("Bearer TEST".to_string()), ); let specific_token_data = common_utils::pii::SecretSerdeValue::new(serde_json::json!({ "card_number": "4242429789164242", "cvv": "123", "exp_month": "12", "exp_year": "25" })); // Test with insecure flag (skip certificate verification) let request = InjectorRequest { connector_payload: ConnectorPayload { template: "card_number={{$card_number}}&cvv={{$cvv}}&expiry={{$exp_month}}/{{$exp_year}}&amount=50&currency=USD&transaction_type=purchase".to_string(), }, token_data: TokenData { vault_connector: VaultConnectors::VGS, specific_token_data, }, connection_config: ConnectionConfig { endpoint: "https://httpbin.org/post".to_string(), http_method: HttpMethod::POST, headers, proxy_url: None, // Remove proxy to make test work reliably backup_proxy_url: None, // Test without certificates for basic functionality client_cert: None, client_key: None, ca_cert: None, insecure: None, cert_password: None, cert_format: None, max_response_size: None, }, }; let result = injector_core(request).await; // Should succeed even with insecure flag assert!( result.is_ok(), "Certificate test should succeed: {result:?}" ); let response = result.unwrap(); // Print the actual response for demonstration logger::info!("=== CERTIFICATE TEST RESPONSE ==="); logger::info!( "{}", 
serde_json::to_string_pretty(&response).unwrap_or_default() ); logger::info!("================================"); // Should succeed with proper status code assert!( response.status_code >= 200 && response.status_code < 300, "Certificate test should have successful status code: {}", response.status_code ); // Verify the tokens were replaced correctly in the form data // httpbin.org returns the request data in the 'form' field let response_str = serde_json::to_string(&response.response).unwrap_or_default(); // Check that our test tokens were replaced with the actual values from vault data let tokens_replaced = response_str.contains("4242429789164242") && // card_number response_str.contains("123") && // cvv response_str.contains("12/25"); // expiry assert!( tokens_replaced, "Response should contain replaced tokens (card_number, cvv, expiry): {}", serde_json::to_string_pretty(&response.response).unwrap_or_default() ); } }
crates/injector/src/injector.rs
injector::src::injector
6,955
true
// File: crates/injector/src/types.rs
// Module: injector::src::types

pub mod models {
    use std::collections::HashMap;

    use async_trait::async_trait;
    use common_utils::pii::SecretSerdeValue;
    use masking::Secret;
    use router_env::logger;
    use serde::{Deserialize, Serialize};

    // Enums for the injector - making it standalone
    /// Content types supported by the injector for HTTP requests
    #[derive(Clone, Copy, Debug, Eq, PartialEq, serde::Serialize, serde::Deserialize)]
    #[serde(rename_all = "snake_case")]
    pub enum ContentType {
        ApplicationJson,
        ApplicationXWwwFormUrlencoded,
        ApplicationXml,
        TextXml,
        TextPlain,
    }

    /// HTTP methods supported by the injector
    #[derive(Clone, Copy, Debug, Eq, PartialEq, serde::Serialize, serde::Deserialize)]
    #[serde(rename_all = "UPPERCASE")]
    pub enum HttpMethod {
        GET,
        POST,
        PUT,
        PATCH,
        DELETE,
    }

    /// Vault connectors supported by the injector for token management
    ///
    /// Currently supports VGS as the primary vault connector. While only VGS is
    /// implemented today, this enum structure is maintained for future extensibility
    /// to support additional vault providers (e.g., Basis Theory, Skyflow, etc.)
    /// without breaking API compatibility.
    #[derive(Clone, Copy, Debug, Eq, PartialEq, serde::Serialize, serde::Deserialize)]
    #[serde(rename_all = "UPPERCASE")]
    pub enum VaultConnectors {
        /// VGS (Very Good Security) vault connector
        VGS,
    }

    /// Token data containing vault-specific information for token replacement
    #[derive(Clone, Debug, Deserialize, Serialize)]
    pub struct TokenData {
        /// The specific token data retrieved from the vault
        pub specific_token_data: SecretSerdeValue,
        /// The type of vault connector being used (e.g., VGS)
        pub vault_connector: VaultConnectors,
    }

    /// Connector payload containing the template to be processed
    #[derive(Clone, Debug, Deserialize, Serialize)]
    pub struct ConnectorPayload {
        /// Template string containing token references in the format {{$field_name}}
        pub template: String,
    }

    /// Configuration for HTTP connection to the external connector
    #[derive(Clone, Debug, Deserialize, Serialize)]
    pub struct ConnectionConfig {
        /// Complete URL endpoint for the connector (e.g., "https://api.stripe.com/v1/payment_intents")
        pub endpoint: String,
        /// HTTP method to use for the request
        pub http_method: HttpMethod,
        /// HTTP headers to include in the request
        pub headers: HashMap<String, Secret<String>>,
        /// Optional proxy URL for routing the request through a proxy server
        pub proxy_url: Option<Secret<String>>,
        /// Optional backup proxy URL to use if vault metadata doesn't provide one
        #[serde(default)]
        pub backup_proxy_url: Option<Secret<String>>,
        /// Optional client certificate for mutual TLS authentication
        pub client_cert: Option<Secret<String>>,
        /// Optional client private key for mutual TLS authentication
        pub client_key: Option<Secret<String>>,
        /// Optional CA certificate for verifying the server certificate
        pub ca_cert: Option<Secret<String>>,
        /// Whether to skip certificate verification (for testing only)
        pub insecure: Option<bool>,
        /// Optional password for encrypted client certificate
        pub cert_password: Option<Secret<String>>,
        /// Format of the client certificate (e.g., "PEM")
        pub cert_format: Option<String>,
        /// Maximum response size in bytes (defaults to 10MB if not specified)
        pub max_response_size: Option<usize>,
    }

    /// Complete request structure for the injector service
    #[derive(Clone, Debug, Deserialize, Serialize)]
    pub struct InjectorRequest {
        /// Token data from the vault
        pub token_data: TokenData,
        /// Payload template to process
        pub connector_payload: ConnectorPayload,
        /// HTTP connection configuration
        pub connection_config: ConnectionConfig,
    }

    /// Response from the injector including status code and response data
    #[derive(Clone, Debug, Deserialize, Serialize)]
    pub struct InjectorResponse {
        /// HTTP status code from the connector response
        pub status_code: u16,
        /// Response headers from the connector (optional)
        pub headers: Option<HashMap<String, String>>,
        /// Response body from the connector
        pub response: serde_json::Value,
    }

    /// Trait for converting HTTP responses to InjectorResponse
    #[async_trait]
    pub trait IntoInjectorResponse {
        /// Convert to InjectorResponse with proper error handling
        async fn into_injector_response(
            self,
        ) -> Result<InjectorResponse, crate::injector::core::InjectorError>;
    }

    #[async_trait]
    impl IntoInjectorResponse for reqwest::Response {
        /// Converts a `reqwest::Response` into an [`InjectorResponse`]:
        /// captures the status, string-convertible headers (None if empty),
        /// and the body parsed as JSON when possible, else kept as a string.
        async fn into_injector_response(
            self,
        ) -> Result<InjectorResponse, crate::injector::core::InjectorError> {
            let status_code = self.status().as_u16();
            logger::info!(
                status_code = status_code,
                "Converting reqwest::Response to InjectorResponse"
            );
            // Extract headers; values that are not valid UTF-8 are dropped.
            let headers: Option<HashMap<String, String>> = {
                let header_map: HashMap<String, String> = self
                    .headers()
                    .iter()
                    .filter_map(|(name, value)| {
                        value
                            .to_str()
                            .ok()
                            .map(|v| (name.to_string(), v.to_string()))
                    })
                    .collect();
                if header_map.is_empty() {
                    None
                } else {
                    Some(header_map)
                }
            };
            let response_text = self
                .text()
                .await
                .map_err(|_| crate::injector::core::InjectorError::HttpRequestFailed)?;
            logger::debug!(
                response_length = response_text.len(),
                headers_count = headers.as_ref().map(|h| h.len()).unwrap_or(0),
                "Processing connector response"
            );
            // Prefer structured JSON; non-JSON bodies are wrapped as a string.
            let response_data = match serde_json::from_str::<serde_json::Value>(&response_text) {
                Ok(json) => json,
                Err(_e) => serde_json::Value::String(response_text),
            };
            Ok(InjectorResponse {
                status_code,
                headers,
                response: response_data,
            })
        }
    }

    impl InjectorRequest {
        /// Creates a new InjectorRequest
        ///
        /// NOTE(review): `proxy_url` is stored as `backup_proxy_url` (vault
        /// metadata takes precedence later), and the vault metadata header is
        /// deliberately left inside `headers` for `make_http_request` to
        /// consume — it is NOT stripped here. The `.or(...)` chains below act
        /// on a freshly constructed config whose cert fields are all `None`,
        /// so they are effectively plain assignments.
        #[allow(clippy::too_many_arguments)]
        pub fn new(
            endpoint: String,
            http_method: HttpMethod,
            template: String,
            token_data: TokenData,
            headers: Option<HashMap<String, Secret<String>>>,
            proxy_url: Option<Secret<String>>,
            client_cert: Option<Secret<String>>,
            client_key: Option<Secret<String>>,
            ca_cert: Option<Secret<String>>,
        ) -> Self {
            let headers = headers.unwrap_or_default();
            let mut connection_config = ConnectionConfig::new(endpoint, http_method);
            // Keep vault metadata header for processing in make_http_request
            // Store backup proxy for make_http_request to use as fallback
            connection_config.backup_proxy_url = proxy_url;
            connection_config.client_cert = connection_config.client_cert.or(client_cert);
            connection_config.client_key = connection_config.client_key.or(client_key);
            connection_config.ca_cert = connection_config.ca_cert.or(ca_cert);
            connection_config.headers = headers;
            Self {
                token_data,
                connector_payload: ConnectorPayload { template },
                connection_config,
            }
        }
    }

    impl ConnectionConfig {
        /// Creates a new ConnectionConfig from basic parameters
        /// (all optional fields default to `None` / empty headers).
        pub fn new(endpoint: String, http_method: HttpMethod) -> Self {
            Self {
                endpoint,
                http_method,
                headers: HashMap::new(),
                proxy_url: None,
                backup_proxy_url: None,
                client_cert: None,
                client_key: None,
                ca_cert: None,
                insecure: None,
                cert_password: None,
                cert_format: None,
                max_response_size: None,
            }
        }
    }
}

pub use models::*;
crates/injector/src/types.rs
injector::src::types
1,704
true
// File: crates/injector/src/lib.rs // Module: injector::src::lib pub mod consts; pub mod injector; pub mod types; pub mod vault_metadata; // Re-export all functionality pub use consts::*; pub use injector::*; pub use types::*; pub use vault_metadata::*;
crates/injector/src/lib.rs
injector::src::lib
60
true
// File: crates/injector/src/vault_metadata.rs
// Module: injector::src::vault_metadata

//! Decodes the base64 `EXTERNAL_VAULT_METADATA_HEADER` into vault-specific
//! proxy/certificate settings and applies them to a [`ConnectionConfig`].

use std::collections::HashMap;

use base64::Engine;
use masking::{ExposeInterface, Secret};
use router_env::logger;
use url::Url;

use crate::{consts::EXTERNAL_VAULT_METADATA_HEADER, types::ConnectionConfig, VaultConnectors};

// Standard (padded) base64 alphabet, used for both header and certificate decoding.
const BASE64_ENGINE: base64::engine::GeneralPurpose = base64::engine::general_purpose::STANDARD;

/// Trait for different vault metadata processors
pub trait VaultMetadataProcessor: Send + Sync {
    /// Process vault metadata and return connection configuration updates
    fn process_metadata(
        &self,
        connection_config: &mut ConnectionConfig,
    ) -> Result<(), VaultMetadataError>;
    /// Get the vault connector type
    fn vault_connector(&self) -> VaultConnectors;
}

/// Comprehensive errors related to vault metadata processing
#[derive(Debug, thiserror::Error)]
pub enum VaultMetadataError {
    #[error("Failed to decode base64 vault metadata: {0}")]
    Base64DecodingFailed(String),
    #[error("Failed to parse vault metadata JSON: {0}")]
    JsonParsingFailed(String),
    #[error("Unsupported vault connector: {0}")]
    UnsupportedVaultConnector(String),
    #[error("Invalid URL in vault metadata: {0}")]
    InvalidUrl(String),
    #[error("Missing required field in vault metadata: {0}")]
    MissingRequiredField(String),
    #[error("Invalid certificate format: {0}")]
    InvalidCertificateFormat(String),
    #[error("Vault metadata header is empty or malformed")]
    EmptyOrMalformedHeader,
    #[error("URL validation failed for {field}: {url} - {reason}")]
    UrlValidationFailed {
        field: String,
        url: String,
        reason: String,
    },
    #[error("Certificate validation failed: {0}")]
    CertificateValidationFailed(String),
    #[error("Vault metadata processing failed for connector {connector}: {reason}")]
    ProcessingFailed { connector: String, reason: String },
}

impl VaultMetadataError {
    /// Create a URL validation error with context
    pub fn url_validation_failed(field: &str, url: &str, reason: impl Into<String>) -> Self {
        Self::UrlValidationFailed {
            field: field.to_string(),
            url: url.to_string(),
            reason: reason.into(),
        }
    }
}

/// External vault proxy metadata (moved from external_services)
// NOTE(review): `untagged` means deserialization picks the first variant whose
// shape matches; adding future variants with overlapping fields needs care.
#[derive(Debug, Clone, serde::Deserialize, serde::Serialize)]
#[serde(untagged)]
pub enum ExternalVaultProxyMetadata {
    /// VGS proxy data variant
    VgsMetadata(VgsMetadata),
}

/// VGS proxy data (moved from external_services)
#[derive(Debug, Clone, serde::Deserialize, serde::Serialize)]
pub struct VgsMetadata {
    /// External vault url
    pub proxy_url: Url,
    /// CA certificates to verify the vault server
    pub certificate: Secret<String>,
}

impl VaultMetadataProcessor for VgsMetadata {
    /// Copies the VGS proxy URL onto the config and installs the CA cert,
    /// base64-decoding it unless it already looks like a PEM block.
    fn process_metadata(
        &self,
        connection_config: &mut ConnectionConfig,
    ) -> Result<(), VaultMetadataError> {
        // Validate and set proxy URL from VGS metadata
        let proxy_url_str = self.proxy_url.as_str().to_string();
        connection_config.proxy_url = Some(Secret::new(proxy_url_str.clone()));
        // Validate and decode certificate from VGS metadata
        let cert_content = self.certificate.clone().expose();
        // Check if certificate is base64 encoded and decode if necessary.
        // A leading "-----BEGIN" is treated as already-PEM and used verbatim.
        let decoded_cert = if cert_content.starts_with("-----BEGIN") {
            cert_content
        } else {
            match BASE64_ENGINE.decode(&cert_content) {
                Ok(decoded_bytes) => String::from_utf8(decoded_bytes).map_err(|e| {
                    VaultMetadataError::CertificateValidationFailed(format!(
                        "Certificate is not valid UTF-8 after base64 decoding: {e}"
                    ))
                })?,
                Err(e) => {
                    logger::error!(
                        error = %e,
                        "Failed to decode base64 certificate"
                    );
                    return Err(VaultMetadataError::CertificateValidationFailed(format!(
                        "Failed to decode base64 certificate: {e}"
                    )));
                }
            }
        };
        // NOTE(review): the `.clone()` calls on `proxy_url_str`/`decoded_cert`
        // are redundant (values are not used afterwards) — clippy would flag
        // them, but they are harmless.
        connection_config.ca_cert = Some(Secret::new(decoded_cert.clone()));
        Ok(())
    }

    fn vault_connector(&self) -> VaultConnectors {
        VaultConnectors::VGS
    }
}

impl VaultMetadataProcessor for ExternalVaultProxyMetadata {
    // Dispatches to the concrete variant's processor.
    fn process_metadata(
        &self,
        connection_config: &mut ConnectionConfig,
    ) -> Result<(), VaultMetadataError> {
        match self {
            Self::VgsMetadata(vgs_metadata) => vgs_metadata.process_metadata(connection_config),
        }
    }

    fn vault_connector(&self) -> VaultConnectors {
        match self {
            Self::VgsMetadata(vgs_metadata) => vgs_metadata.vault_connector(),
        }
    }
}

/// Factory for creating vault metadata processors from different sources
pub struct VaultMetadataFactory;

impl VaultMetadataFactory {
    /// Create a vault metadata processor from base64 encoded header value with comprehensive validation
    ///
    /// Validation steps: non-empty header, valid base64, non-empty decode,
    /// size cap of 1 MB, then JSON deserialization into
    /// [`ExternalVaultProxyMetadata`].
    pub fn from_base64_header(
        base64_value: &str,
    ) -> Result<Box<dyn VaultMetadataProcessor>, VaultMetadataError> {
        // Validate input
        if base64_value.trim().is_empty() {
            return Err(VaultMetadataError::EmptyOrMalformedHeader);
        }
        // Decode base64 with detailed error context
        let decoded_bytes = BASE64_ENGINE.decode(base64_value.trim()).map_err(|e| {
            logger::error!(
                error = %e,
                "Failed to decode base64 vault metadata header"
            );
            VaultMetadataError::Base64DecodingFailed(format!("Invalid base64 encoding: {e}"))
        })?;
        // Validate decoded size
        if decoded_bytes.is_empty() {
            return Err(VaultMetadataError::EmptyOrMalformedHeader);
        }
        if decoded_bytes.len() > 1_000_000 {
            return Err(VaultMetadataError::JsonParsingFailed(
                "Decoded vault metadata is too large (>1MB)".to_string(),
            ));
        }
        // Parse JSON with detailed error context
        let metadata: ExternalVaultProxyMetadata =
            serde_json::from_slice(&decoded_bytes).map_err(|e| {
                logger::error!(
                    error = %e,
                    "Failed to parse vault metadata JSON"
                );
                VaultMetadataError::JsonParsingFailed(format!("Invalid JSON structure: {e}"))
            })?;
        logger::info!(
            vault_connector = ?metadata.vault_connector(),
            "Successfully parsed vault metadata from header"
        );
        Ok(Box::new(metadata))
    }
}

/// Trait for extracting vault metadata from various sources
pub trait VaultMetadataExtractor {
    /// Extract vault metadata from headers and apply to connection config
    fn extract_and_apply_vault_metadata(
        &mut self,
        headers: &HashMap<String, Secret<String>>,
    ) -> Result<(), VaultMetadataError>;
}

impl VaultMetadataExtractor for ConnectionConfig {
    /// No-op when the metadata header is absent; otherwise decodes it and
    /// mutates `self` (proxy URL, CA cert) via the processor.
    fn extract_and_apply_vault_metadata(
        &mut self,
        headers: &HashMap<String, Secret<String>>,
    ) -> Result<(), VaultMetadataError> {
        if let Some(vault_metadata_header) = headers.get(EXTERNAL_VAULT_METADATA_HEADER) {
            let processor =
                VaultMetadataFactory::from_base64_header(&vault_metadata_header.clone().expose())
                    .map_err(|e| {
                        logger::error!(
                            error = %e,
                            "Failed to create vault metadata processor from header"
                        );
                        e
                    })?;
            processor.process_metadata(self).map_err(|e| {
                logger::error!(
                    error = %e,
                    vault_connector = ?processor.vault_connector(),
                    "Failed to apply vault metadata to connection config"
                );
                e
            })?;
            logger::info!(
                vault_connector = ?processor.vault_connector(),
                proxy_url_applied = self.proxy_url.is_some(),
                ca_cert_applied = self.ca_cert.is_some(),
                client_cert_applied = self.client_cert.is_some(),
                "Successfully applied vault metadata to connection configuration"
            );
        }
        Ok(())
    }
}

/// Extended trait for graceful fallback handling
pub trait VaultMetadataExtractorExt {
    /// Extract vault metadata with graceful fallback (doesn't fail the entire request)
    fn extract_and_apply_vault_metadata_with_fallback(
        &mut self,
        headers: &HashMap<String, Secret<String>>,
    ) -> bool;
    /// Extract vault metadata from a single header value with graceful fallback
    fn extract_and_apply_vault_metadata_with_fallback_from_header(
        &mut self,
        header_value: &str,
    ) -> bool;
}

impl VaultMetadataExtractorExt for ConnectionConfig {
    /// Returns `true` on success; on failure logs a warning and returns
    /// `false` instead of propagating the error.
    fn extract_and_apply_vault_metadata_with_fallback(
        &mut self,
        headers: &HashMap<String, Secret<String>>,
    ) -> bool {
        match self.extract_and_apply_vault_metadata(headers) {
            Ok(()) => {
                logger::info!(
                    proxy_url_set = self.proxy_url.is_some(),
                    ca_cert_set = self.ca_cert.is_some(),
                    client_cert_set = self.client_cert.is_some(),
                    "Vault metadata processing completed successfully"
                );
                true
            }
            Err(error) => {
                logger::warn!(
                    error = %error,
                    proxy_url_set = self.proxy_url.is_some(),
                    ca_cert_set = self.ca_cert.is_some(),
                    "Vault metadata processing failed, continuing without vault configuration"
                );
                false
            }
        }
    }

    /// Convenience wrapper: wraps a single header value in a one-entry map
    /// and delegates to the map-based fallback variant.
    fn extract_and_apply_vault_metadata_with_fallback_from_header(
        &mut self,
        header_value: &str,
    ) -> bool {
        let mut temp_headers = HashMap::new();
        temp_headers.insert(
            EXTERNAL_VAULT_METADATA_HEADER.to_string(),
            Secret::new(header_value.to_string()),
        );
        self.extract_and_apply_vault_metadata_with_fallback(&temp_headers)
    }
}

#[cfg(test)]
#[allow(clippy::expect_used)]
mod tests {
    use std::collections::HashMap;

    use base64::Engine;
    use common_utils::pii::SecretSerdeValue;

    use super::*;
    use crate::types::{HttpMethod, InjectorRequest, TokenData, VaultConnectors};

    // NOTE(review): the assertions below expect `InjectorRequest::new` to
    // apply vault metadata eagerly (proxy_url/ca_cert set, metadata header
    // stripped). The current `InjectorRequest::new` in types.rs only stores a
    // backup proxy and explicitly KEEPS the metadata header for
    // `make_http_request` — verify whether this test still passes or predates
    // that refactor.
    #[test]
    fn test_vault_metadata_processing() {
        // Create test VGS metadata with base64 encoded certificate
        let vgs_metadata = VgsMetadata {
            proxy_url: "https://vgs-proxy.example.com:8443"
                .parse()
                .expect("Valid test URL"),
            certificate: Secret::new("cert".to_string()),
        };
        let metadata = ExternalVaultProxyMetadata::VgsMetadata(vgs_metadata);
        // Serialize and base64 encode (as it would come from the header)
        let metadata_json =
            serde_json::to_vec(&metadata).expect("Metadata serialization should succeed");
        let base64_metadata = BASE64_ENGINE.encode(&metadata_json);
        // Create headers with vault metadata
        let mut headers = HashMap::new();
        headers.insert(
            "Content-Type".to_string(),
            Secret::new("application/json".to_string()),
        );
        headers.insert(
            "Authorization".to_string(),
            Secret::new("Bearer token123".to_string()),
        );
        headers.insert(
            EXTERNAL_VAULT_METADATA_HEADER.to_string(),
            Secret::new(base64_metadata),
        );
        // Test the amazing automatic processing with the unified API!
        let injector_request = InjectorRequest::new(
            "https://api.example.com/v1/payments".to_string(),
            HttpMethod::POST,
            "amount={{$amount}}&currency={{$currency}}".to_string(),
            TokenData {
                vault_connector: VaultConnectors::VGS,
                specific_token_data: SecretSerdeValue::new(serde_json::json!({
                    "amount": "1000",
                    "currency": "USD"
                })),
            },
            Some(headers),
            None, // No fallback proxy needed - vault metadata provides it
            None, // No fallback client cert
            None, // No fallback client key
            None, // No fallback CA cert
        );
        // Verify vault metadata was automatically applied!
        assert!(injector_request.connection_config.proxy_url.is_some());
        assert!(injector_request.connection_config.ca_cert.is_some());
        assert_eq!(
            injector_request
                .connection_config
                .proxy_url
                .as_ref()
                .expect("Proxy URL should be set")
                .clone()
                .expose(),
            "https://vgs-proxy.example.com:8443/"
        );
        // Verify vault metadata header was removed from regular headers
        assert!(!injector_request
            .connection_config
            .headers
            .contains_key(EXTERNAL_VAULT_METADATA_HEADER));
        // Verify other headers are preserved
        assert!(injector_request
            .connection_config
            .headers
            .contains_key("Content-Type"));
        assert!(injector_request
            .connection_config
            .headers
            .contains_key("Authorization"));
    }

    /// Round-trips metadata through base64 and checks the factory yields a
    /// VGS processor.
    #[test]
    fn test_vault_metadata_factory() {
        let vgs_metadata = VgsMetadata {
            proxy_url: "https://vgs-proxy.example.com:8443"
                .parse()
                .expect("Valid test URL"),
            certificate: Secret::new("cert".to_string()),
        };
        let metadata = ExternalVaultProxyMetadata::VgsMetadata(vgs_metadata);
        let metadata_json =
            serde_json::to_vec(&metadata).expect("Metadata serialization should succeed");
        let base64_metadata = BASE64_ENGINE.encode(&metadata_json);
        // Test factory creation from base64
        let processor = VaultMetadataFactory::from_base64_header(&base64_metadata)
            .expect("Base64 decoding should succeed");
        assert_eq!(processor.vault_connector(), VaultConnectors::VGS);
        // Test processor creation was successful
        assert!(processor.vault_connector() == VaultConnectors::VGS);
    }
}
crates/injector/src/vault_metadata.rs
injector::src::vault_metadata
2,965
true
// File: crates/analytics/src/core.rs // Module: analytics::src::core use api_models::analytics::GetInfoResponse; use crate::{types::AnalyticsDomain, utils}; pub async fn get_domain_info( domain: AnalyticsDomain, ) -> crate::errors::AnalyticsResult<GetInfoResponse> { let info = match domain { AnalyticsDomain::Payments => GetInfoResponse { metrics: utils::get_payment_metrics_info(), download_dimensions: None, dimensions: utils::get_payment_dimensions(), }, AnalyticsDomain::PaymentIntents => GetInfoResponse { metrics: utils::get_payment_intent_metrics_info(), download_dimensions: None, dimensions: utils::get_payment_intent_dimensions(), }, AnalyticsDomain::Refunds => GetInfoResponse { metrics: utils::get_refund_metrics_info(), download_dimensions: None, dimensions: utils::get_refund_dimensions(), }, AnalyticsDomain::Frm => GetInfoResponse { metrics: utils::get_frm_metrics_info(), download_dimensions: None, dimensions: utils::get_frm_dimensions(), }, AnalyticsDomain::SdkEvents => GetInfoResponse { metrics: utils::get_sdk_event_metrics_info(), download_dimensions: None, dimensions: utils::get_sdk_event_dimensions(), }, AnalyticsDomain::AuthEvents => GetInfoResponse { metrics: utils::get_auth_event_metrics_info(), download_dimensions: None, dimensions: utils::get_auth_event_dimensions(), }, AnalyticsDomain::ApiEvents => GetInfoResponse { metrics: utils::get_api_event_metrics_info(), download_dimensions: None, dimensions: utils::get_api_event_dimensions(), }, AnalyticsDomain::Dispute => GetInfoResponse { metrics: utils::get_dispute_metrics_info(), download_dimensions: None, dimensions: utils::get_dispute_dimensions(), }, AnalyticsDomain::Routing => GetInfoResponse { metrics: utils::get_payment_metrics_info(), download_dimensions: None, dimensions: utils::get_payment_dimensions(), }, }; Ok(info) }
crates/analytics/src/core.rs
analytics::src::core
433
true
// File: crates/analytics/src/payment_intents.rs // Module: analytics::src::payment_intents pub mod accumulator; mod core; pub mod filters; pub mod metrics; pub mod sankey; pub mod types; pub use accumulator::{PaymentIntentMetricAccumulator, PaymentIntentMetricsAccumulator}; pub trait PaymentIntentAnalytics: metrics::PaymentIntentMetricAnalytics + filters::PaymentIntentFilterAnalytics { } pub use self::core::{get_filters, get_metrics, get_sankey};
crates/analytics/src/payment_intents.rs
analytics::src::payment_intents
104
true
// File: crates/analytics/src/api_event.rs // Module: analytics::src::api_event mod core; pub mod events; pub mod filters; pub mod metrics; pub mod types; pub trait APIEventAnalytics: events::ApiLogsFilterAnalytics {} pub use self::core::{api_events_core, get_api_event_metrics, get_filters};
crates/analytics/src/api_event.rs
analytics::src::api_event
71
true
// File: crates/analytics/src/sqlx.rs
// Module: analytics::src::sqlx

use std::{fmt::Display, str::FromStr};

use api_models::{
    analytics::{frm::FrmTransactionType, refunds::RefundType},
    enums::{DisputeStage, DisputeStatus},
};
use common_enums::{
    AuthenticationConnectors, AuthenticationStatus, DecoupledAuthenticationType, TransactionStatus,
};
use common_utils::{
    errors::{CustomResult, ParsingError},
    DbConnectionParams,
};
use diesel_models::enums::{
    AttemptStatus, AuthenticationType, Currency, FraudCheckStatus, IntentStatus, PaymentMethod,
    RefundStatus, RoutingApproach,
};
use error_stack::ResultExt;
use sqlx::{
    postgres::{PgArgumentBuffer, PgPoolOptions, PgRow, PgTypeInfo, PgValueRef},
    Decode, Encode,
    Error::ColumnNotFound,
    FromRow, Pool, Postgres, Row,
};
use storage_impl::config::Database;
use time::PrimitiveDateTime;

use super::{
    health_check::HealthCheck,
    query::{Aggregate, ToSql, Window},
    types::{
        AnalyticsCollection, AnalyticsDataSource, DBEnumWrapper, LoadRow, QueryExecutionError,
        TableEngine,
    },
};

/// Postgres-backed analytics data source built on a shared sqlx connection pool.
#[derive(Debug, Clone)]
pub struct SqlxClient {
    pool: Pool<Postgres>,
}

impl Default for SqlxClient {
    fn default() -> Self {
        // NOTE(review): hard-coded local development credentials; confirm that
        // `Default` is only ever reached in dev/test paths, never production.
        let database_url = format!(
            "postgres://{}:{}@{}:{}/{}",
            "db_user", "db_pass", "localhost", 5432, "hyperswitch_db"
        );
        Self {
            #[allow(clippy::expect_used)]
            // `connect_lazy` defers actual connections until first use, so the
            // expect here only fires on an invalid URL, not on an unreachable DB.
            pool: PgPoolOptions::new()
                .connect_lazy(&database_url)
                .expect("SQLX Pool Creation failed"),
        }
    }
}

impl SqlxClient {
    /// Builds a client from the application database config, targeting the
    /// given Postgres `schema`. The pool is created lazily (no I/O here).
    pub async fn from_conf(conf: &Database, schema: &str) -> Self {
        let database_url = conf.get_database_url(schema);
        #[allow(clippy::expect_used)]
        let pool = PgPoolOptions::new()
            .max_connections(conf.pool_size)
            .acquire_timeout(std::time::Duration::from_secs(conf.connection_timeout))
            .connect_lazy(&database_url)
            .expect("SQLX Pool Creation failed");
        Self { pool }
    }
}

/// Maps a Rust enum type to the name of its database type, used by the
/// `sqlx::Type` impl for `DBEnumWrapper` below.
pub trait DbType {
    fn name() -> &'static str;
}

// Implements `DbType` for an enum. The two-argument form overrides the DB
// type name (e.g. `PaymentMethod` is stored as TEXT); the one-argument form
// uses the Rust type name itself.
macro_rules! db_type {
    ($a: ident, $str: tt) => {
        impl DbType for $a {
            fn name() -> &'static str {
                stringify!($str)
            }
        }
    };
    ($a:ident) => {
        impl DbType for $a {
            fn name() -> &'static str {
                stringify!($a)
            }
        }
    };
}

db_type!(Currency);
db_type!(AuthenticationType);
db_type!(AttemptStatus);
db_type!(IntentStatus);
db_type!(PaymentMethod, TEXT);
db_type!(RefundStatus);
db_type!(RefundType);
db_type!(FraudCheckStatus);
db_type!(FrmTransactionType);
db_type!(DisputeStage);
db_type!(DisputeStatus);
db_type!(AuthenticationStatus);
db_type!(TransactionStatus);
db_type!(AuthenticationConnectors);
db_type!(DecoupledAuthenticationType);
db_type!(RoutingApproach);

// Encode the wrapped enum as its `Display` string when binding query params.
impl<'q, Type> Encode<'q, Postgres> for DBEnumWrapper<Type>
where
    Type: DbType + FromStr + Display,
{
    fn encode_by_ref(
        &self,
        buf: &mut PgArgumentBuffer,
    ) -> Result<sqlx::encode::IsNull, Box<dyn std::error::Error + Send + Sync + 'static>> {
        <String as Encode<'q, Postgres>>::encode(self.0.to_string(), buf)
    }
    fn size_hint(&self) -> usize {
        <String as Encode<'q, Postgres>>::size_hint(&self.0.to_string())
    }
}

// Decode a row value by reading it as text and parsing via `FromStr`;
// unknown values become a descriptive decode error instead of a panic.
impl<'r, Type> Decode<'r, Postgres> for DBEnumWrapper<Type>
where
    Type: DbType + FromStr + Display,
{
    fn decode(
        value: PgValueRef<'r>,
    ) -> Result<Self, Box<dyn std::error::Error + 'static + Send + Sync>> {
        let str_value = <&'r str as Decode<'r, Postgres>>::decode(value)?;
        Type::from_str(str_value).map(DBEnumWrapper).or(Err(format!(
            "invalid value {:?} for enum {}",
            str_value,
            Type::name()
        )
        .into()))
    }
}

// The SQL type of a wrapped enum is the name provided by its `DbType` impl.
impl<Type> sqlx::Type<Postgres> for DBEnumWrapper<Type>
where
    Type: DbType + FromStr + Display,
{
    fn type_info() -> PgTypeInfo {
        PgTypeInfo::with_name(Type::name())
    }
}

// Generic row materialization: anything implementing sqlx's `FromRow` can be
// loaded, with extraction failures mapped into the analytics error type.
impl<T> LoadRow<T> for SqlxClient
where
    for<'a> T: FromRow<'a, PgRow>,
{
    fn load_row(row: PgRow) -> CustomResult<T, QueryExecutionError> {
        T::from_row(&row).change_context(QueryExecutionError::RowExtractionFailure)
    }
}

// Marker-trait impls declaring which analytics capabilities this backend serves.
impl super::payments::filters::PaymentFilterAnalytics for SqlxClient {}
impl super::payments::metrics::PaymentMetricAnalytics for SqlxClient {}
impl super::payments::distribution::PaymentDistributionAnalytics for SqlxClient {} impl super::payment_intents::filters::PaymentIntentFilterAnalytics for SqlxClient {} impl super::payment_intents::metrics::PaymentIntentMetricAnalytics for SqlxClient {} impl super::refunds::metrics::RefundMetricAnalytics for SqlxClient {} impl super::refunds::filters::RefundFilterAnalytics for SqlxClient {} impl super::refunds::distribution::RefundDistributionAnalytics for SqlxClient {} impl super::disputes::filters::DisputeFilterAnalytics for SqlxClient {} impl super::disputes::metrics::DisputeMetricAnalytics for SqlxClient {} impl super::frm::metrics::FrmMetricAnalytics for SqlxClient {} impl super::frm::filters::FrmFilterAnalytics for SqlxClient {} impl super::auth_events::metrics::AuthEventMetricAnalytics for SqlxClient {} impl super::auth_events::filters::AuthEventFilterAnalytics for SqlxClient {} #[async_trait::async_trait] impl AnalyticsDataSource for SqlxClient { type Row = PgRow; async fn load_results<T>(&self, query: &str) -> CustomResult<Vec<T>, QueryExecutionError> where Self: LoadRow<T>, { sqlx::query(&format!("{query};")) .fetch_all(&self.pool) .await .change_context(QueryExecutionError::DatabaseError) .attach_printable_lazy(|| format!("Failed to run query {query}"))? 
.into_iter() .map(Self::load_row) .collect::<Result<Vec<_>, _>>() .change_context(QueryExecutionError::RowExtractionFailure) } } #[async_trait::async_trait] impl HealthCheck for SqlxClient { async fn deep_health_check(&self) -> CustomResult<(), QueryExecutionError> { sqlx::query("SELECT 1") .fetch_all(&self.pool) .await .map(|_| ()) .change_context(QueryExecutionError::DatabaseError) } } impl<'a> FromRow<'a, PgRow> for super::auth_events::metrics::AuthEventMetricRow { fn from_row(row: &'a PgRow) -> sqlx::Result<Self> { let authentication_status: Option<DBEnumWrapper<AuthenticationStatus>> = row.try_get("authentication_status").or_else(|e| match e { ColumnNotFound(_) => Ok(Default::default()), e => Err(e), })?; let trans_status: Option<DBEnumWrapper<TransactionStatus>> = row.try_get("trans_status").or_else(|e| match e { ColumnNotFound(_) => Ok(Default::default()), e => Err(e), })?; let authentication_type: Option<DBEnumWrapper<DecoupledAuthenticationType>> = row.try_get("authentication_type").or_else(|e| match e { ColumnNotFound(_) => Ok(Default::default()), e => Err(e), })?; let error_message: Option<String> = row.try_get("error_message").or_else(|e| match e { ColumnNotFound(_) => Ok(Default::default()), e => Err(e), })?; let authentication_connector: Option<DBEnumWrapper<AuthenticationConnectors>> = row .try_get("authentication_connector") .or_else(|e| match e { ColumnNotFound(_) => Ok(Default::default()), e => Err(e), })?; let message_version: Option<String> = row.try_get("message_version").or_else(|e| match e { ColumnNotFound(_) => Ok(Default::default()), e => Err(e), })?; let platform: Option<String> = row.try_get("platform").or_else(|e| match e { ColumnNotFound(_) => Ok(Default::default()), e => Err(e), })?; let acs_reference_number: Option<String> = row.try_get("acs_reference_number").or_else(|e| match e { ColumnNotFound(_) => Ok(Default::default()), e => Err(e), })?; let count: Option<i64> = row.try_get("count").or_else(|e| match e { ColumnNotFound(_) => 
Ok(Default::default()), e => Err(e), })?; // Removing millisecond precision to get accurate diffs against clickhouse let start_bucket: Option<PrimitiveDateTime> = row .try_get::<Option<PrimitiveDateTime>, _>("start_bucket")? .and_then(|dt| dt.replace_millisecond(0).ok()); let end_bucket: Option<PrimitiveDateTime> = row .try_get::<Option<PrimitiveDateTime>, _>("end_bucket")? .and_then(|dt| dt.replace_millisecond(0).ok()); let mcc: Option<String> = row.try_get("mcc").or_else(|e| match e { ColumnNotFound(_) => Ok(Default::default()), e => Err(e), })?; let currency: Option<DBEnumWrapper<Currency>> = row.try_get("currency").or_else(|e| match e { ColumnNotFound(_) => Ok(Default::default()), e => Err(e), })?; let merchant_country: Option<String> = row.try_get("merchant_country").or_else(|e| match e { ColumnNotFound(_) => Ok(Default::default()), e => Err(e), })?; let billing_country: Option<String> = row.try_get("billing_country").or_else(|e| match e { ColumnNotFound(_) => Ok(Default::default()), e => Err(e), })?; let shipping_country: Option<String> = row.try_get("shipping_country").or_else(|e| match e { ColumnNotFound(_) => Ok(Default::default()), e => Err(e), })?; let issuer_country: Option<String> = row.try_get("issuer_country").or_else(|e| match e { ColumnNotFound(_) => Ok(Default::default()), e => Err(e), })?; let earliest_supported_version: Option<String> = row .try_get("earliest_supported_version") .or_else(|e| match e { ColumnNotFound(_) => Ok(Default::default()), e => Err(e), })?; let latest_supported_version: Option<String> = row .try_get("latest_supported_version") .or_else(|e| match e { ColumnNotFound(_) => Ok(Default::default()), e => Err(e), })?; let whitelist_decision: Option<bool> = row.try_get("whitelist_decision").or_else(|e| match e { ColumnNotFound(_) => Ok(Default::default()), e => Err(e), })?; let device_manufacturer: Option<String> = row.try_get("device_manufacturer").or_else(|e| match e { ColumnNotFound(_) => Ok(Default::default()), e => Err(e), 
})?; let device_type: Option<String> = row.try_get("device_type").or_else(|e| match e { ColumnNotFound(_) => Ok(Default::default()), e => Err(e), })?; let device_brand: Option<String> = row.try_get("device_brand").or_else(|e| match e { ColumnNotFound(_) => Ok(Default::default()), e => Err(e), })?; let device_os: Option<String> = row.try_get("device_os").or_else(|e| match e { ColumnNotFound(_) => Ok(Default::default()), e => Err(e), })?; let device_display: Option<String> = row.try_get("device_display").or_else(|e| match e { ColumnNotFound(_) => Ok(Default::default()), e => Err(e), })?; let browser_name: Option<String> = row.try_get("browser_name").or_else(|e| match e { ColumnNotFound(_) => Ok(Default::default()), e => Err(e), })?; let browser_version: Option<String> = row.try_get("browser_version").or_else(|e| match e { ColumnNotFound(_) => Ok(Default::default()), e => Err(e), })?; let issuer_id: Option<String> = row.try_get("issuer_id").or_else(|e| match e { ColumnNotFound(_) => Ok(Default::default()), e => Err(e), })?; let scheme_name: Option<String> = row.try_get("scheme_name").or_else(|e| match e { ColumnNotFound(_) => Ok(Default::default()), e => Err(e), })?; let exemption_requested: Option<bool> = row.try_get("exemption_requested").or_else(|e| match e { ColumnNotFound(_) => Ok(Default::default()), e => Err(e), })?; let exemption_accepted: Option<bool> = row.try_get("exemption_accepted").or_else(|e| match e { ColumnNotFound(_) => Ok(Default::default()), e => Err(e), })?; Ok(Self { authentication_status, trans_status, authentication_type, error_message, authentication_connector, message_version, acs_reference_number, platform, count, start_bucket, end_bucket, mcc, currency, merchant_country, billing_country, shipping_country, issuer_country, earliest_supported_version, latest_supported_version, whitelist_decision, device_manufacturer, device_type, device_brand, device_os, device_display, browser_name, browser_version, issuer_id, scheme_name, 
exemption_requested, exemption_accepted, }) } } impl<'a> FromRow<'a, PgRow> for super::auth_events::filters::AuthEventFilterRow { fn from_row(row: &'a PgRow) -> sqlx::Result<Self> { let authentication_status: Option<DBEnumWrapper<AuthenticationStatus>> = row.try_get("authentication_status").or_else(|e| match e { ColumnNotFound(_) => Ok(Default::default()), e => Err(e), })?; let trans_status: Option<DBEnumWrapper<TransactionStatus>> = row.try_get("trans_status").or_else(|e| match e { ColumnNotFound(_) => Ok(Default::default()), e => Err(e), })?; let authentication_type: Option<DBEnumWrapper<DecoupledAuthenticationType>> = row.try_get("authentication_type").or_else(|e| match e { ColumnNotFound(_) => Ok(Default::default()), e => Err(e), })?; let error_message: Option<String> = row.try_get("error_message").or_else(|e| match e { ColumnNotFound(_) => Ok(Default::default()), e => Err(e), })?; let authentication_connector: Option<DBEnumWrapper<AuthenticationConnectors>> = row .try_get("authentication_connector") .or_else(|e| match e { ColumnNotFound(_) => Ok(Default::default()), e => Err(e), })?; let message_version: Option<String> = row.try_get("message_version").or_else(|e| match e { ColumnNotFound(_) => Ok(Default::default()), e => Err(e), })?; let acs_reference_number: Option<String> = row.try_get("acs_reference_number").or_else(|e| match e { ColumnNotFound(_) => Ok(Default::default()), e => Err(e), })?; let platform: Option<String> = row.try_get("platform").or_else(|e| match e { ColumnNotFound(_) => Ok(Default::default()), e => Err(e), })?; let mcc: Option<String> = row.try_get("mcc").or_else(|e| match e { ColumnNotFound(_) => Ok(Default::default()), e => Err(e), })?; let currency: Option<DBEnumWrapper<Currency>> = row.try_get("currency").or_else(|e| match e { ColumnNotFound(_) => Ok(Default::default()), e => Err(e), })?; let merchant_country: Option<String> = row.try_get("merchant_country").or_else(|e| match e { ColumnNotFound(_) => Ok(Default::default()), e => 
Err(e), })?; let billing_country: Option<String> = row.try_get("billing_country").or_else(|e| match e { ColumnNotFound(_) => Ok(Default::default()), e => Err(e), })?; let shipping_country: Option<String> = row.try_get("shipping_country").or_else(|e| match e { ColumnNotFound(_) => Ok(Default::default()), e => Err(e), })?; let issuer_country: Option<String> = row.try_get("issuer_country").or_else(|e| match e { ColumnNotFound(_) => Ok(Default::default()), e => Err(e), })?; let earliest_supported_version: Option<String> = row .try_get("earliest_supported_version") .or_else(|e| match e { ColumnNotFound(_) => Ok(Default::default()), e => Err(e), })?; let latest_supported_version: Option<String> = row .try_get("latest_supported_version") .or_else(|e| match e { ColumnNotFound(_) => Ok(Default::default()), e => Err(e), })?; let whitelist_decision: Option<bool> = row.try_get("whitelist_decision").or_else(|e| match e { ColumnNotFound(_) => Ok(Default::default()), e => Err(e), })?; let device_manufacturer: Option<String> = row.try_get("device_manufacturer").or_else(|e| match e { ColumnNotFound(_) => Ok(Default::default()), e => Err(e), })?; let device_type: Option<String> = row.try_get("device_type").or_else(|e| match e { ColumnNotFound(_) => Ok(Default::default()), e => Err(e), })?; let device_brand: Option<String> = row.try_get("device_brand").or_else(|e| match e { ColumnNotFound(_) => Ok(Default::default()), e => Err(e), })?; let device_os: Option<String> = row.try_get("device_os").or_else(|e| match e { ColumnNotFound(_) => Ok(Default::default()), e => Err(e), })?; let device_display: Option<String> = row.try_get("device_display").or_else(|e| match e { ColumnNotFound(_) => Ok(Default::default()), e => Err(e), })?; let browser_name: Option<String> = row.try_get("browser_name").or_else(|e| match e { ColumnNotFound(_) => Ok(Default::default()), e => Err(e), })?; let browser_version: Option<String> = row.try_get("browser_version").or_else(|e| match e { ColumnNotFound(_) => 
Ok(Default::default()), e => Err(e), })?; let issuer_id: Option<String> = row.try_get("issuer_id").or_else(|e| match e { ColumnNotFound(_) => Ok(Default::default()), e => Err(e), })?; let scheme_name: Option<String> = row.try_get("scheme_name").or_else(|e| match e { ColumnNotFound(_) => Ok(Default::default()), e => Err(e), })?; let exemption_requested: Option<bool> = row.try_get("exemption_requested").or_else(|e| match e { ColumnNotFound(_) => Ok(Default::default()), e => Err(e), })?; let exemption_accepted: Option<bool> = row.try_get("exemption_accepted").or_else(|e| match e { ColumnNotFound(_) => Ok(Default::default()), e => Err(e), })?; Ok(Self { authentication_status, trans_status, authentication_type, error_message, authentication_connector, message_version, platform, acs_reference_number, mcc, currency, merchant_country, billing_country, shipping_country, issuer_country, earliest_supported_version, latest_supported_version, whitelist_decision, device_manufacturer, device_type, device_brand, device_os, device_display, browser_name, browser_version, issuer_id, scheme_name, exemption_requested, exemption_accepted, }) } } impl<'a> FromRow<'a, PgRow> for super::refunds::metrics::RefundMetricRow { fn from_row(row: &'a PgRow) -> sqlx::Result<Self> { let currency: Option<DBEnumWrapper<Currency>> = row.try_get("currency").or_else(|e| match e { ColumnNotFound(_) => Ok(Default::default()), e => Err(e), })?; let refund_status: Option<DBEnumWrapper<RefundStatus>> = row.try_get("refund_status").or_else(|e| match e { ColumnNotFound(_) => Ok(Default::default()), e => Err(e), })?; let connector: Option<String> = row.try_get("connector").or_else(|e| match e { ColumnNotFound(_) => Ok(Default::default()), e => Err(e), })?; let refund_type: Option<DBEnumWrapper<RefundType>> = row.try_get("refund_type").or_else(|e| match e { ColumnNotFound(_) => Ok(Default::default()), e => Err(e), })?; let profile_id: Option<String> = row.try_get("profile_id").or_else(|e| match e { 
ColumnNotFound(_) => Ok(Default::default()), e => Err(e), })?; let refund_reason: Option<String> = row.try_get("refund_reason").or_else(|e| match e { ColumnNotFound(_) => Ok(Default::default()), e => Err(e), })?; let refund_error_message: Option<String> = row.try_get("refund_error_message").or_else(|e| match e { ColumnNotFound(_) => Ok(Default::default()), e => Err(e), })?; let total: Option<bigdecimal::BigDecimal> = row.try_get("total").or_else(|e| match e { ColumnNotFound(_) => Ok(Default::default()), e => Err(e), })?; let count: Option<i64> = row.try_get("count").or_else(|e| match e { ColumnNotFound(_) => Ok(Default::default()), e => Err(e), })?; // Removing millisecond precision to get accurate diffs against clickhouse let start_bucket: Option<PrimitiveDateTime> = row .try_get::<Option<PrimitiveDateTime>, _>("start_bucket")? .and_then(|dt| dt.replace_millisecond(0).ok()); let end_bucket: Option<PrimitiveDateTime> = row .try_get::<Option<PrimitiveDateTime>, _>("end_bucket")? .and_then(|dt| dt.replace_millisecond(0).ok()); Ok(Self { currency, refund_status, connector, refund_type, profile_id, refund_reason, refund_error_message, total, count, start_bucket, end_bucket, }) } } impl<'a> FromRow<'a, PgRow> for super::frm::metrics::FrmMetricRow { fn from_row(row: &'a PgRow) -> sqlx::Result<Self> { let frm_name: Option<String> = row.try_get("frm_name").or_else(|e| match e { ColumnNotFound(_) => Ok(Default::default()), e => Err(e), })?; let frm_status: Option<DBEnumWrapper<FraudCheckStatus>> = row.try_get("frm_status").or_else(|e| match e { ColumnNotFound(_) => Ok(Default::default()), e => Err(e), })?; let frm_transaction_type: Option<DBEnumWrapper<FrmTransactionType>> = row.try_get("frm_transaction_type").or_else(|e| match e { ColumnNotFound(_) => Ok(Default::default()), e => Err(e), })?; let total: Option<bigdecimal::BigDecimal> = row.try_get("total").or_else(|e| match e { ColumnNotFound(_) => Ok(Default::default()), e => Err(e), })?; let count: Option<i64> = 
row.try_get("count").or_else(|e| match e { ColumnNotFound(_) => Ok(Default::default()), e => Err(e), })?; // Removing millisecond precision to get accurate diffs against clickhouse let start_bucket: Option<PrimitiveDateTime> = row .try_get::<Option<PrimitiveDateTime>, _>("start_bucket")? .and_then(|dt| dt.replace_millisecond(0).ok()); let end_bucket: Option<PrimitiveDateTime> = row .try_get::<Option<PrimitiveDateTime>, _>("end_bucket")? .and_then(|dt| dt.replace_millisecond(0).ok()); Ok(Self { frm_name, frm_status, frm_transaction_type, total, count, start_bucket, end_bucket, }) } } impl<'a> FromRow<'a, PgRow> for super::payments::metrics::PaymentMetricRow { fn from_row(row: &'a PgRow) -> sqlx::Result<Self> { let currency: Option<DBEnumWrapper<Currency>> = row.try_get("currency").or_else(|e| match e { ColumnNotFound(_) => Ok(Default::default()), e => Err(e), })?; let status: Option<DBEnumWrapper<AttemptStatus>> = row.try_get("status").or_else(|e| match e { ColumnNotFound(_) => Ok(Default::default()), e => Err(e), })?; let connector: Option<String> = row.try_get("connector").or_else(|e| match e { ColumnNotFound(_) => Ok(Default::default()), e => Err(e), })?; let authentication_type: Option<DBEnumWrapper<AuthenticationType>> = row.try_get("authentication_type").or_else(|e| match e { ColumnNotFound(_) => Ok(Default::default()), e => Err(e), })?; let payment_method: Option<String> = row.try_get("payment_method").or_else(|e| match e { ColumnNotFound(_) => Ok(Default::default()), e => Err(e), })?; let payment_method_type: Option<String> = row.try_get("payment_method_type").or_else(|e| match e { ColumnNotFound(_) => Ok(Default::default()), e => Err(e), })?; let client_source: Option<String> = row.try_get("client_source").or_else(|e| match e { ColumnNotFound(_) => Ok(Default::default()), e => Err(e), })?; let client_version: Option<String> = row.try_get("client_version").or_else(|e| match e { ColumnNotFound(_) => Ok(Default::default()), e => Err(e), })?; let profile_id: 
Option<String> = row.try_get("profile_id").or_else(|e| match e { ColumnNotFound(_) => Ok(Default::default()), e => Err(e), })?; let card_network: Option<String> = row.try_get("card_network").or_else(|e| match e { ColumnNotFound(_) => Ok(Default::default()), e => Err(e), })?; let merchant_id: Option<String> = row.try_get("merchant_id").or_else(|e| match e { ColumnNotFound(_) => Ok(Default::default()), e => Err(e), })?; let card_last_4: Option<String> = row.try_get("card_last_4").or_else(|e| match e { ColumnNotFound(_) => Ok(Default::default()), e => Err(e), })?; let card_issuer: Option<String> = row.try_get("card_issuer").or_else(|e| match e { ColumnNotFound(_) => Ok(Default::default()), e => Err(e), })?; let error_reason: Option<String> = row.try_get("error_reason").or_else(|e| match e { ColumnNotFound(_) => Ok(Default::default()), e => Err(e), })?; let first_attempt: Option<bool> = row.try_get("first_attempt").or_else(|e| match e { ColumnNotFound(_) => Ok(Default::default()), e => Err(e), })?; let routing_approach: Option<DBEnumWrapper<RoutingApproach>> = row.try_get("routing_approach").or_else(|e| match e { ColumnNotFound(_) => Ok(Default::default()), e => Err(e), })?; let signature_network: Option<String> = row.try_get("signature_network").or_else(|e| match e { ColumnNotFound(_) => Ok(Default::default()), e => Err(e), })?; let is_issuer_regulated: Option<bool> = row.try_get("is_issuer_regulated").or_else(|e| match e { ColumnNotFound(_) => Ok(Default::default()), e => Err(e), })?; let is_debit_routed: Option<bool> = row.try_get("is_debit_routed").or_else(|e| match e { ColumnNotFound(_) => Ok(Default::default()), e => Err(e), })?; let total: Option<bigdecimal::BigDecimal> = row.try_get("total").or_else(|e| match e { ColumnNotFound(_) => Ok(Default::default()), e => Err(e), })?; let count: Option<i64> = row.try_get("count").or_else(|e| match e { ColumnNotFound(_) => Ok(Default::default()), e => Err(e), })?; // Removing millisecond precision to get accurate diffs 
against clickhouse let start_bucket: Option<PrimitiveDateTime> = row .try_get::<Option<PrimitiveDateTime>, _>("start_bucket")? .and_then(|dt| dt.replace_millisecond(0).ok()); let end_bucket: Option<PrimitiveDateTime> = row .try_get::<Option<PrimitiveDateTime>, _>("end_bucket")? .and_then(|dt| dt.replace_millisecond(0).ok()); Ok(Self { currency, status, connector, authentication_type, payment_method, payment_method_type, client_source, client_version, profile_id, card_network, merchant_id, card_last_4, card_issuer, error_reason, first_attempt, routing_approach, signature_network, is_issuer_regulated, is_debit_routed, total, count, start_bucket, end_bucket, }) } } impl<'a> FromRow<'a, PgRow> for super::payments::distribution::PaymentDistributionRow { fn from_row(row: &'a PgRow) -> sqlx::Result<Self> { let currency: Option<DBEnumWrapper<Currency>> = row.try_get("currency").or_else(|e| match e { ColumnNotFound(_) => Ok(Default::default()), e => Err(e), })?; let status: Option<DBEnumWrapper<AttemptStatus>> = row.try_get("status").or_else(|e| match e { ColumnNotFound(_) => Ok(Default::default()), e => Err(e), })?; let connector: Option<String> = row.try_get("connector").or_else(|e| match e { ColumnNotFound(_) => Ok(Default::default()), e => Err(e), })?; let authentication_type: Option<DBEnumWrapper<AuthenticationType>> = row.try_get("authentication_type").or_else(|e| match e { ColumnNotFound(_) => Ok(Default::default()), e => Err(e), })?; let payment_method: Option<String> = row.try_get("payment_method").or_else(|e| match e { ColumnNotFound(_) => Ok(Default::default()), e => Err(e), })?; let payment_method_type: Option<String> = row.try_get("payment_method_type").or_else(|e| match e { ColumnNotFound(_) => Ok(Default::default()), e => Err(e), })?; let client_source: Option<String> = row.try_get("client_source").or_else(|e| match e { ColumnNotFound(_) => Ok(Default::default()), e => Err(e), })?; let client_version: Option<String> = row.try_get("client_version").or_else(|e| 
match e {
            ColumnNotFound(_) => Ok(Default::default()),
            e => Err(e),
        })?;
        let profile_id: Option<String> = row.try_get("profile_id").or_else(|e| match e {
            ColumnNotFound(_) => Ok(Default::default()),
            e => Err(e),
        })?;
        let card_network: Option<String> = row.try_get("card_network").or_else(|e| match e {
            ColumnNotFound(_) => Ok(Default::default()),
            e => Err(e),
        })?;
        let merchant_id: Option<String> = row.try_get("merchant_id").or_else(|e| match e {
            ColumnNotFound(_) => Ok(Default::default()),
            e => Err(e),
        })?;
        let card_last_4: Option<String> = row.try_get("card_last_4").or_else(|e| match e {
            ColumnNotFound(_) => Ok(Default::default()),
            e => Err(e),
        })?;
        let card_issuer: Option<String> = row.try_get("card_issuer").or_else(|e| match e {
            ColumnNotFound(_) => Ok(Default::default()),
            e => Err(e),
        })?;
        let error_reason: Option<String> = row.try_get("error_reason").or_else(|e| match e {
            ColumnNotFound(_) => Ok(Default::default()),
            e => Err(e),
        })?;
        let routing_approach: Option<DBEnumWrapper<RoutingApproach>> =
            row.try_get("routing_approach").or_else(|e| match e {
                ColumnNotFound(_) => Ok(Default::default()),
                e => Err(e),
            })?;
        let signature_network: Option<String> =
            row.try_get("signature_network").or_else(|e| match e {
                ColumnNotFound(_) => Ok(Default::default()),
                e => Err(e),
            })?;
        let is_issuer_regulated: Option<bool> =
            row.try_get("is_issuer_regulated").or_else(|e| match e {
                ColumnNotFound(_) => Ok(Default::default()),
                e => Err(e),
            })?;
        let is_debit_routed: Option<bool> = row.try_get("is_debit_routed").or_else(|e| match e {
            ColumnNotFound(_) => Ok(Default::default()),
            e => Err(e),
        })?;
        let total: Option<bigdecimal::BigDecimal> = row.try_get("total").or_else(|e| match e {
            ColumnNotFound(_) => Ok(Default::default()),
            e => Err(e),
        })?;
        let count: Option<i64> = row.try_get("count").or_else(|e| match e {
            ColumnNotFound(_) => Ok(Default::default()),
            e => Err(e),
        })?;
        let error_message: Option<String> = row.try_get("error_message").or_else(|e| match e {
            ColumnNotFound(_) => Ok(Default::default()),
            e => Err(e),
        })?;
        let first_attempt: Option<bool> = row.try_get("first_attempt").or_else(|e| match e {
            ColumnNotFound(_) => Ok(Default::default()),
            e => Err(e),
        })?;
        // Removing millisecond precision to get accurate diffs against clickhouse
        let start_bucket: Option<PrimitiveDateTime> = row
            .try_get::<Option<PrimitiveDateTime>, _>("start_bucket")?
            .and_then(|dt| dt.replace_millisecond(0).ok());
        let end_bucket: Option<PrimitiveDateTime> = row
            .try_get::<Option<PrimitiveDateTime>, _>("end_bucket")?
            .and_then(|dt| dt.replace_millisecond(0).ok());
        Ok(Self {
            currency,
            status,
            connector,
            authentication_type,
            payment_method,
            payment_method_type,
            client_source,
            client_version,
            profile_id,
            card_network,
            merchant_id,
            card_last_4,
            card_issuer,
            error_reason,
            first_attempt,
            total,
            count,
            error_message,
            routing_approach,
            signature_network,
            is_issuer_regulated,
            is_debit_routed,
            start_bucket,
            end_bucket,
        })
    }
}

/// Reads one column from a Postgres row, mapping a missing column
/// (`ColumnNotFound`) to the field's `Default` (i.e. `None` for the
/// `Option<_>` analytics fields) instead of failing the whole row.
/// Expands to exactly the `row.try_get(..).or_else(..)?` pattern used
/// throughout this module; any other sqlx error is still propagated.
macro_rules! try_col {
    ($row:ident, $col:literal) => {
        $row.try_get($col).or_else(|e| match e {
            ColumnNotFound(_) => Ok(Default::default()),
            e => Err(e),
        })?
    };
}

impl<'a> FromRow<'a, PgRow> for super::payments::filters::PaymentFilterRow {
    // Every field is optional: the SELECT list varies per query, so absent
    // columns decode as `None` rather than erroring.
    fn from_row(row: &'a PgRow) -> sqlx::Result<Self> {
        let currency: Option<DBEnumWrapper<Currency>> = try_col!(row, "currency");
        let status: Option<DBEnumWrapper<AttemptStatus>> = try_col!(row, "status");
        let connector: Option<String> = try_col!(row, "connector");
        let authentication_type: Option<DBEnumWrapper<AuthenticationType>> =
            try_col!(row, "authentication_type");
        let payment_method: Option<String> = try_col!(row, "payment_method");
        let payment_method_type: Option<String> = try_col!(row, "payment_method_type");
        let client_source: Option<String> = try_col!(row, "client_source");
        let client_version: Option<String> = try_col!(row, "client_version");
        let profile_id: Option<String> = try_col!(row, "profile_id");
        let card_network: Option<String> = try_col!(row, "card_network");
        let merchant_id: Option<String> = try_col!(row, "merchant_id");
        let card_last_4: Option<String> = try_col!(row, "card_last_4");
        let card_issuer: Option<String> = try_col!(row, "card_issuer");
        let error_reason: Option<String> = try_col!(row, "error_reason");
        let first_attempt: Option<bool> = try_col!(row, "first_attempt");
        let routing_approach: Option<DBEnumWrapper<RoutingApproach>> =
            try_col!(row, "routing_approach");
        let signature_network: Option<String> = try_col!(row, "signature_network");
        let is_issuer_regulated: Option<bool> = try_col!(row, "is_issuer_regulated");
        let is_debit_routed: Option<bool> = try_col!(row, "is_debit_routed");
        Ok(Self {
            currency,
            status,
            connector,
            authentication_type,
            payment_method,
            payment_method_type,
            client_source,
            client_version,
            profile_id,
            card_network,
            merchant_id,
            card_last_4,
            card_issuer,
            error_reason,
            first_attempt,
            routing_approach,
            signature_network,
            is_issuer_regulated,
            is_debit_routed,
        })
    }
}

impl<'a> FromRow<'a, PgRow> for super::payment_intents::metrics::PaymentIntentMetricRow {
    fn from_row(row: &'a PgRow) -> sqlx::Result<Self> {
        let status: Option<DBEnumWrapper<IntentStatus>> = try_col!(row, "status");
        let currency: Option<DBEnumWrapper<Currency>> = try_col!(row, "currency");
        let profile_id: Option<String> = try_col!(row, "profile_id");
        let connector: Option<String> = try_col!(row, "connector");
        let authentication_type: Option<DBEnumWrapper<AuthenticationType>> =
            try_col!(row, "authentication_type");
        let payment_method: Option<String> = try_col!(row, "payment_method");
        let payment_method_type: Option<String> = try_col!(row, "payment_method_type");
        let card_network: Option<String> = try_col!(row, "card_network");
        let merchant_id: Option<String> = try_col!(row, "merchant_id");
        let card_last_4: Option<String> = try_col!(row, "card_last_4");
        let card_issuer: Option<String> = try_col!(row, "card_issuer");
        let error_reason: Option<String> = try_col!(row, "error_reason");
        let total: Option<bigdecimal::BigDecimal> = try_col!(row, "total");
        let count: Option<i64> = try_col!(row, "count");
        // NOTE: unlike the attempt-level rows, `first_attempt` is an i64
        // aggregate here, not a bool.
        let first_attempt: Option<i64> = try_col!(row, "first_attempt");
        // Removing millisecond precision to get accurate diffs against clickhouse
        let start_bucket: Option<PrimitiveDateTime> = row
            .try_get::<Option<PrimitiveDateTime>, _>("start_bucket")?
            .and_then(|dt| dt.replace_millisecond(0).ok());
        let end_bucket: Option<PrimitiveDateTime> = row
            .try_get::<Option<PrimitiveDateTime>, _>("end_bucket")?
            .and_then(|dt| dt.replace_millisecond(0).ok());
        Ok(Self {
            status,
            currency,
            profile_id,
            connector,
            authentication_type,
            payment_method,
            payment_method_type,
            card_network,
            merchant_id,
            card_last_4,
            card_issuer,
            error_reason,
            first_attempt,
            total,
            count,
            start_bucket,
            end_bucket,
        })
    }
}

impl<'a> FromRow<'a, PgRow> for super::payment_intents::filters::PaymentIntentFilterRow {
    fn from_row(row: &'a PgRow) -> sqlx::Result<Self> {
        let status: Option<DBEnumWrapper<IntentStatus>> = try_col!(row, "status");
        let currency: Option<DBEnumWrapper<Currency>> = try_col!(row, "currency");
        let profile_id: Option<String> = try_col!(row, "profile_id");
        let connector: Option<String> = try_col!(row, "connector");
        let authentication_type: Option<DBEnumWrapper<AuthenticationType>> =
            try_col!(row, "authentication_type");
        let payment_method: Option<String> = try_col!(row, "payment_method");
        let payment_method_type: Option<String> = try_col!(row, "payment_method_type");
        let card_network: Option<String> = try_col!(row, "card_network");
        let merchant_id: Option<String> = try_col!(row, "merchant_id");
        let card_last_4: Option<String> = try_col!(row, "card_last_4");
        let card_issuer: Option<String> = try_col!(row, "card_issuer");
        let error_reason: Option<String> = try_col!(row, "error_reason");
        let customer_id: Option<String> = try_col!(row, "customer_id");
        Ok(Self {
            status,
            currency,
            profile_id,
            connector,
            authentication_type,
            payment_method,
            payment_method_type,
            card_network,
            merchant_id,
            card_last_4,
            card_issuer,
            error_reason,
            customer_id,
        })
    }
}

impl<'a> FromRow<'a, PgRow> for super::refunds::filters::RefundFilterRow {
    fn from_row(row: &'a PgRow) -> sqlx::Result<Self> {
        let currency: Option<DBEnumWrapper<Currency>> = try_col!(row, "currency");
        let refund_status: Option<DBEnumWrapper<RefundStatus>> = try_col!(row, "refund_status");
        let connector: Option<String> = try_col!(row, "connector");
        let refund_type: Option<DBEnumWrapper<RefundType>> = try_col!(row, "refund_type");
        let profile_id: Option<String> = try_col!(row, "profile_id");
        let refund_reason: Option<String> = try_col!(row, "refund_reason");
        let refund_error_message: Option<String> = try_col!(row, "refund_error_message");
        Ok(Self {
            currency,
            refund_status,
            connector,
            refund_type,
            profile_id,
            refund_reason,
            refund_error_message,
        })
    }
}

impl<'a> FromRow<'a, PgRow> for
super::refunds::distribution::RefundDistributionRow {
    // All columns optional: the projection varies per query, so a missing
    // column decodes as `None` instead of failing the row.
    fn from_row(row: &'a PgRow) -> sqlx::Result<Self> {
        let currency: Option<DBEnumWrapper<Currency>> =
            row.try_get("currency").or_else(|e| match e {
                ColumnNotFound(_) => Ok(Default::default()),
                e => Err(e),
            })?;
        let refund_status: Option<DBEnumWrapper<RefundStatus>> =
            row.try_get("refund_status").or_else(|e| match e {
                ColumnNotFound(_) => Ok(Default::default()),
                e => Err(e),
            })?;
        let connector: Option<String> = row.try_get("connector").or_else(|e| match e {
            ColumnNotFound(_) => Ok(Default::default()),
            e => Err(e),
        })?;
        let refund_type: Option<DBEnumWrapper<RefundType>> =
            row.try_get("refund_type").or_else(|e| match e {
                ColumnNotFound(_) => Ok(Default::default()),
                e => Err(e),
            })?;
        let profile_id: Option<String> = row.try_get("profile_id").or_else(|e| match e {
            ColumnNotFound(_) => Ok(Default::default()),
            e => Err(e),
        })?;
        let total: Option<bigdecimal::BigDecimal> = row.try_get("total").or_else(|e| match e {
            ColumnNotFound(_) => Ok(Default::default()),
            e => Err(e),
        })?;
        let count: Option<i64> = row.try_get("count").or_else(|e| match e {
            ColumnNotFound(_) => Ok(Default::default()),
            e => Err(e),
        })?;
        let refund_reason: Option<String> = row.try_get("refund_reason").or_else(|e| match e {
            ColumnNotFound(_) => Ok(Default::default()),
            e => Err(e),
        })?;
        let refund_error_message: Option<String> =
            row.try_get("refund_error_message").or_else(|e| match e {
                ColumnNotFound(_) => Ok(Default::default()),
                e => Err(e),
            })?;
        // Removing millisecond precision to get accurate diffs against clickhouse
        let start_bucket: Option<PrimitiveDateTime> = row
            .try_get::<Option<PrimitiveDateTime>, _>("start_bucket")?
            .and_then(|dt| dt.replace_millisecond(0).ok());
        let end_bucket: Option<PrimitiveDateTime> = row
            .try_get::<Option<PrimitiveDateTime>, _>("end_bucket")?
            .and_then(|dt| dt.replace_millisecond(0).ok());
        Ok(Self {
            currency,
            refund_status,
            connector,
            refund_type,
            profile_id,
            total,
            count,
            refund_reason,
            refund_error_message,
            start_bucket,
            end_bucket,
        })
    }
}

/// Reads one column from a Postgres row, mapping a missing column
/// (`ColumnNotFound`) to the field's `Default` (`None` for these optional
/// analytics fields); any other sqlx error is propagated unchanged.
macro_rules! fetch_col {
    ($row:ident, $col:literal) => {
        $row.try_get($col).or_else(|e| match e {
            ColumnNotFound(_) => Ok(Default::default()),
            e => Err(e),
        })?
    };
}

impl<'a> FromRow<'a, PgRow> for super::frm::filters::FrmFilterRow {
    fn from_row(row: &'a PgRow) -> sqlx::Result<Self> {
        let frm_name: Option<String> = fetch_col!(row, "frm_name");
        let frm_status: Option<DBEnumWrapper<FraudCheckStatus>> = fetch_col!(row, "frm_status");
        let frm_transaction_type: Option<DBEnumWrapper<FrmTransactionType>> =
            fetch_col!(row, "frm_transaction_type");
        Ok(Self {
            frm_name,
            frm_status,
            frm_transaction_type,
        })
    }
}

impl<'a> FromRow<'a, PgRow> for super::disputes::filters::DisputeFilterRow {
    fn from_row(row: &'a PgRow) -> sqlx::Result<Self> {
        let dispute_stage: Option<String> = fetch_col!(row, "dispute_stage");
        let dispute_status: Option<String> = fetch_col!(row, "dispute_status");
        let connector: Option<String> = fetch_col!(row, "connector");
        let connector_status: Option<String> = fetch_col!(row, "connector_status");
        let currency: Option<DBEnumWrapper<Currency>> = fetch_col!(row, "currency");
        Ok(Self {
            dispute_stage,
            dispute_status,
            connector,
            connector_status,
            currency,
        })
    }
}

impl<'a> FromRow<'a, PgRow> for super::disputes::metrics::DisputeMetricRow {
    fn from_row(row: &'a PgRow) -> sqlx::Result<Self> {
        let dispute_stage: Option<DBEnumWrapper<DisputeStage>> =
            fetch_col!(row, "dispute_stage");
        let dispute_status: Option<DBEnumWrapper<DisputeStatus>> =
            fetch_col!(row, "dispute_status");
        let connector: Option<String> = fetch_col!(row, "connector");
        let currency: Option<DBEnumWrapper<Currency>> = fetch_col!(row, "currency");
        let total: Option<bigdecimal::BigDecimal> = fetch_col!(row, "total");
        let count: Option<i64> = fetch_col!(row, "count");
        // Removing millisecond precision to get accurate diffs against clickhouse
        let start_bucket: Option<PrimitiveDateTime> = row
            .try_get::<Option<PrimitiveDateTime>, _>("start_bucket")?
            .and_then(|dt| dt.replace_millisecond(0).ok());
        let end_bucket: Option<PrimitiveDateTime> = row
            .try_get::<Option<PrimitiveDateTime>, _>("end_bucket")?
            .and_then(|dt| dt.replace_millisecond(0).ok());
        Ok(Self {
            dispute_stage,
            dispute_status,
            connector,
            currency,
            total,
            count,
            start_bucket,
            end_bucket,
        })
    }
}

impl ToSql<SqlxClient> for PrimitiveDateTime {
    // Timestamps are embedded as their `Display` form in generated SQL.
    fn to_sql(&self, _table_engine: &TableEngine) -> error_stack::Result<String, ParsingError> {
        Ok(self.to_string())
    }
}

impl ToSql<SqlxClient> for AnalyticsCollection {
    /// Maps a logical collection to its Postgres table name; collections that
    /// exist only in Clickhouse yield an `UnknownError` report.
    fn to_sql(&self, _table_engine: &TableEngine) -> error_stack::Result<String, ParsingError> {
        // Shared error constructor for collections with no sqlx backing.
        // The produced message is identical to the previous inline arms.
        fn unimplemented_table(name: &str) -> error_stack::Result<String, ParsingError> {
            Err(error_stack::report!(ParsingError::UnknownError)
                .attach_printable(format!("{name} table is not implemented for Sqlx")))
        }
        match self {
            Self::Payment => Ok("payment_attempt".to_string()),
            Self::PaymentSessionized => unimplemented_table("PaymentSessionized"),
            Self::Refund => Ok("refund".to_string()),
            Self::RefundSessionized => unimplemented_table("RefundSessionized"),
            // NOTE(review): the names in these two messages look swapped
            // relative to the variants, but they are preserved verbatim.
            Self::SdkEvents => unimplemented_table("SdkEventsAudit"),
            Self::SdkEventsAnalytics => unimplemented_table("SdkEvents"),
            Self::ApiEvents => unimplemented_table("ApiEvents"),
            Self::FraudCheck => Ok("fraud_check".to_string()),
            Self::PaymentIntent => Ok("payment_intent".to_string()),
            Self::PaymentIntentSessionized => unimplemented_table("PaymentIntentSessionized"),
            Self::ConnectorEvents => unimplemented_table("ConnectorEvents"),
            Self::ApiEventsAnalytics => unimplemented_table("ApiEvents"),
            Self::ActivePaymentsAnalytics => unimplemented_table("ActivePaymentsAnalytics"),
            Self::OutgoingWebhookEvent => unimplemented_table("OutgoingWebhookEvents"),
            Self::Dispute => Ok("dispute".to_string()),
            Self::DisputeSessionized => unimplemented_table("DisputeSessionized"),
            Self::Authentications => unimplemented_table("Authentications"),
            Self::RoutingEvents => unimplemented_table("RoutingEvents"),
        }
    }
}

impl<T> ToSql<SqlxClient> for Aggregate<T>
where
    T: ToSql<SqlxClient>,
{
    /// Renders an aggregate expression, appending ` as <alias>` when an
    /// alias is present.
    fn to_sql(&self, table_engine: &TableEngine) -> error_stack::Result<String, ParsingError> {
        Ok(match self {
            Self::Count { field: _, alias } => {
                format!(
                    "count(*){}",
                    alias.map_or_else(String::new, |a| format!(" as {a}"))
                )
            }
            Self::Sum { field, alias } => {
                format!(
                    "sum({}){}",
                    field
                        .to_sql(table_engine)
                        .attach_printable("Failed to sum aggregate")?,
                    alias.map_or_else(String::new, |a| format!(" as {a}"))
                )
            }
            Self::Min { field, alias } => {
                format!(
                    "min({}){}",
                    field
                        .to_sql(table_engine)
                        .attach_printable("Failed to min aggregate")?,
                    alias.map_or_else(String::new, |a| format!(" as {a}"))
                )
            }
            Self::Max { field, alias } => {
                format!(
                    "max({}){}",
                    field
                        .to_sql(table_engine)
                        .attach_printable("Failed to max aggregate")?,
                    alias.map_or_else(String::new, |a| format!(" as {a}"))
                )
            }
            Self::Percentile {
                field,
                alias,
                percentile,
            } => {
                // Percentile is rendered as a fraction: `0.<percentile>`,
                // defaulting to the median (0.50) when unset.
                format!(
                    "percentile_cont(0.{}) within group (order by {} asc){}",
                    percentile.map_or_else(|| "50".to_owned(), |p| p.to_string()),
                    field
                        .to_sql(table_engine)
                        .attach_printable("Failed to percentile aggregate")?,
                    alias.map_or_else(String::new, |a| format!(" as {a}"))
                )
            }
            Self::DistinctCount { field, alias } => {
                format!(
                    "count(distinct {}){}",
                    field
                        .to_sql(table_engine)
                        .attach_printable("Failed to distinct count aggregate")?,
                    alias.map_or_else(String::new, |a| format!(" as {a}"))
                )
            }
        })
    }
}

impl<T> ToSql<SqlxClient> for Window<T>
where
    T: ToSql<SqlxClient>,
{
    /// Renders a window expression (`... over (partition by ... order by ...)`),
    /// with each clause emitted only when configured.
    fn to_sql(&self, table_engine: &TableEngine) -> error_stack::Result<String, ParsingError> {
        Ok(match self {
            Self::Sum {
                field,
                partition_by,
                order_by,
                alias,
            } => {
                format!(
                    "sum({}) over ({}{}){}",
                    field
                        .to_sql(table_engine)
                        .attach_printable("Failed to sum window")?,
                    partition_by.as_ref().map_or_else(String::new, |p| format!(
                        "partition by {}",
                        p
                    )),
                    order_by.as_ref().map_or_else(
                        String::new,
                        |(order_column, order)| format!(" order by {} {}", order_column, order)
                    ),
                    alias.map_or_else(String::new, |a| format!(" as {a}"))
                )
            }
            Self::RowNumber {
                field: _,
                partition_by,
                order_by,
                alias,
            } => {
                format!(
                    "row_number() over ({}{}){}",
                    partition_by.as_ref().map_or_else(String::new, |p| format!(
                        "partition by {}",
                        p
                    )),
                    order_by.as_ref().map_or_else(
                        String::new,
                        |(order_column, order)| format!(" order by {} {}", order_column, order)
                    ),
                    alias.map_or_else(String::new, |a| format!(" as {a}"))
                )
            }
        })
    }
}
crates/analytics/src/sqlx.rs
analytics::src::sqlx
14,148
true
// File: crates/analytics/src/outgoing_webhook_event.rs // Module: analytics::src::outgoing_webhook_event mod core; pub mod events; pub trait OutgoingWebhookEventAnalytics: events::OutgoingWebhookLogsFilterAnalytics {} pub use self::core::outgoing_webhook_events_core;
crates/analytics/src/outgoing_webhook_event.rs
analytics::src::outgoing_webhook_event
66
true
// File: crates/analytics/src/types.rs
// Module: analytics::src::types

use std::{fmt::Display, str::FromStr};

use common_utils::{
    errors::{CustomResult, ErrorSwitch, ParsingError},
    events::{ApiEventMetric, ApiEventsType},
    impl_api_event_type,
};
use error_stack::{report, Report, ResultExt};

use super::query::QueryBuildingError;
use crate::errors::AnalyticsError;

/// Top-level analytics domains exposed by this crate (used in request
/// routing and (de)serialized in snake_case).
#[derive(serde::Deserialize, Debug, serde::Serialize)]
#[serde(rename_all = "snake_case")]
pub enum AnalyticsDomain {
    Payments,
    Refunds,
    Frm,
    PaymentIntents,
    AuthEvents,
    SdkEvents,
    ApiEvents,
    Dispute,
    Routing,
}

/// Logical tables queried by the analytics backends. Each backend maps a
/// variant to a concrete table name (or rejects it) via its `ToSql` impl;
/// not every variant is available on every backend.
#[derive(Debug, strum::AsRefStr, strum::Display, Clone, Copy)]
pub enum AnalyticsCollection {
    Payment,
    PaymentSessionized,
    Refund,
    RefundSessionized,
    FraudCheck,
    SdkEvents,
    SdkEventsAnalytics,
    ApiEvents,
    PaymentIntent,
    PaymentIntentSessionized,
    ConnectorEvents,
    OutgoingWebhookEvent,
    Authentications,
    Dispute,
    DisputeSessionized,
    ApiEventsAnalytics,
    ActivePaymentsAnalytics,
    RoutingEvents,
}

/// Table engine used when generating queries; `CollapsingMergeTree`
/// carries the name of its sign column. (Engine names follow Clickhouse
/// terminology; `BasicTree` is the default — see `get_table_engine`.)
#[allow(dead_code)]
#[derive(Debug)]
pub enum TableEngine {
    CollapsingMergeTree { sign: &'static str },
    BasicTree,
}

/// Transparent wrapper that (de)serializes database enum columns through
/// the inner type's `FromStr`/`Display` implementations.
#[derive(Debug, serde::Serialize, serde::Deserialize, Eq, PartialEq, Hash)]
#[serde(transparent)]
pub struct DBEnumWrapper<T: FromStr + Display>(pub T);

impl<T: FromStr + Display> AsRef<T> for DBEnumWrapper<T> {
    fn as_ref(&self) -> &T {
        &self.0
    }
}

impl<T: FromStr + Display + Default> Default for DBEnumWrapper<T> {
    fn default() -> Self {
        Self(T::default())
    }
}

impl<T> FromStr for DBEnumWrapper<T>
where
    T: FromStr + Display,
{
    type Err = Report<ParsingError>;

    // Parse failures report the enum's type name and attach the raw input
    // value for diagnostics.
    fn from_str(s: &str) -> Result<Self, Self::Err> {
        T::from_str(s)
            .map_err(|_er| report!(ParsingError::EnumParseFailure(std::any::type_name::<T>())))
            .map(DBEnumWrapper)
            .attach_printable_lazy(|| format!("raw_value: {s}"))
    }
}

/// A queryable analytics backend (e.g. Postgres via sqlx, Clickhouse).
#[async_trait::async_trait]
pub trait AnalyticsDataSource
where
    Self: Sized + Sync + Send,
{
    /// The backend's raw row type (e.g. a DB row or a JSON value).
    type Row;

    /// Runs `query` and decodes every row into `T`.
    async fn load_results<T>(&self, query: &str) -> CustomResult<Vec<T>, QueryExecutionError>
    where
        Self: LoadRow<T>;

    /// Table engine for a collection; backends override this per table.
    fn get_table_engine(_table: AnalyticsCollection) -> TableEngine {
        TableEngine::BasicTree
    }
}

/// Decoding of one raw backend row into a typed result row `T`.
pub trait LoadRow<T>
where
    Self: AnalyticsDataSource,
    T: Sized,
{
    fn load_row(row: Self::Row) -> CustomResult<T, QueryExecutionError>;
}

/// Errors surfaced by metric computations.
#[derive(thiserror::Error, Debug)]
pub enum MetricsError {
    #[error("Error building query")]
    QueryBuildingError,
    #[error("Error running Query")]
    QueryExecutionFailure,
    #[error("Error processing query results")]
    PostProcessingFailure,
    #[allow(dead_code)]
    #[error("Not Implemented")]
    NotImplemented,
}

/// Errors surfaced while executing a query against a backend.
#[derive(Debug, thiserror::Error)]
pub enum QueryExecutionError {
    #[error("Failed to extract domain rows")]
    RowExtractionFailure,
    #[error("Database error")]
    DatabaseError,
}

pub type MetricsResult<T> = CustomResult<T, MetricsError>;

impl ErrorSwitch<MetricsError> for QueryBuildingError {
    fn switch(&self) -> MetricsError {
        MetricsError::QueryBuildingError
    }
}

pub type FiltersResult<T> = CustomResult<T, FiltersError>;

/// Errors surfaced by filter queries.
#[derive(thiserror::Error, Debug)]
pub enum FiltersError {
    #[error("Error building query")]
    QueryBuildingError,
    #[error("Error running Query")]
    QueryExecutionFailure,
    #[allow(dead_code)]
    #[error("Not Implemented: {0}")]
    NotImplemented(&'static str),
}

impl ErrorSwitch<FiltersError> for QueryBuildingError {
    fn switch(&self) -> FiltersError {
        FiltersError::QueryBuildingError
    }
}

impl ErrorSwitch<AnalyticsError> for FiltersError {
    // Only `NotImplemented` keeps its detail when crossing into the
    // API-level error; build/execution failures collapse to UnknownError.
    fn switch(&self) -> AnalyticsError {
        match self {
            Self::QueryBuildingError | Self::QueryExecutionFailure => AnalyticsError::UnknownError,
            Self::NotImplemented(a) => AnalyticsError::NotImplemented(a),
        }
    }
}

impl_api_event_type!(Miscellaneous, (AnalyticsDomain));
crates/analytics/src/types.rs
analytics::src::types
994
true
// File: crates/analytics/src/refunds.rs // Module: analytics::src::refunds pub mod accumulator; mod core; pub mod distribution; pub mod filters; pub mod metrics; pub mod types; pub use accumulator::{RefundMetricAccumulator, RefundMetricsAccumulator}; pub use self::core::{get_filters, get_metrics};
crates/analytics/src/refunds.rs
analytics::src::refunds
74
true
// File: crates/analytics/src/lambda_utils.rs // Module: analytics::src::lambda_utils use aws_config::{self, meta::region::RegionProviderChain, Region}; use aws_sdk_lambda::{types::InvocationType::Event, Client}; use aws_smithy_types::Blob; use common_utils::errors::CustomResult; use error_stack::{report, ResultExt}; use crate::errors::AnalyticsError; async fn get_aws_client(region: String) -> Client { let region_provider = RegionProviderChain::first_try(Region::new(region)); let sdk_config = aws_config::from_env().region(region_provider).load().await; Client::new(&sdk_config) } pub async fn invoke_lambda( function_name: &str, region: &str, json_bytes: &[u8], ) -> CustomResult<(), AnalyticsError> { get_aws_client(region.to_string()) .await .invoke() .function_name(function_name) .invocation_type(Event) .payload(Blob::new(json_bytes.to_owned())) .send() .await .map_err(|er| { let er_rep = format!("{er:?}"); report!(er).attach_printable(er_rep) }) .change_context(AnalyticsError::UnknownError) .attach_printable("Lambda invocation failed")?; Ok(()) }
crates/analytics/src/lambda_utils.rs
analytics::src::lambda_utils
289
true
// File: crates/analytics/src/payments.rs // Module: analytics::src::payments pub mod accumulator; mod core; pub mod distribution; pub mod filters; pub mod metrics; pub mod types; pub use accumulator::{ PaymentDistributionAccumulator, PaymentMetricAccumulator, PaymentMetricsAccumulator, }; pub trait PaymentAnalytics: metrics::PaymentMetricAnalytics + filters::PaymentFilterAnalytics { } pub use self::core::{get_filters, get_metrics};
crates/analytics/src/payments.rs
analytics::src::payments
99
true
// File: crates/analytics/src/clickhouse.rs // Module: analytics::src::clickhouse use std::sync::Arc; use actix_web::http::StatusCode; use common_utils::errors::ParsingError; use error_stack::{report, Report, ResultExt}; use router_env::logger; use time::PrimitiveDateTime; use super::{ active_payments::metrics::ActivePaymentsMetricRow, auth_events::metrics::AuthEventMetricRow, frm::{filters::FrmFilterRow, metrics::FrmMetricRow}, health_check::HealthCheck, payment_intents::{filters::PaymentIntentFilterRow, metrics::PaymentIntentMetricRow}, payments::{ distribution::PaymentDistributionRow, filters::PaymentFilterRow, metrics::PaymentMetricRow, }, query::{Aggregate, ToSql, Window}, refunds::{ distribution::RefundDistributionRow, filters::RefundFilterRow, metrics::RefundMetricRow, }, sdk_events::{filters::SdkEventFilter, metrics::SdkEventMetricRow}, types::{AnalyticsCollection, AnalyticsDataSource, LoadRow, QueryExecutionError}, }; use crate::{ api_event::{ events::ApiLogsResult, filters::ApiEventFilter, metrics::{latency::LatencyAvg, ApiEventMetricRow}, }, auth_events::filters::AuthEventFilterRow, connector_events::events::ConnectorEventsResult, disputes::{filters::DisputeFilterRow, metrics::DisputeMetricRow}, outgoing_webhook_event::events::OutgoingWebhookLogsResult, routing_events::events::RoutingEventsResult, sdk_events::events::SdkEventsResult, types::TableEngine, }; pub type ClickhouseResult<T> = error_stack::Result<T, ClickhouseError>; #[derive(Clone, Debug)] pub struct ClickhouseClient { pub config: Arc<ClickhouseConfig>, pub database: String, } #[derive(Clone, Debug, serde::Deserialize)] pub struct ClickhouseConfig { username: String, password: Option<String>, host: String, } impl Default for ClickhouseConfig { fn default() -> Self { Self { username: "default".to_string(), password: None, host: "http://localhost:8123".to_string(), } } } impl ClickhouseClient { async fn execute_query(&self, query: &str) -> ClickhouseResult<Vec<serde_json::Value>> { 
logger::debug!("Executing query: {query}"); let client = reqwest::Client::new(); let params = CkhQuery { date_time_output_format: String::from("iso"), output_format_json_quote_64bit_integers: 0, database: self.database.clone(), }; let response = client .post(&self.config.host) .query(&params) .basic_auth(self.config.username.clone(), self.config.password.clone()) .body(format!("{query}\nFORMAT JSON")) .send() .await .change_context(ClickhouseError::ConnectionError)?; logger::debug!(clickhouse_response=?response, query=?query, "Clickhouse response"); if response.status() != StatusCode::OK { response.text().await.map_or_else( |er| { Err(ClickhouseError::ResponseError) .attach_printable_lazy(|| format!("Error: {er:?}")) }, |t| Err(report!(ClickhouseError::ResponseNotOK(t))), ) } else { Ok(response .json::<CkhOutput<serde_json::Value>>() .await .change_context(ClickhouseError::ResponseError)? .data) } } } #[async_trait::async_trait] impl HealthCheck for ClickhouseClient { async fn deep_health_check( &self, ) -> common_utils::errors::CustomResult<(), QueryExecutionError> { self.execute_query("SELECT 1") .await .map(|_| ()) .change_context(QueryExecutionError::DatabaseError) } } #[async_trait::async_trait] impl AnalyticsDataSource for ClickhouseClient { type Row = serde_json::Value; async fn load_results<T>( &self, query: &str, ) -> common_utils::errors::CustomResult<Vec<T>, QueryExecutionError> where Self: LoadRow<T>, { self.execute_query(query) .await .change_context(QueryExecutionError::DatabaseError)? 
.into_iter() .map(Self::load_row) .collect::<Result<Vec<_>, _>>() .change_context(QueryExecutionError::RowExtractionFailure) } fn get_table_engine(table: AnalyticsCollection) -> TableEngine { match table { AnalyticsCollection::Payment | AnalyticsCollection::PaymentSessionized | AnalyticsCollection::Refund | AnalyticsCollection::RefundSessionized | AnalyticsCollection::FraudCheck | AnalyticsCollection::PaymentIntent | AnalyticsCollection::PaymentIntentSessionized | AnalyticsCollection::Authentications | AnalyticsCollection::Dispute => { TableEngine::CollapsingMergeTree { sign: "sign_flag" } } AnalyticsCollection::DisputeSessionized => { TableEngine::CollapsingMergeTree { sign: "sign_flag" } } AnalyticsCollection::SdkEvents | AnalyticsCollection::SdkEventsAnalytics | AnalyticsCollection::ApiEvents | AnalyticsCollection::ConnectorEvents | AnalyticsCollection::RoutingEvents | AnalyticsCollection::ApiEventsAnalytics | AnalyticsCollection::OutgoingWebhookEvent | AnalyticsCollection::ActivePaymentsAnalytics => TableEngine::BasicTree, } } } impl<T, E> LoadRow<T> for ClickhouseClient where Self::Row: TryInto<T, Error = Report<E>>, { fn load_row(row: Self::Row) -> common_utils::errors::CustomResult<T, QueryExecutionError> { row.try_into() .map_err(|error| error.change_context(QueryExecutionError::RowExtractionFailure)) } } impl super::payments::filters::PaymentFilterAnalytics for ClickhouseClient {} impl super::payments::metrics::PaymentMetricAnalytics for ClickhouseClient {} impl super::payments::distribution::PaymentDistributionAnalytics for ClickhouseClient {} impl super::payment_intents::filters::PaymentIntentFilterAnalytics for ClickhouseClient {} impl super::payment_intents::metrics::PaymentIntentMetricAnalytics for ClickhouseClient {} impl super::refunds::metrics::RefundMetricAnalytics for ClickhouseClient {} impl super::refunds::filters::RefundFilterAnalytics for ClickhouseClient {} impl super::refunds::distribution::RefundDistributionAnalytics for ClickhouseClient 
{} impl super::frm::metrics::FrmMetricAnalytics for ClickhouseClient {} impl super::frm::filters::FrmFilterAnalytics for ClickhouseClient {} impl super::sdk_events::filters::SdkEventFilterAnalytics for ClickhouseClient {} impl super::sdk_events::metrics::SdkEventMetricAnalytics for ClickhouseClient {} impl super::sdk_events::events::SdkEventsFilterAnalytics for ClickhouseClient {} impl super::active_payments::metrics::ActivePaymentsMetricAnalytics for ClickhouseClient {} impl super::auth_events::metrics::AuthEventMetricAnalytics for ClickhouseClient {} impl super::auth_events::filters::AuthEventFilterAnalytics for ClickhouseClient {} impl super::api_event::events::ApiLogsFilterAnalytics for ClickhouseClient {} impl super::api_event::filters::ApiEventFilterAnalytics for ClickhouseClient {} impl super::api_event::metrics::ApiEventMetricAnalytics for ClickhouseClient {} impl super::connector_events::events::ConnectorEventLogAnalytics for ClickhouseClient {} impl super::routing_events::events::RoutingEventLogAnalytics for ClickhouseClient {} impl super::outgoing_webhook_event::events::OutgoingWebhookLogsFilterAnalytics for ClickhouseClient { } impl super::disputes::filters::DisputeFilterAnalytics for ClickhouseClient {} impl super::disputes::metrics::DisputeMetricAnalytics for ClickhouseClient {} #[derive(Debug, serde::Serialize)] struct CkhQuery { date_time_output_format: String, output_format_json_quote_64bit_integers: u8, database: String, } #[derive(Debug, serde::Deserialize)] struct CkhOutput<T> { data: Vec<T>, } impl TryInto<ApiLogsResult> for serde_json::Value { type Error = Report<ParsingError>; fn try_into(self) -> Result<ApiLogsResult, Self::Error> { serde_json::from_value(self).change_context(ParsingError::StructParseFailure( "Failed to parse ApiLogsResult in clickhouse results", )) } } impl TryInto<SdkEventsResult> for serde_json::Value { type Error = Report<ParsingError>; fn try_into(self) -> Result<SdkEventsResult, Self::Error> { 
serde_json::from_value(self).change_context(ParsingError::StructParseFailure( "Failed to parse SdkEventsResult in clickhouse results", )) } } impl TryInto<ConnectorEventsResult> for serde_json::Value { type Error = Report<ParsingError>; fn try_into(self) -> Result<ConnectorEventsResult, Self::Error> { serde_json::from_value(self).change_context(ParsingError::StructParseFailure( "Failed to parse ConnectorEventsResult in clickhouse results", )) } } impl TryInto<RoutingEventsResult> for serde_json::Value { type Error = Report<ParsingError>; fn try_into(self) -> Result<RoutingEventsResult, Self::Error> { serde_json::from_value(self).change_context(ParsingError::StructParseFailure( "Failed to parse RoutingEventsResult in clickhouse results", )) } } impl TryInto<PaymentMetricRow> for serde_json::Value { type Error = Report<ParsingError>; fn try_into(self) -> Result<PaymentMetricRow, Self::Error> { serde_json::from_value(self).change_context(ParsingError::StructParseFailure( "Failed to parse PaymentMetricRow in clickhouse results", )) } } impl TryInto<PaymentDistributionRow> for serde_json::Value { type Error = Report<ParsingError>; fn try_into(self) -> Result<PaymentDistributionRow, Self::Error> { serde_json::from_value(self).change_context(ParsingError::StructParseFailure( "Failed to parse PaymentDistributionRow in clickhouse results", )) } } impl TryInto<PaymentFilterRow> for serde_json::Value { type Error = Report<ParsingError>; fn try_into(self) -> Result<PaymentFilterRow, Self::Error> { serde_json::from_value(self).change_context(ParsingError::StructParseFailure( "Failed to parse FilterRow in clickhouse results", )) } } impl TryInto<PaymentIntentMetricRow> for serde_json::Value { type Error = Report<ParsingError>; fn try_into(self) -> Result<PaymentIntentMetricRow, Self::Error> { serde_json::from_value(self).change_context(ParsingError::StructParseFailure( "Failed to parse PaymentIntentMetricRow in clickhouse results", )) } } impl TryInto<PaymentIntentFilterRow> for 
serde_json::Value { type Error = Report<ParsingError>; fn try_into(self) -> Result<PaymentIntentFilterRow, Self::Error> { serde_json::from_value(self).change_context(ParsingError::StructParseFailure( "Failed to parse PaymentIntentFilterRow in clickhouse results", )) } } impl TryInto<RefundMetricRow> for serde_json::Value { type Error = Report<ParsingError>; fn try_into(self) -> Result<RefundMetricRow, Self::Error> { serde_json::from_value(self).change_context(ParsingError::StructParseFailure( "Failed to parse RefundMetricRow in clickhouse results", )) } } impl TryInto<RefundFilterRow> for serde_json::Value { type Error = Report<ParsingError>; fn try_into(self) -> Result<RefundFilterRow, Self::Error> { serde_json::from_value(self).change_context(ParsingError::StructParseFailure( "Failed to parse RefundFilterRow in clickhouse results", )) } } impl TryInto<RefundDistributionRow> for serde_json::Value { type Error = Report<ParsingError>; fn try_into(self) -> Result<RefundDistributionRow, Self::Error> { serde_json::from_value(self).change_context(ParsingError::StructParseFailure( "Failed to parse RefundDistributionRow in clickhouse results", )) } } impl TryInto<FrmMetricRow> for serde_json::Value { type Error = Report<ParsingError>; fn try_into(self) -> Result<FrmMetricRow, Self::Error> { serde_json::from_value(self).change_context(ParsingError::StructParseFailure( "Failed to parse FrmMetricRow in clickhouse results", )) } } impl TryInto<FrmFilterRow> for serde_json::Value { type Error = Report<ParsingError>; fn try_into(self) -> Result<FrmFilterRow, Self::Error> { serde_json::from_value(self).change_context(ParsingError::StructParseFailure( "Failed to parse FrmFilterRow in clickhouse results", )) } } impl TryInto<DisputeMetricRow> for serde_json::Value { type Error = Report<ParsingError>; fn try_into(self) -> Result<DisputeMetricRow, Self::Error> { serde_json::from_value(self).change_context(ParsingError::StructParseFailure( "Failed to parse DisputeMetricRow in 
clickhouse results", )) } } impl TryInto<DisputeFilterRow> for serde_json::Value { type Error = Report<ParsingError>; fn try_into(self) -> Result<DisputeFilterRow, Self::Error> { serde_json::from_value(self).change_context(ParsingError::StructParseFailure( "Failed to parse DisputeFilterRow in clickhouse results", )) } } impl TryInto<ApiEventMetricRow> for serde_json::Value { type Error = Report<ParsingError>; fn try_into(self) -> Result<ApiEventMetricRow, Self::Error> { serde_json::from_value(self).change_context(ParsingError::StructParseFailure( "Failed to parse ApiEventMetricRow in clickhouse results", )) } } impl TryInto<LatencyAvg> for serde_json::Value { type Error = Report<ParsingError>; fn try_into(self) -> Result<LatencyAvg, Self::Error> { serde_json::from_value(self).change_context(ParsingError::StructParseFailure( "Failed to parse LatencyAvg in clickhouse results", )) } } impl TryInto<SdkEventMetricRow> for serde_json::Value { type Error = Report<ParsingError>; fn try_into(self) -> Result<SdkEventMetricRow, Self::Error> { serde_json::from_value(self).change_context(ParsingError::StructParseFailure( "Failed to parse SdkEventMetricRow in clickhouse results", )) } } impl TryInto<SdkEventFilter> for serde_json::Value { type Error = Report<ParsingError>; fn try_into(self) -> Result<SdkEventFilter, Self::Error> { serde_json::from_value(self).change_context(ParsingError::StructParseFailure( "Failed to parse SdkEventFilter in clickhouse results", )) } } impl TryInto<AuthEventMetricRow> for serde_json::Value { type Error = Report<ParsingError>; fn try_into(self) -> Result<AuthEventMetricRow, Self::Error> { serde_json::from_value(self).change_context(ParsingError::StructParseFailure( "Failed to parse AuthEventMetricRow in clickhouse results", )) } } impl TryInto<AuthEventFilterRow> for serde_json::Value { type Error = Report<ParsingError>; fn try_into(self) -> Result<AuthEventFilterRow, Self::Error> { 
serde_json::from_value(self).change_context(ParsingError::StructParseFailure( "Failed to parse AuthEventFilterRow in clickhouse results", )) } } impl TryInto<ApiEventFilter> for serde_json::Value { type Error = Report<ParsingError>; fn try_into(self) -> Result<ApiEventFilter, Self::Error> { serde_json::from_value(self).change_context(ParsingError::StructParseFailure( "Failed to parse ApiEventFilter in clickhouse results", )) } } impl TryInto<OutgoingWebhookLogsResult> for serde_json::Value { type Error = Report<ParsingError>; fn try_into(self) -> Result<OutgoingWebhookLogsResult, Self::Error> { serde_json::from_value(self).change_context(ParsingError::StructParseFailure( "Failed to parse OutgoingWebhookLogsResult in clickhouse results", )) } } impl TryInto<ActivePaymentsMetricRow> for serde_json::Value { type Error = Report<ParsingError>; fn try_into(self) -> Result<ActivePaymentsMetricRow, Self::Error> { serde_json::from_value(self).change_context(ParsingError::StructParseFailure( "Failed to parse ActivePaymentsMetricRow in clickhouse results", )) } } impl ToSql<ClickhouseClient> for PrimitiveDateTime { fn to_sql(&self, _table_engine: &TableEngine) -> error_stack::Result<String, ParsingError> { Ok(self.assume_utc().unix_timestamp().to_string()) } } impl ToSql<ClickhouseClient> for AnalyticsCollection { fn to_sql(&self, _table_engine: &TableEngine) -> error_stack::Result<String, ParsingError> { match self { Self::Payment => Ok("payment_attempts".to_string()), Self::PaymentSessionized => Ok("sessionizer_payment_attempts".to_string()), Self::Refund => Ok("refunds".to_string()), Self::RefundSessionized => Ok("sessionizer_refunds".to_string()), Self::FraudCheck => Ok("fraud_check".to_string()), Self::SdkEvents => Ok("sdk_events_audit".to_string()), Self::SdkEventsAnalytics => Ok("sdk_events".to_string()), Self::ApiEvents => Ok("api_events_audit".to_string()), Self::ApiEventsAnalytics => Ok("api_events".to_string()), Self::PaymentIntent => 
Ok("payment_intents".to_string()), Self::PaymentIntentSessionized => Ok("sessionizer_payment_intents".to_string()), Self::ConnectorEvents => Ok("connector_events_audit".to_string()), Self::OutgoingWebhookEvent => Ok("outgoing_webhook_events_audit".to_string()), Self::Dispute => Ok("dispute".to_string()), Self::DisputeSessionized => Ok("sessionizer_dispute".to_string()), Self::ActivePaymentsAnalytics => Ok("active_payments".to_string()), Self::Authentications => Ok("authentications".to_string()), Self::RoutingEvents => Ok("routing_events_audit".to_string()), } } } impl<T> ToSql<ClickhouseClient> for Aggregate<T> where T: ToSql<ClickhouseClient>, { fn to_sql(&self, table_engine: &TableEngine) -> error_stack::Result<String, ParsingError> { Ok(match self { Self::Count { field: _, alias } => { let query = match table_engine { TableEngine::CollapsingMergeTree { sign } => format!("sum({sign})"), TableEngine::BasicTree => "count(*)".to_string(), }; format!( "{query}{}", alias.map_or_else(|| "".to_owned(), |alias| format!(" as {alias}")) ) } Self::Sum { field, alias } => { let query = match table_engine { TableEngine::CollapsingMergeTree { sign } => format!( "sum({sign} * {})", field .to_sql(table_engine) .attach_printable("Failed to sum aggregate")? ), TableEngine::BasicTree => format!( "sum({})", field .to_sql(table_engine) .attach_printable("Failed to sum aggregate")? 
), }; format!( "{query}{}", alias.map_or_else(|| "".to_owned(), |alias| format!(" as {alias}")) ) } Self::Min { field, alias } => { format!( "min({}){}", field .to_sql(table_engine) .attach_printable("Failed to min aggregate")?, alias.map_or_else(|| "".to_owned(), |alias| format!(" as {alias}")) ) } Self::Max { field, alias } => { format!( "max({}){}", field .to_sql(table_engine) .attach_printable("Failed to max aggregate")?, alias.map_or_else(|| "".to_owned(), |alias| format!(" as {alias}")) ) } Self::Percentile { field, alias, percentile, } => { format!( "quantilesExact(0.{})({})[1]{}", percentile.map_or_else(|| "50".to_owned(), |percentile| percentile.to_string()), field .to_sql(table_engine) .attach_printable("Failed to percentile aggregate")?, alias.map_or_else(|| "".to_owned(), |alias| format!(" as {alias}")) ) } Self::DistinctCount { field, alias } => { format!( "count(distinct {}){}", field .to_sql(table_engine) .attach_printable("Failed to percentile aggregate")?, alias.map_or_else(|| "".to_owned(), |alias| format!(" as {alias}")) ) } }) } } impl<T> ToSql<ClickhouseClient> for Window<T> where T: ToSql<ClickhouseClient>, { fn to_sql(&self, table_engine: &TableEngine) -> error_stack::Result<String, ParsingError> { Ok(match self { Self::Sum { field, partition_by, order_by, alias, } => { format!( "sum({}) over ({}{}){}", field .to_sql(table_engine) .attach_printable("Failed to sum window")?, partition_by.as_ref().map_or_else( || "".to_owned(), |partition_by| format!("partition by {}", partition_by.to_owned()) ), order_by.as_ref().map_or_else( || "".to_owned(), |(order_column, order)| format!( " order by {} {}", order_column.to_owned(), order ) ), alias.map_or_else(|| "".to_owned(), |alias| format!(" as {alias}")) ) } Self::RowNumber { field: _, partition_by, order_by, alias, } => { format!( "row_number() over ({}{}){}", partition_by.as_ref().map_or_else( || "".to_owned(), |partition_by| format!("partition by {}", partition_by.to_owned()) ), 
order_by.as_ref().map_or_else( || "".to_owned(), |(order_column, order)| format!( " order by {} {}", order_column.to_owned(), order ) ), alias.map_or_else(|| "".to_owned(), |alias| format!(" as {alias}")) ) } }) } } #[derive(Debug, thiserror::Error)] pub enum ClickhouseError { #[error("Clickhouse connection error")] ConnectionError, #[error("Clickhouse NON-200 response content: '{0}'")] ResponseNotOK(String), #[error("Clickhouse response error")] ResponseError, }
crates/analytics/src/clickhouse.rs
analytics::src::clickhouse
5,163
true
// File: crates/analytics/src/lib.rs // Module: analytics::src::lib pub mod active_payments; pub mod api_event; pub mod auth_events; mod clickhouse; pub mod connector_events; pub mod core; pub mod disputes; pub mod enums; pub mod errors; pub mod frm; pub mod health_check; pub mod metrics; pub mod opensearch; pub mod outgoing_webhook_event; pub mod payment_intents; pub mod payments; mod query; pub mod refunds; pub mod routing_events; pub mod sdk_events; pub mod search; mod sqlx; mod types; use api_event::metrics::{ApiEventMetric, ApiEventMetricRow}; use common_utils::{errors::CustomResult, types::TenantConfig}; use disputes::metrics::{DisputeMetric, DisputeMetricRow}; use enums::AuthInfo; use hyperswitch_interfaces::secrets_interface::{ secret_handler::SecretsHandler, secret_state::{RawSecret, SecretStateContainer, SecuredSecret}, SecretManagementInterface, SecretsManagementError, }; use refunds::distribution::{RefundDistribution, RefundDistributionRow}; pub use types::AnalyticsDomain; pub mod lambda_utils; pub mod utils; use std::{collections::HashSet, sync::Arc}; use api_models::analytics::{ active_payments::{ActivePaymentsMetrics, ActivePaymentsMetricsBucketIdentifier}, api_event::{ ApiEventDimensions, ApiEventFilters, ApiEventMetrics, ApiEventMetricsBucketIdentifier, }, auth_events::{ AuthEventDimensions, AuthEventFilters, AuthEventMetrics, AuthEventMetricsBucketIdentifier, }, disputes::{DisputeDimensions, DisputeFilters, DisputeMetrics, DisputeMetricsBucketIdentifier}, frm::{FrmDimensions, FrmFilters, FrmMetrics, FrmMetricsBucketIdentifier}, payment_intents::{ PaymentIntentDimensions, PaymentIntentFilters, PaymentIntentMetrics, PaymentIntentMetricsBucketIdentifier, }, payments::{PaymentDimensions, PaymentFilters, PaymentMetrics, PaymentMetricsBucketIdentifier}, refunds::{RefundDimensions, RefundFilters, RefundMetrics, RefundMetricsBucketIdentifier}, sdk_events::{ SdkEventDimensions, SdkEventFilters, SdkEventMetrics, SdkEventMetricsBucketIdentifier, }, 
Granularity, PaymentDistributionBody, RefundDistributionBody, TimeRange, }; use clickhouse::ClickhouseClient; pub use clickhouse::ClickhouseConfig; use error_stack::report; use router_env::{ logger, tracing::{self, instrument}, types::FlowMetric, }; use storage_impl::config::Database; use strum::Display; use self::{ active_payments::metrics::{ActivePaymentsMetric, ActivePaymentsMetricRow}, auth_events::metrics::{AuthEventMetric, AuthEventMetricRow}, frm::metrics::{FrmMetric, FrmMetricRow}, payment_intents::metrics::{PaymentIntentMetric, PaymentIntentMetricRow}, payments::{ distribution::{PaymentDistribution, PaymentDistributionRow}, metrics::{PaymentMetric, PaymentMetricRow}, }, refunds::metrics::{RefundMetric, RefundMetricRow}, sdk_events::metrics::{SdkEventMetric, SdkEventMetricRow}, sqlx::SqlxClient, types::MetricsError, }; #[derive(Clone, Debug)] pub enum AnalyticsProvider { Sqlx(SqlxClient), Clickhouse(ClickhouseClient), CombinedCkh(SqlxClient, ClickhouseClient), CombinedSqlx(SqlxClient, ClickhouseClient), } impl Default for AnalyticsProvider { fn default() -> Self { Self::Sqlx(SqlxClient::default()) } } impl std::fmt::Display for AnalyticsProvider { fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result { let analytics_provider = match self { Self::Clickhouse(_) => "Clickhouse", Self::Sqlx(_) => "Sqlx", Self::CombinedCkh(_, _) => "CombinedCkh", Self::CombinedSqlx(_, _) => "CombinedSqlx", }; write!(f, "{analytics_provider}") } } impl AnalyticsProvider { #[instrument(skip_all)] pub async fn get_payment_metrics( &self, metric: &PaymentMetrics, dimensions: &[PaymentDimensions], auth: &AuthInfo, filters: &PaymentFilters, granularity: Option<Granularity>, time_range: &TimeRange, ) -> types::MetricsResult<HashSet<(PaymentMetricsBucketIdentifier, PaymentMetricRow)>> { // Metrics to get the fetch time for each payment metric metrics::request::record_operation_time( async { match self { Self::Sqlx(pool) => { metric .load_metrics( dimensions, auth, filters, 
granularity, time_range, pool, ) .await } Self::Clickhouse(pool) => { metric .load_metrics( dimensions, auth, filters, granularity, time_range, pool, ) .await } Self::CombinedCkh(sqlx_pool, ckh_pool) => { let (ckh_result, sqlx_result) = tokio::join!(metric .load_metrics( dimensions, auth, filters, granularity, time_range, ckh_pool, ), metric .load_metrics( dimensions, auth, filters, granularity, time_range, sqlx_pool, )); match (&sqlx_result, &ckh_result) { (Ok(ref sqlx_res), Ok(ref ckh_res)) if sqlx_res != ckh_res => { router_env::logger::error!(clickhouse_result=?ckh_res, postgres_result=?sqlx_res, "Mismatch between clickhouse & postgres payments analytics metrics") }, _ => {} }; ckh_result } Self::CombinedSqlx(sqlx_pool, ckh_pool) => { let (ckh_result, sqlx_result) = tokio::join!(metric .load_metrics( dimensions, auth, filters, granularity, time_range, ckh_pool, ), metric .load_metrics( dimensions, auth, filters, granularity, time_range, sqlx_pool, )); match (&sqlx_result, &ckh_result) { (Ok(ref sqlx_res), Ok(ref ckh_res)) if sqlx_res != ckh_res => { router_env::logger::error!(clickhouse_result=?ckh_res, postgres_result=?sqlx_res, "Mismatch between clickhouse & postgres payments analytics metrics") }, _ => {} }; sqlx_result } } }, &metrics::METRIC_FETCH_TIME, metric, self, ) .await } pub async fn get_payment_distribution( &self, distribution: &PaymentDistributionBody, dimensions: &[PaymentDimensions], auth: &AuthInfo, filters: &PaymentFilters, granularity: Option<Granularity>, time_range: &TimeRange, ) -> types::MetricsResult<Vec<(PaymentMetricsBucketIdentifier, PaymentDistributionRow)>> { // Metrics to get the fetch time for each payment metric metrics::request::record_operation_time( async { match self { Self::Sqlx(pool) => { distribution.distribution_for .load_distribution( distribution, dimensions, auth, filters, granularity, time_range, pool, ) .await } Self::Clickhouse(pool) => { distribution.distribution_for .load_distribution( distribution, dimensions, 
auth, filters, granularity, time_range, pool, ) .await } Self::CombinedCkh(sqlx_pool, ckh_pool) => { let (ckh_result, sqlx_result) = tokio::join!(distribution.distribution_for .load_distribution( distribution, dimensions, auth, filters, granularity, time_range, ckh_pool, ), distribution.distribution_for .load_distribution( distribution, dimensions, auth, filters, granularity, time_range, sqlx_pool, )); match (&sqlx_result, &ckh_result) { (Ok(ref sqlx_res), Ok(ref ckh_res)) if sqlx_res != ckh_res => { router_env::logger::error!(clickhouse_result=?ckh_res, postgres_result=?sqlx_res, "Mismatch between clickhouse & postgres payments analytics distribution") }, _ => {} }; ckh_result } Self::CombinedSqlx(sqlx_pool, ckh_pool) => { let (ckh_result, sqlx_result) = tokio::join!(distribution.distribution_for .load_distribution( distribution, dimensions, auth, filters, granularity, time_range, ckh_pool, ), distribution.distribution_for .load_distribution( distribution, dimensions, auth, filters, granularity, time_range, sqlx_pool, )); match (&sqlx_result, &ckh_result) { (Ok(ref sqlx_res), Ok(ref ckh_res)) if sqlx_res != ckh_res => { router_env::logger::error!(clickhouse_result=?ckh_res, postgres_result=?sqlx_res, "Mismatch between clickhouse & postgres payments analytics distribution") }, _ => {} }; sqlx_result } } }, &metrics::METRIC_FETCH_TIME, &distribution.distribution_for, self, ) .await } pub async fn get_payment_intent_metrics( &self, metric: &PaymentIntentMetrics, dimensions: &[PaymentIntentDimensions], auth: &AuthInfo, filters: &PaymentIntentFilters, granularity: Option<Granularity>, time_range: &TimeRange, ) -> types::MetricsResult<HashSet<(PaymentIntentMetricsBucketIdentifier, PaymentIntentMetricRow)>> { // Metrics to get the fetch time for each payment intent metric metrics::request::record_operation_time( async { match self { Self::Sqlx(pool) => { metric .load_metrics( dimensions, auth, filters, granularity, time_range, pool, ) .await } Self::Clickhouse(pool) => { 
metric .load_metrics( dimensions, auth, filters, granularity, time_range, pool, ) .await } Self::CombinedCkh(sqlx_pool, ckh_pool) => { let (ckh_result, sqlx_result) = tokio::join!(metric .load_metrics( dimensions, auth, filters, granularity, time_range, ckh_pool, ), metric .load_metrics( dimensions, auth, filters, granularity, time_range, sqlx_pool, )); match (&sqlx_result, &ckh_result) { (Ok(ref sqlx_res), Ok(ref ckh_res)) if sqlx_res != ckh_res => { router_env::logger::error!(clickhouse_result=?ckh_res, postgres_result=?sqlx_res, "Mismatch between clickhouse & postgres payment intents analytics metrics") }, _ => {} }; ckh_result } Self::CombinedSqlx(sqlx_pool, ckh_pool) => { let (ckh_result, sqlx_result) = tokio::join!(metric .load_metrics( dimensions, auth, filters, granularity, time_range, ckh_pool, ), metric .load_metrics( dimensions, auth, filters, granularity, time_range, sqlx_pool, )); match (&sqlx_result, &ckh_result) { (Ok(ref sqlx_res), Ok(ref ckh_res)) if sqlx_res != ckh_res => { router_env::logger::error!(clickhouse_result=?ckh_res, postgres_result=?sqlx_res, "Mismatch between clickhouse & postgres payment intents analytics metrics") }, _ => {} }; sqlx_result } } }, &metrics::METRIC_FETCH_TIME, metric, self, ) .await } pub async fn get_refund_metrics( &self, metric: &RefundMetrics, dimensions: &[RefundDimensions], auth: &AuthInfo, filters: &RefundFilters, granularity: Option<Granularity>, time_range: &TimeRange, ) -> types::MetricsResult<HashSet<(RefundMetricsBucketIdentifier, RefundMetricRow)>> { // Metrics to get the fetch time for each refund metric metrics::request::record_operation_time( async { match self { Self::Sqlx(pool) => { metric .load_metrics( dimensions, auth, filters, granularity, time_range, pool, ) .await } Self::Clickhouse(pool) => { metric .load_metrics( dimensions, auth, filters, granularity, time_range, pool, ) .await } Self::CombinedCkh(sqlx_pool, ckh_pool) => { let (ckh_result, sqlx_result) = tokio::join!( metric.load_metrics( 
dimensions, auth, filters, granularity, time_range, ckh_pool, ), metric.load_metrics( dimensions, auth, filters, granularity, time_range, sqlx_pool, ) ); match (&sqlx_result, &ckh_result) { (Ok(ref sqlx_res), Ok(ref ckh_res)) if sqlx_res != ckh_res => { logger::error!(clickhouse_result=?ckh_res, postgres_result=?sqlx_res, "Mismatch between clickhouse & postgres refunds analytics metrics") } _ => {} }; ckh_result } Self::CombinedSqlx(sqlx_pool, ckh_pool) => { let (ckh_result, sqlx_result) = tokio::join!( metric.load_metrics( dimensions, auth, filters, granularity, time_range, ckh_pool, ), metric.load_metrics( dimensions, auth, filters, granularity, time_range, sqlx_pool, ) ); match (&sqlx_result, &ckh_result) { (Ok(ref sqlx_res), Ok(ref ckh_res)) if sqlx_res != ckh_res => { logger::error!(clickhouse_result=?ckh_res, postgres_result=?sqlx_res, "Mismatch between clickhouse & postgres refunds analytics metrics") } _ => {} }; sqlx_result } } }, &metrics::METRIC_FETCH_TIME, metric, self, ) .await } pub async fn get_refund_distribution( &self, distribution: &RefundDistributionBody, dimensions: &[RefundDimensions], auth: &AuthInfo, filters: &RefundFilters, granularity: &Option<Granularity>, time_range: &TimeRange, ) -> types::MetricsResult<Vec<(RefundMetricsBucketIdentifier, RefundDistributionRow)>> { // Metrics to get the fetch time for each payment metric metrics::request::record_operation_time( async { match self { Self::Sqlx(pool) => { distribution.distribution_for .load_distribution( distribution, dimensions, auth, filters, granularity, time_range, pool, ) .await } Self::Clickhouse(pool) => { distribution.distribution_for .load_distribution( distribution, dimensions, auth, filters, granularity, time_range, pool, ) .await } Self::CombinedCkh(sqlx_pool, ckh_pool) => { let (ckh_result, sqlx_result) = tokio::join!(distribution.distribution_for .load_distribution( distribution, dimensions, auth, filters, granularity, time_range, ckh_pool, ), distribution.distribution_for 
.load_distribution( distribution, dimensions, auth, filters, granularity, time_range, sqlx_pool, )); match (&sqlx_result, &ckh_result) { (Ok(ref sqlx_res), Ok(ref ckh_res)) if sqlx_res != ckh_res => { router_env::logger::error!(clickhouse_result=?ckh_res, postgres_result=?sqlx_res, "Mismatch between clickhouse & postgres payments analytics distribution") }, _ => {} }; ckh_result } Self::CombinedSqlx(sqlx_pool, ckh_pool) => { let (ckh_result, sqlx_result) = tokio::join!(distribution.distribution_for .load_distribution( distribution, dimensions, auth, filters, granularity, time_range, ckh_pool, ), distribution.distribution_for .load_distribution( distribution, dimensions, auth, filters, granularity, time_range, sqlx_pool, )); match (&sqlx_result, &ckh_result) { (Ok(ref sqlx_res), Ok(ref ckh_res)) if sqlx_res != ckh_res => { router_env::logger::error!(clickhouse_result=?ckh_res, postgres_result=?sqlx_res, "Mismatch between clickhouse & postgres payments analytics distribution") }, _ => {} }; sqlx_result } } }, &metrics::METRIC_FETCH_TIME, &distribution.distribution_for, self, ) .await } pub async fn get_frm_metrics( &self, metric: &FrmMetrics, dimensions: &[FrmDimensions], merchant_id: &common_utils::id_type::MerchantId, filters: &FrmFilters, granularity: Option<Granularity>, time_range: &TimeRange, ) -> types::MetricsResult<Vec<(FrmMetricsBucketIdentifier, FrmMetricRow)>> { // Metrics to get the fetch time for each refund metric metrics::request::record_operation_time( async { match self { Self::Sqlx(pool) => { metric .load_metrics( dimensions, merchant_id, filters, granularity, time_range, pool, ) .await } Self::Clickhouse(pool) => { metric .load_metrics( dimensions, merchant_id, filters, granularity, time_range, pool, ) .await } Self::CombinedCkh(sqlx_pool, ckh_pool) => { let (ckh_result, sqlx_result) = tokio::join!( metric.load_metrics( dimensions, merchant_id, filters, granularity, time_range, ckh_pool, ), metric.load_metrics( dimensions, merchant_id, filters, 
granularity, time_range, sqlx_pool, ) ); match (&sqlx_result, &ckh_result) { (Ok(ref sqlx_res), Ok(ref ckh_res)) if sqlx_res != ckh_res => { logger::error!(clickhouse_result=?ckh_res, postgres_result=?sqlx_res, "Mismatch between clickhouse & postgres frm analytics metrics") } _ => {} }; ckh_result } Self::CombinedSqlx(sqlx_pool, ckh_pool) => { let (ckh_result, sqlx_result) = tokio::join!( metric.load_metrics( dimensions, merchant_id, filters, granularity, time_range, ckh_pool, ), metric.load_metrics( dimensions, merchant_id, filters, granularity, time_range, sqlx_pool, ) ); match (&sqlx_result, &ckh_result) { (Ok(ref sqlx_res), Ok(ref ckh_res)) if sqlx_res != ckh_res => { logger::error!(clickhouse_result=?ckh_res, postgres_result=?sqlx_res, "Mismatch between clickhouse & postgres frm analytics metrics") } _ => {} }; sqlx_result } } }, &metrics::METRIC_FETCH_TIME, metric, self, ) .await } pub async fn get_dispute_metrics( &self, metric: &DisputeMetrics, dimensions: &[DisputeDimensions], auth: &AuthInfo, filters: &DisputeFilters, granularity: Option<Granularity>, time_range: &TimeRange, ) -> types::MetricsResult<HashSet<(DisputeMetricsBucketIdentifier, DisputeMetricRow)>> { // Metrics to get the fetch time for each refund metric metrics::request::record_operation_time( async { match self { Self::Sqlx(pool) => { metric .load_metrics( dimensions, auth, filters, granularity, time_range, pool, ) .await } Self::Clickhouse(pool) => { metric .load_metrics( dimensions, auth, filters, granularity, time_range, pool, ) .await } Self::CombinedCkh(sqlx_pool, ckh_pool) => { let (ckh_result, sqlx_result) = tokio::join!( metric.load_metrics( dimensions, auth, filters, granularity, time_range, ckh_pool, ), metric.load_metrics( dimensions, auth, filters, granularity, time_range, sqlx_pool, ) ); match (&sqlx_result, &ckh_result) { (Ok(ref sqlx_res), Ok(ref ckh_res)) if sqlx_res != ckh_res => { logger::error!(clickhouse_result=?ckh_res, postgres_result=?sqlx_res, "Mismatch between 
clickhouse & postgres disputes analytics metrics") } _ => {} }; ckh_result } Self::CombinedSqlx(sqlx_pool, ckh_pool) => { let (ckh_result, sqlx_result) = tokio::join!( metric.load_metrics( dimensions, auth, filters, granularity, time_range, ckh_pool, ), metric.load_metrics( dimensions, auth, filters, granularity, time_range, sqlx_pool, ) ); match (&sqlx_result, &ckh_result) { (Ok(ref sqlx_res), Ok(ref ckh_res)) if sqlx_res != ckh_res => { logger::error!(clickhouse_result=?ckh_res, postgres_result=?sqlx_res, "Mismatch between clickhouse & postgres disputes analytics metrics") } _ => {} }; sqlx_result } } }, &metrics::METRIC_FETCH_TIME, metric, self, ) .await } pub async fn get_sdk_event_metrics( &self, metric: &SdkEventMetrics, dimensions: &[SdkEventDimensions], publishable_key: &str, filters: &SdkEventFilters, granularity: Option<Granularity>, time_range: &TimeRange, ) -> types::MetricsResult<HashSet<(SdkEventMetricsBucketIdentifier, SdkEventMetricRow)>> { match self { Self::Sqlx(_pool) => Err(report!(MetricsError::NotImplemented)), Self::Clickhouse(pool) => { metric .load_metrics( dimensions, publishable_key, filters, granularity, time_range, pool, ) .await } Self::CombinedCkh(_sqlx_pool, ckh_pool) | Self::CombinedSqlx(_sqlx_pool, ckh_pool) => { metric .load_metrics( dimensions, publishable_key, filters, granularity, // Since SDK events are ckh only use ckh here time_range, ckh_pool, ) .await } } } pub async fn get_active_payments_metrics( &self, metric: &ActivePaymentsMetrics, merchant_id: &common_utils::id_type::MerchantId, publishable_key: &str, time_range: &TimeRange, ) -> types::MetricsResult< HashSet<( ActivePaymentsMetricsBucketIdentifier, ActivePaymentsMetricRow, )>, > { match self { Self::Sqlx(_pool) => Err(report!(MetricsError::NotImplemented)), Self::Clickhouse(pool) => { metric .load_metrics(merchant_id, publishable_key, time_range, pool) .await } Self::CombinedCkh(_sqlx_pool, ckh_pool) | Self::CombinedSqlx(_sqlx_pool, ckh_pool) => { metric 
.load_metrics(merchant_id, publishable_key, time_range, ckh_pool) .await } } } pub async fn get_auth_event_metrics( &self, metric: &AuthEventMetrics, dimensions: &[AuthEventDimensions], auth: &AuthInfo, filters: &AuthEventFilters, granularity: Option<Granularity>, time_range: &TimeRange, ) -> types::MetricsResult<HashSet<(AuthEventMetricsBucketIdentifier, AuthEventMetricRow)>> { match self { Self::Sqlx(_pool) => Err(report!(MetricsError::NotImplemented)), Self::Clickhouse(pool) => { metric .load_metrics(auth, dimensions, filters, granularity, time_range, pool) .await } Self::CombinedCkh(_sqlx_pool, ckh_pool) | Self::CombinedSqlx(_sqlx_pool, ckh_pool) => { metric .load_metrics( auth, dimensions, filters, granularity, // Since API events are ckh only use ckh here time_range, ckh_pool, ) .await } } } pub async fn get_api_event_metrics( &self, metric: &ApiEventMetrics, dimensions: &[ApiEventDimensions], merchant_id: &common_utils::id_type::MerchantId, filters: &ApiEventFilters, granularity: Option<Granularity>, time_range: &TimeRange, ) -> types::MetricsResult<HashSet<(ApiEventMetricsBucketIdentifier, ApiEventMetricRow)>> { match self { Self::Sqlx(_pool) => Err(report!(MetricsError::NotImplemented)), Self::Clickhouse(ckh_pool) | Self::CombinedCkh(_, ckh_pool) | Self::CombinedSqlx(_, ckh_pool) => { // Since API events are ckh only use ckh here metric .load_metrics( dimensions, merchant_id, filters, granularity, time_range, ckh_pool, ) .await } } } pub async fn from_conf(config: &AnalyticsConfig, tenant: &dyn TenantConfig) -> Self { match config { AnalyticsConfig::Sqlx { sqlx, .. } => { Self::Sqlx(SqlxClient::from_conf(sqlx, tenant.get_schema()).await) } AnalyticsConfig::Clickhouse { clickhouse, .. } => Self::Clickhouse(ClickhouseClient { config: Arc::new(clickhouse.clone()), database: tenant.get_clickhouse_database().to_string(), }), AnalyticsConfig::CombinedCkh { sqlx, clickhouse, .. 
} => Self::CombinedCkh( SqlxClient::from_conf(sqlx, tenant.get_schema()).await, ClickhouseClient { config: Arc::new(clickhouse.clone()), database: tenant.get_clickhouse_database().to_string(), }, ), AnalyticsConfig::CombinedSqlx { sqlx, clickhouse, .. } => Self::CombinedSqlx( SqlxClient::from_conf(sqlx, tenant.get_schema()).await, ClickhouseClient { config: Arc::new(clickhouse.clone()), database: tenant.get_clickhouse_database().to_string(), }, ), } } } #[derive(Clone, Debug, serde::Deserialize)] #[serde(tag = "source", rename_all = "lowercase")] pub enum AnalyticsConfig { Sqlx { sqlx: Database, #[serde(default)] forex_enabled: bool, }, Clickhouse { clickhouse: ClickhouseConfig, #[serde(default)] forex_enabled: bool, }, CombinedCkh { sqlx: Database, clickhouse: ClickhouseConfig, #[serde(default)] forex_enabled: bool, }, CombinedSqlx { sqlx: Database, clickhouse: ClickhouseConfig, #[serde(default)] forex_enabled: bool, }, } impl AnalyticsConfig { pub fn get_forex_enabled(&self) -> bool { match self { Self::Sqlx { forex_enabled, .. } | Self::Clickhouse { forex_enabled, .. } | Self::CombinedCkh { forex_enabled, .. } | Self::CombinedSqlx { forex_enabled, .. } => *forex_enabled, } } } #[async_trait::async_trait] impl SecretsHandler for AnalyticsConfig { async fn convert_to_raw_secret( value: SecretStateContainer<Self, SecuredSecret>, secret_management_client: &dyn SecretManagementInterface, ) -> CustomResult<SecretStateContainer<Self, RawSecret>, SecretsManagementError> { let analytics_config = value.get_inner(); let decrypted_password = match analytics_config { // Todo: Perform kms decryption of clickhouse password Self::Clickhouse { .. } => masking::Secret::new(String::default()), Self::Sqlx { sqlx, .. } | Self::CombinedCkh { sqlx, .. } | Self::CombinedSqlx { sqlx, .. } => { secret_management_client .get_secret(sqlx.password.clone()) .await? 
} }; Ok(value.transition_state(|conf| match conf { Self::Sqlx { sqlx, forex_enabled, } => Self::Sqlx { sqlx: Database { password: decrypted_password, ..sqlx }, forex_enabled, }, Self::Clickhouse { clickhouse, forex_enabled, } => Self::Clickhouse { clickhouse, forex_enabled, }, Self::CombinedCkh { sqlx, clickhouse, forex_enabled, } => Self::CombinedCkh { sqlx: Database { password: decrypted_password, ..sqlx }, clickhouse, forex_enabled, }, Self::CombinedSqlx { sqlx, clickhouse, forex_enabled, } => Self::CombinedSqlx { sqlx: Database { password: decrypted_password, ..sqlx }, clickhouse, forex_enabled, }, })) } } impl Default for AnalyticsConfig { fn default() -> Self { Self::Sqlx { sqlx: Database::default(), forex_enabled: false, } } } #[derive(Clone, Debug, serde::Deserialize, Default, serde::Serialize)] pub struct ReportConfig { pub payment_function: String, pub refund_function: String, pub dispute_function: String, pub authentication_function: String, pub region: String, } /// Analytics Flow routes Enums /// Info - Dimensions and filters available for the domain /// Filters - Set of values present for the dimension /// Metrics - Analytical data on dimensions and metrics #[derive(Debug, Display, Clone, PartialEq, Eq)] pub enum AnalyticsFlow { GetInfo, GetPaymentMetrics, GetPaymentIntentMetrics, GetRefundsMetrics, GetFrmMetrics, GetSdkMetrics, GetAuthMetrics, GetAuthEventFilters, GetActivePaymentsMetrics, GetPaymentFilters, GetPaymentIntentFilters, GetRefundFilters, GetFrmFilters, GetSdkEventFilters, GetApiEvents, GetSdkEvents, GeneratePaymentReport, GenerateDisputeReport, GenerateRefundReport, GenerateAuthenticationReport, GetApiEventMetrics, GetApiEventFilters, GetConnectorEvents, GetOutgoingWebhookEvents, GetGlobalSearchResults, GetSearchResults, GetDisputeFilters, GetDisputeMetrics, GetSankey, GetRoutingEvents, } impl FlowMetric for AnalyticsFlow {}
crates/analytics/src/lib.rs
analytics::src::lib
7,071
true
// File: crates/analytics/src/auth_events.rs // Module: analytics::src::auth_events pub mod accumulator; mod core; pub mod filters; pub mod metrics; pub mod sankey; pub mod types; pub use accumulator::{AuthEventMetricAccumulator, AuthEventMetricsAccumulator}; pub use self::core::{get_filters, get_metrics, get_sankey};
crates/analytics/src/auth_events.rs
analytics::src::auth_events
79
true
// File: crates/analytics/src/query.rs
// Module: analytics::src::query
//
// Generic SQL query builder shared by the analytics backends (SQLx/Postgres
// and Clickhouse). Queries are assembled as plain strings, so every value
// that ends up in SQL is serialized through the `ToSql` trait defined below.

use std::{fmt, marker::PhantomData};

use api_models::{
    analytics::{
        self as analytics_api,
        api_event::ApiEventDimensions,
        auth_events::{AuthEventDimensions, AuthEventFlows},
        disputes::DisputeDimensions,
        frm::{FrmDimensions, FrmTransactionType},
        payment_intents::PaymentIntentDimensions,
        payments::{PaymentDimensions, PaymentDistributions},
        refunds::{RefundDimensions, RefundDistributions, RefundType},
        sdk_events::{SdkEventDimensions, SdkEventNames},
        Granularity,
    },
    enums::{
        AttemptStatus, AuthenticationType, Connector, Currency, DisputeStage, IntentStatus,
        PaymentMethod, PaymentMethodType, RoutingApproach,
    },
    refunds::RefundStatus,
};
use common_enums::{
    AuthenticationConnectors, AuthenticationStatus, DecoupledAuthenticationType, TransactionStatus,
};
use common_utils::{
    errors::{CustomResult, ParsingError},
    id_type::{MerchantId, OrganizationId, ProfileId},
};
use diesel_models::{enums as storage_enums, enums::FraudCheckStatus};
use error_stack::ResultExt;
use router_env::{logger, Flow};

use super::types::{AnalyticsCollection, AnalyticsDataSource, LoadRow, TableEngine};
use crate::{enums::AuthInfo, types::QueryExecutionError};

/// Result alias for errors raised while *building* a query string.
pub type QueryResult<T> = error_stack::Result<T, QueryBuildingError>;

/// Implemented by request types that contribute WHERE conditions to a query.
pub trait QueryFilter<T>
where
    T: AnalyticsDataSource,
    AnalyticsCollection: ToSql<T>,
{
    /// Appends this value's filter conditions to `builder`.
    fn set_filter_clause(&self, builder: &mut QueryBuilder<T>) -> QueryResult<()>;
}

/// Implemented by types that contribute GROUP BY clauses, specialised per
/// backend (`SqlxClient` vs `ClickhouseClient` below).
pub trait GroupByClause<T>
where
    T: AnalyticsDataSource,
    AnalyticsCollection: ToSql<T>,
{
    fn set_group_by_clause(&self, builder: &mut QueryBuilder<T>) -> QueryResult<()>;
}

/// Time-bucketing behaviour of a series granularity: the coarse truncation
/// level, the bucket width, and clipping of a timestamp to bucket boundaries.
pub trait SeriesBucket {
    /// The timestamp type being bucketed.
    type SeriesType;

    /// The coarse truncation level (minute / hour / day).
    type GranularityLevel;

    fn get_lowest_common_granularity_level(&self) -> Self::GranularityLevel;

    fn get_bucket_size(&self) -> u8;

    /// Clips `value` down to the start of its bucket.
    fn clip_to_start(
        &self,
        value: Self::SeriesType,
    ) -> error_stack::Result<Self::SeriesType, PostProcessingError>;

    /// Clips `value` up to the end of its bucket.
    fn clip_to_end(
        &self,
        value: Self::SeriesType,
    ) -> error_stack::Result<Self::SeriesType, PostProcessingError>;
}

impl<T> QueryFilter<T> for analytics_api::TimeRange
where
    T: AnalyticsDataSource,
    time::PrimitiveDateTime: ToSql<T>,
    AnalyticsCollection: ToSql<T>,
    Granularity: GroupByClause<T>,
{
    /// Restricts `created_at` to `[start_time, end_time]`; the upper bound is
    /// only added when `end_time` is present.
    fn set_filter_clause(&self, builder: &mut QueryBuilder<T>) -> QueryResult<()> {
        builder.add_custom_filter_clause("created_at", self.start_time, FilterTypes::Gte)?;
        if let Some(end) = self.end_time {
            builder.add_custom_filter_clause("created_at", end, FilterTypes::Lte)?;
        }
        Ok(())
    }
}

impl GroupByClause<super::SqlxClient> for Granularity {
    /// Postgres time bucketing: DATE_TRUNC to the common level, plus a
    /// FLOOR(DATE_PART(...)/bucket) term for sub-hour granularities
    /// (5/15/30-minute buckets within the truncated hour).
    fn set_group_by_clause(
        &self,
        builder: &mut QueryBuilder<super::SqlxClient>,
    ) -> QueryResult<()> {
        let trunc_scale = self.get_lowest_common_granularity_level();
        let granularity_bucket_scale = match self {
            Self::OneMin => None,
            Self::FiveMin | Self::FifteenMin | Self::ThirtyMin => Some("minute"),
            Self::OneHour | Self::OneDay => None,
        };
        let granularity_divisor = self.get_bucket_size();

        builder
            .add_group_by_clause(format!("DATE_TRUNC('{trunc_scale}', created_at)"))
            .attach_printable("Error adding time prune group by")?;
        if let Some(scale) = granularity_bucket_scale {
            builder
                .add_group_by_clause(format!(
                    "FLOOR(DATE_PART('{scale}', created_at)/{granularity_divisor})"
                ))
                .attach_printable("Error adding time binning group by")?;
        }
        Ok(())
    }
}

impl GroupByClause<super::ClickhouseClient> for Granularity {
    /// Clickhouse time bucketing via the native `toStartOf*` functions.
    fn set_group_by_clause(
        &self,
        builder: &mut QueryBuilder<super::ClickhouseClient>,
    ) -> QueryResult<()> {
        let interval = match self {
            Self::OneMin => "toStartOfMinute(created_at)",
            Self::FiveMin => "toStartOfFiveMinutes(created_at)",
            Self::FifteenMin => "toStartOfFifteenMinutes(created_at)",
            Self::ThirtyMin => "toStartOfInterval(created_at, INTERVAL 30 minute)",
            Self::OneHour => "toStartOfHour(created_at)",
            Self::OneDay => "toStartOfDay(created_at)",
        };

        builder
            .add_group_by_clause(interval)
            .attach_printable("Error adding interval group by")
    }
}

/// Coarse truncation level of a granularity. Displays lowercase (strum),
/// matching what `DATE_TRUNC('{level}', ...)` expects.
#[derive(strum::Display)]
#[strum(serialize_all = "lowercase")]
pub enum TimeGranularityLevel {
    Minute,
    Hour,
    Day,
}

impl SeriesBucket for Granularity {
    type SeriesType = time::PrimitiveDateTime;
    type GranularityLevel = TimeGranularityLevel;

    fn get_lowest_common_granularity_level(&self) -> Self::GranularityLevel {
        match self {
            Self::OneMin => TimeGranularityLevel::Minute,
            Self::FiveMin | Self::FifteenMin | Self::ThirtyMin | Self::OneHour => {
                TimeGranularityLevel::Hour
            }
            Self::OneDay => TimeGranularityLevel::Day,
        }
    }

    /// Bucket width, in units one level finer than the common granularity
    /// level: seconds for minute-level, minutes for hour-level, hours for
    /// day-level (hence OneMin => 60 and OneDay => 24).
    fn get_bucket_size(&self) -> u8 {
        match self {
            Self::OneMin => 60,
            Self::FiveMin => 5,
            Self::FifteenMin => 15,
            Self::ThirtyMin => 30,
            Self::OneHour => 60,
            Self::OneDay => 24,
        }
    }

    fn clip_to_start(
        &self,
        value: Self::SeriesType,
    ) -> error_stack::Result<Self::SeriesType, PostProcessingError> {
        // Round a time component down to the nearest multiple of `modulo`.
        let clip_start = |value: u8, modulo: u8| -> u8 { value - value % modulo };

        // Rebuild the time-of-day from MIDNIGHT, clipping only the component
        // at the bucket level and copying the coarser components through.
        let clipped_time = match (
            self.get_lowest_common_granularity_level(),
            self.get_bucket_size(),
        ) {
            (TimeGranularityLevel::Minute, i) => time::Time::MIDNIGHT
                .replace_second(clip_start(value.second(), i))
                .and_then(|t| t.replace_minute(value.minute()))
                .and_then(|t| t.replace_hour(value.hour())),
            (TimeGranularityLevel::Hour, i) => time::Time::MIDNIGHT
                .replace_minute(clip_start(value.minute(), i))
                .and_then(|t| t.replace_hour(value.hour())),
            (TimeGranularityLevel::Day, i) => {
                time::Time::MIDNIGHT.replace_hour(clip_start(value.hour(), i))
            }
        }
        .change_context(PostProcessingError::BucketClipping)?;

        Ok(value.replace_time(clipped_time))
    }

    fn clip_to_end(
        &self,
        value: Self::SeriesType,
    ) -> error_stack::Result<Self::SeriesType, PostProcessingError> {
        // Round a time component up to the last value inside its bucket.
        let clip_end = |value: u8, modulo: u8| -> u8 { value + modulo - 1 - value % modulo };

        let clipped_time = match (
            self.get_lowest_common_granularity_level(),
            self.get_bucket_size(),
        ) {
            (TimeGranularityLevel::Minute, i) => time::Time::MIDNIGHT
                .replace_second(clip_end(value.second(), i))
                .and_then(|t| t.replace_minute(value.minute()))
                .and_then(|t| t.replace_hour(value.hour())),
            (TimeGranularityLevel::Hour, i) => time::Time::MIDNIGHT
                .replace_minute(clip_end(value.minute(), i))
                .and_then(|t| t.replace_hour(value.hour())),
            (TimeGranularityLevel::Day, i) => {
                time::Time::MIDNIGHT.replace_hour(clip_end(value.hour(), i))
            }
        }
        .change_context(PostProcessingError::BucketClipping)
        .attach_printable_lazy(|| format!("Bucket Clip Error: {value}"))?;

        Ok(value.replace_time(clipped_time))
    }
}

/// Errors raised while assembling a query string.
#[derive(thiserror::Error, Debug)]
pub enum QueryBuildingError {
    #[allow(dead_code)]
    #[error("Not Implemented: {0}")]
    NotImplemented(String),
    #[error("Failed to Serialize to SQL")]
    SqlSerializeError,
    #[error("Failed to build sql query: {0}")]
    InvalidQuery(&'static str),
}

/// Errors raised while post-processing query results.
#[derive(thiserror::Error, Debug)]
pub enum PostProcessingError {
    #[error("Error Clipping values to bucket sizes")]
    BucketClipping,
}

/// SQL aggregate expressions over a column expression of type `R`.
/// `alias` becomes the `AS ...` name; a `Count` with `field: None` counts rows.
#[derive(Debug)]
pub enum Aggregate<R> {
    Count {
        field: Option<R>,
        alias: Option<&'static str>,
    },
    Sum {
        field: R,
        alias: Option<&'static str>,
    },
    Min {
        field: R,
        alias: Option<&'static str>,
    },
    Max {
        field: R,
        alias: Option<&'static str>,
    },
    Percentile {
        field: R,
        alias: Option<&'static str>,
        percentile: Option<&'static u8>,
    },
    DistinctCount {
        field: R,
        alias: Option<&'static str>,
    },
}

// Window functions in query
// ---
// Description -
// field: to_sql type value used as expr in aggregation
// partition_by: partition by fields in window
// order_by: order by fields and order (Ascending / Descending) in window
// alias: alias of window expr in query
// ---
// Usage -
// Window::Sum {
//     field: "count",
//     partition_by: Some(query_builder.transform_to_sql_values(&dimensions).switch()?),
//     order_by: Some(("value", Descending)),
//     alias: Some("total"),
// }
#[derive(Debug)]
pub enum Window<R> {
    Sum {
        field: R,
        partition_by: Option<String>,
        order_by: Option<(String, Order)>,
        alias: Option<&'static str>,
    },
    RowNumber {
        field: R,
        partition_by: Option<String>,
        order_by: Option<(String, Order)>,
        alias: Option<&'static str>,
    },
}

/// Sort direction; renders as the SQL keywords `asc` / `desc`.
#[derive(Debug, Clone, Copy)]
pub enum Order {
    Ascending,
    Descending,
}

impl fmt::Display for Order {
    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
        match self {
            Self::Ascending => write!(f, "asc"),
            Self::Descending => write!(f, "desc"),
        }
    }
}

// Select TopN values for a group based on a metric
// ---
// Description -
// columns: Columns in group to select TopN values for
// count: N in TopN
// order_column: metric used to sort and limit TopN
// order: sort order of metric (Ascending / Descending)
// ---
// Usage -
// Use via add_top_n_clause fn of query_builder
// add_top_n_clause(
//     &dimensions,
//     distribution.distribution_cardinality.into(),
//     "count",
//     Order::Descending,
// )
#[allow(dead_code)]
#[derive(Debug)]
pub struct TopN {
    pub columns: String,
    pub count: u64,
    pub order_column: String,
    pub order: Order,
}

/// Clickhouse `LIMIT n BY cols` clause: keeps at most `limit` rows per
/// distinct combination of `columns`.
#[derive(Debug, Clone)]
pub struct LimitByClause {
    limit: u64,
    columns: Vec<String>,
}

impl fmt::Display for LimitByClause {
    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
        write!(f, "LIMIT {} BY {}", self.limit, self.columns.join(", "))
    }
}

/// Boolean connective used when joining nested filters. Defaults to AND.
#[derive(Debug, Default, Clone, Copy)]
pub enum FilterCombinator {
    #[default]
    And,
    Or,
}

impl<T: AnalyticsDataSource> ToSql<T> for FilterCombinator {
    fn to_sql(&self, _table_engine: &TableEngine) -> error_stack::Result<String, ParsingError> {
        Ok(match self {
            Self::And => " AND ",
            Self::Or => " OR ",
        }
        .to_owned())
    }
}

/// A WHERE-clause tree: either a single `(lhs, op, rhs)` condition or a
/// parenthesised group of sub-filters joined by a combinator.
#[derive(Debug, Clone)]
pub enum Filter {
    Plain(String, FilterTypes, String),
    NestedFilter(FilterCombinator, Vec<Filter>),
}

impl Default for Filter {
    // An empty AND-group, i.e. no filtering.
    fn default() -> Self {
        Self::NestedFilter(FilterCombinator::default(), Vec::new())
    }
}

impl<T: AnalyticsDataSource> ToSql<T> for Filter {
    fn to_sql(&self, table_engine: &TableEngine) -> error_stack::Result<String, ParsingError> {
        Ok(match self {
            Self::Plain(l, op, r) => filter_type_to_sql(l, *op, r),
            Self::NestedFilter(operator, filters) => {
                // Render children recursively and join them with the
                // combinator's SQL (" AND " / " OR "), wrapped in parens.
                format!(
                    "( {} )",
                    filters
                        .iter()
                        .map(|f| <Self as ToSql<T>>::to_sql(f, table_engine))
                        .collect::<Result<Vec<String>, _>>()?
                        .join(
                            <FilterCombinator as ToSql<T>>::to_sql(operator, table_engine)?
                                .as_ref()
                        )
                )
            }
        })
    }
}

/// Accumulates the pieces of a SELECT statement and renders them in
/// `build_query`. `T` fixes the backend dialect via the `ToSql<T>` bounds.
#[derive(Debug)]
pub struct QueryBuilder<T>
where
    T: AnalyticsDataSource,
    AnalyticsCollection: ToSql<T>,
{
    // SELECT expressions.
    columns: Vec<String>,
    // WHERE tree (empty nested filter = no WHERE clause).
    filters: Filter,
    // GROUP BY expressions.
    group_by: Vec<String>,
    // ORDER BY "<col> <dir>" fragments.
    order_by: Vec<String>,
    // HAVING conditions as (aggregate-sql, op, value) triples.
    having: Option<Vec<(String, FilterTypes, String)>>,
    // Optional Clickhouse LIMIT ... BY ... clause.
    limit_by: Option<LimitByClause>,
    // Columns of the wrapping outer SELECT (used for window functions).
    outer_select: Vec<String>,
    // Optional top-N-per-group post-filter (wraps the query once more).
    top_n: Option<TopN>,
    table: AnalyticsCollection,
    distinct: bool,
    // Only pins the backend type parameter; no runtime data.
    db_type: PhantomData<T>,
    table_engine: TableEngine,
}

/// Serializes a value into a SQL fragment for backend `T`. Quoting is the
/// *caller's* responsibility (see `FilterTypes` rendering below).
pub trait ToSql<T: AnalyticsDataSource> {
    fn to_sql(&self, table_engine: &TableEngine) -> error_stack::Result<String, ParsingError>;
}

impl<T: AnalyticsDataSource> ToSql<T> for &MerchantId {
    fn to_sql(&self, _table_engine: &TableEngine) -> error_stack::Result<String, ParsingError> {
        Ok(self.get_string_repr().to_owned())
    }
}

impl<T: AnalyticsDataSource> ToSql<T> for MerchantId {
    fn to_sql(&self, _table_engine: &TableEngine) -> error_stack::Result<String, ParsingError> {
        Ok(self.get_string_repr().to_owned())
    }
}

impl<T: AnalyticsDataSource> ToSql<T> for &OrganizationId {
    fn to_sql(&self, _table_engine: &TableEngine) -> error_stack::Result<String, ParsingError> {
        Ok(self.get_string_repr().to_owned())
    }
}

impl<T: AnalyticsDataSource> ToSql<T> for ProfileId {
    fn to_sql(&self, _table_engine: &TableEngine) -> error_stack::Result<String, ParsingError> {
        Ok(self.get_string_repr().to_owned())
    }
}

impl<T: AnalyticsDataSource> ToSql<T> for &common_utils::id_type::PaymentId {
    fn to_sql(&self, _table_engine: &TableEngine) -> error_stack::Result<String, ParsingError> {
        Ok(self.get_string_repr().to_owned())
    }
}

impl<T: AnalyticsDataSource> ToSql<T> for common_utils::id_type::CustomerId {
    fn to_sql(&self, _table_engine: &TableEngine) -> error_stack::Result<String, ParsingError> {
        Ok(self.get_string_repr().to_owned())
    }
}

impl<T: AnalyticsDataSource> ToSql<T> for bool {
    // Booleans render as 1/0 rather than true/false.
    fn to_sql(&self, _table_engine: &TableEngine) -> error_stack::Result<String, ParsingError> {
        let flag = *self;
        Ok(i8::from(flag).to_string())
    }
}

/// Implement `ToSql` on arrays of types that impl `ToString`.
macro_rules! impl_to_sql_for_to_string {
    ($($type:ty),+) => {
        $(
            impl<T: AnalyticsDataSource> ToSql<T> for $type {
                fn to_sql(&self, _table_engine: &TableEngine) -> error_stack::Result<String, ParsingError> {
                    Ok(self.to_string())
                }
            }
        )+
    };
}

impl_to_sql_for_to_string!(
    String,
    &str,
    &PaymentDimensions,
    &PaymentIntentDimensions,
    &RefundDimensions,
    &FrmDimensions,
    PaymentDimensions,
    PaymentIntentDimensions,
    &PaymentDistributions,
    RefundDimensions,
    &RefundDistributions,
    FrmDimensions,
    PaymentMethod,
    PaymentMethodType,
    AuthenticationType,
    Connector,
    AttemptStatus,
    IntentStatus,
    RefundStatus,
    FraudCheckStatus,
    storage_enums::RefundStatus,
    Currency,
    RefundType,
    FrmTransactionType,
    TransactionStatus,
    AuthenticationStatus,
    AuthenticationConnectors,
    DecoupledAuthenticationType,
    Flow,
    &String,
    &bool,
    &u64,
    u64,
    Order,
    RoutingApproach
);

impl_to_sql_for_to_string!(
    &SdkEventDimensions,
    SdkEventDimensions,
    SdkEventNames,
    AuthEventFlows,
    &ApiEventDimensions,
    ApiEventDimensions,
    &DisputeDimensions,
    DisputeDimensions,
    DisputeStage,
    AuthEventDimensions,
    &AuthEventDimensions
);

/// Comparison operators for WHERE/HAVING conditions. Quoting of the RHS is
/// decided per-operator in `filter_type_to_sql`.
#[derive(Debug, Clone, Copy)]
pub enum FilterTypes {
    Equal,
    NotEqual,
    EqualBool,
    In,
    Gte,
    Lte,
    Gt,
    Like,
    NotLike,
    IsNotNull,
}

/// Renders a single `lhs op rhs` SQL condition.
///
/// NOTE(review): `Gt` interpolates `rhs` unquoted while `Gte`/`Lte` quote it —
/// presumably `Gt` is only used with numeric values; confirm at call sites.
pub fn filter_type_to_sql(l: &str, op: FilterTypes, r: &str) -> String {
    match op {
        FilterTypes::EqualBool => format!("{l} = {r}"),
        FilterTypes::Equal => format!("{l} = '{r}'"),
        FilterTypes::NotEqual => format!("{l} != '{r}'"),
        FilterTypes::In => format!("{l} IN ({r})"),
        FilterTypes::Gte => format!("{l} >= '{r}'"),
        FilterTypes::Gt => format!("{l} > {r}"),
        FilterTypes::Lte => format!("{l} <= '{r}'"),
        FilterTypes::Like => format!("{l} LIKE '%{r}%'"),
        FilterTypes::NotLike => format!("{l} NOT LIKE '%{r}%'"),
        FilterTypes::IsNotNull => format!("{l} IS NOT NULL"),
    }
}

impl<T> QueryBuilder<T>
where
    T: AnalyticsDataSource,
    AnalyticsCollection: ToSql<T>,
{
    /// Creates an empty builder for `table`, deriving the table engine from
    /// the backend type.
    pub fn new(table: AnalyticsCollection) -> Self {
        Self {
            columns: Default::default(),
            filters: Default::default(),
            group_by: Default::default(),
            order_by: Default::default(),
            having: Default::default(),
            limit_by: Default::default(),
            outer_select: Default::default(),
            top_n: Default::default(),
            table,
            distinct: Default::default(),
            db_type: Default::default(),
            table_engine: T::get_table_engine(table),
        }
    }

    /// Adds one expression to the SELECT list.
    pub fn add_select_column(&mut self, column: impl ToSql<T>) -> QueryResult<()> {
        self.columns.push(
            column
                .to_sql(&self.table_engine)
                .change_context(QueryBuildingError::SqlSerializeError)
                .attach_printable("Error serializing select column")?,
        );
        Ok(())
    }

    /// Serializes `values` and joins them with ", " (e.g. for PARTITION BY).
    pub fn transform_to_sql_values(&mut self, values: &[impl ToSql<T>]) -> QueryResult<String> {
        let res = values
            .iter()
            .map(|i| i.to_sql(&self.table_engine))
            .collect::<error_stack::Result<Vec<String>, ParsingError>>()
            .change_context(QueryBuildingError::SqlSerializeError)
            .attach_printable("Error serializing range filter value")?
            .join(", ");
        Ok(res)
    }

    /// Keeps only the top `count` rows per `columns` group, ranked by
    /// `order_column` in `order`: adds a ROW_NUMBER window aliased `top_n`
    /// and records the post-filter applied in `build_query`.
    pub fn add_top_n_clause(
        &mut self,
        columns: &[impl ToSql<T>],
        count: u64,
        order_column: impl ToSql<T>,
        order: Order,
    ) -> QueryResult<()>
    where
        Window<&'static str>: ToSql<T>,
    {
        let partition_by_columns = self.transform_to_sql_values(columns)?;
        let order_by_column = order_column
            .to_sql(&self.table_engine)
            .change_context(QueryBuildingError::SqlSerializeError)
            .attach_printable("Error serializing select column")?;

        self.add_outer_select_column(Window::RowNumber {
            field: "",
            partition_by: Some(partition_by_columns.clone()),
            order_by: Some((order_by_column.clone(), order)),
            alias: Some("top_n"),
        })?;

        self.top_n = Some(TopN {
            columns: partition_by_columns,
            count,
            order_column: order_by_column,
            order,
        });
        Ok(())
    }

    /// Marks the query as SELECT DISTINCT.
    pub fn set_distinct(&mut self) {
        self.distinct = true
    }

    /// Adds `key = 'value'`.
    pub fn add_filter_clause(
        &mut self,
        key: impl ToSql<T>,
        value: impl ToSql<T>,
    ) -> QueryResult<()> {
        self.add_custom_filter_clause(key, value, FilterTypes::Equal)
    }

    /// Adds `key = value` (unquoted — for boolean/numeric comparisons).
    pub fn add_bool_filter_clause(
        &mut self,
        key: impl ToSql<T>,
        value: impl ToSql<T>,
    ) -> QueryResult<()> {
        self.add_custom_filter_clause(key, value, FilterTypes::EqualBool)
    }

    /// Adds `key != 'value'`.
    pub fn add_negative_filter_clause(
        &mut self,
        key: impl ToSql<T>,
        value: impl ToSql<T>,
    ) -> QueryResult<()> {
        self.add_custom_filter_clause(key, value, FilterTypes::NotEqual)
    }

    /// Adds a single `lhs <comparison> rhs` condition to the filter tree.
    pub fn add_custom_filter_clause(
        &mut self,
        lhs: impl ToSql<T>,
        rhs: impl ToSql<T>,
        comparison: FilterTypes,
    ) -> QueryResult<()> {
        let filter = Filter::Plain(
            lhs.to_sql(&self.table_engine)
                .change_context(QueryBuildingError::SqlSerializeError)
                .attach_printable("Error serializing filter key")?,
            comparison,
            rhs.to_sql(&self.table_engine)
                .change_context(QueryBuildingError::SqlSerializeError)
                .attach_printable("Error serializing filter value")?,
        );
        self.add_nested_filter_clause(filter);
        Ok(())
    }

    /// Pushes `filter` into the root filter group; if the root is currently a
    /// single plain condition, promotes it to an AND-group first.
    pub fn add_nested_filter_clause(&mut self, filter: Filter) {
        match &mut self.filters {
            Filter::NestedFilter(_, ref mut filters) => filters.push(filter),
            f @ Filter::Plain(_, _, _) => {
                self.filters =
                    Filter::NestedFilter(FilterCombinator::And, vec![f.clone(), filter]);
            }
        }
    }

    /// Adds `key IN ('v1', 'v2', ...)`.
    pub fn add_filter_in_range_clause(
        &mut self,
        key: impl ToSql<T>,
        values: &[impl ToSql<T>],
    ) -> QueryResult<()> {
        let list = values
            .iter()
            .map(|i| {
                // trimming whitespaces from the filter values received in request, to prevent a possibility of an SQL injection
                // NOTE(review): this strips *all* spaces (not just leading/
                // trailing) and does not escape quotes — confirm upstream
                // validation restricts these values.
                i.to_sql(&self.table_engine).map(|s| {
                    let trimmed_str = s.replace(' ', "");
                    format!("'{trimmed_str}'")
                })
            })
            .collect::<error_stack::Result<Vec<String>, ParsingError>>()
            .change_context(QueryBuildingError::SqlSerializeError)
            .attach_printable("Error serializing range filter value")?
            .join(", ");
        self.add_custom_filter_clause(key, list, FilterTypes::In)
    }

    /// Adds one GROUP BY expression.
    pub fn add_group_by_clause(&mut self, column: impl ToSql<T>) -> QueryResult<()> {
        self.group_by.push(
            column
                .to_sql(&self.table_engine)
                .change_context(QueryBuildingError::SqlSerializeError)
                .attach_printable("Error serializing group by field")?,
        );
        Ok(())
    }

    /// Adds `column order` to the ORDER BY list.
    pub fn add_order_by_clause(
        &mut self,
        column: impl ToSql<T>,
        order: impl ToSql<T>,
    ) -> QueryResult<()> {
        let column_sql = column
            .to_sql(&self.table_engine)
            .change_context(QueryBuildingError::SqlSerializeError)
            .attach_printable("Error serializing order by column")?;
        let order_sql = order
            .to_sql(&self.table_engine)
            .change_context(QueryBuildingError::SqlSerializeError)
            .attach_printable("Error serializing order direction")?;
        self.order_by.push(format!("{column_sql} {order_sql}"));
        Ok(())
    }

    /// Sets a `LIMIT {limit} BY {columns}` clause (Clickhouse dialect).
    pub fn set_limit_by(&mut self, limit: u64, columns: &[impl ToSql<T>]) -> QueryResult<()> {
        let columns = columns
            .iter()
            .map(|col| col.to_sql(&self.table_engine))
            .collect::<Result<Vec<String>, _>>()
            .change_context(QueryBuildingError::SqlSerializeError)
            .attach_printable("Error serializing LIMIT BY columns")?;
        self.limit_by = Some(LimitByClause { limit, columns });
        Ok(())
    }

    /// Selects a `time_bucket` column truncated to the granularity, using
    /// Clickhouse's toStartOfInterval with a minute-based interval.
    pub fn add_granularity_in_mins(&mut self, granularity: Granularity) -> QueryResult<()> {
        let interval = match granularity {
            Granularity::OneMin => "1",
            Granularity::FiveMin => "5",
            Granularity::FifteenMin => "15",
            Granularity::ThirtyMin => "30",
            Granularity::OneHour => "60",
            Granularity::OneDay => "1440",
        };
        let _ = self.add_select_column(format!(
            "toStartOfInterval(created_at, INTERVAL {interval} MINUTE) as time_bucket"
        ));
        Ok(())
    }

    // Renders the WHERE tree; an empty root group renders as "(  )"-free
    // empty-ish text handled by the emptiness check in build_query.
    fn get_filter_clause(&self) -> QueryResult<String> {
        <Filter as ToSql<T>>::to_sql(&self.filters, &self.table_engine)
            .change_context(QueryBuildingError::SqlSerializeError)
    }

    fn get_select_clause(&self) -> String {
        self.columns.join(", ")
    }

    fn get_group_by_clause(&self) -> String {
        self.group_by.join(", ")
    }

    fn get_outer_select_clause(&self) -> String {
        self.outer_select.join(", ")
    }

    /// Adds a `HAVING aggregate op value` condition.
    pub fn add_having_clause<R>(
        &mut self,
        aggregate: Aggregate<R>,
        filter_type: FilterTypes,
        value: impl ToSql<T>,
    ) -> QueryResult<()>
    where
        Aggregate<R>: ToSql<T>,
    {
        let aggregate = aggregate
            .to_sql(&self.table_engine)
            .change_context(QueryBuildingError::SqlSerializeError)
            .attach_printable("Error serializing having aggregate")?;
        let value = value
            .to_sql(&self.table_engine)
            .change_context(QueryBuildingError::SqlSerializeError)
            .attach_printable("Error serializing having value")?;
        let entry = (aggregate, filter_type, value);
        if let Some(having) = &mut self.having {
            having.push(entry);
        } else {
            self.having = Some(vec![entry]);
        }
        Ok(())
    }

    /// Adds a column to the wrapping outer SELECT (window functions etc.).
    pub fn add_outer_select_column(&mut self, column: impl ToSql<T>) -> QueryResult<()> {
        self.outer_select.push(
            column
                .to_sql(&self.table_engine)
                .change_context(QueryBuildingError::SqlSerializeError)
                .attach_printable("Error serializing outer select column")?,
        );
        Ok(())
    }

    /// Renders the HAVING conditions joined with AND, if any were added.
    pub fn get_filter_type_clause(&self) -> Option<String> {
        self.having.as_ref().map(|vec| {
            vec.iter()
                .map(|(l, op, r)| filter_type_to_sql(l, *op, r))
                .collect::<Vec<String>>()
                .join(" AND ")
        })
    }

    /// Assembles the final SQL string:
    /// SELECT [DISTINCT] ... FROM ... [WHERE] [GROUP BY] [HAVING]
    /// [ORDER BY] [LIMIT BY], then optionally wraps it once for the outer
    /// select (window functions) and once more for the top-N row filter.
    pub fn build_query(&mut self) -> QueryResult<String>
    where
        Aggregate<&'static str>: ToSql<T>,
        Window<&'static str>: ToSql<T>,
    {
        if self.columns.is_empty() {
            Err(QueryBuildingError::InvalidQuery(
                "No select fields provided",
            ))?;
        }
        let mut query = String::from("SELECT ");

        if self.distinct {
            query.push_str("DISTINCT ");
        }

        query.push_str(&self.get_select_clause());

        query.push_str(" FROM ");

        query.push_str(
            &self
                .table
                .to_sql(&self.table_engine)
                .change_context(QueryBuildingError::SqlSerializeError)
                .attach_printable("Error serializing table value")?,
        );

        let filter_clause = self.get_filter_clause()?;
        if !filter_clause.is_empty() {
            query.push_str(" WHERE ");
            query.push_str(filter_clause.as_str());
        }

        if !self.group_by.is_empty() {
            query.push_str(" GROUP BY ");
            query.push_str(&self.get_group_by_clause());
            // CollapsingMergeTree tables need HAVING count(sign) >= 1 so that
            // collapsed (cancelled) row pairs are excluded from results.
            if let TableEngine::CollapsingMergeTree { sign } = self.table_engine {
                self.add_having_clause(
                    Aggregate::Count {
                        field: Some(sign),
                        alias: None,
                    },
                    FilterTypes::Gte,
                    "1",
                )?;
            }
        }

        if self.having.is_some() {
            if let Some(condition) = self.get_filter_type_clause() {
                query.push_str(" HAVING ");
                query.push_str(condition.as_str());
            }
        }

        if !self.order_by.is_empty() {
            query.push_str(" ORDER BY ");
            query.push_str(&self.order_by.join(", "));
        }

        if let Some(limit_by) = &self.limit_by {
            query.push_str(&format!(" {limit_by}"));
        }

        if !self.outer_select.is_empty() {
            query.insert_str(
                0,
                format!("SELECT {} FROM (", &self.get_outer_select_clause()).as_str(),
            );
            query.push_str(") _");
        }

        if let Some(top_n) = &self.top_n {
            query.insert_str(0, "SELECT * FROM (");
            query.push_str(format!(") _ WHERE top_n <= {}", top_n.count).as_str());
        }

        logger::debug!(%query);

        Ok(query)
    }

    /// Builds the query and runs it on `store`, returning the execution
    /// result nested inside the build result.
    pub async fn execute_query<R, P>(
        &mut self,
        store: &P,
    ) -> CustomResult<CustomResult<Vec<R>, QueryExecutionError>, QueryBuildingError>
    where
        P: LoadRow<R> + AnalyticsDataSource,
        Aggregate<&'static str>: ToSql<T>,
        Window<&'static str>: ToSql<T>,
    {
        let query = self
            .build_query()
            .change_context(QueryBuildingError::SqlSerializeError)
            .attach_printable("Failed to execute query")?;
        Ok(store.load_results(query.as_str()).await)
    }
}

impl<T> QueryFilter<T> for AuthInfo
where
    T: AnalyticsDataSource,
    AnalyticsCollection: ToSql<T>,
{
    /// Scopes the query to the caller's access level: org-wide, a set of
    /// merchants within an org, or a set of profiles within one merchant.
    fn set_filter_clause(&self, builder: &mut QueryBuilder<T>) -> QueryResult<()> {
        match self {
            Self::OrgLevel { org_id } => {
                builder
                    .add_filter_clause("organization_id", org_id)
                    .attach_printable("Error adding organization_id filter")?;
            }
            Self::MerchantLevel {
                org_id,
                merchant_ids,
            } => {
                builder
                    .add_filter_clause("organization_id", org_id)
                    .attach_printable("Error adding organization_id filter")?;
                builder
                    .add_filter_in_range_clause("merchant_id", merchant_ids)
                    .attach_printable("Error adding merchant_id filter")?;
            }
            Self::ProfileLevel {
                org_id,
                merchant_id,
                profile_ids,
            } => {
                builder
                    .add_filter_clause("organization_id", org_id)
                    .attach_printable("Error adding organization_id filter")?;
                builder
                    .add_filter_clause("merchant_id", merchant_id)
                    .attach_printable("Error adding merchant_id filter")?;
                builder
                    .add_filter_in_range_clause("profile_id", profile_ids)
                    .attach_printable("Error adding profile_id filter")?;
            }
        }
        Ok(())
    }
}
crates/analytics/src/query.rs
analytics::src::query
6,942
true
// File: crates/analytics/src/disputes.rs // Module: analytics::src::disputes pub mod accumulators; mod core; pub mod filters; pub mod metrics; pub mod types; pub use accumulators::{DisputeMetricAccumulator, DisputeMetricsAccumulator}; pub trait DisputeAnalytics: metrics::DisputeMetricAnalytics {} pub use self::core::{get_filters, get_metrics};
crates/analytics/src/disputes.rs
analytics::src::disputes
87
true
// File: crates/analytics/src/opensearch.rs // Module: analytics::src::opensearch use std::collections::HashSet; use api_models::{ analytics::search::SearchIndex, errors::types::{ApiError, ApiErrorResponse}, }; use aws_config::{self, meta::region::RegionProviderChain, Region}; use common_utils::{ errors::{CustomResult, ErrorSwitch}, types::TimeRange, }; use error_stack::ResultExt; use opensearch::{ auth::Credentials, cert::CertificateValidation, cluster::{Cluster, ClusterHealthParts}, http::{ request::JsonBody, response::Response, transport::{SingleNodeConnectionPool, Transport, TransportBuilder}, Url, }, MsearchParts, OpenSearch, SearchParts, }; use serde_json::{json, Map, Value}; use storage_impl::errors::{ApplicationError, StorageError, StorageResult}; use time::PrimitiveDateTime; use super::{health_check::HealthCheck, query::QueryResult, types::QueryExecutionError}; use crate::{enums::AuthInfo, query::QueryBuildingError}; #[derive(Clone, Debug, serde::Deserialize)] #[serde(tag = "auth")] #[serde(rename_all = "lowercase")] pub enum OpenSearchAuth { Basic { username: String, password: String }, Aws { region: String }, } #[derive(Clone, Debug, serde::Deserialize)] pub struct OpenSearchIndexes { pub payment_attempts: String, pub payment_intents: String, pub refunds: String, pub disputes: String, pub sessionizer_payment_attempts: String, pub sessionizer_payment_intents: String, pub sessionizer_refunds: String, pub sessionizer_disputes: String, } #[derive(Debug, Clone, Copy, serde::Serialize, serde::Deserialize, PartialEq, Eq, Hash)] pub struct OpensearchTimeRange { #[serde(with = "common_utils::custom_serde::iso8601")] pub gte: PrimitiveDateTime, #[serde(default, with = "common_utils::custom_serde::iso8601::option")] pub lte: Option<PrimitiveDateTime>, } impl From<TimeRange> for OpensearchTimeRange { fn from(time_range: TimeRange) -> Self { Self { gte: time_range.start_time, lte: time_range.end_time, } } } #[derive(Clone, Debug, serde::Deserialize)] pub struct 
OpenSearchConfig { host: String, auth: OpenSearchAuth, indexes: OpenSearchIndexes, #[serde(default)] enabled: bool, } impl Default for OpenSearchConfig { fn default() -> Self { Self { host: "https://localhost:9200".to_string(), auth: OpenSearchAuth::Basic { username: "admin".to_string(), password: "admin".to_string(), }, indexes: OpenSearchIndexes { payment_attempts: "hyperswitch-payment-attempt-events".to_string(), payment_intents: "hyperswitch-payment-intent-events".to_string(), refunds: "hyperswitch-refund-events".to_string(), disputes: "hyperswitch-dispute-events".to_string(), sessionizer_payment_attempts: "sessionizer-payment-attempt-events".to_string(), sessionizer_payment_intents: "sessionizer-payment-intent-events".to_string(), sessionizer_refunds: "sessionizer-refund-events".to_string(), sessionizer_disputes: "sessionizer-dispute-events".to_string(), }, enabled: false, } } } #[derive(Debug, thiserror::Error)] pub enum OpenSearchError { #[error("Opensearch is not enabled")] NotEnabled, #[error("Opensearch connection error")] ConnectionError, #[error("Opensearch NON-200 response content: '{0}'")] ResponseNotOK(String), #[error("Opensearch bad request error")] BadRequestError(String), #[error("Opensearch response error")] ResponseError, #[error("Opensearch query building error")] QueryBuildingError, #[error("Opensearch deserialisation error")] DeserialisationError, #[error("Opensearch index access not present error: {0:?}")] IndexAccessNotPermittedError(SearchIndex), #[error("Opensearch unknown error")] UnknownError, #[error("Opensearch access forbidden error")] AccessForbiddenError, } impl ErrorSwitch<OpenSearchError> for QueryBuildingError { fn switch(&self) -> OpenSearchError { OpenSearchError::QueryBuildingError } } impl ErrorSwitch<ApiErrorResponse> for OpenSearchError { fn switch(&self) -> ApiErrorResponse { match self { Self::ConnectionError => ApiErrorResponse::InternalServerError(ApiError::new( "IR", 0, "Connection error", None, )), 
Self::BadRequestError(response) => { ApiErrorResponse::BadRequest(ApiError::new("IR", 1, response.to_string(), None)) } Self::ResponseNotOK(response) => ApiErrorResponse::InternalServerError(ApiError::new( "IR", 1, format!("Something went wrong {response}"), None, )), Self::ResponseError => ApiErrorResponse::InternalServerError(ApiError::new( "IR", 2, "Something went wrong", None, )), Self::QueryBuildingError => ApiErrorResponse::InternalServerError(ApiError::new( "IR", 3, "Query building error", None, )), Self::DeserialisationError => ApiErrorResponse::InternalServerError(ApiError::new( "IR", 4, "Deserialisation error", None, )), Self::IndexAccessNotPermittedError(index) => { ApiErrorResponse::ForbiddenCommonResource(ApiError::new( "IR", 5, format!("Index access not permitted: {index:?}"), None, )) } Self::UnknownError => { ApiErrorResponse::InternalServerError(ApiError::new("IR", 6, "Unknown error", None)) } Self::AccessForbiddenError => ApiErrorResponse::ForbiddenCommonResource(ApiError::new( "IR", 7, "Access Forbidden error", None, )), Self::NotEnabled => ApiErrorResponse::InternalServerError(ApiError::new( "IR", 8, "Opensearch is not enabled", None, )), } } } #[derive(Clone, Debug)] pub struct OpenSearchClient { pub client: OpenSearch, pub transport: Transport, pub indexes: OpenSearchIndexes, } impl OpenSearchClient { pub async fn create(conf: &OpenSearchConfig) -> CustomResult<Self, OpenSearchError> { let url = Url::parse(&conf.host).map_err(|_| OpenSearchError::ConnectionError)?; let transport = match &conf.auth { OpenSearchAuth::Basic { username, password } => { let credentials = Credentials::Basic(username.clone(), password.clone()); TransportBuilder::new(SingleNodeConnectionPool::new(url)) .cert_validation(CertificateValidation::None) .auth(credentials) .build() .map_err(|_| OpenSearchError::ConnectionError)? 
} OpenSearchAuth::Aws { region } => { let region_provider = RegionProviderChain::first_try(Region::new(region.clone())); let sdk_config = aws_config::from_env().region(region_provider).load().await; let conn_pool = SingleNodeConnectionPool::new(url); TransportBuilder::new(conn_pool) .auth( sdk_config .clone() .try_into() .map_err(|_| OpenSearchError::ConnectionError)?, ) .service_name("es") .build() .map_err(|_| OpenSearchError::ConnectionError)? } }; Ok(Self { transport: transport.clone(), client: OpenSearch::new(transport), indexes: conf.indexes.clone(), }) } pub fn search_index_to_opensearch_index(&self, index: SearchIndex) -> String { match index { SearchIndex::PaymentAttempts => self.indexes.payment_attempts.clone(), SearchIndex::PaymentIntents => self.indexes.payment_intents.clone(), SearchIndex::Refunds => self.indexes.refunds.clone(), SearchIndex::Disputes => self.indexes.disputes.clone(), SearchIndex::SessionizerPaymentAttempts => { self.indexes.sessionizer_payment_attempts.clone() } SearchIndex::SessionizerPaymentIntents => { self.indexes.sessionizer_payment_intents.clone() } SearchIndex::SessionizerRefunds => self.indexes.sessionizer_refunds.clone(), SearchIndex::SessionizerDisputes => self.indexes.sessionizer_disputes.clone(), } } pub async fn execute( &self, query_builder: OpenSearchQueryBuilder, ) -> CustomResult<Response, OpenSearchError> { match query_builder.query_type { OpenSearchQuery::Msearch(ref indexes) => { let payload = query_builder .construct_payload(indexes) .change_context(OpenSearchError::QueryBuildingError)?; let payload_with_indexes = payload.into_iter().zip(indexes).fold( Vec::new(), |mut payload_with_indexes, (index_hit, index)| { payload_with_indexes.push( json!({"index": self.search_index_to_opensearch_index(*index)}).into(), ); payload_with_indexes.push(JsonBody::new(index_hit.clone())); payload_with_indexes }, ); self.client .msearch(MsearchParts::None) .body(payload_with_indexes) .send() .await 
.change_context(OpenSearchError::ResponseError) } OpenSearchQuery::Search(index) => { let payload = query_builder .clone() .construct_payload(&[index]) .change_context(OpenSearchError::QueryBuildingError)?; let final_payload = payload.first().unwrap_or(&Value::Null); self.client .search(SearchParts::Index(&[ &self.search_index_to_opensearch_index(index) ])) .from(query_builder.offset.unwrap_or(0)) .size(query_builder.count.unwrap_or(10)) .body(final_payload) .send() .await .change_context(OpenSearchError::ResponseError) } } } } #[async_trait::async_trait] impl HealthCheck for OpenSearchClient { async fn deep_health_check(&self) -> CustomResult<(), QueryExecutionError> { let health = Cluster::new(&self.transport) .health(ClusterHealthParts::None) .send() .await .change_context(QueryExecutionError::DatabaseError)? .json::<OpenSearchHealth>() .await .change_context(QueryExecutionError::DatabaseError)?; if health.status != OpenSearchHealthStatus::Red { Ok(()) } else { Err::<(), error_stack::Report<QueryExecutionError>>( QueryExecutionError::DatabaseError.into(), ) .attach_printable_lazy(|| format!("Opensearch cluster health is red: {health:?}")) } } } impl OpenSearchIndexes { pub fn validate(&self) -> Result<(), ApplicationError> { use common_utils::{ext_traits::ConfigExt, fp_utils::when}; when(self.payment_attempts.is_default_or_empty(), || { Err(ApplicationError::InvalidConfigurationValueError( "Opensearch Payment Attempts index must not be empty".into(), )) })?; when(self.payment_intents.is_default_or_empty(), || { Err(ApplicationError::InvalidConfigurationValueError( "Opensearch Payment Intents index must not be empty".into(), )) })?; when(self.refunds.is_default_or_empty(), || { Err(ApplicationError::InvalidConfigurationValueError( "Opensearch Refunds index must not be empty".into(), )) })?; when(self.disputes.is_default_or_empty(), || { Err(ApplicationError::InvalidConfigurationValueError( "Opensearch Disputes index must not be empty".into(), )) })?; when( 
self.sessionizer_payment_attempts.is_default_or_empty(), || { Err(ApplicationError::InvalidConfigurationValueError( "Opensearch Sessionizer Payment Attempts index must not be empty".into(), )) }, )?; when( self.sessionizer_payment_intents.is_default_or_empty(), || { Err(ApplicationError::InvalidConfigurationValueError( "Opensearch Sessionizer Payment Intents index must not be empty".into(), )) }, )?; when(self.sessionizer_refunds.is_default_or_empty(), || { Err(ApplicationError::InvalidConfigurationValueError( "Opensearch Sessionizer Refunds index must not be empty".into(), )) })?; when(self.sessionizer_disputes.is_default_or_empty(), || { Err(ApplicationError::InvalidConfigurationValueError( "Opensearch Sessionizer Disputes index must not be empty".into(), )) })?; Ok(()) } } impl OpenSearchAuth { pub fn validate(&self) -> Result<(), ApplicationError> { use common_utils::{ext_traits::ConfigExt, fp_utils::when}; match self { Self::Basic { username, password } => { when(username.is_default_or_empty(), || { Err(ApplicationError::InvalidConfigurationValueError( "Opensearch Basic auth username must not be empty".into(), )) })?; when(password.is_default_or_empty(), || { Err(ApplicationError::InvalidConfigurationValueError( "Opensearch Basic auth password must not be empty".into(), )) })?; } Self::Aws { region } => { when(region.is_default_or_empty(), || { Err(ApplicationError::InvalidConfigurationValueError( "Opensearch Aws auth region must not be empty".into(), )) })?; } }; Ok(()) } } impl OpenSearchConfig { pub async fn get_opensearch_client(&self) -> StorageResult<Option<OpenSearchClient>> { if !self.enabled { return Ok(None); } Ok(Some( OpenSearchClient::create(self) .await .change_context(StorageError::InitializationError)?, )) } pub fn validate(&self) -> Result<(), ApplicationError> { use common_utils::{ext_traits::ConfigExt, fp_utils::when}; if !self.enabled { return Ok(()); } when(self.host.is_default_or_empty(), || { 
Err(ApplicationError::InvalidConfigurationValueError( "Opensearch host must not be empty".into(), )) })?; self.indexes.validate()?; self.auth.validate()?; Ok(()) } } #[derive(Debug, serde::Deserialize, PartialEq)] #[serde(rename_all = "lowercase")] pub enum OpenSearchHealthStatus { Red, Green, Yellow, } #[derive(Debug, serde::Deserialize)] pub struct OpenSearchHealth { pub status: OpenSearchHealthStatus, } #[derive(Debug, Clone)] pub enum OpenSearchQuery { Msearch(Vec<SearchIndex>), Search(SearchIndex), } #[derive(Debug, Clone)] pub struct OpenSearchQueryBuilder { pub query_type: OpenSearchQuery, pub query: String, pub offset: Option<i64>, pub count: Option<i64>, pub filters: Vec<(String, Vec<Value>)>, pub time_range: Option<OpensearchTimeRange>, search_params: Vec<AuthInfo>, case_sensitive_fields: HashSet<&'static str>, } impl OpenSearchQueryBuilder { pub fn new(query_type: OpenSearchQuery, query: String, search_params: Vec<AuthInfo>) -> Self { Self { query_type, query, search_params, offset: Default::default(), count: Default::default(), filters: Default::default(), time_range: Default::default(), case_sensitive_fields: HashSet::from([ "customer_email.keyword", "search_tags.keyword", "card_last_4.keyword", "payment_id.keyword", "amount", "customer_id.keyword", ]), } } pub fn set_offset_n_count(&mut self, offset: i64, count: i64) -> QueryResult<()> { self.offset = Some(offset); self.count = Some(count); Ok(()) } pub fn set_time_range(&mut self, time_range: OpensearchTimeRange) -> QueryResult<()> { self.time_range = Some(time_range); Ok(()) } pub fn add_filter_clause(&mut self, lhs: String, rhs: Vec<Value>) -> QueryResult<()> { self.filters.push((lhs, rhs)); Ok(()) } pub fn get_status_field(&self, index: SearchIndex) -> &str { match index { SearchIndex::Refunds | SearchIndex::SessionizerRefunds => "refund_status.keyword", SearchIndex::Disputes | SearchIndex::SessionizerDisputes => "dispute_status.keyword", _ => "status.keyword", } } pub fn get_amount_field(&self, 
index: SearchIndex) -> &str { match index { SearchIndex::Refunds | SearchIndex::SessionizerRefunds => "refund_amount", SearchIndex::Disputes | SearchIndex::SessionizerDisputes => "dispute_amount", _ => "amount", } } pub fn build_filter_array( &self, case_sensitive_filters: Vec<&(String, Vec<Value>)>, index: SearchIndex, ) -> Vec<Value> { let mut filter_array = Vec::new(); if !self.query.is_empty() { filter_array.push(json!({ "multi_match": { "type": "phrase", "query": self.query, "lenient": true } })); } let case_sensitive_json_filters = case_sensitive_filters .into_iter() .map(|(k, v)| { let key = if *k == "amount" { self.get_amount_field(index).to_string() } else { k.clone() }; json!({"terms": {key: v}}) }) .collect::<Vec<Value>>(); filter_array.extend(case_sensitive_json_filters); if let Some(ref time_range) = self.time_range { let range = json!(time_range); filter_array.push(json!({ "range": { "@timestamp": range } })); } filter_array } pub fn build_case_insensitive_filters( &self, mut payload: Value, case_insensitive_filters: &[&(String, Vec<Value>)], auth_array: Vec<Value>, index: SearchIndex, ) -> Value { let mut must_array = case_insensitive_filters .iter() .map(|(k, v)| { let key = if *k == "status.keyword" { self.get_status_field(index).to_string() } else { k.clone() }; json!({ "bool": { "must": [ { "bool": { "should": v.iter().map(|value| { json!({ "term": { format!("{}", key): { "value": value, "case_insensitive": true } } }) }).collect::<Vec<Value>>(), "minimum_should_match": 1 } } ] } }) }) .collect::<Vec<Value>>(); must_array.push(json!({ "bool": { "must": [ { "bool": { "should": auth_array, "minimum_should_match": 1 } } ] }})); if let Some(query) = payload.get_mut("query") { if let Some(bool_obj) = query.get_mut("bool") { if let Some(bool_map) = bool_obj.as_object_mut() { bool_map.insert("must".to_string(), Value::Array(must_array)); } } } payload } pub fn build_auth_array(&self) -> Vec<Value> { self.search_params .iter() .map(|user_level| match 
user_level { AuthInfo::OrgLevel { org_id } => { let must_clauses = vec![json!({ "term": { "organization_id.keyword": { "value": org_id } } })]; json!({ "bool": { "must": must_clauses } }) } AuthInfo::MerchantLevel { org_id, merchant_ids, } => { let must_clauses = vec![ json!({ "term": { "organization_id.keyword": { "value": org_id } } }), json!({ "terms": { "merchant_id.keyword": merchant_ids } }), ]; json!({ "bool": { "must": must_clauses } }) } AuthInfo::ProfileLevel { org_id, merchant_id, profile_ids, } => { let must_clauses = vec![ json!({ "term": { "organization_id.keyword": { "value": org_id } } }), json!({ "term": { "merchant_id.keyword": { "value": merchant_id } } }), json!({ "terms": { "profile_id.keyword": profile_ids } }), ]; json!({ "bool": { "must": must_clauses } }) } }) .collect::<Vec<Value>>() } /// # Panics /// /// This function will panic if: /// /// * The structure of the JSON query is not as expected (e.g., missing keys or incorrect types). /// /// Ensure that the input data and the structure of the query are valid and correctly handled. 
pub fn construct_payload(&self, indexes: &[SearchIndex]) -> QueryResult<Vec<Value>> { let mut query_obj = Map::new(); let bool_obj = Map::new(); let (case_sensitive_filters, case_insensitive_filters): (Vec<_>, Vec<_>) = self .filters .iter() .partition(|(k, _)| self.case_sensitive_fields.contains(k.as_str())); let should_array = self.build_auth_array(); query_obj.insert("bool".to_string(), Value::Object(bool_obj.clone())); let mut sort_obj = Map::new(); sort_obj.insert( "@timestamp".to_string(), json!({ "order": "desc" }), ); Ok(indexes .iter() .map(|index| { let mut payload = json!({ "query": query_obj.clone(), "sort": [ Value::Object(sort_obj.clone()) ] }); let filter_array = self.build_filter_array(case_sensitive_filters.clone(), *index); if !filter_array.is_empty() { payload .get_mut("query") .and_then(|query| query.get_mut("bool")) .and_then(|bool_obj| bool_obj.as_object_mut()) .map(|bool_map| { bool_map.insert("filter".to_string(), Value::Array(filter_array)); }); } payload = self.build_case_insensitive_filters( payload, &case_insensitive_filters, should_array.clone(), *index, ); payload }) .collect::<Vec<Value>>()) } }
crates/analytics/src/opensearch.rs
analytics::src::opensearch
5,077
true
// File: crates/analytics/src/enums.rs // Module: analytics::src::enums pub use common_utils::types::authentication::AuthInfo;
crates/analytics/src/enums.rs
analytics::src::enums
32
true
// File: crates/analytics/src/metrics.rs // Module: analytics::src::metrics use router_env::{global_meter, histogram_metric_f64, histogram_metric_u64}; global_meter!(GLOBAL_METER, "ROUTER_API"); histogram_metric_f64!(METRIC_FETCH_TIME, GLOBAL_METER); histogram_metric_u64!(BUCKETS_FETCHED, GLOBAL_METER); pub mod request;
crates/analytics/src/metrics.rs
analytics::src::metrics
88
true
// File: crates/analytics/src/frm.rs // Module: analytics::src::frm pub mod accumulator; mod core; pub mod filters; pub mod metrics; pub mod types; pub use accumulator::{FrmMetricAccumulator, FrmMetricsAccumulator}; pub use self::core::{get_filters, get_metrics};
crates/analytics/src/frm.rs
analytics::src::frm
68
true
// File: crates/analytics/src/errors.rs // Module: analytics::src::errors use api_models::errors::types::{ApiError, ApiErrorResponse}; use common_utils::errors::{CustomResult, ErrorSwitch}; pub type AnalyticsResult<T> = CustomResult<T, AnalyticsError>; #[derive(Debug, Clone, serde::Serialize, thiserror::Error)] pub enum AnalyticsError { #[allow(dead_code)] #[error("Not implemented: {0}")] NotImplemented(&'static str), #[error("Unknown Analytics Error")] UnknownError, #[error("Access Forbidden Analytics Error")] AccessForbiddenError, #[error("Failed to fetch currency exchange rate")] ForexFetchFailed, } impl ErrorSwitch<ApiErrorResponse> for AnalyticsError { fn switch(&self) -> ApiErrorResponse { match self { Self::NotImplemented(feature) => ApiErrorResponse::NotImplemented(ApiError::new( "IR", 0, format!("{feature} is not implemented."), None, )), Self::UnknownError => ApiErrorResponse::InternalServerError(ApiError::new( "HE", 0, "Something went wrong", None, )), Self::AccessForbiddenError => { ApiErrorResponse::Unauthorized(ApiError::new("IR", 0, "Access Forbidden", None)) } Self::ForexFetchFailed => ApiErrorResponse::InternalServerError(ApiError::new( "HE", 0, "Failed to fetch currency exchange rate", None, )), } } }
crates/analytics/src/errors.rs
analytics::src::errors
320
true
// File: crates/analytics/src/routing_events.rs // Module: analytics::src::routing_events mod core; pub mod events; pub trait RoutingEventAnalytics: events::RoutingEventLogAnalytics {} pub use self::core::routing_events_core;
crates/analytics/src/routing_events.rs
analytics::src::routing_events
52
true
// File: crates/analytics/src/connector_events.rs // Module: analytics::src::connector_events mod core; pub mod events; pub trait ConnectorEventAnalytics: events::ConnectorEventLogAnalytics {} pub use self::core::connector_events_core;
crates/analytics/src/connector_events.rs
analytics::src::connector_events
52
true
// File: crates/analytics/src/active_payments.rs // Module: analytics::src::active_payments pub mod accumulator; mod core; pub mod metrics; pub use accumulator::{ActivePaymentsMetricAccumulator, ActivePaymentsMetricsAccumulator}; pub use self::core::get_metrics;
crates/analytics/src/active_payments.rs
analytics::src::active_payments
60
true
// File: crates/analytics/src/health_check.rs // Module: analytics::src::health_check use common_utils::errors::CustomResult; use crate::types::QueryExecutionError; #[async_trait::async_trait] pub trait HealthCheck { async fn deep_health_check(&self) -> CustomResult<(), QueryExecutionError>; }
crates/analytics/src/health_check.rs
analytics::src::health_check
70
true
// File: crates/analytics/src/search.rs // Module: analytics::src::search use api_models::analytics::search::{ GetGlobalSearchRequest, GetSearchRequestWithIndex, GetSearchResponse, OpenMsearchOutput, OpensearchOutput, SearchIndex, SearchStatus, }; use common_utils::errors::{CustomResult, ReportSwitchExt}; use error_stack::ResultExt; use router_env::tracing; use serde_json::Value; use crate::{ enums::AuthInfo, opensearch::{OpenSearchClient, OpenSearchError, OpenSearchQuery, OpenSearchQueryBuilder}, }; pub fn convert_to_value<T: Into<Value>>(items: Vec<T>) -> Vec<Value> { items.into_iter().map(|item| item.into()).collect() } pub async fn msearch_results( client: &OpenSearchClient, req: GetGlobalSearchRequest, search_params: Vec<AuthInfo>, indexes: Vec<SearchIndex>, ) -> CustomResult<Vec<GetSearchResponse>, OpenSearchError> { if req.query.trim().is_empty() && req .filters .as_ref() .is_none_or(|filters| filters.is_all_none()) { return Err(OpenSearchError::BadRequestError( "Both query and filters are empty".to_string(), ) .into()); } let mut query_builder = OpenSearchQueryBuilder::new( OpenSearchQuery::Msearch(indexes.clone()), req.query, search_params, ); if let Some(filters) = req.filters { if let Some(currency) = filters.currency { if !currency.is_empty() { query_builder .add_filter_clause("currency.keyword".to_string(), convert_to_value(currency)) .switch()?; } }; if let Some(status) = filters.status { if !status.is_empty() { query_builder .add_filter_clause("status.keyword".to_string(), convert_to_value(status)) .switch()?; } }; if let Some(payment_method) = filters.payment_method { if !payment_method.is_empty() { query_builder .add_filter_clause( "payment_method.keyword".to_string(), convert_to_value(payment_method), ) .switch()?; } }; if let Some(customer_email) = filters.customer_email { if !customer_email.is_empty() { query_builder .add_filter_clause( "customer_email.keyword".to_string(), convert_to_value( customer_email .iter() .filter_map(|email| { // TODO: 
Add trait based inputs instead of converting this to strings serde_json::to_value(email) .ok() .and_then(|a| a.as_str().map(|a| a.to_string())) }) .collect(), ), ) .switch()?; } }; if let Some(search_tags) = filters.search_tags { if !search_tags.is_empty() { query_builder .add_filter_clause( "feature_metadata.search_tags.keyword".to_string(), convert_to_value( search_tags .iter() .filter_map(|search_tag| { // TODO: Add trait based inputs instead of converting this to strings serde_json::to_value(search_tag) .ok() .and_then(|a| a.as_str().map(|a| a.to_string())) }) .collect(), ), ) .switch()?; } }; if let Some(connector) = filters.connector { if !connector.is_empty() { query_builder .add_filter_clause("connector.keyword".to_string(), convert_to_value(connector)) .switch()?; } }; if let Some(payment_method_type) = filters.payment_method_type { if !payment_method_type.is_empty() { query_builder .add_filter_clause( "payment_method_type.keyword".to_string(), convert_to_value(payment_method_type), ) .switch()?; } }; if let Some(card_network) = filters.card_network { if !card_network.is_empty() { query_builder .add_filter_clause( "card_network.keyword".to_string(), convert_to_value(card_network), ) .switch()?; } }; if let Some(card_last_4) = filters.card_last_4 { if !card_last_4.is_empty() { query_builder .add_filter_clause( "card_last_4.keyword".to_string(), convert_to_value(card_last_4), ) .switch()?; } }; if let Some(payment_id) = filters.payment_id { if !payment_id.is_empty() { query_builder .add_filter_clause( "payment_id.keyword".to_string(), convert_to_value(payment_id), ) .switch()?; } }; if let Some(amount) = filters.amount { if !amount.is_empty() { query_builder .add_filter_clause("amount".to_string(), convert_to_value(amount)) .switch()?; } }; if let Some(customer_id) = filters.customer_id { if !customer_id.is_empty() { query_builder .add_filter_clause( "customer_id.keyword".to_string(), convert_to_value(customer_id), ) .switch()?; } }; }; if let 
Some(time_range) = req.time_range { query_builder.set_time_range(time_range.into()).switch()?; }; let response_text: OpenMsearchOutput = client .execute(query_builder) .await .change_context(OpenSearchError::ConnectionError)? .text() .await .change_context(OpenSearchError::ResponseError) .and_then(|body: String| { serde_json::from_str::<OpenMsearchOutput>(&body) .change_context(OpenSearchError::DeserialisationError) .attach_printable(body.clone()) })?; let response_body: OpenMsearchOutput = response_text; Ok(response_body .responses .into_iter() .zip(indexes) .map(|(index_hit, index)| match index_hit { OpensearchOutput::Success(success) => GetSearchResponse { count: success.hits.total.value, index, hits: success .hits .hits .into_iter() .map(|hit| hit.source) .collect(), status: SearchStatus::Success, }, OpensearchOutput::Error(error) => { tracing::error!( index = ?index, error_response = ?error, "Search error" ); GetSearchResponse { count: 0, index, hits: Vec::new(), status: SearchStatus::Failure, } } }) .collect()) } pub async fn search_results( client: &OpenSearchClient, req: GetSearchRequestWithIndex, search_params: Vec<AuthInfo>, ) -> CustomResult<GetSearchResponse, OpenSearchError> { let search_req = req.search_req; if search_req.query.trim().is_empty() && search_req .filters .as_ref() .is_none_or(|filters| filters.is_all_none()) { return Err(OpenSearchError::BadRequestError( "Both query and filters are empty".to_string(), ) .into()); } let mut query_builder = OpenSearchQueryBuilder::new( OpenSearchQuery::Search(req.index), search_req.query, search_params, ); if let Some(filters) = search_req.filters { if let Some(currency) = filters.currency { if !currency.is_empty() { query_builder .add_filter_clause("currency.keyword".to_string(), convert_to_value(currency)) .switch()?; } }; if let Some(status) = filters.status { if !status.is_empty() { query_builder .add_filter_clause("status.keyword".to_string(), convert_to_value(status)) .switch()?; } }; if let 
Some(payment_method) = filters.payment_method { if !payment_method.is_empty() { query_builder .add_filter_clause( "payment_method.keyword".to_string(), convert_to_value(payment_method), ) .switch()?; } }; if let Some(customer_email) = filters.customer_email { if !customer_email.is_empty() { query_builder .add_filter_clause( "customer_email.keyword".to_string(), convert_to_value( customer_email .iter() .filter_map(|email| { // TODO: Add trait based inputs instead of converting this to strings serde_json::to_value(email) .ok() .and_then(|a| a.as_str().map(|a| a.to_string())) }) .collect(), ), ) .switch()?; } }; if let Some(search_tags) = filters.search_tags { if !search_tags.is_empty() { query_builder .add_filter_clause( "feature_metadata.search_tags.keyword".to_string(), convert_to_value( search_tags .iter() .filter_map(|search_tag| { // TODO: Add trait based inputs instead of converting this to strings serde_json::to_value(search_tag) .ok() .and_then(|a| a.as_str().map(|a| a.to_string())) }) .collect(), ), ) .switch()?; } }; if let Some(connector) = filters.connector { if !connector.is_empty() { query_builder .add_filter_clause("connector.keyword".to_string(), convert_to_value(connector)) .switch()?; } }; if let Some(payment_method_type) = filters.payment_method_type { if !payment_method_type.is_empty() { query_builder .add_filter_clause( "payment_method_type.keyword".to_string(), convert_to_value(payment_method_type), ) .switch()?; } }; if let Some(card_network) = filters.card_network { if !card_network.is_empty() { query_builder .add_filter_clause( "card_network.keyword".to_string(), convert_to_value(card_network), ) .switch()?; } }; if let Some(card_last_4) = filters.card_last_4 { if !card_last_4.is_empty() { query_builder .add_filter_clause( "card_last_4.keyword".to_string(), convert_to_value(card_last_4), ) .switch()?; } }; if let Some(payment_id) = filters.payment_id { if !payment_id.is_empty() { query_builder .add_filter_clause( 
"payment_id.keyword".to_string(), convert_to_value(payment_id), ) .switch()?; } }; if let Some(amount) = filters.amount { if !amount.is_empty() { query_builder .add_filter_clause("amount".to_string(), convert_to_value(amount)) .switch()?; } }; if let Some(customer_id) = filters.customer_id { if !customer_id.is_empty() { query_builder .add_filter_clause( "customer_id.keyword".to_string(), convert_to_value(customer_id), ) .switch()?; } }; }; if let Some(time_range) = search_req.time_range { query_builder.set_time_range(time_range.into()).switch()?; }; query_builder .set_offset_n_count(search_req.offset, search_req.count) .switch()?; let response_text: OpensearchOutput = client .execute(query_builder) .await .change_context(OpenSearchError::ConnectionError)? .text() .await .change_context(OpenSearchError::ResponseError) .and_then(|body: String| { serde_json::from_str::<OpensearchOutput>(&body) .change_context(OpenSearchError::DeserialisationError) .attach_printable(body.clone()) })?; let response_body: OpensearchOutput = response_text; match response_body { OpensearchOutput::Success(success) => Ok(GetSearchResponse { count: success.hits.total.value, index: req.index, hits: success .hits .hits .into_iter() .map(|hit| hit.source) .collect(), status: SearchStatus::Success, }), OpensearchOutput::Error(error) => { tracing::error!( index = ?req.index, error_response = ?error, "Search error" ); Ok(GetSearchResponse { count: 0, index: req.index, hits: Vec::new(), status: SearchStatus::Failure, }) } } }
crates/analytics/src/search.rs
analytics::src::search
2,734
true
// File: crates/analytics/src/sdk_events.rs // Module: analytics::src::sdk_events pub mod accumulator; mod core; pub mod events; pub mod filters; pub mod metrics; pub mod types; pub use accumulator::{SdkEventMetricAccumulator, SdkEventMetricsAccumulator}; pub use self::core::{get_filters, get_metrics, sdk_events_core};
crates/analytics/src/sdk_events.rs
analytics::src::sdk_events
79
true
// File: crates/analytics/src/utils.rs // Module: analytics::src::utils use api_models::analytics::{ api_event::{ApiEventDimensions, ApiEventMetrics}, auth_events::{AuthEventDimensions, AuthEventMetrics}, disputes::{DisputeDimensions, DisputeMetrics}, frm::{FrmDimensions, FrmMetrics}, payment_intents::{PaymentIntentDimensions, PaymentIntentMetrics}, payments::{PaymentDimensions, PaymentMetrics}, refunds::{RefundDimensions, RefundMetrics}, sdk_events::{SdkEventDimensions, SdkEventMetrics}, NameDescription, }; use strum::IntoEnumIterator; pub fn get_payment_dimensions() -> Vec<NameDescription> { vec![ PaymentDimensions::Connector, PaymentDimensions::PaymentMethod, PaymentDimensions::PaymentMethodType, PaymentDimensions::Currency, PaymentDimensions::AuthType, PaymentDimensions::PaymentStatus, PaymentDimensions::ClientSource, PaymentDimensions::ClientVersion, PaymentDimensions::ProfileId, PaymentDimensions::CardNetwork, PaymentDimensions::MerchantId, PaymentDimensions::RoutingApproach, ] .into_iter() .map(Into::into) .collect() } pub fn get_payment_intent_dimensions() -> Vec<NameDescription> { vec![ PaymentIntentDimensions::PaymentIntentStatus, PaymentIntentDimensions::Currency, PaymentIntentDimensions::ProfileId, PaymentIntentDimensions::Connector, PaymentIntentDimensions::AuthType, PaymentIntentDimensions::PaymentMethod, PaymentIntentDimensions::PaymentMethodType, PaymentIntentDimensions::CardNetwork, PaymentIntentDimensions::MerchantId, ] .into_iter() .map(Into::into) .collect() } pub fn get_auth_event_dimensions() -> Vec<NameDescription> { vec![ AuthEventDimensions::AuthenticationConnector, AuthEventDimensions::MessageVersion, AuthEventDimensions::AcsReferenceNumber, AuthEventDimensions::Platform, AuthEventDimensions::Mcc, AuthEventDimensions::Currency, AuthEventDimensions::MerchantCountry, AuthEventDimensions::BillingCountry, AuthEventDimensions::ShippingCountry, AuthEventDimensions::IssuerCountry, AuthEventDimensions::IssuerId, 
AuthEventDimensions::EarliestSupportedVersion, AuthEventDimensions::LatestSupportedVersion, AuthEventDimensions::WhitelistDecision, AuthEventDimensions::DeviceManufacturer, AuthEventDimensions::DeviceType, AuthEventDimensions::DeviceBrand, AuthEventDimensions::DeviceOs, AuthEventDimensions::DeviceDisplay, AuthEventDimensions::BrowserName, AuthEventDimensions::BrowserVersion, AuthEventDimensions::SchemeName, AuthEventDimensions::ExemptionRequested, AuthEventDimensions::ExemptionAccepted, ] .into_iter() .map(Into::into) .collect() } pub fn get_refund_dimensions() -> Vec<NameDescription> { RefundDimensions::iter().map(Into::into).collect() } pub fn get_frm_dimensions() -> Vec<NameDescription> { FrmDimensions::iter().map(Into::into).collect() } pub fn get_sdk_event_dimensions() -> Vec<NameDescription> { SdkEventDimensions::iter().map(Into::into).collect() } pub fn get_api_event_dimensions() -> Vec<NameDescription> { ApiEventDimensions::iter().map(Into::into).collect() } pub fn get_payment_metrics_info() -> Vec<NameDescription> { PaymentMetrics::iter().map(Into::into).collect() } pub fn get_payment_intent_metrics_info() -> Vec<NameDescription> { PaymentIntentMetrics::iter().map(Into::into).collect() } pub fn get_refund_metrics_info() -> Vec<NameDescription> { RefundMetrics::iter().map(Into::into).collect() } pub fn get_frm_metrics_info() -> Vec<NameDescription> { FrmMetrics::iter().map(Into::into).collect() } pub fn get_sdk_event_metrics_info() -> Vec<NameDescription> { SdkEventMetrics::iter().map(Into::into).collect() } pub fn get_auth_event_metrics_info() -> Vec<NameDescription> { AuthEventMetrics::iter().map(Into::into).collect() } pub fn get_api_event_metrics_info() -> Vec<NameDescription> { ApiEventMetrics::iter().map(Into::into).collect() } pub fn get_dispute_metrics_info() -> Vec<NameDescription> { DisputeMetrics::iter().map(Into::into).collect() } pub fn get_dispute_dimensions() -> Vec<NameDescription> { DisputeDimensions::iter().map(Into::into).collect() }
crates/analytics/src/utils.rs
analytics::src::utils
987
true
// File: crates/analytics/src/outgoing_webhook_event/core.rs // Module: analytics::src::outgoing_webhook_event::core use api_models::analytics::outgoing_webhook_event::OutgoingWebhookLogsRequest; use common_utils::errors::ReportSwitchExt; use error_stack::ResultExt; use super::events::{get_outgoing_webhook_event, OutgoingWebhookLogsResult}; use crate::{errors::AnalyticsResult, types::FiltersError, AnalyticsProvider}; pub async fn outgoing_webhook_events_core( pool: &AnalyticsProvider, req: OutgoingWebhookLogsRequest, merchant_id: &common_utils::id_type::MerchantId, ) -> AnalyticsResult<Vec<OutgoingWebhookLogsResult>> { let data = match pool { AnalyticsProvider::Sqlx(_) => Err(FiltersError::NotImplemented( "Outgoing Webhook Events Logs not implemented for SQLX", )) .attach_printable("SQL Analytics is not implemented for Outgoing Webhook Events"), AnalyticsProvider::Clickhouse(ckh_pool) | AnalyticsProvider::CombinedSqlx(_, ckh_pool) | AnalyticsProvider::CombinedCkh(_, ckh_pool) => { get_outgoing_webhook_event(merchant_id, req, ckh_pool).await } } .switch()?; Ok(data) }
crates/analytics/src/outgoing_webhook_event/core.rs
analytics::src::outgoing_webhook_event::core
283
true
// File: crates/analytics/src/outgoing_webhook_event/events.rs // Module: analytics::src::outgoing_webhook_event::events

use api_models::analytics::{outgoing_webhook_event::OutgoingWebhookLogsRequest, Granularity};
use common_utils::errors::ReportSwitchExt;
use error_stack::ResultExt;
use time::PrimitiveDateTime;

use crate::{
    query::{Aggregate, GroupByClause, QueryBuilder, ToSql, Window},
    types::{AnalyticsCollection, AnalyticsDataSource, FiltersError, FiltersResult, LoadRow},
};

/// Marker trait: a data source that can deserialize rows into
/// [`OutgoingWebhookLogsResult`].
pub trait OutgoingWebhookLogsFilterAnalytics: LoadRow<OutgoingWebhookLogsResult> {}

/// Builds and executes a `SELECT *` query against the outgoing-webhook-event
/// collection, always filtered by merchant and payment id, with every other
/// id in `query_param` applied as an optional equality filter.
pub async fn get_outgoing_webhook_event<T>(
    merchant_id: &common_utils::id_type::MerchantId,
    query_param: OutgoingWebhookLogsRequest,
    pool: &T,
) -> FiltersResult<Vec<OutgoingWebhookLogsResult>>
where
    T: AnalyticsDataSource + OutgoingWebhookLogsFilterAnalytics,
    PrimitiveDateTime: ToSql<T>,
    AnalyticsCollection: ToSql<T>,
    Granularity: GroupByClause<T>,
    Aggregate<&'static str>: ToSql<T>,
    Window<&'static str>: ToSql<T>,
{
    let mut query_builder: QueryBuilder<T> =
        QueryBuilder::new(AnalyticsCollection::OutgoingWebhookEvent);
    // Full rows are returned; the result struct below mirrors the columns.
    query_builder.add_select_column("*").switch()?;
    // Mandatory scoping: merchant and payment id are always present.
    query_builder
        .add_filter_clause("merchant_id", merchant_id)
        .switch()?;
    query_builder
        .add_filter_clause("payment_id", &query_param.payment_id)
        .switch()?;
    // Each remaining request field narrows the query only when supplied.
    if let Some(event_id) = query_param.event_id {
        query_builder
            .add_filter_clause("event_id", &event_id)
            .switch()?;
    }
    if let Some(refund_id) = query_param.refund_id {
        query_builder
            .add_filter_clause("refund_id", &refund_id)
            .switch()?;
    }
    if let Some(dispute_id) = query_param.dispute_id {
        query_builder
            .add_filter_clause("dispute_id", &dispute_id)
            .switch()?;
    }
    if let Some(mandate_id) = query_param.mandate_id {
        query_builder
            .add_filter_clause("mandate_id", &mandate_id)
            .switch()?;
    }
    if let Some(payment_method_id) = query_param.payment_method_id {
        query_builder
            .add_filter_clause("payment_method_id", &payment_method_id)
            .switch()?;
    }
    if let Some(attempt_id) = query_param.attempt_id {
        query_builder
            .add_filter_clause("attempt_id", &attempt_id)
            .switch()?;
    }
    //TODO!: update the execute_query function to return reports instead of plain errors...
    query_builder
        .execute_query::<OutgoingWebhookLogsResult, _>(pool)
        .await
        .change_context(FiltersError::QueryBuildingError)?
        .change_context(FiltersError::QueryExecutionFailure)
}

/// One outgoing-webhook log row as returned by the analytics store.
#[derive(Debug, serde::Serialize, serde::Deserialize)]
pub struct OutgoingWebhookLogsResult {
    pub merchant_id: common_utils::id_type::MerchantId,
    pub event_id: String,
    pub event_type: String,
    pub outgoing_webhook_event_type: String,
    pub payment_id: common_utils::id_type::PaymentId,
    // Optional linkage to the resource the webhook was emitted for.
    pub refund_id: Option<String>,
    pub attempt_id: Option<String>,
    pub dispute_id: Option<String>,
    pub payment_method_id: Option<String>,
    pub mandate_id: Option<String>,
    // Serialized webhook payload, when stored.
    pub content: Option<String>,
    pub is_error: bool,
    pub error: Option<String>,
    #[serde(with = "common_utils::custom_serde::iso8601")]
    pub created_at: PrimitiveDateTime,
}
crates/analytics/src/outgoing_webhook_event/events.rs
analytics::src::outgoing_webhook_event::events
799
true
// File: crates/analytics/src/metrics/request.rs // Module: analytics::src::metrics::request #[inline] pub async fn record_operation_time<F, R, T>( future: F, metric: &router_env::opentelemetry::metrics::Histogram<f64>, metric_name: &T, source: &crate::AnalyticsProvider, ) -> R where F: futures::Future<Output = R>, T: ToString, { let (result, time) = common_utils::metrics::utils::time_future(future).await; let attributes = router_env::metric_attributes!( ("metric_name", metric_name.to_string()), ("source", source.to_string()), ); let value = time.as_secs_f64(); metric.record(value, attributes); router_env::logger::debug!("Attributes: {:?}, Time: {}", attributes, value); result }
crates/analytics/src/metrics/request.rs
analytics::src::metrics::request
194
true
// File: crates/analytics/src/payments/core.rs // Module: analytics::src::payments::core

#![allow(dead_code)]
use std::collections::{HashMap, HashSet};

use api_models::analytics::{
    payments::{
        MetricsBucketResponse, PaymentDimensions, PaymentDistributions, PaymentMetrics,
        PaymentMetricsBucketIdentifier,
    },
    FilterValue, GetPaymentFiltersRequest, GetPaymentMetricRequest, PaymentFiltersResponse,
    PaymentsAnalyticsMetadata, PaymentsMetricsResponse,
};
use bigdecimal::ToPrimitive;
use common_enums::Currency;
use common_utils::errors::CustomResult;
use currency_conversion::{conversion::convert, types::ExchangeRates};
use error_stack::ResultExt;
use router_env::{
    instrument, logger,
    tracing::{self, Instrument},
};

use super::{
    distribution::PaymentDistributionRow,
    filters::{get_payment_filter_for_dimension, PaymentFilterRow},
    metrics::PaymentMetricRow,
    PaymentMetricsAccumulator,
};
use crate::{
    enums::AuthInfo,
    errors::{AnalyticsError, AnalyticsResult},
    metrics,
    payments::{PaymentDistributionAccumulator, PaymentMetricAccumulator},
    AnalyticsProvider,
};

/// Outcome of one spawned analytics query task: either a metric query or a
/// distribution query, paired with the identifier it was run for.
#[derive(Debug)]
pub enum TaskType {
    MetricTask(
        PaymentMetrics,
        CustomResult<HashSet<(PaymentMetricsBucketIdentifier, PaymentMetricRow)>, AnalyticsError>,
    ),
    DistributionTask(
        PaymentDistributions,
        CustomResult<Vec<(PaymentMetricsBucketIdentifier, PaymentDistributionRow)>, AnalyticsError>,
    ),
}

/// Runs every requested payment metric (and at most one distribution) as a
/// concurrent task, folds the per-bucket rows into accumulators keyed by
/// bucket identifier, and builds the final response including overall
/// totals and (when exchange rates are available) USD-converted amounts.
#[instrument(skip_all)]
pub async fn get_metrics(
    pool: &AnalyticsProvider,
    ex_rates: &Option<ExchangeRates>,
    auth: &AuthInfo,
    req: GetPaymentMetricRequest,
) -> AnalyticsResult<PaymentsMetricsResponse<MetricsBucketResponse>> {
    let mut metrics_accumulator: HashMap<
        PaymentMetricsBucketIdentifier,
        PaymentMetricsAccumulator,
    > = HashMap::new();
    let mut set = tokio::task::JoinSet::new();
    // Fan out: one task per requested metric, each on a cloned pool/request.
    for metric_type in req.metrics.iter().cloned() {
        let req = req.clone();
        let pool = pool.clone();
        let task_span = tracing::debug_span!(
            "analytics_payments_metrics_query",
            payment_metric = metric_type.as_ref()
        );
        // TODO: lifetime issues with joinset,
        // can be optimized away if joinset lifetime requirements are relaxed
        let auth_scoped = auth.to_owned();
        set.spawn(
            async move {
                // NOTE(review): `&req.group_by_names.clone()` clones then
                // immediately borrows — the clone looks redundant; confirm
                // against `get_payment_metrics`'s signature before removing.
                let data = pool
                    .get_payment_metrics(
                        &metric_type,
                        &req.group_by_names.clone(),
                        &auth_scoped,
                        &req.filters,
                        req.time_series.map(|t| t.granularity),
                        &req.time_range,
                    )
                    .await
                    .change_context(AnalyticsError::UnknownError);
                TaskType::MetricTask(metric_type, data)
            }
            .instrument(task_span),
        );
    }

    // At most one distribution query can accompany the metric queries.
    if let Some(distribution) = req.clone().distribution {
        let req = req.clone();
        let pool = pool.clone();
        let task_span = tracing::debug_span!(
            "analytics_payments_distribution_query",
            payment_distribution = distribution.distribution_for.as_ref()
        );
        let auth_scoped = auth.to_owned();
        set.spawn(
            async move {
                let data = pool
                    .get_payment_distribution(
                        &distribution,
                        &req.group_by_names.clone(),
                        &auth_scoped,
                        &req.filters,
                        req.time_series.map(|t| t.granularity),
                        &req.time_range,
                    )
                    .await
                    .change_context(AnalyticsError::UnknownError);
                TaskType::DistributionTask(distribution.distribution_for, data)
            }
            .instrument(task_span),
        );
    }

    // Fan in: consume tasks as they finish; each result is routed into the
    // accumulator field that corresponds to its metric/distribution kind.
    while let Some(task_type) = set
        .join_next()
        .await
        .transpose()
        .change_context(AnalyticsError::UnknownError)?
    {
        match task_type {
            TaskType::MetricTask(metric, data) => {
                let data = data?;
                let attributes = router_env::metric_attributes!(
                    ("metric_type", metric.to_string()),
                    ("source", pool.to_string()),
                );
                let value = u64::try_from(data.len());
                if let Ok(val) = value {
                    metrics::BUCKETS_FETCHED.record(val, attributes);
                    logger::debug!("Attributes: {:?}, Buckets fetched: {}", attributes, val);
                }
                for (id, value) in data {
                    logger::debug!(bucket_id=?id, bucket_value=?value, "Bucket row for metric {metric}");
                    let metrics_builder = metrics_accumulator.entry(id).or_default();
                    // Sessionized and plain variants of a metric feed the
                    // same accumulator field.
                    match metric {
                        PaymentMetrics::PaymentSuccessRate
                        | PaymentMetrics::SessionizedPaymentSuccessRate => metrics_builder
                            .payment_success_rate
                            .add_metrics_bucket(&value),
                        PaymentMetrics::PaymentCount | PaymentMetrics::SessionizedPaymentCount => {
                            metrics_builder.payment_count.add_metrics_bucket(&value)
                        }
                        PaymentMetrics::PaymentSuccessCount
                        | PaymentMetrics::SessionizedPaymentSuccessCount => {
                            metrics_builder.payment_success.add_metrics_bucket(&value)
                        }
                        PaymentMetrics::PaymentProcessedAmount
                        | PaymentMetrics::SessionizedPaymentProcessedAmount => {
                            metrics_builder.processed_amount.add_metrics_bucket(&value)
                        }
                        PaymentMetrics::AvgTicketSize
                        | PaymentMetrics::SessionizedAvgTicketSize => {
                            metrics_builder.avg_ticket_size.add_metrics_bucket(&value)
                        }
                        // Retries feed both the count and the amount accumulators.
                        PaymentMetrics::RetriesCount | PaymentMetrics::SessionizedRetriesCount => {
                            metrics_builder.retries_count.add_metrics_bucket(&value);
                            metrics_builder
                                .retries_amount_processed
                                .add_metrics_bucket(&value)
                        }
                        PaymentMetrics::ConnectorSuccessRate
                        | PaymentMetrics::SessionizedConnectorSuccessRate => {
                            metrics_builder
                                .connector_success_rate
                                .add_metrics_bucket(&value);
                        }
                        PaymentMetrics::DebitRouting | PaymentMetrics::SessionizedDebitRouting => {
                            metrics_builder.debit_routing.add_metrics_bucket(&value);
                        }
                        PaymentMetrics::PaymentsDistribution => {
                            metrics_builder
                                .payments_distribution
                                .add_metrics_bucket(&value);
                        }
                        PaymentMetrics::FailureReasons => {
                            metrics_builder
                                .failure_reasons_distribution
                                .add_metrics_bucket(&value);
                        }
                    }
                }
                logger::debug!(
                    "Analytics Accumulated Results: metric: {}, results: {:#?}",
                    metric,
                    metrics_accumulator
                );
            }
            TaskType::DistributionTask(distribution, data) => {
                let data = data?;
                let attributes = router_env::metric_attributes!(
                    ("distribution_type", distribution.to_string()),
                    ("source", pool.to_string()),
                );
                let value = u64::try_from(data.len());
                if let Ok(val) = value {
                    metrics::BUCKETS_FETCHED.record(val, attributes);
                    logger::debug!("Attributes: {:?}, Buckets fetched: {}", attributes, val);
                }
                for (id, value) in data {
                    logger::debug!(bucket_id=?id, bucket_value=?value, "Bucket row for distribution {distribution}");
                    let metrics_accumulator = metrics_accumulator.entry(id).or_default();
                    match distribution {
                        PaymentDistributions::PaymentErrorMessage => metrics_accumulator
                            .payment_error_message
                            .add_distribution_bucket(&value),
                    }
                }
                logger::debug!(
                    "Analytics Accumulated Results: distribution: {}, results: {:#?}",
                    distribution,
                    metrics_accumulator
                );
            }
        }
    }

    // Running totals across all buckets, reported in the response metadata.
    let mut total_payment_processed_amount = 0;
    let mut total_payment_processed_count = 0;
    let mut total_payment_processed_amount_without_smart_retries = 0;
    let mut total_payment_processed_count_without_smart_retries = 0;
    let mut total_failure_reasons_count = 0;
    let mut total_failure_reasons_count_without_smart_retries = 0;
    let mut total_payment_processed_amount_in_usd = 0;
    let mut total_payment_processed_amount_without_smart_retries_usd = 0;
    let query_data: Vec<MetricsBucketResponse> = metrics_accumulator
        .into_iter()
        .map(|(id, val)| {
            let mut collected_values = val.collect();
            if let Some(amount) = collected_values.payment_processed_amount {
                // Convert the bucket amount to USD when exchange rates are
                // configured; conversion errors are logged and treated as
                // "no USD value" rather than failing the whole response.
                // The `* 100` scales the converted major-unit amount back
                // to the minor-unit representation — TODO confirm.
                let amount_in_usd = if let Some(ex_rates) = ex_rates {
                    id.currency
                        .and_then(|currency| {
                            i64::try_from(amount)
                                .inspect_err(|e| logger::error!("Amount conversion error: {:?}", e))
                                .ok()
                                .and_then(|amount_i64| {
                                    convert(ex_rates, currency, Currency::USD, amount_i64)
                                        .inspect_err(|e| {
                                            logger::error!("Currency conversion error: {:?}", e)
                                        })
                                        .ok()
                                })
                        })
                        .map(|amount| (amount * rust_decimal::Decimal::new(100, 0)).to_u64())
                        .unwrap_or_default()
                } else {
                    None
                };
                collected_values.payment_processed_amount_in_usd = amount_in_usd;
                total_payment_processed_amount += amount;
                total_payment_processed_amount_in_usd += amount_in_usd.unwrap_or(0);
            }
            if let Some(count) = collected_values.payment_processed_count {
                total_payment_processed_count += count;
            }
            if let Some(amount) = collected_values.payment_processed_amount_without_smart_retries {
                // Same conversion as above for the without-smart-retries view.
                let amount_in_usd = if let Some(ex_rates) = ex_rates {
                    id.currency
                        .and_then(|currency| {
                            i64::try_from(amount)
                                .inspect_err(|e| logger::error!("Amount conversion error: {:?}", e))
                                .ok()
                                .and_then(|amount_i64| {
                                    convert(ex_rates, currency, Currency::USD, amount_i64)
                                        .inspect_err(|e| {
                                            logger::error!("Currency conversion error: {:?}", e)
                                        })
                                        .ok()
                                })
                        })
                        .map(|amount| (amount * rust_decimal::Decimal::new(100, 0)).to_u64())
                        .unwrap_or_default()
                } else {
                    None
                };
                collected_values.payment_processed_amount_without_smart_retries_usd = amount_in_usd;
                total_payment_processed_amount_without_smart_retries += amount;
                total_payment_processed_amount_without_smart_retries_usd +=
                    amount_in_usd.unwrap_or(0);
            }
            if let Some(count) = collected_values.payment_processed_count_without_smart_retries {
                total_payment_processed_count_without_smart_retries += count;
            }
            if let Some(count) = collected_values.failure_reason_count {
                total_failure_reasons_count += count;
            }
            if let Some(count) = collected_values.failure_reason_count_without_smart_retries {
                total_failure_reasons_count_without_smart_retries += count;
            }
            if let Some(savings) = collected_values.debit_routing_savings {
                // Debit-routing savings get the same optional USD conversion.
                let savings_in_usd = if let Some(ex_rates) = ex_rates {
                    id.currency
                        .and_then(|currency| {
                            i64::try_from(savings)
                                .inspect_err(|e| {
                                    logger::error!(
                                        "Debit Routing savings conversion error: {:?}",
                                        e
                                    )
                                })
                                .ok()
                                .and_then(|savings_i64| {
                                    convert(ex_rates, currency, Currency::USD, savings_i64)
                                        .inspect_err(|e| {
                                            logger::error!("Currency conversion error: {:?}", e)
                                        })
                                        .ok()
                                })
                        })
                        .map(|savings| (savings * rust_decimal::Decimal::new(100, 0)).to_u64())
                        .unwrap_or_default()
                } else {
                    None
                };
                collected_values.debit_routing_savings_in_usd = savings_in_usd;
            }
            MetricsBucketResponse {
                values: collected_values,
                dimensions: id,
            }
        })
        .collect();
    Ok(PaymentsMetricsResponse {
        query_data,
        meta_data: [PaymentsAnalyticsMetadata {
            total_payment_processed_amount: Some(total_payment_processed_amount),
            // USD totals are only meaningful when rates were configured.
            total_payment_processed_amount_in_usd: if ex_rates.is_some() {
                Some(total_payment_processed_amount_in_usd)
            } else {
                None
            },
            total_payment_processed_amount_without_smart_retries: Some(
                total_payment_processed_amount_without_smart_retries,
            ),
            total_payment_processed_amount_without_smart_retries_usd: if ex_rates.is_some() {
                Some(total_payment_processed_amount_without_smart_retries_usd)
            } else {
                None
            },
            total_payment_processed_count: Some(total_payment_processed_count),
            total_payment_processed_count_without_smart_retries: Some(
                total_payment_processed_count_without_smart_retries,
            ),
            total_failure_reasons_count: Some(total_failure_reasons_count),
            total_failure_reasons_count_without_smart_retries: Some(
                total_failure_reasons_count_without_smart_retries,
            ),
        }],
    })
}

/// Returns, for each requested dimension, the distinct values currently
/// present in the data within the requested time range.
///
/// For the combined providers both backends are queried; a mismatch is
/// logged, and the provider's "primary" result is used (Clickhouse for
/// `CombinedCkh`, Postgres for `CombinedSqlx`).
pub async fn get_filters(
    pool: &AnalyticsProvider,
    req: GetPaymentFiltersRequest,
    auth: &AuthInfo,
) -> AnalyticsResult<PaymentFiltersResponse> {
    let mut res = PaymentFiltersResponse::default();
    for dim in req.group_by_names {
        let values = match pool {
            AnalyticsProvider::Sqlx(pool) => {
                get_payment_filter_for_dimension(dim, auth, &req.time_range, pool)
                    .await
            }
            AnalyticsProvider::Clickhouse(pool) => {
                get_payment_filter_for_dimension(dim, auth, &req.time_range, pool)
                    .await
            }
            AnalyticsProvider::CombinedCkh(sqlx_poll, ckh_pool) => {
                let ckh_result = get_payment_filter_for_dimension(
                    dim,
                    auth,
                    &req.time_range,
                    ckh_pool,
                )
                .await;
                let sqlx_result = get_payment_filter_for_dimension(
                    dim,
                    auth,
                    &req.time_range,
                    sqlx_poll,
                )
                .await;
                // Cross-check the two stores; log (but tolerate) divergence.
                match (&sqlx_result, &ckh_result) {
                    (Ok(ref sqlx_res), Ok(ref ckh_res)) if sqlx_res != ckh_res => {
                        router_env::logger::error!(clickhouse_result=?ckh_res, postgres_result=?sqlx_res, "Mismatch between clickhouse & postgres payments analytics filters")
                    },
                    _ => {}
                };
                ckh_result
            }
            AnalyticsProvider::CombinedSqlx(sqlx_poll, ckh_pool) => {
                let ckh_result = get_payment_filter_for_dimension(
                    dim,
                    auth,
                    &req.time_range,
                    ckh_pool,
                )
                .await;
                let sqlx_result = get_payment_filter_for_dimension(
                    dim,
                    auth,
                    &req.time_range,
                    sqlx_poll,
                )
                .await;
                match (&sqlx_result, &ckh_result) {
                    (Ok(ref sqlx_res), Ok(ref ckh_res)) if sqlx_res != ckh_res => {
                        router_env::logger::error!(clickhouse_result=?ckh_res, postgres_result=?sqlx_res, "Mismatch between clickhouse & postgres payments analytics filters")
                    },
                    _ => {}
                };
                sqlx_result
            }
        }
        .change_context(AnalyticsError::UnknownError)?
        .into_iter()
        // Project each row onto the single column matching the dimension;
        // rows with a NULL value for that column are dropped.
        .filter_map(|fil: PaymentFilterRow| match dim {
            PaymentDimensions::Currency => fil.currency.map(|i| i.as_ref().to_string()),
            PaymentDimensions::PaymentStatus => fil.status.map(|i| i.as_ref().to_string()),
            PaymentDimensions::Connector => fil.connector,
            PaymentDimensions::AuthType => fil.authentication_type.map(|i| i.as_ref().to_string()),
            PaymentDimensions::PaymentMethod => fil.payment_method,
            PaymentDimensions::PaymentMethodType => fil.payment_method_type,
            PaymentDimensions::ClientSource => fil.client_source,
            PaymentDimensions::ClientVersion => fil.client_version,
            PaymentDimensions::ProfileId => fil.profile_id,
            PaymentDimensions::CardNetwork => fil.card_network,
            PaymentDimensions::MerchantId => fil.merchant_id,
            PaymentDimensions::CardLast4 => fil.card_last_4,
            PaymentDimensions::CardIssuer => fil.card_issuer,
            PaymentDimensions::ErrorReason => fil.error_reason,
            PaymentDimensions::RoutingApproach => fil.routing_approach.map(|i| i.as_ref().to_string()),
            PaymentDimensions::SignatureNetwork => fil.signature_network,
            PaymentDimensions::IsIssuerRegulated => fil.is_issuer_regulated.map(|b| b.to_string()),
            PaymentDimensions::IsDebitRouted => fil.is_debit_routed.map(|b| b.to_string())
        })
        .collect::<Vec<String>>();
        res.query_data.push(FilterValue {
            dimension: dim,
            values,
        })
    }
    Ok(res)
}
crates/analytics/src/payments/core.rs
analytics::src::payments::core
3,539
true
// File: crates/analytics/src/payments/distribution.rs // Module: analytics::src::payments::distribution

use api_models::analytics::{
    payments::{
        PaymentDimensions, PaymentDistributions, PaymentFilters, PaymentMetricsBucketIdentifier,
    },
    Granularity, PaymentDistributionBody, TimeRange,
};
use diesel_models::enums as storage_enums;
use time::PrimitiveDateTime;

use crate::{
    enums::AuthInfo,
    query::{Aggregate, GroupByClause, ToSql, Window},
    types::{AnalyticsCollection, AnalyticsDataSource, DBEnumWrapper, LoadRow, MetricsResult},
};

mod payment_error_message;

use payment_error_message::PaymentErrorMessage;

/// One row of a payment distribution query. All dimension columns are
/// optional because a row only carries the dimensions the query grouped by.
#[derive(Debug, PartialEq, Eq, serde::Deserialize)]
pub struct PaymentDistributionRow {
    pub currency: Option<DBEnumWrapper<storage_enums::Currency>>,
    pub status: Option<DBEnumWrapper<storage_enums::AttemptStatus>>,
    pub connector: Option<String>,
    pub authentication_type: Option<DBEnumWrapper<storage_enums::AuthenticationType>>,
    pub payment_method: Option<String>,
    pub payment_method_type: Option<String>,
    pub client_source: Option<String>,
    pub client_version: Option<String>,
    pub profile_id: Option<String>,
    pub card_network: Option<String>,
    pub merchant_id: Option<String>,
    pub card_last_4: Option<String>,
    pub card_issuer: Option<String>,
    pub error_reason: Option<String>,
    pub first_attempt: Option<bool>,
    // Aggregates produced by the distribution query.
    pub total: Option<bigdecimal::BigDecimal>,
    pub count: Option<i64>,
    pub error_message: Option<String>,
    pub routing_approach: Option<DBEnumWrapper<storage_enums::RoutingApproach>>,
    pub signature_network: Option<String>,
    pub is_issuer_regulated: Option<bool>,
    pub is_debit_routed: Option<bool>,
    // Time-bucket boundaries when the query used a granularity.
    #[serde(with = "common_utils::custom_serde::iso8601::option")]
    pub start_bucket: Option<PrimitiveDateTime>,
    #[serde(with = "common_utils::custom_serde::iso8601::option")]
    pub end_bucket: Option<PrimitiveDateTime>,
}

/// Marker trait: a data source that can deserialize rows into
/// [`PaymentDistributionRow`].
pub trait PaymentDistributionAnalytics: LoadRow<PaymentDistributionRow> {}

/// A payment distribution that can be loaded from a data source `T`.
#[async_trait::async_trait]
pub trait PaymentDistribution<T>
where
    T: AnalyticsDataSource + PaymentDistributionAnalytics,
{
    #[allow(clippy::too_many_arguments)]
    async fn load_distribution(
        &self,
        distribution: &PaymentDistributionBody,
        dimensions: &[PaymentDimensions],
        auth: &AuthInfo,
        filters: &PaymentFilters,
        granularity: Option<Granularity>,
        time_range: &TimeRange,
        pool: &T,
    ) -> MetricsResult<Vec<(PaymentMetricsBucketIdentifier, PaymentDistributionRow)>>;
}

// Dispatch from the `PaymentDistributions` enum to the concrete
// implementation for each variant (currently only error-message).
#[async_trait::async_trait]
impl<T> PaymentDistribution<T> for PaymentDistributions
where
    T: AnalyticsDataSource + PaymentDistributionAnalytics,
    PrimitiveDateTime: ToSql<T>,
    AnalyticsCollection: ToSql<T>,
    Granularity: GroupByClause<T>,
    Aggregate<&'static str>: ToSql<T>,
    Window<&'static str>: ToSql<T>,
{
    async fn load_distribution(
        &self,
        distribution: &PaymentDistributionBody,
        dimensions: &[PaymentDimensions],
        auth: &AuthInfo,
        filters: &PaymentFilters,
        granularity: Option<Granularity>,
        time_range: &TimeRange,
        pool: &T,
    ) -> MetricsResult<Vec<(PaymentMetricsBucketIdentifier, PaymentDistributionRow)>> {
        match self {
            Self::PaymentErrorMessage => {
                PaymentErrorMessage
                    .load_distribution(
                        distribution,
                        dimensions,
                        auth,
                        filters,
                        granularity,
                        time_range,
                        pool,
                    )
                    .await
            }
        }
    }
}
crates/analytics/src/payments/distribution.rs
analytics::src::payments::distribution
785
true
// File: crates/analytics/src/payments/types.rs // Module: analytics::src::payments::types

use api_models::analytics::payments::{PaymentDimensions, PaymentFilters};
use error_stack::ResultExt;

use crate::{
    query::{QueryBuilder, QueryFilter, QueryResult, ToSql},
    types::{AnalyticsCollection, AnalyticsDataSource},
};

// Translates a `PaymentFilters` request into `IN (…)` clauses on the query
// builder. Each filter list is applied only when non-empty, so an empty
// list means "no restriction on this dimension".
impl<T> QueryFilter<T> for PaymentFilters
where
    T: AnalyticsDataSource,
    AnalyticsCollection: ToSql<T>,
{
    fn set_filter_clause(&self, builder: &mut QueryBuilder<T>) -> QueryResult<()> {
        if !self.currency.is_empty() {
            builder
                .add_filter_in_range_clause(PaymentDimensions::Currency, &self.currency)
                .attach_printable("Error adding currency filter")?;
        }
        if !self.status.is_empty() {
            builder
                .add_filter_in_range_clause(PaymentDimensions::PaymentStatus, &self.status)
                .attach_printable("Error adding payment status filter")?;
        }
        if !self.connector.is_empty() {
            builder
                .add_filter_in_range_clause(PaymentDimensions::Connector, &self.connector)
                .attach_printable("Error adding connector filter")?;
        }
        if !self.auth_type.is_empty() {
            builder
                .add_filter_in_range_clause(PaymentDimensions::AuthType, &self.auth_type)
                .attach_printable("Error adding auth type filter")?;
        }
        if !self.payment_method.is_empty() {
            builder
                .add_filter_in_range_clause(PaymentDimensions::PaymentMethod, &self.payment_method)
                .attach_printable("Error adding payment method filter")?;
        }
        if !self.payment_method_type.is_empty() {
            builder
                .add_filter_in_range_clause(
                    PaymentDimensions::PaymentMethodType,
                    &self.payment_method_type,
                )
                .attach_printable("Error adding payment method type filter")?;
        }
        if !self.client_source.is_empty() {
            builder
                .add_filter_in_range_clause(PaymentDimensions::ClientSource, &self.client_source)
                .attach_printable("Error adding client source filter")?;
        }
        if !self.client_version.is_empty() {
            builder
                .add_filter_in_range_clause(PaymentDimensions::ClientVersion, &self.client_version)
                .attach_printable("Error adding client version filter")?;
        }
        if !self.profile_id.is_empty() {
            builder
                .add_filter_in_range_clause(PaymentDimensions::ProfileId, &self.profile_id)
                .attach_printable("Error adding profile id filter")?;
        }
        if !self.card_network.is_empty() {
            // Each requested network is matched in three spellings: the raw
            // value, a double-quoted form, and an uppercased double-quoted
            // form. NOTE(review): this appears to accommodate rows whose
            // card_network was persisted with surrounding JSON quotes and/or
            // different casing — confirm against the ingestion path.
            let card_networks: Vec<String> = self
                .card_network
                .iter()
                .flat_map(|cn| {
                    [
                        format!("\"{cn}\""),
                        cn.to_string(),
                        format!("\"{cn}\"").to_uppercase(),
                    ]
                })
                .collect();
            builder
                .add_filter_in_range_clause(
                    PaymentDimensions::CardNetwork,
                    card_networks.as_slice(),
                )
                .attach_printable("Error adding card network filter")?;
        }
        if !self.merchant_id.is_empty() {
            builder
                .add_filter_in_range_clause(PaymentDimensions::MerchantId, &self.merchant_id)
                .attach_printable("Error adding merchant id filter")?;
        }
        if !self.card_last_4.is_empty() {
            builder
                .add_filter_in_range_clause(PaymentDimensions::CardLast4, &self.card_last_4)
                .attach_printable("Error adding card last 4 filter")?;
        }
        if !self.card_issuer.is_empty() {
            builder
                .add_filter_in_range_clause(PaymentDimensions::CardIssuer, &self.card_issuer)
                .attach_printable("Error adding card issuer filter")?;
        }
        if !self.error_reason.is_empty() {
            builder
                .add_filter_in_range_clause(PaymentDimensions::ErrorReason, &self.error_reason)
                .attach_printable("Error adding error reason filter")?;
        }
        if !self.first_attempt.is_empty() {
            // `first_attempt` is not a `PaymentDimensions` variant; the
            // column name is passed as a raw string here.
            builder
                .add_filter_in_range_clause("first_attempt", &self.first_attempt)
                .attach_printable("Error adding first attempt filter")?;
        }
        if !self.routing_approach.is_empty() {
            builder
                .add_filter_in_range_clause(
                    PaymentDimensions::RoutingApproach,
                    &self.routing_approach,
                )
                .attach_printable("Error adding routing approach filter")?;
        }
        if !self.signature_network.is_empty() {
            builder
                .add_filter_in_range_clause(
                    PaymentDimensions::SignatureNetwork,
                    &self.signature_network,
                )
                .attach_printable("Error adding signature network filter")?;
        }
        if !self.is_issuer_regulated.is_empty() {
            builder
                .add_filter_in_range_clause(
                    PaymentDimensions::IsIssuerRegulated,
                    &self.is_issuer_regulated,
                )
                .attach_printable("Error adding is issuer regulated filter")?;
        }
        if !self.is_debit_routed.is_empty() {
            builder
                .add_filter_in_range_clause(PaymentDimensions::IsDebitRouted, &self.is_debit_routed)
                .attach_printable("Error adding is debit routed filter")?;
        }
        Ok(())
    }
}
crates/analytics/src/payments/types.rs
analytics::src::payments::types
1,128
true
// File: crates/analytics/src/payments/metrics.rs
// Module: analytics::src::payments::metrics
//
// Defines the shared result-row type for payment metric queries, the
// `PaymentMetric` loading trait, and the dispatcher that routes each
// `PaymentMetrics` enum variant to its concrete implementation.

use std::collections::HashSet;

use api_models::analytics::{
    payments::{PaymentDimensions, PaymentFilters, PaymentMetrics, PaymentMetricsBucketIdentifier},
    Granularity, TimeRange,
};
use diesel_models::enums as storage_enums;
use time::PrimitiveDateTime;

use crate::{
    enums::AuthInfo,
    query::{Aggregate, GroupByClause, ToSql, Window},
    types::{AnalyticsCollection, AnalyticsDataSource, DBEnumWrapper, LoadRow, MetricsResult},
};

mod avg_ticket_size;
mod connector_success_rate;
mod debit_routing;
mod payment_count;
mod payment_processed_amount;
mod payment_success_count;
mod retries_count;
mod sessionized_metrics;
mod success_rate;

use avg_ticket_size::AvgTicketSize;
use connector_success_rate::ConnectorSuccessRate;
use debit_routing::DebitRouting;
use payment_count::PaymentCount;
use payment_processed_amount::PaymentProcessedAmount;
use payment_success_count::PaymentSuccessCount;
use success_rate::PaymentSuccessRate;

use self::retries_count::RetriesCount;

/// One row deserialized from a payment-metric query result.
///
/// Every dimension column is `Option` because each metric only selects the
/// dimensions it groups by; unselected columns come back absent. `total` and
/// `count` carry the aggregate values; `start_bucket`/`end_bucket` carry the
/// min/max `created_at` of the bucket (ISO-8601 on the wire).
#[derive(Debug, PartialEq, Eq, serde::Deserialize, Hash)]
pub struct PaymentMetricRow {
    pub currency: Option<DBEnumWrapper<storage_enums::Currency>>,
    pub status: Option<DBEnumWrapper<storage_enums::AttemptStatus>>,
    pub connector: Option<String>,
    pub authentication_type: Option<DBEnumWrapper<storage_enums::AuthenticationType>>,
    pub payment_method: Option<String>,
    pub payment_method_type: Option<String>,
    pub client_source: Option<String>,
    pub client_version: Option<String>,
    pub profile_id: Option<String>,
    pub card_network: Option<String>,
    pub merchant_id: Option<String>,
    pub card_last_4: Option<String>,
    pub card_issuer: Option<String>,
    pub error_reason: Option<String>,
    // true when the row represents first payment attempts (no retries);
    // used by accumulators to split with/without-retry figures.
    pub first_attempt: Option<bool>,
    // Aggregated amount (SUM) for the bucket.
    pub total: Option<bigdecimal::BigDecimal>,
    // Aggregated row count (COUNT) for the bucket.
    pub count: Option<i64>,
    pub routing_approach: Option<DBEnumWrapper<storage_enums::RoutingApproach>>,
    pub signature_network: Option<String>,
    pub is_issuer_regulated: Option<bool>,
    pub is_debit_routed: Option<bool>,
    #[serde(with = "common_utils::custom_serde::iso8601::option")]
    pub start_bucket: Option<PrimitiveDateTime>,
    #[serde(with = "common_utils::custom_serde::iso8601::option")]
    pub end_bucket: Option<PrimitiveDateTime>,
}

/// Marker trait: a data source that can deserialize `PaymentMetricRow`s.
pub trait PaymentMetricAnalytics: LoadRow<PaymentMetricRow> {}

/// A single payment metric that can be loaded from an analytics backend `T`.
#[async_trait::async_trait]
pub trait PaymentMetric<T>
where
    T: AnalyticsDataSource + PaymentMetricAnalytics,
{
    /// Runs the metric's query against `pool`, bucketed by `granularity`
    /// within `time_range`, grouped by `dimensions` and restricted by
    /// `filters`/`auth`. Returns (bucket identifier, raw row) pairs.
    async fn load_metrics(
        &self,
        dimensions: &[PaymentDimensions],
        auth: &AuthInfo,
        filters: &PaymentFilters,
        granularity: Option<Granularity>,
        time_range: &TimeRange,
        pool: &T,
    ) -> MetricsResult<HashSet<(PaymentMetricsBucketIdentifier, PaymentMetricRow)>>;
}

/// Dispatcher: maps each `PaymentMetrics` variant to its concrete metric
/// implementation. Every arm forwards the same argument list unchanged; the
/// `Sessionized*` (and `PaymentsDistribution`/`FailureReasons`) variants route
/// to the `sessionized_metrics` implementations.
#[async_trait::async_trait]
impl<T> PaymentMetric<T> for PaymentMetrics
where
    T: AnalyticsDataSource + PaymentMetricAnalytics,
    PrimitiveDateTime: ToSql<T>,
    AnalyticsCollection: ToSql<T>,
    Granularity: GroupByClause<T>,
    Aggregate<&'static str>: ToSql<T>,
    Window<&'static str>: ToSql<T>,
{
    async fn load_metrics(
        &self,
        dimensions: &[PaymentDimensions],
        auth: &AuthInfo,
        filters: &PaymentFilters,
        granularity: Option<Granularity>,
        time_range: &TimeRange,
        pool: &T,
    ) -> MetricsResult<HashSet<(PaymentMetricsBucketIdentifier, PaymentMetricRow)>> {
        match self {
            Self::PaymentSuccessRate => {
                PaymentSuccessRate
                    .load_metrics(dimensions, auth, filters, granularity, time_range, pool)
                    .await
            }
            Self::PaymentCount => {
                PaymentCount
                    .load_metrics(dimensions, auth, filters, granularity, time_range, pool)
                    .await
            }
            Self::PaymentSuccessCount => {
                PaymentSuccessCount
                    .load_metrics(dimensions, auth, filters, granularity, time_range, pool)
                    .await
            }
            Self::PaymentProcessedAmount => {
                PaymentProcessedAmount
                    .load_metrics(dimensions, auth, filters, granularity, time_range, pool)
                    .await
            }
            Self::AvgTicketSize => {
                AvgTicketSize
                    .load_metrics(dimensions, auth, filters, granularity, time_range, pool)
                    .await
            }
            Self::RetriesCount => {
                RetriesCount
                    .load_metrics(dimensions, auth, filters, granularity, time_range, pool)
                    .await
            }
            Self::ConnectorSuccessRate => {
                ConnectorSuccessRate
                    .load_metrics(dimensions, auth, filters, granularity, time_range, pool)
                    .await
            }
            Self::DebitRouting => {
                DebitRouting
                    .load_metrics(dimensions, auth, filters, granularity, time_range, pool)
                    .await
            }
            Self::SessionizedPaymentSuccessRate => {
                sessionized_metrics::PaymentSuccessRate
                    .load_metrics(dimensions, auth, filters, granularity, time_range, pool)
                    .await
            }
            Self::SessionizedPaymentCount => {
                sessionized_metrics::PaymentCount
                    .load_metrics(dimensions, auth, filters, granularity, time_range, pool)
                    .await
            }
            Self::SessionizedPaymentSuccessCount => {
                sessionized_metrics::PaymentSuccessCount
                    .load_metrics(dimensions, auth, filters, granularity, time_range, pool)
                    .await
            }
            Self::SessionizedPaymentProcessedAmount => {
                sessionized_metrics::PaymentProcessedAmount
                    .load_metrics(dimensions, auth, filters, granularity, time_range, pool)
                    .await
            }
            Self::SessionizedAvgTicketSize => {
                sessionized_metrics::AvgTicketSize
                    .load_metrics(dimensions, auth, filters, granularity, time_range, pool)
                    .await
            }
            Self::SessionizedRetriesCount => {
                sessionized_metrics::RetriesCount
                    .load_metrics(dimensions, auth, filters, granularity, time_range, pool)
                    .await
            }
            Self::SessionizedConnectorSuccessRate => {
                sessionized_metrics::ConnectorSuccessRate
                    .load_metrics(dimensions, auth, filters, granularity, time_range, pool)
                    .await
            }
            // NOTE(review): these two non-"Sessionized"-named variants also
            // dispatch to sessionized implementations — intentional per the
            // visible code, but worth confirming against the API model.
            Self::PaymentsDistribution => {
                sessionized_metrics::PaymentsDistribution
                    .load_metrics(dimensions, auth, filters, granularity, time_range, pool)
                    .await
            }
            Self::FailureReasons => {
                sessionized_metrics::FailureReasons
                    .load_metrics(dimensions, auth, filters, granularity, time_range, pool)
                    .await
            }
            Self::SessionizedDebitRouting => {
                sessionized_metrics::DebitRouting
                    .load_metrics(dimensions, auth, filters, granularity, time_range, pool)
                    .await
            }
        }
    }
}
crates/analytics/src/payments/metrics.rs
analytics::src::payments::metrics
1,521
true
// File: crates/analytics/src/payments/filters.rs
// Module: analytics::src::payments::filters
//
// Loads the distinct values available for one payment dimension, used to
// populate analytics filter dropdowns.

use api_models::analytics::{payments::PaymentDimensions, Granularity, TimeRange};
use common_utils::errors::ReportSwitchExt;
use diesel_models::enums::{AttemptStatus, AuthenticationType, Currency, RoutingApproach};
use error_stack::ResultExt;
use time::PrimitiveDateTime;

use crate::{
    enums::AuthInfo,
    query::{Aggregate, GroupByClause, QueryBuilder, QueryFilter, ToSql, Window},
    types::{
        AnalyticsCollection, AnalyticsDataSource, DBEnumWrapper, FiltersError, FiltersResult,
        LoadRow,
    },
};

/// Marker trait: a data source that can deserialize `PaymentFilterRow`s.
pub trait PaymentFilterAnalytics: LoadRow<PaymentFilterRow> {}

/// Returns the distinct values of `dimension` observed in the `Payment`
/// collection, restricted to `time_range` and the merchants/profiles allowed
/// by `auth`.
///
/// # Errors
/// Fails with `FiltersError::QueryBuildingError` if the query cannot be
/// constructed, or `FiltersError::QueryExecutionFailure` if execution fails.
pub async fn get_payment_filter_for_dimension<T>(
    dimension: PaymentDimensions,
    auth: &AuthInfo,
    time_range: &TimeRange,
    pool: &T,
) -> FiltersResult<Vec<PaymentFilterRow>>
where
    T: AnalyticsDataSource + PaymentFilterAnalytics,
    PrimitiveDateTime: ToSql<T>,
    AnalyticsCollection: ToSql<T>,
    Granularity: GroupByClause<T>,
    Aggregate<&'static str>: ToSql<T>,
    Window<&'static str>: ToSql<T>,
{
    let mut query_builder: QueryBuilder<T> = QueryBuilder::new(AnalyticsCollection::Payment);

    // Select only the requested dimension column...
    query_builder.add_select_column(dimension).switch()?;

    // ...scoped to the time window and the caller's auth scope.
    time_range
        .set_filter_clause(&mut query_builder)
        .attach_printable("Error filtering time range")
        .switch()?;

    auth.set_filter_clause(&mut query_builder).switch()?;

    // DISTINCT so each value appears once in the filter list.
    query_builder.set_distinct();

    query_builder
        .execute_query::<PaymentFilterRow, _>(pool)
        .await
        .change_context(FiltersError::QueryBuildingError)?
        .change_context(FiltersError::QueryExecutionFailure)
}

/// One row of a dimension-values query. Only the column matching the queried
/// dimension will be populated; all other fields deserialize as `None`.
#[derive(Debug, serde::Serialize, Eq, PartialEq, serde::Deserialize)]
pub struct PaymentFilterRow {
    pub currency: Option<DBEnumWrapper<Currency>>,
    pub status: Option<DBEnumWrapper<AttemptStatus>>,
    pub connector: Option<String>,
    pub authentication_type: Option<DBEnumWrapper<AuthenticationType>>,
    pub payment_method: Option<String>,
    pub payment_method_type: Option<String>,
    pub client_source: Option<String>,
    pub client_version: Option<String>,
    pub profile_id: Option<String>,
    pub card_network: Option<String>,
    pub merchant_id: Option<String>,
    pub card_last_4: Option<String>,
    pub card_issuer: Option<String>,
    pub error_reason: Option<String>,
    pub first_attempt: Option<bool>,
    pub routing_approach: Option<DBEnumWrapper<RoutingApproach>>,
    pub signature_network: Option<String>,
    pub is_issuer_regulated: Option<bool>,
    pub is_debit_routed: Option<bool>,
}
crates/analytics/src/payments/filters.rs
analytics::src::payments::filters
594
true
// File: crates/analytics/src/payments/accumulator.rs
// Module: analytics::src::payments::accumulator
//
// Accumulators that fold per-bucket metric/distribution rows into the final
// `PaymentMetricsBucketValue` returned to the API. Each accumulator ingests
// rows via `add_*_bucket` and produces its output via `collect`.

use api_models::analytics::payments::{ErrorResult, PaymentMetricsBucketValue};
use bigdecimal::ToPrimitive;
use diesel_models::enums as storage_enums;
use router_env::logger;

use super::{distribution::PaymentDistributionRow, metrics::PaymentMetricRow};

/// Aggregates all per-metric accumulators for a single time bucket.
#[derive(Debug, Default)]
pub struct PaymentMetricsAccumulator {
    pub payment_success_rate: SuccessRateAccumulator,
    pub payment_count: CountAccumulator,
    pub payment_success: CountAccumulator,
    pub processed_amount: ProcessedAmountAccumulator,
    pub avg_ticket_size: AverageAccumulator,
    pub payment_error_message: ErrorDistributionAccumulator,
    pub retries_count: CountAccumulator,
    pub retries_amount_processed: RetriesAmountAccumulator,
    pub connector_success_rate: SuccessRateAccumulator,
    pub payments_distribution: PaymentsDistributionAccumulator,
    pub failure_reasons_distribution: FailureReasonsDistributionAccumulator,
    pub debit_routing: DebitRoutingAccumulator,
}

/// One error-message bucket: `count` occurrences out of `total` rows.
#[derive(Debug, Default)]
pub struct ErrorDistributionRow {
    pub count: i64,
    pub total: i64,
    pub error_message: String,
}

/// Collects error-message rows; `collect` sorts them by descending count and
/// converts to `ErrorResult`s with percentages.
#[derive(Debug, Default)]
pub struct ErrorDistributionAccumulator {
    pub error_vec: Vec<ErrorDistributionRow>,
}

/// Counts failures overall and for first attempts only (i.e. without retries).
#[derive(Debug, Default)]
pub struct FailureReasonsDistributionAccumulator {
    pub count: u64,
    pub count_without_retries: u64,
}

/// Tracks charged vs. total attempt counts to derive a success percentage.
#[derive(Debug, Default)]
pub struct SuccessRateAccumulator {
    pub success: i64,
    pub total: i64,
}

/// Simple summed count; `None` until the first row carrying a count arrives.
#[derive(Debug, Default)]
#[repr(transparent)]
pub struct CountAccumulator {
    pub count: Option<i64>,
}

/// Sums processed amount/count, both including and excluding retried attempts.
#[derive(Debug, Default)]
pub struct ProcessedAmountAccumulator {
    pub count_with_retries: Option<i64>,
    pub total_with_retries: Option<i64>,
    pub count_without_retries: Option<i64>,
    pub total_without_retries: Option<i64>,
}

/// Sums debit-routed transaction counts and the associated savings amount.
#[derive(Debug, Default)]
pub struct DebitRoutingAccumulator {
    pub transaction_count: u64,
    pub savings_amount: u64,
}

/// Running sum + count used to produce an average (e.g. avg ticket size).
#[derive(Debug, Default)]
pub struct AverageAccumulator {
    pub total: u32,
    pub count: u32,
}

/// Summed amount processed via retried attempts.
#[derive(Debug, Default)]
#[repr(transparent)]
pub struct RetriesAmountAccumulator {
    pub total: Option<i64>,
}

/// Splits success/failure/total counts three ways: overall, first attempts
/// only ("without_retries"), and retried attempts only ("with_only_retries").
#[derive(Debug, Default)]
pub struct PaymentsDistributionAccumulator {
    pub success: u32,
    pub failed: u32,
    pub total: u32,
    pub success_without_retries: u32,
    pub success_with_only_retries: u32,
    pub failed_without_retries: u32,
    pub failed_with_only_retries: u32,
    pub total_without_retries: u32,
    pub total_with_only_retries: u32,
}

/// Fold interface for metric rows.
pub trait PaymentMetricAccumulator {
    type MetricOutput;

    fn add_metrics_bucket(&mut self, metrics: &PaymentMetricRow);

    fn collect(self) -> Self::MetricOutput;
}

/// Fold interface for distribution rows.
pub trait PaymentDistributionAccumulator {
    type DistributionOutput;

    fn add_distribution_bucket(&mut self, distribution: &PaymentDistributionRow);

    fn collect(self) -> Self::DistributionOutput;
}

impl PaymentDistributionAccumulator for ErrorDistributionAccumulator {
    type DistributionOutput = Option<Vec<ErrorResult>>;

    fn add_distribution_bucket(&mut self, distribution: &PaymentDistributionRow) {
        // Missing count/total default to 0; missing message becomes "".
        self.error_vec.push(ErrorDistributionRow {
            count: distribution.count.unwrap_or_default(),
            total: distribution
                .total
                .clone()
                .map(|i| i.to_i64().unwrap_or_default())
                .unwrap_or_default(),
            error_message: distribution.error_message.clone().unwrap_or("".to_string()),
        })
    }

    fn collect(mut self) -> Self::DistributionOutput {
        if self.error_vec.is_empty() {
            None
        } else {
            // Most frequent errors first.
            self.error_vec.sort_by(|a, b| b.count.cmp(&a.count));
            let mut res: Vec<ErrorResult> = Vec::new();
            for val in self.error_vec.into_iter() {
                // NOTE(review): the `?` here is inside a loop of a function
                // returning Option — a single count/total outside u32 range
                // (negative or > u32::MAX) discards the ENTIRE distribution,
                // not just this row. Confirm that is the intended behavior.
                let perc = f64::from(u32::try_from(val.count).ok()?) * 100.0
                    / f64::from(u32::try_from(val.total).ok()?);
                res.push(ErrorResult {
                    reason: val.error_message,
                    count: val.count,
                    // Round to 2 decimal places.
                    percentage: (perc * 100.0).round() / 100.0,
                })
            }
            Some(res)
        }
    }
}

impl PaymentMetricAccumulator for FailureReasonsDistributionAccumulator {
    type MetricOutput = (Option<u64>, Option<u64>);

    fn add_metrics_bucket(&mut self, metrics: &PaymentMetricRow) {
        // Negative counts (u64 conversion failure) are silently skipped.
        if let Some(count) = metrics.count {
            if let Ok(count_u64) = u64::try_from(count) {
                self.count += count_u64;
            }
        }
        // First attempts contribute to the "without retries" figure as well.
        if metrics.first_attempt.unwrap_or(false) {
            if let Some(count) = metrics.count {
                if let Ok(count_u64) = u64::try_from(count) {
                    self.count_without_retries += count_u64;
                }
            }
        }
    }

    fn collect(self) -> Self::MetricOutput {
        (Some(self.count), Some(self.count_without_retries))
    }
}

impl PaymentMetricAccumulator for SuccessRateAccumulator {
    type MetricOutput = Option<f64>;

    fn add_metrics_bucket(&mut self, metrics: &PaymentMetricRow) {
        // Only `Charged` attempts count as successes; every row counts
        // toward the total regardless of status.
        if let Some(ref status) = metrics.status {
            if status.as_ref() == &storage_enums::AttemptStatus::Charged {
                self.success += metrics.count.unwrap_or_default();
            }
        };
        self.total += metrics.count.unwrap_or_default();
    }

    fn collect(self) -> Self::MetricOutput {
        if self.total <= 0 {
            None
        } else {
            // NOTE(review): counts above u32::MAX make `try_from` fail and
            // `?` turns the whole rate into None rather than saturating.
            Some(
                f64::from(u32::try_from(self.success).ok()?) * 100.0
                    / f64::from(u32::try_from(self.total).ok()?),
            )
        }
    }
}

impl PaymentMetricAccumulator for DebitRoutingAccumulator {
    // (transaction_count, savings_amount, savings_in_usd — hardcoded 0 here,
    // presumably filled in by a later USD-conversion step; confirm at caller).
    type MetricOutput = (Option<u64>, Option<u64>, Option<u64>);

    fn add_metrics_bucket(&mut self, metrics: &PaymentMetricRow) {
        if let Some(count) = metrics.count {
            self.transaction_count += u64::try_from(count).unwrap_or(0);
        }
        if let Some(total) = metrics.total.as_ref().and_then(ToPrimitive::to_u64) {
            self.savings_amount += total;
        }
    }

    fn collect(self) -> Self::MetricOutput {
        (
            Some(self.transaction_count),
            Some(self.savings_amount),
            Some(0),
        )
    }
}

impl PaymentMetricAccumulator for PaymentsDistributionAccumulator {
    // Six rates: success / success-without-retries / success-with-only-retries,
    // then the same three for failures.
    type MetricOutput = (
        Option<f64>,
        Option<f64>,
        Option<f64>,
        Option<f64>,
        Option<f64>,
        Option<f64>,
    );

    fn add_metrics_bucket(&mut self, metrics: &PaymentMetricRow) {
        if let Some(ref status) = metrics.status {
            if status.as_ref() == &storage_enums::AttemptStatus::Charged {
                if let Some(success) = metrics
                    .count
                    .and_then(|success| u32::try_from(success).ok())
                {
                    self.success += success;
                    if metrics.first_attempt.unwrap_or(false) {
                        self.success_without_retries += success;
                    } else {
                        self.success_with_only_retries += success;
                    }
                }
            }
            if status.as_ref() == &storage_enums::AttemptStatus::Failure {
                if let Some(failed) = metrics.count.and_then(|failed| u32::try_from(failed).ok()) {
                    self.failed += failed;
                    if metrics.first_attempt.unwrap_or(false) {
                        self.failed_without_retries += failed;
                    } else {
                        self.failed_with_only_retries += failed;
                    }
                }
            }
            // Exclude in-flight / not-yet-attempted statuses from the total so
            // the denominator only contains terminal-ish attempts.
            if status.as_ref() != &storage_enums::AttemptStatus::AuthenticationFailed
                && status.as_ref() != &storage_enums::AttemptStatus::PaymentMethodAwaited
                && status.as_ref() != &storage_enums::AttemptStatus::DeviceDataCollectionPending
                && status.as_ref() != &storage_enums::AttemptStatus::ConfirmationAwaited
                && status.as_ref() != &storage_enums::AttemptStatus::Unresolved
            {
                if let Some(total) = metrics.count.and_then(|total| u32::try_from(total).ok()) {
                    self.total += total;
                    if metrics.first_attempt.unwrap_or(false) {
                        self.total_without_retries += total;
                    } else {
                        self.total_with_only_retries += total;
                    }
                }
            }
        }
    }

    fn collect(self) -> Self::MetricOutput {
        if self.total == 0 {
            (None, None, None, None, None, None)
        } else {
            let success = Some(self.success);
            let success_without_retries = Some(self.success_without_retries);
            let success_with_only_retries = Some(self.success_with_only_retries);
            let failed = Some(self.failed);
            let failed_with_only_retries = Some(self.failed_with_only_retries);
            let failed_without_retries = Some(self.failed_without_retries);
            let total = Some(self.total);
            let total_without_retries = Some(self.total_without_retries);
            let total_with_only_retries = Some(self.total_with_only_retries);

            // Each rate is numerator * 100 / denominator, or None when the
            // corresponding denominator is 0.
            let success_rate = match (success, total) {
                (Some(s), Some(t)) if t > 0 => Some(f64::from(s) * 100.0 / f64::from(t)),
                _ => None,
            };
            let success_rate_without_retries =
                match (success_without_retries, total_without_retries) {
                    (Some(s), Some(t)) if t > 0 => Some(f64::from(s) * 100.0 / f64::from(t)),
                    _ => None,
                };
            let success_rate_with_only_retries =
                match (success_with_only_retries, total_with_only_retries) {
                    (Some(s), Some(t)) if t > 0 => Some(f64::from(s) * 100.0 / f64::from(t)),
                    _ => None,
                };
            let failed_rate = match (failed, total) {
                (Some(s), Some(t)) if t > 0 => Some(f64::from(s) * 100.0 / f64::from(t)),
                _ => None,
            };
            let failed_rate_without_retries =
                match (failed_without_retries, total_without_retries) {
                    (Some(s), Some(t)) if t > 0 => Some(f64::from(s) * 100.0 / f64::from(t)),
                    _ => None,
                };
            let failed_rate_with_only_retries =
                match (failed_with_only_retries, total_with_only_retries) {
                    (Some(s), Some(t)) if t > 0 => Some(f64::from(s) * 100.0 / f64::from(t)),
                    _ => None,
                };
            (
                success_rate,
                success_rate_without_retries,
                success_rate_with_only_retries,
                failed_rate,
                failed_rate_without_retries,
                failed_rate_with_only_retries,
            )
        }
    }
}

impl PaymentMetricAccumulator for CountAccumulator {
    type MetricOutput = Option<u64>;

    #[inline]
    fn add_metrics_bucket(&mut self, metrics: &PaymentMetricRow) {
        // None + None = None; otherwise sum, treating a lone Some as the sum.
        self.count = match (self.count, metrics.count) {
            (None, None) => None,
            (None, i @ Some(_)) | (i @ Some(_), None) => i,
            (Some(a), Some(b)) => Some(a + b),
        }
    }

    #[inline]
    fn collect(self) -> Self::MetricOutput {
        // Negative totals (shouldn't occur for counts) collapse to None.
        self.count.and_then(|i| u64::try_from(i).ok())
    }
}

impl PaymentMetricAccumulator for ProcessedAmountAccumulator {
    // (total_with_retries, count_with_retries, total_without_retries,
    //  count_without_retries, amount_in_usd, amount_without_retries_in_usd) —
    // the two USD slots are hardcoded Some(0) here; presumably converted
    // downstream. TODO confirm at the call site.
    type MetricOutput = (
        Option<u64>,
        Option<u64>,
        Option<u64>,
        Option<u64>,
        Option<u64>,
        Option<u64>,
    );

    #[inline]
    fn add_metrics_bucket(&mut self, metrics: &PaymentMetricRow) {
        // Every row contributes to the "with retries" figures...
        self.total_with_retries = match (
            self.total_with_retries,
            metrics.total.as_ref().and_then(ToPrimitive::to_i64),
        ) {
            (None, None) => None,
            (None, i @ Some(_)) | (i @ Some(_), None) => i,
            (Some(a), Some(b)) => Some(a + b),
        };
        self.count_with_retries = match (self.count_with_retries, metrics.count) {
            (None, None) => None,
            (None, i @ Some(_)) | (i @ Some(_), None) => i,
            (Some(a), Some(b)) => Some(a + b),
        };
        // ...but only first attempts contribute to the "without retries" ones.
        if metrics.first_attempt.unwrap_or(false) {
            self.total_without_retries = match (
                self.total_without_retries,
                metrics.total.as_ref().and_then(ToPrimitive::to_i64),
            ) {
                (None, None) => None,
                (None, i @ Some(_)) | (i @ Some(_), None) => i,
                (Some(a), Some(b)) => Some(a + b),
            };
            self.count_without_retries = match (self.count_without_retries, metrics.count) {
                (None, None) => None,
                (None, i @ Some(_)) | (i @ Some(_), None) => i,
                (Some(a), Some(b)) => Some(a + b),
            };
        }
    }

    #[inline]
    fn collect(self) -> Self::MetricOutput {
        // Totals default to 0 when never set; counts stay None when never set.
        let total_with_retries = u64::try_from(self.total_with_retries.unwrap_or(0)).ok();
        let count_with_retries = self.count_with_retries.and_then(|i| u64::try_from(i).ok());

        let total_without_retries = u64::try_from(self.total_without_retries.unwrap_or(0)).ok();
        let count_without_retries = self
            .count_without_retries
            .and_then(|i| u64::try_from(i).ok());
        (
            total_with_retries,
            count_with_retries,
            total_without_retries,
            count_without_retries,
            Some(0),
            Some(0),
        )
    }
}

impl PaymentMetricAccumulator for RetriesAmountAccumulator {
    type MetricOutput = Option<u64>;

    fn add_metrics_bucket(&mut self, metrics: &PaymentMetricRow) {
        self.total = match (
            self.total,
            metrics.total.as_ref().and_then(ToPrimitive::to_i64),
        ) {
            (None, None) => None,
            (None, i @ Some(_)) | (i @ Some(_), None) => i,
            (Some(a), Some(b)) => Some(a + b),
        };
    }

    #[inline]
    fn collect(self) -> Self::MetricOutput {
        // Never-set totals report as 0; negative totals collapse to None.
        u64::try_from(self.total.unwrap_or(0)).ok()
    }
}

impl PaymentMetricAccumulator for AverageAccumulator {
    type MetricOutput = Option<f64>;

    fn add_metrics_bucket(&mut self, metrics: &PaymentMetricRow) {
        let total = metrics.total.as_ref().and_then(ToPrimitive::to_u32);
        let count = metrics.count.and_then(|total| u32::try_from(total).ok());

        match (total, count) {
            (Some(total), Some(count)) => {
                self.total += total;
                self.count += count;
            }
            // A row missing either side is dropped entirely (logged), so the
            // average stays consistent between numerator and denominator.
            _ => {
                logger::error!(message="Dropping metrics for average accumulator", metric=?metrics);
            }
        }
    }

    fn collect(self) -> Self::MetricOutput {
        if self.count == 0 {
            None
        } else {
            Some(f64::from(self.total) / f64::from(self.count))
        }
    }
}

impl PaymentMetricsAccumulator {
    /// Consumes all sub-accumulators and assembles the final bucket value.
    pub fn collect(self) -> PaymentMetricsBucketValue {
        let (
            payment_processed_amount,
            payment_processed_count,
            payment_processed_amount_without_smart_retries,
            payment_processed_count_without_smart_retries,
            payment_processed_amount_in_usd,
            payment_processed_amount_without_smart_retries_usd,
        ) = self.processed_amount.collect();
        let (
            payments_success_rate_distribution,
            payments_success_rate_distribution_without_smart_retries,
            payments_success_rate_distribution_with_only_retries,
            payments_failure_rate_distribution,
            payments_failure_rate_distribution_without_smart_retries,
            payments_failure_rate_distribution_with_only_retries,
        ) = self.payments_distribution.collect();
        let (failure_reason_count, failure_reason_count_without_smart_retries) =
            self.failure_reasons_distribution.collect();
        let (debit_routed_transaction_count, debit_routing_savings, debit_routing_savings_in_usd) =
            self.debit_routing.collect();
        PaymentMetricsBucketValue {
            payment_success_rate: self.payment_success_rate.collect(),
            payment_count: self.payment_count.collect(),
            payment_success_count: self.payment_success.collect(),
            payment_processed_amount,
            payment_processed_count,
            payment_processed_amount_without_smart_retries,
            payment_processed_count_without_smart_retries,
            avg_ticket_size: self.avg_ticket_size.collect(),
            payment_error_message: self.payment_error_message.collect(),
            retries_count: self.retries_count.collect(),
            retries_amount_processed: self.retries_amount_processed.collect(),
            connector_success_rate: self.connector_success_rate.collect(),
            payments_success_rate_distribution,
            payments_success_rate_distribution_without_smart_retries,
            payments_success_rate_distribution_with_only_retries,
            payments_failure_rate_distribution,
            payments_failure_rate_distribution_without_smart_retries,
            payments_failure_rate_distribution_with_only_retries,
            failure_reason_count,
            failure_reason_count_without_smart_retries,
            payment_processed_amount_in_usd,
            payment_processed_amount_without_smart_retries_usd,
            debit_routed_transaction_count,
            debit_routing_savings,
            debit_routing_savings_in_usd,
        }
    }
}
crates/analytics/src/payments/accumulator.rs
analytics::src::payments::accumulator
4,020
true
// File: crates/analytics/src/payments/metrics/connector_success_rate.rs
// Module: analytics::src::payments::metrics::connector_success_rate
//
// Per-connector success-rate metric: identical query shape to the overall
// success rate, plus a `connector IS NOT NULL` restriction.

use std::collections::HashSet;

use api_models::analytics::{
    payments::{PaymentDimensions, PaymentFilters, PaymentMetricsBucketIdentifier},
    Granularity, TimeRange,
};
use common_utils::errors::ReportSwitchExt;
use error_stack::ResultExt;
use time::PrimitiveDateTime;

use super::PaymentMetricRow;
use crate::{
    enums::AuthInfo,
    query::{
        Aggregate, FilterTypes, GroupByClause, QueryBuilder, QueryFilter, SeriesBucket, ToSql,
        Window,
    },
    types::{AnalyticsCollection, AnalyticsDataSource, MetricsError, MetricsResult},
};

#[derive(Default)]
pub(super) struct ConnectorSuccessRate;

#[async_trait::async_trait]
impl<T> super::PaymentMetric<T> for ConnectorSuccessRate
where
    T: AnalyticsDataSource + super::PaymentMetricAnalytics,
    PrimitiveDateTime: ToSql<T>,
    AnalyticsCollection: ToSql<T>,
    Granularity: GroupByClause<T>,
    Aggregate<&'static str>: ToSql<T>,
    Window<&'static str>: ToSql<T>,
{
    /// Builds and runs a grouped COUNT query over the `Payment` collection,
    /// restricted to rows with a non-null connector, and maps each result row
    /// to its `PaymentMetricsBucketIdentifier`.
    async fn load_metrics(
        &self,
        dimensions: &[PaymentDimensions],
        auth: &AuthInfo,
        filters: &PaymentFilters,
        granularity: Option<Granularity>,
        time_range: &TimeRange,
        pool: &T,
    ) -> MetricsResult<HashSet<(PaymentMetricsBucketIdentifier, PaymentMetricRow)>> {
        let mut query_builder: QueryBuilder<T> = QueryBuilder::new(AnalyticsCollection::Payment);
        let mut dimensions = dimensions.to_vec();

        // Status is always selected so the accumulator can tell successes
        // from failures, even when the caller didn't request it.
        dimensions.push(PaymentDimensions::PaymentStatus);

        for dim in dimensions.iter() {
            query_builder.add_select_column(dim).switch()?;
        }

        query_builder
            .add_select_column(Aggregate::Count {
                field: None,
                alias: Some("count"),
            })
            .switch()?;
        // start/end buckets = min/max created_at within each group, later
        // clipped to granularity boundaries.
        query_builder
            .add_select_column(Aggregate::Min {
                field: "created_at",
                alias: Some("start_bucket"),
            })
            .switch()?;
        query_builder
            .add_select_column(Aggregate::Max {
                field: "created_at",
                alias: Some("end_bucket"),
            })
            .switch()?;

        filters.set_filter_clause(&mut query_builder).switch()?;

        auth.set_filter_clause(&mut query_builder).switch()?;

        // Rows without a connector are meaningless for a per-connector rate.
        // ("NULL" value is ignored by IsNotNull; the filter type drives SQL.)
        query_builder
            .add_custom_filter_clause(PaymentDimensions::Connector, "NULL", FilterTypes::IsNotNull)
            .switch()?;

        time_range
            .set_filter_clause(&mut query_builder)
            .attach_printable("Error filtering time range")
            .switch()?;

        for dim in dimensions.iter() {
            query_builder
                .add_group_by_clause(dim)
                .attach_printable("Error grouping by dimensions")
                .switch()?;
        }

        if let Some(granularity) = granularity {
            granularity
                .set_group_by_clause(&mut query_builder)
                .attach_printable("Error adding granularity")
                .switch()?;
        }

        query_builder
            .execute_query::<PaymentMetricRow, _>(pool)
            .await
            .change_context(MetricsError::QueryBuildingError)?
            .change_context(MetricsError::QueryExecutionFailure)?
            .into_iter()
            .map(|i| {
                Ok((
                    PaymentMetricsBucketIdentifier::new(
                        i.currency.as_ref().map(|i| i.0),
                        None,
                        i.connector.clone(),
                        i.authentication_type.as_ref().map(|i| i.0),
                        i.payment_method.clone(),
                        i.payment_method_type.clone(),
                        i.client_source.clone(),
                        i.client_version.clone(),
                        i.profile_id.clone(),
                        i.card_network.clone(),
                        i.merchant_id.clone(),
                        i.card_last_4.clone(),
                        i.card_issuer.clone(),
                        i.error_reason.clone(),
                        i.routing_approach.as_ref().map(|i| i.0.clone()),
                        i.signature_network.clone(),
                        i.is_issuer_regulated,
                        i.is_debit_routed,
                        // Clip each bucket's observed min/max timestamps to
                        // the granularity grid; fall back to the requested
                        // range when no granularity was given.
                        TimeRange {
                            start_time: match (granularity, i.start_bucket) {
                                (Some(g), Some(st)) => g.clip_to_start(st)?,
                                _ => time_range.start_time,
                            },
                            end_time: granularity.as_ref().map_or_else(
                                || Ok(time_range.end_time),
                                |g| i.end_bucket.map(|et| g.clip_to_end(et)).transpose(),
                            )?,
                        },
                    ),
                    i,
                ))
            })
            .collect::<error_stack::Result<
                HashSet<(PaymentMetricsBucketIdentifier, PaymentMetricRow)>,
                crate::query::PostProcessingError,
            >>()
            .change_context(MetricsError::PostProcessingFailure)
    }
}
crates/analytics/src/payments/metrics/connector_success_rate.rs
analytics::src::payments::metrics::connector_success_rate
1,002
true
// File: crates/analytics/src/payments/metrics/retries_count.rs
// Module: analytics::src::payments::metrics::retries_count
//
// Counts retried-but-eventually-successful payment intents: rows from the
// `PaymentIntent` collection with attempt_count > 1 and status = Succeeded.
// Unlike other metrics, it ignores the requested dimensions and filters.

use std::collections::HashSet;

use api_models::{
    analytics::{
        payments::{PaymentDimensions, PaymentFilters, PaymentMetricsBucketIdentifier},
        Granularity, TimeRange,
    },
    enums::IntentStatus,
};
use common_utils::errors::ReportSwitchExt;
use error_stack::ResultExt;
use time::PrimitiveDateTime;

use super::PaymentMetricRow;
use crate::{
    enums::AuthInfo,
    query::{
        Aggregate, FilterTypes, GroupByClause, QueryBuilder, QueryFilter, SeriesBucket, ToSql,
        Window,
    },
    types::{AnalyticsCollection, AnalyticsDataSource, MetricsError, MetricsResult},
};

#[derive(Default)]
pub(super) struct RetriesCount;

#[async_trait::async_trait]
impl<T> super::PaymentMetric<T> for RetriesCount
where
    T: AnalyticsDataSource + super::PaymentMetricAnalytics,
    PrimitiveDateTime: ToSql<T>,
    AnalyticsCollection: ToSql<T>,
    Granularity: GroupByClause<T>,
    Aggregate<&'static str>: ToSql<T>,
    Window<&'static str>: ToSql<T>,
{
    /// Queries COUNT(*) and SUM(amount) of retried, succeeded intents.
    /// Note: `_dimensions` and `_filters` are deliberately unused — this
    /// metric is a global count scoped only by auth, time range and
    /// granularity.
    async fn load_metrics(
        &self,
        _dimensions: &[PaymentDimensions],
        auth: &AuthInfo,
        _filters: &PaymentFilters,
        granularity: Option<Granularity>,
        time_range: &TimeRange,
        pool: &T,
    ) -> MetricsResult<HashSet<(PaymentMetricsBucketIdentifier, PaymentMetricRow)>> {
        // Reads intents, not attempts — retries are visible as attempt_count.
        let mut query_builder: QueryBuilder<T> =
            QueryBuilder::new(AnalyticsCollection::PaymentIntent);
        query_builder
            .add_select_column(Aggregate::Count {
                field: None,
                alias: Some("count"),
            })
            .switch()?;
        query_builder
            .add_select_column(Aggregate::Sum {
                field: "amount",
                alias: Some("total"),
            })
            .switch()?;
        query_builder
            .add_select_column(Aggregate::Min {
                field: "created_at",
                alias: Some("start_bucket"),
            })
            .switch()?;
        query_builder
            .add_select_column(Aggregate::Max {
                field: "created_at",
                alias: Some("end_bucket"),
            })
            .switch()?;

        auth.set_filter_clause(&mut query_builder).switch()?;
        // attempt_count > 1 means the intent was retried at least once.
        query_builder
            .add_custom_filter_clause("attempt_count", "1", FilterTypes::Gt)
            .switch()?;
        // ...and only retries that ultimately succeeded are counted.
        query_builder
            .add_custom_filter_clause("status", IntentStatus::Succeeded, FilterTypes::Equal)
            .switch()?;
        time_range
            .set_filter_clause(&mut query_builder)
            .attach_printable("Error filtering time range")
            .switch()?;

        if let Some(granularity) = granularity {
            granularity
                .set_group_by_clause(&mut query_builder)
                .attach_printable("Error adding granularity")
                .switch()?;
        }

        query_builder
            .execute_query::<PaymentMetricRow, _>(pool)
            .await
            .change_context(MetricsError::QueryBuildingError)?
            .change_context(MetricsError::QueryExecutionFailure)?
            .into_iter()
            .map(|i| {
                Ok((
                    // All dimension fields are None here (nothing was grouped
                    // by), so the identifier is effectively just the bucket
                    // time range.
                    PaymentMetricsBucketIdentifier::new(
                        i.currency.as_ref().map(|i| i.0),
                        None,
                        i.connector.clone(),
                        i.authentication_type.as_ref().map(|i| i.0),
                        i.payment_method.clone(),
                        i.payment_method_type.clone(),
                        i.client_source.clone(),
                        i.client_version.clone(),
                        i.profile_id.clone(),
                        i.card_network.clone(),
                        i.merchant_id.clone(),
                        i.card_last_4.clone(),
                        i.card_issuer.clone(),
                        i.error_reason.clone(),
                        i.routing_approach.as_ref().map(|i| i.0.clone()),
                        i.signature_network.clone(),
                        i.is_issuer_regulated,
                        i.is_debit_routed,
                        TimeRange {
                            start_time: match (granularity, i.start_bucket) {
                                (Some(g), Some(st)) => g.clip_to_start(st)?,
                                _ => time_range.start_time,
                            },
                            end_time: granularity.as_ref().map_or_else(
                                || Ok(time_range.end_time),
                                |g| i.end_bucket.map(|et| g.clip_to_end(et)).transpose(),
                            )?,
                        },
                    ),
                    i,
                ))
            })
            .collect::<error_stack::Result<
                HashSet<(PaymentMetricsBucketIdentifier, PaymentMetricRow)>,
                crate::query::PostProcessingError,
            >>()
            .change_context(MetricsError::PostProcessingFailure)
    }
}
crates/analytics/src/payments/metrics/retries_count.rs
analytics::src::payments::metrics::retries_count
984
true
// File: crates/analytics/src/payments/metrics/sessionized_metrics.rs // Module: analytics::src::payments::metrics::sessionized_metrics mod avg_ticket_size; mod connector_success_rate; mod debit_routing; mod failure_reasons; mod payment_count; mod payment_processed_amount; mod payment_success_count; mod payments_distribution; mod retries_count; mod success_rate; pub(super) use avg_ticket_size::AvgTicketSize; pub(super) use connector_success_rate::ConnectorSuccessRate; pub(super) use debit_routing::DebitRouting; pub(super) use failure_reasons::FailureReasons; pub(super) use payment_count::PaymentCount; pub(super) use payment_processed_amount::PaymentProcessedAmount; pub(super) use payment_success_count::PaymentSuccessCount; pub(super) use payments_distribution::PaymentsDistribution; pub(super) use retries_count::RetriesCount; pub(super) use success_rate::PaymentSuccessRate; pub use super::{PaymentMetric, PaymentMetricAnalytics, PaymentMetricRow};
crates/analytics/src/payments/metrics/sessionized_metrics.rs
analytics::src::payments::metrics::sessionized_metrics
204
true
// File: crates/analytics/src/payments/metrics/success_rate.rs
// Module: analytics::src::payments::metrics::success_rate

use std::collections::HashSet;

use api_models::analytics::{
    payments::{PaymentDimensions, PaymentFilters, PaymentMetricsBucketIdentifier},
    Granularity, TimeRange,
};
use common_utils::errors::ReportSwitchExt;
use error_stack::ResultExt;
use time::PrimitiveDateTime;

use super::PaymentMetricRow;
use crate::{
    enums::AuthInfo,
    query::{Aggregate, GroupByClause, QueryBuilder, QueryFilter, SeriesBucket, ToSql, Window},
    types::{AnalyticsCollection, AnalyticsDataSource, MetricsError, MetricsResult},
};

/// Payment success-rate metric.
///
/// The query itself only produces per-status counts (it forces a group-by on
/// `PaymentStatus`); the actual success ratio is presumably derived by the
/// caller from those per-status buckets — confirm in the aggregation layer.
#[derive(Default)]
pub(super) struct PaymentSuccessRate;

#[async_trait::async_trait]
impl<T> super::PaymentMetric<T> for PaymentSuccessRate
where
    T: AnalyticsDataSource + super::PaymentMetricAnalytics,
    PrimitiveDateTime: ToSql<T>,
    AnalyticsCollection: ToSql<T>,
    Granularity: GroupByClause<T>,
    Aggregate<&'static str>: ToSql<T>,
    Window<&'static str>: ToSql<T>,
{
    /// Builds and executes the per-bucket count query against the payments
    /// collection.
    ///
    /// * `dimensions` – caller-requested group-by dimensions (payment status
    ///   is appended unconditionally, see below).
    /// * `auth` / `filters` / `time_range` – WHERE-clause restrictions.
    /// * `granularity` – optional time bucketing; when present it adds a
    ///   group-by and is used to clip each bucket's time range.
    ///
    /// Returns one `(bucket identifier, row)` pair per result row, or a
    /// `MetricsError` if query building, execution, or post-processing fails.
    async fn load_metrics(
        &self,
        dimensions: &[PaymentDimensions],
        auth: &AuthInfo,
        filters: &PaymentFilters,
        granularity: Option<Granularity>,
        time_range: &TimeRange,
        pool: &T,
    ) -> MetricsResult<HashSet<(PaymentMetricsBucketIdentifier, PaymentMetricRow)>> {
        let mut query_builder: QueryBuilder<T> = QueryBuilder::new(AnalyticsCollection::Payment);

        // Always split buckets by payment status, on top of whatever
        // dimensions the caller asked for; the success rate needs
        // per-status counts.
        let mut dimensions = dimensions.to_vec();
        dimensions.push(PaymentDimensions::PaymentStatus);

        for dim in dimensions.iter() {
            query_builder.add_select_column(dim).switch()?;
        }

        // COUNT(*) per bucket, plus MIN/MAX(created_at) so each bucket can be
        // clipped to its actually-observed time window in the mapping below.
        query_builder
            .add_select_column(Aggregate::Count {
                field: None,
                alias: Some("count"),
            })
            .switch()?;
        query_builder
            .add_select_column(Aggregate::Min {
                field: "created_at",
                alias: Some("start_bucket"),
            })
            .switch()?;
        query_builder
            .add_select_column(Aggregate::Max {
                field: "created_at",
                alias: Some("end_bucket"),
            })
            .switch()?;

        // WHERE clauses: caller filters, auth scoping, and the time window.
        filters.set_filter_clause(&mut query_builder).switch()?;
        auth.set_filter_clause(&mut query_builder).switch()?;
        time_range
            .set_filter_clause(&mut query_builder)
            .attach_printable("Error filtering time range")
            .switch()?;

        // GROUP BY every selected dimension (including the forced status).
        for dim in dimensions.iter() {
            query_builder
                .add_group_by_clause(dim)
                .attach_printable("Error grouping by dimensions")
                .switch()?;
        }

        // Optional time-granularity bucketing (e.g. hourly/daily).
        if let Some(granularity) = granularity {
            granularity
                .set_group_by_clause(&mut query_builder)
                .attach_printable("Error adding granularity")
                .switch()?;
        }

        query_builder
            .execute_query::<PaymentMetricRow, _>(pool)
            .await
            // Outer result: the query could not be built; inner result: it
            // failed at execution time.
            .change_context(MetricsError::QueryBuildingError)?
            .change_context(MetricsError::QueryExecutionFailure)?
            .into_iter()
            .map(|i| {
                Ok((
                    // NOTE: status is deliberately passed as `None` here even
                    // though the query groups by it; the status stays available
                    // on the row `i` itself.
                    PaymentMetricsBucketIdentifier::new(
                        i.currency.as_ref().map(|i| i.0),
                        None,
                        i.connector.clone(),
                        i.authentication_type.as_ref().map(|i| i.0),
                        i.payment_method.clone(),
                        i.payment_method_type.clone(),
                        i.client_source.clone(),
                        i.client_version.clone(),
                        i.profile_id.clone(),
                        i.card_network.clone(),
                        i.merchant_id.clone(),
                        i.card_last_4.clone(),
                        i.card_issuer.clone(),
                        i.error_reason.clone(),
                        i.routing_approach.as_ref().map(|i| i.0.clone()),
                        i.signature_network.clone(),
                        i.is_issuer_regulated,
                        i.is_debit_routed,
                        // Clip each bucket's window to the granularity grid
                        // when both a granularity and an observed bucket edge
                        // exist; otherwise fall back to the requested range.
                        TimeRange {
                            start_time: match (granularity, i.start_bucket) {
                                (Some(g), Some(st)) => g.clip_to_start(st)?,
                                _ => time_range.start_time,
                            },
                            end_time: granularity.as_ref().map_or_else(
                                || Ok(time_range.end_time),
                                |g| i.end_bucket.map(|et| g.clip_to_end(et)).transpose(),
                            )?,
                        },
                    ),
                    i,
                ))
            })
            .collect::<error_stack::Result<
                HashSet<(PaymentMetricsBucketIdentifier, PaymentMetricRow)>,
                crate::query::PostProcessingError,
            >>()
            .change_context(MetricsError::PostProcessingFailure)
    }
}
crates/analytics/src/payments/metrics/success_rate.rs
analytics::src::payments::metrics::success_rate
964
true
// File: crates/analytics/src/payments/metrics/avg_ticket_size.rs
// Module: analytics::src::payments::metrics::avg_ticket_size

use std::collections::HashSet;

use api_models::analytics::{
    payments::{PaymentDimensions, PaymentFilters, PaymentMetricsBucketIdentifier},
    Granularity, TimeRange,
};
use common_utils::errors::ReportSwitchExt;
use diesel_models::enums as storage_enums;
use error_stack::ResultExt;
use time::PrimitiveDateTime;

use super::{PaymentMetric, PaymentMetricRow};
use crate::{
    enums::AuthInfo,
    query::{Aggregate, GroupByClause, QueryBuilder, QueryFilter, SeriesBucket, ToSql, Window},
    types::{AnalyticsCollection, AnalyticsDataSource, MetricsError, MetricsResult},
};

/// Average-ticket-size metric.
///
/// Fetches SUM(amount) and COUNT(*) per bucket, restricted to charged
/// attempts; the average itself is presumably total/count, computed by the
/// caller — confirm in the aggregation layer.
#[derive(Default)]
pub(super) struct AvgTicketSize;

#[async_trait::async_trait]
impl<T> PaymentMetric<T> for AvgTicketSize
where
    T: AnalyticsDataSource + super::PaymentMetricAnalytics,
    PrimitiveDateTime: ToSql<T>,
    AnalyticsCollection: ToSql<T>,
    Granularity: GroupByClause<T>,
    Aggregate<&'static str>: ToSql<T>,
    Window<&'static str>: ToSql<T>,
{
    /// Builds and executes the SUM/COUNT query against the payments
    /// collection, filtered to `AttemptStatus::Charged`.
    ///
    /// Returns one `(bucket identifier, row)` pair per result row, or a
    /// `MetricsError` if query building, execution, or post-processing fails.
    async fn load_metrics(
        &self,
        dimensions: &[PaymentDimensions],
        auth: &AuthInfo,
        filters: &PaymentFilters,
        granularity: Option<Granularity>,
        time_range: &TimeRange,
        pool: &T,
    ) -> MetricsResult<HashSet<(PaymentMetricsBucketIdentifier, PaymentMetricRow)>> {
        let mut query_builder: QueryBuilder<T> = QueryBuilder::new(AnalyticsCollection::Payment);

        for dim in dimensions.iter() {
            query_builder.add_select_column(dim).switch()?;
        }

        // SUM(amount) and COUNT(*) per bucket, plus MIN/MAX(created_at) so
        // each bucket can be clipped to its observed time window below.
        query_builder
            .add_select_column(Aggregate::Sum {
                field: "amount",
                alias: Some("total"),
            })
            .switch()?;
        query_builder
            .add_select_column(Aggregate::Count {
                field: None,
                alias: Some("count"),
            })
            .switch()?;
        query_builder
            .add_select_column(Aggregate::Min {
                field: "created_at",
                alias: Some("start_bucket"),
            })
            .switch()?;
        query_builder
            .add_select_column(Aggregate::Max {
                field: "created_at",
                alias: Some("end_bucket"),
            })
            .switch()?;

        // WHERE clauses: caller filters, auth scoping, and the time window.
        filters.set_filter_clause(&mut query_builder).switch()?;
        auth.set_filter_clause(&mut query_builder).switch()?;
        time_range
            .set_filter_clause(&mut query_builder)
            .attach_printable("Error filtering time range")
            .switch()?;

        for dim in dimensions.iter() {
            query_builder
                .add_group_by_clause(dim)
                .attach_printable("Error grouping by dimensions")
                .switch()?;
        }

        // Optional time-granularity bucketing (e.g. hourly/daily).
        if let Some(granularity) = granularity {
            granularity
                .set_group_by_clause(&mut query_builder)
                .attach_printable("Error adding granularity")
                .switch()?;
        }

        // Only successfully charged attempts contribute to ticket size.
        query_builder
            .add_filter_clause(
                PaymentDimensions::PaymentStatus,
                storage_enums::AttemptStatus::Charged,
            )
            .switch()?;

        query_builder
            .execute_query::<PaymentMetricRow, _>(pool)
            .await
            // Outer result: the query could not be built; inner result: it
            // failed at execution time.
            .change_context(MetricsError::QueryBuildingError)?
            .change_context(MetricsError::QueryExecutionFailure)?
            .into_iter()
            .map(|i| {
                Ok((
                    PaymentMetricsBucketIdentifier::new(
                        i.currency.as_ref().map(|i| i.0),
                        // Unlike some sibling metrics, the row's status is
                        // forwarded into the bucket identifier here.
                        i.status.as_ref().map(|i| i.0),
                        i.connector.clone(),
                        i.authentication_type.as_ref().map(|i| i.0),
                        i.payment_method.clone(),
                        i.payment_method_type.clone(),
                        i.client_source.clone(),
                        i.client_version.clone(),
                        i.profile_id.clone(),
                        i.card_network.clone(),
                        i.merchant_id.clone(),
                        i.card_last_4.clone(),
                        i.card_issuer.clone(),
                        i.error_reason.clone(),
                        i.routing_approach.as_ref().map(|i| i.0.clone()),
                        i.signature_network.clone(),
                        i.is_issuer_regulated,
                        i.is_debit_routed,
                        // Clip each bucket's window to the granularity grid
                        // when both a granularity and an observed bucket edge
                        // exist; otherwise fall back to the requested range.
                        TimeRange {
                            start_time: match (granularity, i.start_bucket) {
                                (Some(g), Some(st)) => g.clip_to_start(st)?,
                                _ => time_range.start_time,
                            },
                            end_time: granularity.as_ref().map_or_else(
                                || Ok(time_range.end_time),
                                |g| i.end_bucket.map(|et| g.clip_to_end(et)).transpose(),
                            )?,
                        },
                    ),
                    i,
                ))
            })
            .collect::<error_stack::Result<
                HashSet<(PaymentMetricsBucketIdentifier, PaymentMetricRow)>,
                crate::query::PostProcessingError,
            >>()
            .change_context(MetricsError::PostProcessingFailure)
    }
}
crates/analytics/src/payments/metrics/avg_ticket_size.rs
analytics::src::payments::metrics::avg_ticket_size
1,038
true
// File: crates/analytics/src/payments/metrics/debit_routing.rs
// Module: analytics::src::payments::metrics::debit_routing

use std::collections::HashSet;

use api_models::analytics::{
    payments::{PaymentDimensions, PaymentFilters, PaymentMetricsBucketIdentifier},
    Granularity, TimeRange,
};
use common_utils::errors::ReportSwitchExt;
use diesel_models::enums as storage_enums;
use error_stack::ResultExt;
use time::PrimitiveDateTime;

use super::PaymentMetricRow;
use crate::{
    enums::AuthInfo,
    query::{Aggregate, GroupByClause, QueryBuilder, QueryFilter, SeriesBucket, ToSql, Window},
    types::{AnalyticsCollection, AnalyticsDataSource, MetricsError, MetricsResult},
};

/// Debit-routing savings metric.
///
/// Fetches COUNT(*) and SUM(debit_routing_savings) per bucket, restricted to
/// charged attempts. Results are additionally grouped by currency since
/// savings amounts are not comparable across currencies.
#[derive(Default)]
pub(super) struct DebitRouting;

#[async_trait::async_trait]
impl<T> super::PaymentMetric<T> for DebitRouting
where
    T: AnalyticsDataSource + super::PaymentMetricAnalytics,
    PrimitiveDateTime: ToSql<T>,
    AnalyticsCollection: ToSql<T>,
    Granularity: GroupByClause<T>,
    Aggregate<&'static str>: ToSql<T>,
    Window<&'static str>: ToSql<T>,
{
    /// Builds and executes the savings query against the payments collection,
    /// filtered to `AttemptStatus::Charged` and grouped by currency on top of
    /// the caller-supplied dimensions.
    ///
    /// Returns one `(bucket identifier, row)` pair per result row, or a
    /// `MetricsError` if query building, execution, or post-processing fails.
    async fn load_metrics(
        &self,
        dimensions: &[PaymentDimensions],
        auth: &AuthInfo,
        filters: &PaymentFilters,
        granularity: Option<Granularity>,
        time_range: &TimeRange,
        pool: &T,
    ) -> MetricsResult<HashSet<(PaymentMetricsBucketIdentifier, PaymentMetricRow)>> {
        let mut query_builder: QueryBuilder<T> = QueryBuilder::new(AnalyticsCollection::Payment);

        for dim in dimensions.iter() {
            query_builder.add_select_column(dim).switch()?;
        }

        // COUNT(*) and SUM(debit_routing_savings) per bucket; `currency` is
        // selected explicitly because savings are grouped per currency below.
        query_builder
            .add_select_column(Aggregate::Count {
                field: None,
                alias: Some("count"),
            })
            .switch()?;
        query_builder
            .add_select_column(Aggregate::Sum {
                field: "debit_routing_savings",
                alias: Some("total"),
            })
            .switch()?;
        query_builder.add_select_column("currency").switch()?;

        // MIN/MAX(created_at) so each bucket can be clipped to its observed
        // time window in the mapping below.
        query_builder
            .add_select_column(Aggregate::Min {
                field: "created_at",
                alias: Some("start_bucket"),
            })
            .switch()?;
        query_builder
            .add_select_column(Aggregate::Max {
                field: "created_at",
                alias: Some("end_bucket"),
            })
            .switch()?;

        // WHERE clauses: caller filters, auth scoping, and the time window.
        filters.set_filter_clause(&mut query_builder).switch()?;
        auth.set_filter_clause(&mut query_builder).switch()?;
        time_range
            .set_filter_clause(&mut query_builder)
            .attach_printable("Error filtering time range")
            .switch()?;

        for dim in dimensions.iter() {
            query_builder
                .add_group_by_clause(dim)
                .attach_printable("Error grouping by dimensions")
                .switch()?;
        }
        // Savings are summed per currency regardless of caller dimensions.
        query_builder
            .add_group_by_clause("currency")
            .attach_printable("Error grouping by currency")
            .switch()?;

        // Optional time-granularity bucketing (e.g. hourly/daily).
        if let Some(granularity) = granularity {
            granularity
                .set_group_by_clause(&mut query_builder)
                .attach_printable("Error adding granularity")
                .switch()?;
        }

        // Only successfully charged attempts can have realized savings.
        query_builder
            .add_filter_clause(
                PaymentDimensions::PaymentStatus,
                storage_enums::AttemptStatus::Charged,
            )
            .switch()?;

        query_builder
            .execute_query::<PaymentMetricRow, _>(pool)
            .await
            // Outer result: the query could not be built; inner result: it
            // failed at execution time.
            .change_context(MetricsError::QueryBuildingError)?
            .change_context(MetricsError::QueryExecutionFailure)?
            .into_iter()
            .map(|i| {
                Ok((
                    // Status slot is `None`: only charged rows survive the
                    // filter above, so status carries no information here.
                    PaymentMetricsBucketIdentifier::new(
                        i.currency.as_ref().map(|i| i.0),
                        None,
                        i.connector.clone(),
                        i.authentication_type.as_ref().map(|i| i.0),
                        i.payment_method.clone(),
                        i.payment_method_type.clone(),
                        i.client_source.clone(),
                        i.client_version.clone(),
                        i.profile_id.clone(),
                        i.card_network.clone(),
                        i.merchant_id.clone(),
                        i.card_last_4.clone(),
                        i.card_issuer.clone(),
                        i.error_reason.clone(),
                        i.routing_approach.as_ref().map(|i| i.0.clone()),
                        i.signature_network.clone(),
                        i.is_issuer_regulated,
                        i.is_debit_routed,
                        // Clip each bucket's window to the granularity grid
                        // when both a granularity and an observed bucket edge
                        // exist; otherwise fall back to the requested range.
                        TimeRange {
                            start_time: match (granularity, i.start_bucket) {
                                (Some(g), Some(st)) => g.clip_to_start(st)?,
                                _ => time_range.start_time,
                            },
                            end_time: granularity.as_ref().map_or_else(
                                || Ok(time_range.end_time),
                                |g| i.end_bucket.map(|et| g.clip_to_end(et)).transpose(),
                            )?,
                        },
                    ),
                    i,
                ))
            })
            .collect::<error_stack::Result<
                HashSet<(PaymentMetricsBucketIdentifier, PaymentMetricRow)>,
                crate::query::PostProcessingError,
            >>()
            .change_context(MetricsError::PostProcessingFailure)
    }
}
crates/analytics/src/payments/metrics/debit_routing.rs
analytics::src::payments::metrics::debit_routing
1,068
true