repo_id
stringclasses 137
values | file_path
stringlengths 10
89
| content
stringlengths 0
990k
| __index_level_0__
int64 0
0
|
|---|---|---|---|
templates
|
/mono/helm/proxy-poloniex/templates/ingress.yaml
|
{{- if .Values.ingress.enabled -}}
{{- $fullName := include "proxy-poloniex.fullname" . -}}
{{- $svcPort := .Values.service.port -}}
{{/* Pre-1.18 clusters have no ingressClassName field: fall back to the legacy
     kubernetes.io/ingress.class annotation unless the user already set it. */}}
{{- if and .Values.ingress.className (not (semverCompare ">=1.18-0" .Capabilities.KubeVersion.GitVersion)) }}
  {{- if not (hasKey .Values.ingress.annotations "kubernetes.io/ingress.class") }}
  {{- $_ := set .Values.ingress.annotations "kubernetes.io/ingress.class" .Values.ingress.className}}
  {{- end }}
{{- end }}
{{/* Pick the newest Ingress API the cluster supports. */}}
{{- if semverCompare ">=1.19-0" .Capabilities.KubeVersion.GitVersion -}}
apiVersion: networking.k8s.io/v1
{{- else if semverCompare ">=1.14-0" .Capabilities.KubeVersion.GitVersion -}}
apiVersion: networking.k8s.io/v1beta1
{{- else -}}
apiVersion: extensions/v1beta1
{{- end }}
kind: Ingress
metadata:
  name: {{ $fullName }}
  labels:
    {{- include "proxy-poloniex.labels" . | nindent 4 }}
  {{- with .Values.ingress.annotations }}
  annotations:
    {{- toYaml . | nindent 4 }}
  {{- end }}
spec:
  {{- if and .Values.ingress.className (semverCompare ">=1.18-0" .Capabilities.KubeVersion.GitVersion) }}
  ingressClassName: {{ .Values.ingress.className }}
  {{- end }}
  {{- if .Values.ingress.tls }}
  tls:
    {{- range .Values.ingress.tls }}
    - hosts:
        {{- range .hosts }}
        - {{ . | quote }}
        {{- end }}
      secretName: {{ .secretName }}
    {{- end }}
  {{- end }}
  rules:
    {{- range .Values.ingress.hosts }}
    - host: {{ .host | quote }}
      http:
        paths:
          {{- range .paths }}
          - path: {{ .path }}
            {{- if and .pathType (semverCompare ">=1.18-0" $.Capabilities.KubeVersion.GitVersion) }}
            pathType: {{ .pathType }}
            {{- end }}
            backend:
              {{- if semverCompare ">=1.19-0" $.Capabilities.KubeVersion.GitVersion }}
              service:
                name: {{ $fullName }}
                port:
                  number: {{ $svcPort }}
              {{- else }}
              serviceName: {{ $fullName }}
              servicePort: {{ $svcPort }}
              {{- end }}
          {{- end }}
    {{- end }}
{{- end }}
| 0
|
templates
|
/mono/helm/proxy-poloniex/templates/_helpers.tpl
|
{{/*
Expand the name of the chart.
*/}}
{{- define "proxy-poloniex.name" -}}
{{- default .Chart.Name .Values.nameOverride | trunc 63 | trimSuffix "-" }}
{{- end }}
{{/*
Create a default fully qualified app name.
We truncate at 63 chars because some Kubernetes name fields are limited to this (by the DNS naming spec).
If release name contains chart name it will be used as a full name.
*/}}
{{- define "proxy-poloniex.fullname" -}}
{{- if .Values.fullnameOverride }}
{{- .Values.fullnameOverride | trunc 63 | trimSuffix "-" }}
{{- else }}
{{- $name := default .Chart.Name .Values.nameOverride }}
{{- if contains $name .Release.Name }}
{{- .Release.Name | trunc 63 | trimSuffix "-" }}
{{- else }}
{{- printf "%s-%s" .Release.Name $name | trunc 63 | trimSuffix "-" }}
{{- end }}
{{- end }}
{{- end }}
{{/*
Create chart name and version as used by the chart label.
*/}}
{{- define "proxy-poloniex.chart" -}}
{{- printf "%s-%s" .Chart.Name .Chart.Version | replace "+" "_" | trunc 63 | trimSuffix "-" }}
{{- end }}
{{/*
Common labels
*/}}
{{- define "proxy-poloniex.labels" -}}
helm.sh/chart: {{ include "proxy-poloniex.chart" . }}
{{ include "proxy-poloniex.selectorLabels" . }}
{{- if .Chart.AppVersion }}
app.kubernetes.io/version: {{ .Chart.AppVersion | quote }}
{{- end }}
app.kubernetes.io/managed-by: {{ .Release.Service }}
{{- end }}
{{/*
Selector labels
*/}}
{{- define "proxy-poloniex.selectorLabels" -}}
app.kubernetes.io/name: {{ include "proxy-poloniex.name" . }}
app.kubernetes.io/instance: {{ .Release.Name }}
{{- end }}
{{/*
Create the name of the service account to use
*/}}
{{- define "proxy-poloniex.serviceAccountName" -}}
{{- if .Values.serviceAccount.create }}
{{- default (include "proxy-poloniex.fullname" .) .Values.serviceAccount.name }}
{{- else }}
{{- default "default" .Values.serviceAccount.name }}
{{- end }}
{{- end }}
| 0
|
templates
|
/mono/helm/proxy-poloniex/templates/hpa.yaml
|
{{- if .Values.autoscaling.enabled }}
{{/*
HorizontalPodAutoscaler for the deployment.
autoscaling/v2 replaced v2beta1 (v2beta1 is removed in Kubernetes >= 1.26);
emit v2 whenever the cluster serves it and fall back to v2beta1 otherwise.
The two APIs spell resource-utilization targets differently, handled below.
*/}}
{{- if .Capabilities.APIVersions.Has "autoscaling/v2" }}
apiVersion: autoscaling/v2
{{- else }}
apiVersion: autoscaling/v2beta1
{{- end }}
kind: HorizontalPodAutoscaler
metadata:
  name: {{ include "proxy-poloniex.fullname" . }}
  labels:
    {{- include "proxy-poloniex.labels" . | nindent 4 }}
spec:
  scaleTargetRef:
    apiVersion: apps/v1
    kind: Deployment
    name: {{ include "proxy-poloniex.fullname" . }}
  minReplicas: {{ .Values.autoscaling.minReplicas }}
  maxReplicas: {{ .Values.autoscaling.maxReplicas }}
  metrics:
    {{- if .Values.autoscaling.targetCPUUtilizationPercentage }}
    - type: Resource
      resource:
        name: cpu
        {{- if .Capabilities.APIVersions.Has "autoscaling/v2" }}
        target:
          type: Utilization
          averageUtilization: {{ .Values.autoscaling.targetCPUUtilizationPercentage }}
        {{- else }}
        targetAverageUtilization: {{ .Values.autoscaling.targetCPUUtilizationPercentage }}
        {{- end }}
    {{- end }}
    {{- if .Values.autoscaling.targetMemoryUtilizationPercentage }}
    - type: Resource
      resource:
        name: memory
        {{- if .Capabilities.APIVersions.Has "autoscaling/v2" }}
        target:
          type: Utilization
          averageUtilization: {{ .Values.autoscaling.targetMemoryUtilizationPercentage }}
        {{- else }}
        targetAverageUtilization: {{ .Values.autoscaling.targetMemoryUtilizationPercentage }}
        {{- end }}
    {{- end }}
{{- end }}
| 0
|
templates
|
/mono/helm/proxy-poloniex/templates/service.yaml
|
apiVersion: v1
kind: Service
metadata:
  name: {{ include "proxy-poloniex.fullname" . }}
  labels:
    {{- include "proxy-poloniex.labels" . | nindent 4 }}
spec:
  type: {{ .Values.service.type }}
  ports:
    # Single HTTP port; targetPort "http" matches the container port name
    # declared in the deployment template.
    - port: {{ .Values.service.port }}
      targetPort: http
      protocol: TCP
      name: http
  selector:
    {{- include "proxy-poloniex.selectorLabels" . | nindent 4 }}
|
templates
|
/mono/helm/proxy-poloniex/templates/deployment.yaml
|
{{- include "common.secrets" . -}}
{{- include "common.config.configmap" . -}}
apiVersion: apps/v1
kind: Deployment
metadata:
  name: {{ include "proxy-poloniex.fullname" . }}
  labels:
    {{- include "proxy-poloniex.labels" . | nindent 4 }}
spec:
  {{- if not .Values.autoscaling.enabled }}
  replicas: {{ .Values.replicaCount }}
  {{- end }}
  selector:
    matchLabels:
      {{- include "proxy-poloniex.selectorLabels" . | nindent 6 }}
  template:
    metadata:
      {{- with .Values.podAnnotations }}
      annotations:
        {{- toYaml . | nindent 8 }}
      {{- end }}
      labels:
        {{- include "proxy-poloniex.selectorLabels" . | nindent 8 }}
    spec:
      {{- with .Values.imagePullSecrets }}
      imagePullSecrets:
        {{- toYaml . | nindent 8 }}
      {{- end }}
      serviceAccountName: {{ include "proxy-poloniex.serviceAccountName" . }}
      securityContext:
        {{- toYaml .Values.podSecurityContext | nindent 8 }}
      containers:
        - name: {{ .Chart.Name }}
          securityContext:
            {{- toYaml .Values.securityContext | nindent 12 }}
          image: "{{ .Values.image.repository }}:{{ .Values.image.tag | default .Chart.AppVersion }}"
          imagePullPolicy: {{ .Values.image.pullPolicy }}
          env:
            # Quote env values: env vars must be strings, and unquoted scalars
            # such as "true" or "0" would otherwise be coerced to bool/int by
            # YAML and rejected by the API server.
            - name: "RUST_LOG"
              value: {{ .Values.RUST_LOG | quote }}
            - name: "SERVICE"
              value: {{ .Values.SERVICE | quote }}
          ports:
            - name: http
              containerPort: 3000
              protocol: TCP
          livenessProbe:
            initialDelaySeconds: 5
            httpGet:
              path: /healthy
              port: 3000
          readinessProbe:
            initialDelaySeconds: 5
            httpGet:
              path: /ready
              port: 3000
          resources:
            {{- toYaml .Values.resources | nindent 12 }}
          volumeMounts:
            {{- include "common.secrets.volumeMounts" . | nindent 12 }}
            {{- include "common.config.volumeMounts" . | nindent 12 }}
      volumes:
        {{- include "common.secrets.volumes" . | nindent 8 }}
        {{- include "common.config.volumes" . | nindent 8 }}
      {{- with .Values.nodeSelector }}
      nodeSelector:
        {{- toYaml . | nindent 8 }}
      {{- end }}
      {{- with .Values.affinity }}
      affinity:
        {{- toYaml . | nindent 8 }}
      {{- end }}
      {{- with .Values.tolerations }}
      tolerations:
        {{- toYaml . | nindent 8 }}
      {{- end }}
| 0
|
templates
|
/mono/helm/proxy-poloniex/templates/serviceaccount.yaml
|
{{- if .Values.serviceAccount.create -}}
apiVersion: v1
kind: ServiceAccount
metadata:
  name: {{ include "proxy-poloniex.serviceAccountName" . }}
  labels:
    {{- include "proxy-poloniex.labels" . | nindent 4 }}
  {{- with .Values.serviceAccount.annotations }}
  annotations:
    {{- toYaml . | nindent 4 }}
  {{- end }}
{{- end }}
| 0
|
tests
|
/mono/helm/proxy-poloniex/templates/tests/test-connection.yaml
|
apiVersion: v1
kind: Pod
metadata:
  name: "{{ include "proxy-poloniex.fullname" . }}-test-connection"
  labels:
    {{- include "proxy-poloniex.labels" . | nindent 4 }}
  annotations:
    # Only created by `helm test`, never by a normal install/upgrade.
    "helm.sh/hook": test
spec:
  containers:
    - name: wget
      image: busybox
      command: ['wget']
      args: ['{{ include "proxy-poloniex.fullname" . }}:{{ .Values.service.port }}']
  restartPolicy: Never
| 0
|
strategy_manager
|
/mono/crates/strategy_manager/Cargo.toml
|
[package]
name = "strategy-manager"
version = "0.1.0"
edition = "2021"
authors = ["Justin"]
description = "Strategy Manager"
[[bin]]
name = "strategy-manager"
path = "src/main.rs"
[dependencies]
exchange-client = { path = "../exchange_client" }
fdr-common = { path = "../fdr_common" }
fdr-http = { path = "../fdr_http" }
fdr-event = { path = "../fdr_event" }
fdr-store = { path = "../fdr_store" }
strategies = { path = "../strategies" }
strat-common = { path = "../strat_common" }
anyhow = { workspace = true }
async-trait = { workspace = true }
axum = { workspace = true }
chrono = { workspace = true }
clap = { workspace = true, features = ["derive", "env", "cargo"] }
derive_more = { workspace = true }
http = { workspace = true }
http-body = { workspace = true }
hyper = { workspace = true, features = ["full"] }
indexmap = { workspace = true }
prometheus = { workspace = true }
rdkafka = { workspace = true }
rust_decimal = { workspace = true, features = ["serde", "db-postgres"] }
rust_decimal_macros = { workspace = true }
serde = { workspace = true, features = ["derive"] }
serde_json = { workspace = true }
slog = { workspace = true, features = ["max_level_trace"] }
sqlx = { workspace = true, features = [
"runtime-tokio-native-tls",
"postgres",
"json",
"migrate",
"time",
"uuid",
"rust_decimal",
] }
thiserror = { workspace = true }
tokio = { workspace = true, features = ["full", "tracing"] }
toml = { workspace = true }
tower = { workspace = true, features = ["full"] }
tower-http = { workspace = true, features = ["full"] }
tracing = { workspace = true, default-features = false, features = [
"std",
"attributes",
] }
tracing-opentelemetry = { workspace = true, default-features = true }
url = { workspace = true }
uuid = { workspace = true, features = ["v4", "fast-rng", "serde"] }
| 0
|
strategy_manager
|
/mono/crates/strategy_manager/README.md
|
# Strategy Manager

| 0
|
src
|
/mono/crates/strategy_manager/src/exchange_adapter.rs
|
use async_trait::async_trait;
use exchange_client::{
realm::{market::ExchangeAdapterMarketTrait, user_data::ExchangeAdapterUserDataTrait},
ExchangeAdapter,
};
use fdr_common::crypto::assets::CurrencyPair;
use crate::error::StrategyManagerResult;
/// Exchange-adapter surface required by the strategy manager: the market and
/// user-data capabilities of an adapter plus a backtest-only `noop` request.
#[async_trait]
pub trait StrategyManagerExchangeAdapterTrait:
ExchangeAdapterUserDataTrait + ExchangeAdapterMarketTrait + Send + Sync + 'static
{
/// No-op request; used by backtests to advance the event pipeline when a
/// strategy produced no real requests (see `StrategyManager::update_data`).
async fn noop(&self, pair: CurrencyPair) -> StrategyManagerResult<()>;
}
// The real exchange adapter deliberately panics on `noop`: the call is only
// meaningful for the backtest adapter and must never reach a live exchange.
#[async_trait]
impl StrategyManagerExchangeAdapterTrait for ExchangeAdapter {
async fn noop(&self, _pair: CurrencyPair) -> StrategyManagerResult<()> {
panic!("the real exchange adapter does not support noop requests")
}
}
| 0
|
src
|
/mono/crates/strategy_manager/src/error.rs
|
use std::io;
/// Unified error type for the strategy-manager binary. Most variants are
/// `transparent` wrappers so the source error's message and chain are kept.
#[derive(thiserror::Error, Debug)]
pub enum StrategyManagerError {
#[error(transparent)]
Strategy(#[from] strategies::error::StrategyError),
#[error(transparent)]
Fdr(#[from] fdr_common::error::FdrError),
#[error(transparent)]
FdrEvent(#[from] fdr_event::FdrEventError),
#[error(transparent)]
ParseAddr(#[from] std::net::AddrParseError),
#[error(transparent)]
Hyper(#[from] hyper::Error),
#[error(transparent)]
Io(#[from] io::Error),
/// Backtest-only failure raised while processing a `noop` request.
#[error("Noop error occurred during backtest: {0}")]
Noop(String),
}
/// Crate-wide result alias.
pub type StrategyManagerResult<T> = Result<T, StrategyManagerError>;
| 0
|
src
|
/mono/crates/strategy_manager/src/main.rs
|
use strategy_manager::build::build_strategy_manager;
// Binary entry point: all wiring lives in `build_strategy_manager`, which only
// returns once the task manager has finished running.
#[tokio::main]
async fn main() -> anyhow::Result<()> {
Ok(build_strategy_manager().await?)
}
| 0
|
src
|
/mono/crates/strategy_manager/src/build.rs
|
use crate::{init::StrategyManagerInitTrait, Context, StrategyManager, StrategyManagerConfig};
use async_trait::async_trait;
use axum::Router;
use fdr_common::{
context::FdrServiceContext,
task::{DefaultSignalHandler, FdrTask, FdrTaskManagerBuilder, FdrTaskResult, ShutdownType},
tracing::init_tracing,
};
use slog::info;
use std::sync::Arc;
use strategies::{error::StrategyResult, Strategy};
use crate::error::{StrategyManagerError, StrategyManagerResult};
use fdr_common::utils::cli::{Args, Parser};
use fdr_http::Server;
/// Parse CLI args, build the service context and all tasks (strategies,
/// manager loops, HTTP server, signal handler), then run the task manager
/// until shutdown.
pub async fn build_strategy_manager() -> StrategyManagerResult<()> {
let args: Args<StrategyManagerConfig> = Args::parse();
let ctx = Arc::new(Context::from_config(args.config.clone()).await?);
let (strategies, manager) = build_app(args.config.clone(), ctx.clone()).await?;
info!(ctx.logger, "{:?}", args.config);
// Tracing export is only initialized for live runs, not backtests.
if !args.config.back_test {
init_tracing(args.config.tracing.clone());
}
info!(ctx.logger, "Strategy manager service started"; "version" => env!("CARGO_PKG_VERSION"));
let mut task_manager_builder = FdrTaskManagerBuilder::new(ctx.logger.clone());
// One long-running task per configured strategy.
for strategy in strategies {
task_manager_builder = task_manager_builder.add_task(StrategyTask(strategy));
}
let mut task_manager = task_manager_builder
.add_task(StrategyManagerTask(manager.clone()))
.add_task(StrategyManagerReceiverTask(manager.clone()))
.add_task(HttpServerTask(ctx.clone()))
.add_task(DefaultSignalHandler(ctx.logger.clone()))
.build();
task_manager.run().await;
Ok(())
}
/// Task wrapper that drives a single strategy's change-subscription loop.
struct StrategyTask(Strategy<Context>);
#[async_trait]
impl FdrTask<StrategyManagerError> for StrategyTask {
async fn run(mut self: Box<Self>) -> FdrTaskResult<StrategyManagerError> {
Ok(self.0.subscribe_to_changes().await?)
}
fn name(&self) -> String {
format!("Strategy {}", self.0)
}
fn shutdown_type(&self) -> ShutdownType {
// Managed: the task manager is responsible for stopping this task.
ShutdownType::Managed
}
}
/// Task wrapper around the manager's main event-consume loop (`start`).
pub struct StrategyManagerTask(Arc<StrategyManager>);
#[async_trait]
impl FdrTask<StrategyManagerError> for StrategyManagerTask {
async fn run(self: Box<Self>) -> FdrTaskResult<StrategyManagerError> {
Ok(self.0.start().await?)
}
fn name(&self) -> String {
"Strategy Manager".to_string()
}
fn shutdown_type(&self) -> ShutdownType {
ShutdownType::Managed
}
}
/// Task wrapper around the manager's strategy-request receive loop
/// (`subscribe_to_requests`).
pub struct StrategyManagerReceiverTask(Arc<StrategyManager>);
#[async_trait]
impl FdrTask<StrategyManagerError> for StrategyManagerReceiverTask {
async fn run(self: Box<Self>) -> FdrTaskResult<StrategyManagerError> {
Ok(self.0.subscribe_to_requests().await?)
}
fn name(&self) -> String {
"Strategy Manager Receiver".to_string()
}
fn shutdown_type(&self) -> ShutdownType {
ShutdownType::Managed
}
}
/// Task that runs the service's HTTP server on the configured listen
/// address/port with a default (empty) router.
pub struct HttpServerTask(Arc<Context>);
#[async_trait]
impl FdrTask<StrategyManagerError> for HttpServerTask {
async fn run(self: Box<Self>) -> FdrTaskResult<StrategyManagerError> {
Server::serve(
self.0.clone(),
self.0.config.server.listen_address.clone(),
self.0.config.server.port,
Router::default(),
)
.await
}
fn name(&self) -> String {
"HTTP Server".to_string()
}
fn shutdown_type(&self) -> ShutdownType {
ShutdownType::Managed
}
}
/// Build the `StrategyManager` (with initialized state) plus one `Strategy`
/// instance per entry in `config.strategies`.
async fn build_app(
config: StrategyManagerConfig,
ctx: Arc<Context>,
) -> StrategyResult<(Vec<Strategy<Context>>, Arc<StrategyManager>)> {
let mut strategies = vec![];
let strategy_manager = StrategyManager::new(ctx.clone())?;
strategy_manager.init_state().await?;
// Build Strategies
for config in config.strategies {
strategies.push(Strategy::new(ctx.clone(), config, ctx.pool.clone()).await?);
}
// Set the state to the one we just built
Ok((strategies, Arc::new(strategy_manager)))
}
| 0
|
src
|
/mono/crates/strategy_manager/src/getters.rs
|
use crate::Context;
use async_trait::async_trait;
use rust_decimal::Decimal;
use tokio::sync::{watch, RwLockReadGuard};
use fdr_common::{
crypto::{book::Book, pairs::FeeData},
exchange::Market,
utils::channel::Channel,
};
use fdr_event::events::FdrEvent;
use strat_common::{
crypto::market::MarketState,
state_managers::{
book_manager::BookManagerGetters,
channel_manager::ChannelManagerGetters,
market_manager::MarketManagerGetters,
trade_volume_manager::{Fees, TradeVolumeManagerGetters},
},
};
// Delegation impls: expose the context's state managers through the getter
// traits consumed by strategies. Every method forwards to the corresponding
// manager field on `Context` without additional logic.
#[async_trait]
impl MarketManagerGetters for Context {
async fn get_market(&self, market: &Market) -> Option<RwLockReadGuard<'_, MarketState>> {
self.market_manager.get_market(market).await
}
async fn get_market_balance(&self, market: &Market) -> Option<(Decimal, Decimal)> {
self.market_manager.get_market_balance(market).await
}
}
#[async_trait]
impl BookManagerGetters for Context {
async fn get_book(&self, market: &Market) -> Option<Book> {
self.order_book_manager.get_book(market).await
}
}
#[async_trait]
impl TradeVolumeManagerGetters for Context {
async fn get_taker_fee_data(&self, market: &Market) -> Option<FeeData> {
self.trade_volume_manager.get_taker_fee_data(market).await
}
async fn get_taker_fee(&self, market: &Market) -> Decimal {
self.trade_volume_manager.get_taker_fee(market).await
}
async fn get_fees(&self, market: &Market) -> Fees {
self.trade_volume_manager.get_fees(market).await
}
async fn get_maker_fee_data(&self, market: &Market) -> Option<FeeData> {
self.trade_volume_manager.get_maker_fee_data(market).await
}
async fn get_maker_fee(&self, market: &Market) -> Decimal {
self.trade_volume_manager.get_maker_fee(market).await
}
async fn get_volume(&self) -> Decimal {
self.trade_volume_manager.get_volume().await
}
}
impl ChannelManagerGetters for Context {
fn get_ticker_receiver(&self, market: &Market) -> Option<watch::Receiver<()>> {
self.channel_manager.get_ticker_receiver(market)
}
fn get_book_receiver(&self, market: &Market) -> Option<watch::Receiver<()>> {
self.channel_manager.get_book_receiver(market)
}
fn get_event_channel(&self) -> Channel<FdrEvent> {
self.channel_manager.get_event_channel()
}
}
| 0
|
src
|
/mono/crates/strategy_manager/src/config.rs
|
use std::collections::HashMap;
use serde::{Deserialize, Serialize};
use uuid::Uuid;
use fdr_common::{
config::{ExchangeConfiguration, MetricsConfig, ServerConfig},
crypto::assets::CurrencyPair,
exchange::Exchange,
tracing::config::TracingConfig,
};
use fdr_event::kafka::{consumer::KafkaConsumerConfig, producer::KafkaProducerConfig};
use fdr_store::config::PostgresConfig;
use strat_common::crypto::market::MarketConfig;
use strategies::config::StrategyConfigKind;
/// Per-event-kind toggles controlling which events the manager forwards to
/// strategy subscribers (see `StrategyManager::handle_event`).
#[derive(Debug, Clone, Serialize, Deserialize)]
pub struct NotifySubscribersConfig {
pub ticker: bool,
pub book: bool,
pub order_cancelled: bool,
pub trade: bool,
pub alert: bool,
pub order_placed: bool,
pub order_error: bool,
}
/// Top-level configuration for the strategy-manager service, parsed from the
/// CLI/config file via `Args<StrategyManagerConfig>`.
#[derive(Debug, Clone, Serialize, Deserialize)]
pub struct StrategyManagerConfig {
/// optional b/c backtest won't use Kafka
pub kafka_consumer: Option<KafkaConsumerConfig>,
pub kafka_producer: Option<KafkaProducerConfig>,
/// optional b/c backtest won't use REST
pub exchange_adapter: Option<Vec<ExchangeConfiguration>>,
pub strategies: Vec<StrategyConfigKind>,
pub metrics: MetricsConfig,
pub server: ServerConfig,
pub tracing: TracingConfig,
pub notify_subscribers: NotifySubscribersConfig,
/// Exchange -> pair -> market configuration; the flattened key set defines
/// every market the service tracks.
pub markets: HashMap<Exchange, HashMap<CurrencyPair, MarketConfig>>,
pub postgres: PostgresConfig,
#[serde(default)]
pub use_market_balance: bool,
/// When true, skips tracing init and enables the backtest `noop` path.
#[serde(default)]
pub back_test: bool,
/// Identifier tying persisted results back to a particular run.
pub run_id: Uuid,
}
| 0
|
src
|
/mono/crates/strategy_manager/src/lib.rs
|
use std::{collections::HashMap, sync::Arc};
use rdkafka::{error::KafkaError, types::RDKafkaErrorCode};
use slog::{debug, error, info, trace, warn};
use fdr_common::{
error::FdrError,
exchange::{Exchange, Market},
ticker::FdrTickerUpdate,
};
use fdr_event::{
events::{FdrAlert, FdrBookUpdate, FdrEvent, FdrOrderUpdate, StrategyUpdate},
FdrEventError,
};
use strat_common::{
crypto::market::{MarketStatus, UpdateRequest, UpdateRequests},
state_managers::book_manager::BookManagerGetters,
};
use strategies::error::{StrategyError, StrategyResult};
use crate::{config::StrategyManagerConfig, context::Context, error::StrategyManagerResult};
pub mod build;
pub mod config;
pub mod context;
pub mod error;
pub mod exchange_adapter;
pub mod getters;
pub mod init;
/// Central event router: consumes FDR events, maintains market/book state via
/// the shared context, and relays strategy requests back out as events.
pub struct StrategyManager {
ctx: Arc<Context>,
}
impl StrategyManager {
/// Construct a manager over a fully-built service context.
pub fn new(ctx: Arc<Context>) -> StrategyResult<Self> {
Ok(Self { ctx })
}
/// Main consume loop: receives FDR events and dispatches them to
/// `handle_event`. Only returns when the consumer errors, propagating that
/// error to the task manager.
pub async fn start(&self) -> StrategyResult<()> {
info!(self.ctx.logger, "Strategy Manager Started"; "version" => env!("CARGO_PKG_VERSION"));
loop {
match self.ctx.event_consumer.recv_fdr_event_with_metadata().await {
Err(error) => {
// An unknown-topic error usually means a configured topic is missing
// from the broker; log exactly which ones before surfacing the error.
if let FdrEventError::Kafka(KafkaError::MessageConsumption(
RDKafkaErrorCode::UnknownTopicOrPartition,
)) = error
{
if let (Some(consumer_config), Ok(topics)) =
(&self.ctx.config.kafka_consumer, self.ctx.event_consumer.get_topics())
{
consumer_config.topics.iter().for_each(|topic| {
if !topics.contains(topic) {
error!(self.ctx.logger, "Unable to subscribe to missing Kafka topic"; "topic" => topic.as_str());
}
});
}
}
return Err(StrategyError::FdrKafka(error));
}
Ok(event) => {
// Per-event failures are logged and swallowed so the loop keeps running.
let _ = self.handle_event(event.event).await.map_err(|e| {
warn!(self.ctx.logger, "error handling received event"; "error" => format!("{:?}", e));
});
}
}
}
}
/// Route a single event to the matching handler and, depending on the
/// `notify_subscribers` config, fan it out to strategy subscribers.
async fn handle_event(&self, event: FdrEvent) -> StrategyResult<()> {
match &event {
FdrEvent::Order(FdrOrderUpdate::Trade(trade)) => {
debug!(self.ctx.logger, "received trade event"; "event" => format!("{trade:?}"));
if self.ctx.config.notify_subscribers.trade {
self.notify_subscribers(trade.market, event).await?;
}
}
// Filled orders are only logged; no subscriber notification path exists here.
FdrEvent::Order(FdrOrderUpdate::Filled(filled_order)) => {
trace!(self.ctx.logger, "received order filled event"; "event" => format!("{filled_order:?}"));
}
FdrEvent::Order(FdrOrderUpdate::Canceled(cancelled_order)) => {
debug!(self.ctx.logger, "received order cancelled"; "event" => format!("{cancelled_order:?}"));
if self.ctx.config.notify_subscribers.order_cancelled {
self.notify_subscribers(cancelled_order.market, event.clone()).await?;
}
}
FdrEvent::Order(FdrOrderUpdate::Placed(placed_order)) => {
if self.ctx.config.notify_subscribers.order_placed {
self.notify_subscribers(placed_order.market, event).await?;
}
}
FdrEvent::Order(FdrOrderUpdate::Error(error)) => {
warn!(self.ctx.logger, "received order error event"; "event" => format!("{error:?}"));
if self.ctx.config.notify_subscribers.order_error {
self.notify_subscribers(error.market, event.clone()).await?;
}
}
FdrEvent::Book(book_update) => {
//trace!(self.ctx.logger, "received an order book event"; "event" => format!("{order_book:?}"));
let market = Market {
pair: book_update.pair,
exchange: book_update.exchange,
};
// Per-market sync flag: stays false until the consumer has been seeked
// past the offset where this market's book is known to be consistent.
let mut sync_state_guard = match self.ctx.markets.get(&market) {
Some(market_synced) => Some(market_synced.write().await),
_ => None,
}
.ok_or_else(|| StrategyError::MissingOrderBook(book_update.pair))?;
let sync_complete = &*sync_state_guard;
match sync_complete {
false => {
let topic = event.topic(book_update.exchange);
let offset = self
.ctx
.order_book_manager
.get_book(&market)
.await
.and_then(|book| book.sync_required);
if let Some(offset) = offset {
// NOTE(review): a seek failure panics this task; consider
// propagating the error instead of unwrapping.
self.ctx.event_consumer.seek(topic.as_str(), offset + 1).unwrap();
debug!(
self.ctx.logger,
"first book update for pair... setting topic offset";
"topic" => %topic,
"offset" => offset,
);
*sync_state_guard = true;
}
}
true => {
self.handle_order_book_event(book_update).await?;
if self.ctx.config.notify_subscribers.book && self.ctx.initializer.is_initialized().await {
self.notify_subscribers(market, event).await?;
}
}
}
}
FdrEvent::Ticker(ticker) => {
trace!(self.ctx.logger, "received a ticker event"; "event" => format!("{ticker:?}"));
self.handle_ticker_event(ticker).await?;
if self.ctx.config.notify_subscribers.ticker {
self.notify_subscribers(
Market {
pair: ticker.pair,
exchange: ticker.exchange,
},
event,
)
.await?;
}
}
// TODO(https://gitlab.com/fdr-group/mono/-/issues/28): Send the alert to the subscribers
FdrEvent::Alert(alert) => self.handle_alert_event(alert).await?,
_ => {
warn!(self.ctx.logger, "received unknown event"; "event" => format!("{event:?}"));
}
};
Ok(())
}
/// Apply an incremental ask/bid update to the in-memory book for the
/// event's market.
async fn handle_order_book_event(&self, book_update: &FdrBookUpdate) -> StrategyResult<()> {
self.ctx
.order_book_manager
.update_book(
Market {
pair: book_update.pair,
exchange: book_update.exchange,
},
&book_update.asks,
&book_update.bids,
None,
)
.await;
Ok(())
}
/// Record the latest ticker in the market manager.
async fn handle_ticker_event(&self, ticker: &FdrTickerUpdate) -> StrategyResult<()> {
self.ctx.market_manager.update_last_ticker(ticker).await;
Ok(())
}
/// Map alert transitions to market status: firing markets become `Volatile`,
/// resolved markets go back to `Online`.
async fn handle_alert_event(&self, alert: &FdrAlert) -> StrategyResult<()> {
trace!(self.ctx.logger, "received alert event"; "event" => format!("{alert:?}"));
for market in &alert.details.firing {
self.ctx
.market_manager
.update_status(market, MarketStatus::Volatile)
.await;
}
for market in &alert.details.resolved {
self.ctx
.market_manager
.update_status(market, MarketStatus::Online)
.await;
}
Ok(())
}
/// Listen on the strategies' request channel forever. Failures (bad batch or
/// receive error) are logged rather than propagated so one bad request does
/// not stop the loop.
pub async fn subscribe_to_requests(&self) -> StrategyResult<()> {
debug!(self.ctx.logger, "Subscribing to requests");
let mut receiver = self.ctx.requests_channel.rx();
loop {
match receiver.recv().await {
Ok(requests) => {
if let Err(e) = self.update_data(requests).await {
warn!(self.ctx.logger, "Failed to handle requests: {:?}", e);
}
}
Err(e) => {
warn!(self.ctx.logger, "Failed to receive requests: {:?}", e);
}
}
}
}
/// Translate a batch of strategy `UpdateRequests` into per-market
/// `StrategyUpdate` events and publish them; in backtest mode an empty batch
/// instead triggers a `noop` so the backtest pipeline keeps advancing.
async fn update_data(&self, update_requests: UpdateRequests) -> StrategyManagerResult<()> {
let mut strategy_updates: HashMap<Market, Vec<StrategyUpdate>> = HashMap::new();
for request in update_requests.requests {
match request {
UpdateRequest::Order(order) => {
let update = StrategyUpdate::OrderCreated(order.id);
self.add_strategy_update(order.market, update, &mut strategy_updates);
debug!(self.ctx.logger, "Sending Order for Creation: {:?}", order);
}
UpdateRequest::Cancel(order) => {
let update = StrategyUpdate::OrderCancellationRequested(order.id);
self.add_strategy_update(order.market, update, &mut strategy_updates);
debug!(self.ctx.logger, "Cancelling Order: {:?}", order.id);
}
UpdateRequest::UpdateLastHit(market, last_hit) => {
self.ctx.market_manager.update_last_hit(&market, last_hit).await;
}
}
}
if !strategy_updates.is_empty() {
self.send_requests(strategy_updates).await?;
} else {
// This case should only happen when the strat has no requests to make, so it is in backtest mode.
// This will force a new ticker otherwise a ticker will be sent after all the requests have been handled.
// This is done this way to avoid race conditions.
if self.ctx.config.back_test {
trace!(self.ctx.logger, "sending no op to sender"; "pair" => format!("{0:?}", update_requests.pair));
// When backtest starts working on a strategy that works across multiple exchanges we'll have to update
// the UpdateRequest to supply a market instead of just a pair
// NOTE(review): unwrap assumes a Backtest adapter is always registered
// when back_test is set; consider surfacing an error instead.
self.ctx
.get_exchange_adapter(Exchange::Backtest)
.unwrap()
.noop(update_requests.pair)
.await?;
}
}
Ok(())
}
/// Publish each market's batched strategy updates as one event on that
/// exchange's topic.
async fn send_requests(&self, strategy_updates: HashMap<Market, Vec<StrategyUpdate>>) -> StrategyResult<()> {
for (market, updates) in strategy_updates.into_iter() {
if !updates.is_empty() {
let fdr_event = FdrEvent::Strategy(updates);
self.ctx
.event_producer
.publish_fdr_event(fdr_event.topic(market.exchange).as_str(), None, &fdr_event)
.await?;
}
}
Ok(())
}
/// Append `update` to the per-market batch, creating the batch on first use.
///
/// Uses the `HashMap` entry API: a single hash lookup instead of the original
/// get-then-insert double lookup, with identical resulting map contents.
fn add_strategy_update(
&self,
market: Market,
update: StrategyUpdate,
strategy_updates: &mut HashMap<Market, Vec<StrategyUpdate>>,
) {
strategy_updates.entry(market).or_default().push(update);
}
/// Forward `event` to strategy subscribers: book updates go through the
/// per-market book channel, everything else through the shared event channel.
/// Channel send failures are collapsed into `PublisherError`.
async fn notify_subscribers(&self, market: Market, event: FdrEvent) -> StrategyResult<()> {
match event {
FdrEvent::Book(_) => self.ctx.channel_manager.send_book_update(&market),
_ => self.ctx.channel_manager.send_event_update(event),
}
.map_err(|_| StrategyError::FdrError(FdrError::PublisherError))
}
}
| 0
|
src
|
/mono/crates/strategy_manager/src/context.rs
|
use async_trait::async_trait;
use exchange_client::ExchangeAdapter;
use prometheus::Registry;
use slog::Logger;
use std::{collections::HashMap, hash::RandomState, sync::Arc};
use tokio::sync::{
broadcast::{Receiver, Sender},
RwLock,
};
use uuid::Uuid;
use fdr_common::{exchange::Market, logging::default_logger};
use fdr_common::{
context::FdrServiceContext, crypto::assets::CurrencyPair, exchange::Exchange, utils::channel::Channel,
};
use fdr_event::{
events::FdrEvent,
kafka::{consumer::KafkaConsumer, producer::KafkaProducer, KafkaContext},
FdrEventConsumer, FdrEventProducer,
};
use fdr_store::core::pool::{init_postgres_pool_from_config, Pool};
use strat_common::{
crypto::market::{MarketConfig, MarketState, UpdateRequests},
metrics::StrategyMetrics,
state_managers::{
book_manager::BookManager, channel_manager::ChannelManager, market_manager::MarketManager,
strategy_manager::StrategyGetterTrait, trade_volume_manager::TradeVolumeManager,
},
};
use strategies::error::{StrategyError, StrategyResult};
use crate::{
exchange_adapter::StrategyManagerExchangeAdapterTrait,
init::strategy_manager_initializer::StrategyManagerInitializer, StrategyManagerConfig,
};
/// Shared service state handed to every task and strategy.
pub struct Context {
pub config: StrategyManagerConfig,
/// Per-market "book sync complete" flag; all start false and flip to true
/// once the consumer has been seeked to the book's sync offset.
pub(crate) markets: HashMap<Market, RwLock<bool>>,
pub(crate) metrics: StrategyMetrics,
pub(crate) logger: Logger,
pub(crate) event_consumer: Arc<dyn FdrEventConsumer + Send + Sync + 'static>,
pub(crate) event_producer: Arc<dyn FdrEventProducer + Send + Sync + 'static>,
/// One adapter per configured exchange (a single Backtest adapter in the
/// `from_config_with_channels` path).
pub(crate) exchange_adapters: HashMap<Exchange, Arc<dyn StrategyManagerExchangeAdapterTrait>>,
pub(crate) trade_volume_manager: TradeVolumeManager,
pub(crate) market_manager: MarketManager,
pub(crate) order_book_manager: BookManager,
pub(crate) channel_manager: ChannelManager,
/// Strategies -> manager: update requests produced by strategies.
pub(crate) requests_channel: Channel<UpdateRequests>,
// TODO: Only here until we figure out the solution for arbitrage also
pub(crate) update_channel: Channel<FdrEvent>,
pub(crate) initializer: StrategyManagerInitializer,
pub pool: Arc<Pool>,
}
impl Context {
/// All currency pairs configured for `exchange`.
pub fn pairs(&self, exchange: Exchange) -> Vec<CurrencyPair> {
let mut pairs = Vec::new();
for market in self.markets.keys() {
if market.exchange == exchange {
pairs.push(market.pair);
}
}
pairs
}
/// Look up the adapter registered for `exchange`, cloning the `Arc` handle.
pub fn get_exchange_adapter(&self, exchange: Exchange) -> Option<Arc<dyn StrategyManagerExchangeAdapterTrait>> {
self.exchange_adapters.get(&exchange).map(Arc::clone)
}
}
#[async_trait]
impl FdrServiceContext<StrategyManagerConfig> for Context {
type Context = Context;
type Error = StrategyError;
/// Build the full live-trading context from configuration: exchange
/// adapters, Kafka consumer/producer, state managers, channels, metrics
/// and the Postgres pool. Backtests use `from_config_with_channels` instead.
async fn from_config(config: StrategyManagerConfig) -> Result<Self::Context, Self::Error> {
let logger = default_logger();
let strategy_manager_initializer = StrategyManagerInitializer::new(config.markets.keys().cloned().collect());
// Flatten the exchange -> pair config map into a flat list of markets.
let markets: Vec<Market> = config
.clone()
.markets
.into_iter()
.flat_map(|(exchange, configs)| {
configs
.into_keys()
.map(|pair| Market { pair, exchange })
.collect::<Vec<Market>>()
})
.collect();
let exchange_adapters = match &config.exchange_adapter {
Some(exchange_config) => exchange_config
.iter()
.map(|config| {
(
config.exchange,
Arc::new(ExchangeAdapter::new(
config.exchange,
config.strategy,
config.url.clone(),
config.port,
)) as Arc<dyn StrategyManagerExchangeAdapterTrait>,
)
})
.collect::<HashMap<Exchange, Arc<dyn StrategyManagerExchangeAdapterTrait>>>(),
None => {
return Err(StrategyError::BadConfig(
"At least one exchange config must be set".to_string(),
));
}
};
// Live runs require both Kafka endpoints; they are only optional in the
// config type because backtests skip Kafka entirely.
let (Some(kafka_consumer_config), Some(kafka_producer_config)) =
(config.kafka_consumer.as_ref(), config.kafka_producer.as_ref())
else {
return Err(StrategyError::BadConfig(
"Kafka consumer and producer must be set".to_string(),
));
};
let event_consumer = Arc::new(KafkaConsumer::new(
kafka_consumer_config,
KafkaContext::with_logger(logger.clone()),
)?);
let event_producer = Arc::new(KafkaProducer::new(
kafka_producer_config,
KafkaContext::with_logger(logger.clone()),
)?);
let order_book_manager = BookManager::new(&markets, 10);
let trade_volume_manager = TradeVolumeManager::default();
let market_manager = get_market_manager(&config.markets);
let requests_channel = Channel::new(50);
let update_channel = Channel::new(50);
let channel_manager = ChannelManager::new(markets.clone());
let metrics = StrategyMetrics::new(config.metrics.prefix.as_str())?;
// Re-key the market list into per-market sync flags (all initially false).
let markets: HashMap<Market, RwLock<bool>> = markets
.iter()
.cloned()
.map(|market| (market, RwLock::new(false)))
.collect();
let pool = Arc::new(init_postgres_pool_from_config(&config.postgres).await?);
Ok(Context {
config,
markets,
logger,
event_consumer,
event_producer,
exchange_adapters,
trade_volume_manager,
market_manager,
order_book_manager,
channel_manager,
requests_channel,
update_channel,
metrics,
initializer: strategy_manager_initializer,
pool,
})
}
fn logger(&self) -> Logger {
self.logger.clone()
}
fn metrics_registry(&self) -> Arc<Registry> {
self.metrics.registry.clone()
}
}
/// Read-only accessors used by strategies to reach shared context state.
impl StrategyGetterTrait for Context {
    /// New receiver on the broadcast-style update channel (capacity 50).
    fn get_update_receiver(&self) -> Receiver<FdrEvent> {
        self.update_channel.rx()
    }
    /// Sender half of the strategy request channel.
    fn get_request_sender(&self) -> Sender<UpdateRequests> {
        self.requests_channel.tx()
    }
    fn logger(&self) -> &Logger {
        &self.logger
    }
    /// Unique id of this run, taken verbatim from config.
    fn get_run_id(&self) -> Uuid {
        self.config.run_id
    }
    /// The raw exchange -> pair -> market config map from the loaded config.
    fn get_config_markets(&self) -> &HashMap<Exchange, HashMap<CurrencyPair, MarketConfig, RandomState>, RandomState> {
        &self.config.markets
    }
    fn get_metrics_provider(&self) -> &StrategyMetrics {
        &self.metrics
    }
}
impl Context {
    /// Test/backtest constructor: like `from_config` but the caller injects the
    /// Postgres pool, logger, event consumer/producer, and a single exchange
    /// adapter instead of building them from config. The injected adapter is
    /// registered under `Exchange::Backtest` regardless of configured markets.
    pub async fn from_config_with_channels(
        config: StrategyManagerConfig,
        pool: Arc<Pool>,
        logger: Logger,
        event_consumer: Arc<dyn FdrEventConsumer + Send + Sync + 'static>,
        event_producer: Arc<dyn FdrEventProducer + Send + Sync + 'static>,
        exchange_adapter: Arc<dyn StrategyManagerExchangeAdapterTrait>,
    ) -> StrategyResult<Context> {
        let strategy_manager_initializer = StrategyManagerInitializer::new(config.markets.keys().cloned().collect());
        // Flatten the nested exchange -> (pair -> config) map into a flat market list.
        let markets: Vec<Market> = config
            .clone()
            .markets
            .into_iter()
            .flat_map(|(exchange, configs)| {
                configs
                    .into_keys()
                    .map(|pair| Market { pair, exchange })
                    .collect::<Vec<Market>>()
            })
            .collect();
        // Single-entry adapter map keyed by the synthetic Backtest exchange.
        let exchange_adapters = [(Exchange::Backtest, exchange_adapter)].into_iter().collect();
        let market_manager = get_market_manager(&config.markets);
        // Book depth of 10 levels per side — mirrors `from_config`.
        let order_book_manager = BookManager::new(&markets, 10);
        let trade_volume_manager = TradeVolumeManager::default();
        let requests_channel = Channel::new(50);
        let update_channel = Channel::new(50);
        let channel_manager = ChannelManager::new(markets.clone());
        let metrics = StrategyMetrics::new(config.metrics.prefix.as_str())?;
        // Per-market readiness flags, all starting as "not ready" (false).
        let markets: HashMap<Market, RwLock<bool>> = markets
            .iter()
            .cloned()
            .map(|market| (market, RwLock::new(false)))
            .collect();
        Ok(Context {
            config,
            markets,
            logger,
            event_consumer,
            event_producer,
            exchange_adapters,
            trade_volume_manager,
            market_manager,
            order_book_manager,
            channel_manager,
            requests_channel,
            update_channel,
            metrics,
            initializer: strategy_manager_initializer,
            pool,
        })
    }
}
/// Build a `MarketManager` holding one default-initialized `MarketState`
/// per (exchange, pair) combination present in the config map.
fn get_market_manager(markets: &HashMap<Exchange, HashMap<CurrencyPair, MarketConfig>>) -> MarketManager {
    let mut market_states: HashMap<Market, RwLock<MarketState>> = HashMap::new();
    for (exchange, configs) in markets {
        for (pair, config) in configs {
            let market = Market {
                pair: *pair,
                exchange: *exchange,
            };
            // Trading-pair metadata and extra state start as defaults; they are
            // replaced later during init (see handle_get_pairs_response).
            let state = MarketState::new(config.clone(), market, &Default::default(), Default::default());
            market_states.insert(market, RwLock::new(state));
        }
    }
    MarketManager::new(market_states)
}
| 0
|
init
|
/mono/crates/strategy_manager/src/init/mod.rs
|
use async_trait::async_trait;
use exchange_client::realm::{
market::{get_book::GetBookResponse, get_pair::GetPairsResponse},
user_data::get_trade_volume::{GetTradeVolumeRequest, GetTradeVolumeResponse},
};
use fdr_common::{
crypto::{assets::CurrencyPair, pairs::FeeData},
exchange::{Exchange, Market},
};
use fdr_event::events::FdrBookEntry;
use slog::{error, info};
use std::collections::HashMap;
use strat_common::{crypto::market::MarketState, state_managers::strategy_manager::StrategyGetterTrait};
use strategies::error::{StrategyError, StrategyResult};
use crate::StrategyManager;
pub mod strategy_manager_initializer;
/// Startup-state bootstrap for the strategy manager: fetch books, pairs and
/// trade volume from every exchange adapter and load them into the context.
#[async_trait]
pub trait StrategyManagerInitTrait {
    /// Request and apply startup data for every configured exchange.
    async fn init_state(&self) -> StrategyResult<()>;
    /// Apply a trade-volume (fee) snapshot for one exchange.
    async fn handle_trade_volume_response(
        &self,
        exchange: Exchange,
        trade_volume: GetTradeVolumeResponse,
    ) -> StrategyResult<()>;
    /// Apply an order-book snapshot for one exchange.
    async fn handle_order_book_response(&self, exchange: Exchange, order_book: GetBookResponse) -> StrategyResult<()>;
    /// Apply exchange pair metadata, replacing default market states.
    async fn handle_get_pairs_response(&self, exchange: Exchange, pairs: GetPairsResponse) -> StrategyResult<()>;
}
#[async_trait]
impl StrategyManagerInitTrait for StrategyManager {
    /// Fetch books + pairs concurrently per exchange, validate that every
    /// configured pair exists on the exchange, then fetch trade volume and
    /// apply all three responses. Any failure aborts init with
    /// `StrategyError::Init`.
    async fn init_state(&self) -> StrategyResult<()> {
        // We'll also need to add each exchange to the init check
        for (exchange, adapter) in self.ctx.exchange_adapters.iter() {
            info!(self.ctx.logger, "Requesting init state for exchange: {:?}", exchange);
            let (books, get_pairs_resp) = tokio::try_join!(adapter.get_books(false), adapter.get_pairs(),)
                .map_err(|e| StrategyError::Init(format!("Failed to request startup data from {exchange}: {e}")))?;
            // Validate configured pairs against pairs available on the exchange
            let configured_pairs = self.ctx.pairs(*exchange);
            let mut invalid_pairs = vec![];
            for pair in configured_pairs.iter() {
                if !get_pairs_resp.pairs.contains_key(pair) {
                    error!(self.ctx.logger, "Invalid pair defined in config: {:?}", pair);
                    invalid_pairs.push(pair);
                }
            }
            // Fail fast listing every bad pair at once rather than one at a time.
            if !invalid_pairs.is_empty() {
                let str_pair_list = invalid_pairs
                    .iter()
                    .map(|p| p.to_string())
                    .collect::<Vec<_>>()
                    .join(", ");
                return Err(StrategyError::Init(format!(
                    "Invalid pairs configured: {}",
                    str_pair_list
                )));
            }
            let trade_volume = adapter
                .get_trade_volume(GetTradeVolumeRequest {
                    pairs: configured_pairs,
                })
                .await
                .map_err(|e| StrategyError::Init(format!("Failed to request trade volume from {exchange}: {e}")))?;
            // Order matters: pairs first (creates market states), then books,
            // then fees — each marks its own init flag.
            self.handle_get_pairs_response(*exchange, get_pairs_resp).await?;
            self.handle_order_book_response(*exchange, books).await?;
            self.handle_trade_volume_response(*exchange, trade_volume).await?;
        }
        Ok(())
    }
    /// Swap maker/taker fee maps into the trade-volume manager (keyed by
    /// `Market`, i.e. pair + exchange) and mark trade volume as initialized.
    async fn handle_trade_volume_response(
        &self,
        exchange: Exchange,
        trade_volume: GetTradeVolumeResponse,
    ) -> StrategyResult<()> {
        info!(self.ctx.logger, "Starting trade volume: {:?}", trade_volume);
        // Re-key per-pair fees by Market so multiple exchanges can coexist.
        let map_fees = |fees: HashMap<CurrencyPair, FeeData>| -> HashMap<Market, FeeData> {
            fees.into_iter()
                .map(|(pair, value)| (Market { pair, exchange }, value))
                .collect()
        };
        self.ctx
            .trade_volume_manager
            .swap(
                map_fees(trade_volume.fees_maker.clone()),
                map_fees(trade_volume.fees.clone()),
            )
            .await;
        // Log the maker fee per configured pair, for operator visibility.
        if let Some(pairs) = self.ctx.config.markets.get(&exchange) {
            pairs.iter().for_each(|pair| {
                if let Some(fees) = trade_volume.fees_maker.get(pair.0) {
                    info!(self.ctx.logger(), "Fee data initialized: ";
                        "Exchange" => format!("{exchange}"),
                        "Pair" => format!("{}", pair.0),
                        "Fee" => format!("{}", fees.fee),
                    );
                }
            });
        }
        self.ctx.initializer.set_initialized_trade_volume(exchange).await;
        Ok(())
    }
    /// Seed the order-book manager with snapshot books for every configured
    /// market, then mark order books as initialized for this exchange.
    async fn handle_order_book_response(&self, exchange: Exchange, order_book: GetBookResponse) -> StrategyResult<()> {
        // TODO: Probably can be changed to take a list of pairs it wants books for
        // Drop books for pairs we are not trading on this exchange.
        let order_books = order_book
            .books
            .into_iter()
            .filter_map(|(pair, book_with_meta)| {
                let market = Market { pair, exchange };
                if self.ctx.markets.contains_key(&market) {
                    Some((market, book_with_meta))
                } else {
                    None
                }
            })
            .collect::<HashMap<_, _>>();
        for (market, book) in order_books {
            // A market may legitimately come back with no book (None): skip it.
            if let Some((bids, asks)) = book.book.map(|book| {
                (
                    book.bids
                        .into_iter()
                        .map(|(price, volume)| FdrBookEntry::from((price, volume)))
                        .collect::<Vec<_>>(),
                    book.asks
                        .into_iter()
                        .map(|(price, volume)| FdrBookEntry::from((price, volume)))
                        .collect::<Vec<_>>(),
                )
            }) {
                self.ctx
                    .order_book_manager
                    .update_book(market, &asks, &bids, Some(book.current_offset))
                    .await;
            }
        }
        self.ctx.initializer.set_initialized_order_book(exchange).await;
        Ok(())
    }
    /// Replace each market's default state with one built from the exchange's
    /// real trading-pair metadata, then mark markets as initialized.
    async fn handle_get_pairs_response(
        &self,
        exchange: Exchange,
        pairs_response: GetPairsResponse,
    ) -> StrategyResult<()> {
        // TODO: Probably can be changed to take a list of pairs it wants info back for
        let pairs = pairs_response
            .pairs
            .into_iter()
            .filter(|(pair, _)| self.ctx.pairs(exchange).contains(pair))
            .collect::<HashMap<_, _>>();
        // Create default markets
        for (pair, trading_pair) in pairs {
            let market = Market { pair, exchange };
            let Some(market_configs) = self.ctx.config.markets.get(&exchange) else {
                return Err(StrategyError::UnknownExchange(exchange));
            };
            let Some(market_config) = market_configs.get(&pair) else {
                return Err(StrategyError::UnknownCurrencyPair(pair));
            };
            let market_state = MarketState::new(market_config.clone(), market, &trading_pair, Default::default());
            self.ctx.market_manager.replace_market(market, market_state).await;
        }
        self.ctx.initializer.set_initialized_markets(exchange).await;
        Ok(())
    }
}
| 0
|
init
|
/mono/crates/strategy_manager/src/init/strategy_manager_initializer.rs
|
use std::collections::HashMap;
use fdr_common::exchange::Exchange;
use tokio::sync::RwLock;
/// Async-safe tracker of per-exchange startup progress. Strategies should not
/// run until every tracked exchange reports all three stages complete.
#[derive(Default, Debug)]
pub struct StrategyManagerInitializer {
    // Map of exchange -> stage flags, guarded by a tokio RwLock.
    initializers: RwLock<HashMap<Exchange, StrategyManagerInitializers>>,
}
/// Init-stage flags for a single exchange; all default to `false`.
#[derive(Default, Debug)]
pub struct StrategyManagerInitializers {
    // Fee/trade-volume snapshot loaded.
    pub initialized_trade_volume: bool,
    // Order-book snapshot loaded.
    pub initialized_order_book: bool,
    // Market states built from exchange pair metadata.
    pub initialized_markets: bool,
}
impl StrategyManagerInitializers {
    /// True once every init stage (trade volume, order book, markets) is done.
    pub fn is_initialized(&self) -> bool {
        [
            self.initialized_trade_volume,
            self.initialized_order_book,
            self.initialized_markets,
        ]
        .into_iter()
        .all(|done| done)
    }
}
impl StrategyManagerInitializer {
pub fn new(exchanges: Vec<Exchange>) -> Self {
Self {
initializers: RwLock::new(
exchanges
.iter()
.cloned()
.map(|exchange| (exchange, StrategyManagerInitializers::default()))
.collect(),
),
}
}
pub async fn is_initialized(&self) -> bool {
let lock = self.initializers.read().await;
lock.values().all(|lock| lock.is_initialized())
}
pub async fn set_initialized_trade_volume(&self, exchange: Exchange) {
self.initializers
.write()
.await
.entry(exchange)
.and_modify(|initializer| initializer.initialized_trade_volume = true);
}
pub async fn set_initialized_order_book(&self, exchange: Exchange) {
self.initializers
.write()
.await
.entry(exchange)
.and_modify(|initializer| initializer.initialized_order_book = true);
}
pub async fn set_initialized_markets(&self, exchange: Exchange) {
self.initializers
.write()
.await
.entry(exchange)
.and_modify(|initializer| initializer.initialized_markets = true);
}
}
| 0
|
market_maker
|
/mono/crates/strategy_manager/config/market_maker/config-local.toml.tmpl
|
use_market_balance = true
backtest = false
[kafka_consumer]
group_id = "market_makers"
bootstrap_servers = "localhost:9094"
enable_partition_eof = "false"
session_timeout_ms = "6000"
auto_commit = "false"
offset_reset = "latest"
topics = [
"market_volatility",
"kraken_ticker",
"kraken_orders",
"kraken_book_eth_usd",
"kraken_balances",
"kraken_account_balances",
"kraken_open_orders",
"kraken_vwap",
"kraken_assets",
"kraken_pairs",
"kraken_order_book",
"kraken_trade_volume",
]
[kafka_producer]
bootstrap_servers = "localhost:9093"
max_in_flight_requests_per_connection = 1
batch_num_messages = 1
request_required_acks = "all"
default_timeout_milliseconds = 50
max_retries = 3
retry_backoff_milliseconds = 10
[[exchange_adapter]]
url = "http://localhost"
port = 3002
exchange = "KRAKEN"
strategy = "MARKET_MAKER_ON_TICKER"
[server]
listen_address = "0.0.0.0"
port = 4001
[metrics]
prefix = "fdr"
[tracing]
level = "DEBUG"
tempo_url = "http://localhost"
tempo_port = 4317
service_name = "market_maker"
[markets]
Kraken = { "eth_usd" = { vwap_decay_factor = 0.0001, vwap_history = 10, ticker_delta = { decay_factor = 0.001, coefficient = { time_window = 60, increase_by = 0.05 } }, volume_weights = { profit_weight_scalar = 2, balance_weight_scalar = 6 } } }
[[strategies]]
MarketMakerOnTicker = { pair = "eth_usd", exchange = "Kraken", cancel_threshold = 0.0005, max_orders = 1, set_bid = 0.05, price_strategy = { "StaticFromTicker" = { desired_profit = 0.0006, price_guard = "SimplifiedAverageCost" } } }
[notify_subscribers]
ticker = true
book = false
order_cancelled = true
trade = true
alert = true
order_placed = true
order_error = true
| 0
|
fdr_event
|
/mono/crates/fdr_event/Cargo.toml
|
[package]
name = "fdr-event"
version = "0.1.0"
edition = "2021"
# See more keys and their definitions at https://doc.rust-lang.org/cargo/reference/manifest.html
[dependencies]
fdr-common = { path = "../fdr_common" }
technical_analysis = { path = "../technical_analysis" }
anyhow = { workspace = true }
async-channel = { workspace = true, optional = true }
async-trait = { workspace = true }
chrono = { workspace = true, features = ["serde"] }
derive_more = { workspace = true }
serde = { workspace = true, features = ["derive"] }
serde_json = { workspace = true }
slog = { workspace = true, features = ["max_level_trace"] }
thiserror = { workspace = true }
tracing = { workspace = true, default-features = false, features = ["std", "attributes"] }
rdkafka = { workspace = true }
tokio = { workspace = true }
rust_decimal = { workspace = true, features = ["serde"] }
regex = { workspace = true }
uuid = { workspace = true, features = ["serde", "v4"] }
[features]
backtest = ["async-channel"]
| 0
|
src
|
/mono/crates/fdr_event/src/events.rs
|
use chrono::{DateTime, Utc};
use fdr_common::{
crypto::{
assets::{Asset, CurrencyPair},
orders::{FdrOrderSide, FdrOrderStatus, FdrOrderType, OrderEntry},
},
error::ExchangeError,
exchange::{Exchange, Market},
Price, Volume,
};
use fdr_common::ticker::FdrTickerUpdate;
use regex::Regex;
use rust_decimal::Decimal;
use serde::Deserialize;
use serde_json::Value;
use std::collections::HashMap;
use technical_analysis::{
period::Period,
value::{IndicatorSymbol, IndicatorValue},
};
use uuid::Uuid;
const TICKER_UPDATE_TOPIC: &str = "ticker";
const BOOK_UPDATE_TOPIC: &str = "book";
const WALLET_UPDATE_TOPIC: &str = "wallet";
const ORDER_UPDATE_TOPIC: &str = "orders";
const DLQ: &str = "dead_letter_queue";
/// Kafka-side metadata for a consumed event (source topic and offset).
/// NOTE(review): an identical `EventMetadata` exists in kafka/consumer —
/// consider deduplicating.
#[derive(Debug, serde::Deserialize, serde::Serialize)]
pub struct EventMetadata {
    pub topic: String,
    pub offset: i64,
}
/// Top-level event envelope flowing through Kafka; variant selects topic
/// (see `FdrEvent::topic`). Deserialization is hand-rolled below.
#[derive(Debug, Clone, serde::Serialize)]
pub enum FdrEvent {
    Alert(FdrAlert),
    Ticker(FdrTickerUpdate),
    Book(FdrBookUpdate),
    Order(FdrOrderUpdate),
    Wallet(FdrWalletUpdate),
    Strategy(Vec<StrategyUpdate>),
    Indicator(FdrIndicatorEvent),
    // Catch-all for payloads that match no known variant.
    Other(UndefinedEvent),
}
/// Strategy-originated order lifecycle notifications, keyed by order id.
#[derive(Debug, Clone, serde::Serialize, serde::Deserialize)]
pub enum StrategyUpdate {
    OrderCreated(Uuid),
    OrderCancellationRequested(Uuid),
}
impl FdrEvent {
    /// Kafka topic name for this event on `exchange`. These strings are wire
    /// protocol — consumers subscribe to them literally (see the consumer
    /// config `topics` lists) — so do not change the formats.
    pub fn topic(&self, exchange: Exchange) -> String {
        match self {
            // Alerts share one global topic across exchanges.
            FdrEvent::Alert(_) => "alerts".to_string(),
            FdrEvent::Ticker(_) => format!("{exchange}_{TICKER_UPDATE_TOPIC}"),
            // Book topics are per-pair, e.g. "kraken_book_eth_usd".
            FdrEvent::Book(e) => format!(
                "{exchange}_{BOOK_UPDATE_TOPIC}_{}_{}",
                e.pair.base().to_string().to_lowercase(),
                e.pair.quote().to_string().to_lowercase()
            ),
            FdrEvent::Order(_) => format!("{exchange}_{ORDER_UPDATE_TOPIC}"),
            FdrEvent::Wallet(_) => format!("{exchange}_{WALLET_UPDATE_TOPIC}"),
            FdrEvent::Strategy(_) => format!("{exchange}_strategy_updates"),
            // Unrecognized events go to the exchange's dead-letter queue.
            FdrEvent::Other(_) => format!("{exchange}_{DLQ}"),
            // Indicator topics are per market + indicator symbol.
            FdrEvent::Indicator(event) => format!(
                "{exchange}_{}_{}",
                event.market.pair.to_string().to_lowercase(),
                event.indicator.to_string().to_lowercase(),
            ),
        }
    }
}
/// Point-in-time balance for one asset (total and available portions).
#[derive(Debug, Clone, serde::Deserialize, serde::Serialize)]
pub struct FdrAssetBalance {
    pub available_balance: Decimal,
    pub balance: Decimal,
    pub timestamp: DateTime<Utc>,
    pub symbol: Asset,
}
/// Order lifecycle events emitted on the `<exchange>_orders` topic.
#[derive(Debug, Clone, serde::Deserialize, serde::Serialize)]
pub enum FdrOrderUpdate {
    Placed(FdrOrderPlaced),
    Trade(FdrOrderTrade),
    Error(FdrOrderError),
    Canceled(FdrOrderCanceled),
    CancellationFailed(FdrOrderCancellationFailed),
    Filled(FdrOrderFilled),
}
/// Emitted when an order has been accepted by the exchange.
#[derive(Debug, Clone, serde::Deserialize, serde::Serialize)]
pub struct FdrOrderPlaced {
    pub order_id: Uuid,
    pub market: Market,
    pub side: FdrOrderSide,
    pub type_: FdrOrderType,
    pub order_status: FdrOrderStatus,
    // Price/volume may be absent (e.g. not echoed back by the exchange).
    pub price: Option<Decimal>,
    pub volume: Option<Decimal>,
    pub timestamp: DateTime<Utc>,
    // Preserves any exchange-specific fields we don't model explicitly.
    #[serde(flatten)]
    pub extra_fields: HashMap<String, Value>,
}
impl From<OrderEntry> for FdrOrderPlaced {
    /// Convert an internal order record into a "placed" event. The timestamp
    /// is set to conversion time (`Utc::now()`), not the order's own time.
    fn from(order: OrderEntry) -> Self {
        Self {
            order_id: order.id,
            market: order.market,
            side: order.side,
            type_: order.type_,
            order_status: order.order_status,
            price: order.price,
            volume: Some(order.volume),
            timestamp: Utc::now(),
            extra_fields: Default::default(),
        }
    }
}
/// A single execution (fill) against an order.
#[derive(Debug, Clone, serde::Deserialize, serde::Serialize)]
pub struct FdrOrderTrade {
    pub order_id: Uuid,
    pub trade_id: Uuid,
    pub market: Market,
    pub side: FdrOrderSide,
    pub price: Decimal,
    pub volume: Decimal,
    pub cost: Decimal,
    pub fee: Decimal,
    pub timestamp: DateTime<Utc>,
    #[serde(flatten)]
    pub extra_fields: HashMap<String, Value>,
}
/// Emitted when an order is fully filled.
#[derive(Debug, Clone, serde::Deserialize, serde::Serialize)]
pub struct FdrOrderFilled {
    pub order_id: Uuid,
    pub timestamp: DateTime<Utc>,
    #[serde(flatten)]
    pub extra_fields: HashMap<String, Value>,
}
/// Emitted when an order was successfully canceled.
#[derive(Debug, Copy, Clone, serde::Deserialize, serde::Serialize)]
pub struct FdrOrderCanceled {
    pub order_id: Uuid,
    pub market: Market,
    pub side: FdrOrderSide,
    pub type_: FdrOrderType,
    pub order_status: FdrOrderStatus,
    pub price: Decimal,
    pub volume: Decimal,
    pub volume_remaining: Decimal,
    pub timestamp: DateTime<Utc>,
}
/// Emitted when a cancellation attempt was rejected; `reason` is free text.
#[derive(Debug, Clone, serde::Deserialize, serde::Serialize)]
pub struct FdrOrderCancellationFailed {
    pub order_id: Uuid,
    pub market: Market,
    pub side: FdrOrderSide,
    pub type_: FdrOrderType,
    pub order_status: FdrOrderStatus,
    pub price: Decimal,
    pub volume: Decimal,
    pub volume_remaining: Decimal,
    pub timestamp: DateTime<Utc>,
    pub reason: String,
}
/// Emitted when the exchange reported an error for an order.
#[derive(Debug, Clone, serde::Deserialize, serde::Serialize)]
pub struct FdrOrderError {
    pub order_id: Uuid,
    pub market: Market,
    pub side: FdrOrderSide,
    pub type_: FdrOrderType,
    pub order_status: FdrOrderStatus,
    pub price: Decimal,
    pub volume: Decimal,
    pub volume_remaining: Decimal,
    pub timestamp: DateTime<Utc>,
    pub error: ExchangeError,
}
/// Balance update; `snapshot` distinguishes full snapshots from deltas.
#[derive(Debug, Clone, serde::Deserialize, serde::Serialize)]
pub struct FdrWalletUpdate {
    pub balances: Vec<FdrAssetBalance>,
    pub snapshot: bool,
    ///used to store any extra fields that we don't care about (or don't know about)
    #[serde(flatten)]
    pub extra_fields: HashMap<String, Value>,
}
/// Opaque payload for events that matched no known variant (dead-letter).
#[derive(Debug, Clone, serde::Deserialize, serde::Serialize)]
pub struct UndefinedEvent {
    #[serde(flatten)]
    pub extra_fields: HashMap<String, Value>,
}
/// Order-book delta or snapshot (`is_snapshot`) for one pair on one exchange.
#[derive(Debug, Clone, PartialEq, serde::Deserialize, serde::Serialize)]
pub struct FdrBookUpdate {
    pub pair: CurrencyPair,
    pub asks: Vec<FdrBookEntry>,
    pub bids: Vec<FdrBookEntry>,
    pub is_snapshot: bool,
    pub exchange: Exchange,
    #[serde(flatten)]
    pub extra_fields: HashMap<String, Value>,
}
/// Technical-indicator values for one market, keyed by period.
#[derive(PartialEq, serde::Deserialize, serde::Serialize, Clone, Debug)]
pub struct FdrIndicatorEvent {
    pub market: Market,
    pub indicator: IndicatorSymbol,
    pub values: HashMap<Period, IndicatorValue>,
}
/// One price level of an order book.
#[derive(Copy, Clone, serde::Deserialize, serde::Serialize, Debug, PartialEq, Eq)]
pub struct FdrBookEntry {
    pub price: Decimal,
    pub volume: Decimal,
}
/// Convenience conversion from a (price, volume) tuple.
impl From<(Price, Volume)> for FdrBookEntry {
    fn from((price, volume): (Price, Volume)) -> Self {
        Self { price, volume }
    }
}
/// Hand-rolled deserializer: dispatches on the FIRST key of the incoming JSON
/// object. Known variant keys ("Alert", "Ticker", …) deserialize the inner
/// value; a bare Grafana payload starting with "alert_state" is treated as an
/// Alert; anything else becomes `FdrEvent::Other` (dead-letter).
/// NOTE(review): only the first object key is inspected — for multi-key
/// objects the dispatch depends on serde_json's map ordering; confirm payloads
/// are always single-key envelopes.
impl<'de> Deserialize<'de> for FdrEvent {
    fn deserialize<D>(deserializer: D) -> Result<Self, D::Error>
    where
        D: serde::de::Deserializer<'de>,
    {
        // Buffer into a Value first so we can peek at the key, then `take`
        // the inner value to avoid cloning it.
        let mut value = Value::deserialize(deserializer)?;
        match value.as_object().and_then(|obj| obj.keys().next().map(|s| s.as_str())) {
            Some("Alert") => value.get_mut("Alert").map(|v| {
                Ok(FdrEvent::Alert(
                    serde_json::from_value(v.take()).map_err(|e| serde::de::Error::custom(e.to_string()))?,
                ))
            }),
            Some("Ticker") => value.get_mut("Ticker").map(|v| {
                Ok(FdrEvent::Ticker(
                    serde_json::from_value(v.take()).map_err(|e| serde::de::Error::custom(e.to_string()))?,
                ))
            }),
            Some("Book") => value.get_mut("Book").map(|v| {
                Ok(FdrEvent::Book(
                    serde_json::from_value(v.take()).map_err(|e| serde::de::Error::custom(e.to_string()))?,
                ))
            }),
            Some("Order") => value.get_mut("Order").map(|v| {
                Ok(FdrEvent::Order(
                    serde_json::from_value(v.take()).map_err(|e| serde::de::Error::custom(e.to_string()))?,
                ))
            }),
            Some("Strategy") => value.get_mut("Strategy").map(|v| {
                Ok(FdrEvent::Strategy(
                    serde_json::from_value(v.take()).map_err(|e| serde::de::Error::custom(e.to_string()))?,
                ))
            }),
            Some("Wallet") => value.get_mut("Wallet").map(|v| {
                Ok(FdrEvent::Wallet(
                    serde_json::from_value(v.take()).map_err(|e| serde::de::Error::custom(e.to_string()))?,
                ))
            }),
            Some("Indicator") => value.get_mut("Indicator").map(|v| {
                Ok(FdrEvent::Indicator(
                    serde_json::from_value(v.take()).map_err(|e| serde::de::Error::custom(e.to_string()))?,
                ))
            }),
            Some("Other") => value.get_mut("Other").map(|v| {
                Ok(FdrEvent::Other(
                    serde_json::from_value(v.take()).map_err(|e| serde::de::Error::custom(e.to_string()))?,
                ))
            }),
            // Raw Grafana alert payload (no enum envelope): whole object is the alert.
            Some("alert_state") => Some(Ok(FdrEvent::Alert(
                serde_json::from_value(value.clone()).map_err(|e| serde::de::Error::custom(e.to_string()))?,
            ))),
            // Unknown shape: keep the whole payload for the dead-letter queue.
            _ => Some(Ok(FdrEvent::Other(
                serde_json::from_value(value.clone()).map_err(|e| serde::de::Error::custom(e.to_string()))?,
            ))),
        }
        .transpose()?
        .map_or_else(
            || Err(serde::de::Error::custom(format!("Invalid event {:?}", value))),
            Ok,
        )
    }
}
/// Grafana alert state; incoming JSON uses lowercase ("alerting"/"resolved").
#[derive(Debug, Clone, serde::Deserialize, serde::Serialize)]
#[serde(rename_all(deserialize = "lowercase"))]
pub enum AlertState {
    Alerting,
    Resolved,
}
/// Markets currently firing vs. resolved for an alert. Custom-deserialized
/// below because Grafana sends this as a JSON-encoded string.
#[derive(Debug, Clone, serde::Serialize)]
pub struct FdrAlertDetails {
    pub firing: Vec<Market>,
    pub resolved: Vec<Market>,
}
/// Derived-deserialize twin of `FdrAlertDetails`, used as the intermediate
/// target once the embedded JSON string has been cleaned up.
#[derive(Debug, Clone, serde::Serialize, serde::Deserialize)]
pub struct FdrAlertDetailsRaw {
    pub firing: Vec<Market>,
    pub resolved: Vec<Market>,
}
impl<'de> Deserialize<'de> for FdrAlertDetails {
fn deserialize<D>(deserializer: D) -> Result<Self, D::Error>
where
D: serde::de::Deserializer<'de>,
{
let value = Value::deserialize(deserializer)?;
let str_val = value.as_str().ok_or(serde::de::Error::custom("not a string"))?;
let re = Regex::new(r",(\s*[]}])").unwrap();
let str_val = re.replace_all(str_val, "$1");
let alert: FdrAlertDetailsRaw = serde_json::from_str(&str_val).map_or_else(
|_| Err(serde::de::Error::custom("Cannot deserialize alert details")),
Ok,
)?;
Ok(Self {
firing: alert.firing,
resolved: alert.resolved,
})
}
}
/// Alert payload as delivered by Grafana webhooks; `details` carries the
/// firing/resolved market lists (embedded JSON string, see FdrAlertDetails).
#[derive(Debug, Clone, serde::Deserialize, serde::Serialize)]
pub struct FdrAlert {
    pub alert_state: AlertState,
    pub client: String,
    pub client_url: String,
    pub description: String,
    pub details: FdrAlertDetails,
    pub incident_key: String,
}
#[cfg(test)]
mod tests {
    use super::{FdrAlert, FdrEvent};
    use serde_json::json;
    /// A raw Grafana payload (including trailing commas inside `details`)
    /// must parse both as FdrAlert and, via the "alert_state" fallback, as
    /// FdrEvent::Alert.
    #[test]
    fn test_deserialize_fdr_alert() {
        let raw = r#"
        {
            "alert_state": "alerting",
            "client": "Grafana",
            "client_url": "http://localhost:3000/alerting/list",
            "description": "Market Volatility Level 1",
            "details": "{\"firing\": [{ \"exchange\": \"kraken\", \"pair\": \"BTC_USD\" },{ \"exchange\": \"kraken\", \"pair\": \"BTC_USDC\" },{ \"exchange\": \"kraken\", \"pair\": \"ETH_BTC\" },{ \"exchange\": \"kraken\", \"pair\": \"ETH_USDT\" },],\"resolved\": [{ \"exchange\": \"kraken\", \"pair\": \"BTC_CAD\" },{ \"exchange\": \"kraken\", \"pair\": \"BTC_USDT\" },{ \"exchange\": \"kraken\", \"pair\": \"LINK_BTC\" },{ \"exchange\": \"kraken\", \"pair\": \"LINK_USDT\" },{ \"exchange\": \"kraken\", \"pair\": \"SOL_BTC\" },{ \"exchange\": \"kraken\", \"pair\": \"SOL_USDT\" },]}",
            "incident_key": "b06904aa732208e95b0dccb0e29c294a00bee347e7eebf230eac8ddc4b7dec15"
        }
        "#;
        let _alert: FdrAlert = serde_json::from_str(raw).unwrap();
        let _event: FdrEvent = serde_json::from_str(raw).unwrap();
    }
    /// An enveloped { "Book": { ... } } payload with decimal strings must
    /// deserialize into FdrEvent::Book.
    #[test]
    fn test_deserialize_fdr_book_entry() {
        let raw = json!(
        {
            "Book": {
                "pair": "BTC_CAD",
                "asks": [
                    {
                        "price": "39847.60000",
                        "volume": "0.00000000"
                    },
                    {
                        "price": "39877.10000",
                        "volume": "1.04900000"
                    },
                    {
                        "price": "39847.40000",
                        "volume": "0.00627355"
                    }
                ],
                "bids": [],
                "is_snapshot": false,
                "exchange": "kraken"
            }
        });
        let _event: FdrEvent = serde_json::from_value(raw).unwrap();
    }
    /// Unknown envelopes fall through to FdrEvent::Other rather than erroring.
    #[test]
    fn test_deserialize_unknown_event() {
        let raw = json!(
        {
            "SomeKey": {
                "SomeInnerKey": "SomeInnerValue"
            }
        });
        let _event: FdrEvent = serde_json::from_value(raw).unwrap();
    }
}
| 0
|
src
|
/mono/crates/fdr_event/src/error.rs
|
use thiserror::Error;
/// Unified error type for the fdr-event crate: wraps rdkafka client errors,
/// publish failures (with topic), JSON (de)serialization errors, and the
/// empty-payload case.
#[derive(Error, Debug)]
pub enum FdrEventError {
    #[error(transparent)]
    Kafka(#[from] rdkafka::error::KafkaError),
    #[error(transparent)]
    Publish(#[from] crate::kafka::producer::FdrPublishError),
    #[error(transparent)]
    Deserialize(#[from] serde_json::Error),
    // A Kafka message arrived with no body; nothing to deserialize.
    #[error("Received Kafka Message w/No Body")]
    EmptyPayload,
}
/// Crate-wide result alias.
pub type FdrEventResult<T> = Result<T, FdrEventError>;
| 0
|
src
|
/mono/crates/fdr_event/src/config.rs
|
use derive_more::Display;
use serde::{Deserialize, Serialize};
/// `auto.offset.reset` strategy for Kafka consumers. The `Display` strings
/// ("earliest"/"latest") are the literal librdkafka config values and the
/// serde form is lowercase to match config files.
#[derive(Debug, Copy, Clone, Display, Serialize, Deserialize, Default)]
#[serde(rename_all = "lowercase")]
pub enum KafkaOffsetReset {
    #[display(fmt = "earliest")]
    Earliest,
    /// Default: only consume messages produced after the consumer joins.
    #[default]
    #[display(fmt = "latest")]
    Latest,
}
| 0
|
src
|
/mono/crates/fdr_event/src/lib.rs
|
use std::time::Duration;
use crate::{
events::FdrEvent,
kafka::{consumer::FdrEventWithMetadata, producer::KafkaDeliveryResult},
};
pub mod config;
pub mod events;
pub mod kafka;
mod error;
pub use error::*;
#[cfg(feature = "backtest")]
pub mod channel;
/// Abstraction over publishing `FdrEvent`s (implemented by KafkaProducer and
/// by in-memory channels under the `backtest` feature).
#[async_trait::async_trait]
pub trait FdrEventProducer {
    /// Publish using the producer's configured default timeout.
    async fn publish_fdr_event<'a>(
        &self,
        topic: &'a str,
        key: Option<&str>,
        payload: &'a FdrEvent,
    ) -> FdrEventResult<KafkaDeliveryResult>;
    /// Publish with an explicit per-call delivery timeout.
    async fn publish_fdr_event_with_custom_timeout<'a>(
        &self,
        topic: &'a str,
        key: Option<&str>,
        payload: &'a FdrEvent,
        timeout: Duration,
    ) -> FdrEventResult<KafkaDeliveryResult>;
}
/// Abstraction over consuming `FdrEvent`s with offset control.
#[async_trait::async_trait]
pub trait FdrEventConsumer {
    /// Await the next event, returning it with its topic/offset metadata.
    async fn recv_fdr_event_with_metadata(&self) -> FdrEventResult<FdrEventWithMetadata<'_>>;
    /// Commit the offset of a previously received message.
    fn commit_message(&self, message: &FdrEventWithMetadata<'_>) -> FdrEventResult<()>;
    /// Reposition the consumer on `topic` to `offset`.
    fn seek(&self, topic: &str, offset: i64) -> FdrEventResult<()>;
    /// Last committed offset for `topic`.
    fn current_offset(&self, topic: &str) -> FdrEventResult<i64>;
    /// All topic names visible to this consumer.
    fn get_topics(&self) -> FdrEventResult<Vec<String>>;
}
| 0
|
kafka
|
/mono/crates/fdr_event/src/kafka/mod.rs
|
use rdkafka::{
client::ClientContext,
consumer::{ConsumerContext, Rebalance},
error::KafkaResult,
topic_partition_list::TopicPartitionList,
};
use slog::{trace, Logger};
pub mod consumer;
pub mod producer;
/// rdkafka client context that routes client callbacks to an slog logger.
pub struct KafkaContext {
    logger: Logger,
}
impl KafkaContext {
    /// Wrap the given logger for use as an rdkafka context.
    pub fn with_logger(logger: Logger) -> Self {
        Self { logger }
    }
}
// Default ClientContext behavior is sufficient; we only hook consumer events.
impl ClientContext for KafkaContext {}
impl ConsumerContext for KafkaContext {
    fn pre_rebalance(&self, _rebalance: &Rebalance<'_>) {
        trace!(self.logger, "Pre Rebalance")
    }
    fn post_rebalance(&self, _rebalance: &Rebalance<'_>) {
        trace!(self.logger, "Post Rebalance")
    }
    // Intentionally silent: commit callbacks fire constantly and would flood
    // trace logs (see the commented-out trace line).
    fn commit_callback(&self, _result: KafkaResult<()>, _offsets: &TopicPartitionList) {
        //trace!(self.logger, "Commit Callback")
    }
}
| 0
|
producer
|
/mono/crates/fdr_event/src/kafka/producer/mod.rs
|
use std::{
error::Error,
fmt::{Display, Formatter},
time::Duration,
};
use rdkafka::{
config::RDKafkaLogLevel,
producer::{FutureProducer, FutureRecord},
util::Timeout,
ClientConfig,
};
use crate::{events::FdrEvent, kafka::KafkaContext, FdrEventError, FdrEventResult};
mod config;
pub use config::*;
/// FutureProducer specialized with our logging context.
pub type LoggingProducer = FutureProducer<KafkaContext>;
/// Kafka producer wrapper carrying retry policy and default timeout.
pub struct KafkaProducer {
    producer: LoggingProducer,
    // Default delivery timeout applied by publish_fdr_event.
    timeout: Duration,
    // See KafkaProducerConfig; retries use linear backoff.
    max_retries: usize,
    retry_backoff_milliseconds: u64,
}
/// Where a successfully delivered message landed.
pub struct KafkaDeliveryResult {
    pub partition: i32,
    pub offset: i64,
}
/// Publish failure annotated with the target topic for diagnostics.
#[derive(Debug)]
pub struct FdrPublishError {
    pub error: rdkafka::error::KafkaError,
    pub topic: String,
}
impl Display for FdrPublishError {
    fn fmt(&self, f: &mut Formatter<'_>) -> std::fmt::Result {
        write!(f, "Error publishing to topic '{}'; {}", self.topic, self.error)
    }
}
impl Error for FdrPublishError {}
// rdkafka's send() resolves to (partition, offset) on success.
impl From<(i32, i64)> for KafkaDeliveryResult {
    fn from((partition, offset): (i32, i64)) -> Self {
        Self { partition, offset }
    }
}
impl KafkaProducer {
    /// Build a producer from config. The string keys below are literal
    /// librdkafka configuration property names — do not alter them.
    /// `default_timeout_milliseconds` doubles as both the client-side
    /// `delivery.timeout.ms` and the default await timeout for sends.
    pub fn new(config: &KafkaProducerConfig, context: KafkaContext) -> FdrEventResult<Self> {
        let producer: LoggingProducer = ClientConfig::new()
            .set("bootstrap.servers", &config.bootstrap_servers)
            .set(
                "max.in.flight.requests.per.connection",
                &config.max_in_flight_requests_per_connection.to_string(),
            )
            .set("batch.num.messages", &config.batch_num_messages.to_string())
            .set("acks", &config.request_required_acks.to_string())
            .set("delivery.timeout.ms", &config.default_timeout_milliseconds.to_string())
            .set_log_level(RDKafkaLogLevel::Debug)
            .create_with_context(context)?;
        let timeout = Duration::from_millis(config.default_timeout_milliseconds);
        Ok(Self {
            producer,
            timeout,
            max_retries: config.max_retries,
            retry_backoff_milliseconds: config.retry_backoff_milliseconds,
        })
    }
}
#[async_trait::async_trait]
impl crate::FdrEventProducer for KafkaProducer {
    /// Publish with the default timeout, retrying on failure with linear
    /// backoff (i * retry_backoff_milliseconds before attempt i).
    /// NOTE(review): the loop `1..self.max_retries` performs max_retries-1
    /// retries (max_retries TOTAL attempts); if "max_retries" is meant to
    /// count retries after the first attempt this is off by one — confirm
    /// intended semantics (`1..=self.max_retries` would give max_retries
    /// retries).
    async fn publish_fdr_event<'a>(
        &self,
        topic: &'a str,
        key: Option<&str>,
        payload: &'a FdrEvent,
    ) -> FdrEventResult<KafkaDeliveryResult> {
        let mut res = self
            .publish_fdr_event_with_custom_timeout(topic, key, payload, self.timeout)
            .await;
        if res.is_err() {
            for i in 1..self.max_retries {
                tokio::time::sleep(Duration::from_millis(self.retry_backoff_milliseconds * i as u64)).await;
                res = self
                    .publish_fdr_event_with_custom_timeout(topic, key, payload, self.timeout)
                    .await;
                if res.is_ok() {
                    break;
                }
            }
        }
        res
    }
    /// Single publish attempt: JSON-encode the event and send it with the
    /// current wall-clock timestamp; failures are wrapped in FdrPublishError
    /// carrying the topic name.
    async fn publish_fdr_event_with_custom_timeout<'a>(
        &self,
        topic: &'a str,
        key: Option<&str>,
        payload: &'a FdrEvent,
        timeout: Duration,
    ) -> FdrEventResult<KafkaDeliveryResult> {
        Ok(self
            .producer
            .send(
                FutureRecord {
                    topic,
                    key,
                    payload: Some(serde_json::to_string(payload)?.as_bytes()),
                    partition: None,
                    timestamp: chrono::Utc::now().timestamp_millis().into(),
                    headers: None,
                },
                Timeout::After(timeout),
            )
            .await
            .map_err(|(e, _)| {
                FdrEventError::Publish(FdrPublishError {
                    error: e,
                    topic: topic.to_string(),
                })
            })?
            .into())
    }
}
| 0
|
producer
|
/mono/crates/fdr_event/src/kafka/producer/config.rs
|
use derive_more::Display;
/// Producer `acks` setting: "all" waits for full in-sync-replica
/// acknowledgement, "none" is fire-and-forget. The `Display` strings are the
/// literal librdkafka values fed to the "acks" property.
#[derive(Display, Debug, Clone, serde::Deserialize, serde::Serialize, Default)]
#[serde(rename_all = "lowercase")]
pub enum KafkaProducerAckStrategy {
    /// Default: the safest delivery guarantee.
    #[default]
    #[display(fmt = "all")]
    All,
    #[display(fmt = "none")]
    None,
}
// serde `default = "..."` requires a callable path, hence these fn-pointer
// consts supplying the default values below.
const DEFAULT_MAX_IN_FLIGHT_REQUESTS_PER_CONNECTION: fn() -> u32 = || 1;
const DEFAULT_BATCH_NUM_MESSAGES: fn() -> u32 = || 1;
const DEFAULT_TIMEOUT_MILLISECONDS: fn() -> u64 = || 50;
const DEFAULT_MAX_RETRIES: fn() -> usize = || 3;
const DEFAULT_RETRY_BACKOFF_MILLISECONDS: fn() -> u64 = || 10;
/// Producer settings; every field except `bootstrap_servers` has a default.
/// Defaults of 1 in-flight request and batch size 1 favor ordering/latency
/// over throughput.
#[derive(Debug, Clone, serde::Deserialize, serde::Serialize)]
pub struct KafkaProducerConfig {
    pub bootstrap_servers: String,
    #[serde(default = "DEFAULT_MAX_IN_FLIGHT_REQUESTS_PER_CONNECTION")]
    pub max_in_flight_requests_per_connection: u32,
    #[serde(default = "DEFAULT_BATCH_NUM_MESSAGES")]
    pub batch_num_messages: u32,
    #[serde(default)]
    pub request_required_acks: KafkaProducerAckStrategy,
    // Used for both delivery.timeout.ms and the default send timeout.
    #[serde(default = "DEFAULT_TIMEOUT_MILLISECONDS")]
    pub default_timeout_milliseconds: u64,
    #[serde(default = "DEFAULT_MAX_RETRIES")]
    pub max_retries: usize,
    // Linear backoff multiplier between publish retries.
    #[serde(default = "DEFAULT_RETRY_BACKOFF_MILLISECONDS")]
    pub retry_backoff_milliseconds: u64,
}
| 0
|
consumer
|
/mono/crates/fdr_event/src/kafka/consumer/mod.rs
|
use std::time::Duration;
use rdkafka::{
config::RDKafkaLogLevel,
consumer::{CommitMode, Consumer, StreamConsumer},
error::{KafkaError, RDKafkaErrorCode::Fail},
util::Timeout,
ClientConfig, Message, Offset, TopicPartitionList,
};
use crate::{events::FdrEvent, kafka::KafkaContext, FdrEventConsumer, FdrEventError, FdrEventResult};
mod config;
pub use config::*;
pub type LoggingConsumer = StreamConsumer<KafkaContext>;
/// Thin wrapper around an rdkafka borrowed message so downstream code does not
/// depend on rdkafka types directly.
#[derive(Debug)]
pub struct KafkaMessage<'a> {
    pub message: rdkafka::message::BorrowedMessage<'a>,
}
impl<'a> KafkaMessage<'a> {
    pub fn new(message: rdkafka::message::BorrowedMessage<'a>) -> Self {
        Self { message }
    }
    /// Raw message body, if any. Always returns Ok; the Result wrapper keeps
    /// the signature uniform with other fallible accessors.
    pub fn payload(&self) -> FdrEventResult<Option<&[u8]>> {
        Ok(self.message.payload())
    }
}
impl<'a> TryFrom<KafkaMessage<'a>> for FdrEventWithMetadata<'a> {
    type Error = FdrEventError;
    /// Decode a Kafka message body as JSON into an FdrEvent, attaching its
    /// topic/offset. Errors: EmptyPayload when the body is absent,
    /// Deserialize on malformed JSON. The original message is retained so the
    /// consumer can commit it later.
    fn try_from(message: KafkaMessage<'a>) -> Result<Self, Self::Error> {
        let payload = message.message.payload().ok_or(FdrEventError::EmptyPayload)?;
        let event = serde_json::de::from_slice::<FdrEvent>(payload)?;
        let metadata = EventMetadata {
            topic: message.message.topic().to_string(),
            offset: message.message.offset(),
        };
        Ok(FdrEventWithMetadata {
            event,
            metadata,
            message: Some(message),
        })
    }
}
/// Decoded event plus its Kafka origin. `message` is None for events that did
/// not come from a real Kafka message (e.g. backtest channels) and cannot be
/// committed.
#[derive(Debug)]
pub struct FdrEventWithMetadata<'a> {
    pub event: FdrEvent,
    pub metadata: EventMetadata,
    pub message: Option<KafkaMessage<'a>>,
}
/// Topic/offset of a consumed event.
/// NOTE(review): duplicates `events::EventMetadata` — consider sharing one.
#[derive(Debug, serde::Deserialize, serde::Serialize)]
pub struct EventMetadata {
    pub topic: String,
    pub offset: i64,
}
/// Stream consumer wrapper implementing FdrEventConsumer.
pub struct KafkaConsumer {
    pub consumer: LoggingConsumer,
}
impl KafkaConsumer {
    /// Build a consumer from config and subscribe to every configured topic.
    /// The string keys are literal librdkafka property names — do not alter.
    pub fn new(config: &KafkaConsumerConfig, context: KafkaContext) -> FdrEventResult<Self> {
        let consumer: LoggingConsumer = ClientConfig::new()
            .set("group.id", &config.group_id)
            .set("bootstrap.servers", &config.bootstrap_servers)
            .set("enable.partition.eof", &config.enable_partition_eof)
            .set("session.timeout.ms", &config.session_timeout_ms)
            .set("enable.auto.commit", &config.auto_commit)
            .set("auto.offset.reset", &config.offset_reset.to_string())
            .set_log_level(RDKafkaLogLevel::Debug)
            .create_with_context(context)?;
        consumer.subscribe(&config.topics.iter().map(|s| s.as_str()).collect::<Vec<&str>>())?;
        Ok(Self { consumer })
    }
}
#[async_trait::async_trait]
impl FdrEventConsumer for KafkaConsumer {
    /// Waits for the next Kafka message and decodes it into an event plus metadata.
    async fn recv_fdr_event_with_metadata(&self) -> FdrEventResult<FdrEventWithMetadata<'_>> {
        let message = KafkaMessage::new(self.consumer.recv().await?);
        let event_with_meta = FdrEventWithMetadata::try_from(message)?;
        Ok(event_with_meta)
    }
    /// Commits the offset of `message` asynchronously. An event without a backing
    /// Kafka message (e.g. from the in-process channel) is a no-op.
    fn commit_message(&self, message: &FdrEventWithMetadata<'_>) -> FdrEventResult<()> {
        let Some(message) = &message.message else {
            return Ok(());
        };
        Ok(self.consumer.commit_message(&message.message, CommitMode::Async)?)
    }
    /// Seeks `topic` to `offset`.
    ///
    /// NOTE(review): the partition is hard-coded to 0, so this only behaves
    /// correctly for single-partition topics.
    fn seek(&self, topic: &str, offset: i64) -> FdrEventResult<()> {
        Ok(self.consumer.seek(topic, 0, Offset::Offset(offset), Timeout::Never)?)
    }
    /// Returns the committed offset for partition 0 of `topic` (same
    /// single-partition assumption as `seek`).
    ///
    /// Fails when the partition is absent from the broker response or the
    /// committed offset is not a concrete `Offset::Offset` value.
    fn current_offset(&self, topic: &str) -> FdrEventResult<i64> {
        let mut tpl = TopicPartitionList::new();
        tpl.add_partition(topic, 0);
        let tpl = self.consumer.committed_offsets(tpl, Timeout::Never)?;
        let element = tpl.find_partition(topic, 0).ok_or(KafkaError::OffsetFetch(Fail))?;
        let offset = element.offset();
        match offset {
            Offset::Offset(offset_val) => Ok(offset_val),
            _ => Err(KafkaError::OffsetFetch(Fail))?,
        }
    }
    /// Lists all topic names known to the broker (5 second metadata timeout).
    fn get_topics(&self) -> FdrEventResult<Vec<String>> {
        Ok(self
            .consumer
            .fetch_metadata(None, Duration::from_secs(5))?
            .topics()
            .iter()
            .map(|topic| String::from(topic.name()))
            .collect())
    }
}
| 0
|
consumer
|
/mono/crates/fdr_event/src/kafka/consumer/config.rs
|
use crate::config::KafkaOffsetReset;
// Serde's `default = "..."` requires a callable path, so the defaults are
// expressed as function-pointer consts. Values stay as strings because
// rdkafka's `ClientConfig::set` takes string values.
const DEFAULT_ENABLE_PARTITION_EOF: fn() -> String = || false.to_string();
const DEFAULT_SESSION_TIMEOUT_MS: fn() -> String = || 6000.to_string();
const DEFAULT_AUTO_COMMIT: fn() -> String = || false.to_string();
/// Configuration for the Kafka consumer; forwarded verbatim to librdkafka.
#[derive(Debug, Clone, serde::Serialize, serde::Deserialize)]
pub struct KafkaConsumerConfig {
    pub group_id: String,
    pub bootstrap_servers: String,
    // "true"/"false" as a string.
    #[serde(default = "DEFAULT_ENABLE_PARTITION_EOF")]
    pub enable_partition_eof: String,
    // Milliseconds, as a string.
    #[serde(default = "DEFAULT_SESSION_TIMEOUT_MS")]
    pub session_timeout_ms: String,
    // "true"/"false" as a string.
    #[serde(default = "DEFAULT_AUTO_COMMIT")]
    pub auto_commit: String,
    #[serde(default)]
    pub offset_reset: KafkaOffsetReset,
    pub topics: Vec<String>,
}
| 0
|
channel
|
/mono/crates/fdr_event/src/channel/mod.rs
|
use crate::{
events::FdrEvent,
kafka::{
consumer::{EventMetadata, FdrEventWithMetadata},
producer::KafkaDeliveryResult,
},
FdrEventConsumer, FdrEventProducer, FdrEventResult,
};
use async_trait::async_trait;
use std::time::Duration;
/// In-process, channel-backed stand-in for the Kafka producer/consumer pair.
/// Useful for tests and local wiring where no broker is available.
pub struct FdrEventChannel {
    producer: async_channel::Sender<FdrEvent>,
    consumer: async_channel::Receiver<FdrEvent>,
}
impl FdrEventChannel {
    /// Creates a bounded channel holding at most `capacity` pending events.
    pub fn new(capacity: usize) -> Self {
        let (producer, consumer) = async_channel::bounded::<FdrEvent>(capacity);
        Self { producer, consumer }
    }
}
#[async_trait]
impl FdrEventProducer for FdrEventChannel {
    /// Sends the event over the in-process channel.
    ///
    /// Delegates to [`Self::publish_fdr_event_with_custom_timeout`] (the two
    /// methods were previously byte-identical copy-paste); a timeout is
    /// meaningless for an in-memory channel, so a zero duration is passed.
    async fn publish_fdr_event<'a>(
        &self,
        topic: &'a str,
        key: Option<&str>,
        payload: &'a FdrEvent,
    ) -> FdrEventResult<KafkaDeliveryResult> {
        self.publish_fdr_event_with_custom_timeout(topic, key, payload, Duration::ZERO)
            .await
    }
    /// Sends the event over the in-process channel. `_topic`, `_key`, and
    /// `_timeout` are ignored; the returned delivery result carries placeholder
    /// offset/partition values of 0.
    ///
    /// NOTE(review): `unwrap` panics when the channel is closed (all receivers
    /// dropped) — consider mapping that into an `FdrEventError` instead.
    async fn publish_fdr_event_with_custom_timeout<'a>(
        &self,
        _topic: &'a str,
        _key: Option<&str>,
        payload: &'a FdrEvent,
        _timeout: Duration,
    ) -> FdrEventResult<KafkaDeliveryResult> {
        self.producer.send(payload.clone()).await.unwrap();
        Ok(KafkaDeliveryResult {
            offset: 0,
            partition: 0,
        })
    }
}
#[async_trait]
impl FdrEventConsumer for FdrEventChannel {
    /// Receives the next event from the channel with placeholder metadata
    /// (empty topic, offset 0, no backing Kafka message).
    ///
    /// NOTE(review): `unwrap` panics when the channel is closed (all senders
    /// dropped) — consider mapping that into an error instead.
    async fn recv_fdr_event_with_metadata(&self) -> FdrEventResult<FdrEventWithMetadata<'_>> {
        let event = self.consumer.recv().await.unwrap();
        Ok(FdrEventWithMetadata {
            event,
            metadata: EventMetadata {
                topic: "".to_string(),
                offset: 0,
            },
            message: None,
        })
    }
    // The remaining trait methods are no-ops: an in-process channel has no
    // offsets, no seeking, and no topics.
    fn commit_message(&self, _message: &FdrEventWithMetadata<'_>) -> FdrEventResult<()> {
        Ok(())
    }
    fn seek(&self, _topic: &str, _offset: i64) -> FdrEventResult<()> {
        Ok(())
    }
    fn current_offset(&self, _topic: &str) -> FdrEventResult<i64> {
        Ok(0)
    }
    fn get_topics(&self) -> FdrEventResult<Vec<String>> {
        Ok(Vec::new())
    }
}
#[cfg(test)]
mod test {
    use super::*;
    use crate::events::{FdrEvent, UndefinedEvent};
    use serde_json::Value;
    use std::collections::HashMap;
    /// Round-trips three events through the channel and verifies FIFO ordering.
    #[tokio::test]
    async fn test_fdr_event_channel() {
        let channel = FdrEventChannel::new(10);
        for i in 0..3 {
            let event = FdrEvent::Other(UndefinedEvent {
                extra_fields: [("iteration".to_string(), serde_json::Value::from(i))]
                    .into_iter()
                    .collect::<HashMap<String, Value>>(),
            });
            channel.publish_fdr_event("test", None, &event).await.unwrap();
        }
        for i in 0..3 {
            let event = channel.recv_fdr_event_with_metadata().await.unwrap();
            // The assertion previously sat inside a bare `if let`, so receiving
            // the wrong variant made the test pass vacuously. Fail loudly instead.
            match event.event {
                FdrEvent::Other(UndefinedEvent { extra_fields }) => {
                    assert_eq!(extra_fields.get("iteration").unwrap().as_i64().unwrap(), i);
                }
                _ => panic!("expected FdrEvent::Other with iteration {i}"),
            }
        }
    }
}
| 0
|
indicator_service
|
/mono/crates/indicator_service/Cargo.toml
|
[package]
name = "indicator-service"
version.workspace = true
edition.workspace = true
# See more keys and their definitions at https://doc.rust-lang.org/cargo/reference/manifest.html
[dependencies]
fdr-common = { path = "../fdr_common", features = ["tracing"] }
fdr-http = { path = "../fdr_http" }
fdr-event = { path = "../fdr_event" }
fdr-store = { path = "../fdr_store" }
technical_analysis = { path = "../technical_analysis" }
async-trait = { workspace = true }
axum = { workspace = true }
chrono = { workspace = true, features = ["serde"] }
clap = { workspace = true, features = ["derive", "env", "cargo"] }
derive_more = { workspace = true }
http = { workspace = true }
http-body = { workspace = true }
hyper = { workspace = true }
rust_decimal = { workspace = true, features = ["serde", "db-postgres"] }
serde = { workspace = true, features = ["derive"] }
serde_json = { workspace = true }
slog = { workspace = true, features = ["max_level_trace"] }
strum = { workspace = true, features = ["derive"] }
strum_macros = { workspace = true }
sqlx = { workspace = true, features = [
"runtime-tokio-native-tls",
"postgres",
"json",
"migrate",
"time",
"uuid",
"rust_decimal",
] }
prometheus = { workspace = true }
thiserror = { workspace = true }
tokio = { workspace = true, features = ["full"] }
tracing = { workspace = true, default-features = false, features = [
"std",
"attributes",
] }
tracing-opentelemetry = { workspace = true, default-features = true }
toml = { workspace = true }
url = { workspace = true }
uuid = { workspace = true, features = ["v4", "fast-rng", "serde"] }
| 0
|
src
|
/mono/crates/indicator_service/src/metrics.rs
|
use fdr_common::{error::FdrResult, exchange::Market};
use prometheus::{GaugeVec, Registry};
use rust_decimal::prelude::ToPrimitive;
use std::{collections::HashMap, sync::Arc};
use technical_analysis::{
period::Period,
value::{IndicatorSymbol, IndicatorValue},
};
/// Prometheus registry plus one gauge family per supported indicator.
pub struct Metrics {
    pub registry: Arc<Registry>,
    pub rsi: RsiMetrics,
    pub sma: SmaMetrics,
    pub ema: EmaMetrics,
    pub std_dev: StdDevMetrics,
    pub std_prc: StdDevPrcMetrics,
}
impl Metrics {
    /// Builds a registry with `prefix` prepended to every metric name and
    /// registers each indicator gauge family on it.
    pub fn new(prefix: &str) -> FdrResult<Self> {
        let registry = Registry::new_custom(Some(prefix.to_string()), None)?;
        Ok(Self {
            registry: Arc::new(registry.clone()),
            rsi: RsiMetrics::new(registry.clone())?,
            sma: SmaMetrics::new(registry.clone())?,
            ema: EmaMetrics::new(registry.clone())?,
            std_dev: StdDevMetrics::new(registry.clone())?,
            std_prc: StdDevPrcMetrics::new(registry)?,
        })
    }
    /// Exports the per-period values of one indicator for one market to the
    /// matching gauge family. Non-decimal indicator values are skipped.
    ///
    /// (Previously this was five copy-pasted match arms differing only in the
    /// target gauge; they are collapsed into a single loop here.)
    pub fn update_indicator_metrics(
        &self,
        market: &Market,
        symbol: &IndicatorSymbol,
        values: &HashMap<Period, IndicatorValue>,
    ) {
        let exchange = market.exchange.to_string();
        let pair = market.pair.to_string();
        for (period, value) in values.iter() {
            // Only numeric values are exported as gauges.
            let IndicatorValue::Decimal(value) = value else {
                continue;
            };
            let period = period.to_string();
            let labels = [exchange.as_str(), pair.as_str(), period.as_str()];
            // Pick the gauge family matching the indicator symbol.
            let gauge = match symbol {
                IndicatorSymbol::SimpleMovingAverage => self.sma.with_label_values(&labels),
                IndicatorSymbol::ExponentialMovingAverage => self.ema.with_label_values(&labels),
                IndicatorSymbol::RelativeStrengthIndex => self.rsi.with_label_values(&labels),
                IndicatorSymbol::StandardDeviation => self.std_dev.with_label_values(&labels),
                IndicatorSymbol::StandardDeviationPercent => self.std_prc.with_label_values(&labels),
            };
            // Values that don't fit an f64 are reported as 0.0 (same as before).
            gauge.set(value.to_f64().unwrap_or_default());
        }
    }
}
/// Gauge family for Relative Strength Index, labelled by exchange/pair/period.
pub struct RsiMetrics(GaugeVec);
impl RsiMetrics {
    fn new(registry: Registry) -> FdrResult<Self> {
        let rsi = GaugeVec::new(
            prometheus::opts!("rsi", "Relative Strength Index"),
            &["exchange", "pair", "period"],
        )?;
        registry.register(Box::new(rsi.clone()))?;
        Ok(Self(rsi))
    }
    /// Labels: `[exchange, pair, period]`.
    pub fn with_label_values(&self, labels: &[&str]) -> prometheus::Gauge {
        self.0.with_label_values(labels)
    }
}
/// Gauge family for Simple Moving Average, labelled by exchange/pair/period.
pub struct SmaMetrics(GaugeVec);
impl SmaMetrics {
    fn new(registry: Registry) -> FdrResult<Self> {
        let sma = GaugeVec::new(
            prometheus::opts!("sma", "Simple Moving Average"),
            &["exchange", "pair", "period"],
        )?;
        registry.register(Box::new(sma.clone()))?;
        Ok(Self(sma))
    }
    /// Labels: `[exchange, pair, period]`.
    pub fn with_label_values(&self, labels: &[&str]) -> prometheus::Gauge {
        self.0.with_label_values(labels)
    }
}
/// Gauge family for Exponential Moving Average, labelled by exchange/pair/period.
pub struct EmaMetrics(GaugeVec);
impl EmaMetrics {
    fn new(registry: Registry) -> FdrResult<Self> {
        let ema = GaugeVec::new(
            prometheus::opts!("ema", "Exponential Moving Average"),
            &["exchange", "pair", "period"],
        )?;
        registry.register(Box::new(ema.clone()))?;
        Ok(Self(ema))
    }
    /// Labels: `[exchange, pair, period]`.
    pub fn with_label_values(&self, labels: &[&str]) -> prometheus::Gauge {
        self.0.with_label_values(labels)
    }
}
/// Gauge family for Standard Deviation, labelled by exchange/pair/period.
pub struct StdDevMetrics(GaugeVec);
impl StdDevMetrics {
    // NOTE(review): unlike rsi/sma/ema this derives the metric name from the
    // symbol's Display impl rather than a literal — confirm it yields a valid
    // Prometheus metric name and keep the naming style consistent.
    fn new(registry: Registry) -> FdrResult<Self> {
        let std_dev = GaugeVec::new(
            prometheus::opts!(IndicatorSymbol::StandardDeviation.to_string(), "Standard Deviation"),
            &["exchange", "pair", "period"],
        )?;
        registry.register(Box::new(std_dev.clone()))?;
        Ok(Self(std_dev))
    }
    /// Labels: `[exchange, pair, period]`.
    pub fn with_label_values(&self, labels: &[&str]) -> prometheus::Gauge {
        self.0.with_label_values(labels)
    }
}
/// Gauge family for Standard Deviation as a percentage of last price.
pub struct StdDevPrcMetrics(GaugeVec);
impl StdDevPrcMetrics {
    // NOTE(review): metric name derived from Display, see StdDevMetrics above.
    fn new(registry: Registry) -> FdrResult<Self> {
        let std_prc = GaugeVec::new(
            prometheus::opts!(
                IndicatorSymbol::StandardDeviationPercent.to_string(),
                "Standard Deviation as percentage of last price"
            ),
            &["exchange", "pair", "period"],
        )?;
        registry.register(Box::new(std_prc.clone()))?;
        Ok(Self(std_prc))
    }
    /// Labels: `[exchange, pair, period]`.
    pub fn with_label_values(&self, labels: &[&str]) -> prometheus::Gauge {
        self.0.with_label_values(labels)
    }
}
| 0
|
src
|
/mono/crates/indicator_service/src/error.rs
|
use fdr_common::error::FdrError;
use fdr_event::FdrEventError;
use fdr_store::error::FdrStoreError;
use technical_analysis::error::IndicatorError;
use thiserror::Error;
/// Top-level error type for the indicator service; mostly transparent wrappers
/// around dependency errors so `?` converts them automatically.
#[derive(Error, Debug)]
pub enum IndicatorServiceError {
    #[error(transparent)]
    FdrError(#[from] FdrError),
    /// A Kafka message arrived without a payload.
    /// NOTE(review): possibly redundant — `FdrEventError` also defines an
    /// `EmptyPayload`; confirm this variant is still used before removing.
    #[error("Received Kafka Message w/No Body")]
    EmptyPayload,
    #[error(transparent)]
    Sqlx(#[from] sqlx::Error),
    #[error(transparent)]
    DbMigration(#[from] sqlx::migrate::MigrateError),
    #[error(transparent)]
    Kafka(#[from] FdrEventError),
    #[error(transparent)]
    SerdeJson(#[from] serde_json::Error),
    #[error(transparent)]
    Io(#[from] std::io::Error),
    #[error(transparent)]
    Uuid(#[from] uuid::Error),
    #[error(transparent)]
    Decimal(#[from] rust_decimal::Error),
    #[error(transparent)]
    ParseUrl(#[from] url::ParseError),
    #[error(transparent)]
    Hyper(#[from] hyper::Error),
    #[error(transparent)]
    AddrParse(#[from] std::net::AddrParseError),
    #[error(transparent)]
    Prometheus(#[from] prometheus::Error),
    #[error(transparent)]
    FdrStoreError(#[from] FdrStoreError),
    #[error(transparent)]
    IndicatorError(#[from] IndicatorError),
}
/// Convenience alias used throughout the service.
pub type IndicatorServiceResult<T> = Result<T, IndicatorServiceError>;
| 0
|
src
|
/mono/crates/indicator_service/src/main.rs
|
mod config;
mod context;
pub mod error;
mod metrics;
use crate::{
config::Config,
context::Context,
error::{IndicatorServiceError, IndicatorServiceResult},
};
use async_trait::async_trait;
use axum::Router;
use fdr_common::{
context::FdrServiceContext,
exchange::Market,
task::{FdrTask, FdrTaskManagerBuilder, FdrTaskResult, ShutdownType},
ticker::FdrTickerUpdate,
utils::cli::{Args, Parser},
};
use fdr_event::{
events::{FdrBookUpdate, FdrEvent, FdrIndicatorEvent},
FdrEventConsumer, FdrEventProducer,
};
use fdr_http::Server;
use fdr_store::indicator::IndicatorStore;
use slog::{info, trace, warn};
use std::sync::Arc;
/// Entry point: parses CLI args/config, builds the shared [`Context`], then runs
/// the HTTP server and the event-processing loop under the task manager.
#[tokio::main]
async fn main() -> IndicatorServiceResult<()> {
    let args: Args<Config> = Args::parse();
    let ctx = Arc::new(Context::from_config(args.config.clone()).await?);
    info!(ctx.logger, "Indicator Service started"; "version" => env!("CARGO_PKG_VERSION"));
    let mut task_manager = FdrTaskManagerBuilder::new(ctx.logger().clone())
        .add_task(HttpServerTask(ctx.clone()))
        .add_task(IndicatorServiceTask { ctx })
        .build();
    task_manager.run().await;
    Ok(())
}
impl IndicatorServiceTask {
    /// Receives one event from Kafka, dispatches it by type, then commits it.
    ///
    /// The commit happens even when handling returned an error, so events are
    /// processed at-most-once — a failed event is not redelivered.
    async fn await_and_handle_event(&self) -> IndicatorServiceResult<()> {
        let event = self.ctx.kafka_consumer.recv_fdr_event_with_metadata().await?;
        let res = match &event.event {
            FdrEvent::Ticker(ticker) => self.handle_ticker_event(ticker).await,
            FdrEvent::Book(book) => self.handle_book_event(book).await,
            // All other event types are ignored.
            _ => Ok(()),
        };
        self.ctx.kafka_consumer.commit_message(&event)?;
        res
    }
    /// Handles an order-book update.
    ///
    /// NOTE(review): not implemented — `todo!()` panics the task if a book event
    /// is ever consumed from the subscribed topics.
    async fn handle_book_event(&self, book: &FdrBookUpdate) -> IndicatorServiceResult<()> {
        trace!(self.ctx.logger, "received book entry"; "book" => ?book);
        todo!()
    }
    /// Handles a ticker update: buffers it for its market, recomputes that
    /// market's indicators, then publishes, exports, and persists each updated
    /// indicator value.
    async fn handle_ticker_event(&self, ticker: &FdrTickerUpdate) -> IndicatorServiceResult<()> {
        trace!(self.ctx.logger, "received ticker entry"; "ticker" => ?ticker);
        let market: Market = ticker.into();
        // Save the ticker to the tickers
        let Some(market_tickers) = self.ctx.tickers.get(&market) else {
            // Ticker for an unconfigured market is silently dropped.
            // TODO: Maybe this should be an error?
            return Ok(());
        };
        // Update the TickerBuffer with the latest ticker
        let mut lock = market_tickers.write().await;
        lock.push(*ticker);
        // Drop the write lock
        drop(lock);
        // Update the indicator market
        let updated_indicators = self
            .ctx
            .indicator_registry
            .update_market_indicators(market, market_tickers.clone())
            .await?;
        // Fan each updated indicator out to Kafka, Prometheus, and Postgres.
        for (symbol, updated_values) in updated_indicators.iter() {
            let event = FdrEvent::Indicator(FdrIndicatorEvent {
                market,
                indicator: *symbol,
                values: updated_values.clone(),
            });
            self.ctx
                .kafka_producer
                .publish_fdr_event(&event.topic(market.exchange), None, &event)
                .await?;
            self.ctx
                .metrics
                .update_indicator_metrics(&market, symbol, updated_values);
            self.ctx
                .pool
                .acquire()
                .await?
                .save_indicator(market, symbol, updated_values, ticker.timestamp)
                .await?;
        }
        Ok(())
    }
}
/// Task that consumes Kafka events and updates indicators.
struct IndicatorServiceTask {
    ctx: Arc<Context>,
}
#[async_trait]
impl FdrTask<IndicatorServiceError> for IndicatorServiceTask {
    /// Runs the consume/handle loop forever; individual handler errors are
    /// logged and the loop keeps going.
    async fn run(self: Box<Self>) -> FdrTaskResult<IndicatorServiceError> {
        info!(self.ctx.logger, "Listening for events...");
        loop {
            if let Err(e) = self.await_and_handle_event().await {
                warn!(self.ctx.logger, "error handling kafka message"; "error" => format!("{:?}", e));
            }
        }
    }
    fn name(&self) -> String {
        "Indicator Service".to_string()
    }
    fn shutdown_type(&self) -> ShutdownType {
        ShutdownType::Managed
    }
}
/// Task that runs the service's HTTP server.
struct HttpServerTask(Arc<Context>);
#[async_trait]
impl FdrTask<IndicatorServiceError> for HttpServerTask {
    /// Binds to the configured address/port and serves until shutdown.
    ///
    /// NOTE(review): an empty `Router` is passed — any routes presumably come
    /// from `fdr_http::Server` itself; confirm.
    async fn run(self: Box<Self>) -> FdrTaskResult<IndicatorServiceError> {
        Server::serve(
            self.0.clone(),
            self.0.config.server.listen_address.clone(),
            self.0.config.server.port,
            Router::new(),
        )
        .await
    }
    fn name(&self) -> String {
        "HTTP Server".to_string()
    }
    fn shutdown_type(&self) -> ShutdownType {
        ShutdownType::Managed
    }
}
| 0
|
src
|
/mono/crates/indicator_service/src/config.rs
|
use fdr_common::{
config::{MetricsConfig, ServerConfig},
exchange::Market,
};
use fdr_event::kafka::{consumer::KafkaConsumerConfig, producer::KafkaProducerConfig};
use fdr_store::config::PostgresConfig;
use serde::{Deserialize, Serialize};
use strum::IntoEnumIterator;
use technical_analysis::{
indicators::{
ExponentialMovingAverage, IndicatorTrait, RelativeStrengthIndex, SimpleMovingAverage, StandardDeviation,
StandardDeviationPercent,
},
registry::IndicatorRegistry,
value::IndicatorSymbol,
};
/// Top-level service configuration, deserialized from the file given on the CLI.
#[derive(Debug, Clone, serde::Deserialize)]
pub(crate) struct Config {
    pub(crate) kafka_producer: KafkaProducerConfig,
    pub(crate) kafka_consumer: KafkaConsumerConfig,
    pub(crate) postgres: PostgresConfig,
    pub(crate) server: ServerConfig,
    pub(crate) metrics: MetricsConfig,
    pub(crate) indicator: IndicatorRegistryConfig,
}
/// The markets for which indicators are computed.
#[derive(Clone, Debug, Deserialize, Serialize)]
pub(crate) struct IndicatorRegistryConfig {
    pub(crate) markets: Vec<Market>,
}
impl From<&IndicatorRegistryConfig> for IndicatorRegistry {
fn from(config: &IndicatorRegistryConfig) -> IndicatorRegistry {
let mut registry = IndicatorRegistry::new();
for market in config.markets.iter() {
for symbol in IndicatorSymbol::iter() {
let indicator: Box<dyn IndicatorTrait> = match symbol {
IndicatorSymbol::SimpleMovingAverage => Box::new(SimpleMovingAverage),
IndicatorSymbol::ExponentialMovingAverage => Box::new(ExponentialMovingAverage),
IndicatorSymbol::RelativeStrengthIndex => Box::new(RelativeStrengthIndex),
IndicatorSymbol::StandardDeviation => Box::new(StandardDeviation),
IndicatorSymbol::StandardDeviationPercent => Box::new(StandardDeviationPercent),
};
registry.register(*market, indicator);
}
}
registry
}
}
| 0
|
src
|
/mono/crates/indicator_service/src/context.rs
|
use async_trait::async_trait;
use prometheus::Registry;
use slog::{error, Logger};
use std::{collections::HashMap, sync::Arc};
use tokio::sync::RwLock;
use crate::{config::Config, error::IndicatorServiceError, metrics::Metrics};
use fdr_common::{
context::FdrServiceContext, exchange::Market, logging::default_logger, ticker::ticker_buffer::TickerBuffer,
};
use fdr_event::kafka::{consumer::KafkaConsumer, producer::KafkaProducer, KafkaContext};
use fdr_store::core::pool::{init_postgres_pool_from_config, Pool};
use technical_analysis::{period::Period, registry::IndicatorRegistry};
/// Shared service state: configuration, DB pool, Kafka endpoints, metrics,
/// per-market ticker buffers, and the indicator registry.
pub struct Context {
    pub config: Config,
    pub pool: Pool,
    pub kafka_consumer: KafkaConsumer,
    pub kafka_producer: KafkaProducer,
    pub logger: Logger,
    pub metrics: Metrics,
    // One rolling ticker buffer per configured market.
    pub tickers: HashMap<Market, Arc<RwLock<TickerBuffer>>>,
    pub indicator_registry: IndicatorRegistry,
}
#[async_trait]
impl FdrServiceContext<Config> for Context {
type Context = Context;
type Error = IndicatorServiceError;
async fn from_config(config: Config) -> Result<Self::Context, Self::Error> {
let logger = default_logger();
let kafka_consumer = KafkaConsumer::new(&config.kafka_consumer, KafkaContext::with_logger(logger.clone()))?;
let pool = init_postgres_pool_from_config(&config.postgres).await?;
let metrics = Metrics::new(config.metrics.prefix.as_str())?;
let kafka_producer = KafkaProducer::new(&config.kafka_producer, KafkaContext::with_logger(logger.clone()))
.map_err(|e| {
error!(logger, "Failed to Create Kafka Producer"; "error" => e.to_string());
e
})?;
let tickers = config
.indicator
.markets
.iter()
.map(|market| {
let buffer = TickerBuffer::new(Period::Month1.as_duration());
(*market, Arc::new(RwLock::new(buffer)))
})
.collect();
let indicator_registry = IndicatorRegistry::from(&config.indicator);
Ok(Self {
config,
tickers,
pool,
metrics,
kafka_consumer,
kafka_producer,
logger,
indicator_registry,
})
}
fn logger(&self) -> Logger {
self.logger.clone()
}
fn metrics_registry(&self) -> Arc<Registry> {
self.metrics.registry.clone()
}
}
| 0
|
strat_common
|
/mono/crates/strat_common/Cargo.toml
|
[package]
name = "strat-common"
version = "0.1.0"
edition = "2021"
# See more keys and their definitions at https://doc.rust-lang.org/cargo/reference/manifest.html
[dependencies]
fdr-common = { path = "../fdr_common", features = ["tracing"] }
fdr-event = { path = "../fdr_event" }
fdr-store = { path = "../fdr_store" }
price_guard = { path = "../price_guard" }
async-trait = { workspace = true }
chrono = { workspace = true }
clap = { workspace = true, features = ["derive", "env", "cargo"] }
derive_more = { workspace = true }
digest = { workspace = true }
futures-util = { workspace = true }
http = { workspace = true }
http-body = { workspace = true }
hmac = { workspace = true }
hyper = { workspace = true, features = ["full"] }
prometheus = { workspace = true }
regex = { workspace = true }
rust_decimal = { workspace = true, features = ["serde", "db-postgres"] }
rust_decimal_macros = { workspace = true }
serde = { workspace = true, features = ["derive"] }
serde_json = { workspace = true }
slog = { workspace = true, features = ["max_level_trace"] }
sha2 = { workspace = true }
sqlx = { workspace = true, features = ["runtime-tokio-native-tls", "postgres", "json", "migrate", "time", "uuid", "chrono"] }
thiserror = { workspace = true }
tokio = { workspace = true, features = ["full", "tracing"] }
toml = { workspace = true }
tower = { workspace = true, features = ["full"] }
tracing = { workspace = true, default-features = false, features = ["std", "attributes"] }
tracing-opentelemetry = { workspace = true, default-features = true }
url = { workspace = true }
uuid = { workspace = true, features = ["v4", "fast-rng", "serde"] }
arc-swap = { workspace = true }
| 0
|
src
|
/mono/crates/strat_common/src/lib.rs
|
pub mod crypto;
pub mod metrics;
pub mod price_strategies;
pub mod state_managers;
pub mod volume_strategies;
| 0
|
volume_strategies
|
/mono/crates/strat_common/src/volume_strategies/profit_weighting.rs
|
use rust_decimal::Decimal;
use fdr_common::crypto::orders::FdrOrderSide;
/// Returns the volume weight applied based on expected trade profitability.
///
/// Currently a stub that always returns `Decimal::ONE` (no weighting); the
/// original implementation is preserved in the commented block below.
pub fn calculate_profit_weight(
    _reference_price: &Decimal,
    _side: &FdrOrderSide,
    _average_cost: &Option<Decimal>,
    _ticker_price: &Decimal,
    _set_expected_profit: &Decimal,
    _scaling_factor: &Decimal,
) -> Decimal {
    // TODO: Weighting is turned off as it needs to be further investigated. Currently, it always
    // causes worse outcomes, so is turned off completely.
    Decimal::ONE
    // let diff = match side {
    //     FdrOrderSide::Sell => {
    //         // Reference price should be higher than the average cost
    //         reference_price - average_cost
    //     }
    //     FdrOrderSide::Buy => {
    //         // Reference price should lower than the average cost
    //         average_cost - reference_price
    //     }
    // };
    //
    // // If Diff is negative we definitely don't want to boost the volume
    // if diff < Decimal::ZERO || ticker_price <= &Decimal::ZERO {
    //     return Decimal::ONE;
    // }
    //
    // // Let's turn things to percentages
    // let diff = diff / ticker_price;
    // let diff_percentage = diff * dec!(100);
    //
    // // If the trade is not profitable enough, we should avoid larger volume sides
    // if &diff <= set_expected_profit {
    //     return Decimal::ONE;
    // }
    //
    // let weight = Decimal::ONE + (scaling_factor * diff_percentage);
    // weight
}
// #[cfg(test)]
// mod tests {
// use crate::volume_strategies::profit_weighting::calculate_profit_weight;
// use fdr_common::crypto::orders::FdrOrderSide;
// use rust_decimal_macros::dec;
//
// #[test]
// fn test_profit_weight_buy() {
// let reference_price = dec!(4.07238);
// let ticker_price = dec!(4.0845);
// let average_cost = dec!(4.09018);
// let set_expected_profit = dec!(0.0003);
// let scaling_factor = dec!(2);
//
// let res = calculate_profit_weight(
// &reference_price,
// &FdrOrderSide::Buy,
// &average_cost,
// &ticker_price,
// &set_expected_profit,
// &scaling_factor,
// )
// .round_dp(6);
// assert_eq!(res, dec!(1.759665));
// }
// #[test]
// fn test_profit_weight_sell() {
// let reference_price = dec!(4.0845);
// let ticker_price = dec!(4.07238);
// let average_buy = dec!(4.0667);
// let set_expected_profit = dec!(0.0003);
// let scaling_factor = dec!(2);
//
// let res = calculate_profit_weight(
// &reference_price,
// &FdrOrderSide::Sell,
// &average_buy,
// &ticker_price,
// &set_expected_profit,
// &scaling_factor,
// )
// .round_dp(6);
// assert_eq!(res, dec!(1.764194));
// }
//
// #[test]
// fn test_profit_weight_not_profitable_enough() {
// let reference_price = dec!(4.0845);
// let ticker_price = dec!(4.0844);
// let average_buy = dec!(4.0843);
// let set_expected_profit = dec!(0.0003);
// let scaling_factor = dec!(4);
//
// let res = calculate_profit_weight(
// &reference_price,
// &FdrOrderSide::Sell,
// &average_buy,
// &ticker_price,
// &set_expected_profit,
// &scaling_factor,
// )
// .round_dp(6);
// assert_eq!(res, dec!(1));
// }
//
// #[test]
// fn test_profit_weight_zero_ticker_price() {
// let reference_price = dec!(4.0845);
// let ticker_price = dec!(0);
// let average_buy = dec!(4.0843);
// let set_expected_profit = dec!(0.0003);
// let scaling_factor = dec!(4);
//
// let res = calculate_profit_weight(
// &reference_price,
// &FdrOrderSide::Sell,
// &average_buy,
// &ticker_price,
// &set_expected_profit,
// &scaling_factor,
// )
// .round_dp(6);
// assert_eq!(res, dec!(1));
// }
//
// #[test]
// fn test_profit_weight_negative_reference_price() {
// let reference_price = dec!(-1.0);
// let ticker_price = dec!(4.0844);
// let average_buy = dec!(4.0843);
// let set_expected_profit = dec!(0.0003);
// let scaling_factor = dec!(4);
//
// let res = calculate_profit_weight(
// &reference_price,
// &FdrOrderSide::Sell,
// &average_buy,
// &ticker_price,
// &set_expected_profit,
// &scaling_factor,
// )
// .round_dp(6);
// assert_eq!(res, dec!(1));
// }
//
// #[test]
// fn test_profit_weight_zero_reference_price() {
// let reference_price = dec!(0);
// let ticker_price = dec!(4.0844);
// let average_buy = dec!(4.0843);
// let set_expected_profit = dec!(0.0003);
// let scaling_factor = dec!(4);
//
// let res = calculate_profit_weight(
// &reference_price,
// &FdrOrderSide::Sell,
// &average_buy,
// &ticker_price,
// &set_expected_profit,
// &scaling_factor,
// )
// .round_dp(6);
// assert_eq!(res, dec!(1));
// }
// }
| 0
|
volume_strategies
|
/mono/crates/strat_common/src/volume_strategies/balance_weighting.rs
|
use fdr_common::crypto::orders::FdrOrderSide;
use rust_decimal::Decimal;
use rust_decimal_macros::dec;
const TARGET_BALANCE_PERCENTAGE: Decimal = dec!(0.5);
/// Computes a volume weight from the account's base/quote balance split.
///
/// # Arguments
///
/// - `reference_price`: price used to value the base balance in quote terms.
/// - `total_quote_balance`: total quote-asset balance.
/// - `total_base_balance`: total base-asset balance.
/// - `side`: order side; only sells are ever weighted.
/// - `scaling_factor`: scaling factor forwarded to the weight formula.
///
/// # Returns
///
/// The balance weight as a `Decimal`; `Decimal::ONE` means "no adjustment".
pub fn calculate_balance_weight(
    reference_price: &Decimal,
    total_quote_balance: &Decimal,
    total_base_balance: &Decimal,
    side: &FdrOrderSide,
    scaling_factor: &Decimal,
) -> Decimal {
    // Buys are never boosted by this strategy.
    if matches!(side, FdrOrderSide::Buy) {
        return Decimal::ONE;
    }
    let base_in_quote = reference_price * total_base_balance;
    let total_in_quote = base_in_quote + total_quote_balance;
    // Guard against a zero or negative denominator.
    if total_in_quote <= Decimal::ZERO {
        return Decimal::ONE;
    }
    // Sell weight is driven by the fraction of wealth held in the base asset.
    let base_fraction = base_in_quote / total_in_quote;
    get_weight(&base_fraction, scaling_factor)
}
// TODO: Weighting is turned off as it needs to be further investigated. Currently, it always
// causes worse outcomes, so is turned off completely.
/// Maps a base-balance fraction to a sell-side volume weight.
///
/// NOTE(review): `_scaling_factor` is ignored and a hard-coded factor of 2 is
/// used instead (see the TODOs).
fn get_weight(balance: &Decimal, _scaling_factor: &Decimal) -> Decimal {
    // TODO: Change this to a variable amount
    let scaling_factor = dec!(2);
    // v = d * (2(b-0.5)+1)
    // https://www.notion.so/Order-Size-ab40059cb3574e0da62717e96d7df94c
    // Only boost when the base share exceeds the 50% target; never shrink volume.
    if balance > &TARGET_BALANCE_PERCENTAGE {
        Decimal::ONE + scaling_factor * (balance - TARGET_BALANCE_PERCENTAGE)
    } else {
        Decimal::ONE
    }
}
// #[cfg(test)]
// mod tests {
// use crate::volume_strategies::balance_weighting::calculate_balance_weight;
// use fdr_common::crypto::orders::FdrOrderSide;
// use rust_decimal::Decimal;
// use rust_decimal_macros::dec;
//
// #[test]
// fn test_balance_weight_zero_order_size() {
// let scaling_factor = dec!(8);
// let reference_price = dec!(2.0);
//
// let weight = calculate_balance_weight(
// &reference_price,
// &dec!(500),
// &dec!(10),
// &FdrOrderSide::Sell,
// &scaling_factor,
// )
// .round_dp(4);
// assert_eq!(weight, Decimal::ONE);
// }
//
// #[test]
// fn test_balance_weight_zero_reference_price() {
// let scaling_factor = dec!(8);
// let reference_price = dec!(0);
//
// let weight = calculate_balance_weight(
// &reference_price,
// &dec!(500),
// &dec!(10),
// &FdrOrderSide::Sell,
// &scaling_factor,
// )
// .round_dp(4);
// assert_eq!(weight, Decimal::ONE);
// }
//
// #[test]
// fn test_balance_weight_zero_scaling_factor() {
// let scaling_factor = dec!(0);
// let reference_price = dec!(2.0);
//
// let weight = calculate_balance_weight(
// &reference_price,
// &dec!(1000),
// &dec!(50),
// &FdrOrderSide::Sell,
// &scaling_factor,
// )
// .round_dp(4);
// assert_eq!(weight, Decimal::ONE);
// }
//
// #[test]
// fn test_equal_balances() {
// let scaling_factor = dec!(8);
// let reference_price = dec!(2.0);
//
// let weight = calculate_balance_weight(
// &reference_price,
// &dec!(1000),
// &dec!(500),
// &FdrOrderSide::Sell,
// &scaling_factor,
// )
// .round_dp(4);
// assert_eq!(weight, Decimal::ONE);
// }
//
// #[test]
// fn test_heavy_quote_buy() {
// let scaling_factor = dec!(8);
// let reference_price = dec!(2.0);
// let weight = calculate_balance_weight(
// &reference_price,
// &dec!(10000),
// &dec!(50),
// &FdrOrderSide::Buy,
// &scaling_factor,
// )
// .round_dp(4);
// assert_eq!(weight, dec!(1.9802));
// }
//
// #[test]
// fn test_heavy_quote_sell() {
// let scaling_factor = dec!(8);
// let reference_price = dec!(2.0);
// let weight = calculate_balance_weight(
// &reference_price,
// &dec!(10000),
// &dec!(50),
// &FdrOrderSide::Sell,
// &scaling_factor,
// )
// .round_dp(4);
// assert_eq!(weight, Decimal::ONE);
// }
//
// #[test]
// fn test_heavy_base_buy() {
// let scaling_factor = dec!(8);
// let reference_price = dec!(2.0);
// let weight = calculate_balance_weight(
// &reference_price,
// &dec!(100),
// &dec!(500),
// &FdrOrderSide::Buy,
// &scaling_factor,
// )
// .round_dp(4);
// assert_eq!(weight, Decimal::ONE);
// }
//
// #[test]
// fn test_heavy_base_sell() {
// let scaling_factor = dec!(8);
// let reference_price = dec!(2.0);
// let weight = calculate_balance_weight(
// &reference_price,
// &dec!(100),
// &dec!(500),
// &FdrOrderSide::Sell,
// &scaling_factor,
// )
// .round_dp(4);
// assert_eq!(weight, dec!(1.8182));
// }
// }
| 0
|
volume_strategies
|
/mono/crates/strat_common/src/volume_strategies/mod.rs
|
pub mod balance_weighting;
pub mod profit_weighting;
| 0
|
metrics
|
/mono/crates/strat_common/src/metrics/balances.rs
|
use chrono::{DateTime, Utc};
use prometheus::{opts, GaugeVec, Registry};
use rust_decimal::{prelude::ToPrimitive, Decimal};
use uuid::Uuid;
use fdr_common::{crypto::assets::CurrencyPair, error::FdrError, exchange::Exchange};
use fdr_common::crypto::asset_balance::AssetBalance;
/// Quote and base balances passed to the balance gauge exporter.
#[derive(Debug)]
pub struct MetricsBalances {
    pub quote_balance: AssetBalance,
    pub base_balance: AssetBalance,
}
/// Gauge family reporting per-asset balances for a strategy run.
pub struct BalanceMetrics {
    balance: GaugeVec,
}
impl BalanceMetrics {
    /// Creates the `balance` gauge family and registers it on `registry`.
    ///
    /// NOTE(review): the "Time" label is capitalized unlike the other labels, and
    /// using a timestamp as a label value creates a new series per tick
    /// (unbounded cardinality) — confirm this is intentional.
    pub fn new(registry: Registry) -> Result<Self, FdrError> {
        let balance = GaugeVec::new(
            opts!("balance", "Balance"),
            &["strat_run_id", "exchange", "asset", "Time"],
        )?;
        registry.register(Box::new(balance.clone()))?;
        Ok(Self { balance })
    }
    /// Exports the total quote and base balances of `pair` as two gauge samples.
    pub fn update_balance_metrics(
        &self,
        strat_run_id: Uuid,
        exchange: &Exchange,
        pair: &CurrencyPair,
        balances: MetricsBalances,
        ticker_time: DateTime<Utc>,
    ) {
        let exchange = exchange.to_string();
        let base = pair.base().to_string();
        let quote = pair.quote().to_string();
        self.update_balance_metric(
            strat_run_id,
            &exchange,
            &quote,
            balances.quote_balance.total(),
            ticker_time,
        );
        self.update_balance_metric(
            strat_run_id,
            &exchange,
            &base,
            balances.base_balance.total(),
            ticker_time,
        );
    }
    /// Sets one gauge sample; values that don't fit an f64 are silently skipped.
    fn update_balance_metric(
        &self,
        strat_run_id: Uuid,
        exchange: &str,
        asset: &str,
        value: Decimal,
        ticker_time: DateTime<Utc>,
    ) {
        if let Some(value) = value.to_f64() {
            self.balance
                .with_label_values(&[&strat_run_id.to_string(), exchange, asset, &ticker_time.to_string()])
                .set(value);
        };
    }
}
| 0
|
metrics
|
/mono/crates/strat_common/src/metrics/mod.rs
|
use fdr_common::error::FdrError;
use prometheus::Registry;
use std::sync::Arc;
use crate::metrics::{
average_cost::AverageCostMetrics, balances::BalanceMetrics, fake_orders::FakeOrderMetrics, tvwap::TvwapMetrics,
};
use self::arbitrage::ArbitrageMetrics;
pub mod arbitrage;
pub mod average_cost;
pub mod balances;
pub mod fake_orders;
pub mod trades;
pub mod tvwap;
/// Bundle of every metric group a strategy exposes, plus the registry
/// they are all registered on.
pub struct StrategyMetrics {
    pub registry: Arc<Registry>,
    pub balances: BalanceMetrics,
    pub fake_orders: FakeOrderMetrics,
    pub tvwap: TvwapMetrics,
    pub average_cost: AverageCostMetrics,
    pub arbitrage: ArbitrageMetrics,
}
impl StrategyMetrics {
    /// Builds a fresh registry (metric names prefixed with `prefix`) and
    /// wires every per-strategy metric group into it.
    pub fn new(prefix: &str) -> Result<Self, FdrError> {
        let registry = Registry::new_custom(Some(prefix.to_string()), None)?;
        // Construct groups in the same order as before so registration
        // against the registry is unchanged.
        let arbitrage = ArbitrageMetrics::new(registry.clone())?;
        let balances = BalanceMetrics::new(registry.clone())?;
        let fake_orders = FakeOrderMetrics::new(registry.clone())?;
        let tvwap = TvwapMetrics::new(registry.clone())?;
        let average_cost = AverageCostMetrics::new(registry.clone())?;
        Ok(Self {
            registry: Arc::new(registry),
            balances,
            fake_orders,
            tvwap,
            average_cost,
            arbitrage,
        })
    }
}
| 0
|
metrics
|
/mono/crates/strat_common/src/metrics/fake_orders.rs
|
use fdr_common::{crypto::orders::OrderEntry, error::FdrError};
use prometheus::{opts, GaugeVec, Registry};
use rust_decimal::Decimal;
/// A synthetic (not exchange-submitted) order produced by a strategy,
/// wrapped per originating strategy type.
#[derive(Clone, Debug, PartialEq)]
pub enum FakeOrder {
    MarketMakerOnTickerFakeOrder(MarketMakerOnTickerFakeOrder),
}
/// Fake order emitted by the market-maker-on-ticker strategy, together with
/// the coefficient that was in effect when it was produced.
#[derive(Clone, Debug, PartialEq)]
pub struct MarketMakerOnTickerFakeOrder {
    pub order: OrderEntry,
    pub coefficient: Decimal,
}
/// Prometheus gauges tracking fake orders produced on ticker updates.
pub struct FakeOrderMetrics {
    pub on_ticker: GaugeVec,
}
impl FakeOrderMetrics {
    /// Creates the on-ticker fake-order gauge and registers it on `registry`.
    pub fn new(registry: Registry) -> Result<Self, FdrError> {
        let labels = ["exchange", "pair", "side", "strategy", "coefficient"];
        let on_ticker = GaugeVec::new(
            opts!("fake_order_market_maker_on_ticker", "On ticker fake orders"),
            &labels,
        )?;
        registry.register(Box::new(on_ticker.clone()))?;
        Ok(Self { on_ticker })
    }
}
| 0
|
metrics
|
/mono/crates/strat_common/src/metrics/arbitrage.rs
|
use std::fmt::{Display, Formatter, Result as FmtResult};
use prometheus::{opts, CounterVec, Registry};
use uuid::Uuid;
use fdr_common::{crypto::assets::CurrencyPair, error::FdrError, exchange::Exchange};
/// Kind of arbitrage opportunity: two-leg (binary) or three-leg (triangular).
pub enum ArbitrageType {
    BINARY,
    TRIANGULAR,
}
impl Display for ArbitrageType {
    /// Renders the variant name exactly as declared (used as a metric label).
    fn fmt(&self, formatter: &mut Formatter<'_>) -> FmtResult {
        let label = match self {
            ArbitrageType::BINARY => "BINARY",
            ArbitrageType::TRIANGULAR => "TRIANGULAR",
        };
        write!(formatter, "{}", label)
    }
}
/// Counters following the lifecycle of an arbitrage scan: update checked,
/// pre-fee opportunity, post-fee opportunity, opportunity supplanted.
pub struct ArbitrageMetrics {
    opportunity_check: CounterVec,
    opportunity_pre_fee: CounterVec,
    opportunity_post_fee: CounterVec,
    opportunity_supplant: CounterVec,
}
impl ArbitrageMetrics {
    /// Creates the four arbitrage counters and registers them on `registry`.
    ///
    /// # Errors
    /// Fails if any counter cannot be created or is already registered.
    pub fn new(registry: Registry) -> Result<Self, FdrError> {
        let opportunity_check = CounterVec::new(
            opts!("opportunity_check_total", "Update checked for arbitrage opportunity"),
            &["strat_run_id", "exchange", "pair"],
        )?;
        let opportunity_pre_fee = CounterVec::new(
            opts!(
                "opportunity_pre_fee_total",
                "Arb opportunity found before fees considered"
            ),
            &["strat_run_id", "pair", "type"],
        )?;
        let opportunity_post_fee = CounterVec::new(
            opts!(
                "opportunity_post_fee_total",
                "Arb opportunity found with fees considered"
            ),
            &["strat_run_id", "pair", "type"],
        )?;
        let opportunity_supplant = CounterVec::new(
            opts!(
                "opportunity_supplant_total",
                "Arb opportunity with greater profitability found"
            ),
            &["strat_run_id", "pair", "type"],
        )?;
        registry.register(Box::new(opportunity_check.clone()))?;
        registry.register(Box::new(opportunity_pre_fee.clone()))?;
        registry.register(Box::new(opportunity_post_fee.clone()))?;
        registry.register(Box::new(opportunity_supplant.clone()))?;
        Ok(Self {
            opportunity_check,
            opportunity_pre_fee,
            opportunity_post_fee,
            opportunity_supplant,
        })
    }
    /// Counts one arbitrage-opportunity scan triggered by a market update.
    pub fn increment_arb_opportunity_check_count(&self, strat_run_id: Uuid, exchange: &Exchange, pair: &CurrencyPair) {
        self.opportunity_check
            .with_label_values(&[
                &strat_run_id.to_string(),
                exchange.to_string().as_str(),
                pair.to_string().as_str(),
            ])
            .inc();
    }
    // An opportunity has been found that is profitable before fees are applied.
    // NOTE(review): `len` is recorded under the "type" label — presumably the
    // leg count (2 = binary, 3 = triangular, cf. ArbitrageType); confirm
    // against the callers.
    pub fn increment_pre_fee_opportunity(&self, strat_run_id: Uuid, pair: &CurrencyPair, len: usize) {
        self.opportunity_pre_fee
            .with_label_values(&[
                &strat_run_id.to_string(),
                pair.to_string().as_str(),
                len.to_string().as_str(),
            ])
            .inc();
    }
    // An opportunity has been found that is profitable after fees are applied.
    // `len` is recorded under the "type" label, as above.
    pub fn increment_post_fee_opportunity(&self, strat_run_id: Uuid, pair: &CurrencyPair, len: usize) {
        self.opportunity_post_fee
            .with_label_values(&[
                &strat_run_id.to_string(),
                pair.to_string().as_str(),
                len.to_string().as_str(),
            ])
            .inc();
    }
    // An opportunity has been found that is more profitable than a previous
    // one from the same update. `len` is recorded under the "type" label, as above.
    pub fn increment_opportunity_supplant(&self, strat_run_id: Uuid, pair: &CurrencyPair, len: usize) {
        self.opportunity_supplant
            .with_label_values(&[
                &strat_run_id.to_string(),
                pair.to_string().as_str(),
                len.to_string().as_str(),
            ])
            .inc();
    }
}
| 0
|
metrics
|
/mono/crates/strat_common/src/metrics/average_cost.rs
|
use prometheus::{opts, GaugeVec, Registry};
use rust_decimal::{prelude::ToPrimitive, Decimal};
use fdr_common::{crypto::assets::CurrencyPair, error::FdrError, exchange::Exchange};
/// Prometheus gauge tracking the running average cost per market.
pub struct AverageCostMetrics {
    on_ticker: GaugeVec,
}
impl AverageCostMetrics {
    /// Creates the on-ticker average-cost gauge and registers it on `registry`.
    pub fn new(registry: Registry) -> Result<Self, FdrError> {
        let on_ticker = GaugeVec::new(
            opts!("average_cost_market_maker_on_ticker", "On ticker average cost"),
            &["exchange", "pair"],
        )?;
        registry.register(Box::new(on_ticker.clone()))?;
        Ok(Self { on_ticker })
    }
    /// Publishes `value` for the (`exchange`, `pair`) series; values that do
    /// not fit in an `f64` are dropped.
    pub fn update_average_cost_metric(&self, pair: &CurrencyPair, exchange: &Exchange, value: Decimal) {
        if let Some(gauge_value) = value.to_f64() {
            let exchange_label = exchange.to_string();
            let pair_label = pair.to_string();
            self.on_ticker
                .with_label_values(&[&exchange_label, &pair_label])
                .set(gauge_value);
        }
    }
}
| 0
|
metrics
|
/mono/crates/strat_common/src/metrics/trades.rs
|
use chrono::{DateTime, Utc};
use fdr_common::crypto::{
assets::{Asset, CurrencyPair},
orders::FdrOrderSide,
};
use fdr_event::events::FdrOrderTrade;
use fdr_store::metrics::models::TradeMetricsDto;
use rust_decimal::Decimal;
use serde::{Deserialize, Serialize};
use uuid::Uuid;
/// Running per-run trading metrics, folded trade-by-trade via `calculate`.
#[derive(Clone, Debug, Serialize, Deserialize, sqlx::FromRow)]
pub struct TradeMetrics {
    pub run_id: Uuid,
    /// Net proceeds minus cost basis minus fees.
    pub profit: Decimal,
    /// Volume-weighted average buy price.
    pub average_cost: Decimal,
    /// `average_sell - average_cost`, only set once something has sold.
    pub vwap_delta: Decimal,
    /// Volume-weighted average sell price.
    pub average_sell: Decimal,
    pub volume_sold: Decimal,
    pub volume_bought: Decimal,
    /// Sold volume valued at the average buy price.
    pub cost_basis: Decimal,
    pub net_proceeds: Decimal,
    pub buy_cost: Decimal,
    pub fees: Decimal,
    /// Timestamp of the most recently folded trade.
    pub time: DateTime<Utc>,
    pub base: Asset,
    pub quote: Asset,
    pub time_volume_weighted_average_cost: Decimal,
    pub time_volume_weighted_average_sell: Decimal,
}
impl TradeMetrics {
    /// Creates an all-zero metrics record for `pair`, stamped with `time`.
    pub fn new(run_id: Uuid, time: DateTime<Utc>, pair: CurrencyPair) -> Self {
        let zero = Decimal::ZERO;
        Self {
            run_id,
            time,
            base: pair.base(),
            quote: pair.quote(),
            profit: zero,
            average_cost: zero,
            vwap_delta: zero,
            average_sell: zero,
            volume_sold: zero,
            volume_bought: zero,
            cost_basis: zero,
            net_proceeds: zero,
            buy_cost: zero,
            fees: zero,
            // Decimal's default is zero as well, so these match the originals.
            time_volume_weighted_average_cost: zero,
            time_volume_weighted_average_sell: zero,
        }
    }
}
impl TradeMetrics {
    /// Folds a single trade into the running metrics.
    ///
    /// Order matters: the side-specific totals are updated first, then the
    /// derived figures (cost basis, vwap delta, profit) are recomputed from
    /// the new totals.
    pub fn calculate(&mut self, trade: &FdrOrderTrade) {
        match trade.side {
            FdrOrderSide::Sell => self.calculate_sell_metrics(trade),
            FdrOrderSide::Buy => self.calculate_buy_metrics(trade),
        }
        self.time = trade.timestamp;
        self.fees += trade.fee;
        // Cost basis values the sold volume at the running average buy price.
        self.cost_basis = self.volume_sold * self.average_cost;
        // vwap delta is only meaningful once something has actually sold.
        if self.average_sell > Decimal::ZERO {
            self.vwap_delta = self.average_sell - self.average_cost;
        }
        self.profit = self.net_proceeds - self.cost_basis - self.fees;
    }
    /// Accumulates sell volume/proceeds and refreshes the average sell price.
    fn calculate_sell_metrics(&mut self, trade: &FdrOrderTrade) {
        self.volume_sold += trade.volume;
        self.net_proceeds += trade.cost;
        // Guard avoids division by zero on zero-volume trades.
        if self.volume_sold > Decimal::ZERO {
            self.average_sell = self.net_proceeds / self.volume_sold;
        }
    }
    /// Accumulates buy volume/cost and refreshes the average buy price.
    fn calculate_buy_metrics(&mut self, trade: &FdrOrderTrade) {
        self.volume_bought += trade.volume;
        self.buy_cost += trade.cost;
        // Guard avoids division by zero on zero-volume trades.
        if self.volume_bought > Decimal::ZERO {
            self.average_cost = self.buy_cost / self.volume_bought;
        }
    }
}
impl From<TradeMetricsDto> for TradeMetrics {
    /// Mechanical field-for-field conversion from the store DTO; must stay in
    /// lockstep with the reverse impl below.
    fn from(value: TradeMetricsDto) -> Self {
        Self {
            run_id: value.run_id,
            profit: value.profit,
            average_cost: value.average_cost,
            vwap_delta: value.vwap_delta,
            average_sell: value.average_sell,
            volume_sold: value.volume_sold,
            volume_bought: value.volume_bought,
            cost_basis: value.cost_basis,
            net_proceeds: value.net_proceeds,
            buy_cost: value.buy_cost,
            fees: value.fees,
            time: value.time,
            base: value.base,
            quote: value.quote,
            time_volume_weighted_average_cost: value.time_volume_weighted_average_cost,
            time_volume_weighted_average_sell: value.time_volume_weighted_average_sell,
        }
    }
}
impl From<TradeMetrics> for TradeMetricsDto {
    /// Mechanical field-for-field conversion to the store DTO; mirror of the
    /// impl above.
    fn from(value: TradeMetrics) -> Self {
        Self {
            run_id: value.run_id,
            profit: value.profit,
            average_cost: value.average_cost,
            vwap_delta: value.vwap_delta,
            average_sell: value.average_sell,
            volume_sold: value.volume_sold,
            volume_bought: value.volume_bought,
            cost_basis: value.cost_basis,
            net_proceeds: value.net_proceeds,
            buy_cost: value.buy_cost,
            fees: value.fees,
            time: value.time,
            base: value.base,
            quote: value.quote,
            time_volume_weighted_average_cost: value.time_volume_weighted_average_cost,
            time_volume_weighted_average_sell: value.time_volume_weighted_average_sell,
        }
    }
}
#[cfg(test)]
mod tests {
    use super::*;
    use fdr_common::{
        crypto::{
            assets::{Asset, CurrencyPair},
            orders::FdrOrderSide,
        },
        exchange::{Exchange, Market},
    };
    use fdr_event::events::FdrOrderTrade;
    use rust_decimal_macros::dec;
    /// Builds a trade fixture with `cost` derived as `price * volume`.
    fn get_trade(price: Decimal, side: FdrOrderSide, volume: Decimal) -> FdrOrderTrade {
        let cost = price * volume;
        FdrOrderTrade {
            order_id: Uuid::new_v4(),
            trade_id: Uuid::new_v4(),
            market: Market {
                exchange: Exchange::Kraken,
                pair: CurrencyPair::new(Asset::Dot, Asset::Usd),
            },
            side,
            price,
            volume,
            cost,
            fee: Default::default(),
            timestamp: Utc::now(),
            extra_fields: Default::default(),
        }
    }
    struct TestTrade {
        pub price: Decimal,
        pub side: FdrOrderSide,
        pub volume: Decimal,
    }
    fn get_test_trades() -> Vec<TestTrade> {
        vec![
            // First Buy
            TestTrade {
                price: dec!(5),
                side: FdrOrderSide::Buy,
                volume: dec!(10),
            },
            // Second Buy
            TestTrade {
                price: dec!(5.10),
                side: FdrOrderSide::Buy,
                volume: dec!(10),
            },
            // First Sell
            TestTrade {
                price: dec!(5.12),
                side: FdrOrderSide::Sell,
                volume: dec!(5),
            },
            TestTrade {
                price: dec!(5.10),
                side: FdrOrderSide::Buy,
                volume: dec!(100),
            },
            TestTrade {
                price: dec!(5.11),
                side: FdrOrderSide::Sell,
                volume: dec!(100),
            },
        ]
    }
    #[test]
    fn calculate_updates_based_on_buy_trade() {
        let trade = get_trade(dec!(1000), FdrOrderSide::Buy, dec!(2));
        let mut metrics = TradeMetrics::new(Uuid::new_v4(), Utc::now(), trade.market.pair);
        metrics.calculate(&trade);
        assert_eq!(metrics.average_cost, dec!(1000));
        assert_eq!(metrics.volume_bought, dec!(2));
        // Nothing sold yet, so the vwap delta stays untouched.
        assert_eq!(metrics.vwap_delta, dec!(0));
    }
    #[test]
    fn calculate_updates_based_on_sell_trade() {
        let trade = get_trade(dec!(1500), FdrOrderSide::Sell, dec!(1));
        let mut metrics = TradeMetrics::new(Uuid::new_v4(), Utc::now(), trade.market.pair);
        metrics.calculate(&trade);
        assert_eq!(metrics.average_sell, dec!(1500));
        assert_eq!(metrics.volume_sold, dec!(1));
    }
    #[test]
    fn calculate_falls_back_to_zero_to_prevent_division_by_zero() {
        let trade = get_trade(dec!(0), FdrOrderSide::Buy, dec!(0));
        let mut metrics = TradeMetrics::new(Uuid::new_v4(), Utc::now(), trade.market.pair);
        metrics.calculate(&trade);
        assert_eq!(metrics.volume_bought, Decimal::ZERO);
        assert_eq!(metrics.average_cost, Decimal::ZERO);
    }
    #[test]
    fn trade_series_meets_expected_outcome() {
        let mut metrics = TradeMetrics::new(Uuid::new_v4(), Utc::now(), CurrencyPair::new(Asset::Dot, Asset::Usd));
        for test_trade in get_test_trades() {
            let trade = get_trade(test_trade.price, test_trade.side, test_trade.volume);
            metrics.calculate(&trade);
        }
        // Previously this test asserted nothing; pin the aggregate outcome.
        // Totals are exact decimal additions:
        //   buys: 10@5 + 10@5.10 + 100@5.10, sells: 5@5.12 + 100@5.11
        assert_eq!(metrics.volume_bought, dec!(120));
        assert_eq!(metrics.volume_sold, dec!(105));
        assert_eq!(metrics.buy_cost, dec!(611));
        assert_eq!(metrics.net_proceeds, dec!(536.6));
        // Averages involve a non-terminating division, so compare rounded.
        assert_eq!(metrics.average_cost.round_dp(4), dec!(5.0917));
        assert_eq!(metrics.average_sell.round_dp(4), dec!(5.1105));
        assert_eq!(metrics.profit.round_dp(3), dec!(1.975));
    }
}
| 0
|
metrics
|
/mono/crates/strat_common/src/metrics/tvwap.rs
|
use fdr_common::{
crypto::{assets::CurrencyPair, orders::FdrOrderSide},
error::FdrError,
exchange::Exchange,
};
use prometheus::{opts, GaugeVec, Registry};
use rust_decimal::{prelude::ToPrimitive, Decimal};
/// Prometheus gauge tracking the time-volume-weighted average price per
/// market and side.
pub struct TvwapMetrics {
    on_ticker: GaugeVec,
}
impl TvwapMetrics {
    /// Creates the on-ticker tvwap gauge and registers it on `registry`.
    pub fn new(registry: Registry) -> Result<Self, FdrError> {
        let on_ticker = GaugeVec::new(
            opts!("tvwap_market_maker_on_ticker", "On ticker tvwap"),
            &["exchange", "pair", "side"],
        )?;
        registry.register(Box::new(on_ticker.clone()))?;
        Ok(Self { on_ticker })
    }
    /// Publishes `value` for (`exchange`, `pair`, `side`); values that do not
    /// fit in an `f64` are dropped.
    pub fn update_tvwap_metric(&self, pair: &CurrencyPair, exchange: &Exchange, value: Decimal, side: FdrOrderSide) {
        if let Some(gauge_value) = value.to_f64() {
            let exchange_label = exchange.to_string();
            let pair_label = pair.to_string();
            let side_label = side.to_string();
            self.on_ticker
                .with_label_values(&[&exchange_label, &pair_label, &side_label])
                .set(gauge_value);
        }
    }
}
| 0
|
crypto
|
/mono/crates/strat_common/src/crypto/mod.rs
|
pub mod market;
| 0
|
market
|
/mono/crates/strat_common/src/crypto/market/mod.rs
|
pub mod balance;
pub mod ticker_delta;
use chrono::{DateTime, Utc};
use fdr_common::{
crypto::{assets::CurrencyPair, orders::OrderEntry, pairs::TradingPair},
exchange::{Exchange, Market},
};
use std::{cmp::Ordering, f64::consts::E};
use crate::{
crypto::market::{
balance::MarketBalance,
ticker_delta::{MovingTickerDeltaAverage, TickerDeltaCoefficient, TickerDeltaConfig},
},
state_managers::trade_volume_manager::Fees,
};
use fdr_common::ticker::FdrTickerUpdate;
use rust_decimal::Decimal;
use serde::{Deserialize, Serialize};
/// Static configuration for a single market.
#[derive(Debug, Default, Clone, PartialEq, Serialize, Deserialize)]
pub struct MarketConfig {
    /// Used for dynamically adjusting the price placement
    pub ticker_delta: TickerDeltaConfig,
    /// Configs related to the Volume weighting
    pub volume_weights: VolumeWeightConfig,
}
/// Scalars applied by the volume-weighting strategies.
#[derive(Clone, Default, Debug, PartialEq, Serialize, Deserialize)]
pub struct VolumeWeightConfig {
    /// Scalar applied to the profit-based volume weight.
    pub profit_weight_scalar: Decimal,
    /// Scalar applied to the balance-based volume weight.
    pub balance_weight_scalar: Decimal,
}
/// Mutable runtime state tracked per market (one pair on one exchange).
#[derive(Clone, Debug, PartialEq)]
pub struct MarketState {
    pub config: MarketConfig,
    /// The status of the market, Online, Offline, Volatile
    pub status: MarketStatus,
    /// The Pair this market is trading
    pub pair: CurrencyPair,
    /// The last ticker price the market has acted upon, whether cancelling or creating an order, both count the same.
    pub last_hit: Option<Decimal>,
    /// The last ticker the market pair has received
    pub last_ticker: Option<FdrTickerUpdate>,
    /// The fee values for this markets exchange account
    pub fees: Fees,
    /// The time weighted average of the ticker deltas, used for dynamic price placement
    pub moving_ticker_delta_average: MovingTickerDeltaAverage,
    /// The exchange specific data for the pair, things like decimal places, min order size, etc.
    pub trading_pair: TradingPair,
    /// The exchange the market is running on
    pub exchange: Exchange,
    /// The balance this market is allotted, NOTE: this could be only a percentage of the total balance on the account.
    pub market_balance: MarketBalance,
}
/// A batch of requests a strategy emits for one pair in one pass.
#[derive(Clone, Debug, PartialEq)]
pub struct UpdateRequests {
    pub pair: CurrencyPair,
    pub requests: Vec<UpdateRequest>,
}
/// The Requests a Strategy can make to a StrategyManager.
/// The `Ord` impl below sorts these so that cancels run before new orders,
/// which run before last-hit updates.
#[derive(Clone, Debug, Eq, PartialEq)]
pub enum UpdateRequest {
    /// Cancel an order
    Cancel(OrderEntry),
    /// Place an order
    Order(OrderEntry),
    /// Update the last_hit value on the given market
    UpdateLastHit(Market, Option<Decimal>),
}
impl PartialOrd for UpdateRequest {
    /// A total order exists (see `Ord`), so this always returns `Some`.
    fn partial_cmp(&self, other: &Self) -> Option<Ordering> {
        Some(self.cmp(other))
    }
}
impl Ord for UpdateRequest {
    /// Orders requests by processing priority: cancels first, then order
    /// placements, then last-hit updates. Requests of the same variant
    /// compare equal, exactly as the previous pairwise match did.
    fn cmp(&self, other: &Self) -> Ordering {
        // Map each variant to its priority rank and compare the ranks.
        fn rank(request: &UpdateRequest) -> u8 {
            match request {
                UpdateRequest::Cancel(_) => 0,
                UpdateRequest::Order(_) => 1,
                UpdateRequest::UpdateLastHit(_, _) => 2,
            }
        }
        rank(self).cmp(&rank(other))
    }
}
impl MarketState {
    /// Builds the initial state for a market: status defaults to `Online`,
    /// no ticker has been seen yet, and the ticker-delta coefficient starts
    /// at a neutral value of 1.
    pub fn new(
        config: MarketConfig,
        market: Market,
        trading_pair: &TradingPair,
        market_balance: MarketBalance,
    ) -> Self {
        // Derive the moving average before moving `config` into the struct —
        // this removes the `config.clone()` the previous version needed.
        let moving_ticker_delta_average = MovingTickerDeltaAverage::new(
            config.ticker_delta.decay_factor,
            TickerDeltaCoefficient {
                coefficient: Decimal::ONE,
                time_window: config.ticker_delta.coefficient.time_window,
                increase_by: config.ticker_delta.coefficient.increase_by,
                highest_coefficient: Decimal::ONE,
            },
        );
        MarketState {
            config,
            status: Default::default(),
            pair: market.pair,
            last_hit: None,
            last_ticker: None,
            fees: Default::default(),
            moving_ticker_delta_average,
            trading_pair: *trading_pair,
            exchange: market.exchange,
            market_balance,
        }
    }
}
/// The status of the given market (defaults to `Online`).
#[derive(Default, Debug, Clone, Copy, PartialEq)]
pub enum MarketStatus {
    #[default]
    /// The market is online and functioning normally
    Online,
    /// The market is completely off and will not trade
    Offline,
    /// The market is considered to be in a volatile state, NOTE: Trading behaviour may differ during this period.
    Volatile,
}
/// This method calculates an exponentially decaying time weight
pub fn get_time_weight(updated_at: &DateTime<Utc>, ticker_time: &DateTime<Utc>, decay_factor: f64) -> f64 {
let time_difference_seconds = (*ticker_time - updated_at).num_seconds() as f64;
let decay_exponent = -decay_factor * time_difference_seconds;
E.powf(decay_exponent)
}
| 0
|
market
|
/mono/crates/strat_common/src/crypto/market/ticker_delta.rs
|
use crate::crypto::market::get_time_weight;
use chrono::{DateTime, Utc};
use rust_decimal::{prelude::FromPrimitive, Decimal};
use serde::{Deserialize, Serialize};
use std::{collections::VecDeque, ops::Sub};
/// Time-decayed rolling window of ticker-price deltas plus the volatility
/// coefficient derived from ticker arrival frequency.
#[derive(Clone, Debug, PartialEq)]
pub struct MovingTickerDeltaAverage {
    /// The factor that the time weight falls
    decay_factor: f64,
    /// The list of ticker deltas
    deltas: VecDeque<TickerDelta>,
    /// The coefficient data
    coefficient: TickerDeltaCoefficient,
}
/// User-facing configuration for the ticker-delta coefficient.
#[derive(Debug, Default, Clone, PartialEq, Serialize, Deserialize)]
pub struct TickerDeltaCoefficientConfig {
    /// The time window (in seconds — compared against ticker inter-arrival
    /// seconds) to which we evaluate a naive concept of volatility
    pub time_window: i64,
    /// The amount the coefficient should be increased by if the threshold is hit.
    pub increase_by: Decimal,
}
/// Configuration for the moving ticker-delta average.
#[derive(Debug, Default, Clone, PartialEq, Serialize, Deserialize)]
pub struct TickerDeltaConfig {
    /// The factor that the time weight falls
    pub decay_factor: f64,
    /// The coefficient configuration
    pub coefficient: TickerDeltaCoefficientConfig,
    /// The time the ticker starts, used for the ticker delta average mostly for testing, if not set
    /// it will default to Instant::now()
    pub ticker_start_time: Option<DateTime<Utc>>,
}
/// A mechanism used to adjust price placement based on volatility.
/// The basics of it are as follows:
/// - Ticker comes in
/// - If ticker has come in within a certain time_window threshold
/// - The coefficient is increased by the increase_by amount, something like 0.1 is reasonable
/// - To create backpressure, the coefficient is decreased by
///   `(highest_coefficient - 1) / time_window * time_difference_in_secs`
///   (see `MovingTickerDeltaAverage::add_entry`), never dropping below 1.
#[derive(Clone, Debug, PartialEq)]
pub struct TickerDeltaCoefficient {
    /// The price placement coefficient, defaults to 1, used to spread the price out further if the market becomes volatile.
    pub coefficient: Decimal,
    /// The time window to which we evaluate a naive concept of volatility
    pub time_window: i64,
    /// The amount the coefficient should be increased by if the threshold is hit.
    pub increase_by: Decimal,
    /// The highest coefficient that has been seen within this time window.
    pub highest_coefficient: Decimal,
}
/// One observation in the delta window.
#[derive(Clone, Copy, Debug, PartialEq)]
pub struct TickerDelta {
    /// The delta between the last ticker price, and the current ticker price.
    pub delta: Decimal,
    /// When the ticker was received.
    pub timestamp: DateTime<Utc>,
}
impl Default for TickerDelta {
    /// Zero delta stamped with the current time (manual impl because
    /// `Utc::now()` cannot be derived).
    fn default() -> Self {
        Self {
            delta: Decimal::ZERO,
            timestamp: Utc::now(),
        }
    }
}
impl MovingTickerDeltaAverage {
    /// Creates an empty window with the given decay factor and coefficient state.
    pub fn new(decay_factor: f64, coefficient: TickerDeltaCoefficient) -> Self {
        Self {
            decay_factor,
            deltas: VecDeque::new(),
            coefficient,
        }
    }
    /// Records one ticker delta (stored as an absolute value), updates the
    /// volatility coefficient based on how quickly the ticker arrived, and
    /// prunes entries that have aged out of the time window.
    pub fn add_entry(&mut self, delta: Decimal, last_ticker_time: DateTime<Utc>, new_ticker_time: DateTime<Utc>) {
        let delta = TickerDelta {
            delta: delta.abs(),
            timestamp: new_ticker_time,
        };
        self.deltas.push_back(delta);
        // Reset the high-water mark whenever the coefficient is back at neutral.
        if self.coefficient.coefficient == Decimal::ONE {
            self.coefficient.highest_coefficient = Decimal::ONE;
        }
        self.coefficient.highest_coefficient = self.coefficient.highest_coefficient.max(self.coefficient.coefficient);
        // How quickly did the last ticker come in?
        let diff = new_ticker_time.signed_duration_since(last_ticker_time).num_seconds();
        if diff < self.coefficient.time_window {
            self.coefficient.coefficient += self.coefficient.increase_by;
        }
        let diff_decimal = Decimal::from_i64(diff).unwrap_or(Decimal::ZERO);
        let time_window_decimal = Decimal::from_i64(self.coefficient.time_window).unwrap_or(Decimal::ONE);
        // As the highest coefficient increases, the decrease_by amount should increase as well to create more back pressure.
        let decrease_by = ((self.coefficient.highest_coefficient - Decimal::ONE) / time_window_decimal) * diff_decimal;
        self.coefficient.coefficient = self.coefficient.coefficient.sub(decrease_by);
        // Clamp at the neutral coefficient; it never drops below 1.
        if self.coefficient.coefficient < Decimal::ONE {
            self.coefficient.coefficient = Decimal::ONE;
        }
        // Check and remove any values that are beyond significance.
        // NOTE(review): ageing is measured against `last_ticker_time`, not
        // `new_ticker_time` — confirm this one-tick lag is intended.
        while let Some(delta) = self.deltas.front() {
            let diff = (last_ticker_time - delta.timestamp).num_seconds();
            if diff < self.coefficient.time_window {
                break;
            }
            self.deltas.pop_front();
        }
    }
    /// Exponentially time-weighted mean of the stored deltas relative to
    /// `ticker_time`; returns zero for an empty window.
    pub fn get_time_weighted_average(&self, ticker_time: &DateTime<Utc>) -> Decimal {
        if self.deltas.is_empty() {
            return Decimal::ZERO;
        }
        let mut total_delta = Decimal::ZERO;
        let mut total_count = Decimal::ZERO;
        for delta in self.deltas.iter() {
            let time_weight =
                Decimal::from_f64_retain(get_time_weight(&delta.timestamp, ticker_time, self.decay_factor))
                    .unwrap_or(Decimal::ZERO);
            total_delta += delta.delta * time_weight;
            total_count += Decimal::ONE * time_weight;
        }
        // A zero numerator also covers the all-weights-zero case, so the
        // division below cannot divide by zero.
        if total_delta.is_zero() {
            Decimal::ZERO
        } else {
            total_delta / total_count
        }
    }
    /// Read-only access to the current coefficient state.
    pub fn get_coefficient(&self) -> &TickerDeltaCoefficient {
        &self.coefficient
    }
}
| 0
|
market
|
/mono/crates/strat_common/src/crypto/market/balance.rs
|
use fdr_common::crypto::asset_balance::AssetBalance;
/// The base- and quote-side balances allotted to one market.
#[derive(Clone, Default, Debug, PartialEq)]
pub struct MarketBalance {
    pub base: AssetBalance,
    pub quote: AssetBalance,
}
impl MarketBalance {
    /// Bundles the two per-side balances into a market balance.
    pub fn new(base: AssetBalance, quote: AssetBalance) -> Self {
        Self { base, quote }
    }
}
| 0
|
price_strategies
|
/mono/crates/strat_common/src/price_strategies/mod.rs
|
use fdr_common::crypto::orders::FdrOrderSide;
use price_guard::{error::PriceGuardResult, PriceGuard};
use rust_decimal::Decimal;
pub mod models;
/// Pricing pipeline implemented per strategy: raw price, then fee
/// adjustment, then a price-guard bound check.
#[async_trait::async_trait]
pub trait PriceStrategy: Send + Sync {
    /// Computes the raw placement price for one order level on one side.
    async fn get_price(
        &self,
        reference_price: &Decimal,
        desired_profit: &Decimal,
        order_level: &Decimal,
        side: &FdrOrderSide,
    ) -> Decimal;
    /// Adjusts `price` for trading fees on the given side.
    async fn apply_fees(&self, price: &Decimal, side: &FdrOrderSide, fees: &Decimal) -> Decimal;
    /// Validates/clamps `price` through `price_guard`; errors propagate as
    /// `PriceGuardResult`.
    async fn apply_price_guard(
        &self,
        price: &Decimal,
        desired_profit: &Decimal,
        order_level: &Decimal,
        side: &FdrOrderSide,
        price_guard: &PriceGuard,
    ) -> PriceGuardResult<Decimal>;
}
| 0
|
price_strategies
|
/mono/crates/strat_common/src/price_strategies/models.rs
|
use rust_decimal::Decimal;
use serde::{Deserialize, Serialize};
/// Running average-cost accumulator for a series of orders.
#[derive(Default, Debug, Clone, Copy, Eq, PartialEq, Hash, Deserialize, Serialize)]
pub struct AverageCost {
    pub total_cost: Decimal,
    pub num_orders: Decimal,
    pub total_volume: Decimal,
    pub avg_cost: Decimal,
}
| 0
|
state_managers
|
/mono/crates/strat_common/src/state_managers/order_manager.rs
|
use std::collections::HashMap;
use async_trait::async_trait;
use tokio::sync::RwLock;
use uuid::Uuid;
use fdr_common::crypto::{
assets::CurrencyPair,
orders::{FdrOrderSide, OrderEntry},
};
/// Concurrent registry of live orders keyed by order id, protected by a
/// tokio `RwLock` so reads can proceed in parallel.
#[derive(Debug, Default)]
pub struct OrderManager {
    orders: RwLock<HashMap<Uuid, OrderEntry>>,
}
/// Read-only queries over the tracked orders.
#[async_trait]
pub trait OrderManagerGetters {
    async fn get_orders(&self, pair: &CurrencyPair) -> Vec<OrderEntry>;
    async fn get_orders_by_side(&self, pair: &CurrencyPair, side: FdrOrderSide) -> Vec<OrderEntry>;
    async fn get_all_orders(&self) -> Vec<OrderEntry>;
    async fn is_empty(&self) -> bool;
    async fn get_order(&self, id: &Uuid) -> Option<OrderEntry>;
}
#[async_trait]
impl OrderManagerGetters for OrderManager {
    /// Orders belonging to `pair`, in arbitrary map order.
    async fn get_orders(&self, pair: &CurrencyPair) -> Vec<OrderEntry> {
        let guard = self.orders.read().await;
        guard
            .values()
            .filter(|entry| entry.market.pair == *pair)
            .copied()
            .collect()
    }
    /// Orders for `pair` restricted to one side of the book.
    async fn get_orders_by_side(&self, pair: &CurrencyPair, side: FdrOrderSide) -> Vec<OrderEntry> {
        let guard = self.orders.read().await;
        guard
            .values()
            .filter(|entry| entry.market.pair == *pair && entry.side == side)
            .copied()
            .collect()
    }
    /// Every tracked order across all pairs.
    async fn get_all_orders(&self) -> Vec<OrderEntry> {
        self.orders.read().await.values().copied().collect()
    }
    /// True when no orders are tracked.
    async fn is_empty(&self) -> bool {
        self.orders.read().await.is_empty()
    }
    /// Looks a single order up by id.
    async fn get_order(&self, id: &Uuid) -> Option<OrderEntry> {
        self.orders.read().await.get(id).copied()
    }
}
impl OrderManager {
    /// Creates an empty manager.
    pub fn new() -> Self {
        OrderManager {
            orders: RwLock::new(HashMap::new()),
        }
    }
    /// Drops every tracked order.
    ///
    /// Relaxed from `&mut self` to `&self`: the `RwLock` already provides the
    /// needed interior mutability, matching every other mutating method here.
    /// The relaxation is backward-compatible for existing callers.
    pub async fn clear(&self) {
        self.orders.write().await.clear();
    }
    /// Inserts `order`, returning any previous entry with the same id.
    pub async fn add_order(&self, order: OrderEntry) -> Option<OrderEntry> {
        self.orders.write().await.insert(order.id, order)
    }
    /// Removes and returns the order with `id`, if tracked.
    pub async fn remove_order(&self, id: &Uuid) -> Option<OrderEntry> {
        self.orders.write().await.remove(id)
    }
    /// Removes every order belonging to `pair`.
    pub async fn remove_pair_orders(&self, pair: &CurrencyPair) {
        self.orders.write().await.retain(|_, order| order.market.pair != *pair)
    }
    /// Replaces a tracked order in place; no-op when the id is unknown.
    pub async fn update_order(&self, updated_order: OrderEntry) {
        // Delegate to the async variant — identical semantics when the id
        // on the updated order is the one it is tracked under.
        self.update_async_order(updated_order, updated_order.id).await
    }
    /// Used to update an order that has been placed asynchronously where the id is not known when requested.
    pub async fn update_async_order(&self, updated_order: OrderEntry, original_id: Uuid) {
        if let Some(order) = self.orders.write().await.get_mut(&original_id) {
            *order = updated_order;
        }
    }
}
#[cfg(test)]
mod tests {
    use super::*;
    use fdr_common::{
        crypto::{
            assets::Asset,
            orders::{FdrOrderSide, FdrOrderStatus, FdrOrderType},
        },
        exchange::{Exchange, Market},
        TradingStrategy,
    };
    /// Builds a default BTC/USD sell-order fixture (id left at default; tests
    /// assign their own).
    fn get_order() -> OrderEntry {
        OrderEntry {
            id: Default::default(),
            market: Market {
                pair: CurrencyPair::new(Asset::Btc, Asset::Usd),
                exchange: Exchange::Kraken,
            },
            side: FdrOrderSide::Sell,
            price: Default::default(),
            volume: Default::default(),
            volume_remaining: Default::default(),
            order_status: FdrOrderStatus::New,
            strategy: TradingStrategy::MarketMakerOnTicker,
            type_: FdrOrderType::LimitMakerOnly,
            strat_run_id: None,
        }
    }
    // Updating a tracked order replaces it in place.
    #[tokio::test]
    async fn test_update_order() {
        let order_manager = OrderManager::new();
        let mut order = get_order();
        let id = Uuid::new_v4();
        order.id = id;
        order_manager.add_order(order).await;
        let mut order = order_manager.get_order(&id).await.unwrap();
        assert_eq!(order.order_status, FdrOrderStatus::New);
        order.order_status = FdrOrderStatus::Open;
        order_manager.update_order(order).await;
        let order = order_manager.get_order(&id).await.unwrap();
        assert_eq!(order.order_status, FdrOrderStatus::Open);
    }
    // Added orders are retrievable by their pair.
    #[tokio::test]
    async fn test_add_order() {
        let order_manager = OrderManager::new();
        let mut order = get_order();
        let id = Uuid::new_v4();
        order.id = id;
        order_manager.add_order(order).await;
        let orders = order_manager
            .get_orders(&CurrencyPair::new(Asset::Btc, Asset::Usd))
            .await;
        assert_eq!(orders[0].id, id);
    }
    // Removing by id empties the pair's order list again.
    #[tokio::test]
    async fn test_remove_order() {
        let order_manager = OrderManager::new();
        let mut order = get_order();
        let id = Uuid::new_v4();
        order.id = id;
        order_manager.add_order(order).await;
        let orders = order_manager
            .get_orders(&CurrencyPair::new(Asset::Btc, Asset::Usd))
            .await;
        assert_eq!(orders.len(), 1);
        order_manager.remove_order(&id).await;
        let orders = order_manager
            .get_orders(&CurrencyPair::new(Asset::Btc, Asset::Usd))
            .await;
        assert_eq!(orders.len(), 0);
    }
}
| 0
|
state_managers
|
/mono/crates/strat_common/src/state_managers/book_manager.rs
|
use async_trait::async_trait;
use fdr_common::{crypto::book::Book, exchange::Market};
use fdr_event::events::FdrBookEntry;
use rust_decimal::Decimal;
use serde::{Deserialize, Serialize};
use std::collections::HashMap;
use tokio::sync::RwLock;
/// Concurrent store of order books keyed by market, each truncated to
/// `depth` levels per side.
#[derive(Debug)]
pub struct BookManager {
    books: RwLock<HashMap<Market, Book>>,
    // Depth applied to books created lazily in `update_book`.
    depth: usize,
}
/// Serializable configuration: the markets a `BookManager` should track.
#[derive(Debug, Clone, Serialize, Deserialize)]
pub struct BookManagerConfig {
    pub markets: Vec<Market>,
}
/// Read-only access to tracked books.
#[async_trait]
pub trait BookManagerGetters {
    async fn get_book(&self, market: &Market) -> Option<Book>;
}
#[async_trait]
impl BookManagerGetters for BookManager {
    /// Returns a clone of the book for a given market.
    /// Returns None if the market is not found
    /// Returns Some(book) if the market is found
    /// Example:
    /// ```
    /// # use fdr_common::crypto::assets::{Asset, CurrencyPair};
    /// # use strat_common::state_managers::book_manager::{BookManager, BookManagerGetters};
    /// # use fdr_common::exchange::{Exchange, Market};
    /// # #[tokio::main]
    /// # async fn main() {
    /// let market = Market {
    ///     pair: CurrencyPair::new(Asset::Usd, Asset::Btc),
    ///     exchange: Exchange::Kraken,
    /// };
    /// let markets = vec![market];
    /// let book_manager = BookManager::new(&markets, 10);
    /// let book = book_manager.get_book(&market).await.unwrap();
    /// assert_eq!(book.depth, 10);
    /// # }
    /// ```
    /// will return a clone of the book for XBT/USD
    /// entries are stored in a BTreeMap, so they are sorted by price lowest to highest
    async fn get_book(&self, market: &Market) -> Option<Book> {
        self.books.read().await.get(market).cloned()
    }
}
impl BookManager {
    /// Creates a new BookManager
    /// Example:
    /// ```
    /// # use fdr_common::crypto::assets::{Asset, CurrencyPair};
    /// # use strat_common::state_managers::book_manager::BookManager;
    /// # use fdr_common::exchange::{Exchange, Market};
    /// let market = Market {
    ///     pair: CurrencyPair::new(Asset::Usd, Asset::Btc),
    ///     exchange: Exchange::Kraken,
    /// };
    /// let markets = vec![market];
    /// let book_manager = BookManager::new(&markets, 10);
    /// ```
    /// will create a new BookManager with one book per supplied market,
    /// each `depth` levels deep
    pub fn new(markets: &[Market], depth: usize) -> Self {
        let books = RwLock::new(markets.iter().map(|&market| (market, Book::new(depth))).collect());
        Self { books, depth }
    }
    /// Empties both sides of the book for `market`; no-op for unknown markets.
    ///
    /// Relaxed from `&mut self` to `&self`: the `RwLock` provides the needed
    /// interior mutability, matching `update_book`. Backward-compatible for
    /// existing callers.
    pub async fn clear_book(&self, market: &Market) {
        if let Some(book) = self.books.write().await.get_mut(market) {
            book.asks.clear();
            book.bids.clear();
        }
    }
    /// Empties both sides of every tracked book (same `&self` relaxation as
    /// `clear_book`).
    pub async fn reset(&self) {
        self.books.write().await.iter_mut().for_each(|(_, book)| {
            book.asks.clear();
            book.bids.clear();
        });
    }
    /// Updates the book for a given pair
    ///
    /// Example:
    /// ```rust
    /// # use rust_decimal::Decimal;
    /// # use fdr_common::crypto::assets::{Asset, CurrencyPair};
    /// # use fdr_event::events::FdrBookEntry;
    /// # use strat_common::state_managers::book_manager::BookManager;
    /// # use fdr_common::exchange::{Exchange, Market};
    /// # #[tokio::main]
    /// # async fn main() {
    /// let markets = vec![
    ///     Market {
    ///         pair: CurrencyPair::new(Asset::Usd, Asset::Btc),
    ///         exchange: Exchange::Kraken,
    ///     },
    ///     Market {
    ///         pair: CurrencyPair::new(Asset::Usd, Asset::Eth),
    ///         exchange: Exchange::Kraken,
    ///     },
    /// ];
    /// let book_manager = BookManager::new(&markets, 10);
    /// book_manager.update_book(
    ///     Market {
    ///         pair: CurrencyPair::new(Asset::Usd, Asset::Btc),
    ///         exchange: Exchange::Kraken,
    ///     },
    ///     &[FdrBookEntry{ price: Decimal::from(100), volume: Decimal::from(1)}], &[FdrBookEntry { price: Decimal::from(200), volume: Decimal::from(1)}],
    ///     None,
    /// ).await;
    /// # }
    /// ```
    /// ```text
    /// will update the book for XBT/USD with an ask at 100 and a bid at 200
    /// ```
    pub async fn update_book(
        &self,
        market: Market,
        asks: &[FdrBookEntry],
        bids: &[FdrBookEntry],
        sync_required: Option<i64>,
    ) {
        let mut book = self.books.write().await;
        let book = book.entry(market).or_insert_with(|| Book::new(self.depth));
        let (old_ask, old_bid) = (book.lowest_ask(), book.highest_bid());
        // Zero-volume entries are deletions; anything else upserts the level.
        for ask in asks {
            if ask.volume == Decimal::ZERO {
                book.asks.remove(&ask.price);
            } else {
                book.asks.insert(ask.price, ask.volume);
            }
        }
        // Truncate asks from the high end: only the best (lowest) levels stay.
        while book.asks.len() > book.depth {
            book.asks.pop_last();
        }
        for bid in bids {
            if bid.volume == Decimal::ZERO {
                book.bids.remove(&bid.price);
            } else {
                book.bids.insert(bid.price, bid.volume);
            }
        }
        // Truncate bids from the low end: only the best (highest) levels stay.
        while book.bids.len() > book.depth {
            book.bids.pop_first();
        }
        // Flag whether the top of either side moved during this update.
        book.top_updated = old_bid != book.highest_bid() || old_ask != book.lowest_ask();
        book.sync_required = sync_required;
    }
}
#[cfg(test)]
mod tests {
    use super::*;
    use fdr_common::{
        crypto::assets::{Asset, CurrencyPair},
        exchange::Exchange,
    };
    use rust_decimal_macros::dec;
    // Seeding three asks and three bids must leave both sides with three levels.
    #[tokio::test]
    async fn update_book_add_entries() {
        let pair = CurrencyPair::new(Asset::Btc, Asset::Usd);
        let market = Market {
            pair,
            exchange: Exchange::Kraken,
        };
        let mut book_manager = BookManager::new(vec![market].as_slice(), 10);
        update_book(&market, &mut book_manager).await;
        let btc_usd_book = book_manager.get_book(&market).await.unwrap(); // unwrap safe
        assert_eq!(btc_usd_book.asks.len(), 3);
        assert_eq!(btc_usd_book.bids.len(), 3);
    }
    // An update carrying zero volume must delete the matching ask level and
    // leave the bid side untouched.
    #[tokio::test]
    async fn update_book_remove_entry() {
        let pair = CurrencyPair::new(Asset::Btc, Asset::Usd);
        let market = Market {
            pair,
            exchange: Exchange::Kraken,
        };
        let mut book_manager = BookManager::new(vec![market].as_slice(), 10);
        update_book(&market, &mut book_manager).await;
        let mut to_remove = FdrBookEntry {
            price: dec!(20478),
            volume: dec!(0.034928),
        };
        // Zero volume is the removal signal understood by update_book.
        to_remove.volume = dec!(0);
        let update = vec![to_remove];
        book_manager.update_book(market, &update, &[], None).await;
        let btc_usd_book = book_manager.get_book(&market).await.unwrap(); // unwrap safe
        assert_eq!(btc_usd_book.asks.len(), 2);
        assert_eq!(btc_usd_book.bids.len(), 3);
    }
    // With depth 3, inserting a better level must evict the worst level on
    // each side: the highest ask (4000) and the lowest bid (1000).
    #[tokio::test]
    async fn proper_entries_are_bumped() {
        let pair = CurrencyPair::new(Asset::Btc, Asset::Usd);
        let market = Market {
            pair,
            exchange: Exchange::Kraken,
        };
        let book_manager = BookManager::new(vec![market].as_slice(), 3);
        let asks_update = vec![
            FdrBookEntry {
                price: dec!(1000),
                volume: dec!(1),
            },
            FdrBookEntry {
                price: dec!(3000),
                volume: dec!(1),
            },
            FdrBookEntry {
                price: dec!(4000),
                volume: dec!(1),
            },
        ];
        let bids_update = vec![
            FdrBookEntry {
                price: dec!(1000),
                volume: dec!(1),
            },
            FdrBookEntry {
                price: dec!(3000),
                volume: dec!(1),
            },
            FdrBookEntry {
                price: dec!(4000),
                volume: dec!(1),
            },
        ];
        book_manager
            .update_book(market, asks_update.as_slice(), bids_update.as_slice(), None)
            .await;
        book_manager
            .update_book(
                market,
                &[FdrBookEntry {
                    price: dec!(2000),
                    volume: dec!(1),
                }],
                &[FdrBookEntry {
                    price: dec!(2000),
                    volume: dec!(1),
                }],
                None,
            )
            .await;
        let book = book_manager.get_book(&market).await.unwrap(); // unwrap safe
        let (asks, bids) = (&book.asks, &book.bids);
        assert_eq!(asks.len(), 3);
        assert_eq!(asks.get(&dec!(1000)), Some(&dec!(1)));
        assert_eq!(asks.get(&dec!(2000)), Some(&dec!(1)));
        assert_eq!(asks.get(&dec!(3000)), Some(&dec!(1)));
        assert_eq!(bids.len(), 3);
        assert_eq!(bids.get(&dec!(2000)), Some(&dec!(1)));
        assert_eq!(bids.get(&dec!(3000)), Some(&dec!(1)));
        assert_eq!(bids.get(&dec!(4000)), Some(&dec!(1)));
    }
    // Shared fixture: seeds the manager with three asks and three bids.
    async fn update_book(market: &Market, book_manager: &mut BookManager) {
        let asks_update = vec![
            FdrBookEntry {
                price: dec!(20478),
                volume: dec!(0.034928),
            },
            FdrBookEntry {
                price: dec!(30345),
                volume: dec!(0.2342874),
            },
            FdrBookEntry {
                price: dec!(40324),
                volume: dec!(0.003240235),
            },
        ];
        let bids_update = vec![
            FdrBookEntry {
                price: dec!(20385),
                volume: dec!(0.0034239845),
            },
            FdrBookEntry {
                price: dec!(30595),
                volume: dec!(0.34023402),
            },
            FdrBookEntry {
                price: dec!(40503),
                volume: dec!(0.045345),
            },
        ];
        book_manager
            .update_book(*market, asks_update.as_slice(), bids_update.as_slice(), None)
            .await;
    }
}
| 0
|
state_managers
|
/mono/crates/strat_common/src/state_managers/mod.rs
|
pub mod book_manager;
pub mod channel_manager;
pub mod market_manager;
pub mod order_manager;
pub mod strategy_manager;
pub mod trade_volume_manager;
use async_trait::async_trait;
use fdr_common::error::FdrError;
/// Contract for a state holder that accepts update requests and fans
/// notifications out to subscribers.
#[async_trait]
pub trait StateTrait<Requests, Event, E>
where
    Event: Clone + Send + Sync,
    Requests: Clone + PartialEq + Send + Sync,
    E: From<FdrError> + Send + Sync,
{
    /// Begins listening for incoming update requests.
    async fn subscribe_to_requests(&self) -> Result<(), E>;
    /// Applies the given `requests` to the managed state.
    async fn update_data(&self, requests: Requests) -> Result<(), E>;
    /// Broadcasts `event`; returns a count (presumably receivers notified —
    /// confirm against implementors).
    async fn notify_subscribers(&self, event: Event) -> Result<usize, E>;
}
/// Contract for a consumer of state changes published by a [`StateTrait`] impl.
#[async_trait]
pub trait StateSubscriberTrait<Requests, E>
where
    Requests: Clone + Send + Sync,
    E: From<FdrError> + Send + Sync,
{
    /// Subscribes this consumer to state-change notifications.
    async fn subscribe_to_changes(&mut self) -> Result<(), E>;
    /// Asks the state holder to apply `requests`; returns a count (presumably
    /// receivers reached — confirm against implementors).
    async fn request_state_update(&self, requests: Requests) -> Result<usize, E>;
}
| 0
|
state_managers
|
/mono/crates/strat_common/src/state_managers/channel_manager.rs
|
use std::collections::HashMap;
use derive_more::Display;
use tokio::sync::watch;
use fdr_common::{
error::{FdrError, FdrResult},
exchange::Market,
utils::channel::Channel,
};
use fdr_event::events::FdrEvent;
/// Owns per-market notification channels plus a shared channel for
/// [`FdrEvent`]s.
#[derive(Default)]
pub struct ChannelManager {
    // Watch channels used to signal ticker/book updates per market.
    markets: HashMap<Market, PairChannels>,
    // Shared channel carrying strategy-wide events (capacity set in `new`).
    event: Channel<FdrEvent>,
}
impl ChannelManager {
    /// Builds a manager with a default `PairChannels` per market and an event
    /// channel of capacity 100.
    pub fn new(markets: Vec<Market>) -> Self {
        ChannelManager {
            markets: markets
                .into_iter()
                .map(|market| (market, PairChannels::default()))
                .collect(),
            event: Channel::new(100),
        }
    }
    /// Publishes `event` on the shared event channel.
    ///
    /// # Errors
    /// Returns [`FdrError::PublisherError`] when the send fails.
    pub fn send_event_update(&self, event: FdrEvent) -> FdrResult<()> {
        self.event.tx().send(event).map_err(|_| FdrError::PublisherError)?;
        Ok(())
    }
    /// Signals a ticker update for `market`, if the market is managed here.
    ///
    /// Unknown markets and sends with no live receivers are treated as
    /// best-effort no-ops, matching the previous behavior; the discard of the
    /// send result is now explicit instead of hidden inside `Option::map`.
    pub fn send_ticker_update(&self, market: &Market) -> FdrResult<()> {
        if let Some(channels) = self.markets.get(market) {
            let _ = channels.ticker.send(());
        }
        Ok(())
    }
    /// Signals a book update for `market`, if the market is managed here.
    /// Same best-effort semantics as [`Self::send_ticker_update`].
    pub fn send_book_update(&self, market: &Market) -> FdrResult<()> {
        if let Some(channels) = self.markets.get(market) {
            let _ = channels.book.send(());
        }
        Ok(())
    }
}
/// The watch senders used to notify ticker and book updates for one market.
pub struct PairChannels {
    ticker: watch::Sender<()>,
    book: watch::Sender<()>,
}
impl Default for PairChannels {
    fn default() -> Self {
        // Only the senders are retained; the initial receivers are dropped and
        // fresh ones are handed out via `subscribe()` later.
        let (ticker, _initial_ticker_rx) = watch::channel(());
        let (book, _initial_book_rx) = watch::channel(());
        PairChannels { ticker, book }
    }
}
impl std::fmt::Debug for PairChannels {
    /// Renders both senders under their field names for diagnostics.
    fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
        f.debug_struct("PairChannels")
            .field("ticker", &self.ticker)
            .field("book", &self.book)
            .finish()
    }
}
/// Discriminates which kind of notification an update refers to.
#[derive(Debug, Display)]
pub enum UpdateType {
    Ticker,
    Book,
    Event,
}
/// Read-side access to the channels owned by a [`ChannelManager`].
pub trait ChannelManagerGetters {
    /// Returns a fresh receiver for ticker updates on `market`, if managed.
    fn get_ticker_receiver(&self, market: &Market) -> Option<watch::Receiver<()>>;
    /// Returns a fresh receiver for book updates on `market`, if managed.
    fn get_book_receiver(&self, market: &Market) -> Option<watch::Receiver<()>>;
    /// Returns a clone of the shared event channel.
    fn get_event_channel(&self) -> Channel<FdrEvent>;
}
impl ChannelManagerGetters for ChannelManager {
    /// Subscribes a new watch receiver to the market's ticker channel.
    fn get_ticker_receiver(&self, market: &Market) -> Option<watch::Receiver<()>> {
        let channels = self.markets.get(market)?;
        Some(channels.ticker.subscribe())
    }
    /// Subscribes a new watch receiver to the market's book channel.
    fn get_book_receiver(&self, market: &Market) -> Option<watch::Receiver<()>> {
        let channels = self.markets.get(market)?;
        Some(channels.book.subscribe())
    }
    /// Hands out a clone of the shared event channel.
    fn get_event_channel(&self) -> Channel<FdrEvent> {
        self.event.clone()
    }
}
| 0
|
state_managers
|
/mono/crates/strat_common/src/state_managers/strategy_manager.rs
|
use std::{collections::HashMap, hash::RandomState};
use async_trait::async_trait;
use slog::Logger;
use fdr_common::{crypto::assets::CurrencyPair, error::FdrError, exchange::Exchange, ticker::FdrTickerUpdate};
use fdr_event::events::{
FdrAlert, FdrBookUpdate, FdrEvent, FdrOrderCanceled, FdrOrderFilled, FdrOrderPlaced, FdrOrderTrade,
};
use uuid::Uuid;
use crate::{
crypto::market::{MarketConfig, UpdateRequests},
metrics::StrategyMetrics,
state_managers::{
book_manager::BookManagerGetters, channel_manager::ChannelManagerGetters, market_manager::MarketManagerGetters,
trade_volume_manager::TradeVolumeManagerGetters,
},
};
/// Top-level driver for a strategy: registers configurations and runs the
/// main loop, delegating event dispatch to [`EventHandlerTrait`].
#[async_trait]
pub trait StrategyManagerTrait<E>: EventHandlerTrait<E>
where
    E: From<FdrError> + Send + Sync,
{
    /// Registers a strategy configuration of type `T`.
    fn add_strategy<T>(&mut self, config: T);
    /// Starts the strategy's main loop.
    async fn start(&self) -> Result<(), E>;
}
/// Aggregated read-side access a strategy needs: market, volume, book, and
/// channel getters plus run metadata and metrics.
#[async_trait]
pub trait StrategyGetterTrait:
    MarketManagerGetters + TradeVolumeManagerGetters + BookManagerGetters + ChannelManagerGetters
{
    /// Receiver of the strategy's event broadcast stream.
    fn get_update_receiver(&self) -> tokio::sync::broadcast::Receiver<FdrEvent>;
    /// Sender used to request state updates.
    fn get_request_sender(&self) -> tokio::sync::broadcast::Sender<UpdateRequests>;
    /// Structured logger for this strategy.
    fn logger(&self) -> &Logger;
    /// Unique identifier of the current run.
    fn get_run_id(&self) -> Uuid;
    /// Per-exchange, per-pair market configuration.
    fn get_config_markets(&self) -> &HashMap<Exchange, HashMap<CurrencyPair, MarketConfig, RandomState>, RandomState>;
    /// Metrics sink for this strategy.
    fn get_metrics_provider(&self) -> &StrategyMetrics;
}
/// Per-event-type dispatch hooks; `handle_event` is expected to route an
/// [`FdrEvent`] to the matching specialized handler.
#[async_trait]
pub trait EventHandlerTrait<E>
where
    E: From<FdrError> + Send + Sync,
{
    /// Entry point: dispatches `event` to the appropriate handler below.
    async fn handle_event(&self, event: FdrEvent) -> Result<(), E>;
    async fn handle_trade_event(&self, trade: &FdrOrderTrade) -> Result<(), E>;
    async fn handle_filled_order_event(&self, filled_order: &FdrOrderFilled) -> Result<(), E>;
    async fn handle_cancel_event(&self, cancelled_order: &FdrOrderCanceled) -> Result<(), E>;
    async fn handle_order_placed_event(&self, placed_order: &FdrOrderPlaced) -> Result<(), E>;
    async fn handle_order_book_event(&self, order_book: &FdrBookUpdate) -> Result<(), E>;
    async fn handle_ticker_event(&self, ticker: &FdrTickerUpdate) -> Result<(), E>;
    async fn handle_alert_event(&self, alert: &FdrAlert) -> Result<(), E>;
}
| 0
|
state_managers
|
/mono/crates/strat_common/src/state_managers/trade_volume_manager.rs
|
use std::{collections::HashMap, ops::Add, sync::Arc};
use arc_swap::ArcSwap;
use async_trait::async_trait;
use rust_decimal::Decimal;
use tokio::sync::RwLock;
use fdr_common::{crypto::pairs::FeeData, exchange::Market};
/// Tracks cumulative trade volume and hot-swappable maker/taker fee schedules.
#[derive(Debug, Default)]
pub struct TradeVolumeManager {
    // Cumulative traded volume (units not fixed here — TODO confirm base vs quote).
    volume: Decimal,
    // Maker fee schedule keyed by market; swapped wholesale via `swap`.
    maker_fees: ArcSwap<RwLock<HashMap<Market, FeeData>>>,
    // Taker fee schedule keyed by market; swapped wholesale via `swap`.
    taker_fees: ArcSwap<RwLock<HashMap<Market, FeeData>>>,
}
/// Maker and taker fee rates for a single market.
#[derive(Debug, Default, Clone, Copy, PartialEq, Eq)]
pub struct Fees {
    pub maker: Decimal,
    pub taker: Decimal,
}
/// Read-side access to volume and fee data held by a trade volume manager.
#[async_trait]
pub trait TradeVolumeManagerGetters {
    /// Full taker fee record for `pair`, if known.
    async fn get_taker_fee_data(&self, pair: &Market) -> Option<FeeData>;
    /// Taker fee rate for `pair`; zero when unknown.
    async fn get_taker_fee(&self, pair: &Market) -> Decimal;
    /// Maker and taker rates bundled together.
    async fn get_fees(&self, pair: &Market) -> Fees;
    /// Full maker fee record for `pair`, if known.
    async fn get_maker_fee_data(&self, pair: &Market) -> Option<FeeData>;
    /// Maker fee rate for `pair`; zero when unknown.
    async fn get_maker_fee(&self, pair: &Market) -> Decimal;
    /// Cumulative trade volume.
    async fn get_volume(&self) -> Decimal;
}
#[async_trait]
impl TradeVolumeManagerGetters for TradeVolumeManager {
    /// Get the taker [`FeeData`] of the trade volume manager for a given pair
    ///
    /// # Example
    ///
    /// ```
    /// # use fdr_common::crypto::{
    /// #     assets::{Asset, CurrencyPair},
    /// #     pairs::FeeData,
    /// # };
    /// # use rust_decimal::Decimal;
    /// # use strat_common::state_managers::trade_volume_manager::{TradeVolumeManager, TradeVolumeManagerGetters};
    /// # use std::collections::HashMap;
    /// # use fdr_common::exchange::{Exchange, Market};
    /// # #[tokio::main]
    /// # async fn main() {
    /// let market = Market {
    ///     pair: CurrencyPair::new(Asset::Btc, Asset::Usd),
    ///     exchange: Exchange::Kraken,
    /// };
    /// let mut maker_fees = HashMap::new();
    /// maker_fees.insert(
    ///     market,
    ///     FeeData {fee: Decimal::new(1, 0), ..Default::default()},
    /// );
    ///
    /// let mut taker_fees = HashMap::new();
    /// taker_fees.insert(
    ///     market,
    ///     FeeData {fee: Decimal::new(1, 0), ..Default::default()}
    /// );
    ///
    /// let trade_volume_manager = TradeVolumeManager::new(
    ///     Decimal::new(1, 0),
    ///     maker_fees,
    ///     taker_fees,
    /// );
    ///
    /// assert_eq!(
    ///     trade_volume_manager
    ///         .get_taker_fee_data(&market)
    ///         .await
    ///         .unwrap()
    ///         .fee,
    ///     Decimal::new(1, 0)
    /// );
    /// # }
    /// ```
    async fn get_taker_fee_data(&self, market: &Market) -> Option<FeeData> {
        self.taker_fees.load().read().await.get(market).copied()
    }
    /// Taker fee rate for `market`, or `Decimal::default()` (zero) when no fee
    /// data is known. Delegates to [`Self::get_taker_fee_data`] so the
    /// load/lock/lookup logic lives in one place.
    async fn get_taker_fee(&self, market: &Market) -> Decimal {
        self.get_taker_fee_data(market)
            .await
            .map_or_else(Decimal::default, |fee_data| fee_data.fee)
    }
    /// Maker and taker fee rates for `market` bundled in a [`Fees`] value.
    async fn get_fees(&self, market: &Market) -> Fees {
        Fees {
            maker: self.get_maker_fee(market).await,
            taker: self.get_taker_fee(market).await,
        }
    }
    /// Get the maker [`FeeData`] of the trade volume manager for a given pair
    ///
    /// # Example
    ///
    /// ```
    /// # use fdr_common::crypto::{
    /// #     assets::{Asset, CurrencyPair},
    /// #     pairs::FeeData,
    /// # };
    /// # use rust_decimal::Decimal;
    /// # use std::collections::HashMap;
    /// # use fdr_common::exchange::{Exchange, Market};
    /// # #[tokio::main]
    /// # async fn main() {
    /// # use strat_common::state_managers::trade_volume_manager::{TradeVolumeManager, TradeVolumeManagerGetters};
    ///
    /// let market = Market {
    ///     pair: CurrencyPair::new(Asset::Btc, Asset::Usd),
    ///     exchange: Exchange::Kraken,
    /// };
    /// let mut maker_fees = HashMap::new();
    /// maker_fees.insert(
    ///     market,
    ///     FeeData {fee: Decimal::new(1, 0), ..Default::default()},
    /// );
    ///
    /// let mut taker_fees = HashMap::new();
    /// taker_fees.insert(
    ///     market,
    ///     FeeData {fee: Decimal::new(1, 0), ..Default::default()},
    /// );
    ///
    /// let trade_volume_manager = TradeVolumeManager::new(Decimal::new(1, 0), maker_fees, taker_fees);
    /// assert_eq!(
    ///     trade_volume_manager
    ///         .get_maker_fee_data(&market).await
    ///         .unwrap()
    ///         .fee,
    ///     Decimal::new(1, 0)
    /// );
    /// # }
    /// ```
    async fn get_maker_fee_data(&self, market: &Market) -> Option<FeeData> {
        self.maker_fees.load().read().await.get(market).copied()
    }
    /// Maker fee rate for `market`, or zero when unknown; delegates to
    /// [`Self::get_maker_fee_data`].
    async fn get_maker_fee(&self, market: &Market) -> Decimal {
        self.get_maker_fee_data(market)
            .await
            .map_or_else(Decimal::default, |fee_data| fee_data.fee)
    }
    /// Cumulative trade volume tracked by this manager.
    async fn get_volume(&self) -> Decimal {
        self.volume
    }
}
impl TradeVolumeManager {
    /// Builds a manager with an initial cumulative `volume` and the given
    /// maker/taker fee schedules.
    pub fn new(volume: Decimal, maker_fees: HashMap<Market, FeeData>, taker_fees: HashMap<Market, FeeData>) -> Self {
        TradeVolumeManager {
            volume,
            maker_fees: ArcSwap::from_pointee(RwLock::new(maker_fees)),
            taker_fees: ArcSwap::from_pointee(RwLock::new(taker_fees)),
        }
    }
    /// Increase the volume of the trade volume manager
    /// # Example
    ///
    /// ```
    /// # use fdr_common::crypto::{
    /// #     assets::{Asset, CurrencyPair},
    /// #     pairs::FeeData,
    /// # };
    /// # use rust_decimal::Decimal;
    /// # use std::collections::HashMap;
    /// # use strat_common::state_managers::trade_volume_manager::{TradeVolumeManager, TradeVolumeManagerGetters};
    /// # use fdr_common::exchange::{Exchange, Market};
    /// # #[tokio::main]
    /// # async fn main() {
    /// let market = Market {
    ///     pair: CurrencyPair::new(Asset::Btc, Asset::Usd),
    ///     exchange: Exchange::Kraken,
    /// };
    /// let mut maker_fees = HashMap::new();
    /// maker_fees.insert(
    ///     market,
    ///     FeeData {fee: Decimal::new(1, 0), ..Default::default()},
    /// );
    ///
    /// let mut taker_fees = HashMap::new();
    /// taker_fees.insert(
    ///     market,
    ///     FeeData {fee: Decimal::new(1, 0), ..Default::default()},
    /// );
    ///
    /// let mut trade_volume_manager = TradeVolumeManager::new(Decimal::new(1, 0), maker_fees, taker_fees);
    /// trade_volume_manager.increase_volume(Decimal::new(1, 0));
    /// assert_eq!(trade_volume_manager.get_volume().await, Decimal::new(2, 0));
    /// # }
    /// ```
    pub fn increase_volume(&mut self, increase: Decimal) -> Decimal {
        // Returns the new cumulative volume after the increase.
        self.volume = self.volume.add(increase);
        self.volume
    }
    /// Swap the trade volume manager's fee schedules for new ones using
    /// ArcSwap under the hood
    ///
    /// # Example
    /// ```rust
    /// # use fdr_common::crypto::{
    /// #     assets::{Asset, CurrencyPair},
    /// #     pairs::FeeData,
    /// # };
    /// # use rust_decimal::Decimal;
    /// # use std::collections::HashMap;
    /// # use std::sync::Arc;
    /// # use tokio::sync::RwLock;
    /// # use fdr_common::exchange::{Exchange, Market};
    ///
    /// # #[tokio::main]
    /// # async fn main() {
    /// # use strat_common::state_managers::trade_volume_manager::{TradeVolumeManager, TradeVolumeManagerGetters};
    ///
    /// let market = Market {
    ///     pair: CurrencyPair::new(Asset::Btc, Asset::Usd),
    ///     exchange: Exchange::Kraken,
    /// };
    /// let mut maker_fees = HashMap::new();
    /// maker_fees.insert(
    ///     market,
    ///     FeeData {fee: Decimal::new(1, 0), ..Default::default()},
    /// );
    ///
    /// let mut taker_fees = HashMap::new();
    /// taker_fees.insert(
    ///     market,
    ///     FeeData {fee: Decimal::new(1, 0), ..Default::default()},
    /// );
    ///
    /// let trade_volume_manager = TradeVolumeManager::new(
    ///     Decimal::new(1, 0),
    ///     maker_fees.clone(),
    ///     taker_fees.clone(),
    /// );
    ///
    /// trade_volume_manager
    ///     .swap(maker_fees, taker_fees)
    ///     .await;
    ///
    /// assert_eq!(
    ///     trade_volume_manager
    ///         .get_maker_fee_data(&market).await
    ///         .unwrap()
    ///         .fee,
    ///     Decimal::new(1, 0)
    /// );
    /// # }
    /// ```
    ///
    /// The example demonstrates how to use the `TradeVolumeManager` to swap the fee schedules for new ones.
    /// It creates a new instance, performs a swap operation with updated values, and verifies the fee data using assertions.
    pub async fn swap(&self, maker_fees: HashMap<Market, FeeData>, taker_fees: HashMap<Market, FeeData>) {
        self.maker_fees.store(Arc::new(RwLock::new(maker_fees)));
        self.taker_fees.store(Arc::new(RwLock::new(taker_fees)));
    }
}
| 0
|
state_managers
|
/mono/crates/strat_common/src/state_managers/market_manager.rs
|
use std::{collections::HashMap, fmt::Debug};
use async_trait::async_trait;
use rust_decimal::Decimal;
use tokio::sync::{RwLock, RwLockReadGuard};
use tracing::{instrument, Level};
use fdr_common::{
crypto::{
asset_balance::{AssetBalance, BalanceType},
orders::FdrOrderSide,
},
error::FdrError,
exchange::Market,
ticker::FdrTickerUpdate,
};
use fdr_event::events::FdrOrderTrade;
use crate::crypto::market::{balance::MarketBalance, MarketState, MarketStatus};
/// Per-strategy registry of market state, each entry behind its own `RwLock`.
#[derive(Debug, Default)]
pub struct MarketManager {
    // One independently lockable state per managed market.
    markets: HashMap<Market, RwLock<MarketState>>,
}
/// Read-side access to market state held by a [`MarketManager`].
#[async_trait]
pub trait MarketManagerGetters {
    /// Read guard over the state for `market`, or `None` if unmanaged.
    async fn get_market(&self, market: &Market) -> Option<RwLockReadGuard<'_, MarketState>>;
    /// Available (base, quote) balances for `market`, or `None` if unmanaged.
    async fn get_market_balance(&self, market: &Market) -> Option<(Decimal, Decimal)>;
}
#[async_trait]
impl MarketManagerGetters for MarketManager {
    /// Acquires and returns a read guard over the market's state, or `None`
    /// when this manager does not track the market.
    #[instrument(skip_all, level = Level::DEBUG)]
    async fn get_market(&self, market: &Market) -> Option<RwLockReadGuard<'_, MarketState>> {
        match self.markets.get(market) {
            Some(state) => Some(state.read().await),
            None => None,
        }
    }
    /// Reads the available base and quote balances for the market under a
    /// short-lived read lock, or `None` when the market is not tracked.
    async fn get_market_balance(&self, market: &Market) -> Option<(Decimal, Decimal)> {
        let state = self.markets.get(market)?;
        let guard = state.read().await;
        let balance = &guard.market_balance;
        Some((balance.base.available(), balance.quote.available()))
    }
}
/// The MarketManager is a singleton that exists per StrategyManager
/// that handles the state and mutability of such for all markets running on
/// the given StrategyManager.
impl MarketManager {
    /// Builds a manager from a pre-locked map of market states.
    pub fn new(markets: HashMap<Market, RwLock<MarketState>>) -> Self {
        MarketManager { markets }
    }
    /// Overwrites every field of the market's state with values from
    /// `market_state`, if the market is tracked. Silently no-ops otherwise.
    #[instrument(skip_all, level = Level::DEBUG)]
    pub async fn replace_market(&self, market: Market, market_state: MarketState) {
        if let Some(market) = self.markets.get(&market) {
            let mut market = market.write().await;
            market.market_balance = market_state.market_balance;
            market.last_ticker = market_state.last_ticker;
            market.moving_ticker_delta_average = market_state.moving_ticker_delta_average;
            market.trading_pair = market_state.trading_pair;
            market.last_hit = market_state.last_hit;
            market.status = market_state.status;
            market.fees = market_state.fees;
        }
    }
    /// Replaces only the balance portion of the market's state, if tracked.
    #[instrument(skip_all, level = Level::DEBUG)]
    pub async fn replace_market_balance(&self, market: Market, market_balance: MarketBalance) {
        if let Some(market) = self.markets.get(&market) {
            let mut market = market.write().await;
            market.market_balance = market_balance;
        }
    }
    /// Applies a trade to the market's balances and returns the updated
    /// (base, quote) asset balances.
    ///
    /// NOTE(review): the error variant is `IncreaseFailed` even though this is
    /// a general update failure (market not tracked) — confirm whether a
    /// dedicated variant is warranted.
    #[instrument(skip(self), level = Level::DEBUG)]
    pub async fn update_market_balance_from_trade(
        &self,
        trade: &FdrOrderTrade,
    ) -> Result<(AssetBalance, AssetBalance), FdrError> {
        if let Some(market) = self.markets.get(&trade.market) {
            let mut market = market.write().await;
            // Adjust balances
            // Fees come from quote side
            market.market_balance.quote.decrease_balances(&trade.fee);
            match trade.side {
                FdrOrderSide::Sell => {
                    market.market_balance.base.decrease_total_balance(&trade.volume);
                    market.market_balance.quote.increase_balances(&trade.cost);
                }
                FdrOrderSide::Buy => {
                    market.market_balance.base.increase_balances(&trade.volume);
                    market.market_balance.quote.decrease_total_balance(&trade.cost);
                }
            }
            Ok((market.market_balance.base, market.market_balance.quote))
        } else {
            Err(FdrError::IncreaseFailed)
        }
    }
    /// Increases the side-relevant balance (quote for buys, base for sells)
    /// by `amount` on the given `balance_type`; returns the updated balance.
    pub async fn increase_market_balance(
        &self,
        market: &Market,
        side: FdrOrderSide,
        amount: &Decimal,
        balance_type: BalanceType,
    ) -> Result<AssetBalance, FdrError> {
        if let Some(market) = self.markets.get(market) {
            let mut market = market.write().await;
            let asset_balance = match side {
                FdrOrderSide::Buy => &mut market.market_balance.quote,
                FdrOrderSide::Sell => &mut market.market_balance.base,
            };
            match balance_type {
                BalanceType::Available => asset_balance.increase_available_balance(amount),
                BalanceType::Total => asset_balance.increase_total_balance(amount),
            }
            Ok(*asset_balance)
        } else {
            Err(FdrError::IncreaseFailed)
        }
    }
    /// Decreases the side-relevant balance by `amount`; mirror image of
    /// [`Self::increase_market_balance`].
    ///
    /// NOTE(review): returns `IncreaseFailed` on a decrease — confirm intent.
    pub async fn decrease_market_balance(
        &self,
        market: &Market,
        side: FdrOrderSide,
        amount: &Decimal,
        balance_type: BalanceType,
    ) -> Result<AssetBalance, FdrError> {
        if let Some(market) = self.markets.get(market) {
            let mut market = market.write().await;
            let asset_balance = match side {
                FdrOrderSide::Buy => &mut market.market_balance.quote,
                FdrOrderSide::Sell => &mut market.market_balance.base,
            };
            match balance_type {
                BalanceType::Available => asset_balance.decrease_available_balance(amount),
                BalanceType::Total => asset_balance.decrease_total_balance(amount),
            }
            Ok(*asset_balance)
        } else {
            Err(FdrError::IncreaseFailed)
        }
    }
    /// Records `last_ticker` as the market's latest ticker. When both the old
    /// and new tickers carry a last price, the price delta is folded into the
    /// market's moving ticker-delta average first. Returns the stored ticker,
    /// or `None` if the market is not tracked.
    #[instrument(skip_all, level = Level::DEBUG)]
    pub async fn update_last_ticker(&self, last_ticker: &FdrTickerUpdate) -> Option<FdrTickerUpdate> {
        if let Some(market) = self.markets.get(&Market {
            pair: last_ticker.pair,
            exchange: last_ticker.exchange,
        }) {
            let mut market = market.write().await;
            market.last_ticker.and_then(|old_ticker| {
                last_ticker.last_price.map(|new_price| {
                    if let Some(old_price) = old_ticker.last_price {
                        let delta = new_price - old_price;
                        market.moving_ticker_delta_average.add_entry(
                            delta,
                            old_ticker.timestamp,
                            last_ticker.timestamp,
                        );
                    }
                })
            });
            market.last_ticker = Some(*last_ticker);
            market.last_ticker
        } else {
            None
        }
    }
    /// Stores the latest hit price for the market; returns it, or `None` if
    /// the market is not tracked.
    #[instrument(skip_all, level = Level::DEBUG)]
    pub async fn update_last_hit(&self, market: &Market, last_price: Option<Decimal>) -> Option<Decimal> {
        if let Some(market) = self.markets.get(market) {
            let mut market = market.write().await;
            market.last_hit = last_price;
            last_price
        } else {
            None
        }
    }
    /// Sets the market's status; returns it, or `None` if the market is not
    /// tracked.
    #[instrument(skip_all, level = Level::DEBUG)]
    pub async fn update_status(&self, market: &Market, status: MarketStatus) -> Option<MarketStatus> {
        if let Some(market) = self.markets.get(market) {
            let mut market = market.write().await;
            market.status = status;
            Some(status)
        } else {
            None
        }
    }
}
| 0
|
fdr_http
|
/mono/crates/fdr_http/Cargo.toml
|
[package]
name = "fdr-http"
version = "0.1.0"
edition = "2021"
[dependencies]
fdr-common = { path = "../fdr_common" }
async-trait = { workspace = true }
axum = { workspace = true }
axum-tracing-opentelemetry = { workspace = true }
chrono = { workspace = true, features = ["serde"] }
clap = { workspace = true, features = ["derive", "env", "cargo"] }
derive_more = { workspace = true }
http = { workspace = true }
hyper = { workspace = true }
indexmap = { workspace = true }
lazy_static = { workspace = true }
prometheus = { workspace = true }
rust_decimal = { workspace = true, features = ["serde", "db-postgres"] }
serde = { workspace = true, features = ["derive"] }
serde_json = { workspace = true }
slog = { workspace = true, features = ["max_level_trace"] }
sqlx = { workspace = true }
thiserror = { workspace = true }
tokio = { workspace = true, features = ["full"] }
tower = { workspace = true, features = ["full"] }
tower-http = { workspace = true, features = ["full"] }
toml = { workspace = true }
url = { workspace = true }
uuid = { workspace = true, features = ["v4", "fast-rng", "serde"] }
regex = { workspace = true }
# tracing dependencies
tracing = { workspace = true, default-features = false, features = ["std", "attributes"] }
tracing-subscriber = { workspace = true, default-features = false, features = ["json", "env-filter", "serde"] }
tracing-opentelemetry = { workspace = true, default-features = true }
opentelemetry = { workspace = true, default-features = false, features = ["trace"] }
opentelemetry_sdk = { workspace = true, features = ["rt-tokio"] }
opentelemetry-otlp = { workspace = true, default-features = false, features = ["trace", "grpc-tonic"] }
tracing-attributes = { workspace = true }
tracing-core = { workspace = true }
http-body = { workspace = true }
tokio-util = { workspace = true }
| 0
|
src
|
/mono/crates/fdr_http/src/lib.rs
|
use crate::{
layer::{idempotency_key_extractor::PropagateXIdempotencyKeyLayer, strategy_extractor::PropagateXStrategyLayer},
realm::{get_metrics, healthy, ready},
};
use axum::{routing::get, Router};
use axum_tracing_opentelemetry::middleware::OtelAxumLayer;
use fdr_common::{context::FdrServiceContext, error::FdrError};
use http::{header::AUTHORIZATION, HeaderName, HeaderValue};
use slog::info;
use std::{
iter::once,
net::{AddrParseError, SocketAddr},
ops::Deref,
sync::Arc,
};
use tower_http::{
add_extension::AddExtensionLayer, compression::CompressionLayer, propagate_header::PropagateHeaderLayer,
sensitive_headers::SetSensitiveRequestHeadersLayer, validate_request::ValidateRequestHeaderLayer,
};
use uuid::Uuid;
pub mod layer;
pub mod realm;
/// HTTP server wrapper: wires the shared realm routes (metrics/ready/healthy)
/// and the common middleware stack around a caller-supplied router.
pub struct Server {}
impl Server {
    /// Binds `listen_address:port` and serves `router` merged with the
    /// standard observability routes and middleware. Runs until the axum
    /// server exits.
    pub async fn serve<Context, Config, Error>(
        context: Arc<Context>,
        listen_address: String,
        port: u16,
        router: Router,
    ) -> Result<(), Error>
    where
        Config: serde::de::DeserializeOwned + Clone + Send + Sync + 'static,
        Context: FdrServiceContext<Config, Context = Context> + Send + Sync + 'static,
        Error: From<FdrError> + From<hyper::Error> + From<AddrParseError> + std::convert::From<std::io::Error>,
    {
        let bind_addr: SocketAddr = format!("{}:{}", listen_address, port).parse()?;
        let listener = tokio::net::TcpListener::bind(bind_addr).await?;
        info!(context.logger(), "Starting server on {}", bind_addr);
        // Layer order matters: sensitive-header masking first, then tracing,
        // compression, header propagation, validation, custom extractors, and
        // finally the shared context extension.
        let app = Router::new()
            .route("/metrics", get(get_metrics::<Config, Context>))
            .route("/ready", get(ready::<Config, Context>))
            .route("/healthy", get(healthy::<Config, Context>))
            .merge(router)
            .layer(SetSensitiveRequestHeadersLayer::new(once(AUTHORIZATION)))
            .layer(OtelAxumLayer::default())
            .layer(CompressionLayer::new())
            .layer(PropagateHeaderLayer::new(HeaderName::from_static("x-request-id")))
            .layer(ValidateRequestHeaderLayer::accept("application/json"))
            .layer(PropagateXStrategyLayer::new())
            .layer(PropagateXIdempotencyKeyLayer::new(context.logger().clone()))
            .layer(AddExtensionLayer::new(context.clone()));
        Ok(axum::serve(listener, app.into_make_service()).await?)
    }
}
/// Newtype over [`Uuid`] carrying a request's idempotency key, extracted from
/// the `X-Idempotency-Key` header by the matching middleware layer.
#[derive(Debug, PartialEq, Eq, Hash, Clone, Copy)]
pub struct IdempotencyKey(Uuid);
impl Deref for IdempotencyKey {
    type Target = Uuid;
    // Deref to the inner Uuid so callers can use Uuid methods directly.
    fn deref(&self) -> &Self::Target {
        &self.0
    }
}
impl TryFrom<&HeaderValue> for IdempotencyKey {
    type Error = FdrError;
    /// Parses an `Idempotency-Key` header value into an [`IdempotencyKey`].
    ///
    /// # Errors
    /// Returns [`FdrError::Parse`] when the header bytes are not valid UTF-8
    /// (previously mis-reported as "not a valid UUID") or when the string is
    /// not a parseable UUID.
    fn try_from(value: &HeaderValue) -> Result<Self, Self::Error> {
        let raw = value
            .to_str()
            .map_err(|_| FdrError::Parse("Idempotency-Key header is not valid UTF-8".to_string()))?;
        let uuid = Uuid::try_parse(raw)
            .map_err(|_| FdrError::Parse("Idempotency-Key header is not a valid UUID".to_string()))?;
        Ok(IdempotencyKey(uuid))
    }
}
| 0
|
realm
|
/mono/crates/fdr_http/src/realm/mod.rs
|
use axum::{body::Body, response::Response, Extension};
use fdr_common::{context::FdrServiceContext, metrics};
use http::StatusCode;
use std::sync::Arc;
use tracing::{instrument, Level};
/// Renders the service's Prometheus metrics registry as an HTTP 200 response,
/// or 500 when the registry cannot be rendered.
#[instrument(skip_all, level = Level::DEBUG)]
pub async fn get_metrics<Config, Context>(Extension(ctx): Extension<Arc<Context>>) -> Result<Response<Body>, StatusCode>
where
    Config: serde::de::DeserializeOwned + Clone + Send + Sync + 'static,
    Context: Send + Sync + FdrServiceContext<Config, Context = Context>,
{
    let registry = ctx.metrics_registry().clone();
    let rendered = metrics::get_metrics_registry(registry).map_err(|_| StatusCode::INTERNAL_SERVER_ERROR)?;
    let mut response = Response::new(Body::from(rendered));
    *response.status_mut() = StatusCode::OK;
    Ok(response)
}
/// Readiness probe: always answers 200 with an empty body.
#[instrument(skip_all, level = Level::DEBUG)]
pub async fn ready<Config, Context>(Extension(_ctx): Extension<Arc<Context>>) -> Result<Response<Body>, StatusCode>
where
    Config: serde::de::DeserializeOwned + Clone + Send + Sync + 'static,
    Context: Send + Sync + FdrServiceContext<Config, Context = Context>,
{
    let mut ok = Response::new(Body::empty());
    *ok.status_mut() = StatusCode::OK;
    Ok(ok)
}
/// Liveness probe: always answers 200 with an empty body.
#[instrument(skip_all, level = Level::DEBUG)]
pub async fn healthy<Config, Context>(Extension(_ctx): Extension<Arc<Context>>) -> Result<Response<Body>, StatusCode>
where
    Config: serde::de::DeserializeOwned + Clone + Send + Sync + 'static,
    Context: Send + Sync + FdrServiceContext<Config, Context = Context>,
{
    let mut ok = Response::new(Body::empty());
    *ok.status_mut() = StatusCode::OK;
    Ok(ok)
}
| 0
|
layer
|
/mono/crates/fdr_http/src/layer/mod.rs
|
pub mod idempotency_key_extractor;
pub mod redirect;
pub mod strategy_extractor;
| 0
|
layer
|
/mono/crates/fdr_http/src/layer/strategy_extractor.rs
|
use axum::{body::Body, http::Request};
use fdr_common::TradingStrategy;
use std::{
convert::Infallible,
task::{Context, Poll},
};
use tower::{Layer, Service};
/// Tower service wrapper that copies a parsed `X-Strategy` header into the
/// request's extensions before delegating to the inner service.
#[derive(Clone)]
pub struct PropagateXStrategy<S> {
    inner: S,
}
impl<S> Service<Request<Body>> for PropagateXStrategy<S>
where
    S: Service<Request<Body>, Error = Infallible> + Clone + Send + 'static,
{
    type Response = S::Response;
    type Error = S::Error;
    type Future = S::Future;
    fn poll_ready(&mut self, cx: &mut Context<'_>) -> Poll<Result<(), Self::Error>> {
        self.inner.poll_ready(cx)
    }
    /// Parses `X-Strategy` (if present and valid) into a [`TradingStrategy`]
    /// extension; requests without a parseable header pass through untouched.
    fn call(&mut self, mut req: Request<Body>) -> Self::Future {
        let parsed = req
            .headers()
            .get("X-Strategy")
            .and_then(|value| TradingStrategy::try_from(value.to_str().unwrap_or_default()).ok());
        if let Some(strategy) = parsed {
            req.extensions_mut().insert(strategy);
        }
        self.inner.call(req)
    }
}
/// Layer that installs [`PropagateXStrategy`] in a tower middleware stack.
#[derive(Clone)]
pub struct PropagateXStrategyLayer;
impl PropagateXStrategyLayer {
    /// Constructs the layer; it carries no configuration.
    pub fn new() -> Self {
        PropagateXStrategyLayer
    }
}
impl<S> Layer<S> for PropagateXStrategyLayer {
    type Service = PropagateXStrategy<S>;
    fn layer(&self, inner: S) -> Self::Service {
        PropagateXStrategy { inner }
    }
}
impl Default for PropagateXStrategyLayer {
    fn default() -> Self {
        Self::new()
    }
}
| 0
|
layer
|
/mono/crates/fdr_http/src/layer/redirect.rs
|
use axum::{
body::Body,
http::{Request, Response, StatusCode, Uri},
Error as AxumError,
};
use std::{
convert::Infallible,
future::Future,
pin::Pin,
task::{Context, Poll},
};
use tokio_util::bytes::Bytes;
use tower::{Layer, Service};
use tower_http::body::UnsyncBoxBody;
/// Tower service that answers requests for `src` with a 301 redirect to `dst`
/// and forwards everything else to the inner service.
#[derive(Clone)]
pub struct RedirectMiddleware<S> {
    inner: S,
    // Exact request URI to intercept.
    src: Uri,
    // Target URI placed in the `Location` header.
    dst: Uri,
}
impl<S> Service<Request<Body>> for RedirectMiddleware<S>
where
    S: Service<Request<Body>, Response = Response<UnsyncBoxBody<Bytes, AxumError>>, Error = Infallible>
        + Clone
        + Send
        + 'static,
    S::Future: Send,
{
    type Response = Response<UnsyncBoxBody<Bytes, AxumError>>;
    type Error = Infallible;
    // Boxed future because the two branches (ready response vs inner call)
    // produce different future types.
    type Future = Pin<Box<dyn Future<Output = Result<Self::Response, Self::Error>> + Send>>;
    fn poll_ready(&mut self, cx: &mut Context<'_>) -> Poll<Result<(), Self::Error>> {
        self.inner.poll_ready(cx)
    }
    /// Intercepts requests whose full URI equals `src` (path AND query must
    /// match exactly) and answers 301 with `dst` in `Location`; all other
    /// requests are delegated to the inner service.
    fn call(&mut self, req: Request<Body>) -> Self::Future {
        let uri = req.uri();
        if uri == &self.src {
            // builder cannot fail here: the status is static and `Location`
            // is built from a valid Uri, so unwrap is safe.
            let response = Response::builder()
                .status(StatusCode::MOVED_PERMANENTLY)
                .header("Location", self.dst.to_string())
                .body(UnsyncBoxBody::<Bytes, axum::Error>::default())
                .unwrap();
            Box::pin(async move { Ok(response) })
        } else {
            Box::pin(self.inner.call(req))
        }
    }
}
#[derive(Clone)]
/// Layer producing [`RedirectMiddleware`] for one fixed `src` -> `dst` pair.
pub struct RedirectMiddlewareLayer {
    src: Uri,
    dst: Uri,
}
impl RedirectMiddlewareLayer {
    /// `src` is the URI to intercept; `dst` is where callers are redirected.
    pub fn new(src: Uri, dst: Uri) -> Self {
        Self { src, dst }
    }
}
impl<S> Layer<S> for RedirectMiddlewareLayer {
    type Service = RedirectMiddleware<S>;
    /// Wrap `inner`, cloning the configured source/destination URIs.
    fn layer(&self, inner: S) -> Self::Service {
        RedirectMiddleware {
            inner,
            src: self.src.clone(),
            dst: self.dst.clone(),
        }
    }
}
| 0
|
layer
|
/mono/crates/fdr_http/src/layer/idempotency_key_extractor.rs
|
use crate::IdempotencyKey;
use axum::{body::Body, http::Request};
use slog::Logger;
use std::{
convert::Infallible,
task::{Context, Poll},
};
use tower::{Layer, Service};
#[derive(Clone)]
/// Tower service that normalizes the `X-Idempotency-Key` request header into
/// an `Option<IdempotencyKey>` request extension, logging invalid values.
pub struct PropagateXIdempotencyKey<S> {
    inner: S,
    logger: Logger,
}
impl<S> Service<Request<Body>> for PropagateXIdempotencyKey<S>
where
    S: Service<Request<Body>, Error = Infallible> + Clone + Send + 'static,
{
    type Response = S::Response;
    type Error = S::Error;
    type Future = S::Future;

    /// Readiness is delegated straight to the wrapped service.
    fn poll_ready(&mut self, cx: &mut Context<'_>) -> Poll<Result<(), Self::Error>> {
        self.inner.poll_ready(cx)
    }

    /// Insert an `Option<IdempotencyKey>` extension on every request.
    ///
    /// A missing header and an unparseable header both normalize to `None`
    /// (the latter with a warning), so downstream handlers can always rely on
    /// the extension being present. The original code built an
    /// `Option<Option<_>>` and matched on it; both arms inserted the same
    /// `Option<IdempotencyKey>` value, so this collapses to `and_then` plus a
    /// single unconditional insert.
    fn call(&mut self, mut req: Request<Body>) -> Self::Future {
        let idempotency_key: Option<IdempotencyKey> =
            req.headers().get("X-Idempotency-Key").and_then(|header_value| {
                IdempotencyKey::try_from(header_value).map_or_else(
                    |e| {
                        slog::warn!(
                            self.logger,
                            "Invalid UUIDv4 provided in IdempotencyKey Header";
                            "HeaderValue" => header_value.to_str().unwrap_or_default(),
                            "error" => %e
                        );
                        None
                    },
                    Some,
                )
            });
        req.extensions_mut().insert(idempotency_key);
        self.inner.call(req)
    }
}
#[derive(Clone)]
/// Layer that wraps a service in [`PropagateXIdempotencyKey`].
pub struct PropagateXIdempotencyKeyLayer {
    logger: Logger,
}
impl PropagateXIdempotencyKeyLayer {
    /// `logger` is used to warn about malformed idempotency-key headers.
    pub fn new(logger: Logger) -> Self {
        Self { logger }
    }
}
impl<S> Layer<S> for PropagateXIdempotencyKeyLayer {
    type Service = PropagateXIdempotencyKey<S>;
    /// Wrap `inner`, cloning the configured logger into the middleware.
    fn layer(&self, inner: S) -> Self::Service {
        PropagateXIdempotencyKey {
            inner,
            logger: self.logger.clone(),
        }
    }
}
| 0
|
exchange_client
|
/mono/crates/exchange_client/Cargo.toml
|
[package]
name = "exchange-client"
version = "0.1.0"
edition = "2021"
authors = ["Steve Sampson <mail@stephensampson.dev>"]
description = "A generic client compatible with all FDR exchange proxies"
[dependencies]
fdr-common = { path = "../fdr_common", features = ["tracing"] }
async-trait = { workspace = true }
axum = { workspace = true }
chrono = { workspace = true, features = ["serde"] }
derive_more = { workspace = true }
http = { workspace = true }
indexmap = { workspace = true }
reqwest = { workspace = true }
rust_decimal = { workspace = true, features = ["serde", "db-postgres"] }
serde = { workspace = true, features = ["derive"] }
serde_json = { workspace = true }
slog = { workspace = true }
thiserror = { workspace = true }
tracing = { workspace = true, default-features = false, features = ["std", "attributes"] }
url = { workspace = true }
uuid = { workspace = true, features = ["v4", "fast-rng", "serde"] }
[features]
kraken = []
| 0
|
src
|
/mono/crates/exchange_client/src/error.rs
|
use crate::realm::FdrApiErrorBody;
use fdr_common::error::ExchangeError;
use thiserror::Error;
#[derive(Error, Debug)]
/// All failure modes of the exchange client.
pub enum ExchangeClientError {
    /// HTTP transport failure from `reqwest`.
    #[error(transparent)]
    Reqwest(#[from] reqwest::Error),
    /// The configured base URL plus path did not form a valid URL.
    #[error(transparent)]
    Url(#[from] url::ParseError),
    /// Request serialization or response deserialization failed.
    #[error(transparent)]
    Deserialize(#[from] serde_json::Error),
    /// The proxy returned a well-formed error body instead of the expected type.
    #[error("The remote server responded with error body: {0}")]
    RemoteServerError(FdrApiErrorBody),
    #[error("The adapter is configured for a different exchange than this request is intended for")]
    WrongExchange,
    /// Error surfaced from the shared exchange layer.
    #[error(transparent)]
    ExchangeError(#[from] ExchangeError),
}
/// Convenience alias used by every adapter method.
pub type ExchangeClientResult<T> = Result<T, ExchangeClientError>;
| 0
|
src
|
/mono/crates/exchange_client/src/lib.rs
|
use async_trait::async_trait;
use fdr_common::{config::ExchangeConfiguration, exchange::Exchange, TradingStrategy};
use http::{HeaderMap, Method};
use serde::{de::DeserializeOwned, Serialize};
use std::fmt::Debug;
use tracing::instrument;
use url::Url;
use uuid::Uuid;
use crate::{
error::{ExchangeClientError, ExchangeClientResult},
realm::trading::ExchangeAdapterTradingTrait,
};
pub mod error;
pub mod realm;
#[derive(Clone)]
/// HTTP client for a single FDR exchange proxy, configured with the target
/// exchange, trading strategy, and proxy base URL/port.
pub struct ExchangeAdapter {
    pub config: ExchangeConfiguration,
}
impl ExchangeAdapter {
pub fn new(exchange: Exchange, strategy: TradingStrategy, url: String, port: u16) -> Self {
ExchangeAdapter {
config: ExchangeConfiguration {
exchange,
strategy,
url,
port,
},
}
}
pub fn from_config(config: ExchangeConfiguration) -> Self {
ExchangeAdapter { config }
}
pub fn exchange(&self) -> Exchange {
self.config.exchange
}
#[instrument(skip(self))]
pub async fn send_request<T, U>(
&self,
method: Method,
url: &str,
body: U,
idempotency_key: Option<Uuid>,
) -> ExchangeClientResult<T>
where
T: DeserializeOwned,
U: Serialize + Debug,
{
let url = Url::parse(format!("{}:{}/{url}", self.config.url, self.config.port, url = url).as_str())?;
let mut headers = HeaderMap::new();
fdr_common::tracing::store_tracing_context(&mut headers);
let mut res = reqwest::Client::new()
.request(method, url)
.header("X-Strategy", self.config.strategy.to_string())
.headers(headers)
.body(serde_json::to_string(&body)?)
.header(reqwest::header::CONTENT_TYPE, "application/json");
if let Some(idempotency_key) = idempotency_key {
res = res.header("X-Idempotency-Key", idempotency_key.to_string());
}
let res = res.send().await?;
let json_value: serde_json::Value = res.json().await?;
match serde_json::from_value(json_value.clone()) {
Ok(value) => Ok(value),
Err(_) => Err(ExchangeClientError::RemoteServerError(serde_json::from_value(
json_value,
)?)),
}
}
}
#[async_trait]
/// Top-level adapter trait: trading operations plus exchange verification.
pub trait ExchangeAdapterTrait: ExchangeAdapterTradingTrait {
    /// `Ok(true)` when `exchange` matches this adapter's configuration;
    /// otherwise an error.
    fn confirm_exchange(&self, exchange: Exchange) -> ExchangeClientResult<bool>;
}
impl ExchangeAdapterTrait for ExchangeAdapter {
    /// `Ok(true)` when the request targets the configured exchange, else
    /// `ExchangeClientError::WrongExchange`.
    fn confirm_exchange(&self, exchange: Exchange) -> ExchangeClientResult<bool> {
        match self.config.exchange == exchange {
            true => Ok(true),
            false => Err(ExchangeClientError::WrongExchange),
        }
    }
}
| 0
|
realm
|
/mono/crates/exchange_client/src/realm/mod.rs
|
pub mod market;
pub mod trading;
pub mod user_data;
use crate::realm::trading::TradingMethod;
use axum::{
response::{IntoResponse, Response},
Json,
};
use http::StatusCode;
use std::fmt::Display;
#[derive(Debug, PartialEq, Eq, Hash, Clone, Copy)]
/// API method identifier, grouped by realm.
pub enum Method {
    Trading(TradingMethod),
}
/// type alias that ensures Ok and Err implement IntoResponse
// todo(steve.sampson): try to make the ok type more generic
pub type FdrApiResult<T> = Result<(StatusCode, Json<T>), FdrApiError>;
#[derive(Debug, Default, PartialEq, Eq, Hash, Clone)]
/// An HTTP error response: status code plus a JSON error body.
pub struct FdrApiError {
    status_code: StatusCode,
    body: FdrApiErrorBody,
}
#[derive(Debug, serde::Serialize, serde::Deserialize, Default, PartialEq, Eq, Hash, Clone)]
/// JSON error body exchanged with the proxies; the message is optional and
/// omitted from serialization when absent.
pub struct FdrApiErrorBody {
    #[serde(default, skip_serializing_if = "Option::is_none")]
    error: Option<String>,
}
impl Display for FdrApiErrorBody {
    /// Render the error message, or a fixed placeholder when none was set.
    fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
        match self.error.as_deref() {
            Some(msg) => write!(f, "{msg}"),
            None => write!(f, "No error message"),
        }
    }
}
impl FdrApiError {
    /// Build an API error from a status code and optional message.
    pub fn new(status_code: StatusCode, error: Option<String>) -> Self {
        Self {
            status_code,
            body: FdrApiErrorBody { error },
        }
    }
}
impl IntoResponse for FdrApiError {
    /// Convert into an axum response: the stored status plus the JSON body.
    fn into_response(self) -> Response {
        (self.status_code, Json(self.body)).into_response()
    }
}
| 0
|
market
|
/mono/crates/exchange_client/src/realm/market/get_pair.rs
|
use fdr_common::crypto::{assets::CurrencyPair, pairs::TradingPair};
use serde::{Deserialize, Serialize};
use std::collections::HashMap;
#[derive(Debug, Serialize, Deserialize)]
#[serde(rename_all = "camelCase")]
/// Request body for listing all trading pairs (no parameters).
pub struct GetPairsRequest {}
#[derive(Debug, Serialize, Deserialize)]
/// All trading pairs known to the proxy, keyed by currency pair.
pub struct GetPairsResponse {
    pub pairs: HashMap<CurrencyPair, TradingPair>,
}
#[derive(Debug, Serialize, Deserialize)]
#[serde(rename_all = "camelCase")]
/// Request body for fetching a single pair (the pair itself travels in the URL path).
pub struct GetPairRequest {}
#[derive(Debug, Serialize, Deserialize)]
/// A single trading pair; `None` when the proxy does not know the pair.
pub struct GetPairResponse {
    pub pair: Option<TradingPair>,
}
| 0
|
market
|
/mono/crates/exchange_client/src/realm/market/mod.rs
|
use crate::{
error::ExchangeClientResult,
realm::market::{get_asset::GetAssetsResponse, get_book::GetBookResponse, get_pair::GetPairsResponse},
};
use async_trait::async_trait;
use fdr_common::crypto::{
assets::{Asset, CurrencyPair, TradingAsset},
pairs::TradingPair,
};
pub mod get_asset;
pub mod get_book;
pub mod get_pair;
mod implementation;
#[async_trait]
/// Market-data operations exposed by every exchange proxy.
pub trait ExchangeAdapterMarketTrait {
    /// Fetch metadata for one asset; `None` when the exchange does not list it.
    async fn get_asset(&self, req: Asset) -> ExchangeClientResult<Option<TradingAsset>>;
    /// Fetch metadata for all listed assets.
    async fn get_assets(&self) -> ExchangeClientResult<GetAssetsResponse>;
    /// Fetch one trading pair; `None` when the exchange does not list it.
    async fn get_pair(&self, req: CurrencyPair) -> ExchangeClientResult<Option<TradingPair>>;
    /// Fetch all trading pairs.
    async fn get_pairs(&self) -> ExchangeClientResult<GetPairsResponse>;
    /// Fetch all order books; `tpl_only` restricts the payload (meaning defined by the proxy).
    async fn get_books(&self, tpl_only: bool) -> ExchangeClientResult<GetBookResponse>;
    /// Fetch the book for one pair; `None` when the proxy has no book for it.
    async fn get_book(&self, pair: CurrencyPair, tpl_only: bool) -> ExchangeClientResult<Option<GetBookResponse>>;
}
| 0
|
market
|
/mono/crates/exchange_client/src/realm/market/get_book.rs
|
use fdr_common::crypto::{assets::CurrencyPair, book::Book};
use serde::{Deserialize, Serialize};
use std::collections::BTreeMap;
#[derive(Debug, Serialize, Deserialize)]
#[serde(rename_all = "camelCase")]
/// Order books keyed by currency pair, each with Kafka offset metadata.
pub struct GetBookResponse {
    pub books: BTreeMap<CurrencyPair, BookWithMeta>,
}
#[derive(Debug, Serialize, Deserialize)]
#[serde(rename_all = "camelCase")]
/// One order book plus the topic/offset bookkeeping used to track snapshots.
pub struct BookWithMeta {
    // Omitted from the wire format when the book itself was not requested.
    #[serde(skip_serializing_if = "Option::is_none")]
    pub book: Option<Book>,
    pub topic: String,
    pub current_offset: i64,
    pub last_snapshot_offset: i64,
}
| 0
|
market
|
/mono/crates/exchange_client/src/realm/market/implementation.rs
|
use async_trait::async_trait;
use http::Method;
use fdr_common::{
crypto::{
assets::{Asset, CurrencyPair, TradingAsset},
pairs::TradingPair,
},
tracing::instrument,
};
use crate::{
error::ExchangeClientResult,
realm::market::{
get_asset::GetAssetsResponse, get_book::GetBookResponse, get_pair::GetPairsResponse, ExchangeAdapterMarketTrait,
},
ExchangeAdapter,
};
#[async_trait]
impl ExchangeAdapterMarketTrait for ExchangeAdapter {
    /// Fetch one asset via `GET market/asset/{asset}`.
    // NOTE(review): the asset is sent both in the path and as a GET body;
    // confirm the proxy reads the path, since GET bodies are often dropped.
    #[instrument(skip(self))]
    async fn get_asset(&self, req: Asset) -> ExchangeClientResult<Option<TradingAsset>> {
        self.send_request(Method::GET, &format!("market/asset/{req}"), req, None)
            .await
    }
    /// Fetch all assets via `GET market/assets`.
    #[instrument(skip(self))]
    async fn get_assets(&self) -> ExchangeClientResult<GetAssetsResponse> {
        self.send_request(Method::GET, "market/assets", {}, None).await
    }
    /// Fetch one pair via `GET market/pair/{pair}`; the path segment uses the
    /// pair's topic-suffix form.
    #[instrument(skip(self))]
    async fn get_pair(&self, req: CurrencyPair) -> ExchangeClientResult<Option<TradingPair>> {
        let pairname = req.topic_suffix();
        self.send_request(Method::GET, &format!("market/pair/{pairname}"), req, None)
            .await
    }
    /// Fetch all pairs via `GET market/pairs`.
    #[instrument(skip(self))]
    async fn get_pairs(&self) -> ExchangeClientResult<GetPairsResponse> {
        self.send_request(Method::GET, "market/pairs", {}, None).await
    }
    /// Fetch all books via `GET market/books`, forwarding `tpl_only` as a query param.
    #[instrument(skip(self))]
    async fn get_books(&self, tpl_only: bool) -> ExchangeClientResult<GetBookResponse> {
        self.send_request(Method::GET, &format!("market/books?tpl_only={tpl_only}"), {}, None)
            .await
    }
    /// Fetch one book via `GET market/book/{pair}`, forwarding `tpl_only` as a query param.
    #[instrument(skip(self))]
    async fn get_book(&self, pair: CurrencyPair, tpl_only: bool) -> ExchangeClientResult<Option<GetBookResponse>> {
        self.send_request(
            Method::GET,
            &format!("market/book/{pair}?tpl_only={tpl_only}"),
            {},
            None,
        )
        .await
    }
}
| 0
|
market
|
/mono/crates/exchange_client/src/realm/market/get_asset.rs
|
use fdr_common::crypto::assets::{Asset, TradingAsset};
use serde::{Deserialize, Serialize};
use std::collections::HashMap;
#[derive(Debug, Serialize, Deserialize)]
#[serde(rename_all = "camelCase")]
/// Request body for listing all assets (no parameters).
pub struct GetAssetsRequest {}
#[derive(Debug, Serialize, Deserialize)]
/// All assets known to the proxy, keyed by asset.
pub struct GetAssetsResponse {
    pub assets: HashMap<Asset, TradingAsset>,
}
#[derive(Debug, Serialize, Deserialize)]
#[serde(rename_all = "camelCase")]
/// Request body for fetching a single asset (the asset travels in the URL path).
pub struct GetAssetRequest {}
#[derive(Debug, Serialize, Deserialize)]
/// A single asset; `None` when the proxy does not know it.
pub struct GetAssetResponse {
    pub asset: Option<TradingAsset>,
}
| 0
|
trading
|
/mono/crates/exchange_client/src/realm/trading/mod.rs
|
use crate::{
error::ExchangeClientResult,
realm::trading::{
cancel_order::{CancelOrderRequest, CancelOrderResponse},
create_order::{CreateOrderRequest, CreateOrderResponse},
},
};
use async_trait::async_trait;
use uuid::Uuid;
pub mod cancel_order;
pub mod create_order;
mod implementation;
#[derive(Debug, PartialEq, Eq, Hash, Clone, Copy)]
/// The trading-realm API methods.
pub enum TradingMethod {
    CreateOrder,
    CancelOrder,
}
#[async_trait]
/// Trait for all exchange adapters
pub trait ExchangeAdapterTradingTrait {
    /// Create a new order
    /// This method will block until the order hits the order book or is rejected by the exchange.
    /// Use [`create_order_async`](ExchangeAdapterTradingTrait::create_order_async) if you want to
    /// return immediately.
    ///
    /// Usage:
    /// ```rust
    /// use rust_decimal::Decimal;
    /// use exchange_client::realm::trading::create_order::CreateOrderRequest;
    /// use exchange_client::realm::trading::ExchangeAdapterTradingTrait;
    /// use fdr_common::crypto::assets::{Asset, CurrencyPair};
    /// use fdr_common::crypto::orders::{FdrOrderSide, FdrOrderType};
    /// use fdr_common::exchange::{Exchange, Market};
    /// use fdr_common::TradingStrategy;
    ///
    /// pub async fn example() {
    ///     let kraken = exchange_client::ExchangeAdapter::new(Exchange::Kraken, TradingStrategy::MarketMakerOnTicker, "http://localhost".to_string(), 3001);
    ///     let res = kraken
    ///         .create_order(
    ///             CreateOrderRequest {
    ///                 type_: FdrOrderType::Limit,
    ///                 side: FdrOrderSide::Sell,
    ///                 volume: Decimal::from(5),
    ///                 price: Some(Decimal::from(10)),
    ///                 expire_time: None,
    ///                 market: Market {
    ///                     pair: CurrencyPair::new(Asset::Dai, Asset::Usd),
    ///                     exchange: Exchange::Kraken,
    ///                 },
    ///                 strat_run_id: None,
    ///                 strategy: TradingStrategy::MarketMakerOnTicker,
    ///             },
    ///             None,
    ///         )
    ///         .await.unwrap();
    /// }
    /// ```
    async fn create_order(
        &self,
        req: CreateOrderRequest,
        idempotency_key: Option<Uuid>,
    ) -> ExchangeClientResult<CreateOrderResponse>;
    /// Create a new order
    /// This method returns as soon as the request is persisted.
    /// The proxy guarantees that the order will be sent to the exchange, and updates (success or
    /// failure) will be pushed to Kafka.
    ///
    /// Usage:
    /// ```
    /// use rust_decimal::Decimal;
    /// use exchange_client::realm::trading::create_order::CreateOrderRequest;
    /// use exchange_client::realm::trading::ExchangeAdapterTradingTrait;
    /// use fdr_common::crypto::assets::{Asset, CurrencyPair};
    /// use fdr_common::crypto::orders::{FdrOrderSide, FdrOrderType};
    /// use fdr_common::exchange::{Exchange, Market};
    /// use fdr_common::TradingStrategy;
    ///
    /// pub async fn example() {
    ///     let kraken = exchange_client::ExchangeAdapter::new(Exchange::Kraken, TradingStrategy::MarketMakerOnTicker, "http://localhost".to_string(), 3001);
    ///     let res = kraken
    ///         .create_order_async(
    ///             CreateOrderRequest {
    ///                 type_: FdrOrderType::Limit,
    ///                 side: FdrOrderSide::Sell,
    ///                 volume: Decimal::from(5),
    ///                 price: Some(Decimal::from(10)),
    ///                 expire_time: None,
    ///                 market: Market {
    ///                     pair: CurrencyPair::new(Asset::Dai, Asset::Usd),
    ///                     exchange: Exchange::Kraken,
    ///                 },
    ///                 strat_run_id: None,
    ///                 strategy: TradingStrategy::MarketMakerOnTicker,
    ///             },
    ///             None,
    ///         )
    ///         .await.unwrap();
    /// }
    /// ```
    async fn create_order_async(
        &self,
        req: CreateOrderRequest,
        idempotency_key: Option<Uuid>,
    ) -> ExchangeClientResult<CreateOrderResponse>;
    /// Cancel an async order
    /// This method will block until the order has been removed from the order book on the exchange
    /// (or the request rejected)
    /// Use [`cancel_order_async`](ExchangeAdapterTradingTrait::cancel_order_async) if you want to
    /// return immediately.
    ///
    /// Usage:
    /// ```rust
    /// use uuid::Uuid;
    /// use exchange_client::realm::trading::cancel_order::CancelOrderRequest;
    /// use exchange_client::realm::trading::ExchangeAdapterTradingTrait;
    /// use fdr_common::exchange::Exchange;
    /// use fdr_common::TradingStrategy;
    ///
    /// pub async fn example() {
    ///     let kraken = exchange_client::ExchangeAdapter::new(Exchange::Kraken, TradingStrategy::MarketMakerOnTicker, "http://localhost".to_string(), 3001);
    ///     let res = kraken
    ///         .cancel_order(
    ///             CancelOrderRequest {
    ///                 order_id: Uuid::new_v4(),
    ///             },
    ///             None,
    ///         )
    ///         .await.unwrap();
    /// }
    /// ```
    async fn cancel_order(
        &self,
        req: CancelOrderRequest,
        idempotency_key: Option<Uuid>,
    ) -> ExchangeClientResult<CancelOrderResponse>;
    /// Cancel an order asynchronously
    /// This method returns as soon as the request is persisted.
    /// The proxy guarantees that the cancellation request will be sent to the exchange.
    /// Any updates (success or failure) will be pushed to Kafka.
    /// Usage:
    /// ```
    /// use uuid::Uuid;
    /// use exchange_client::realm::trading::cancel_order::CancelOrderRequest;
    /// use exchange_client::realm::trading::ExchangeAdapterTradingTrait;
    /// use fdr_common::exchange::Exchange;
    /// use fdr_common::TradingStrategy;
    ///
    /// pub async fn example() {
    ///     let kraken = exchange_client::ExchangeAdapter::new(Exchange::Kraken, TradingStrategy::MarketMakerOnTicker, "http://localhost".to_string(), 3001);
    ///     let res = kraken
    ///         .cancel_order_async(
    ///             CancelOrderRequest {
    ///                 order_id: Uuid::new_v4(),
    ///             },
    ///             None,
    ///         )
    ///         .await.unwrap();
    /// }
    /// ```
    async fn cancel_order_async(
        &self,
        req: CancelOrderRequest,
        idempotency_key: Option<Uuid>,
    ) -> ExchangeClientResult<CancelOrderResponse>;
}
| 0
|
trading
|
/mono/crates/exchange_client/src/realm/trading/create_order.rs
|
use chrono::{DateTime, Utc};
use fdr_common::{
crypto::orders::{FdrOrderSide, FdrOrderStatus, FdrOrderType},
exchange::Market,
TradingStrategy,
};
use rust_decimal::Decimal;
use serde::{Deserialize, Serialize};
use uuid::Uuid;
#[derive(Debug, Clone, PartialEq, Serialize, Deserialize)]
#[serde(rename_all = "camelCase")]
/// Order-creation request sent to the proxy's trading realm.
pub struct CreateOrderRequest {
    #[serde(rename = "type")]
    pub type_: FdrOrderType,
    pub market: Market,
    pub side: FdrOrderSide,
    // Required for limit-style orders; semantics of None are exchange-defined.
    pub price: Option<Decimal>,
    pub volume: Decimal,
    // Parsed from an RFC 3339 string; absent/empty means no expiry.
    #[serde(deserialize_with = "iso8601_deserialize")]
    #[serde(default)]
    #[serde(skip_serializing_if = "Option::is_none")]
    pub expire_time: Option<DateTime<Utc>>,
    #[serde(skip_serializing_if = "Option::is_none")]
    pub strat_run_id: Option<Uuid>,
    pub strategy: TradingStrategy,
}
/// Deserialize an optional RFC 3339 / ISO 8601 timestamp.
///
/// A missing field (handled by `#[serde(default)]`), an explicit JSON `null`,
/// or an empty string all deserialize to `None`; any other string must parse
/// as RFC 3339 and is normalized to UTC.
fn iso8601_deserialize<'de, D>(deserializer: D) -> Result<Option<DateTime<Utc>>, D::Error>
where
    D: serde::Deserializer<'de>,
{
    // Deserializing Option<String> (rather than String) lets an explicit
    // JSON `null` round-trip as None instead of failing with a type error.
    let s = Option::<String>::deserialize(deserializer)?;
    match s.as_deref() {
        None | Some("") => Ok(None),
        Some(raw) => Ok(Some(
            DateTime::parse_from_rfc3339(raw)
                .map_err(serde::de::Error::custom)?
                .with_timezone(&Utc),
        )),
    }
}
#[derive(Debug, Default, PartialEq, Serialize, Deserialize)]
#[serde(rename_all = "camelCase")]
/// Proxy response to an order-creation request.
pub struct CreateOrderResponse {
    // FDR-side order identifier.
    pub order_id: Uuid,
    // Exchange-assigned reference; absent when the order has not reached the exchange.
    #[serde(skip_serializing_if = "Option::is_none")]
    pub exchange_order_ref: Option<String>,
    #[serde(skip_serializing_if = "Option::is_none")]
    pub order_status: Option<FdrOrderStatus>,
}
#[cfg(test)]
mod tests {
    use super::*;
    use fdr_common::{
        crypto::assets::{Asset, CurrencyPair},
        exchange::Exchange,
    };
    // Round-trip the request through JSON and compare.
    #[test]
    fn serialize_deserialize_request() {
        let req = CreateOrderRequest {
            market: Market {
                pair: CurrencyPair::new(Asset::Btc, Asset::Usd),
                exchange: Exchange::Kraken,
            },
            type_: FdrOrderType::Limit,
            side: FdrOrderSide::Sell,
            price: Some(Decimal::new(100, 2)),
            volume: Decimal::new(100, 2),
            expire_time: None,
            strat_run_id: None,
            strategy: TradingStrategy::MarketMakerOnTicker,
        };
        let as_string = serde_json::to_string(&req).unwrap(); // unwrap safe
        let deserialized: CreateOrderRequest = serde_json::from_str(&as_string).unwrap(); // unwrap safe
        assert_eq!(deserialized, req);
    }
    // Round-trip the response through JSON and compare.
    #[test]
    fn serialize_deserialize_response() {
        let resp = CreateOrderResponse {
            order_id: Uuid::new_v4(),
            exchange_order_ref: Some("ref".to_string()),
            order_status: Some(FdrOrderStatus::Filled),
        };
        let as_string = serde_json::to_string(&resp).unwrap(); // unwrap safe
        let deserialized: CreateOrderResponse = serde_json::from_str(&as_string).unwrap(); // unwrap safe
        assert_eq!(deserialized, resp);
    }
}
| 0
|
trading
|
/mono/crates/exchange_client/src/realm/trading/cancel_order.rs
|
use fdr_common::crypto::orders::FdrOrderStatus;
use serde::{Deserialize, Serialize};
use uuid::Uuid;
#[derive(Debug, Serialize, Deserialize)]
#[serde(rename_all = "camelCase")]
/// Cancellation request identifying the FDR order to cancel.
pub struct CancelOrderRequest {
    pub order_id: Uuid,
}
#[derive(Debug, Default, Serialize, Deserialize)]
#[serde(rename_all = "camelCase")]
/// Proxy response to a cancellation; status may be absent for async cancels.
pub struct CancelOrderResponse {
    pub order_status: Option<FdrOrderStatus>,
}
| 0
|
trading
|
/mono/crates/exchange_client/src/realm/trading/implementation.rs
|
use async_trait::async_trait;
use http::Method;
use uuid::Uuid;
use fdr_common::tracing::instrument;
use crate::{
error::ExchangeClientResult,
realm::trading::{
cancel_order::{CancelOrderRequest, CancelOrderResponse},
create_order::{CreateOrderRequest, CreateOrderResponse},
ExchangeAdapterTradingTrait,
},
ExchangeAdapter, ExchangeAdapterTrait,
};
#[async_trait]
impl ExchangeAdapterTradingTrait for ExchangeAdapter {
    /// Blocking order creation: verifies the request targets this adapter's
    /// exchange, then POSTs to `trading/create`.
    #[instrument(skip(self))]
    async fn create_order(
        &self,
        req: CreateOrderRequest,
        idempotency_key: Option<Uuid>,
    ) -> ExchangeClientResult<CreateOrderResponse> {
        self.confirm_exchange(req.market.exchange)?;
        self.send_request(Method::POST, "trading/create", req, idempotency_key)
            .await
    }
    /// Fire-and-persist order creation via `trading/create_async`.
    #[instrument(skip(self))]
    async fn create_order_async(
        &self,
        req: CreateOrderRequest,
        idempotency_key: Option<Uuid>,
    ) -> ExchangeClientResult<CreateOrderResponse> {
        self.confirm_exchange(req.market.exchange)?;
        self.send_request(Method::POST, "trading/create_async", req, idempotency_key)
            .await
    }
    /// Blocking cancellation via `trading/cancel`.
    // NOTE(review): cancel requests carry no market, so no exchange check is
    // possible here — confirm the proxy validates ownership of order_id.
    #[instrument(skip(self))]
    async fn cancel_order(
        &self,
        req: CancelOrderRequest,
        idempotency_key: Option<Uuid>,
    ) -> ExchangeClientResult<CancelOrderResponse> {
        self.send_request(Method::POST, "trading/cancel", req, idempotency_key)
            .await
    }
    /// Fire-and-persist cancellation via `trading/cancel_async`.
    #[instrument(skip(self))]
    async fn cancel_order_async(
        &self,
        req: CancelOrderRequest,
        idempotency_key: Option<Uuid>,
    ) -> ExchangeClientResult<CancelOrderResponse> {
        self.send_request(Method::POST, "trading/cancel_async", req, idempotency_key)
            .await
    }
}
| 0
|
user_data
|
/mono/crates/exchange_client/src/realm/user_data/mod.rs
|
use async_trait::async_trait;
use fdr_common::crypto::orders::OrderEntry;
use crate::{error::ExchangeClientResult, realm::user_data::get_balance::GetBalanceResponse};
use self::get_trade_volume::{GetTradeVolumeRequest, GetTradeVolumeResponse};
pub mod get_balance;
pub mod get_trade_volume;
mod implementation;
#[async_trait]
/// Trait for all exchange adapters
pub trait ExchangeAdapterUserDataTrait {
    /// Get all balances
    /// Usage:
    /// ```
    /// use exchange_client::realm::user_data::ExchangeAdapterUserDataTrait;
    /// use fdr_common::exchange::Exchange;
    /// use fdr_common::TradingStrategy;
    ///
    /// pub async fn example() {
    ///     let kraken = exchange_client::ExchangeAdapter::new(Exchange::Kraken, TradingStrategy::MarketMakerOnTicker, "http://localhost".to_string(), 3001);
    ///     let res = kraken
    ///         .get_balances()
    ///         .await.unwrap();
    /// }
    /// ```
    async fn get_balances(&self) -> ExchangeClientResult<GetBalanceResponse>;
    /// Get 30-day style trade volume and fee schedules for the given pairs.
    async fn get_trade_volume(&self, req: GetTradeVolumeRequest) -> ExchangeClientResult<GetTradeVolumeResponse>;
    /// List all currently open orders.
    async fn get_open_orders(&self) -> ExchangeClientResult<Vec<OrderEntry>>;
}
| 0
|
user_data
|
/mono/crates/exchange_client/src/realm/user_data/implementation.rs
|
use async_trait::async_trait;
use fdr_common::crypto::orders::OrderEntry;
use http::Method;
use serde_json::json;
use fdr_common::tracing::instrument;
use crate::{
error::ExchangeClientResult,
realm::user_data::{
get_balance::{GetBalanceRequest, GetBalanceResponse},
get_trade_volume::{GetTradeVolumeRequest, GetTradeVolumeResponse},
ExchangeAdapterUserDataTrait,
},
ExchangeAdapter,
};
#[async_trait]
impl ExchangeAdapterUserDataTrait for ExchangeAdapter {
    /// Fetch balances for the configured strategy via `GET user_data/balance`.
    #[instrument(skip(self))]
    async fn get_balances(&self) -> ExchangeClientResult<GetBalanceResponse> {
        self.send_request(
            Method::GET,
            "user_data/balance",
            json!(GetBalanceRequest {
                strategy: self.config.strategy
            }),
            None,
        )
        .await
    }
    /// Fetch trade volume and fee data via `GET user_data/trade_volume`.
    #[instrument(skip(self))]
    async fn get_trade_volume(&self, req: GetTradeVolumeRequest) -> ExchangeClientResult<GetTradeVolumeResponse> {
        self.send_request(Method::GET, "user_data/trade_volume", json!(req), None)
            .await
    }
    /// Fetch open orders via `GET user_data/open_orders` (empty JSON body).
    #[instrument(skip(self))]
    async fn get_open_orders(&self) -> ExchangeClientResult<Vec<OrderEntry>> {
        self.send_request(Method::GET, "user_data/open_orders", json!({}), None)
            .await
    }
}
| 0
|
user_data
|
/mono/crates/exchange_client/src/realm/user_data/get_trade_volume.rs
|
use std::collections::HashMap;
use fdr_common::crypto::{
assets::{Asset, CurrencyPair},
pairs::FeeData,
};
use rust_decimal::Decimal;
use serde::{Deserialize, Serialize};
#[derive(Debug, Serialize, Deserialize)]
/// Pairs for which fee/volume data is requested.
pub struct GetTradeVolumeRequest {
    pub pairs: Vec<CurrencyPair>,
}
#[derive(Debug, Serialize, Deserialize)]
/// Trade volume in `currency` plus taker (`fees`) and maker (`fees_maker`)
/// fee schedules keyed by pair.
pub struct GetTradeVolumeResponse {
    pub currency: Asset,
    pub volume: Decimal,
    pub fees: HashMap<CurrencyPair, FeeData>,
    pub fees_maker: HashMap<CurrencyPair, FeeData>,
}
| 0
|
user_data
|
/mono/crates/exchange_client/src/realm/user_data/get_balance.rs
|
use std::collections::HashMap;
use fdr_common::crypto::{asset_balance::AssetBalance, assets::Asset};
use serde::{Deserialize, Serialize};
use crate::TradingStrategy;
#[derive(Debug, Serialize, Deserialize)]
#[serde(rename_all = "camelCase")]
/// Balance query scoped to a single trading strategy.
pub struct GetBalanceRequest {
    pub strategy: TradingStrategy,
}
#[derive(Debug, Serialize, Deserialize)]
/// Per-asset balances returned by the proxy.
pub struct GetBalanceResponse {
    pub balances: HashMap<Asset, AssetBalance>,
}
| 0
|
archive_service
|
/mono/crates/archive_service/Cargo.toml
|
[package]
name = "archive-service"
version = "0.1.0"
edition = "2021"
authors = ["Justin Heisler"]
description = "Stores and retrieves historical data from the exchange"
[[bin]]
name = "archive-service"
path = "src/service.rs"
[[bin]]
name = "archive-ohlcv"
path = "src/ohlcv.rs"
[dependencies]
fdr-common = { path = "../fdr_common", features = ["tracing"] }
fdr-event = { path = "../fdr_event" }
fdr-http = { path = "../fdr_http" }
fdr-store = { path = "../fdr_store" }
async-trait = { workspace = true }
axum = { workspace = true }
chrono = { workspace = true, features = ["serde"] }
clap = { workspace = true, features = ["derive", "env", "cargo"] }
derive_more = { workspace = true }
http = { workspace = true }
http-body = { workspace = true }
hyper = { workspace = true }
rust_decimal = { workspace = true, features = ["serde", "db-postgres"] }
serde = { workspace = true, features = ["derive"] }
serde_json = { workspace = true }
slog = { workspace = true, features = ["max_level_trace"] }
sqlx = { workspace = true, features = [
"runtime-tokio-native-tls",
"postgres",
"json",
"migrate",
"time",
"uuid",
"rust_decimal",
] }
prometheus = { workspace = true }
thiserror = { workspace = true }
tokio = { workspace = true, features = ["full"] }
tracing = { workspace = true, default-features = false, features = [
"std",
"attributes",
] }
tracing-opentelemetry = { workspace = true, default-features = true }
toml = { workspace = true }
url = { workspace = true }
uuid = { workspace = true, features = ["v4", "fast-rng", "serde"] }
csv = "1.3.0"
| 0
|
src
|
/mono/crates/archive_service/src/service.rs
|
use archive_service::{
config::Config,
context::Context,
error::{ArchiveServiceError, ArchiveServiceResult},
};
use async_trait::async_trait;
use axum::Router;
use fdr_common::{
context::FdrServiceContext,
task::{FdrTask, FdrTaskManagerBuilder, FdrTaskResult, ShutdownType},
ticker::FdrTickerUpdate,
utils::cli::{Args, Parser},
};
use fdr_event::{
events::{FdrBookUpdate, FdrEvent},
FdrEventConsumer,
};
use fdr_http::Server;
use fdr_store::archive::ArchiveStore;
use slog::{info, trace, warn};
use std::sync::Arc;
#[tokio::main]
async fn main() -> ArchiveServiceResult<()> {
    // Parse CLI/env config, build the shared context, then run the HTTP
    // server and the Kafka-consuming archive loop under the task manager.
    let args: Args<Config> = Args::parse();
    let ctx = Arc::new(Context::from_config(args.config.clone()).await?);
    info!(ctx.logger, "Archive service started"; "version" => env!("CARGO_PKG_VERSION"));
    let mut task_manager = FdrTaskManagerBuilder::new(ctx.logger().clone())
        .add_task(HttpServerTask(ctx.clone()))
        .add_task(ArchiveTask { ctx })
        .build();
    task_manager.run().await;
    Ok(())
}
/// Task that consumes FDR events from Kafka and persists them to Postgres.
struct ArchiveTask {
    ctx: Arc<Context>,
}
#[async_trait]
impl FdrTask<ArchiveServiceError> for ArchiveTask {
    /// Consume events forever; individual handling errors are logged and the
    /// loop continues rather than terminating the task.
    async fn run(self: Box<Self>) -> FdrTaskResult<ArchiveServiceError> {
        info!(self.ctx.logger, "Listening for events...");
        loop {
            if let Err(e) = self.await_and_handle_event().await {
                warn!(self.ctx.logger, "error handling kafka message"; "error" => format!("{:?}", e));
            }
        }
    }
    fn name(&self) -> String {
        "Archive Service".to_string()
    }
    fn shutdown_type(&self) -> ShutdownType {
        ShutdownType::Managed
    }
}
impl ArchiveTask {
    /// Receive one event, archive ticker/book updates (others are ignored),
    /// and commit the Kafka offset.
    // NOTE(review): the offset is committed even when handling failed, so a
    // failed record is skipped rather than retried — confirm this is intended.
    async fn await_and_handle_event(&self) -> ArchiveServiceResult<()> {
        let event = self.ctx.kafka_consumer.recv_fdr_event_with_metadata().await?;
        let res = match &event.event {
            FdrEvent::Ticker(ticker) => self.handle_ticker_event(ticker).await,
            FdrEvent::Book(book) => self.handle_book_event(book).await,
            _ => Ok(()),
        };
        self.ctx.kafka_consumer.commit_message(&event)?;
        res
    }
    /// Convert a book update into an archive record and persist it.
    async fn handle_book_event(&self, book: &FdrBookUpdate) -> ArchiveServiceResult<()> {
        trace!(self.ctx.logger, "saving book entry"; "book" => ?book);
        Ok(self
            .ctx
            .pool
            .acquire()
            .await?
            .save_archive_record(book.try_into()?)
            .await?)
    }
    /// Convert a ticker update into an archive record and persist it.
    async fn handle_ticker_event(&self, ticker: &FdrTickerUpdate) -> ArchiveServiceResult<()> {
        trace!(self.ctx.logger, "saving ticker entry"; "ticker" => ?ticker);
        Ok(self
            .ctx
            .pool
            .acquire()
            .await?
            .save_archive_record(ticker.try_into()?)
            .await?)
    }
}
}
/// Task serving the (currently route-less) HTTP endpoint for the service.
struct HttpServerTask(Arc<Context>);
#[async_trait]
impl FdrTask<ArchiveServiceError> for HttpServerTask {
    /// Serve an empty axum router on the configured listen address and port.
    async fn run(self: Box<Self>) -> FdrTaskResult<ArchiveServiceError> {
        Server::serve(
            self.0.clone(),
            self.0.config.server.listen_address.clone(),
            self.0.config.server.port,
            Router::new(),
        )
        .await
    }
    fn name(&self) -> String {
        "HTTP Server".to_string()
    }
    fn shutdown_type(&self) -> ShutdownType {
        ShutdownType::Managed
    }
}
| 0
|
src
|
/mono/crates/archive_service/src/metrics.rs
|
use fdr_common::error::FdrResult;
use prometheus::Registry;
use std::sync::Arc;
/// Prometheus registry wrapper for the archive service.
pub struct Metrics {
    pub registry: Arc<Registry>,
}
impl Metrics {
    /// Create a registry whose metric names are prefixed with `prefix`.
    pub fn new(prefix: &str) -> FdrResult<Self> {
        let registry = Registry::new_custom(Some(prefix.to_string()), None)?;
        Ok(Self {
            registry: Arc::new(registry),
        })
    }
}
| 0
|
src
|
/mono/crates/archive_service/src/error.rs
|
use thiserror::Error;
use fdr_common::error::FdrError;
use fdr_event::FdrEventError;
use fdr_store::error::FdrStoreError;
#[derive(Error, Debug)]
/// All failure modes of the archive service: wrapped errors from the FDR
/// libraries, storage/Kafka/HTTP dependencies, plus service-specific cases.
pub enum ArchiveServiceError {
    #[error(transparent)]
    FdrError(#[from] FdrError),
    #[error("Received Kafka Message w/No Body")]
    EmptyPayload,
    #[error(transparent)]
    Sqlx(#[from] sqlx::Error),
    #[error(transparent)]
    DbMigration(#[from] sqlx::migrate::MigrateError),
    #[error(transparent)]
    Kafka(#[from] FdrEventError),
    #[error(transparent)]
    SerdeJson(#[from] serde_json::Error),
    #[error(transparent)]
    Io(#[from] std::io::Error),
    #[error(transparent)]
    Uuid(#[from] uuid::Error),
    #[error(transparent)]
    Decimal(#[from] rust_decimal::Error),
    #[error(transparent)]
    ParseUrl(#[from] url::ParseError),
    #[error(transparent)]
    Hyper(#[from] hyper::Error),
    #[error(transparent)]
    AddrParse(#[from] std::net::AddrParseError),
    #[error(transparent)]
    Prometheus(#[from] prometheus::Error),
    // Service-specific API/database error cases below.
    #[error("Missing strategy")]
    MissingStrategy,
    #[error("{0}")]
    MissingField(String),
    #[error("Could not find the specified row")]
    RowNotfound,
    #[error("Invalid API Usage: {0}")]
    ApiError(String),
    #[error("The database returned an error: {0}")]
    DatabaseError(String),
    #[error("{0}")]
    ResourceAlreadyExists(String),
    #[error("Required field {0} was not provided")]
    RequiredField(String),
    #[error(transparent)]
    FdrStoreError(#[from] FdrStoreError),
}
/// Convenience alias used throughout the archive service.
pub type ArchiveServiceResult<T> = Result<T, ArchiveServiceError>;
| 0
|
src
|
/mono/crates/archive_service/src/ohlcv.rs
|
use chrono::DateTime;
use fdr_common::{
crypto::assets::CurrencyPair,
error::FdrError,
exchange::Exchange,
utils::cli::{Args, Parser},
};
use fdr_event::FdrEventError;
use fdr_store::{
archive::{models::OhlcDataDto, ArchiveStore},
config::PostgresConfig,
core::pool::{init_postgres_pool_from_config, Pool},
error::FdrStoreError,
};
use rust_decimal::Decimal;
use std::{fs::File, num::ParseIntError, str::FromStr, sync::Arc};
use thiserror::Error;
/// Number of candles buffered before each bulk insert.
const BATCH_SIZE: usize = 1000;

/// One-shot importer: streams a CSV dump of OHLCV candles into the archive
/// store in batches of `BATCH_SIZE`.
///
/// Expected CSV column layout:
/// 0: unix timestamp (secs), 1: open, 2: high, 3: low, 4: close,
/// 5: volume (optional), 6: trade count (optional).
#[allow(dead_code)]
#[tokio::main]
async fn main() -> Result<(), OhlcvError> {
    let args: Args<OhlcvConfig> = Args::parse();
    let pool = init_postgres_pool_from_config(&args.config.postgres).await?;
    let ctx = Arc::new(Context {
        config: args.config,
        pool,
    });
    println!("Archive service started");
    let file = File::open(ctx.config.ohlcv_params.data_file.clone())?;
    let mut rdr = csv::Reader::from_reader(file);
    let mut candles: Vec<OhlcDataDto> = Vec::new();
    let mut conn = ctx.pool.acquire().await?;
    let mut batch_number = 0;
    for result in rdr.records() {
        // Propagate malformed rows instead of panicking; OhlcvError already
        // has a CsvError variant for exactly this case (previously unwrap()).
        let record = result?;
        let candle = OhlcDataDto {
            exchange: ctx.config.ohlcv_params.exchange,
            base: ctx.config.ohlcv_params.pair.base(),
            quote: ctx.config.ohlcv_params.pair.quote(),
            interval: ctx.config.ohlcv_params.candle_interval_mins,
            // Missing column or out-of-range timestamp is an error, not a panic.
            timestamp: DateTime::from_timestamp(
                i64::from_str(record.get(0).ok_or(OhlcvError::ErrorVariant)?)?,
                0,
            )
            .ok_or(OhlcvError::ErrorVariant)?,
            open: Decimal::from_str(record.get(1).ok_or(OhlcvError::ErrorVariant)?)?,
            high: Decimal::from_str(record.get(2).ok_or(OhlcvError::ErrorVariant)?)?,
            low: Decimal::from_str(record.get(3).ok_or(OhlcvError::ErrorVariant)?)?,
            close: Decimal::from_str(record.get(4).ok_or(OhlcvError::ErrorVariant)?)?,
            vwap: None, // Kraken API provides VWAP but its not present in data dumps..
            volume: record.get(5).and_then(|volume| Decimal::from_str(volume).ok()),
            count: record.get(6).and_then(|count| i32::from_str(count).ok()),
        };
        candles.push(candle);
        if candles.len() == BATCH_SIZE {
            batch_number += 1;
            println!(
                "Depositing candles batch {} (total: {:?})",
                batch_number,
                batch_number * BATCH_SIZE
            );
            // mem::take hands the full batch to the store without cloning and
            // leaves an empty Vec behind for the next batch.
            conn.save_candles(std::mem::take(&mut candles)).await?;
        }
    }
    // Flush the final partial batch, skipping a pointless empty insert.
    if !candles.is_empty() {
        println!("Depositing remaining {:?} candles", candles.len());
        conn.save_candles(candles).await?;
    }
    println!("Ohlcv archive complete");
    Ok(())
}
/// Top-level configuration for the one-shot OHLCV import binary.
#[derive(Debug, Clone, serde::Deserialize)]
pub struct OhlcvConfig {
    pub postgres: PostgresConfig,
    pub ohlcv_params: OhlcvParamsConfig,
}
/// Describes the data set being imported: which exchange/pair the CSV
/// belongs to, its candle interval, and where the file lives.
#[derive(Debug, Clone, serde::Deserialize)]
pub struct OhlcvParamsConfig {
    pub exchange: Exchange,
    pub pair: CurrencyPair,
    // Candle width in minutes; stored as-is into OhlcDataDto::interval.
    pub candle_interval_mins: i32,
    // Path to the CSV file containing the candles to import.
    pub data_file: String,
}
/// Shared state for the import run: parsed config plus a Postgres pool.
pub struct Context {
    pub config: OhlcvConfig,
    pub pool: Pool,
}
/// Error type for the OHLCV import binary.
///
/// Mostly transparent wrappers over library errors; `ErrorVariant` is a
/// catch-all for missing/invalid CSV fields during parsing.
#[derive(Error, Debug)]
pub enum OhlcvError {
    #[error(transparent)]
    FdrError(#[from] FdrError),
    #[error(transparent)]
    Sqlx(#[from] sqlx::Error),
    #[error(transparent)]
    DbMigration(#[from] sqlx::migrate::MigrateError),
    #[error(transparent)]
    Kafka(#[from] FdrEventError),
    #[error(transparent)]
    SerdeJson(#[from] serde_json::Error),
    #[error(transparent)]
    Io(#[from] std::io::Error),
    #[error(transparent)]
    Uuid(#[from] uuid::Error),
    #[error(transparent)]
    Decimal(#[from] rust_decimal::Error),
    #[error(transparent)]
    ParseUrl(#[from] url::ParseError),
    #[error(transparent)]
    Hyper(#[from] hyper::Error),
    #[error(transparent)]
    AddrParse(#[from] std::net::AddrParseError),
    #[error(transparent)]
    Prometheus(#[from] prometheus::Error),
    #[allow(dead_code)]
    #[error("Could not find the specified row")]
    RowNotfound,
    #[error(transparent)]
    FdrStoreError(#[from] FdrStoreError),
    /// Generic marker for a missing or unparsable CSV field.
    #[error("error variant")]
    ErrorVariant,
    #[error(transparent)]
    CsvError(#[from] csv::Error),
    #[error(transparent)]
    ParseIntError(#[from] ParseIntError),
}
| 0
|
src
|
/mono/crates/archive_service/src/lib.rs
|
pub mod config;
pub mod context;
pub mod error;
pub mod metrics;
mod ohlcv;
| 0
|
src
|
/mono/crates/archive_service/src/context.rs
|
use async_trait::async_trait;
use prometheus::Registry;
use slog::Logger;
use std::sync::Arc;
use crate::{config::Config, error::ArchiveServiceError, metrics::Metrics};
use fdr_common::{context::FdrServiceContext, logging::default_logger};
use fdr_event::kafka::{consumer::KafkaConsumer, KafkaContext};
use fdr_store::core::pool::{init_postgres_pool_from_config, Pool};
pub struct Context {
pub config: Config,
pub pool: Pool,
pub kafka_consumer: KafkaConsumer,
pub logger: Logger,
pub metrics: Metrics,
}
#[async_trait]
impl FdrServiceContext<Config> for Context {
type Context = Context;
type Error = ArchiveServiceError;
async fn from_config(config: Config) -> Result<Self::Context, Self::Error> {
let logger = default_logger();
let kafka_consumer = KafkaConsumer::new(&config.kafka_consumer, KafkaContext::with_logger(logger.clone()))?;
let pool = init_postgres_pool_from_config(&config.postgres).await?;
let metrics = Metrics::new(config.metrics.prefix.as_str())?;
Ok(Self {
config,
pool,
metrics,
kafka_consumer,
logger,
})
}
fn logger(&self) -> Logger {
self.logger.clone()
}
fn metrics_registry(&self) -> Arc<Registry> {
self.metrics.registry.clone()
}
}
| 0
|
config
|
/mono/crates/archive_service/src/config/mod.rs
|
use fdr_common::{
config::{MetricsConfig, ServerConfig},
crypto::assets::CurrencyPair,
exchange::Exchange,
};
use fdr_event::kafka::consumer::KafkaConsumerConfig;
use fdr_store::config::PostgresConfig;
/// Configuration for the archive service: Kafka source, Postgres sink,
/// HTTP server, and metrics settings.
#[derive(Debug, Clone, serde::Deserialize)]
pub struct Config {
    pub kafka_consumer: KafkaConsumerConfig,
    pub postgres: PostgresConfig,
    pub server: ServerConfig,
    pub metrics: MetricsConfig,
}
/// NOTE(review): near-duplicate of `OhlcvConfig` in `src/ohlcv.rs` (which has
/// a `data_file` field and an `i32` interval) — consider consolidating.
#[derive(Debug, Clone, serde::Deserialize)]
pub struct OhlcvConfig {
    pub postgres: PostgresConfig,
    pub ohlcv_params: OhlcvParamsConfig,
}
#[derive(Debug, Clone, serde::Deserialize)]
pub struct OhlcvParamsConfig {
    pub exchange: Exchange,
    pub pair: CurrencyPair,
    // Presumably unix timestamps bounding the requested range — TODO confirm units.
    pub start_time: i64,
    pub end_time: i64,
    // NOTE(review): i64 here but i32 in ohlcv.rs and OhlcDataDto — confirm intended width.
    pub candle_interval_mins: i64,
}
| 0
|
fdr_store
|
/mono/crates/fdr_store/Cargo.toml
|
[package]
name = "fdr-store"
version = "0.1.0"
edition = "2021"
# See more keys and their definitions at https://doc.rust-lang.org/cargo/reference/manifest.html
[dependencies]
fdr-common = { path = "../fdr_common" }
fdr-event = { path = "../fdr_event" }
technical_analysis = { path = "../technical_analysis" }
async-trait = { workspace = true }
chrono = { workspace = true, features = ["serde"] }
lazy_static = { workspace = true }
rust_decimal = { workspace = true }
serde = { workspace = true, features = ["derive"] }
serde_json = { workspace = true }
sqlx = { workspace = true, features = [
"runtime-tokio-native-tls",
"postgres",
"json",
"migrate",
"time",
"uuid",
"chrono",
"rust_decimal",
"sqlite",
"any"
] }
thiserror = { workspace = true }
tracing = { workspace = true, default-features = false, features = [
"std",
"attributes",
] }
tracing-opentelemetry = { workspace = true, default-features = true }
uuid = { workspace = true, features = ["v4", "fast-rng", "serde"] }
derive_more = { workspace = true }
tracing-attributes = { workspace = true }
url = { workspace = true }
tokio = { workspace = true }
| 0
|
src
|
/mono/crates/fdr_store/src/error.rs
|
use sqlx::migrate::MigrateError;
use thiserror::Error;
/// Error type for the store crate.
///
/// The two `Disallowed*` variants carry trigger messages raised by the
/// database schema itself (see `parse_db_error`).
#[derive(Error, Debug)]
pub enum FdrStoreError {
    #[error(transparent)]
    Sqlx(#[from] sqlx::Error),
    #[error("{0}")]
    DisallowedOrderStatusTransition(String),
    #[error("{0}")]
    DisallowedOrderCancellationStatusTransition(String),
    #[error("The database returned an error: {0}")]
    DatabaseError(String),
    #[error("Required field {0} was not provided")]
    RequiredField(String),
    #[error(transparent)]
    UrlParse(#[from] url::ParseError),
    #[error(transparent)]
    Json(#[from] serde_json::Error),
    #[error(transparent)]
    MigrateError(#[from] MigrateError),
}
/// Convenience alias used throughout the store crate.
pub type FdrStoreResult<T> = Result<T, FdrStoreError>;
pub fn parse_db_error(e: sqlx::Error) -> FdrStoreError {
match e {
sqlx::Error::Database(e) => match e.message() {
// do not change this msg, it comes from DB schema
"Can not cancel order which has reached terminal state" => {
FdrStoreError::DisallowedOrderStatusTransition(e.message().to_string())
}
// do not change this msg, it comes from DB schema
"Can not cancel order which has not hit the exchange" => {
FdrStoreError::DisallowedOrderCancellationStatusTransition(e.message().to_string())
}
_ => FdrStoreError::DatabaseError(format!(
"Code: {}, Message: {}",
e.code().unwrap_or_default(),
e.message()
)),
},
_ => e.into(),
}
}
| 0
|
src
|
/mono/crates/fdr_store/src/config.rs
|
use crate::error::FdrStoreError;
use fdr_common::utils::config::from_file_or_const;
use serde::{Deserialize, Serialize};
use url::Url;
// Serde default providers expressed as fn-pointer consts so they can be
// referenced by name in `#[serde(default = "...")]` attributes below.
const DEFAULT_PORT: fn() -> u32 = || 5432;
const DEFAULT_MAX_CONNECTIONS: fn() -> u32 = || 10;
/// Connection settings for a Postgres database.
#[derive(Debug, Clone, Serialize, Deserialize)]
pub struct PostgresConfig {
    pub hostname: String,
    #[serde(default = "DEFAULT_PORT")]
    pub port: u32,
    pub database: String,
    #[serde(default = "DEFAULT_MAX_CONNECTIONS")]
    pub max_connections: u32,
    // Credentials may be given inline or resolved via `from_file_or_const`
    // (presumably a file-path indirection — confirm against fdr_common).
    #[serde(deserialize_with = "from_file_or_const")]
    pub username: String,
    #[serde(deserialize_with = "from_file_or_const")]
    pub password: String,
}
impl TryFrom<&PostgresConfig> for Url {
    type Error = FdrStoreError;
    /// Render the config as a `postgres://user:pass@host:port/db` URL.
    /// NOTE: credentials are not percent-encoded here.
    fn try_from(config: &PostgresConfig) -> Result<Self, Self::Error> {
        let dsn = format!(
            "postgres://{}:{}@{}:{}/{}",
            config.username, config.password, config.hostname, config.port, config.database
        );
        Url::parse(&dsn).map_err(Into::into)
    }
}
| 0
|
src
|
/mono/crates/fdr_store/src/lib.rs
|
pub mod archive;
pub mod backtest;
pub mod config;
pub mod core;
pub mod error;
pub mod indicator;
pub mod metrics;
pub mod opportunities;
pub mod orders;
pub mod trades;
pub mod virtual_accounts;
| 0
|
archive
|
/mono/crates/fdr_store/src/archive/mod.rs
|
use crate::{
archive::models::{ArchivedDataDto, GetArchivesFilter},
core::pool_connection::PoolConnection,
error::FdrStoreResult,
};
use async_trait::async_trait;
use std::collections::VecDeque;
use self::models::{GetCandlesFilter, OhlcDataDto};
pub mod models;
pub mod postgres;
/// Persistence operations for archived market data: raw ticker/book events
/// and OHLC candles.
#[async_trait]
pub trait ArchiveStore {
    /// Persist a single archived ticker/book event.
    async fn save_archive_record(&mut self, record: ArchivedDataDto) -> FdrStoreResult<()>;
    /// Fetch archived events matching `filter` (paged via the filter).
    async fn get_archive_records(&mut self, filter: GetArchivesFilter) -> FdrStoreResult<VecDeque<ArchivedDataDto>>;
    /// Bulk-insert candles; returns a count (presumably rows written — confirm
    /// against the postgres backend).
    async fn save_candles(&mut self, candles: Vec<OhlcDataDto>) -> FdrStoreResult<u64>;
    /// Fetch candles matching `filter` (paged via the filter).
    async fn get_candles(&mut self, filter: GetCandlesFilter) -> FdrStoreResult<VecDeque<OhlcDataDto>>;
}
// Dispatches each operation to the backend behind the connection; only the
// Postgres backend is implemented — the in-memory variant panics.
#[async_trait]
impl ArchiveStore for PoolConnection {
    async fn save_archive_record(&mut self, record: ArchivedDataDto) -> FdrStoreResult<()> {
        match self {
            PoolConnection::Memory => unimplemented!(),
            PoolConnection::Postgres(conn) => postgres::save_archive_record(conn, record).await,
        }
    }
    async fn get_archive_records(&mut self, filter: GetArchivesFilter) -> FdrStoreResult<VecDeque<ArchivedDataDto>> {
        match self {
            PoolConnection::Memory => unimplemented!(),
            PoolConnection::Postgres(conn) => postgres::get_archive_records(conn, filter).await,
        }
    }
    async fn save_candles(&mut self, candles: Vec<OhlcDataDto>) -> FdrStoreResult<u64> {
        match self {
            PoolConnection::Memory => unimplemented!(),
            PoolConnection::Postgres(conn) => postgres::save_candles(conn, candles).await,
        }
    }
    async fn get_candles(&mut self, filter: GetCandlesFilter) -> FdrStoreResult<VecDeque<OhlcDataDto>> {
        match self {
            PoolConnection::Memory => unimplemented!(),
            PoolConnection::Postgres(conn) => postgres::get_candles(conn, filter).await,
        }
    }
}
| 0
|
archive
|
/mono/crates/fdr_store/src/archive/models.rs
|
use chrono::{DateTime, Utc};
use derive_more::Display;
use fdr_common::{
crypto::assets::{Asset, CurrencyPair},
exchange::Exchange,
ticker::FdrTickerUpdate,
};
use fdr_event::events::FdrBookUpdate;
use rust_decimal::Decimal;
use serde_json::Value;
/// Kind of raw market-data event stored in the archive; maps to the
/// Postgres `archive_type` enum.
#[derive(serde::Deserialize, serde::Serialize, PartialEq, Eq, Debug, Copy, Clone, sqlx::Type, Display)]
#[serde(rename_all = "SCREAMING_SNAKE_CASE")]
#[sqlx(type_name = "archive_type", rename_all = "SCREAMING_SNAKE_CASE")]
pub enum ArchiveType {
    Ticker,
    Book,
}
/// One archived event row: the full event serialized into `data` (JSON),
/// with denormalized columns for querying.
#[derive(Debug, Clone, sqlx::FromRow)]
pub struct ArchivedDataDto {
    pub timestamp: DateTime<Utc>,
    pub archive_type: ArchiveType,
    pub exchange: Exchange,
    pub base: Asset,
    pub quote: Asset,
    // Full original event as JSON; round-trips via the TryFrom impls below.
    pub data: Value,
    // Only populated for ticker events; book events store None.
    pub last_price: Option<Decimal>,
}
/// Optional filters plus pagination for querying archived events.
#[derive(Debug, Default)]
pub struct GetArchivesFilter {
    pub pairs: Option<Vec<CurrencyPair>>,
    pub exchange: Option<Exchange>,
    pub page: i32,
    pub page_size: i32,
    pub archive_type: Option<ArchiveType>,
    pub start_time: Option<DateTime<Utc>>,
    pub end_time: Option<DateTime<Utc>>,
}
/// Optional filters plus pagination for querying OHLC candles.
#[derive(Debug, Default)]
pub struct GetCandlesFilter {
    pub pairs: Option<Vec<CurrencyPair>>,
    pub exchange: Option<Exchange>,
    pub page: i32,
    pub page_size: i32,
    pub interval: Option<i64>,
    pub start_time: Option<DateTime<Utc>>,
    pub end_time: Option<DateTime<Utc>>,
}
/// One OHLC candle row. `interval` is the candle width in minutes (see
/// `candle_interval_mins` in the importer config).
#[derive(Debug, Clone, Copy, sqlx::FromRow)]
pub struct OhlcDataDto {
    pub timestamp: DateTime<Utc>,
    pub base: Asset,
    pub quote: Asset,
    pub exchange: Exchange,
    pub interval: i32,
    pub open: Decimal,
    pub high: Decimal,
    pub low: Decimal,
    pub close: Decimal,
    // Optional because some sources (e.g. Kraken CSV dumps) omit these.
    pub vwap: Option<Decimal>,
    pub volume: Option<Decimal>,
    pub count: Option<i32>,
}
impl TryFrom<&FdrTickerUpdate> for ArchivedDataDto {
    type Error = serde_json::Error;
    /// Wrap a ticker update as an archive row, serializing the full event
    /// into the JSON `data` column alongside the denormalized fields.
    fn try_from(event: &FdrTickerUpdate) -> Result<Self, Self::Error> {
        let data = serde_json::to_value(event)?;
        Ok(ArchivedDataDto {
            archive_type: ArchiveType::Ticker,
            timestamp: event.timestamp,
            exchange: event.exchange,
            base: event.pair.base(),
            quote: event.pair.quote(),
            last_price: event.last_price,
            data,
        })
    }
}
impl TryFrom<ArchivedDataDto> for FdrTickerUpdate {
    type Error = serde_json::Error;
    /// Rehydrate the original ticker update from the archived JSON payload.
    fn try_from(event: ArchivedDataDto) -> Result<Self, Self::Error> {
        serde_json::from_value(event.data)
    }
}
impl TryFrom<&FdrBookUpdate> for ArchivedDataDto {
    type Error = serde_json::Error;
    /// Wrap a book update as an archive row. The row is stamped with the
    /// current time and records no last price.
    fn try_from(event: &FdrBookUpdate) -> Result<Self, Self::Error> {
        let data = serde_json::to_value(event)?;
        Ok(ArchivedDataDto {
            archive_type: ArchiveType::Book,
            timestamp: Utc::now(),
            exchange: event.exchange,
            base: event.pair.base(),
            quote: event.pair.quote(),
            last_price: None,
            data,
        })
    }
}
| 0
|
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.