alexaxbreadbytes commited on
Commit
e220ed6
·
1 Parent(s): 859934c

Update V3 Dash

Browse files
Files changed (1) hide show
  1. app.py +632 -237
app.py CHANGED
@@ -20,29 +20,133 @@ import numpy as np
20
  import datetime as dt
21
  import matplotlib.pyplot as plt
22
  from pathlib import Path
 
 
 
 
23
 
24
  import streamlit as st
25
  import plotly.express as px
26
  import altair as alt
27
  import dateutil.parser
28
- import copy
29
 
30
 
31
  # +
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
32
  @st.experimental_memo
33
  def get_hist_info(df_coin, principal_balance,plheader):
34
  numtrades = int(len(df_coin))
35
  numwin = int(sum(df_coin[plheader] > 0))
36
  numloss = int(sum(df_coin[plheader] < 0))
37
- winrate = int(np.round(100*numwin/numtrades,2))
 
 
 
38
 
39
  grosswin = sum(df_coin[df_coin[plheader] > 0][plheader])
40
  grossloss = sum(df_coin[df_coin[plheader] < 0][plheader])
41
- if grossloss !=0:
42
  pfactor = -1*np.round(grosswin/grossloss,2)
43
  else:
44
  pfactor = np.nan
45
  return numtrades, numwin, numloss, winrate, pfactor
 
46
  @st.experimental_memo
47
  def get_rolling_stats(df, lev, otimeheader, days):
48
  max_roll = (df[otimeheader].max() - df[otimeheader].min()).days
@@ -58,273 +162,564 @@ def get_rolling_stats(df, lev, otimeheader, days):
58
  else:
59
  rolling_perc = np.nan
60
  return 100*rolling_perc
 
 
 
 
 
61
 
62
  @st.experimental_memo
 
 
 
 
63
  def filt_df(df, cheader, symbol_selections):
64
- """
65
- Inputs: df (pd.DataFrame), cheader (str) and symbol_selections (list[str]).
66
-
67
- Returns a filtered pd.DataFrame containing only data that matches symbol_selections (list[str])
68
- from df[cheader].
69
- """
70
 
71
  df = df.copy()
72
  df = df[df[cheader].isin(symbol_selections)]
73
 
74
  return df
75
 
76
- @st.experimental_memo
77
- def my_style(v, props=''):
78
- props = 'color:red' if v < 0 else 'color:green'
79
- return props
80
-
81
- @st.experimental_memo
82
- def cc_coding(row):
83
- return ['background-color: lightgrey'] * len(row) if row['Exit Date'] <= datetime.strptime('2022-12-16 00:00:00','%Y-%m-%d %H:%M:%S').date() else [''] * len(row)
 
 
 
84
 
 
 
85
 
86
- @st.cache(ttl=24*3600, allow_output_mutation=True)
87
- def load_data(filename, otimeheader,fmat):
88
- df = pd.read_csv(open(filename,'r'), sep='\t') # so as not to mutate cached value
89
- df.columns = ['Trade','Signal','Entry Date','Buy Price', 'Sell Price','Exit Date', 'P/L per token', 'P/L %', 'Drawdown %']
90
- # df.insert(1, 'Signal', ['Long']*len(df))
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
91
 
92
- df['Buy Price'] = df['Buy Price'].str.replace('$', '', regex=True)
93
- df['Sell Price'] = df['Sell Price'].str.replace('$', '', regex=True)
94
- df['Buy Price'] = df['Buy Price'].str.replace(',', '', regex=True)
95
- df['Sell Price'] = df['Sell Price'].str.replace(',', '', regex=True)
96
- df['P/L per token'] = df['P/L per token'].str.replace('$', '', regex=True)
97
- df['P/L per token'] = df['P/L per token'].str.replace(',', '', regex=True)
98
- df['P/L %'] = df['P/L %'].str.replace('%', '', regex=True)
99
-
100
- df['Buy Price'] = pd.to_numeric(df['Buy Price'])
101
- df['Sell Price'] = pd.to_numeric(df['Sell Price'])
102
- df['P/L per token'] = pd.to_numeric(df['P/L per token'])
103
- df['P/L %'] = pd.to_numeric(df['P/L %'])
104
-
105
  dateheader = 'Date'
106
  theader = 'Time'
107
-
108
  df[dateheader] = [tradetimes.split(" ")[0] for tradetimes in df[otimeheader].values]
109
  df[theader] = [tradetimes.split(" ")[1] for tradetimes in df[otimeheader].values]
110
 
111
  df[otimeheader]= [dateutil.parser.parse(date+' '+time)
112
- for date,time in zip(df[dateheader],df[theader])]
113
-
114
  df[otimeheader] = pd.to_datetime(df[otimeheader])
115
  df['Exit Date'] = pd.to_datetime(df['Exit Date'])
116
  df.sort_values(by=otimeheader, inplace=True)
117
-
118
  df[dateheader] = [dateutil.parser.parse(date).date() for date in df[dateheader]]
119
  df[theader] = [dateutil.parser.parse(time).time() for time in df[theader]]
120
- df['Trade'] = [i+1 for i in range(len(df))] #reindex
121
 
122
- return df
123
-
124
- def runapp():
125
- bot_selections = "Pure Bread"
126
- otimeheader = 'Entry Date'
127
- plheader = 'P/L %'
128
- fmat = '%Y-%m-%d %H:%M:%S'
129
- dollar_cap = 100000.00
130
- fees = .075/100
131
- st.header(f"{bot_selections} Performance Dashboard :bread: :moneybag:")
132
- st.write("Welcome to the Trading Bot Dashboard by BreadBytes! You can use this dashboard to track " +
133
- "the performance of our trading bots.")
134
- # st.sidebar.header("FAQ")
135
-
136
- # with st.sidebar.subheader("FAQ"):
137
- # st.write(Path("FAQ_README.md").read_text())
138
- st.subheader("Choose your settings:")
139
- no_errors = True
140
-
141
- data = load_data("PB-Trade-Log.csv",otimeheader,fmat)
142
- df = data.copy(deep=True)
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
143
 
144
- dateheader = 'Date'
145
- theader = 'Time'
 
 
146
 
147
- with st.form("user input", ):
148
- if no_errors:
149
- with st.container():
150
- col1, col2 = st.columns(2)
151
- with col1:
152
- try:
153
- startdate = st.date_input("Start Date", value=pd.to_datetime(df[otimeheader]).min())
154
- except:
155
- st.error("Please select your exchange or upload a supported trade log file.")
156
- no_errors = False
157
- with col2:
158
- try:
159
- enddate = st.date_input("End Date", value=datetime.today())
160
- except:
161
- st.error("Please select your exchange or upload a supported trade log file.")
162
- no_errors = False
163
- #st.sidebar.subheader("Customize your Dashboard")
164
-
165
- if no_errors and (enddate < startdate):
166
- st.error("End Date must be later than Start date. Please try again.")
167
- no_errors = False
168
- with st.container():
169
- col1,col2 = st.columns(2)
170
- with col2:
171
- lev = st.number_input('Leverage', min_value=1, value=1, max_value= 3, step=1)
172
- with col1:
173
- principal_balance = st.number_input('Starting Balance', min_value=0.00, value=1000.00, max_value= dollar_cap, step=.01)
174
-
175
- #hack way to get button centered
176
- c = st.columns(9)
177
- with c[4]:
178
- submitted = st.form_submit_button("Get Cookin'!")
179
 
180
- signal_map = {'Long': 1, 'Short':-1} # 1 for long #-1 for short
181
-
182
- df['Calculated Return %'] = df['Signal'].map(signal_map)*(1-fees)*((df['Sell Price']-df['Buy Price'])/df['Buy Price'] - fees) #accounts for fees on open and close of trade
183
 
184
-
185
- if submitted and principal_balance * lev > dollar_cap:
186
- lev = np.floor(dollar_cap/principal_balance)
187
- st.error(f"WARNING: (Starting Balance)*(Leverage) exceeds the ${dollar_cap} limit. Using maximum available leverage of {lev}")
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
188
 
189
- if submitted and no_errors:
190
- df = df[(df[dateheader] >= startdate) & (df[dateheader] <= enddate)]
191
-
192
- if len(df) == 0:
193
- st.error("There are no available trades matching your selections. Please try again!")
194
- no_errors = False
195
- if no_errors:
196
- df['Return Per Trade'] = 1+lev*df['Calculated Return %'].values
197
-
198
- df['Compounded Return'] = df['Return Per Trade'].cumprod()
199
- df['New Balance'] = [min(dollar_cap/lev, bal*principal_balance) for bal in df['Compounded Return']]
200
- df['Balance used in Trade'] = np.concatenate([[principal_balance], df['New Balance'].values[:-1]])
201
- df['Net P/L Per Trade'] = (df['Return Per Trade']-1)*df['Balance used in Trade']
202
- df['Cumulative P/L'] = df['Net P/L Per Trade'].cumsum()
203
- cum_pl = df.loc[df.drop('Drawdown %', axis=1).dropna().index[-1],'Cumulative P/L'] + principal_balance
204
-
205
- effective_return = 100*((cum_pl - principal_balance)/principal_balance)
206
-
207
- st.header(f"{bot_selections} Results")
208
- if len(bot_selections) > 1:
209
- st.metric(
210
- "Total Account Balance",
211
- f"${cum_pl:.2f}",
212
- f"{100*(cum_pl-principal_balance)/(principal_balance):.2f} %",
213
- )
214
-
215
- st.line_chart(data=df.drop('Drawdown %', axis=1).dropna(), x='Exit Date', y='Cumulative P/L', use_container_width=True)
216
-
217
- df['Per Trade Return Rate'] = df['Return Per Trade']-1
218
-
219
- totals = pd.DataFrame([], columns = ['# of Trades', 'Wins', 'Losses', 'Win Rate', 'Profit Factor'])
220
- data = get_hist_info(df.drop('Drawdown %', axis=1).dropna(), principal_balance,'Per Trade Return Rate')
221
- totals.loc[len(totals)] = list(i for i in data)
222
-
223
- totals['Cum. P/L'] = cum_pl-principal_balance
224
- totals['Cum. P/L (%)'] = 100*(cum_pl-principal_balance)/principal_balance
225
- #results_df['Avg. P/L'] = (cum_pl-principal_balance)/results_df['# of Trades'].values[0]
226
- #results_df['Avg. P/L (%)'] = 100*results_df['Avg. P/L'].values[0]/principal_balance
227
-
228
- if df.empty:
229
- st.error("Oops! None of the data provided matches your selection(s). Please try again.")
230
- else:
231
- #st.dataframe(totals.style.format({'# of Trades': '{:.0f}','Wins': '{:.0f}','Losses': '{:.0f}','Win Rate': '{:.2f}%','Profit Factor' : '{:.2f}', 'Avg. P/L (%)': '{:.2f}%', 'Cum. P/L (%)': '{:.2f}%', 'Cum. P/L': '{:.2f}', 'Avg. P/L': '{:.2f}'})
232
- #.text_gradient(subset=['Win Rate'],cmap="RdYlGn", vmin = 0, vmax = 100)\
233
- #.text_gradient(subset=['Profit Factor'],cmap="RdYlGn", vmin = 0, vmax = 2), use_container_width=True)
234
- for row in totals.itertuples():
235
- col1, col2, col3, col4 = st.columns(4)
236
- c1, c2, c3, c4 = st.columns(4)
237
- with col1:
238
- st.metric(
239
- "Total Trades",
240
- f"{row._1:.0f}",
241
- )
242
- with c1:
243
- st.metric(
244
- "Profit Factor",
245
- f"{row._5:.2f}",
246
- )
247
- with col2:
248
- st.metric(
249
- "Wins",
250
- f"{row.Wins:.0f}",
251
- )
252
- with c2:
253
- st.metric(
254
- "Cumulative P/L",
255
- f"${row._6:.2f}",
256
- f"{row._7:.2f} %",
257
- )
258
- with col3:
259
- st.metric(
260
- "Losses",
261
- f"{row.Losses:.0f}",
262
- )
263
- with c3:
264
- st.metric(
265
- "Rolling 7 Days",
266
- "",#f"{(1+get_rolling_stats(df,otimeheader, 30))*principal_balance:.2f}",
267
- f"{get_rolling_stats(df,lev, otimeheader, 7):.2f}%",
268
- )
269
- st.metric(
270
- "Rolling 30 Days",
271
- "",#f"{(1+get_rolling_stats(df,otimeheader, 30))*principal_balance:.2f}",
272
- f"{get_rolling_stats(df,lev, otimeheader, 30):.2f}%",
273
- )
274
-
275
- with col4:
276
- st.metric(
277
- "Win Rate",
278
- f"{row._4:.1f}%",
279
- )
280
- with c4:
281
- st.metric(
282
- "Rolling 90 Days",
283
- "",#f"{(1+get_rolling_stats(df,otimeheader, 30))*principal_balance:.2f}",
284
- f"{get_rolling_stats(df,lev, otimeheader, 90):.2f}%",
285
- )
286
- st.metric(
287
- "Rolling 180 Days",
288
- "",#f"{(1+get_rolling_stats(df,otimeheader, 30))*principal_balance:.2f}",
289
- f"{get_rolling_stats(df,lev, otimeheader, 180):.2f}%",
290
- )
291
 
292
- if submitted:
293
- grouped_df = df.groupby('Exit Date').agg({'Signal':'min','Entry Date': 'min','Exit Date': 'max','Buy Price': 'mean',
294
- 'Sell Price' : 'max',
295
- 'Net P/L Per Trade': 'mean',
296
- 'Calculated Return %' : lambda x: np.round(100*lev*x.sum(),2)})
297
- grouped_df.index = range(1, len(grouped_df)+1)
298
- grouped_df.rename(columns={'Buy Price':'Avg. Buy Price',
299
- 'Net P/L Per Trade':'Net P/L',
300
- 'Calculated Return %':'P/L %'}, inplace=True)
301
- else:
302
- grouped_df = df.groupby('Exit Date').agg({'Signal':'min','Entry Date': 'min','Exit Date': 'max','Buy Price': 'mean',
303
- 'Sell Price' : 'max',
304
- 'P/L per token' : 'mean',
305
- 'Calculated Return %' : lambda x: np.round(100*x.sum(),2)})
306
- grouped_df.index = range(1, len(grouped_df)+1)
307
- grouped_df.rename(columns={'Buy Price':'Avg. Buy Price',
308
- 'P/L per token':'Net P/L',
309
- 'Calculated Return %':'P/L %'}, inplace=True)
310
-
311
- st.subheader("Trade Logs")
312
- grouped_df['Entry Date'] = pd.to_datetime(grouped_df['Entry Date'])
313
- grouped_df['Exit Date'] = pd.to_datetime(grouped_df['Exit Date'])
314
- st.dataframe(grouped_df.style.format({'Entry Date':'{:%m-%d-%Y %H:%M:%S}','Exit Date':'{:%m-%d-%Y %H:%M:%S}','Avg. Buy Price': '${:.4f}', 'Sell Price': '${:.4f}', 'Net P/L':'${:.2f}', 'P/L %':'{:.2f}%'})\
315
- .apply(cc_coding, axis=1)\
316
- .applymap(my_style,subset=['Net P/L'])\
317
- .applymap(my_style,subset=['P/L %'])\
318
- ,use_container_width=True)
319
- new_title = '<div style="text-align: right;"><span style="background-color:lightgrey;">&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;</span> Backtest Data</div>'
320
- st.markdown(new_title, unsafe_allow_html=True)
321
-
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
322
  if __name__ == "__main__":
323
  st.set_page_config(
324
  "Trading Bot Dashboard",
325
  layout="wide",
326
  )
327
  runapp()
328
- # -
329
-
330
-
 
20
  import datetime as dt
21
  import matplotlib.pyplot as plt
22
  from pathlib import Path
23
+ import time
24
+ import plotly.graph_objects as go
25
+ import plotly.io as pio
26
+ from PIL import Image
27
 
28
  import streamlit as st
29
  import plotly.express as px
30
  import altair as alt
31
  import dateutil.parser
32
+ from matplotlib.colors import LinearSegmentedColormap
33
 
34
 
35
  # +
36
+ class color:
37
+ PURPLE = '\033[95m'
38
+ CYAN = '\033[96m'
39
+ DARKCYAN = '\033[36m'
40
+ BLUE = '\033[94m'
41
+ GREEN = '\033[92m'
42
+ YELLOW = '\033[93m'
43
+ RED = '\033[91m'
44
+ BOLD = '\033[1m'
45
+ UNDERLINE = '\033[4m'
46
+ END = '\033[0m'
47
+
48
+ @st.experimental_memo
49
+ def print_PL(amnt, thresh, extras = "" ):
50
+ if amnt > 0:
51
+ return color.BOLD + color.GREEN + str(amnt) + extras + color.END
52
+ elif amnt < 0:
53
+ return color.BOLD + color.RED + str(amnt)+ extras + color.END
54
+ elif np.isnan(amnt):
55
+ return str(np.nan)
56
+ else:
57
+ return str(amnt + extras)
58
+
59
+ @st.experimental_memo
60
+ def get_headers(logtype):
61
+ otimeheader = ""
62
+ cheader = ""
63
+ plheader = ""
64
+ fmat = '%Y-%m-%d %H:%M:%S'
65
+
66
+ if logtype == "ByBit":
67
+ otimeheader = 'Create Time'
68
+ cheader = 'Contracts'
69
+ plheader = 'Closed P&L'
70
+ fmat = '%Y-%m-%d %H:%M:%S'
71
+
72
+ if logtype == "BitGet":
73
+ otimeheader = 'Date'
74
+ cheader = 'Futures'
75
+ plheader = 'Realized P/L'
76
+ fmat = '%Y-%m-%d %H:%M:%S'
77
+
78
+ if logtype == "MEXC":
79
+ otimeheader = 'Trade time'
80
+ cheader = 'Futures'
81
+ plheader = 'closing position'
82
+ fmat = '%Y/%m/%d %H:%M'
83
+
84
+ if logtype == "Binance":
85
+ otimeheader = 'Date'
86
+ cheader = 'Symbol'
87
+ plheader = 'Realized Profit'
88
+ fmat = '%Y-%m-%d %H:%M:%S'
89
+
90
+ #if logtype == "Kucoin":
91
+ # otimeheader = 'Time'
92
+ # cheader = 'Contract'
93
+ # plheader = ''
94
+ # fmat = '%Y/%m/%d %H:%M:%S'
95
+
96
+
97
+ if logtype == "Kraken":
98
+ otimeheader = 'time'
99
+ cheader = 'asset'
100
+ plheader = 'amount'
101
+ fmat = '%Y-%m-%d %H:%M:%S.%f'
102
+
103
+ if logtype == "OkX":
104
+ otimeheader = '\ufeffOrder Time'
105
+ cheader = '\ufeffInstrument'
106
+ plheader = '\ufeffPL'
107
+ fmat = '%Y-%m-%d %H:%M:%S'
108
+
109
+ return otimeheader.lower(), cheader.lower(), plheader.lower(), fmat
110
+
111
+ @st.experimental_memo
112
+ def get_coin_info(df_coin, principal_balance,plheader):
113
+ numtrades = int(len(df_coin))
114
+ numwin = int(sum(df_coin[plheader] > 0))
115
+ numloss = int(sum(df_coin[plheader] < 0))
116
+ winrate = np.round(100*numwin/numtrades,2)
117
+
118
+ grosswin = sum(df_coin[df_coin[plheader] > 0][plheader])
119
+ grossloss = sum(df_coin[df_coin[plheader] < 0][plheader])
120
+ if grossloss != 0:
121
+ pfactor = -1*np.round(grosswin/grossloss,2)
122
+ else:
123
+ pfactor = np.nan
124
+
125
+ cum_PL = np.round(sum(df_coin[plheader].values),2)
126
+ cum_PL_perc = np.round(100*cum_PL/principal_balance,2)
127
+ mean_PL = np.round(sum(df_coin[plheader].values/len(df_coin)),2)
128
+ mean_PL_perc = np.round(100*mean_PL/principal_balance,2)
129
+
130
+ return numtrades, numwin, numloss, winrate, pfactor, cum_PL, cum_PL_perc, mean_PL, mean_PL_perc
131
+
132
  @st.experimental_memo
133
  def get_hist_info(df_coin, principal_balance,plheader):
134
  numtrades = int(len(df_coin))
135
  numwin = int(sum(df_coin[plheader] > 0))
136
  numloss = int(sum(df_coin[plheader] < 0))
137
+ if numtrades != 0:
138
+ winrate = int(np.round(100*numwin/numtrades,2))
139
+ else:
140
+ winrate = np.nan
141
 
142
  grosswin = sum(df_coin[df_coin[plheader] > 0][plheader])
143
  grossloss = sum(df_coin[df_coin[plheader] < 0][plheader])
144
+ if grossloss != 0:
145
  pfactor = -1*np.round(grosswin/grossloss,2)
146
  else:
147
  pfactor = np.nan
148
  return numtrades, numwin, numloss, winrate, pfactor
149
+
150
  @st.experimental_memo
151
  def get_rolling_stats(df, lev, otimeheader, days):
152
  max_roll = (df[otimeheader].max() - df[otimeheader].min()).days
 
162
  else:
163
  rolling_perc = np.nan
164
  return 100*rolling_perc
165
+ @st.experimental_memo
166
+ def cc_coding(row):
167
+ return ['background-color: lightgrey'] * len(row) if row['Exit Date'] <= datetime.strptime('2022-12-16 00:00:00','%Y-%m-%d %H:%M:%S').date() else [''] * len(row)
168
+ def ctt_coding(row):
169
+ return ['background-color: lightgrey'] * len(row) if row['Exit Date'] <= datetime.strptime('2023-01-02 00:00:00','%Y-%m-%d %H:%M:%S').date() else [''] * len(row)
170
 
171
  @st.experimental_memo
172
+ def my_style(v, props=''):
173
+ props = 'color:red' if v < 0 else 'color:green'
174
+ return props
175
+
176
  def filt_df(df, cheader, symbol_selections):
 
 
 
 
 
 
177
 
178
  df = df.copy()
179
  df = df[df[cheader].isin(symbol_selections)]
180
 
181
  return df
182
 
183
+ def tv_reformat(close50filename):
184
+ try:
185
+ data = pd.read_csv(open(close50filename,'r'), sep='[,|\t]', engine='python')
186
+ except:
187
+ data = pd.DataFrame([])
188
+
189
+ if data.empty:
190
+ return data
191
+ else:
192
+ entry_df = data[data['Type'].str.contains("Entry")]
193
+ exit_df = data[data['Type'].str.contains("Exit")]
194
 
195
+ entry_df.index = range(len(entry_df))
196
+ exit_df.index = range(len(exit_df))
197
 
198
+ df = pd.DataFrame([], columns=['Trade','Entry Date','Buy Price', 'Sell Price','Exit Date', 'P/L per token', 'P/L %', 'Drawdown %'])
199
+
200
+ df['Signal'] = [string.split(' ')[1] for string in entry_df['Type']]
201
+ df['Trade'] = entry_df.index
202
+ df['Entry Date'] = entry_df['Date/Time']
203
+ df['Buy Price'] = entry_df['Price USDT']
204
+
205
+ df['Sell Price'] = exit_df['Price USDT']
206
+ df['Exit Date'] = exit_df['Date/Time']
207
+ df['P/L per token'] = df['Sell Price'] - df['Buy Price']
208
+ df['P/L %'] = exit_df['Profit %']
209
+ df['Drawdown %'] = exit_df['Drawdown %']
210
+ df['Close 50'] = [int(i == "Close 50% of Position") for i in exit_df['Signal']]
211
+ df = df.sort_values(['Entry Date','Close 50'], ascending = [False, True])
212
+ df.index = range(len(df))
213
+
214
+ df.loc[df['Close 50'] == 1, 'Exit Date'] = np.copy(df.loc[df[df['Close 50'] == 1].index.values -1]['Exit Date'])
215
+
216
+ grouped_df = df.groupby('Entry Date').agg({'Signal' : 'first', 'Entry Date': 'min', 'Buy Price':'mean',
217
+ 'Sell Price' : 'mean',
218
+ 'Exit Date': 'max',
219
+ 'P/L per token': 'mean',
220
+ 'P/L %' : 'mean'})
221
+
222
+ grouped_df.insert(0,'Trade', range(len(grouped_df)))
223
+ grouped_df.index = range(len(grouped_df))
224
+ return grouped_df
225
+
226
+ def load_data(filename, otimeheader, fmat):
227
+ df = pd.read_csv(open(filename,'r'), sep='\t') # so as not to mutate cached value
228
+ close50filename = filename.split('.')[0] + '-50.' + filename.split('.')[1]
229
+ df2 = tv_reformat(close50filename)
230
+
231
+ if filename == "CT-Trade-Log.csv":
232
+ df.columns = ['Trade','Entry Date','Buy Price', 'Sell Price','Exit Date', 'P/L per token', 'P/L %', 'Drawdown %']
233
+ df.insert(1, 'Signal', ['Long']*len(df))
234
+ elif filename == "CC-Trade-Log.csv" or "PB-Trade-Log.csv":
235
+ df.columns = ['Trade','Signal','Entry Date','Buy Price', 'Sell Price','Exit Date', 'P/L per token', 'P/L %', 'Drawdown %']
236
+ else:
237
+ df.columns = ['Trade','Signal','Entry Date','Buy Price', 'Sell Price','Exit Date', 'P/L per token', 'P/L %']
238
+
239
+ if filename != "CT-Toasted-Trade-Log.csv":
240
+ df['Signal'] = df['Signal'].str.replace(' ', '', regex=True)
241
+ df['Buy Price'] = df['Buy Price'].str.replace('$', '', regex=True)
242
+ df['Sell Price'] = df['Sell Price'].str.replace('$', '', regex=True)
243
+ df['Buy Price'] = df['Buy Price'].str.replace(',', '', regex=True)
244
+ df['Sell Price'] = df['Sell Price'].str.replace(',', '', regex=True)
245
+ df['P/L per token'] = df['P/L per token'].str.replace('$', '', regex=True)
246
+ df['P/L per token'] = df['P/L per token'].str.replace(',', '', regex=True)
247
+ df['P/L %'] = df['P/L %'].str.replace('%', '', regex=True)
248
+
249
+ df['Buy Price'] = pd.to_numeric(df['Buy Price'])
250
+ df['Sell Price'] = pd.to_numeric(df['Sell Price'])
251
+ df['P/L per token'] = pd.to_numeric(df['P/L per token'])
252
+ df['P/L %'] = pd.to_numeric(df['P/L %'])
253
+
254
+ if df2.empty:
255
+ df = df
256
+ else:
257
+ df = pd.concat([df,df2], axis=0, ignore_index=True)
258
+
259
+ if filename == "CT-Trade-Log.csv":
260
+ df['Signal'] = ['Long']*len(df)
261
 
 
 
 
 
 
 
 
 
 
 
 
 
 
262
  dateheader = 'Date'
263
  theader = 'Time'
264
+
265
  df[dateheader] = [tradetimes.split(" ")[0] for tradetimes in df[otimeheader].values]
266
  df[theader] = [tradetimes.split(" ")[1] for tradetimes in df[otimeheader].values]
267
 
268
  df[otimeheader]= [dateutil.parser.parse(date+' '+time)
269
+ for date,time in zip(df[dateheader],df[theader])]
 
270
  df[otimeheader] = pd.to_datetime(df[otimeheader])
271
  df['Exit Date'] = pd.to_datetime(df['Exit Date'])
272
  df.sort_values(by=otimeheader, inplace=True)
273
+
274
  df[dateheader] = [dateutil.parser.parse(date).date() for date in df[dateheader]]
275
  df[theader] = [dateutil.parser.parse(time).time() for time in df[theader]]
276
+ df['Trade'] = df.index + 1 #reindex
277
 
278
+ if filename == "CT-Trade-Log.csv":
279
+ df['DCA'] = np.nan
280
+
281
+ for exit in pd.unique(df['Exit Date']):
282
+ df_exit = df[df['Exit Date']==exit]
283
+ if dateutil.parser.parse(str(exit)) < dateutil.parser.parse('2023-02-07 13:00:00'):
284
+ for i in range(len(df_exit)):
285
+ ind = df_exit.index[i]
286
+ df.loc[ind,'DCA'] = i+1
287
+
288
+ else:
289
+ for i in range(len(df_exit)):
290
+ ind = df_exit.index[i]
291
+ df.loc[ind,'DCA'] = i+1.1
292
+ return df
293
+
294
+
295
+ def get_sd_df(sd_df, sd, bot_selections, dca1, dca2, dca3, dca4, dca5, dca6, fees, lev, dollar_cap, principal_balance):
296
+ sd = 2*.00026
297
+ # ------ Standard Dev. Calculations.
298
+ if bot_selections == "Cinnamon Toast":
299
+ dca_map = {1: dca1/100, 2: dca2/100, 3: dca3/100, 4: dca4/100, 1.1: dca5/100, 2.1: dca6/100}
300
+ sd_df['DCA %'] = sd_df['DCA'].map(dca_map)
301
+ sd_df['Calculated Return % (+)'] = df['Signal'].map(signal_map)*(df['DCA %'])*(1-fees)*((df['Sell Price']*(1+df['Signal'].map(signal_map)*sd) - df['Buy Price']*(1-df['Signal'].map(signal_map)*sd))/df['Buy Price']*(1-df['Signal'].map(signal_map)*sd) - fees) #accounts for fees on open and close of trade
302
+ sd_df['Calculated Return % (-)'] = df['Signal'].map(signal_map)*(df['DCA %'])*(1-fees)*((df['Sell Price']*(1-df['Signal'].map(signal_map)*sd)-df['Buy Price']*(1+df['Signal'].map(signal_map)*sd))/df['Buy Price']*(1+df['Signal'].map(signal_map)*sd) - fees) #accounts for fees on open and close of trade
303
+ sd_df['DCA'] = np.floor(sd_df['DCA'].values)
304
+
305
+ sd_df['Return Per Trade (+)'] = np.nan
306
+ sd_df['Return Per Trade (-)'] = np.nan
307
+ sd_df['Balance used in Trade (+)'] = np.nan
308
+ sd_df['Balance used in Trade (-)'] = np.nan
309
+ sd_df['New Balance (+)'] = np.nan
310
+ sd_df['New Balance (-)'] = np.nan
311
+
312
+ g1 = sd_df.groupby('Exit Date').sum(numeric_only=True)['Calculated Return % (+)'].reset_index(name='Return Per Trade (+)')
313
+ g2 = sd_df.groupby('Exit Date').sum(numeric_only=True)['Calculated Return % (-)'].reset_index(name='Return Per Trade (-)')
314
+ sd_df.loc[sd_df['DCA']==1.0,'Return Per Trade (+)'] = 1+lev*g1['Return Per Trade (+)'].values
315
+ sd_df.loc[sd_df['DCA']==1.0,'Return Per Trade (-)'] = 1+lev*g2['Return Per Trade (-)'].values
316
+
317
+ sd_df['Compounded Return (+)'] = sd_df['Return Per Trade (+)'].cumprod()
318
+ sd_df['Compounded Return (-)'] = sd_df['Return Per Trade (-)'].cumprod()
319
+ sd_df.loc[sd_df['DCA']==1.0,'New Balance (+)'] = [min(dollar_cap/lev, bal*principal_balance) for bal in sd_df.loc[sd_df['DCA']==1.0,'Compounded Return (+)']]
320
+ sd_df.loc[sd_df['DCA']==1.0,'Balance used in Trade (+)'] = np.concatenate([[principal_balance], sd_df.loc[sd_df['DCA']==1.0,'New Balance (+)'].values[:-1]])
321
+
322
+ sd_df.loc[sd_df['DCA']==1.0,'New Balance (-)'] = [min(dollar_cap/lev, bal*principal_balance) for bal in sd_df.loc[sd_df['DCA']==1.0,'Compounded Return (-)']]
323
+ sd_df.loc[sd_df['DCA']==1.0,'Balance used in Trade (-)'] = np.concatenate([[principal_balance], sd_df.loc[sd_df['DCA']==1.0,'New Balance (-)'].values[:-1]])
324
+ else:
325
+ sd_df['Calculated Return % (+)'] = df['Signal'].map(signal_map)*(1-fees)*((df['Sell Price']*(1+df['Signal'].map(signal_map)*sd) - df['Buy Price']*(1-df['Signal'].map(signal_map)*sd))/df['Buy Price']*(1-df['Signal'].map(signal_map)*sd) - fees) #accounts for fees on open and close of trade
326
+ sd_df['Calculated Return % (-)'] = df['Signal'].map(signal_map)*(1-fees)*((df['Sell Price']*(1-df['Signal'].map(signal_map)*sd)-df['Buy Price']*(1+df['Signal'].map(signal_map)*sd))/df['Buy Price']*(1+df['Signal'].map(signal_map)*sd) - fees) #accounts for fees on open and close of trade
327
+ sd_df['Return Per Trade (+)'] = np.nan
328
+ sd_df['Return Per Trade (-)'] = np.nan
329
+
330
+ g1 = sd_df.groupby('Exit Date').sum(numeric_only=True)['Calculated Return % (+)'].reset_index(name='Return Per Trade (+)')
331
+ g2 = sd_df.groupby('Exit Date').sum(numeric_only=True)['Calculated Return % (-)'].reset_index(name='Return Per Trade (-)')
332
+ sd_df['Return Per Trade (+)'] = 1+lev*g1['Return Per Trade (+)'].values
333
+ sd_df['Return Per Trade (-)'] = 1+lev*g2['Return Per Trade (-)'].values
334
+
335
+ sd_df['Compounded Return (+)'] = sd_df['Return Per Trade (+)'].cumprod()
336
+ sd_df['Compounded Return (-)'] = sd_df['Return Per Trade (-)'].cumprod()
337
+ sd_df['New Balance (+)'] = [min(dollar_cap/lev, bal*principal_balance) for bal in sd_df['Compounded Return (+)']]
338
+ sd_df['Balance used in Trade (+)'] = np.concatenate([[principal_balance], sd_df['New Balance (+)'].values[:-1]])
339
+
340
+ sd_df['New Balance (-)'] = [min(dollar_cap/lev, bal*principal_balance) for bal in sd_df['Compounded Return (-)']]
341
+ sd_df['Balance used in Trade (-)'] = np.concatenate([[principal_balance], sd_df['New Balance (-)'].values[:-1]])
342
+
343
+ sd_df['Net P/L Per Trade (+)'] = (sd_df['Return Per Trade (+)']-1)*sd_df['Balance used in Trade (+)']
344
+ sd_df['Cumulative P/L (+)'] = sd_df['Net P/L Per Trade (+)'].cumsum()
345
+
346
+ sd_df['Net P/L Per Trade (-)'] = (sd_df['Return Per Trade (-)']-1)*sd_df['Balance used in Trade (-)']
347
+ sd_df['Cumulative P/L (-)'] = sd_df['Net P/L Per Trade (-)'].cumsum()
348
+ return sd_df
349
+
350
+ def runapp() -> None:
351
+ bot_selections = "Pure Bread"
352
+ otimeheader = 'Exit Date'
353
+ fmat = '%Y-%m-%d %H:%M:%S'
354
+ fees = .075/100
355
 
356
+ st.header(f"{bot_selections} Performance Dashboard :bread: :moneybag:")
357
+ no_errors = True
358
+ st.write("Welcome to the Trading Bot Dashboard by BreadBytes! You can use this dashboard to track " +
359
+ "the performance of our trading bots.")
360
 
361
+ if bot_selections == "Cinnamon Toast":
362
+ lev_cap = 5
363
+ dollar_cap = 1000000000.00
364
+ data = load_data("CT-Trade-Log.csv",otimeheader, fmat)
365
+ if bot_selections == "French Toast":
366
+ lev_cap = 3
367
+ dollar_cap = 10000000000.00
368
+ data = load_data("FT-Trade-Log.csv",otimeheader, fmat)
369
+ if bot_selections == "Short Bread":
370
+ lev_cap = 5
371
+ dollar_cap = 1000000000.00
372
+ data = load_data("SB-Trade-Log.csv",otimeheader, fmat)
373
+ if bot_selections == "Cosmic Cupcake":
374
+ lev_cap = 3
375
+ dollar_cap = 1000000000.00
376
+ data = load_data("CC-Trade-Log.csv",otimeheader, fmat)
377
+ if bot_selections == "Pure Bread":
378
+ lev_cap = 3
379
+ dollar_cap = 1000000000.00
380
+ data = load_data("PB-Trade-Log.csv",otimeheader, fmat)
 
 
 
 
 
 
 
 
 
 
 
 
381
 
382
+ df = data.copy(deep=True)
 
 
383
 
384
+ dateheader = 'Date'
385
+ theader = 'Time'
386
+
387
+ st.subheader("Choose your settings:")
388
+ with st.form("user input", ):
389
+ if no_errors:
390
+ with st.container():
391
+ col1, col2 = st.columns(2)
392
+ with col1:
393
+ try:
394
+ startdate = st.date_input("Start Date", value=pd.to_datetime(df[otimeheader]).min())
395
+ except:
396
+ st.error("Please select your exchange or upload a supported trade log file.")
397
+ no_errors = False
398
+ with col2:
399
+ try:
400
+ enddate = st.date_input("End Date", value=datetime.today())
401
+ except:
402
+ st.error("Please select your exchange or upload a supported trade log file.")
403
+ no_errors = False
404
+ #st.sidebar.subheader("Customize your Dashboard")
405
+
406
+ if no_errors and (enddate < startdate):
407
+ st.error("End Date must be later than Start date. Please try again.")
408
+ no_errors = False
409
+ with st.container():
410
+ col1,col2 = st.columns(2)
411
+ with col2:
412
+ lev = st.number_input('Leverage', min_value=1, value=1, max_value= lev_cap, step=1)
413
+ with col1:
414
+ principal_balance = st.number_input('Starting Balance', min_value=0.00, value=1000.00, max_value= dollar_cap, step=.01)
415
 
416
+ if bot_selections == "Cinnamon Toast":
417
+ st.write("Choose your DCA setup (for trades before 02/07/2023)")
418
+ with st.container():
419
+ col1, col2, col3, col4 = st.columns(4)
420
+ with col1:
421
+ dca1 = st.number_input('DCA 1 Allocation', min_value=0, value=25, max_value= 100, step=1)
422
+ with col2:
423
+ dca2 = st.number_input('DCA 2 Allocation', min_value=0, value=25, max_value= 100, step=1)
424
+ with col3:
425
+ dca3 = st.number_input('DCA 3 Allocation', min_value=0, value=25, max_value= 100, step=1)
426
+ with col4:
427
+ dca4 = st.number_input('DCA 4 Allocation', min_value=0, value=25, max_value= 100, step=1)
428
+ st.write("Choose your DCA setup (for trades on or after 02/07/2023)")
429
+ with st.container():
430
+ col1, col2 = st.columns(2)
431
+ with col1:
432
+ dca5 = st.number_input('DCA 1 Allocation', min_value=0, value=50, max_value= 100, step=1)
433
+ with col2:
434
+ dca6 = st.number_input('DCA 2 Allocation', min_value=0, value=50, max_value= 100, step=1)
435
+
436
+ #hack way to get button centered
437
+ c = st.columns(9)
438
+ with c[4]:
439
+ submitted = st.form_submit_button("Get Cookin'!")
440
+
441
+ if submitted and principal_balance * lev > dollar_cap:
442
+ lev = np.floor(dollar_cap/principal_balance)
443
+ st.error(f"WARNING: (Starting Balance)*(Leverage) exceeds the ${dollar_cap} limit. Using maximum available leverage of {lev}")
444
+
445
+ if submitted and no_errors:
446
+ df = df[(df[dateheader] >= startdate) & (df[dateheader] <= enddate)]
447
+ signal_map = {'Long': 1, 'Short':-1}
448
+
449
+
450
+ if len(df) == 0:
451
+ st.error("There are no available trades matching your selections. Please try again!")
452
+ no_errors = False
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
453
 
454
+ if no_errors:
455
+ if bot_selections == "Cinnamon Toast":
456
+ dca_map = {1: dca1/100, 2: dca2/100, 3: dca3/100, 4: dca4/100, 1.1: dca5/100, 2.1: dca6/100}
457
+ df['DCA %'] = df['DCA'].map(dca_map)
458
+ df['Calculated Return %'] = df['Signal'].map(signal_map)*(df['DCA %'])*(1-fees)*((df['Sell Price']-df['Buy Price'])/df['Buy Price'] - fees) #accounts for fees on open and close of trade
459
+ df['DCA'] = np.floor(df['DCA'].values)
460
+
461
+ df['Return Per Trade'] = np.nan
462
+ df['Balance used in Trade'] = np.nan
463
+ df['New Balance'] = np.nan
464
+
465
+ g = df.groupby('Exit Date').sum(numeric_only=True)['Calculated Return %'].reset_index(name='Return Per Trade')
466
+ df.loc[df['DCA']==1.0,'Return Per Trade'] = 1+lev*g['Return Per Trade'].values
467
+
468
+ df['Compounded Return'] = df['Return Per Trade'].cumprod()
469
+ df.loc[df['DCA']==1.0,'New Balance'] = [min(dollar_cap/lev, bal*principal_balance) for bal in df.loc[df['DCA']==1.0,'Compounded Return']]
470
+ df.loc[df['DCA']==1.0,'Balance used in Trade'] = np.concatenate([[principal_balance], df.loc[df['DCA']==1.0,'New Balance'].values[:-1]])
471
+ else:
472
+ df['Calculated Return %'] = df['Signal'].map(signal_map)*(1-fees)*((df['Sell Price']-df['Buy Price'])/df['Buy Price'] - fees) #accounts for fees on open and close of trade
473
+ df['Return Per Trade'] = np.nan
474
+ g = df.groupby('Exit Date').sum(numeric_only=True)['Calculated Return %'].reset_index(name='Return Per Trade')
475
+ df['Return Per Trade'] = 1+lev*g['Return Per Trade'].values
476
+
477
+ df['Compounded Return'] = df['Return Per Trade'].cumprod()
478
+ df['New Balance'] = [min(dollar_cap/lev, bal*principal_balance) for bal in df['Compounded Return']]
479
+ df['Balance used in Trade'] = np.concatenate([[principal_balance], df['New Balance'].values[:-1]])
480
+ df['Net P/L Per Trade'] = (df['Return Per Trade']-1)*df['Balance used in Trade']
481
+ df['Cumulative P/L'] = df['Net P/L Per Trade'].cumsum()
482
+
483
+ if bot_selections == "Cinnamon Toast" or bot_selections == "Cosmic Cupcake" or bot_selections == "Pure Bread":
484
+ cum_pl = df.loc[df.drop('Drawdown %', axis=1).dropna().index[-1],'Cumulative P/L'] + principal_balance
485
+ #cum_sdp = sd_df.loc[sd_df.drop('Drawdown %', axis=1).dropna().index[-1],'Cumulative P/L (+)'] + principal_balance
486
+ #cum_sdm = sd_df.loc[sd_df.drop('Drawdown %', axis=1).dropna().index[-1],'Cumulative P/L (-)'] + principal_balance
487
+ else:
488
+ cum_pl = df.loc[df.dropna().index[-1],'Cumulative P/L'] + principal_balance
489
+ #cum_sdp = sd_df.loc[sd_df.dropna().index[-1],'Cumulative P/L (+)'] + principal_balance
490
+ #cum_sdm = sd_df.loc[sd_df.dropna().index[-1],'Cumulative P/L (-)'] + principal_balance
491
+ #sd = 2*.00026
492
+ #sd_df = get_sd_df(get_sd_df(df.copy(), sd, bot_selections, dca1, dca2, dca3, dca4, dca5, dca6, fees, lev, dollar_cap, principal_balance)
493
+
494
+ effective_return = 100*((cum_pl - principal_balance)/principal_balance)
495
+
496
+ st.header(f"{bot_selections} Results")
497
+ with st.container():
498
+
499
+ if len(bot_selections) > 1:
500
+ col1, col2 = st.columns(2)
501
+ with col1:
502
+ st.metric(
503
+ "Total Account Balance",
504
+ f"${cum_pl:.2f}",
505
+ f"{100*(cum_pl-principal_balance)/(principal_balance):.2f} %",
506
+ )
507
+
508
+ # with col2:
509
+ # st.write("95% of trades should fall within this 2 std. dev. range.")
510
+ # st.metric(
511
+ # "High Range (+ 2 std. dev.)",
512
+ # f"", #${cum_sdp:.2f}
513
+ # f"{100*(cum_sdp-principal_balance)/(principal_balance):.2f} %",
514
+ # )
515
+ # st.metric(
516
+ # "Low Range (- 2 std. dev.)",
517
+ # f"" ,#${cum_sdm:.2f}"
518
+ # f"{100*(cum_sdm-principal_balance)/(principal_balance):.2f} %",
519
+ # )
520
+ if bot_selections == "Cinnamon Toast" or bot_selections == "Cosmic Cupcake" or bot_selections == "Pure Bread":
521
+ #st.line_chart(data=df.drop('Drawdown %', axis=1).dropna(), x='Exit Date', y='Cumulative P/L', use_container_width=True)
522
+ dfdata = df.drop('Drawdown %', axis=1).dropna()
523
+ #sd_df = sd_df.drop('Drawdown %', axis=1).dropna()
524
+ else:
525
+ #st.line_chart(data=df.dropna(), x='Exit Date', y='Cumulative P/L', use_container_width=True)
526
+ dfdata = df.dropna()
527
+ #sd_df = sd_df.dropna()
528
+
529
+ # Create figure
530
+ fig = go.Figure()
531
+
532
+ pyLogo = Image.open("logo.png")
533
+
534
+ # fig.add_traces(go.Scatter(x=sd_df['Exit Date'], y = sd_df['Cumulative P/L (+)'],line_shape='spline',
535
+ # line = dict(smoothing = 1.3, color='rgba(31, 119, 200,0)'), showlegend = False)
536
+ # )
537
+
538
+ # fig.add_traces(go.Scatter(x=sd_df['Exit Date'], y = sd_df['Cumulative P/L (-)'],
539
+ # line = dict(smoothing = 1.3, color='rgba(31, 119, 200,0)'), line_shape='spline',
540
+ # fill='tonexty',
541
+ # fillcolor = 'rgba(31, 119, 200,.2)', name = '+/- Standard Deviation')
542
+ # )
543
+
544
+ # Add trace
545
+ fig.add_trace(
546
+ go.Scatter(x=dfdata['Exit Date'], y=np.round(dfdata['Cumulative P/L'].values,2), line_shape='spline',
547
+ line = {'smoothing': 1.0, 'color' : 'rgba(31, 119, 200,.8)'},
548
+ name='Cumulative P/L')
549
+ )
550
+ buyhold = (principal_balance/dfdata['Buy Price'][dfdata.index[0]])*(dfdata['Buy Price']-dfdata['Buy Price'][dfdata.index[0]])
551
+ fig.add_trace(go.Scatter(x=dfdata['Exit Date'], y=np.round(buyhold.values,2), line_shape='spline',
552
+ line = {'smoothing': 1.0, 'color' :'red'}, name = 'Buy & Hold Return')
553
+ )
554
+
555
+ fig.add_layout_image(
556
+ dict(
557
+ source=pyLogo,
558
+ xref="paper",
559
+ yref="paper",
560
+ x = 0.05, #dfdata['Exit Date'].astype('int64').min() // 10**9,
561
+ y = .85, #dfdata['Cumulative P/L'].max(),
562
+ sizex= .9, #(dfdata['Exit Date'].astype('int64').max() - dfdata['Exit Date'].astype('int64').min()) // 10**9,
563
+ sizey= .9, #(dfdata['Cumulative P/L'].max() - dfdata['Cumulative P/L'].min()),
564
+ sizing="contain",
565
+ opacity=0.2,
566
+ layer = "below")
567
+ )
568
+
569
+ #style layout
570
+ fig.update_layout(
571
+ height = 600,
572
+ xaxis=dict(
573
+ title="Exit Date",
574
+ tickmode='array',
575
+ ),
576
+ yaxis=dict(
577
+ title="Cumulative P/L"
578
+ ) )
579
+
580
+ st.plotly_chart(fig, theme=None, use_container_width=True,height=600)
581
+ st.write()
582
+ df['Per Trade Return Rate'] = df['Return Per Trade']-1
583
+
584
+ totals = pd.DataFrame([], columns = ['# of Trades', 'Wins', 'Losses', 'Win Rate', 'Profit Factor'])
585
+ if bot_selections == "Cinnamon Toast" or bot_selections == "Cosmic Cupcake" or bot_selections == "Pure Bread":
586
+ data = get_hist_info(df.drop('Drawdown %', axis=1).dropna(), principal_balance,'Per Trade Return Rate')
587
+ else:
588
+ data = get_hist_info(df.dropna(), principal_balance,'Per Trade Return Rate')
589
+ totals.loc[len(totals)] = list(i for i in data)
590
+
591
+ totals['Cum. P/L'] = cum_pl-principal_balance
592
+ totals['Cum. P/L (%)'] = 100*(cum_pl-principal_balance)/principal_balance
593
+
594
+ if df.empty:
595
+ st.error("Oops! None of the data provided matches your selection(s). Please try again.")
596
+ else:
597
+ with st.container():
598
+ for row in totals.itertuples():
599
+ col1, col2, col3, col4= st.columns(4)
600
+ c1, c2, c3, c4 = st.columns(4)
601
+ with col1:
602
+ st.metric(
603
+ "Total Trades",
604
+ f"{row._1:.0f}",
605
+ )
606
+ with c1:
607
+ st.metric(
608
+ "Profit Factor",
609
+ f"{row._5:.2f}",
610
+ )
611
+ with col2:
612
+ st.metric(
613
+ "Wins",
614
+ f"{row.Wins:.0f}",
615
+ )
616
+ with c2:
617
+ st.metric(
618
+ "Cumulative P/L",
619
+ f"${row._6:.2f}",
620
+ f"{row._7:.2f} %",
621
+ )
622
+ with col3:
623
+ st.metric(
624
+ "Losses",
625
+ f"{row.Losses:.0f}",
626
+ )
627
+ with c3:
628
+ st.metric(
629
+ "Rolling 7 Days",
630
+ "",#f"{(1+get_rolling_stats(df,otimeheader, 30))*principal_balance:.2f}",
631
+ f"{get_rolling_stats(df,lev, otimeheader, 7):.2f}%",
632
+ )
633
+ st.metric(
634
+ "Rolling 30 Days",
635
+ "",#f"{(1+get_rolling_stats(df,otimeheader, 30))*principal_balance:.2f}",
636
+ f"{get_rolling_stats(df,lev, otimeheader, 30):.2f}%",
637
+ )
638
+
639
+ with col4:
640
+ st.metric(
641
+ "Win Rate",
642
+ f"{row._4:.1f}%",
643
+ )
644
+ with c4:
645
+ st.metric(
646
+ "Rolling 90 Days",
647
+ "",#f"{(1+get_rolling_stats(df,otimeheader, 30))*principal_balance:.2f}",
648
+ f"{get_rolling_stats(df,lev, otimeheader, 90):.2f}%",
649
+ )
650
+ st.metric(
651
+ "Rolling 180 Days",
652
+ "",#f"{(1+get_rolling_stats(df,otimeheader, 30))*principal_balance:.2f}",
653
+ f"{get_rolling_stats(df,lev, otimeheader, 180):.2f}%",
654
+ )
655
+
656
+ if bot_selections == "Cinnamon Toast":
657
+ if submitted:
658
+ grouped_df = df.groupby('Exit Date').agg({'Signal':'min','Entry Date': 'min','Exit Date': 'max','Buy Price': 'mean',
659
+ 'Sell Price' : 'max',
660
+ 'Net P/L Per Trade': 'mean',
661
+ 'Calculated Return %' : lambda x: np.round(100*lev*x.sum(),2),
662
+ 'DCA': lambda x: int(np.floor(x.max()))})
663
+ grouped_df.index = range(1, len(grouped_df)+1)
664
+ grouped_df.rename(columns={'DCA' : '# of DCAs', 'Buy Price':'Avg. Buy Price',
665
+ 'Net P/L Per Trade':'Net P/L',
666
+ 'Calculated Return %':'P/L %'}, inplace=True)
667
+ else:
668
+ dca_map = {1: 25/100, 2: 25/100, 3: 25/100, 4: 25/100, 1.1: 50/100, 2.1: 50/100}
669
+ df['DCA %'] = df['DCA'].map(dca_map)
670
+ df['Calculated Return %'] = (df['DCA %'])*(1-fees)*((df['Sell Price']-df['Buy Price'])/df['Buy Price'] - fees) #accounts for fees on open and close of trade
671
+
672
+ grouped_df = df.groupby('Exit Date').agg({'Signal':'min','Entry Date': 'min','Exit Date': 'max','Buy Price': 'mean',
673
+ 'Sell Price' : 'max',
674
+ 'P/L per token': 'mean',
675
+ 'Calculated Return %' : lambda x: np.round(100*x.sum(),2),
676
+ 'DCA': lambda x: int(np.floor(x.max()))})
677
+ grouped_df.index = range(1, len(grouped_df)+1)
678
+ grouped_df.rename(columns={'DCA' : '# of DCAs', 'Buy Price':'Avg. Buy Price',
679
+ 'Calculated Return %':'P/L %',
680
+ 'P/L per token':'Net P/L'}, inplace=True)
681
+
682
+ else:
683
+ if submitted:
684
+ grouped_df = df.groupby('Exit Date').agg({'Signal':'min','Entry Date': 'min','Exit Date': 'max','Buy Price': 'mean',
685
+ 'Sell Price' : 'max',
686
+ 'Net P/L Per Trade': 'mean',
687
+ 'Calculated Return %' : lambda x: np.round(100*lev*x.sum(),2)})
688
+ grouped_df.index = range(1, len(grouped_df)+1)
689
+ grouped_df.rename(columns={'Buy Price':'Avg. Buy Price',
690
+ 'Net P/L Per Trade':'Net P/L',
691
+ 'Calculated Return %':'P/L %'}, inplace=True)
692
+ else:
693
+ grouped_df = df.groupby('Exit Date').agg({'Signal':'min','Entry Date': 'min','Exit Date': 'max','Buy Price': 'mean',
694
+ 'Sell Price' : 'max',
695
+ 'P/L per token': 'mean',
696
+ 'P/L %':'mean'})
697
+ grouped_df.index = range(1, len(grouped_df)+1)
698
+ grouped_df.rename(columns={'Buy Price':'Avg. Buy Price',
699
+ 'P/L per token':'Net P/L'}, inplace=True)
700
+ st.subheader("Trade Logs")
701
+ grouped_df['Entry Date'] = pd.to_datetime(grouped_df['Entry Date'])
702
+ grouped_df['Exit Date'] = pd.to_datetime(grouped_df['Exit Date'])
703
+ if bot_selections == "Cosmic Cupcake" or bot_selections == "CT Toasted":
704
+ coding = cc_coding if bot_selections == "Cosmic Cupcake" else ctt_coding
705
+ st.dataframe(grouped_df.style.format({'Entry Date':'{:%m-%d-%Y %H:%M:%S}','Exit Date':'{:%m-%d-%Y %H:%M:%S}','Avg. Buy Price': '${:.2f}', 'Sell Price': '${:.2f}', 'Net P/L':'${:.2f}', 'P/L %':'{:.2f}%'})\
706
+ .apply(coding, axis=1)\
707
+ .applymap(my_style,subset=['Net P/L'])\
708
+ .applymap(my_style,subset=['P/L %']), use_container_width=True)
709
+ # new_title = '<div style="text-align: right;"><span style="background-color:lightgrey;">&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;</span> Not Live Traded</div>'
710
+ # st.markdown(new_title, unsafe_allow_html=True)
711
+ else:
712
+ st.dataframe(grouped_df.style.format({'Entry Date':'{:%m-%d-%Y %H:%M:%S}','Exit Date':'{:%m-%d-%Y %H:%M:%S}','Avg. Buy Price': '${:.2f}', 'Sell Price': '${:.2f}', 'Net P/L':'${:.2f}', 'P/L %':'{:.2f}%'})\
713
+ .applymap(my_style,subset=['Net P/L'])\
714
+ .applymap(my_style,subset=['P/L %']), use_container_width=True)
715
+
716
+ # st.subheader("Checking Status")
717
+ # if submitted:
718
+ # st.dataframe(sd_df)
719
+
720
# Script entry point: configure the Streamlit page, then launch the dashboard.
if __name__ == "__main__":
    st.set_page_config("Trading Bot Dashboard", layout="wide")
    runapp()