solanaexpert committed on
Commit
d1a316c
·
verified ·
1 Parent(s): 03e6a37

Create MLCryptoForecasterAllAssetsTPSL_ParisTime.py

Browse files
MLCryptoForecasterAllAssetsTPSL_ParisTime.py ADDED
@@ -0,0 +1,165 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ import os
2
+ import pandas as pd
3
+ import numpy as np
4
+ from datetime import timedelta
5
+ from binance.client import Client
6
+ from sklearn.model_selection import train_test_split
7
+ from sklearn.ensemble import RandomForestClassifier
8
+ from sklearn.metrics import classification_report
9
+ import ta
10
+ import pytz
11
+
12
# Write a message both to stdout and to the shared results file.

def log_results(message, filename="predictions_results.txt"):
    """Echo *message* to the console and append it, newline-terminated,
    to *filename* (opened in append mode so earlier entries survive)."""
    print(message)
    with open(filename, "a") as handle:
        handle.write(f"{message}\n")
+
20
# Convert a UTC timestamp to a formatted Europe/Paris local-time string.

def convert_to_paris_time(utc_time):
    """Render *utc_time* in the Europe/Paris timezone.

    Parameters
    ----------
    utc_time : datetime.datetime (or pandas.Timestamp)
        Timestamp to convert. A naive value is assumed to be UTC; an
        already-aware value keeps its own offset (the previous code
        overwrote any existing tzinfo with UTC, silently shifting
        aware inputs).

    Returns
    -------
    str
        Paris local time formatted as ``YYYY-MM-DD HH:MM:SS``.
    """
    paris_tz = pytz.timezone('Europe/Paris')
    # Only attach UTC when the input is naive; never clobber the
    # timezone of an aware datetime.
    if utc_time.tzinfo is None:
        utc_time = utc_time.replace(tzinfo=pytz.utc)
    paris_time = utc_time.astimezone(paris_tz)
    return paris_time.strftime('%Y-%m-%d %H:%M:%S')
27
+
28
# Initialize Binance client (public endpoints only; no API key needed
# for exchange info and klines).
client = Client()

# Settings: 4-hour candles, one shared output file for every asset.
interval = Client.KLINE_INTERVAL_4HOUR
result_file = "predictions_results.txt"

# Start from a fresh results file with a CSV-style header. Opening in
# mode "w" already truncates an existing file, so the previous
# os.path.exists()/os.remove() pre-delete was redundant and is dropped.
with open(result_file, "w") as f:
    f.write("Asset,Time,Price,Prediction,Optimal_UP_TP,Optimal_UP_SL,Optimal_DN_TP,Optimal_DN_SL\n")

# All symbols currently trading against USDT.
symbols = [s['symbol'] for s in client.get_exchange_info()['symbols']
           if s['status'] == 'TRADING' and s['quoteAsset'] == 'USDT']
46
+
47
# Grid-search take-profit / stop-loss thresholds over historical signals.

def optimize_tp_sl(df, signals, side, pgrid, lgrid):
    """Find the (take_profit, stop_loss) pair with the best mean return.

    Each bar where ``signals == side`` (1 = long, 0 = short) opens a
    simulated trade that is evaluated over the next 10 bars: it closes
    on the first bar whose return reaches ``tp`` or drops to ``-sl``,
    and the realized return magnitude is capped at ``max(tp, sl)``.

    Parameters
    ----------
    df : pandas.DataFrame
        Must contain a ``'close'`` column aligned with ``signals``.
    signals : numpy.ndarray
        Per-bar predicted labels.
    side : int
        Label value selecting entries (1 for longs, 0 for shorts).
    pgrid, lgrid : iterable of float
        Candidate take-profit and stop-loss fractions.

    Returns
    -------
    tuple
        ``(tp, sl, avg_return)`` of the best combination, or
        ``(0, 0, -inf)`` when no candidate ever closes a trade.
    """
    best = (0, 0, -np.inf)
    prices = df['close'].values
    idxs = np.where(signals == side)[0]
    # The forward return path of each entry does not depend on tp/sl,
    # so compute it once here instead of inside the grid loops
    # (previously recomputed for every tp x sl combination).
    paths = []
    for i in idxs:
        entry = prices[i]
        horizon = prices[i + 1:min(i + 11, len(prices))]
        if side == 1:
            paths.append((horizon - entry) / entry)
        else:
            paths.append((entry - horizon) / entry)
    for tp in pgrid:
        for sl in lgrid:
            rets = []
            for path in paths:
                for ret in path:
                    if ret >= tp or ret <= -sl:
                        # Cap the realized move at the larger of the
                        # two thresholds, keeping its sign.
                        rets.append(np.sign(ret) * min(abs(ret), max(tp, sl)))
                        break
            if rets:
                avg_ret = np.mean(rets)
                if avg_ret > best[2]:
                    best = (tp, sl, avg_ret)
    return best
67
+
68
# Main loop: for every USDT symbol, refresh the cached 4h candle file,
# engineer indicator features, train a RandomForest trend classifier,
# log its held-out report and latest prediction, then grid-search
# take-profit / stop-loss levels for the long and short signal sets.
for symbol in symbols:
    log_results(f"=== {symbol} ===", result_file)

    # Load or download historical data
    data_file = f"{symbol}_data_4h_full.csv"
    if os.path.exists(data_file):
        # Cache hit: fetch only candles newer than the last stored row,
        # resuming one candle (4h) after the last cached timestamp.
        df = pd.read_csv(data_file, index_col=0, parse_dates=True)
        last_ts = df.index[-1]
        start = (last_ts + timedelta(hours=4)).strftime("%d %B %Y %H:%M:%S")
        new = client.get_historical_klines(symbol, interval, start)
        if new:
            new_df = pd.DataFrame(new, columns=[
                'timestamp','open','high','low','close','volume',
                'close_time','quote_av','trades','tb_base_av','tb_quote_av','ignore'
            ])
            new_df = new_df[['timestamp','open','high','low','close','volume']].astype(float)
            new_df['timestamp'] = pd.to_datetime(new_df['timestamp'], unit='ms')
            new_df.set_index('timestamp', inplace=True)
            # NOTE(review): drop_duplicates() compares row VALUES, not the
            # index — duplicate timestamps with differing values survive;
            # confirm whether index-based dedup was intended.
            df = pd.concat([df, new_df]).drop_duplicates()
            df.to_csv(data_file)
    else:
        # No cache: download the full history since December 2021.
        klines = client.get_historical_klines(symbol, interval, "01 December 2021")
        df = pd.DataFrame(klines, columns=[
            'timestamp','open','high','low','close','volume',
            'close_time','quote_av','trades','tb_base_av','tb_quote_av','ignore'
        ])
        df = df[['timestamp','open','high','low','close','volume']].astype(float)
        df['timestamp'] = pd.to_datetime(df['timestamp'], unit='ms')
        df.set_index('timestamp', inplace=True)
        df.to_csv(data_file)

    # Compute technical indicators
    df['rsi'] = ta.momentum.RSIIndicator(df['close'], window=14).rsi()
    df['macd'] = ta.trend.MACD(df['close']).macd()
    for s in [10, 20, 50, 100]:
        df[f'ema_{s}'] = df['close'].ewm(span=s).mean()
    for w in [10, 20, 50, 100]:
        df[f'sma_{w}'] = df['close'].rolling(window=w).mean()
    bb = ta.volatility.BollingerBands(df['close'], window=20, window_dev=2)
    # Bollinger band width normalized by the middle band.
    df['bbw'] = (bb.bollinger_hband() - bb.bollinger_lband()) / bb.bollinger_mavg()
    df['atr'] = ta.volatility.AverageTrueRange(df['high'], df['low'], df['close'], window=14).average_true_range()
    df['adx'] = ta.trend.ADXIndicator(df['high'], df['low'], df['close'], window=14).adx()
    st = ta.momentum.StochasticOscillator(df['high'], df['low'], df['close'], window=14)
    df['st_k'] = st.stoch()
    df['st_d'] = st.stoch_signal()
    df['wr'] = ta.momentum.WilliamsRIndicator(df['high'], df['low'], df['close'], lbp=14).williams_r()
    df['cci'] = ta.trend.CCIIndicator(df['high'], df['low'], df['close'], window=20).cci()
    # 10-bar price momentum.
    df['mom'] = df['close'] - df['close'].shift(10)
    ichi = ta.trend.IchimokuIndicator(df['high'], df['low'], window1=9, window2=26, window3=52)
    df['span_a'] = ichi.ichimoku_a()
    df['span_b'] = ichi.ichimoku_b()
    # Drop warm-up rows where longer-window indicators are still NaN.
    df.dropna(inplace=True)

    # Label signals based on Ichimoku cloud:
    # 1 = close above both spans (uptrend), 0 = below both (downtrend),
    # -1 = inside the cloud (neutral).
    df['signal'] = np.select(
        [(df['close'] > df['span_a']) & (df['close'] > df['span_b']),
         (df['close'] < df['span_a']) & (df['close'] < df['span_b'])],
        [1, 0], default=-1)

    # Train/test split (chronological: shuffle=False preserves time order)
    features = [c for c in df.columns if c not in ['open','high','low','close','volume','signal']]
    X, y = df[features], df['signal']
    Xtr, Xte, ytr, yte = train_test_split(X, y, test_size=0.2, shuffle=False)
    model = RandomForestClassifier(n_estimators=200, class_weight='balanced', random_state=42)
    model.fit(Xtr, ytr)
    ypr = model.predict(Xte)

    # Log classification report
    report = classification_report(yte, ypr, zero_division=0)
    log_results(f"Classification report for {symbol}:\n{report}", result_file)

    # Predict latest trend with correct feature naming (a DataFrame slice
    # keeps the training column names sklearn expects).
    latest_df = X.iloc[-1:]
    trend_label = model.predict(latest_df)[0]

    # Convert timestamp to Paris time and fetch price
    pred_time_utc = df.index[-1]
    pred_time = convert_to_paris_time(pred_time_utc)
    pred_price = df['close'].iloc[-1]
    trend_str = {1:'Uptrend',0:'Downtrend',-1:'Neutral'}[trend_label]
    log_results(f"Time: {pred_time}, Price: {pred_price:.2f}, Prediction: {trend_str}", result_file)

    # Optimize TP/SL and log results
    # NOTE(review): predictions over the full X include the training
    # window, so the TP/SL grids are fit partly in-sample — confirm
    # this is intentional.
    hist_sign = model.predict(X)
    pgrid = np.arange(0.01, 0.1, 0.01)
    lgrid = np.arange(0.01, 0.1, 0.01)
    up_tp, up_sl, _ = optimize_tp_sl(df, hist_sign, 1, pgrid, lgrid)
    dn_tp, dn_sl, _ = optimize_tp_sl(df, hist_sign, 0, pgrid, lgrid)
    log_results(f"Optimal UP TP/SL: +{up_tp*100:.1f}% / -{up_sl*100:.1f}%", result_file)
    log_results(f"Optimal DN TP/SL: +{dn_tp*100:.1f}% / -{dn_sl*100:.1f}%", result_file)

    # Blank line after asset so per-asset sections stay readable
    with open(result_file, "a") as f:
        f.write("\n")
163
+
164
# Final marker written after every symbol has been processed.
log_results("All assets processed.", result_file)