Spaces:
Sleeping
Sleeping
Upload 3 files
Browse files- patterns.py +173 -0
- predictions.py +33 -0
- trading.py +71 -0
patterns.py
ADDED
|
@@ -0,0 +1,173 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
import numpy as np
|
| 2 |
+
import pandas as pd
|
| 3 |
+
|
| 4 |
+
def identify_patterns(df):
    """Identify candlestick patterns in OHLC data.

    Parameters:
        df: DataFrame with 'Open', 'High', 'Low', 'Close' columns.

    Returns:
        DataFrame (same index as *df*) with one 0/1 column per pattern.
        Rows without enough history (due to shift()) compare as False
        and therefore yield 0.
    """
    patterns = pd.DataFrame(index=df.index)

    # Basic candlestick geometry, computed once and reused below.
    body = df['Close'] - df['Open']
    body_abs = abs(body)
    upper_shadow = df['High'] - df[['Open', 'Close']].max(axis=1)
    lower_shadow = df[['Open', 'Close']].min(axis=1) - df['Low']
    # Full high-low range, used by the doji family of patterns.
    candle_range = df['High'] - df['Low']
    # Midpoint of the candle body two bars back, used by the star patterns.
    prev2_body_mid = (df['Open'].shift(2) + df['Close'].shift(2)) / 2

    # 1. Hammer Pattern
    patterns['HAMMER'] = np.where(
        (lower_shadow > 2 * body_abs) &  # Long lower shadow
        (upper_shadow <= 0.1 * body_abs) &  # Minimal upper shadow
        (body > 0),  # Bullish close
        1, 0
    )

    # 2. Inverted Hammer Pattern
    patterns['INVERTED_HAMMER'] = np.where(
        (upper_shadow > 2 * body_abs) &  # Long upper shadow
        (lower_shadow <= 0.1 * body_abs) &  # Minimal lower shadow
        (body > 0),  # Bullish close
        1, 0
    )

    # 3. Piercing Line Pattern
    patterns['PIERCING_LINE'] = np.where(
        (body.shift(1) < 0) &  # Previous candle bearish
        (body > 0) &  # Current candle bullish
        (df['Open'] < df['Close'].shift(1)) &  # Opens below previous close
        (df['Close'] > (df['Open'].shift(1) + df['Close'].shift(1)) / 2),  # Closes above midpoint
        1, 0
    )

    # 4. Bullish Engulfing Pattern
    patterns['BULLISH_ENGULFING'] = np.where(
        (body.shift(1) < 0) &  # Previous candle bearish
        (body > 0) &  # Current candle bullish
        (df['Open'] < df['Close'].shift(1)) &  # Opens below previous close
        (df['Close'] > df['Open'].shift(1)),  # Closes above previous open
        1, 0
    )

    # 5. Morning Star Pattern
    # BUGFIX: the midpoint of the first candle's body is
    # (Open.shift(2) + Close.shift(2)) / 2, not Close.shift(2) * 0.5
    # (half the previous close, which is almost always exceeded).
    patterns['MORNING_STAR'] = np.where(
        (body.shift(2) < 0) &  # First candle bearish
        (abs(body.shift(1)) < abs(body.shift(2)) * 0.3) &  # Second candle small
        (body > 0) &  # Third candle bullish
        (df['Close'] > prev2_body_mid),  # Closes above first candle's midpoint
        1, 0
    )

    # 6. Three White Soldiers
    patterns['THREE_WHITE_SOLDIERS'] = np.where(
        (body > 0) &  # Current candle bullish
        (body.shift(1) > 0) &  # Previous candle bullish
        (body.shift(2) > 0) &  # Two candles ago bullish
        (df['Close'] > df['Close'].shift(1)) &  # Each closes higher
        (df['Close'].shift(1) > df['Close'].shift(2)),
        1, 0
    )

    # 7. Bullish Harami
    patterns['BULLISH_HARAMI'] = np.where(
        (body.shift(1) < 0) &  # Previous candle bearish
        (body > 0) &  # Current candle bullish
        (df['Open'] > df['Close'].shift(1)) &  # Opens inside previous body
        (df['Close'] < df['Open'].shift(1)),  # Closes inside previous body
        1, 0
    )

    # 8. Hanging Man
    patterns['HANGING_MAN'] = np.where(
        (lower_shadow > 2 * body_abs) &  # Long lower shadow
        (upper_shadow <= 0.1 * body_abs) &  # Minimal upper shadow
        (body < 0),  # Bearish close
        1, 0
    )

    # 9. Dark Cloud Cover
    patterns['DARK_CLOUD_COVER'] = np.where(
        (body.shift(1) > 0) &  # Previous candle bullish
        (body < 0) &  # Current candle bearish
        (df['Open'] > df['High'].shift(1)) &  # Opens above previous high
        (df['Close'] < (df['Open'].shift(1) + df['Close'].shift(1)) / 2),  # Closes below midpoint
        1, 0
    )

    # 10. Bearish Engulfing
    patterns['BEARISH_ENGULFING'] = np.where(
        (body.shift(1) > 0) &  # Previous candle bullish
        (body < 0) &  # Current candle bearish
        (df['Open'] > df['Close'].shift(1)) &  # Opens above previous close
        (df['Close'] < df['Open'].shift(1)),  # Closes below previous open
        1, 0
    )

    # 11. Evening Star
    # BUGFIX: same midpoint fix as MORNING_STAR — compare against the
    # first candle's body midpoint, not Close.shift(2) * 0.5 (half the
    # previous close, which would essentially never be satisfied).
    patterns['EVENING_STAR'] = np.where(
        (body.shift(2) > 0) &  # First candle bullish
        (abs(body.shift(1)) < abs(body.shift(2)) * 0.3) &  # Second candle small
        (body < 0) &  # Third candle bearish
        (df['Close'] < prev2_body_mid),  # Closes below first candle's midpoint
        1, 0
    )

    # 12. Three Black Crows
    patterns['THREE_BLACK_CROWS'] = np.where(
        (body < 0) &  # Current candle bearish
        (body.shift(1) < 0) &  # Previous candle bearish
        (body.shift(2) < 0) &  # Two candles ago bearish
        (df['Close'] < df['Close'].shift(1)) &  # Each closes lower
        (df['Close'].shift(1) < df['Close'].shift(2)),
        1, 0
    )

    # 13. Shooting Star
    patterns['SHOOTING_STAR'] = np.where(
        (upper_shadow > 2 * body_abs) &  # Long upper shadow
        (lower_shadow <= 0.1 * body_abs) &  # Minimal lower shadow
        (body < 0),  # Bearish close
        1, 0
    )

    # 14. Doji Patterns
    patterns['DOJI'] = np.where(
        body_abs <= 0.1 * candle_range,  # Very small body
        1, 0
    )

    # 15. Dragonfly Doji
    patterns['DRAGONFLY_DOJI'] = np.where(
        (body_abs <= 0.1 * candle_range) &  # Doji body
        (upper_shadow <= 0.1 * candle_range) &  # Minimal upper shadow
        (lower_shadow >= 0.7 * candle_range),  # Long lower shadow
        1, 0
    )

    # 16. Gravestone Doji
    patterns['GRAVESTONE_DOJI'] = np.where(
        (body_abs <= 0.1 * candle_range) &  # Doji body
        (lower_shadow <= 0.1 * candle_range) &  # Minimal lower shadow
        (upper_shadow >= 0.7 * candle_range),  # Long upper shadow
        1, 0
    )

    return patterns
|
| 152 |
+
|
| 153 |
+
def calculate_technical_indicators(df):
    """Add RSI, MACD, and moving-average columns to *df* in place.

    Expects a 'Close' column; returns the same (mutated) DataFrame.
    """
    close = df['Close']

    # RSI: 14-period, simple (non-Wilder) rolling averages of gains/losses.
    change = close.diff()
    avg_gain = change.where(change > 0, 0).rolling(window=14).mean()
    avg_loss = (-change.where(change < 0, 0)).rolling(window=14).mean()
    strength = avg_gain / avg_loss
    df['RSI'] = 100 - 100 / (1 + strength)

    # MACD: fast(12) minus slow(26) EMA, with a 9-period signal line.
    fast_ema = close.ewm(span=12, adjust=False).mean()
    slow_ema = close.ewm(span=26, adjust=False).mean()
    macd_line = fast_ema - slow_ema
    df['MACD'] = macd_line
    df['MACD_Signal'] = macd_line.ewm(span=9, adjust=False).mean()
    df['MACD_Hist'] = df['MACD'] - df['MACD_Signal']

    # Simple moving averages used as trend features.
    df['SMA_20'] = close.rolling(window=20).mean()
    df['SMA_50'] = close.rolling(window=50).mean()

    return df
|
predictions.py
ADDED
|
@@ -0,0 +1,33 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
import numpy as np
|
| 2 |
+
from sklearn.ensemble import RandomForestClassifier
|
| 3 |
+
from sklearn.preprocessing import StandardScaler
|
| 4 |
+
|
| 5 |
+
def prepare_features(df):
    """Assemble the model's feature matrix from indicator columns.

    Expects *df* to carry RSI/MACD/SMA columns (see
    calculate_technical_indicators) plus 'Close'. Rows containing any
    NaN (indicator warm-up, first pct_change) are dropped.
    """
    indicator_cols = ['RSI', 'MACD', 'MACD_Signal', 'MACD_Hist']
    feats = df[indicator_cols].copy()

    feats['SMA_Ratio'] = df['SMA_20'] / df['SMA_50']  # trend proxy
    feats['Price_Change'] = df['Close'].pct_change()
    feats['Volatility'] = df['Close'].rolling(window=20).std()

    return feats.dropna()
|
| 13 |
+
|
| 14 |
+
def predict_movement(df, lookback_period=30):
    """Predict the direction of the next bar's close.

    Parameters:
        df: OHLC DataFrame already enriched with indicator columns
            (see calculate_technical_indicators).
        lookback_period: minimum number of clean feature rows required
            before attempting a prediction.

    Returns:
        (prediction, probability) where prediction is the class label
        (True/1 = next close higher) and probability is the class
        probability array; (None, None) when there is too little data.
    """
    features = prepare_features(df)
    if len(features) < lookback_period:
        return None, None

    # Label: next close higher than the current close.
    # BUGFIX: prepare_features() drops NaN warm-up rows, so labels must
    # be aligned to features.index. The old code built y over the full
    # df index, producing misaligned (and length-mismatched) X and y.
    target = (df['Close'].shift(-1) > df['Close']).loc[features.index]

    # Drop the final row from training: its label peeks past the data.
    X = features.iloc[:-1].values
    y = target.iloc[:-1].values

    model = RandomForestClassifier(n_estimators=100, random_state=42)
    model.fit(X, y)

    # Predict from the most recent feature row.
    latest_features = features.iloc[-1:].values
    prediction = model.predict(latest_features)[0]
    probability = model.predict_proba(latest_features)[0]

    return prediction, probability
|
trading.py
ADDED
|
@@ -0,0 +1,71 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
import yfinance as yf
|
| 2 |
+
import pandas as pd
|
| 3 |
+
from datetime import datetime, timedelta
|
| 4 |
+
import pytz
|
| 5 |
+
|
| 6 |
+
def is_market_open(now=None):
    """Return True if the US market (including extended hours) is open.

    Parameters:
        now: optional datetime to evaluate; defaults to the current
            America/New_York time. Exposed mainly for testing, and
            backward compatible (existing zero-argument calls behave
            exactly as before).
    """
    if now is None:
        now = datetime.now(pytz.timezone('America/New_York'))

    is_weekday = now.weekday() < 5  # Monday (0) .. Friday (4)
    clock = now.hour + (now.minute / 60)

    # Regular session: 9:30 AM - 4:00 PM Eastern.
    is_market_hours = 9.5 <= clock <= 16

    # Pre-market (4:00-9:30) and after-hours (4:00 PM-8:00 PM).
    # NOTE(review): this window subsumes regular hours, so the function
    # effectively reports "open" for the whole 4:00-20:00 weekday span;
    # that matches the prepost=True data fetched elsewhere.
    is_extended_hours = (4 <= clock <= 20)

    return is_weekday and (is_market_hours or is_extended_hours)
|
| 17 |
+
|
| 18 |
+
def fetch_market_data(symbol, period='1d', interval='15m'):
    """Fetch recent intraday data for *symbol* from Yahoo Finance.

    Parameters:
        symbol: ticker symbol, e.g. 'AAPL'.
        period: window to keep, as '<int>m' (minutes), '<int>h' (hours)
            or '<int>d' (days).
        interval: accepted for interface compatibility but currently
            ignored — 15-minute bars are always fetched (see below).

    Returns:
        DataFrame of 15-minute bars trimmed to the requested window.

    Raises:
        Exception with a user-facing message on any failure (callers
        display str(e) directly).
    """
    try:
        # Check if we have a valid symbol
        if not symbol:
            raise Exception("Please enter a valid trading symbol")

        # Always fetch 1 day of 15-minute bars (with pre/post market)
        # so there is enough history regardless of the requested window.
        ticker = yf.Ticker(symbol)
        df = ticker.history(period='1d', interval='15m', prepost=True)

        if df.empty:
            raise Exception(f"No data available for {symbol}. Please verify the symbol is correct.")

        # Get the market status
        market_status = "Market Open" if is_market_open() else "Market Closed"

        # Trim data based on selected timeframe.
        # BUGFIX: the old code treated every non-'m' suffix as hours, so
        # the default period '1d' was silently trimmed to the last HOUR.
        now = datetime.now(pytz.timezone('America/New_York'))
        amount = int(period[:-1])
        unit = period[-1]
        if unit == 'm':
            cutoff_time = now - timedelta(minutes=amount)
        elif unit == 'd':
            cutoff_time = now - timedelta(days=amount)
        else:
            cutoff_time = now - timedelta(hours=amount)

        df = df[df.index >= cutoff_time]

        if df.empty:
            raise Exception(f"No recent data available. {market_status}.")

        return df

    except Exception as e:
        # NOTE(review): exceptions raised above are re-caught here and
        # re-wrapped with the "Data fetch error:" prefix, matching the
        # original behavior callers may pattern-match on.
        if "symbol may be delisted" in str(e).lower():
            raise Exception(f"Symbol {symbol} not found. Please verify the symbol is correct.")
        raise Exception(f"Data fetch error: {str(e)}")
|
| 55 |
+
|
| 56 |
+
def calculate_performance_metrics(predictions, actual):
    """Summarize how often *predictions* matched *actual* outcomes.

    Returns a dict with 'accuracy' (fraction in [0, 1]), 'success_rate'
    (same value as a percentage) and 'total_predictions'. Empty input
    on either side yields all-zero metrics.
    """
    total = len(predictions)
    if total == 0 or len(actual) == 0:
        return {
            'accuracy': 0,
            'success_rate': 0,
            'total_predictions': 0,
        }

    # Element-wise comparison; zip truncates to the shorter sequence.
    hits = sum(1 for predicted, real in zip(predictions, actual) if predicted == real)
    accuracy = hits / total

    return {
        'accuracy': accuracy,
        'success_rate': accuracy * 100,
        'total_predictions': total,
    }
|