File size: 3,230 Bytes
0169a1d
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
61
62
63
64
65
66
67
68
69
70
71
72
73
74
75
76
77
78
79
80
81
82
83
84
85
86
87
88
89

def fetch_data(ticker, start_date, end_date):
    """Download historical price data for *ticker* between the given dates.

    Returns the pandas DataFrame produced by yfinance (indexed by date,
    with OHLCV columns including 'Close').
    """
    return yf.download(ticker, start=start_date, end=end_date)
    def preprocess_data(data):
    # Select 'Close' prices for prediction
    prices = data['Close'].values.reshape(-1, 1)
    # Normalize the data
    from sklearn.preprocessing import MinMaxScaler
    scaler = MinMaxScaler(feature_range=(0, 1))
    scaled_prices = scaler.fit_transform(prices)

    # Create dataset with features and labels
    x_data, y_data = [], []
    for i in range(60, len(scaled_prices)):
        x_data.append(scaled_prices[i-60:i, 0])
        y_data.append(scaled_prices[i, 0])
    return np.array(x_data), np.array(y_data), scaler
def build_model():
    """Create and compile a stacked two-layer LSTM for one-step price prediction.

    Input shape is (60, 1): 60 timesteps of a single scaled price feature.
    """
    layers = [
        # First LSTM emits full sequences so the second LSTM can consume them.
        LSTM(units=50, return_sequences=True, input_shape=(60, 1)),
        Dropout(0.2),
        LSTM(units=50, return_sequences=False),
        Dropout(0.2),
        Dense(units=1),  # single output: the next (scaled) closing price
    ]
    model = Sequential()
    for layer in layers:
        model.add(layer)
    model.compile(optimizer='adam', loss='mean_squared_error')
    return model
def train_model(x_data, y_data):
    """Build a fresh LSTM and fit it on the windowed training samples."""
    # Keras LSTMs expect (samples, timesteps, features); add a singleton
    # feature axis to the 2-D window matrix.
    n_samples, n_steps = x_data.shape[0], x_data.shape[1]
    x_data = np.reshape(x_data, (n_samples, n_steps, 1))
    model = build_model()
    model.fit(x_data, y_data, epochs=50, batch_size=32)
    return model
def make_prediction(model, data, scaler):
    """Predict the next closing price from the most recent 60 closes.

    Returns a scalar in original price units (the scaling is inverted
    with the scaler fitted during preprocessing).
    """
    # Take the trailing 60-close window and scale it with the fitted scaler.
    window = scaler.transform(data['Close'].values[-60:].reshape(-1, 1))

    # Shape into a single-sample batch: (1, 60, 1).
    x_test = np.array([window])
    x_test = np.reshape(x_test, (x_test.shape[0], x_test.shape[1], 1))

    # Map the model's scaled output back to price units.
    predicted = scaler.inverse_transform(model.predict(x_test))
    return predicted[0][0]
def stock_prediction(ticker, start_date, end_date):
    """End-to-end pipeline: fetch data, train, predict, plot, and summarize.

    Returns a dict with the predicted next price, its percent change from
    the latest close, and the period's highest/lowest closing prices.
    """
    data = fetch_data(ticker, start_date, end_date)
    x_data, y_data, scaler = preprocess_data(data)
    model = train_model(x_data, y_data)
    predicted_price = make_prediction(model, data, scaler)

    closes = data['Close']
    last_close = closes.iloc[-1]
    # Percent move of the prediction relative to the most recent close.
    percentage_change = ((predicted_price - last_close) / last_close) * 100

    # Plot the history with the prediction as a horizontal reference line.
    plt.figure(figsize=(14, 5))
    plt.plot(closes, label='Historical Prices')
    plt.axhline(y=predicted_price, color='r', linestyle='--', label='Predicted Price')
    plt.title(f'{ticker} Price Prediction')
    plt.xlabel('Date')
    plt.ylabel('Price')
    plt.legend()
    plt.show()

    return {
        'Predicted Price': predicted_price,
        'Percentage Change': percentage_change,
        'Highest Price': closes.max(),
        'Lowest Price': closes.min()
    }

# Gradio Interface
# NOTE(review): the original used the pre-3.0 `gr.inputs.*` namespace, which
# was removed in Gradio 3/4, and `gr.inputs.Date`, which has never existed —
# both raised AttributeError. Modern component classes are used instead, with
# free-text ISO dates (yfinance accepts 'YYYY-MM-DD' strings).
ticker_options = ['AAPL', 'GOOGL', 'AMZN', 'MSFT', 'TSLA', 'FB', 'NFLX', 'NVDA', 'BRK.B', 'DIS']
iface = gr.Interface(
    fn=stock_prediction,
    inputs=[
        gr.Dropdown(choices=ticker_options, label='Stock Ticker'),
        gr.Textbox(label='Start Date (YYYY-MM-DD)'),
        gr.Textbox(label='End Date (YYYY-MM-DD)')
    ],
    outputs='json'
)

# Guard the launch so importing this module doesn't start a server.
if __name__ == '__main__':
    iface.launch()