rachman committed on
Commit
012eb9b
·
1 Parent(s): 98970e9

first commit

Browse files
app.py ADDED
@@ -0,0 +1,44 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ # from src import stock_data, model_train, train_test_split, model_predict
2
+ # import streamlit as st
3
+ # user_input = st.text_input("Enter your stock ticker here:")
4
+ # train = stock_data (user_input)
5
+ # x_train, y_train, scaler = train_test_split(train)
6
+ # history, model = model_train(x_train, y_train)
7
+ # result, data_inf = model_predict(train, scaler, model)
8
+
9
+ import streamlit as st
10
+ from src import stock_data, model_train, train_test_split, model_predict
11
+
12
+ # Function to display result, history, and data information
13
def display_results(user_input, result, history, data_inf):
    """Render the recent price window and the predicted next-day price.

    Args:
        user_input: Ticker symbol the user typed.
        result: Scalar predicted price (already inverse-scaled by the caller).
        history: Training History object — NOTE(review): currently unused here.
        data_inf: The last-10-day price window to display as a table.
    """
    st.header(f'Here is the data for {user_input} the past 10 days.')
    st.write(data_inf)
    # Build the HTML snippet first, then hand it to st.markdown in one call.
    analysis_html = f'''
    Stock Prediction Analysis for <span style="font-size:24px;">{user_input}</span>

    <p style="font-size:24px;">
    Tomorrow's {user_input} predicted price is : <b>{result}</b>
    </p>
    '''
    st.markdown(analysis_html, unsafe_allow_html=True)
23
+
24
+
25
+
26
+ # Main function to run the app
27
def main():
    """Streamlit entry point: collect a ticker, fit the model, show the forecast."""
    st.title("Stock Prediction App")
    user_input = st.text_input("Enter your stock ticker here:")

    # Only run the (slow) download/train pipeline after an explicit button press.
    if st.button("Predict"):
        with st.spinner('Loading...'):
            train = stock_data(user_input)
            if len(train) == 0:
                # Empty download means Yahoo Finance did not recognise the ticker.
                st.write('Invalid stock ticker. Please verify the ticker symbol on the following website: [Yahoo Finance.](https://finance.yahoo.com/)')
            else:
                x_train, y_train, scaler = train_test_split(train)
                history, model = model_train(x_train, y_train)
                result, data_inf = model_predict(train, scaler, model)
                # result comes back as a (1, 1) array; unwrap the scalar for display.
                display_results(user_input, result[0][0], history, data_inf)


if __name__ == "__main__":
    main()
requirements.txt ADDED
@@ -0,0 +1,5 @@
 
 
 
 
 
 
1
+ scikit-learn==1.3.2
2
+ yfinance==0.2.37
3
+ tensorflow==2.15.0
4
+ numpy==1.26.3
5
+ streamlit
src/__init__.py ADDED
@@ -0,0 +1,3 @@
 
 
 
 
1
# Public API of the src package.
#
# Explicit imports (instead of the previous `import *`) make the exported
# names visible to readers and linters, and prevent accidental re-export of
# each module's own imports (yf, np, tf, ...).
from .get_data import stock_data
from .model import model_train, model_predict
from .train_test_split_data import train_test_split

__all__ = ["stock_data", "model_train", "model_predict", "train_test_split"]
src/__pycache__/__init__.cpython-310.pyc ADDED
Binary file (230 Bytes). View file
 
src/__pycache__/get_data.cpython-310.pyc ADDED
Binary file (522 Bytes). View file
 
src/__pycache__/model.cpython-310.pyc ADDED
Binary file (1.65 kB). View file
 
src/__pycache__/train_test_split_data.cpython-310.pyc ADDED
Binary file (694 Bytes). View file
 
src/get_data.py ADDED
@@ -0,0 +1,8 @@
 
 
 
 
 
 
 
 
 
1
+ import yfinance as yf
2
+ from datetime import datetime, timedelta
3
+ from sklearn.preprocessing import MinMaxScaler
4
+ import numpy as np
5
+
6
def stock_data(tick, days=365):
    """Download daily closing prices for a ticker from Yahoo Finance.

    Args:
        tick: Ticker symbol understood by Yahoo Finance (e.g. "AAPL").
        days: Lookback window in calendar days (default 365, as before).

    Returns:
        The 'Close' column of the downloaded data. Empty when the ticker is
        unknown — callers rely on `len(...) > 0` to validate the ticker.
    """
    # Capture "now" once so start and end are derived from the same instant.
    end = datetime.now()
    data = yf.download(tick, start=end - timedelta(days=days), end=end)
    # Subscript access instead of attribute access: robust even if the
    # returned frame's columns are not valid identifiers.
    return data["Close"]
src/model.py ADDED
@@ -0,0 +1,35 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ from tensorflow.keras.models import Sequential
2
+ from tensorflow.keras.layers import GRU, Bidirectional, Dense, Dropout
3
+ from tensorflow.keras.callbacks import EarlyStopping
4
+ from tensorflow.keras.regularizers import L2
5
+ import tensorflow as tf
6
+
7
def model_train (x_train, y_train):
    """Build and fit a stacked bidirectional-GRU regressor on windowed data.

    Args:
        x_train: Scaled input windows; the network is built for shape (None, 10, 1).
        y_train: Scaled next-step target for each window.

    Returns:
        history: Keras History object produced by model.fit().
        model: The trained Sequential model.
    """
    # Seed TF and clear any previous session state so repeated runs
    # (e.g. multiple Streamlit predictions in one process) start fresh.
    tf.random.set_seed(100)
    tf.keras.backend.clear_session()

    # Two bidirectional GRU layers; only the second carries L2 weight decay.
    model = Sequential()
    model.add(Bidirectional(GRU(units=256, activation='relu', return_sequences=True, input_shape=(10,1))))
    model.add(Dropout(0.5))
    model.add(Bidirectional(GRU(units=256, activation='relu', return_sequences=False, kernel_regularizer=L2(0.01))))
    model.add(Dropout(0.1))
    model.add(Dense(units=1, activation='linear')) # Prediction of the next value

    # MAPE serves as both the training loss and the reported metric.
    model.compile(optimizer='adam', loss='mape', metrics='mape')

    model.build((None, 10, 1)) # Specify input shape

    # Define EarlyStopping callback: stop after 3 stagnant validation epochs
    # and roll back to the best weights seen.
    early_stopping = EarlyStopping(monitor='val_loss', patience=3, restore_best_weights=True)

    # Train the model (20% of the windows held out for validation)
    history = model.fit(x_train, y_train, epochs=10, batch_size=20, validation_split=0.2, callbacks=[early_stopping])
    return history, model
28
+
29
def model_predict(train, scaler, model):
    """Predict the next closing price from the last 10 observations.

    Args:
        train: Price series; only its trailing 10 rows are used.
        scaler: The fitted MinMaxScaler used during training.
        model: The trained Keras model (expects input shaped (1, 10, 1)).

    Returns:
        A tuple of (inverse-scaled prediction array, the raw 10-row window).
    """
    window = train[-10:]
    # Scale the window with the training scaler, then add a batch axis.
    scaled_window = scaler.transform(window.values.reshape(-1, 1))
    batch = tf.expand_dims(scaled_window, 0)
    prediction = model.predict(batch)
    # Map the prediction back to price units; also return the raw window for display.
    return scaler.inverse_transform(prediction), window
35
+
src/train_test_split_data.py ADDED
@@ -0,0 +1,20 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ from sklearn.preprocessing import MinMaxScaler
2
+ import numpy as np
3
+
4
def train_test_split(train, prediction_days=10):
    """Scale a price series and slice it into supervised training windows.

    NOTE: despite the name, this does not hold out a test set; it builds
    (window, next-value) training pairs for the GRU model.

    Args:
        train: Price series (anything with `.values`, e.g. a pandas Series).
        prediction_days: Window length fed to the model. Default 10 matches
            the (10, 1) input shape model_train builds for.

    Returns:
        x_train: Windows shaped (samples, prediction_days, 1).
        y_train: Next-step targets shaped (samples,).
        scaler: The fitted MinMaxScaler, for inverse-transforming predictions.
    """
    scaler = MinMaxScaler()
    scaled_data = scaler.fit_transform(train.values.reshape(-1, 1))

    # One sliding window per position past the warm-up period.
    window_range = range(prediction_days, len(scaled_data))
    windows = [scaled_data[i - prediction_days:i, 0] for i in window_range]
    targets = [scaled_data[i, 0] for i in window_range]

    # Reshape via the known window length instead of x_train.shape[1], which
    # raised IndexError when the series was shorter than the window (the
    # windows list is empty and np.array([]) has no second axis).
    x_train = np.reshape(np.array(windows), (len(windows), prediction_days, 1))
    y_train = np.array(targets)
    return x_train, y_train, scaler