# PSX_fyp / app.py
# Author: Hamza012bce21 — commit 1c03d2a (verified)
import streamlit as st
import numpy as np
import pandas as pd
import matplotlib.pyplot as plt
import os
# Optional ML imports: the app degrades to dummy predictions when the ML
# stack is unavailable (e.g. on a slim Hugging Face Space image).
try:
    import tensorflow as tf
    from tensorflow.keras.models import load_model
    from sklearn.preprocessing import MinMaxScaler
    import joblib
except ImportError:
    # If ANY of the four imports fails, all four names are nulled so later
    # code can test a single sentinel (`tf`) before taking the model path.
    tf = None
    load_model = None
    MinMaxScaler = None
    joblib = None
# Optional sentiment analysis: `analyzer` stays None when vaderSentiment is
# not installed; get_sentiment() then returns a neutral score of 0.
try:
    from vaderSentiment.vaderSentiment import SentimentIntensityAnalyzer
    analyzer = SentimentIntensityAnalyzer()
except ImportError:
    analyzer = None
# Streamlit requires set_page_config before any other st.* call.
st.set_page_config(page_title="PSX Stock Predictor", layout="wide")
# NOTE(review): the leading characters look like a mojibake'd 📈 emoji
# (UTF-8 decoded as Latin-1) — confirm intended title text.
st.title("πŸ“ˆ PSX Stock Predictor – HF Safe + Live Version")
# ------------------------------
# Load Model & Scaler
# ------------------------------
MODEL_LOADED = False  # flips to True only when model.h5 loads cleanly
if tf and os.path.exists("model.h5"):
    try:
        # "mse" is registered explicitly: models saved with a bare string
        # loss can fail to deserialize on newer Keras versions.
        model = load_model("model.h5", custom_objects={"mse": tf.keras.metrics.MeanSquaredError()})
        # Falls back to an UNFITTED MinMaxScaler when scaler.pkl is absent;
        # predict_next() refits it on the fetched series, so this works
        # either way (at the cost of losing the training-time fit).
        scaler = joblib.load("scaler.pkl") if os.path.exists("scaler.pkl") else MinMaxScaler()
        MODEL_LOADED = True
        st.success("Model loaded successfully!")
    except Exception as e:
        # Keep MODEL_LOADED False so the app degrades to dummy predictions.
        st.warning(f"Model found but failed to load: {e}")
else:
    st.warning("Model not found. Using dummy predictions.")
# ------------------------------
# Fetch PSX Data
# ------------------------------
API_KEY = os.getenv("ALPHAVANTAGE_API_KEY", None)


def get_psx_data(symbol="HBL"):
    """Return a daily close-price history for *symbol*.

    When an Alpha Vantage API key is configured, fetches the
    TIME_SERIES_DAILY endpoint for ``{symbol}.PSX``. On any failure
    (no key, network error, empty payload) falls back to 200 days of
    synthetic data so the UI always has something to plot.

    Returns a DataFrame indexed by date with one float "Close" column.
    """
    if API_KEY:
        try:
            import requests
            url = (
                "https://www.alphavantage.co/query"
                f"?function=TIME_SERIES_DAILY&symbol={symbol}.PSX&apikey={API_KEY}"
            )
            # Timeout so a slow/unreachable API cannot hang the app.
            r = requests.get(url, timeout=10).json()
            data = r.get("Time Series (Daily)", None)
            if data:
                df = pd.DataFrame(data).T
                df.index = pd.to_datetime(df.index)
                df = df.sort_index()
                df = df[["4. close"]].rename(columns={"4. close": "Close"})
                # Alpha Vantage returns prices as strings; convert so
                # plotting and the scaler do numeric work, not string work.
                df["Close"] = df["Close"].astype(float)
                return df
        except Exception:
            # Best-effort: any API/JSON error falls through to dummy data.
            pass
    # Fallback dummy data: a gentle uptrend with Gaussian noise.
    dates = pd.date_range(end=pd.Timestamp.today(), periods=200)
    prices = np.linspace(100, 150, 200) + np.random.normal(0, 2, 200)
    df = pd.DataFrame({"Close": prices}, index=dates)
    return df
# ------------------------------
# News Sentiment
# ------------------------------
NEWS_KEY = os.getenv("NEWSAPI_KEY", None)


def get_sentiment(stock="HBL"):
    """Return the mean VADER compound score for up to 5 recent headlines.

    Queries NewsAPI for "<stock> Pakistan" and averages the compound
    sentiment of the first five article titles. Returns 0 (neutral)
    when the analyzer or API key is missing, when no articles come back,
    or on any network/parse error — sentiment is strictly best-effort.
    """
    if not analyzer or not NEWS_KEY:
        return 0
    try:
        import requests
        url = f"https://newsapi.org/v2/everything?q={stock}+Pakistan&apiKey={NEWS_KEY}"
        # Timeout so a slow news API cannot hang the prediction flow.
        r = requests.get(url, timeout=10).json()
        articles = r.get("articles", [])[:5]
        if not articles:
            return 0
        scores = [analyzer.polarity_scores(a["title"])["compound"] for a in articles]
        return np.mean(scores) if scores else 0
    except Exception:
        # Any failure degrades to a neutral score rather than crashing.
        return 0
# ------------------------------
# Prediction
# ------------------------------
def predict_next(df):
    """Predict the next closing price from *df* (needs a "Close" column).

    With a loaded model and at least 60 rows of history: scale the
    series, feed the last 60 points to the network, and un-scale the
    output. Otherwise fall back to the last price perturbed by up to
    +/-1% (the original model path crashed on reshape when fewer than
    60 rows were available).
    """
    if MODEL_LOADED and len(df) >= 60:
        # NOTE(review): fit_transform refits the scaler on the current
        # series instead of reusing the training-time fit from scaler.pkl;
        # kept as-is because the fallback scaler is unfitted — confirm
        # this matches how the model was trained.
        data = scaler.fit_transform(df[["Close"]])
        last60 = data[-60:].reshape(1, 60, 1)
        pred = model.predict(last60, verbose=0)[0][0]
        pred_real = scaler.inverse_transform([[pred]])[0][0]
        return pred_real
    # Dummy prediction: last value plus a small random change.
    return df["Close"].iloc[-1] * (1 + np.random.uniform(-0.01, 0.01))
# ------------------------------
# Streamlit UI
# ------------------------------
symbol = st.selectbox("Choose PSX Stock:", ["HBL", "UBL", "ENGRO", "PSO", "OGDC"])
if st.button("Fetch & Predict"):
    with st.spinner("Fetching data and predicting..."):
        df = get_psx_data(symbol)
        sentiment = get_sentiment(symbol)
        prediction = predict_next(df)
        # Adjust prediction with sentiment (2% weight): a fully positive
        # score (+1.0) lifts the price by 2%, fully negative cuts it 2%.
        sentiment_adj = prediction + (prediction * sentiment * 0.02)
        # Plot historical closes; the prediction is drawn as a horizontal
        # reference line rather than a point at a future date.
        fig, ax = plt.subplots()
        ax.plot(df.index, df["Close"], label="Historical Price")
        ax.axhline(sentiment_adj, linestyle="--", color="red", label="Predicted Price")
        ax.set_title(f"{symbol} Stock Price Prediction")
        ax.legend()
        st.pyplot(fig)
        # Display results
        st.subheader("Prediction Result")
        st.write(f"**Predicted Price:** Rs {sentiment_adj:.2f}")
        st.write(f"**Sentiment Impact:** {sentiment:.3f}")