# Stock Scout — Streamlit app (Hugging Face Space).
import warnings

# --- 1. SILENCE ALL WARNINGS & ERRORS ---
# Filters are installed before the heavy third-party imports below so their
# import-time warnings are suppressed as well.
warnings.filterwarnings("ignore", category=RuntimeWarning)
warnings.filterwarnings("ignore", category=FutureWarning)

import contextlib
import logging
import os
import sys
from typing import Any, List, Optional

import streamlit as st
import yfinance as yf
from dotenv import load_dotenv
from duckduckgo_search import DDGS
from huggingface_hub import InferenceClient
from langchain_core.language_models.llms import LLM
from pydantic import Field

# Silence the yfinance logger so failed ticker lookups don't spam the console.
logging.getLogger('yfinance').setLevel(logging.CRITICAL)
@contextlib.contextmanager
def suppress_output():
    """Context manager that redirects stdout/stderr to os.devnull.

    Used to hide noisy error output from yfinance / DDGS lookups.

    Fix: the original function contained a ``yield`` and was used as
    ``with suppress_output():`` but was missing the
    ``@contextlib.contextmanager`` decorator, so entering the ``with``
    block raised a TypeError at runtime.
    """
    with open(os.devnull, "w") as devnull:
        old_stdout, old_stderr = sys.stdout, sys.stderr
        try:
            sys.stdout, sys.stderr = devnull, devnull
            yield
        finally:
            # Always restore the real streams, even if the body raised.
            sys.stdout, sys.stderr = old_stdout, old_stderr
# --- CONFIG ---
# set_page_config must be the first Streamlit call in the script.
st.set_page_config(page_title="Stock Scout", page_icon="π", layout="centered")

# Load .env so HF_TOKEN can live in a local dotfile during development.
load_dotenv()
hf_token = os.getenv("HF_TOKEN")

# Fail fast with a visible error if the Hugging Face token is absent.
if not hf_token:
    st.error("Missing HF_TOKEN in .env file.")
    st.stop()
# --- MODEL (Unbreakable Cluster) ---
class HFChatModel(LLM):
    """LangChain LLM wrapper that tries a list of HF chat models in order.

    Each call walks ``models`` and returns the first successful completion,
    so one unavailable/rate-limited model does not break the app.
    """

    # Hugging Face API token (required).
    token: str = Field(...)
    # Candidate chat models, tried in order until one responds.
    models: List[str] = [
        "microsoft/Phi-3.5-mini-instruct",
        "Qwen/Qwen2.5-7B-Instruct",
        "HuggingFaceH4/zephyr-7b-beta",
    ]

    def _call(self, prompt: str, stop: Optional[List[str]] = None, **kwargs: Any) -> str:
        """Send ``prompt`` to the first model that answers; else a busy notice."""
        client = InferenceClient(token=self.token)
        messages = [{"role": "user", "content": prompt}]
        for model_id in self.models:
            try:
                response = client.chat_completion(
                    model=model_id, messages=messages, max_tokens=600, temperature=0.5
                )
                return response.choices[0].message.content
            except Exception:
                # Narrowed from a bare `except:` (which also swallowed
                # KeyboardInterrupt/SystemExit). This model is down or
                # rate-limited; fall through to the next candidate.
                continue
        return "System Busy."

    @property
    def _llm_type(self) -> str:
        # LangChain's base LLM declares _llm_type as a property; the original
        # plain-method override handed LangChain a bound method where it
        # expects a string (e.g. when serializing identifying params).
        return "custom_hf_chat_cluster"


llm = HFChatModel(token=hf_token)
# --- FUNCTIONS ---
def get_stock_data(ticker):
    """Return ``(last_close, pct_change_vs_prev_close)`` for ``ticker``.

    Returns ``(None, None)`` when fewer than two daily closes are available
    or when the lookup fails for any reason (deliberate best-effort: the UI
    shows "Data unavailable." on None).
    """
    try:
        with suppress_output():
            stock = yf.Ticker(ticker)
            hist = stock.history(period="5d")
        if len(hist) < 2:
            return None, None
        curr = hist['Close'].iloc[-1]
        prev = hist['Close'].iloc[-2]
        change = ((curr - prev) / prev) * 100
        return curr, change
    except Exception:
        # Narrowed from a bare `except:` so Ctrl-C / SystemExit still
        # propagate; network or parsing failures map to the None sentinel.
        return None, None
def analyze_news(ticker):
    """Fetch recent news for ``ticker``, summarize via the LLM, append sources.

    Returns a markdown string. Falls back to a short notice when the news
    lookup returns nothing or when any step fails (deliberate best-effort).
    """
    try:
        with suppress_output():
            results = DDGS().news(f"{ticker} stock news", max_results=5)
        if not results:
            return "No recent news found."
        news_context = []
        sources_txt = "\n\n**Sources:**\n"
        for i, res in enumerate(results, 1):
            title = res.get('title', '?')
            link = res.get('url', '#')
            # Numbered [Source N] tags let the LLM cite specific headlines.
            news_context.append(f"[Source {i}]: {title}")
            sources_txt += f"{i}. [{title}]({link})\n"
        full_text = "\n".join(news_context)
        prompt = f"""
Analyze news for {ticker}:
{full_text}
1. Why is it moving? (Cite [Source X])
2. Sentiment: BULLISH/BEARISH/NEUTRAL
"""
        return llm.invoke(prompt) + sources_txt
    except Exception:
        # Narrowed from a bare `except:`; any search/LLM failure degrades to
        # a fixed notice instead of crashing the page.
        return "Analysis failed."
# --- UI (SIMPLE DROPDOWN ONLY) ---
st.title("π Stock Scout")

# 1. The Safe List
POPULAR_TICKERS = [
    "AAPL", "NVDA", "TSLA", "AMD", "AMZN", "MSFT", "GOOGL", "META",
    "JPM", "BAC", "WMT", "DIS", "NFLX", "KO", "PEP", "BA", "INTC", "PYPL",
]

# 2. The Selector
selected_ticker = st.selectbox("Select Stock:", options=POPULAR_TICKERS)

# 3. Execution (Instant)
if selected_ticker:
    price, change = get_stock_data(selected_ticker)
    # `is not None` rather than truthiness: get_stock_data uses None as its
    # failure sentinel, and a valid price of 0.0 would be falsy.
    if price is not None:
        st.metric(selected_ticker, f"${price:.2f}", f"{change:.2f}%")
        st.divider()
        with st.spinner(f"Analyzing {selected_ticker}..."):
            st.info(analyze_news(selected_ticker))
    else:
        st.error("Data unavailable.")