import streamlit as st
import pandas as pd
import numpy as np
import yfinance as yf
import cvxpy as cp
from datetime import datetime, timedelta


# ============ THEME MANAGEMENT ============

def initialize_theme():
    """Ensure a 'theme' key exists in Streamlit session state (default: 'light')."""
    if 'theme' not in st.session_state:
        st.session_state.theme = 'light'


def toggle_theme():
    """Flip the session theme between 'light' and 'dark'."""
    st.session_state.theme = 'dark' if st.session_state.theme == 'light' else 'light'


def get_theme_colors():
    """Return the color palette dict for the currently active theme.

    Keys: bg_color, card_bg, text, border, metric_label, icon (the icon is the
    label shown on the theme-toggle button).
    """
    initialize_theme()
    if st.session_state.theme == 'dark':
        return {
            "bg_color": "#0e1117",
            "card_bg": "#1e293b",
            "text": "#fafafa",
            "border": "#334155",
            "metric_label": "#94a3b8",
            "icon": "πŸŒ™"
        }
    else:
        return {
            "bg_color": "#ffffff",
            "card_bg": "#ffffff",
            "text": "#0f172a",
            "border": "#e2e8f0",
            "metric_label": "#64748b",
            "icon": "β˜€οΈ"
        }


def render_header():
    """Render the top navigation bar (page links + theme-toggle button)."""
    initialize_theme()
    colors = get_theme_colors()
    # NOTE(review): this f-string is empty — presumably theme CSS was removed
    # or lost; confirm whether custom styles should be injected here.
    st.markdown(f""" """, unsafe_allow_html=True)
    cols = st.columns([1, 1, 1, 1, 1, 1, 1, 0.5])
    with cols[0]:
        st.page_link("Main_Page.py", label="Home", icon="🏠")
    with cols[1]:
        st.page_link("pages/1_New_Portfolio.py", label="New", icon="πŸ’Ό")
    with cols[2]:
        st.page_link("pages/2_Rebalance.py", label="Rebalance", icon="πŸ”„")
    with cols[3]:
        st.page_link("pages/3_Risk_Analysis.py", label="Risk", icon="πŸ“Š")
    with cols[4]:
        st.page_link("pages/4_Market_Insights.py", label="Market", icon="πŸ“ˆ")
    with cols[5]:
        st.page_link("pages/5_Settings.py", label="Settings", icon="βš™οΈ")
    with cols[6]:
        st.page_link("pages/6_Learn.py", label="Learn", icon="πŸ“š")
    with cols[7]:
        if st.button(colors['icon'], key="theme_toggle", help="Toggle Light/Dark Mode"):
            toggle_theme()
            st.rerun()
    st.markdown("---")


# ============ DATA FETCHING ============

@st.cache_data(ttl=86400)
def get_nifty50_stocks():
    """Return the NIFTY 50 constituent tickers in yfinance ".NS" format.

    Scrapes Wikipedia; falls back to a hardcoded list if scraping fails or
    returns a suspiciously short table (< 45 symbols).
    """
    # Hardcoded backup list
    backup_tickers = [
        "RELIANCE.NS", "TCS.NS", "HDFCBANK.NS", "INFY.NS", "ICICIBANK.NS",
        "HINDUNILVR.NS", "ITC.NS", "SBIN.NS", "BHARTIARTL.NS", "KOTAKBANK.NS",
        "LT.NS", "AXISBANK.NS", "ASIANPAINT.NS", "MARUTI.NS", "SUNPHARMA.NS",
        "TITAN.NS", "BAJFINANCE.NS", "WIPRO.NS", "ULTRACEMCO.NS", "NESTLEIND.NS",
        "HCLTECH.NS", "POWERGRID.NS", "NTPC.NS", "TECHM.NS", "ONGC.NS",
        "M&M.NS", "TATAMOTORS.NS", "BAJAJFINSV.NS", "TATASTEEL.NS", "ADANIPORTS.NS",
        "COALINDIA.NS", "INDUSINDBK.NS", "DRREDDY.NS", "JSWSTEEL.NS", "CIPLA.NS",
        "BRITANNIA.NS", "BAJAJ-AUTO.NS", "DIVISLAB.NS", "GRASIM.NS", "HINDALCO.NS",
        "APOLLOHOSP.NS", "EICHERMOT.NS", "HEROMOTOCO.NS", "BPCL.NS", "TATACONSUM.NS",
        "SBILIFE.NS", "UPL.NS", "ADANIENT.NS", "HDFCLIFE.NS", "SHREECEM.NS"
    ]
    try:
        url = "https://en.wikipedia.org/wiki/NIFTY_50"
        tables = pd.read_html(url)
        df = tables[1]
        # The constituents table index can shift; search for one with 'Symbol'.
        if 'Symbol' not in df.columns:
            for table in tables:
                if 'Symbol' in table.columns:
                    df = table
                    break
        tickers = df['Symbol'].astype(str).values.tolist()
        formatted_tickers = [f"{ticker}.NS" for ticker in tickers]
        # Sanity check: a truncated table means the scrape is unreliable.
        if len(formatted_tickers) < 45:
            return backup_tickers
        return formatted_tickers
    except Exception as e:
        print(f"Scraping failed: {e}")
        return backup_tickers


@st.cache_data(ttl=86400)
def get_sector_stocks():
    """Return a static mapping of sector name -> list of NSE tickers."""
    return {
        "Banking & Finance": ["HDFCBANK.NS", "ICICIBANK.NS", "SBIN.NS", "KOTAKBANK.NS", "AXISBANK.NS", "INDUSINDBK.NS", "FEDERALBNK.NS", "BAJFINANCE.NS", "BAJAJFINSV.NS", "IDFCFIRSTB.NS"],
        "Information Technology": ["TCS.NS", "INFY.NS", "HCLTECH.NS", "WIPRO.NS", "TECHM.NS", "COFORGE.NS", "PERSISTENT.NS", "LTIM.NS", "MPHASIS.NS", "OFSS.NS"],
        "FMCG & Consumer": ["HINDUNILVR.NS", "ITC.NS", "NESTLEIND.NS", "BRITANNIA.NS", "DABUR.NS", "GODREJCP.NS", "MARICO.NS", "TATACONSUM.NS", "UBL.NS", "COLPAL.NS"],
        "Pharmaceuticals": ["SUNPHARMA.NS", "DRREDDY.NS", "CIPLA.NS", "DIVISLAB.NS", "BIOCON.NS", "LUPIN.NS", "AUROPHARMA.NS", "TORNTPHARM.NS", "ALKEM.NS", "CADILAHC.NS"],
        "Energy & Power": ["RELIANCE.NS", "ONGC.NS", "POWERGRID.NS", "NTPC.NS", "COALINDIA.NS", "GAIL.NS", "IOC.NS", "BPCL.NS", "TATAPOWER.NS", "ADANIGREEN.NS"],
        "Automobiles": ["MARUTI.NS", "TATAMOTORS.NS", "M&M.NS", "BAJAJ-AUTO.NS", "EICHERMOT.NS", "HEROMOTOCO.NS", "TVSMOTOR.NS", "ASHOKLEY.NS", "MRF.NS", "APOLLOTYRE.NS"],
        "Metals & Mining": ["TATASTEEL.NS", "JSWSTEEL.NS", "HINDALCO.NS", "VEDL.NS", "NATIONALUM.NS", "SAIL.NS", "JINDALSTEL.NS", "NMDC.NS", "COALINDIA.NS"]
    }


@st.cache_data(ttl=1800)
def get_stock_info(ticker):
    """Fetch basic metadata for one ticker from yfinance.

    Returns a dict with keys name/sector/industry/price; on any failure
    returns placeholder values so callers never have to handle exceptions.
    """
    try:
        stock = yf.Ticker(ticker)
        info = stock.info
        return {
            'name': info.get('longName', ticker),
            'sector': info.get('sector', 'Unknown'),
            'industry': info.get('industry', 'Unknown'),
            'price': info.get('currentPrice', 0),
        }
    except Exception:
        return {'name': ticker, 'sector': 'Unknown', 'industry': 'Unknown', 'price': 0}


def download_prices(tickers, start_date, end_date):
    """Download daily Close prices for `tickers` into one DataFrame.

    Handles both the single-ticker (flat columns) and multi-ticker
    (MultiIndex columns) layouts that yf.download can return. Tickers with
    fewer than ~50 usable rows are dropped. Returns an empty DataFrame on
    failure (after surfacing the error in the UI).
    """
    try:
        data = yf.download(tickers, start=start_date, end=end_date, progress=False,
                           group_by="ticker" if len(tickers) > 1 else None)
        if data.empty:
            return pd.DataFrame()
        if len(tickers) == 1:
            if 'Close' in data.columns:
                prices = data[['Close']].copy()
                prices.columns = tickers
            else:
                return pd.DataFrame()
        elif isinstance(data.columns, pd.MultiIndex):
            cleaned = {}
            for ticker in tickers:
                try:
                    ticker_data = data[ticker]['Close'].dropna()
                    # Require enough history for meaningful statistics.
                    if len(ticker_data) > 50:
                        cleaned[ticker] = ticker_data
                except (KeyError, IndexError):
                    continue
            prices = pd.DataFrame(cleaned)
        else:
            prices = data
        # Forward-fill gaps, then drop fully-empty rows/columns.
        prices = prices.ffill().dropna(how='all').dropna(axis=1, how='all')
        return prices
    except Exception as e:
        st.error(f"Error downloading data: {str(e)}")
        return pd.DataFrame()


# ============ STATISTICS & OPTIMIZATION ============

def compute_portfolio_stats(prices, periods_per_year=252):
    """Compute daily returns plus annualized mean, covariance, correlation, vol.

    Returns (returns, mean_annual, cov_annual, corr_matrix, volatility_annual).
    """
    returns = prices.pct_change().dropna()
    mean_annual = returns.mean() * periods_per_year
    cov_annual = returns.cov() * periods_per_year
    corr_matrix = returns.corr()
    volatility_annual = returns.std() * np.sqrt(periods_per_year)
    return returns, mean_annual, cov_annual, corr_matrix, volatility_annual


def solve_optimization(cov_annual, expected_returns, target_return=None, max_weight=1.0):
    """Minimum-variance optimization via CVXPY with a per-asset weight cap.

    Long-only, fully-invested portfolio; optionally requires the expected
    return to be at least `target_return`. Tries OSQP, SCS, then ECOS.

    Returns a normalized weight vector, or None if every solver fails —
    callers must handle None (this avoids fake equal-weight points polluting
    the efficient-frontier plot).
    """
    n = cov_annual.shape[0]
    w = cp.Variable(n)
    # Tiny ridge keeps the covariance numerically positive semidefinite.
    Sigma = cov_annual.values + 1e-6 * np.eye(n)
    constraints = [
        cp.sum(w) == 1,
        w >= 0,
        w <= max_weight  # Safety Lock
    ]
    if target_return is not None:
        mu = expected_returns.values
        constraints.append(w.T @ mu >= target_return)
    objective = cp.quad_form(w, Sigma)
    prob = cp.Problem(cp.Minimize(objective), constraints)
    solvers = [cp.OSQP, cp.SCS, cp.ECOS]
    for solver in solvers:
        try:
            prob.solve(solver=solver, verbose=False)
            if w.value is not None and prob.status in [cp.OPTIMAL, cp.OPTIMAL_INACCURATE]:
                weights = np.array(w.value).flatten()
                # Clip tiny negative values from solver tolerance, renormalize.
                weights = np.maximum(weights, 0)
                weights = weights / weights.sum()
                return weights
        except Exception:
            continue
    # Return None instead of equal weights if all solvers fail — prevents
    # the "artifact" point in the efficient-frontier graph.
    return None


def find_max_sharpe_portfolio(expected_returns, cov_annual, risk_free_rate=0.0654,
                              n_points=50, max_weight=1.0):
    """Trace the efficient frontier and pick the max-Sharpe portfolio.

    Sweeps `n_points` target returns between the min and max expected
    return, solving a constrained min-variance problem at each. Returns
    (best_weights, efficient_frontier) where the frontier is a list of
    dicts with 'return'/'volatility'/'sharpe'. Falls back to the global
    minimum-variance portfolio if every frontier point fails.
    """
    min_ret = expected_returns.min()
    max_ret = expected_returns.max()
    # Quick solution for global min var (no target constraint)
    global_min_var = solve_optimization(cov_annual, expected_returns, max_weight=max_weight)
    if max_ret <= min_ret:
        return global_min_var, []
    target_returns = np.linspace(min_ret + 0.001, max_ret - 0.001, n_points)
    best_sharpe = -np.inf
    best_weights = None
    efficient_frontier = []
    for target in target_returns:
        try:
            # Pass max_weight constraint
            weights = solve_optimization(cov_annual, expected_returns, target, max_weight)
            # If optimization failed (returned None), skip this point.
            if weights is None:
                continue
            port_return = expected_returns.values @ weights
            port_volatility = np.sqrt(weights.T @ cov_annual.values @ weights)
            efficient_frontier.append({
                'return': port_return,
                'volatility': port_volatility,
                'sharpe': (port_return - risk_free_rate) / port_volatility if port_volatility > 0 else 0
            })
            if port_volatility > 0:
                sharpe = (port_return - risk_free_rate) / port_volatility
                if sharpe > best_sharpe:
                    best_sharpe = sharpe
                    best_weights = weights
        except Exception:
            continue
    if best_weights is None:
        # Fallback to global min var if everything failed
        best_weights = global_min_var
    return best_weights, efficient_frontier


# ============ RISK METRICS ============

def monte_carlo_simulation(returns, weights, initial_investment, n_simulations=1000, n_days=252):
    """Simulate terminal portfolio values via multivariate-normal daily returns.

    Returns an array of `n_simulations` end-of-horizon portfolio values.
    """
    mean_returns = returns.mean()
    cov_matrix = returns.cov()
    portfolio_returns = []
    for _ in range(n_simulations):
        simulated_returns = np.random.multivariate_normal(mean_returns, cov_matrix, n_days)
        portfolio_daily_returns = simulated_returns @ weights
        # Compound daily returns over the horizon; keep only the final value.
        portfolio_value = initial_investment * (1 + portfolio_daily_returns).cumprod()[-1]
        portfolio_returns.append(portfolio_value)
    return np.array(portfolio_returns)


def calculate_var_cvar(returns, weights, confidence_level=0.95):
    """Historical Value-at-Risk and Conditional VaR of the weighted portfolio.

    VaR is the (1-confidence) percentile of daily portfolio returns; CVaR is
    the mean of returns at or below VaR (falls back to VaR itself if that
    tail is empty, avoiding a NaN).
    """
    portfolio_returns = returns @ weights
    var = np.percentile(portfolio_returns, (1 - confidence_level) * 100)
    tail = portfolio_returns[portfolio_returns <= var]
    cvar = tail.mean() if len(tail) > 0 else var
    return var, cvar


def calculate_max_drawdown(prices, weights):
    """Max drawdown of the weighted portfolio plus the full drawdown series.

    Returns (max_drawdown, drawdown) where max_drawdown is the most negative
    peak-to-trough decline.
    """
    portfolio_returns = (prices @ weights).pct_change().fillna(0)
    portfolio_value = (1 + portfolio_returns).cumprod()
    running_max = portfolio_value.cummax()
    drawdown = (portfolio_value - running_max) / running_max
    max_drawdown = drawdown.min()
    return max_drawdown, drawdown


def calculate_rolling_volatility(returns, weights, window=30):
    """Annualized rolling volatility (std over `window` days, scaled by √252)."""
    portfolio_returns = returns @ weights
    rolling_vol = portfolio_returns.rolling(window=window).std() * np.sqrt(252)
    return rolling_vol


def stress_test_scenarios(returns, weights):
    """Build a dict of named shock scenarios (fixed shocks + data-driven stats)."""
    portfolio_returns = returns @ weights
    mean = portfolio_returns.mean()
    std = portfolio_returns.std()
    scenarios = {
        'Market Crash (-20%)': -0.20,
        'Moderate Decline (-10%)': -0.10,
        'Minor Correction (-5%)': -0.05,
        'Current Volatility': std,
        'Volatility Spike (2x)': std * 2,
        'Best Historical Day': portfolio_returns.max(),
        'Worst Historical Day': portfolio_returns.min(),
        'Mean Daily Return': mean
    }
    return scenarios


def calculate_portfolio_metrics(prices, weights, risk_free_rate=0.0654):
    """Annualized return, volatility, and Sharpe ratio for a weighted portfolio."""
    returns, mean_annual, cov_annual, _, _ = compute_portfolio_stats(prices)
    port_return = mean_annual.values @ weights
    port_volatility = np.sqrt(weights.T @ cov_annual.values @ weights)
    sharpe_ratio = (port_return - risk_free_rate) / port_volatility if port_volatility > 0 else 0
    return {'return': port_return, 'volatility': port_volatility, 'sharpe': sharpe_ratio}


def generate_rebalancing_actions(current_holdings, optimal_weights, latest_prices,
                                 total_value, brokerage_rate=0.0003):
    """Compute BUY/SELL actions to move current holdings to the optimal weights.

    `current_holdings` maps ticker -> {'shares': n}; share counts are floored
    to whole shares. Returns a DataFrame of actions (empty if no trades).
    """
    actions = []
    for ticker in optimal_weights.index:
        current_shares = current_holdings.get(ticker, {}).get('shares', 0)
        current_value = current_shares * latest_prices[ticker]
        current_weight = current_value / total_value if total_value > 0 else 0
        target_weight = optimal_weights[ticker]
        target_value = target_weight * total_value
        target_shares = int(target_value / latest_prices[ticker])
        diff_shares = target_shares - current_shares
        diff_value = diff_shares * latest_prices[ticker]
        if abs(diff_shares) > 0:
            action = 'BUY' if diff_shares > 0 else 'SELL'
            cost = abs(diff_value) * brokerage_rate
            actions.append({
                'Stock': ticker,
                'Action': action,
                'Shares': abs(diff_shares),
                'Price': f"β‚Ή{latest_prices[ticker]:.2f}",
                'Amount': f"β‚Ή{abs(diff_value):,.0f}",
                'Cost': f"β‚Ή{cost:.2f}",
                'Current %': f"{current_weight*100:.2f}%",
                'Target %': f"{target_weight*100:.2f}%"
            })
    return pd.DataFrame(actions) if actions else pd.DataFrame()


# ============ MARKET INSIGHTS ============

@st.cache_data(ttl=300)
def get_nifty_data():
    """Fetch 1 month of NIFTY index history plus its info dict (empty on failure)."""
    try:
        nifty = yf.Ticker("^NSEI")
        data = nifty.history(period="1mo")
        return data, nifty.info
    except Exception:
        return pd.DataFrame(), {}


@st.cache_data(ttl=300)
def get_top_movers(tickers, n=10):
    """Return (gainers, losers) DataFrames: the top/bottom `n` by daily % change.

    Tickers whose info fetch fails are silently skipped.
    """
    data = {}
    for ticker in tickers:
        try:
            stock = yf.Ticker(ticker)
            info = stock.info
            change_val = info.get('regularMarketChangePercent', 0)
            if change_val is None:
                change_val = 0
            data[ticker] = {
                'name': info.get('longName', ticker)[:30],
                'price': float(info.get('currentPrice', 0)),
                'change': float(change_val),
                'volume': int(info.get('volume', 0))
            }
        except Exception:
            continue
    df = pd.DataFrame(data).T
    if df.empty:
        return pd.DataFrame(), pd.DataFrame()
    # Coerce to numeric; DataFrame(...).T can yield object-dtype columns.
    df['change'] = pd.to_numeric(df['change'], errors='coerce').fillna(0)
    df['price'] = pd.to_numeric(df['price'], errors='coerce').fillna(0)
    gainers = df.nlargest(n, 'change')
    losers = df.nsmallest(n, 'change')
    return gainers, losers


@st.cache_data(ttl=300)
def get_global_indices():
    """Fetch current level and daily % change for a few global indices."""
    indices = {"πŸ‡ΊπŸ‡Έ S&P 500": "^GSPC", "πŸ‡ΊπŸ‡Έ Nasdaq": "^IXIC",
               "πŸ‡¬πŸ‡§ FTSE 100": "^FTSE", "πŸ‡―πŸ‡΅ Nikkei 225": "^N225"}
    data = []
    for name, ticker in indices.items():
        try:
            idx = yf.Ticker(ticker)
            hist = idx.history(period="2d")
            if len(hist) > 0:
                current = hist['Close'].iloc[-1]
                # If only one session is available, report 0% change.
                prev = hist['Close'].iloc[-2] if len(hist) > 1 else current
                change_pct = ((current - prev) / prev) * 100
                data.append({"Index": name, "Price": current, "Change %": change_pct})
        except Exception:
            continue
    return pd.DataFrame(data)


@st.cache_data(ttl=900)
def get_market_news():
    """
    Fetch latest market news.
    Note: We use RELIANCE.NS as a proxy because ^NSEI (Index) news feed
    is often empty or broken in yfinance.
    """
    try:
        # Primary Source: Reliance Industries (Major market mover)
        ticker = yf.Ticker("RELIANCE.NS")
        news = ticker.news
        # Fallback Source: TCS if Reliance returns nothing
        if not news:
            ticker = yf.Ticker("TCS.NS")
            news = ticker.news
        return news
    except Exception as e:
        print(f"News error: {e}")
        return []