github-actions committed on
Commit
b0169e0
·
1 Parent(s): e8dd6cb

Sync from GitHub

Browse files
Files changed (33) hide show
  1. engine/engine/trend_engine.py +36 -0
  2. hf_space/app.py +33 -48
  3. hf_space/hf_space/data/loader.py +23 -47
  4. hf_space/hf_space/hf_space/README.md +9 -13
  5. hf_space/hf_space/hf_space/hf_space/Dockerfile +21 -16
  6. hf_space/hf_space/hf_space/hf_space/hf_space/engine/engine/__init__.py +1 -0
  7. hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/data/data/__init__.py +1 -0
  8. hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/analytics/analytics/__init__.py +1 -0
  9. hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/app.py +42 -68
  10. hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/data/loader.py +52 -11
  11. hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/Dockerfile +9 -1
  12. hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/data/loader.py +15 -0
  13. hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/analytics/metrics.py +11 -16
  14. hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/engine/backtest.py +7 -33
  15. hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/app.py +19 -55
  16. hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/Dockerfile +7 -2
  17. hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/app.py +94 -48
  18. hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/Dockerfile +6 -13
  19. hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/app.py +66 -31
  20. hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/analytics/metrics.py +26 -0
  21. hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/app.py +35 -0
  22. hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/data/fred.py +11 -0
  23. hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/data/hf_store.py +17 -0
  24. hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/data/updater.py +55 -0
  25. hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/engine/backtest.py +46 -0
  26. hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/.gitattributes +35 -0
  27. hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/Dockerfile +20 -0
  28. hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/README.md +19 -0
  29. hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/requirements.txt +3 -0
  30. hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/src/streamlit_app.py +40 -0
  31. hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/requirements.txt +7 -2
  32. hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/streamlit_app.py +70 -0
  33. hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/requirements.txt +1 -3
engine/engine/trend_engine.py ADDED
@@ -0,0 +1,36 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
import numpy as np
import pandas as pd


def run_trend_module(prices, daily_sofr, vol_target):
    """Run a 20-day breakout trend strategy with inverse-vol sizing.

    Parameters
    ----------
    prices : pd.DataFrame
        Daily close prices, one column per ticker.
    daily_sofr : pd.Series
        Annualized SOFR rate as a decimal (e.g. 0.05), aligned to the
        price index; it is de-annualized internally via ``/ 252``.
        (Name kept for interface compatibility despite the "daily" prefix.)
    vol_target : float
        Annual portfolio volatility target, split equally across assets.

    Returns
    -------
    dict
        ``"curve"``: equity curve (pd.Series, starts at ~1.0);
        ``"alloc"``: next-day target weights (pd.DataFrame with
        "Ticker" and "Weight (%)" columns).
    """
    # 1. Signals: long when price closes above the tighter of the 20-day
    # Donchian high and the 20-day Keltner-style upper band.
    d_high = prices.rolling(20).max()
    k_sma = prices.rolling(20).mean()
    # Proxy ATR from half the 20-day range (true ATR needs high/low/close).
    atr = (prices.rolling(20).max() - prices.rolling(20).min()) / 2
    k_upper = k_sma + (2 * atr)

    entry_band = np.minimum(d_high, k_upper)
    signals = (prices > entry_band.shift(1)).astype(int)

    # 2. Risk-parity weighting: each asset sized to vol_target / n.
    rets = prices.pct_change()
    # Zero realized vol (e.g. a flat price series) would otherwise divide
    # to +inf and leak NaN into the allocation table; treat it as
    # "cannot size" and give the asset zero weight instead.
    real_vol = (rets.rolling(21).std() * np.sqrt(252)).replace(0, np.nan)

    n = len(prices.columns)
    weights = ((vol_target / n) / real_vol.shift(1)).fillna(0)

    # 3. Strategy returns: positions plus SOFR interest on unused capital.
    strat_rets = (signals.shift(1) * weights.shift(1) * rets).sum(axis=1)
    unused_cap = 1 - (signals.shift(1) * weights.shift(1)).sum(axis=1)
    strat_rets += unused_cap.clip(0, 1) * (daily_sofr / 252)

    equity_curve = (1 + strat_rets).cumprod()

    # Next-day allocation from the latest band breach and realized vol.
    tomorrow_sig = (prices.iloc[-1] > entry_band.iloc[-1]).astype(int)
    tomorrow_w = ((vol_target / n) / real_vol.iloc[-1]).fillna(0)
    alloc = pd.DataFrame({
        "Ticker": prices.columns,
        "Weight (%)": (tomorrow_sig * tomorrow_w * 100).round(2)
    })

    return {"curve": equity_curve, "alloc": alloc}
hf_space/app.py CHANGED
@@ -1,54 +1,39 @@
1
  import streamlit as st
 
 
 
2
 
3
- st.set_page_config(page_title="P2 ETF Trend Suite", layout="wide")
4
 
5
- st.title("📊 P2 ETF Trend Suite")
6
- st.markdown("Stooq-Primary Data Engine + HF Integration")
7
 
8
- # Sidebar Controls
9
- st.sidebar.header("Parameters")
10
- initial_capital = st.sidebar.number_input("Initial Capital", value=100000)
11
- vol_target = st.sidebar.slider("Target Volatility", 0.05, 0.30, 0.15)
12
- lookback = st.sidebar.slider("Lookback (Days)", 50, 300, 200)
13
 
14
- st.sidebar.markdown("---")
15
- st.sidebar.header("Hugging Face Sync")
16
- hf_repo = st.sidebar.text_input("Repo ID", placeholder="user/dataset-name")
17
- hf_token = st.sidebar.text_input("HF Token", type="password")
18
-
19
- run_button = st.sidebar.button("▶ Run Full Process")
20
-
21
- if run_button:
22
- from data.loader import load_data, push_to_hf
23
- from engine.backtest import run_backtest
24
- from analytics.metrics import compute_metrics
25
-
26
- # Phase 1: Data Fetching
27
- with st.spinner("Fetching data from Stooq..."):
28
- df = load_data()
29
 
30
- if not df.empty:
31
- st.subheader("📈 Market Data Preview")
32
- st.dataframe(df.tail(5), use_container_width=True)
33
-
34
- # Phase 2: Backtesting
35
- with st.spinner("Calculating Trend Strategy..."):
36
- results = run_backtest(df, initial_capital, vol_target, lookback)
37
- metrics = compute_metrics(results["returns"])
38
-
39
- # Display Results
40
- st.success("Analysis Complete")
41
- c1, c2, c3 = st.columns(3)
42
- c1.metric("CAGR", f"{metrics['cagr']:.2%}")
43
- c2.metric("Sharpe", f"{metrics['sharpe']:.2f}")
44
- c3.metric("Max Drawdown", f"{metrics['max_dd']:.2%}")
45
-
46
- st.line_chart(results["equity_curve"])
47
-
48
- # Phase 3: HF Sync
49
- if hf_repo and hf_token:
50
- with st.spinner("Syncing to Hugging Face..."):
51
- push_to_hf(df, hf_repo, hf_token)
52
- st.sidebar.success("✅ Dataset Synced!")
53
- else:
54
- st.error("Data fetch failed. Verify ticker symbols.")
 
"""Streamlit front-end for the P2 ETF Trend Suite.

Wires the Stooq/FRED data refresh to the trend engine and renders the
equity-vs-SPY comparison plus next-day target allocations.
"""
import streamlit as st
import pandas as pd
from pathlib import Path

from data.loader import refresh_market_data, X_EQUITY_TICKERS, FI_TICKERS
from engine.trend_engine import run_trend_module

st.set_page_config(layout="wide", page_title="P2 ETF Trend Suite")

st.sidebar.title("Settings")
vol_target = st.sidebar.slider("Annual Vol Target", 0.05, 0.25, 0.126)

if st.sidebar.button("🔄 Refresh Market Data"):
    refresh_market_data()
    st.sidebar.success("Data Updated from Stooq/SOFR!")

if st.button("▶ Run All Modules"):
    # Guard: the engine reads the locally cached CSV, which only exists
    # after at least one successful "Refresh Market Data" run.  Without
    # this check the app dies with an unhandled FileNotFoundError.
    if not Path("market_data.csv").exists():
        st.error("market_data.csv not found — click '🔄 Refresh Market Data' first.")
        st.stop()

    data = pd.read_csv("market_data.csv", index_col=0, parse_dates=True)

    # Run Modules
    eq_res = run_trend_module(data[X_EQUITY_TICKERS], data['SOFR_ANNUAL'], vol_target)
    fi_res = run_trend_module(data[FI_TICKERS], data['SOFR_ANNUAL'], vol_target)

    # Performance Comparison
    spy_curve = (1 + data['SPY'].pct_change()).cumprod()
    comparison = pd.DataFrame({
        "X-ETF Strategy": eq_res['curve'],
        "SPY Benchmark": spy_curve
    }).dropna()

    st.header("📈 Performance: Equity Strategy vs. SPY")
    st.line_chart(comparison)

    # Target Allocations
    col1, col2 = st.columns(2)
    with col1:
        st.subheader("🛡️ Equity Allocation (Next Day)")
        st.dataframe(eq_res['alloc'][eq_res['alloc']['Weight (%)'] > 0])
    with col2:
        st.subheader("🏦 FI Comparison Allocation")
        st.dataframe(fi_res['alloc'][fi_res['alloc']['Weight (%)'] > 0])
 
 
 
hf_space/hf_space/data/loader.py CHANGED
@@ -1,56 +1,32 @@
1
- import pandas as pd
2
  import pandas_datareader.data as web
3
  import yfinance as yf
4
- from datasets import Dataset
5
  import streamlit as st
6
- from datetime import datetime
7
 
8
- # Combined Universe (All will attempt Stooq first)
9
- TICKERS = ["SPY", "QQQ", "IWM", "TLT", "IEF", "SHY", "GLD"]
 
 
 
 
10
 
11
- def load_data(tickers=TICKERS):
12
- """Fetches data from Stooq with yfinance fallback."""
13
- all_series = {}
14
 
15
- for ticker in tickers:
16
- success = False
17
- # 1. Primary: Stooq
 
 
18
  try:
19
- # Stooq format: TICKER.US (e.g., TLT.US)
20
- stooq_symbol = f"{ticker}.US"
21
- df_stooq = web.DataReader(stooq_symbol, 'stooq')
22
 
23
- if not df_stooq.empty:
24
- # Stooq returns newest data first; sort to ascending for backtests
25
- all_series[ticker] = df_stooq['Close'].sort_index()
26
- st.toast(f"✅ {ticker} loaded from Stooq")
27
- success = True
28
- except Exception as e:
29
- print(f"Stooq failed for {ticker}: {e}")
30
-
31
- # 2. Fallback: yfinance
32
- if not success:
33
- try:
34
- yf_df = yf.download(ticker, period="max", progress=False)
35
- if not yf_df.empty:
36
- # Use Adj Close to account for dividends/splits
37
- all_series[ticker] = yf_df['Adj Close']
38
- st.toast(f"⚠️ {ticker} loaded from yfinance (Fallback)")
39
- success = True
40
- except Exception as e:
41
- st.error(f"❌ Critical: Could not load {ticker} from any source.")
42
-
43
- if all_series:
44
- # Align all tickers on the same dates and drop missing values
45
- return pd.concat(all_series, axis=1).dropna()
46
- return pd.DataFrame()
47
-
48
- def push_to_hf(df, repo_id, token):
49
- """Pushes the current dataframe to Hugging Face Hub."""
50
- # Ensure Date is a column, not an index, for HF compatibility
51
- hf_export = df.reset_index()
52
- hf_export.columns = [str(col) for col in hf_export.columns] # Ensure string columns
53
 
54
- dataset = Dataset.from_pandas(hf_export)
55
- dataset.push_to_hub(repo_id, token=token)
56
- return True
 
 
 
import pandas_datareader.data as web
import yfinance as yf
import pandas as pd
import streamlit as st

# 27 "X-" EQUITY ETFS
X_EQUITY_TICKERS = [
    "XLK", "XLY", "XLP", "XLE", "XLV", "XLI", "XLB", "XLRE", "XLU", "XLC", "XLF",
    "XBI", "XME", "XOP", "XHB", "XSD", "XRT", "XPH", "XES", "XAR", "XHS", "XHE",
    "XSW", "XTN", "XTL", "XNTK", "XITK"
]

# 15 FIXED INCOME / COMPARISON
FI_TICKERS = ["TLT", "IEF", "TIP", "TBT", "GLD", "SLV", "VGIT", "VCLT", "VCIT", "HYG", "PFF", "MBB", "VNQ", "LQD", "AGG"]


def refresh_market_data():
    """Syncs Stooq/FRED data to local CSV and HF.

    Downloads daily closes for the full universe (Stooq primary,
    yfinance fallback), appends the annualized SOFR rate from FRED,
    writes the merged frame to ``market_data.csv`` and returns it.
    """
    all_prices = {}
    # Download all groups + SPY Benchmark (sorted for a stable column order).
    for t in sorted(set(X_EQUITY_TICKERS + FI_TICKERS + ["SPY"])):
        try:
            all_prices[t] = web.DataReader(f"{t}.US", "stooq")['Close']
        except Exception:
            # Fallback: yfinance.  auto_adjust=False keeps the 'Adj Close'
            # column — newer yfinance releases default to auto_adjust=True,
            # which drops it and would raise a KeyError here.
            try:
                all_prices[t] = yf.download(t, progress=False, auto_adjust=False)['Adj Close']
            except Exception as exc:
                # Skip one bad ticker rather than aborting the whole refresh.
                st.warning(f"Could not load {t} from Stooq or yfinance: {exc}")

    # Fetch SOFR (Cash Yield) from FRED
    sofr = web.DataReader('SOFR', 'fred').ffill()

    df = pd.DataFrame(all_prices).sort_index().ffill()
    # FRED returns a one-column DataFrame; take the series, convert the
    # percentage quote to a decimal rate, and align + forward-fill onto
    # the price index so non-FRED dates don't become NaN downstream.
    df['SOFR_ANNUAL'] = (sofr['SOFR'] / 100).reindex(df.index).ffill()
    df.to_csv("market_data.csv")
    return df
hf_space/hf_space/hf_space/README.md CHANGED
@@ -1,19 +1,15 @@
1
  ---
2
- title: P2 ETF TREND SUITE
3
- emoji: 🚀
4
- colorFrom: red
5
- colorTo: red
6
  sdk: docker
7
- app_port: 8501
8
- tags:
9
- - streamlit
10
  pinned: false
11
- short_description: Streamlit template space
12
  ---
13
 
14
- # Welcome to Streamlit!
 
15
 
16
- Edit `/src/streamlit_app.py` to customize this app to your heart's desire. :heart:
17
-
18
- If you have any questions, checkout our [documentation](https://docs.streamlit.io) and [community
19
- forums](https://discuss.streamlit.io).
 
1
  ---
2
+ title: P2 ETF Trend Suite
3
+ emoji: 📊
4
+ colorFrom: blue
5
+ colorTo: indigo
6
  sdk: docker
7
+ app_port: 7860
 
 
8
  pinned: false
 
9
  ---
10
 
11
+ # 📊 P2 ETF Trend Suite
12
+ Institutional ETF Trend + Volatility Targeting Engine.
13
 
14
+ ### 🚀 Setup Info
15
+ This Space runs a Dockerized Streamlit app. It uses **Stooq** for market data with **yfinance** as a fallback.
 
 
hf_space/hf_space/hf_space/hf_space/Dockerfile CHANGED
@@ -1,26 +1,31 @@
1
- FROM python:3.10
 
 
 
 
 
 
2
 
3
  WORKDIR /app
4
 
5
- # Copy requirements first (better layer caching)
 
 
 
 
 
 
6
  COPY requirements.txt .
7
 
8
- RUN pip install --upgrade pip
9
- RUN pip install --no-cache-dir -r requirements.txt
 
10
 
11
- # Copy full project
12
  COPY . .
13
 
 
14
  EXPOSE 7860
15
 
16
- ENV STREAMLIT_SERVER_PORT=7860
17
- ENV STREAMLIT_SERVER_ADDRESS=0.0.0.0
18
- ENV STREAMLIT_BROWSER_GATHER_USAGE_STATS=false
19
-
20
- # Diagnostic startup command
21
- CMD ["bash", "-c", "echo '===== CONTAINER BOOTING ====='; \
22
- echo 'Python Version:'; python -V; \
23
- echo 'Current Directory:'; pwd; \
24
- echo 'Directory Listing:'; ls -la; \
25
- echo 'Starting Streamlit...'; \
26
- python -m streamlit run app.py --server.headless=true"]
 
1
+ # Use a lightweight but stable Python base
2
+ FROM python:3.10-slim
3
+
4
+ # Set environment variables for speed and logging
5
+ ENV PYTHONUNBUFFERED=1 \
6
+ PYTHONDONTWRITEBYTECODE=1 \
7
+ PIP_NO_CACHE_DIR=1
8
 
9
  WORKDIR /app
10
 
11
+ # Install system dependencies needed for pandas/datareader
12
+ RUN apt-get update && apt-get install -y \
13
+ build-essential \
14
+ curl \
15
+ && rm -rf /var/lib/apt/lists/*
16
+
17
+ # Copy only requirements first to leverage Docker cache
18
  COPY requirements.txt .
19
 
20
+ # Install dependencies (use --no-cache-dir to keep image small)
21
+ RUN pip install --upgrade pip && \
22
+ pip install -r requirements.txt
23
 
24
+ # Copy the rest of the application
25
  COPY . .
26
 
27
+ # Ensure the app runs on the port HF expects (7860 for Docker)
28
  EXPOSE 7860
29
 
30
+ # Correct entrypoint for Streamlit in a container
31
+ ENTRYPOINT ["streamlit", "run", "app.py", "--server.port=7860", "--server.address=0.0.0.0"]
 
 
 
 
 
 
 
 
 
hf_space/hf_space/hf_space/hf_space/hf_space/engine/engine/__init__.py ADDED
@@ -0,0 +1 @@
 
 
1
+
hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/data/data/__init__.py ADDED
@@ -0,0 +1 @@
 
 
1
+
hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/analytics/analytics/__init__.py ADDED
@@ -0,0 +1 @@
 
 
1
+
hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/app.py CHANGED
@@ -1,80 +1,54 @@
1
  import streamlit as st
2
 
3
- # =====================================================
4
- # PAGE CONFIG (ONLY LIGHT CODE HERE)
5
- # =====================================================
6
-
7
- st.set_page_config(
8
- page_title="P2 ETF Trend Suite",
9
- layout="wide",
10
- )
11
 
12
  st.title("📊 P2 ETF Trend Suite")
13
- st.markdown("Institutional ETF Trend + Volatility Targeting Engine")
14
-
15
- # =====================================================
16
- # SIDEBAR
17
- # =====================================================
18
-
19
- st.sidebar.header("Strategy Controls")
20
-
21
- initial_capital = st.sidebar.number_input(
22
- "Initial Capital",
23
- value=100000,
24
- step=10000,
25
- )
26
 
27
- vol_target = st.sidebar.slider(
28
- "Target Annual Volatility",
29
- 0.05, 0.30, 0.15
30
- )
31
-
32
- lookback = st.sidebar.slider(
33
- "Momentum Lookback (days)",
34
- 50, 300, 200
35
- )
36
-
37
- run_button = st.sidebar.button("▶ Run Backtest")
38
 
39
  st.sidebar.markdown("---")
40
- st.sidebar.info("Backtest runs only when button is pressed.")
 
 
41
 
42
- # =====================================================
43
- # EXECUTION BLOCK
44
- # =====================================================
45
 
46
  if run_button:
 
 
 
47
 
48
- with st.spinner("Loading engine..."):
49
-
50
- # Lazy imports happen HERE
51
- from engine.backtest import run_backtest
52
- from data.loader import load_data
53
- from analytics.metrics import compute_metrics
54
-
55
- with st.spinner("Loading market data..."):
56
  df = load_data()
57
-
58
- with st.spinner("Running strategy..."):
59
- results = run_backtest(
60
- df=df,
61
- initial_capital=initial_capital,
62
- vol_target=vol_target,
63
- lookback=lookback,
64
- )
65
-
66
- metrics = compute_metrics(results["returns"])
67
-
68
- st.success("Backtest Complete")
69
-
70
- col1, col2, col3, col4 = st.columns(4)
71
- col1.metric("CAGR", f"{metrics['cagr']:.2%}")
72
- col2.metric("Sharpe", f"{metrics['sharpe']:.2f}")
73
- col3.metric("Max Drawdown", f"{metrics['max_dd']:.2%}")
74
- col4.metric("Volatility", f"{metrics['vol']:.2%}")
75
-
76
- st.subheader("Equity Curve")
77
- st.line_chart(results["equity_curve"])
78
-
79
- else:
80
- st.info("Configure parameters and click Run Backtest.")
 
 
 
"""Streamlit UI: fetch market data, run the trend backtest, optionally sync to HF Hub."""
import streamlit as st

st.set_page_config(page_title="P2 ETF Trend Suite", layout="wide")

st.title("📊 P2 ETF Trend Suite")
st.markdown("Stooq-Primary Data Engine + HF Integration")

# Sidebar Controls
st.sidebar.header("Parameters")
initial_capital = st.sidebar.number_input("Initial Capital", value=100000)
vol_target = st.sidebar.slider("Target Volatility", 0.05, 0.30, 0.15)
lookback = st.sidebar.slider("Lookback (Days)", 50, 300, 200)

st.sidebar.markdown("---")
st.sidebar.header("Hugging Face Sync")
hf_repo = st.sidebar.text_input("Repo ID", placeholder="user/dataset-name")
hf_token = st.sidebar.text_input("HF Token", type="password")

run_button = st.sidebar.button("▶ Run Full Process")

if run_button:
    # Heavy imports are deferred until the button is pressed so the page
    # itself renders quickly on first load.
    from data.loader import load_data, push_to_hf
    from engine.backtest import run_backtest
    from analytics.metrics import compute_metrics

    # Phase 1: Data Fetching
    with st.spinner("Fetching data from Stooq..."):
        df = load_data()

    if not df.empty:
        st.subheader("📈 Market Data Preview")
        st.dataframe(df.tail(5), use_container_width=True)

        # Phase 2: Backtesting
        with st.spinner("Calculating Trend Strategy..."):
            results = run_backtest(df, initial_capital, vol_target, lookback)
            metrics = compute_metrics(results["returns"])

        # Display Results
        st.success("Analysis Complete")
        c1, c2, c3 = st.columns(3)
        c1.metric("CAGR", f"{metrics['cagr']:.2%}")
        c2.metric("Sharpe", f"{metrics['sharpe']:.2f}")
        c3.metric("Max Drawdown", f"{metrics['max_dd']:.2%}")

        st.line_chart(results["equity_curve"])

        # Phase 3: HF Sync — only attempted when both repo and token were
        # supplied in the sidebar; otherwise silently skipped.
        if hf_repo and hf_token:
            with st.spinner("Syncing to Hugging Face..."):
                push_to_hf(df, hf_repo, hf_token)
                st.sidebar.success(" Dataset Synced!")
    else:
        st.error("Data fetch failed. Verify ticker symbols.")
hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/data/loader.py CHANGED
@@ -1,15 +1,56 @@
1
- def load_data():
2
- import pandas as pd
3
- import yfinance as yf
 
 
 
4
 
5
- tickers = ["SPY", "QQQ", "TLT"]
 
6
 
7
- data = yf.download(
8
- tickers,
9
- start="2015-01-01",
10
- progress=False,
11
- )["Adj Close"]
12
 
13
- data = data.dropna()
 
 
 
 
 
 
 
 
 
 
 
 
 
 
14
 
15
- return data
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
import pandas as pd
import pandas_datareader.data as web
import yfinance as yf
from datasets import Dataset
import streamlit as st
from datetime import datetime

# Combined Universe (All will attempt Stooq first)
TICKERS = ["SPY", "QQQ", "IWM", "TLT", "IEF", "SHY", "GLD"]


def load_data(tickers=TICKERS):
    """Fetches data from Stooq with yfinance fallback.

    Returns a DataFrame of daily closes (one column per ticker) aligned
    on common dates, or an empty DataFrame if nothing could be loaded.
    """
    all_series = {}

    for ticker in tickers:
        success = False
        # 1. Primary: Stooq
        try:
            # Stooq format: TICKER.US (e.g., TLT.US)
            stooq_symbol = f"{ticker}.US"
            df_stooq = web.DataReader(stooq_symbol, 'stooq')

            if not df_stooq.empty:
                # Stooq returns newest data first; sort to ascending for backtests
                all_series[ticker] = df_stooq['Close'].sort_index()
                st.toast(f"✅ {ticker} loaded from Stooq")
                success = True
        except Exception as e:
            print(f"Stooq failed for {ticker}: {e}")

        # 2. Fallback: yfinance
        if not success:
            try:
                # auto_adjust=False keeps the 'Adj Close' column; newer
                # yfinance releases default to auto_adjust=True, which
                # drops it and would break the selection below.
                yf_df = yf.download(ticker, period="max", progress=False, auto_adjust=False)
                if not yf_df.empty:
                    # Use Adj Close to account for dividends/splits
                    all_series[ticker] = yf_df['Adj Close']
                    st.toast(f"⚠️ {ticker} loaded from yfinance (Fallback)")
                    success = True
            except Exception as e:
                st.error(f"❌ Critical: Could not load {ticker} from any source.")

    if all_series:
        # Align all tickers on the same dates and drop missing values
        return pd.concat(all_series, axis=1).dropna()
    return pd.DataFrame()


def push_to_hf(df, repo_id, token):
    """Pushes the current dataframe to Hugging Face Hub."""
    # Ensure Date is a column, not an index, for HF compatibility
    hf_export = df.reset_index()
    hf_export.columns = [str(col) for col in hf_export.columns]  # Ensure string columns

    dataset = Dataset.from_pandas(hf_export)
    dataset.push_to_hub(repo_id, token=token)
    return True
hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/Dockerfile CHANGED
@@ -2,11 +2,13 @@ FROM python:3.10
2
 
3
  WORKDIR /app
4
 
 
5
  COPY requirements.txt .
6
 
7
  RUN pip install --upgrade pip
8
  RUN pip install --no-cache-dir -r requirements.txt
9
 
 
10
  COPY . .
11
 
12
  EXPOSE 7860
@@ -15,4 +17,10 @@ ENV STREAMLIT_SERVER_PORT=7860
15
  ENV STREAMLIT_SERVER_ADDRESS=0.0.0.0
16
  ENV STREAMLIT_BROWSER_GATHER_USAGE_STATS=false
17
 
18
- CMD streamlit run app.py
 
 
 
 
 
 
 
2
 
3
  WORKDIR /app
4
 
5
+ # Copy requirements first (better layer caching)
6
  COPY requirements.txt .
7
 
8
  RUN pip install --upgrade pip
9
  RUN pip install --no-cache-dir -r requirements.txt
10
 
11
+ # Copy full project
12
  COPY . .
13
 
14
  EXPOSE 7860
 
17
  ENV STREAMLIT_SERVER_ADDRESS=0.0.0.0
18
  ENV STREAMLIT_BROWSER_GATHER_USAGE_STATS=false
19
 
20
+ # Diagnostic startup command
21
+ CMD ["bash", "-c", "echo '===== CONTAINER BOOTING ====='; \
22
+ echo 'Python Version:'; python -V; \
23
+ echo 'Current Directory:'; pwd; \
24
+ echo 'Directory Listing:'; ls -la; \
25
+ echo 'Starting Streamlit...'; \
26
+ python -m streamlit run app.py --server.headless=true"]
hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/data/loader.py ADDED
@@ -0,0 +1,15 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
def load_data():
    """Download adjusted closes for a small demo universe.

    Returns a DataFrame of 'Adj Close' prices for SPY/QQQ/TLT from
    2015-01-01 onward, with incomplete rows dropped.
    """
    import pandas as pd
    import yfinance as yf

    tickers = ["SPY", "QQQ", "TLT"]

    # auto_adjust=False keeps the 'Adj Close' column: recent yfinance
    # releases default to auto_adjust=True, which removes that column
    # and would make the selection below raise a KeyError.
    data = yf.download(
        tickers,
        start="2015-01-01",
        progress=False,
        auto_adjust=False,
    )["Adj Close"]

    data = data.dropna()

    return data
hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/analytics/metrics.py CHANGED
@@ -1,26 +1,21 @@
1
- import numpy as np
2
- import pandas as pd
3
 
4
- def compute_metrics(returns, sofr):
5
 
6
- sofr_daily = sofr.reindex(returns.index).fillna(method="ffill")["sofr"] / 252
7
- excess = returns - sofr_daily
8
 
9
- sharpe = np.sqrt(252) * excess.mean() / excess.std()
 
 
10
 
11
- equity = (1 + returns).cumprod()
12
- cagr = equity.iloc[-1] ** (252 / len(equity)) - 1
13
-
14
- vol = returns.std() * np.sqrt(252)
15
-
16
- rolling_max = equity.cummax()
17
- drawdown = equity / rolling_max - 1
18
  max_dd = drawdown.min()
19
 
20
  return {
21
- "sharpe": sharpe,
22
  "cagr": cagr,
23
  "vol": vol,
24
- "max_dd": max_dd
 
25
  }
26
-
 
def compute_metrics(returns):
    """Compute annualized performance metrics from a daily return series.

    Parameters
    ----------
    returns : pd.Series
        Daily simple strategy returns.

    Returns
    -------
    dict
        Keys: "cagr" (geometric annualized growth), "vol" (annualized
        standard deviation), "sharpe" (cagr / vol, 0 when vol is 0),
        "max_dd" (most negative peak-to-trough drawdown, <= 0).
    """
    ann_factor = 252

    # Guard: an empty series would otherwise divide by len(returns) == 0
    # and cascade NaN into the UI metrics.
    if len(returns) == 0:
        return {"cagr": 0.0, "vol": 0.0, "sharpe": 0.0, "max_dd": 0.0}

    cumulative = (1 + returns).cumprod()
    peak = cumulative.cummax()
    drawdown = (cumulative - peak) / peak
    max_dd = drawdown.min()

    # Geometric CAGR from total compounded growth.  The previous
    # (1 + mean) ** 252 form compounds the arithmetic mean and
    # overstates growth for volatile series (volatility drag).
    cagr = cumulative.iloc[-1] ** (ann_factor / len(returns)) - 1
    vol = returns.std() * (ann_factor ** 0.5)
    sharpe = cagr / vol if vol != 0 else 0

    return {
        "cagr": cagr,
        "vol": vol,
        "sharpe": sharpe,
        "max_dd": max_dd,
    }
 
hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/engine/backtest.py CHANGED
@@ -1,46 +1,20 @@
1
- import pandas as pd
2
- import numpy as np
3
-
4
  def run_backtest(df, initial_capital, vol_target, lookback):
5
 
6
- df = df.sort_values(["ticker", "date"])
7
- prices = df.pivot(index="date", columns="ticker", values="adjusted_close")
8
- returns = prices.pct_change().dropna()
9
-
10
- momentum = prices.pct_change(lookback)
11
- signal = momentum.rank(axis=1, ascending=False)
12
- top = signal <= 3
13
-
14
- weights = top.div(top.sum(axis=1), axis=0)
15
-
16
- rolling_cov = returns.rolling(60).cov()
17
- vol = []
18
 
19
- for date in weights.index:
20
- if date not in rolling_cov.index:
21
- vol.append(0)
22
- continue
23
 
24
- w = weights.loc[date].values
25
- cov = rolling_cov.loc[date].values.reshape(len(w), len(w))
26
- portfolio_vol = np.sqrt(w @ cov @ w) * np.sqrt(252)
27
 
28
- scale = vol_target / portfolio_vol if portfolio_vol > 0 else 0
29
- weights.loc[date] = w * scale
30
- vol.append(portfolio_vol)
31
 
32
  strategy_returns = (weights.shift(1) * returns).sum(axis=1)
33
- equity_curve = (1 + strategy_returns).cumprod() * initial_capital
34
 
35
- latest_weights = weights.iloc[-1]
36
- allocation = pd.DataFrame({
37
- "Ticker": latest_weights.index,
38
- "Weight": latest_weights.values
39
- }).sort_values("Weight", ascending=False)
40
 
41
  return {
42
  "returns": strategy_returns,
43
  "equity_curve": equity_curve,
44
- "latest_allocation": allocation
45
  }
46
-
 
 
 
 
1
  def run_backtest(df, initial_capital, vol_target, lookback):
2
 
3
+ import numpy as np
4
+ import pandas as pd
 
 
 
 
 
 
 
 
 
 
5
 
6
+ returns = df.pct_change().dropna()
 
 
 
7
 
8
+ momentum = df.pct_change(lookback)
 
 
9
 
10
+ weights = (momentum > 0).astype(int)
11
+ weights = weights.div(weights.sum(axis=1), axis=0).fillna(0)
 
12
 
13
  strategy_returns = (weights.shift(1) * returns).sum(axis=1)
 
14
 
15
+ equity_curve = (1 + strategy_returns).cumprod() * initial_capital
 
 
 
 
16
 
17
  return {
18
  "returns": strategy_returns,
19
  "equity_curve": equity_curve,
 
20
  }
 
hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/app.py CHANGED
@@ -1,7 +1,7 @@
1
  import streamlit as st
2
 
3
  # =====================================================
4
- # PAGE CONFIG (must be first Streamlit command)
5
  # =====================================================
6
 
7
  st.set_page_config(
@@ -13,7 +13,7 @@ st.title("📊 P2 ETF Trend Suite")
13
  st.markdown("Institutional ETF Trend + Volatility Targeting Engine")
14
 
15
  # =====================================================
16
- # SIDEBAR CONTROLS
17
  # =====================================================
18
 
19
  st.sidebar.header("Strategy Controls")
@@ -26,16 +26,12 @@ initial_capital = st.sidebar.number_input(
26
 
27
  vol_target = st.sidebar.slider(
28
  "Target Annual Volatility",
29
- min_value=0.05,
30
- max_value=0.30,
31
- value=0.15,
32
  )
33
 
34
  lookback = st.sidebar.slider(
35
  "Momentum Lookback (days)",
36
- min_value=50,
37
- max_value=300,
38
- value=200,
39
  )
40
 
41
  run_button = st.sidebar.button("▶ Run Backtest")
@@ -44,34 +40,22 @@ st.sidebar.markdown("---")
44
  st.sidebar.info("Backtest runs only when button is pressed.")
45
 
46
  # =====================================================
47
- # MAIN EXECUTION (runs ONLY when button clicked)
48
  # =====================================================
49
 
50
  if run_button:
51
 
52
- # Import heavy modules only when needed
53
- from data.hf_store import load_dataset
54
- from data.updater import update_market_data
55
- from data.fred import get_sofr_series
56
- from engine.backtest import run_backtest
57
- from analytics.metrics import compute_metrics
58
-
59
- # ---------------------------
60
- # Load Dataset
61
- # ---------------------------
62
- with st.spinner("Loading ETF dataset from Hugging Face..."):
63
- df = load_dataset()
64
-
65
- # ---------------------------
66
- # Pull SOFR
67
- # ---------------------------
68
- with st.spinner("Pulling SOFR from FRED..."):
69
- sofr = get_sofr_series()
70
-
71
- # ---------------------------
72
- # Run Backtest
73
- # ---------------------------
74
- with st.spinner("Running backtest engine..."):
75
  results = run_backtest(
76
  df=df,
77
  initial_capital=initial_capital,
@@ -79,38 +63,18 @@ if run_button:
79
  lookback=lookback,
80
  )
81
 
82
- metrics = compute_metrics(results["returns"], sofr)
83
 
84
  st.success("Backtest Complete")
85
 
86
- # =====================================================
87
- # METRICS PANEL
88
- # =====================================================
89
-
90
  col1, col2, col3, col4 = st.columns(4)
91
-
92
  col1.metric("CAGR", f"{metrics['cagr']:.2%}")
93
- col2.metric("Sharpe (SOFR)", f"{metrics['sharpe']:.2f}")
94
  col3.metric("Max Drawdown", f"{metrics['max_dd']:.2%}")
95
  col4.metric("Volatility", f"{metrics['vol']:.2%}")
96
 
97
- st.markdown("---")
98
-
99
- # =====================================================
100
- # EQUITY CURVE
101
- # =====================================================
102
-
103
  st.subheader("Equity Curve")
104
  st.line_chart(results["equity_curve"])
105
 
106
- st.markdown("---")
107
-
108
- # =====================================================
109
- # ALLOCATION TABLE
110
- # =====================================================
111
-
112
- st.subheader("Latest Portfolio Allocation")
113
- st.dataframe(results["latest_allocation"], use_container_width=True)
114
-
115
  else:
116
- st.info("Configure parameters in the sidebar and click **Run Backtest**.")
 
1
  import streamlit as st
2
 
3
  # =====================================================
4
+ # PAGE CONFIG (ONLY LIGHT CODE HERE)
5
  # =====================================================
6
 
7
  st.set_page_config(
 
13
  st.markdown("Institutional ETF Trend + Volatility Targeting Engine")
14
 
15
  # =====================================================
16
+ # SIDEBAR
17
  # =====================================================
18
 
19
  st.sidebar.header("Strategy Controls")
 
26
 
27
  vol_target = st.sidebar.slider(
28
  "Target Annual Volatility",
29
+ 0.05, 0.30, 0.15
 
 
30
  )
31
 
32
  lookback = st.sidebar.slider(
33
  "Momentum Lookback (days)",
34
+ 50, 300, 200
 
 
35
  )
36
 
37
  run_button = st.sidebar.button("▶ Run Backtest")
 
40
  st.sidebar.info("Backtest runs only when button is pressed.")
41
 
42
  # =====================================================
43
+ # EXECUTION BLOCK
44
  # =====================================================
45
 
46
  if run_button:
47
 
48
+ with st.spinner("Loading engine..."):
49
+
50
+ # Lazy imports happen HERE
51
+ from engine.backtest import run_backtest
52
+ from data.loader import load_data
53
+ from analytics.metrics import compute_metrics
54
+
55
+ with st.spinner("Loading market data..."):
56
+ df = load_data()
57
+
58
+ with st.spinner("Running strategy..."):
 
 
 
 
 
 
 
 
 
 
 
 
59
  results = run_backtest(
60
  df=df,
61
  initial_capital=initial_capital,
 
63
  lookback=lookback,
64
  )
65
 
66
+ metrics = compute_metrics(results["returns"])
67
 
68
  st.success("Backtest Complete")
69
 
 
 
 
 
70
  col1, col2, col3, col4 = st.columns(4)
 
71
  col1.metric("CAGR", f"{metrics['cagr']:.2%}")
72
+ col2.metric("Sharpe", f"{metrics['sharpe']:.2f}")
73
  col3.metric("Max Drawdown", f"{metrics['max_dd']:.2%}")
74
  col4.metric("Volatility", f"{metrics['vol']:.2%}")
75
 
 
 
 
 
 
 
76
  st.subheader("Equity Curve")
77
  st.line_chart(results["equity_curve"])
78
 
 
 
 
 
 
 
 
 
 
79
  else:
80
+ st.info("Configure parameters and click Run Backtest.")
hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/Dockerfile CHANGED
@@ -1,13 +1,18 @@
1
- FROM python:3.10-slim
2
 
3
  WORKDIR /app
4
 
5
  COPY requirements.txt .
6
 
 
7
  RUN pip install --no-cache-dir -r requirements.txt
8
 
9
  COPY . .
10
 
11
  EXPOSE 7860
12
 
13
- CMD ["streamlit", "run", "app.py", "--server.port=7860", "--server.address=0.0.0.0"]
 
 
 
 
 
1
FROM python:3.10

WORKDIR /app

# Install dependencies first so Docker layer caching survives code changes.
COPY requirements.txt .

RUN pip install --upgrade pip
RUN pip install --no-cache-dir -r requirements.txt

COPY . .

EXPOSE 7860

# Streamlit picks up its server settings from these env vars.
ENV STREAMLIT_SERVER_PORT=7860
ENV STREAMLIT_SERVER_ADDRESS=0.0.0.0
ENV STREAMLIT_BROWSER_GATHER_USAGE_STATS=false

# Exec form (not shell form) so streamlit is PID 1 and receives SIGTERM
# directly, letting the container stop cleanly.
CMD ["streamlit", "run", "app.py"]
hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/app.py CHANGED
@@ -1,70 +1,116 @@
1
  import streamlit as st
2
- import os
3
 
4
- from data.hf_store import load_dataset
5
- from data.updater import update_market_data
6
- from data.fred import get_sofr_series
7
- from engine.backtest import run_backtest
8
- from analytics.metrics import compute_metrics
9
 
10
- st.set_page_config(layout="wide")
 
 
 
11
 
12
  st.title("📊 P2 ETF Trend Suite")
13
  st.markdown("Institutional ETF Trend + Volatility Targeting Engine")
14
 
15
- # ========================
16
- # Sidebar Controls
17
- # ========================
18
 
19
- st.sidebar.header("Controls")
20
 
21
- initial_capital = st.sidebar.number_input("Initial Capital", value=100000, step=10000)
22
- vol_target = st.sidebar.slider("Target Annual Volatility", 0.05, 0.30, 0.15)
23
- lookback = st.sidebar.slider("Momentum Lookback (days)", 50, 300, 200)
 
 
24
 
25
- refresh = st.sidebar.button("🔄 Refresh Market Data")
 
 
 
 
 
26
 
27
- # ========================
28
- # Data Load
29
- # ========================
 
 
 
30
 
31
- with st.spinner("Loading ETF dataset..."):
32
- df = load_dataset()
33
 
34
- if refresh:
35
- with st.spinner("Updating market data from yfinance..."):
36
- df = update_market_data(df)
37
- st.success("Dataset updated successfully.")
38
 
39
- # ========================
40
- # Backtest
41
- # ========================
42
 
43
- with st.spinner("Pulling SOFR from FRED..."):
44
- sofr = get_sofr_series()
45
 
46
- with st.spinner("Running backtest..."):
47
- results = run_backtest(
48
- df=df,
49
- initial_capital=initial_capital,
50
- vol_target=vol_target,
51
- lookback=lookback,
52
- )
53
 
54
- metrics = compute_metrics(results["returns"], sofr)
 
 
 
 
55
 
56
- # ========================
57
- # Layout
58
- # ========================
 
 
59
 
60
- col1, col2, col3, col4 = st.columns(4)
 
 
 
 
 
 
 
 
 
61
 
62
- col1.metric("CAGR", f"{metrics['cagr']:.2%}")
63
- col2.metric("Sharpe (SOFR)", f"{metrics['sharpe']:.2f}")
64
- col3.metric("Max Drawdown", f"{metrics['max_dd']:.2%}")
65
- col4.metric("Volatility", f"{metrics['vol']:.2%}")
66
 
67
- st.line_chart(results["equity_curve"])
68
 
69
- st.subheader("Current Allocation")
70
- st.dataframe(results["latest_allocation"])
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
import streamlit as st

# -----------------------------------------------------
# Page config — Streamlit requires this to be the first
# st.* call in the script.
# -----------------------------------------------------
st.set_page_config(
    page_title="P2 ETF Trend Suite",
    layout="wide",
)

st.title("📊 P2 ETF Trend Suite")
st.markdown("Institutional ETF Trend + Volatility Targeting Engine")

# -----------------------------------------------------
# Sidebar: strategy parameters + run trigger
# -----------------------------------------------------
st.sidebar.header("Strategy Controls")

initial_capital = st.sidebar.number_input(
    "Initial Capital",
    value=100000,
    step=10000,
)

vol_target = st.sidebar.slider(
    "Target Annual Volatility",
    min_value=0.05,
    max_value=0.30,
    value=0.15,
)

lookback = st.sidebar.slider(
    "Momentum Lookback (days)",
    min_value=50,
    max_value=300,
    value=200,
)

run_button = st.sidebar.button(" Run Backtest")

st.sidebar.markdown("---")
st.sidebar.info("Backtest runs only when button is pressed.")

# -----------------------------------------------------
# Main execution — nothing heavy happens until the user
# presses the run button (keeps cold-start fast).
# -----------------------------------------------------
if run_button:

    # Heavy project modules are imported lazily on demand.
    from data.hf_store import load_dataset
    from data.updater import update_market_data
    from data.fred import get_sofr_series
    from engine.backtest import run_backtest
    from analytics.metrics import compute_metrics

    with st.spinner("Loading ETF dataset from Hugging Face..."):
        df = load_dataset()

    with st.spinner("Pulling SOFR from FRED..."):
        sofr = get_sofr_series()

    with st.spinner("Running backtest engine..."):
        results = run_backtest(
            df=df,
            initial_capital=initial_capital,
            vol_target=vol_target,
            lookback=lookback,
        )

    metrics = compute_metrics(results["returns"], sofr)

    st.success("Backtest Complete")

    # KPI strip: four headline statistics side by side.
    col1, col2, col3, col4 = st.columns(4)

    col1.metric("CAGR", f"{metrics['cagr']:.2%}")
    col2.metric("Sharpe (SOFR)", f"{metrics['sharpe']:.2f}")
    col3.metric("Max Drawdown", f"{metrics['max_dd']:.2%}")
    col4.metric("Volatility", f"{metrics['vol']:.2%}")

    st.markdown("---")

    st.subheader("Equity Curve")
    st.line_chart(results["equity_curve"])

    st.markdown("---")

    st.subheader("Latest Portfolio Allocation")
    st.dataframe(results["latest_allocation"], use_container_width=True)

else:
    st.info("Configure parameters in the sidebar and click **Run Backtest**.")
hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/Dockerfile CHANGED
@@ -1,20 +1,13 @@
1
- FROM python:3.13.5-slim
2
 
3
  WORKDIR /app
4
 
5
- RUN apt-get update && apt-get install -y \
6
- build-essential \
7
- curl \
8
- git \
9
- && rm -rf /var/lib/apt/lists/*
10
 
11
- COPY requirements.txt ./
12
- COPY src/ ./src/
13
 
14
- RUN pip3 install -r requirements.txt
15
 
16
- EXPOSE 8501
17
 
18
- HEALTHCHECK CMD curl --fail http://localhost:8501/_stcore/health
19
-
20
- ENTRYPOINT ["streamlit", "run", "src/streamlit_app.py", "--server.port=8501", "--server.address=0.0.0.0"]
 
1
+ FROM python:3.10-slim
2
 
3
  WORKDIR /app
4
 
5
+ COPY requirements.txt .
 
 
 
 
6
 
7
+ RUN pip install --no-cache-dir -r requirements.txt
 
8
 
9
+ COPY . .
10
 
11
+ EXPOSE 7860
12
 
13
+ CMD ["streamlit", "run", "app.py", "--server.port=7860", "--server.address=0.0.0.0"]
 
 
hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/app.py CHANGED
@@ -1,35 +1,70 @@
1
  import streamlit as st
 
 
 
 
 
 
 
2
 
3
  st.set_page_config(layout="wide")
4
 
5
- st.title("P2 ETF Trend Suite - Debug Mode")
6
-
7
- try:
8
- from data.hf_store import load_dataset
9
- st.success("hf_store imported successfully")
10
- except Exception as e:
11
- st.error(f"hf_store failed: {e}")
12
-
13
- try:
14
- from data.updater import update_market_data
15
- st.success("updater imported successfully")
16
- except Exception as e:
17
- st.error(f"updater failed: {e}")
18
-
19
- try:
20
- from data.fred import get_sofr_series
21
- st.success("fred imported successfully")
22
- except Exception as e:
23
- st.error(f"fred failed: {e}")
24
-
25
- try:
26
- from engine.backtest import run_backtest
27
- st.success("backtest imported successfully")
28
- except Exception as e:
29
- st.error(f"backtest failed: {e}")
30
-
31
- try:
32
- from analytics.metrics import compute_metrics
33
- st.success("metrics imported successfully")
34
- except Exception as e:
35
- st.error(f"metrics failed: {e}")
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
  import streamlit as st
2
+ import os
3
+
4
+ from data.hf_store import load_dataset
5
+ from data.updater import update_market_data
6
+ from data.fred import get_sofr_series
7
+ from engine.backtest import run_backtest
8
+ from analytics.metrics import compute_metrics
9
 
10
  st.set_page_config(layout="wide")
11
 
12
+ st.title("📊 P2 ETF Trend Suite")
13
+ st.markdown("Institutional ETF Trend + Volatility Targeting Engine")
14
+
15
+ # ========================
16
+ # Sidebar Controls
17
+ # ========================
18
+
19
+ st.sidebar.header("Controls")
20
+
21
+ initial_capital = st.sidebar.number_input("Initial Capital", value=100000, step=10000)
22
+ vol_target = st.sidebar.slider("Target Annual Volatility", 0.05, 0.30, 0.15)
23
+ lookback = st.sidebar.slider("Momentum Lookback (days)", 50, 300, 200)
24
+
25
+ refresh = st.sidebar.button("🔄 Refresh Market Data")
26
+
27
+ # ========================
28
+ # Data Load
29
+ # ========================
30
+
31
+ with st.spinner("Loading ETF dataset..."):
32
+ df = load_dataset()
33
+
34
+ if refresh:
35
+ with st.spinner("Updating market data from yfinance..."):
36
+ df = update_market_data(df)
37
+ st.success("Dataset updated successfully.")
38
+
39
+ # ========================
40
+ # Backtest
41
+ # ========================
42
+
43
+ with st.spinner("Pulling SOFR from FRED..."):
44
+ sofr = get_sofr_series()
45
+
46
+ with st.spinner("Running backtest..."):
47
+ results = run_backtest(
48
+ df=df,
49
+ initial_capital=initial_capital,
50
+ vol_target=vol_target,
51
+ lookback=lookback,
52
+ )
53
+
54
+ metrics = compute_metrics(results["returns"], sofr)
55
+
56
+ # ========================
57
+ # Layout
58
+ # ========================
59
+
60
+ col1, col2, col3, col4 = st.columns(4)
61
+
62
+ col1.metric("CAGR", f"{metrics['cagr']:.2%}")
63
+ col2.metric("Sharpe (SOFR)", f"{metrics['sharpe']:.2f}")
64
+ col3.metric("Max Drawdown", f"{metrics['max_dd']:.2%}")
65
+ col4.metric("Volatility", f"{metrics['vol']:.2%}")
66
+
67
+ st.line_chart(results["equity_curve"])
68
+
69
+ st.subheader("Current Allocation")
70
+ st.dataframe(results["latest_allocation"])
hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/analytics/metrics.py ADDED
@@ -0,0 +1,26 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
import numpy as np
import pandas as pd


def compute_metrics(returns, sofr):
    """Compute headline performance statistics for a daily return series.

    Parameters
    ----------
    returns : pd.Series
        Daily strategy returns indexed by date.
    sofr : pd.DataFrame
        Frame with a ``sofr`` column of annualized risk-free rates in
        decimal form (e.g. 0.05), indexed by date.

    Returns
    -------
    dict
        Keys ``sharpe`` (annualized, vs SOFR), ``cagr``, ``vol``
        (annualized), ``max_dd`` (most negative drawdown).
    """
    # Align SOFR to the return dates and forward-fill gaps (weekends,
    # publication holidays). FIX: ``fillna(method="ffill")`` is deprecated
    # and removed in pandas 3.0 — use ``.ffill()`` (identical semantics).
    sofr_daily = sofr.reindex(returns.index).ffill()["sofr"] / 252
    excess = returns - sofr_daily

    # Annualized Sharpe ratio on excess returns (252 trading days/year).
    sharpe = np.sqrt(252) * excess.mean() / excess.std()

    # Geometric growth, annualized by the number of observed days.
    equity = (1 + returns).cumprod()
    cagr = equity.iloc[-1] ** (252 / len(equity)) - 1

    vol = returns.std() * np.sqrt(252)

    # Max drawdown: worst peak-to-trough decline of the equity curve.
    rolling_max = equity.cummax()
    drawdown = equity / rolling_max - 1
    max_dd = drawdown.min()

    return {
        "sharpe": sharpe,
        "cagr": cagr,
        "vol": vol,
        "max_dd": max_dd,
    }
26
+
hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/app.py ADDED
@@ -0,0 +1,35 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
import streamlit as st

st.set_page_config(layout="wide")

st.title("P2 ETF Trend Suite - Debug Mode")

# Probe each project module independently so that one broken import is
# reported on the page instead of crashing the whole app. The from-import
# statements are kept as-is: their exception text is shown to the user.

try:
    from data.hf_store import load_dataset
    st.success("hf_store imported successfully")
except Exception as exc:
    st.error(f"hf_store failed: {exc}")

try:
    from data.updater import update_market_data
    st.success("updater imported successfully")
except Exception as exc:
    st.error(f"updater failed: {exc}")

try:
    from data.fred import get_sofr_series
    st.success("fred imported successfully")
except Exception as exc:
    st.error(f"fred failed: {exc}")

try:
    from engine.backtest import run_backtest
    st.success("backtest imported successfully")
except Exception as exc:
    st.error(f"backtest failed: {exc}")

try:
    from analytics.metrics import compute_metrics
    st.success("metrics imported successfully")
except Exception as exc:
    st.error(f"metrics failed: {exc}")
hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/data/fred.py ADDED
@@ -0,0 +1,11 @@
 
 
 
 
 
 
 
 
 
 
 
 
1
+ import os
2
+ import pandas as pd
3
+ from fredapi import Fred
4
+
5
def get_sofr_series():
    """Pull the full SOFR rate history from FRED.

    Returns a DataFrame with a single ``sofr`` column holding the
    annualized rate in decimal form (e.g. 0.053), indexed by datetime.
    Reads the API key from the FRED_API_KEY environment variable.
    """
    client = Fred(api_key=os.getenv("FRED_API_KEY"))
    series = client.get_series("SOFR")
    frame = series.to_frame("sofr")
    frame.index = pd.to_datetime(frame.index)
    # FRED publishes the rate as a percentage; convert to decimal.
    frame["sofr"] = frame["sofr"] / 100
    return frame
hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/data/hf_store.py ADDED
@@ -0,0 +1,17 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ import pandas as pd
2
+ from datasets import load_dataset as hf_load_dataset
3
+
4
+ DATASET_PATH = "P2SAMAPA/etf_trend_data"
5
+
6
def load_dataset():
    """Fetch the ETF price dataset from the Hugging Face Hub.

    Returns a long-format DataFrame sorted by (ticker, date), with the
    ``date`` column parsed to datetime.
    """
    ds = hf_load_dataset(DATASET_PATH)

    # A DatasetDict is a mapping of splits; unwrap "train" when present.
    if isinstance(ds, dict) and "train" in ds:
        ds = ds["train"]

    frame = ds.to_pandas()
    frame["date"] = pd.to_datetime(frame["date"])
    frame = frame.sort_values(["ticker", "date"])

    return frame
hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/data/updater.py ADDED
@@ -0,0 +1,55 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ import os
2
+ import pandas as pd
3
+ import yfinance as yf
4
+ from huggingface_hub import HfApi
5
+
6
+ DATASET_PATH = "P2SAMAPA/etf_trend_data"
7
+
8
def update_market_data(df):
    """Append fresh yfinance bars for every ticker and sync to the Hub.

    For each ticker, downloads bars starting the day after its last stored
    date, merges them into *df*, de-duplicates on (date, ticker), writes
    the result to ``updated.parquet`` and uploads it to the dataset repo
    (authenticated via the HF_TOKEN environment variable).

    Returns the merged DataFrame; returns *df* unchanged (and skips the
    upload) when no ticker has new data.
    """
    # yfinance column names -> dataset schema.
    column_map = {
        "Date": "date",
        "Open": "open",
        "High": "high",
        "Low": "low",
        "Close": "close",
        "Adj Close": "adjusted_close",
        "Volume": "volume",
    }

    fresh_frames = []
    for ticker in df["ticker"].unique():
        last_date = df.loc[df["ticker"] == ticker, "date"].max()
        start_date = (last_date + pd.Timedelta(days=1)).strftime("%Y-%m-%d")

        bars = yf.download(ticker, start=start_date, progress=False)
        if bars.empty:
            continue

        bars = bars.reset_index()
        bars["ticker"] = ticker
        bars = bars.rename(columns=column_map)
        fresh_frames.append(bars)

    if not fresh_frames:
        return df

    merged = pd.concat([df, pd.concat(fresh_frames)])
    merged = merged.drop_duplicates(subset=["date", "ticker"])
    merged = merged.sort_values(["ticker", "date"])

    merged.to_parquet("updated.parquet")

    HfApi().upload_file(
        path_or_fileobj="updated.parquet",
        path_in_repo="data/train-00000-of-00001.parquet",
        repo_id=DATASET_PATH,
        repo_type="dataset",
        token=os.getenv("HF_TOKEN"),
    )

    return merged
hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/engine/backtest.py ADDED
@@ -0,0 +1,46 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ import pandas as pd
2
+ import numpy as np
3
+
4
def run_backtest(df, initial_capital, vol_target, lookback):
    """Run a cross-sectional momentum backtest with volatility targeting.

    Parameters
    ----------
    df : pd.DataFrame
        Long-format price history with ``date``, ``ticker`` and
        ``adjusted_close`` columns.
    initial_capital : float
        Starting portfolio value for the equity curve.
    vol_target : float
        Target annualized portfolio volatility (decimal, e.g. 0.15).
    lookback : int
        Momentum lookback window in trading days.

    Returns
    -------
    dict
        ``returns`` (daily strategy returns), ``equity_curve`` (capital
        over time), ``latest_allocation`` (DataFrame of current weights).
    """
    df = df.sort_values(["ticker", "date"])
    prices = df.pivot(index="date", columns="ticker", values="adjusted_close")
    returns = prices.pct_change().dropna()

    # Cross-sectional momentum: hold the top 3 tickers by trailing return.
    momentum = prices.pct_change(lookback)
    rank = momentum.rank(axis=1, ascending=False)
    top = rank <= 3

    # Equal-weight the selected names. FIX: during the warm-up window no
    # ticker is selected, so the division is 0/0; fill those NaNs with a
    # zero weight instead of letting NaN rows propagate.
    weights = top.div(top.sum(axis=1), axis=0).fillna(0.0)

    # Scale each day's weights so the trailing-60-day portfolio volatility
    # matches vol_target. Days without a valid covariance stay at zero.
    # FIX: dropped the dead `vol` accumulator (built but never returned)
    # and the redundant reshape of the already-square covariance block.
    rolling_cov = returns.rolling(60).cov()

    for date in weights.index:
        if date not in rolling_cov.index:
            continue

        w = weights.loc[date].values
        cov = rolling_cov.loc[date].values  # (n_tickers, n_tickers)
        portfolio_vol = np.sqrt(w @ cov @ w) * np.sqrt(252)

        # NaN covariances (first 59 days) fail the check -> zero exposure.
        if np.isfinite(portfolio_vol) and portfolio_vol > 0:
            scale = vol_target / portfolio_vol
        else:
            scale = 0.0
        weights.loc[date] = w * scale

    # Trade on yesterday's weights to avoid look-ahead bias.
    strategy_returns = (weights.shift(1) * returns).sum(axis=1)
    equity_curve = (1 + strategy_returns).cumprod() * initial_capital

    latest_weights = weights.iloc[-1]
    allocation = pd.DataFrame({
        "Ticker": latest_weights.index,
        "Weight": latest_weights.values,
    }).sort_values("Weight", ascending=False)

    return {
        "returns": strategy_returns,
        "equity_curve": equity_curve,
        "latest_allocation": allocation,
    }
46
+
hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/.gitattributes ADDED
@@ -0,0 +1,35 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ *.7z filter=lfs diff=lfs merge=lfs -text
2
+ *.arrow filter=lfs diff=lfs merge=lfs -text
3
+ *.bin filter=lfs diff=lfs merge=lfs -text
4
+ *.bz2 filter=lfs diff=lfs merge=lfs -text
5
+ *.ckpt filter=lfs diff=lfs merge=lfs -text
6
+ *.ftz filter=lfs diff=lfs merge=lfs -text
7
+ *.gz filter=lfs diff=lfs merge=lfs -text
8
+ *.h5 filter=lfs diff=lfs merge=lfs -text
9
+ *.joblib filter=lfs diff=lfs merge=lfs -text
10
+ *.lfs.* filter=lfs diff=lfs merge=lfs -text
11
+ *.mlmodel filter=lfs diff=lfs merge=lfs -text
12
+ *.model filter=lfs diff=lfs merge=lfs -text
13
+ *.msgpack filter=lfs diff=lfs merge=lfs -text
14
+ *.npy filter=lfs diff=lfs merge=lfs -text
15
+ *.npz filter=lfs diff=lfs merge=lfs -text
16
+ *.onnx filter=lfs diff=lfs merge=lfs -text
17
+ *.ot filter=lfs diff=lfs merge=lfs -text
18
+ *.parquet filter=lfs diff=lfs merge=lfs -text
19
+ *.pb filter=lfs diff=lfs merge=lfs -text
20
+ *.pickle filter=lfs diff=lfs merge=lfs -text
21
+ *.pkl filter=lfs diff=lfs merge=lfs -text
22
+ *.pt filter=lfs diff=lfs merge=lfs -text
23
+ *.pth filter=lfs diff=lfs merge=lfs -text
24
+ *.rar filter=lfs diff=lfs merge=lfs -text
25
+ *.safetensors filter=lfs diff=lfs merge=lfs -text
26
+ saved_model/**/* filter=lfs diff=lfs merge=lfs -text
27
+ *.tar.* filter=lfs diff=lfs merge=lfs -text
28
+ *.tar filter=lfs diff=lfs merge=lfs -text
29
+ *.tflite filter=lfs diff=lfs merge=lfs -text
30
+ *.tgz filter=lfs diff=lfs merge=lfs -text
31
+ *.wasm filter=lfs diff=lfs merge=lfs -text
32
+ *.xz filter=lfs diff=lfs merge=lfs -text
33
+ *.zip filter=lfs diff=lfs merge=lfs -text
34
+ *.zst filter=lfs diff=lfs merge=lfs -text
35
+ *tfevents* filter=lfs diff=lfs merge=lfs -text
hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/Dockerfile ADDED
@@ -0,0 +1,20 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ FROM python:3.13.5-slim
2
+
3
+ WORKDIR /app
4
+
5
+ RUN apt-get update && apt-get install -y \
6
+ build-essential \
7
+ curl \
8
+ git \
9
+ && rm -rf /var/lib/apt/lists/*
10
+
11
+ COPY requirements.txt ./
12
+ COPY src/ ./src/
13
+
14
+ RUN pip3 install -r requirements.txt
15
+
16
+ EXPOSE 8501
17
+
18
+ HEALTHCHECK CMD curl --fail http://localhost:8501/_stcore/health
19
+
20
+ ENTRYPOINT ["streamlit", "run", "src/streamlit_app.py", "--server.port=8501", "--server.address=0.0.0.0"]
hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/README.md ADDED
@@ -0,0 +1,19 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ ---
2
+ title: P2 ETF TREND SUITE
3
+ emoji: 🚀
4
+ colorFrom: red
5
+ colorTo: red
6
+ sdk: docker
7
+ app_port: 8501
8
+ tags:
9
+ - streamlit
10
+ pinned: false
11
+ short_description: Streamlit template space
12
+ ---
13
+
14
+ # Welcome to Streamlit!
15
+
16
+ Edit `/src/streamlit_app.py` to customize this app to your heart's desire. :heart:
17
+
18
+ If you have any questions, check out our [documentation](https://docs.streamlit.io) and [community
19
+ forums](https://discuss.streamlit.io).
hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/requirements.txt ADDED
@@ -0,0 +1,3 @@
 
 
 
 
1
+ altair
2
+ pandas
3
+ streamlit
hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/src/streamlit_app.py ADDED
@@ -0,0 +1,40 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
import altair as alt
import numpy as np
import pandas as pd
import streamlit as st

# NOTE: this bare string is intentional — Streamlit "magic" renders it
# on the page as markdown, so its content must not change.
"""
# Welcome to Streamlit!

Edit `/streamlit_app.py` to customize this app to your heart's desire :heart:.
If you have any questions, checkout our [documentation](https://docs.streamlit.io) and [community
forums](https://discuss.streamlit.io).

In the meantime, below is an example of what you can do with just a few lines of code:
"""

# Interactive spiral parameters.
num_points = st.slider("Number of points in spiral", 1, 10000, 1100)
num_turns = st.slider("Number of turns in spiral", 1, 300, 31)

# Parametric spiral: the radius grows linearly with the angle.
indices = np.linspace(0, 1, num_points)
theta = 2 * np.pi * num_turns * indices
radius = indices

spiral = pd.DataFrame({
    "x": radius * np.cos(theta),
    "y": radius * np.sin(theta),
    "idx": indices,
    "rand": np.random.randn(num_points),
})

chart = (
    alt.Chart(spiral, height=700, width=700)
    .mark_point(filled=True)
    .encode(
        x=alt.X("x", axis=None),
        y=alt.Y("y", axis=None),
        color=alt.Color("idx", legend=None, scale=alt.Scale()),
        size=alt.Size("rand", legend=None, scale=alt.Scale(range=[1, 150])),
    )
)
st.altair_chart(chart)
hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/requirements.txt CHANGED
@@ -1,3 +1,8 @@
1
- altair
2
  pandas
3
- streamlit
 
 
 
 
 
 
1
+ streamlit
2
  pandas
3
+ numpy
4
+ yfinance
5
+ datasets
6
+ huggingface_hub
7
+ fredapi
8
+ scipy
hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/streamlit_app.py ADDED
@@ -0,0 +1,70 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
import streamlit as st
import os

from data.hf_store import load_dataset
from data.updater import update_market_data
from data.fred import get_sofr_series
from engine.backtest import run_backtest
from analytics.metrics import compute_metrics

st.set_page_config(layout="wide")

st.title("📊 P2 ETF Trend Suite")
st.markdown("Institutional ETF Trend + Volatility Targeting Engine")

# --- Sidebar: strategy parameters and data refresh trigger ---
st.sidebar.header("Controls")

initial_capital = st.sidebar.number_input("Initial Capital", value=100000, step=10000)
vol_target = st.sidebar.slider("Target Annual Volatility", 0.05, 0.30, 0.15)
lookback = st.sidebar.slider("Momentum Lookback (days)", 50, 300, 200)

refresh = st.sidebar.button("🔄 Refresh Market Data")

# --- Data load (always runs; refresh re-pulls from yfinance) ---
with st.spinner("Loading ETF dataset..."):
    df = load_dataset()

if refresh:
    with st.spinner("Updating market data from yfinance..."):
        df = update_market_data(df)
    st.success("Dataset updated successfully.")

# --- Backtest: risk-free rate, then the strategy engine ---
with st.spinner("Pulling SOFR from FRED..."):
    sofr = get_sofr_series()

with st.spinner("Running backtest..."):
    results = run_backtest(
        df=df,
        initial_capital=initial_capital,
        vol_target=vol_target,
        lookback=lookback,
    )

metrics = compute_metrics(results["returns"], sofr)

# --- Layout: KPI strip, equity curve, allocation table ---
kpi_cagr, kpi_sharpe, kpi_dd, kpi_vol = st.columns(4)

kpi_cagr.metric("CAGR", f"{metrics['cagr']:.2%}")
kpi_sharpe.metric("Sharpe (SOFR)", f"{metrics['sharpe']:.2f}")
kpi_dd.metric("Max Drawdown", f"{metrics['max_dd']:.2%}")
kpi_vol.metric("Volatility", f"{metrics['vol']:.2%}")

st.line_chart(results["equity_curve"])

st.subheader("Current Allocation")
st.dataframe(results["latest_allocation"])
hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/requirements.txt CHANGED
@@ -1,8 +1,6 @@
1
  streamlit
2
  pandas
3
- numpy
4
  yfinance
 
5
  datasets
6
  huggingface_hub
7
- fredapi
8
- scipy
 
1
  streamlit
2
  pandas
 
3
  yfinance
4
+ pandas-datareader
5
  datasets
6
  huggingface_hub