github-actions commited on
Commit
d763787
·
1 Parent(s): cafd0aa

Sync from GitHub

Browse files
analytics/metrics.py ADDED
@@ -0,0 +1,26 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
import numpy as np
import pandas as pd


def compute_metrics(returns, sofr):
    """Compute annualized performance metrics for a daily return series.

    Parameters
    ----------
    returns : pd.Series
        Daily strategy returns indexed by date.
    sofr : pd.DataFrame
        Frame with a "sofr" column of annualized risk-free rates in
        decimal form (e.g. 0.053), indexed by date.

    Returns
    -------
    dict
        Keys "sharpe", "cagr", "vol", "max_dd".
    """
    # Align the risk-free series to the return dates and forward-fill
    # gaps (dates where SOFR was not published), then de-annualize.
    # NOTE: fillna(method="ffill") is deprecated and removed in
    # pandas 3.0 — use .ffill() instead.
    sofr_daily = sofr.reindex(returns.index)["sofr"].ffill() / 252
    excess = returns - sofr_daily

    # Annualized Sharpe on excess returns. Guard the zero-variance
    # case, where the original divided by zero.
    excess_std = excess.std()
    if excess_std > 0:
        sharpe = np.sqrt(252) * excess.mean() / excess_std
    else:
        sharpe = np.nan

    equity = (1 + returns).cumprod()
    # Geometric annualization from total growth over len(equity)
    # trading days; empty input would otherwise crash on iloc[-1].
    cagr = equity.iloc[-1] ** (252 / len(equity)) - 1 if len(equity) else np.nan

    vol = returns.std() * np.sqrt(252)

    # Max drawdown: worst peak-to-trough decline of the equity curve.
    rolling_max = equity.cummax()
    drawdown = equity / rolling_max - 1
    max_dd = drawdown.min()

    return {
        "sharpe": sharpe,
        "cagr": cagr,
        "vol": vol,
        "max_dd": max_dd
    }
26
+
app.py ADDED
@@ -0,0 +1,70 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
import streamlit as st
import os

from data.hf_store import load_dataset
from data.updater import update_market_data
from data.fred import get_sofr_series
from engine.backtest import run_backtest
from analytics.metrics import compute_metrics

# Streamlit dashboard entry point. Streamlit re-runs this script top to
# bottom on every widget interaction, so statement order matters here.
# NOTE(review): `os` appears unused in this file — confirm before removing.

st.set_page_config(layout="wide")

st.title("📊 P2 ETF Trend Suite")
st.markdown("Institutional ETF Trend + Volatility Targeting Engine")

# ========================
# Sidebar Controls
# ========================

st.sidebar.header("Controls")

# Strategy inputs: starting capital, target annualized volatility
# (decimal), and the momentum lookback window in trading days.
initial_capital = st.sidebar.number_input("Initial Capital", value=100000, step=10000)
vol_target = st.sidebar.slider("Target Annual Volatility", 0.05, 0.30, 0.15)
lookback = st.sidebar.slider("Momentum Lookback (days)", 50, 300, 200)

# True only on the run triggered by clicking the button.
refresh = st.sidebar.button("🔄 Refresh Market Data")

# ========================
# Data Load
# ========================

with st.spinner("Loading ETF dataset..."):
    df = load_dataset()

# Optional refresh: pull the latest bars and replace the working frame.
if refresh:
    with st.spinner("Updating market data from yfinance..."):
        df = update_market_data(df)
    st.success("Dataset updated successfully.")

# ========================
# Backtest
# ========================

# Risk-free rate series used for excess-return metrics (Sharpe).
with st.spinner("Pulling SOFR from FRED..."):
    sofr = get_sofr_series()

with st.spinner("Running backtest..."):
    results = run_backtest(
        df=df,
        initial_capital=initial_capital,
        vol_target=vol_target,
        lookback=lookback,
    )

metrics = compute_metrics(results["returns"], sofr)

# ========================
# Layout
# ========================

# Headline stats across four columns, then the equity curve and the
# most recent portfolio weights.
col1, col2, col3, col4 = st.columns(4)

col1.metric("CAGR", f"{metrics['cagr']:.2%}")
col2.metric("Sharpe (SOFR)", f"{metrics['sharpe']:.2f}")
col3.metric("Max Drawdown", f"{metrics['max_dd']:.2%}")
col4.metric("Volatility", f"{metrics['vol']:.2%}")

st.line_chart(results["equity_curve"])

st.subheader("Current Allocation")
st.dataframe(results["latest_allocation"])
data/fred.py ADDED
@@ -0,0 +1,11 @@
 
 
 
 
 
 
 
 
 
 
 
 
1
import os
import pandas as pd
from fredapi import Fred


def get_sofr_series():
    """Fetch the SOFR rate history from FRED as a decimal-rate frame.

    Returns
    -------
    pd.DataFrame
        Single "sofr" column of annualized rates in decimal form
        (e.g. 0.053 for 5.3%), with a DatetimeIndex.

    Raises
    ------
    RuntimeError
        If the FRED_API_KEY environment variable is not set — fredapi
        would otherwise fail later with an opaque HTTP error.
    """
    api_key = os.getenv("FRED_API_KEY")
    if not api_key:
        raise RuntimeError("FRED_API_KEY environment variable is not set")

    fred = Fred(api_key=api_key)
    sofr = fred.get_series("SOFR").to_frame("sofr")
    sofr.index = pd.to_datetime(sofr.index)
    # FRED publishes percentages (e.g. 5.3); convert to decimals.
    sofr["sofr"] = sofr["sofr"] / 100
    return sofr
data/hf_store.py ADDED
@@ -0,0 +1,17 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
import pandas as pd
from datasets import load_dataset as hf_load_dataset

DATASET_PATH = "P2SAMAPA/etf_trend_data"


def load_dataset():
    """Load the ETF price dataset from the Hugging Face Hub.

    Returns a DataFrame with "date" parsed to datetime, sorted by
    (ticker, date).
    """
    raw = hf_load_dataset(DATASET_PATH)

    # A DatasetDict behaves like a dict of splits; unwrap "train" when present.
    if isinstance(raw, dict) and "train" in raw:
        raw = raw["train"]

    frame = raw.to_pandas()
    frame["date"] = pd.to_datetime(frame["date"])
    return frame.sort_values(["ticker", "date"])
data/updater.py ADDED
@@ -0,0 +1,55 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
import os
import pandas as pd
import yfinance as yf
from huggingface_hub import HfApi

DATASET_PATH = "P2SAMAPA/etf_trend_data"


def update_market_data(df):
    """Append the latest yfinance bars per ticker and push to the HF Hub.

    For every ticker already present in *df*, downloads all bars after
    that ticker's last stored date, normalizes column names to the
    dataset schema, de-duplicates, writes a parquet snapshot, and
    uploads it to the Hugging Face dataset repo.

    Parameters
    ----------
    df : pd.DataFrame
        Existing dataset with at least "ticker" and "date" columns.

    Returns
    -------
    pd.DataFrame
        The merged frame (unchanged if no new bars were available).
    """
    tickers = df["ticker"].unique()
    all_new = []

    for ticker in tickers:
        last_date = df[df["ticker"] == ticker]["date"].max()
        start_date = (last_date + pd.Timedelta(days=1)).strftime("%Y-%m-%d")

        # auto_adjust=False keeps the "Adj Close" column: newer yfinance
        # releases default to auto_adjust=True, which drops it and would
        # silently leave the dataset without "adjusted_close".
        new_data = yf.download(
            ticker, start=start_date, progress=False, auto_adjust=False
        )

        if new_data.empty:
            continue

        # Recent yfinance versions return MultiIndex (field, ticker)
        # columns even for a single symbol; flatten to the field names
        # so the rename below matches.
        if isinstance(new_data.columns, pd.MultiIndex):
            new_data.columns = new_data.columns.get_level_values(0)

        new_data.reset_index(inplace=True)
        new_data["ticker"] = ticker
        new_data.rename(columns={
            "Date": "date",
            "Open": "open",
            "High": "high",
            "Low": "low",
            "Close": "close",
            "Adj Close": "adjusted_close",
            "Volume": "volume"
        }, inplace=True)

        all_new.append(new_data)

    # Nothing new anywhere: return the input untouched, skip the upload.
    if not all_new:
        return df

    new_df = pd.concat(all_new)
    df = pd.concat([df, new_df])
    # Keep a single row per (date, ticker) in case of overlapping downloads.
    df.drop_duplicates(subset=["date", "ticker"], inplace=True)
    df.sort_values(["ticker", "date"], inplace=True)

    df.to_parquet("updated.parquet")

    # Push the refreshed snapshot back to the HF dataset repo so the
    # next load_dataset() picks it up.
    api = HfApi()
    api.upload_file(
        path_or_fileobj="updated.parquet",
        path_in_repo="data/train-00000-of-00001.parquet",
        repo_id=DATASET_PATH,
        repo_type="dataset",
        token=os.getenv("HF_TOKEN")
    )

    return df
engine/backtest.py ADDED
@@ -0,0 +1,46 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
import pandas as pd
import numpy as np


def run_backtest(df, initial_capital, vol_target, lookback):
    """Cross-sectional momentum backtest with volatility targeting.

    Each day, holds the top-3 tickers by *lookback*-day momentum,
    equal-weighted, then scales the portfolio so its trailing 60-day
    annualized volatility matches *vol_target*.

    Parameters
    ----------
    df : pd.DataFrame
        Long-format prices with "ticker", "date" and "adjusted_close".
    initial_capital : float
        Starting equity for the cash curve.
    vol_target : float
        Target annualized portfolio volatility (decimal, e.g. 0.15).
    lookback : int
        Momentum lookback window in trading days.

    Returns
    -------
    dict
        "returns" (daily strategy returns), "equity_curve" (cash equity
        series), "latest_allocation" (latest weights, sorted descending).
    """
    df = df.sort_values(["ticker", "date"])
    prices = df.pivot(index="date", columns="ticker", values="adjusted_close")
    returns = prices.pct_change().dropna()

    # Rank by trailing momentum; hold the top 3 names equal-weighted.
    momentum = prices.pct_change(lookback)
    signal = momentum.rank(axis=1, ascending=False)
    top = signal <= 3

    # Dates inside the warm-up window select nothing (top.sum == 0);
    # the original left those rows as NaN from the 0/0 division and the
    # NaN propagated through the vol loop. Fill with 0 so the strategy
    # is simply flat until momentum is available.
    weights = top.div(top.sum(axis=1), axis=0).fillna(0)

    rolling_cov = returns.rolling(60).cov()
    vol = []

    for date in weights.index:
        if date not in rolling_cov.index:
            vol.append(0)
            continue

        w = weights.loc[date].values
        cov = rolling_cov.loc[date].values.reshape(len(w), len(w))
        portfolio_vol = np.sqrt(w @ cov @ w) * np.sqrt(252)

        # Scale exposure toward the vol target; stay flat while the
        # covariance window is still warming up (NaN/zero vol, where
        # `portfolio_vol > 0` is False).
        scale = vol_target / portfolio_vol if portfolio_vol > 0 else 0
        weights.loc[date] = w * scale
        vol.append(portfolio_vol)

    # Trade on yesterday's weights to avoid look-ahead bias.
    strategy_returns = (weights.shift(1) * returns).sum(axis=1)
    equity_curve = (1 + strategy_returns).cumprod() * initial_capital

    latest_weights = weights.iloc[-1]
    allocation = pd.DataFrame({
        "Ticker": latest_weights.index,
        "Weight": latest_weights.values
    }).sort_values("Weight", ascending=False)

    return {
        "returns": strategy_returns,
        "equity_curve": equity_curve,
        "latest_allocation": allocation
    }
46
+
hf_space/.gitattributes ADDED
@@ -0,0 +1,35 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ *.7z filter=lfs diff=lfs merge=lfs -text
2
+ *.arrow filter=lfs diff=lfs merge=lfs -text
3
+ *.bin filter=lfs diff=lfs merge=lfs -text
4
+ *.bz2 filter=lfs diff=lfs merge=lfs -text
5
+ *.ckpt filter=lfs diff=lfs merge=lfs -text
6
+ *.ftz filter=lfs diff=lfs merge=lfs -text
7
+ *.gz filter=lfs diff=lfs merge=lfs -text
8
+ *.h5 filter=lfs diff=lfs merge=lfs -text
9
+ *.joblib filter=lfs diff=lfs merge=lfs -text
10
+ *.lfs.* filter=lfs diff=lfs merge=lfs -text
11
+ *.mlmodel filter=lfs diff=lfs merge=lfs -text
12
+ *.model filter=lfs diff=lfs merge=lfs -text
13
+ *.msgpack filter=lfs diff=lfs merge=lfs -text
14
+ *.npy filter=lfs diff=lfs merge=lfs -text
15
+ *.npz filter=lfs diff=lfs merge=lfs -text
16
+ *.onnx filter=lfs diff=lfs merge=lfs -text
17
+ *.ot filter=lfs diff=lfs merge=lfs -text
18
+ *.parquet filter=lfs diff=lfs merge=lfs -text
19
+ *.pb filter=lfs diff=lfs merge=lfs -text
20
+ *.pickle filter=lfs diff=lfs merge=lfs -text
21
+ *.pkl filter=lfs diff=lfs merge=lfs -text
22
+ *.pt filter=lfs diff=lfs merge=lfs -text
23
+ *.pth filter=lfs diff=lfs merge=lfs -text
24
+ *.rar filter=lfs diff=lfs merge=lfs -text
25
+ *.safetensors filter=lfs diff=lfs merge=lfs -text
26
+ saved_model/**/* filter=lfs diff=lfs merge=lfs -text
27
+ *.tar.* filter=lfs diff=lfs merge=lfs -text
28
+ *.tar filter=lfs diff=lfs merge=lfs -text
29
+ *.tflite filter=lfs diff=lfs merge=lfs -text
30
+ *.tgz filter=lfs diff=lfs merge=lfs -text
31
+ *.wasm filter=lfs diff=lfs merge=lfs -text
32
+ *.xz filter=lfs diff=lfs merge=lfs -text
33
+ *.zip filter=lfs diff=lfs merge=lfs -text
34
+ *.zst filter=lfs diff=lfs merge=lfs -text
35
+ *tfevents* filter=lfs diff=lfs merge=lfs -text
hf_space/Dockerfile ADDED
@@ -0,0 +1,20 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ FROM python:3.13.5-slim
2
+
3
+ WORKDIR /app
4
+
5
+ RUN apt-get update && apt-get install -y \
6
+ build-essential \
7
+ curl \
8
+ git \
9
+ && rm -rf /var/lib/apt/lists/*
10
+
11
+ COPY requirements.txt ./
12
+ COPY src/ ./src/
13
+
14
+ RUN pip3 install -r requirements.txt
15
+
16
+ EXPOSE 8501
17
+
18
+ HEALTHCHECK CMD curl --fail http://localhost:8501/_stcore/health
19
+
20
+ ENTRYPOINT ["streamlit", "run", "src/streamlit_app.py", "--server.port=8501", "--server.address=0.0.0.0"]
hf_space/README.md ADDED
@@ -0,0 +1,19 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ ---
2
+ title: P2 ETF TREND SUITE
3
+ emoji: 🚀
4
+ colorFrom: red
5
+ colorTo: red
6
+ sdk: docker
7
+ app_port: 8501
8
+ tags:
9
+ - streamlit
10
+ pinned: false
11
+ short_description: ETF trend + volatility targeting engine
12
+ ---
13
+
14
+ # Welcome to Streamlit!
15
+
16
+ Edit `/src/streamlit_app.py` to customize this app to your heart's desire. :heart:
17
+
18
+ If you have any questions, checkout our [documentation](https://docs.streamlit.io) and [community
19
+ forums](https://discuss.streamlit.io).
hf_space/requirements.txt ADDED
@@ -0,0 +1,3 @@
 
 
 
 
1
+ altair
2
+ pandas
3
+ streamlit
hf_space/src/streamlit_app.py ADDED
@@ -0,0 +1,40 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
import altair as alt
import numpy as np
import pandas as pd
import streamlit as st

"""
# Welcome to Streamlit!

Edit `/streamlit_app.py` to customize this app to your heart's desire :heart:.
If you have any questions, checkout our [documentation](https://docs.streamlit.io) and [community
forums](https://discuss.streamlit.io).

In the meantime, below is an example of what you can do with just a few lines of code:
"""

# Interactive controls for the spiral demo.
point_count = st.slider("Number of points in spiral", 1, 10000, 1100)
turn_count = st.slider("Number of turns in spiral", 1, 300, 31)

# Parametrize the spiral: t runs 0..1, the angle sweeps `turn_count`
# full revolutions, and the radius equals t.
t = np.linspace(0, 1, point_count)
angle = 2 * np.pi * turn_count * t

spiral = pd.DataFrame({
    "x": t * np.cos(angle),
    "y": t * np.sin(angle),
    "idx": t,
    "rand": np.random.randn(point_count),
})

# Color follows position along the spiral; point size is random.
chart = (
    alt.Chart(spiral, height=700, width=700)
    .mark_point(filled=True)
    .encode(
        x=alt.X("x", axis=None),
        y=alt.Y("y", axis=None),
        color=alt.Color("idx", legend=None, scale=alt.Scale()),
        size=alt.Size("rand", legend=None, scale=alt.Scale(range=[1, 150])),
    )
)
st.altair_chart(chart)
requirements.txt CHANGED
@@ -1,3 +1,8 @@
1
- altair
2
  pandas
3
- streamlit
 
 
 
 
 
 
1
+ streamlit
2
  pandas
3
+ numpy
4
+ yfinance
5
+ datasets
6
+ huggingface_hub
7
+ fredapi
8
+ scipy