Spaces:
Running
Running
github-actions committed on
Commit ·
56e8cf6
1
Parent(s): 7623b2f
Sync from GitHub
Browse files- engine/trend_engine.py +45 -38
- hf_space/data/loader.py +8 -9
- hf_space/hf_space/hf_space/app.py +55 -32
- hf_space/hf_space/hf_space/hf_space/data/loader.py +72 -6
- hf_space/hf_space/hf_space/hf_space/hf_space/data/loader.py +14 -40
- hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/data/loader.py +51 -22
- hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/data/loader.py +22 -34
- hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/app.py +36 -26
- hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/data/loader.py +14 -36
- hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/app.py +30 -18
- hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/data/loader.py +26 -24
- hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/app.py +22 -98
- hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/data/loader.py +49 -23
- hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/app.py +56 -45
- hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/app.py +75 -26
- hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/engine/trend_engine.py +40 -0
- hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/engine/engine/trend_engine.py +36 -0
- hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/app.py +33 -48
- hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/data/loader.py +23 -47
- hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/README.md +9 -13
- hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/Dockerfile +21 -16
- hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/engine/engine/__init__.py +1 -0
- hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/data/data/__init__.py +1 -0
- hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/analytics/analytics/__init__.py +1 -0
- hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/app.py +42 -68
- hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/data/loader.py +52 -11
- hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/Dockerfile +9 -1
- hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/data/loader.py +15 -0
- hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/analytics/metrics.py +11 -16
- hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/engine/backtest.py +7 -33
- hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/app.py +19 -55
- hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/Dockerfile +7 -2
- hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/app.py +94 -48
- hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/Dockerfile +6 -13
- hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/app.py +66 -31
- hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/analytics/metrics.py +26 -0
- hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/app.py +35 -0
- hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/data/fred.py +11 -0
- hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/data/hf_store.py +17 -0
- hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/data/updater.py +55 -0
- hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/engine/backtest.py +46 -0
- hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/.gitattributes +35 -0
- hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/Dockerfile +20 -0
- hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/README.md +19 -0
- hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/requirements.txt +8 -0
- hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/src/streamlit_app.py +40 -0
- hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/streamlit_app.py +70 -0
- hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/requirements.txt +1 -3
- hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/requirements.txt +3 -3
- hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/requirements.txt +1 -0
engine/trend_engine.py
CHANGED
|
@@ -1,40 +1,47 @@
|
|
| 1 |
-
import numpy as np
|
| 2 |
import pandas as pd
|
|
|
|
| 3 |
|
| 4 |
-
def run_trend_module(
|
| 5 |
-
|
| 6 |
-
|
| 7 |
-
|
| 8 |
-
|
| 9 |
-
|
| 10 |
-
|
| 11 |
-
|
| 12 |
-
signals = (
|
| 13 |
-
|
| 14 |
-
# 2.
|
| 15 |
-
returns =
|
| 16 |
-
realized_vol = returns.rolling(
|
| 17 |
-
|
| 18 |
-
|
| 19 |
-
|
| 20 |
-
|
| 21 |
-
|
| 22 |
-
#
|
| 23 |
-
|
| 24 |
-
|
| 25 |
-
|
| 26 |
-
|
| 27 |
-
|
| 28 |
-
|
| 29 |
-
|
| 30 |
-
|
| 31 |
-
|
| 32 |
-
|
| 33 |
-
|
| 34 |
-
|
| 35 |
-
|
| 36 |
-
|
| 37 |
-
|
| 38 |
-
|
| 39 |
-
|
| 40 |
-
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
import pandas as pd
|
| 2 |
+
import numpy as np
|
| 3 |
|
| 4 |
+
def run_trend_module(price_df, sofr_series, target_vol=0.12):
|
| 5 |
+
"""
|
| 6 |
+
Implements 2025 Dow Award Logic.
|
| 7 |
+
"""
|
| 8 |
+
# 1. Dual-Trend Signal (Fast vs Slow SMA)
|
| 9 |
+
sma_fast = price_df.rolling(50).mean()
|
| 10 |
+
sma_slow = price_df.rolling(200).mean()
|
| 11 |
+
# Signal is 1 if in trend, 0 if cash
|
| 12 |
+
signals = (sma_fast > sma_slow).astype(int)
|
| 13 |
+
|
| 14 |
+
# 2. Volatility Targeting (Inverse Vol Sizing)
|
| 15 |
+
returns = price_df.pct_change()
|
| 16 |
+
realized_vol = returns.rolling(60).std() * np.sqrt(252)
|
| 17 |
+
# Weights = Target Vol / Realized Vol
|
| 18 |
+
weights = (target_vol / realized_vol).fillna(0)
|
| 19 |
+
weights = weights.clip(upper=1.5) # Cap leverage at 150%
|
| 20 |
+
|
| 21 |
+
# 3. Portfolio Returns
|
| 22 |
+
# Position = Signal * Weight
|
| 23 |
+
asset_returns = (signals.shift(1) * weights.shift(1) * returns).mean(axis=1)
|
| 24 |
+
|
| 25 |
+
# 4. Interest on Cash (SOFR)
|
| 26 |
+
# If signals are 0 (in cash), we earn SOFR
|
| 27 |
+
cash_percentage = 1 - signals.mean(axis=1)
|
| 28 |
+
interest_returns = (cash_percentage.shift(1) * (sofr_series.shift(1) / 252))
|
| 29 |
+
|
| 30 |
+
total_returns = asset_returns + interest_returns
|
| 31 |
+
equity_curve = (1 + total_returns).fillna(0).cumprod()
|
| 32 |
+
|
| 33 |
+
# 5. Metrics
|
| 34 |
+
ann_ret = total_returns.mean() * 252
|
| 35 |
+
ann_vol = total_returns.std() * np.sqrt(252)
|
| 36 |
+
sharpe = (ann_ret - 0.035) / ann_vol if ann_vol > 0 else 0
|
| 37 |
+
|
| 38 |
+
dd = equity_curve / equity_curve.cummax() - 1
|
| 39 |
+
max_dd = dd.min()
|
| 40 |
+
|
| 41 |
+
return {
|
| 42 |
+
'equity_curve': equity_curve,
|
| 43 |
+
'sharpe': sharpe,
|
| 44 |
+
'ann_ret': ann_ret,
|
| 45 |
+
'max_dd': max_dd,
|
| 46 |
+
'current_signals': signals.iloc[-1]
|
| 47 |
+
}
|
hf_space/data/loader.py
CHANGED
|
@@ -14,7 +14,7 @@ X_EQUITY_TICKERS = ["XLK", "XLY", "XLP", "XLE", "XLV", "XLI", "XLB", "XLRE", "XL
|
|
| 14 |
FI_TICKERS = ["TLT", "IEF", "TIP", "TBT", "GLD", "SLV", "VGIT", "VCLT", "VCIT", "HYG", "PFF", "MBB", "VNQ", "LQD", "AGG"]
|
| 15 |
|
| 16 |
def get_hf_token():
|
| 17 |
-
"""Safely retrieves the token
|
| 18 |
try:
|
| 19 |
return st.secrets["HF_TOKEN"]
|
| 20 |
except:
|
|
@@ -32,7 +32,7 @@ def load_from_hf():
|
|
| 32 |
return None
|
| 33 |
|
| 34 |
def seed_dataset_from_scratch():
|
| 35 |
-
"""
|
| 36 |
tickers = list(set(X_EQUITY_TICKERS + FI_TICKERS + ["SPY", "AGG"]))
|
| 37 |
master_df = pd.DataFrame()
|
| 38 |
|
|
@@ -42,13 +42,13 @@ def seed_dataset_from_scratch():
|
|
| 42 |
for i, ticker in enumerate(tickers):
|
| 43 |
status.text(f"Fetching {ticker} from Stooq...")
|
| 44 |
try:
|
| 45 |
-
# Stooq primary
|
| 46 |
data = web.DataReader(f"{ticker}.US", 'stooq', start='2008-01-01')
|
| 47 |
if not data.empty:
|
| 48 |
master_df[ticker] = data['Close'].sort_index()
|
| 49 |
-
time.sleep(0.6)
|
| 50 |
except:
|
| 51 |
-
# YFinance fallback
|
| 52 |
try:
|
| 53 |
yf_data = yf.download(ticker, start="2008-01-01", progress=False)['Adj Close']
|
| 54 |
master_df[ticker] = yf_data
|
|
@@ -56,12 +56,12 @@ def seed_dataset_from_scratch():
|
|
| 56 |
pass
|
| 57 |
progress_bar.progress((i + 1) / len(tickers))
|
| 58 |
|
| 59 |
-
# Add SOFR Rate
|
| 60 |
try:
|
| 61 |
sofr = web.DataReader('SOFR', 'fred', start="2008-01-01").ffill()
|
| 62 |
master_df['SOFR_ANNUAL'] = sofr / 100
|
| 63 |
except:
|
| 64 |
-
master_df['SOFR_ANNUAL'] = 0.
|
| 65 |
|
| 66 |
master_df = master_df.sort_index().ffill()
|
| 67 |
master_df.to_csv(FILENAME)
|
|
@@ -70,11 +70,10 @@ def seed_dataset_from_scratch():
|
|
| 70 |
return master_df
|
| 71 |
|
| 72 |
def sync_incremental_data(df_existing):
|
| 73 |
-
"""Updates only new data since last index date."""
|
| 74 |
last_date = pd.to_datetime(df_existing.index).max()
|
| 75 |
tickers = list(set(X_EQUITY_TICKERS + FI_TICKERS + ["SPY", "AGG"]))
|
| 76 |
|
| 77 |
-
# Simple incremental fetch
|
| 78 |
new_data = yf.download(tickers, start=last_date, progress=False)['Adj Close']
|
| 79 |
combined = pd.concat([df_existing, new_data])
|
| 80 |
combined = combined[~combined.index.duplicated(keep='last')].sort_index()
|
|
|
|
| 14 |
FI_TICKERS = ["TLT", "IEF", "TIP", "TBT", "GLD", "SLV", "VGIT", "VCLT", "VCIT", "HYG", "PFF", "MBB", "VNQ", "LQD", "AGG"]
|
| 15 |
|
| 16 |
def get_hf_token():
|
| 17 |
+
"""Safely retrieves the token from secrets or environment."""
|
| 18 |
try:
|
| 19 |
return st.secrets["HF_TOKEN"]
|
| 20 |
except:
|
|
|
|
| 32 |
return None
|
| 33 |
|
| 34 |
def seed_dataset_from_scratch():
|
| 35 |
+
"""Initial download of 18 years of data using Stooq primarily."""
|
| 36 |
tickers = list(set(X_EQUITY_TICKERS + FI_TICKERS + ["SPY", "AGG"]))
|
| 37 |
master_df = pd.DataFrame()
|
| 38 |
|
|
|
|
| 42 |
for i, ticker in enumerate(tickers):
|
| 43 |
status.text(f"Fetching {ticker} from Stooq...")
|
| 44 |
try:
|
| 45 |
+
# Stooq primary (requires .US suffix for ETFs)
|
| 46 |
data = web.DataReader(f"{ticker}.US", 'stooq', start='2008-01-01')
|
| 47 |
if not data.empty:
|
| 48 |
master_df[ticker] = data['Close'].sort_index()
|
| 49 |
+
time.sleep(0.6)
|
| 50 |
except:
|
| 51 |
+
# YFinance fallback if Stooq fails for a ticker
|
| 52 |
try:
|
| 53 |
yf_data = yf.download(ticker, start="2008-01-01", progress=False)['Adj Close']
|
| 54 |
master_df[ticker] = yf_data
|
|
|
|
| 56 |
pass
|
| 57 |
progress_bar.progress((i + 1) / len(tickers))
|
| 58 |
|
| 59 |
+
# Add SOFR Rate (Cash interest)
|
| 60 |
try:
|
| 61 |
sofr = web.DataReader('SOFR', 'fred', start="2008-01-01").ffill()
|
| 62 |
master_df['SOFR_ANNUAL'] = sofr / 100
|
| 63 |
except:
|
| 64 |
+
master_df['SOFR_ANNUAL'] = 0.045 # Conservative proxy
|
| 65 |
|
| 66 |
master_df = master_df.sort_index().ffill()
|
| 67 |
master_df.to_csv(FILENAME)
|
|
|
|
| 70 |
return master_df
|
| 71 |
|
| 72 |
def sync_incremental_data(df_existing):
|
| 73 |
+
"""Updates only new data since last index date using YFinance for speed."""
|
| 74 |
last_date = pd.to_datetime(df_existing.index).max()
|
| 75 |
tickers = list(set(X_EQUITY_TICKERS + FI_TICKERS + ["SPY", "AGG"]))
|
| 76 |
|
|
|
|
| 77 |
new_data = yf.download(tickers, start=last_date, progress=False)['Adj Close']
|
| 78 |
combined = pd.concat([df_existing, new_data])
|
| 79 |
combined = combined[~combined.index.duplicated(keep='last')].sort_index()
|
hf_space/hf_space/hf_space/app.py
CHANGED
|
@@ -1,45 +1,68 @@
|
|
| 1 |
import streamlit as st
|
| 2 |
-
import
|
| 3 |
-
from
|
| 4 |
|
| 5 |
-
st.set_page_config(layout="wide", page_title="P2
|
| 6 |
|
| 7 |
-
# ---
|
| 8 |
-
st.sidebar.title("🗂️ Data Management")
|
| 9 |
-
|
| 10 |
-
# Initialize Session State
|
| 11 |
if 'master_data' not in st.session_state:
|
| 12 |
st.session_state.master_data = load_from_hf()
|
| 13 |
|
| 14 |
-
#
|
| 15 |
-
|
| 16 |
-
st.
|
| 17 |
-
if st.
|
| 18 |
-
|
|
|
|
| 19 |
st.session_state.master_data = seed_dataset_from_scratch()
|
| 20 |
-
st.sidebar.success("Database Seeded!")
|
| 21 |
st.rerun()
|
| 22 |
-
else:
|
| 23 |
-
|
| 24 |
-
|
| 25 |
-
|
| 26 |
-
if st.sidebar.button("🔄 Step 2: Sync Daily Data"):
|
| 27 |
-
with st.spinner("Pinging Stooq/FRED for new data..."):
|
| 28 |
st.session_state.master_data = sync_incremental_data(st.session_state.master_data)
|
| 29 |
-
st.sidebar.success("Incremental Sync Complete!")
|
| 30 |
st.rerun()
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 31 |
|
| 32 |
-
# ---
|
| 33 |
-
st.sidebar.divider()
|
| 34 |
-
st.sidebar.title("⚙️ Strategy Settings")
|
| 35 |
-
option = st.sidebar.radio("Select Module", ("Option A - FI Trend", "Option B - Equity Trend"))
|
| 36 |
-
start_year = st.sidebar.slider("Start Year", 2008, 2026, 2015)
|
| 37 |
-
vol_target = st.sidebar.slider("Annual Vol Target", 0.05, 0.25, 0.126)
|
| 38 |
-
|
| 39 |
-
# --- MAIN UI: ANALYSIS ---
|
| 40 |
if st.session_state.master_data is not None:
|
| 41 |
-
|
| 42 |
-
|
| 43 |
-
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 44 |
else:
|
| 45 |
-
st.
|
|
|
|
| 1 |
import streamlit as st
from data.loader import load_from_hf, seed_dataset_from_scratch, sync_incremental_data, X_EQUITY_TICKERS, FI_TICKERS
from engine.trend_engine import run_trend_module

st.set_page_config(layout="wide", page_title="P2 Strategy Suite")

# --- INITIALIZATION ---
# Load the cached dataset once per session; None means no dataset yet.
if 'master_data' not in st.session_state:
    st.session_state.master_data = load_from_hf()

# --- SIDEBAR: DATA CONTROLS ---
with st.sidebar:
    st.header("🗂️ Data Management")
    if st.session_state.master_data is None:
        st.error("No dataset detected.")
        if st.button("🚀 Seed Database (2008-2026)", use_container_width=True):
            st.session_state.master_data = seed_dataset_from_scratch()
            st.rerun()
    else:
        last_dt = st.session_state.master_data.index.max()
        st.success(f"Database Active: {last_dt.date()}")
        if st.button("🔄 Sync New Data", use_container_width=True):
            st.session_state.master_data = sync_incremental_data(st.session_state.master_data)
            st.rerun()

    st.divider()
    st.header("⚙️ Strategy Settings")
    option = st.radio("Universe Selection", ("Option A - FI Trend", "Option B - Equity Trend"))
    start_yr = st.slider("Backtest Start Year", 2008, 2026, 2015)
    # Slider works in whole percent; engine expects a decimal fraction.
    vol_target = st.slider("Target Vol (%)", 5, 20, 12) / 100

    st.divider()
    run_btn = st.button("🚀 Run Strategy Analysis", use_container_width=True, type="primary")

# --- MAIN PAGE: DISPLAY ---
if st.session_state.master_data is not None:
    if run_btn:
        with st.spinner("Crunching data..."):
            # Universe Selection
            univ = FI_TICKERS if "Option A" in option else X_EQUITY_TICKERS
            # Slice by date
            df = st.session_state.master_data[st.session_state.master_data.index.year >= start_yr]

            # ROBUSTNESS FIX: seeding skips tickers that failed to download
            # (loader fetch is best-effort per ticker), so indexing df[univ]
            # directly could raise KeyError. Use only the columns present.
            available = [t for t in univ if t in df.columns]

            # Execute Engine
            results = run_trend_module(df[available], df['SOFR_ANNUAL'], vol_target)

            # Show Metrics
            st.title(f"📊 {option} Performance Report")
            m1, m2, m3 = st.columns(3)
            m1.metric("Sharpe Ratio", f"{results['sharpe']:.2f}")
            m2.metric("Annual Return", f"{results['ann_ret']:.1%}")
            m3.metric("Max Drawdown", f"{results['max_dd']:.1%}")

            # Equity Curve
            st.subheader("Cumulative Growth (vs Cash)")
            st.line_chart(results['equity_curve'])

            # Allocation Check
            st.divider()
            st.subheader("Current Market Status")
            active_assets = results['current_signals'][results['current_signals'] > 0].index.tolist()
            st.write(f"**In-Trend Assets:** {', '.join(active_assets) if active_assets else 'All Cash'}")

    else:
        st.title("Welcome to the 2025 Trend Suite")
        st.info("👈 Use the sidebar to manage your data and click 'Run Strategy Analysis' to begin.")
else:
    st.warning("Please initialize the database using the 'Seed' button in the sidebar.")
|
hf_space/hf_space/hf_space/hf_space/data/loader.py
CHANGED
|
@@ -9,10 +9,22 @@ import streamlit as st
|
|
| 9 |
REPO_ID = "P2SAMAPA/etf_trend_data"
|
| 10 |
FILENAME = "market_data.csv"
|
| 11 |
|
| 12 |
-
#
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 13 |
def load_from_hf():
|
| 14 |
-
|
| 15 |
-
|
|
|
|
|
|
|
| 16 |
try:
|
| 17 |
path = hf_hub_download(repo_id=REPO_ID, filename=FILENAME, repo_type="dataset", token=token)
|
| 18 |
return pd.read_csv(path, index_col=0, parse_dates=True)
|
|
@@ -20,10 +32,64 @@ def load_from_hf():
|
|
| 20 |
return None
|
| 21 |
|
| 22 |
def seed_dataset_from_scratch():
|
| 23 |
-
|
| 24 |
-
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 25 |
return master_df
|
| 26 |
|
| 27 |
def sync_incremental_data(df_existing):
|
| 28 |
-
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 29 |
return combined
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 9 |
REPO_ID = "P2SAMAPA/etf_trend_data"
FILENAME = "market_data.csv"

# The 27 Equity X-ETFs and 15 FI ETFs from the 2025 Paper
X_EQUITY_TICKERS = ["XLK", "XLY", "XLP", "XLE", "XLV", "XLI", "XLB", "XLRE", "XLU", "XLC", "XLF", "XBI", "XME", "XOP", "XHB", "XSD", "XRT", "XPH", "XES", "XAR", "XHS", "XHE", "XSW", "XTN", "XTL", "XNTK", "XITK"]
FI_TICKERS = ["TLT", "IEF", "TIP", "TBT", "GLD", "SLV", "VGIT", "VCLT", "VCIT", "HYG", "PFF", "MBB", "VNQ", "LQD", "AGG"]


def get_hf_token():
    """Safely retrieves the token without triggering a SecretNotFoundError crash."""
    try:
        return st.secrets["HF_TOKEN"]
    # BUG FIX: bare `except:` also swallowed SystemExit/KeyboardInterrupt;
    # narrow to Exception (st.secrets raises when no secrets file exists).
    except Exception:
        return os.getenv("HF_TOKEN")


def load_from_hf():
    """Reads the cached dataset from the Hugging Face dataset repo, or None."""
    token = get_hf_token()
    if not token:
        return None
    try:
        path = hf_hub_download(repo_id=REPO_ID, filename=FILENAME, repo_type="dataset", token=token)
        return pd.read_csv(path, index_col=0, parse_dates=True)
    except Exception:
        # Missing repo/file or network failure: caller treats None as "seed needed".
        return None


def seed_dataset_from_scratch():
    """Downloads 2008-Present data from Stooq (YFinance fallback) and uploads it."""
    tickers = list(set(X_EQUITY_TICKERS + FI_TICKERS + ["SPY", "AGG"]))
    master_df = pd.DataFrame()

    status = st.empty()
    progress_bar = st.progress(0)

    for i, ticker in enumerate(tickers):
        status.text(f"Fetching {ticker} from Stooq...")
        try:
            # Stooq primary (ETFs need the .US suffix)
            data = web.DataReader(f"{ticker}.US", 'stooq', start='2008-01-01')
            if not data.empty:
                master_df[ticker] = data['Close'].sort_index()
            time.sleep(0.6)  # Anti-rate limit
        except Exception:
            # YFinance fallback
            try:
                yf_data = yf.download(ticker, start="2008-01-01", progress=False)['Adj Close']
                master_df[ticker] = yf_data
            except Exception:
                pass  # best-effort: one missing ticker must not abort the seed
        progress_bar.progress((i + 1) / len(tickers))

    # Add SOFR Rate (cash interest)
    try:
        sofr = web.DataReader('SOFR', 'fred', start="2008-01-01").ffill()
        # NOTE(review): DataReader returns a single-column DataFrame here;
        # this assignment relies on pandas extracting that column — verify.
        master_df['SOFR_ANNUAL'] = sofr / 100
    except Exception:
        master_df['SOFR_ANNUAL'] = 0.05  # conservative proxy when FRED is unreachable

    master_df = master_df.sort_index().ffill()
    master_df.to_csv(FILENAME)

    upload_to_hf(FILENAME)
    return master_df


def sync_incremental_data(df_existing):
    """Updates only new data since last index date."""
    last_date = pd.to_datetime(df_existing.index).max()
    tickers = list(set(X_EQUITY_TICKERS + FI_TICKERS + ["SPY", "AGG"]))

    # Simple incremental fetch; keep='last' prefers the freshly fetched rows
    # for any date present in both frames.
    new_data = yf.download(tickers, start=last_date, progress=False)['Adj Close']
    combined = pd.concat([df_existing, new_data])
    combined = combined[~combined.index.duplicated(keep='last')].sort_index()

    combined.to_csv(FILENAME)
    upload_to_hf(FILENAME)
    return combined


def upload_to_hf(path):
    """Pushes the local CSV back to the Hugging Face dataset repo."""
    api = HfApi()
    token = get_hf_token()
    api.upload_file(
        path_or_fileobj=path,
        path_in_repo=FILENAME,
        repo_id=REPO_ID,
        repo_type="dataset",
        token=token
    )
|
hf_space/hf_space/hf_space/hf_space/hf_space/data/loader.py
CHANGED
|
@@ -9,47 +9,21 @@ import streamlit as st
|
|
| 9 |
REPO_ID = "P2SAMAPA/etf_trend_data"
|
| 10 |
FILENAME = "market_data.csv"
|
| 11 |
|
| 12 |
-
|
| 13 |
-
|
| 14 |
-
|
| 15 |
-
|
| 16 |
-
st.info("🛰️ Initializing Stooq Data Fetch (2008-Present)...")
|
| 17 |
-
progress_bar = st.progress(0)
|
| 18 |
-
|
| 19 |
-
for i, ticker in enumerate(tickers):
|
| 20 |
-
# Stooq ticker format is usually 'TICKER.US'
|
| 21 |
-
stooq_symbol = f"{ticker}.US"
|
| 22 |
-
try:
|
| 23 |
-
# PRIMARY: STOOQ
|
| 24 |
-
data = web.DataReader(stooq_symbol, 'stooq', start='2008-01-01')
|
| 25 |
-
if not data.empty:
|
| 26 |
-
# Stooq returns data in reverse chronological order; we sort it.
|
| 27 |
-
master_df[ticker] = data['Close'].sort_index()
|
| 28 |
-
|
| 29 |
-
# Anti-Rate Limit: 0.8s delay between requests
|
| 30 |
-
time.sleep(0.8)
|
| 31 |
-
|
| 32 |
-
except Exception as e:
|
| 33 |
-
st.warning(f"⚠️ Stooq failed for {ticker}. Attempting YFinance fallback...")
|
| 34 |
-
try:
|
| 35 |
-
# BACKUP: YFinance
|
| 36 |
-
yf_data = yf.download(ticker, start="2008-01-01", progress=False)['Adj Close']
|
| 37 |
-
master_df[ticker] = yf_data
|
| 38 |
-
except:
|
| 39 |
-
st.error(f"❌ Failed to fetch {ticker} from all sources.")
|
| 40 |
-
|
| 41 |
-
progress_bar.progress((i + 1) / len(tickers))
|
| 42 |
-
|
| 43 |
-
# Add SOFR (Cash Rate) from FRED
|
| 44 |
try:
|
| 45 |
-
|
| 46 |
-
|
| 47 |
except:
|
| 48 |
-
|
| 49 |
|
| 50 |
-
|
| 51 |
-
|
| 52 |
-
#
|
| 53 |
-
master_df.to_csv(FILENAME)
|
| 54 |
-
upload_to_hf(FILENAME)
|
| 55 |
return master_df
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 9 |
REPO_ID = "P2SAMAPA/etf_trend_data"
|
| 10 |
FILENAME = "market_data.csv"
|
| 11 |
|
| 12 |
+
# Make sure these match exactly what app.py expects
|
| 13 |
+
def load_from_hf():
|
| 14 |
+
token = st.secrets.get("HF_TOKEN")
|
| 15 |
+
if not token: return None
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 16 |
try:
|
| 17 |
+
path = hf_hub_download(repo_id=REPO_ID, filename=FILENAME, repo_type="dataset", token=token)
|
| 18 |
+
return pd.read_csv(path, index_col=0, parse_dates=True)
|
| 19 |
except:
|
| 20 |
+
return None
|
| 21 |
|
| 22 |
+
def seed_dataset_from_scratch():
|
| 23 |
+
# ... (Your Stooq download logic here)
|
| 24 |
+
# Ensure this function name matches the import in app.py
|
|
|
|
|
|
|
| 25 |
return master_df
|
| 26 |
+
|
| 27 |
+
def sync_incremental_data(df_existing):
|
| 28 |
+
# ... (Your incremental update logic here)
|
| 29 |
+
return combined
|
hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/data/loader.py
CHANGED
|
@@ -1,26 +1,55 @@
|
|
| 1 |
-
|
| 2 |
-
|
| 3 |
-
|
| 4 |
-
|
| 5 |
-
|
| 6 |
-
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 7 |
tickers = list(set(X_EQUITY_TICKERS + FI_TICKERS + ["SPY", "AGG"]))
|
|
|
|
|
|
|
|
|
|
|
|
|
| 8 |
|
| 9 |
-
|
| 10 |
-
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 11 |
|
| 12 |
-
#
|
| 13 |
-
|
| 14 |
-
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 15 |
|
| 16 |
-
# Save &
|
| 17 |
-
|
| 18 |
-
|
| 19 |
-
|
| 20 |
-
path_or_fileobj=FILENAME,
|
| 21 |
-
path_in_repo=FILENAME,
|
| 22 |
-
repo_id=REPO_ID,
|
| 23 |
-
repo_type="dataset",
|
| 24 |
-
token=st.secrets["HF_TOKEN"]
|
| 25 |
-
)
|
| 26 |
-
return combined
|
|
|
|
| 1 |
+
import pandas as pd
|
| 2 |
+
import pandas_datareader.data as web
|
| 3 |
+
import yfinance as yf
|
| 4 |
+
import time
|
| 5 |
+
from huggingface_hub import hf_hub_download, HfApi
|
| 6 |
+
import os
|
| 7 |
+
import streamlit as st
|
| 8 |
+
|
| 9 |
+
REPO_ID = "P2SAMAPA/etf_trend_data"
|
| 10 |
+
FILENAME = "market_data.csv"
|
| 11 |
+
|
| 12 |
+
def seed_dataset():
|
| 13 |
tickers = list(set(X_EQUITY_TICKERS + FI_TICKERS + ["SPY", "AGG"]))
|
| 14 |
+
master_df = pd.DataFrame()
|
| 15 |
+
|
| 16 |
+
st.info("🛰️ Initializing Stooq Data Fetch (2008-Present)...")
|
| 17 |
+
progress_bar = st.progress(0)
|
| 18 |
|
| 19 |
+
for i, ticker in enumerate(tickers):
|
| 20 |
+
# Stooq ticker format is usually 'TICKER.US'
|
| 21 |
+
stooq_symbol = f"{ticker}.US"
|
| 22 |
+
try:
|
| 23 |
+
# PRIMARY: STOOQ
|
| 24 |
+
data = web.DataReader(stooq_symbol, 'stooq', start='2008-01-01')
|
| 25 |
+
if not data.empty:
|
| 26 |
+
# Stooq returns data in reverse chronological order; we sort it.
|
| 27 |
+
master_df[ticker] = data['Close'].sort_index()
|
| 28 |
+
|
| 29 |
+
# Anti-Rate Limit: 0.8s delay between requests
|
| 30 |
+
time.sleep(0.8)
|
| 31 |
+
|
| 32 |
+
except Exception as e:
|
| 33 |
+
st.warning(f"⚠️ Stooq failed for {ticker}. Attempting YFinance fallback...")
|
| 34 |
+
try:
|
| 35 |
+
# BACKUP: YFinance
|
| 36 |
+
yf_data = yf.download(ticker, start="2008-01-01", progress=False)['Adj Close']
|
| 37 |
+
master_df[ticker] = yf_data
|
| 38 |
+
except:
|
| 39 |
+
st.error(f"❌ Failed to fetch {ticker} from all sources.")
|
| 40 |
+
|
| 41 |
+
progress_bar.progress((i + 1) / len(tickers))
|
| 42 |
|
| 43 |
+
# Add SOFR (Cash Rate) from FRED
|
| 44 |
+
try:
|
| 45 |
+
sofr = web.DataReader('SOFR', 'fred', start="2008-01-01").ffill()
|
| 46 |
+
master_df['SOFR_ANNUAL'] = sofr / 100
|
| 47 |
+
except:
|
| 48 |
+
master_df['SOFR_ANNUAL'] = 0.05 # Conservative fallback
|
| 49 |
+
|
| 50 |
+
master_df = master_df.sort_index().ffill()
|
| 51 |
|
| 52 |
+
# Save & Upload
|
| 53 |
+
master_df.to_csv(FILENAME)
|
| 54 |
+
upload_to_hf(FILENAME)
|
| 55 |
+
return master_df
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/data/loader.py
CHANGED
|
@@ -1,38 +1,26 @@
|
|
| 1 |
-
|
| 2 |
-
|
| 3 |
-
import
|
| 4 |
-
|
| 5 |
-
|
| 6 |
-
|
| 7 |
-
|
| 8 |
-
REPO_ID = "P2SAMAPA/etf_trend_data"
|
| 9 |
-
FILENAME = "market_data.csv"
|
| 10 |
-
|
| 11 |
-
X_EQUITY_TICKERS = ["XLK", "XLY", "XLP", "XLE", "XLV", "XLI", "XLB", "XLRE", "XLU", "XLC", "XLF", "XBI", "XME", "XOP", "XHB", "XSD", "XRT", "XPH", "XES", "XAR", "XHS", "XHE", "XSW", "XTN", "XTL", "XNTK", "XITK"]
|
| 12 |
-
FI_TICKERS = ["TLT", "IEF", "TIP", "TBT", "GLD", "SLV", "VGIT", "VCLT", "VCIT", "HYG", "PFF", "MBB", "VNQ", "LQD", "AGG"]
|
| 13 |
-
|
| 14 |
-
def load_from_hf():
|
| 15 |
-
try:
|
| 16 |
-
token = st.secrets["HF_TOKEN"]
|
| 17 |
-
path = hf_hub_download(repo_id=REPO_ID, filename=FILENAME, repo_type="dataset", token=token)
|
| 18 |
-
return pd.read_csv(path, index_col=0, parse_dates=True)
|
| 19 |
-
except:
|
| 20 |
-
return None
|
| 21 |
-
|
| 22 |
-
def seed_dataset():
|
| 23 |
tickers = list(set(X_EQUITY_TICKERS + FI_TICKERS + ["SPY", "AGG"]))
|
| 24 |
-
# Download Wide Format
|
| 25 |
-
df = yf.download(tickers, start="2008-01-01")['Adj Close']
|
| 26 |
|
| 27 |
-
#
|
| 28 |
-
|
| 29 |
-
|
| 30 |
-
|
|
|
|
|
|
|
| 31 |
|
| 32 |
-
|
| 33 |
-
|
| 34 |
-
return df
|
| 35 |
-
|
| 36 |
-
def upload_to_hf(path):
|
| 37 |
api = HfApi()
|
| 38 |
-
api.upload_file(
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
def sync_incremental_data(df_existing):
|
| 2 |
+
"""Downloads only missing data since last update and saves to HF."""
|
| 3 |
+
import yfinance as yf
|
| 4 |
+
|
| 5 |
+
# Identify last date in the CSV
|
| 6 |
+
last_date = pd.to_datetime(df_existing.index).max()
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 7 |
tickers = list(set(X_EQUITY_TICKERS + FI_TICKERS + ["SPY", "AGG"]))
|
|
|
|
|
|
|
| 8 |
|
| 9 |
+
# Fetch new bars from yfinance or stooq
|
| 10 |
+
new_data = yf.download(tickers, start=last_date, progress=False)['Adj Close']
|
| 11 |
+
|
| 12 |
+
# Combine (Drop duplicates to avoid double-counting the last day)
|
| 13 |
+
combined = pd.concat([df_existing, new_data])
|
| 14 |
+
combined = combined[~combined.index.duplicated(keep='last')].sort_index()
|
| 15 |
|
| 16 |
+
# Save & Push
|
| 17 |
+
combined.to_csv(FILENAME)
|
|
|
|
|
|
|
|
|
|
| 18 |
api = HfApi()
|
| 19 |
+
api.upload_file(
|
| 20 |
+
path_or_fileobj=FILENAME,
|
| 21 |
+
path_in_repo=FILENAME,
|
| 22 |
+
repo_id=REPO_ID,
|
| 23 |
+
repo_type="dataset",
|
| 24 |
+
token=st.secrets["HF_TOKEN"]
|
| 25 |
+
)
|
| 26 |
+
return combined
|
hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/app.py
CHANGED
|
@@ -1,35 +1,45 @@
|
|
| 1 |
import streamlit as st
|
| 2 |
import pandas as pd
|
| 3 |
-
import
|
| 4 |
-
from datetime import datetime
|
| 5 |
-
from data.loader import load_from_hf, seed_dataset, X_EQUITY_TICKERS, FI_TICKERS
|
| 6 |
-
from engine.trend_engine import run_trend_module
|
| 7 |
|
| 8 |
st.set_page_config(layout="wide", page_title="P2 Trend Suite")
|
| 9 |
|
| 10 |
-
#
|
| 11 |
-
st.sidebar.title("
|
| 12 |
-
option = st.sidebar.radio("Select Strategy", ("Option A - FI Trend Follower", "Option B - Equity Trend Follower"))
|
| 13 |
-
start_year = st.sidebar.slider("Start Year", 2008, 2026, 2015)
|
| 14 |
-
vol_target = st.sidebar.slider("Annual Vol Target", 0.05, 0.25, 0.12)
|
| 15 |
|
| 16 |
-
#
|
| 17 |
-
if '
|
| 18 |
-
st.session_state.
|
| 19 |
|
| 20 |
-
|
| 21 |
-
|
| 22 |
-
|
| 23 |
-
|
|
|
|
|
|
|
|
|
|
|
|
|
| 24 |
else:
|
| 25 |
-
|
| 26 |
-
universe = FI_TICKERS if "Option A" in option else X_EQUITY_TICKERS
|
| 27 |
-
bench = "AGG" if "Option A" in option else "SPY"
|
| 28 |
-
|
| 29 |
-
# Filter by Year
|
| 30 |
-
d = st.session_state.data[st.session_state.data.index.year >= start_year]
|
| 31 |
-
results = run_trend_module(d[universe], d['SOFR_ANNUAL'], vol_target)
|
| 32 |
|
| 33 |
-
#
|
| 34 |
-
st.
|
| 35 |
-
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
import streamlit as st
|
| 2 |
import pandas as pd
|
| 3 |
+
from data.loader import load_from_hf, seed_dataset_from_scratch, sync_incremental_data
|
|
|
|
|
|
|
|
|
|
| 4 |
|
| 5 |
st.set_page_config(layout="wide", page_title="P2 Trend Suite")
|
| 6 |
|
| 7 |
+
# --- SIDEBAR: DATA MANAGEMENT ---
|
| 8 |
+
st.sidebar.title("🗂️ Data Management")
|
|
|
|
|
|
|
|
|
|
| 9 |
|
| 10 |
+
# Initialize Session State
|
| 11 |
+
if 'master_data' not in st.session_state:
|
| 12 |
+
st.session_state.master_data = load_from_hf()
|
| 13 |
|
| 14 |
+
# LOGIC: If no data, show SEED. If data exists, show SYNC.
|
| 15 |
+
if st.session_state.master_data is None:
|
| 16 |
+
st.sidebar.warning("Database not found.")
|
| 17 |
+
if st.sidebar.button("🚀 Step 1: Seed Database (2008-2026)"):
|
| 18 |
+
with st.spinner("Downloading full history..."):
|
| 19 |
+
st.session_state.master_data = seed_dataset_from_scratch()
|
| 20 |
+
st.sidebar.success("Database Seeded!")
|
| 21 |
+
st.rerun()
|
| 22 |
else:
|
| 23 |
+
st.sidebar.success(f"Database Active: {st.session_state.master_data.index.max()}")
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 24 |
|
| 25 |
+
# SYNC BUTTON for daily incremental updates
|
| 26 |
+
if st.sidebar.button("🔄 Step 2: Sync Daily Data"):
|
| 27 |
+
with st.spinner("Pinging Stooq/FRED for new data..."):
|
| 28 |
+
st.session_state.master_data = sync_incremental_data(st.session_state.master_data)
|
| 29 |
+
st.sidebar.success("Incremental Sync Complete!")
|
| 30 |
+
st.rerun()
|
| 31 |
+
|
| 32 |
+
# --- SIDEBAR: STRATEGY CONTROLS ---
|
| 33 |
+
st.sidebar.divider()
|
| 34 |
+
st.sidebar.title("⚙️ Strategy Settings")
|
| 35 |
+
option = st.sidebar.radio("Select Module", ("Option A - FI Trend", "Option B - Equity Trend"))
|
| 36 |
+
start_year = st.sidebar.slider("Start Year", 2008, 2026, 2015)
|
| 37 |
+
vol_target = st.sidebar.slider("Annual Vol Target", 0.05, 0.25, 0.126)
|
| 38 |
+
|
| 39 |
+
# --- MAIN UI: ANALYSIS ---
|
| 40 |
+
if st.session_state.master_data is not None:
|
| 41 |
+
# Your strategy execution code here...
|
| 42 |
+
st.title(f"📊 {option}")
|
| 43 |
+
# ...
|
| 44 |
+
else:
|
| 45 |
+
st.info("Please use the sidebar to Seed the database first.")
|
hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/data/loader.py
CHANGED
|
@@ -1,6 +1,6 @@
|
|
| 1 |
import pandas as pd
|
| 2 |
-
import pandas_datareader.data as web
|
| 3 |
import yfinance as yf
|
|
|
|
| 4 |
from huggingface_hub import hf_hub_download, HfApi
|
| 5 |
import os
|
| 6 |
import streamlit as st
|
|
@@ -12,49 +12,27 @@ X_EQUITY_TICKERS = ["XLK", "XLY", "XLP", "XLE", "XLV", "XLI", "XLB", "XLRE", "XL
|
|
| 12 |
FI_TICKERS = ["TLT", "IEF", "TIP", "TBT", "GLD", "SLV", "VGIT", "VCLT", "VCIT", "HYG", "PFF", "MBB", "VNQ", "LQD", "AGG"]
|
| 13 |
|
| 14 |
def load_from_hf():
|
| 15 |
-
"""Reads the dataset from Hugging Face."""
|
| 16 |
try:
|
| 17 |
-
|
| 18 |
-
token = st.secrets.get("HF_TOKEN") or os.getenv("HF_TOKEN")
|
| 19 |
path = hf_hub_download(repo_id=REPO_ID, filename=FILENAME, repo_type="dataset", token=token)
|
| 20 |
return pd.read_csv(path, index_col=0, parse_dates=True)
|
| 21 |
-
except
|
| 22 |
-
print(f"Dataset load failed: {e}")
|
| 23 |
return None
|
| 24 |
|
| 25 |
-
def
|
| 26 |
-
"""Download full history from 2008 and upload to HF."""
|
| 27 |
tickers = list(set(X_EQUITY_TICKERS + FI_TICKERS + ["SPY", "AGG"]))
|
| 28 |
-
|
| 29 |
-
|
| 30 |
-
status = st.empty()
|
| 31 |
-
progress_bar = st.progress(0)
|
| 32 |
-
|
| 33 |
-
for i, t in enumerate(tickers):
|
| 34 |
-
status.text(f"Seeding {t}...")
|
| 35 |
-
try:
|
| 36 |
-
# Fetching from 2008 for initial dataset
|
| 37 |
-
data = yf.download(t, start="2008-01-01", progress=False)['Adj Close']
|
| 38 |
-
master_df[t] = data
|
| 39 |
-
except:
|
| 40 |
-
continue
|
| 41 |
-
progress_bar.progress((i + 1) / len(tickers))
|
| 42 |
|
| 43 |
# Add SOFR
|
| 44 |
sofr = web.DataReader('SOFR', 'fred', start="2008-01-01").ffill()
|
| 45 |
-
|
| 46 |
-
|
| 47 |
-
|
| 48 |
-
master_df.to_csv(FILENAME)
|
| 49 |
|
| 50 |
-
|
|
|
|
|
|
|
|
|
|
|
|
|
| 51 |
api = HfApi()
|
| 52 |
-
|
| 53 |
-
api.upload_file(
|
| 54 |
-
path_or_fileobj=FILENAME,
|
| 55 |
-
path_in_repo=FILENAME,
|
| 56 |
-
repo_id=REPO_ID,
|
| 57 |
-
repo_type="dataset",
|
| 58 |
-
token=token
|
| 59 |
-
)
|
| 60 |
-
return master_df
|
|
|
|
| 1 |
import pandas as pd
|
|
|
|
| 2 |
import yfinance as yf
|
| 3 |
+
import pandas_datareader.data as web
|
| 4 |
from huggingface_hub import hf_hub_download, HfApi
|
| 5 |
import os
|
| 6 |
import streamlit as st
|
|
|
|
| 12 |
FI_TICKERS = ["TLT", "IEF", "TIP", "TBT", "GLD", "SLV", "VGIT", "VCLT", "VCIT", "HYG", "PFF", "MBB", "VNQ", "LQD", "AGG"]
|
| 13 |
|
| 14 |
def load_from_hf():
|
|
|
|
| 15 |
try:
|
| 16 |
+
token = st.secrets["HF_TOKEN"]
|
|
|
|
| 17 |
path = hf_hub_download(repo_id=REPO_ID, filename=FILENAME, repo_type="dataset", token=token)
|
| 18 |
return pd.read_csv(path, index_col=0, parse_dates=True)
|
| 19 |
+
except:
|
|
|
|
| 20 |
return None
|
| 21 |
|
| 22 |
+
def seed_dataset():
|
|
|
|
| 23 |
tickers = list(set(X_EQUITY_TICKERS + FI_TICKERS + ["SPY", "AGG"]))
|
| 24 |
+
# Download Wide Format
|
| 25 |
+
df = yf.download(tickers, start="2008-01-01")['Adj Close']
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 26 |
|
| 27 |
# Add SOFR
|
| 28 |
sofr = web.DataReader('SOFR', 'fred', start="2008-01-01").ffill()
|
| 29 |
+
df['SOFR_ANNUAL'] = sofr / 100
|
| 30 |
+
df = df.sort_index().ffill()
|
|
|
|
|
|
|
| 31 |
|
| 32 |
+
df.to_csv(FILENAME)
|
| 33 |
+
upload_to_hf(FILENAME)
|
| 34 |
+
return df
|
| 35 |
+
|
| 36 |
+
def upload_to_hf(path):
|
| 37 |
api = HfApi()
|
| 38 |
+
api.upload_file(path_or_fileobj=path, path_in_repo=FILENAME, repo_id=REPO_ID, repo_type="dataset", token=st.secrets["HF_TOKEN"])
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/app.py
CHANGED
|
@@ -1,23 +1,35 @@
|
|
| 1 |
import streamlit as st
|
| 2 |
-
|
| 3 |
-
|
|
|
|
|
|
|
|
|
|
| 4 |
|
| 5 |
-
st.
|
| 6 |
|
| 7 |
-
#
|
| 8 |
-
|
| 9 |
-
|
|
|
|
|
|
|
| 10 |
|
| 11 |
-
|
| 12 |
-
|
| 13 |
-
|
| 14 |
-
with st.spinner("Downloading 18 years of data... this takes a few minutes."):
|
| 15 |
-
st.session_state.master_data = seed_dataset_from_scratch()
|
| 16 |
-
st.success("Database seeded and uploaded to HF!")
|
| 17 |
-
else:
|
| 18 |
-
if st.sidebar.button("🔄 Step 2: Daily Incremental Sync"):
|
| 19 |
-
# (Existing incremental sync logic here)
|
| 20 |
-
st.sidebar.write("Last Data Point:", st.session_state.master_data.index.max())
|
| 21 |
|
| 22 |
-
|
| 23 |
-
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
import streamlit as st
|
| 2 |
+
import pandas as pd
|
| 3 |
+
import pandas_market_calendars as mcal
|
| 4 |
+
from datetime import datetime
|
| 5 |
+
from data.loader import load_from_hf, seed_dataset, X_EQUITY_TICKERS, FI_TICKERS
|
| 6 |
+
from engine.trend_engine import run_trend_module
|
| 7 |
|
| 8 |
+
st.set_page_config(layout="wide", page_title="P2 Trend Suite")
|
| 9 |
|
| 10 |
+
# Sidebar Logic
|
| 11 |
+
st.sidebar.title("Configuration")
|
| 12 |
+
option = st.sidebar.radio("Select Strategy", ("Option A - FI Trend Follower", "Option B - Equity Trend Follower"))
|
| 13 |
+
start_year = st.sidebar.slider("Start Year", 2008, 2026, 2015)
|
| 14 |
+
vol_target = st.sidebar.slider("Annual Vol Target", 0.05, 0.25, 0.12)
|
| 15 |
|
| 16 |
+
# Data Initialization
|
| 17 |
+
if 'data' not in st.session_state:
|
| 18 |
+
st.session_state.data = load_from_hf()
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 19 |
|
| 20 |
+
if st.session_state.data is None:
|
| 21 |
+
if st.button("🚀 First Time Setup: Seed 2008-2026 Data"):
|
| 22 |
+
st.session_state.data = seed_dataset()
|
| 23 |
+
st.rerun()
|
| 24 |
+
else:
|
| 25 |
+
# RUN STRATEGY
|
| 26 |
+
universe = FI_TICKERS if "Option A" in option else X_EQUITY_TICKERS
|
| 27 |
+
bench = "AGG" if "Option A" in option else "SPY"
|
| 28 |
+
|
| 29 |
+
# Filter by Year
|
| 30 |
+
d = st.session_state.data[st.session_state.data.index.year >= start_year]
|
| 31 |
+
results = run_trend_module(d[universe], d['SOFR_ANNUAL'], vol_target)
|
| 32 |
+
|
| 33 |
+
# UI OUTPUTS (Sharpe, Max DD, etc.)
|
| 34 |
+
st.title(f"📈 {option} Performance")
|
| 35 |
+
# ... (Insert Metric & Chart code here)
|
hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/data/loader.py
CHANGED
|
@@ -8,51 +8,53 @@ import streamlit as st
|
|
| 8 |
REPO_ID = "P2SAMAPA/etf_trend_data"
|
| 9 |
FILENAME = "market_data.csv"
|
| 10 |
|
| 11 |
-
# Universes
|
| 12 |
X_EQUITY_TICKERS = ["XLK", "XLY", "XLP", "XLE", "XLV", "XLI", "XLB", "XLRE", "XLU", "XLC", "XLF", "XBI", "XME", "XOP", "XHB", "XSD", "XRT", "XPH", "XES", "XAR", "XHS", "XHE", "XSW", "XTN", "XTL", "XNTK", "XITK"]
|
| 13 |
FI_TICKERS = ["TLT", "IEF", "TIP", "TBT", "GLD", "SLV", "VGIT", "VCLT", "VCIT", "HYG", "PFF", "MBB", "VNQ", "LQD", "AGG"]
|
| 14 |
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 15 |
def seed_dataset_from_scratch():
|
| 16 |
-
"""Download full history from 2008
|
| 17 |
tickers = list(set(X_EQUITY_TICKERS + FI_TICKERS + ["SPY", "AGG"]))
|
| 18 |
master_df = pd.DataFrame()
|
| 19 |
|
|
|
|
| 20 |
progress_bar = st.progress(0)
|
|
|
|
| 21 |
for i, t in enumerate(tickers):
|
|
|
|
| 22 |
try:
|
| 23 |
-
#
|
| 24 |
data = yf.download(t, start="2008-01-01", progress=False)['Adj Close']
|
| 25 |
master_df[t] = data
|
| 26 |
-
except
|
| 27 |
-
|
| 28 |
progress_bar.progress((i + 1) / len(tickers))
|
| 29 |
|
| 30 |
-
# Add SOFR
|
| 31 |
sofr = web.DataReader('SOFR', 'fred', start="2008-01-01").ffill()
|
| 32 |
master_df['SOFR_ANNUAL'] = sofr / 100
|
|
|
|
| 33 |
|
| 34 |
-
master_df = master_df.sort_index().ffill().dropna(how='all')
|
| 35 |
-
|
| 36 |
-
# Save and Upload
|
| 37 |
master_df.to_csv(FILENAME)
|
| 38 |
-
|
| 39 |
-
|
| 40 |
-
|
| 41 |
-
def upload_to_hf(local_path):
|
| 42 |
-
"""Pushes the local CSV to your Hugging Face Dataset repo."""
|
| 43 |
api = HfApi()
|
|
|
|
| 44 |
api.upload_file(
|
| 45 |
-
path_or_fileobj=
|
| 46 |
path_in_repo=FILENAME,
|
| 47 |
repo_id=REPO_ID,
|
| 48 |
repo_type="dataset",
|
| 49 |
-
token=
|
| 50 |
)
|
| 51 |
-
|
| 52 |
-
def load_from_hf():
|
| 53 |
-
"""Reads the dataset from Hugging Face."""
|
| 54 |
-
try:
|
| 55 |
-
path = hf_hub_download(repo_id=REPO_ID, filename=FILENAME, repo_type="dataset", token=st.secrets["HF_TOKEN"])
|
| 56 |
-
return pd.read_csv(path, index_col=0, parse_dates=True)
|
| 57 |
-
except:
|
| 58 |
-
return None
|
|
|
|
| 8 |
REPO_ID = "P2SAMAPA/etf_trend_data"
|
| 9 |
FILENAME = "market_data.csv"
|
| 10 |
|
|
|
|
| 11 |
X_EQUITY_TICKERS = ["XLK", "XLY", "XLP", "XLE", "XLV", "XLI", "XLB", "XLRE", "XLU", "XLC", "XLF", "XBI", "XME", "XOP", "XHB", "XSD", "XRT", "XPH", "XES", "XAR", "XHS", "XHE", "XSW", "XTN", "XTL", "XNTK", "XITK"]
|
| 12 |
FI_TICKERS = ["TLT", "IEF", "TIP", "TBT", "GLD", "SLV", "VGIT", "VCLT", "VCIT", "HYG", "PFF", "MBB", "VNQ", "LQD", "AGG"]
|
| 13 |
|
| 14 |
+
def load_from_hf():
|
| 15 |
+
"""Reads the dataset from Hugging Face."""
|
| 16 |
+
try:
|
| 17 |
+
# Note: Use st.secrets if token is not in env
|
| 18 |
+
token = st.secrets.get("HF_TOKEN") or os.getenv("HF_TOKEN")
|
| 19 |
+
path = hf_hub_download(repo_id=REPO_ID, filename=FILENAME, repo_type="dataset", token=token)
|
| 20 |
+
return pd.read_csv(path, index_col=0, parse_dates=True)
|
| 21 |
+
except Exception as e:
|
| 22 |
+
print(f"Dataset load failed: {e}")
|
| 23 |
+
return None
|
| 24 |
+
|
| 25 |
def seed_dataset_from_scratch():
|
| 26 |
+
"""Download full history from 2008 and upload to HF."""
|
| 27 |
tickers = list(set(X_EQUITY_TICKERS + FI_TICKERS + ["SPY", "AGG"]))
|
| 28 |
master_df = pd.DataFrame()
|
| 29 |
|
| 30 |
+
status = st.empty()
|
| 31 |
progress_bar = st.progress(0)
|
| 32 |
+
|
| 33 |
for i, t in enumerate(tickers):
|
| 34 |
+
status.text(f"Seeding {t}...")
|
| 35 |
try:
|
| 36 |
+
# Fetching from 2008 for initial dataset
|
| 37 |
data = yf.download(t, start="2008-01-01", progress=False)['Adj Close']
|
| 38 |
master_df[t] = data
|
| 39 |
+
except:
|
| 40 |
+
continue
|
| 41 |
progress_bar.progress((i + 1) / len(tickers))
|
| 42 |
|
| 43 |
+
# Add SOFR
|
| 44 |
sofr = web.DataReader('SOFR', 'fred', start="2008-01-01").ffill()
|
| 45 |
master_df['SOFR_ANNUAL'] = sofr / 100
|
| 46 |
+
master_df = master_df.sort_index().ffill()
|
| 47 |
|
|
|
|
|
|
|
|
|
|
| 48 |
master_df.to_csv(FILENAME)
|
| 49 |
+
|
| 50 |
+
# Upload
|
|
|
|
|
|
|
|
|
|
| 51 |
api = HfApi()
|
| 52 |
+
token = st.secrets.get("HF_TOKEN") or os.getenv("HF_TOKEN")
|
| 53 |
api.upload_file(
|
| 54 |
+
path_or_fileobj=FILENAME,
|
| 55 |
path_in_repo=FILENAME,
|
| 56 |
repo_id=REPO_ID,
|
| 57 |
repo_type="dataset",
|
| 58 |
+
token=token
|
| 59 |
)
|
| 60 |
+
return master_df
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/app.py
CHANGED
|
@@ -1,99 +1,23 @@
|
|
| 1 |
import streamlit as st
|
| 2 |
-
import
|
| 3 |
-
|
| 4 |
-
|
| 5 |
-
|
| 6 |
-
|
| 7 |
-
|
| 8 |
-
|
| 9 |
-
st.
|
| 10 |
-
|
| 11 |
-
|
| 12 |
-
st.
|
| 13 |
-
|
| 14 |
-
|
| 15 |
-
|
| 16 |
-
|
| 17 |
-
|
| 18 |
-
|
| 19 |
-
|
| 20 |
-
|
| 21 |
-
|
| 22 |
-
|
| 23 |
-
|
| 24 |
-
if st.sidebar.button("🔄 Sync Market Data"):
|
| 25 |
-
with st.spinner("Fetching Data..."):
|
| 26 |
-
refresh_market_data()
|
| 27 |
-
st.sidebar.success("Data Synced!")
|
| 28 |
-
|
| 29 |
-
# --- DATA PROCESSING ---
|
| 30 |
-
try:
|
| 31 |
-
data = pd.read_csv("market_data.csv", index_col=0, parse_dates=True)
|
| 32 |
-
|
| 33 |
-
# Filter by Year
|
| 34 |
-
data = data[data.index.year >= start_year]
|
| 35 |
-
|
| 36 |
-
# Assign Universe & Benchmark
|
| 37 |
-
if "Option B" in option:
|
| 38 |
-
universe = X_EQUITY_TICKERS
|
| 39 |
-
benchmark_ticker = "SPY"
|
| 40 |
-
else:
|
| 41 |
-
universe = FI_TICKERS
|
| 42 |
-
benchmark_ticker = "AGG"
|
| 43 |
-
|
| 44 |
-
# Run Analysis
|
| 45 |
-
results = run_trend_module(data[universe], data['SOFR_ANNUAL'], vol_target)
|
| 46 |
-
|
| 47 |
-
# --- CALCULATE METRICS ---
|
| 48 |
-
curve = results['curve']
|
| 49 |
-
rets = results['returns']
|
| 50 |
-
|
| 51 |
-
# Sharpe (Excess over 0)
|
| 52 |
-
sharpe = (rets.mean() * 252) / (rets.std() * np.sqrt(252))
|
| 53 |
-
|
| 54 |
-
# Annualized Return
|
| 55 |
-
total_days = (curve.index[-1] - curve.index[0]).days
|
| 56 |
-
ann_return = (curve.iloc[-1]**(365/total_days) - 1)
|
| 57 |
-
|
| 58 |
-
# Drawdowns
|
| 59 |
-
rolling_max = curve.cummax()
|
| 60 |
-
drawdown = (curve - rolling_max) / rolling_max
|
| 61 |
-
max_dd_peak = drawdown.min()
|
| 62 |
-
max_dd_daily = rets.min()
|
| 63 |
-
|
| 64 |
-
# NYSE Calendar for Next Day
|
| 65 |
-
nyse = mcal.get_calendar('NYSE')
|
| 66 |
-
schedule = nyse.schedule(start_date=datetime.now(), end_date='2026-12-31')
|
| 67 |
-
next_day = schedule.index[0].strftime('%Y-%m-%d')
|
| 68 |
-
|
| 69 |
-
# --- OUTPUT UI ---
|
| 70 |
-
st.title(f"📊 {option}")
|
| 71 |
-
|
| 72 |
-
# Stats Row
|
| 73 |
-
c1, c2, c3, c4, c5 = st.columns(5)
|
| 74 |
-
c1.metric("Sharpe Ratio", f"{sharpe:.2f}")
|
| 75 |
-
c2.metric("Annual Return", f"{ann_return:.2%}")
|
| 76 |
-
c3.metric("Max DD (P-to-T)", f"{max_dd_peak:.2%}")
|
| 77 |
-
c4.metric("Max DD (Daily)", f"{max_dd_daily:.2%}")
|
| 78 |
-
c5.metric("Next Trade Date", next_day)
|
| 79 |
-
|
| 80 |
-
# Allocation Table
|
| 81 |
-
st.subheader(f"📍 Target Allocation for {next_day}")
|
| 82 |
-
alloc = results['alloc']
|
| 83 |
-
st.dataframe(alloc[alloc['Weight (%)'] > 0].sort_values("Weight (%)", ascending=False), use_container_width=True)
|
| 84 |
-
|
| 85 |
-
# Performance Chart
|
| 86 |
-
st.subheader(f"Cumulative Return vs {benchmark_ticker}")
|
| 87 |
-
bench_curve = (1 + data[benchmark_ticker].pct_change().fillna(0)).cumprod()
|
| 88 |
-
# Normalize benchmark to start at 1.0 at start_year
|
| 89 |
-
bench_curve = bench_curve / bench_curve.iloc[0]
|
| 90 |
-
|
| 91 |
-
chart_df = pd.DataFrame({
|
| 92 |
-
"Strategy": curve,
|
| 93 |
-
f"Benchmark ({benchmark_ticker})": bench_curve
|
| 94 |
-
})
|
| 95 |
-
st.line_chart(chart_df)
|
| 96 |
-
|
| 97 |
-
except Exception as e:
|
| 98 |
-
st.info("Please Click 'Sync Market Data' in the sidebar to initialize the engine.")
|
| 99 |
-
st.error(f"Waiting for data... (Technical details: {e})")
|
|
|
|
| 1 |
import streamlit as st
|
| 2 |
+
from data.loader import load_from_hf, seed_dataset_from_scratch, X_EQUITY_TICKERS, FI_TICKERS
|
| 3 |
+
# ... other imports
|
| 4 |
+
|
| 5 |
+
st.sidebar.title("Data Management")
|
| 6 |
+
|
| 7 |
+
# Check if data exists
|
| 8 |
+
if 'master_data' not in st.session_state:
|
| 9 |
+
st.session_state.master_data = load_from_hf()
|
| 10 |
+
|
| 11 |
+
if st.session_state.master_data is None:
|
| 12 |
+
st.warning("Dataset not found on Hugging Face. Please Seed the Database.")
|
| 13 |
+
if st.sidebar.button("🚀 Step 1: Seed Database (2008-Present)"):
|
| 14 |
+
with st.spinner("Downloading 18 years of data... this takes a few minutes."):
|
| 15 |
+
st.session_state.master_data = seed_dataset_from_scratch()
|
| 16 |
+
st.success("Database seeded and uploaded to HF!")
|
| 17 |
+
else:
|
| 18 |
+
if st.sidebar.button("🔄 Step 2: Daily Incremental Sync"):
|
| 19 |
+
# (Existing incremental sync logic here)
|
| 20 |
+
st.sidebar.write("Last Data Point:", st.session_state.master_data.index.max())
|
| 21 |
+
|
| 22 |
+
# --- REST OF THE UI ---
|
| 23 |
+
# Run Option A/B logic using st.session_state.master_data
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/data/loader.py
CHANGED
|
@@ -1,32 +1,58 @@
|
|
|
|
|
| 1 |
import pandas_datareader.data as web
|
| 2 |
import yfinance as yf
|
| 3 |
-
|
|
|
|
| 4 |
import streamlit as st
|
| 5 |
|
| 6 |
-
|
| 7 |
-
|
| 8 |
-
"XLK", "XLY", "XLP", "XLE", "XLV", "XLI", "XLB", "XLRE", "XLU", "XLC", "XLF",
|
| 9 |
-
"XBI", "XME", "XOP", "XHB", "XSD", "XRT", "XPH", "XES", "XAR", "XHS", "XHE",
|
| 10 |
-
"XSW", "XTN", "XTL", "XNTK", "XITK"
|
| 11 |
-
]
|
| 12 |
|
| 13 |
-
#
|
|
|
|
| 14 |
FI_TICKERS = ["TLT", "IEF", "TIP", "TBT", "GLD", "SLV", "VGIT", "VCLT", "VCIT", "HYG", "PFF", "MBB", "VNQ", "LQD", "AGG"]
|
| 15 |
|
| 16 |
-
def
|
| 17 |
-
"""
|
| 18 |
-
|
| 19 |
-
|
| 20 |
-
|
|
|
|
|
|
|
| 21 |
try:
|
| 22 |
-
|
| 23 |
-
|
| 24 |
-
|
| 25 |
-
|
| 26 |
-
|
| 27 |
-
|
|
|
|
|
|
|
|
|
|
|
|
|
| 28 |
|
| 29 |
-
|
| 30 |
-
|
| 31 |
-
|
| 32 |
-
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
import pandas as pd
|
| 2 |
import pandas_datareader.data as web
|
| 3 |
import yfinance as yf
|
| 4 |
+
from huggingface_hub import hf_hub_download, HfApi
|
| 5 |
+
import os
|
| 6 |
import streamlit as st
|
| 7 |
|
| 8 |
+
REPO_ID = "P2SAMAPA/etf_trend_data"
|
| 9 |
+
FILENAME = "market_data.csv"
|
|
|
|
|
|
|
|
|
|
|
|
|
| 10 |
|
| 11 |
+
# Universes
|
| 12 |
+
X_EQUITY_TICKERS = ["XLK", "XLY", "XLP", "XLE", "XLV", "XLI", "XLB", "XLRE", "XLU", "XLC", "XLF", "XBI", "XME", "XOP", "XHB", "XSD", "XRT", "XPH", "XES", "XAR", "XHS", "XHE", "XSW", "XTN", "XTL", "XNTK", "XITK"]
|
| 13 |
FI_TICKERS = ["TLT", "IEF", "TIP", "TBT", "GLD", "SLV", "VGIT", "VCLT", "VCIT", "HYG", "PFF", "MBB", "VNQ", "LQD", "AGG"]
|
| 14 |
|
| 15 |
+
def seed_dataset_from_scratch():
|
| 16 |
+
"""Download full history from 2008 for all 42+ tickers and upload to HF."""
|
| 17 |
+
tickers = list(set(X_EQUITY_TICKERS + FI_TICKERS + ["SPY", "AGG"]))
|
| 18 |
+
master_df = pd.DataFrame()
|
| 19 |
+
|
| 20 |
+
progress_bar = st.progress(0)
|
| 21 |
+
for i, t in enumerate(tickers):
|
| 22 |
try:
|
| 23 |
+
# We use yfinance for the heavy initial lift as it handles long historical ranges reliably
|
| 24 |
+
data = yf.download(t, start="2008-01-01", progress=False)['Adj Close']
|
| 25 |
+
master_df[t] = data
|
| 26 |
+
except Exception as e:
|
| 27 |
+
st.warning(f"Failed to fetch {t}: {e}")
|
| 28 |
+
progress_bar.progress((i + 1) / len(tickers))
|
| 29 |
+
|
| 30 |
+
# Add SOFR (Cash Interest)
|
| 31 |
+
sofr = web.DataReader('SOFR', 'fred', start="2008-01-01").ffill()
|
| 32 |
+
master_df['SOFR_ANNUAL'] = sofr / 100
|
| 33 |
|
| 34 |
+
master_df = master_df.sort_index().ffill().dropna(how='all')
|
| 35 |
+
|
| 36 |
+
# Save and Upload
|
| 37 |
+
master_df.to_csv(FILENAME)
|
| 38 |
+
upload_to_hf(FILENAME)
|
| 39 |
+
return master_df
|
| 40 |
+
|
| 41 |
+
def upload_to_hf(local_path):
|
| 42 |
+
"""Pushes the local CSV to your Hugging Face Dataset repo."""
|
| 43 |
+
api = HfApi()
|
| 44 |
+
api.upload_file(
|
| 45 |
+
path_or_fileobj=local_path,
|
| 46 |
+
path_in_repo=FILENAME,
|
| 47 |
+
repo_id=REPO_ID,
|
| 48 |
+
repo_type="dataset",
|
| 49 |
+
token=st.secrets["HF_TOKEN"]
|
| 50 |
+
)
|
| 51 |
+
|
| 52 |
+
def load_from_hf():
|
| 53 |
+
"""Reads the dataset from Hugging Face."""
|
| 54 |
+
try:
|
| 55 |
+
path = hf_hub_download(repo_id=REPO_ID, filename=FILENAME, repo_type="dataset", token=st.secrets["HF_TOKEN"])
|
| 56 |
+
return pd.read_csv(path, index_col=0, parse_dates=True)
|
| 57 |
+
except:
|
| 58 |
+
return None
|
hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/app.py
CHANGED
|
@@ -11,78 +11,89 @@ st.set_page_config(layout="wide", page_title="P2 Trend Suite")
|
|
| 11 |
# --- SIDEBAR UI ---
|
| 12 |
st.sidebar.title("Strategy Controls")
|
| 13 |
|
| 14 |
-
# 1.
|
| 15 |
-
option = st.sidebar.radio("Select
|
| 16 |
("Option A - FI Trend Follower", "Option B - Equity Trend Follower"))
|
| 17 |
|
| 18 |
# 2. Year Slider
|
| 19 |
-
start_year = st.sidebar.slider("Start Year
|
| 20 |
|
| 21 |
-
# 3.
|
| 22 |
vol_target = st.sidebar.slider("Annual Vol Target", 0.05, 0.25, 0.126)
|
|
|
|
| 23 |
if st.sidebar.button("🔄 Sync Market Data"):
|
| 24 |
-
|
|
|
|
| 25 |
st.sidebar.success("Data Synced!")
|
| 26 |
|
| 27 |
-
# ---
|
| 28 |
-
|
| 29 |
-
today = datetime.now().strftime('%Y-%m-%d')
|
| 30 |
-
schedule = nyse.schedule(start_date=today, end_date='2026-12-31')
|
| 31 |
-
next_trading_day = schedule.index[0].strftime('%A, %b %d, %Y')
|
| 32 |
-
|
| 33 |
-
# --- EXECUTION ---
|
| 34 |
-
if st.button("▶ Run Analysis"):
|
| 35 |
data = pd.read_csv("market_data.csv", index_col=0, parse_dates=True)
|
| 36 |
|
| 37 |
-
# Filter by
|
| 38 |
data = data[data.index.year >= start_year]
|
| 39 |
-
|
| 40 |
-
#
|
| 41 |
if "Option B" in option:
|
| 42 |
universe = X_EQUITY_TICKERS
|
| 43 |
benchmark_ticker = "SPY"
|
| 44 |
-
module_name = "Equity"
|
| 45 |
else:
|
| 46 |
universe = FI_TICKERS
|
| 47 |
benchmark_ticker = "AGG"
|
| 48 |
-
module_name = "Fixed Income"
|
| 49 |
|
| 50 |
-
# Run
|
| 51 |
results = run_trend_module(data[universe], data['SOFR_ANNUAL'], vol_target)
|
| 52 |
|
| 53 |
-
#
|
| 54 |
-
|
| 55 |
-
|
| 56 |
-
bench_returns = data[benchmark_ticker].pct_change().fillna(0)
|
| 57 |
-
bench_curve = (1 + bench_returns).cumprod()
|
| 58 |
|
| 59 |
-
#
|
| 60 |
-
|
| 61 |
-
sharpe = (returns.mean() * 252) / (returns.std() * np.sqrt(252))
|
| 62 |
|
| 63 |
-
|
| 64 |
-
|
| 65 |
-
|
| 66 |
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 67 |
# --- OUTPUT UI ---
|
| 68 |
-
st.
|
| 69 |
|
| 70 |
-
#
|
| 71 |
-
|
| 72 |
-
|
| 73 |
-
|
|
|
|
|
|
|
|
|
|
| 74 |
|
| 75 |
-
#
|
| 76 |
-
|
| 77 |
-
|
| 78 |
-
|
| 79 |
-
m3.metric("Max DD (Peak-to-Trough)", f"{max_dd_peak:.2%}")
|
| 80 |
-
m4.metric("Last Daily Return", f"{returns.iloc[-1]:.2%}")
|
| 81 |
|
| 82 |
-
# Chart
|
| 83 |
st.subheader(f"Cumulative Return vs {benchmark_ticker}")
|
| 84 |
-
|
| 85 |
-
|
|
|
|
|
|
|
|
|
|
|
|
|
| 86 |
f"Benchmark ({benchmark_ticker})": bench_curve
|
| 87 |
})
|
| 88 |
-
st.line_chart(
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 11 |
# --- SIDEBAR UI ---
|
| 12 |
st.sidebar.title("Strategy Controls")
|
| 13 |
|
| 14 |
+
# 1. Module Toggle
|
| 15 |
+
option = st.sidebar.radio("Select Module",
|
| 16 |
("Option A - FI Trend Follower", "Option B - Equity Trend Follower"))
|
| 17 |
|
| 18 |
# 2. Year Slider
|
| 19 |
+
start_year = st.sidebar.slider("Start Year", 2008, 2026, 2015)
|
| 20 |
|
| 21 |
+
# 3. Parameters
|
| 22 |
vol_target = st.sidebar.slider("Annual Vol Target", 0.05, 0.25, 0.126)
|
| 23 |
+
|
| 24 |
if st.sidebar.button("🔄 Sync Market Data"):
|
| 25 |
+
with st.spinner("Fetching Data..."):
|
| 26 |
+
refresh_market_data()
|
| 27 |
st.sidebar.success("Data Synced!")
|
| 28 |
|
| 29 |
+
# --- DATA PROCESSING ---
|
| 30 |
+
try:
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 31 |
data = pd.read_csv("market_data.csv", index_col=0, parse_dates=True)
|
| 32 |
|
| 33 |
+
# Filter by Year
|
| 34 |
data = data[data.index.year >= start_year]
|
| 35 |
+
|
| 36 |
+
# Assign Universe & Benchmark
|
| 37 |
if "Option B" in option:
|
| 38 |
universe = X_EQUITY_TICKERS
|
| 39 |
benchmark_ticker = "SPY"
|
|
|
|
| 40 |
else:
|
| 41 |
universe = FI_TICKERS
|
| 42 |
benchmark_ticker = "AGG"
|
|
|
|
| 43 |
|
| 44 |
+
# Run Analysis
|
| 45 |
results = run_trend_module(data[universe], data['SOFR_ANNUAL'], vol_target)
|
| 46 |
|
| 47 |
+
# --- CALCULATE METRICS ---
|
| 48 |
+
curve = results['curve']
|
| 49 |
+
rets = results['returns']
|
|
|
|
|
|
|
| 50 |
|
| 51 |
+
# Sharpe (Excess over 0)
|
| 52 |
+
sharpe = (rets.mean() * 252) / (rets.std() * np.sqrt(252))
|
|
|
|
| 53 |
|
| 54 |
+
# Annualized Return
|
| 55 |
+
total_days = (curve.index[-1] - curve.index[0]).days
|
| 56 |
+
ann_return = (curve.iloc[-1]**(365/total_days) - 1)
|
| 57 |
|
| 58 |
+
# Drawdowns
|
| 59 |
+
rolling_max = curve.cummax()
|
| 60 |
+
drawdown = (curve - rolling_max) / rolling_max
|
| 61 |
+
max_dd_peak = drawdown.min()
|
| 62 |
+
max_dd_daily = rets.min()
|
| 63 |
+
|
| 64 |
+
# NYSE Calendar for Next Day
|
| 65 |
+
nyse = mcal.get_calendar('NYSE')
|
| 66 |
+
schedule = nyse.schedule(start_date=datetime.now(), end_date='2026-12-31')
|
| 67 |
+
next_day = schedule.index[0].strftime('%Y-%m-%d')
|
| 68 |
+
|
| 69 |
# --- OUTPUT UI ---
|
| 70 |
+
st.title(f"📊 {option}")
|
| 71 |
|
| 72 |
+
# Stats Row
|
| 73 |
+
c1, c2, c3, c4, c5 = st.columns(5)
|
| 74 |
+
c1.metric("Sharpe Ratio", f"{sharpe:.2f}")
|
| 75 |
+
c2.metric("Annual Return", f"{ann_return:.2%}")
|
| 76 |
+
c3.metric("Max DD (P-to-T)", f"{max_dd_peak:.2%}")
|
| 77 |
+
c4.metric("Max DD (Daily)", f"{max_dd_daily:.2%}")
|
| 78 |
+
c5.metric("Next Trade Date", next_day)
|
| 79 |
|
| 80 |
+
# Allocation Table
|
| 81 |
+
st.subheader(f"📍 Target Allocation for {next_day}")
|
| 82 |
+
alloc = results['alloc']
|
| 83 |
+
st.dataframe(alloc[alloc['Weight (%)'] > 0].sort_values("Weight (%)", ascending=False), use_container_width=True)
|
|
|
|
|
|
|
| 84 |
|
| 85 |
+
# Performance Chart
|
| 86 |
st.subheader(f"Cumulative Return vs {benchmark_ticker}")
|
| 87 |
+
bench_curve = (1 + data[benchmark_ticker].pct_change().fillna(0)).cumprod()
|
| 88 |
+
# Normalize benchmark to start at 1.0 at start_year
|
| 89 |
+
bench_curve = bench_curve / bench_curve.iloc[0]
|
| 90 |
+
|
| 91 |
+
chart_df = pd.DataFrame({
|
| 92 |
+
"Strategy": curve,
|
| 93 |
f"Benchmark ({benchmark_ticker})": bench_curve
|
| 94 |
})
|
| 95 |
+
st.line_chart(chart_df)
|
| 96 |
+
|
| 97 |
+
except Exception as e:
|
| 98 |
+
st.info("Please Click 'Sync Market Data' in the sidebar to initialize the engine.")
|
| 99 |
+
st.error(f"Waiting for data... (Technical details: {e})")
|
hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/app.py
CHANGED
|
@@ -1,39 +1,88 @@
|
|
| 1 |
import streamlit as st
|
| 2 |
import pandas as pd
|
|
|
|
|
|
|
|
|
|
| 3 |
from data.loader import refresh_market_data, X_EQUITY_TICKERS, FI_TICKERS
|
| 4 |
from engine.trend_engine import run_trend_module
|
| 5 |
|
| 6 |
-
st.set_page_config(layout="wide", page_title="P2
|
| 7 |
|
| 8 |
-
|
| 9 |
-
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 10 |
|
| 11 |
-
|
|
|
|
|
|
|
| 12 |
refresh_market_data()
|
| 13 |
-
st.sidebar.success("Data
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 14 |
|
| 15 |
-
|
|
|
|
| 16 |
data = pd.read_csv("market_data.csv", index_col=0, parse_dates=True)
|
| 17 |
|
| 18 |
-
#
|
| 19 |
-
|
| 20 |
-
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 21 |
|
| 22 |
-
|
| 23 |
-
|
| 24 |
-
|
| 25 |
-
"X-ETF Strategy": eq_res['curve'],
|
| 26 |
-
"SPY Benchmark": spy_curve
|
| 27 |
-
}).dropna()
|
| 28 |
-
|
| 29 |
-
st.header("📈 Performance: Equity Strategy vs. SPY")
|
| 30 |
-
st.line_chart(comparison)
|
| 31 |
|
| 32 |
-
#
|
| 33 |
-
|
| 34 |
-
|
| 35 |
-
|
| 36 |
-
|
| 37 |
-
|
| 38 |
-
|
| 39 |
-
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
import streamlit as st
|
| 2 |
import pandas as pd
|
| 3 |
+
import numpy as np
|
| 4 |
+
import pandas_market_calendars as mcal
|
| 5 |
+
from datetime import datetime
|
| 6 |
from data.loader import refresh_market_data, X_EQUITY_TICKERS, FI_TICKERS
|
| 7 |
from engine.trend_engine import run_trend_module
|
| 8 |
|
| 9 |
+
st.set_page_config(layout="wide", page_title="P2 Trend Suite")
|
| 10 |
|
| 11 |
+
# --- SIDEBAR UI ---
|
| 12 |
+
st.sidebar.title("Strategy Controls")
|
| 13 |
+
|
| 14 |
+
# 1. Option Selection
|
| 15 |
+
option = st.sidebar.radio("Select Strategy Module",
|
| 16 |
+
("Option A - FI Trend Follower", "Option B - Equity Trend Follower"))
|
| 17 |
+
|
| 18 |
+
# 2. Year Slider
|
| 19 |
+
start_year = st.sidebar.slider("Start Year (OOS Period)", 2008, 2025, 2015)
|
| 20 |
|
| 21 |
+
# 3. Vol Target & Sync
|
| 22 |
+
vol_target = st.sidebar.slider("Annual Vol Target", 0.05, 0.25, 0.126)
|
| 23 |
+
if st.sidebar.button("🔄 Sync Market Data"):
|
| 24 |
refresh_market_data()
|
| 25 |
+
st.sidebar.success("Data Synced!")
|
| 26 |
+
|
| 27 |
+
# --- CALENDAR LOGIC ---
|
| 28 |
+
nyse = mcal.get_calendar('NYSE')
|
| 29 |
+
today = datetime.now().strftime('%Y-%m-%d')
|
| 30 |
+
schedule = nyse.schedule(start_date=today, end_date='2026-12-31')
|
| 31 |
+
next_trading_day = schedule.index[0].strftime('%A, %b %d, %Y')
|
| 32 |
|
| 33 |
+
# --- EXECUTION ---
|
| 34 |
+
if st.button("▶ Run Analysis"):
|
| 35 |
data = pd.read_csv("market_data.csv", index_col=0, parse_dates=True)
|
| 36 |
|
| 37 |
+
# Filter by Start Year
|
| 38 |
+
data = data[data.index.year >= start_year]
|
| 39 |
+
|
| 40 |
+
# Select Universe & Benchmark
|
| 41 |
+
if "Option B" in option:
|
| 42 |
+
universe = X_EQUITY_TICKERS
|
| 43 |
+
benchmark_ticker = "SPY"
|
| 44 |
+
module_name = "Equity"
|
| 45 |
+
else:
|
| 46 |
+
universe = FI_TICKERS
|
| 47 |
+
benchmark_ticker = "AGG"
|
| 48 |
+
module_name = "Fixed Income"
|
| 49 |
+
|
| 50 |
+
# Run Engine
|
| 51 |
+
results = run_trend_module(data[universe], data['SOFR_ANNUAL'], vol_target)
|
| 52 |
+
|
| 53 |
+
# Metrics Calculation
|
| 54 |
+
returns = results['returns']
|
| 55 |
+
cum_returns = results['curve']
|
| 56 |
+
bench_returns = data[benchmark_ticker].pct_change().fillna(0)
|
| 57 |
+
bench_curve = (1 + bench_returns).cumprod()
|
| 58 |
+
|
| 59 |
+
# Stats
|
| 60 |
+
ann_return = (cum_returns.iloc[-1]**(252/len(returns)) - 1)
|
| 61 |
+
sharpe = (returns.mean() * 252) / (returns.std() * np.sqrt(252))
|
| 62 |
|
| 63 |
+
rolling_max = cum_returns.cummax()
|
| 64 |
+
drawdown = (cum_returns - rolling_max) / rolling_max
|
| 65 |
+
max_dd_peak = drawdown.min()
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 66 |
|
| 67 |
+
# --- OUTPUT UI ---
|
| 68 |
+
st.header(f"📊 {option} Results")
|
| 69 |
+
|
| 70 |
+
# Target Allocation Section
|
| 71 |
+
st.subheader(f"📅 Next Day Target Allocation: {next_trading_day}")
|
| 72 |
+
alloc_df = results['alloc']
|
| 73 |
+
st.table(alloc_df[alloc_df['Weight (%)'] > 0].sort_values("Weight (%)", ascending=False))
|
| 74 |
+
|
| 75 |
+
# Metrics Row
|
| 76 |
+
m1, m2, m3, m4 = st.columns(4)
|
| 77 |
+
m1.metric("Annualized Return", f"{ann_return:.2%}")
|
| 78 |
+
m2.metric("Sharpe Ratio", f"{sharpe:.2f}")
|
| 79 |
+
m3.metric("Max DD (Peak-to-Trough)", f"{max_dd_peak:.2%}")
|
| 80 |
+
m4.metric("Last Daily Return", f"{returns.iloc[-1]:.2%}")
|
| 81 |
+
|
| 82 |
+
# Chart
|
| 83 |
+
st.subheader(f"Cumulative Return vs {benchmark_ticker}")
|
| 84 |
+
chart_data = pd.DataFrame({
|
| 85 |
+
"Strategy": cum_returns,
|
| 86 |
+
f"Benchmark ({benchmark_ticker})": bench_curve
|
| 87 |
+
})
|
| 88 |
+
st.line_chart(chart_data)
|
hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/engine/trend_engine.py
ADDED
|
@@ -0,0 +1,40 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
import numpy as np
import pandas as pd


def run_trend_module(prices, daily_sofr, vol_target):
    """Dual-trend (Donchian/Keltner) long-only module with risk-parity sizing.

    Parameters
    ----------
    prices : pd.DataFrame
        Daily close prices, one column per ticker.
    daily_sofr : pd.Series
        Annualized SOFR rate as a decimal (e.g. 0.05); earned on idle cash.
    vol_target : float
        Annualized portfolio volatility target (e.g. 0.126).

    Returns
    -------
    dict with keys:
        'curve'   : cumulative equity curve starting at 1.0
        'returns' : daily strategy returns (NaN-free)
        'alloc'   : next-day target allocation DataFrame
    """
    # 1. Dual-Trend Signal: enter on a close above yesterday's tighter of the
    # 20-day Donchian high and the Keltner-style upper band.
    d_high = prices.rolling(20).max()
    sma = prices.rolling(20).mean()
    # NOTE(review): this "ATR" is half the 20-day high-low range, not a true
    # Average True Range — confirm the proxy is intentional.
    atr = (prices.rolling(20).max() - prices.rolling(20).min()) / 2
    k_upper = sma + (2 * atr)

    entry_band = np.minimum(d_high, k_upper)
    signals = (prices > entry_band.shift(1)).astype(int)

    # 2. Risk Parity Position Sizing: equal vol budget per asset.
    returns = prices.pct_change()
    realized_vol = returns.rolling(21).std() * np.sqrt(252)

    n = len(prices.columns)
    # Target weight = (Target Vol / Total Assets) / Individual Asset Vol
    target_weights = (vol_target / n) / realized_vol.shift(1)

    # 3. Strategy Returns (Positions + SOFR on Cash). Signals and weights are
    # lagged one day to avoid lookahead.
    pos_rets = (signals.shift(1) * target_weights.shift(1) * returns).sum(axis=1)
    weight_used = (signals.shift(1) * target_weights.shift(1)).sum(axis=1)
    cash_rets = (1 - weight_used).clip(0, 1) * (daily_sofr / 252)

    # BUG FIX: fill NaNs in the *returns* (warm-up window, SOFR index gaps)
    # BEFORE compounding. The previous `(1 + strat_rets).fillna(0)` turned
    # every NaN day into a 0x multiplier, zeroing the curve permanently.
    strat_rets = (pos_rets + cash_rets).fillna(0)
    equity_curve = (1 + strat_rets).cumprod()

    # 4. Target Allocation for Tomorrow, from today's band and realized vol.
    tomorrow_sig = (prices.iloc[-1] > entry_band.iloc[-1]).astype(int)
    tomorrow_w = (vol_target / n) / realized_vol.iloc[-1]

    alloc = pd.DataFrame({
        "Ticker": prices.columns,
        "Signal": ["LONG" if s == 1 else "CASH" for s in tomorrow_sig],
        "Weight (%)": (tomorrow_sig * tomorrow_w * 100).round(2)
    })

    # 'returns' is exposed because the UI layer reads results['returns'].
    return {"curve": equity_curve, "returns": strat_rets, "alloc": alloc}
|
hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/engine/engine/trend_engine.py
ADDED
|
@@ -0,0 +1,36 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
import numpy as np
import pandas as pd


def run_trend_module(prices, daily_sofr, vol_target):
    """20-day Keltner/Donchian breakout module with risk-parity weighting.

    Parameters
    ----------
    prices : pd.DataFrame
        Daily close prices, one column per ticker.
    daily_sofr : pd.Series
        Annualized SOFR rate as a decimal; earned on uninvested capital.
    vol_target : float
        Annualized portfolio volatility target.

    Returns
    -------
    dict with 'curve' (equity curve from 1.0), 'returns' (daily strategy
    returns, NaN-free) and 'alloc' (next-day allocation table).
    """
    # 1. Signals (20-day Keltner/Donchian): breakout above yesterday's band.
    d_high = prices.rolling(20).max()
    k_sma = prices.rolling(20).mean()
    # NOTE(review): "ATR" here is half the 20-day range, not a true ATR.
    atr = (prices.rolling(20).max() - prices.rolling(20).min()) / 2
    k_upper = k_sma + (2 * atr)

    entry_band = np.minimum(d_high, k_upper)
    signals = (prices > entry_band.shift(1)).astype(int)

    # 2. Risk Parity Weighting: equal vol budget per asset, lagged one day.
    rets = prices.pct_change()
    real_vol = rets.rolling(21).std() * np.sqrt(252)

    n = len(prices.columns)
    weights = (vol_target / n) / real_vol.shift(1)

    # 3. Strategy Returns (Positions + Cash Interest)
    strat_rets = (signals.shift(1) * weights.shift(1) * rets).sum(axis=1)
    unused_cap = 1 - (signals.shift(1) * weights.shift(1)).sum(axis=1)
    strat_rets += unused_cap.clip(0, 1) * (daily_sofr / 252)

    # BUG FIX: warm-up days and SOFR index gaps leave NaNs in strat_rets,
    # which previously propagated straight into the equity curve. Treat
    # those days as flat (0 return) before compounding.
    strat_rets = strat_rets.fillna(0)
    equity_curve = (1 + strat_rets).cumprod()

    # Next Day Allocation from today's band and realized vol.
    tomorrow_sig = (prices.iloc[-1] > entry_band.iloc[-1]).astype(int)
    tomorrow_w = (vol_target / n) / real_vol.iloc[-1]
    alloc = pd.DataFrame({
        "Ticker": prices.columns,
        "Weight (%)": (tomorrow_sig * tomorrow_w * 100).round(2)
    })

    # 'returns' added for UI consumers that read results['returns'];
    # existing keys are unchanged (backward compatible).
    return {"curve": equity_curve, "returns": strat_rets, "alloc": alloc}
|
hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/app.py
CHANGED
|
@@ -1,54 +1,39 @@
|
|
| 1 |
import streamlit as st
|
|
|
|
|
|
|
|
|
|
| 2 |
|
| 3 |
-
st.set_page_config(page_title="P2 ETF Trend Suite"
|
| 4 |
|
| 5 |
-
st.title("
|
| 6 |
-
st.
|
| 7 |
|
| 8 |
-
|
| 9 |
-
|
| 10 |
-
|
| 11 |
-
vol_target = st.sidebar.slider("Target Volatility", 0.05, 0.30, 0.15)
|
| 12 |
-
lookback = st.sidebar.slider("Lookback (Days)", 50, 300, 200)
|
| 13 |
|
| 14 |
-
st.
|
| 15 |
-
|
| 16 |
-
hf_repo = st.sidebar.text_input("Repo ID", placeholder="user/dataset-name")
|
| 17 |
-
hf_token = st.sidebar.text_input("HF Token", type="password")
|
| 18 |
-
|
| 19 |
-
run_button = st.sidebar.button("▶ Run Full Process")
|
| 20 |
-
|
| 21 |
-
if run_button:
|
| 22 |
-
from data.loader import load_data, push_to_hf
|
| 23 |
-
from engine.backtest import run_backtest
|
| 24 |
-
from analytics.metrics import compute_metrics
|
| 25 |
-
|
| 26 |
-
# Phase 1: Data Fetching
|
| 27 |
-
with st.spinner("Fetching data from Stooq..."):
|
| 28 |
-
df = load_data()
|
| 29 |
|
| 30 |
-
|
| 31 |
-
|
| 32 |
-
|
| 33 |
-
|
| 34 |
-
|
| 35 |
-
|
| 36 |
-
|
| 37 |
-
|
| 38 |
-
|
| 39 |
-
|
| 40 |
-
|
| 41 |
-
|
| 42 |
-
|
| 43 |
-
|
| 44 |
-
|
| 45 |
-
|
| 46 |
-
|
| 47 |
-
|
| 48 |
-
|
| 49 |
-
|
| 50 |
-
|
| 51 |
-
|
| 52 |
-
st.sidebar.success("✅ Dataset Synced!")
|
| 53 |
-
else:
|
| 54 |
-
st.error("Data fetch failed. Verify ticker symbols.")
|
|
|
|
| 1 |
import streamlit as st
import pandas as pd
from data.loader import refresh_market_data, X_EQUITY_TICKERS, FI_TICKERS
from engine.trend_engine import run_trend_module

st.set_page_config(layout="wide", page_title="P2 ETF Trend Suite")

# --- Sidebar: strategy parameters and manual data refresh ---
st.sidebar.title("Settings")
vol_target = st.sidebar.slider("Annual Vol Target", 0.05, 0.25, 0.126)

if st.sidebar.button("🔄 Refresh Market Data"):
    # Writes market_data.csv (read again below on the next run).
    refresh_market_data()
    st.sidebar.success("Data Updated from Stooq/SOFR!")

if st.button("▶ Run All Modules"):
    # market_data.csv must already exist (created by the refresh button).
    data = pd.read_csv("market_data.csv", index_col=0, parse_dates=True)

    # Run Modules: equity universe and fixed-income universe share the
    # same trend engine and SOFR cash yield.
    eq_res = run_trend_module(data[X_EQUITY_TICKERS], data['SOFR_ANNUAL'], vol_target)
    fi_res = run_trend_module(data[FI_TICKERS], data['SOFR_ANNUAL'], vol_target)

    # Performance Comparison: buy-and-hold SPY curve vs the equity module.
    spy_curve = (1 + data['SPY'].pct_change()).cumprod()
    comparison = pd.DataFrame({
        "X-ETF Strategy": eq_res['curve'],
        "SPY Benchmark": spy_curve
    }).dropna()

    st.header("📈 Performance: Equity Strategy vs. SPY")
    st.line_chart(comparison)

    # Target Allocations: show only names with a non-zero next-day weight.
    col1, col2 = st.columns(2)
    with col1:
        st.subheader("🛡️ Equity Allocation (Next Day)")
        st.dataframe(eq_res['alloc'][eq_res['alloc']['Weight (%)'] > 0])
    with col2:
        st.subheader("🏦 FI Comparison Allocation")
        st.dataframe(fi_res['alloc'][fi_res['alloc']['Weight (%)'] > 0])
|
|
|
|
|
|
|
|
|
hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/data/loader.py
CHANGED
|
@@ -1,56 +1,32 @@
|
|
| 1 |
-
import pandas as pd
|
| 2 |
import pandas_datareader.data as web
|
| 3 |
import yfinance as yf
|
| 4 |
-
|
| 5 |
import streamlit as st
|
| 6 |
-
from datetime import datetime
|
| 7 |
|
| 8 |
-
#
|
| 9 |
-
|
|
|
|
|
|
|
|
|
|
|
|
|
| 10 |
|
| 11 |
-
|
| 12 |
-
|
| 13 |
-
all_series = {}
|
| 14 |
|
| 15 |
-
|
| 16 |
-
|
| 17 |
-
|
|
|
|
|
|
|
| 18 |
try:
|
| 19 |
-
|
| 20 |
-
|
| 21 |
-
|
| 22 |
|
| 23 |
-
|
| 24 |
-
|
| 25 |
-
all_series[ticker] = df_stooq['Close'].sort_index()
|
| 26 |
-
st.toast(f"✅ {ticker} loaded from Stooq")
|
| 27 |
-
success = True
|
| 28 |
-
except Exception as e:
|
| 29 |
-
print(f"Stooq failed for {ticker}: {e}")
|
| 30 |
-
|
| 31 |
-
# 2. Fallback: yfinance
|
| 32 |
-
if not success:
|
| 33 |
-
try:
|
| 34 |
-
yf_df = yf.download(ticker, period="max", progress=False)
|
| 35 |
-
if not yf_df.empty:
|
| 36 |
-
# Use Adj Close to account for dividends/splits
|
| 37 |
-
all_series[ticker] = yf_df['Adj Close']
|
| 38 |
-
st.toast(f"⚠️ {ticker} loaded from yfinance (Fallback)")
|
| 39 |
-
success = True
|
| 40 |
-
except Exception as e:
|
| 41 |
-
st.error(f"❌ Critical: Could not load {ticker} from any source.")
|
| 42 |
-
|
| 43 |
-
if all_series:
|
| 44 |
-
# Align all tickers on the same dates and drop missing values
|
| 45 |
-
return pd.concat(all_series, axis=1).dropna()
|
| 46 |
-
return pd.DataFrame()
|
| 47 |
-
|
| 48 |
-
def push_to_hf(df, repo_id, token):
|
| 49 |
-
"""Pushes the current dataframe to Hugging Face Hub."""
|
| 50 |
-
# Ensure Date is a column, not an index, for HF compatibility
|
| 51 |
-
hf_export = df.reset_index()
|
| 52 |
-
hf_export.columns = [str(col) for col in hf_export.columns] # Ensure string columns
|
| 53 |
|
| 54 |
-
|
| 55 |
-
|
| 56 |
-
|
|
|
|
|
|
|
|
|
| 1 |
import pandas_datareader.data as web
import yfinance as yf
import pandas as pd
import streamlit as st

# 27 "X-" EQUITY ETFS
X_EQUITY_TICKERS = [
    "XLK", "XLY", "XLP", "XLE", "XLV", "XLI", "XLB", "XLRE", "XLU", "XLC", "XLF",
    "XBI", "XME", "XOP", "XHB", "XSD", "XRT", "XPH", "XES", "XAR", "XHS", "XHE",
    "XSW", "XTN", "XTL", "XNTK", "XITK"
]

# 15 FIXED INCOME / COMPARISON
FI_TICKERS = ["TLT", "IEF", "TIP", "TBT", "GLD", "SLV", "VGIT", "VCLT", "VCIT", "HYG", "PFF", "MBB", "VNQ", "LQD", "AGG"]


def refresh_market_data():
    """Fetch daily closes (Stooq primary, yfinance fallback) plus SOFR,
    write them to market_data.csv and return the combined DataFrame.

    Columns: one per ticker plus 'SOFR_ANNUAL' (annual rate as a decimal).
    """
    all_prices = {}
    # Download all groups + SPY Benchmark
    for t in list(set(X_EQUITY_TICKERS + FI_TICKERS + ["SPY"])):
        try:
            # Stooq symbols carry an exchange suffix (e.g. "SPY.US").
            all_prices[t] = web.DataReader(f"{t}.US", "stooq")['Close']
        except Exception:
            # Narrowed from a bare `except:` (which also caught SystemExit /
            # KeyboardInterrupt). auto_adjust=False keeps the 'Adj Close'
            # column on newer yfinance versions; squeeze() flattens a
            # possible 1-column frame to a Series.
            all_prices[t] = yf.download(
                t, progress=False, auto_adjust=False)['Adj Close'].squeeze()

    # Fetch SOFR (Cash Yield) from FRED
    sofr = web.DataReader('SOFR', 'fred').ffill()

    df = pd.DataFrame(all_prices).sort_index().ffill()
    # FRED returns a 1-column DataFrame; squeeze() to a Series so the
    # assignment aligns cleanly on dates instead of nesting a DataFrame.
    df['SOFR_ANNUAL'] = sofr.squeeze() / 100
    df.to_csv("market_data.csv")
    return df
|
hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/README.md
CHANGED
|
@@ -1,19 +1,15 @@
|
|
| 1 |
---
|
| 2 |
-
title: P2 ETF
|
| 3 |
-
emoji:
|
| 4 |
-
colorFrom:
|
| 5 |
-
colorTo:
|
| 6 |
sdk: docker
|
| 7 |
-
app_port:
|
| 8 |
-
tags:
|
| 9 |
-
- streamlit
|
| 10 |
pinned: false
|
| 11 |
-
short_description: Streamlit template space
|
| 12 |
---
|
| 13 |
|
| 14 |
-
#
|
|
|
|
| 15 |
|
| 16 |
-
|
| 17 |
-
|
| 18 |
-
If you have any questions, checkout our [documentation](https://docs.streamlit.io) and [community
|
| 19 |
-
forums](https://discuss.streamlit.io).
|
|
|
|
| 1 |
---
|
| 2 |
+
title: P2 ETF Trend Suite
|
| 3 |
+
emoji: 📊
|
| 4 |
+
colorFrom: blue
|
| 5 |
+
colorTo: indigo
|
| 6 |
sdk: docker
|
| 7 |
+
app_port: 7860
|
|
|
|
|
|
|
| 8 |
pinned: false
|
|
|
|
| 9 |
---
|
| 10 |
|
| 11 |
+
# 📊 P2 ETF Trend Suite
|
| 12 |
+
Institutional ETF Trend + Volatility Targeting Engine.
|
| 13 |
|
| 14 |
+
### 🚀 Setup Info
|
| 15 |
+
This Space runs a Dockerized Streamlit app. It uses **Stooq** for market data with **yfinance** as a fallback.
|
|
|
|
|
|
hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/Dockerfile
CHANGED
|
@@ -1,26 +1,31 @@
|
|
| 1 |
-
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 2 |
|
| 3 |
WORKDIR /app
|
| 4 |
|
| 5 |
-
#
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 6 |
COPY requirements.txt .
|
| 7 |
|
| 8 |
-
|
| 9 |
-
RUN pip install --
|
|
|
|
| 10 |
|
| 11 |
-
# Copy
|
| 12 |
COPY . .
|
| 13 |
|
|
|
|
| 14 |
EXPOSE 7860
|
| 15 |
|
| 16 |
-
|
| 17 |
-
|
| 18 |
-
ENV STREAMLIT_BROWSER_GATHER_USAGE_STATS=false
|
| 19 |
-
|
| 20 |
-
# Diagnostic startup command
|
| 21 |
-
CMD ["bash", "-c", "echo '===== CONTAINER BOOTING ====='; \
|
| 22 |
-
echo 'Python Version:'; python -V; \
|
| 23 |
-
echo 'Current Directory:'; pwd; \
|
| 24 |
-
echo 'Directory Listing:'; ls -la; \
|
| 25 |
-
echo 'Starting Streamlit...'; \
|
| 26 |
-
python -m streamlit run app.py --server.headless=true"]
|
|
|
|
| 1 |
+
# Use a lightweight but stable Python base
|
| 2 |
+
FROM python:3.10-slim
|
| 3 |
+
|
| 4 |
+
# Set environment variables for speed and logging
|
| 5 |
+
ENV PYTHONUNBUFFERED=1 \
|
| 6 |
+
PYTHONDONTWRITEBYTECODE=1 \
|
| 7 |
+
PIP_NO_CACHE_DIR=1
|
| 8 |
|
| 9 |
WORKDIR /app
|
| 10 |
|
| 11 |
+
# Install system dependencies needed for pandas/datareader
|
| 12 |
+
RUN apt-get update && apt-get install -y \
|
| 13 |
+
build-essential \
|
| 14 |
+
curl \
|
| 15 |
+
&& rm -rf /var/lib/apt/lists/*
|
| 16 |
+
|
| 17 |
+
# Copy only requirements first to leverage Docker cache
|
| 18 |
COPY requirements.txt .
|
| 19 |
|
| 20 |
+
# Install dependencies (use --no-cache-dir to keep image small)
|
| 21 |
+
RUN pip install --upgrade pip && \
|
| 22 |
+
pip install -r requirements.txt
|
| 23 |
|
| 24 |
+
# Copy the rest of the application
|
| 25 |
COPY . .
|
| 26 |
|
| 27 |
+
# Ensure the app runs on the port HF expects (7860 for Docker)
|
| 28 |
EXPOSE 7860
|
| 29 |
|
| 30 |
+
# Correct entrypoint for Streamlit in a container
|
| 31 |
+
ENTRYPOINT ["streamlit", "run", "app.py", "--server.port=7860", "--server.address=0.0.0.0"]
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/engine/engine/__init__.py
ADDED
|
@@ -0,0 +1 @@
|
|
|
|
|
|
|
| 1 |
+
|
hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/data/data/__init__.py
ADDED
|
@@ -0,0 +1 @@
|
|
|
|
|
|
|
| 1 |
+
|
hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/analytics/analytics/__init__.py
ADDED
|
@@ -0,0 +1 @@
|
|
|
|
|
|
|
| 1 |
+
|
hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/app.py
CHANGED
|
@@ -1,80 +1,54 @@
|
|
| 1 |
import streamlit as st
|
| 2 |
|
| 3 |
-
|
| 4 |
-
# PAGE CONFIG (ONLY LIGHT CODE HERE)
|
| 5 |
-
# =====================================================
|
| 6 |
-
|
| 7 |
-
st.set_page_config(
|
| 8 |
-
page_title="P2 ETF Trend Suite",
|
| 9 |
-
layout="wide",
|
| 10 |
-
)
|
| 11 |
|
| 12 |
st.title("📊 P2 ETF Trend Suite")
|
| 13 |
-
st.markdown("
|
| 14 |
-
|
| 15 |
-
# =====================================================
|
| 16 |
-
# SIDEBAR
|
| 17 |
-
# =====================================================
|
| 18 |
-
|
| 19 |
-
st.sidebar.header("Strategy Controls")
|
| 20 |
-
|
| 21 |
-
initial_capital = st.sidebar.number_input(
|
| 22 |
-
"Initial Capital",
|
| 23 |
-
value=100000,
|
| 24 |
-
step=10000,
|
| 25 |
-
)
|
| 26 |
|
| 27 |
-
|
| 28 |
-
|
| 29 |
-
|
| 30 |
-
)
|
| 31 |
-
|
| 32 |
-
lookback = st.sidebar.slider(
|
| 33 |
-
"Momentum Lookback (days)",
|
| 34 |
-
50, 300, 200
|
| 35 |
-
)
|
| 36 |
-
|
| 37 |
-
run_button = st.sidebar.button("▶ Run Backtest")
|
| 38 |
|
| 39 |
st.sidebar.markdown("---")
|
| 40 |
-
st.sidebar.
|
|
|
|
|
|
|
| 41 |
|
| 42 |
-
|
| 43 |
-
# EXECUTION BLOCK
|
| 44 |
-
# =====================================================
|
| 45 |
|
| 46 |
if run_button:
|
|
|
|
|
|
|
|
|
|
| 47 |
|
| 48 |
-
|
| 49 |
-
|
| 50 |
-
# Lazy imports happen HERE
|
| 51 |
-
from engine.backtest import run_backtest
|
| 52 |
-
from data.loader import load_data
|
| 53 |
-
from analytics.metrics import compute_metrics
|
| 54 |
-
|
| 55 |
-
with st.spinner("Loading market data..."):
|
| 56 |
df = load_data()
|
| 57 |
-
|
| 58 |
-
|
| 59 |
-
|
| 60 |
-
|
| 61 |
-
|
| 62 |
-
|
| 63 |
-
|
| 64 |
-
|
| 65 |
-
|
| 66 |
-
|
| 67 |
-
|
| 68 |
-
|
| 69 |
-
|
| 70 |
-
|
| 71 |
-
|
| 72 |
-
|
| 73 |
-
|
| 74 |
-
|
| 75 |
-
|
| 76 |
-
|
| 77 |
-
|
| 78 |
-
|
| 79 |
-
|
| 80 |
-
|
|
|
|
|
|
|
|
|
| 1 |
import streamlit as st
|
| 2 |
|
| 3 |
+
st.set_page_config(page_title="P2 ETF Trend Suite", layout="wide")
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 4 |
|
| 5 |
st.title("📊 P2 ETF Trend Suite")
|
| 6 |
+
st.markdown("Stooq-Primary Data Engine + HF Integration")
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 7 |
|
| 8 |
+
# Sidebar Controls
|
| 9 |
+
st.sidebar.header("Parameters")
|
| 10 |
+
initial_capital = st.sidebar.number_input("Initial Capital", value=100000)
|
| 11 |
+
vol_target = st.sidebar.slider("Target Volatility", 0.05, 0.30, 0.15)
|
| 12 |
+
lookback = st.sidebar.slider("Lookback (Days)", 50, 300, 200)
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 13 |
|
| 14 |
st.sidebar.markdown("---")
|
| 15 |
+
st.sidebar.header("Hugging Face Sync")
|
| 16 |
+
hf_repo = st.sidebar.text_input("Repo ID", placeholder="user/dataset-name")
|
| 17 |
+
hf_token = st.sidebar.text_input("HF Token", type="password")
|
| 18 |
|
| 19 |
+
run_button = st.sidebar.button("▶ Run Full Process")
|
|
|
|
|
|
|
| 20 |
|
| 21 |
if run_button:
|
| 22 |
+
from data.loader import load_data, push_to_hf
|
| 23 |
+
from engine.backtest import run_backtest
|
| 24 |
+
from analytics.metrics import compute_metrics
|
| 25 |
|
| 26 |
+
# Phase 1: Data Fetching
|
| 27 |
+
with st.spinner("Fetching data from Stooq..."):
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 28 |
df = load_data()
|
| 29 |
+
|
| 30 |
+
if not df.empty:
|
| 31 |
+
st.subheader("📈 Market Data Preview")
|
| 32 |
+
st.dataframe(df.tail(5), use_container_width=True)
|
| 33 |
+
|
| 34 |
+
# Phase 2: Backtesting
|
| 35 |
+
with st.spinner("Calculating Trend Strategy..."):
|
| 36 |
+
results = run_backtest(df, initial_capital, vol_target, lookback)
|
| 37 |
+
metrics = compute_metrics(results["returns"])
|
| 38 |
+
|
| 39 |
+
# Display Results
|
| 40 |
+
st.success("Analysis Complete")
|
| 41 |
+
c1, c2, c3 = st.columns(3)
|
| 42 |
+
c1.metric("CAGR", f"{metrics['cagr']:.2%}")
|
| 43 |
+
c2.metric("Sharpe", f"{metrics['sharpe']:.2f}")
|
| 44 |
+
c3.metric("Max Drawdown", f"{metrics['max_dd']:.2%}")
|
| 45 |
+
|
| 46 |
+
st.line_chart(results["equity_curve"])
|
| 47 |
+
|
| 48 |
+
# Phase 3: HF Sync
|
| 49 |
+
if hf_repo and hf_token:
|
| 50 |
+
with st.spinner("Syncing to Hugging Face..."):
|
| 51 |
+
push_to_hf(df, hf_repo, hf_token)
|
| 52 |
+
st.sidebar.success("✅ Dataset Synced!")
|
| 53 |
+
else:
|
| 54 |
+
st.error("Data fetch failed. Verify ticker symbols.")
|
hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/data/loader.py
CHANGED
|
@@ -1,15 +1,56 @@
|
|
| 1 |
-
|
| 2 |
-
|
| 3 |
-
|
|
|
|
|
|
|
|
|
|
| 4 |
|
| 5 |
-
|
|
|
|
| 6 |
|
| 7 |
-
|
| 8 |
-
|
| 9 |
-
|
| 10 |
-
progress=False,
|
| 11 |
-
)["Adj Close"]
|
| 12 |
|
| 13 |
-
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 14 |
|
| 15 |
-
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
import pandas as pd
|
| 2 |
+
import pandas_datareader.data as web
|
| 3 |
+
import yfinance as yf
|
| 4 |
+
from datasets import Dataset
|
| 5 |
+
import streamlit as st
|
| 6 |
+
from datetime import datetime
|
| 7 |
|
| 8 |
+
# Combined Universe (All will attempt Stooq first)
|
| 9 |
+
TICKERS = ["SPY", "QQQ", "IWM", "TLT", "IEF", "SHY", "GLD"]
|
| 10 |
|
| 11 |
+
def load_data(tickers=TICKERS):
    """Fetch daily prices for ``tickers``, trying Stooq first with a
    yfinance fallback, and return them aligned in one DataFrame.

    Returns
    -------
    pd.DataFrame
        One column per ticker (Stooq close or yfinance adjusted close),
        rows restricted to dates where every ticker has data. Empty
        DataFrame when no ticker could be loaded.
    """
    all_series = {}

    for ticker in tickers:
        success = False

        # 1. Primary source: Stooq (US symbols are suffixed ".US").
        try:
            stooq_symbol = f"{ticker}.US"
            df_stooq = web.DataReader(stooq_symbol, 'stooq')

            if not df_stooq.empty:
                # Stooq returns newest data first; sort ascending for backtests.
                all_series[ticker] = df_stooq['Close'].sort_index()
                st.toast(f"✅ {ticker} loaded from Stooq")
                success = True
        except Exception as e:
            print(f"Stooq failed for {ticker}: {e}")

        # 2. Fallback: yfinance.
        if not success:
            try:
                # auto_adjust=False keeps the "Adj Close" column: newer
                # yfinance releases default to auto_adjust=True, which drops
                # it and would make the selection below raise a KeyError.
                yf_df = yf.download(
                    ticker, period="max", progress=False, auto_adjust=False
                )
                if not yf_df.empty:
                    # Use Adj Close to account for dividends/splits.
                    series = yf_df['Adj Close']
                    if isinstance(series, pd.DataFrame):
                        # yf.download may return MultiIndex columns; flatten
                        # the single-column frame to a Series.
                        series = series.squeeze("columns")
                    all_series[ticker] = series
                    st.toast(f"⚠️ {ticker} loaded from yfinance (Fallback)")
                    success = True
            except Exception as e:
                st.error(f"❌ Critical: Could not load {ticker} from any source.")

    if all_series:
        # Align all tickers on the same dates and drop missing values.
        return pd.concat(all_series, axis=1).dropna()
    return pd.DataFrame()
|
| 47 |
+
|
| 48 |
+
def push_to_hf(df, repo_id, token):
    """Upload ``df`` to the Hugging Face Hub as a dataset at ``repo_id``."""
    # HF datasets want plain columns: move the date index into a column
    # and force every column label to be a string.
    export = df.reset_index()
    export.columns = [str(col) for col in export.columns]

    Dataset.from_pandas(export).push_to_hub(repo_id, token=token)
    return True
|
hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/Dockerfile
CHANGED
|
@@ -2,11 +2,13 @@ FROM python:3.10
|
|
| 2 |
|
| 3 |
WORKDIR /app
|
| 4 |
|
|
|
|
| 5 |
COPY requirements.txt .
|
| 6 |
|
| 7 |
RUN pip install --upgrade pip
|
| 8 |
RUN pip install --no-cache-dir -r requirements.txt
|
| 9 |
|
|
|
|
| 10 |
COPY . .
|
| 11 |
|
| 12 |
EXPOSE 7860
|
|
@@ -15,4 +17,10 @@ ENV STREAMLIT_SERVER_PORT=7860
|
|
| 15 |
ENV STREAMLIT_SERVER_ADDRESS=0.0.0.0
|
| 16 |
ENV STREAMLIT_BROWSER_GATHER_USAGE_STATS=false
|
| 17 |
|
| 18 |
-
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 2 |
|
| 3 |
WORKDIR /app
|
| 4 |
|
| 5 |
+
# Copy requirements first (better layer caching)
|
| 6 |
COPY requirements.txt .
|
| 7 |
|
| 8 |
RUN pip install --upgrade pip
|
| 9 |
RUN pip install --no-cache-dir -r requirements.txt
|
| 10 |
|
| 11 |
+
# Copy full project
|
| 12 |
COPY . .
|
| 13 |
|
| 14 |
EXPOSE 7860
|
|
|
|
| 17 |
ENV STREAMLIT_SERVER_ADDRESS=0.0.0.0
|
| 18 |
ENV STREAMLIT_BROWSER_GATHER_USAGE_STATS=false
|
| 19 |
|
| 20 |
+
# Diagnostic startup command
|
| 21 |
+
CMD ["bash", "-c", "echo '===== CONTAINER BOOTING ====='; \
|
| 22 |
+
echo 'Python Version:'; python -V; \
|
| 23 |
+
echo 'Current Directory:'; pwd; \
|
| 24 |
+
echo 'Directory Listing:'; ls -la; \
|
| 25 |
+
echo 'Starting Streamlit...'; \
|
| 26 |
+
python -m streamlit run app.py --server.headless=true"]
|
hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/data/loader.py
ADDED
|
@@ -0,0 +1,15 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
def load_data():
    """Download adjusted daily closes for a fixed ETF universe.

    Returns
    -------
    pd.DataFrame
        "Adj Close" prices (one column per ticker) from 2015-01-01 onward,
        with rows containing any missing value dropped.
    """
    import pandas as pd
    import yfinance as yf

    tickers = ["SPY", "QQQ", "TLT"]

    data = yf.download(
        tickers,
        start="2015-01-01",
        progress=False,
        # Explicit: newer yfinance releases default auto_adjust=True, which
        # removes the "Adj Close" column and would break the selection below.
        auto_adjust=False,
    )["Adj Close"]

    # Keep only dates where every ticker traded.
    data = data.dropna()

    return data
|
hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/analytics/metrics.py
CHANGED
|
@@ -1,26 +1,21 @@
|
|
| 1 |
-
|
| 2 |
-
import pandas as pd
|
| 3 |
|
| 4 |
-
|
| 5 |
|
| 6 |
-
|
| 7 |
-
excess = returns - sofr_daily
|
| 8 |
|
| 9 |
-
|
|
|
|
|
|
|
| 10 |
|
| 11 |
-
|
| 12 |
-
|
| 13 |
-
|
| 14 |
-
vol = returns.std() * np.sqrt(252)
|
| 15 |
-
|
| 16 |
-
rolling_max = equity.cummax()
|
| 17 |
-
drawdown = equity / rolling_max - 1
|
| 18 |
max_dd = drawdown.min()
|
| 19 |
|
| 20 |
return {
|
| 21 |
-
"sharpe": sharpe,
|
| 22 |
"cagr": cagr,
|
| 23 |
"vol": vol,
|
| 24 |
-
"
|
|
|
|
| 25 |
}
|
| 26 |
-
|
|
|
|
| 1 |
+
def compute_metrics(returns):
    """Compute annualized performance metrics from a daily return series.

    Parameters
    ----------
    returns : pd.Series
        Daily simple (not log) strategy returns.

    Returns
    -------
    dict
        Keys "cagr", "vol", "sharpe", "max_dd" (all floats).
    """
    ann_factor = 252  # trading days per year

    # Guard: no data -> return zeros instead of raising on iloc[-1].
    if len(returns) == 0:
        return {"cagr": 0.0, "vol": 0.0, "sharpe": 0.0, "max_dd": 0.0}

    cumulative = (1 + returns).cumprod()

    # Geometric CAGR: compounding the arithmetic mean ((1+mean)**252 - 1)
    # overstates growth by ignoring volatility drag, so annualize the
    # actual terminal wealth instead.
    cagr = cumulative.iloc[-1] ** (ann_factor / len(returns)) - 1

    vol = returns.std() * (ann_factor ** 0.5)
    sharpe = cagr / vol if vol != 0 else 0

    peak = cumulative.cummax()
    drawdown = (cumulative - peak) / peak
    max_dd = drawdown.min()

    return {
        "cagr": cagr,
        "vol": vol,
        "sharpe": sharpe,
        "max_dd": max_dd,
    }
|
|
|
hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/engine/backtest.py
CHANGED
|
@@ -1,46 +1,20 @@
|
|
| 1 |
-
import pandas as pd
|
| 2 |
-
import numpy as np
|
| 3 |
-
|
| 4 |
def run_backtest(df, initial_capital, vol_target, lookback):
|
| 5 |
|
| 6 |
-
|
| 7 |
-
|
| 8 |
-
returns = prices.pct_change().dropna()
|
| 9 |
-
|
| 10 |
-
momentum = prices.pct_change(lookback)
|
| 11 |
-
signal = momentum.rank(axis=1, ascending=False)
|
| 12 |
-
top = signal <= 3
|
| 13 |
-
|
| 14 |
-
weights = top.div(top.sum(axis=1), axis=0)
|
| 15 |
-
|
| 16 |
-
rolling_cov = returns.rolling(60).cov()
|
| 17 |
-
vol = []
|
| 18 |
|
| 19 |
-
|
| 20 |
-
if date not in rolling_cov.index:
|
| 21 |
-
vol.append(0)
|
| 22 |
-
continue
|
| 23 |
|
| 24 |
-
|
| 25 |
-
cov = rolling_cov.loc[date].values.reshape(len(w), len(w))
|
| 26 |
-
portfolio_vol = np.sqrt(w @ cov @ w) * np.sqrt(252)
|
| 27 |
|
| 28 |
-
|
| 29 |
-
|
| 30 |
-
vol.append(portfolio_vol)
|
| 31 |
|
| 32 |
strategy_returns = (weights.shift(1) * returns).sum(axis=1)
|
| 33 |
-
equity_curve = (1 + strategy_returns).cumprod() * initial_capital
|
| 34 |
|
| 35 |
-
|
| 36 |
-
allocation = pd.DataFrame({
|
| 37 |
-
"Ticker": latest_weights.index,
|
| 38 |
-
"Weight": latest_weights.values
|
| 39 |
-
}).sort_values("Weight", ascending=False)
|
| 40 |
|
| 41 |
return {
|
| 42 |
"returns": strategy_returns,
|
| 43 |
"equity_curve": equity_curve,
|
| 44 |
-
"latest_allocation": allocation
|
| 45 |
}
|
| 46 |
-
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
def run_backtest(df, initial_capital, vol_target, lookback):
    """Equal-weight trend backtest: hold every asset whose trailing
    ``lookback``-day momentum is positive, rebalanced daily.

    ``vol_target`` is accepted for interface compatibility but not used
    by this simple variant.
    """
    import numpy as np
    import pandas as pd

    daily_rets = df.pct_change().dropna()

    trailing_momentum = df.pct_change(lookback)

    # Long any asset in a positive trend, splitting capital equally.
    in_trend = (trailing_momentum > 0).astype(int)
    alloc = in_trend.div(in_trend.sum(axis=1), axis=0).fillna(0)

    # Trade on yesterday's signal to avoid look-ahead bias.
    strategy_returns = (alloc.shift(1) * daily_rets).sum(axis=1)

    equity_curve = initial_capital * (1 + strategy_returns).cumprod()

    return {
        "returns": strategy_returns,
        "equity_curve": equity_curve,
    }
|
|
|
hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/app.py
CHANGED
|
@@ -1,7 +1,7 @@
|
|
| 1 |
import streamlit as st
|
| 2 |
|
| 3 |
# =====================================================
|
| 4 |
-
# PAGE CONFIG (
|
| 5 |
# =====================================================
|
| 6 |
|
| 7 |
st.set_page_config(
|
|
@@ -13,7 +13,7 @@ st.title("📊 P2 ETF Trend Suite")
|
|
| 13 |
st.markdown("Institutional ETF Trend + Volatility Targeting Engine")
|
| 14 |
|
| 15 |
# =====================================================
|
| 16 |
-
# SIDEBAR
|
| 17 |
# =====================================================
|
| 18 |
|
| 19 |
st.sidebar.header("Strategy Controls")
|
|
@@ -26,16 +26,12 @@ initial_capital = st.sidebar.number_input(
|
|
| 26 |
|
| 27 |
vol_target = st.sidebar.slider(
|
| 28 |
"Target Annual Volatility",
|
| 29 |
-
|
| 30 |
-
max_value=0.30,
|
| 31 |
-
value=0.15,
|
| 32 |
)
|
| 33 |
|
| 34 |
lookback = st.sidebar.slider(
|
| 35 |
"Momentum Lookback (days)",
|
| 36 |
-
|
| 37 |
-
max_value=300,
|
| 38 |
-
value=200,
|
| 39 |
)
|
| 40 |
|
| 41 |
run_button = st.sidebar.button("▶ Run Backtest")
|
|
@@ -44,34 +40,22 @@ st.sidebar.markdown("---")
|
|
| 44 |
st.sidebar.info("Backtest runs only when button is pressed.")
|
| 45 |
|
| 46 |
# =====================================================
|
| 47 |
-
#
|
| 48 |
# =====================================================
|
| 49 |
|
| 50 |
if run_button:
|
| 51 |
|
| 52 |
-
|
| 53 |
-
|
| 54 |
-
|
| 55 |
-
|
| 56 |
-
|
| 57 |
-
|
| 58 |
-
|
| 59 |
-
|
| 60 |
-
|
| 61 |
-
|
| 62 |
-
with st.spinner("
|
| 63 |
-
df = load_dataset()
|
| 64 |
-
|
| 65 |
-
# ---------------------------
|
| 66 |
-
# Pull SOFR
|
| 67 |
-
# ---------------------------
|
| 68 |
-
with st.spinner("Pulling SOFR from FRED..."):
|
| 69 |
-
sofr = get_sofr_series()
|
| 70 |
-
|
| 71 |
-
# ---------------------------
|
| 72 |
-
# Run Backtest
|
| 73 |
-
# ---------------------------
|
| 74 |
-
with st.spinner("Running backtest engine..."):
|
| 75 |
results = run_backtest(
|
| 76 |
df=df,
|
| 77 |
initial_capital=initial_capital,
|
|
@@ -79,38 +63,18 @@ if run_button:
|
|
| 79 |
lookback=lookback,
|
| 80 |
)
|
| 81 |
|
| 82 |
-
metrics = compute_metrics(results["returns"]
|
| 83 |
|
| 84 |
st.success("Backtest Complete")
|
| 85 |
|
| 86 |
-
# =====================================================
|
| 87 |
-
# METRICS PANEL
|
| 88 |
-
# =====================================================
|
| 89 |
-
|
| 90 |
col1, col2, col3, col4 = st.columns(4)
|
| 91 |
-
|
| 92 |
col1.metric("CAGR", f"{metrics['cagr']:.2%}")
|
| 93 |
-
col2.metric("Sharpe
|
| 94 |
col3.metric("Max Drawdown", f"{metrics['max_dd']:.2%}")
|
| 95 |
col4.metric("Volatility", f"{metrics['vol']:.2%}")
|
| 96 |
|
| 97 |
-
st.markdown("---")
|
| 98 |
-
|
| 99 |
-
# =====================================================
|
| 100 |
-
# EQUITY CURVE
|
| 101 |
-
# =====================================================
|
| 102 |
-
|
| 103 |
st.subheader("Equity Curve")
|
| 104 |
st.line_chart(results["equity_curve"])
|
| 105 |
|
| 106 |
-
st.markdown("---")
|
| 107 |
-
|
| 108 |
-
# =====================================================
|
| 109 |
-
# ALLOCATION TABLE
|
| 110 |
-
# =====================================================
|
| 111 |
-
|
| 112 |
-
st.subheader("Latest Portfolio Allocation")
|
| 113 |
-
st.dataframe(results["latest_allocation"], use_container_width=True)
|
| 114 |
-
|
| 115 |
else:
|
| 116 |
-
st.info("Configure parameters
|
|
|
|
| 1 |
import streamlit as st
|
| 2 |
|
| 3 |
# =====================================================
|
| 4 |
+
# PAGE CONFIG (ONLY LIGHT CODE HERE)
|
| 5 |
# =====================================================
|
| 6 |
|
| 7 |
st.set_page_config(
|
|
|
|
| 13 |
st.markdown("Institutional ETF Trend + Volatility Targeting Engine")
|
| 14 |
|
| 15 |
# =====================================================
|
| 16 |
+
# SIDEBAR
|
| 17 |
# =====================================================
|
| 18 |
|
| 19 |
st.sidebar.header("Strategy Controls")
|
|
|
|
| 26 |
|
| 27 |
vol_target = st.sidebar.slider(
|
| 28 |
"Target Annual Volatility",
|
| 29 |
+
0.05, 0.30, 0.15
|
|
|
|
|
|
|
| 30 |
)
|
| 31 |
|
| 32 |
lookback = st.sidebar.slider(
|
| 33 |
"Momentum Lookback (days)",
|
| 34 |
+
50, 300, 200
|
|
|
|
|
|
|
| 35 |
)
|
| 36 |
|
| 37 |
run_button = st.sidebar.button("▶ Run Backtest")
|
|
|
|
| 40 |
st.sidebar.info("Backtest runs only when button is pressed.")
|
| 41 |
|
| 42 |
# =====================================================
|
| 43 |
+
# EXECUTION BLOCK
|
| 44 |
# =====================================================
|
| 45 |
|
| 46 |
if run_button:
|
| 47 |
|
| 48 |
+
with st.spinner("Loading engine..."):
|
| 49 |
+
|
| 50 |
+
# Lazy imports happen HERE
|
| 51 |
+
from engine.backtest import run_backtest
|
| 52 |
+
from data.loader import load_data
|
| 53 |
+
from analytics.metrics import compute_metrics
|
| 54 |
+
|
| 55 |
+
with st.spinner("Loading market data..."):
|
| 56 |
+
df = load_data()
|
| 57 |
+
|
| 58 |
+
with st.spinner("Running strategy..."):
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 59 |
results = run_backtest(
|
| 60 |
df=df,
|
| 61 |
initial_capital=initial_capital,
|
|
|
|
| 63 |
lookback=lookback,
|
| 64 |
)
|
| 65 |
|
| 66 |
+
metrics = compute_metrics(results["returns"])
|
| 67 |
|
| 68 |
st.success("Backtest Complete")
|
| 69 |
|
|
|
|
|
|
|
|
|
|
|
|
|
| 70 |
col1, col2, col3, col4 = st.columns(4)
|
|
|
|
| 71 |
col1.metric("CAGR", f"{metrics['cagr']:.2%}")
|
| 72 |
+
col2.metric("Sharpe", f"{metrics['sharpe']:.2f}")
|
| 73 |
col3.metric("Max Drawdown", f"{metrics['max_dd']:.2%}")
|
| 74 |
col4.metric("Volatility", f"{metrics['vol']:.2%}")
|
| 75 |
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 76 |
st.subheader("Equity Curve")
|
| 77 |
st.line_chart(results["equity_curve"])
|
| 78 |
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 79 |
else:
|
| 80 |
+
st.info("Configure parameters and click Run Backtest.")
|
hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/Dockerfile
CHANGED
|
@@ -1,13 +1,18 @@
|
|
| 1 |
-
FROM python:3.10
|
| 2 |
|
| 3 |
WORKDIR /app
|
| 4 |
|
| 5 |
COPY requirements.txt .
|
| 6 |
|
|
|
|
| 7 |
RUN pip install --no-cache-dir -r requirements.txt
|
| 8 |
|
| 9 |
COPY . .
|
| 10 |
|
| 11 |
EXPOSE 7860
|
| 12 |
|
| 13 |
-
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
# Streamlit app image for Hugging Face Spaces (serves on port 7860).
FROM python:3.10

WORKDIR /app

# Install dependencies first so source edits don't invalidate the pip layer.
COPY requirements.txt .

RUN pip install --upgrade pip
RUN pip install --no-cache-dir -r requirements.txt

# Copy the application source.
COPY . .

EXPOSE 7860

# Streamlit reads these env vars instead of CLI flags.
ENV STREAMLIT_SERVER_PORT=7860
ENV STREAMLIT_SERVER_ADDRESS=0.0.0.0
ENV STREAMLIT_BROWSER_GATHER_USAGE_STATS=false

CMD streamlit run app.py
|
hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/app.py
CHANGED
|
@@ -1,70 +1,116 @@
|
|
| 1 |
import streamlit as st
|
| 2 |
-
import os
|
| 3 |
|
| 4 |
-
|
| 5 |
-
|
| 6 |
-
|
| 7 |
-
from engine.backtest import run_backtest
|
| 8 |
-
from analytics.metrics import compute_metrics
|
| 9 |
|
| 10 |
-
st.set_page_config(
|
|
|
|
|
|
|
|
|
|
| 11 |
|
| 12 |
st.title("📊 P2 ETF Trend Suite")
|
| 13 |
st.markdown("Institutional ETF Trend + Volatility Targeting Engine")
|
| 14 |
|
| 15 |
-
# ========================
|
| 16 |
-
#
|
| 17 |
-
# ========================
|
| 18 |
|
| 19 |
-
st.sidebar.header("Controls")
|
| 20 |
|
| 21 |
-
initial_capital = st.sidebar.number_input(
|
| 22 |
-
|
| 23 |
-
|
|
|
|
|
|
|
| 24 |
|
| 25 |
-
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 26 |
|
| 27 |
-
|
| 28 |
-
|
| 29 |
-
|
|
|
|
|
|
|
|
|
|
| 30 |
|
| 31 |
-
|
| 32 |
-
df = load_dataset()
|
| 33 |
|
| 34 |
-
|
| 35 |
-
|
| 36 |
-
df = update_market_data(df)
|
| 37 |
-
st.success("Dataset updated successfully.")
|
| 38 |
|
| 39 |
-
# ========================
|
| 40 |
-
#
|
| 41 |
-
# ========================
|
| 42 |
|
| 43 |
-
|
| 44 |
-
sofr = get_sofr_series()
|
| 45 |
|
| 46 |
-
|
| 47 |
-
|
| 48 |
-
|
| 49 |
-
|
| 50 |
-
|
| 51 |
-
|
| 52 |
-
)
|
| 53 |
|
| 54 |
-
|
|
|
|
|
|
|
|
|
|
|
|
|
| 55 |
|
| 56 |
-
#
|
| 57 |
-
#
|
| 58 |
-
#
|
|
|
|
|
|
|
| 59 |
|
| 60 |
-
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 61 |
|
| 62 |
-
|
| 63 |
-
col2.metric("Sharpe (SOFR)", f"{metrics['sharpe']:.2f}")
|
| 64 |
-
col3.metric("Max Drawdown", f"{metrics['max_dd']:.2%}")
|
| 65 |
-
col4.metric("Volatility", f"{metrics['vol']:.2%}")
|
| 66 |
|
| 67 |
-
st.
|
| 68 |
|
| 69 |
-
|
| 70 |
-
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
import streamlit as st

# ---------------------------------------------------------------------
# Page config — must be the first Streamlit call in the script.
# ---------------------------------------------------------------------
st.set_page_config(
    page_title="P2 ETF Trend Suite",
    layout="wide",
)

st.title("📊 P2 ETF Trend Suite")
st.markdown("Institutional ETF Trend + Volatility Targeting Engine")

# ---------------------------------------------------------------------
# Sidebar controls
# ---------------------------------------------------------------------
st.sidebar.header("Strategy Controls")

initial_capital = st.sidebar.number_input("Initial Capital", value=100000, step=10000)
vol_target = st.sidebar.slider("Target Annual Volatility", min_value=0.05, max_value=0.30, value=0.15)
lookback = st.sidebar.slider("Momentum Lookback (days)", min_value=50, max_value=300, value=200)

run_button = st.sidebar.button("▶ Run Backtest")

st.sidebar.markdown("---")
st.sidebar.info("Backtest runs only when button is pressed.")

# ---------------------------------------------------------------------
# Main execution — heavy imports and work happen only on demand.
# ---------------------------------------------------------------------
if not run_button:
    st.info("Configure parameters in the sidebar and click **Run Backtest**.")
else:
    # Deferred imports keep the initial page load fast.
    from data.hf_store import load_dataset
    from data.updater import update_market_data
    from data.fred import get_sofr_series
    from engine.backtest import run_backtest
    from analytics.metrics import compute_metrics

    with st.spinner("Loading ETF dataset from Hugging Face..."):
        df = load_dataset()

    with st.spinner("Pulling SOFR from FRED..."):
        sofr = get_sofr_series()

    with st.spinner("Running backtest engine..."):
        results = run_backtest(
            df=df,
            initial_capital=initial_capital,
            vol_target=vol_target,
            lookback=lookback,
        )

    metrics = compute_metrics(results["returns"], sofr)

    st.success("Backtest Complete")

    # Headline metrics panel
    col1, col2, col3, col4 = st.columns(4)
    col1.metric("CAGR", f"{metrics['cagr']:.2%}")
    col2.metric("Sharpe (SOFR)", f"{metrics['sharpe']:.2f}")
    col3.metric("Max Drawdown", f"{metrics['max_dd']:.2%}")
    col4.metric("Volatility", f"{metrics['vol']:.2%}")

    st.markdown("---")

    # Equity curve
    st.subheader("Equity Curve")
    st.line_chart(results["equity_curve"])

    st.markdown("---")

    # Allocation table
    st.subheader("Latest Portfolio Allocation")
    st.dataframe(results["latest_allocation"], use_container_width=True)
|
hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/Dockerfile
CHANGED
|
@@ -1,20 +1,13 @@
|
|
| 1 |
-
FROM python:3.
|
| 2 |
|
| 3 |
WORKDIR /app
|
| 4 |
|
| 5 |
-
|
| 6 |
-
build-essential \
|
| 7 |
-
curl \
|
| 8 |
-
git \
|
| 9 |
-
&& rm -rf /var/lib/apt/lists/*
|
| 10 |
|
| 11 |
-
|
| 12 |
-
COPY src/ ./src/
|
| 13 |
|
| 14 |
-
|
| 15 |
|
| 16 |
-
EXPOSE
|
| 17 |
|
| 18 |
-
|
| 19 |
-
|
| 20 |
-
ENTRYPOINT ["streamlit", "run", "src/streamlit_app.py", "--server.port=8501", "--server.address=0.0.0.0"]
|
|
|
|
| 1 |
+
# Slim Streamlit image for Hugging Face Spaces.
FROM python:3.10-slim

WORKDIR /app

# Dependency layer first for better build caching.
COPY requirements.txt .

RUN pip install --no-cache-dir -r requirements.txt

COPY . .

EXPOSE 7860

# Bind to all interfaces on the Spaces-required port.
CMD ["streamlit", "run", "app.py", "--server.port=7860", "--server.address=0.0.0.0"]
|
|
|
|
|
|
hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/app.py
CHANGED
|
@@ -1,35 +1,70 @@
|
|
| 1 |
import streamlit as st
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 2 |
|
| 3 |
st.set_page_config(layout="wide")
|
| 4 |
|
| 5 |
-
st.title("P2 ETF Trend Suite
|
| 6 |
-
|
| 7 |
-
|
| 8 |
-
|
| 9 |
-
|
| 10 |
-
|
| 11 |
-
|
| 12 |
-
|
| 13 |
-
|
| 14 |
-
|
| 15 |
-
|
| 16 |
-
|
| 17 |
-
|
| 18 |
-
|
| 19 |
-
|
| 20 |
-
|
| 21 |
-
|
| 22 |
-
|
| 23 |
-
|
| 24 |
-
|
| 25 |
-
|
| 26 |
-
|
| 27 |
-
|
| 28 |
-
|
| 29 |
-
|
| 30 |
-
|
| 31 |
-
|
| 32 |
-
|
| 33 |
-
|
| 34 |
-
|
| 35 |
-
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
import streamlit as st
import os

from data.hf_store import load_dataset
from data.updater import update_market_data
from data.fred import get_sofr_series
from engine.backtest import run_backtest
from analytics.metrics import compute_metrics

st.set_page_config(layout="wide")

st.title("📊 P2 ETF Trend Suite")
st.markdown("Institutional ETF Trend + Volatility Targeting Engine")


def _sidebar_controls():
    """Render the sidebar widgets and return (capital, target, window, refresh)."""
    st.sidebar.header("Controls")
    capital = st.sidebar.number_input("Initial Capital", value=100000, step=10000)
    target = st.sidebar.slider("Target Annual Volatility", 0.05, 0.30, 0.15)
    window = st.sidebar.slider("Momentum Lookback (days)", 50, 300, 200)
    do_refresh = st.sidebar.button("🔄 Refresh Market Data")
    return capital, target, window, do_refresh


def _load_prices(do_refresh):
    """Load the ETF dataset, optionally refreshing it from yfinance first."""
    with st.spinner("Loading ETF dataset..."):
        frame = load_dataset()
    if do_refresh:
        with st.spinner("Updating market data from yfinance..."):
            frame = update_market_data(frame)
        st.success("Dataset updated successfully.")
    return frame


initial_capital, vol_target, lookback, refresh = _sidebar_controls()

df = _load_prices(refresh)

with st.spinner("Pulling SOFR from FRED..."):
    sofr = get_sofr_series()

with st.spinner("Running backtest..."):
    results = run_backtest(
        df=df,
        initial_capital=initial_capital,
        vol_target=vol_target,
        lookback=lookback,
    )

metrics = compute_metrics(results["returns"], sofr)

# Headline metrics
col1, col2, col3, col4 = st.columns(4)

col1.metric("CAGR", f"{metrics['cagr']:.2%}")
col2.metric("Sharpe (SOFR)", f"{metrics['sharpe']:.2f}")
col3.metric("Max Drawdown", f"{metrics['max_dd']:.2%}")
col4.metric("Volatility", f"{metrics['vol']:.2%}")

st.line_chart(results["equity_curve"])

st.subheader("Current Allocation")
st.dataframe(results["latest_allocation"])
|
hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/analytics/metrics.py
ADDED
|
@@ -0,0 +1,26 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
import numpy as np
|
| 2 |
+
import pandas as pd
|
| 3 |
+
|
| 4 |
+
def compute_metrics(returns, sofr):
    """Compute annualized performance metrics for a daily return series,
    using SOFR as the risk-free rate for the Sharpe ratio.

    Parameters
    ----------
    returns : pd.Series
        Daily strategy returns indexed by date.
    sofr : pd.DataFrame
        Frame with a "sofr" column of annualized decimal rates.

    Returns
    -------
    dict
        Keys "sharpe", "cagr", "vol", "max_dd".
    """
    # Align SOFR to the return dates, carrying the last published rate
    # forward over gaps (weekends/holidays), then de-annualize.
    # fillna(method="ffill") is deprecated/removed in modern pandas;
    # .ffill() is the supported spelling.
    sofr_daily = sofr.reindex(returns.index).ffill()["sofr"] / 252
    excess = returns - sofr_daily

    sharpe = np.sqrt(252) * excess.mean() / excess.std()

    equity = (1 + returns).cumprod()
    cagr = equity.iloc[-1] ** (252 / len(equity)) - 1

    vol = returns.std() * np.sqrt(252)

    rolling_max = equity.cummax()
    drawdown = equity / rolling_max - 1
    max_dd = drawdown.min()

    return {
        "sharpe": sharpe,
        "cagr": cagr,
        "vol": vol,
        "max_dd": max_dd
    }
|
| 26 |
+
|
hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/app.py
ADDED
|
@@ -0,0 +1,35 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
"""Debug entry point: verifies that each project module imports cleanly and
reports the outcome in the Streamlit UI instead of crashing the whole app."""
import streamlit as st

st.set_page_config(layout="wide")

st.title("P2 ETF Trend Suite - Debug Mode")

# Each import is wrapped in its own try/except so one broken module does
# not mask the status of the others.
try:
    from data.hf_store import load_dataset
    st.success("hf_store imported successfully")
except Exception as e:
    st.error(f"hf_store failed: {e}")

try:
    from data.updater import update_market_data
    st.success("updater imported successfully")
except Exception as e:
    st.error(f"updater failed: {e}")

try:
    from data.fred import get_sofr_series
    st.success("fred imported successfully")
except Exception as e:
    st.error(f"fred failed: {e}")

try:
    from engine.backtest import run_backtest
    st.success("backtest imported successfully")
except Exception as e:
    st.error(f"backtest failed: {e}")

try:
    from analytics.metrics import compute_metrics
    st.success("metrics imported successfully")
except Exception as e:
    st.error(f"metrics failed: {e}")
|
hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/data/fred.py
ADDED
|
@@ -0,0 +1,11 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
import os
|
| 2 |
+
import pandas as pd
|
| 3 |
+
from fredapi import Fred
|
| 4 |
+
|
| 5 |
+
def get_sofr_series():
    """Fetch the SOFR overnight rate from FRED as a decimal-rate DataFrame.

    Reads the API key from the FRED_API_KEY environment variable. Returns a
    DataFrame with a DatetimeIndex and a single ``sofr`` column, with the
    published percentage converted to a decimal (e.g. 5.31 -> 0.0531).
    """
    client = Fred(api_key=os.getenv("FRED_API_KEY"))
    raw = client.get_series("SOFR")
    frame = raw.to_frame(name="sofr")
    frame.index = pd.to_datetime(frame.index)
    # FRED publishes the rate in percent; downstream math expects decimals.
    frame["sofr"] = frame["sofr"].div(100)
    return frame
hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/data/hf_store.py
ADDED
|
@@ -0,0 +1,17 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
import pandas as pd
|
| 2 |
+
from datasets import load_dataset as hf_load_dataset
|
| 3 |
+
|
| 4 |
+
DATASET_PATH = "P2SAMAPA/etf_trend_data"
|
| 5 |
+
|
| 6 |
+
def load_dataset():
    """Load the ETF price dataset from the HF Hub as a tidy, sorted DataFrame.

    Returns a long-format frame with parsed ``date`` values, sorted by
    ``ticker`` then ``date``.
    """
    ds = hf_load_dataset(DATASET_PATH)

    # A DatasetDict maps split names to datasets; pick the default split.
    if isinstance(ds, dict) and "train" in ds:
        ds = ds["train"]

    frame = ds.to_pandas()
    frame["date"] = pd.to_datetime(frame["date"])
    return frame.sort_values(["ticker", "date"])
hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/data/updater.py
ADDED
|
@@ -0,0 +1,55 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
import os
|
| 2 |
+
import pandas as pd
|
| 3 |
+
import yfinance as yf
|
| 4 |
+
from huggingface_hub import HfApi
|
| 5 |
+
|
| 6 |
+
DATASET_PATH = "P2SAMAPA/etf_trend_data"
|
| 7 |
+
|
| 8 |
+
def update_market_data(df):
    """Incrementally refresh the ETF price panel and publish it to the Hub.

    For each ticker already present in ``df``, downloads daily bars from the
    day after its last stored date, appends them, de-duplicates on
    (date, ticker), writes the merged frame to ``updated.parquet`` and
    uploads it to the HF dataset repo (token from the HF_TOKEN env var).

    Parameters
    ----------
    df : long-format DataFrame with at least ``ticker`` and ``date`` columns.

    Returns
    -------
    The merged DataFrame (returned unchanged if no new rows were available).
    """
    tickers = df["ticker"].unique()
    all_new = []

    for ticker in tickers:
        last_date = df[df["ticker"] == ticker]["date"].max()
        start_date = (last_date + pd.Timedelta(days=1)).strftime("%Y-%m-%d")

        # auto_adjust=False keeps the raw "Adj Close" column; newer yfinance
        # releases default to auto_adjust=True, which drops it and would leave
        # the dataset without "adjusted_close".
        new_data = yf.download(ticker, start=start_date, progress=False,
                               auto_adjust=False)

        if new_data.empty:
            continue

        # yfinance >= 0.2 can return MultiIndex columns (field, ticker) even
        # for a single symbol; flatten so the renames below actually match.
        if isinstance(new_data.columns, pd.MultiIndex):
            new_data.columns = new_data.columns.get_level_values(0)

        new_data.reset_index(inplace=True)
        new_data["ticker"] = ticker
        new_data.rename(columns={
            "Date": "date",
            "Open": "open",
            "High": "high",
            "Low": "low",
            "Close": "close",
            "Adj Close": "adjusted_close",
            "Volume": "volume"
        }, inplace=True)

        all_new.append(new_data)

    if not all_new:
        return df

    new_df = pd.concat(all_new)
    df = pd.concat([df, new_df])
    # Existing rows win on (date, ticker) collisions: drop_duplicates keeps
    # the first occurrence, and the original frame was concatenated first.
    df.drop_duplicates(subset=["date", "ticker"], inplace=True)
    df.sort_values(["ticker", "date"], inplace=True)

    df.to_parquet("updated.parquet")

    api = HfApi()
    api.upload_file(
        path_or_fileobj="updated.parquet",
        path_in_repo="data/train-00000-of-00001.parquet",
        repo_id=DATASET_PATH,
        repo_type="dataset",
        token=os.getenv("HF_TOKEN")
    )

    return df
hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/engine/backtest.py
ADDED
|
@@ -0,0 +1,46 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
import pandas as pd
|
| 2 |
+
import numpy as np
|
| 3 |
+
|
| 4 |
+
def run_backtest(df, initial_capital, vol_target, lookback):
    """Cross-sectional momentum backtest with ex-ante volatility targeting.

    Ranks tickers by trailing ``lookback``-day return, holds the top 3
    equally weighted, then scales the weights so the portfolio's ex-ante
    annualized volatility (60-day rolling covariance) matches ``vol_target``.

    Parameters
    ----------
    df : long-format DataFrame with ``ticker``, ``date`` and
        ``adjusted_close`` columns.
    initial_capital : starting equity for the compounded curve.
    vol_target : target annualized volatility, e.g. 0.15.
    lookback : momentum lookback window in trading days.

    Returns
    -------
    dict with ``returns`` (daily strategy returns), ``equity_curve``
    (compounded equity Series) and ``latest_allocation`` (DataFrame of the
    most recent target weights, sorted descending).
    """
    df = df.sort_values(["ticker", "date"])
    prices = df.pivot(index="date", columns="ticker", values="adjusted_close")
    returns = prices.pct_change().dropna()

    # Rank by trailing momentum; hold the top 3, equally weighted.
    momentum = prices.pct_change(lookback)
    signal = momentum.rank(axis=1, ascending=False)
    top = signal <= 3
    weights = top.div(top.sum(axis=1), axis=0)

    # 60-day rolling covariance panel, indexed by (date, ticker).
    rolling_cov = returns.rolling(60).cov()

    for date in weights.index:
        # Dates outside the covariance panel (e.g. the very first price date,
        # which has no return) cannot be scaled; leave their weights as-is.
        if date not in rolling_cov.index:
            continue

        w = weights.loc[date].values
        cov = rolling_cov.loc[date].values.reshape(len(w), len(w))
        portfolio_vol = np.sqrt(w @ cov @ w) * np.sqrt(252)

        # Zero or NaN vol (warm-up window, constant prices) => stay in cash.
        scale = vol_target / portfolio_vol if portfolio_vol > 0 else 0
        weights.loc[date] = w * scale

    # Trade on yesterday's weights to avoid look-ahead bias.
    strategy_returns = (weights.shift(1) * returns).sum(axis=1)
    equity_curve = (1 + strategy_returns).cumprod() * initial_capital

    latest_weights = weights.iloc[-1]
    allocation = pd.DataFrame({
        "Ticker": latest_weights.index,
        "Weight": latest_weights.values,
    }).sort_values("Weight", ascending=False)

    return {
        "returns": strategy_returns,
        "equity_curve": equity_curve,
        "latest_allocation": allocation,
    }
hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/.gitattributes
ADDED
|
@@ -0,0 +1,35 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
*.7z filter=lfs diff=lfs merge=lfs -text
|
| 2 |
+
*.arrow filter=lfs diff=lfs merge=lfs -text
|
| 3 |
+
*.bin filter=lfs diff=lfs merge=lfs -text
|
| 4 |
+
*.bz2 filter=lfs diff=lfs merge=lfs -text
|
| 5 |
+
*.ckpt filter=lfs diff=lfs merge=lfs -text
|
| 6 |
+
*.ftz filter=lfs diff=lfs merge=lfs -text
|
| 7 |
+
*.gz filter=lfs diff=lfs merge=lfs -text
|
| 8 |
+
*.h5 filter=lfs diff=lfs merge=lfs -text
|
| 9 |
+
*.joblib filter=lfs diff=lfs merge=lfs -text
|
| 10 |
+
*.lfs.* filter=lfs diff=lfs merge=lfs -text
|
| 11 |
+
*.mlmodel filter=lfs diff=lfs merge=lfs -text
|
| 12 |
+
*.model filter=lfs diff=lfs merge=lfs -text
|
| 13 |
+
*.msgpack filter=lfs diff=lfs merge=lfs -text
|
| 14 |
+
*.npy filter=lfs diff=lfs merge=lfs -text
|
| 15 |
+
*.npz filter=lfs diff=lfs merge=lfs -text
|
| 16 |
+
*.onnx filter=lfs diff=lfs merge=lfs -text
|
| 17 |
+
*.ot filter=lfs diff=lfs merge=lfs -text
|
| 18 |
+
*.parquet filter=lfs diff=lfs merge=lfs -text
|
| 19 |
+
*.pb filter=lfs diff=lfs merge=lfs -text
|
| 20 |
+
*.pickle filter=lfs diff=lfs merge=lfs -text
|
| 21 |
+
*.pkl filter=lfs diff=lfs merge=lfs -text
|
| 22 |
+
*.pt filter=lfs diff=lfs merge=lfs -text
|
| 23 |
+
*.pth filter=lfs diff=lfs merge=lfs -text
|
| 24 |
+
*.rar filter=lfs diff=lfs merge=lfs -text
|
| 25 |
+
*.safetensors filter=lfs diff=lfs merge=lfs -text
|
| 26 |
+
saved_model/**/* filter=lfs diff=lfs merge=lfs -text
|
| 27 |
+
*.tar.* filter=lfs diff=lfs merge=lfs -text
|
| 28 |
+
*.tar filter=lfs diff=lfs merge=lfs -text
|
| 29 |
+
*.tflite filter=lfs diff=lfs merge=lfs -text
|
| 30 |
+
*.tgz filter=lfs diff=lfs merge=lfs -text
|
| 31 |
+
*.wasm filter=lfs diff=lfs merge=lfs -text
|
| 32 |
+
*.xz filter=lfs diff=lfs merge=lfs -text
|
| 33 |
+
*.zip filter=lfs diff=lfs merge=lfs -text
|
| 34 |
+
*.zst filter=lfs diff=lfs merge=lfs -text
|
| 35 |
+
*tfevents* filter=lfs diff=lfs merge=lfs -text
|
hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/Dockerfile
ADDED
|
@@ -0,0 +1,20 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
FROM python:3.13.5-slim

WORKDIR /app

# build-essential for wheels without prebuilt binaries; curl for HEALTHCHECK.
RUN apt-get update && apt-get install -y \
	build-essential \
	curl \
	git \
	&& rm -rf /var/lib/apt/lists/*

# Copy only the requirements first so the pip layer is cached independently
# of application-code changes (copying src/ earlier busts the cache on every
# code edit).
COPY requirements.txt ./
RUN pip3 install --no-cache-dir -r requirements.txt

COPY src/ ./src/

EXPOSE 8501

HEALTHCHECK CMD curl --fail http://localhost:8501/_stcore/health

ENTRYPOINT ["streamlit", "run", "src/streamlit_app.py", "--server.port=8501", "--server.address=0.0.0.0"]
hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/README.md
ADDED
|
@@ -0,0 +1,19 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
---
|
| 2 |
+
title: P2 ETF TREND SUITE
|
| 3 |
+
emoji: 🚀
|
| 4 |
+
colorFrom: red
|
| 5 |
+
colorTo: red
|
| 6 |
+
sdk: docker
|
| 7 |
+
app_port: 8501
|
| 8 |
+
tags:
|
| 9 |
+
- streamlit
|
| 10 |
+
pinned: false
|
| 11 |
+
short_description: ETF trend and volatility targeting dashboard
|
| 12 |
+
---
|
| 13 |
+
|
| 14 |
+
# Welcome to Streamlit!
|
| 15 |
+
|
| 16 |
+
Edit `/src/streamlit_app.py` to customize this app to your heart's desire. :heart:
|
| 17 |
+
|
| 18 |
+
If you have any questions, checkout our [documentation](https://docs.streamlit.io) and [community
|
| 19 |
+
forums](https://discuss.streamlit.io).
|
hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/requirements.txt
ADDED
|
@@ -0,0 +1,8 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
streamlit
|
| 2 |
+
pandas
|
| 3 |
+
numpy
|
| 4 |
+
yfinance
|
| 5 |
+
datasets
|
| 6 |
+
huggingface_hub
|
| 7 |
+
fredapi
|
| 8 |
+
scipy
|
hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/src/streamlit_app.py
ADDED
|
@@ -0,0 +1,40 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
import altair as alt
import numpy as np
import pandas as pd
import streamlit as st

"""
# Welcome to Streamlit!

Edit `/streamlit_app.py` to customize this app to your heart's desire :heart:.
If you have any questions, checkout our [documentation](https://docs.streamlit.io) and [community
forums](https://discuss.streamlit.io).

In the meantime, below is an example of what you can do with just a few lines of code:
"""

point_count = st.slider("Number of points in spiral", 1, 10000, 1100)
turn_count = st.slider("Number of turns in spiral", 1, 300, 31)

# Parameterize the spiral: t runs 0..1 and doubles as the radius, while the
# angle winds around turn_count full turns.
t = np.linspace(0, 1, point_count)
angle = 2 * np.pi * turn_count * t

spiral = pd.DataFrame({
    "x": t * np.cos(angle),
    "y": t * np.sin(angle),
    "idx": t,
    "rand": np.random.randn(point_count),
})

chart = (
    alt.Chart(spiral, height=700, width=700)
    .mark_point(filled=True)
    .encode(
        x=alt.X("x", axis=None),
        y=alt.Y("y", axis=None),
        color=alt.Color("idx", legend=None, scale=alt.Scale()),
        size=alt.Size("rand", legend=None, scale=alt.Scale(range=[1, 150])),
    )
)
st.altair_chart(chart)
hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/streamlit_app.py
ADDED
|
@@ -0,0 +1,70 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
"""Streamlit entry point for the P2 ETF Trend Suite dashboard.

Loads the ETF price dataset, optionally refreshes it from Yahoo Finance,
pulls SOFR from FRED, runs the momentum + volatility-targeting backtest and
renders headline metrics, the equity curve and the current allocation.
Statement order matters: each st.* call below appends a widget to the page.
"""
import streamlit as st
import os  # NOTE(review): appears unused in this file — confirm before removing.

from data.hf_store import load_dataset
from data.updater import update_market_data
from data.fred import get_sofr_series
from engine.backtest import run_backtest
from analytics.metrics import compute_metrics

st.set_page_config(layout="wide")

st.title("📊 P2 ETF Trend Suite")
st.markdown("Institutional ETF Trend + Volatility Targeting Engine")

# ========================
# Sidebar Controls
# ========================

st.sidebar.header("Controls")

initial_capital = st.sidebar.number_input("Initial Capital", value=100000, step=10000)
vol_target = st.sidebar.slider("Target Annual Volatility", 0.05, 0.30, 0.15)
lookback = st.sidebar.slider("Momentum Lookback (days)", 50, 300, 200)

# Button is True only on the rerun triggered by the click.
refresh = st.sidebar.button("🔄 Refresh Market Data")

# ========================
# Data Load
# ========================

with st.spinner("Loading ETF dataset..."):
    df = load_dataset()

if refresh:
    # Pulls new daily bars and uploads the merged dataset back to the Hub.
    with st.spinner("Updating market data from yfinance..."):
        df = update_market_data(df)
    st.success("Dataset updated successfully.")

# ========================
# Backtest
# ========================

# SOFR is used as the risk-free rate in the Sharpe computation below.
with st.spinner("Pulling SOFR from FRED..."):
    sofr = get_sofr_series()

with st.spinner("Running backtest..."):
    results = run_backtest(
        df=df,
        initial_capital=initial_capital,
        vol_target=vol_target,
        lookback=lookback,
    )

metrics = compute_metrics(results["returns"], sofr)

# ========================
# Layout
# ========================

col1, col2, col3, col4 = st.columns(4)

col1.metric("CAGR", f"{metrics['cagr']:.2%}")
col2.metric("Sharpe (SOFR)", f"{metrics['sharpe']:.2f}")
col3.metric("Max Drawdown", f"{metrics['max_dd']:.2%}")
col4.metric("Volatility", f"{metrics['vol']:.2%}")

st.line_chart(results["equity_curve"])

st.subheader("Current Allocation")
st.dataframe(results["latest_allocation"])
hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/requirements.txt
CHANGED
|
@@ -1,8 +1,6 @@
|
|
| 1 |
streamlit
|
| 2 |
pandas
|
| 3 |
-
numpy
|
| 4 |
yfinance
|
|
|
|
| 5 |
datasets
|
| 6 |
huggingface_hub
|
| 7 |
-
fredapi
|
| 8 |
-
scipy
|
|
|
|
| 1 |
streamlit
|
| 2 |
pandas
|
|
|
|
| 3 |
yfinance
|
| 4 |
+
pandas-datareader
|
| 5 |
datasets
|
| 6 |
huggingface_hub
|
|
|
|
|
|
hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/requirements.txt
CHANGED
|
@@ -1,6 +1,6 @@
|
|
| 1 |
streamlit
|
| 2 |
pandas
|
| 3 |
-
yfinance
|
| 4 |
pandas-datareader
|
| 5 |
-
|
| 6 |
-
|
|
|
|
|
|
| 1 |
streamlit
|
| 2 |
pandas
|
|
|
|
| 3 |
pandas-datareader
|
| 4 |
+
yfinance
|
| 5 |
+
numpy
|
| 6 |
+
pandas_market_calendars
|
hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/hf_space/requirements.txt
CHANGED
|
@@ -4,3 +4,4 @@ pandas-datareader
|
|
| 4 |
yfinance
|
| 5 |
numpy
|
| 6 |
pandas_market_calendars
|
|
|
|
|
|
| 4 |
yfinance
|
| 5 |
numpy
|
| 6 |
pandas_market_calendars
|
| 7 |
+
huggingface_hub
|