Upload 2 files
Browse files- app/app.py +1495 -0
- app/utils.py +578 -67
app/app.py
ADDED
|
@@ -0,0 +1,1495 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
#!/usr/bin/env python3
|
| 2 |
+
"""
|
| 3 |
+
Crypto Data Aggregator - Complete Gradio Dashboard
|
| 4 |
+
6-tab comprehensive interface for cryptocurrency data analysis
|
| 5 |
+
"""
|
| 6 |
+
|
| 7 |
+
import gradio as gr
|
| 8 |
+
import pandas as pd
|
| 9 |
+
import plotly.graph_objects as go
|
| 10 |
+
from plotly.subplots import make_subplots
|
| 11 |
+
from datetime import datetime, timedelta
|
| 12 |
+
import json
|
| 13 |
+
import threading
|
| 14 |
+
import time
|
| 15 |
+
import logging
|
| 16 |
+
from typing import List, Dict, Optional, Tuple, Any
|
| 17 |
+
import traceback
|
| 18 |
+
|
| 19 |
+
# Import local modules
|
| 20 |
+
import config
|
| 21 |
+
import database
|
| 22 |
+
import collectors
|
| 23 |
+
import ai_models
|
| 24 |
+
import utils
|
| 25 |
+
|
| 26 |
+
# Setup logging
# Module-wide logger configured by the project's utils helper; every tab
# function below logs through this instance.
logger = utils.setup_logging()

# Initialize database
# Single shared database handle used by all tab functions
# (presumably a singleton returned by database.get_database() —
# confirm it is safe to share across the Gradio worker threads).
db = database.get_database()

# Global state for background collection
# _collection_started guards against launching the background collector twice;
# _collection_lock serializes reads/writes of that flag across threads.
_collection_started = False
_collection_lock = threading.Lock()
|
| 35 |
+
|
| 36 |
+
# ==================== TAB 1: LIVE DASHBOARD ====================
|
| 37 |
+
|
| 38 |
+
def _empty_dashboard_frame() -> pd.DataFrame:
    """Return an empty DataFrame with the dashboard's column schema.

    Shared by the no-data and no-match paths so the column layout is
    defined in exactly one place.
    """
    return pd.DataFrame({
        "Rank": [],
        "Name": [],
        "Symbol": [],
        "Price (USD)": [],
        "24h Change (%)": [],
        "Volume": [],
        "Market Cap": []
    })


def get_live_dashboard(search_filter: str = "") -> pd.DataFrame:
    """
    Get live dashboard data with top 100 cryptocurrencies.

    Args:
        search_filter: Case-insensitive substring matched against the coin
            name or symbol; empty string disables filtering.

    Returns:
        DataFrame with formatted cryptocurrency data, sorted by rank.
        On any failure a single-column "Error" DataFrame is returned
        instead of raising, so the UI always has something to render.
    """
    try:
        logger.info("Fetching live dashboard data...")

        # Get latest prices from database
        prices = db.get_latest_prices(100)

        if not prices:
            logger.warning("No price data available")
            return _empty_dashboard_frame()

        # Hoist the filter normalization out of the loop.
        search_lower = search_filter.lower()

        df_data = []
        for price in prices:
            # dict.get's default only applies when the key is *absent*;
            # a present-but-None value would previously crash on .upper().
            name = price.get('name') or 'Unknown'
            symbol = price.get('symbol') or 'N/A'

            # Apply search filter if provided
            if search_lower and (search_lower not in name.lower()
                                 and search_lower not in symbol.lower()):
                continue

            change = price.get('percent_change_24h')
            df_data.append({
                "Rank": price.get('rank', 999),
                "Name": name,
                "Symbol": symbol.upper(),
                "Price (USD)": f"${price.get('price_usd', 0):,.2f}" if price.get('price_usd') else "N/A",
                "24h Change (%)": f"{change:+.2f}%" if change is not None else "N/A",
                "Volume": utils.format_number(price.get('volume_24h', 0)),
                "Market Cap": utils.format_number(price.get('market_cap', 0))
            })

        df = pd.DataFrame(df_data)

        if df.empty:
            logger.warning("No data matches filter criteria")
            return _empty_dashboard_frame()

        # Sort by rank
        df = df.sort_values('Rank')

        logger.info(f"Dashboard loaded with {len(df)} cryptocurrencies")
        return df

    except Exception as e:
        logger.error(f"Error in get_live_dashboard: {e}\n{traceback.format_exc()}")
        return pd.DataFrame({
            "Error": [f"Failed to load dashboard: {str(e)}"]
        })
|
| 113 |
+
|
| 114 |
+
|
| 115 |
+
def refresh_price_data() -> Tuple[pd.DataFrame, str]:
    """
    Manually trigger price data collection and refresh the dashboard.

    Returns:
        Tuple of (DataFrame, status_message); on failure the dashboard is
        still re-rendered and the message carries the error text.
    """
    try:
        logger.info("Manual refresh triggered...")

        # Pull fresh price data before re-rendering.
        ok, record_count = collectors.collect_price_data()

        status = (
            f"✅ Successfully refreshed! Collected {record_count} price records."
            if ok
            else f"⚠️ Refresh completed with warnings. Collected {record_count} records."
        )

        # Hand back the freshly rebuilt dashboard alongside the status line.
        return get_live_dashboard(), status

    except Exception as e:
        logger.error(f"Error in refresh_price_data: {e}")
        return get_live_dashboard(), f"❌ Refresh failed: {str(e)}"
|
| 141 |
+
|
| 142 |
+
|
| 143 |
+
# ==================== TAB 2: HISTORICAL CHARTS ====================
|
| 144 |
+
|
| 145 |
+
def get_available_symbols() -> List[str]:
    """
    Get list of available cryptocurrency symbols from the database.

    Returns:
        Sorted "Name (SYMBOL)" display strings for every coin that has a
        symbol; falls back to a small default list when the database is
        empty or the query fails.
    """
    fallback = ["BTC", "ETH", "BNB"]
    try:
        prices = db.get_latest_prices(100)

        # A set comprehension deduplicates directly and sorted() accepts any
        # iterable, so the original sorted(list(set([...]))) wrappers are
        # unnecessary.  `or 'Unknown'` covers names stored as None (dict.get's
        # default only applies when the key is absent).
        symbols = sorted({
            f"{p.get('name') or 'Unknown'} ({p.get('symbol', 'N/A').upper()})"
            for p in prices if p.get('symbol')
        })

        return symbols or fallback

    except Exception as e:
        logger.error(f"Error getting symbols: {e}")
        return fallback
|
| 162 |
+
|
| 163 |
+
|
| 164 |
+
def generate_chart(symbol_display: str, timeframe: str) -> go.Figure:
    """
    Generate interactive plotly chart with price history and technical indicators

    Args:
        symbol_display: Display name like "Bitcoin (BTC)"
        timeframe: Time period (1d, 7d, 30d, 90d, 1y, All)

    Returns:
        Plotly figure with price chart, volume, MA, and RSI.
        Never raises: missing data and internal errors both yield an
        annotated placeholder figure instead.
    """
    try:
        logger.info(f"Generating chart for {symbol_display} - {timeframe}")

        # Extract symbol from display name.
        # Accepts either "Bitcoin (BTC)" (takes the text in parentheses)
        # or a bare ticker such as "btc".
        if '(' in symbol_display and ')' in symbol_display:
            symbol = symbol_display.split('(')[1].split(')')[0].strip().upper()
        else:
            symbol = symbol_display.strip().upper()

        # Determine hours to look back
        timeframe_hours = {
            "1d": 24,
            "7d": 24 * 7,
            "30d": 24 * 30,
            "90d": 24 * 90,
            "1y": 24 * 365,
            "All": 24 * 365 * 10  # 10 years
        }
        # Unknown timeframe labels fall back to 168 hours (7 days).
        hours = timeframe_hours.get(timeframe, 168)

        # Get price history
        history = db.get_price_history(symbol, hours)

        if not history:
            # Try to find by name instead — the dropdown entry may have
            # been a coin name rather than a ticker.
            prices = db.get_latest_prices(100)
            matching = [p for p in prices if symbol.lower() in (p.get('name') or '').lower()]

            if matching:
                symbol = matching[0].get('symbol', symbol)
                history = db.get_price_history(symbol, hours)

        if not history or len(history) < 2:
            # Create empty chart with message
            # (at least two points are needed to draw a line).
            fig = go.Figure()
            fig.add_annotation(
                text=f"No historical data available for {symbol}<br>Try refreshing or selecting a different cryptocurrency",
                xref="paper", yref="paper",
                x=0.5, y=0.5, showarrow=False,
                font=dict(size=16)
            )
            fig.update_layout(
                title=f"{symbol} - No Data Available",
                height=600
            )
            return fig

        # Extract data.
        # NOTE(review): a non-string timestamp silently becomes datetime.now(),
        # which plots that sample at the current time — confirm whether rows
        # can actually carry non-ISO timestamps and whether skipping them
        # would be safer.
        timestamps = [datetime.fromisoformat(h['timestamp'].replace('Z', '+00:00')) if isinstance(h['timestamp'], str) else datetime.now() for h in history]
        prices_data = [h.get('price_usd', 0) for h in history]
        volumes = [h.get('volume_24h', 0) for h in history]

        # Calculate technical indicators.
        # Each indicator is fed the full prefix prices_data[:i+1]; the
        # per-iteration slice copy makes this loop O(n^2) overall —
        # acceptable at these row counts but worth revisiting if histories
        # grow large.  Leading entries are None so the indicator series stay
        # aligned with `timestamps` (plotly simply skips None points).
        ma7_values = []
        ma30_values = []
        rsi_values = []

        for i in range(len(prices_data)):
            # MA7 — defined once 7 samples are available
            if i >= 6:
                ma7 = utils.calculate_moving_average(prices_data[:i+1], 7)
                ma7_values.append(ma7)
            else:
                ma7_values.append(None)

            # MA30 — defined once 30 samples are available
            if i >= 29:
                ma30 = utils.calculate_moving_average(prices_data[:i+1], 30)
                ma30_values.append(ma30)
            else:
                ma30_values.append(None)

            # RSI — defined once 15 samples (14 deltas) are available
            if i >= 14:
                rsi = utils.calculate_rsi(prices_data[:i+1], 14)
                rsi_values.append(rsi)
            else:
                rsi_values.append(None)

        # Create subplots: Price + Volume + RSI
        # (price pane gets half the height, the other two a quarter each).
        fig = make_subplots(
            rows=3, cols=1,
            shared_xaxes=True,
            vertical_spacing=0.05,
            row_heights=[0.5, 0.25, 0.25],
            subplot_titles=(f'{symbol} Price Chart', 'Volume', 'RSI (14)')
        )

        # Price line
        fig.add_trace(
            go.Scatter(
                x=timestamps,
                y=prices_data,
                name='Price',
                line=dict(color='#2962FF', width=2),
                hovertemplate='<b>Price</b>: $%{y:,.2f}<br><b>Date</b>: %{x}<extra></extra>'
            ),
            row=1, col=1
        )

        # MA7 overlay on the price pane
        fig.add_trace(
            go.Scatter(
                x=timestamps,
                y=ma7_values,
                name='MA(7)',
                line=dict(color='#FF6D00', width=1, dash='dash'),
                hovertemplate='<b>MA(7)</b>: $%{y:,.2f}<extra></extra>'
            ),
            row=1, col=1
        )

        # MA30 overlay on the price pane
        fig.add_trace(
            go.Scatter(
                x=timestamps,
                y=ma30_values,
                name='MA(30)',
                line=dict(color='#00C853', width=1, dash='dot'),
                hovertemplate='<b>MA(30)</b>: $%{y:,.2f}<extra></extra>'
            ),
            row=1, col=1
        )

        # Volume bars (middle pane)
        fig.add_trace(
            go.Bar(
                x=timestamps,
                y=volumes,
                name='Volume',
                marker=dict(color='rgba(100, 149, 237, 0.5)'),
                hovertemplate='<b>Volume</b>: %{y:,.0f}<extra></extra>'
            ),
            row=2, col=1
        )

        # RSI (bottom pane)
        fig.add_trace(
            go.Scatter(
                x=timestamps,
                y=rsi_values,
                name='RSI',
                line=dict(color='#9C27B0', width=2),
                hovertemplate='<b>RSI</b>: %{y:.2f}<extra></extra>'
            ),
            row=3, col=1
        )

        # Add RSI reference lines (70 = conventional overbought, 30 = oversold)
        fig.add_hline(y=70, line_dash="dash", line_color="red", opacity=0.5, row=3, col=1)
        fig.add_hline(y=30, line_dash="dash", line_color="green", opacity=0.5, row=3, col=1)

        # Update layout — unified hover plus a horizontal legend above the chart
        fig.update_layout(
            title=f'{symbol} - {timeframe} Analysis',
            height=800,
            hovermode='x unified',
            showlegend=True,
            legend=dict(
                orientation="h",
                yanchor="bottom",
                y=1.02,
                xanchor="right",
                x=1
            )
        )

        # Update axes
        fig.update_xaxes(title_text="Date", row=3, col=1)
        fig.update_yaxes(title_text="Price (USD)", row=1, col=1)
        fig.update_yaxes(title_text="Volume", row=2, col=1)
        fig.update_yaxes(title_text="RSI", row=3, col=1, range=[0, 100])

        logger.info(f"Chart generated successfully for {symbol}")
        return fig

    except Exception as e:
        logger.error(f"Error generating chart: {e}\n{traceback.format_exc()}")

        # Return error chart — the UI always receives a figure, never an
        # exception.
        fig = go.Figure()
        fig.add_annotation(
            text=f"Error generating chart:<br>{str(e)}",
            xref="paper", yref="paper",
            x=0.5, y=0.5, showarrow=False,
            font=dict(size=14, color="red")
        )
        fig.update_layout(title="Chart Error", height=600)
        return fig
|
| 364 |
+
|
| 365 |
+
|
| 366 |
+
# ==================== TAB 3: NEWS & SENTIMENT ====================
|
| 367 |
+
|
| 368 |
+
def get_news_feed(sentiment_filter: str = "All", coin_filter: str = "All") -> str:
    """
    Get news feed with sentiment analysis as HTML cards.

    Args:
        sentiment_filter: Filter by sentiment (All, Positive, Neutral,
            Negative, Very Positive, Very Negative)
        coin_filter: Filter by coin (All, BTC, ETH, etc.)

    Returns:
        HTML string with a market-sentiment gauge followed by one card per
        article. On failure an HTML error panel is returned instead of
        raising, so the UI always has something to render.
    """
    # Local import keeps the module's import block untouched.  Escaping is
    # required because titles/summaries/URLs come from external news feeds
    # and are untrusted — interpolating them raw into HTML is an XSS vector.
    from html import escape

    try:
        logger.info(f"Fetching news feed: sentiment={sentiment_filter}, coin={coin_filter}")

        # Map sentiment filter (UI label -> database value; None = no filter)
        sentiment_map = {
            "All": None,
            "Positive": "positive",
            "Neutral": "neutral",
            "Negative": "negative",
            "Very Positive": "very_positive",
            "Very Negative": "very_negative"
        }

        sentiment_db = sentiment_map.get(sentiment_filter)

        # Get news from database.
        # NOTE(review): when a coin filter is active the sentiment filter is
        # silently ignored (get_news_by_coin takes no sentiment argument) —
        # confirm whether combined filtering is intended.
        if coin_filter != "All":
            news_list = db.get_news_by_coin(coin_filter, limit=50)
        else:
            news_list = db.get_latest_news(limit=50, sentiment=sentiment_db)

        if not news_list:
            return """
            <div style='text-align: center; padding: 40px; color: #666;'>
                <h3>No news articles found</h3>
                <p>Try adjusting your filters or refresh the data</p>
            </div>
            """

        # Calculate overall market sentiment
        sentiment_scores = [n.get('sentiment_score', 0) for n in news_list if n.get('sentiment_score') is not None]
        avg_sentiment = sum(sentiment_scores) / len(sentiment_scores) if sentiment_scores else 0
        sentiment_gauge = int((avg_sentiment + 1) * 50)  # Convert -1 to 1 -> 0 to 100

        # Determine gauge color
        if sentiment_gauge >= 60:
            gauge_color = "#4CAF50"
            gauge_label = "Bullish"
        elif sentiment_gauge <= 40:
            gauge_color = "#F44336"
            gauge_label = "Bearish"
        else:
            gauge_color = "#FF9800"
            gauge_label = "Neutral"

        # Build HTML (doubled braces are literal CSS braces inside the f-string)
        html = f"""
        <style>
            .sentiment-gauge {{
                background: linear-gradient(90deg, #F44336 0%, #FF9800 50%, #4CAF50 100%);
                height: 30px;
                border-radius: 15px;
                position: relative;
                margin: 20px 0;
            }}
            .sentiment-indicator {{
                position: absolute;
                left: {sentiment_gauge}%;
                top: -5px;
                width: 40px;
                height: 40px;
                background: white;
                border: 3px solid {gauge_color};
                border-radius: 50%;
                transform: translateX(-50%);
            }}
            .news-card {{
                background: white;
                border: 1px solid #e0e0e0;
                border-radius: 8px;
                padding: 16px;
                margin: 12px 0;
                box-shadow: 0 2px 4px rgba(0,0,0,0.1);
                transition: box-shadow 0.3s;
            }}
            .news-card:hover {{
                box-shadow: 0 4px 8px rgba(0,0,0,0.2);
            }}
            .news-title {{
                font-size: 18px;
                font-weight: bold;
                color: #333;
                margin-bottom: 8px;
            }}
            .news-meta {{
                font-size: 12px;
                color: #666;
                margin-bottom: 8px;
            }}
            .sentiment-badge {{
                display: inline-block;
                padding: 4px 12px;
                border-radius: 12px;
                font-size: 11px;
                font-weight: bold;
                margin-left: 8px;
            }}
            .sentiment-positive {{ background: #C8E6C9; color: #2E7D32; }}
            .sentiment-very_positive {{ background: #81C784; color: #1B5E20; }}
            .sentiment-neutral {{ background: #FFF9C4; color: #F57F17; }}
            .sentiment-negative {{ background: #FFCDD2; color: #C62828; }}
            .sentiment-very_negative {{ background: #EF5350; color: #B71C1C; }}
            .news-summary {{
                color: #555;
                line-height: 1.5;
                margin-bottom: 8px;
            }}
            .news-link {{
                color: #2962FF;
                text-decoration: none;
                font-weight: 500;
            }}
            .news-link:hover {{
                text-decoration: underline;
            }}
        </style>

        <div style='margin-bottom: 30px;'>
            <h2 style='margin-bottom: 10px;'>Market Sentiment Gauge</h2>
            <div style='text-align: center; font-size: 24px; font-weight: bold; color: {gauge_color};'>
                {gauge_label} ({sentiment_gauge}/100)
            </div>
            <div class='sentiment-gauge'>
                <div class='sentiment-indicator'></div>
            </div>
        </div>

        <h2>Latest News ({len(news_list)} articles)</h2>
        """

        # Add news cards — every feed-derived string is HTML-escaped before
        # interpolation; the URL additionally has quotes escaped since it
        # lands inside an href attribute.
        for news in news_list:
            title = escape(news.get('title', 'No Title') or 'No Title')
            summary = escape(news.get('summary', '') or '')
            url = escape(news.get('url', '#') or '#', quote=True)
            source = escape(news.get('source', 'Unknown') or 'Unknown')
            published = news.get('published_date', news.get('timestamp', ''))

            # Format date
            try:
                if published:
                    dt = datetime.fromisoformat(published.replace('Z', '+00:00'))
                    date_str = dt.strftime('%b %d, %Y %H:%M')
                else:
                    date_str = 'Unknown date'
            except (ValueError, AttributeError, TypeError):
                # Narrowed from a bare except: only malformed or non-string
                # timestamps are expected here.
                date_str = 'Unknown date'

            # Get sentiment (escaped defensively: the label also becomes part
            # of a CSS class name)
            sentiment_label = escape(news.get('sentiment_label', 'neutral') or 'neutral')
            sentiment_class = f"sentiment-{sentiment_label}"
            sentiment_display = sentiment_label.replace('_', ' ').title()

            # Related coins — stored either as a list or a JSON-encoded string
            related_coins = news.get('related_coins', [])
            if isinstance(related_coins, str):
                try:
                    related_coins = json.loads(related_coins)
                except json.JSONDecodeError:
                    related_coins = []

            coins_str = escape(', '.join(related_coins[:5]) if related_coins else 'General')

            html += f"""
            <div class='news-card'>
                <div class='news-title'>
                    <a href='{url}' target='_blank' class='news-link'>{title}</a>
                </div>
                <div class='news-meta'>
                    <strong>{source}</strong> | {date_str} | Coins: {coins_str}
                    <span class='sentiment-badge {sentiment_class}'>{sentiment_display}</span>
                </div>
                <div class='news-summary'>{summary}</div>
            </div>
            """

        return html

    except Exception as e:
        logger.error(f"Error in get_news_feed: {e}\n{traceback.format_exc()}")
        return f"""
        <div style='color: red; padding: 20px;'>
            <h3>Error Loading News</h3>
            <p>{str(e)}</p>
        </div>
        """
|
| 565 |
+
|
| 566 |
+
|
| 567 |
+
# ==================== TAB 4: AI ANALYSIS ====================
|
| 568 |
+
|
| 569 |
+
def generate_ai_analysis(symbol_display: str) -> str:
    """
    Generate AI-powered market analysis for a cryptocurrency.

    Pulls up to 30 days of price history from the database, runs it through
    ``ai_models.analyze_market_trend``, renders the results as a styled HTML
    panel, and persists the analysis via ``db.save_analysis``.

    Args:
        symbol_display: Display name like "Bitcoin (BTC)"

    Returns:
        HTML with analysis results (or an HTML error panel on failure)
    """
    try:
        logger.info(f"Generating AI analysis for {symbol_display}")

        # Extract symbol: "Bitcoin (BTC)" -> "BTC"; otherwise use the raw input
        if '(' in symbol_display and ')' in symbol_display:
            symbol = symbol_display.split('(')[1].split(')')[0].strip().upper()
        else:
            symbol = symbol_display.strip().upper()

        # Get price history (last 30 days)
        history = db.get_price_history(symbol, hours=24*30)

        # Need at least two data points to compute a trend
        if not history or len(history) < 2:
            return f"""
            <div style='padding: 20px; text-align: center; color: #666;'>
                <h3>Insufficient Data</h3>
                <p>Not enough historical data available for {symbol} to perform analysis.</p>
                <p>Please try a different cryptocurrency or wait for more data to be collected.</p>
            </div>
            """

        # Prepare price history for AI analysis (defaults guard missing keys)
        price_history = [
            {
                'price': h.get('price_usd', 0),
                'timestamp': h.get('timestamp', ''),
                'volume': h.get('volume_24h', 0)
            }
            for h in history
        ]

        # Call AI analysis
        analysis = ai_models.analyze_market_trend(price_history)

        # Get trend info — every read uses a default so a partial analysis
        # dict still renders instead of raising KeyError
        trend = analysis.get('trend', 'Neutral')
        current_price = analysis.get('current_price', 0)
        support = analysis.get('support_level', 0)
        resistance = analysis.get('resistance_level', 0)
        prediction = analysis.get('prediction', 'No prediction available')
        confidence = analysis.get('confidence', 0)
        rsi = analysis.get('rsi', 50)
        ma7 = analysis.get('ma7', 0)
        ma30 = analysis.get('ma30', 0)

        # Determine trend color and icon (green/red/orange for bull/bear/neutral)
        if trend == "Bullish":
            trend_color = "#4CAF50"
            trend_icon = "📈"
        elif trend == "Bearish":
            trend_color = "#F44336"
            trend_icon = "📉"
        else:
            trend_color = "#FF9800"
            trend_icon = "➡️"

        # Format confidence as percentage
        # (assumes confidence is in [0, 1] — TODO confirm against ai_models)
        confidence_pct = int(confidence * 100)

        # Build HTML (doubled braces {{ }} are literal CSS braces in the f-string)
        html = f"""
        <style>
            .analysis-container {{
                padding: 20px;
                background: linear-gradient(135deg, #667eea 0%, #764ba2 100%);
                border-radius: 12px;
                color: white;
                margin-bottom: 20px;
            }}
            .analysis-header {{
                text-align: center;
                margin-bottom: 30px;
            }}
            .trend-indicator {{
                font-size: 48px;
                margin: 20px 0;
            }}
            .metric-grid {{
                display: grid;
                grid-template-columns: repeat(auto-fit, minmax(200px, 1fr));
                gap: 15px;
                margin: 20px 0;
            }}
            .metric-card {{
                background: rgba(255, 255, 255, 0.1);
                padding: 15px;
                border-radius: 8px;
                backdrop-filter: blur(10px);
            }}
            .metric-label {{
                font-size: 12px;
                opacity: 0.8;
                margin-bottom: 5px;
            }}
            .metric-value {{
                font-size: 24px;
                font-weight: bold;
            }}
            .prediction-box {{
                background: rgba(255, 255, 255, 0.15);
                padding: 20px;
                border-radius: 8px;
                margin: 20px 0;
                border-left: 4px solid {trend_color};
            }}
            .confidence-bar {{
                background: rgba(255, 255, 255, 0.2);
                height: 30px;
                border-radius: 15px;
                overflow: hidden;
                margin-top: 10px;
            }}
            .confidence-fill {{
                background: {trend_color};
                height: 100%;
                width: {confidence_pct}%;
                transition: width 0.5s ease;
                display: flex;
                align-items: center;
                justify-content: center;
                font-weight: bold;
            }}
            .history-section {{
                background: white;
                padding: 20px;
                border-radius: 8px;
                margin-top: 20px;
                color: #333;
            }}
        </style>

        <div class='analysis-container'>
            <div class='analysis-header'>
                <h1>{symbol} Market Analysis</h1>
                <div class='trend-indicator'>{trend_icon}</div>
                <h2 style='color: {trend_color};'>{trend} Trend</h2>
            </div>

            <div class='metric-grid'>
                <div class='metric-card'>
                    <div class='metric-label'>Current Price</div>
                    <div class='metric-value'>${current_price:,.2f}</div>
                </div>
                <div class='metric-card'>
                    <div class='metric-label'>Support Level</div>
                    <div class='metric-value'>${support:,.2f}</div>
                </div>
                <div class='metric-card'>
                    <div class='metric-label'>Resistance Level</div>
                    <div class='metric-value'>${resistance:,.2f}</div>
                </div>
                <div class='metric-card'>
                    <div class='metric-label'>RSI (14)</div>
                    <div class='metric-value'>{rsi:.1f}</div>
                </div>
                <div class='metric-card'>
                    <div class='metric-label'>MA (7)</div>
                    <div class='metric-value'>${ma7:,.2f}</div>
                </div>
                <div class='metric-card'>
                    <div class='metric-label'>MA (30)</div>
                    <div class='metric-value'>${ma30:,.2f}</div>
                </div>
            </div>

            <div class='prediction-box'>
                <h3>📊 Market Prediction</h3>
                <p style='font-size: 16px; line-height: 1.6;'>{prediction}</p>
            </div>

            <div>
                <h3>Confidence Score</h3>
                <div class='confidence-bar'>
                    <div class='confidence-fill'>{confidence_pct}%</div>
                </div>
            </div>
        </div>

        <div class='history-section'>
            <h3>📜 Recent Analysis History</h3>
            <p>Latest analysis generated on {datetime.now().strftime('%B %d, %Y at %H:%M:%S')}</p>
            <p><strong>Data Points Analyzed:</strong> {len(price_history)}</p>
            <p><strong>Time Range:</strong> {len(price_history)} hours of historical data</p>
        </div>
        """

        # Save analysis to database so it appears in the "All market analyses" query
        db.save_analysis({
            'symbol': symbol,
            'timeframe': '30d',
            'trend': trend,
            'support_level': support,
            'resistance_level': resistance,
            'prediction': prediction,
            'confidence': confidence
        })

        logger.info(f"AI analysis completed for {symbol}")
        return html

    except Exception as e:
        # Top-level guard: render the failure instead of crashing the UI callback
        logger.error(f"Error in generate_ai_analysis: {e}\n{traceback.format_exc()}")
        return f"""
        <div style='padding: 20px; color: red;'>
            <h3>Analysis Error</h3>
            <p>Failed to generate analysis: {str(e)}</p>
            <p>Please try again or select a different cryptocurrency.</p>
        </div>
        """
|
| 788 |
+
|
| 789 |
+
|
| 790 |
+
# ==================== TAB 5: DATABASE EXPLORER ====================
|
| 791 |
+
|
| 792 |
+
def execute_database_query(query_type: str, custom_query: str = "") -> Tuple[pd.DataFrame, str]:
    """
    Execute a database query and return results as a DataFrame.

    Args:
        query_type: Type of pre-built query, or "Custom Query"
        custom_query: Custom SQL query (used only when query_type is "Custom Query")

    Returns:
        Tuple of (DataFrame with results, status message).  On any failure an
        empty DataFrame plus an explanatory message is returned — this function
        never raises, since it backs a UI callback.
    """
    try:
        logger.info(f"Executing database query: {query_type}")

        if query_type == "Top 10 gainers in last 24h":
            results = db.get_top_gainers(10)
            message = f"✅ Found {len(results)} gainers"

        elif query_type == "All news with positive sentiment":
            results = db.get_latest_news(limit=100, sentiment="positive")
            message = f"✅ Found {len(results)} positive news articles"

        elif query_type == "Price history for BTC":
            results = db.get_price_history("BTC", 168)  # 168 hours = 7 days
            message = f"✅ Found {len(results)} BTC price records"

        elif query_type == "Database statistics":
            stats = db.get_database_stats()
            # Convert the stats dict into rows so it renders as a table
            results = [{"Metric": k, "Value": str(v)} for k, v in stats.items()]
            message = "✅ Database statistics retrieved"

        elif query_type == "Latest 100 prices":
            results = db.get_latest_prices(100)
            message = f"✅ Retrieved {len(results)} latest prices"

        elif query_type == "Recent news (50)":
            results = db.get_latest_news(50)
            message = f"✅ Retrieved {len(results)} recent news articles"

        elif query_type == "All market analyses":
            results = db.get_all_analyses(100)
            message = f"✅ Retrieved {len(results)} market analyses"

        elif query_type == "Custom Query":
            sql = custom_query.strip()
            if not sql:
                return pd.DataFrame(), "⚠️ Please enter a custom query"

            # Security checks: read-only, single-statement queries only.
            # startswith('SELECT') alone is bypassable by stacking statements
            # ("SELECT 1; DROP TABLE prices"), so also reject any embedded
            # statement separator.
            if not sql.upper().startswith('SELECT'):
                return pd.DataFrame(), "❌ Only SELECT queries are allowed for security reasons"
            if ';' in sql.rstrip(';'):
                return pd.DataFrame(), "❌ Only a single SELECT statement is allowed"

            results = db.execute_safe_query(sql)
            message = f"✅ Custom query returned {len(results)} rows"

        else:
            return pd.DataFrame(), "❌ Unknown query type"

        # Convert to DataFrame
        if results:
            df = pd.DataFrame(results)

            # Truncate long text fields so the table stays readable in the UI
            for col in df.columns:
                if df[col].dtype == 'object':
                    df[col] = df[col].apply(
                        lambda x: x[:100] + '...' if isinstance(x, str) and len(x) > 100 else x
                    )

            return df, message
        else:
            return pd.DataFrame(), "⚠️ Query returned no results"

    except Exception as e:
        logger.error(f"Error executing query: {e}\n{traceback.format_exc()}")
        return pd.DataFrame(), f"❌ Query failed: {str(e)}"
|
| 866 |
+
|
| 867 |
+
|
| 868 |
+
def export_query_results(df: pd.DataFrame) -> Tuple[str, str]:
    """
    Export query results to a timestamped CSV file in config.DATA_DIR.

    Args:
        df: DataFrame to export

    Returns:
        Tuple of (file_path, status_message); file_path is None on failure
        or when there is nothing to export.
    """
    try:
        if df.empty:
            return None, "⚠️ No data to export"

        # Timestamped filename so repeated exports never collide
        timestamp = datetime.now().strftime("%Y%m%d_%H%M%S")
        filename = f"query_export_{timestamp}.csv"
        filepath = config.DATA_DIR / filename

        # Delegate the actual CSV writing to the shared utility
        success = utils.export_to_csv(df.to_dict('records'), str(filepath))

        if success:
            # Bug fix: the success message previously contained the literal
            # text "(unknown)" instead of the exported file's name.
            return str(filepath), f"✅ Exported {len(df)} rows to {filename}"
        else:
            return None, "❌ Export failed"

    except Exception as e:
        logger.error(f"Error exporting results: {e}")
        return None, f"❌ Export error: {str(e)}"
|
| 898 |
+
|
| 899 |
+
|
| 900 |
+
# ==================== TAB 6: DATA SOURCES STATUS ====================
|
| 901 |
+
|
| 902 |
+
def get_data_sources_status() -> Tuple[pd.DataFrame, str]:
    """
    Get the health status of all data sources (APIs, feeds, database).

    Returns:
        Tuple of (DataFrame with one row per source, HTML error-log snippet).
        Never raises; on failure returns an empty DataFrame and an HTML error.
    """
    try:
        logger.info("Checking data sources status...")

        def _check_endpoint(url: str) -> Tuple[str, int]:
            """Probe one HTTP endpoint; return (status label, error count)."""
            try:
                import requests  # file already depends on requests; keep import local
                response = requests.get(url, timeout=5)
                if response.status_code == 200:
                    return "🟢 Online", 0
                return f"🟡 Status {response.status_code}", 1
            except Exception:
                # Narrowed from a bare `except:` — a bare except also swallows
                # KeyboardInterrupt/SystemExit.
                return "🔴 Offline", 1

        status_data = []

        # The three REST APIs share the same probe logic
        for name, url in (
            ("CoinGecko API", f"{config.COINGECKO_BASE_URL}/ping"),
            ("CoinCap API", f"{config.COINCAP_BASE_URL}/assets"),
            ("Binance API", f"{config.BINANCE_BASE_URL}/ping"),
        ):
            status, errors = _check_endpoint(url)
            status_data.append({
                "Data Source": name,
                "Status": status,
                "Last Update": datetime.now().strftime("%H:%M:%S"),
                "Errors": errors
            })

        # RSS feeds: no live probe yet — every configured feed is assumed
        # active (the original counted them all as OK). TODO: actually ping.
        rss_ok = len(config.RSS_FEEDS)
        status_data.append({
            "Data Source": f"RSS Feeds ({len(config.RSS_FEEDS)} sources)",
            "Status": f"🟢 {rss_ok} active",
            "Last Update": datetime.now().strftime("%H:%M:%S"),
            "Errors": 0
        })

        # Reddit endpoints: likewise assumed active for now
        reddit_ok = len(config.REDDIT_ENDPOINTS)
        status_data.append({
            "Data Source": f"Reddit ({len(config.REDDIT_ENDPOINTS)} subreddits)",
            "Status": f"🟢 {reddit_ok} active",
            "Last Update": datetime.now().strftime("%H:%M:%S"),
            "Errors": 0
        })

        # Check the SQLite database itself
        try:
            stats = db.get_database_stats()
            db_status = "🟢 Connected"
            db_error = 0
            last_update = stats.get('latest_price_update', 'Unknown')
        except Exception:
            db_status = "🔴 Error"
            db_error = 1
            last_update = "Unknown"

        status_data.append({
            "Data Source": "SQLite Database",
            "Status": db_status,
            "Last Update": last_update if last_update != 'Unknown' else datetime.now().strftime("%H:%M:%S"),
            "Errors": db_error
        })

        df = pd.DataFrame(status_data)

        # Append the recent error/warning log for display beside the table
        error_html = get_error_log_html()

        return df, error_html

    except Exception as e:
        logger.error(f"Error getting data sources status: {e}")
        return pd.DataFrame(), f"<p style='color: red;'>Error: {str(e)}</p>"
|
| 1033 |
+
|
| 1034 |
+
|
| 1035 |
+
def get_error_log_html() -> str:
    """Return the last 10 ERROR/WARNING lines of the log file as HTML.

    Only the tail (last 100 lines) is scanned to stay fast on large logs.
    Each line is HTML-escaped so that log content (which may contain '<',
    '>' or '&' from URLs/tracebacks) cannot break the page markup.
    """
    import html as _html  # local import: only needed by this helper

    try:
        if not config.LOG_FILE.exists():
            return "<p>No error log file found</p>"

        # Explicit encoding + errors='replace' so a stray non-UTF-8 byte in
        # the log cannot crash the status page.
        with open(config.LOG_FILE, 'r', encoding='utf-8', errors='replace') as f:
            lines = f.readlines()

        # Keep only ERROR/WARNING entries from the last 100 lines
        error_lines = [line for line in lines[-100:] if 'ERROR' in line or 'WARNING' in line]

        if not error_lines:
            return "<p style='color: green;'>✅ No recent errors or warnings</p>"

        # Show at most the 10 most recent
        error_lines = error_lines[-10:]

        html = "<h3>Recent Errors & Warnings</h3><div style='background: #f5f5f5; padding: 10px; border-radius: 5px; font-family: monospace; font-size: 12px;'>"

        for line in error_lines:
            # Color code by severity
            if 'ERROR' in line:
                color = 'red'
            elif 'WARNING' in line:
                color = 'orange'
            else:
                color = 'black'

            # Escape the log text before embedding it in the HTML fragment
            html += f"<div style='color: {color}; margin: 5px 0;'>{_html.escape(line.strip())}</div>"

        html += "</div>"

        return html

    except Exception as e:
        logger.error(f"Error reading log file: {e}")
        return f"<p style='color: red;'>Error reading log: {str(e)}</p>"
|
| 1074 |
+
|
| 1075 |
+
|
| 1076 |
+
def manual_data_collection() -> Tuple[pd.DataFrame, str, str]:
    """
    Manually trigger data collection for all sources.

    Runs the price, news, and sentiment collectors in sequence, accumulating
    a per-source status line for each, then refreshes the sources table.

    Returns:
        Tuple of (status DataFrame, status HTML, human-readable message)
    """
    try:
        logger.info("Manual data collection triggered...")

        report = ["🔄 Collecting data from all sources...\n\n"]

        # --- Prices ---
        try:
            ok, record_count = collectors.collect_price_data()
            if ok:
                report.append(f"✅ Prices: {record_count} records collected\n")
            else:
                report.append("⚠️ Prices: Collection had issues\n")
        except Exception as exc:
            report.append(f"❌ Prices: {str(exc)}\n")

        # --- News ---
        try:
            article_count = collectors.collect_news_data()
            report.append(f"✅ News: {article_count} articles collected\n")
        except Exception as exc:
            report.append(f"❌ News: {str(exc)}\n")

        # --- Sentiment ---
        try:
            sentiment_result = collectors.collect_sentiment_data()
            if sentiment_result:
                report.append(f"✅ Sentiment: {sentiment_result.get('classification', 'N/A')}\n")
            else:
                report.append("⚠️ Sentiment: No data collected\n")
        except Exception as exc:
            report.append(f"❌ Sentiment: {str(exc)}\n")

        report.append("\n✅ Data collection complete!")
        message = "".join(report)

        # Refresh the status table and error log now that new data is in
        status_df, status_html = get_data_sources_status()
        return status_df, status_html, message

    except Exception as e:
        logger.error(f"Error in manual data collection: {e}")
        status_df, status_html = get_data_sources_status()
        return status_df, status_html, f"❌ Collection failed: {str(e)}"
|
| 1126 |
+
|
| 1127 |
+
|
| 1128 |
+
# ==================== GRADIO INTERFACE ====================
|
| 1129 |
+
|
| 1130 |
+
def create_gradio_interface():
    """Create the complete Gradio interface with all 6 tabs.

    Builds a gr.Blocks app wiring each tab's widgets to the module-level
    callback functions (get_live_dashboard, generate_chart, get_news_feed,
    generate_ai_analysis, execute_database_query, get_data_sources_status,
    manual_data_collection) and returns the un-launched Blocks object.
    """

    # Custom CSS for better styling
    custom_css = """
    .gradio-container {
        max-width: 1400px !important;
    }
    .tab-nav button {
        font-size: 16px !important;
        font-weight: 600 !important;
    }
    """

    with gr.Blocks(
        title="Crypto Data Aggregator - Complete Dashboard",
        theme=gr.themes.Soft(),
        css=custom_css
    ) as interface:

        # Header
        gr.Markdown("""
        # 🚀 Crypto Data Aggregator - Complete Dashboard

        **Comprehensive cryptocurrency analytics platform** with real-time data, AI-powered insights, and advanced technical analysis.

        **Key Features:**
        - 📊 Live price tracking for top 100 cryptocurrencies
        - 📈 Historical charts with technical indicators (MA, RSI)
        - 📰 News aggregation with sentiment analysis
        - 🤖 AI-powered market trend predictions
        - 🗄️ Powerful database explorer with export functionality
        - 🔍 Real-time data source monitoring
        """)

        with gr.Tabs():

            # ==================== TAB 1: LIVE DASHBOARD ====================
            with gr.Tab("📊 Live Dashboard"):
                gr.Markdown("### Real-time cryptocurrency prices and market data")

                with gr.Row():
                    search_box = gr.Textbox(
                        label="Search/Filter",
                        placeholder="Enter coin name or symbol (e.g., Bitcoin, BTC)...",
                        scale=3
                    )
                    refresh_btn = gr.Button("🔄 Refresh Data", variant="primary", scale=1)

                dashboard_table = gr.Dataframe(
                    label="Top 100 Cryptocurrencies",
                    interactive=False,
                    wrap=True,
                    height=600
                )

                refresh_status = gr.Textbox(label="Status", interactive=False)

                # Auto-refresh timer (interval in seconds from config — TODO confirm units)
                timer = gr.Timer(value=config.AUTO_REFRESH_INTERVAL)

                # Load initial data when the page opens
                interface.load(
                    fn=get_live_dashboard,
                    outputs=dashboard_table
                )

                # Search/filter functionality — re-filters the table on every keystroke
                search_box.change(
                    fn=get_live_dashboard,
                    inputs=search_box,
                    outputs=dashboard_table
                )

                # Refresh button triggers a fresh fetch, not just a re-render
                refresh_btn.click(
                    fn=refresh_price_data,
                    outputs=[dashboard_table, refresh_status]
                )

                # Auto-refresh: re-render the table on each timer tick
                timer.tick(
                    fn=get_live_dashboard,
                    outputs=dashboard_table
                )

            # ==================== TAB 2: HISTORICAL CHARTS ====================
            with gr.Tab("📈 Historical Charts"):
                gr.Markdown("### Interactive price charts with technical analysis")

                with gr.Row():
                    symbol_dropdown = gr.Dropdown(
                        label="Select Cryptocurrency",
                        choices=get_available_symbols(),
                        value=get_available_symbols()[0] if get_available_symbols() else "BTC",
                        scale=2
                    )

                    timeframe_buttons = gr.Radio(
                        label="Timeframe",
                        choices=["1d", "7d", "30d", "90d", "1y", "All"],
                        value="7d",
                        scale=2
                    )

                chart_plot = gr.Plot(label="Price Chart with Indicators")

                with gr.Row():
                    generate_chart_btn = gr.Button("📊 Generate Chart", variant="primary")
                    # NOTE(review): export button has no click handler wired below — confirm intended
                    export_chart_btn = gr.Button("💾 Export Chart (PNG)")

                # Generate chart on demand
                generate_chart_btn.click(
                    fn=generate_chart,
                    inputs=[symbol_dropdown, timeframe_buttons],
                    outputs=chart_plot
                )

                # Also update on dropdown/timeframe change
                symbol_dropdown.change(
                    fn=generate_chart,
                    inputs=[symbol_dropdown, timeframe_buttons],
                    outputs=chart_plot
                )

                timeframe_buttons.change(
                    fn=generate_chart,
                    inputs=[symbol_dropdown, timeframe_buttons],
                    outputs=chart_plot
                )

                # Load initial chart
                interface.load(
                    fn=generate_chart,
                    inputs=[symbol_dropdown, timeframe_buttons],
                    outputs=chart_plot
                )

            # ==================== TAB 3: NEWS & SENTIMENT ====================
            with gr.Tab("📰 News & Sentiment"):
                gr.Markdown("### Latest cryptocurrency news with AI sentiment analysis")

                with gr.Row():
                    sentiment_filter = gr.Dropdown(
                        label="Filter by Sentiment",
                        choices=["All", "Positive", "Neutral", "Negative", "Very Positive", "Very Negative"],
                        value="All",
                        scale=1
                    )

                    coin_filter = gr.Dropdown(
                        label="Filter by Coin",
                        choices=["All", "BTC", "ETH", "BNB", "XRP", "ADA", "SOL", "DOT", "DOGE"],
                        value="All",
                        scale=1
                    )

                    news_refresh_btn = gr.Button("🔄 Refresh News", variant="primary", scale=1)

                news_html = gr.HTML(label="News Feed")

                # Load initial news
                interface.load(
                    fn=get_news_feed,
                    inputs=[sentiment_filter, coin_filter],
                    outputs=news_html
                )

                # Update on filter change
                sentiment_filter.change(
                    fn=get_news_feed,
                    inputs=[sentiment_filter, coin_filter],
                    outputs=news_html
                )

                coin_filter.change(
                    fn=get_news_feed,
                    inputs=[sentiment_filter, coin_filter],
                    outputs=news_html
                )

                # Refresh button
                news_refresh_btn.click(
                    fn=get_news_feed,
                    inputs=[sentiment_filter, coin_filter],
                    outputs=news_html
                )

            # ==================== TAB 4: AI ANALYSIS ====================
            with gr.Tab("🤖 AI Analysis"):
                gr.Markdown("### AI-powered market trend analysis and predictions")

                with gr.Row():
                    analysis_symbol = gr.Dropdown(
                        label="Select Cryptocurrency for Analysis",
                        choices=get_available_symbols(),
                        value=get_available_symbols()[0] if get_available_symbols() else "BTC",
                        scale=3
                    )

                    analyze_btn = gr.Button("🔮 Generate Analysis", variant="primary", scale=1)

                analysis_html = gr.HTML(label="AI Analysis Results")

                # Generate analysis on demand (can be slow: runs the AI model)
                analyze_btn.click(
                    fn=generate_ai_analysis,
                    inputs=analysis_symbol,
                    outputs=analysis_html
                )

            # ==================== TAB 5: DATABASE EXPLORER ====================
            with gr.Tab("🗄️ Database Explorer"):
                gr.Markdown("### Query and explore the cryptocurrency database")

                query_type = gr.Dropdown(
                    label="Select Query",
                    choices=[
                        "Top 10 gainers in last 24h",
                        "All news with positive sentiment",
                        "Price history for BTC",
                        "Database statistics",
                        "Latest 100 prices",
                        "Recent news (50)",
                        "All market analyses",
                        "Custom Query"
                    ],
                    value="Database statistics"
                )

                custom_query_box = gr.Textbox(
                    label="Custom SQL Query (SELECT only)",
                    placeholder="SELECT * FROM prices WHERE symbol = 'BTC' LIMIT 10",
                    lines=3,
                    visible=False
                )

                with gr.Row():
                    execute_btn = gr.Button("▶️ Execute Query", variant="primary")
                    export_btn = gr.Button("💾 Export to CSV")

                query_results = gr.Dataframe(label="Query Results", interactive=False, wrap=True)
                query_status = gr.Textbox(label="Status", interactive=False)
                export_status = gr.Textbox(label="Export Status", interactive=False)

                # Show the SQL textbox only when "Custom Query" is selected
                def toggle_custom_query(query_type):
                    return gr.update(visible=(query_type == "Custom Query"))

                query_type.change(
                    fn=toggle_custom_query,
                    inputs=query_type,
                    outputs=custom_query_box
                )

                # Execute query
                execute_btn.click(
                    fn=execute_database_query,
                    inputs=[query_type, custom_query_box],
                    outputs=[query_results, query_status]
                )

                # Export results (file path goes to a hidden textbox; only the
                # status message is shown)
                export_btn.click(
                    fn=export_query_results,
                    inputs=query_results,
                    outputs=[gr.Textbox(visible=False), export_status]
                )

                # Load initial query
                interface.load(
                    fn=execute_database_query,
                    inputs=[query_type, custom_query_box],
                    outputs=[query_results, query_status]
                )

            # ==================== TAB 6: DATA SOURCES STATUS ====================
            with gr.Tab("🔍 Data Sources Status"):
                gr.Markdown("### Monitor the health of all data sources")

                with gr.Row():
                    status_refresh_btn = gr.Button("🔄 Refresh Status", variant="primary")
                    collect_btn = gr.Button("📥 Run Manual Collection", variant="secondary")

                status_table = gr.Dataframe(label="Data Sources Status", interactive=False)
                error_log_html = gr.HTML(label="Error Log")
                collection_status = gr.Textbox(label="Collection Status", lines=8, interactive=False)

                # Load initial status
                interface.load(
                    fn=get_data_sources_status,
                    outputs=[status_table, error_log_html]
                )

                # Refresh status
                status_refresh_btn.click(
                    fn=get_data_sources_status,
                    outputs=[status_table, error_log_html]
                )

                # Manual collection (blocking call: runs all collectors)
                collect_btn.click(
                    fn=manual_data_collection,
                    outputs=[status_table, error_log_html, collection_status]
                )

        # Footer
        gr.Markdown("""
        ---
        **Crypto Data Aggregator** | Powered by CoinGecko, CoinCap, Binance APIs | AI Models by HuggingFace
        """)

    return interface
|
| 1443 |
+
|
| 1444 |
+
|
| 1445 |
+
# ==================== MAIN ENTRY POINT ====================
|
| 1446 |
+
|
| 1447 |
+
def main():
    """
    Main entry point: initialize the database, start background data
    collection (once), build the Gradio interface, and launch it.

    Raises:
        Exception: re-raised if the Gradio server fails to launch.
    """
    logger.info("=" * 60)
    logger.info("Starting Crypto Data Aggregator Dashboard")
    logger.info("=" * 60)

    # Initialize database. Called purely for its side effect of creating/
    # opening the DB; the previous unused `db = ...` binding was removed.
    logger.info("Initializing database...")
    database.get_database()
    logger.info("Database initialized successfully")

    # Start background data collection exactly once (lock guards against
    # concurrent or repeated calls to main()).
    global _collection_started
    with _collection_lock:
        if not _collection_started:
            logger.info("Starting background data collection...")
            collectors.schedule_data_collection()
            _collection_started = True
            logger.info("Background collection started")

    # Create Gradio interface
    logger.info("Creating Gradio interface...")
    interface = create_gradio_interface()

    # Launch Gradio (this call blocks until the server stops)
    logger.info("Launching Gradio dashboard...")
    logger.info(f"Server: {config.GRADIO_SERVER_NAME}:{config.GRADIO_SERVER_PORT}")
    logger.info(f"Share: {config.GRADIO_SHARE}")

    try:
        interface.launch(
            share=config.GRADIO_SHARE,
            server_name=config.GRADIO_SERVER_NAME,
            server_port=config.GRADIO_SERVER_PORT,
            show_error=True,
            quiet=False
        )
    except KeyboardInterrupt:
        # Ctrl-C: stop the background scheduler cleanly.
        logger.info("\nShutting down...")
        collectors.stop_scheduled_collection()
        logger.info("Shutdown complete")
    except Exception as e:
        logger.error(f"Error launching Gradio: {e}\n{traceback.format_exc()}")
        raise
|
| 1492 |
+
|
| 1493 |
+
|
| 1494 |
+
if __name__ == "__main__":
    # Launch the dashboard when this module is executed directly.
    main()
|
app/utils.py
CHANGED
|
@@ -1,75 +1,586 @@
|
|
| 1 |
-
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 2 |
import logging
|
| 3 |
-
import
|
| 4 |
-
from logging.handlers import RotatingFileHandler
|
| 5 |
-
from datetime import datetime
|
| 6 |
import json
|
| 7 |
-
|
| 8 |
-
|
| 9 |
-
|
| 10 |
-
|
| 11 |
-
|
| 12 |
-
|
| 13 |
-
|
| 14 |
-
|
| 15 |
-
|
| 16 |
-
|
| 17 |
-
|
| 18 |
-
|
| 19 |
-
|
| 20 |
-
|
| 21 |
-
|
| 22 |
-
|
| 23 |
-
|
| 24 |
-
|
| 25 |
-
|
| 26 |
-
|
| 27 |
-
|
| 28 |
-
|
| 29 |
# Create formatter
|
| 30 |
-
formatter = logging.Formatter(
|
| 31 |
-
|
| 32 |
-
|
| 33 |
-
|
| 34 |
-
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 35 |
console_handler.setFormatter(formatter)
|
| 36 |
-
file_handler.setFormatter(formatter)
|
| 37 |
-
|
| 38 |
-
# Add handlers to logger
|
| 39 |
logger.addHandler(console_handler)
|
| 40 |
-
|
| 41 |
-
|
| 42 |
return logger
|
| 43 |
|
| 44 |
-
|
| 45 |
-
|
| 46 |
-
|
| 47 |
-
|
| 48 |
-
|
| 49 |
-
|
| 50 |
-
|
| 51 |
-
|
| 52 |
-
|
| 53 |
-
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 54 |
try:
|
| 55 |
-
|
| 56 |
-
|
| 57 |
-
|
| 58 |
-
#
|
| 59 |
-
|
| 60 |
-
|
| 61 |
-
|
| 62 |
-
|
| 63 |
-
|
| 64 |
-
|
| 65 |
-
|
| 66 |
-
|
| 67 |
-
|
| 68 |
-
|
| 69 |
-
|
| 70 |
-
|
| 71 |
-
|
| 72 |
-
|
| 73 |
-
|
| 74 |
-
|
| 75 |
-
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
#!/usr/bin/env python3
|
| 2 |
+
"""
|
| 3 |
+
Utility functions for Crypto Data Aggregator
|
| 4 |
+
Complete collection of helper functions for caching, validation, formatting, and analysis
|
| 5 |
+
"""
|
| 6 |
+
|
| 7 |
+
import time
|
| 8 |
+
import functools
|
| 9 |
import logging
|
| 10 |
+
import datetime
|
|
|
|
|
|
|
| 11 |
import json
|
| 12 |
+
import csv
|
| 13 |
+
from typing import Dict, List, Optional, Any, Callable
|
| 14 |
+
from logging.handlers import RotatingFileHandler
|
| 15 |
+
|
| 16 |
+
import config
|
| 17 |
+
|
| 18 |
+
|
| 19 |
+
def setup_logging() -> logging.Logger:
    """
    Configure the application logger with rotating-file and console output.

    Idempotent: calling it again returns the already-configured logger
    without attaching duplicate handlers.

    Returns:
        logging.Logger: The shared 'crypto_aggregator' logger instance.
    """
    log = logging.getLogger('crypto_aggregator')
    level = getattr(logging, config.LOG_LEVEL.upper(), logging.INFO)
    log.setLevel(level)

    # Already configured on an earlier call -- avoid duplicate handlers.
    if log.handlers:
        return log

    fmt = logging.Formatter(config.LOG_FORMAT)

    try:
        # File output with size-based rotation.
        rotating = RotatingFileHandler(
            config.LOG_FILE,
            maxBytes=config.LOG_MAX_BYTES,
            backupCount=config.LOG_BACKUP_COUNT
        )
        rotating.setLevel(level)
        rotating.setFormatter(fmt)
        log.addHandler(rotating)
    except Exception as e:
        # File logging is best-effort; the console handler below still works.
        print(f"Warning: Could not setup file logging: {e}")

    # Console output.
    stream = logging.StreamHandler()
    stream.setLevel(level)
    stream.setFormatter(fmt)
    log.addHandler(stream)

    log.info("Logging system initialized successfully")
    return log
|
| 58 |
|
| 59 |
+
|
| 60 |
+
def cache_with_ttl(ttl_seconds: int = 300) -> Callable:
    """
    Decorator that memoizes a function's results for a limited time.

    Args:
        ttl_seconds: How long (in seconds) a cached result stays valid.

    Returns:
        Callable: A decorator producing a caching wrapper; the wrapper
        exposes ``clear_cache()`` to drop all cached entries.

    Example:
        @cache_with_ttl(ttl_seconds=600)
        def expensive_function(arg1, arg2):
            return result
    """
    def decorator(func: Callable) -> Callable:
        store = {}

        @functools.wraps(func)
        def wrapper(*args, **kwargs):
            # Key on the textual form of the call arguments.
            key = str(args) + str(sorted(kwargs.items()))

            # Serve from cache if present and still fresh.
            entry = store.get(key)
            if entry is not None:
                value, stamp = entry
                if time.time() - stamp < ttl_seconds:
                    logging.getLogger('crypto_aggregator').debug(
                        f"Cache hit for {func.__name__} (TTL: {ttl_seconds}s)"
                    )
                    return value

            # Miss (or expired): compute and store with a fresh timestamp.
            value = func(*args, **kwargs)
            store[key] = (value, time.time())

            # Evict the stalest entry once the cache grows past the limit.
            if len(store) > config.CACHE_MAX_SIZE:
                stalest = min(store, key=lambda k: store[k][1])
                del store[stalest]

            return value

        # Allow callers to flush the cache explicitly.
        wrapper.clear_cache = lambda: store.clear()
        return wrapper

    return decorator
|
| 108 |
+
|
| 109 |
+
|
| 110 |
+
def validate_price_data(price_data: Dict) -> bool:
    """
    Check that a price record has all required fields and sane values.

    Args:
        price_data: Mapping expected to contain 'price_usd', 'volume_24h'
            and 'market_cap'.

    Returns:
        bool: True when every field is present and within the configured
        limits, False otherwise (problems are logged, never raised).
    """
    log = logging.getLogger('crypto_aggregator')

    try:
        # All three fields must be present before any value checks.
        for field in ('price_usd', 'volume_24h', 'market_cap'):
            if field not in price_data:
                log.warning(f"Missing required field: {field}")
                return False

        # Price must fall inside the configured band.
        price_usd = float(price_data['price_usd'])
        if not (config.MIN_PRICE <= price_usd <= config.MAX_PRICE):
            log.warning(
                f"Price ${price_usd} outside valid range "
                f"[${config.MIN_PRICE}, ${config.MAX_PRICE}]"
            )
            return False

        # 24h volume has a floor only.
        volume_24h = float(price_data['volume_24h'])
        if volume_24h < config.MIN_VOLUME:
            log.warning(f"Volume ${volume_24h} below minimum ${config.MIN_VOLUME}")
            return False

        # Market cap has a floor only.
        market_cap = float(price_data['market_cap'])
        if market_cap < config.MIN_MARKET_CAP:
            log.warning(f"Market cap ${market_cap} below minimum ${config.MIN_MARKET_CAP}")
            return False

        return True

    except (ValueError, TypeError) as e:
        # Non-numeric field values land here.
        log.error(f"Error validating price data: {e}")
        return False
    except Exception as e:
        log.error(f"Unexpected error in validate_price_data: {e}")
        return False
|
| 163 |
+
|
| 164 |
+
|
| 165 |
+
def format_number(num: float, decimals: int = 2) -> str:
    """
    Render a number compactly with a K/M/B magnitude suffix.

    Args:
        num: Value to render (anything float() understands).
        decimals: Digits after the decimal point (default: 2).

    Returns:
        str: Formatted string, or "N/A" for missing/unparseable input.

    Examples:
        format_number(1234) -> "1.23K"
        format_number(1234567) -> "1.23M"
        format_number(1234567890) -> "1.23B"
    """
    if num is None:
        return "N/A"

    try:
        value = float(num)
        sign = "-" if value < 0 else ""
        value = abs(value)

        # Pick the largest matching magnitude suffix; fall through to a
        # plain fixed-point rendering for values under 1,000.
        for threshold, suffix in ((1_000_000_000, "B"), (1_000_000, "M"), (1_000, "K")):
            if value >= threshold:
                return f"{sign}{value / threshold:.{decimals}f}{suffix}"
        return f"{sign}{value:.{decimals}f}"

    except (ValueError, TypeError):
        return "N/A"
|
| 206 |
+
|
| 207 |
+
|
| 208 |
+
def calculate_moving_average(prices: List[float], period: int) -> Optional[float]:
    """
    Compute the simple moving average over the most recent prices.

    Args:
        prices: Historical price values, oldest first.
        period: Window size for the average.

    Returns:
        Optional[float]: SMA of the last `period` prices rounded to 8
        decimal places, or None when input is insufficient or invalid.
    """
    log = logging.getLogger('crypto_aggregator')

    try:
        # Guard clauses for degenerate inputs.
        if not prices:
            log.warning("Empty price list provided to calculate_moving_average")
            return None
        if period <= 0:
            log.warning(f"Invalid period {period} for moving average")
            return None
        if len(prices) < period:
            log.warning(f"Not enough data points ({len(prices)}) for period {period}")
            return None

        # Average the trailing window only.
        window = prices[-period:]
        # 8 decimal places keeps precision for very small coin prices.
        return round(sum(window) / period, 8)

    except (TypeError, ValueError) as e:
        log.error(f"Error calculating moving average: {e}")
        return None
    except Exception as e:
        log.error(f"Unexpected error in calculate_moving_average: {e}")
        return None
|
| 249 |
+
|
| 250 |
+
|
| 251 |
+
def calculate_rsi(prices: List[float], period: int = 14) -> Optional[float]:
    """
    Compute the Relative Strength Index over the most recent window.

    Args:
        prices: Historical price values, oldest first.
        period: Lookback length (default 14, the conventional setting).

    Returns:
        Optional[float]: RSI in [0, 100] rounded to 2 decimal places, or
        None when there is not enough data or the input is invalid.
    """
    log = logging.getLogger('crypto_aggregator')

    try:
        # Need at least period+1 prices to form `period` deltas.
        if not prices or len(prices) < period + 1:
            log.warning(
                f"Not enough data points ({len(prices)}) for RSI calculation (need {period + 1})"
            )
            return None
        if period <= 0:
            log.warning(f"Invalid period {period} for RSI")
            return None

        # Per-step price moves, split into upward and downward components.
        moves = [b - a for a, b in zip(prices, prices[1:])]
        up = [m if m > 0 else 0 for m in moves]
        down = [-m if m < 0 else 0 for m in moves]

        # Simple (unsmoothed) averages over the trailing window.
        avg_gain = sum(up[-period:]) / period
        avg_loss = sum(down[-period:]) / period

        if avg_loss == 0:
            # Flat market reads as neutral; pure gains as maximum strength.
            return 50.0 if avg_gain == 0 else 100.0

        rs = avg_gain / avg_loss
        return round(100 - (100 / (1 + rs)), 2)

    except (TypeError, ValueError, ZeroDivisionError) as e:
        log.error(f"Error calculating RSI: {e}")
        return None
    except Exception as e:
        log.error(f"Unexpected error in calculate_rsi: {e}")
        return None
|
| 305 |
+
|
| 306 |
+
|
| 307 |
+
def extract_coins_from_text(text: str) -> List[str]:
    """
    Extract known cryptocurrency symbols mentioned in a piece of text.

    Matching is case-insensitive and substring based: both the ticker
    (e.g. "BTC") and the full coin id (e.g. "bitcoin") are recognized.

    Args:
        text: Free-form text to scan.

    Returns:
        List[str]: Sorted, de-duplicated list of ticker symbols found;
        empty on empty input or on any error.
    """
    if not text:
        return []

    found_coins = []
    text_upper = text.upper()

    try:
        for coin_id, symbol in config.COIN_SYMBOL_MAPPING.items():
            # Match either the ticker ("BTC") or the full name ("bitcoin").
            # NOTE: substring matching, so a ticker embedded in a longer
            # token also counts -- unchanged from previous behavior.
            if symbol.upper() in text_upper or coin_id.upper() in text_upper:
                if symbol not in found_coins:
                    found_coins.append(symbol)

        # The former extra regex pass over $XXX/#XXX patterns could only
        # re-find symbols the substring scan above already caught (a regex
        # match equal to symbol.upper() implies that substring is present
        # in text_upper), so it was dead code and has been removed.

        return sorted(set(found_coins))

    except Exception as e:
        logger = logging.getLogger('crypto_aggregator')
        logger.error(f"Error extracting coins from text: {e}")
        return []
|
| 353 |
+
|
| 354 |
+
|
| 355 |
+
def export_to_csv(data: List[Dict], filename: str) -> bool:
    """
    Export a list of dictionaries to a CSV file.

    The header row is the sorted union of keys across all rows; rows
    missing a key get an empty cell.

    Args:
        data: Rows to export.
        filename: Output path; a '.csv' extension is appended if absent.

    Returns:
        bool: True on success, False when there is nothing to export or
        the write fails.
    """
    logger = logging.getLogger('crypto_aggregator')

    if not data:
        logger.warning("No data to export to CSV")
        return False

    try:
        # Ensure filename ends with .csv
        if not filename.endswith('.csv'):
            filename += '.csv'

        # Union of keys across all rows so every column is represented.
        fieldnames = set()
        for row in data:
            fieldnames.update(row.keys())
        fieldnames = sorted(fieldnames)

        # Write to CSV
        with open(filename, 'w', newline='', encoding='utf-8') as csvfile:
            writer = csv.DictWriter(csvfile, fieldnames=fieldnames)
            writer.writeheader()
            writer.writerows(data)

        # Fixed: log messages previously contained the literal placeholder
        # "(unknown)" instead of the actual filename.
        logger.info(f"Successfully exported {len(data)} rows to {filename}")
        return True

    except IOError as e:
        logger.error(f"IO error exporting to CSV {filename}: {e}")
        return False
    except Exception as e:
        logger.error(f"Error exporting to CSV {filename}: {e}")
        return False
|
| 398 |
+
|
| 399 |
+
|
| 400 |
+
def is_data_stale(timestamp_str: str, max_age_minutes: int = 30) -> bool:
    """
    Decide whether a timestamp is older than the allowed age.

    Args:
        timestamp_str: Unix epoch seconds (as a string/number) or a
            datetime string in one of several common layouts.
        max_age_minutes: Freshness threshold in minutes.

    Returns:
        bool: True when the data is older than the threshold, or when
        the timestamp cannot be interpreted at all.
    """
    log = logging.getLogger('crypto_aggregator')

    try:
        try:
            # Numeric input is treated as a Unix epoch timestamp.
            recorded_at = datetime.datetime.fromtimestamp(float(timestamp_str))
        except (ValueError, TypeError):
            # Otherwise try a list of common datetime string layouts.
            # NOTE(review): string timestamps are parsed as naive local
            # time, even for 'Z'-suffixed (UTC-looking) strings -- confirm
            # producers and the local clock share a timezone.
            known_formats = (
                "%Y-%m-%dT%H:%M:%S.%fZ",
                "%Y-%m-%dT%H:%M:%SZ",
                "%Y-%m-%dT%H:%M:%S",
                "%Y-%m-%d %H:%M:%S",
                "%Y-%m-%d %H:%M:%S.%f",
            )
            for layout in known_formats:
                try:
                    recorded_at = datetime.datetime.strptime(timestamp_str, layout)
                    break
                except ValueError:
                    continue
            else:
                # Last resort: ISO-8601 parser ('Z' mapped to +00:00).
                recorded_at = datetime.datetime.fromisoformat(timestamp_str.replace('Z', '+00:00'))

        # Age of the data relative to now, in minutes.
        age_minutes = (datetime.datetime.now() - recorded_at).total_seconds() / 60

        stale = age_minutes > max_age_minutes
        if stale:
            log.debug(
                f"Data is stale: {age_minutes:.1f} minutes old "
                f"(threshold: {max_age_minutes} minutes)"
            )
        return stale

    except Exception as e:
        # Unparseable input is treated as stale by policy.
        log.error(f"Error checking data staleness for timestamp '{timestamp_str}': {e}")
        return True
|
| 456 |
+
|
| 457 |
+
|
| 458 |
+
# Utility function to get logger easily
|
| 459 |
+
def get_logger(name: str = 'crypto_aggregator') -> logging.Logger:
    """
    Return the named logger, initializing logging on first use.

    Args:
        name: Logger name (defaults to the application logger).

    Returns:
        logging.Logger: Ready-to-use logger instance.
    """
    existing = logging.getLogger(name)
    # No handlers means logging was never configured -- set it up now.
    return existing if existing.handlers else setup_logging()
|
| 473 |
+
|
| 474 |
+
|
| 475 |
+
# Additional helper functions for common operations
|
| 476 |
+
def safe_float(value: Any, default: float = 0.0) -> float:
    """
    Convert a value to float, falling back to a default on failure.

    Args:
        value: Value to convert.
        default: Returned when conversion raises.

    Returns:
        float: Converted value, or the default.
    """
    try:
        result = float(value)
    except (ValueError, TypeError):
        return default
    return result
|
| 491 |
+
|
| 492 |
+
|
| 493 |
+
def safe_int(value: Any, default: int = 0) -> int:
    """
    Convert a value to int, falling back to a default on failure.

    Args:
        value: Value to convert.
        default: Returned when conversion raises.

    Returns:
        int: Converted value, or the default.
    """
    try:
        result = int(value)
    except (ValueError, TypeError):
        return default
    return result
|
| 508 |
+
|
| 509 |
+
|
| 510 |
+
def truncate_string(text: str, max_length: int = 100, suffix: str = "...") -> str:
    """
    Truncate text to at most `max_length` characters, appending a suffix.

    Args:
        text: Text to truncate (returned unchanged if falsy or short enough).
        max_length: Maximum length of the returned string.
        suffix: Marker appended when truncation happens.

    Returns:
        str: A string no longer than max_length.
    """
    if not text or len(text) <= max_length:
        return text
    if max_length <= len(suffix):
        # No room for the suffix: hard-cut instead of overshooting.
        # (Previously text[:max_length - len(suffix)] with a negative
        # index produced output LONGER than max_length here.)
        return text[:max_length]
    return text[:max_length - len(suffix)] + suffix
|
| 525 |
+
|
| 526 |
+
|
| 527 |
+
def percentage_change(old_value: float, new_value: float) -> Optional[float]:
    """
    Calculate the percentage change from an old value to a new one.

    Args:
        old_value: Baseline value.
        new_value: Updated value.

    Returns:
        Optional[float]: Change in percent, or None when the baseline is
        zero or the inputs are not numeric.
    """
    try:
        if old_value == 0:
            # Change from zero is undefined.
            return None
        delta = new_value - old_value
        return delta / old_value * 100
    except (TypeError, ValueError, ZeroDivisionError):
        return None
|
| 544 |
+
|
| 545 |
+
|
| 546 |
+
if __name__ == "__main__":
    # Smoke-test the utility helpers when this module is run directly.
    print("Testing Crypto Data Aggregator Utilities")
    print("=" * 50)

    # Logging
    logger = setup_logging()
    logger.info("Logger test successful")

    # Number formatting
    print(f"\nNumber Formatting:")
    for sample in (1234, 1234567, 1234567890):
        print(f"  {sample} -> {format_number(sample)}")

    # Moving average
    sample_prices = [100, 102, 104, 103, 105, 107, 106]
    sma = calculate_moving_average(sample_prices, 5)
    print(f"\nMoving Average (5-period): {sma}")

    # RSI
    rsi_series = [44, 44.5, 45, 45.5, 45, 44.5, 44, 43.5, 43, 43.5, 44, 44.5, 45, 45.5, 46]
    rsi_value = calculate_rsi(rsi_series, 14)
    print(f"RSI (14-period): {rsi_value}")

    # Coin extraction
    sample_text = "Bitcoin (BTC) and Ethereum (ETH) are leading cryptocurrencies"
    detected = extract_coins_from_text(sample_text)
    print(f"\nExtracted coins from text: {detected}")

    # Price data validation
    sample_record = {
        'price_usd': 45000.0,
        'volume_24h': 1000000.0,
        'market_cap': 800000000.0
    }
    verdict = validate_price_data(sample_record)
    print(f"\nPrice data validation: {verdict}")

    print("\n" + "=" * 50)
    print("All tests completed!")
|