Nhughes09 committed on
Commit ·
387721d
1
Parent(s): c89a139
Fix JSON serialization for NumPy types
Browse files- app.py +26 -4
- json_utils.py +44 -0
app.py
CHANGED
|
@@ -216,6 +216,10 @@ async def get_catalog():
|
|
| 216 |
logger.error(traceback.format_exc())
|
| 217 |
raise e
|
| 218 |
|
|
|
|
|
|
|
|
|
|
|
|
|
| 219 |
@app.get("/api/preview/{vertical}")
|
| 220 |
async def get_preview(vertical: str):
|
| 221 |
"""Get preview data for a specific vertical"""
|
|
@@ -251,14 +255,17 @@ async def get_preview(vertical: str):
|
|
| 251 |
# Get latest row for "Live Signals"
|
| 252 |
latest = df.iloc[-1].to_dict()
|
| 253 |
|
| 254 |
-
|
| 255 |
"vertical": vertical,
|
| 256 |
"latest": latest,
|
| 257 |
"history": history,
|
| 258 |
"total_rows": len(df)
|
| 259 |
-
}
|
| 260 |
-
|
| 261 |
-
|
|
|
|
|
|
|
|
|
|
| 262 |
except Exception as e:
|
| 263 |
logger.error(f"Error fetching preview: {e}")
|
| 264 |
return JSONResponse({"error": str(e)}, status_code=500)
|
|
@@ -393,9 +400,24 @@ async def get_prediction(vertical: str):
|
|
| 393 |
predictor = predictors[vertical]
|
| 394 |
result = predictor.predict(latest_data)
|
| 395 |
|
|
|
|
|
|
|
|
|
|
| 396 |
return JSONResponse(result)
|
| 397 |
except Exception as e:
|
| 398 |
logger.error(f"Prediction failed: {e}")
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 399 |
return JSONResponse({"error": str(e)}, status_code=500)
|
| 400 |
|
| 401 |
@app.get("/api/pnl")
|
|
|
|
| 216 |
logger.error(traceback.format_exc())
|
| 217 |
raise e
|
| 218 |
|
| 219 |
+
from json_utils import convert_numpy_types, log_object_types
|
| 220 |
+
|
| 221 |
+
# ... (imports remain the same)
|
| 222 |
+
|
| 223 |
@app.get("/api/preview/{vertical}")
|
| 224 |
async def get_preview(vertical: str):
|
| 225 |
"""Get preview data for a specific vertical"""
|
|
|
|
| 255 |
# Get latest row for "Live Signals"
|
| 256 |
latest = df.iloc[-1].to_dict()
|
| 257 |
|
| 258 |
+
response_data = {
|
| 259 |
"vertical": vertical,
|
| 260 |
"latest": latest,
|
| 261 |
"history": history,
|
| 262 |
"total_rows": len(df)
|
| 263 |
+
}
|
| 264 |
+
|
| 265 |
+
# Ensure all types are JSON serializable
|
| 266 |
+
response_data = convert_numpy_types(response_data)
|
| 267 |
+
|
| 268 |
+
return JSONResponse(response_data)
|
| 269 |
except Exception as e:
|
| 270 |
logger.error(f"Error fetching preview: {e}")
|
| 271 |
return JSONResponse({"error": str(e)}, status_code=500)
|
|
|
|
| 400 |
predictor = predictors[vertical]
|
| 401 |
result = predictor.predict(latest_data)
|
| 402 |
|
| 403 |
+
# Convert NumPy types to Python types
|
| 404 |
+
result = convert_numpy_types(result)
|
| 405 |
+
|
| 406 |
return JSONResponse(result)
|
| 407 |
except Exception as e:
|
| 408 |
logger.error(f"Prediction failed: {e}")
|
| 409 |
+
# Detailed logging for debugging
|
| 410 |
+
logger.error("Dumping object types for debugging:")
|
| 411 |
+
try:
|
| 412 |
+
# We assume 'result' might be defined if the error happened during serialization
|
| 413 |
+
# If it happened before, this might fail, so we wrap it
|
| 414 |
+
if 'result' in locals():
|
| 415 |
+
log_object_types(result)
|
| 416 |
+
else:
|
| 417 |
+
logger.error("Result object was not created.")
|
| 418 |
+
except Exception as log_err:
|
| 419 |
+
logger.error(f"Failed to log object types: {log_err}")
|
| 420 |
+
|
| 421 |
return JSONResponse({"error": str(e)}, status_code=500)
|
| 422 |
|
| 423 |
@app.get("/api/pnl")
|
json_utils.py
ADDED
|
@@ -0,0 +1,44 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
import numpy as np
|
| 2 |
+
import pandas as pd
|
| 3 |
+
import logging
|
| 4 |
+
|
| 5 |
+
logger = logging.getLogger(__name__)
|
| 6 |
+
|
| 7 |
+
def convert_numpy_types(obj):
    """
    Recursively convert NumPy/pandas types to standard Python types for JSON
    serialization.

    Handles NumPy integer/float/bool scalars, ndarrays, pandas timestamps, and
    nested dicts/lists/tuples. Non-finite floats (NaN, +/-Inf) are mapped to
    None, since JSON has no representation for them. Any other object is
    returned unchanged.
    """
    # np.integer / np.floating are the abstract bases for every sized scalar
    # (int8..int64, uint*, float16..float64). They survive NumPy 2.0, which
    # removed the np.float_ / np.int_ style aliases the old checks relied on.
    if isinstance(obj, np.integer):
        return int(obj)
    if isinstance(obj, np.floating):
        f = float(obj)
        # NaN/Inf would render as invalid JSON tokens; emit null instead.
        return f if math.isfinite(f) else f and None
    if isinstance(obj, np.bool_):
        # np.bool_ is not a subclass of int/bool and breaks json.dumps.
        return bool(obj)
    if isinstance(obj, np.ndarray):
        # tolist() converts scalars to Python natives; recurse to catch
        # object-dtype arrays whose elements may still need conversion.
        return [convert_numpy_types(i) for i in obj.tolist()]
    if isinstance(obj, dict):
        return {k: convert_numpy_types(v) for k, v in obj.items()}
    if isinstance(obj, (list, tuple)):
        # Tuples are accepted as input but emitted as JSON arrays (lists).
        return [convert_numpy_types(i) for i in obj]
    if isinstance(obj, (pd.Timestamp, pd.DatetimeIndex)):
        return str(obj)
    return obj
|
| 26 |
+
|
| 27 |
+
def log_object_types(obj, prefix="", depth=0, max_depth=3):
    """
    Log the types of keys and values in a dictionary or list to help debug serialization issues.
    """
    # Stop descending once we pass the depth limit.
    if depth > max_depth:
        return

    pad = " " * depth

    if isinstance(obj, dict):
        for key, val in obj.items():
            logger.error(f"{pad}{prefix}Key: {key} ({type(key)}), Value Type: {type(val)}")
            if isinstance(val, (dict, list)):
                log_object_types(val, prefix=f"{key}.", depth=depth + 1, max_depth=max_depth)
        return

    if isinstance(obj, list):
        sample = obj[:5]  # Only check first 5 to avoid spam
        for idx, val in enumerate(sample):
            logger.error(f"{pad}{prefix}Index: {idx}, Value Type: {type(val)}")
            if isinstance(val, (dict, list)):
                log_object_types(val, prefix=f"[{idx}].", depth=depth + 1, max_depth=max_depth)
|