# FF1000 recommendation API -- Flask entry point.
# (Removed web-viewer paste artifacts: "Spaces: / Running / File size" chrome.)
import os
import sys
import logging
# Configure logging before anything else so that the import-time diagnostics
# below (and the guarded model import further down) are captured on stdout,
# which is the container/orchestrator-friendly stream.
logging.basicConfig(
    level=logging.DEBUG,
    format="%(asctime)s %(levelname)s %(name)s: %(message)s",
    stream=sys.stdout,
)
log = logging.getLogger("ff1000-api")
log.info("Starting FF1000 API server...")
# Lazy %-style arguments instead of f-strings: formatting is deferred until the
# record is actually emitted, per logging best practice.
log.info("PYTHONPATH: %s", os.environ.get("PYTHONPATH", "not set"))
log.info("Current directory: %s", os.getcwd())
log.info("Directory contents: %s", os.listdir("."))
from typing import Dict, Any
from flask import Flask, request, jsonify
from werkzeug.exceptions import HTTPException
# Registry of loaded ML models, keyed by the short names used in the
# /predict/<model_name> route. Deliberately left empty when the import fails so
# the API process can still start and report the failure via its endpoints.
MODELS: Dict[str, Any] = {}
try:
    log.info("Attempting to import machine_learning models...")
    from machine_learning.load_models import (
        not_for_me,
        recommended_for_you,
        similarity,
    )
    MODELS = {
        "nfm": not_for_me,
        "rfy": recommended_for_you,
        "similarity": similarity,
    }
    log.info("Successfully loaded ML models!")
except Exception as e:
    # log.exception logs the message AND the traceback through the configured
    # handler (stdout). The previous traceback.print_exc() bypassed logging
    # entirely and wrote to stderr, splitting the failure across two streams.
    log.exception("Failed to import ML models: %s", e)
def create_app() -> Flask:
    """Build and return the Flask application with all routes registered.

    Routes:
        GET  /                     -- service banner plus model availability
        GET  /health               -- liveness/readiness probe
        POST /predict/<model_name> -- run one of the loaded models on 'items'
    """
    log.info("Creating Flask app...")
    app = Flask(__name__)

    @app.errorhandler(Exception)
    def handle_exception(e):
        # Preserve proper status codes for HTTP errors (404, 405, ...);
        # anything else becomes a logged 500 with the message echoed back.
        if isinstance(e, HTTPException):
            return jsonify(error=e.name, message=e.description), e.code
        log.exception("Unhandled exception")
        return jsonify(error="InternalServerError", message=str(e)), 500

    @app.route("/")
    def root():
        return jsonify(
            status="ok",
            message="FF1000 Recommendation Service",
            models_loaded=len(MODELS) > 0,
            available_models=list(MODELS.keys()) if MODELS else []
        )

    # BUG FIX: /health was registered twice (@app.route AND @app.get stacked on
    # the same function), putting a duplicate rule in the URL map. One suffices.
    @app.get("/health")
    def healthz():
        return jsonify(status="ok", models_loaded=len(MODELS) > 0)

    @app.route("/predict/<model_name>", methods=["POST"])
    def predict(model_name: str):
        """Validate the JSON body ({'items': [...]}) and run the named model."""
        if not MODELS:
            return jsonify(error="ModelsNotLoaded", message="ML models failed to load"), 500
        if model_name not in MODELS:
            return jsonify(error="UnknownModel", message=f"valid models: {list(MODELS.keys())}"), 400
        try:
            payload = request.get_json(force=True, silent=False)
        except Exception:
            return jsonify(error="InvalidJSON", message="body must be valid JSON"), 400
        if not isinstance(payload, dict) or "items" not in payload:
            return jsonify(error="BadRequest", message="json must have key 'items'"), 400
        inputs = payload["items"]
        if not isinstance(inputs, list):
            return jsonify(error="BadRequest", message="'items' must be a list"), 400
        model = MODELS[model_name]
        try:
            # NOTE(review): the extra wrapping ([inputs]) feeds the whole 'items'
            # list as a single sample row -- confirm this matches how the models
            # were trained before changing it.
            preds = model.predict([inputs])
        except Exception as e:
            log.exception("Prediction failed")
            return jsonify(error="PredictionError", message=str(e)), 500
        # BUG FIX: numpy arrays are not JSON-serializable; convert to plain
        # Python lists when the result exposes tolist() (ndarray does).
        if hasattr(preds, "tolist"):
            preds = preds.tolist()
        return jsonify(model=model_name, predictions=preds)

    log.info("Flask app created with routes: %s",
             [rule.rule for rule in app.url_map.iter_rules()])
    return app
log.info("Creating app instance...")
# Module-level WSGI entry point: servers such as gunicorn import this as
# "module:app". Built eagerly at import time, so any route-registration
# failure surfaces immediately on startup rather than on first request.
app = create_app()
log.info("App instance created successfully!")