import gc

from tensorflow.keras import backend as K

from app.pipelines.main_pipeline import load_model as load_maize, predict_image

AUTO_UNLOAD = False
_cache = {}


async def predict(req, file):
    name = req.model_name

    # choose loader/predictor
    # if name == "sentiment":
    #     loader, predictor, data = load_sentiment, predict_sentiment, req.text
    if name.startswith("xception_"):
        loader, predictor, data = load_maize, predict_image, await file.read()
    else:
        raise ValueError("Unknown model")

    # load-on-demand
    if name not in _cache:
        _cache[name] = loader(name)
    model = _cache[name]

    label, conf, disease_data = predictor(data, model, name)

    if AUTO_UNLOAD:
        del _cache[name]
        del model
        K.clear_session()
        gc.collect()

    return {"model": name, "label": label, "confidence": conf, "recommendation": disease_data}