| | from flask import Flask, request, jsonify |
| | from transformers import AutoModelForImageClassification, AutoImageProcessor |
| | from PIL import Image |
| | import torch |
| | import os |
| |
|
app = Flask(__name__)

# Hugging Face Hub repo id of the plant-disease image classifier.
model_name = "SanketJadhav/PlantDiseaseClassifier-Resnet50"

# Load the model once at import time so every request reuses it.
print("Model loading... please wait.")
try:
    # NOTE(review): use_safetensors=False presumably because this repo only
    # ships a legacy PyTorch .bin checkpoint — confirm against the Hub repo.
    model = AutoModelForImageClassification.from_pretrained(
        model_name,
        use_safetensors=False,
    )
    processor = AutoImageProcessor.from_pretrained(model_name)
    model.eval()  # inference mode: freezes dropout / batch-norm statistics
    print(f"Model '{model_name}' loaded successfully")
except Exception as e:
    # Boundary handler: keep the server process alive even if the download or
    # load fails; /predict reports the condition as a 500 to callers.
    print(f"Error loading model: {e}")
    model = None
    processor = None
| |
|
@app.route("/", methods=["GET"])
def home():
    """Health-check endpoint confirming the API process is up."""
    payload = {
        "status": "Online",
        "message": "Plant Disease API is running on Hugging Face!",
    }
    return jsonify(payload)
| |
|
@app.route("/predict", methods=["POST"])
def predict():
    """Classify an uploaded plant image.

    Expects a multipart/form-data POST with the file under the "image" key.
    Returns JSON with the predicted label, its class index, and (new,
    backward-compatible field) the softmax confidence of that prediction.
    Responds 400 on a missing/empty file part, 500 if the model failed to
    load at startup or inference raises.
    """
    if model is None or processor is None:
        return jsonify({"error": "Model not loaded on server"}), 500

    if "image" not in request.files:
        return jsonify({"error": "No image file provided"}), 400

    file = request.files["image"]
    # Browsers submit an empty file part when no file was selected; without
    # this guard PIL would raise and the caller would get a confusing 500.
    if not file.filename:
        return jsonify({"error": "No image file provided"}), 400

    try:
        # Convert to RGB so grayscale/RGBA/palette uploads match the model's
        # expected 3-channel input.
        image = Image.open(file.stream).convert("RGB")

        inputs = processor(images=image, return_tensors="pt")

        with torch.no_grad():
            outputs = model(**inputs)
            logits = outputs.logits
            probs = torch.softmax(logits, dim=-1)
            predicted_class_idx = logits.argmax(-1).item()

        label = model.config.id2label[predicted_class_idx]

        return jsonify({
            "prediction": label,
            "class_index": predicted_class_idx,
            # Softmax probability of the winning class, in [0, 1].
            "confidence": probs[0, predicted_class_idx].item(),
        })

    except Exception as e:
        # Boundary handler: surface unexpected failures (corrupt image,
        # decode errors) as a JSON 500 instead of an HTML stack trace.
        return jsonify({"error": str(e)}), 500
| |
|
if __name__ == "__main__":
    # 7860 is the conventional Hugging Face Spaces port; allow the host
    # platform to override it via PORT without changing the default.
    app.run(host="0.0.0.0", port=int(os.environ.get("PORT", 7860)))