"""Flask inference API for plant-disease image classification.

Loads a ResNet-50 classifier from the Hugging Face Hub at import time and
exposes two endpoints: GET / (health check) and POST /predict (multipart
image upload -> predicted label + class index as JSON).
"""

from flask import Flask, request, jsonify
from transformers import AutoModelForImageClassification, AutoImageProcessor
from PIL import Image
import torch
import os

app = Flask(__name__)

# 1. Model configuration
model_name = "SanketJadhav/PlantDiseaseClassifier-Resnet50"

print("🔄 Model loading... please wait.")
try:
    # use_safetensors=False works around a safetensors loading error
    # with this particular checkpoint.
    model = AutoModelForImageClassification.from_pretrained(
        model_name, use_safetensors=False
    )
    # Use the processor shipped with the model instead of hand-written
    # transforms, so preprocessing always matches the training pipeline.
    processor = AutoImageProcessor.from_pretrained(model_name)
    model.eval()  # inference mode: disables dropout / batch-norm updates
    print(f"✅ Model '{model_name}' loaded successfully")
except Exception as e:
    # Keep the server up even if loading fails; /predict then returns 500.
    print(f"❌ Error loading model: {e}")
    model = None
    processor = None


@app.route("/", methods=["GET"])
def home():
    """Health-check endpoint: confirms the API process is up."""
    return jsonify({"status": "Online", "message": "Plant Disease API is running on Hugging Face!"})


@app.route("/predict", methods=["POST"])
def predict():
    """Classify an uploaded plant image.

    Expects a multipart/form-data POST with the file under the key
    ``image``. Returns JSON ``{"prediction": <label>, "class_index": <int>}``
    on success, or ``{"error": ...}`` with status 400/500 on failure.
    """
    # Guard: model/processor are None if startup loading failed.
    if model is None or processor is None:
        return jsonify({"error": "Model not loaded on server"}), 500

    if "image" not in request.files:
        return jsonify({"error": "No image file provided"}), 400

    try:
        file = request.files["image"]
        # Force RGB: uploads may be grayscale/RGBA, but the model expects 3 channels.
        image = Image.open(file.stream).convert("RGB")

        # 2. Preprocessing via the model's own AutoImageProcessor
        inputs = processor(images=image, return_tensors="pt")

        with torch.no_grad():  # no gradients needed for inference
            outputs = model(**inputs)
            logits = outputs.logits
            predicted_class_idx = logits.argmax(-1).item()

        # Map the class index to its human-readable label.
        label = model.config.id2label[predicted_class_idx]

        return jsonify({
            "prediction": label,
            "class_index": predicted_class_idx
        })
    except Exception as e:
        # API boundary: surface any preprocessing/inference failure as a 500.
        return jsonify({"error": str(e)}), 500


if __name__ == "__main__":
    # 3. IMPORTANT: Hugging Face Spaces does not expose port 5000; 7860 is required.
    app.run(host="0.0.0.0", port=7860)