File size: 2,099 Bytes
0f66690
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
61
62
63
64
65
from flask import Flask, request, jsonify
from transformers import AutoModelForImageClassification, AutoImageProcessor
from PIL import Image
import torch
import os

app = Flask(__name__)

# 1. Model configuration: fine-tuned ResNet-50 plant-disease classifier on the HF Hub.
model_name = "SanketJadhav/PlantDiseaseClassifier-Resnet50"

# NOTE: the original emoji in these messages were mojibake (UTF-8 bytes decoded
# with a Windows-874-style codepage); restored to the intended characters.
print("🔄 Model loading... please wait.")
try:
    # use_safetensors=False avoids a safetensors loading error with this checkpoint.
    model = AutoModelForImageClassification.from_pretrained(
        model_name,
        use_safetensors=False
    )
    # Use the processor shipped with the model instead of hand-written transforms,
    # so resize/normalization always match what the model was trained with.
    processor = AutoImageProcessor.from_pretrained(model_name)
    model.eval()  # inference mode: disables dropout / batch-norm updates
    print(f"✅ Model '{model_name}' loaded successfully")
except Exception as e:
    # Keep the server importable/up even if loading fails; /predict returns 500.
    print(f"❌ Error loading model: {e}")
    model = None
    processor = None

@app.route("/", methods=["GET"])
def home():
    """Health-check endpoint: confirms the API process is up and reachable."""
    payload = {
        "status": "Online",
        "message": "Plant Disease API is running on Hugging Face!",
    }
    return jsonify(payload)

@app.route("/predict", methods=["POST"])
def predict():
    """Classify a plant-disease image.

    Expects a multipart/form-data POST with the file under the "image" key.
    Returns JSON with the predicted label, its class index, and a softmax
    confidence score. Responds 400 when no image is supplied and 500 when
    the model failed to load or inference raises.
    """
    if model is None or processor is None:
        return jsonify({"error": "Model not loaded on server"}), 500

    if "image" not in request.files:
        return jsonify({"error": "No image file provided"}), 400

    try:
        file = request.files["image"]
        # Force RGB: uploads may be RGBA/grayscale, but the processor expects 3 channels.
        image = Image.open(file.stream).convert("RGB")

        # 2. Preprocessing via the model's own AutoImageProcessor (resize + normalize).
        inputs = processor(images=image, return_tensors="pt")

        with torch.no_grad():
            outputs = model(**inputs)
            logits = outputs.logits
            # Softmax over the class dimension so we can also report a confidence;
            # argmax over probabilities equals argmax over logits, so the predicted
            # class is unchanged from the original behavior.
            probs = torch.softmax(logits, dim=-1)
            predicted_class_idx = int(probs.argmax(-1).item())
            confidence = float(probs[0, predicted_class_idx].item())

        # Map the class index to its human-readable label.
        label = model.config.id2label[predicted_class_idx]

        return jsonify({
            "prediction": label,
            "class_index": predicted_class_idx,
            "confidence": confidence  # new, backward-compatible field
        })

    except Exception as e:
        # Broad catch at the API boundary: report the failure as JSON instead of
        # letting the worker crash on a malformed upload or inference error.
        return jsonify({"error": str(e)}), 500

if __name__ == "__main__":
    # 3. IMPORTANT: Hugging Face Spaces only exposes port 7860; port 5000 will not work.
    hf_port = 7860
    app.run(host="0.0.0.0", port=hf_port)