# app.py — Flask inference server for a Hugging Face Space
# (uploaded by ayman-ejaz-dev, commit 48ea679, "Update app.py")
from flask import Flask, request, jsonify
import torch
from PIL import Image
from transformers import AutoModelForImageClassification, AutoImageProcessor
import io
app = Flask(__name__)

# 1. Load the model directly from the Hugging Face Hub.
# The weights download automatically on first start (e.g. when the
# Space does a "Factory Rebuild").
model_name = "SanketJadhav/PlantDiseaseClassifier-Resnet50"
print("Loading model... please wait.")
# BUG FIX: the original passed the literal placeholder "aapka-model-name"
# ("your-model-name") instead of the model_name variable above, and fused
# the processor assignment onto the closing paren — a syntax error.
model = AutoModelForImageClassification.from_pretrained(
    model_name,
    # NOTE(review): use_safetensors=False kept from the original — confirm
    # this checkpoint actually ships .bin weights rather than safetensors.
    use_safetensors=False,
)
processor = AutoImageProcessor.from_pretrained(model_name)
print("Model loaded successfully!")
@app.route("/", methods=["GET"])
def home():
    """Health-check endpoint: reports that the server is up."""
    payload = {"status": "Server is running on port 7860"}
    return jsonify(payload)
@app.route("/predict", methods=["POST"])
def predict():
    """Classify an uploaded plant image.

    Expects a multipart/form-data POST with the image bytes under the
    "file" field. Returns JSON {"prediction": <label>} on success,
    {"error": ...} with status 400 (no file) or 500 (any other failure).
    """
    try:
        # Reject requests that carry no uploaded file at all.
        if 'file' not in request.files:
            return jsonify({"error": "No file uploaded"}), 400

        # Read the raw bytes and decode them into an RGB PIL image;
        # convert("RGB") normalizes grayscale/RGBA/palette inputs.
        file = request.files['file'].read()
        image = Image.open(io.BytesIO(file)).convert("RGB")

        # Preprocess and run the forward pass. no_grad() skips autograd
        # bookkeeping — pure inference, identical outputs, less memory.
        inputs = processor(images=image, return_tensors="pt")
        with torch.no_grad():
            outputs = model(**inputs)
        logits = outputs.logits
        predicted_class = logits.argmax(-1).item()

        # Map the winning class index to its human-readable label.
        label = model.config.id2label[predicted_class]
        return jsonify({"prediction": label})
    except Exception as e:
        # Top-level request boundary: report failures as JSON 500 rather
        # than letting Flask emit an HTML error page.
        return jsonify({"error": str(e)}), 500
if __name__ == "__main__":
    # IMPORTANT: Hugging Face Spaces only accepts traffic on port 7860,
    # so the server must bind exactly there.
    print("Starting Flask server on port 7860...")
    app.run(host="0.0.0.0", port=7860)