# metanice / app.py — Hugging Face Space source (commit c92f782, verified)
# Uploaded by mac9087 ("Update app.py")
from flask import Flask, request, jsonify
from flask_cors import CORS
import sys
import os

# Force flush prints immediately so startup logs appear promptly in
# container/Space log output.
sys.stdout.flush()
sys.stderr.flush()

print("="*70, flush=True)
print("STARTING PLAYWEBIT VISION API - ALL MODELS", flush=True)
print("="*70, flush=True)

app = Flask(__name__)
CORS(app)  # allow browser frontends on other origins to call this API

# Model paths - using models WITH external data files (each .onnx is
# expected to ship with a sibling .onnx.data weights file).
MODEL_DIR = "models"
FRONTLINE_MODEL = os.path.join(MODEL_DIR, "playwebit_vision.onnx")
CAT_MODEL = os.path.join(MODEL_DIR, "cat_breed.onnx")
DOG_MODEL = os.path.join(MODEL_DIR, "dog_breed.onnx")
FLOWER_MODEL = os.path.join(MODEL_DIR, "flower_species.onnx")

print(f"\nCurrent directory: {os.getcwd()}", flush=True)
print(f"Files in models/: {os.listdir(MODEL_DIR) if os.path.exists(MODEL_DIR) else 'Not found'}", flush=True)

# Import the heavy inference libraries; the server is useless without
# them, so abort startup (exit 1) if any import fails.
print("\nImporting libraries...", flush=True)
try:
    import onnxruntime as ort
    import numpy as np
    from PIL import Image
    import io
    import base64
    print("✓ Libraries imported successfully", flush=True)
except Exception as e:
    print(f"✗ Failed to import libraries: {e}", flush=True)
    sys.exit(1)
# Class definitions
# Output labels for each model, listed in the index order the model's
# logit vector uses (argmax index -> label).
# Frontline model: 30 coarse categories (CIFAR-style classes plus extras).
FRONTLINE_CLASSES = [
'airplane', 'automobile', 'bird', 'cat', 'deer',
'dog', 'frog', 'horse', 'ship', 'truck',
'bicycle', 'bus', 'motorcycle', 'pickup_truck', 'train',
'bottle', 'cup', 'plate', 'table', 'chair',
'apple', 'orange', 'boy', 'girl', 'man',
'woman', 'house', 'road', 'tree', 'flower'
]
# Cat specialist: 12 breeds (matches the Oxford-IIIT pet cat breeds —
# presumably; verify against the model's training data).
CAT_BREEDS = [
'Abyssinian', 'Bengal', 'Birman', 'Bombay',
'British Shorthair', 'Egyptian Mau', 'Maine Coon', 'Persian',
'Ragdoll', 'Russian Blue', 'Siamese', 'Sphynx'
]
# Dog specialist: 25 breeds.
DOG_BREEDS = [
'American Bulldog', 'American Pit Bull Terrier', 'Basset Hound', 'Beagle',
'Boxer', 'Chihuahua', 'English Cocker Spaniel', 'English Setter',
'German Shorthaired', 'Great Pyrenees', 'Havanese', 'Japanese Chin',
'Keeshond', 'Leonberger', 'Miniature Pinscher', 'Newfoundland',
'Pomeranian', 'Pug', 'Saint Bernard', 'Samoyed',
'Scottish Terrier', 'Shiba Inu', 'Staffordshire Bull Terrier',
'Wheaten Terrier', 'Yorkshire Terrier'
]
# Flower specialist: 102 species (Oxford 102 Flowers naming — presumably;
# verify against the model's training data).
FLOWER_SPECIES = [
'Pink Primrose', 'Hard-leaved Pocket Orchid', 'Canterbury Bells',
'Sweet Pea', 'English Marigold', 'Tiger Lily', 'Moon Orchid',
'Bird of Paradise', 'Monkshood', 'Globe Thistle', 'Snapdragon',
'Colt\'s Foot', 'King Protea', 'Spear Thistle', 'Yellow Iris',
'Globe Flower', 'Purple Coneflower', 'Peruvian Lily', 'Balloon Flower',
'Giant White Arum Lily', 'Fire Lily', 'Pincushion Flower', 'Fritillary',
'Red Ginger', 'Grape Hyacinth', 'Corn Poppy', 'Prince of Wales Feathers',
'Stemless Gentian', 'Artichoke', 'Sweet William', 'Carnation',
'Garden Phlox', 'Love in the Mist', 'Cosmos', 'Alpine Sea Holly',
'Ruby-lipped Cattleya', 'Cape Flower', 'Great Masterwort', 'Siam Tulip',
'Lenten Rose', 'Barberton Daisy', 'Daffodil', 'Sword Lily', 'Poinsettia',
'Bolero Deep Blue', 'Wallflower', 'Marigold', 'Buttercup', 'Daisy',
'Common Dandelion', 'Petunia', 'Wild Pansy', 'Primula', 'Sunflower',
'Lilac Hibiscus', 'Bishop of Llandaff', 'Gaura', 'Geranium', 'Orange Dahlia',
'Pink and Yellow Dahlia', 'Cautleya Spicata', 'Japanese Anemone',
'Black-eyed Susan', 'Silverbush', 'Californian Poppy', 'Osteospermum',
'Spring Crocus', 'Iris', 'Windflower', 'Tree Poppy', 'Gazania',
'Azalea', 'Water Lily', 'Rose', 'Thorn Apple', 'Morning Glory',
'Passion Flower', 'Lotus', 'Toad Lily', 'Anthurium', 'Frangipani',
'Clematis', 'Hibiscus', 'Columbine', 'Desert Rose', 'Tree Mallow',
'Magnolia', 'Cyclamen', 'Watercress', 'Canna Lily', 'Hippeastrum',
'Bee Balm', 'Pink Quill', 'Foxglove', 'Bougainvillea', 'Camellia',
'Mallow', 'Mexican Petunia', 'Bromelia', 'Blanket Flower', 'Trumpet Creeper',
'Blackberry Lily', 'Common Tulip', 'Wild Rose'
]
# Load all models at startup. Each loader is best-effort: a failed model
# leaves its session as None and the API degrades gracefully (routes
# report which models are available).
print("\n" + "="*70, flush=True)
print("LOADING ALL MODELS (with external data)", flush=True)
print("="*70, flush=True)


def _load_session(banner, path, ok_msg):
    """Load one ONNX model and log its path, data file, and I/O signature.

    Args:
        banner: log line announcing which model is being loaded.
        path: path to the .onnx file (external weights expected at path + '.data').
        ok_msg: log line printed on success.

    Returns:
        The ort.InferenceSession, or None if loading failed.
    """
    print(banner, flush=True)
    try:
        print(f" Path: {path}", flush=True)
        print(f" File exists: {os.path.exists(path)}", flush=True)
        print(f" Data file exists: {os.path.exists(path + '.data')}", flush=True)
        session = ort.InferenceSession(path)
        print(ok_msg, flush=True)
        for inp in session.get_inputs():
            print(f" Input: {inp.name} {inp.shape}", flush=True)
        for out in session.get_outputs():
            print(f" Output: {out.name} {out.shape}", flush=True)
        return session
    except Exception as e:
        print(f" ✗ Failed: {e}", flush=True)
        return None


frontline_session = _load_session(
    "\n1. Loading Frontline Model...", FRONTLINE_MODEL, " ✓ Frontline model loaded!")
cat_session = _load_session(
    "\n2. Loading Cat Breed Model...", CAT_MODEL, " ✓ Cat breed model loaded!")
dog_session = _load_session(
    "\n3. Loading Dog Breed Model...", DOG_MODEL, " ✓ Dog breed model loaded!")
flower_session = _load_session(
    "\n4. Loading Flower Species Model...", FLOWER_MODEL, " ✓ Flower species model loaded!")

print("\n" + "="*70, flush=True)
print("MODEL LOADING SUMMARY:", flush=True)
print(f" Frontline: {frontline_session is not None}", flush=True)
print(f" Cat: {cat_session is not None}", flush=True)
print(f" Dog: {dog_session is not None}", flush=True)
print(f" Flower: {flower_session is not None}", flush=True)
print("="*70 + "\n", flush=True)
def preprocess_image(image_data, target_size=128):
    """Decode an image and convert it to a model-ready NCHW tensor.

    Accepts either raw image bytes or a base64 string (optionally a full
    data URL with a 'base64,' prefix). Returns a float32 array of shape
    (1, 3, target_size, target_size): pixels scaled to [0, 1], then
    normalized with ImageNet mean/std.
    """
    # Base64 string input: strip any data-URL prefix, then decode to bytes.
    if isinstance(image_data, str):
        if 'base64,' in image_data:
            image_data = image_data.split('base64,')[1]
        raw_bytes = base64.b64decode(image_data)
    else:
        raw_bytes = image_data

    pil_img = Image.open(io.BytesIO(raw_bytes)).convert('RGB')
    pil_img = pil_img.resize((target_size, target_size))

    # HWC uint8 -> CHW float32 in [0, 1]
    chw = np.array(pil_img).astype(np.float32) / 255.0
    chw = chw.transpose(2, 0, 1)

    # ImageNet channel statistics (float64; the arithmetic promotes the
    # tensor to float64, cast back to float32 below).
    mean = np.array([0.485, 0.456, 0.406]).reshape(3, 1, 1)
    std = np.array([0.229, 0.224, 0.225]).reshape(3, 1, 1)
    chw = (chw - mean) / std

    # Add the leading batch dimension.
    return chw[np.newaxis, ...].astype(np.float32)
def softmax(logits, axis=-1):
    """Convert logits to probabilities with a numerically stable softmax.

    Args:
        logits: array of raw scores, any shape.
        axis: axis along which to normalize (default: last). A 1-D input
            behaves exactly as before; batched 2-D logits are now
            normalized row-wise instead of over the whole array.

    Returns:
        Array of the same shape whose values sum to 1 along `axis`.
    """
    # Subtract the per-slice max before exponentiating to avoid overflow.
    shifted = logits - np.max(logits, axis=axis, keepdims=True)
    exp_logits = np.exp(shifted)
    return exp_logits / np.sum(exp_logits, axis=axis, keepdims=True)
@app.route('/')
def home():
    """Health check endpoint: service banner plus per-model load status."""
    model_status = {
        "frontline": frontline_session is not None,
        "cat": cat_session is not None,
        "dog": dog_session is not None,
        "flower": flower_session is not None
    }
    return jsonify({
        "status": "ok",
        "message": "PlayWebit Vision API - All Models",
        "models": model_status
    })
@app.route('/debug')
def debug():
    """Debug endpoint: working directory, model files on disk, load status."""
    files_on_disk = os.listdir(MODEL_DIR) if os.path.exists(MODEL_DIR) else []
    loaded = {
        "frontline": frontline_session is not None,
        "cat": cat_session is not None,
        "dog": dog_session is not None,
        "flower": flower_session is not None
    }
    return jsonify({
        "current_directory": os.getcwd(),
        "files_in_models": files_on_disk,
        "models_loaded": loaded
    })
@app.route('/classify', methods=['POST'])
def classify():
    """Full hierarchical classification.

    Runs the frontline 30-class model first; if it predicts 'cat', 'dog'
    or 'flower' with probability > 0.7 and the matching specialist model
    is loaded, refines the result to a breed/species.

    Request: POST with the image as a multipart file field 'image' or a
    JSON body {"image": "<base64>"}.
    Responses: 200 with {"frontline": ..., "specialist"?: ...};
    400 if no image was provided; 503 if the frontline model is not
    loaded; 500 with a traceback on unexpected errors.
    """
    try:
        if frontline_session is None:
            return jsonify({"error": "Frontline model not loaded"}), 503

        # Accept multipart upload or JSON. get_json(silent=True) returns
        # None instead of raising when the body is not JSON, so a request
        # with neither form yields a clean 400 rather than a 500
        # (the original `'image' not in request.json` raised TypeError
        # when request.json was None).
        payload = request.get_json(silent=True) or {}
        if 'image' in request.files:
            image_data = request.files['image'].read()
        elif 'image' in payload:
            image_data = payload['image']
        else:
            return jsonify({"error": "No image provided"}), 400

        print(f"[CLASSIFY] Processing image...", flush=True)

        # ---- Frontline classification (128x128 input) ----
        input_tensor = preprocess_image(image_data, target_size=128)
        input_name = frontline_session.get_inputs()[0].name
        outputs = frontline_session.run(None, {input_name: input_tensor})

        class_logits = outputs[0][0]
        # Second output, when the model provides one, is a separate
        # scalar confidence head; default to 1.0 otherwise.
        confidence_score = outputs[1][0][0] if len(outputs) > 1 else 1.0

        probabilities = softmax(class_logits)
        predicted_idx = int(np.argmax(probabilities))
        predicted_class = FRONTLINE_CLASSES[predicted_idx]
        predicted_prob = float(probabilities[predicted_idx])

        # Top-5 classes, highest probability first.
        top5_indices = np.argsort(probabilities)[-5:][::-1]
        top5_predictions = [
            {
                "class": FRONTLINE_CLASSES[int(idx)],
                "probability": float(probabilities[idx])
            }
            for idx in top5_indices
        ]

        result = {
            "frontline": {
                "class": predicted_class,
                "probability": predicted_prob,
                "confidence_score": float(confidence_score),
                "top5": top5_predictions
            }
        }
        print(f"[CLASSIFY] Frontline result: {predicted_class} ({predicted_prob*100:.1f}%)", flush=True)

        # ---- Optional specialist pass (144x144 input) ----
        specialist_map = {
            'cat': (cat_session, CAT_BREEDS, 144),
            'dog': (dog_session, DOG_BREEDS, 144),
            'flower': (flower_session, FLOWER_SPECIES, 144)
        }
        # Only refine when the frontline prediction is confident (> 0.7)
        # and the specialist model actually loaded.
        if predicted_class in specialist_map and predicted_prob > 0.7:
            session, classes, size = specialist_map[predicted_class]
            if session is not None:
                print(f"[CLASSIFY] Running {predicted_class} specialist...", flush=True)
                specialist_input = preprocess_image(image_data, target_size=size)
                specialist_input_name = session.get_inputs()[0].name
                specialist_outputs = session.run(None, {specialist_input_name: specialist_input})

                breed_logits = specialist_outputs[0][0]
                breed_confidence = specialist_outputs[1][0][0] if len(specialist_outputs) > 1 else 1.0

                breed_probabilities = softmax(breed_logits)
                breed_idx = int(np.argmax(breed_probabilities))
                predicted_breed = classes[breed_idx]
                breed_prob = float(breed_probabilities[breed_idx])

                top5_breed_indices = np.argsort(breed_probabilities)[-5:][::-1]
                top5_breeds = [
                    {
                        "breed": classes[int(idx)],
                        "probability": float(breed_probabilities[idx])
                    }
                    for idx in top5_breed_indices
                ]

                result["specialist"] = {
                    "type": predicted_class,
                    "breed": predicted_breed,
                    "probability": breed_prob,
                    "confidence_score": float(breed_confidence),
                    "top5": top5_breeds
                }
                print(f"[CLASSIFY] Specialist result: {predicted_breed} ({breed_prob*100:.1f}%)", flush=True)

        return jsonify(result)

    except Exception as e:
        import traceback
        error_trace = traceback.format_exc()
        print(f"[ERROR] {error_trace}", flush=True)
        return jsonify({
            "error": str(e),
            "traceback": error_trace
        }), 500
if __name__ == '__main__':
    # PORT is provided by the hosting environment; 7860 is the default
    # the app falls back to.
    serve_port = int(os.environ.get('PORT', 7860))
    print(f"\nStarting Flask server on port {serve_port}...", flush=True)
    app.run(host='0.0.0.0', port=serve_port, debug=False)