# maftuh-main's picture
# Fix JSON BOM error and correct input size to 224x224 (23 classes)
# 29bd04d
"""
Batik Classifier API - CNN Model (VGG16-based)
24 batik classes from Indonesia
Stable H5 deployment version
"""
import os
import io
import json
import numpy as np
from flask import Flask, request, jsonify
from flask_cors import CORS
from PIL import Image
# Silence TensorFlow C++ info/warning logs; must be set BEFORE importing TF.
os.environ['TF_CPP_MIN_LOG_LEVEL'] = '2'
import tensorflow as tf
app = Flask(__name__)
CORS(app)  # allow cross-origin requests so browser frontends can call the API
# Module-level state populated by load_models() at startup; None until loaded.
model = None
class_names = None
def preprocess_image(image, target_size=(224, 224)):
    """Turn a PIL image into a normalized float32 batch of shape (1, H, W, 3)."""
    rgb = image if image.mode == 'RGB' else image.convert('RGB')
    resized = rgb.resize(target_size, Image.Resampling.LANCZOS)
    # Scale 0-255 pixel values into [0, 1] as float32, matching training.
    scaled = np.asarray(resized, dtype=np.float32) / 255.0
    # Prepend the batch axis expected by model.predict.
    return scaled[np.newaxis, ...]
def load_models():
    """Load the Keras H5 model and class-name list into module globals.

    Returns:
        True on success; False on any failure (the exception is printed with
        a traceback so deployment logs show the cause).
    """
    global model, class_names
    try:
        model_path = "models/batik_model.h5"
        print(f"Loading model from {model_path}...")
        # compile=False: inference only, and avoids failures when the saved
        # optimizer/loss config is incompatible with this TF version.
        model = tf.keras.models.load_model(model_path, compile=False)
        print(f"Model loaded: {model.input_shape} -> {model.output_shape}")
        # utf-8-sig transparently strips a leading BOM; json.load raises a
        # JSONDecodeError on a BOM-prefixed file opened with plain utf-8
        # (the "JSON BOM error" named in this deployment's commit header).
        with open("models/batik_classes.json", encoding="utf-8-sig") as f:
            data = json.load(f)
        # Accept either a bare list or a {"classes": [...]} wrapper schema.
        class_names = data if isinstance(data, list) else data.get("classes", [])
        print(f"Loaded {len(class_names)} classes")
        return True
    except Exception as e:
        print(f"Error: {e}")
        import traceback
        traceback.print_exc()
        return False
@app.route('/')
def index():
    """Root endpoint: API metadata plus a coarse readiness status."""
    ready = bool(model) and bool(class_names)
    payload = {
        "name": "Batik Classifier API",
        "model": "CNN (VGG16-based)",
        "classes": len(class_names) if class_names else 0,
        "input_size": "224x224",
        "status": "ready" if ready else "error",
    }
    return jsonify(payload)
@app.route('/health')
def health():
    """Liveness/readiness probe for the deployment platform."""
    model_ok = model is not None
    classes_ok = class_names is not None
    return jsonify({
        "status": "healthy" if model_ok else "unhealthy",
        "model_loaded": model_ok,
        "classes_loaded": classes_ok,
    })
@app.route('/classes')
def get_classes():
    """Return the full list of class names the model can predict."""
    if class_names:
        return jsonify({"classes": class_names, "total": len(class_names)})
    return jsonify({"error": "Not loaded"}), 500
@app.route('/predict', methods=['POST'])
def predict():
    """Classify an uploaded image.

    Expects a multipart form with an 'image' file field; responds with the
    top prediction plus the five highest-confidence classes.
    """
    if model is None or not class_names:
        return jsonify({"error": "Model not loaded"}), 500
    if 'image' not in request.files:
        return jsonify({"error": "No image"}), 400
    try:
        raw = request.files['image'].read()
        batch = preprocess_image(Image.open(io.BytesIO(raw)))
        scores = model.predict(batch, verbose=0)[0]
        best = int(np.argmax(scores))
        # Indices of the five largest scores, highest confidence first.
        ranked = np.argsort(scores)[::-1][:5]
        top5_preds = [
            {"class": class_names[i], "confidence": float(scores[i])}
            for i in ranked
        ]
        return jsonify({
            "predicted_class": class_names[best],
            "confidence": float(scores[best]),
            "top5_predictions": top5_preds,
            "model": "CNN-VGG16",
        })
    except Exception as e:
        import traceback
        return jsonify({"error": str(e), "trace": traceback.format_exc()}), 500
# Load the model eagerly at import time so WSGI workers start ready.
_RULE = "=" * 70
print(_RULE)
print("Loading Batik Classifier (24 classes)...")
print(_RULE)
if load_models():
    print(_RULE)
    print("Ready!")
    print(_RULE)
else:
    print(_RULE)
    print("Failed to load")
    print(_RULE)
if __name__ == '__main__':
    # Bind on all interfaces; 7860 is the conventional HF Spaces port.
    app.run(host='0.0.0.0', port=7860)