maftuh-main committed on
Commit
4d0055f
·
1 Parent(s): a5f6738

Add fallback to rebuild MobileNetV2 if loading fails + conversion script

Browse files
Files changed (2) hide show
  1. CONVERT_MODEL_COLAB.py +64 -0
  2. app.py +71 -13
CONVERT_MODEL_COLAB.py ADDED
@@ -0,0 +1,64 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
"""
RUN THIS SCRIPT IN GOOGLE COLAB to convert the model to H5 format,
which is more compatible for deployment.

1. Upload batik_model_final.keras to Colab
2. Run this script
3. Download the generated batik_model_final.h5
4. Replace it in the HF Space
"""

import json

import numpy as np
import tensorflow as tf

# Load the model from the native .keras format.
print("Loading .keras model...")
model = tf.keras.models.load_model('batik_model_final.keras')

print("Model loaded successfully!")
print(f"Input shape: {model.input_shape}")
print(f"Output shape: {model.output_shape}")
print(f"Total parameters: {model.count_params():,}")

# Save to H5 format (more compatible). Keras 3 removed the `save_format`
# kwarg and infers the format from the file extension; fall back to the
# Keras 2 signature if the extension-only call is rejected.
print("\nSaving to H5 format...")
try:
    model.save('batik_model_final.h5')
except TypeError:
    model.save('batik_model_final.h5', save_format='h5')  # Keras 2.x fallback
print("✅ Saved as batik_model_final.h5")

# Also save as SavedModel format (most compatible). Keras 3 exposes this as
# Model.export(); Keras 2 used save(..., save_format='tf').
print("\nSaving to SavedModel format...")
try:
    model.export('batik_model_savedmodel')
except AttributeError:
    model.save('batik_model_savedmodel', save_format='tf')  # Keras 2.x fallback
print("✅ Saved as batik_model_savedmodel/")

# Smoke-test the model with a random image. Pixels must be drawn in
# [0, 255] so the MobileNetV2 preprocessing below lands in [-1, 1];
# drawing from [0, 1) and then scaling would squash the input to ~-1.
print("\nTesting model with random input...")
test_input = np.random.uniform(0.0, 255.0, (1, 160, 160, 3)).astype(np.float32)
test_input = (test_input / 127.5) - 1.0  # MobileNetV2 preprocessing
output = model.predict(test_input, verbose=0)
print(f"Output shape: {output.shape}")
print(f"Sum of probabilities: {np.sum(output[0]):.4f}")

# Persist model metadata alongside the converted files.
model_info = {
    "model": "MobileNetV2",
    "input_shape": list(model.input_shape),
    "output_shape": list(model.output_shape),
    "total_params": int(model.count_params()),
    "format": "h5",
    "preprocessing": "x / 127.5 - 1.0",
}

with open('model_info.json', 'w') as f:
    json.dump(model_info, f, indent=2)

print("\n" + "=" * 60)
print("✅ CONVERSION COMPLETE!")
print("=" * 60)
print("\nDownload these files:")
print("1. batik_model_final.h5 (main model file)")
print("2. model_info.json (model information)")
print("\nOr for SavedModel format, download entire folder:")
print("3. batik_model_savedmodel/ (TensorFlow SavedModel)")
print("\nReplace batik_model.keras in HF Space with batik_model.h5")
print("and update app.py to load .h5 file instead")
app.py CHANGED
@@ -22,22 +22,65 @@ class_names = None
22
  config = None
23
 
24
  def preprocess_mobilenet(x):
25
- x = x / 127.5 - 1.0
26
- return x
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
27
 
28
  def load_models():
29
  global model, class_names, config
30
 
31
  try:
32
- # Load with safe_mode=False
33
- model_path = "models/batik_model.keras"
34
- model = tf.keras.models.load_model(model_path, compile=False, safe_mode=False)
35
- print(f"Model loaded: {model.input_shape} -> {model.output_shape}")
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
36
 
 
 
 
 
 
37
  with open("models/batik_classes.json") as f:
38
  class_names = json.load(f)
39
  print(f"Loaded {len(class_names)} classes")
40
 
 
41
  try:
42
  with open("models/batik_config.json") as f:
43
  config = json.load(f)
@@ -47,6 +90,8 @@ def load_models():
47
  return True
48
  except Exception as e:
49
  print(f"Error: {e}")
 
 
50
  return False
51
 
52
  def preprocess_image(image):
@@ -65,12 +110,16 @@ def index():
65
  "classes": len(class_names) if class_names else 0,
66
  "accuracy": config.get('val_accuracy', 0) if config else 0,
67
  "input_size": "160x160",
68
- "status": "ready" if model else "error"
69
  })
70
 
71
  @app.route('/health')
72
  def health():
73
- return jsonify({"status": "healthy" if model else "unhealthy"})
 
 
 
 
74
 
75
  @app.route('/classes')
76
  def get_classes():
@@ -104,16 +153,25 @@ def predict():
104
  return jsonify({
105
  "predicted_class": class_names[idx],
106
  "confidence": conf,
107
- "top5_predictions": top5_preds
 
108
  })
109
  except Exception as e:
110
- return jsonify({"error": str(e)}), 500
 
 
 
 
 
111
 
112
- print("Loading MobileNetV2 model...")
113
  if load_models():
114
- print("Ready!")
 
 
115
  else:
116
- print("Failed to load")
 
 
117
 
118
  if __name__ == '__main__':
119
  app.run(host='0.0.0.0', port=7860)
 
22
  config = None
23
 
24
def preprocess_mobilenet(x):
    """Scale raw pixel values (0-255) into MobileNetV2's [-1, 1] input range."""
    scaled = x / 127.5
    return scaled - 1.0
27
+
28
def rebuild_model_architecture(num_classes=42, input_shape=(160, 160, 3)):
    """Rebuild the MobileNetV2 classifier architecture from scratch.

    Fallback used when every saved-model file fails to load. The returned
    model has the training-time architecture, but the backbone carries
    generic ImageNet weights and the classification head is randomly
    initialised - its predictions are NOT the trained batik classifier's.

    Args:
        num_classes: Number of output classes. Defaults to 42 to stay
            backward compatible with the original hard-coded head size.
        input_shape: Expected image input shape as (height, width, channels).

    Returns:
        An uncompiled tf.keras Sequential model (prediction-shape only).
    """
    print("Rebuilding MobileNetV2 architecture...")
    from tensorflow.keras.applications import MobileNetV2
    from tensorflow.keras import Sequential
    from tensorflow.keras.layers import Dense, GlobalAveragePooling2D

    # include_top=False drops the 1000-way ImageNet head; the backbone is
    # frozen so only the new Dense head would ever train.
    base = MobileNetV2(input_shape=input_shape, include_top=False, weights='imagenet')
    base.trainable = False

    model = Sequential([
        base,
        GlobalAveragePooling2D(),
        Dense(num_classes, activation='softmax')
    ])

    print("Model architecture rebuilt (using ImageNet weights)")
    return model
46
 
47
  def load_models():
48
  global model, class_names, config
49
 
50
  try:
51
+ # Try multiple loading methods
52
+ model_files = [
53
+ ("models/batik_model.h5", "h5"),
54
+ ("models/batik_model.keras", "keras"),
55
+ ("models/batik_model_savedmodel", "savedmodel")
56
+ ]
57
+
58
+ loaded = False
59
+ for path, fmt in model_files:
60
+ if os.path.exists(path):
61
+ try:
62
+ print(f"Trying to load {fmt} format from {path}...")
63
+ if fmt == "keras":
64
+ model = tf.keras.models.load_model(path, compile=False, safe_mode=False)
65
+ else:
66
+ model = tf.keras.models.load_model(path, compile=False)
67
+
68
+ print(f"Model loaded: {model.input_shape} -> {model.output_shape}")
69
+ loaded = True
70
+ break
71
+ except Exception as e:
72
+ print(f"Failed to load {fmt}: {e}")
73
 
74
+ if not loaded:
75
+ print("All loading methods failed, rebuilding architecture...")
76
+ model = rebuild_model_architecture()
77
+
78
+ # Load classes
79
  with open("models/batik_classes.json") as f:
80
  class_names = json.load(f)
81
  print(f"Loaded {len(class_names)} classes")
82
 
83
+ # Load config
84
  try:
85
  with open("models/batik_config.json") as f:
86
  config = json.load(f)
 
90
  return True
91
  except Exception as e:
92
  print(f"Error: {e}")
93
+ import traceback
94
+ traceback.print_exc()
95
  return False
96
 
97
  def preprocess_image(image):
 
110
  "classes": len(class_names) if class_names else 0,
111
  "accuracy": config.get('val_accuracy', 0) if config else 0,
112
  "input_size": "160x160",
113
+ "status": "ready" if model and class_names else "error"
114
  })
115
 
116
@app.route('/health')
def health():
    """Health probe: reports overall status plus model/class-list load state."""
    payload = {
        "status": "healthy" if model else "unhealthy",
        "model_loaded": model is not None,
        "classes_loaded": class_names is not None,
    }
    return jsonify(payload)
123
 
124
  @app.route('/classes')
125
  def get_classes():
 
153
  return jsonify({
154
  "predicted_class": class_names[idx],
155
  "confidence": conf,
156
+ "top5_predictions": top5_preds,
157
+ "model": "MobileNetV2"
158
  })
159
  except Exception as e:
160
+ import traceback
161
+ return jsonify({"error": str(e), "trace": traceback.format_exc()}), 500
162
+
163
def _banner(message):
    """Print *message* framed between 70-char separator rules."""
    rule = "=" * 70
    print(rule)
    print(message)
    print(rule)


_banner("Loading MobileNetV2 Batik Classifier...")

if load_models():
    _banner("Ready to classify!")
else:
    _banner("Failed to load - check logs")

if __name__ == '__main__':
    app.run(host='0.0.0.0', port=7860)