import numpy as np
import tensorflow as tf
from huggingface_hub import hf_hub_download

# Download the .h5 model weights from the Hugging Face Hub.
# NOTE: this runs at import time — requires network access on the first run
# (hf_hub_download caches the file locally afterwards).
model_path = hf_hub_download(
    repo_id="danielritchie/vibe-color-model",
    filename="vibe_model.h5",
)

# Load the Keras model. compile=False: inference only, no optimizer/loss state.
model = tf.keras.models.load_model(model_path, compile=False)


# -------------------------------------------------
# Model Inference
# -------------------------------------------------
def infer_color(vad):
    """Run the vibe model on a single sample.

    Args:
        vad: mapping with numeric keys "V", "A", "D", "Cx", "Co"
            (presumably Valence/Arousal/Dominance plus two extra terms —
            confirm meaning and expected range against the model card).

    Returns:
        dict with float values:
            "R", "G", "B" — color channels (assumed 0-1; see render_color),
            "E" — Energy,
            "I" — Intensity.
    """
    input_data = np.array(
        [[vad["V"], vad["A"], vad["D"], vad["Cx"], vad["Co"]]],
        dtype=np.float32,
    )
    # predict() returns a batch; take the single row.
    output = model.predict(input_data, verbose=0)[0]
    r, g, b, e, i = output
    return {
        "R": float(r),
        "G": float(g),
        "B": float(b),
        "E": float(e),  # Energy
        "I": float(i),  # Intensity
    }


# -------------------------------------------------
# Cinematic Drama Rendering
# -------------------------------------------------
def apply_cinematic_blend(model_output, drama):
    """Blend the model color toward/away from white as a function of drama.

    drama: 0.0 → near white
           1.0 → cinematic target
           1.5 → expressive peak

    Args:
        model_output: dict as returned by infer_color (0-1 float channels).
        drama: non-negative float controlling blend strength.

    Returns:
        dict with "R"/"G"/"B" as 0-255 ints and the pass-through
        "E"/"I" floats from model_output.
        NOTE(review): this 0-255 int scale differs from infer_color's 0-1
        floats, so this result is NOT a valid input to render_color — confirm
        intended pipeline ordering.
    """
    color = np.array([
        model_output["R"],
        model_output["G"],
        model_output["B"],
    ])
    energy = model_output["E"]
    intensity = model_output["I"]

    white = np.array([1.0, 1.0, 1.0])

    # Nonlinear cinematic curve (gamma-like exponent).
    curve = drama ** 2.2

    # Blend strength influenced by model intensity, clamped to [0, 1].
    strength = np.clip(curve * intensity, 0.0, 1.0)

    # Blend from the white baseline toward the model color.
    blended = white * (1 - strength) + color * strength

    # Subtle energy-based brightness shaping.
    brightness_boost = 0.9 + (0.25 * energy)
    blended = blended * brightness_boost
    blended = np.clip(blended, 0.0, 1.0)

    return {
        "R": int(blended[0] * 255),
        "G": int(blended[1] * 255),
        "B": int(blended[2] * 255),
        "E": energy,
        "I": intensity,
    }


# -------------------------------------------------
# Render HTML Block
# -------------------------------------------------
def render_color(model_output):
    """Render model_output as an HTML block.

    Args:
        model_output: dict with "R"/"G"/"B" as 0-1 floats
            (infer_color output — NOT apply_cinematic_blend output,
            whose channels are already 0-255 ints).

    Returns:
        str: the HTML snippet.
    """
    # Convert 0-1 floats to clamped 0-255 integers.
    r = int(max(0, min(255, model_output["R"] * 255)))
    g = int(max(0, min(255, model_output["G"] * 255)))
    b = int(max(0, min(255, model_output["B"] * 255)))
    # NOTE(review): the template below is empty, so r/g/b above are computed
    # but never used and every call returns "\n". The original HTML appears
    # to have been lost (truncated source) — TODO restore the markup that
    # interpolates rgb({r},{g},{b}); left as-is to avoid guessing at it.
    return f"""
"""