File size: 2,601 Bytes
583fc37
f4fb2f2
b6789a6
9191c48
f4fb2f2
 
b6789a6
f4fb2f2
b6789a6
 
f4fb2f2
0625831
 
9191c48
e8a13e5
 
 
 
9191c48
f4fb2f2
9191c48
 
 
 
 
f4fb2f2
9191c48
f4fb2f2
 
9191c48
f4fb2f2
 
 
 
e8a13e5
 
f4fb2f2
9191c48
e8a13e5
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
9191c48
e8a13e5
 
 
 
 
9191c48
 
e8a13e5
 
 
 
 
fb07755
 
 
 
 
 
 
f4fb2f2
 
 
 
 
fb07755
f4fb2f2
e8a13e5
f4fb2f2
 
 
fb07755
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
61
62
63
64
65
66
67
68
69
70
71
72
73
74
75
76
77
78
79
80
81
82
83
84
85
86
87
88
89
90
91
92
93
94
95
96
97
98
99
100
101
102
103
104
105
106
107
108
109
110
import numpy as np
import tensorflow as tf
from huggingface_hub import hf_hub_download

# Download .h5 model from HF
# hf_hub_download caches the file locally and returns the local filesystem
# path to the cached copy (network access only on the first call).
model_path = hf_hub_download(
    repo_id="danielritchie/vibe-color-model",
    filename="vibe_model.h5"
)

# Load Keras model
# compile=False: the model is used for inference only below, so the
# optimizer/loss configuration saved in the .h5 file is not needed.
model = tf.keras.models.load_model(model_path, compile=False)


# -------------------------------------------------
# Model Inference
# -------------------------------------------------

def infer_color(vad):
    """Run the vibe model on a single VAD-style feature dict.

    Args:
        vad: dict with numeric entries under keys "V", "A", "D", "Cx", "Co".

    Returns:
        dict with float values under "R", "G", "B" (color channels),
        "E" (energy) and "I" (intensity).
    """
    features = [vad[key] for key in ("V", "A", "D", "Cx", "Co")]
    batch = np.asarray([features], dtype=np.float32)

    prediction = model.predict(batch, verbose=0)[0]

    # Model emits five scalars in this fixed order.
    names = ("R", "G", "B", "E", "I")  # E = Energy, I = Intensity
    return {name: float(value) for name, value in zip(names, prediction)}


# -------------------------------------------------
# Cinematic Drama Rendering
# -------------------------------------------------

def apply_cinematic_blend(model_output, drama):
    """Blend the model's color toward a white baseline, scaled by drama.

    Args:
        model_output: dict with "R", "G", "B" floats in [0, 1] plus
            "E" (energy) and "I" (intensity) floats, as produced by
            infer_color.
        drama: blend amount.
            0.0  → near white
            1.0  → cinematic target
            1.5  → expressive peak

    Returns:
        dict with "R", "G", "B" as 0–255 ints, and "E"/"I" passed through.
    """

    color = np.array([
        model_output["R"],
        model_output["G"],
        model_output["B"]
    ])

    energy = model_output["E"]
    intensity = model_output["I"]

    white = np.array([1.0, 1.0, 1.0])

    # Nonlinear cinematic curve. Clamp drama to >= 0 first: a negative base
    # raised to the fractional power 2.2 would yield a complex number in
    # Python 3 and corrupt the downstream arithmetic.
    curve = max(drama, 0.0) ** 2.2

    # Blend strength influenced by model intensity
    strength = np.clip(curve * intensity, 0.0, 1.0)

    # Blend toward white baseline
    blended = white * (1 - strength) + color * strength

    # Subtle energy-based brightness shaping
    brightness_boost = 0.9 + (0.25 * energy)
    blended = blended * brightness_boost

    blended = np.clip(blended, 0.0, 1.0)

    # round() instead of bare int(): truncation biased every channel
    # downward (e.g. 0.999 → 254) and 255 was reachable only at exactly 1.0.
    return {
        "R": int(round(float(blended[0]) * 255)),
        "G": int(round(float(blended[1]) * 255)),
        "B": int(round(float(blended[2]) * 255)),
        "E": energy,
        "I": intensity
    }


# -------------------------------------------------
# Render HTML Block
# -------------------------------------------------

def render_color(model_output):
    """Return an inline-styled HTML <div> swatch for the given color.

    Expects "R", "G", "B" in model_output as floats in [0, 1]; each channel
    is scaled to 0–255, clamped, and truncated to an int before being
    embedded in the rgb() background.

    NOTE(review): apply_cinematic_blend already returns 0–255 ints; feeding
    its output here would re-scale and saturate the color — confirm callers
    pass the raw infer_color floats.
    """

    def to_byte(channel):
        # Scale, clamp to the valid byte range, then truncate toward zero.
        scaled = model_output[channel] * 255
        return int(min(255, max(0, scaled)))

    r = to_byte("R")
    g = to_byte("G")
    b = to_byte("B")

    return f"""
    <div style="
        width:100%;
        height:240px;
        border-radius:18px;
        background: rgb({r},{g},{b});
        box-shadow: 0px 6px 32px rgba(0,0,0,0.25);
        transition: all 0.35s cubic-bezier(.4,0,.2,1);
    "></div>
    """