Spaces:
Sleeping
Sleeping
Update app.py
Browse files
app.py
CHANGED
|
@@ -1,190 +1,145 @@
|
|
| 1 |
import gradio as gr
|
| 2 |
from defrag_engine import DefragDeepCompute
|
| 3 |
-
import
|
|
|
|
| 4 |
|
| 5 |
-
# ---
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 6 |
engine = DefragDeepCompute()
|
| 7 |
|
| 8 |
-
# ---
|
| 9 |
-
def
|
| 10 |
-
|
| 11 |
-
|
| 12 |
-
|
| 13 |
-
"name": name,
|
| 14 |
-
"date": dob,
|
| 15 |
-
"time": time,
|
| 16 |
-
"city": city,
|
| 17 |
-
"country_code": "US"
|
| 18 |
-
}
|
| 19 |
|
| 20 |
-
|
| 21 |
-
|
| 22 |
-
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 23 |
|
| 24 |
-
#
|
| 25 |
-
|
|
|
|
| 26 |
|
|
|
|
| 27 |
friction = vector['friction_level']
|
| 28 |
p_day = soul_log['hardware']['numerology']['personal_day']
|
| 29 |
element = soul_log['weather']['astro']['dominant_element'].title()
|
| 30 |
|
| 31 |
if friction == "CRITICAL":
|
| 32 |
-
|
| 33 |
-
|
| 34 |
-
|
| 35 |
-
directive = "DIRECTIVE: PAUSE. Do not initiate action. Allow the signal to clear."
|
| 36 |
else:
|
| 37 |
-
|
| 38 |
-
|
| 39 |
-
|
| 40 |
-
|
| 41 |
-
|
| 42 |
-
|
| 43 |
-
|
| 44 |
-
|
| 45 |
-
|
| 46 |
-
<p style="color: #CCC; font-size: 15px; line-height: 1.5; margin-bottom: 20px;">{context_msg}</p>
|
| 47 |
-
<div style="border-top: 1px solid #333; padding-top: 15px;">
|
| 48 |
-
<strong style="color: #FFF; font-size: 14px; letter-spacing: 0.5px;">{directive}</strong>
|
| 49 |
-
</div>
|
| 50 |
-
</div>
|
| 51 |
-
"""
|
| 52 |
|
| 53 |
-
# 4.
|
|
|
|
| 54 |
visual_args = [
|
| 55 |
-
vector['visual_code'],
|
| 56 |
-
vector['visual_seed'],
|
| 57 |
-
vector['visual_element'],
|
| 58 |
-
0.3 if friction == "CRITICAL" else 1.0
|
|
|
|
| 59 |
]
|
| 60 |
|
| 61 |
-
return
|
| 62 |
-
|
| 63 |
-
# --- THE PREMIUM VISUAL LAYER (CSS) ---
|
| 64 |
-
premium_css = """
|
| 65 |
-
@import url('https://fonts.googleapis.com/css2?family=Inter:wght@300;400;600&display=swap');
|
| 66 |
-
|
| 67 |
-
body, .gradio-container {
|
| 68 |
-
background-color: #000000 !important;
|
| 69 |
-
color: #F0F0F0 !important;
|
| 70 |
-
font-family: 'Inter', sans-serif !important;
|
| 71 |
-
}
|
| 72 |
-
|
| 73 |
-
/* Mobile-First App Container */
|
| 74 |
-
.gradio-container {
|
| 75 |
-
max-width: 420px !important;
|
| 76 |
-
margin: 0 auto !important;
|
| 77 |
-
padding: 20px !important;
|
| 78 |
-
}
|
| 79 |
-
|
| 80 |
-
/* The Monitor (Mandala) */
|
| 81 |
-
#monitor-frame {
|
| 82 |
-
width: 280px;
|
| 83 |
-
height: 280px;
|
| 84 |
-
margin: 20px auto;
|
| 85 |
-
border-radius: 50%;
|
| 86 |
-
border: 1px solid #222;
|
| 87 |
-
box-shadow: 0 0 40px rgba(0, 240, 255, 0.05); /* Subtle Cyan Glow */
|
| 88 |
-
overflow: hidden;
|
| 89 |
-
background: #000;
|
| 90 |
-
transition: all 0.5s ease;
|
| 91 |
-
}
|
| 92 |
|
| 93 |
-
|
| 94 |
-
|
| 95 |
-
|
| 96 |
-
|
| 97 |
-
|
| 98 |
-
|
| 99 |
-
|
| 100 |
-
border-radius: 12px !important;
|
| 101 |
-
outline: none !important;
|
| 102 |
-
box-shadow: none !important;
|
| 103 |
-
}
|
| 104 |
-
.stealth-input textarea::placeholder { color: #555 !important; }
|
| 105 |
-
|
| 106 |
-
/* The Action Button */
|
| 107 |
-
.align-btn {
|
| 108 |
-
background: #FFFFFF !important;
|
| 109 |
-
color: #000000 !important;
|
| 110 |
-
border: none !important;
|
| 111 |
-
border-radius: 30px !important;
|
| 112 |
-
font-weight: 600 !important;
|
| 113 |
-
font-size: 14px !important;
|
| 114 |
-
letter-spacing: 2px !important;
|
| 115 |
-
height: 50px !important;
|
| 116 |
-
margin-top: 15px !important;
|
| 117 |
-
transition: transform 0.2s ease !important;
|
| 118 |
-
}
|
| 119 |
-
.align-btn:hover {
|
| 120 |
-
box-shadow: 0 0 20px rgba(255, 255, 255, 0.3) !important;
|
| 121 |
-
transform: scale(1.02);
|
| 122 |
-
}
|
| 123 |
-
|
| 124 |
-
/* Animations */
|
| 125 |
-
@keyframes fadeIn {
|
| 126 |
-
from { opacity: 0; transform: translateY(10px); }
|
| 127 |
-
to { opacity: 1; transform: translateY(0); }
|
| 128 |
-
}
|
| 129 |
"""
|
| 130 |
|
| 131 |
-
|
| 132 |
-
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 133 |
|
| 134 |
-
#
|
| 135 |
-
|
| 136 |
-
with gr.Row(visible=False):
|
| 137 |
-
name_in = gr.Textbox(value="User")
|
| 138 |
-
dob_in = gr.Textbox(value="1990-01-01")
|
| 139 |
-
time_in = gr.Textbox(value="12:00")
|
| 140 |
-
city_in = gr.Textbox(value="Los Angeles")
|
| 141 |
|
| 142 |
-
#
|
| 143 |
-
|
| 144 |
-
|
| 145 |
-
|
| 146 |
-
|
| 147 |
-
|
| 148 |
-
|
| 149 |
-
|
| 150 |
-
|
| 151 |
-
|
| 152 |
-
#
|
| 153 |
-
|
| 154 |
-
placeholder="Type your state...",
|
| 155 |
-
show_label=False,
|
| 156 |
-
lines=2,
|
| 157 |
-
elem_classes="stealth-input"
|
| 158 |
-
)
|
| 159 |
|
| 160 |
-
#
|
| 161 |
-
|
|
|
|
| 162 |
|
| 163 |
-
|
| 164 |
-
output_card = gr.HTML(label=None)
|
| 165 |
|
| 166 |
-
|
| 167 |
-
# Hidden JSON component to pass data to JavaScript
|
| 168 |
-
visual_data = gr.JSON(visible=False)
|
| 169 |
|
| 170 |
-
#
|
| 171 |
js_bridge = """
|
| 172 |
(args) => {
|
| 173 |
if (window.hexa) {
|
| 174 |
-
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 175 |
}
|
| 176 |
return args;
|
| 177 |
}
|
| 178 |
"""
|
| 179 |
-
|
| 180 |
-
# Event Listener
|
| 181 |
-
btn_align.click(
|
| 182 |
-
fn=run_alignment,
|
| 183 |
-
inputs=[user_input, name_in, dob_in, time_in, city_in],
|
| 184 |
-
outputs=[visual_data, output_card]
|
| 185 |
-
)
|
| 186 |
-
|
| 187 |
-
# Trigger JS when data returns
|
| 188 |
visual_data.change(None, [visual_data], None, js=js_bridge)
|
| 189 |
|
| 190 |
if __name__ == "__main__":
|
|
|
|
| 1 |
import os
import tempfile

import gradio as gr
from elevenlabs.client import ElevenLabs

from defrag_engine import DefragDeepCompute
|
| 5 |
|
| 6 |
+
# --- CONFIGURATION ---
# IMPORTANT: Add ELEVENLABS_API_KEY to your Space's 'Settings > Secrets'
ELEVEN_KEY = os.getenv("ELEVENLABS_API_KEY")
if ELEVEN_KEY:
    client = ElevenLabs(api_key=ELEVEN_KEY)
else:
    # No key configured: the app runs in silent (text-only) mode.
    client = None

# Initialize Defrag Engine
engine = DefragDeepCompute()
|
| 13 |
|
| 14 |
+
# --- AUDIO ENGINE ---
def generate_audio(text):
    """Synthesize *text* to speech via ElevenLabs.

    Returns the path to a generated .mp3 file, or None when no API key
    is configured or synthesis fails (the app degrades to silent mode
    rather than crashing the chat turn).
    """
    if client is None:
        return None  # Silent mode if no key

    try:
        # Using a default calm voice (e.g., 'Rachel' or similar stable voice)
        # You can swap this ID for a specific custom voice
        audio_stream = client.generate(
            text=text,
            voice="Rachel",
            model="eleven_monolingual_v1"
        )
        # Write each response to its own temp file: a fixed "response.mp3"
        # in the CWD would be clobbered by concurrent chat requests.
        with tempfile.NamedTemporaryFile(suffix=".mp3", delete=False) as f:
            for chunk in audio_stream:
                if chunk:
                    f.write(chunk)
            return f.name
    except Exception as e:
        # Best-effort audio: log and fall back to silent mode.
        print(f"Audio Error: {e}")
        return None
|
| 38 |
+
|
| 39 |
+
# --- CHAT LOGIC ---
def chat_interaction(user_message, history):
    """Run the full defrag pipeline for one chat turn.

    Steps: deep-compute the user's message, build the System Admin
    reply, synthesize its audio, and package the mandala visual args.
    Returns (bot_response, audio_path, visual_args).
    """
    # Hardcoded profile for prototype (In prod, fetch from session)
    birth_data = {
        "name": "User",
        "date": "1990-01-01",
        "time": "12:00",
        "city": "Los Angeles",
        "country_code": "US",
    }

    # 1. RUN DEEP COMPUTE
    soul_log = engine.execute_defrag(birth_data, user_message)
    vector = soul_log['computed_vector']

    # 2. CONSTRUCT RESPONSE (The Translation Layer)
    friction = vector['friction_level']
    p_day = soul_log['hardware']['numerology']['personal_day']
    element = soul_log['weather']['astro']['dominant_element'].title()

    is_critical = friction == "CRITICAL"
    if is_critical:
        prefix = "⚠️ HIGH INTERFERENCE DETECTED."
        body = f"You are navigating a Resistance Pattern on Personal Day {p_day}. The {element} energy in your chart is currently blocked."
        directive = "DIRECTIVE: Pause. Do not initiate. Allow the signal to clear."
    else:
        prefix = "✓ SIGNAL CLEAR."
        body = f"Your internal rhythm is aligned. Supported by {element} Energy."
        directive = "DIRECTIVE: Proceed with intent."

    bot_response = f"{prefix}\n\n{body}\n\n**{directive}**"

    # 3. GENERATE AUDIO — speak only the body and directive, not the
    # technical prefix.
    audio_path = generate_audio(f"{body}. {directive}")

    # 4. PREPARE VISUAL DATA
    # [HexCode, Petals, Element, Stability, IsSpeaking]
    visual_args = [
        vector['visual_code'],
        vector['visual_seed'],
        vector['visual_element'],
        0.3 if is_critical else 1.0,
        True,  # trigger "Speaking" pulse in mandala
    ]

    return bot_response, audio_path, visual_args
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 85 |
|
| 86 |
+
# --- THE UI ---
# Mobile-first dark theme; the mandala canvas sits above the chat log.
css = """
body { background-color: #000000; color: #F0F0F0; font-family: 'Helvetica Neue', sans-serif; }
.gradio-container { max-width: 450px !important; margin: 0 auto !important; padding: 0 !important; }
#monitor-frame { width: 100%; height: 320px; border-bottom: 1px solid #333; }
.chat-window { height: 400px !important; overflow-y: scroll; }
footer { display: none !important; }
"""

with gr.Blocks(theme=gr.themes.Base(), css=css, title="DEFRAG NODE") as demo:

    # 1. VISUAL LAYER (Top)
    # NOTE: read at import time — the app fails to start if the file is
    # missing, which surfaces the misconfiguration immediately.
    with open("mandala_component.html", "r") as f:
        html_content = f.read()

    # Container for the mandala
    mandala_view = gr.HTML(html_content, elem_id="monitor-frame")

    # 2. AUDIO LAYER (Hidden Player - Autoplay via JS if possible, or manual click)
    audio_player = gr.Audio(label="System Voice", visible=True, interactive=False)

    # 3. CHAT LAYER (Bottom)
    chatbot = gr.Chatbot(label="DEFRAG // LOG", type="messages", elem_classes="chat-window")
    msg = gr.Textbox(placeholder="Input System Status...", show_label=False)

    # Hidden Data Bridge: updating this JSON fires the js_bridge below,
    # which pushes the visual args into the mandala canvas.
    visual_data = gr.JSON(visible=False)

    # --- EVENT WIRING ---

    def respond(message, chat_history):
        # Run the defrag pipeline; returns (reply text, audio path, visual args).
        bot_msg, audio, viz_data = chat_interaction(message, chat_history)

        # Update history (type="messages" expects role/content dicts).
        chat_history.append({"role": "user", "content": message})
        chat_history.append({"role": "assistant", "content": bot_msg})

        # First output "" clears the textbox after submit.
        return "", chat_history, audio, viz_data

    msg.submit(respond, [msg, chatbot], [msg, chatbot, audio_player, visual_data])

    # JS Bridge to update Canvas (window.hexa is defined by mandala_component.html)
    js_bridge = """
    (args) => {
        if (window.hexa) {
            // args = [hex, petals, elem, stability, speaking]
            window.hexa.updateParams(args[0], args[1], args[2], args[3], args[4]);

            // Stop speaking pulse after 5 seconds
            setTimeout(() => {
                window.hexa.updateParams(args[0], args[1], args[2], args[3], false);
            }, 5000);
        }
        return args;
    }
    """
    # Trigger JS when data returns
    visual_data.change(None, [visual_data], None, js=js_bridge)
|
| 144 |
|
| 145 |
if __name__ == "__main__":
|