"""DEFRAG NODE — Gradio front-end for the Defrag Deep Compute engine.

Wires three layers together:
  1. Visual — an HTML/canvas mandala updated through a JS bridge.
  2. Audio  — optional ElevenLabs text-to-speech for system responses.
  3. Chat   — the translation layer that renders engine output as text.
"""

import os
import tempfile

import gradio as gr
from elevenlabs.client import ElevenLabs

from defrag_engine import DefragDeepCompute

# --- CONFIGURATION ---
# IMPORTANT: Add ELEVENLABS_API_KEY to your Space's 'Settings > Secrets'
ELEVEN_KEY = os.getenv("ELEVENLABS_API_KEY")
# No key -> no client -> the app runs in silent (text-only) mode.
client = ElevenLabs(api_key=ELEVEN_KEY) if ELEVEN_KEY else None

# Initialize Defrag Engine
engine = DefragDeepCompute()


# --- AUDIO ENGINE ---
def generate_audio(text):
    """Generate speech for *text* via ElevenLabs and return an mp3 path.

    Returns None (silent mode) when no API key is configured or when the
    TTS call fails for any reason — audio is best-effort, never fatal.
    """
    if not client:
        return None  # Silent mode if no key
    try:
        # Using a default calm voice (e.g., 'Rachel' or similar stable voice)
        # You can swap this ID for a specific custom voice
        audio_stream = client.generate(
            text=text,
            voice="Rachel",
            model="eleven_monolingual_v1",
        )
        # Unique temp file per request so concurrent sessions don't
        # overwrite each other's audio (a fixed "response.mp3" would clash).
        # delete=False: Gradio needs the file to survive past this function.
        with tempfile.NamedTemporaryFile(suffix=".mp3", delete=False) as f:
            for chunk in audio_stream:
                if chunk:
                    f.write(chunk)
            return f.name
    except Exception as e:
        # Best-effort: log and degrade to text-only rather than crash chat.
        print(f"Audio Error: {e}")
        return None


# --- CHAT LOGIC ---
def chat_interaction(user_message, history):
    """Run one chat turn.

    1. Run Deep Compute on user input.
    2. Generate System Admin response.
    3. Generate audio (best-effort, may be None).
    4. Prepare visual data for the mandala JS bridge.

    ``history`` is accepted for signature compatibility with the UI wiring
    but is not consumed by the engine yet.

    Returns (bot_response, audio_path_or_None, visual_args).
    """
    # Hardcoded profile for prototype (In prod, fetch from session)
    birth_data = {
        "name": "User",
        "year": 1990,
        "month": 1,
        "day": 1,
        "hour": 12,
        "minute": 0,
        "city": "Los Angeles",
        "country": "US",
    }

    # 1. RUN DEEP COMPUTE
    soul_log = engine.execute_defrag(birth_data, user_message)
    vector = soul_log['computed_vector']

    # 2. CONSTRUCT RESPONSE (The Translation Layer)
    friction = vector['friction_level']
    p_day = soul_log['hardware']['numerology']['personal_day']
    element = soul_log['weather']['astro']['dominant_element'].title()

    if friction == "CRITICAL":
        prefix = "⚠️ HIGH INTERFERENCE DETECTED."
        body = (
            f"You are navigating a Resistance Pattern on Personal Day {p_day}. "
            f"The {element} energy in your chart is currently blocked."
        )
        directive = "DIRECTIVE: Pause. Do not initiate. Allow the signal to clear."
    else:
        prefix = "✓ SIGNAL CLEAR."
        body = f"Your internal rhythm is aligned. Supported by {element} Energy."
        directive = "DIRECTIVE: Proceed with intent."

    bot_response = f"{prefix}\n\n{body}\n\n**{directive}**"

    # 3. GENERATE AUDIO
    # We speak only the body and directive, not the technical prefix
    audio_path = generate_audio(f"{body}. {directive}")

    # 4. PREPARE VISUAL DATA
    # [HexCode, Petals, Element, Stability, IsSpeaking]
    visual_args = [
        vector['visual_code'],
        vector['visual_seed'],
        vector['visual_element'],
        0.3 if friction == "CRITICAL" else 1.0,
        True,  # Trigger "Speaking" pulse in mandala
    ]

    return bot_response, audio_path, visual_args


# --- THE UI ---
css = """
/* ====== DEFRAG.APP PREMIUM UI ====== */
* { margin: 0; padding: 0; box-sizing: border-box; }

body, html {
    background: #0a0a0f;
    color: #e0e4e8;
    font-family: 'Inter', -apple-system, BlinkMacSystemFont, 'Segoe UI', sans-serif;
    overflow: hidden;
}

/* Main Container - Mobile First */
.gradio-container {
    max-width: 480px !important;
    margin: 0 auto !important;
    padding: 0 !important;
    background: linear-gradient(135deg, #0a0a0f 0%, #141420 100%);
    min-height: 100vh !important;
}

/* Mandala Visual Frame - Glassmorphism */
#monitor-frame {
    width: 100%;
    height: 350px;
    border: none;
    border-radius: 0;
    background: radial-gradient(circle at 50% 50%, rgba(80, 120, 255, 0.08) 0%, rgba(10, 10, 15, 0.95) 70%);
    backdrop-filter: blur(20px);
    -webkit-backdrop-filter: blur(20px);
    position: relative;
    overflow: hidden;
    box-shadow: inset 0 1px 0 rgba(255,255,255,0.03), 0 20px 60px rgba(0,0,0,0.4);
}

#monitor-frame::before {
    content: '';
    position: absolute;
    top: 50%;
    left: 50%;
    transform: translate(-50%, -50%);
    width: 300px;
    height: 300px;
    background: radial-gradient(circle, rgba(100, 150, 255, 0.15) 0%, transparent 70%);
    filter: blur(40px);
    animation: subtleGlow 4s ease-in-out infinite;
    pointer-events: none;
}

@keyframes subtleGlow {
    0%, 100% { opacity: 0.3; transform: translate(-50%, -50%) scale(1); }
    50% { opacity: 0.6; transform: translate(-50%, -50%) scale(1.1); }
}

/* Audio Player - Hidden but Functional */
.audio-container { display: none !important; }

/* Chat Interface - ChatGPT Style Glassmorphism */
.chatbot-container {
    background: rgba(20, 20, 32, 0.6) !important;
    backdrop-filter: blur(30px) !important;
    -webkit-backdrop-filter: blur(30px) !important;
    border: 1px solid rgba(255,255,255,0.06) !important;
    border-radius: 16px !important;
    box-shadow: 0 8px 32px rgba(0,0,0,0.3), inset 0 1px 0 rgba(255,255,255,0.03) !important;
    max-height: 450px !important;
    overflow-y: auto !important;
    padding: 20px !important;
}

.chat-window {
    height: 450px !important;
    overflow-y: auto !important;
    scroll-behavior: smooth;
    scrollbar-width: thin;
    scrollbar-color: rgba(100,150,255,0.3) transparent;
}

.chat-window::-webkit-scrollbar { width: 6px; }
.chat-window::-webkit-scrollbar-track { background: transparent; }
.chat-window::-webkit-scrollbar-thumb { background: rgba(100,150,255,0.3); border-radius: 10px; }

/* Message Bubbles - Premium Styling */
.message {
    background: rgba(255,255,255,0.04) !important;
    backdrop-filter: blur(10px);
    border: 1px solid rgba(255,255,255,0.05);
    border-radius: 12px;
    padding: 12px 16px;
    margin: 8px 0;
    box-shadow: 0 4px 12px rgba(0,0,0,0.2);
}

.user-message {
    background: linear-gradient(135deg, rgba(80,120,255,0.15), rgba(100,150,255,0.1)) !important;
    border: 1px solid rgba(100,150,255,0.2);
}

.bot-message {
    background: rgba(255,255,255,0.03) !important;
    border: 1px solid rgba(255,255,255,0.06);
}

/* Input Box - Glassmorphism */
textarea, input[type="text"] {
    background: rgba(30,30,45,0.8) !important;
    border: 1px solid rgba(100,150,255,0.2) !important;
    border-radius: 14px !important;
    color: #e0e4e8 !important;
    padding: 14px 18px !important;
    font-size: 15px !important;
    backdrop-filter: blur(20px);
    -webkit-backdrop-filter: blur(20px);
    box-shadow: inset 0 2px 8px rgba(0,0,0,0.2), 0 4px 16px rgba(0,0,0,0.1) !important;
    transition: all 0.3s ease;
}

textarea:focus, input[type="text"]:focus {
    outline: none !important;
    border-color: rgba(100,150,255,0.5) !important;
    box-shadow: inset 0 2px 8px rgba(0,0,0,0.2), 0 0 0 3px rgba(100,150,255,0.1), 0 4px 20px rgba(100,150,255,0.2) !important;
}

/* Buttons - Premium Glassmorphism */
button {
    background: linear-gradient(135deg, rgba(80,120,255,0.2), rgba(100,150,255,0.15)) !important;
    border: 1px solid rgba(100,150,255,0.3) !important;
    border-radius: 12px !important;
    color: #e0e4e8 !important;
    padding: 12px 24px !important;
    font-weight: 500 !important;
    backdrop-filter: blur(20px) !important;
    -webkit-backdrop-filter: blur(20px) !important;
    box-shadow: 0 4px 16px rgba(100,150,255,0.15), inset 0 1px 0 rgba(255,255,255,0.1) !important;
    transition: all 0.3s ease !important;
    cursor: pointer !important;
}

button:hover {
    background: linear-gradient(135deg, rgba(80,120,255,0.3), rgba(100,150,255,0.25)) !important;
    border-color: rgba(100,150,255,0.5) !important;
    box-shadow: 0 6px 24px rgba(100,150,255,0.25), inset 0 1px 0 rgba(255,255,255,0.15) !important;
    transform: translateY(-1px);
}

button:active {
    transform: translateY(0);
    box-shadow: 0 2px 8px rgba(100,150,255,0.2) !important;
}

/* Labels - Subtle Typography */
label {
    color: rgba(224, 228, 232, 0.7) !important;
    font-size: 13px !important;
    font-weight: 500 !important;
    text-transform: uppercase !important;
    letter-spacing: 1.2px !important;
    margin-bottom: 8px !important;
}

/* Remove Gradio Footer */
footer { display: none !important; }

/* Mobile Responsive */
@media (max-width: 480px) {
    #monitor-frame { height: 300px; }
    .chat-window { height: 400px !important; }
}

/* Subtle Animations */
@keyframes fadeIn {
    from { opacity: 0; transform: translateY(10px); }
    to { opacity: 1; transform: translateY(0); }
}

.message { animation: fadeIn 0.3s ease; }
"""

# BUG FIX: `theme` and `css` are gr.Blocks() constructor parameters, not
# launch() parameters — passing them to launch() meant the custom styling
# was never applied.
with gr.Blocks(title="DEFRAG NODE", theme=gr.themes.Base(), css=css) as demo:
    # 1. VISUAL LAYER (Top)
    try:
        with open("mandala_component.html", "r") as f:
            html_content = f.read()
    except FileNotFoundError:
        # Missing asset shouldn't take down the whole Space; show a stub.
        html_content = "<div>mandala_component.html not found</div>"

    # Container for the mandala
    mandala_view = gr.HTML(html_content, elem_id="monitor-frame")

    # 2. AUDIO LAYER (Hidden Player - Autoplay via JS if possible, or manual click)
    audio_player = gr.Audio(label="System Voice", visible=True, interactive=False)

    # 3. CHAT LAYER (Bottom)
    chatbot = gr.Chatbot(label="DEFRAG // LOG", elem_classes="chat-window")
    msg = gr.Textbox(placeholder="Input System Status...", show_label=False)

    # Hidden Data Bridge
    visual_data = gr.JSON(visible=False)

    # --- EVENT WIRING ---
    def respond(message, chat_history):
        """Handle a submit: run the engine, append to history, clear input."""
        # Run logic
        bot_msg, audio, viz_data = chat_interaction(message, chat_history)
        # Update history
        chat_history.append((message, bot_msg))
        # "" clears the textbox; viz_data flows to the hidden JSON bridge.
        return "", chat_history, audio, viz_data

    msg.submit(respond, [msg, chatbot], [msg, chatbot, audio_player, visual_data])

    # JS Bridge to update Canvas
    js_bridge = """
    (args) => {
        if (window.hexa) {
            // args = [hex, petals, elem, stability, speaking]
            window.hexa.updateParams(args[0], args[1], args[2], args[3], args[4]);
            // Stop speaking pulse after 5 seconds
            setTimeout(() => {
                window.hexa.updateParams(args[0], args[1], args[2], args[3], false);
            }, 5000);
        }
        return args;
    }
    """
    visual_data.change(None, [visual_data], None, js=js_bridge)

if __name__ == "__main__":
    demo.launch()