# NOTE(review): the three lines below were Hugging Face Spaces page residue
# ("Spaces: / Sleeping / Sleeping") captured by the scrape — kept as a comment
# so the file is valid Python.
import gradio as gr
import time
import random  # NOTE(review): imported but unused in this file — confirm before removing
# ==================== MEMORY ====================

# Shared rolling memory of recent user prompts (bounded FIFO, max 10 entries).
user_memory = []


def add_memory(entry):
    """Record *entry* in the shared memory, evicting the oldest past 10 items."""
    user_memory.append(entry)
    while len(user_memory) > 10:
        del user_memory[0]


def get_memory_context():
    """Return the 5 most recent memory entries as a newline-joined string."""
    recent = user_memory[-5:]
    return "\n".join(recent)
# ==================== AGENTS ====================
def agent_story(prompt):
    """Storyteller agent: embed the prompt in a fixed French story template."""
    prefix = "📖 Histoire:\nIl était une fois "
    suffix = " qui changea le monde."
    return prefix + prompt + suffix
def agent_code(prompt):
    """Coder agent: return a fenced Python snippet that echoes the prompt.

    NOTE(review): the source was table-mangled and the indentation inside the
    triple-quoted string was lost; reconstructed with a conventional 4-space
    indent for the snippet body — confirm against the original app.
    """
    snippet = f'def solution():\n    return "{prompt}"'
    return "```python\n" + snippet + "\n```"
def agent_idea(prompt):
    """Startup-idea agent: pitch the prompt as a one-line SaaS concept."""
    return "💡 Startup: " + prompt + " → SaaS scalable + abonnement mensuel."
# ==================== ROUTER ====================
def route(prompt):
    """Select an agent by keyword in *prompt*; storyteller is the fallback."""
    lowered = prompt.lower()
    if "code" in lowered:
        return agent_code
    if "idée" in lowered or "startup" in lowered:
        return agent_idea
    return agent_story
# ==================== STREAMING ====================
def stream_generate(prompt, auto_mode):
    """Stream an agent's answer character by character.

    Routes *prompt* (prefixed with recent memory context) to an agent and
    yields the growing output string — each yielded value replaces the whole
    textbox content in Gradio, so yields must always be cumulative. When
    *auto_mode* is truthy, runs 2 extra bounded self-refinement passes.

    Args:
        prompt: Raw user input text.
        auto_mode: Truthy to enable the bounded "AutoGPT light" loop.

    Yields:
        The cumulative output text after each simulated token.
    """
    if not prompt.strip():
        yield "⚠️ Entrez un prompt"
        return

    context = get_memory_context()
    agent = route(prompt)
    full_prompt = f"{context}\n{prompt}"
    result = agent(full_prompt)

    # Simulate token-by-token streaming.
    output = ""
    for char in result:
        output += char
        time.sleep(0.01)
        yield output

    add_memory(prompt)

    # ================= AUTO MODE =================
    if auto_mode:
        for _ in range(2):  # hard iteration cap = safety against runaway loops
            time.sleep(0.5)
            new_prompt = f"Améliore: {result[:50]}"
            agent = route(new_prompt)
            result = agent(new_prompt)
            # BUG FIX: accumulate into `output` instead of resetting it —
            # the original `output = "\n\n🤖 AUTO:\n"` wiped all previously
            # streamed text from the UI on every auto pass, because each
            # yield replaces the textbox's full content.
            output += "\n\n🤖 AUTO:\n"
            for char in result:
                output += char
                time.sleep(0.005)
                yield output
            add_memory(new_prompt)
# ==================== UI ====================
# Gradio Blocks layout: prompt + autonomous-mode toggle in one row,
# a result textbox, and a generate button.
with gr.Blocks(title="NEUROFLUX AI ⚡") as app:
    gr.Markdown("# 🧠 NEUROFLUX AI ⚡")
    gr.Markdown("Streaming • Mémoire • Multi-agents • AutoGPT light")

    with gr.Row():
        prompt = gr.Textbox(label="Prompt", lines=3)
        auto_mode = gr.Checkbox(label="🤖 Mode autonome")

    output = gr.Textbox(label="Résultat", lines=15)
    btn = gr.Button("🚀 Générer")

    # Both the button click and pressing Enter in the prompt box run the
    # same streaming generator; Gradio streams its yielded values into
    # the output textbox.
    btn.click(stream_generate, inputs=[prompt, auto_mode], outputs=output)
    prompt.submit(stream_generate, inputs=[prompt, auto_mode], outputs=output)

# ==================== LAUNCH ====================
if __name__ == "__main__":
    app.launch()