# Hugging Face Space — running on ZeroGPU ("Running on Zero")
import sys
import os

# --- THE SOVEREIGN PATCH ---
# Newer huggingface_hub releases removed the legacy `HfFolder` token helper,
# but some downstream code still expects it; install a no-op stand-in so
# those imports/lookups keep working.
import huggingface_hub

if not hasattr(huggingface_hub, 'HfFolder'):
    class SovereignHfFolder:
        """Minimal stand-in for the removed ``huggingface_hub.HfFolder``."""

        @staticmethod
        def get_token():
            # No cached token available — callers must supply one explicitly.
            # (staticmethod so it also works when called on an instance.)
            return None

    huggingface_hub.HfFolder = SovereignHfFolder
    # NOTE(review): HfFolder was an attribute, not a submodule; this
    # sys.modules entry only affects `import huggingface_hub.HfFolder`-style
    # imports — confirm it is actually needed.
    sys.modules['huggingface_hub.HfFolder'] = SovereignHfFolder

import subprocess
import gradio as gr
import spaces
import time

# Working directory for every executed command (the cloned research repo).
REPO_DIR = os.path.join(os.getcwd(), "autoresearch")

# Maximum ZeroGPU burst (5 Minutes)
def execute_on_h200(command, openrouter_key, cwd=None):
    """Run *command* in a shell and stream its combined output live.

    Yields the accumulated terminal log after every line so a streaming
    Gradio Textbox can repaint with the full log in real time.

    Args:
        command: Shell command line to execute. Runs with ``shell=True`` —
            arbitrary command execution is the point of this app (a remote
            terminal), not an injection bug, but keep the Space private.
        openrouter_key: Optional OpenRouter API key; when given, the
            subprocess is pointed at OpenRouter's OpenAI-compatible API.
        cwd: Working directory for the command; defaults to ``REPO_DIR``.

    Yields:
        str: The entire log accumulated so far (not just the newest line).
    """
    # NOTE(review): `import spaces` plus the "Maximum ZeroGPU burst" comment
    # suggest a lost `@spaces.GPU(duration=...)` decorator here — confirm.
    if not command:
        yield "β Please enter a command."
        return

    env = os.environ.copy()
    if openrouter_key:
        # Reuse the OpenAI-compatible client config to route via OpenRouter.
        env["OPENAI_API_KEY"] = openrouter_key
        env["OPENAI_BASE_URL"] = "https://openrouter.ai/api/v1"
    # Force Python subprocesses to stream logs instantly instead of buffering.
    env["PYTHONUNBUFFERED"] = "1"

    output_log = f"[*] Attaching H200 and executing: {command}\n\n"
    yield output_log
    try:
        # Popen lets us stream output live; the context manager guarantees
        # the stdout pipe is closed and the process reaped (the original
        # leaked both if an exception fired mid-stream).
        with subprocess.Popen(
            command,
            shell=True,
            cwd=cwd if cwd is not None else REPO_DIR,
            env=env,
            stdout=subprocess.PIPE,
            stderr=subprocess.STDOUT,  # merge stderr into the same stream
            text=True,
            bufsize=1,  # line-buffered (valid in text mode only)
        ) as process:
            # Stream the output line-by-line to the Gradio UI.
            # (`universal_newlines=True` dropped: redundant with text=True.)
            for line in iter(process.stdout.readline, ''):
                output_log += line
                yield output_log
        output_log += f"\n--- EXIT CODE: {process.returncode} ---"
        yield output_log
    except Exception as e:
        yield output_log + f"\nβ CRASH: {str(e)}"
def get_readme(repo_dir=None):
    """Return the contents of ``README.md``, or a fallback message.

    Args:
        repo_dir: Directory containing README.md; defaults to ``REPO_DIR``.

    Returns:
        str: The README text, or ``"README not found."`` when it cannot
        be read.
    """
    repo_dir = REPO_DIR if repo_dir is None else repo_dir
    try:
        # Explicit encoding: the README contains emoji / non-ASCII headings.
        with open(os.path.join(repo_dir, "README.md"), "r", encoding="utf-8") as f:
            return f.read()
    except (OSError, UnicodeDecodeError):
        # Narrowed from a bare `except:` which also swallowed
        # KeyboardInterrupt/SystemExit and genuine bugs.
        return "README not found."
# --- Gradio UI ---------------------------------------------------------------
# Component creation order inside the `gr.Blocks` / `gr.Row` / `gr.Column`
# context managers defines the rendered layout, so statement order here is
# load-bearing — do not reorder.
with gr.Blocks(theme=gr.themes.Monochrome()) as demo:
    gr.Markdown("# 𧬠Dex Sovereign H200 Terminal (Live Stream Edition)")
    gr.Markdown("Direct command-line execution on the Hugging Face ZeroGPU. Output streams live.")
    with gr.Row():
        with gr.Column(scale=2):
            # Left column: command entry plus optional credentials.
            cmd_input = gr.Textbox(
                value="ls -la && python prepare.py",
                label="Execute Command on H200"
            )
            or_key = gr.Textbox(label="OpenRouter Key (Optional, bypasses OpenAI)", type="password")
            btn = gr.Button("π Run Command on GPU", variant="primary")
        with gr.Column(scale=3):
            # Right column: live terminal log, repainted on every yield.
            output = gr.Textbox(label="Live Terminal Output", lines=20, max_lines=40)
    gr.Markdown("### π Framework Documentation (README.md)")
    # README is read once at UI-build time, not on each page load.
    gr.Markdown(get_readme())
    # execute_on_h200 is a generator, so Gradio streams each yielded log
    # snapshot into the output Textbox.
    btn.click(fn=execute_on_h200, inputs=[cmd_input, or_key], outputs=[output])

if __name__ == "__main__":
    demo.launch()