"""Algorix Coding IDE — Hugging Face Space (Gradio app).

Status captured from the Spaces page at scrape time: Runtime error.
"""
import os
import re
import tempfile

import gradio as gr
from huggingface_hub import InferenceClient
# Model registry: display name -> Hugging Face repo id.
models = {
    "Exquisitely": "arthu1/Exquisitely",
    "Olympus": "arthu1/Olympus",
    "Astryx": "arthu1/Astryx",
    "Aether": "arthu1/Aether",
}

# One InferenceClient per model, keyed by display name.
clients = {label: InferenceClient(repo_id) for label, repo_id in models.items()}

# Simulated file system: filename -> file content (str). Populated when
# the model's responses contain fenced code blocks tagged with a filename.
file_system = {}
def generate_response(message, model_name, history):
    """Stream a chat completion from the selected model.

    Yields the partially accumulated response text after each streamed
    token so the Gradio generator callback can live-update the chat.
    After the stream ends, fenced code blocks preceded by a
    ``FILENAME:``-style marker are saved into the in-memory
    ``file_system``.

    Args:
        message: The new user message.
        model_name: Key into the module-level ``clients`` dict.
        history: Prior turns as ``[user, assistant]`` pairs.

    Yields:
        str: The response text accumulated so far.
    """
    client = clients[model_name]

    # Rebuild the OpenAI-style message list from the pairwise history.
    messages = []
    for user_turn, assistant_turn in history:
        messages.append({"role": "user", "content": user_turn})
        messages.append({"role": "assistant", "content": assistant_turn})
    messages.append({"role": "user", "content": message})

    response = ""
    for chunk in client.chat_completion(
        messages=messages,
        max_tokens=2000,
        stream=True,
    ):
        # Some providers send keep-alive chunks with no choices; the
        # original indexed choices[0] unconditionally and could raise.
        if not chunk.choices:
            continue
        token = chunk.choices[0].delta.content
        if token:
            response += token
            yield response

    # After streaming completes, extract any files the model "created".
    if "```" in response:
        file_pattern = r"(?:FILENAME|filename|File):\s*(\S+)\s*```(?:\w+)?\n([\s\S]+?)```"
        for filename, content in re.findall(file_pattern, response):
            file_system[filename] = content.strip()
def execute_code(code, language):
    """Execute a code snippet and return its captured output.

    Only Python is actually executed; other languages get a stub
    message. WARNING: ``exec`` on model-generated code is inherently
    unsafe — acceptable only inside a sandboxed demo Space.

    Args:
        code: Source code to run.
        language: Language name, e.g. ``"python"``.

    Returns:
        str: Captured stdout, a success marker for silent snippets,
        or an ``Error: ...`` message when the snippet raises.
    """
    if language == "python":
        import io
        from contextlib import redirect_stdout

        output = io.StringIO()
        try:
            with redirect_stdout(output):
                # Run in a fresh globals dict: exec() in function-local
                # scope puts top-level defs into locals, so snippet
                # functions could not see each other (NameError).
                exec(code, {"__name__": "__main__"})  # noqa: S102 — intentional sandbox-style execution
            return output.getvalue() or "β Executed successfully"
        except Exception as e:
            return f"Error: {str(e)}"
    elif language == "javascript":
        return "JavaScript execution not available in this environment"
    else:
        return "Language not supported"
def list_files():
    """Render the in-memory file listing for the sidebar textbox."""
    if file_system:
        return "\n".join(f"π {name}" for name in file_system)
    return "No files created yet"
def read_file(filename):
    """Return the stored content of *filename* from the in-memory FS.

    Falls back to a human-readable message when the file does not
    exist. (Fix: the original f-string contained no placeholder and
    always reported "(unknown)" instead of the requested name.)
    """
    return file_system.get(filename, f"File '{filename}' not found")
def download_file(filename):
    """Write *filename*'s stored content to a temp file and return its path.

    Fix: ``gr.File`` output components expect a filesystem path; the
    original returned the raw text content, which gr.File cannot serve.

    Returns:
        str | None: Path to a temporary copy of the file, or ``None``
        when the file is unknown/empty (clears the component).
    """
    content = file_system.get(filename, "")
    if not content:
        return None
    # basename() guards against path separators in model-chosen names.
    safe_name = os.path.basename(filename) or "download.txt"
    tmp_dir = tempfile.mkdtemp(prefix="algorix_")
    path = os.path.join(tmp_dir, safe_name)
    with open(path, "w", encoding="utf-8") as fh:
        fh.write(content)
    return path
| # Gradio Interface | |
# ---------------------------------------------------------------- UI ---
with gr.Blocks(theme=gr.themes.Soft()) as demo:
    gr.Markdown("# π» Algorix Coding IDE")
    gr.Markdown("Test all Algorix models in an interactive environment")

    with gr.Row():
        # Left column: model picker and chat panel.
        with gr.Column(scale=2):
            model_selector = gr.Dropdown(
                choices=list(models.keys()),
                value="Aether",
                label="Select Model",
            )
            # NOTE(review): list-of-pairs chat history ("tuples" format)
            # is deprecated in recent Gradio — confirm the pinned version.
            chatbot = gr.Chatbot(height=400, label="AI Assistant")
            with gr.Row():
                msg = gr.Textbox(
                    label="Message",
                    placeholder="Ask me to write code...",
                    lines=3,
                )
            with gr.Row():
                submit = gr.Button("Send", variant="primary")
                clear = gr.Button("Clear")

        # Right column: pseudo-terminal and file browser.
        with gr.Column(scale=1):
            gr.Markdown("### π§ Terminal")
            terminal_output = gr.Textbox(
                label="Output",
                lines=10,
                interactive=False,
                value="Welcome to Algorix Terminal\n$",
            )
            with gr.Row():
                cmd_input = gr.Textbox(
                    label="Command",
                    placeholder="ls, cat <file>, python <file>",
                )
                run_cmd = gr.Button("Run")

            gr.Markdown("### π Files")
            file_list = gr.Textbox(
                label="Created Files",
                lines=5,
                interactive=False,
            )
            with gr.Row():
                file_name = gr.Textbox(label="Filename", scale=3)
                download_btn = gr.Button("Download", scale=1)
            file_content = gr.Textbox(label="File Content", lines=8)
def handle_command(cmd, current_output):
    """Emulate a tiny shell over the in-memory file system.

    Supported commands: ls, cat <file>, python <file>, clear, help.
    Returns the full terminal transcript to display.
    """
    tokens = cmd.strip().split()
    if not tokens:
        return current_output

    name, *args = tokens
    transcript = current_output + f"\n$ {cmd}\n"

    if name == "ls":
        transcript += "\n".join(file_system.keys()) or "outputs/"
    elif name == "cat" and args:
        transcript += file_system.get(args[0], f"cat: {args[0]}: No such file")
    elif name == "python" and args:
        source = file_system.get(args[0], "")
        if source:
            transcript += execute_code(source, "python")
        else:
            transcript += f"python: can't open file '{args[0]}'"
    elif name == "clear":
        # Reset the terminal to its initial banner.
        return "Welcome to Algorix Terminal\n$"
    elif name == "help":
        transcript += "Commands: ls, cat <file>, python <file>, clear, help"
    else:
        transcript += f"{name}: command not found"

    return transcript + "\n"
def user_message(message, history):
    """Append the user's turn (reply pending) and clear the input box."""
    updated = history + [[message, None]]
    return "", updated
def bot_response(history, model_name):
    """Stream the assistant's reply into the last history entry.

    Gradio generator callback: each yield re-renders the chatbot with
    the partially streamed reply. (Fix: removed the unused
    ``full_response`` accumulator and the dead ``return history`` —
    a generator's return value is ignored by the caller.)
    """
    user_msg = history[-1][0]
    # generate_response yields progressively longer reply prefixes.
    for partial in generate_response(user_msg, model_name, history[:-1]):
        history[-1][1] = partial
        yield history
# --- Event wiring -------------------------------------------------------
# Chat: push the user turn first (non-queued, instant UI update), then
# stream the bot reply via the generator callback.
msg.submit(
    user_message, [msg, chatbot], [msg, chatbot], queue=False
).then(bot_response, [chatbot, model_selector], chatbot)
submit.click(
    user_message, [msg, chatbot], [msg, chatbot], queue=False
).then(bot_response, [chatbot, model_selector], chatbot)
clear.click(lambda: None, None, chatbot, queue=False)

# Terminal: run the command, then blank the command box.
run_cmd.click(
    handle_command, [cmd_input, terminal_output], terminal_output
).then(lambda: "", None, cmd_input)

# Refresh the file listing once per second.
# NOTE(review): the `every=` kwarg on demo.load was removed in newer
# Gradio releases (use gr.Timer) — confirm against the pinned version;
# this is a plausible cause of the Space's "Runtime error".
demo.load(list_files, None, file_list, every=1)
file_name.change(read_file, file_name, file_content)
download_btn.click(download_file, file_name, gr.File(label="Download"))
# Script entry point: start the Gradio server when run directly.
if __name__ == "__main__":
    demo.launch()