# SHX-Auto — GPT-Neo chat terminal (Hugging Face Space).
# NOTE(review): the original "Spaces: / Configuration error" lines were
# page-scrape residue from the Spaces UI, not part of the program.
import gradio as gr
from transformers import GPT2Tokenizer, GPTNeoForCausalLM
import torch
import json
import os

# Load generation settings (model name, sampling parameters) from disk.
with open("shx-config.json", "r", encoding="utf-8") as f:
    config = json.load(f)

# GPT-Neo ships without a pad token; reuse EOS so batched padding works.
tokenizer = GPT2Tokenizer.from_pretrained(config["model_name"])
tokenizer.pad_token = tokenizer.eos_token
model = GPTNeoForCausalLM.from_pretrained(config["model_name"])

# Module-level conversation log: list of (prompt, response) pairs.
chat_history = []
def shx_terminal(prompt, history):
    """Generate a GPT-Neo response for *prompt* and record it in *history*.

    Parameters
    ----------
    prompt : str
        The user's command/text.
    history : list[tuple[str, str]] | None
        Prior (prompt, response) pairs from this session; a new pair is
        appended. ``None`` is treated as an empty history.

    Returns
    -------
    tuple[str, list]
        The generated response text and the updated history.
    """
    # Use the history handed in by the UI rather than the module-level
    # global, so concurrent sessions do not share one conversation.
    history = history or []

    inputs = tokenizer(prompt, return_tensors="pt", padding=True)
    input_ids = inputs.input_ids
    attention_mask = inputs.attention_mask

    try:
        with torch.no_grad():
            output = model.generate(
                input_ids=input_ids,
                attention_mask=attention_mask,
                pad_token_id=tokenizer.eos_token_id,
                max_length=config["max_length"],
                temperature=config["temperature"],
                top_k=config["top_k"],
                top_p=config["top_p"],
                do_sample=True,
            )
        # Decode only the newly generated tokens; decoding output[0] in
        # full would echo the prompt back at the start of the response.
        response = tokenizer.decode(
            output[0][input_ids.shape[1]:], skip_special_tokens=True
        )
        history.append((prompt, response))
        return response, history
    except Exception as e:
        # Surface generation failures in the UI instead of crashing the app.
        return f"⚠️ SHX caught an error during generation:\n{str(e)}", history
# Terminal-styled UI. A single gr.State instance must be shared between
# `inputs` and `outputs` for session state to round-trip; the original code
# created two unrelated gr.State components, so history never persisted,
# and `chat_box.update(...)` at build time was a no-op.
with gr.Blocks(css="body { background-color: black; color: #00FF41; font-family: monospace; }") as demo:
    gr.Markdown("## 🤖 **SHX-Auto: Multiversal System Builder**")
    # Per-session conversation state: list of (prompt, response) pairs.
    state = gr.State([])
    with gr.Row():
        with gr.Column():
            input_box = gr.Textbox(label="Your Command")
            output_box = gr.Textbox(label="SHX Response")
            run_btn = gr.Button("Run")
        with gr.Column():
            chat_box = gr.Chatbot(label="Chat History")
    # shx_terminal returns (response, history); write the history back into
    # the session state, then mirror it into the Chatbot display.
    run_btn.click(
        shx_terminal,
        inputs=[input_box, state],
        outputs=[output_box, state],
    ).then(lambda h: h, inputs=state, outputs=chat_box)

demo.launch()