import gradio as gr
import torch
from transformers import AutoTokenizer, AutoModelForCausalLM

# Load the tokenizer and model (set this to the model ID/path you want to serve).
model_name = "GoidaAlignment/GOIDA-0.5B"
tokenizer = AutoTokenizer.from_pretrained(model_name)
model = AutoModelForCausalLM.from_pretrained(model_name)
model.eval()  # inference only — disable dropout and other training-mode layers

# Some causal-LM tokenizers ship without a pad token; tokenizing with
# padding=True requires one, so fall back to EOS.
if tokenizer.pad_token is None:
    tokenizer.pad_token = tokenizer.eos_token


def generate_response(prompt: str) -> str:
    """Generate a model continuation for *prompt* and return only the new text.

    Args:
        prompt: Raw user text entered in the UI.

    Returns:
        The decoded completion, excluding the echoed prompt tokens.
    """
    inputs = tokenizer(prompt, return_tensors="pt", padding=True, truncation=True)
    with torch.no_grad():  # no autograd graph needed for generation
        outputs = model.generate(
            inputs["input_ids"],
            # Pass the attention mask explicitly: with padding enabled,
            # omitting it can produce wrong generations (transformers warns).
            attention_mask=inputs["attention_mask"],
            # Bound the *generated* length. The original max_length=200
            # counted the prompt too, so long prompts got little or no output.
            max_new_tokens=200,
            num_return_sequences=1,
            pad_token_id=tokenizer.pad_token_id,
        )
    # Decode only the newly generated tokens, not the echoed prompt.
    prompt_len = inputs["input_ids"].shape[1]
    return tokenizer.decode(outputs[0][prompt_len:], skip_special_tokens=True)


# Gradio interface
with gr.Blocks() as demo:
    gr.Markdown("# Введите запрос, и модель ответит.")
    with gr.Row():
        with gr.Column():
            prompt_input = gr.Textbox(label="Ваш запрос", lines=4, placeholder="Введите текст")
        with gr.Column():
            output = gr.Textbox(label="Ответ модели", lines=6, interactive=False)
    submit_button = gr.Button("Сгенерировать")
    submit_button.click(generate_response, inputs=prompt_input, outputs=output)

# Application entry point
if __name__ == "__main__":
    demo.launch()