"""Gradio chat UI for a locally fine-tuned seq2seq ABA-therapy Q&A model."""

import gradio as gr
from transformers import AutoTokenizer, AutoModelForSeq2SeqLM

# Load your LOCAL model (update this path)
MODEL_PATH = "./aba-retrained-final"

# Initialize components once at import time so each request reuses them.
tokenizer = AutoTokenizer.from_pretrained(MODEL_PATH)
model = AutoModelForSeq2SeqLM.from_pretrained(MODEL_PATH)


def ask_aba(question):
    """Answer an ABA-therapy question with the local seq2seq model.

    Args:
        question: Free-text question from the UI textbox.

    Returns:
        The model's decoded answer string (special tokens stripped).
    """
    # The "question: " prefix matches the prompt format the model was
    # presumably fine-tuned on — TODO confirm against training data.
    inputs = tokenizer(f"question: {question}", return_tensors="pt")
    # NOTE: max_length caps prompt + generated tokens combined; kept for
    # backward compatibility (max_new_tokens would cap only the output).
    outputs = model.generate(**inputs, max_length=150)
    return tokenizer.decode(outputs[0], skip_special_tokens=True)


# Gradio interface: a question box wired to ask_aba, answer shown alongside.
with gr.Blocks() as demo:
    gr.Markdown("# ABA Therapy Assistant")
    with gr.Row():
        question = gr.Textbox(label="Ask about ABA")
        output = gr.Textbox(label="Answer")
    gr.Examples(
        examples=["What is positive reinforcement?", "How to reduce tantrums?"],
        inputs=question,
    )
    # Pressing Enter in the question box triggers inference.
    question.submit(ask_aba, inputs=question, outputs=output)

# Guard the server launch so importing this module doesn't start the app.
if __name__ == "__main__":
    demo.launch()