import gradio as gr
from transformers import AutoTokenizer, AutoModelForCausalLM

# Load the pretrained DialoGPT model and tokenizer once at startup.
tokenizer = AutoTokenizer.from_pretrained("microsoft/DialoGPT-large")
model = AutoModelForCausalLM.from_pretrained("microsoft/DialoGPT-large")


def chat_with_model(input_text: str) -> str:
    """Generate a single-turn chatbot reply for *input_text*.

    DialoGPT expects the user utterance terminated by the EOS token; the
    model's continuation after that token is the bot's reply.

    Args:
        input_text: The user's message.

    Returns:
        The model-generated reply (bot text only, no echo of the input).
    """
    # Encode "user message + EOS" — the format DialoGPT was trained on.
    # (The original prepended a literal "You: ", which the model was never
    # trained to see and which degrades generation quality.)
    input_ids = tokenizer.encode(
        input_text + tokenizer.eos_token,
        return_tensors="pt",
        max_length=1024,
        truncation=True,
    )
    response_ids = model.generate(
        input_ids,
        max_length=100,
        num_return_sequences=1,
        no_repeat_ngram_size=2,
        # GPT-2-family models define no pad token; use EOS to avoid the
        # "Setting pad_token_id to eos_token_id" warning and mispadding.
        pad_token_id=tokenizer.eos_token_id,
    )
    # generate() returns prompt + continuation. Slice off the prompt tokens
    # so the reply contains only the bot's text, not an echo of the input.
    reply = tokenizer.decode(
        response_ids[0][input_ids.shape[-1]:],
        skip_special_tokens=True,
    )
    return reply


iface = gr.Interface(
    fn=chat_with_model,
    # Textbox's first positional argument is `value`, not `label` — the
    # original set the default text to "You:" instead of labeling the box.
    inputs=gr.Textbox(label="You:"),
    outputs=gr.Textbox(label="Bot:"),
)

# Only start the web server when run as a script, not on import.
if __name__ == "__main__":
    iface.launch()