from fastai.text.all import load_learner


def chat_with_model(model_path="model.pkl", n_words=200, temperature=0.9):
    """Run an interactive chat loop against a saved fastai language model.

    Loads the exported learner once, seeds the conversation with "Hey",
    then alternates: generate a continuation of the current context,
    print it as the bot's reply, and read the next user line as the new
    context. Typing "quit", "exit", or "stop" (case-insensitive), or
    sending EOF / Ctrl-C at the prompt, ends the session.

    Args:
        model_path: Path to the exported learner pickle
            (``learn.export()`` output). Defaults to ``'model.pkl'``.
        n_words: Number of words to generate per reply.
        temperature: Sampling temperature passed to ``predict``;
            higher values produce more varied text.
    """
    learn = load_learner(model_path)
    context = "Hey"
    # Echo the seed prompt once, outside the loop. Echoing inside the
    # loop would reprint every user line immediately after they typed it.
    print(f"You: {context}")
    while True:
        generated = learn.predict(context, n_words, temperature=temperature)
        print(f"Bot: {generated}\n")
        try:
            context = input("You: ")
        except (EOFError, KeyboardInterrupt):
            # Ctrl-D / Ctrl-C at the prompt: exit cleanly, no traceback.
            print()
            break
        if context.strip().lower() in ("quit", "exit", "stop"):
            break


if __name__ == "__main__":
    chat_with_model()