# Hugging Face Space page residue — the Space's build status was "Runtime error".
"""Chat demo: run a Dolphin-Mistral text-generation pipeline with a blended persona prompt."""

# Prompt that blends conversational style with coding expertise.
# (Leading newline is intentional — it was part of the original template.)
PROMPT_TEMPLATE = """
You are a chatbot trained to combine:
- The conversational style of Dolphin-Mistral
- The coding expertise of DolphinCoder
User: {}
Buddy:"""


def build_prompt(user_input: str) -> str:
    """Return the full model prompt for a single user message."""
    return PROMPT_TEMPLATE.format(user_input)


def main() -> None:
    """Load the model, generate one reply to a sample message, and print it."""
    # Imported lazily so build_prompt() is usable without the heavy dependency.
    from transformers import AutoModelForCausalLM, AutoTokenizer, pipeline

    # Must be a full Hugging Face model ID ("org/name"). The original value,
    # "dolphin-mistral", is an Ollama tag and does not resolve on the Hub —
    # from_pretrained raises, which is the likely cause of the Space's
    # "Runtime error". TODO confirm this is the intended checkpoint.
    model_name = "cognitivecomputations/dolphin-2.6-mistral-7b"

    # Load tokenizer + model, then wrap them in a text-generation pipeline.
    tokenizer = AutoTokenizer.from_pretrained(model_name)
    model = AutoModelForCausalLM.from_pretrained(model_name)
    chatbot = pipeline("text-generation", model=model, tokenizer=tokenizer)

    user_input = "Help me debug this Python code"
    # max_new_tokens bounds only the generated continuation; the original
    # max_length=300 also counts prompt tokens, so a long prompt could leave
    # little or no room for the reply.
    response = chatbot(
        build_prompt(user_input),
        max_new_tokens=300,
        do_sample=True,
        temperature=0.7,
    )
    print(response[0]["generated_text"])


if __name__ == "__main__":
    main()