Spaces:
Paused
Paused
Update app.py
Browse files
app.py
CHANGED
|
@@ -69,8 +69,6 @@ tokenizer.padding_side = 'left'
|
|
| 69 |
import os

# Load the PEFT adapter config, the Mistral-7B base model, and then wrap the
# base model with the fine-tuned "Tonic/mistralmed" adapter weights.
#
# SECURITY: the original code hard-coded a Hugging Face access token directly
# in the source (committed to a public repo). A token published this way must
# be treated as compromised — revoke it on huggingface.co and supply a fresh
# one via the HF_TOKEN environment variable (the standard variable the
# huggingface_hub library also honors).
hf_token = os.environ.get("HF_TOKEN")
peft_config = PeftConfig.from_pretrained("Tonic/mistralmed", token=hf_token)
peft_model = MistralForCausalLM.from_pretrained("mistralai/Mistral-7B-v0.1", trust_remote_code=True)
peft_model = PeftModel.from_pretrained(peft_model, "Tonic/mistralmed", token=hf_token)
|
| 72 |
-
# Remove the memory function
|
| 73 |
-
# ... (previous code)
|
| 74 |
|
| 75 |
class ChatBot:
|
| 76 |
def __init__(self):
|
|
@@ -89,7 +87,7 @@ class ChatBot:
|
|
| 89 |
user_input_ids = tokenizer.encode(formatted_input, return_tensors="pt")
|
| 90 |
|
| 91 |
# Generate a response using the PEFT model
|
| 92 |
-
response = peft_model.generate(user_input_ids, max_length=512, pad_token_id=tokenizer.eos_token_id)
|
| 93 |
|
| 94 |
# Decode the generated response to text
|
| 95 |
response_text = tokenizer.decode(response[0], skip_special_tokens=True)
|
|
|
|
| 69 |
import os

# Load the PEFT adapter config, the Mistral-7B base model, and then wrap the
# base model with the fine-tuned "Tonic/mistralmed" adapter weights.
#
# SECURITY: the original code hard-coded a Hugging Face access token directly
# in the source (committed to a public repo). A token published this way must
# be treated as compromised — revoke it on huggingface.co and supply a fresh
# one via the HF_TOKEN environment variable (the standard variable the
# huggingface_hub library also honors).
hf_token = os.environ.get("HF_TOKEN")
peft_config = PeftConfig.from_pretrained("Tonic/mistralmed", token=hf_token)
peft_model = MistralForCausalLM.from_pretrained("mistralai/Mistral-7B-v0.1", trust_remote_code=True)
peft_model = PeftModel.from_pretrained(peft_model, "Tonic/mistralmed", token=hf_token)
|
|
|
|
|
|
|
| 72 |
|
| 73 |
class ChatBot:
|
| 74 |
def __init__(self):
|
|
|
|
| 87 |
user_input_ids = tokenizer.encode(formatted_input, return_tensors="pt")
|
| 88 |
|
| 89 |
# Generate a response using the PEFT model
|
| 90 |
+
response = peft_model.generate(user_input_ids=user_input_ids, max_length=512, pad_token_id=tokenizer.eos_token_id)
|
| 91 |
|
| 92 |
# Decode the generated response to text
|
| 93 |
response_text = tokenizer.decode(response[0], skip_special_tokens=True)
|