"""Minimal interactive chat REPL for the FastChat-T5 model.

Loads lmsys/fastchat-t5-3b-v1.0 and loops forever: read a line from the
user, generate a reply with the model, print it.

NOTE(review): this reconstructs the post-commit app.py from a scraped
diff view; the surrounding Hugging Face page chrome has been dropped.
"""
import torch
import transformers
from transformers import AutoModelForSeq2SeqLM, AutoTokenizer

# Load the FastChat model and its tokenizer.
# fastchat-t5 is a T5-based *encoder-decoder* checkpoint, so it must be
# loaded with AutoModelForSeq2SeqLM — AutoModelForCausalLM (as in the
# original commit) cannot load it.
MODEL_NAME = "lmsys/fastchat-t5-3b-v1.0"
tokenizer = AutoTokenizer.from_pretrained(MODEL_NAME)
model = AutoModelForSeq2SeqLM.from_pretrained(MODEL_NAME)
model.eval()  # inference only; disable dropout

# Start the conversation loop
while True:
    try:
        user_input = input("You: ")
    except (EOFError, KeyboardInterrupt):
        break  # exit cleanly on Ctrl-D / Ctrl-C instead of a traceback

    # Generate a response from the model.
    # generate() has no `prompt=` keyword: the user text must be tokenized
    # into input_ids. For a seq2seq model the decoder start token is chosen
    # by generate() itself, so the original hand-built
    # torch.tensor([model.bos_token_id]) (an attribute that does not exist
    # on the model object) is unnecessary and wrong.
    input_ids = tokenizer(user_input, return_tensors="pt").input_ids
    with torch.no_grad():
        output_ids = model.generate(input_ids, max_new_tokens=256)

    # Decode token ids back to text — the original printed
    # model_response[0][1:], i.e. a raw tensor of token ids, not words.
    model_response = tokenizer.decode(output_ids[0], skip_special_tokens=True)

    # Print the response to the user
    print("Assistant:", model_response)