Working but using google/gemma-2b-it model
Browse files
app.py
CHANGED
|
@@ -4,8 +4,8 @@ from transformers import AutoModelForCausalLM, AutoTokenizer
|
|
| 4 |
|
| 5 |
# Load model and tokenizer
|
| 6 |
print("Loading DialoGPT-medium...")
|
| 7 |
-
tokenizer = AutoTokenizer.from_pretrained("microsoft/DialoGPT-medium")
|
| 8 |
-
model = AutoModelForCausalLM.from_pretrained("microsoft/DialoGPT-medium")
|
| 9 |
|
| 10 |
# Add pad token if it doesn't exist
|
| 11 |
if tokenizer.pad_token is None:
|
|
|
|
| 4 |
|
| 5 |
# Load model and tokenizer
|
| 6 |
print("Loading DialoGPT-medium...")
|
| 7 |
+
tokenizer = AutoTokenizer.from_pretrained("google/gemma-2b-it")
|
| 8 |
+
model = AutoModelForCausalLM.from_pretrained("google/gemma-2b-it")
|
| 9 |
|
| 10 |
# Add pad token if it doesn't exist
|
| 11 |
if tokenizer.pad_token is None:
|