Fix usage example
Browse files
README.md
CHANGED
|
```diff
@@ -33,9 +33,7 @@ from transformers import AutoTokenizer, AutoModelForCausalLM
 tokenizer = AutoTokenizer.from_pretrained("Metin/gemma-2b-tr")
 model = AutoModelForCausalLM.from_pretrained("Metin/gemma-2b-tr")
 
-
-instruction = "Bugün sinemaya gidemedim çünkü"
-prompt = f"{system_prompt} [INST] {instruction} [/INST]"
+prompt = "Bugün sinemaya gidemedim çünkü"
 input_ids = tokenizer(prompt, return_tensors="pt")
 
 outputs = model.generate(**input_ids)
```