Update README.md
Browse files
README.md
CHANGED
|
@@ -58,7 +58,7 @@ To download NeXGen use this code:
|
|
| 58 |
from transformers import AutoTokenizer, AutoModelForCausalLM
|
| 59 |
|
| 60 |
# Specify the model name from Hugging Face Model Hub
|
| 61 |
-
model_name = "
|
| 62 |
tokenizer = AutoTokenizer.from_pretrained(model_name)
|
| 63 |
model = AutoModelForCausalLM.from_pretrained(model_name)
|
| 64 |
|
|
@@ -88,7 +88,7 @@ prompt = "Your prompt here"
|
|
| 88 |
generated_text = generate_text(prompt, max_length=200)
|
| 89 |
|
| 90 |
print("Generated Text:")
|
| 91 |
-
print(
|
| 92 |
```
|
| 93 |
|
| 94 |
## Limitations
|
|
|
|
| 58 |
from transformers import AutoTokenizer, AutoModelForCausalLM
|
| 59 |
|
| 60 |
# Specify the model name from Hugging Face Model Hub
|
| 61 |
+
model_name = "CrabfishAI/NeXGen-small"
|
| 62 |
tokenizer = AutoTokenizer.from_pretrained(model_name)
|
| 63 |
model = AutoModelForCausalLM.from_pretrained(model_name)
|
| 64 |
|
|
|
|
| 88 |
generated_text = generate_text(prompt, max_length=200)
|
| 89 |
|
| 90 |
print("Generated Text:")
|
| 91 |
+
print(generated_text)
|
| 92 |
```
|
| 93 |
|
| 94 |
## Limitations
|