---
license: apache-2.0
language:
- en
---
```python
import torch

from transformers import AutoModelForCausalLM, AutoTokenizer

# Run on the GPU when one is available; otherwise fall back to the CPU.
device = 'cuda' if torch.cuda.is_available() else 'cpu'

# NOTE(review): trust_remote_code=True executes Python shipped in the model
# repository — only enable it for repositories you trust.
tokenizer = AutoTokenizer.from_pretrained('simpx/noob', trust_remote_code=True)
model = AutoModelForCausalLM.from_pretrained('simpx/noob', trust_remote_code=True).to(device).eval()

# Seed generation with a single zero token and sample up to 100 new tokens
# with gradient tracking disabled.
context = torch.zeros((1, 1), dtype=torch.long, device=device)
with torch.no_grad():
    generated_ids = model.generate(context, max_new_tokens=100)[0].tolist()

print(tokenizer.decode(generated_ids))
```