# Usage example scraped from the PrunaAI/UnfilteredAI-Promt-generator-bnb-4bit-smashed
# model card on Hugging Face (extraction residue removed).
"""Generate text with the PrunaAI 4-bit-quantized prompt-generator model.

Loads the model and tokenizer from the Hugging Face Hub, runs greedy
generation on a sample prompt, and prints the decoded output.
Requires the `transformers`, `accelerate`, and `bitsandbytes` packages
(the checkpoint is bnb-4bit quantized).
"""
from transformers import AutoModelForCausalLM, AutoTokenizer

# Repo id of the smashed (compressed) checkpoint on the Hub.
MODEL_ID = "PrunaAI/UnfilteredAI-Promt-generator-bnb-4bit-smashed"


def main() -> None:
    """Load the model, generate a completion, and print it."""
    # device_map="auto" places the quantized weights on the available
    # accelerator (falls back to CPU); the original snippet's MODEL_LOAD
    # placeholder was never expanded, leaving `model` undefined.
    model = AutoModelForCausalLM.from_pretrained(MODEL_ID, device_map="auto")
    tokenizer = AutoTokenizer.from_pretrained(MODEL_ID)

    # Tokenize the prompt and move the ids to the model's device so the
    # inputs and weights live on the same device during generation.
    input_ids = tokenizer(
        "What is the color of prunes?,", return_tensors="pt"
    ).to(model.device)["input_ids"]

    outputs = model.generate(input_ids, max_new_tokens=216)
    # The original snippet discarded the decoded string (bare expression);
    # print it so the script produces visible output.
    print(tokenizer.decode(outputs[0]))


if __name__ == "__main__":
    main()