# Interactive generation REPL for a causal LM saved in the local "out" directory.
"""Minimal interactive REPL: generate text with a fine-tuned causal LM.

Loads a tokenizer and model from the local ``out`` directory, then reads
prompts from stdin in a loop and prints the model's continuation.
"""
import torch
from transformers import AutoTokenizer, AutoModelForCausalLM


def main() -> None:
    """Run the read → generate → print loop until EOF or Ctrl-C."""
    tok = AutoTokenizer.from_pretrained("out")
    model = AutoModelForCausalLM.from_pretrained("out")
    model.eval()  # inference only — disable dropout etc.
    while True:
        try:
            q = input("> ")
        except (EOFError, KeyboardInterrupt):
            break  # exit cleanly on Ctrl-D / Ctrl-C instead of a traceback
        if not q.strip():
            continue  # nothing to generate from an empty prompt
        x = tok(q, return_tensors="pt")
        # generate() needs no gradients; no_grad avoids building the autograd
        # graph and cuts peak memory during inference.
        with torch.no_grad():
            y = model.generate(**x, max_new_tokens=40)
        print(tok.decode(y[0], skip_special_tokens=True))


if __name__ == "__main__":
    main()