amongusrickroll68's picture
Update app.py
3f293be
raw
history blame contribute delete
472 Bytes
from transformers import GPTNeoForCausalLM, GPT2Tokenizer

# Single source of truth for the checkpoint name (was duplicated inline).
MODEL_NAME = "EleutherAI/gpt-neo-2.7B"


def generate_text(prompt: str = "The quick brown fox", max_length: int = 50) -> str:
    """Generate a sampled text continuation of *prompt* with GPT-Neo 2.7B.

    Parameters
    ----------
    prompt : str, optional
        Seed text to continue (default matches the original script).
    max_length : int, optional
        Total output length in tokens, prompt included (default 50).

    Returns
    -------
    str
        The decoded generation, including the prompt, with special
        tokens stripped.

    Notes
    -----
    ``do_sample=True`` makes the output stochastic — successive calls
    return different continuations. Loading this checkpoint downloads
    ~10 GB on first use and needs substantial RAM.
    """
    model = GPTNeoForCausalLM.from_pretrained(MODEL_NAME)
    tokenizer = GPT2Tokenizer.from_pretrained(MODEL_NAME)

    input_ids = tokenizer(prompt, return_tensors="pt").input_ids
    # GPT-2-style tokenizers have no pad token; passing eos as
    # pad_token_id explicitly silences the warning generate() would
    # otherwise emit on every call.
    output_ids = model.generate(
        input_ids=input_ids,
        max_length=max_length,
        do_sample=True,
        pad_token_id=tokenizer.eos_token_id,
    )
    return tokenizer.decode(output_ids[0], skip_special_tokens=True)


if __name__ == "__main__":
    # Guard keeps the multi-GB model load from running on import.
    print(generate_text())