my-space / app.py
fexeak
feat: 添加transformers模型加载和推理功能
cee67fa
raw
history blame
373 Bytes
"""Demo script: generate text with the PrunaAI 4-bit-smashed prompt-generator model.

Loads the quantized causal-LM and its tokenizer, runs a single prompt through
``model.generate``, and prints the decoded completion.
"""
from transformers import AutoModelForCausalLM, AutoTokenizer

# Single source of truth for the checkpoint id (was repeated / half-missing:
# the original file had placeholder lines "IMPORTS" / "MODEL_LOAD" and never
# actually defined `model` before using it).
MODEL_ID = "PrunaAI/UnfilteredAI-Promt-generator-bnb-4bit-smashed"

# device_map="auto" lets transformers place the 4-bit weights on whatever
# device is available (GPU if present, otherwise CPU).
model = AutoModelForCausalLM.from_pretrained(MODEL_ID, device_map="auto")
tokenizer = AutoTokenizer.from_pretrained(MODEL_ID)

# Tokenize the prompt and move the tensors to the model's device before generation.
input_ids = tokenizer("What is the color of prunes?,", return_tensors="pt").to(model.device)["input_ids"]
outputs = model.generate(input_ids, max_new_tokens=216)

# The original script decoded the output but discarded it; print it so the
# script actually shows its result.
print(tokenizer.decode(outputs[0]))