"""Run a single prompt through the Pruna-smashed UnfilteredAI prompt generator.

Loads the 4-bit bnb-quantized model and its tokenizer from the Hugging Face
Hub, generates up to 216 new tokens for one hard-coded prompt, and prints the
decoded result.

NOTE(review): the original snippet left the model-card template placeholders
`IMPORTS` / `MODEL_LOAD` unexpanded, so `model` was never defined; the load
call below is the standard expansion for this repo id — confirm against the
upstream model card.
"""
from transformers import AutoModelForCausalLM, AutoTokenizer

MODEL_ID = "PrunaAI/UnfilteredAI-Promt-generator-bnb-4bit-smashed"

# Load the quantized model and matching tokenizer from the Hub.
model = AutoModelForCausalLM.from_pretrained(MODEL_ID)
tokenizer = AutoTokenizer.from_pretrained(MODEL_ID)

# Tokenize the prompt and move the tensors to whatever device the model is on.
input_ids = tokenizer("What is the color of prunes?,", return_tensors="pt").to(model.device)["input_ids"]

# Generate up to 216 new tokens; decode and print the full output sequence
# (the original discarded the decoded string at script top level).
outputs = model.generate(input_ids, max_new_tokens=216)
print(tokenizer.decode(outputs[0]))