```python
# Load model directly
from transformers import AutoTokenizer, AutoModelForCausalLM

tokenizer = AutoTokenizer.from_pretrained("circulus/alpaca-base-7b")
model = AutoModelForCausalLM.from_pretrained("circulus/alpaca-base-7b")
```
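Continuing from the snippet above, generation works the standard Transformers way. The Alpaca-style prompt template and the generation settings below are illustrative assumptions, not values taken from this card:

```python
# Illustrative generation call; the prompt format and max_new_tokens
# are assumptions, not settings documented by this model card
prompt = "### Instruction:\nExplain what LoRA is.\n\n### Response:\n"
inputs = tokenizer(prompt, return_tensors="pt")
outputs = model.generate(**inputs, max_new_tokens=128)
print(tokenizer.decode(outputs[0], skip_special_tokens=True))
```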
LoRA weights tuned for 8 epochs; see the sketch below for what such a setup typically looks like.
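For reference, a LoRA fine-tuning setup with the `peft` library usually looks like the sketch below. Only the 8-epoch figure comes from this card; the rank, alpha, target modules, and base checkpoint path are placeholders, not the actual training configuration:

```python
# Sketch of a LoRA setup with peft; every value except the 8 epochs
# mentioned in the card is an assumption
from transformers import AutoModelForCausalLM
from peft import LoraConfig, get_peft_model

base = AutoModelForCausalLM.from_pretrained("path/to/llama-7b-base")  # hypothetical base checkpoint

config = LoraConfig(
    r=8,                                  # assumed LoRA rank
    lora_alpha=16,                        # assumed scaling factor
    target_modules=["q_proj", "v_proj"],  # typical LLaMA attention projections
    task_type="CAUSAL_LM",
)

peft_model = get_peft_model(base, config)
peft_model.print_trainable_parameters()  # prints the small trainable LoRA parameter count
```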
```python
# Use a pipeline as a high-level helper
from transformers import pipeline

pipe = pipeline("text-generation", model="circulus/alpaca-base-7b")
```
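A call to the pipeline then returns a list of generated completions. The prompt and `max_new_tokens` value here are arbitrary illustrative choices:

```python
# Illustrative pipeline call; prompt and max_new_tokens are assumptions
result = pipe("### Instruction:\nName three uses of LoRA.\n\n### Response:\n",
              max_new_tokens=64)
print(result[0]["generated_text"])
```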