# Option 1: use a pipeline as a high-level helper.
# NOTE: this downloads the "Crow34/Chloe" checkpoint from the Hugging Face Hub
# on first use (network access required).
from transformers import pipeline

pipe = pipeline("text-generation", model="Crow34/Chloe")

# Option 2: load the tokenizer and model directly (lower-level API;
# an alternative to the pipeline above — both load the same checkpoint).
from transformers import AutoTokenizer, AutoModelForCausalLM

tokenizer = AutoTokenizer.from_pretrained("Crow34/Chloe")
model = AutoModelForCausalLM.from_pretrained("Crow34/Chloe")
# Model-card notes (from the Hugging Face page for Crow34/Chloe):
# - README.md exists but its content is empty.
# - Downloads last month: not reported.
# Gated model: log in with a Hugging Face token that has gated-access
# permission before loading, e.g. run `hf auth login` in your shell.