Text Generation
Transformers
PyTorch
English
bloom
feature-extraction
integration
text-generation-inference
# Load model directly with the Auto* classes.
# NOTE(review): this repo is tagged "text-generation", but AutoModel loads the
# bare base model (hidden states only, no LM head). Use AutoModelForCausalLM
# instead if you intend to call .generate() — confirm against the checkpoint.
from transformers import AutoTokenizer, AutoModel

tokenizer = AutoTokenizer.from_pretrained("Muennighoff/bloom-tiny-random")
model = AutoModel.from_pretrained("Muennighoff/bloom-tiny-random")
# Use a pipeline as a high-level helper.
# (The original snippet was collapsed onto one commented line, so none of the
# code would actually run when pasted; restored proper line structure.)
from transformers import pipeline

pipe = pipeline("text-generation", model="Muennighoff/bloom-tiny-random")