# Load the model directly with the Auto API.
# NOTE: loading downloads weights from the Hugging Face Hub (network I/O).
from transformers import AutoModel

# dtype="auto" lets transformers pick the checkpoint's native precision.
# NOTE(review): older transformers versions call this kwarg `torch_dtype` — confirm
# against the installed version.
model = AutoModel.from_pretrained("JoshKeesee/Alfred-Indigo", dtype="auto")
This model has been pushed to the Hub using the `PyTorchModelHubMixin` integration:
- Library: https://github.com/JoshKeesee/Alfred-Indigo
- Docs: [More Information Needed]
- Downloads last month: 28
# Use a pipeline as a high-level helper.
# NOTE: constructing the pipeline downloads the model from the Hub (network I/O).
from transformers import pipeline

pipe = pipeline("text-generation", model="JoshKeesee/Alfred-Indigo")

# Chat-style input: a list of {"role", "content"} message dicts.
messages = [
    {"role": "user", "content": "Who are you?"},
]
pipe(messages)