How to use humane-lab/CFT-CLIP with the Transformers library:
# Option 1: use a pipeline as a high-level helper.
from transformers import pipeline

pipe = pipeline("zero-shot-image-classification", model="humane-lab/CFT-CLIP")
pipe(
    "https://huggingface.co/datasets/huggingface/documentation-images/resolve/main/hub/parrots.png",
    candidate_labels=["animals", "humans", "landscape"],
)

# Option 2: load the processor and model directly.
from transformers import AutoProcessor, AutoModelForZeroShotImageClassification

processor = AutoProcessor.from_pretrained("humane-lab/CFT-CLIP")
model = AutoModelForZeroShotImageClassification.from_pretrained("humane-lab/CFT-CLIP")