"""
CLIP (Flux variant) zero-shot image-classification on Neuron.
Flux pipeline uses: openai/clip-vit-large-patch14
"""
import argparse
import logging
import time

import torch
from transformers import CLIPProcessor, CLIPModel
from datasets import load_dataset
import torch_neuronx

logging.basicConfig(level=logging.INFO)
logger = logging.getLogger(__name__)
|
def main():
    """Run CLIP zero-shot image classification compiled for AWS Neuron.

    Loads the CLIP checkpoint used by the Flux pipeline, scores a sample
    image against a fixed set of candidate captions, compiles the model's
    forward pass with the Neuron ``torch.compile`` backend, and logs the
    warmup (compilation) time, steady-state run time, per-caption
    probabilities, and the predicted label.

    CLI flags:
        --model       CLIP checkpoint name on the Hugging Face Hub.
        --batch-size  Number of copies of the sample image per batch (>= 1).
    """
    parser = argparse.ArgumentParser(
        description="CLIP (Flux checkpoint) zero-shot image classification with torch.compile on Neuron"
    )
    parser.add_argument(
        "--model",
        type=str,
        default="openai/clip-vit-large-patch14",
        help="CLIP model name on Hugging Face Hub",
    )
    parser.add_argument("--batch-size", type=int, default=1, help="Batch size")
    args = parser.parse_args()

    if args.batch_size < 1:
        parser.error("--batch-size must be >= 1")

    torch.set_default_dtype(torch.float32)
    torch.manual_seed(42)

    # Single sample image from the HF demo dataset; replicate it to honor
    # --batch-size (the flag was previously parsed but never used).
    dataset = load_dataset("huggingface/cats-image")
    image = dataset["test"]["image"][0]
    images = [image] * args.batch_size

    processor = CLIPProcessor.from_pretrained(args.model)
    model = CLIPModel.from_pretrained(
        args.model, torch_dtype=torch.float32, attn_implementation="eager"
    ).eval()

    texts = ["a photo of a cat", "a photo of a dog", "a photo of a bird"]
    inputs = processor(text=texts, images=images, return_tensors="pt", padding=True)

    # Eager reference pass before compilation (sanity check that the model
    # and inputs are consistent).
    with torch.no_grad():
        outputs = model(**inputs)

    model.forward = torch.compile(model.forward, backend="neuron", fullgraph=False)

    # First compiled call triggers Neuron graph compilation; time it
    # separately from the steady-state run.
    warmup_start = time.time()
    with torch.no_grad():
        _ = model(**inputs)
    warmup_time = time.time() - warmup_start

    # Steady-state timed inference.
    run_start = time.time()
    with torch.no_grad():
        outputs = model(**inputs)
    run_time = time.time() - run_start

    # logits_per_image has shape (batch, num_texts); softmax over the text
    # axis gives per-image caption probabilities.
    logits_per_image = outputs.logits_per_image
    probs = logits_per_image.softmax(dim=-1)
    # argmax per image (row 0), not over the flattened tensor — a flat
    # argmax would index out of `texts` for batch sizes > 1.
    best_idx = int(probs[0].argmax().item())
    best_label = texts[best_idx]

    logger.info("Warmup: %.2f s, Run: %.4f s", warmup_time, run_time)
    logger.info("Probabilities: %s", probs.tolist())
    logger.info("Predicted label: %s", best_label)
| |
|
| |
|
# Script guard: run the benchmark only when executed directly, not on import.
if __name__ == "__main__":
    main()