model-dataset commited on
Commit
2997597
·
verified ·
1 Parent(s): 2193559

Upload usage-image-caption.py

Browse files
Files changed (1) hide show
  1. usage-image-caption.py +16 -0
usage-image-caption.py ADDED
@@ -0,0 +1,16 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
"""Caption an image with the ``norwoodsystems/image-caption`` BLIP model.

Usage:
    python usage-image-caption.py <image_path>

Loads the BLIP processor and conditional-generation model from the Hugging
Face Hub, runs sampled caption generation on the given image, and prints the
decoded caption to stdout.
"""
import sys

import torch
from PIL import Image
from transformers import BlipProcessor, BlipForConditionalGeneration


def main() -> None:
    """Parse argv, caption the image, and print the result."""
    # Fail fast with a usage message instead of an IndexError when the
    # image path argument is missing (or extra arguments are supplied).
    if len(sys.argv) != 2:
        print(f"Usage: {sys.argv[0]} <image_path>", file=sys.stderr)
        sys.exit(1)
    image_path = sys.argv[1]

    processor = BlipProcessor.from_pretrained("norwoodsystems/image-caption")
    model = BlipForConditionalGeneration.from_pretrained(
        "norwoodsystems/image-caption", use_safetensors=True
    )

    # Force RGB: guards against grayscale / RGBA / palette images, which the
    # processor would otherwise reject or mishandle.
    raw_image = Image.open(image_path).convert("RGB")
    inputs = processor(images=raw_image, return_tensors="pt")

    # Inference only — skip autograd bookkeeping.
    with torch.no_grad():
        generated_ids = model.generate(
            **inputs, do_sample=True, top_p=0.9, temperature=1.0
        )

    description = processor.decode(generated_ids[0], skip_special_tokens=True)
    print("Description:", description)


if __name__ == "__main__":
    main()