memegpt committed on
Commit
7653865
·
1 Parent(s): 0648d5e

Update app.py

Browse files
Files changed (1) hide show
  1. app.py +16 -14
app.py CHANGED
@@ -1,20 +1,22 @@
1
- from PIL import Image
2
- import requests
3
  import gradio as gr
4
- from io import BytesIO
5
 
6
- from transformers import BlipProcessor, BlipForConditionalGeneration
7
 
8
- model_id = "Salesforce/blip-image-captioning-base"
9
 
10
- model = BlipForConditionalGeneration.from_pretrained(model_id)
11
- processor = BlipProcessor.from_pretrained(model_id)
12
 
13
- def launch(image):
14
- #image = Image.open(BytesIO(input_image)).convert('RGB')
15
- inputs = processor(image, return_tensors="pt")
16
- out = model.generate(**inputs)
17
- return processor.decode(out[0], skip_special_tokens=True)
18
 
19
- iface = gr.Interface(launch, inputs=gr.inputs.Image(), outputs="text")
20
- iface.launch()
 
 
 
"""Gradio Space entry point: image captioning with Salesforce BLIP (base).

Instead of downloading the checkpoint and running BlipProcessor /
BlipForConditionalGeneration locally (the approach this commit replaces),
the app loads a ready-made captioning Interface straight from the
Hugging Face Hub model repo, which keeps the Space lightweight — inference
is delegated to the hosted model.
"""
import gradio as gr

# NOTE(review): `gr.Interface.load` is deprecated in Gradio 3.x and removed
# in 4.x in favor of `gr.load(...)` — confirm the Gradio version pinned in
# this Space's requirements before migrating the call.
# `.launch()` at module level is intentional: Spaces execute app.py as a
# script and the server must start on run.
gr.Interface.load("models/Salesforce/blip-image-captioning-base").launch()