memegpt committed on
Commit
c5e708d
·
1 Parent(s): 7653865

Update app.py

Browse files
Files changed (1) hide show
  1. app.py +17 -16
app.py CHANGED
@@ -1,22 +1,23 @@
1
- #from PIL import Image
2
- #import requests
3
  import gradio as gr
4
- #from io import BytesIO
5
 
6
- #from transformers import BlipProcessor, BlipForConditionalGeneration
7
 
8
- #model_id = "Salesforce/blip-image-captioning-base"
9
 
10
- #model = BlipForConditionalGeneration.from_pretrained(model_id)
11
- #processor = BlipProcessor.from_pretrained(model_id)
12
 
13
- #def launch(image):
14
- # #image = Image.open(BytesIO(input_image)).convert('RGB')
15
- # inputs = processor(image, return_tensors="pt")
16
- # out = model.generate(**inputs)
17
- # return processor.decode(out[0], skip_special_tokens=True)
 
 
 
18
 
19
- #iface = gr.Interface(launch, inputs=gr.inputs.Image(), outputs="text")
20
- #iface.launch()
21
-
22
- gr.Interface.load("models/Salesforce/blip-image-captioning-base").launch()
 
1
+ from PIL import Image
2
+ import requests
3
  import gradio as gr
4
+ from io import BytesIO
5
 
6
+ from transformers import BlipProcessor, BlipForConditionalGeneration
7
 
8
+ model_id = "Salesforce/blip-image-captioning-base"
9

10
+ # Load the BLIP captioning model and its processor once at import time,
+ # so each Gradio request only pays for inference, not model loading.
+ model = BlipForConditionalGeneration.from_pretrained(model_id)
11
+ processor = BlipProcessor.from_pretrained(model_id)
12
 
13
def launch(image):
    """Generate a text caption for an image with the BLIP model.

    Parameters:
        image: image supplied by the Gradio ``Image`` input component
            (PIL image or array accepted by ``BlipProcessor``).

    Returns:
        str: the decoded caption, with special tokens stripped.
    """
    # BUGFIX: the previous body read `data.pop("inputs", data)` and decoded a
    # base64 payload — but neither `data` nor `base64` existed in this module,
    # so every call raised NameError. Those lines were leftovers from an
    # inference-endpoint handler; Gradio hands us the image object directly.
    inputs = processor(image, return_tensors="pt")
    out = model.generate(**inputs)
    return processor.decode(out[0], skip_special_tokens=True)
21
 
22
# FIX: `gr.inputs.Image()` is the deprecated Gradio 1.x/2.x namespace and was
# removed in Gradio 3+; use the top-level component instead. `type="pil"`
# makes the callback receive a PIL image, which BlipProcessor accepts directly.
iface = gr.Interface(launch, inputs=gr.Image(type="pil"), outputs="text")
iface.launch()