File size: 266 Bytes
566cff7
107c705
 
566cff7
 
107c705
1
2
3
4
5
6
from transformers import BlipProcessor, BlipForQuestionAnswering

def load_model(model_name: str = "Salesforce/blip-vqa-base"):
    """Load a BLIP visual-question-answering processor and model.

    Args:
        model_name: Hugging Face Hub identifier of the BLIP VQA checkpoint.
            Defaults to the base BLIP VQA model.

    Returns:
        A ``(processor, model)`` tuple: the ``BlipProcessor`` for preparing
        image/question inputs and the ``BlipForQuestionAnswering`` model.

    Note:
        Downloads the checkpoint from the Hugging Face Hub on first call
        (network access required); subsequent calls hit the local cache.
    """
    processor = BlipProcessor.from_pretrained(model_name)
    model = BlipForQuestionAnswering.from_pretrained(model_name)
    return processor, model