add pipeline
#6
by
not-lain
- opened
- MyPipe.py +32 -0
- config.json +22 -3
- generation_config.json +1 -1
MyPipe.py
ADDED
|
@@ -0,0 +1,32 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
from transformers.pipelines import PIPELINE_REGISTRY
|
| 2 |
+
from transformers import Pipeline, AutoModelForCausalLM, CodeGenTokenizerFast as Tokenizer
|
| 3 |
+
from PIL import Image
|
| 4 |
+
from typing import Union
|
| 5 |
+
|
| 6 |
+
class VQA(Pipeline):
    """Custom visual-question-answering pipeline for the moondream1 model.

    Plugs the moondream image encoder + answer generator into the
    transformers Pipeline API (preprocess -> _forward -> postprocess),
    registered via the "custom_pipelines" entry in config.json.
    """

    def __init__(self, **kwargs):
        # Cooperative super() call instead of hard-coding the parent class;
        # Pipeline.__init__(self, ...) would break under multiple inheritance
        # and skip any intermediate initializers.
        super().__init__(**kwargs)
        # NOTE(review): the tokenizer repo is hard-coded, so this pipeline
        # assumes it is always paired with vikhyatk/moondream1 weights —
        # confirm if other checkpoints should be supported.
        self.tokenizer = Tokenizer.from_pretrained("vikhyatk/moondream1")

    def _sanitize_parameters(self, **kwargs):
        """Split caller kwargs into per-stage parameter dicts.

        Returns:
            Tuple of (preprocess_params, forward_params, postprocess_params);
            only the "question" kwarg is routed, to the forward step.
        """
        forward_params = {}
        if "question" in kwargs:
            forward_params["question"] = kwargs["question"]
        return {}, forward_params, {}

    def preprocess(self, inputs: Union[str, Image.Image]):
        """Accept either an image file path or an already-loaded PIL image."""
        if isinstance(inputs, str):
            return Image.open(inputs)
        return inputs

    def _forward(self, inputs, question=None):
        """Encode the image and generate an answer to `question`.

        Args:
            inputs: PIL image produced by `preprocess`.
            question: the question string routed by `_sanitize_parameters`.

        Raises:
            ValueError: if the caller never supplied a question — previously
                this surfaced as an opaque TypeError from the pipeline runner.
        """
        if question is None:
            raise ValueError("The VQA pipeline requires a 'question' argument.")
        enc_image = self.model.encode_image(inputs)
        return self.model.answer_question(enc_image, question, self.tokenizer)

    def postprocess(self, out):
        """answer_question already returns the final string; pass it through."""
        return out
|
config.json
CHANGED
|
@@ -1,15 +1,34 @@
|
|
| 1 |
{
|
|
|
|
| 2 |
"architectures": [
|
| 3 |
"Moondream"
|
| 4 |
],
|
| 5 |
"auto_map": {
|
| 6 |
-
"AutoConfig": "configuration_moondream.MoondreamConfig",
|
| 7 |
-
"AutoModelForCausalLM": "moondream.Moondream"
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 8 |
},
|
| 9 |
"model_type": "moondream1",
|
| 10 |
"phi_config": {
|
| 11 |
"model_type": "phi-msft"
|
| 12 |
},
|
| 13 |
"torch_dtype": "float32",
|
| 14 |
-
"transformers_version": "4.
|
| 15 |
}
|
|
|
|
| 1 |
{
|
| 2 |
+
"_name_or_path": "vikhyatk/moondream1",
|
| 3 |
"architectures": [
|
| 4 |
"Moondream"
|
| 5 |
],
|
| 6 |
"auto_map": {
|
| 7 |
+
"AutoConfig": "vikhyatk/moondream1--configuration_moondream.MoondreamConfig",
|
| 8 |
+
"AutoModelForCausalLM": "vikhyatk/moondream1--moondream.Moondream"
|
| 9 |
+
},
|
| 10 |
+
"custom_pipelines": {
|
| 11 |
+
"visual-question-answering": {
|
| 12 |
+
"default": {
|
| 13 |
+
"model": {
|
| 14 |
+
"pt": [
|
| 15 |
+
"vikhyatk/moondream1",
|
| 16 |
+
"main"
|
| 17 |
+
]
|
| 18 |
+
}
|
| 19 |
+
},
|
| 20 |
+
"impl": "MyPipe.VQA",
|
| 21 |
+
"pt": [
|
| 22 |
+
"AutoModelForCausalLM"
|
| 23 |
+
],
|
| 24 |
+
"tf": [],
|
| 25 |
+
"type": "multimodal"
|
| 26 |
+
}
|
| 27 |
},
|
| 28 |
"model_type": "moondream1",
|
| 29 |
"phi_config": {
|
| 30 |
"model_type": "phi-msft"
|
| 31 |
},
|
| 32 |
"torch_dtype": "float32",
|
| 33 |
+
"transformers_version": "4.35.2"
|
| 34 |
}
|
generation_config.json
CHANGED
|
@@ -1,4 +1,4 @@
|
|
| 1 |
{
|
| 2 |
"_from_model_config": true,
|
| 3 |
-
"transformers_version": "4.
|
| 4 |
}
|
|
|
|
| 1 |
{
|
| 2 |
"_from_model_config": true,
|
| 3 |
+
"transformers_version": "4.35.2"
|
| 4 |
}
|