Update with commit 4854dbf9da4086731256496cf4a8e4ea45d4d54e
Browse files. See: https://github.com/huggingface/transformers/commit/4854dbf9da4086731256496cf4a8e4ea45d4d54e
- frameworks.json +3 -0
- pipeline_tags.json +4 -0
frameworks.json
CHANGED
|
@@ -143,6 +143,9 @@
|
|
| 143 |
{"model_type":"glm_image","pytorch":true,"processor":"AutoProcessor"}
|
| 144 |
{"model_type":"glm_image_text","pytorch":true,"processor":"AutoTokenizer"}
|
| 145 |
{"model_type":"glm_image_vision","pytorch":true,"processor":"AutoTokenizer"}
|
|
|
|
|
|
|
|
|
|
| 146 |
{"model_type":"glmasr","pytorch":true,"processor":"AutoProcessor"}
|
| 147 |
{"model_type":"glpn","pytorch":true,"processor":"AutoImageProcessor"}
|
| 148 |
{"model_type":"got_ocr2","pytorch":true,"processor":"AutoProcessor"}
|
|
|
|
| 143 |
{"model_type":"glm_image","pytorch":true,"processor":"AutoProcessor"}
|
| 144 |
{"model_type":"glm_image_text","pytorch":true,"processor":"AutoTokenizer"}
|
| 145 |
{"model_type":"glm_image_vision","pytorch":true,"processor":"AutoTokenizer"}
|
| 146 |
+
{"model_type":"glm_ocr","pytorch":true,"processor":"AutoTokenizer"}
|
| 147 |
+
{"model_type":"glm_ocr_text","pytorch":true,"processor":"AutoTokenizer"}
|
| 148 |
+
{"model_type":"glm_ocr_vision","pytorch":true,"processor":"AutoTokenizer"}
|
| 149 |
{"model_type":"glmasr","pytorch":true,"processor":"AutoProcessor"}
|
| 150 |
{"model_type":"glpn","pytorch":true,"processor":"AutoImageProcessor"}
|
| 151 |
{"model_type":"got_ocr2","pytorch":true,"processor":"AutoProcessor"}
|
pipeline_tags.json
CHANGED
|
@@ -546,6 +546,10 @@
|
|
| 546 |
{"model_class":"GlmImageVQVAE","pipeline_tag":"feature-extraction","auto_class":"AutoModel"}
|
| 547 |
{"model_class":"GlmImageVisionModel","pipeline_tag":"feature-extraction","auto_class":"AutoModel"}
|
| 548 |
{"model_class":"GlmModel","pipeline_tag":"feature-extraction","auto_class":"AutoModel"}
|
|
|
|
|
|
|
|
|
|
|
|
|
| 549 |
{"model_class":"GotOcr2ForConditionalGeneration","pipeline_tag":"image-to-text","auto_class":"AutoModelForImageTextToText"}
|
| 550 |
{"model_class":"GotOcr2Model","pipeline_tag":"feature-extraction","auto_class":"AutoModel"}
|
| 551 |
{"model_class":"GptOssForCausalLM","pipeline_tag":"text-generation","auto_class":"AutoModelForCausalLM"}
|
|
|
|
| 546 |
{"model_class":"GlmImageVQVAE","pipeline_tag":"feature-extraction","auto_class":"AutoModel"}
|
| 547 |
{"model_class":"GlmImageVisionModel","pipeline_tag":"feature-extraction","auto_class":"AutoModel"}
|
| 548 |
{"model_class":"GlmModel","pipeline_tag":"feature-extraction","auto_class":"AutoModel"}
|
| 549 |
+
{"model_class":"GlmOcrForConditionalGeneration","pipeline_tag":"image-to-text","auto_class":"AutoModelForImageTextToText"}
|
| 550 |
+
{"model_class":"GlmOcrModel","pipeline_tag":"feature-extraction","auto_class":"AutoModel"}
|
| 551 |
+
{"model_class":"GlmOcrTextModel","pipeline_tag":"feature-extraction","auto_class":"AutoModel"}
|
| 552 |
+
{"model_class":"GlmOcrVisionModel","pipeline_tag":"feature-extraction","auto_class":"AutoModel"}
|
| 553 |
{"model_class":"GotOcr2ForConditionalGeneration","pipeline_tag":"image-to-text","auto_class":"AutoModelForImageTextToText"}
|
| 554 |
{"model_class":"GotOcr2Model","pipeline_tag":"feature-extraction","auto_class":"AutoModel"}
|
| 555 |
{"model_class":"GptOssForCausalLM","pipeline_tag":"text-generation","auto_class":"AutoModelForCausalLM"}
|