Update with commit 01485ceec3d2e0a9a957ec86f0a10096cecb4a94
Browse files. See: https://github.com/huggingface/transformers/commit/01485ceec3d2e0a9a957ec86f0a10096cecb4a94
- frameworks.json +1 -1
- pipeline_tags.json +6 -0
frameworks.json
CHANGED
|
@@ -87,7 +87,7 @@
|
|
| 87 |
{"model_type":"xglm","pytorch":true,"tensorflow":false,"flax":true,"processor":"AutoTokenizer"}
|
| 88 |
{"model_type":"xlm","pytorch":true,"tensorflow":true,"flax":false,"processor":"AutoTokenizer"}
|
| 89 |
{"model_type":"xlm-prophetnet","pytorch":true,"tensorflow":false,"flax":false,"processor":"AutoTokenizer"}
|
| 90 |
-
{"model_type":"xlm-roberta","pytorch":true,"tensorflow":true,"flax":false,"processor":"AutoTokenizer"}
|
| 91 |
{"model_type":"xlm-roberta-xl","pytorch":true,"tensorflow":false,"flax":false,"processor":"AutoTokenizer"}
|
| 92 |
{"model_type":"xlnet","pytorch":true,"tensorflow":true,"flax":false,"processor":"AutoTokenizer"}
|
| 93 |
{"model_type":"yoso","pytorch":true,"tensorflow":false,"flax":false,"processor":"AutoTokenizer"}
|
|
|
|
| 87 |
{"model_type":"xglm","pytorch":true,"tensorflow":false,"flax":true,"processor":"AutoTokenizer"}
|
| 88 |
{"model_type":"xlm","pytorch":true,"tensorflow":true,"flax":false,"processor":"AutoTokenizer"}
|
| 89 |
{"model_type":"xlm-prophetnet","pytorch":true,"tensorflow":false,"flax":false,"processor":"AutoTokenizer"}
|
| 90 |
+
{"model_type":"xlm-roberta","pytorch":true,"tensorflow":true,"flax":true,"processor":"AutoTokenizer"}
|
| 91 |
{"model_type":"xlm-roberta-xl","pytorch":true,"tensorflow":false,"flax":false,"processor":"AutoTokenizer"}
|
| 92 |
{"model_type":"xlnet","pytorch":true,"tensorflow":true,"flax":false,"processor":"AutoTokenizer"}
|
| 93 |
{"model_type":"yoso","pytorch":true,"tensorflow":false,"flax":false,"processor":"AutoTokenizer"}
|
pipeline_tags.json
CHANGED
|
@@ -211,6 +211,12 @@
|
|
| 211 |
{"model_class":"FlaxWav2Vec2Model","pipeline_tag":"feature-extraction","auto_class":"Flax_AutoModel"}
|
| 212 |
{"model_class":"FlaxXGLMForCausalLM","pipeline_tag":"text-generation","auto_class":"Flax_AutoModelForCausalLM"}
|
| 213 |
{"model_class":"FlaxXGLMModel","pipeline_tag":"feature-extraction","auto_class":"Flax_AutoModel"}
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 214 |
{"model_class":"FunnelBaseModel","pipeline_tag":"feature-extraction","auto_class":"AutoModel"}
|
| 215 |
{"model_class":"FunnelForMaskedLM","pipeline_tag":"fill-mask","auto_class":"AutoModelForMaskedLM"}
|
| 216 |
{"model_class":"FunnelForMultipleChoice","pipeline_tag":"multiple-choice","auto_class":"AutoModelForMultipleChoice"}
|
|
|
|
| 211 |
{"model_class":"FlaxWav2Vec2Model","pipeline_tag":"feature-extraction","auto_class":"Flax_AutoModel"}
|
| 212 |
{"model_class":"FlaxXGLMForCausalLM","pipeline_tag":"text-generation","auto_class":"Flax_AutoModelForCausalLM"}
|
| 213 |
{"model_class":"FlaxXGLMModel","pipeline_tag":"feature-extraction","auto_class":"Flax_AutoModel"}
|
| 214 |
+
{"model_class":"FlaxXLMRobertaForMaskedLM","pipeline_tag":"fill-mask","auto_class":"Flax_AutoModelForMaskedLM"}
|
| 215 |
+
{"model_class":"FlaxXLMRobertaForMultipleChoice","pipeline_tag":"multiple-choice","auto_class":"Flax_AutoModelForMultipleChoice"}
|
| 216 |
+
{"model_class":"FlaxXLMRobertaForQuestionAnswering","pipeline_tag":"question-answering","auto_class":"Flax_AutoModelForQuestionAnswering"}
|
| 217 |
+
{"model_class":"FlaxXLMRobertaForSequenceClassification","pipeline_tag":"text-classification","auto_class":"Flax_AutoModelForSequenceClassification"}
|
| 218 |
+
{"model_class":"FlaxXLMRobertaForTokenClassification","pipeline_tag":"token-classification","auto_class":"Flax_AutoModelForTokenClassification"}
|
| 219 |
+
{"model_class":"FlaxXLMRobertaModel","pipeline_tag":"feature-extraction","auto_class":"Flax_AutoModel"}
|
| 220 |
{"model_class":"FunnelBaseModel","pipeline_tag":"feature-extraction","auto_class":"AutoModel"}
|
| 221 |
{"model_class":"FunnelForMaskedLM","pipeline_tag":"fill-mask","auto_class":"AutoModelForMaskedLM"}
|
| 222 |
{"model_class":"FunnelForMultipleChoice","pipeline_tag":"multiple-choice","auto_class":"AutoModelForMultipleChoice"}
|