Update with commit 75336c17945c6b1b5552dbf0236d25f869168aab
Browse files. See: https://github.com/huggingface/transformers/commit/75336c17945c6b1b5552dbf0236d25f869168aab
- frameworks.json +1 -1
- pipeline_tags.json +2 -0
frameworks.json
CHANGED
|
@@ -91,7 +91,7 @@
|
|
| 91 |
{"model_type":"led","pytorch":true,"tensorflow":true,"flax":false,"processor":"AutoTokenizer"}
|
| 92 |
{"model_type":"levit","pytorch":true,"tensorflow":false,"flax":false,"processor":"AutoImageProcessor"}
|
| 93 |
{"model_type":"lilt","pytorch":true,"tensorflow":false,"flax":false,"processor":"AutoTokenizer"}
|
| 94 |
-
{"model_type":"llama","pytorch":true,"tensorflow":false,"flax":false,"processor":"AutoTokenizer"}
|
| 95 |
{"model_type":"longformer","pytorch":true,"tensorflow":true,"flax":false,"processor":"AutoTokenizer"}
|
| 96 |
{"model_type":"longt5","pytorch":true,"tensorflow":false,"flax":true,"processor":"AutoTokenizer"}
|
| 97 |
{"model_type":"luke","pytorch":true,"tensorflow":false,"flax":false,"processor":"AutoTokenizer"}
|
|
|
|
| 91 |
{"model_type":"led","pytorch":true,"tensorflow":true,"flax":false,"processor":"AutoTokenizer"}
|
| 92 |
{"model_type":"levit","pytorch":true,"tensorflow":false,"flax":false,"processor":"AutoImageProcessor"}
|
| 93 |
{"model_type":"lilt","pytorch":true,"tensorflow":false,"flax":false,"processor":"AutoTokenizer"}
|
| 94 |
+
{"model_type":"llama","pytorch":true,"tensorflow":false,"flax":true,"processor":"AutoTokenizer"}
|
| 95 |
{"model_type":"longformer","pytorch":true,"tensorflow":true,"flax":false,"processor":"AutoTokenizer"}
|
| 96 |
{"model_type":"longt5","pytorch":true,"tensorflow":false,"flax":true,"processor":"AutoTokenizer"}
|
| 97 |
{"model_type":"luke","pytorch":true,"tensorflow":false,"flax":false,"processor":"AutoTokenizer"}
|
pipeline_tags.json
CHANGED
|
@@ -269,6 +269,8 @@
|
|
| 269 |
{"model_class":"FlaxGPTJModel","pipeline_tag":"feature-extraction","auto_class":"Flax_AutoModel"}
|
| 270 |
{"model_class":"FlaxGPTNeoForCausalLM","pipeline_tag":"text-generation","auto_class":"Flax_AutoModelForCausalLM"}
|
| 271 |
{"model_class":"FlaxGPTNeoModel","pipeline_tag":"feature-extraction","auto_class":"Flax_AutoModel"}
|
|
|
|
|
|
|
| 272 |
{"model_class":"FlaxLongT5ForConditionalGeneration","pipeline_tag":"text2text-generation","auto_class":"Flax_AutoModelForSeq2SeqLM"}
|
| 273 |
{"model_class":"FlaxLongT5Model","pipeline_tag":"feature-extraction","auto_class":"Flax_AutoModel"}
|
| 274 |
{"model_class":"FlaxMBartForConditionalGeneration","pipeline_tag":"text2text-generation","auto_class":"Flax_AutoModelForSeq2SeqLM"}
|
|
|
|
| 269 |
{"model_class":"FlaxGPTJModel","pipeline_tag":"feature-extraction","auto_class":"Flax_AutoModel"}
|
| 270 |
{"model_class":"FlaxGPTNeoForCausalLM","pipeline_tag":"text-generation","auto_class":"Flax_AutoModelForCausalLM"}
|
| 271 |
{"model_class":"FlaxGPTNeoModel","pipeline_tag":"feature-extraction","auto_class":"Flax_AutoModel"}
|
| 272 |
+
{"model_class":"FlaxLlamaForCausalLM","pipeline_tag":"text-generation","auto_class":"Flax_AutoModelForCausalLM"}
|
| 273 |
+
{"model_class":"FlaxLlamaModel","pipeline_tag":"feature-extraction","auto_class":"Flax_AutoModel"}
|
| 274 |
{"model_class":"FlaxLongT5ForConditionalGeneration","pipeline_tag":"text2text-generation","auto_class":"Flax_AutoModelForSeq2SeqLM"}
|
| 275 |
{"model_class":"FlaxLongT5Model","pipeline_tag":"feature-extraction","auto_class":"Flax_AutoModel"}
|
| 276 |
{"model_class":"FlaxMBartForConditionalGeneration","pipeline_tag":"text2text-generation","auto_class":"Flax_AutoModelForSeq2SeqLM"}
|