jrodriiguezg committed on
Commit
badaec0
·
verified ·
1 Parent(s): c60d322

Upload folder using huggingface_hub

Browse files
config.json CHANGED
@@ -11,7 +11,6 @@
11
  "decoder_start_token_id": 0,
12
  "dense_act_fn": "relu",
13
  "dropout_rate": 0.1,
14
- "dtype": "float32",
15
  "eos_token_id": 2,
16
  "feed_forward_proj": "relu",
17
  "gradient_checkpointing": false,
@@ -63,6 +62,7 @@
63
  "prefix": "translate English to Romanian: "
64
  }
65
  },
 
66
  "transformers_version": "4.44.0",
67
  "use_cache": true,
68
  "vocab_size": 32100
 
11
  "decoder_start_token_id": 0,
12
  "dense_act_fn": "relu",
13
  "dropout_rate": 0.1,
 
14
  "eos_token_id": 2,
15
  "feed_forward_proj": "relu",
16
  "gradient_checkpointing": false,
 
62
  "prefix": "translate English to Romanian: "
63
  }
64
  },
65
+ "torch_dtype": "float32",
66
  "transformers_version": "4.44.0",
67
  "use_cache": true,
68
  "vocab_size": 32100
decoder_model_quantized.onnx CHANGED
@@ -1,3 +1,3 @@
1
  version https://git-lfs.github.com/spec/v1
2
- oid sha256:fcf6f9b4888450be4af9b80b9cd8bb26d74569a34d332eab1d971d5874eee463
3
  size 58433120
 
1
  version https://git-lfs.github.com/spec/v1
2
+ oid sha256:ccfff83f5ec98eeb502c554302f8818b397a207bc5a379ab1fd7ff62a086f3af
3
  size 58433120
decoder_with_past_model_quantized.onnx CHANGED
@@ -1,3 +1,3 @@
1
  version https://git-lfs.github.com/spec/v1
2
- oid sha256:fb2f6ed0eb5a0640f2bd409ab6bd94e62e24f8cc193f9366f1e22e0aa2bcfc79
3
  size 55261279
 
1
  version https://git-lfs.github.com/spec/v1
2
+ oid sha256:93cb558dce1ced68053aad3f69ee496705a4b725e5ccb632d9081cd2e53c1f08
3
  size 55261279
encoder_model_quantized.onnx CHANGED
@@ -1,3 +1,3 @@
1
  version https://git-lfs.github.com/spec/v1
2
- oid sha256:2db88fe7402cbd9c40250df145b2b3f1ad9028c75097666963acf2371cd74b24
3
  size 35517324
 
1
  version https://git-lfs.github.com/spec/v1
2
+ oid sha256:9d61d3bcd11d3b652a2f3b95486111276f9cc57d77bf673c11aa536333eeb5a3
3
  size 35517324
tokenizer.json CHANGED
The diff for this file is too large to render. See raw diff
 
tokenizer_config.json CHANGED
@@ -945,11 +945,10 @@
945
  "<extra_id_0>"
946
  ],
947
  "bos_token": "<s>",
948
- "clean_up_tokenization_spaces": false,
949
  "cls_token": "<s>",
950
  "eos_token": "</s>",
951
  "errors": "replace",
952
- "extra_special_tokens": {},
953
  "mask_token": "<mask>",
954
  "max_length": 128,
955
  "model_max_length": 512,
 
945
  "<extra_id_0>"
946
  ],
947
  "bos_token": "<s>",
948
+ "clean_up_tokenization_spaces": true,
949
  "cls_token": "<s>",
950
  "eos_token": "</s>",
951
  "errors": "replace",
 
952
  "mask_token": "<mask>",
953
  "max_length": 128,
954
  "model_max_length": 512,