mohsennp committed on
Commit
d560a19
·
verified ·
1 Parent(s): ab4ecce

Upload EnCodon

Browse files
Files changed (2) hide show
  1. config.json +1 -0
  2. configuration_encodon.py +1 -0
config.json CHANGED
@@ -20,6 +20,7 @@
20
  "layer_norm_eps": 1e-12,
21
  "lm_type": "distilled",
22
  "max_position_embeddings": 2048,
 
23
  "num_attention_heads": 16,
24
  "num_hidden_layers": 12,
25
  "pad_token_id": 3,
 
20
  "layer_norm_eps": 1e-12,
21
  "lm_type": "distilled",
22
  "max_position_embeddings": 2048,
23
+ "model_type": "encodon",
24
  "num_attention_heads": 16,
25
  "num_hidden_layers": 12,
26
  "pad_token_id": 3,
configuration_encodon.py CHANGED
@@ -1,6 +1,7 @@
1
  from transformers import PretrainedConfig
2
 
3
  class EnCodonConfig(PretrainedConfig):
 
4
  def __init__(
5
  self,
6
  vocab_size=70,
 
1
  from transformers import PretrainedConfig
2
 
3
  class EnCodonConfig(PretrainedConfig):
4
+ model_type = "encodon"
5
  def __init__(
6
  self,
7
  vocab_size=70,