Flansma committed on
Commit
5e5108d
·
verified ·
1 Parent(s): 45723db

Upload folder using huggingface_hub

Browse files
Files changed (2) hide show
  1. config.json +12 -12
  2. model.safetensors +2 -2
config.json CHANGED
@@ -2,12 +2,19 @@
2
  "architectures": [
3
  "HELMBertForMaskedLM"
4
  ],
 
 
 
 
 
 
 
5
  "attention_probs_dropout_prob": 0.1,
6
  "bos_token_id": 1,
7
  "dtype": "float32",
8
  "eos_token_id": 2,
9
  "hidden_dropout_prob": 0.1,
10
- "hidden_size": 256,
11
  "intermediate_size": 3072,
12
  "mask_token_id": 4,
13
  "max_position_embeddings": 512,
@@ -15,20 +22,13 @@
15
  "model_type": "helmbert",
16
  "ngie_dropout": 0.1,
17
  "ngie_kernel_size": 3,
18
- "num_attention_heads": 4,
19
- "num_hidden_layers": 2,
20
  "pad_token_id": 0,
21
  "pos_att_type": "c2p|p2c",
22
  "position_buckets": 256,
23
  "sep_token_id": 2,
24
  "share_att_key": false,
25
  "transformers_version": "4.57.3",
26
- "vocab_size": 78,
27
- "auto_map": {
28
- "AutoConfig": "configuration_helmbert.HELMBertConfig",
29
- "AutoModel": "modeling_helmbert.HELMBertModel",
30
- "AutoModelForMaskedLM": "modeling_helmbert.HELMBertForMaskedLM",
31
- "AutoModelForSequenceClassification": "modeling_helmbert.HELMBertForSequenceClassification",
32
- "AutoTokenizer": "tokenization_helmbert.HELMBertTokenizer"
33
- }
34
- }
 
2
  "architectures": [
3
  "HELMBertForMaskedLM"
4
  ],
5
+ "auto_map": {
6
+ "AutoConfig": "configuration_helmbert.HELMBertConfig",
7
+ "AutoModel": "modeling_helmbert.HELMBertModel",
8
+ "AutoModelForMaskedLM": "modeling_helmbert.HELMBertForMaskedLM",
9
+ "AutoModelForSequenceClassification": "modeling_helmbert.HELMBertForSequenceClassification",
10
+ "AutoTokenizer": ["tokenization_helmbert.HELMBertTokenizer", null]
11
+ },
12
  "attention_probs_dropout_prob": 0.1,
13
  "bos_token_id": 1,
14
  "dtype": "float32",
15
  "eos_token_id": 2,
16
  "hidden_dropout_prob": 0.1,
17
+ "hidden_size": 768,
18
  "intermediate_size": 3072,
19
  "mask_token_id": 4,
20
  "max_position_embeddings": 512,
 
22
  "model_type": "helmbert",
23
  "ngie_dropout": 0.1,
24
  "ngie_kernel_size": 3,
25
+ "num_attention_heads": 12,
26
+ "num_hidden_layers": 6,
27
  "pad_token_id": 0,
28
  "pos_att_type": "c2p|p2c",
29
  "position_buckets": 256,
30
  "sep_token_id": 2,
31
  "share_att_key": false,
32
  "transformers_version": "4.57.3",
33
+ "vocab_size": 78
34
+ }
 
 
 
 
 
 
 
model.safetensors CHANGED
@@ -1,3 +1,3 @@
1
  version https://git-lfs.github.com/spec/v1
2
- oid sha256:4b2a6686b46de073e9a373938637fe0de1817cfb1fd2961f7daf1e4dafb7ced9
3
- size 18489472
 
1
  version https://git-lfs.github.com/spec/v1
2
+ oid sha256:c735315218d2e8864e4ea93f2c76d8eef58f78989f007b6a2bf446af6657a194
3
+ size 219166144