littleprophisher committed on
Commit 6d3589e · verified · 1 Parent(s): 5afe387

Upload folder using huggingface_hub

Files changed (4)
  1. config.json +31 -31
  2. model.safetensors +1 -1
  3. tokenizer.json +0 -0
  4. tokenizer_config.json +14 -14
config.json CHANGED
@@ -1,31 +1,31 @@
-{
-  "add_cross_attention": false,
-  "architectures": [
-    "BertForSequenceClassification"
-  ],
-  "attention_probs_dropout_prob": 0.5,
-  "bos_token_id": null,
-  "classifier_dropout": null,
-  "dtype": "float32",
-  "eos_token_id": null,
-  "gradient_checkpointing": false,
-  "hidden_act": "gelu",
-  "hidden_dropout_prob": 0.5,
-  "hidden_size": 768,
-  "initializer_range": 0.02,
-  "intermediate_size": 3072,
-  "is_decoder": false,
-  "layer_norm_eps": 1e-12,
-  "max_position_embeddings": 512,
-  "model_type": "bert",
-  "num_attention_heads": 12,
-  "num_hidden_layers": 12,
-  "pad_token_id": 0,
-  "position_embedding_type": "absolute",
-  "problem_type": "single_label_classification",
-  "tie_word_embeddings": true,
-  "transformers_version": "5.5.0",
-  "type_vocab_size": 2,
-  "use_cache": true,
-  "vocab_size": 30522
-}
+{
+  "add_cross_attention": false,
+  "architectures": [
+    "BertForSequenceClassification"
+  ],
+  "attention_probs_dropout_prob": 0.5,
+  "bos_token_id": null,
+  "classifier_dropout": null,
+  "dtype": "float32",
+  "eos_token_id": null,
+  "gradient_checkpointing": false,
+  "hidden_act": "gelu",
+  "hidden_dropout_prob": 0.5,
+  "hidden_size": 768,
+  "initializer_range": 0.02,
+  "intermediate_size": 3072,
+  "is_decoder": false,
+  "layer_norm_eps": 1e-12,
+  "max_position_embeddings": 512,
+  "model_type": "bert",
+  "num_attention_heads": 12,
+  "num_hidden_layers": 12,
+  "pad_token_id": 0,
+  "position_embedding_type": "absolute",
+  "problem_type": "single_label_classification",
+  "tie_word_embeddings": true,
+  "transformers_version": "5.0.0",
+  "type_vocab_size": 2,
+  "use_cache": false,
+  "vocab_size": 30522
+}
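
In the diff above, only transformers_version (5.5.0 to 5.0.0) and use_cache (true to false) actually change; the remaining keys are identical. As a minimal sketch, the updated config can be consumed directly with transformers; "littleprophisher/your-model" is a placeholder repo id, since the repository name is not shown in this commit view.

# Minimal sketch: load the classifier described by the config.json above.
# "littleprophisher/your-model" is a placeholder; substitute the real repo id.
from transformers import AutoConfig, AutoModelForSequenceClassification

config = AutoConfig.from_pretrained("littleprophisher/your-model")
print(config.model_type, config.problem_type)  # bert single_label_classification

model = AutoModelForSequenceClassification.from_pretrained("littleprophisher/your-model")
model.eval()  # the 0.5 dropout probabilities only apply during training
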
model.safetensors CHANGED
@@ -1,3 +1,3 @@
 version https://git-lfs.github.com/spec/v1
-oid sha256:39cc92a4f01a87e8161cf89ec3f45e393d916d29c0c2dc834b27c3460a930344
+oid sha256:7314d17b03966973af178886baf8b91ca88988acc96a4ae80072114fa23d1d0c
 size 437958624
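
Only the LFS pointer's sha256 changes here; the size stays at 437958624 bytes, so the new weights have the same shape on disk. A minimal sketch for checking a downloaded model.safetensors against the new oid, again with a placeholder repo id:

# Minimal sketch: hash the downloaded weights and compare against the LFS oid above.
import hashlib
from huggingface_hub import hf_hub_download

path = hf_hub_download(repo_id="littleprophisher/your-model", filename="model.safetensors")

sha = hashlib.sha256()
with open(path, "rb") as f:
    for chunk in iter(lambda: f.read(1 << 20), b""):  # hash in 1 MiB chunks
        sha.update(chunk)

print(sha.hexdigest() == "7314d17b03966973af178886baf8b91ca88988acc96a4ae80072114fa23d1d0c")
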
tokenizer.json CHANGED
The diff for this file is too large to render. See raw diff
 
tokenizer_config.json CHANGED
@@ -1,14 +1,14 @@
-{
-  "backend": "tokenizers",
-  "cls_token": "[CLS]",
-  "do_lower_case": true,
-  "is_local": true,
-  "mask_token": "[MASK]",
-  "model_max_length": 1000000000000000019884624838656,
-  "pad_token": "[PAD]",
-  "sep_token": "[SEP]",
-  "strip_accents": null,
-  "tokenize_chinese_chars": true,
-  "tokenizer_class": "BertTokenizer",
-  "unk_token": "[UNK]"
-}
+{
+  "backend": "tokenizers",
+  "cls_token": "[CLS]",
+  "do_lower_case": true,
+  "is_local": false,
+  "mask_token": "[MASK]",
+  "model_max_length": 512,
+  "pad_token": "[PAD]",
+  "sep_token": "[SEP]",
+  "strip_accents": null,
+  "tokenize_chinese_chars": true,
+  "tokenizer_class": "BertTokenizer",
+  "unk_token": "[UNK]"
+}
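
This tokenizer config change replaces the "no limit recorded" sentinel for model_max_length with 512 (matching the model's max_position_embeddings) and flips is_local to false. A minimal sketch of what that means in practice, using the same placeholder repo id as above:

# Minimal sketch: with model_max_length set to 512, truncation no longer needs
# an explicit max_length to stay within the model's position embeddings.
from transformers import AutoTokenizer

tokenizer = AutoTokenizer.from_pretrained("littleprophisher/your-model")
print(tokenizer.model_max_length)  # 512

enc = tokenizer("long text " * 2000, truncation=True, return_tensors="pt")
print(enc["input_ids"].shape[1])   # capped at 512 instead of the old huge sentinel
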