akashmaggon committed
Commit 5cc1af1 · verified · 1 Parent(s): 8230972

Training in progress, epoch 1

config.json CHANGED
@@ -1,18 +1,13 @@
 {
-  "_name_or_path": "albert/albert-xlarge-v2",
+  "_name_or_path": "distilbert-base-uncased",
+  "activation": "gelu",
   "architectures": [
-    "AlbertForSequenceClassification"
+    "DistilBertForSequenceClassification"
   ],
-  "attention_probs_dropout_prob": 0,
-  "bos_token_id": 2,
-  "classifier_dropout_prob": 0.1,
-  "down_scale_factor": 1,
-  "embedding_size": 128,
-  "eos_token_id": 3,
-  "gap_size": 0,
-  "hidden_act": "gelu_new",
-  "hidden_dropout_prob": 0,
-  "hidden_size": 2048,
+  "attention_dropout": 0.1,
+  "dim": 768,
+  "dropout": 0.1,
+  "hidden_dim": 3072,
   "id2label": {
     "0": "ARTS",
     "1": "ARTS & CULTURE",
@@ -58,8 +53,6 @@
     "41": "WORLDPOST"
   },
   "initializer_range": 0.02,
-  "inner_group_num": 1,
-  "intermediate_size": 8192,
   "label2id": {
     "ARTS": 0,
     "ARTS & CULTURE": 1,
@@ -104,19 +97,17 @@
     "WORLD NEWS": 40,
     "WORLDPOST": 41
   },
-  "layer_norm_eps": 1e-12,
   "max_position_embeddings": 512,
-  "model_type": "albert",
-  "net_structure_type": 0,
-  "num_attention_heads": 16,
-  "num_hidden_groups": 1,
-  "num_hidden_layers": 24,
-  "num_memory_blocks": 0,
+  "model_type": "distilbert",
+  "n_heads": 12,
+  "n_layers": 6,
   "pad_token_id": 0,
   "problem_type": "single_label_classification",
+  "qa_dropout": 0.1,
+  "seq_classif_dropout": 0.2,
+  "sinusoidal_pos_embds": false,
+  "tie_weights_": true,
   "torch_dtype": "float32",
   "transformers_version": "4.42.4",
-  "type_vocab_size": 2,
-  "vocab_size": 30000
+  "vocab_size": 30522
 }
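The new config swaps the ALBERT-xlarge-v2 backbone for distilbert-base-uncased while keeping the same 42 news-category label maps and single-label classification setup. A minimal loading sketch under that assumption; the repo id below is a placeholder and not part of this commit:

```python
from transformers import AutoConfig, AutoModelForSequenceClassification, AutoTokenizer

repo_id = "akashmaggon/<this-model-repo>"  # placeholder -- substitute the actual Hub repo id

config = AutoConfig.from_pretrained(repo_id)
print(config.model_type)       # "distilbert" after this commit (previously "albert")
print(len(config.id2label))    # 42 categories, e.g. config.id2label[0] == "ARTS"

model = AutoModelForSequenceClassification.from_pretrained(repo_id)
tokenizer = AutoTokenizer.from_pretrained(repo_id)

inputs = tokenizer("Stocks rallied after the jobs report.", return_tensors="pt")
pred = model(**inputs).logits.argmax(dim=-1).item()
print(config.id2label[pred])   # one of the 42 labels from config.json
```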
model.safetensors CHANGED
@@ -1,3 +1,3 @@
 version https://git-lfs.github.com/spec/v1
-oid sha256:0f9b0ff834cee6f2d437a665209c841c760585e9946026b06c7a0b4b98eb7cbf
-size 235247160
+oid sha256:f8460f0b53ba66301ad63d186d6d56e085d3704bf04568f2ec7cab02b06cc387
+size 267955616
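The new weights file is 267,955,616 bytes, which lines up with a float32 distilbert-base-uncased backbone plus a 42-way classification head. A back-of-the-envelope check, assuming the standard DistilBERT layout (the small remainder is the safetensors header):

```python
# Rough parameter count for distilbert-base-uncased + 42-label head, using the
# dimensions from the new config.json (dim=768, hidden_dim=3072, n_layers=6,
# vocab_size=30522, max_position_embeddings=512).
vocab, dim, hidden, layers, max_pos, labels = 30522, 768, 3072, 6, 512, 42

embeddings = vocab * dim + max_pos * dim + 2 * dim      # word + position + LayerNorm
per_layer = (
    4 * (dim * dim + dim)        # q/k/v/output projections
    + 2 * dim                    # attention LayerNorm
    + dim * hidden + hidden      # FFN up-projection
    + hidden * dim + dim         # FFN down-projection
    + 2 * dim                    # output LayerNorm
)
head = (dim * dim + dim) + (dim * labels + labels)      # pre_classifier + classifier

params = embeddings + layers * per_layer + head
print(params)        # 66,985,770 parameters
print(params * 4)    # 267,943,080 bytes in float32, vs. the 267,955,616-byte file
```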
special_tokens_map.json CHANGED
@@ -1,15 +1,7 @@
 {
-  "bos_token": "[CLS]",
   "cls_token": "[CLS]",
-  "eos_token": "[SEP]",
-  "mask_token": {
-    "content": "[MASK]",
-    "lstrip": true,
-    "normalized": false,
-    "rstrip": false,
-    "single_word": false
-  },
-  "pad_token": "<pad>",
+  "mask_token": "[MASK]",
+  "pad_token": "[PAD]",
   "sep_token": "[SEP]",
-  "unk_token": "<unk>"
+  "unk_token": "[UNK]"
 }
tokenizer.json CHANGED
The diff for this file is too large to render. See raw diff
 
tokenizer_config.json CHANGED
@@ -1,22 +1,22 @@
 {
   "added_tokens_decoder": {
     "0": {
-      "content": "<pad>",
+      "content": "[PAD]",
       "lstrip": false,
       "normalized": false,
       "rstrip": false,
       "single_word": false,
       "special": true
     },
-    "1": {
-      "content": "<unk>",
+    "100": {
+      "content": "[UNK]",
       "lstrip": false,
       "normalized": false,
       "rstrip": false,
       "single_word": false,
       "special": true
     },
-    "2": {
+    "101": {
       "content": "[CLS]",
       "lstrip": false,
       "normalized": false,
@@ -24,7 +24,7 @@
       "single_word": false,
       "special": true
     },
-    "3": {
+    "102": {
       "content": "[SEP]",
       "lstrip": false,
       "normalized": false,
@@ -32,26 +32,24 @@
       "single_word": false,
       "special": true
     },
-    "4": {
+    "103": {
       "content": "[MASK]",
-      "lstrip": true,
+      "lstrip": false,
       "normalized": false,
       "rstrip": false,
       "single_word": false,
       "special": true
     }
   },
-  "bos_token": "[CLS]",
   "clean_up_tokenization_spaces": true,
   "cls_token": "[CLS]",
   "do_lower_case": true,
-  "eos_token": "[SEP]",
-  "keep_accents": false,
   "mask_token": "[MASK]",
   "model_max_length": 512,
-  "pad_token": "<pad>",
-  "remove_space": true,
+  "pad_token": "[PAD]",
   "sep_token": "[SEP]",
-  "tokenizer_class": "AlbertTokenizer",
-  "unk_token": "<unk>"
+  "strip_accents": null,
+  "tokenize_chinese_chars": true,
+  "tokenizer_class": "DistilBertTokenizer",
+  "unk_token": "[UNK]"
 }
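The tokenizer switches from AlbertTokenizer (SentencePiece-style <pad>/<unk>) to DistilBertTokenizer (WordPiece with [PAD]/[UNK] and the BERT-family special-token ids 0/100/101/102/103 shown above). A quick sanity-check sketch; the repo id is again a placeholder:

```python
from transformers import AutoTokenizer

tokenizer = AutoTokenizer.from_pretrained("akashmaggon/<this-model-repo>")  # placeholder repo id

# Special-token ids expected from the new tokenizer_config.json
for tok in ("[PAD]", "[UNK]", "[CLS]", "[SEP]", "[MASK]"):
    print(tok, tokenizer.convert_tokens_to_ids(tok))   # 0, 100, 101, 102, 103

enc = tokenizer("A sample headline", truncation=True, max_length=512)
print(enc.input_ids[0], enc.input_ids[-1])             # 101 ([CLS]) ... 102 ([SEP])
```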
training_args.bin CHANGED
@@ -1,3 +1,3 @@
 version https://git-lfs.github.com/spec/v1
-oid sha256:e253e03c4ae548456b5ec51063a39203279606c829b3e485b64a0b0904c6f2cc
+oid sha256:d82d148b3449ce5b5d4f3c6e9022ce0041951e4ae7100d323ca68a7c90ed6e0e
 size 5112