galkowskim committed
Commit e780b5e · verified · 1 parent: cf8c2b2

Training in progress, step 500

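The commit message is the stock one the Transformers `Trainer` uses when it pushes a checkpoint to the Hub mid-training. A minimal sketch of a setup that would produce this kind of commit every 500 steps (all values here are illustrative assumptions, not read from the repo):

```python
from transformers import TrainingArguments

# Illustrative values only -- training_args.bin is binary, so the real
# settings are not visible in this diff.
args = TrainingArguments(
    output_dir="roberta-base-finetuned",  # hypothetical local/Hub repo name
    save_strategy="steps",
    save_steps=500,           # each save triggers a "Training in progress, step N" push
    push_to_hub=True,         # Trainer commits checkpoints to the Hub automatically
    report_to="tensorboard",  # writes the runs/.../events.out.tfevents.* files below
)
```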
config.json CHANGED
@@ -1,7 +1,7 @@
 {
   "_name_or_path": "FacebookAI/roberta-base",
   "architectures": [
-    "RobertaForQuestionAnswering"
+    "RobertaForSequenceClassification"
   ],
   "attention_probs_dropout_prob": 0.1,
   "bos_token_id": 0,
@@ -10,8 +10,16 @@
   "hidden_act": "gelu",
   "hidden_dropout_prob": 0.1,
   "hidden_size": 768,
+  "id2label": {
+    "0": "NEGATIVE",
+    "1": "POSITIVE"
+  },
   "initializer_range": 0.02,
   "intermediate_size": 3072,
+  "label2id": {
+    "NEGATIVE": 0,
+    "POSITIVE": 1
+  },
   "layer_norm_eps": 1e-05,
   "max_position_embeddings": 514,
   "model_type": "roberta",
@@ -19,6 +27,7 @@
   "num_hidden_layers": 12,
   "pad_token_id": 1,
   "position_embedding_type": "absolute",
+  "problem_type": "single_label_classification",
   "torch_dtype": "float32",
   "transformers_version": "4.40.0",
   "type_vocab_size": 1,
model.safetensors CHANGED
@@ -1,3 +1,3 @@
 version https://git-lfs.github.com/spec/v1
-oid sha256:0f5557cc8e7fa43f36c052a23174791f7e5e865a059404d19e83263a1a969f0f
-size 496250232
+oid sha256:54bb5e1d9e9afeaad00d9e2a17e5a51522fa4c5572b3930c2acd08237b4de5ad
+size 498612824
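The checkpoint grows by 498612824 − 496250232 = 2362592 bytes, which is consistent with swapping the small QA span head for the larger sequence-classification head. A back-of-the-envelope check, assuming the standard head shapes from `transformers` (the ~200-byte remainder would be safetensors header metadata):

```python
hidden, num_labels = 768, 2

# RobertaForQuestionAnswering head: qa_outputs = Linear(hidden, 2)
qa_head = hidden * 2 + 2                       # 1,538 params

# RobertaForSequenceClassification head:
#   dense = Linear(hidden, hidden); out_proj = Linear(hidden, num_labels)
cls_head = (hidden * hidden + hidden) + (hidden * num_labels + num_labels)

delta = (cls_head - qa_head) * 4               # float32 bytes -> 2,362,368
print(delta)                                   # observed file delta: 2,362,592
```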
runs/May21_16-11-27_dgx-2.eden/events.out.tfevents.1716300688.dgx-2.eden.553646.0 ADDED
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:5492b9e9543718540ab474df23fc307f32876aa9548d07dcf8154d8acd152875
+size 5052
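The added file is a TensorBoard event log, which the Trainer writes per run when reporting to TensorBoard. A sketch of reading the logged scalars back out of it, assuming the `tensorboard` package is installed:

```python
from tensorboard.backend.event_processing.event_accumulator import EventAccumulator

acc = EventAccumulator("runs/May21_16-11-27_dgx-2.eden")
acc.Reload()
print(acc.Tags()["scalars"])         # e.g. ["train/loss", "train/learning_rate", ...]
for event in acc.Scalars("train/loss"):
    print(event.step, event.value)
```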
tokenizer.json CHANGED
@@ -2,20 +2,11 @@
   "version": "1.0",
   "truncation": {
     "direction": "Right",
-    "max_length": 384,
-    "strategy": "OnlySecond",
+    "max_length": 512,
+    "strategy": "LongestFirst",
     "stride": 0
   },
-  "padding": {
-    "strategy": {
-      "Fixed": 384
-    },
-    "direction": "Right",
-    "pad_to_multiple_of": null,
-    "pad_id": 1,
-    "pad_type_id": 0,
-    "pad_token": "<pad>"
-  },
+  "padding": null,
   "added_tokens": [
     {
       "id": 0,
training_args.bin CHANGED
@@ -1,3 +1,3 @@
 version https://git-lfs.github.com/spec/v1
-oid sha256:b7ae793fd86ced3da1dbe28a8b7be4bb74c6f86fe3c976174be8f001253402f7
+oid sha256:66b0481d86dcc4d35e7d4801f8fb454806abc188ea6835363d04b8f91ad10e78
 size 4984