KB8407 committed on
Commit
ca8a7b9
·
verified ·
1 Parent(s): e3ace78

Update SFT model

Browse files
config.json CHANGED
@@ -10,7 +10,7 @@
10
  "created_date": "2021-04-28",
11
  "dtype": "float32",
12
  "embd_pdrop": 0.1,
13
- "eos_token_id": 1,
14
  "gradient_checkpointing": false,
15
  "id2label": {
16
  "0": "LABEL_0"
@@ -28,7 +28,7 @@
28
  "n_inner": null,
29
  "n_layer": 12,
30
  "n_positions": 1024,
31
- "pad_token_id": 3,
32
  "reorder_and_upcast_attn": false,
33
  "resid_pdrop": 0.1,
34
  "scale_attn_by_inverse_layer_idx": false,
@@ -44,7 +44,7 @@
44
  "max_length": 50
45
  }
46
  },
47
- "transformers_version": "4.57.2",
48
  "use_cache": true,
49
- "vocab_size": 51200
50
  }
 
10
  "created_date": "2021-04-28",
11
  "dtype": "float32",
12
  "embd_pdrop": 0.1,
13
+ "eos_token_id": 51200,
14
  "gradient_checkpointing": false,
15
  "id2label": {
16
  "0": "LABEL_0"
 
28
  "n_inner": null,
29
  "n_layer": 12,
30
  "n_positions": 1024,
31
+ "pad_token_id": 51200,
32
  "reorder_and_upcast_attn": false,
33
  "resid_pdrop": 0.1,
34
  "scale_attn_by_inverse_layer_idx": false,
 
44
  "max_length": 50
45
  }
46
  },
47
+ "transformers_version": "4.57.3",
48
  "use_cache": true,
49
+ "vocab_size": 51201
50
  }
generation_config.json CHANGED
@@ -3,5 +3,5 @@
3
  "bos_token_id": 0,
4
  "eos_token_id": 1,
5
  "pad_token_id": 3,
6
- "transformers_version": "4.57.2"
7
  }
 
3
  "bos_token_id": 0,
4
  "eos_token_id": 1,
5
  "pad_token_id": 3,
6
+ "transformers_version": "4.57.3"
7
  }
model.safetensors CHANGED
@@ -1,3 +1,3 @@
1
  version https://git-lfs.github.com/spec/v1
2
- oid sha256:c1919059d9956309595d51daca59417c39cb89bd0c42857327c80d991d6dee71
3
- size 500671104
 
1
  version https://git-lfs.github.com/spec/v1
2
+ oid sha256:b50116d2d26a72129d9e891fa46b3317d07c1ee2e59d79a56868747ef023ff0c
3
+ size 500674176
tokenizer.json CHANGED
@@ -1,6 +1,11 @@
1
  {
2
  "version": "1.0",
3
- "truncation": null,
 
 
 
 
 
4
  "padding": null,
5
  "added_tokens": [
6
  {
 
1
  {
2
  "version": "1.0",
3
+ "truncation": {
4
+ "direction": "Right",
5
+ "max_length": 512,
6
+ "strategy": "LongestFirst",
7
+ "stride": 0
8
+ },
9
  "padding": null,
10
  "added_tokens": [
11
  {
training_args.bin CHANGED
@@ -1,3 +1,3 @@
1
  version https://git-lfs.github.com/spec/v1
2
- oid sha256:439292e96dc34aed37c6f3ad84204c732db18799624b3ed36b10cd8ca11f0a02
3
  size 5777
 
1
  version https://git-lfs.github.com/spec/v1
2
+ oid sha256:bcf35f9d9fa66fe1a8265967a26284fb416bbbaedc42d944939000722aff7dfc
3
  size 5777