Sabbir772 committed on
Commit
218816c
·
verified ·
1 Parent(s): d0a98cb

Upload folder using huggingface_hub

Browse files
added_tokens.json CHANGED
@@ -1,104 +1,4 @@
1
  {
2
  "<BN>": 32100,
3
- "<SY>": 32101,
4
- "<extra_id_0>": 32099,
5
- "<extra_id_10>": 32089,
6
- "<extra_id_11>": 32088,
7
- "<extra_id_12>": 32087,
8
- "<extra_id_13>": 32086,
9
- "<extra_id_14>": 32085,
10
- "<extra_id_15>": 32084,
11
- "<extra_id_16>": 32083,
12
- "<extra_id_17>": 32082,
13
- "<extra_id_18>": 32081,
14
- "<extra_id_19>": 32080,
15
- "<extra_id_1>": 32098,
16
- "<extra_id_20>": 32079,
17
- "<extra_id_21>": 32078,
18
- "<extra_id_22>": 32077,
19
- "<extra_id_23>": 32076,
20
- "<extra_id_24>": 32075,
21
- "<extra_id_25>": 32074,
22
- "<extra_id_26>": 32073,
23
- "<extra_id_27>": 32072,
24
- "<extra_id_28>": 32071,
25
- "<extra_id_29>": 32070,
26
- "<extra_id_2>": 32097,
27
- "<extra_id_30>": 32069,
28
- "<extra_id_31>": 32068,
29
- "<extra_id_32>": 32067,
30
- "<extra_id_33>": 32066,
31
- "<extra_id_34>": 32065,
32
- "<extra_id_35>": 32064,
33
- "<extra_id_36>": 32063,
34
- "<extra_id_37>": 32062,
35
- "<extra_id_38>": 32061,
36
- "<extra_id_39>": 32060,
37
- "<extra_id_3>": 32096,
38
- "<extra_id_40>": 32059,
39
- "<extra_id_41>": 32058,
40
- "<extra_id_42>": 32057,
41
- "<extra_id_43>": 32056,
42
- "<extra_id_44>": 32055,
43
- "<extra_id_45>": 32054,
44
- "<extra_id_46>": 32053,
45
- "<extra_id_47>": 32052,
46
- "<extra_id_48>": 32051,
47
- "<extra_id_49>": 32050,
48
- "<extra_id_4>": 32095,
49
- "<extra_id_50>": 32049,
50
- "<extra_id_51>": 32048,
51
- "<extra_id_52>": 32047,
52
- "<extra_id_53>": 32046,
53
- "<extra_id_54>": 32045,
54
- "<extra_id_55>": 32044,
55
- "<extra_id_56>": 32043,
56
- "<extra_id_57>": 32042,
57
- "<extra_id_58>": 32041,
58
- "<extra_id_59>": 32040,
59
- "<extra_id_5>": 32094,
60
- "<extra_id_60>": 32039,
61
- "<extra_id_61>": 32038,
62
- "<extra_id_62>": 32037,
63
- "<extra_id_63>": 32036,
64
- "<extra_id_64>": 32035,
65
- "<extra_id_65>": 32034,
66
- "<extra_id_66>": 32033,
67
- "<extra_id_67>": 32032,
68
- "<extra_id_68>": 32031,
69
- "<extra_id_69>": 32030,
70
- "<extra_id_6>": 32093,
71
- "<extra_id_70>": 32029,
72
- "<extra_id_71>": 32028,
73
- "<extra_id_72>": 32027,
74
- "<extra_id_73>": 32026,
75
- "<extra_id_74>": 32025,
76
- "<extra_id_75>": 32024,
77
- "<extra_id_76>": 32023,
78
- "<extra_id_77>": 32022,
79
- "<extra_id_78>": 32021,
80
- "<extra_id_79>": 32020,
81
- "<extra_id_7>": 32092,
82
- "<extra_id_80>": 32019,
83
- "<extra_id_81>": 32018,
84
- "<extra_id_82>": 32017,
85
- "<extra_id_83>": 32016,
86
- "<extra_id_84>": 32015,
87
- "<extra_id_85>": 32014,
88
- "<extra_id_86>": 32013,
89
- "<extra_id_87>": 32012,
90
- "<extra_id_88>": 32011,
91
- "<extra_id_89>": 32010,
92
- "<extra_id_8>": 32091,
93
- "<extra_id_90>": 32009,
94
- "<extra_id_91>": 32008,
95
- "<extra_id_92>": 32007,
96
- "<extra_id_93>": 32006,
97
- "<extra_id_94>": 32005,
98
- "<extra_id_95>": 32004,
99
- "<extra_id_96>": 32003,
100
- "<extra_id_97>": 32002,
101
- "<extra_id_98>": 32001,
102
- "<extra_id_99>": 32000,
103
- "<extra_id_9>": 32090
104
  }
 
1
  {
2
  "<BN>": 32100,
3
+ "<SY>": 32101
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
4
  }
config.json CHANGED
@@ -9,6 +9,7 @@
9
  "decoder_start_token_id": 0,
10
  "dense_act_fn": "gelu_new",
11
  "dropout_rate": 0.1,
 
12
  "eos_token_id": 1,
13
  "feed_forward_proj": "gated-gelu",
14
  "gradient_checkpointing": false,
@@ -25,8 +26,7 @@
25
  "relative_attention_max_distance": 128,
26
  "relative_attention_num_buckets": 32,
27
  "tie_word_embeddings": false,
28
- "torch_dtype": "float32",
29
- "transformers_version": "4.54.1",
30
  "use_cache": true,
31
  "vocab_size": 32102
32
  }
 
9
  "decoder_start_token_id": 0,
10
  "dense_act_fn": "gelu_new",
11
  "dropout_rate": 0.1,
12
+ "dtype": "float32",
13
  "eos_token_id": 1,
14
  "feed_forward_proj": "gated-gelu",
15
  "gradient_checkpointing": false,
 
26
  "relative_attention_max_distance": 128,
27
  "relative_attention_num_buckets": 32,
28
  "tie_word_embeddings": false,
29
+ "transformers_version": "4.57.1",
 
30
  "use_cache": true,
31
  "vocab_size": 32102
32
  }
generation_config.json CHANGED
@@ -1,7 +1,9 @@
1
  {
2
  "_from_model_config": true,
3
  "decoder_start_token_id": 0,
4
- "eos_token_id": 1,
 
 
5
  "pad_token_id": 0,
6
- "transformers_version": "4.54.1"
7
  }
 
1
  {
2
  "_from_model_config": true,
3
  "decoder_start_token_id": 0,
4
+ "eos_token_id": [
5
+ 1
6
+ ],
7
  "pad_token_id": 0,
8
+ "transformers_version": "4.57.1"
9
  }
model.safetensors CHANGED
@@ -1,3 +1,3 @@
1
  version https://git-lfs.github.com/spec/v1
2
- oid sha256:a362ad74ecdd3d29a102c8117642174c93ee465df5da1b4595fa94a234de7234
3
  size 990185320
 
1
  version https://git-lfs.github.com/spec/v1
2
+ oid sha256:efa0d3a9c1506002f928fecb048a5cd100d53c2f5b5dbd9403c23027a7acb60c
3
  size 990185320
tokenizer.json ADDED
The diff for this file is too large to render. See raw diff
 
training_args.bin CHANGED
@@ -1,3 +1,3 @@
1
  version https://git-lfs.github.com/spec/v1
2
- oid sha256:1eb33c931180dbe9257345f90d386a1a68acae0c56733e81b74d0f9751ab8173
3
- size 5496
 
1
  version https://git-lfs.github.com/spec/v1
2
+ oid sha256:c798ea8ac0ca86d7868b78095ec29734f70878bf54dcd1795e2d74957668f717
3
+ size 5905