wardenga committed
Commit ae5ef1d · verified · 1 parent: 3ed74a8

Upload config

Files changed (1):
  config.json  +5 -34
config.json CHANGED
@@ -5,9 +5,9 @@
   "cl_dropout_prob": 0.1,
   "cl_hidden_size": 32,
   "classifier_dropout": 0.0,
-  "d_ff": 1024,
+  "d_ff": 2816,
   "d_kv": 64,
-  "d_model": 512,
+  "d_model": 1024,
   "decoder_start_token_id": 0,
   "dense_act_fn": "gelu_new",
   "dropout_rate": 0.1,
@@ -20,42 +20,13 @@
   "layer_norm_epsilon": 1e-06,
   "model_type": "t5",
   "n_positions": 512,
-  "num_decoder_layers": 8,
-  "num_heads": 6,
-  "num_layers": 8,
+  "num_decoder_layers": 24,
+  "num_heads": 16,
+  "num_layers": 24,
   "output_past": true,
   "pad_token_id": 0,
   "relative_attention_max_distance": 128,
   "relative_attention_num_buckets": 32,
-  "task_specific_params": {
-    "summarization": {
-      "early_stopping": true,
-      "length_penalty": 2.0,
-      "max_length": 200,
-      "min_length": 30,
-      "no_repeat_ngram_size": 3,
-      "num_beams": 4,
-      "prefix": "summarize: "
-    },
-    "translation_en_to_de": {
-      "early_stopping": true,
-      "max_length": 300,
-      "num_beams": 4,
-      "prefix": "translate English to German: "
-    },
-    "translation_en_to_fr": {
-      "early_stopping": true,
-      "max_length": 300,
-      "num_beams": 4,
-      "prefix": "translate English to French: "
-    },
-    "translation_en_to_ro": {
-      "early_stopping": true,
-      "max_length": 300,
-      "num_beams": 4,
-      "prefix": "translate English to Romanian: "
-    }
-  },
   "tie_word_embeddings": false,
   "transformers_version": "4.46.3",
   "use_cache": true,