Matessane committed
Commit fab685c · verified · 1 Parent(s): 09fa08a

Upload config.json with huggingface_hub
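
For reference, an upload like this is produced by `huggingface_hub`'s `upload_file` helper, which also generates the default commit message shown above. A minimal sketch, assuming a placeholder repo id (the target repository is not named on this page):

```python
# Sketch of the upload behind this commit, using the public huggingface_hub API.
# The repo_id below is a placeholder -- substitute the actual repository.
from huggingface_hub import HfApi

api = HfApi()  # picks up the token stored by `huggingface-cli login`
api.upload_file(
    path_or_fileobj="config.json",   # local file to push
    path_in_repo="config.json",      # destination path inside the repo
    repo_id="Matessane/<model>",     # placeholder (assumption)
    commit_message="Upload config.json with huggingface_hub",
)
```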

Files changed (1)
  1. config.json +73 -0
config.json ADDED
@@ -0,0 +1,73 @@
+{
+  "activation_dropout": 0.0,
+  "activation_function": "gelu",
+  "architectures": [
+    "MarianMTModel"
+  ],
+  "attention_dropout": 0.0,
+  "d_ff": 3072,
+  "d_kv": 64,
+  "d_model": 768,
+  "decoder_attention_heads": 16,
+  "decoder_ffn_dim": 4096,
+  "decoder_layerdrop": 0.0,
+  "decoder_layers": 12,
+  "decoder_start_token_id": 0,
+  "decoder_vocab_size": 32128,
+  "dropout": 0.1,
+  "dropout_rate": 0.1,
+  "encoder_attention_heads": 16,
+  "encoder_ffn_dim": 4096,
+  "encoder_layerdrop": 0.0,
+  "encoder_layers": 12,
+  "eos_token_id": 1,
+  "forced_eos_token_id": 0,
+  "init_std": 0.02,
+  "initializer_factor": 1.0,
+  "is_encoder_decoder": true,
+  "layer_norm_epsilon": 1e-06,
+  "max_position_embeddings": 1024,
+  "model_type": "marian",
+  "n_positions": 512,
+  "num_heads": 12,
+  "num_hidden_layers": 12,
+  "num_layers": 12,
+  "output_past": true,
+  "pad_token_id": 0,
+  "relative_attention_num_buckets": 32,
+  "scale_embedding": false,
+  "share_encoder_decoder_embeddings": true,
+  "task_specific_params": {
+    "summarization": {
+      "early_stopping": true,
+      "length_penalty": 2.0,
+      "max_length": 200,
+      "min_length": 30,
+      "no_repeat_ngram_size": 3,
+      "num_beams": 4,
+      "prefix": "summarize: "
+    },
+    "translation_en_to_de": {
+      "early_stopping": true,
+      "max_length": 300,
+      "num_beams": 4,
+      "prefix": "translate English to German: "
+    },
+    "translation_en_to_fr": {
+      "early_stopping": true,
+      "max_length": 300,
+      "num_beams": 4,
+      "prefix": "translate English to French: "
+    },
+    "translation_en_to_ro": {
+      "early_stopping": true,
+      "max_length": 300,
+      "num_beams": 4,
+      "prefix": "translate English to Romanian: "
+    }
+  },
+  "torch_dtype": "float32",
+  "transformers_version": "4.51.3",
+  "use_cache": true,
+  "vocab_size": 32128
+}
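
The `model_type` field routes `AutoConfig` to the Marian configuration class, so the committed file can be consumed directly by transformers. A minimal sketch, assuming `config.json` has been downloaded into the current directory:

```python
# Sketch: load the committed config locally with transformers.
from transformers import AutoConfig

config = AutoConfig.from_pretrained(".")  # resolves ./config.json via model_type "marian"
print(config.model_type)     # marian
print(config.architectures)  # ['MarianMTModel']
print(config.task_specific_params["translation_en_to_fr"]["num_beams"])  # 4
```

Keys the Marian config class does not define (here, T5-style fields such as `d_ff` and `num_heads`) are attached as extra attributes on the config object rather than rejected.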