asjoberg committed on
Commit 95f9bc4 · verified · 1 Parent(s): a649281

Upload config.json with huggingface_hub
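For reference, an upload like this can be scripted with the huggingface_hub client. A minimal sketch, assuming the file sits in the working directory; the repo id below is a placeholder to substitute with the actual repository:

```python
from huggingface_hub import HfApi

api = HfApi()  # picks up the token from `huggingface-cli login` by default
api.upload_file(
    path_or_fileobj="config.json",       # local file to push
    path_in_repo="config.json",          # destination path inside the repo
    repo_id="asjoberg/your-model-repo",  # placeholder: substitute the real repo id
    commit_message="Upload config.json with huggingface_hub",
)
```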

Files changed (1)
  1. config.json +88 -0
config.json ADDED
@@ -0,0 +1,88 @@
+ {
+   "activation_fn_name": "swish",
+   "architectures": [
+     "OpenELMForCausalLM"
+   ],
+   "auto_map": {
+     "AutoConfig": "configuration_openelm.OpenELMConfig",
+     "AutoModelForCausalLM": "modeling_openelm.OpenELMForCausalLM"
+   },
+   "bos_token_id": 1,
+   "eos_token_id": 2,
+   "ffn_dim_divisor": 256,
+   "ffn_multipliers": [
+     0.5,
+     0.73,
+     0.97,
+     1.2,
+     1.43,
+     1.67,
+     1.9,
+     2.13,
+     2.37,
+     2.6,
+     2.83,
+     3.07,
+     3.3,
+     3.53,
+     3.77,
+     4.0
+   ],
+   "ffn_with_glu": true,
+   "head_dim": 64,
+   "initializer_range": 0.02,
+   "max_context_length": 2048,
+   "model_dim": 1280,
+   "model_type": "openelm",
+   "normalization_layer_name": "rms_norm",
+   "normalize_qk_projections": true,
+   "num_gqa_groups": 4,
+   "num_kv_heads": [
+     3,
+     3,
+     3,
+     3,
+     3,
+     4,
+     4,
+     4,
+     4,
+     4,
+     4,
+     4,
+     5,
+     5,
+     5,
+     5
+   ],
+   "num_query_heads": [
+     12,
+     12,
+     12,
+     12,
+     12,
+     16,
+     16,
+     16,
+     16,
+     16,
+     16,
+     16,
+     20,
+     20,
+     20,
+     20
+   ],
+   "num_transformer_layers": 16,
+   "qkv_multipliers": [
+     0.5,
+     1.0
+   ],
+   "rope_freq_constant": 10000,
+   "rope_max_length": 4096,
+   "share_input_output_layers": true,
+   "torch_dtype": "bfloat16",
+   "transformers_version": "4.39.3",
+   "use_cache": true,
+   "vocab_size": 32000
+ }
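Because `auto_map` points at custom `configuration_openelm` / `modeling_openelm` modules shipped in the repo, loading this config through transformers requires `trust_remote_code=True`. A minimal sketch, again assuming a placeholder repo id:

```python
from transformers import AutoConfig, AutoModelForCausalLM

repo_id = "asjoberg/your-model-repo"  # placeholder: substitute the real repo id

# AutoConfig resolves via auto_map to configuration_openelm.OpenELMConfig
config = AutoConfig.from_pretrained(repo_id, trust_remote_code=True)

# Per-layer grouped-query attention: every block keeps a 4:1 query-to-KV
# head ratio (12/3 = 16/4 = 20/5 = 4 = num_gqa_groups)
assert all(q // kv == config.num_gqa_groups
           for q, kv in zip(config.num_query_heads, config.num_kv_heads))

model = AutoModelForCausalLM.from_pretrained(
    repo_id,
    trust_remote_code=True,
    torch_dtype="bfloat16",  # matches the config's torch_dtype
)
```

Note how the head counts grow across the 16 layers (12 to 20 query heads, 3 to 5 KV heads) while the 4-way GQA grouping stays fixed, and how `ffn_multipliers` ramps from 0.5 to 4.0: this is OpenELM's layer-wise scaling, which allocates more width to deeper layers instead of a uniform per-layer budget.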