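# Model architecture config for OLMo-1B-hf (org: allenai).
# Field names appear to follow a litgpt-style Config schema (an inference
# from the field names; not stated in the file itself).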
attention_logit_softcapping: null
attention_scores_scalar: null
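# attn_bias / bias: no bias terms on any linear projection (attention or MLP)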
attn_bias: false
bias: false
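# maximum context length in tokens: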
block_size: 2048
final_logit_softcapping: null
gelu_approximate: none
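# head_size = n_embd / n_head = 2048 / 16: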
head_size: 128
hf_config:
  name: OLMo-1B-hf
  org: allenai
intermediate_size: 8192
lm_head_bias: false
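# gated (SwiGLU-style) feed-forward; intermediate_size = 4 * n_embd = 8192: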
mlp_class_name: LLaMAMLP
moe_intermediate_size: null
n_embd: 2048
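# dense model: no mixture-of-experts routing (n_expert = 0):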
n_expert: 0
n_expert_per_token: 0
n_head: 16
n_layer: 16
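# n_query_groups == n_head (16), i.e. standard multi-head attention, no GQA/MQA: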
n_query_groups: 16
name: OLMo-1B-hf
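# pre-norm before attention (norm_1) and before the MLP (norm_2);
# the post-sublayer norms further down are disabled: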
norm_1: true
norm_2: true
norm_class_name: LayerNorm
norm_eps: 1.0e-05
norm_qk: false
norm_qk_type: default
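# embedding table padded from 50280 to 50304 rows (+24) for hardware-friendly sizes: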
padded_vocab_size: 50304
padding_multiple: 512
parallel_residual: false
post_attention_norm: false
post_mlp_norm: false
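# standard RoPE: base 10000, applied to all head dims (rotary_percentage: 1.0),
# no condensing or frequency adjustments: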
rope_adjustments: null
rope_base: 10000
rope_condense_ratio: 1
rope_indices: null
rope_local_base_freq: null
rotary_percentage: 1.0
scale_embeddings: false
shared_attention_norm: false
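# full global attention in every layer; no sliding window: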
sliding_window_indices: null
sliding_window_size: null
vocab_size: 50280
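
# Minimal load sketch, kept as comments so the file stays valid YAML
# (assumption: this file matches litgpt's Config schema, where
# Config.from_name resolves a registered config by name):
#   from litgpt import Config
#   cfg = Config.from_name("OLMo-1B-hf")
#   assert cfg.head_size == cfg.n_embd // cfg.n_head  # 128 == 2048 // 16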