AlekseyCalvin committed on
Commit 2c4616d · verified · 1 Parent(s): afe3cd6

Upload config.json with huggingface_hub

Files changed (1)
  config.json +97 -0
config.json ADDED
@@ -0,0 +1,97 @@
+{
+  "add_expanded_embeddings": true,
+  "architectures": [
+    "BolmoForCausalLM"
+  ],
+  "attention_bias": false,
+  "attention_dropout": 0.0,
+  "auto_map": {
+    "AutoConfig": "configuration_bolmo.BolmoConfig",
+    "AutoModelForCausalLM": "modeling_bolmo.BolmoForCausalLM"
+  },
+  "bos_token_id": 1,
+  "boundary_predictor_lookahead": 1,
+  "boundary_threshold": "sample:0",
+  "dtype": "float32",
+  "eos_token_id": 1,
+  "hidden_act": "silu",
+  "hidden_size": 4096,
+  "initializer_range": 0.02,
+  "intermediate_size": 11008,
+  "layer_types": [
+    "sliding_attention",
+    "sliding_attention",
+    "sliding_attention",
+    "full_attention",
+    "sliding_attention",
+    "sliding_attention",
+    "sliding_attention",
+    "full_attention",
+    "sliding_attention",
+    "sliding_attention",
+    "sliding_attention",
+    "full_attention",
+    "sliding_attention",
+    "sliding_attention",
+    "sliding_attention",
+    "full_attention",
+    "sliding_attention",
+    "sliding_attention",
+    "sliding_attention",
+    "full_attention",
+    "sliding_attention",
+    "sliding_attention",
+    "sliding_attention",
+    "full_attention",
+    "sliding_attention",
+    "sliding_attention",
+    "sliding_attention",
+    "full_attention",
+    "sliding_attention",
+    "sliding_attention",
+    "sliding_attention",
+    "full_attention"
+  ],
+  "local_intermediate_size": 5504,
+  "local_rms_norm_eps": 1e-05,
+  "max_position_embeddings": 65536,
+  "model_type": "bolmo",
+  "num_attention_heads": 32,
+  "num_hidden_layers": 32,
+  "num_key_value_heads": 32,
+  "num_local_decoder_layers": 4,
+  "num_local_encoder_layers": 1,
+  "num_local_heads": 16,
+  "pad_token_id": 0,
+  "rms_norm_eps": 1e-06,
+  "rope_scaling": {
+    "attention_factor": 1.2079441541679836,
+    "beta_fast": 32,
+    "beta_slow": 1,
+    "factor": 8.0,
+    "original_max_position_embeddings": 8192,
+    "rope_type": "yarn"
+  },
+  "rope_theta": 500000,
+  "sliding_window": 4096,
+  "subword_vocab_size": 100278,
+  "tie_word_embeddings": false,
+  "tokenizer_config": {
+    "bos_token_id": 1,
+    "bpe_token_end_id": 3,
+    "eos_token_id": 1,
+    "original_identifier": "allenai/dolma2-tokenizer",
+    "pad_token_id": 0,
+    "special_tokens": [
+      "<pad>",
+      "<bos>",
+      "<eos>",
+      "<bpe_token_end>"
+    ],
+    "special_tokens_first": true,
+    "vocab_size": 520
+  },
+  "transformers_version": "4.57.3",
+  "use_cache": true,
+  "vocab_size": 520
+}
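
Note: the `auto_map` entries point at custom code (`configuration_bolmo.BolmoConfig`, `modeling_bolmo.BolmoForCausalLM`) that ships with the repo rather than with transformers, so loading requires `trust_remote_code=True`. Below is a minimal loading sketch; the repo id is a hypothetical placeholder, not the actual destination of this commit. The snippet also checks that the YaRN `attention_factor` matches the standard mscale formula `0.1 * ln(factor) + 1.0` for `factor = 8.0`.

```python
import math

from transformers import AutoConfig, AutoModelForCausalLM

# The YaRN "attention_factor" in this config follows the usual mscale
# formula 0.1 * ln(scale) + 1.0; with factor = 8.0 this gives
# 1.2079441541679836, matching the value in rope_scaling above.
assert math.isclose(0.1 * math.log(8.0) + 1.0, 1.2079441541679836)

# REPO_ID is a placeholder assumption; substitute the Hub repo this
# commit belongs to. trust_remote_code=True lets transformers import
# the BolmoConfig / BolmoForCausalLM classes referenced in auto_map.
REPO_ID = "user/bolmo-model"  # hypothetical

config = AutoConfig.from_pretrained(REPO_ID, trust_remote_code=True)
model = AutoModelForCausalLM.from_pretrained(REPO_ID, trust_remote_code=True)
```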