{
  "_inserted_LatentThinkingModelSettings": {
    "add_latent_to_end": false,
    "binary_head_temp": null,
    "continue_token_id": 128258,
    "debug_mode": false,
    "detach_binary_head_inputs": true,
    "disable_checkpointing_cache_update": true,
    "disable_forward_input_embeds": true,
    "disable_input_past_key_values": false,
    "end_token_id": 128257,
    "lora_mode": false,
    "recurrent_filter_mode": "MLP",
    "start_token_id": 128256,
    "stop_token_id": 128259,
    "unused_token_ids": [
      128260,
      128261,
      128262,
      128263,
      128264,
      128265,
      128266,
      128267,
      128268,
      128269,
      128270,
      128271,
      128272,
      128273,
      128274,
      128275,
      128276,
      128277,
      128278,
      128279,
      128280,
      128281,
      128282,
      128283,
      128284,
      128285,
      128286,
      128287,
      128288,
      128289,
      128290,
      128291,
      128292,
      128293,
      128294,
      128295,
      128296,
      128297,
      128298,
      128299,
      128300,
      128301,
      128302,
      128303,
      128304,
      128305,
      128306,
      128307,
      128308,
      128309,
      128310,
      128311,
      128312,
      128313,
      128314,
      128315,
      128316,
      128317,
      128318,
      128319,
      128320,
      128321,
      128322,
      128323,
      128324,
      128325,
      128326,
      128327,
      128328,
      128329,
      128330,
      128331,
      128332,
      128333,
      128334,
      128335,
      128336,
      128337,
      128338,
      128339,
      128340,
      128341,
      128342,
      128343,
      128344,
      128345,
      128346,
      128347,
      128348,
      128349,
      128350,
      128351,
      128352,
      128353,
      128354,
      128355,
      128356,
      128357,
      128358,
      128359,
      128360,
      128361,
      128362,
      128363,
      128364,
      128365,
      128366,
      128367,
      128368,
      128369,
      128370,
      128371,
      128372,
      128373,
      128374,
      128375,
      128376,
      128377,
      128378,
      128379,
      128380,
      128381,
      128382,
      128383
    ]
  },
  "architectures": [
    "LatentThinkingModel"
  ],
  "attention_bias": false,
  "attention_dropout": 0.0,
  "bos_token_id": 128000,
  "eos_token_id": [
    128001,
    128008,
    128009
  ],
  "head_dim": 64,
  "hidden_act": "silu",
  "hidden_size": 2048,
  "initializer_range": 0.02,
  "intermediate_size": 8192,
  "max_position_embeddings": 131072,
  "mlp_bias": false,
  "model_type": "llama",
  "num_attention_heads": 32,
  "num_hidden_layers": 16,
  "num_key_value_heads": 8,
  "pretraining_tp": 1,
  "rms_norm_eps": 1e-05,
  "rope_scaling": {
    "factor": 32.0,
    "high_freq_factor": 4.0,
    "low_freq_factor": 1.0,
    "original_max_position_embeddings": 8192,
    "rope_type": "llama3"
  },
  "rope_theta": 500000.0,
  "tie_word_embeddings": true,
  "torch_dtype": "bfloat16",
  "transformers_version": "4.53.0",
  "use_cache": true,
  "vocab_size": 128384
}
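
For reference, this is a standard Hugging Face transformers config.json: model_type "llama" with Llama-3.2-1B-shaped dimensions (16 hidden layers, hidden size 2048 = 32 attention heads x head_dim 64, 8 key-value heads, tied embeddings), extended with a custom "_inserted_LatentThinkingModelSettings" block. The vocabulary grows Llama 3's 128,256 ids to 128,384: four latent-control tokens (start 128256, end 128257, continue 128258, stop 128259) plus 124 reserved-but-unused ids (128260-128383). A minimal Python sketch of reading those fields back with only the standard library; the file path is a placeholder, and the asserts simply restate the numbers above:

import json

# Placeholder path: wherever this config.json is stored.
with open("config.json") as f:
    cfg = json.load(f)

settings = cfg["_inserted_LatentThinkingModelSettings"]

# Model width is head count times per-head width: 32 * 64 = 2048.
assert cfg["num_attention_heads"] * cfg["head_dim"] == cfg["hidden_size"]

# Vocab extension: 128,256 base Llama 3 ids + 4 latent-control tokens
# (start/end/continue/stop) + 124 unused reserved ids = 128,384.
control_tokens = 4
assert settings["start_token_id"] == 128256
assert cfg["vocab_size"] == 128256 + control_tokens + len(settings["unused_token_ids"])

In transformers, this is the file that AutoConfig.from_pretrained reads; note that the LatentThinkingModel class named under "architectures" is a custom model class, not part of the transformers library itself.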