Commit · c1081af
1 Parent(s): 78d9827

Create config.json

config.json  ADDED  (+18 -0)
@@ -0,0 +1,18 @@
+{
+  "activation_function": "relu",
+  "architectures": [
+    "GPT2LMHeadModel"
+  ],
+  "attn_pdrop": 0.1,
+  "bos_token_id": 13,
+  "embd_pdrop": 0.1,
+  "eos_token_id": 13,
+  "initializer_range": 0.02,
+  "layer_norm_epsilon": 1e-05,
+  "model_type": "2digit",
+  "n_ctx": 15,
+  "n_embd": 4,
+  "n_head": 2,
+  "n_layer": 1,
+  "vocab_size": 14
+}
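
The committed file describes a toy GPT-2-style language model: 1 transformer layer, 2 attention heads, 4-dimensional embeddings, a 15-token context window, and a 14-token vocabulary (presumably digits plus a few special tokens for the "2digit" task, with token 13 serving as both BOS and EOS). Below is a minimal sketch of how such a config could be consumed with the Hugging Face transformers library; the local path "config.json" and the parameter-count print are illustrative assumptions, not part of this commit.

    # Sketch: load the committed config and build the model it describes.
    # Assumes the `transformers` library and PyTorch are installed locally.
    from transformers import GPT2Config, GPT2LMHeadModel

    # Read the JSON file added in this commit; the non-standard
    # "model_type": "2digit" is carried along as an extra attribute,
    # while the architecture itself is a plain GPT2LMHeadModel.
    config = GPT2Config.from_json_file("config.json")

    # Instantiate the (randomly initialised) tiny model:
    # n_layer=1, n_head=2, n_embd=4, vocab_size=14.
    model = GPT2LMHeadModel(config)
    print(sum(p.numel() for p in model.parameters()))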