Upload Cybersecurity-SLM v2
Browse files- README.md +1 -1
- config.json +2 -2
- tokenizer_config.json +1 -1
README.md
CHANGED
|
@@ -26,7 +26,7 @@ Supports up to **1M token context** via RoPE.
|
|
| 26 |
| Layers | 8 |
|
| 27 |
| Heads | 8 |
|
| 28 |
| Embedding | 512 |
|
| 29 |
-
| Max Context |
|
| 30 |
| Vocab | 16,000 BPE |
|
| 31 |
| Best Loss | 1.0129245206713677 |
|
| 32 |
|
|
|
|
| 26 |
| Layers | 8 |
|
| 27 |
| Heads | 8 |
|
| 28 |
| Embedding | 512 |
|
| 29 |
+
| Max Context | 1,000,000 tokens |
|
| 30 |
| Vocab | 16,000 BPE |
|
| 31 |
| Best Loss | 1.0129245206713677 |
|
| 32 |
|
config.json
CHANGED
|
@@ -12,7 +12,7 @@
|
|
| 12 |
"dropout": 0.1,
|
| 13 |
"bias": false,
|
| 14 |
"ffn_multiplier": 2.667,
|
| 15 |
-
"max_position_embeddings":
|
| 16 |
-
"rope_theta":
|
| 17 |
"n_parameters": 33890816
|
| 18 |
}
|
|
|
|
| 12 |
"dropout": 0.1,
|
| 13 |
"bias": false,
|
| 14 |
"ffn_multiplier": 2.667,
|
| 15 |
+
"max_position_embeddings": 1000000,
|
| 16 |
+
"rope_theta": 50000000000.0,
|
| 17 |
"n_parameters": 33890816
|
| 18 |
}
|
tokenizer_config.json
CHANGED
|
@@ -4,5 +4,5 @@
|
|
| 4 |
"eos_token": "<eos>",
|
| 5 |
"unk_token": "<unk>",
|
| 6 |
"pad_token": "<pad>",
|
| 7 |
-
"model_max_length":
|
| 8 |
}
|
|
|
|
| 4 |
"eos_token": "<eos>",
|
| 5 |
"unk_token": "<unk>",
|
| 6 |
"pad_token": "<pad>",
|
| 7 |
+
"model_max_length": 1000000
|
| 8 |
}
|