Upload custom GPT 124M
- config.json +15 -0
- model.safetensors +3 -0
config.json
ADDED
@@ -0,0 +1,15 @@
+{
+  "architectures": [
+    "HFGPTModel"
+  ],
+  "context_length": 256,
+  "drop_rate": 0.1,
+  "dtype": "float32",
+  "emb_dim": 768,
+  "model_type": "custom-gpt",
+  "n_heads": 12,
+  "n_layers": 12,
+  "qkv_bias": true,
+  "transformers_version": "4.56.1",
+  "vocab_size": 50257
+}
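Since "custom-gpt" is not a stock transformers model_type, a config like this implies a custom PretrainedConfig subclass on the loading side. The sketch below is an assumption that simply mirrors the JSON fields above; the actual HFGPTModel implementation and its config class are not part of this commit.

from transformers import PretrainedConfig

class CustomGPTConfig(PretrainedConfig):
    # Must match the "model_type" field in the uploaded config.json.
    model_type = "custom-gpt"

    def __init__(self, vocab_size=50257, context_length=256, emb_dim=768,
                 n_heads=12, n_layers=12, drop_rate=0.1, qkv_bias=True,
                 **kwargs):
        # Defaults copied from the config.json in this commit.
        self.vocab_size = vocab_size
        self.context_length = context_length
        self.emb_dim = emb_dim
        self.n_heads = n_heads
        self.n_layers = n_layers
        self.drop_rate = drop_rate
        self.qkv_bias = qkv_bias
        super().__init__(**kwargs)

Registering the pair with AutoConfig.register("custom-gpt", CustomGPTConfig) and AutoModel.register(CustomGPTConfig, HFGPTModel) would then let from_pretrained resolve this custom model_type, assuming the model class is importable.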
model.safetensors
ADDED
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:4ca5a9304313df2df29567638928b2621b8fe4511147040bf5b59d4c294cdcdc
+size 652957264
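The weights file is stored via Git LFS, so the diff shows only the pointer: the spec version, the sha256 oid of the real file, and its size in bytes. A quick sketch of verifying a downloaded copy against this pointer; the repo id is a placeholder, since the commit page does not show it.

import hashlib
import os
from huggingface_hub import hf_hub_download

# repo_id is a placeholder -- substitute the actual repo this commit belongs to.
path = hf_hub_download(repo_id="<user>/<repo>", filename="model.safetensors")

digest = hashlib.sha256()
with open(path, "rb") as f:
    for chunk in iter(lambda: f.read(1 << 20), b""):
        digest.update(chunk)

# oid and size are taken verbatim from the LFS pointer above.
assert digest.hexdigest() == "4ca5a9304313df2df29567638928b2621b8fe4511147040bf5b59d4c294cdcdc"
assert os.path.getsize(path) == 652957264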