Update config.json
Fix layer type error with transformers v5.
See https://huggingface.co/codefuse-ai/F2LLM-v2-80M/discussions/1
config.json  +0 -12

@@ -28,18 +28,6 @@
     "full_attention",
     "full_attention",
     "full_attention",
-    "full_attention",
-    "full_attention",
-    "full_attention",
-    "full_attention",
-    "full_attention",
-    "full_attention",
-    "full_attention",
-    "full_attention",
-    "full_attention",
-    "full_attention",
-    "full_attention",
-    "full_attention",
     "full_attention"
   ],
   "max_position_embeddings": 40960,
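For anyone verifying the fix, a minimal sketch (not part of this commit) of a check that the trimmed config loads under transformers v5; it assumes the model id codefuse-ai/F2LLM-v2-80M and that this architecture's config exposes layer_types and num_hidden_layers:

from transformers import AutoConfig

# Load the updated config from the Hub (or pass a local path to the edited file).
config = AutoConfig.from_pretrained("codefuse-ai/F2LLM-v2-80M")

# This commit trims layer_types; the layer type error under transformers v5
# presumably came from the list being longer than the actual number of layers,
# so after the change the two counts should agree.
assert len(config.layer_types) == config.num_hidden_layers, (
    f"layer_types has {len(config.layer_types)} entries "
    f"but num_hidden_layers is {config.num_hidden_layers}"
)
print("layer_types length matches num_hidden_layers")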