Upload LlamaForCausalLM
- config.json +2 -3
- generation_config.json +1 -1
- model.safetensors.index.json +1 -0
config.json
CHANGED
@@ -1,11 +1,11 @@
 {
-  "_name_or_path": "/mnt/home/RetrieveR1/checkpoints/easy_r1/llama3.18b_mix/final/actor/huggingface/",
   "architectures": [
     "LlamaForCausalLM"
   ],
   "attention_bias": false,
   "attention_dropout": 0.0,
   "bos_token_id": 128000,
+  "dtype": "float32",
   "eos_token_id": 128009,
   "head_dim": 128,
   "hidden_act": "silu",
@@ -30,8 +30,7 @@
   },
   "rope_theta": 500000.0,
   "tie_word_embeddings": false,
-  "torch_dtype": "float32",
-  "transformers_version": "4.49.0",
+  "transformers_version": "4.56.1",
   "use_cache": true,
   "vocab_size": 128320
 }
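The dtype change follows the transformers config rename: the old config (written by transformers 4.49.0) stored the weight dtype under "torch_dtype", while the new one (written by 4.56.1) stores it under "dtype". A minimal sketch, assuming only the Python standard library and a locally downloaded config.json (the path is a placeholder), that reads the dtype from either form:

import json

# Read the weight dtype from a config.json regardless of whether it was
# written by an older transformers ("torch_dtype") or a newer one ("dtype").
with open("config.json") as f:  # placeholder path
    cfg = json.load(f)

dtype = cfg.get("dtype", cfg.get("torch_dtype"))
print(dtype)  # "float32" for this checkpoint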
generation_config.json
CHANGED
@@ -3,5 +3,5 @@
   "bos_token_id": 128000,
   "eos_token_id": 128009,
   "pad_token_id": 128009,
-  "transformers_version": "4.49.0"
+  "transformers_version": "4.56.1"
 }
model.safetensors.index.json
CHANGED
@@ -1,5 +1,6 @@
 {
   "metadata": {
+    "total_parameters": 8030785536,
     "total_size": 32123142144
   },
   "weight_map": {
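The added "total_parameters" field is consistent with the existing "total_size": for an all-float32 checkpoint each parameter occupies 4 bytes, and 8030785536 * 4 = 32123142144. A minimal sketch, assuming a locally downloaded index file (the path is a placeholder), that verifies this invariant:

import json

# Sanity-check the index metadata: total_size (bytes) should equal
# total_parameters * 4 for a float32-only checkpoint.
with open("model.safetensors.index.json") as f:  # placeholder path
    index = json.load(f)

meta = index["metadata"]
assert meta["total_size"] == meta["total_parameters"] * 4
print(meta["total_parameters"], "parameters,", meta["total_size"], "bytes")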