mahmoudhas9 committed
Commit de7b7b3 · verified · 1 Parent(s): 9844051

Upload folder using huggingface_hub

Files changed (2)
  1. config.json +6 -6
  2. pytorch_model.bin +2 -2
config.json CHANGED
@@ -9,8 +9,8 @@
   "eagle_config": {
     "eagle_aux_hidden_state_layer_ids": [
       1,
-      17,
-      32
+      46,
+      90
     ],
     "use_aux_hidden_state": true,
     "use_input_layernorm_in_first_layer": true,
@@ -18,11 +18,11 @@
     "use_mtp_layernorm": false
   },
   "eos_token_id": null,
-  "head_dim": 64,
+  "head_dim": 128,
   "hidden_act": "silu",
-  "hidden_size": 28,
+  "hidden_size": 40,
   "initializer_range": 0.02,
-  "intermediate_size": 172,
+  "intermediate_size": 122,
   "max_position_embeddings": 128,
   "mlp_bias": false,
   "model_type": "llama",
@@ -43,5 +43,5 @@
   "torch_dtype": "bfloat16",
   "transformers_version": "4.55.0",
   "use_cache": true,
-  "vocab_size": 2010
+  "vocab_size": 1519
 }
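For reference, a config updated like this can be inspected with transformers' AutoConfig after downloading the folder; the local path below is an assumption, since the full repository id is not shown in this diff. Custom keys such as eagle_config are kept as attributes on the config object.

from transformers import AutoConfig

# Assumed local download path of the uploaded folder (hypothetical).
config = AutoConfig.from_pretrained("path/to/model")

# The updated dimensions from this commit:
print(config.head_dim)           # 128
print(config.hidden_size)        # 40
print(config.intermediate_size)  # 122
print(config.vocab_size)         # 1519

# Extra keys in config.json are preserved, including the EAGLE settings.
print(config.eagle_config["eagle_aux_hidden_state_layer_ids"])  # [1, 46, 90]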
pytorch_model.bin CHANGED
@@ -1,3 +1,3 @@
 version https://git-lfs.github.com/spec/v1
-oid sha256:521ffec845966a8384494e90743d69c9cb9013311f68f4fa64bd152196ba1fab
-size 297576
+oid sha256:c0a6df8d35a0b74c561e95250c03746eb27a5eed91e30e7058e24e3472f71cb7
+size 333864
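The weight file is stored via Git LFS, so the diff only shows the pointer (a SHA-256 oid and a byte size), not the weights themselves. A downloaded copy can be checked against the new pointer with standard hashlib; a minimal sketch, assuming the file sits at the local path below.

import hashlib
import os

# Expected values from the new LFS pointer in this commit.
EXPECTED_OID = "c0a6df8d35a0b74c561e95250c03746eb27a5eed91e30e7058e24e3472f71cb7"
EXPECTED_SIZE = 333864

path = "pytorch_model.bin"  # assumed local download path

# The pointer records the exact byte size of the real file.
assert os.path.getsize(path) == EXPECTED_SIZE, "size mismatch"

# Stream the file through SHA-256 so large weights need not fit in memory.
h = hashlib.sha256()
with open(path, "rb") as f:
    for chunk in iter(lambda: f.read(1 << 20), b""):
        h.update(chunk)

assert h.hexdigest() == EXPECTED_OID, "checksum mismatch"
print("pytorch_model.bin matches the LFS pointer")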