pere committed (verified)
Commit 9dcca83 · 1 Parent(s): cd0d745

Upload config.json with huggingface_hub

Files changed (1): config.json (+8 -6)
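The commit message says the file was uploaded with huggingface_hub. A minimal sketch of such an upload is below; the repo id and local path are placeholders for illustration, not values taken from this commit.

```python
# Minimal sketch: uploading a config.json with huggingface_hub.
# The repo_id and local path below are placeholders, not from this commit.
from huggingface_hub import HfApi

api = HfApi()  # picks up the token from `huggingface-cli login` by default
api.upload_file(
    path_or_fileobj="config.json",        # local file to upload
    path_in_repo="config.json",           # destination path inside the repo
    repo_id="your-username/your-model",   # placeholder repo id
    commit_message="Upload config.json with huggingface_hub",
)
```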
config.json CHANGED
@@ -1,13 +1,15 @@
 {
-  "_name_or_path": "meta-llama/Meta-Llama-3.1-8B",
   "architectures": [
     "LlamaForCausalLM"
   ],
   "attention_bias": false,
   "attention_dropout": 0.0,
   "bos_token_id": 128000,
-  "eos_token_id": 128001,
-  "head_dim": 128,
+  "eos_token_id": [
+    128001,
+    128008,
+    128009
+  ],
   "hidden_act": "silu",
   "hidden_size": 4096,
   "initializer_range": 0.02,
@@ -22,15 +24,15 @@
   "rms_norm_eps": 1e-05,
   "rope_scaling": {
     "factor": 8.0,
-    "high_freq_factor": 4.0,
     "low_freq_factor": 1.0,
+    "high_freq_factor": 4.0,
     "original_max_position_embeddings": 8192,
     "rope_type": "llama3"
   },
   "rope_theta": 500000.0,
   "tie_word_embeddings": false,
-  "torch_dtype": "float32",
-  "transformers_version": "4.45.2",
+  "torch_dtype": "bfloat16",
+  "transformers_version": "4.42.3",
   "use_cache": true,
   "vocab_size": 128256
 }
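For reference, a hedged sketch of inspecting the updated fields after this commit; "your-username/your-model" is a placeholder repo id, and the expected values assume the post-commit config shown above.

```python
# Sketch: checking the updated config fields with transformers' AutoConfig.
# The repo id is a placeholder, not taken from this commit.
from transformers import AutoConfig

config = AutoConfig.from_pretrained("your-username/your-model")
print(config.eos_token_id)  # expected after this commit: [128001, 128008, 128009]
print(config.torch_dtype)   # expected after this commit: torch.bfloat16
print(config.rope_scaling)  # llama3-style RoPE scaling dict with factor 8.0
```

Turning eos_token_id into a list lets generation stop on any of the listed token ids rather than on a single end-of-sequence token.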