asif00 committed
Commit 54aa97a · verified · 1 Parent(s): 4807455

Current es_en model

Files changed (4):
  1. README.md +3 -0
  2. config.json +5 -4
  3. generation_config.json +1 -1
  4. model.safetensors +3 -0
README.md CHANGED
@@ -0,0 +1,3 @@
+ ---
+ {}
+ ---
config.json CHANGED
@@ -18,18 +18,19 @@
  "num_attention_heads": 24,
  "num_hidden_layers": 28,
  "num_key_value_heads": 8,
+ "pad_token_id": null,
  "pretraining_tp": 1,
  "rms_norm_eps": 1e-05,
- "rope_scaling": {
+ "rope_parameters": {
    "factor": 32.0,
    "high_freq_factor": 4.0,
    "low_freq_factor": 1.0,
    "original_max_position_embeddings": 8192,
+   "rope_theta": 500000.0,
    "rope_type": "llama3"
  },
- "rope_theta": 500000.0,
  "tie_word_embeddings": true,
- "transformers_version": "4.57.3",
- "use_cache": true,
+ "transformers_version": "5.0.0",
+ "use_cache": false,
  "vocab_size": 156949
}
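The notable change here is that the RoPE settings move from `rope_scaling` to a `rope_parameters` block, with `rope_theta` folded into that block, alongside the bump to transformers 5.0.0. A minimal sketch for inspecting the updated block, assuming the new config.json sits in the current working directory (the path is an assumption, not part of the commit):

```python
# Minimal sketch: read the updated config.json and print the RoPE block.
# The local path "config.json" is an assumption; it is not named in the commit.
import json

with open("config.json") as f:
    cfg = json.load(f)

# After this commit the RoPE settings live under "rope_parameters"
# (previously "rope_scaling"), with "rope_theta" inside the same block.
rope = cfg["rope_parameters"]
print(rope["rope_type"], rope["factor"], rope["rope_theta"])
```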
generation_config.json CHANGED
@@ -8,5 +8,5 @@
  ],
  "temperature": 0.6,
  "top_p": 0.9,
- "transformers_version": "4.57.3"
+ "transformers_version": "5.0.0"
}
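The sampling defaults themselves (temperature 0.6, top_p 0.9) are unchanged; only the transformers version stamp moves to 5.0.0. A minimal sketch for loading these defaults with transformers, assuming a local checkout of the repo (the "." path is an assumption):

```python
# Minimal sketch: load the generation defaults stored in generation_config.json.
# Loading from "." assumes a local checkout of this repo; adjust the path as needed.
from transformers import GenerationConfig

gen_cfg = GenerationConfig.from_pretrained(".")
print(gen_cfg.temperature, gen_cfg.top_p)  # 0.6, 0.9 per this commit
```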
model.safetensors ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:8cdefad374fc469dcc71e130d9abeebaaced77768bb236322c26d256866b4997
+ size 6601818920