TII_LSE committed on
Commit
413d277
·
verified ·
1 Parent(s): 1b408ad

Upload LlamaForCausalLM

Browse files
adapter_config.json CHANGED
@@ -1,7 +1,7 @@
1
  {
2
  "alpha_pattern": {},
3
  "auto_mapping": null,
4
- "base_model_name_or_path": null,
5
  "bias": "none",
6
  "eva_config": null,
7
  "exclude_modules": null,
 
1
  {
2
  "alpha_pattern": {},
3
  "auto_mapping": null,
4
+ "base_model_name_or_path": "meta-llama/Llama-3.2-1B",
5
  "bias": "none",
6
  "eva_config": null,
7
  "exclude_modules": null,
adapter_model.safetensors CHANGED
@@ -1,3 +1,3 @@
1
  version https://git-lfs.github.com/spec/v1
2
- oid sha256:2c0402ef4674c5000f763e44adda5648115d10912a5d5f868031eda8f98ea907
3
- size 6825304
 
1
  version https://git-lfs.github.com/spec/v1
2
+ oid sha256:18c27b7c42148d3fab53bce5a31cd562d37580de4c732465130010f4a208d20e
3
+ size 6824216
generation_config.json CHANGED
@@ -1,9 +1,8 @@
1
  {
2
- "bos_token_id": 1,
 
3
  "do_sample": true,
4
- "eos_token_id": 2,
5
- "max_length": 4096,
6
- "pad_token_id": 0,
7
  "temperature": 0.6,
8
  "top_p": 0.9,
9
  "transformers_version": "4.50.3"
 
1
  {
2
+ "_from_model_config": true,
3
+ "bos_token_id": 128000,
4
  "do_sample": true,
5
+ "eos_token_id": 128001,
 
 
6
  "temperature": 0.6,
7
  "top_p": 0.9,
8
  "transformers_version": "4.50.3"