Sudhu2004 committed
Commit 3b3e761 · 1 Parent(s): 33aa37b

Upload LlamaForCausalLM

Files changed (3):
  1. config.json +3 -3
  2. generation_config.json +10 -0
  3. pytorch_model.bin +2 -2
config.json CHANGED
@@ -19,12 +19,12 @@
    "bnb_4bit_compute_dtype": "bfloat16",
    "bnb_4bit_quant_type": "nf4",
    "bnb_4bit_use_double_quant": true,
-   "llm_int8_enable_fp32_cpu_offload": true,
+   "llm_int8_enable_fp32_cpu_offload": false,
    "llm_int8_has_fp16_weight": false,
    "llm_int8_skip_modules": null,
    "llm_int8_threshold": 6.0,
-   "load_in_4bit": true,
-   "load_in_8bit": false,
+   "load_in_4bit": false,
+   "load_in_8bit": true,
    "quant_method": "bitsandbytes"
  },
  "rms_norm_eps": 1e-06,
generation_config.json ADDED
@@ -0,0 +1,10 @@
+ {
+   "bos_token_id": 1,
+   "do_sample": true,
+   "eos_token_id": 2,
+   "max_length": 4096,
+   "pad_token_id": 0,
+   "temperature": 0.6,
+   "top_p": 0.9,
+   "transformers_version": "4.33.2"
+ }
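The new generation_config.json gives generate() its default decoding behavior: nucleus sampling at temperature 0.6 and top-p 0.9, with Llama's usual special-token ids. A minimal sketch of how these defaults apply, reusing the quantized model from the previous sketch; building the GenerationConfig explicitly just mirrors the file, since from_pretrained() would pick it up automatically:

```python
from transformers import AutoTokenizer, GenerationConfig

# Explicit copy of the values added in generation_config.json.
gen_config = GenerationConfig(
    bos_token_id=1,
    eos_token_id=2,
    pad_token_id=0,
    do_sample=True,    # sample instead of greedy decoding
    temperature=0.6,
    top_p=0.9,
    max_length=4096,
)

tokenizer = AutoTokenizer.from_pretrained("Sudhu2004/<model-repo>")  # placeholder repo id
inputs = tokenizer("Hello", return_tensors="pt").to(model.device)    # `model` from the sketch above
output = model.generate(**inputs, generation_config=gen_config)
print(tokenizer.decode(output[0], skip_special_tokens=True))
```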
pytorch_model.bin CHANGED
@@ -1,3 +1,3 @@
  version https://git-lfs.github.com/spec/v1
- oid sha256:275782100c15cb8b24944b2ff927c064f402028c8e424bd763cee0ba7ffbd9fc
- size 3762944985
+ oid sha256:de66f29c657b9bbc7fe8872da407f957f294929920e9285c81d7e13cdff1219e
+ size 7006439213
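The repository tracks only this Git LFS pointer (version, oid, size); the binary itself lives in LFS storage. The jump from ~3.76 GB to ~7.0 GB is consistent with re-serializing the weights at 8-bit rather than 4-bit precision. A minimal sketch for verifying a downloaded copy against the pointer, assuming the file sits in the current directory:

```python
import hashlib
from pathlib import Path

# Values copied from the new LFS pointer above.
EXPECTED_OID = "de66f29c657b9bbc7fe8872da407f957f294929920e9285c81d7e13cdff1219e"
EXPECTED_SIZE = 7006439213

path = Path("pytorch_model.bin")  # placeholder location of the downloaded file
assert path.stat().st_size == EXPECTED_SIZE, "size mismatch vs. LFS pointer"

# Stream the file in 1 MiB chunks so the ~7 GB binary never sits in memory at once.
h = hashlib.sha256()
with path.open("rb") as f:
    for chunk in iter(lambda: f.read(1 << 20), b""):
        h.update(chunk)
assert h.hexdigest() == EXPECTED_OID, "sha256 mismatch vs. LFS pointer"
print("pytorch_model.bin matches the git-lfs pointer")
```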