martinctl committed
Commit 31c08b4 · verified · 1 Parent(s): 0c066da

Upload config.json with huggingface_hub

Files changed (1)
config.json +16 -1
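The commit message above matches the default message that huggingface_hub attaches to programmatic file uploads. A minimal sketch of such an upload, assuming a hypothetical repo id and a write token already configured locally:

```python
# Minimal sketch of uploading a single file with huggingface_hub,
# as the commit message describes. The repo id is a hypothetical
# placeholder; a token with write access is assumed to be set up
# (e.g. via `huggingface-cli login`).
from huggingface_hub import upload_file

upload_file(
    path_or_fileobj="config.json",  # local file to push
    path_in_repo="config.json",     # destination path inside the repo
    repo_id="martinctl/my-model",   # hypothetical repo id
)
```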
config.json CHANGED
@@ -25,12 +25,27 @@
   "num_attention_heads": 16,
   "num_hidden_layers": 28,
   "num_key_value_heads": 8,
+  "quantization_config": {
+    "_load_in_4bit": false,
+    "_load_in_8bit": true,
+    "bnb_4bit_compute_dtype": "float32",
+    "bnb_4bit_quant_storage": "uint8",
+    "bnb_4bit_quant_type": "fp4",
+    "bnb_4bit_use_double_quant": false,
+    "llm_int8_enable_fp32_cpu_offload": false,
+    "llm_int8_has_fp16_weight": false,
+    "llm_int8_skip_modules": null,
+    "llm_int8_threshold": 6.0,
+    "load_in_4bit": false,
+    "load_in_8bit": true,
+    "quant_method": "bitsandbytes"
+  },
   "rms_norm_eps": 1e-06,
   "rope_scaling": null,
   "rope_theta": 1000000,
   "sliding_window": null,
   "tie_word_embeddings": true,
-  "torch_dtype": "float32",
+  "torch_dtype": "float16",
   "transformers_version": "4.52.4",
   "use_cache": false,
   "use_sliding_window": false,