{
  "method": "gptq_v2",
  "version": "v2",
  "bits": 4,
  "group_size": 128,
  "quantization_time": 319.0039792060852,
  "model_size_gb": 0.5041657388210297,
  "peak_memory_gb": 2.2490234375,
  "calibration_size": 1024,
  "config": {
    "method": "gptq_v2",
    "version": "v2",
    "bits": 4,
    "group_size": 128,
    "desc_act": true,
    "sym": true,
    "true_sequential": true,
    "batch_size": 1,
    "auto_gc": false
  }
}
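
This metadata records a GPTQ v2 quantization run (4-bit, group size 128) along with its runtime statistics; the top-level fields duplicate the nested "config" block. Below is a minimal sketch of consuming the file from Python, assuming it is saved as quantization_metadata.json; the filename and the consistency checks are illustrative assumptions, not part of any fixed GPTQ tooling API.

    import json

    # Hypothetical filename; adjust to wherever this metadata file lives.
    with open("quantization_metadata.json") as f:
        meta = json.load(f)

    # The top-level fields mirror the nested "config" block; verify they agree.
    for key in ("method", "version", "bits", "group_size"):
        assert meta[key] == meta["config"][key], f"mismatch on {key!r}"

    # Summarize the run: method, precision, and resource usage.
    print(f"{meta['method']} ({meta['bits']}-bit, group size {meta['group_size']})")
    print(f"quantized in {meta['quantization_time']:.1f} s, "
          f"model size {meta['model_size_gb']:.2f} GB, "
          f"peak memory {meta['peak_memory_gb']:.2f} GB, "
          f"{meta['calibration_size']} calibration samples")

For this file the summary would report roughly a 319.0 s quantization producing a 0.50 GB model with a 2.25 GB memory peak over 1024 calibration samples.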