{
  "source_dir": "onnx/",
  "input_model": "onnx/model.onnx",
  "preprocessed_input_model": "onnx/model.preprocessed.onnx",
  "output_model": "onnx/model_quantized.onnx",
  "weight_type": "qint8",
  "per_channel": true,
  "reduce_range": false,
  "preprocess_applied": true,
  "op_types": [
    "MatMul",
    "Gemm",
    "Attention"
  ],
  "copied_assets": [
    "onnx/model.onnx",
    "onnx_export.json",
    "config.json",
    "special_tokens_map.json",
    "tokenizer.json",
    "tokenizer_config.json",
    "vocab.txt"
  ],
  "format": "onnx_dynamic_quantized",
  "task": "token-classification"
}