{
    "per_channel": true,
    "reduce_range": true,
    "per_model_config": {
        "model": {
            "op_types": [
                "Conv",
                "Flatten",
                "Gemm",
                "Relu",
                "GlobalAveragePool",
                "Add",
                "MaxPool"
            ],
            "weight_type": "QUInt8"
        }
    }
}