File size: 2,149 Bytes
{
    "per_channel": true,
    "reduce_range": true,
    "per_model_config": {
        "decoder_with_past_model": {
            "op_types": [
                "MatMul",
                "Pow",
                "Sub",
                "Unsqueeze",
                "Constant",
                "Concat",
                "Where",
                "Range",
                "Mul",
                "ReduceMean",
                "Transpose",
                "Reshape",
                "Slice",
                "Shape",
                "Add",
                "Tanh",
                "Sqrt",
                "Cast",
                "Softmax",
                "Squeeze",
                "Gather",
                "Div"
            ],
            "weight_type": "QInt8"
        },
        "decoder_model_merged": {
            "op_types": [
                "MatMul",
                "Pow",
                "Sub",
                "Unsqueeze",
                "Constant",
                "Concat",
                "Where",
                "Range",
                "Mul",
                "ReduceMean",
                "Transpose",
                "Reshape",
                "Slice",
                "Shape",
                "Add",
                "Tanh",
                "Sqrt",
                "Cast",
                "Softmax",
                "If",
                "Squeeze",
                "Gather",
                "Div"
            ],
            "weight_type": "QInt8"
        },
        "decoder_model": {
            "op_types": [
                "MatMul",
                "Pow",
                "Sub",
                "Unsqueeze",
                "Constant",
                "Concat",
                "Where",
                "Range",
                "Mul",
                "ReduceMean",
                "Transpose",
                "Reshape",
                "Slice",
                "Shape",
                "Add",
                "Tanh",
                "Sqrt",
                "Cast",
                "Softmax",
                "Squeeze",
                "Gather",
                "Div"
            ],
            "weight_type": "QInt8"
        }
    }
}
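
For reference, below is a minimal sketch of how a config in this shape could be consumed to dynamically quantize the exported decoder ONNX models with onnxruntime. This is an illustration only: the config file name, the onnx/ directory layout, and the choice of quantize_dynamic as the consumer are assumptions, not taken from this repository; also note that per_channel and reduce_range map to the top-level flags and weight_type/op_types come from each model's entry.

# Sketch under the assumptions stated above; not the repository's official tooling.
import json
from pathlib import Path

from onnxruntime.quantization import QuantType, quantize_dynamic

# Assumed file name for this config.
config = json.loads(Path("quantize_config.json").read_text())

for model_name, model_cfg in config["per_model_config"].items():
    quantize_dynamic(
        model_input=Path(f"onnx/{model_name}.onnx"),            # assumed layout
        model_output=Path(f"onnx/{model_name}_quantized.onnx"),  # assumed output name
        per_channel=config["per_channel"],
        reduce_range=config["reduce_range"],
        weight_type=QuantType.QInt8,                             # matches "weight_type": "QInt8"
        op_types_to_quantize=model_cfg["op_types"],
    )

Per-channel quantization with reduced range (7-bit) is a common choice for int8 weight quantization of transformer decoders, and the only difference between the three entries here is that decoder_model_merged additionally lists the If op, which the merged (with/without past) decoder graph uses for branching.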