{ "per_channel": false, "reduce_range": false, "per_model_config": { "decoder_model": { "op_types": [ "MatMul", "Squeeze", "Transpose", "Cast", "Erf", "Sqrt", "Constant", "Slice", "Where", "Softmax", "Shape", "Add", "Gemm", "Gather", "Reshape", "Div", "Concat", "Mul", "Range", "Split", "ReduceMean", "Pow", "ConstantOfShape", "Unsqueeze", "Sub" ], "weight_type": "QInt8" }, "decoder_model_merged": { "op_types": [ "MatMul", "Squeeze", "Transpose", "Cast", "Erf", "Sqrt", "Constant", "Slice", "Where", "Softmax", "Shape", "Add", "Gemm", "Gather", "Reshape", "Div", "Concat", "Mul", "If", "Range", "Split", "ReduceMean", "Pow", "ConstantOfShape", "Unsqueeze", "Sub" ], "weight_type": "QInt8" }, "decoder_with_past_model": { "op_types": [ "MatMul", "Squeeze", "Transpose", "Cast", "Erf", "Sqrt", "Constant", "Slice", "Where", "Softmax", "Shape", "Add", "Gemm", "Gather", "Reshape", "Div", "Concat", "Mul", "Range", "Split", "ReduceMean", "Pow", "ConstantOfShape", "Unsqueeze", "Sub" ], "weight_type": "QInt8" } } }