File size: 580 Bytes
855685b
{
  "compression": null,
  "dtype": "int8",
  "input_info": null,
  "optimum_version": "1.23.3",
  "quantization_config": {
    "all_layers": null,
    "bits": 8,
    "dataset": null,
    "gptq": null,
    "group_size": -1,
    "ignored_scope": null,
    "num_samples": null,
    "processor": null,
    "quant_method": "default",
    "ratio": 1.0,
    "scale_estimation": null,
    "sensitivity_metric": null,
    "sym": false,
    "tokenizer": null,
    "trust_remote_code": true,
    "weight_format": "int8"
  },
  "save_onnx_model": false,
  "transformers_version": "4.46.2"
}
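
The settings above correspond to optimum-intel's default 8-bit weight-only quantization: asymmetric int8 weights (sym: false), per-channel scales (group_size: -1), applied to 100% of the quantizable weights (ratio: 1.0). Below is a minimal sketch of how a comparable export could be reproduced with optimum-intel; the model id, output directory, and the OVModelForCausalLM class are assumptions (pick the OVModel* class that matches this model's task), and this is not necessarily the exact command used to produce this repository.

from optimum.intel import OVModelForCausalLM, OVWeightQuantizationConfig

# Mirrors the quantization_config above: 8-bit asymmetric weights,
# per-channel scales (group_size=-1), applied to all weights (ratio=1.0).
q_config = OVWeightQuantizationConfig(bits=8, sym=False, group_size=-1, ratio=1.0)

model = OVModelForCausalLM.from_pretrained(
    "source-model-id",              # placeholder: the original Transformers checkpoint
    export=True,                    # convert the checkpoint to OpenVINO IR on the fly
    quantization_config=q_config,
    trust_remote_code=True,         # matches trust_remote_code in the config above
)
model.save_pretrained("ov-int8-model")  # writes the IR plus a config file like the one above

Roughly the same export should also be possible from the command line, e.g.:

optimum-cli export openvino --model <source-model-id> --weight-format int8 <output_dir>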