{
"comp_cgenerate_active": false,
"comp_ctranslate_active": false,
"comp_cwhisper_active": false,
"comp_diffusers2_active": false,
"comp_ifw_active": false,
"comp_onediff_active": false,
"comp_step_caching_active": false,
"comp_torch_compile_active": false,
"comp_ws2t_active": false,
"comp_x-fast_active": false,
"prune_torch-structured_active": false,
"quant_aqlm_active": false,
"quant_awq_active": false,
"quant_gptq_active": false,
"quant_half_active": false,
"quant_hqq_active": false,
"quant_llm-int8_active": true,
"quant_quanto_active": false,
"quant_torch_dynamic_active": false,
"quant_torch_static_active": false,
"quant_llm-int8_compute_dtype": "bfloat16",
"quant_llm-int8_double_quant": false,
"quant_llm-int8_enable_fp32_cpu_offload": false,
"quant_llm-int8_has_fp16_weight": false,
"quant_llm-int8_quant_type": "fp4",
"quant_llm-int8_threshold": 6.0,
"quant_llm-int8_weight_bits": 8,
"max_batch_size": 1,
"device": "cuda",
"cache_dir": "/covalent/.cache/models/tmp_tosm9hy",
"task": "",
"save_load_fn": "bitsandbytes",
"save_load_fn_args": {
"weight_quantization_bits": "param.dtype"
},
"api_key": null
}