{
  "builder_config": {
    "gather_all_token_logits": false,
    "hidden_act": "silu",
    "hidden_size": 4096,
    "int8": true,
    "lora_target_modules": [],
    "max_batch_size": 1,
    "max_beam_width": 1,
    "max_input_len": 7168,
    "max_num_tokens": null,
    "max_output_len": 1024,
    "max_position_embeddings": 32768,
    "max_prompt_embedding_table_size": 0,
    "name": "llama",
    "num_heads": 32,
    "num_kv_heads": 8,
    "num_layers": 32,
    "parallel_build": false,
    "pipeline_parallel": 1,
    "precision": "float16",
    "quant_mode": 33,
    "tensor_parallel": 1,
    "use_refit": false,
    "vocab_size": 32000
  },
  "plugin_config": {
    "attention_qk_half_accumulation": false,
    "bert_attention_plugin": false,
    "context_fmha_type": 1,
    "gemm_plugin": "float16",
    "gpt_attention_plugin": "float16",
    "identity_plugin": false,
    "layernorm_plugin": false,
    "layernorm_quantization_plugin": false,
    "lookup_plugin": false,
    "lora_plugin": false,
    "multi_block_mode": false,
    "nccl_plugin": false,
    "paged_kv_cache": false,
    "quantize_per_token_plugin": false,
    "quantize_tensor_plugin": false,
    "remove_input_padding": true,
    "rmsnorm_plugin": false,
    "rmsnorm_quantization_plugin": false,
    "smooth_quant_gemm_plugin": false,
    "tokens_per_block": 0,
    "use_context_fmha_for_generation": false,
    "use_custom_all_reduce": false,
    "use_paged_context_fmha": false,
    "weight_only_groupwise_quant_matmul_plugin": "float16",
    "weight_only_quant_matmul_plugin": false
  }
}
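
This is a TensorRT-LLM engine config.json for a Llama-family model: 32 layers, grouped-query attention (32 query heads over 8 KV heads), float16 activations, single GPU (tensor_parallel and pipeline_parallel both 1), built for at most one sequence of 7168 input and 1024 output tokens. Below is a minimal sketch of how one might decode the numeric quant_mode field and sanity-check a request against these build-time limits. The bit layout is an assumption matching the flag order of tensorrt_llm.quantization.QuantMode (verify against your TensorRT-LLM version), and decode_quant_mode / check_request are hypothetical helpers, not part of the library:

import json

# Assumed to mirror tensorrt_llm.quantization.QuantMode's IntFlag order:
# INT4_WEIGHTS, INT8_WEIGHTS, ACTIVATIONS, PER_CHANNEL, PER_TOKEN, PER_GROUP.
# Check this against the TensorRT-LLM version that produced the engine.
QUANT_FLAGS = {
    1 << 0: "INT4_WEIGHTS",
    1 << 1: "INT8_WEIGHTS",
    1 << 2: "ACTIVATIONS",
    1 << 3: "PER_CHANNEL",
    1 << 4: "PER_TOKEN",
    1 << 5: "PER_GROUP",
}

def decode_quant_mode(mode: int) -> list[str]:
    """Return the names of the quantization flags set in `mode`."""
    return [name for bit, name in QUANT_FLAGS.items() if mode & bit]

def check_request(cfg: dict, prompt_len: int, max_new_tokens: int) -> None:
    """Raise if a request exceeds the limits the engine was built with."""
    b = cfg["builder_config"]
    if prompt_len > b["max_input_len"]:
        raise ValueError(f"prompt_len {prompt_len} exceeds max_input_len {b['max_input_len']}")
    if max_new_tokens > b["max_output_len"]:
        raise ValueError(f"max_new_tokens {max_new_tokens} exceeds max_output_len {b['max_output_len']}")

with open("config.json") as f:  # hypothetical path to the engine config above
    cfg = json.load(f)

print(decode_quant_mode(cfg["builder_config"]["quant_mode"]))
# Under the assumed layout, quant_mode 33 = 1 + 32 decodes to
# ['INT4_WEIGHTS', 'PER_GROUP'], i.e. int4 group-wise weight-only
# quantization, which is consistent with
# "weight_only_groupwise_quant_matmul_plugin": "float16" in plugin_config.
check_request(cfg, prompt_len=7168, max_new_tokens=1024)  # exactly at the limits; passes

A request of 7168 + 1024 tokens also fits comfortably within max_position_embeddings (32768), so the context-length check is governed by the build-time max_input_len and max_output_len rather than the model's positional range.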