{
    "base_current_gpu_type": "NVIDIA A100-PCIE-40GB",
    "base_current_gpu_total_memory": 40339.3125,
    "base_perplexity": 6.637787342071533,
    "base_token_generation_latency_sync": 40.13633842468262,
    "base_token_generation_latency_async": 38.44778724014759,
    "base_token_generation_throughput_sync": 0.02491507793807695,
    "base_token_generation_throughput_async": 0.026009299150401802,
    "base_token_generation_CO2_emissions": null,
    "base_token_generation_energy_consumption": null,
    "base_inference_latency_sync": 117.65217208862305,
    "base_inference_latency_async": 38.692593574523926,
    "base_inference_throughput_sync": 0.00849963058265288,
    "base_inference_throughput_async": 0.02584473946089834,
    "base_inference_CO2_emissions": null,
    "base_inference_energy_consumption": null,
    "smashed_current_gpu_type": "NVIDIA A100-PCIE-40GB",
    "smashed_current_gpu_total_memory": 40339.3125,
    "smashed_perplexity": 20686.58203125,
    "smashed_token_generation_latency_sync": 168.5132278442383,
    "smashed_token_generation_latency_async": 168.39814521372318,
    "smashed_token_generation_throughput_sync": 0.0059342522411613245,
    "smashed_token_generation_throughput_async": 0.005938307685817121,
    "smashed_token_generation_CO2_emissions": null,
    "smashed_token_generation_energy_consumption": null,
    "smashed_inference_latency_sync": 262.7811309814453,
    "smashed_inference_latency_async": 210.38615703582764,
    "smashed_inference_throughput_sync": 0.0038054482689269227,
    "smashed_inference_throughput_async": 0.004753164438617058,
    "smashed_inference_CO2_emissions": null,
    "smashed_inference_energy_consumption": null
}