{
"base_current_gpu_type": "NVIDIA A100-PCIE-40GB",
"base_current_gpu_total_memory": 40339.3125,
"base_perplexity": 8.629734992980957,
"base_token_generation_latency_sync": 35.20570373535156,
"base_token_generation_latency_async": 35.54570898413658,
"base_token_generation_throughput_sync": 0.028404488304429404,
"base_token_generation_throughput_async": 0.028132790949430275,
"base_token_generation_CO2_emissions": null,
"base_token_generation_energy_consumption": null,
"base_inference_latency_sync": 119.19165420532227,
"base_inference_latency_async": 38.88437747955322,
"base_inference_throughput_sync": 0.008389849160725441,
"base_inference_throughput_async": 0.02571726911471928,
"base_inference_CO2_emissions": null,
"base_inference_energy_consumption": null,
"smashed_current_gpu_type": "NVIDIA A100-PCIE-40GB",
"smashed_current_gpu_total_memory": 40339.3125,
"smashed_perplexity": 9.04076099395752,
"smashed_token_generation_latency_sync": 43.718316650390626,
"smashed_token_generation_latency_async": 42.23632402718067,
"smashed_token_generation_throughput_sync": 0.02287370778698692,
"smashed_token_generation_throughput_async": 0.02367630287513805,
"smashed_token_generation_CO2_emissions": null,
"smashed_token_generation_energy_consumption": null,
"smashed_inference_latency_sync": 162.82562713623048,
"smashed_inference_latency_async": 70.3972578048706,
"smashed_inference_throughput_sync": 0.006141539373057874,
"smashed_inference_throughput_async": 0.014205098766372866,
"smashed_inference_CO2_emissions": null,
"smashed_inference_energy_consumption": null
}