{
"base_current_gpu_type": "NVIDIA A100-PCIE-40GB",
"base_current_gpu_total_memory": 40339.3125,
"base_perplexity": 8.633415222167969,
"base_token_generation_latency_sync": 34.2150146484375,
"base_token_generation_latency_async": 34.84920393675566,
"base_token_generation_throughput_sync": 0.029226934732458666,
"base_token_generation_throughput_async": 0.028695060059759192,
"base_token_generation_CO2_emissions": null,
"base_token_generation_energy_consumption": null,
"base_inference_latency_sync": 122.62328338623047,
"base_inference_latency_async": 45.04399299621582,
"base_inference_throughput_sync": 0.008155058096513923,
"base_inference_throughput_async": 0.022200518503854905,
"base_inference_CO2_emissions": null,
"base_inference_energy_consumption": null,
"smashed_current_gpu_type": "NVIDIA A100-PCIE-40GB",
"smashed_current_gpu_total_memory": 40339.3125,
"smashed_perplexity": 9.383063316345215,
"smashed_token_generation_latency_sync": 167.0510223388672,
"smashed_token_generation_latency_async": 166.85648560523987,
"smashed_token_generation_throughput_sync": 0.005986195031907527,
"smashed_token_generation_throughput_async": 0.005993174292103133,
"smashed_token_generation_CO2_emissions": null,
"smashed_token_generation_energy_consumption": null,
"smashed_inference_latency_sync": 267.10435943603517,
"smashed_inference_latency_async": 197.52447605133057,
"smashed_inference_throughput_sync": 0.00374385503146187,
"smashed_inference_throughput_async": 0.005062663726494992,
"smashed_inference_CO2_emissions": null,
"smashed_inference_energy_consumption": null
}