{
"base_current_gpu_type": "NVIDIA A100-PCIE-40GB",
"base_current_gpu_total_memory": 40339.3125,
"base_token_generation_latency_sync": 70.8256935119629,
"base_token_generation_latency_async": 70.77690288424492,
"base_token_generation_throughput_sync": 0.014119169900272052,
"base_token_generation_throughput_async": 0.014128903063694272,
"base_token_generation_CO2_emissions": null,
"base_token_generation_energy_consumption": null,
"base_inference_latency_sync": 70.20144691467286,
"base_inference_latency_async": 70.00713348388672,
"base_inference_throughput_sync": 0.014244720642516976,
"base_inference_throughput_async": 0.014284258621018476,
"base_inference_CO2_emissions": null,
"base_inference_energy_consumption": null,
"smashed_current_gpu_type": "NVIDIA A100-PCIE-40GB",
"smashed_current_gpu_total_memory": 40339.3125,
"smashed_token_generation_latency_sync": 120.27656784057618,
"smashed_token_generation_latency_async": 115.46138767153025,
"smashed_token_generation_throughput_sync": 0.008314171396422593,
"smashed_token_generation_throughput_async": 0.008660904049108131,
"smashed_token_generation_CO2_emissions": null,
"smashed_token_generation_energy_consumption": null,
"smashed_inference_latency_sync": 117.87663421630859,
"smashed_inference_latency_async": 117.6546573638916,
"smashed_inference_throughput_sync": 0.008483445482206064,
"smashed_inference_throughput_async": 0.008499451040914778,
"smashed_inference_CO2_emissions": null,
"smashed_inference_energy_consumption": null
}