{
"base_current_gpu_type": "NVIDIA A100-PCIE-40GB",
"base_current_gpu_total_memory": 40339.3125,
"base_token_generation_latency_sync": 82.04161834716797,
"base_token_generation_latency_async": 82.49805271625519,
"base_token_generation_throughput_sync": 0.012188935568876664,
"base_token_generation_throughput_async": 0.012121498230260201,
"base_token_generation_CO2_emissions": 2.1165249602943306e-05,
"base_token_generation_energy_consumption": 0.006756192727641794,
"base_inference_latency_sync": 81.24088439941406,
"base_inference_latency_async": 78.74789237976074,
"base_inference_throughput_sync": 0.012309073287331327,
"base_inference_throughput_async": 0.012698752560608382,
"smashed_current_gpu_type": "NVIDIA A100-PCIE-40GB",
"smashed_current_gpu_total_memory": 40339.3125,
"smashed_token_generation_latency_sync": 180.8959991455078,
"smashed_token_generation_latency_async": 181.48778304457664,
"smashed_token_generation_throughput_sync": 0.005528038235912709,
"smashed_token_generation_throughput_async": 0.005510012758017889,
"smashed_token_generation_CO2_emissions": 5.7140606324742935e-05,
"smashed_token_generation_energy_consumption": 0.019805487794740197,
"smashed_inference_latency_sync": 188.26690673828125,
"smashed_inference_latency_async": 162.53809928894043,
"smashed_inference_throughput_sync": 0.005311607957686092,
"smashed_inference_throughput_async": 0.006152403678735789
}