{
"base_current_gpu_type": "NVIDIA A100-PCIE-40GB",
"base_current_gpu_total_memory": 40339.3125,
"base_token_generation_latency_sync": 83.62196044921875,
"base_token_generation_latency_async": 83.07278789579868,
"base_token_generation_throughput_sync": 0.011958581150549224,
"base_token_generation_throughput_async": 0.012037636214332154,
"base_token_generation_CO2_emissions": 2.0762580172851258e-05,
"base_token_generation_energy_consumption": 0.0068736865279905605,
"base_inference_latency_sync": 82.20262451171875,
"base_inference_latency_async": 79.88781929016113,
"base_inference_throughput_sync": 0.012165061711106812,
"base_inference_throughput_async": 0.012517552849551352,
"smashed_current_gpu_type": "NVIDIA A100-PCIE-40GB",
"smashed_current_gpu_total_memory": 40339.3125,
"smashed_token_generation_latency_sync": 177.8312194824219,
"smashed_token_generation_latency_async": 178.8266021758318,
"smashed_token_generation_throughput_sync": 0.0056233095792206905,
"smashed_token_generation_throughput_async": 0.005592009174433382,
"smashed_token_generation_CO2_emissions": 5.6607409622391035e-05,
"smashed_token_generation_energy_consumption": 0.019573642151998485,
"smashed_inference_latency_sync": 185.82732849121095,
"smashed_inference_latency_async": 144.77639198303223,
"smashed_inference_throughput_sync": 0.0053813398067943325,
"smashed_inference_throughput_async": 0.0069072034901740045
}