{
"base_current_gpu_type": "NVIDIA A100-PCIE-40GB",
"base_current_gpu_total_memory": 40339.3125,
"base_perplexity": 8.711685180664062,
"base_token_generation_latency_sync": 35.84392547607422,
"base_token_generation_latency_async": 36.515641771256924,
"base_token_generation_throughput_sync": 0.02789873002797919,
"base_token_generation_throughput_async": 0.027385524435370713,
"base_token_generation_CO2_emissions": null,
"base_token_generation_energy_consumption": null,
"base_inference_latency_sync": 119.2900604248047,
"base_inference_latency_async": 38.611435890197754,
"base_inference_throughput_sync": 0.008382928103472267,
"base_inference_throughput_async": 0.02589906272441603,
"base_inference_CO2_emissions": null,
"base_inference_energy_consumption": null,
"smashed_current_gpu_type": "NVIDIA A100-PCIE-40GB",
"smashed_current_gpu_total_memory": 40339.3125,
"smashed_perplexity": 9.146251678466797,
"smashed_token_generation_latency_sync": 42.331953811645505,
"smashed_token_generation_latency_async": 42.045436799526215,
"smashed_token_generation_throughput_sync": 0.023622817043821406,
"smashed_token_generation_throughput_async": 0.023783793822098394,
"smashed_token_generation_CO2_emissions": null,
"smashed_token_generation_energy_consumption": null,
"smashed_inference_latency_sync": 162.66905670166017,
"smashed_inference_latency_async": 69.82095241546631,
"smashed_inference_throughput_sync": 0.006147450660109436,
"smashed_inference_throughput_async": 0.0143223483124313,
"smashed_inference_CO2_emissions": null,
"smashed_inference_energy_consumption": null
}