{
"base_current_gpu_type": "NVIDIA A100-PCIE-40GB",
"base_current_gpu_total_memory": 40339.3125,
"base_token_generation_latency_sync": 56.065922546386716,
"base_token_generation_latency_async": 56.165905855596066,
"base_token_generation_throughput_sync": 0.017836146353832665,
"base_token_generation_throughput_async": 0.017804395473848936,
"base_token_generation_CO2_emissions": null,
"base_token_generation_energy_consumption": null,
"base_inference_latency_sync": 57.000243759155275,
"base_inference_latency_async": 55.65207004547119,
"base_inference_throughput_sync": 0.017543784623541753,
"base_inference_throughput_async": 0.01796878353640643,
"base_inference_CO2_emissions": null,
"base_inference_energy_consumption": null,
"smashed_current_gpu_type": "NVIDIA A100-PCIE-40GB",
"smashed_current_gpu_total_memory": 40339.3125,
"smashed_token_generation_latency_sync": 173.6800567626953,
"smashed_token_generation_latency_async": 174.10976719111204,
"smashed_token_generation_throughput_sync": 0.005757713456797935,
"smashed_token_generation_throughput_async": 0.005743503171205481,
"smashed_token_generation_CO2_emissions": null,
"smashed_token_generation_energy_consumption": null,
"smashed_inference_latency_sync": 179.55246124267578,
"smashed_inference_latency_async": 157.53896236419678,
"smashed_inference_throughput_sync": 0.0055694029092056875,
"smashed_inference_throughput_async": 0.006347636070423083,
"smashed_inference_CO2_emissions": null,
"smashed_inference_energy_consumption": null
}