{
"base_current_gpu_type": "NVIDIA A100-PCIE-40GB",
"base_current_gpu_total_memory": 40339.3125,
"base_token_generation_latency_sync": 74.54675521850587,
"base_token_generation_latency_async": 74.43354055285454,
"base_token_generation_throughput_sync": 0.013414400091176,
"base_token_generation_throughput_async": 0.013434803619074249,
"base_token_generation_CO2_emissions": null,
"base_token_generation_energy_consumption": null,
"base_inference_latency_sync": 73.78821182250977,
"base_inference_latency_async": 72.89724349975586,
"base_inference_throughput_sync": 0.013552300229275118,
"base_inference_throughput_async": 0.01371793982859378,
"base_inference_CO2_emissions": null,
"base_inference_energy_consumption": null,
"smashed_current_gpu_type": "NVIDIA A100-PCIE-40GB",
"smashed_current_gpu_total_memory": 40339.3125,
"smashed_token_generation_latency_sync": 188.3400436401367,
"smashed_token_generation_latency_async": 188.5381616652012,
"smashed_token_generation_throughput_sync": 0.0053095453344521384,
"smashed_token_generation_throughput_async": 0.005303966004377201,
"smashed_token_generation_CO2_emissions": null,
"smashed_token_generation_energy_consumption": null,
"smashed_inference_latency_sync": 186.75937194824218,
"smashed_inference_latency_async": 183.61716270446777,
"smashed_inference_throughput_sync": 0.005354483630824891,
"smashed_inference_throughput_async": 0.005446113997576045,
"smashed_inference_CO2_emissions": null,
"smashed_inference_energy_consumption": null
}