{
"base_current_gpu_type": "NVIDIA A100-PCIE-40GB",
"base_current_gpu_total_memory": 40339.3125,
    "base_perplexity": "Infinity",
"base_token_generation_latency_sync": 34.3837194442749,
"base_token_generation_latency_async": 32.92566556483507,
"base_token_generation_throughput_sync": 0.029083531862243196,
"base_token_generation_throughput_async": 0.03037144376112505,
"base_token_generation_CO2_emissions": null,
"base_token_generation_energy_consumption": null,
"base_inference_latency_sync": 128.52879257202147,
"base_inference_latency_async": 31.447863578796387,
"base_inference_throughput_sync": 0.007780357848142447,
"base_inference_throughput_async": 0.031798662490835995,
"base_inference_CO2_emissions": null,
"base_inference_energy_consumption": null,
"smashed_current_gpu_type": "NVIDIA A100-PCIE-40GB",
"smashed_current_gpu_total_memory": 40339.3125,
    "smashed_perplexity": "Infinity",
"smashed_token_generation_latency_sync": 55.48102798461914,
"smashed_token_generation_latency_async": 55.78669644892216,
"smashed_token_generation_throughput_sync": 0.018024179369517584,
"smashed_token_generation_throughput_async": 0.017925420640664602,
"smashed_token_generation_CO2_emissions": null,
"smashed_token_generation_energy_consumption": null,
"smashed_inference_latency_sync": 214.5728515625,
"smashed_inference_latency_async": 109.41667556762695,
"smashed_inference_throughput_sync": 0.004660421822789281,
"smashed_inference_throughput_async": 0.00913937473252815,
"smashed_inference_CO2_emissions": null,
"smashed_inference_energy_consumption": null
}