{
    "base_current_gpu_type": "NVIDIA A100-PCIE-40GB",
    "base_current_gpu_total_memory": 40339.3125,
    "base_perplexity": 8.891793251037598,
    "base_token_generation_latency_sync": 39.96690216064453,
    "base_token_generation_latency_async": 39.75822236388922,
    "base_token_generation_throughput_sync": 0.025020703280443424,
    "base_token_generation_throughput_async": 0.02515202995866987,
    "base_token_generation_CO2_emissions": null,
    "base_token_generation_energy_consumption": null,
    "base_inference_latency_sync": 119.90845489501953,
    "base_inference_latency_async": 39.05057907104492,
    "base_inference_throughput_sync": 0.008339695485822956,
    "base_inference_throughput_async": 0.025607814884913607,
    "base_inference_CO2_emissions": null,
    "base_inference_energy_consumption": null,
    "smashed_current_gpu_type": "NVIDIA A100-PCIE-40GB",
    "smashed_current_gpu_total_memory": 40339.3125,
    "smashed_perplexity": 9.47966194152832,
    "smashed_token_generation_latency_sync": 167.34748077392578,
    "smashed_token_generation_latency_async": 168.06612703949213,
    "smashed_token_generation_throughput_sync": 0.0059755904025285385,
    "smashed_token_generation_throughput_async": 0.005950038937739193,
    "smashed_token_generation_CO2_emissions": null,
    "smashed_token_generation_energy_consumption": null,
    "smashed_inference_latency_sync": 266.22822265625,
    "smashed_inference_latency_async": 196.9926118850708,
    "smashed_inference_throughput_sync": 0.0037561757728863535,
    "smashed_inference_throughput_async": 0.005076332510294441,
    "smashed_inference_CO2_emissions": null,
    "smashed_inference_energy_consumption": null
}