{
    "current_gpu_type": "Tesla T4",
    "current_gpu_total_memory": 15095.0625,
    "perplexity": 30815030.0,
    "memory_inference_first": 1142.0,
    "memory_inference": 1142.0,
    "token_generation_latency_sync": 147.6204864501953,
    "token_generation_latency_async": 147.56362289190292,
    "token_generation_throughput_sync": 0.006774127521503482,
    "token_generation_throughput_async": 0.006776737927697435,
    "token_generation_CO2_emissions": 0.00020048614177282614,
    "token_generation_energy_consumption": 0.008781923951582248,
    "inference_latency_sync": 1151.6722137451172,
    "inference_latency_async": 424.6630907058716,
    "inference_throughput_sync": 0.0008683026194997837,
    "inference_throughput_async": 0.00235480789803938,
    "inference_CO2_emissions": 0.0002004475962713345,
    "inference_energy_consumption": 0.0006890801651842688
}