IlyasMoutawwakil (HF staff) committed
Commit: b209ec4
Parent: ff4ba14

Upload cpu_inference_transformers_multiple-choice_FacebookAI/roberta-base/benchmark_report.json with huggingface_hub

cpu_inference_transformers_multiple-choice_FacebookAI/roberta-base/benchmark_report.json CHANGED
@@ -2,7 +2,7 @@
     "forward": {
         "memory": {
             "unit": "MB",
-            "max_ram": 940.38016,
+            "max_ram": 939.618304,
             "max_global_vram": null,
             "max_process_vram": null,
             "max_reserved": null,
@@ -10,55 +10,52 @@
         },
         "latency": {
             "unit": "s",
-            "count": 24,
-            "total": 1.0075622099999464,
-            "mean": 0.041981758749997766,
-            "stdev": 0.001432619222854652,
-            "p50": 0.041822753499985765,
-            "p90": 0.042444186099976376,
-            "p95": 0.0431326134499642,
-            "p99": 0.04701418162996163,
+            "count": 21,
+            "total": 1.0166431719999878,
+            "mean": 0.04841157961904704,
+            "stdev": 0.0008352788246667446,
+            "p50": 0.04853026000000682,
+            "p90": 0.04931461600000375,
+            "p95": 0.049659447999999884,
+            "p99": 0.04998655200000712,
             "values": [
-                0.04813927699996157,
-                0.04151944700004151,
-                0.040316035999978794,
-                0.03994275100001232,
-                0.041711749000000964,
-                0.041422323000006145,
-                0.041900816000008945,
-                0.041768396000009034,
-                0.041147123000030206,
-                0.04147726799999418,
-                0.041774416999999175,
-                0.04128924299999426,
-                0.04183763700001464,
-                0.04180786999995689,
-                0.042357677999973475,
-                0.042116393000014796,
-                0.041546927000013056,
-                0.04197065799996835,
-                0.04191117499999564,
-                0.04209283900001992,
-                0.04248126099997762,
-                0.041893211999990854,
-                0.04324755799996183,
-                0.041890156000022216
+                0.04931461600000375,
+                0.048226350999982515,
+                0.04863095500002146,
+                0.046930662000022494,
+                0.049032120999982,
+                0.04860684999999876,
+                0.04853026000000682,
+                0.049100869000028524,
+                0.0468076049999695,
+                0.05006832800000893,
+                0.048791683999979796,
+                0.04808170399996925,
+                0.047575662999975066,
+                0.049659447999999884,
+                0.04739059900003895,
+                0.04894478900001786,
+                0.04823131199998443,
+                0.04910815300002014,
+                0.04761511600003132,
+                0.048013677999961146,
+                0.04798240899998518
             ]
         },
         "throughput": {
             "unit": "samples/s",
-            "value": 23.819869147336597
+            "value": 20.65621505988952
         },
         "energy": {
             "unit": "kWh",
-            "cpu": 1.5997710757785375e-06,
-            "ram": 6.685607702564766e-08,
+            "cpu": 1.6181519296434194e-06,
+            "ram": 6.762301141558661e-08,
             "gpu": 0.0,
-            "total": 1.666627152804185e-06
+            "total": 1.6857749410590059e-06
         },
         "efficiency": {
             "unit": "samples/kWh",
-            "value": 600014.225327752
+            "value": 593198.9945062292
         }
     }
 }
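
For reference, the new ("+") numbers are internally consistent: the aggregate latency fields follow from the raw per-call measurements, and the efficiency figure is the reciprocal of the reported total energy. The short Python sketch below recomputes them; the formulas (mean = total / count, throughput = count / total, efficiency = 1 / total energy) are inferred from the values in this diff, not taken from the benchmark's own source.

# Sketch only: recompute the derived fields of the new ("+") report from its
# raw per-forward-pass latencies. Aggregation formulas are assumptions
# inferred from the numbers in the diff above.
latencies_s = [
    0.04931461600000375, 0.048226350999982515, 0.04863095500002146,
    0.046930662000022494, 0.049032120999982, 0.04860684999999876,
    0.04853026000000682, 0.049100869000028524, 0.0468076049999695,
    0.05006832800000893, 0.048791683999979796, 0.04808170399996925,
    0.047575662999975066, 0.049659447999999884, 0.04739059900003895,
    0.04894478900001786, 0.04823131199998443, 0.04910815300002014,
    0.04761511600003132, 0.048013677999961146, 0.04798240899998518,
]

count = len(latencies_s)            # 21
total_s = sum(latencies_s)          # ~1.016643 s
mean_s = total_s / count            # ~0.048412 s per forward pass
throughput = count / total_s        # ~20.656 samples/s

# Energy: cpu + ram + gpu as reported; efficiency matches 1 / total.
total_kwh = 1.6181519296434194e-06 + 6.762301141558661e-08 + 0.0
efficiency = 1.0 / total_kwh        # ~593199 samples/kWh

print(f"count={count}  total={total_s:.6f} s  mean={mean_s:.6f} s")
print(f"throughput={throughput:.2f} samples/s  efficiency={efficiency:.0f} samples/kWh")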