IlyasMoutawwakil committed
Commit ebb6b70
1 Parent(s): fc7b24b

Upload cpu_inference_transformers_text-classification_FacebookAI/roberta-base/benchmark_report.json with huggingface_hub
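For reference, a minimal sketch of how a report file like this can be pushed with the huggingface_hub client. Only the path_in_repo and the commit message come from this commit; the repo_id and local file path below are illustrative assumptions.

```python
from huggingface_hub import HfApi

api = HfApi()

# Sketch only: repo_id and the local file path are assumptions for
# illustration; they are not recorded in this commit.
api.upload_file(
    path_or_fileobj="benchmark_report.json",  # local file (assumed name)
    path_in_repo=(
        "cpu_inference_transformers_text-classification_FacebookAI/"
        "roberta-base/benchmark_report.json"
    ),
    repo_id="IlyasMoutawwakil/example-benchmarks",  # hypothetical repo
    repo_type="dataset",  # assumption; could also target a model repo
    commit_message=(
        "Upload cpu_inference_transformers_text-classification_FacebookAI/"
        "roberta-base/benchmark_report.json with huggingface_hub"
    ),
)
```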

cpu_inference_transformers_text-classification_FacebookAI/roberta-base/benchmark_report.json CHANGED
@@ -2,7 +2,7 @@
   "forward": {
     "memory": {
       "unit": "MB",
-      "max_ram": 938.328064,
+      "max_ram": 938.438656,
       "max_global_vram": null,
       "max_process_vram": null,
       "max_reserved": null,
@@ -10,60 +10,59 @@
     },
     "latency": {
       "unit": "s",
-      "count": 29,
-      "total": 1.033992539000053,
-      "mean": 0.03565491513793286,
-      "stdev": 0.0023222085218418775,
-      "p50": 0.03451691599997275,
-      "p90": 0.03955771140002753,
-      "p95": 0.0396460457999865,
-      "p99": 0.04010088595998241,
+      "count": 28,
+      "total": 1.0215995410001142,
+      "mean": 0.03648569789286122,
+      "stdev": 0.0008005848646550593,
+      "p50": 0.036659293000013804,
+      "p90": 0.03733936299996685,
+      "p95": 0.03750497559999246,
+      "p99": 0.037740641569969285,
       "values": [
-        0.03475112300003502,
-        0.03436538300002212,
-        0.03398699899997837,
-        0.033762078999984624,
-        0.033646524999994654,
-        0.03398468400001775,
-        0.03426834300000792,
-        0.03451691599997275,
-        0.034104076000005534,
-        0.03388195299999097,
-        0.03426375399999415,
-        0.03352693200002932,
-        0.03367064899998695,
-        0.03391670899998189,
-        0.03423575200002915,
-        0.03465082600001779,
-        0.03510268700000552,
-        0.0345147019999672,
-        0.03468595099997174,
-        0.03491542900002287,
-        0.035026103999996394,
-        0.03690583700000616,
-        0.039522873000009895,
-        0.039162180000005264,
-        0.040262870999981715,
-        0.039684352999984185,
-        0.03953827100002627,
-        0.03958858499998996,
-        0.03954999300003692
+        0.036768950999999106,
+        0.037187377999998716,
+        0.03699049200002946,
+        0.03708207299996502,
+        0.03467078600004925,
+        0.0370597620000126,
+        0.03732167799995523,
+        0.037380627999993976,
+        0.03780304099996101,
+        0.03757193199999165,
+        0.03482035500002212,
+        0.035600171999988106,
+        0.03664010200003531,
+        0.036425975000042854,
+        0.03583059000004596,
+        0.03652385600003072,
+        0.03646509700001843,
+        0.036678483999992295,
+        0.03626789199995528,
+        0.0348330580000038,
+        0.03696261000004597,
+        0.03685502100000804,
+        0.036109758999998576,
+        0.03686896699997533,
+        0.03555964700001368,
+        0.03629084400000693,
+        0.0369296100000156,
+        0.03610078099995917
       ]
     },
     "throughput": {
       "unit": "samples/s",
-      "value": 28.046624038549773
+      "value": 27.407999784914615
     },
     "energy": {
       "unit": "kWh",
-      "cpu": 1.3160619912324132e-06,
-      "ram": 5.5000020012954564e-08,
+      "cpu": 1.2260438253482183e-06,
+      "ram": 5.1238421406347715e-08,
       "gpu": 0.0,
-      "total": 1.3710620112453677e-06
+      "total": 1.277282246754566e-06
     },
     "efficiency": {
       "unit": "samples/kWh",
-      "value": 729361.6129672185
+      "value": 782912.3144402032
     }
   }
 }
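The summary fields in the updated report are arithmetically consistent with one another. A quick sketch in plain Python, using only numbers copied from the new file, of how the derived metrics relate to the raw figures:

```python
# Figures copied from the updated benchmark_report.json above.
count = 28
total_latency_s = 1.0215995410001142      # sum of the 28 per-forward latencies
cpu_kwh = 1.2260438253482183e-06
ram_kwh = 5.1238421406347715e-08
gpu_kwh = 0.0

total_kwh = cpu_kwh + ram_kwh + gpu_kwh   # ~1.277282e-06, matches "total"
mean_latency = total_latency_s / count    # ~0.0364857 s, matches "mean"
throughput = count / total_latency_s      # ~27.408 samples/s, matches "throughput"
efficiency = 1.0 / total_kwh              # ~782912 samples/kWh, matches "efficiency"

print(mean_latency, throughput, efficiency)
```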