IlyasMoutawwakil (HF staff) committed
Commit eae2a67
1 Parent(s): f7d2c26

Upload cpu_inference_transformers_fill-mask_google-bert/bert-base-uncased/benchmark_report.json with huggingface_hub

cpu_inference_transformers_fill-mask_google-bert/bert-base-uncased/benchmark_report.json CHANGED
@@ -2,7 +2,7 @@
     "forward": {
         "memory": {
             "unit": "MB",
-            "max_ram": 865.591296,
+            "max_ram": 871.440384,
             "max_global_vram": null,
             "max_process_vram": null,
             "max_reserved": null,
@@ -10,54 +10,55 @@
         },
         "latency": {
             "unit": "s",
-            "count": 23,
-            "total": 1.0084241530000781,
-            "mean": 0.043844528391307744,
-            "stdev": 0.002268249695141345,
-            "p50": 0.04396238300000732,
-            "p90": 0.04619112940000605,
-            "p95": 0.04748975040001539,
-            "p99": 0.048353882999998064,
+            "count": 24,
+            "total": 1.0029692920000173,
+            "mean": 0.041790387166667387,
+            "stdev": 0.0028392891505609834,
+            "p50": 0.04333874300000673,
+            "p90": 0.04431937060000166,
+            "p95": 0.04454408370001346,
+            "p99": 0.04471243060000319,
             "values": [
-                0.04500136999999427,
-                0.043781125000009524,
-                0.04416962999999896,
-                0.04396238300000732,
-                0.043865283000002364,
-                0.044247705000003634,
-                0.044434212999988176,
-                0.04344656100002453,
-                0.045235666999985824,
-                0.04391572700001234,
-                0.04428195900001697,
-                0.04299877599999036,
-                0.04856440099999304,
-                0.04367765200001372,
-                0.044935597999995025,
-                0.04760750100001587,
-                0.046429995000011104,
-                0.04502636800000914,
-                0.04370453200002089,
-                0.03902493600000412,
-                0.03928449899999009,
-                0.04055923699999653,
-                0.040269034999994346
+                0.04442560900000103,
+                0.04407148100000313,
+                0.043459165000001576,
+                0.04364036699999474,
+                0.04341329900000801,
+                0.042521265999994284,
+                0.04389436700000715,
+                0.043661055999990595,
+                0.04387008099999434,
+                0.04326418700000545,
+                0.04345747200000005,
+                0.042918806000017184,
+                0.04399387499998397,
+                0.04456499100001565,
+                0.04322350000001052,
+                0.044756470999999465,
+                0.038465573999985736,
+                0.03760530099998505,
+                0.0369485620000205,
+                0.03727134999999748,
+                0.03761058099999559,
+                0.03739622599999848,
+                0.0372512630000017,
+                0.04128444200000558
             ]
         },
         "throughput": {
             "unit": "samples/s",
-            "value": 22.80786307187767
+            "value": 23.92894796623503
         },
         "energy": {
             "unit": "kWh",
-            "cpu": 1.5072463924049314e-06,
-            "ram": 6.29891627794275e-08,
+            "cpu": 1.455981103481089e-06,
+            "ram": 6.084673865730378e-08,
             "gpu": 0.0,
-            "total": 1.570235555184359e-06
+            "total": 1.5168278421383927e-06
         },
         "efficiency": {
             "unit": "samples/kWh",
-            "value": 636847.1257056661
+            "value": 659270.5989561877
         }
     }
 }
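The derived fields in the report can be sanity-checked against the raw latency samples. Below is a minimal sketch, assuming the updated benchmark_report.json has been downloaded locally; the relations it uses (mean = total / count, throughput = count / total latency, efficiency ≈ 1 / per-forward energy) are inferred from the numbers in this diff rather than taken from the benchmark source code.

import json

# Recompute the derived metrics in benchmark_report.json from the raw fields.
# Assumes the file sits in the current directory; the formulas below are
# inferred from the values in this commit, not quoted from the benchmark tool.
with open("benchmark_report.json") as f:
    report = json.load(f)

forward = report["forward"]
latency = forward["latency"]

# mean latency and throughput follow directly from count and total latency
mean = latency["total"] / latency["count"]          # ~0.04179 s
throughput = latency["count"] / latency["total"]    # ~23.93 samples/s

# efficiency (samples/kWh) appears to be the inverse of the per-forward energy
efficiency = 1.0 / forward["energy"]["total"]       # ~659270 samples/kWh

print(f"mean latency : {mean:.6f} s (reported {latency['mean']:.6f})")
print(f"throughput   : {throughput:.2f} samples/s (reported {forward['throughput']['value']:.2f})")
print(f"efficiency   : {efficiency:.1f} samples/kWh (reported {forward['efficiency']['value']:.1f})")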