IlyasMoutawwakil HF staff committed on
Commit
fd1478d
1 Parent(s): 42dec08

Upload cpu_inference_transformers_multiple-choice_FacebookAI/roberta-base/benchmark_report.json with huggingface_hub

Browse files
cpu_inference_transformers_multiple-choice_FacebookAI/roberta-base/benchmark_report.json ADDED
@@ -0,0 +1,65 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ {
2
+ "forward": {
3
+ "memory": {
4
+ "unit": "MB",
5
+ "max_ram": 942.55104,
6
+ "max_global_vram": null,
7
+ "max_process_vram": null,
8
+ "max_reserved": null,
9
+ "max_allocated": null
10
+ },
11
+ "latency": {
12
+ "unit": "s",
13
+ "count": 25,
14
+ "total": 1.022847923000029,
15
+ "mean": 0.04091391692000115,
16
+ "stdev": 0.0018977492359326417,
17
+ "p50": 0.04142801299997245,
18
+ "p90": 0.042968025599986956,
19
+ "p95": 0.043157534400017995,
20
+ "p99": 0.04334932583999944,
21
+ "values": [
22
+ 0.0417552949999731,
23
+ 0.04151113800003259,
24
+ 0.0410776079999664,
25
+ 0.0409038129999999,
26
+ 0.04100305800000115,
27
+ 0.041073681000000306,
28
+ 0.0408478280000395,
29
+ 0.042322915999989164,
30
+ 0.04213539500000252,
31
+ 0.04142801299997245,
32
+ 0.041465391999963686,
33
+ 0.041812492000019574,
34
+ 0.042816388000005645,
35
+ 0.04286695199999713,
36
+ 0.04340024999999059,
37
+ 0.04303540799998018,
38
+ 0.042841746000021885,
39
+ 0.04318806600002745,
40
+ 0.0381774969999924,
41
+ 0.03814085899995234,
42
+ 0.03836804400003757,
43
+ 0.03759846399998423,
44
+ 0.040151196000010714,
45
+ 0.037438476000033916,
46
+ 0.03748794800003452
47
+ ]
48
+ },
49
+ "throughput": {
50
+ "unit": "samples/s",
51
+ "value": 24.441561094121024
52
+ },
53
+ "energy": {
54
+ "unit": "kWh",
55
+ "cpu": 1.5024992135854868e-06,
56
+ "ram": 6.279269910329606e-08,
57
+ "gpu": 0.0,
58
+ "total": 1.5652919126887828e-06
59
+ },
60
+ "efficiency": {
61
+ "unit": "samples/kWh",
62
+ "value": 638858.4722719536
63
+ }
64
+ }
65
+ }