IlyasMoutawwakil committed
Commit ddb85fa · verified · 1 parent: 93f7ff0

Upload cuda_inference_diffusers_text-to-image_CompVis/stable-diffusion-v1-4/benchmark.json with huggingface_hub

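The commit message above indicates the report was pushed with the huggingface_hub client. A minimal sketch of such an upload, assuming the usual HfApi.upload_file call; the repo id and repo type are placeholders, not taken from this page:

from huggingface_hub import HfApi

api = HfApi()  # picks up the token from `huggingface-cli login` / HF_TOKEN

# Upload a single benchmark report into the results repository.
# repo_id and repo_type are assumptions; the actual target repo is not shown here.
api.upload_file(
    path_or_fileobj="benchmark.json",
    path_in_repo="cuda_inference_diffusers_text-to-image_CompVis/stable-diffusion-v1-4/benchmark.json",
    repo_id="<namespace>/<benchmark-results-repo>",
    repo_type="dataset",
    commit_message="Upload cuda_inference_diffusers_text-to-image_CompVis/stable-diffusion-v1-4/benchmark.json with huggingface_hub",
)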
cuda_inference_diffusers_text-to-image_CompVis/stable-diffusion-v1-4/benchmark.json CHANGED
@@ -86,7 +86,7 @@
  "gpu_vram_mb": 24146608128,
  "optimum_benchmark_version": "0.3.1",
  "optimum_benchmark_commit": null,
- "transformers_version": "4.42.4",
+ "transformers_version": "4.43.1",
  "transformers_commit": null,
  "accelerate_version": "0.33.0",
  "accelerate_commit": null,
@@ -104,7 +104,7 @@
  "load": {
  "memory": {
  "unit": "MB",
- "max_ram": 4197.814272,
+ "max_ram": 4217.05728,
  "max_global_vram": 6214.385664,
  "max_process_vram": 0.0,
  "max_reserved": 5628.755968,
@@ -113,31 +113,31 @@
  "latency": {
  "unit": "s",
  "count": 1,
- "total": 60.938515625,
- "mean": 60.938515625,
+ "total": 66.0646953125,
+ "mean": 66.0646953125,
  "stdev": 0.0,
- "p50": 60.938515625,
- "p90": 60.938515625,
- "p95": 60.938515625,
- "p99": 60.938515625,
+ "p50": 66.0646953125,
+ "p90": 66.0646953125,
+ "p95": 66.0646953125,
+ "p99": 66.0646953125,
  "values": [
- 60.938515625
+ 66.0646953125
  ]
  },
  "throughput": null,
  "energy": {
  "unit": "kWh",
- "cpu": 0.0006355796055661308,
- "ram": 0.0003483380808134615,
- "gpu": 0.0008674315272780009,
- "total": 0.0018513492136575933
+ "cpu": 0.0006957949997650253,
+ "ram": 0.00038134068776775413,
+ "gpu": 0.0009891819024559975,
+ "total": 0.0020663175899887773
  },
  "efficiency": null
  },
  "call": {
  "memory": {
  "unit": "MB",
- "max_ram": 1329.39776,
+ "max_ram": 1333.653504,
  "max_global_vram": 8196.194304,
  "max_process_vram": 0.0,
  "max_reserved": 7541.358592,
@@ -146,32 +146,32 @@
  "latency": {
  "unit": "s",
  "count": 2,
- "total": 1.2947911987304688,
- "mean": 0.6473955993652344,
- "stdev": 5.776977539062589e-05,
- "p50": 0.6473955993652344,
- "p90": 0.6474418151855469,
- "p95": 0.6474475921630859,
- "p99": 0.6474522137451172,
+ "total": 1.2870484008789063,
+ "mean": 0.6435242004394531,
+ "stdev": 0.0002220153808594194,
+ "p50": 0.6435242004394531,
+ "p90": 0.6437018127441406,
+ "p95": 0.6437240142822266,
+ "p99": 0.6437417755126954,
  "values": [
- 0.6473378295898438,
- 0.647453369140625
+ 0.6433021850585937,
+ 0.6437462158203126
  ]
  },
  "throughput": {
  "unit": "images/s",
- "value": 1.544650598460186
+ "value": 1.553943114054009
  },
  "energy": {
  "unit": "kWh",
- "cpu": 7.658679038286209e-06,
- "ram": 4.1895763779460535e-06,
- "gpu": 4.1842950141e-05,
- "total": 5.369120555723226e-05
+ "cpu": 7.614044100046158e-06,
+ "ram": 4.164908847699707e-06,
+ "gpu": 4.2184894859000066e-05,
+ "total": 5.396384780674593e-05
  },
  "efficiency": {
  "unit": "images/kWh",
- "value": 18625.024147279535
+ "value": 18530.92469575514
  }
  }
  }
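For reference, the updated call-level throughput is consistent with the recorded latencies: 2 images / 1.2870484008789063 s ≈ 1.5539 images/s, which matches the new "throughput" value. A minimal sketch for checking this from the report file; the exact top-level nesting of the "call" section is not visible in this diff, so it is located generically here:

import json

# Path as committed; adjust to wherever the report is checked out locally.
with open("cuda_inference_diffusers_text-to-image_CompVis/stable-diffusion-v1-4/benchmark.json") as f:
    data = json.load(f)

def find_key(node, key):
    # Walk nested dicts until the requested section is found (assumption:
    # the "call" section sits somewhere inside nested JSON objects).
    if isinstance(node, dict):
        if key in node:
            return node[key]
        for value in node.values():
            found = find_key(value, key)
            if found is not None:
                return found
    return None

call = find_key(data, "call")
latency = call["latency"]
# Sanity check: recorded throughput vs. count / total latency,
# e.g. 2 / 1.2870484008789063 ≈ 1.5539 images/s for the updated run.
print(call["throughput"]["value"], latency["count"] / latency["total"])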