Upload cuda_inference_transformers_text-classification_FacebookAI/roberta-base/benchmark.json with huggingface_hub
cuda_inference_transformers_text-classification_FacebookAI/roberta-base/benchmark.json
CHANGED
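This commit was created programmatically with the huggingface_hub client, as the commit title indicates. As a rough sketch only, assuming a placeholder repo_id and a dataset-type repository (neither is shown on this page), an upload that produces a commit like this typically looks like:

    # Sketch of an automated benchmark upload; repo_id and repo_type are
    # placeholders, not taken from this page.
    from huggingface_hub import HfApi

    api = HfApi()
    api.upload_file(
        path_or_fileobj="benchmark.json",  # local result file written by the benchmark run
        path_in_repo="cuda_inference_transformers_text-classification_FacebookAI/roberta-base/benchmark.json",
        repo_id="<user-or-org>/<benchmark-repo>",  # placeholder
        repo_type="dataset",                       # assumption
        commit_message="Upload cuda_inference_transformers_text-classification_FacebookAI/roberta-base/benchmark.json with huggingface_hub",
    )

upload_file commits over the Hub's HTTP API without a local git clone, which is why automated benchmark runs typically use it.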
@@ -3,7 +3,7 @@
 "name": "cuda_inference_transformers_text-classification_FacebookAI/roberta-base",
 "backend": {
 "name": "pytorch",
- "version": "2.
 "_target_": "optimum_benchmark.backends.pytorch.backend.PyTorchBackend",
 "task": "text-classification",
 "library": "transformers",
@@ -104,7 +104,7 @@
 "load": {
 "memory": {
 "unit": "MB",
- "max_ram":
 "max_global_vram": 1192.7552,
 "max_process_vram": 0.0,
 "max_reserved": 555.74528,
@@ -113,31 +113,31 @@
 "latency": {
 "unit": "s",
 "count": 1,
- "total":
- "mean":
 "stdev": 0.0,
- "p50":
- "p90":
- "p95":
- "p99":
 "values": [
-
 ]
 },
 "throughput": null,
 "energy": {
 "unit": "kWh",
- "cpu":
- "ram":
- "gpu":
- "total":
 },
 "efficiency": null
 },
 "forward": {
 "memory": {
 "unit": "MB",
- "max_ram":
 "max_global_vram": 1203.24096,
 "max_process_vram": 0.0,
 "max_reserved": 555.74528,
@@ -146,180 +146,180 @@
 "latency": {
 "unit": "s",
 "count": 150,
- "total":
- "mean": 0.
- "stdev": 0.
- "p50": 0.
- "p90": 0.
- "p95": 0.
- "p99": 0.
 "values": [
- 0.
- 0.
- 0.
- 0.
- 0.
- 0.
- 0.
- 0.
- 0.
- 0.
- 0.
- 0.
- 0.
- 0.
- 0.
- 0.
- 0.
- 0.
- 0.
- 0.
- 0.
- 0.
- 0.
 0.006479872226715088,
- 0.
- 0.
- 0.
- 0.
- 0.
- 0.
- 0.
- 0.
- 0.
- 0.
- 0.
- 0.
- 0.
- 0.
- 0.
- 0.
- 0.
- 0.
- 0.
- 0.
- 0.
- 0.
- 0.
- 0.
- 0.
- 0.
- 0.
- 0.
- 0.
- 0.
- 0.
- 0.
- 0.
- 0.
- 0.
- 0.
- 0.
- 0.
 0.006651904106140137,
- 0.
- 0.006662144184112549,
- 0.0065731201171875,
- 0.0065382399559021,
- 0.006533120155334473,
- 0.006536191940307618,
- 0.006504447937011719,
- 0.006533120155334473,
- 0.006497280120849609,
- 0.00648089599609375,
- 0.006497280120849609,
- 0.006486015796661377,
- 0.0066078720092773435,
- 0.006504479885101318,
- 0.006496255874633789,
- 0.006490111827850342,
- 0.006474751949310303,
- 0.006418464183807373,
- 0.006540287971496582,
- 0.006493184089660644,
- 0.006466559886932373,
- 0.006487040042877197,
- 0.0064839677810668945,
- 0.006500288009643555,
- 0.0065484800338745115,
- 0.006481919765472412,
 0.00667955207824707,
- 0.
- 0.
- 0.
- 0.
- 0.
- 0.
- 0.
 0.006685696125030518,
- 0.
- 0.
- 0.
- 0.
 0.0067420158386230465,
- 0.
- 0.
- 0.
- 0.
- 0.
- 0.
- 0.
- 0.
- 0.
- 0.
- 0.
- 0.
- 0.
- 0.
- 0.
- 0.
- 0.
- 0.
- 0.
- 0.
- 0.
- 0.
- 0.
- 0.
- 0.006887423992156983,
- 0.006848512172698974,
- 0.006766592025756836,
- 0.006801407814025879,
- 0.006759424209594727,
- 0.006802432060241699,
- 0.006804480075836182,
- 0.0068310718536376954,
- 0.0067645440101623535,
- 0.006723584175109864,
- 0.006778880119323731,
- 0.006763519763946534,
- 0.006752255916595459,
- 0.006788095951080322,
- 0.006798367977142334,
- 0.006790143966674805,
- 0.006812672138214112,
- 0.006823935985565186,
- 0.006815711975097657,
- 0.006761472225189209,
- 0.006790143966674805,
- 0.0067983360290527345,
- 0.006808576107025147
 ]
 },
 "throughput": {
 "unit": "samples/s",
- "value":
 },
 "energy": {
 "unit": "kWh",
- "cpu": 7.
- "ram": 4.
- "gpu": 1.
- "total": 2.
 },
 "efficiency": {
 "unit": "samples/kWh",
- "value":
 }
 }
 }
 "name": "cuda_inference_transformers_text-classification_FacebookAI/roberta-base",
 "backend": {
 "name": "pytorch",
+ "version": "2.4.1+cu124",
 "_target_": "optimum_benchmark.backends.pytorch.backend.PyTorchBackend",
 "task": "text-classification",
 "library": "transformers",

 "load": {
 "memory": {
 "unit": "MB",
+ "max_ram": 799.162368,
 "max_global_vram": 1192.7552,
 "max_process_vram": 0.0,
 "max_reserved": 555.74528,

 "latency": {
 "unit": "s",
 "count": 1,
+ "total": 7.9870517578125,
+ "mean": 7.9870517578125,
 "stdev": 0.0,
+ "p50": 7.9870517578125,
+ "p90": 7.9870517578125,
+ "p95": 7.9870517578125,
+ "p99": 7.9870517578125,
 "values": [
+ 7.9870517578125
 ]
 },
 "throughput": null,
 "energy": {
 "unit": "kWh",
+ "cpu": 2.523608562500199e-06,
+ "ram": 1.3423439428583533e-06,
+ "gpu": 3.62166956399939e-06,
+ "total": 7.487622069357942e-06
 },
 "efficiency": null
 },
 "forward": {
 "memory": {
 "unit": "MB",
+ "max_ram": 1089.286144,
 "max_global_vram": 1203.24096,
 "max_process_vram": 0.0,
 "max_reserved": 555.74528,

 "latency": {
 "unit": "s",
 "count": 150,
+ "total": 0.9992682905197142,
+ "mean": 0.006661788603464763,
+ "stdev": 0.00021736465453809132,
+ "p50": 0.006667263984680176,
+ "p90": 0.006838272094726563,
+ "p95": 0.006951833581924438,
+ "p99": 0.007703009381294247,
 "values": [
+ 0.007003136157989502,
+ 0.006979584217071533,
+ 0.006907904148101806,
+ 0.006960127830505371,
+ 0.006931456089019775,
+ 0.0069283838272094726,
+ 0.006832096099853515,
+ 0.006941696166992187,
+ 0.007547904014587403,
+ 0.007121920108795166,
+ 0.006694911956787109,
+ 0.006699007987976074,
+ 0.006686719894409179,
+ 0.00667955207824707,
+ 0.00664572811126709,
+ 0.006675456047058105,
+ 0.006659071922302246,
+ 0.006669312000274658,
+ 0.0066938881874084475,
+ 0.006556672096252441,
+ 0.0064471039772033695,
+ 0.006456352233886718,
+ 0.00642252779006958,
+ 0.006500351905822754,
+ 0.006432767868041992,
+ 0.0064174079895019534,
+ 0.0066344962120056155,
+ 0.0067051520347595215,
+ 0.006614016056060791,
+ 0.006680575847625733,
+ 0.0066713919639587406,
+ 0.006670335769653321,
+ 0.0066713600158691405,
+ 0.006670335769653321,
+ 0.0064245758056640625,
 0.006479872226715088,
+ 0.006460415840148926,
+ 0.00658841609954834,
+ 0.006749184131622315,
+ 0.00662937593460083,
+ 0.0066713600158691405,
+ 0.006686719894409179,
+ 0.006673408031463623,
+ 0.006651904106140137,
+ 0.0066499199867248535,
+ 0.006619135856628418,
+ 0.006856704235076904,
+ 0.006690815925598144,
+ 0.006636544227600098,
+ 0.006627327919006347,
+ 0.006659071922302246,
+ 0.006662112236022949,
+ 0.0066375679969787596,
+ 0.006710271835327148,
+ 0.006651904106140137,
+ 0.006603775978088379,
+ 0.0066406397819519045,
+ 0.006633471965789795,
+ 0.006594560146331787,
+ 0.0066344962120056155,
+ 0.0066119680404663084,
+ 0.006616000175476074,
+ 0.006623231887817383,
+ 0.006631423950195312,
+ 0.006433792114257812,
+ 0.0066119680404663084,
+ 0.0068618240356445315,
+ 0.006642687797546387,
+ 0.006721504211425781,
+ 0.006743040084838867,
+ 0.006396927833557129,
+ 0.006421504020690918,
+ 0.00638976001739502,
+ 0.006434815883636475,
+ 0.006435840129852295,
+ 0.0064174079895019534,
+ 0.006412288188934326,
+ 0.0064204797744750975,
+ 0.006399968147277832,
+ 0.006433792114257812,
+ 0.00638259220123291,
+ 0.00683622407913208,
+ 0.0066938881874084475,
+ 0.006696959972381592,
+ 0.006631423950195312,
 0.006651904106140137,
+ 0.006799424171447754,
 0.00667955207824707,
+ 0.006668288230895996,
+ 0.0066007041931152345,
+ 0.006624192237854004,
+ 0.006552576065063476,
+ 0.0063907837867736815,
+ 0.006440959930419922,
+ 0.00642252779006958,
+ 0.0063836159706115725,
+ 0.00642252779006958,
+ 0.006401023864746094,
+ 0.006421504020690918,
+ 0.0064737281799316405,
+ 0.006402048110961914,
+ 0.006433792114257812,
+ 0.006378496170043945,
+ 0.006402048110961914,
+ 0.0064102401733398436,
+ 0.0067758078575134275,
 0.006685696125030518,
+ 0.006919167995452881,
+ 0.00785203218460083,
+ 0.007924736022949219,
+ 0.007020544052124023,
+ 0.0065771517753601075,
+ 0.006684671878814697,
+ 0.006716415882110595,
+ 0.006659071922302246,
+ 0.006713344097137451,
+ 0.006700032234191895,
+ 0.006638591766357422,
+ 0.0066938881874084475,
+ 0.00673689603805542,
+ 0.0066713600158691405,
+ 0.006726719856262207,
+ 0.006718463897705078,
+ 0.0066744318008422855,
+ 0.006740992069244385,
 0.0067420158386230465,
+ 0.006654975891113281,
+ 0.006713344097137451,
+ 0.006696959972381592,
+ 0.006651904106140137,
+ 0.006721536159515381,
+ 0.006672383785247803,
+ 0.006632448196411133,
+ 0.00672051191329956,
+ 0.006719488143920899,
+ 0.00667955207824707,
+ 0.00669593620300293,
+ 0.00673689603805542,
+ 0.006690815925598144,
+ 0.006667263984680176,
+ 0.0066877121925354,
+ 0.006669312000274658,
+ 0.006659071922302246,
+ 0.0066938881874084475,
+ 0.0066826238632202144,
+ 0.006661119937896728,
+ 0.0067010560035705566,
+ 0.0067041277885437015,
+ 0.006667263984680176,
+ 0.006722559928894043
 ]
 },
 "throughput": {
 "unit": "samples/s",
+ "value": 150.10983679066385
 },
 "energy": {
 "unit": "kWh",
+ "cpu": 7.672732945340256e-08,
+ "ram": 4.190022030068636e-08,
+ "gpu": 1.4601803796129068e-07,
+ "total": 2.6464558771537963e-07
 },
 "efficiency": {
 "unit": "samples/kWh",
+ "value": 3778638.4750743606
 }
 }
 }
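For reference, the aggregate fields added in this revision follow directly from the raw values list: total is the sum of the per-iteration latencies, mean is total divided by count, and throughput is count divided by total (150 / 0.9992682905197142 ≈ 150.11 samples/s, matching the stored value). The sketch below re-derives them from the uploaded file, assuming it has been downloaded locally as benchmark.json; numpy's default percentile interpolation may differ slightly from what optimum-benchmark computes internally.

    # Sketch: sanity-check the aggregated latency fields against the raw values.
    import json

    import numpy as np

    with open("benchmark.json") as f:
        report = json.load(f)
    # The measurement sections may sit at the top level or under a "report" key,
    # depending on the optimum-benchmark version; handle both (assumption).
    report = report.get("report", report)

    lat = report["forward"]["latency"]
    values = np.array(lat["values"])  # per-forward latencies in seconds

    print("count:", values.size, "vs", lat["count"])
    print("total (s):", values.sum(), "vs", lat["total"])
    print("mean (s):", values.mean(), "vs", lat["mean"])
    for p in (50, 90, 95, 99):
        print(f"p{p} (s):", np.percentile(values, p), "vs", lat[f"p{p}"])

    # Throughput is samples per second over the measured window.
    print("throughput:", values.size / values.sum(),
          "vs", report["forward"]["throughput"]["value"])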