IlyasMoutawwakil committed (verified)
Commit 2fd3c4a · 1 Parent(s): cb0cc44

Upload cuda_inference_transformers_fill-mask_google-bert/bert-base-uncased/benchmark.json with huggingface_hub
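The commit message indicates the report was pushed with the huggingface_hub client. As a rough illustration only (not the exact call used for this commit), such an upload is typically done with HfApi.upload_file; the local path and repo_id below are placeholders:

from huggingface_hub import HfApi

api = HfApi()  # authenticates with the locally saved token by default
api.upload_file(
    path_or_fileobj="benchmark.json",  # local file to push (placeholder path)
    path_in_repo="cuda_inference_transformers_fill-mask_google-bert/bert-base-uncased/benchmark.json",
    repo_id="<namespace>/<benchmark-repo>",  # placeholder, not taken from this commit
    repo_type="dataset",  # assumption: these benchmark reports usually live in a dataset repo
    commit_message="Upload cuda_inference_transformers_fill-mask_google-bert/bert-base-uncased/benchmark.json with huggingface_hub",
)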

cuda_inference_transformers_fill-mask_google-bert/bert-base-uncased/benchmark.json CHANGED
@@ -3,7 +3,7 @@
  "name": "cuda_inference_transformers_fill-mask_google-bert/bert-base-uncased",
  "backend": {
  "name": "pytorch",
- "version": "2.4.0+cu124",
  "_target_": "optimum_benchmark.backends.pytorch.backend.PyTorchBackend",
  "task": "fill-mask",
  "library": "transformers",
@@ -104,7 +104,7 @@
  "load": {
  "memory": {
  "unit": "MB",
- "max_ram": 783.142912,
  "max_global_vram": 1218.9696,
  "max_process_vram": 0.0,
  "max_reserved": 589.299712,
@@ -113,31 +113,31 @@
  "latency": {
  "unit": "s",
  "count": 1,
- "total": 7.35937939453125,
- "mean": 7.35937939453125,
  "stdev": 0.0,
- "p50": 7.35937939453125,
- "p90": 7.35937939453125,
- "p95": 7.35937939453125,
- "p99": 7.35937939453125,
  "values": [
- 7.35937939453125
  ]
  },
  "throughput": null,
  "energy": {
  "unit": "kWh",
- "cpu": 9.631715972218618e-07,
- "ram": 5.121125882191104e-07,
- "gpu": 1.7069458099995202e-06,
- "total": 3.1822299954404926e-06
  },
  "efficiency": null
  },
  "forward": {
  "memory": {
  "unit": "MB",
- "max_ram": 1038.7456,
  "max_global_vram": 1229.45536,
  "max_process_vram": 0.0,
  "max_reserved": 589.299712,
@@ -145,211 +145,219 @@
  },
  "latency": {
  "unit": "s",
- "count": 180,
- "total": 1.0002493767738336,
- "mean": 0.005556940982076857,
- "stdev": 0.0001881500804512677,
- "p50": 0.005517312049865723,
- "p90": 0.005677772665023804,
- "p95": 0.006115174460411071,
- "p99": 0.00629932026386261,
  "values": [
- 0.006275072097778321,
- 0.006386688232421875,
- 0.006276095867156982,
- 0.006171648025512695,
- 0.0062494721412658695,
- 0.006164480209350586,
- 0.006114304065704346,
- 0.006151167869567871,
- 0.006131711959838867,
- 0.006419456005096436,
- 0.005553152084350586,
- 0.005411839962005615,
- 0.005323775768280029,
- 0.00531763219833374,
- 0.005351424217224121,
- 0.005557248115539551,
- 0.005536767959594726,
- 0.005540863990783691,
- 0.005526527881622314,
- 0.0055244798660278325,
- 0.005538815975189209,
- 0.005511168003082275,
- 0.005550079822540284,
- 0.005501952171325684,
- 0.0054876160621643065,
- 0.005535744190216065,
- 0.005533696174621582,
- 0.005516287803649903,
- 0.005536767959594726,
- 0.00550707197189331,
  0.005566463947296142,
- 0.005525504112243652,
- 0.005684224128723145,
- 0.005575679779052735,
- 0.005513216018676758,
- 0.005552127838134766,
- 0.005533696174621582,
- 0.00553984022140503,
- 0.005575679779052735,
- 0.005506048202514649,
- 0.005530623912811279,
- 0.0055101442337036136,
- 0.005496831893920898,
- 0.005606400012969971,
- 0.0055285758972167965,
- 0.005497856140136719,
- 0.005548031806945801,
- 0.005494783878326416,
- 0.005708799839019775,
- 0.005501952171325684,
- 0.005500927925109863,
  0.005513216018676758,
- 0.005484543800354004,
- 0.005488639831542969,
- 0.005508096218109131,
- 0.005494783878326416,
- 0.005559296131134033,
- 0.005496831893920898,
- 0.0054906878471374515,
  0.005536767959594726,
- 0.005501952171325684,
- 0.005517312049865723,
- 0.005499904155731201,
- 0.005576704025268555,
- 0.005537792205810547,
- 0.005761023998260498,
- 0.00562278413772583,
- 0.005517312049865723,
- 0.005517312049865723,
- 0.0055511040687561035,
- 0.005518335819244385,
- 0.005494783878326416,
  0.005565440177917481,
- 0.005504000186920166,
- 0.005536767959594726,
- 0.0054876160621643065,
- 0.005494783878326416,
- 0.005525504112243652,
- 0.005489664077758789,
- 0.0055101442337036136,
- 0.0055285758972167965,
- 0.00547430419921875,
- 0.005537792205810547,
- 0.005496831893920898,
- 0.0054906878471374515,
- 0.005542912006378174,
- 0.0055203838348388675,
- 0.005485568046569824,
- 0.005493760108947754,
  0.005469183921813964,
- 0.0054989118576049805,
- 0.005495808124542236,
- 0.005962751865386963,
- 0.005517312049865723,
- 0.005478400230407715,
- 0.005513216018676758,
- 0.005471231937408447,
- 0.005489664077758789,
- 0.0055203838348388675,
- 0.005559296131134033,
- 0.005512191772460938,
- 0.005527552127838135,
- 0.005500927925109863,
- 0.005543935775756836,
- 0.005526527881622314,
- 0.005483520030975342,
- 0.00553984022140503,
- 0.005477375984191894,
- 0.005552127838134766,
- 0.005489664077758789,
- 0.0054876160621643065,
- 0.00551526403427124,
- 0.0054917120933532714,
- 0.005501952171325684,
- 0.005516287803649903,
  0.005493760108947754,
- 0.005513216018676758,
  0.005499904155731201,
- 0.005480447769165039,
  0.005513216018676758,
  0.005484543800354004,
  0.0054876160621643065,
- 0.005669888019561768,
- 0.005677055835723877,
- 0.005746687889099121,
- 0.0056514558792114256,
- 0.0057149438858032225,
- 0.0056258559226989744,
- 0.005667840003967285,
- 0.00572211217880249,
- 0.005642240047454834,
- 0.0057190399169921875,
- 0.00561356782913208,
- 0.005516287803649903,
- 0.005566463947296142,
- 0.005516287803649903,
  0.005560319900512695,
- 0.0055101442337036136,
- 0.005525504112243652,
- 0.0056145920753479005,
- 0.00555622386932373,
- 0.0055552000999450684,
- 0.005505023956298828,
- 0.005495808124542236,
- 0.005567488193511963,
- 0.005518335819244385,
  0.005512191772460938,
- 0.005525504112243652,
- 0.005513216018676758,
- 0.005527552127838135,
- 0.005489664077758789,
- 0.005489664077758789,
- 0.005544960021972656,
- 0.005488639831542969,
- 0.005493760108947754,
- 0.005526527881622314,
- 0.005501952171325684,
- 0.005543935775756836,
- 0.0054906878471374515,
- 0.005481472015380859,
- 0.0055316481590271,
- 0.005477375984191894,
- 0.005542912006378174,
- 0.005518335819244385,
  0.005495808124542236,
- 0.005526527881622314,
- 0.0055848960876464845,
- 0.005343232154846191,
- 0.005631999969482422,
- 0.0052930560111999515,
- 0.005283840179443359,
- 0.005319680213928223,
- 0.00531660795211792,
- 0.005284863948822022,
- 0.005321728229522705,
- 0.005312511920928955,
- 0.005298175811767578,
- 0.005318655967712403,
- 0.005291007995605469,
- 0.005278719902038574
  ]
  },
  "throughput": {
  "unit": "samples/s",
- "value": 179.95512337189857
  },
  "energy": {
  "unit": "kWh",
- "cpu": 6.353910284091177e-08,
- "ram": 3.4735521129230305e-08,
- "gpu": 1.286275538823534e-07,
- "total": 2.269021778524955e-07
  },
  "efficiency": {
  "unit": "samples/kWh",
- "value": 4407185.5522254165
  }
  }
  }
 
  "name": "cuda_inference_transformers_fill-mask_google-bert/bert-base-uncased",
  "backend": {
  "name": "pytorch",
+ "version": "2.4.0+cu121",
  "_target_": "optimum_benchmark.backends.pytorch.backend.PyTorchBackend",
  "task": "fill-mask",
  "library": "transformers",
 
  "load": {
  "memory": {
  "unit": "MB",
+ "max_ram": 757.579776,
  "max_global_vram": 1218.9696,
  "max_process_vram": 0.0,
  "max_reserved": 589.299712,
 
  "latency": {
  "unit": "s",
  "count": 1,
+ "total": 7.45509765625,
+ "mean": 7.45509765625,
  "stdev": 0.0,
+ "p50": 7.45509765625,
+ "p90": 7.45509765625,
+ "p95": 7.45509765625,
+ "p99": 7.45509765625,
  "values": [
+ 7.45509765625
  ]
  },
  "throughput": null,
  "energy": {
  "unit": "kWh",
+ "cpu": 9.307679090277867e-07,
+ "ram": 4.943789285316454e-07,
+ "gpu": 0.0,
+ "total": 1.4251468375594321e-06
  },
  "efficiency": null
  },
  "forward": {
  "memory": {
  "unit": "MB",
+ "max_ram": 924.839936,
  "max_global_vram": 1229.45536,
  "max_process_vram": 0.0,
  "max_reserved": 589.299712,
 
  },
  "latency": {
  "unit": "s",
+ "count": 188,
+ "total": 1.0048574457168578,
+ "mean": 0.005344986413387542,
+ "stdev": 0.00024542633124477865,
+ "p50": 0.005389312028884888,
+ "p90": 0.005617971229553223,
+ "p95": 0.005688473677635192,
+ "p99": 0.006041344132423401,
  "values": [
+ 0.005710847854614258,
+ 0.005793791770935058,
+ 0.005662720203399658,
+ 0.005445631980895996,
+ 0.0054568958282470706,
+ 0.005518335819244385,
+ 0.005756927967071533,
+ 0.005600255966186523,
  0.005566463947296142,
+ 0.00566476821899414,
+ 0.006031360149383545,
+ 0.005504000186920166,
+ 0.005600255966186523,
  0.005513216018676758,
+ 0.0056258559226989744,
  0.005536767959594726,
+ 0.005462016105651855,
+ 0.005434368133544922,
+ 0.00540880012512207,
+ 0.005414912223815918,
  0.005565440177917481,
+ 0.005426176071166992,
+ 0.005442560195922852,
  0.005469183921813964,
+ 0.005425151824951172,
  0.005493760108947754,
+ 0.00546611213684082,
+ 0.005436416149139404,
+ 0.005547008037567139,
+ 0.0053309440612792965,
+ 0.0054241280555725096,
  0.005499904155731201,
+ 0.0054282240867614745,
+ 0.005447679996490478,
+ 0.005485568046569824,
+ 0.005402624130249023,
+ 0.005485568046569824,
+ 0.0053821439743042,
+ 0.0053821439743042,
+ 0.005434368133544922,
+ 0.005384191989898681,
+ 0.005377024173736572,
+ 0.005509119987487793,
+ 0.005351424217224121,
+ 0.005241856098175048,
+ 0.005440512180328369,
+ 0.005451776027679443,
+ 0.005337088108062744,
+ 0.005434368133544922,
+ 0.005315584182739258,
+ 0.005369855880737305,
+ 0.005392384052276611,
+ 0.005345280170440674,
+ 0.005414912223815918,
+ 0.0054579200744628905,
+ 0.005437439918518067,
+ 0.00557260799407959,
  0.005513216018676758,
+ 0.0055316481590271,
+ 0.005595136165618897,
+ 0.005475327968597412,
+ 0.005468160152435303,
+ 0.005499904155731201,
+ 0.005536767959594726,
+ 0.005666816234588623,
+ 0.005478400230407715,
+ 0.005386240005493164,
+ 0.005588992118835449,
+ 0.005903359889984131,
+ 0.00566374397277832,
+ 0.00572211217880249,
+ 0.006278143882751465,
+ 0.005787648200988769,
+ 0.005692416191101074,
  0.005484543800354004,
+ 0.005530623912811279,
+ 0.0056145920753479005,
+ 0.005483520030975342,
+ 0.005438464164733887,
+ 0.005501952171325684,
+ 0.005469183921813964,
+ 0.005494783878326416,
+ 0.005501952171325684,
  0.0054876160621643065,
+ 0.005588992118835449,
+ 0.0054609918594360355,
+ 0.005550079822540284,
+ 0.005633024215698243,
+ 0.0056483840942382815,
+ 0.005681151866912842,
  0.005560319900512695,
+ 0.0056258559226989744,
+ 0.006108160018920898,
+ 0.005476352214813233,
+ 0.005590015888214111,
  0.005512191772460938,
  0.005495808124542236,
+ 0.005323775768280029,
+ 0.0054568958282470706,
+ 0.005585919857025146,
+ 0.005400576114654541,
+ 0.0054609918594360355,
+ 0.005438464164733887,
+ 0.005295104026794434,
+ 0.005189631938934326,
+ 0.005342207908630371,
+ 0.005413887977600097,
+ 0.005346303939819336,
+ 0.005224448204040527,
+ 0.005127168178558349,
+ 0.005175295829772949,
+ 0.005227519989013672,
+ 0.005213183879852295,
+ 0.005224448204040527,
+ 0.005355519771575928,
+ 0.005377024173736572,
+ 0.005269504070281982,
+ 0.0053012480735778805,
+ 0.005403647899627686,
+ 0.005610496044158936,
+ 0.005371903896331787,
+ 0.005173247814178467,
+ 0.005192704200744629,
+ 0.005185535907745361,
+ 0.005243904113769531,
+ 0.005153791904449463,
+ 0.005167103767395019,
+ 0.005254144191741943,
+ 0.005194752216339111,
+ 0.005156864166259765,
+ 0.005212160110473632,
+ 0.005148672103881836,
+ 0.005141503810882568,
+ 0.005150720119476319,
+ 0.005105663776397705,
+ 0.005037055969238281,
+ 0.005040128231048584,
+ 0.00506879997253418,
+ 0.005148672103881836,
+ 0.005129216194152832,
+ 0.005092351913452148,
+ 0.005165056228637695,
+ 0.0050022401809692385,
+ 0.004983808040618896,
+ 0.005004288196563721,
+ 0.005015552043914795,
+ 0.0050032639503479,
+ 0.004989952087402344,
+ 0.00501145601272583,
+ 0.005074944019317627,
+ 0.00501043176651001,
+ 0.005000192165374756,
+ 0.0050063362121582035,
+ 0.005025792121887207,
+ 0.005022719860076904,
+ 0.005015552043914795,
+ 0.005078015804290771,
+ 0.00506060791015625,
+ 0.005128191947937012,
+ 0.005071872234344482,
+ 0.0050432000160217285,
+ 0.004999167919158935,
+ 0.005021696090698242,
+ 0.005004288196563721,
+ 0.0050462718009948735,
+ 0.0050094079971313476,
+ 0.005015552043914795,
+ 0.005039103984832764,
+ 0.005041215896606445,
+ 0.004994048118591309,
+ 0.004992000102996826,
+ 0.005001215934753418,
+ 0.005059584140777588,
+ 0.0050022401809692385,
+ 0.005153791904449463,
+ 0.005054463863372802,
+ 0.005033984184265137,
+ 0.0049797120094299315,
+ 0.005001215934753418,
+ 0.0050657281875610355,
+ 0.005332992076873779,
+ 0.005051392078399658,
+ 0.00501145601272583,
+ 0.005035007953643799,
+ 0.0050421757698059086,
+ 0.004993023872375488,
+ 0.005549056053161621,
+ 0.005238783836364746
  ]
  },
  "throughput": {
  "unit": "samples/s",
+ "value": 187.09121458107145
  },
  "energy": {
  "unit": "kWh",
+ "cpu": 5.998896607744065e-08,
+ "ram": 3.27961887022411e-08,
+ "gpu": 1.2353545236363426e-07,
+ "total": 2.1632060714331604e-07
  },
  "efficiency": {
  "unit": "samples/kWh",
+ "value": 4622768.090408895
  }
  }
  }
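For reference, the headline forward-pass numbers above can be recomputed from the raw per-call latencies. A minimal sketch, assuming the file has been downloaded locally and that the "forward" section sits directly under the JSON root (the exact top-level nesting is not visible in this diff):

import json

with open("benchmark.json") as f:  # placeholder local path
    report = json.load(f)

lat = report["forward"]["latency"]          # assumed nesting; keys as shown in the diff
mean_s = sum(lat["values"]) / lat["count"]  # should reproduce lat["mean"]
throughput = lat["count"] / lat["total"]    # samples/s, e.g. 188 / 1.00486 ≈ 187.09
print(f"mean latency: {mean_s * 1e3:.3f} ms, throughput: {throughput:.2f} samples/s")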