IlyasMoutawwakil (HF staff) committed
Commit d2ef10b · verified · 1 parent: 25f36e7

Upload cuda_inference_transformers_image-classification_google/vit-base-patch16-224/benchmark.json with huggingface_hub
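For reference, result files like this are typically pushed with the huggingface_hub Python client; the sketch below is an assumption about that workflow (the exact call behind this commit is not shown here), with placeholder repo and path values.

# Hypothetical upload sketch; repo_id and the local path are placeholders, not taken from this commit.
from huggingface_hub import HfApi

api = HfApi()
api.upload_file(
    path_or_fileobj="benchmark.json",  # local result file produced by the benchmark run
    path_in_repo="cuda_inference_transformers_image-classification_google/vit-base-patch16-224/benchmark.json",
    repo_id="<namespace>/<benchmark-repo>",  # placeholder repository ID
    repo_type="dataset",  # assumption: benchmark artifacts are commonly stored in dataset repos
    commit_message="Upload benchmark.json with huggingface_hub",
)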

cuda_inference_transformers_image-classification_google/vit-base-patch16-224/benchmark.json CHANGED
@@ -3,7 +3,7 @@
  "name": "cuda_inference_transformers_image-classification_google/vit-base-patch16-224",
  "backend": {
  "name": "pytorch",
- "version": "2.3.1+cu121",
+ "version": "2.4.0+cu121",
  "_target_": "optimum_benchmark.backends.pytorch.backend.PyTorchBackend",
  "task": "image-classification",
  "library": "transformers",
@@ -104,7 +104,7 @@
  "load": {
  "memory": {
  "unit": "MB",
- "max_ram": 758.304768,
+ "max_ram": 800.354304,
  "max_global_vram": 1030.22592,
  "max_process_vram": 0.0,
  "max_reserved": 400.556032,
@@ -113,274 +113,256 @@
  "latency": {
  "unit": "s",
  "count": 1,
- "total": 7.2574208984375,
- "mean": 7.2574208984375,
+ "total": 7.44705029296875,
+ "mean": 7.44705029296875,
  "stdev": 0.0,
- "p50": 7.2574208984375,
- "p90": 7.2574208984375,
- "p95": 7.2574208984375,
- "p99": 7.2574208984375,
+ "p50": 7.44705029296875,
+ "p90": 7.44705029296875,
+ "p95": 7.44705029296875,
+ "p99": 7.44705029296875,
  "values": [
- 7.2574208984375
+ 7.44705029296875
  ]
  },
  "throughput": null,
  "energy": {
  "unit": "kWh",
- "cpu": 1.5620256861118442e-06,
- "ram": 8.401957228599201e-07,
- "gpu": 1.7197235979990802e-06,
- "total": 4.121945006970844e-06
+ "cpu": 1.5601498659723884e-06,
+ "ram": 8.392450985372218e-07,
+ "gpu": 1.62555685599957e-06,
+ "total": 4.02495182050918e-06
  },
  "efficiency": null
  },
  "forward": {
  "memory": {
  "unit": "MB",
- "max_ram": 890.568704,
- "max_global_vram": 1057.488896,
+ "max_ram": 933.732352,
+ "max_global_vram": 1049.100288,
  "max_process_vram": 0.0,
  "max_reserved": 406.847488,
  "max_allocated": 363.853824
  },
  "latency": {
  "unit": "s",
- "count": 211,
- "total": 1.0307317762374866,
- "mean": 0.004884984721504681,
- "stdev": 0.0002220187784680356,
- "p50": 0.0048527359962463375,
- "p90": 0.004857855796813965,
- "p95": 0.0048783359527587895,
- "p99": 0.0065101822376251285,
+ "count": 193,
+ "total": 0.998469791889191,
+ "mean": 0.005173418610824822,
+ "stdev": 0.0001632424517239881,
+ "p50": 0.00511897611618042,
+ "p90": 0.005450137615203857,
+ "p95": 0.005532057666778565,
+ "p99": 0.0056514558219909646,
  "values": [
- 0.006789120197296142,
- 0.006617087841033936,
- 0.0066375679969787596,
- 0.005548031806945801,
+ 0.005769216060638428,
+ 0.005540863990783691,
+ 0.005582848072052002,
+ 0.005500991821289062,
+ 0.00552243185043335,
+ 0.005574656009674072,
+ 0.005557248115539551,
+ 0.005453824043273926,
+ 0.005443583965301513,
+ 0.005157887935638428,
+ 0.005161983966827393,
+ 0.005157887935638428,
  0.0050769920349121095,
- 0.004908031940460205,
- 0.004925439834594727,
- 0.004859903812408447,
- 0.00485478401184082,
- 0.004853759765625,
- 0.004857855796813965,
- 0.0048568320274353025,
- 0.0048568320274353025,
- 0.0048558077812194825,
- 0.0048568320274353025,
- 0.0048568320274353025,
- 0.00485478401184082,
- 0.0048568320274353025,
- 0.004858880043029785,
- 0.0048568320274353025,
- 0.004853759765625,
- 0.0048568320274353025,
- 0.0048558077812194825,
- 0.004858880043029785,
- 0.0048568320274353025,
- 0.00485478401184082,
- 0.0048527359962463375,
- 0.004853759765625,
- 0.00485478401184082,
- 0.0048496642112731934,
- 0.004857855796813965,
- 0.0048527359962463375,
- 0.0048568320274353025,
- 0.0048568320274353025,
- 0.004851712226867676,
- 0.0048558077812194825,
- 0.0048568320274353025,
- 0.00485478401184082,
- 0.004853759765625,
- 0.0048558077812194825,
- 0.0048558077812194825,
- 0.00485478401184082,
- 0.004853759765625,
- 0.0048527359962463375,
- 0.00485478401184082,
- 0.00485478401184082,
- 0.00485478401184082,
- 0.0048568320274353025,
- 0.004857855796813965,
- 0.004853759765625,
- 0.004853759765625,
- 0.004857855796813965,
- 0.004853759765625,
- 0.0048558077812194825,
- 0.0048568320274353025,
- 0.0048568320274353025,
- 0.004857855796813965,
- 0.0048568320274353025,
- 0.00485478401184082,
- 0.00485478401184082,
- 0.004858880043029785,
- 0.0048568320274353025,
- 0.004872191905975342,
- 0.0048855037689208985,
- 0.004884479999542236,
- 0.004857855796813965,
- 0.0048496642112731934,
- 0.004848639965057373,
- 0.0048527359962463375,
- 0.004966400146484375,
- 0.004850687980651855,
- 0.004850687980651855,
- 0.004851712226867676,
- 0.0048568320274353025,
- 0.0048558077812194825,
- 0.004850687980651855,
- 0.004847616195678711,
- 0.004850687980651855,
- 0.004850687980651855,
- 0.004851712226867676,
- 0.0048568320274353025,
- 0.004850687980651855,
- 0.004850687980651855,
- 0.0048527359962463375,
- 0.004850687980651855,
- 0.004851712226867676,
- 0.004850687980651855,
- 0.0048496642112731934,
- 0.0048527359962463375,
- 0.004965375900268554,
- 0.004850687980651855,
- 0.004851712226867676,
- 0.004847616195678711,
- 0.004846591949462891,
- 0.0048496642112731934,
- 0.004848639965057373,
- 0.0048527359962463375,
- 0.004850687980651855,
- 0.0048527359962463375,
- 0.004853759765625,
- 0.0048496642112731934,
- 0.0048496642112731934,
- 0.00485478401184082,
- 0.0048527359962463375,
- 0.0048496642112731934,
- 0.004848639965057373,
- 0.004853759765625,
- 0.0048527359962463375,
- 0.004850687980651855,
- 0.004851712226867676,
- 0.004853759765625,
- 0.004843520164489746,
- 0.004850687980651855,
- 0.0048455681800842285,
- 0.0048455681800842285,
- 0.0048527359962463375,
- 0.004850687980651855,
- 0.00485478401184082,
- 0.004851712226867676,
- 0.004851712226867676,
- 0.004850687980651855,
- 0.004853759765625,
- 0.004847616195678711,
- 0.004851712226867676,
- 0.004847616195678711,
- 0.0048496642112731934,
- 0.0048527359962463375,
- 0.004850687980651855,
- 0.004850687980651855,
- 0.004847616195678711,
- 0.004848639965057373,
- 0.004851712226867676,
- 0.004851712226867676,
- 0.004846591949462891,
- 0.004851712226867676,
- 0.004853759765625,
- 0.004848639965057373,
- 0.004853759765625,
- 0.004850687980651855,
- 0.004851712226867676,
- 0.004851712226867676,
- 0.004850687980651855,
- 0.004847616195678711,
- 0.004853759765625,
- 0.004851712226867676,
- 0.0048527359962463375,
- 0.004853759765625,
- 0.0048496642112731934,
- 0.004853759765625,
- 0.004848639965057373,
- 0.0048496642112731934,
- 0.004850687980651855,
- 0.004846591949462891,
- 0.0048558077812194825,
- 0.0048527359962463375,
- 0.0048527359962463375,
- 0.004851712226867676,
- 0.0048568320274353025,
- 0.0048527359962463375,
- 0.0048527359962463375,
- 0.0048527359962463375,
- 0.0048527359962463375,
- 0.004846591949462891,
- 0.004846591949462891,
- 0.0048558077812194825,
- 0.0048527359962463375,
- 0.004848639965057373,
- 0.0048527359962463375,
- 0.0048527359962463375,
- 0.0048558077812194825,
- 0.0048527359962463375,
- 0.004853759765625,
- 0.0048527359962463375,
- 0.004853759765625,
- 0.0048558077812194825,
- 0.004847616195678711,
- 0.004851712226867676,
- 0.004850687980651855,
- 0.0048527359962463375,
- 0.00485478401184082,
- 0.004850687980651855,
- 0.0048496642112731934,
- 0.0048496642112731934,
- 0.004853759765625,
- 0.0048558077812194825,
- 0.004850687980651855,
- 0.0048527359962463375,
- 0.004850687980651855,
- 0.004851712226867676,
- 0.0048496642112731934,
- 0.004853759765625,
- 0.004850687980651855,
- 0.004850687980651855,
- 0.0048496642112731934,
- 0.004851712226867676,
- 0.0048527359962463375,
- 0.004851712226867676,
- 0.004850687980651855,
- 0.004850687980651855,
- 0.0048496642112731934,
- 0.004850687980651855,
- 0.0048496642112731934,
- 0.004850687980651855,
- 0.0048527359962463375,
- 0.0048496642112731934,
- 0.004850687980651855,
- 0.004850687980651855,
- 0.004851712226867676,
- 0.004851712226867676,
- 0.004853759765625,
- 0.004850687980651855
+ 0.005096447944641113,
+ 0.0051701760292053225,
+ 0.00516812801361084,
+ 0.005149695873260498,
+ 0.005059584140777588,
+ 0.005017600059509277,
+ 0.004951039791107178,
+ 0.0050135040283203125,
+ 0.005041152000427246,
+ 0.005087232112884522,
+ 0.005123072147369385,
+ 0.005126143932342529,
+ 0.005141503810882568,
+ 0.00515174388885498,
+ 0.005173247814178467,
+ 0.005134335994720459,
+ 0.005074944019317627,
+ 0.0050206718444824215,
+ 0.005064703941345215,
+ 0.0051476478576660155,
+ 0.005150720119476319,
+ 0.0051404800415039064,
+ 0.005149695873260498,
+ 0.005194752216339111,
+ 0.00516096019744873,
+ 0.005111807823181152,
+ 0.005137407779693603,
+ 0.005079040050506592,
+ 0.005117951869964599,
+ 0.0051066880226135255,
+ 0.005131264209747314,
+ 0.005105663776397705,
+ 0.005134335994720459,
+ 0.005008384227752686,
+ 0.004987904071807861,
+ 0.005001215934753418,
+ 0.0049725441932678225,
+ 0.004993023872375488,
+ 0.0051066880226135255,
+ 0.005053440093994141,
+ 0.005053440093994141,
+ 0.005070847988128662,
+ 0.005101568222045898,
+ 0.0050841598510742185,
+ 0.005119999885559082,
+ 0.005184512138366699,
+ 0.005150720119476319,
+ 0.00515174388885498,
+ 0.00516812801361084,
+ 0.0051660799980163576,
+ 0.005105663776397705,
+ 0.005028863906860351,
+ 0.005022719860076904,
+ 0.00582041597366333,
+ 0.00517632007598877,
+ 0.005198847770690918,
+ 0.005148672103881836,
+ 0.0051066880226135255,
+ 0.00515993595123291,
+ 0.005138432025909424,
+ 0.005149695873260498,
+ 0.005008384227752686,
+ 0.005044223785400391,
+ 0.005092351913452148,
+ 0.005108736038208008,
+ 0.005048319816589355,
+ 0.005223423957824707,
+ 0.0051701760292053225,
+ 0.0050657281875610355,
+ 0.005037055969238281,
+ 0.005527552127838135,
+ 0.005391359806060791,
+ 0.005641215801239013,
+ 0.005538815975189209,
+ 0.005439487934112549,
+ 0.005459968090057373,
+ 0.005368832111358642,
+ 0.005565440177917481,
+ 0.005464064121246338,
+ 0.005553120136260986,
+ 0.005452864170074463,
+ 0.005471231937408447,
+ 0.005497856140136719,
+ 0.005451776027679443,
+ 0.005391359806060791,
+ 0.005440512180328369,
+ 0.005412864208221436,
+ 0.005384191989898681,
+ 0.005318655967712403,
+ 0.005393407821655274,
+ 0.005433407783508301,
+ 0.005342207908630371,
+ 0.005262335777282715,
+ 0.005248000144958496,
+ 0.00519375991821289,
+ 0.0051333122253417966,
+ 0.0051701760292053225,
+ 0.0053606400489807126,
+ 0.005341184139251709,
+ 0.005114848136901855,
+ 0.005078015804290771,
+ 0.005085184097290039,
+ 0.005101568222045898,
+ 0.005182464122772217,
+ 0.005276671886444092,
+ 0.005239808082580567,
+ 0.005128191947937012,
+ 0.00522649621963501,
+ 0.005135359764099121,
+ 0.005164031982421875,
+ 0.005259263992309571,
+ 0.005340159893035889,
+ 0.00541593599319458,
+ 0.005268479824066162,
+ 0.005111807823181152,
+ 0.005112832069396973,
+ 0.005116960048675537,
+ 0.0052295680046081545,
+ 0.005130239963531494,
+ 0.005123072147369385,
+ 0.005145599842071533,
+ 0.005153791904449463,
+ 0.005112832069396973,
+ 0.005182464122772217,
+ 0.0051036162376403805,
+ 0.005120992183685303,
+ 0.005248000144958496,
+ 0.00511897611618042,
+ 0.0050728960037231445,
+ 0.0050954241752624516,
+ 0.005127168178558349,
+ 0.005107711791992187,
+ 0.005064703941345215,
+ 0.005082111835479736,
+ 0.005102591991424561,
+ 0.0050841598510742185,
+ 0.0050728960037231445,
+ 0.005141503810882568,
+ 0.005066751956939697,
+ 0.005073919773101807,
+ 0.00506060791015625,
+ 0.005071872234344482,
+ 0.005055488109588623,
+ 0.005074944019317627,
+ 0.005053440093994141,
+ 0.0050841598510742185,
+ 0.0050728960037231445,
+ 0.005056511878967285,
+ 0.005079040050506592,
+ 0.0050728960037231445,
+ 0.005090303897857666,
+ 0.005004288196563721,
+ 0.0050462718009948735,
+ 0.005024767875671386,
+ 0.005028863906860351,
+ 0.005022719860076904,
+ 0.005040128231048584,
+ 0.005045248031616211,
+ 0.005038080215454102,
+ 0.005007359981536865,
+ 0.005057536125183106,
+ 0.005007359981536865,
+ 0.005033984184265137,
+ 0.0049827837944030765,
+ 0.0050432000160217285,
+ 0.0050769920349121095,
+ 0.005055488109588623,
+ 0.005083136081695557,
+ 0.005108736038208008,
+ 0.005071872234344482,
+ 0.005053440093994141,
+ 0.005092351913452148,
+ 0.005053440093994141,
+ 0.00505241584777832,
+ 0.005058559894561767,
+ 0.005097472190856934,
+ 0.005119999885559082,
+ 0.005082111835479736,
+ 0.00506982421875,
+ 0.005107711791992187
  ]
  },
  "throughput": {
  "unit": "samples/s",
- "value": 204.70893094052062
+ "value": 193.2957827745869
  },
  "energy": {
  "unit": "kWh",
- "cpu": 5.761042733555897e-08,
- "ram": 3.149960962185591e-08,
- "gpu": 3.7158703731132024e-07,
- "total": 4.606970742687351e-07
+ "cpu": 5.7816332928804e-08,
+ "ram": 3.155426039664641e-08,
+ "gpu": 3.382580915339752e-07,
+ "total": 4.2762868485942564e-07
  },
  "efficiency": {
  "unit": "samples/kWh",
- "value": 2170623.726202952
+ "value": 2338477.3646060014
  }
  }
  }
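The derived metrics in the updated report are consistent with the raw measurements above: throughput looks like count divided by total forward latency, and efficiency like the reciprocal of the per-forward energy. A quick check, using values copied from the new side of the diff:

# Sanity check of the derived metrics against the raw numbers in the new report.
count = 193                                # forward latency count
total_latency_s = 0.998469791889191        # forward latency total (s)
total_energy_kwh = 4.2762868485942564e-07  # forward energy total (kWh)

throughput = count / total_latency_s       # ~193.2958 samples/s, matches "throughput.value"
efficiency = 1.0 / total_energy_kwh        # ~2338477.4 samples/kWh, matches "efficiency.value"
print(throughput, efficiency)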