IlyasMoutawwakil (HF staff) committed (verified)
Commit ead13e2 · 1 parent: 5ac05de

Upload cuda_inference_transformers_image-classification_google/vit-base-patch16-224/benchmark.json with huggingface_hub
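A result file like this is typically pushed with the huggingface_hub client. The snippet below is a minimal illustrative sketch, not the exact command used for this commit: the local path and repo_id are placeholder assumptions; only the in-repo path and the commit message wording come from the commit title above.

```python
# Hypothetical upload of an optimum-benchmark result file with huggingface_hub.
from huggingface_hub import HfApi

api = HfApi()
api.upload_file(
    path_or_fileobj="benchmark.json",  # local result file (placeholder path)
    path_in_repo=(
        "cuda_inference_transformers_image-classification_google/"
        "vit-base-patch16-224/benchmark.json"
    ),
    repo_id="user/benchmark-results",  # hypothetical results repo
    repo_type="dataset",               # assumption: results are stored in a dataset repo
    commit_message=(
        "Upload cuda_inference_transformers_image-classification_google/"
        "vit-base-patch16-224/benchmark.json with huggingface_hub"
    ),
)
```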

cuda_inference_transformers_image-classification_google/vit-base-patch16-224/benchmark.json CHANGED
@@ -3,7 +3,7 @@
  "name": "cuda_inference_transformers_image-classification_google/vit-base-patch16-224",
  "backend": {
  "name": "pytorch",
- "version": "2.2.2",
+ "version": "2.3.0+cu121",
  "_target_": "optimum_benchmark.backends.pytorch.backend.PyTorchBackend",
  "task": "image-classification",
  "model": "google/vit-base-patch16-224",
@@ -78,7 +78,7 @@
  "machine": "x86_64",
  "platform": "Linux-5.10.215-203.850.amzn2.x86_64-x86_64-with-glibc2.35",
  "processor": "x86_64",
- "python_version": "3.10.14",
+ "python_version": "3.10.12",
  "gpu": [
  "NVIDIA A10G"
  ],
@@ -104,7 +104,7 @@
  "forward": {
  "memory": {
  "unit": "MB",
- "max_ram": 904.749056,
+ "max_ram": 911.888384,
  "max_global_vram": 1057.488896,
  "max_process_vram": 0.0,
  "max_reserved": 406.847488,
@@ -112,215 +112,200 @@
  },
  "latency": {
  "unit": "s",
- "count": 184,
- "total": 1.0002912659645087,
- "mean": 0.005436365575894065,
- "stdev": 0.0002299414006024986,
- "p50": 0.005403647899627686,
- "p90": 0.005625081682205201,
- "p95": 0.006027775955200195,
- "p99": 0.006210948967933654,
+ "count": 169,
+ "total": 1.0005367369651792,
+ "mean": 0.0059203357216874525,
+ "stdev": 0.00019163485696278295,
+ "p50": 0.00587059211730957,
+ "p90": 0.006157516765594482,
+ "p95": 0.006291884708404541,
+ "p99": 0.006449930248260498,
  "values": [
- 0.006874112129211426,
- 0.006032383918762207,
- 0.00620851182937622,
- 0.006116352081298828,
+ 0.0065146880149841305,
+ 0.0061296639442443845,
+ 0.006369279861450195,
+ 0.0062638077735900875,
+ 0.006136703968048096,
+ 0.006222655773162842,
+ 0.0061562881469726565,
+ 0.0062156801223754886,
+ 0.00619212818145752,
+ 0.005985280036926269,
+ 0.006096896171569824,
+ 0.006362112045288086,
+ 0.006135807991027832,
  0.006158336162567139,
- 0.0061265921592712404,
- 0.006107135772705078,
- 0.006070240020751953,
- 0.006111231803894043,
- 0.0062228479385375976,
- 0.005389311790466309,
- 0.005327871799468994,
- 0.005198847770690918,
- 0.005265408039093018,
- 0.005191679954528809,
- 0.005210112094879151,
- 0.00521017599105835,
- 0.0051701760292053225,
- 0.005193727970123291,
- 0.005203968048095703,
- 0.005180384159088135,
- 0.0051773438453674315,
- 0.005620736122131348,
- 0.005414912223815918,
- 0.005398528099060058,
- 0.005455872058868408,
- 0.005392384052276611,
- 0.005392384052276611,
- 0.005400576114654541,
- 0.005390336036682129,
- 0.005430272102355957,
- 0.005387263774871826,
- 0.005398528099060058,
- 0.00522438383102417,
- 0.005197824001312256,
- 0.005198847770690918,
- 0.005215231895446777,
- 0.005215231895446777,
- 0.005239808082580567,
- 0.005410816192626953,
- 0.005445631980895996,
- 0.005388288021087646,
- 0.00551526403427124,
- 0.005414912223815918,
- 0.005453824043273926,
- 0.005438464164733887,
- 0.005403647899627686,
- 0.005346303939819336,
- 0.005403647899627686,
- 0.005619647979736328,
- 0.0054271998405456545,
- 0.00542310380935669,
- 0.00542310380935669,
- 0.005231616020202637,
- 0.005314559936523438,
- 0.005192704200744629,
- 0.00522547197341919,
- 0.005208064079284668,
- 0.0051968002319335935,
- 0.005197824001312256,
- 0.005480447769165039,
- 0.0053944320678710935,
- 0.0054271998405456545,
- 0.005437439918518067,
- 0.005416959762573242,
- 0.0054282240867614745,
- 0.0054282240867614745,
- 0.0055214080810546875,
- 0.005431295871734619,
- 0.005403647899627686,
- 0.0054241280555725096,
- 0.005432320117950439,
- 0.005384128093719482,
- 0.005499904155731201,
- 0.005401599884033203,
- 0.00537497615814209,
- 0.0054651198387145996,
- 0.00541593599319458,
- 0.005404672145843506,
- 0.005406720161437988,
- 0.005411839962005615,
- 0.005441535949707031,
+ 0.006148096084594727,
+ 0.006157311916351318,
+ 0.006052864074707031,
+ 0.005989376068115234,
+ 0.006277120113372803,
+ 0.006076416015625,
+ 0.005957632064819336,
+ 0.005880832195281982,
+ 0.005898240089416504,
+ 0.005908480167388916,
+ 0.006007775783538818,
+ 0.005835775852203369,
+ 0.006030335903167725,
+ 0.005847040176391601,
+ 0.005891071796417236,
+ 0.005857279777526855,
+ 0.005922815799713135,
+ 0.0063610877990722655,
+ 0.00636518383026123,
+ 0.006124544143676758,
+ 0.006132736206054688,
+ 0.006145023822784424,
+ 0.006340608119964599,
+ 0.006726528167724609,
+ 0.006105088233947754,
+ 0.0061337919235229495,
+ 0.0059955201148986816,
+ 0.006077439785003662,
+ 0.005955584049224853,
+ 0.0063017277717590334,
+ 0.006079487800598145,
+ 0.005743775844573975,
+ 0.00581324815750122,
+ 0.0058787841796875,
+ 0.005912576198577881,
+ 0.005923840045928955,
+ 0.005884928226470947,
+ 0.005937151908874512,
+ 0.006043647766113281,
+ 0.005910528182983398,
+ 0.005822463989257813,
+ 0.005854207992553711,
+ 0.005754879951477051,
+ 0.005980160236358643,
+ 0.005948416233062744,
+ 0.005900288105010986,
+ 0.005859327793121338,
+ 0.005844992160797119,
+ 0.005831583976745606,
+ 0.0057794561386108395,
+ 0.0057415680885314945,
+ 0.005903359889984131,
+ 0.005799935817718506,
+ 0.005811200141906738,
+ 0.006053887844085694,
+ 0.005907455921173096,
+ 0.005803008079528809,
+ 0.005765120029449463,
+ 0.005857279777526855,
+ 0.005912576198577881,
+ 0.005903264045715332,
+ 0.006050816059112549,
+ 0.00587059211730957,
+ 0.00586137580871582,
+ 0.00582860803604126,
+ 0.005787583827972412,
+ 0.0058715839385986325,
+ 0.005853184223175049,
+ 0.006132736206054688,
+ 0.00608460807800293,
+ 0.006158336162567139,
+ 0.006025216102600098,
+ 0.006141952037811279,
+ 0.006090816020965576,
+ 0.005830783843994141,
+ 0.0057712640762329105,
+ 0.006039552211761475,
+ 0.005720064163208007,
+ 0.005755904197692871,
+ 0.00561030387878418,
+ 0.005672959804534912,
+ 0.005980160236358643,
+ 0.006108191967010498,
+ 0.00587775993347168,
+ 0.005698560237884521,
+ 0.005811200141906738,
+ 0.005751872062683105,
+ 0.005795839786529541,
+ 0.005873663902282715,
+ 0.006121535778045654,
+ 0.005874688148498535,
+ 0.005942272186279297,
+ 0.00601907205581665,
+ 0.0062740478515625,
  0.006001664161682129,
- 0.005670911788940429,
- 0.005607423782348633,
- 0.0055848960876464845,
- 0.0056269440650939945,
- 0.005597184181213379,
- 0.005599232196807862,
- 0.005732319831848145,
- 0.005709824085235596,
- 0.005769216060638428,
- 0.005285888195037842,
- 0.005182464122772217,
- 0.005191679954528809,
- 0.005155839920043945,
- 0.005158912181854248,
- 0.005185535907745361,
- 0.0051660799980163576,
- 0.005158912181854248,
- 0.005174272060394287,
- 0.005183487892150879,
- 0.005181439876556396,
- 0.005197824001312256,
- 0.00561359977722168,
- 0.005385248184204101,
- 0.0054282240867614745,
- 0.005443583965301513,
- 0.0053944320678710935,
- 0.0054282240867614745,
- 0.005354496002197265,
- 0.005381120204925537,
- 0.005396480083465576,
- 0.005390336036682129,
- 0.005407743930816651,
- 0.005408768177032471,
- 0.005362688064575195,
- 0.005385216236114502,
- 0.005400576114654541,
- 0.005391359806060791,
- 0.005403647899627686,
- 0.005413919925689697,
- 0.005434368133544922,
- 0.005387263774871826,
- 0.0053851838111877446,
- 0.005410816192626953,
- 0.005410816192626953,
- 0.005463039875030518,
- 0.005417984008789062,
- 0.005496831893920898,
- 0.005390336036682129,
- 0.005639167785644531,
- 0.005419007778167725,
- 0.005399551868438721,
- 0.0054876160621643065,
- 0.005408768177032471,
- 0.005412831783294678,
- 0.005414912223815918,
- 0.005391359806060791,
- 0.005398528099060058,
- 0.005414912223815918,
- 0.005390336036682129,
- 0.00542310380935669,
- 0.005362688064575195,
- 0.005319680213928223,
- 0.00532480001449585,
- 0.005338111877441406,
- 0.005312511920928955,
- 0.005321728229522705,
- 0.00532480001449585,
- 0.0053309440612792965,
- 0.005535744190216065,
- 0.005462016105651855,
- 0.005385216236114502,
- 0.0054579200744628905,
- 0.005437439918518067,
- 0.005371903896331787,
- 0.005412864208221436,
- 0.005745664119720459,
- 0.0054496641159057614,
- 0.005426176071166992,
- 0.005407743930816651,
- 0.005493760108947754,
- 0.005390336036682129,
- 0.0053944320678710935,
- 0.005403711795806885,
- 0.005656576156616211,
- 0.005495808124542236,
- 0.005444608211517334,
- 0.005392384052276611,
- 0.005453824043273926,
- 0.005387263774871826,
- 0.005409791946411133,
- 0.005375999927520752,
- 0.0053637118339538575,
- 0.005375999927520752,
- 0.005372928142547607,
- 0.005430272102355957,
- 0.005384223937988281,
- 0.005409791946411133,
- 0.0053821439743042,
- 0.005396480083465576,
- 0.005455872058868408,
- 0.005352447986602784
+ 0.005917695999145508,
+ 0.005725183963775635,
+ 0.006419456005096436,
+ 0.006128640174865723,
+ 0.005857279777526855,
+ 0.005783552169799804,
+ 0.005778528213500976,
+ 0.005700607776641845,
+ 0.005798912048339844,
+ 0.005716991901397705,
+ 0.005701632022857666,
+ 0.005822527885437012,
+ 0.005832704067230224,
+ 0.005783552169799804,
+ 0.005961728096008301,
+ 0.005848000049591065,
+ 0.0057190399169921875,
+ 0.005805056095123291,
+ 0.005720064163208007,
+ 0.005763072013854981,
+ 0.0058009600639343266,
+ 0.005758975982666016,
+ 0.00576204776763916,
+ 0.005894144058227539,
+ 0.005766176223754883,
+ 0.005765120029449463,
+ 0.005790719985961914,
+ 0.00577023983001709,
+ 0.00581324815750122,
+ 0.005723135948181152,
+ 0.0058726401329040525,
+ 0.005907455921173096,
+ 0.0058111357688903804,
+ 0.005883903980255127,
+ 0.005816319942474365,
+ 0.005730303764343261,
+ 0.00576204776763916,
+ 0.0057712640762329105,
+ 0.005751808166503906,
+ 0.0057825279235839844,
+ 0.005734399795532226,
+ 0.005811200141906738,
+ 0.0057712640762329105,
+ 0.005810175895690918,
+ 0.005780479907989502,
+ 0.005795839786529541,
+ 0.0057149438858032225,
+ 0.005786623954772949,
+ 0.005755839824676513,
+ 0.005690368175506591,
+ 0.0057292799949646,
+ 0.005633024215698243,
+ 0.005687295913696289,
+ 0.005687295913696289,
+ 0.005702655792236328,
+ 0.005772287845611572,
+ 0.005782559871673584,
+ 0.005739520072937012,
+ 0.005725183963775635,
+ 0.005814271926879883
  ]
  },
  "throughput": {
  "unit": "samples/s",
- "value": 183.94642266778388
+ "value": 168.90934011339706
  },
  "energy": {
  "unit": "kWh",
- "cpu": 6.298004841128141e-08,
- "ram": 3.4427910473978237e-08,
- "gpu": 3.6340602358507294e-07,
- "total": 4.608139824703326e-07
+ "cpu": 6.758813755680816e-08,
+ "ram": 3.693851211892251e-08,
+ "gpu": 3.274813225909061e-07,
+ "total": 4.320079722666368e-07
  },
  "efficiency": {
  "unit": "samples/kWh",
- "value": 2170073.0404038476
+ "value": 2314772.097267679
  }
  }
  }
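For readers inspecting these reports, the summary fields are simple functions of one another. The sketch below is a sanity check, not part of the benchmark itself; it uses the new (right-hand) values from this diff, and the reading of "total" energy as a per-forward figure is an assumption inferred from the fact that efficiency.value equals 1 / energy.total here.

```python
# Relationship between the summary fields in this benchmark report (new side of the diff).
count = 169                                # latency.count
total_latency_s = 1.0005367369651792       # latency.total
total_energy_kwh = 4.320079722666368e-07   # energy.total

mean_latency = total_latency_s / count     # ~0.0059203 s, matches latency.mean
throughput = count / total_latency_s       # ~168.909 samples/s, matches throughput.value
efficiency = 1.0 / total_energy_kwh        # ~2314772 samples/kWh, matches efficiency.value

print(mean_latency, throughput, efficiency)
```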