IlyasMoutawwakil committed (verified)
Commit c8bdeb9 · 1 Parent(s): 8066cb1

Upload cuda_inference_timm_image-classification_timm/resnet50.a1_in1k/benchmark.json with huggingface_hub

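For context on the commit message: this file is a report emitted by optimum-benchmark (see the `_target_` field in the diff below) and is pushed to the Hub programmatically. A minimal sketch of such an upload with huggingface_hub is shown here; the repo_id is a placeholder assumption, since the target repository is not named in this view.

```python
from huggingface_hub import HfApi

api = HfApi()  # picks up the token from `huggingface-cli login` or the HF_TOKEN env var

# Upload a locally generated benchmark result into a dataset repo on the Hub.
# repo_id is hypothetical; the actual repository is not shown in this commit view.
api.upload_file(
    path_or_fileobj="benchmark.json",
    path_in_repo="cuda_inference_timm_image-classification_timm/resnet50.a1_in1k/benchmark.json",
    repo_id="your-org/benchmark-results",
    repo_type="dataset",
    commit_message="Upload cuda_inference_timm_image-classification_timm/resnet50.a1_in1k/benchmark.json with huggingface_hub",
)
```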
cuda_inference_timm_image-classification_timm/resnet50.a1_in1k/benchmark.json CHANGED
@@ -3,7 +3,7 @@
   "name": "cuda_inference_timm_image-classification_timm/resnet50.a1_in1k",
   "backend": {
     "name": "pytorch",
-    "version": "2.4.0+cu124",
+    "version": "2.4.1+cu124",
     "_target_": "optimum_benchmark.backends.pytorch.backend.PyTorchBackend",
     "task": "image-classification",
     "library": "timm",
@@ -104,7 +104,7 @@
   "load": {
     "memory": {
       "unit": "MB",
-      "max_ram": 853.66784,
+      "max_ram": 853.23776,
       "max_global_vram": 709.361664,
       "max_process_vram": 0.0,
       "max_reserved": 123.731968,
@@ -113,31 +113,31 @@
     "latency": {
       "unit": "s",
       "count": 1,
-      "total": 7.6462294921875,
-      "mean": 7.6462294921875,
+      "total": 7.68804052734375,
+      "mean": 7.68804052734375,
       "stdev": 0.0,
-      "p50": 7.6462294921875,
-      "p90": 7.6462294921875,
-      "p95": 7.6462294921875,
-      "p99": 7.6462294921875,
+      "p50": 7.68804052734375,
+      "p90": 7.68804052734375,
+      "p95": 7.68804052734375,
+      "p99": 7.68804052734375,
       "values": [
-        7.6462294921875
+        7.68804052734375
       ]
     },
     "throughput": null,
     "energy": {
       "unit": "kWh",
-      "cpu": 4.283666434027783e-06,
-      "ram": 2.329168317686223e-06,
-      "gpu": 6.5341718940001075e-06,
-      "total": 1.3147006645714113e-05
+      "cpu": 4.352293013889469e-06,
+      "ram": 2.3392547052618832e-06,
+      "gpu": 6.37444954399997e-06,
+      "total": 1.3065997263151322e-05
     },
     "efficiency": null
   },
   "forward": {
     "memory": {
       "unit": "MB",
-      "max_ram": 1092.345856,
+      "max_ram": 1093.271552,
       "max_global_vram": 791.150592,
       "max_process_vram": 0.0,
       "max_reserved": 148.897792,
@@ -146,165 +146,165 @@
     "latency": {
       "unit": "s",
       "count": 135,
-      "total": 0.9978073587417603,
-      "mean": 0.007391165620309335,
-      "stdev": 0.00021254868884949108,
-      "p50": 0.0074711041450500485,
-      "p90": 0.007607475280761718,
-      "p95": 0.007657471895217895,
-      "p99": 0.007833672199249266,
+      "total": 0.999445728302002,
+      "mean": 0.0074033016911259405,
+      "stdev": 0.00020936887337130502,
+      "p50": 0.007481344223022461,
+      "p90": 0.007637849617004394,
+      "p95": 0.00769105920791626,
+      "p99": 0.007720591259002686,
       "values": [
-        0.007651328086853027,
-        0.007904255867004394,
-        0.007676928043365478,
-        0.007704607963562012,
-        0.007589888095855713,
-        0.007608320236206055,
-        0.007591936111450195,
-        0.007664639949798584,
-        0.007504896163940429,
-        0.007508927822113037,
-        0.007606207847595215,
-        0.007535615921020508,
-        0.007452672004699707,
-        0.007461887836456299,
-        0.0075038719177246095,
-        0.007554048061370849,
-        0.007592959880828858,
-        0.007598144054412842,
-        0.007576576232910156,
-        0.007494656085968018,
-        0.0074741759300231934,
-        0.007525375843048096,
-        0.00757862377166748,
-        0.007571455955505371,
+        0.007952383995056152,
+        0.007717887878417969,
+        0.007709695816040039,
+        0.007721983909606934,
+        0.00771068811416626,
+        0.007665664196014404,
+        0.007550975799560547,
         0.00753868818283081,
+        0.007556096076965332,
+        0.007499839782714844,
+        0.007629824161529541,
+        0.0075632638931274416,
+        0.0074301438331604,
         0.007561215877532959,
-        0.007583744049072265,
-        0.0075049281120300295,
-        0.007489535808563232,
-        0.007567296028137207,
-        0.007608320236206055,
-        0.007460864067077637,
-        0.007432191848754883,
-        0.007429183959960938,
-        0.007506944179534912,
-        0.0074926080703735356,
-        0.007517183780670166,
-        0.0074711041450500485,
-        0.007623680114746094,
-        0.007688223838806152,
-        0.00758681583404541,
-        0.0075673599243164065,
-        0.007568384170532226,
-        0.007521279811859131,
-        0.007461887836456299,
-        0.007548927783966064,
-        0.007654399871826172,
-        0.007549952030181885,
-        0.00790015983581543,
-        0.007575551986694336,
-        0.007523327827453613,
-        0.0074711041450500485,
-        0.007485504150390625,
-        0.007517183780670166,
-        0.00760422420501709,
-        0.007458816051483155,
-        0.00750486421585083,
-        0.007518208026885987,
-        0.007489535808563232,
+        0.007512063980102539,
+        0.007501823902130127,
+        0.007576576232910156,
+        0.007672832012176513,
+        0.007715839862823487,
+        0.007649280071258545,
+        0.007485439777374267,
+        0.00749567985534668,
+        0.0076267518997192385,
+        0.007607295989990235,
+        0.0075939841270446775,
+        0.00758784008026123,
+        0.00757862377166748,
+        0.007494592189788819,
+        0.007540736198425293,
+        0.007592959880828858,
+        0.00759500789642334,
+        0.007717887878417969,
+        0.0074629120826721195,
+        0.007437312126159668,
         0.007505919933319092,
-        0.007513088226318359,
-        0.007529471874237061,
-        0.007577600002288819,
-        0.0076308479309082035,
+        0.007516160011291504,
+        0.007481344223022461,
+        0.007547904014587403,
+        0.007481344223022461,
+        0.007540671825408935,
+        0.007624703884124756,
+        0.007612448215484619,
+        0.00764518404006958,
+        0.007616511821746826,
+        0.007574528217315674,
+        0.007616511821746826,
+        0.007566336154937744,
         0.007573503971099854,
-        0.007576608180999756,
-        0.007577600002288819,
-        0.007489535808563232,
-        0.007549952030181885,
-        0.007536640167236328,
+        0.007581696033477783,
+        0.007571455955505371,
+        0.007532544136047363,
+        0.0074741759300231934,
         0.007444479942321777,
-        0.007641088008880615,
-        0.007501823902130127,
-        0.007459839820861816,
-        0.00738918399810791,
-        0.0074926080703735356,
+        0.007540736198425293,
+        0.007529471874237061,
+        0.007621632099151611,
+        0.007449567794799805,
+        0.007485439777374267,
+        0.007522304058074952,
+        0.007507967948913574,
+        0.007507967948913574,
+        0.007524352073669433,
+        0.007514143943786621,
+        0.007572480201721191,
+        0.0076574721336364745,
+        0.007590911865234375,
+        0.0076431999206542965,
         0.007683072090148926,
-        0.0074065918922424315,
-        0.007205887794494629,
-        0.007208000183105469,
-        0.0072325119972229,
-        0.007448575973510742,
-        0.007346144199371338,
-        0.007362559795379638,
-        0.007383039951324463,
-        0.007561215877532959,
-        0.007502848148345947,
-        0.00725708818435669,
-        0.007168000221252442,
-        0.007196671962738037,
-        0.007270400047302246,
-        0.007214079856872559,
-        0.007219200134277344,
-        0.007241727828979493,
-        0.007122943878173828,
-        0.007123968124389648,
-        0.007175168037414551,
-        0.007160768032073974,
-        0.0071823358535766605,
-        0.007121920108795166,
-        0.00709939193725586,
-        0.007090176105499267,
-        0.007102464199066162,
-        0.007055424213409424,
-        0.007196671962738037,
+        0.0075642881393432615,
+        0.007580671787261963,
+        0.007566336154937744,
+        0.007481344223022461,
+        0.007580671787261963,
+        0.007519231796264648,
+        0.007485439777374267,
+        0.00733900785446167,
+        0.007359488010406494,
+        0.0074741759300231934,
+        0.007336959838867187,
+        0.0072202239036560055,
+        0.007188479900360107,
+        0.007172095775604248,
+        0.0074035201072692874,
+        0.00728166389465332,
+        0.007309311866760254,
+        0.007322624206542969,
+        0.007354368209838867,
+        0.007516160011291504,
+        0.0073134078979492185,
+        0.00722438383102417,
+        0.007236608028411865,
+        0.00724070405960083,
+        0.0072724480628967286,
+        0.0071833600997924804,
+        0.007229440212249756,
+        0.00719155216217041,
+        0.007158783912658692,
+        0.007169023990631103,
+        0.007095232009887696,
+        0.007151616096496582,
+        0.007300096035003662,
+        0.007128064155578613,
+        0.007138304233551026,
         0.00709939193725586,
-        0.007090176105499267,
-        0.00714137601852417,
+        0.007132160186767578,
+        0.007112703800201416,
+        0.007181312084197998,
+        0.007132160186767578,
+        0.007101439952850342,
+        0.007124991893768311,
+        0.007124991893768311,
         0.007131135940551757,
-        0.007142399787902832,
+        0.0071198720932006835,
+        0.0070974078178405765,
+        0.007299071788787842,
+        0.007153664112091064,
+        0.00714137601852417,
+        0.007111680030822754,
+        0.007126016139984131,
+        0.007138304233551026,
+        0.007135231971740722,
+        0.007137279987335205,
+        0.007105535984039306,
+        0.00709939193725586,
+        0.007143424034118652,
+        0.0071157760620117185,
+        0.007153664112091064,
+        0.007108640193939209,
+        0.007137279987335205,
+        0.007137343883514404,
+        0.007145472049713135,
+        0.007116799831390381,
         0.007094272136688232,
-        0.007079936027526855,
-        0.007015423774719238,
-        0.00709119987487793,
-        0.007139328002929687,
-        0.007130112171173096,
-        0.007044095993041993,
-        0.007028736114501953,
-        0.007156735897064209,
-        0.007176191806793213,
-        0.007158783912658692,
-        0.007170048236846924,
-        0.007061503887176514,
-        0.00709222412109375,
-        0.007077888011932373,
-        0.0070553598403930665,
-        0.007036928176879883,
-        0.007127039909362793,
-        0.007122943878173828,
-        0.007045119762420654,
-        0.007137375831604004,
-        0.007394303798675537,
-        0.007230463981628418,
-        0.007151648044586182,
-        0.00714137601852417
+        0.0071506562232971195,
+        0.007136256217956543
       ]
     },
     "throughput": {
       "unit": "samples/s",
-      "value": 135.29665703231097
+      "value": 135.07486817654106
     },
     "energy": {
       "unit": "kWh",
-      "cpu": 8.4304477201538e-08,
-      "ram": 4.5999935589792976e-08,
-      "gpu": 1.741805963971633e-07,
-      "total": 3.0448500918849426e-07
+      "cpu": 8.35601075126258e-08,
+      "ram": 4.553801833588441e-08,
+      "gpu": 1.856139791048956e-07,
+      "total": 3.147121049534058e-07
     },
     "efficiency": {
       "unit": "samples/kWh",
-      "value": 3284233.9354084283
+      "value": 3177507.265403895
     }
   }
 }
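The summary fields in the updated forward section are consistent with the raw measurements above: the mean latency is the total divided by the count, the throughput is the count divided by the total latency, and the efficiency value equals the reciprocal of the energy total, which suggests the energy figures in this report are normalized per sample. A quick sanity check using only the numbers visible in the new version of the file:

```python
# Re-derive the forward-pass summary metrics from the values in the updated report.
count = 135                               # "count"
total_latency_s = 0.999445728302002       # "latency.total" (s)
total_energy_kwh = 3.147121049534058e-07  # "energy.total" (kWh)

mean_latency = total_latency_s / count    # ~0.0074033 s, matches "latency.mean"
throughput = count / total_latency_s      # ~135.075 samples/s, matches "throughput.value"
efficiency = 1.0 / total_energy_kwh       # ~3.178e6 samples/kWh, matches "efficiency.value"

print(f"{mean_latency:.7f} s, {throughput:.2f} samples/s, {efficiency:.0f} samples/kWh")
```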