IlyasMoutawwakil committed (verified) · commit eaf2c71 · parent: b6a1610

Upload cuda_inference_transformers_multiple-choice_FacebookAI/roberta-base/benchmark.json with huggingface_hub

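The commit message indicates the file was pushed with the huggingface_hub client. Below is a minimal sketch of such an upload; the repo_id and local path are hypothetical (this page does not show the actual call), and only path_in_repo comes from the commit message.

    # Sketch of an upload like the one this commit describes. The repo_id
    # and local path are hypothetical; only path_in_repo is taken from the
    # commit message. Assumes a token is configured, e.g. via `huggingface-cli login`.
    from huggingface_hub import HfApi

    api = HfApi()
    api.upload_file(
        path_or_fileobj="benchmark.json",  # hypothetical local result file
        path_in_repo="cuda_inference_transformers_multiple-choice_FacebookAI/roberta-base/benchmark.json",
        repo_id="some-org/benchmark-results",  # hypothetical dataset repo
        repo_type="dataset",
    )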
cuda_inference_transformers_multiple-choice_FacebookAI/roberta-base/benchmark.json CHANGED
@@ -3,7 +3,7 @@
     "name": "cuda_inference_transformers_multiple-choice_FacebookAI/roberta-base",
     "backend": {
         "name": "pytorch",
-        "version": "2.6.0.dev20240917+cu124",
+        "version": "2.4.1+cu124",
         "_target_": "optimum_benchmark.backends.pytorch.backend.PyTorchBackend",
         "task": "multiple-choice",
         "library": "transformers",
@@ -104,7 +104,7 @@
     "load": {
         "memory": {
             "unit": "MB",
-            "max_ram": 810.63936,
+            "max_ram": 798.39232,
             "max_global_vram": 1192.7552,
             "max_process_vram": 0.0,
             "max_reserved": 555.74528,
@@ -113,31 +113,31 @@
         "latency": {
             "unit": "s",
             "count": 1,
-            "total": 8.533736328125,
-            "mean": 8.533736328125,
+            "total": 7.99041650390625,
+            "mean": 7.99041650390625,
             "stdev": 0.0,
-            "p50": 8.533736328125,
-            "p90": 8.533736328125,
-            "p95": 8.533736328125,
-            "p99": 8.533736328125,
+            "p50": 7.99041650390625,
+            "p90": 7.99041650390625,
+            "p95": 7.99041650390625,
+            "p99": 7.99041650390625,
             "values": [
-                8.533736328125
+                7.99041650390625
             ]
         },
         "throughput": null,
         "energy": {
             "unit": "kWh",
-            "cpu": 9.009296861260434e-07,
-            "ram": 4.743735752196683e-07,
-            "gpu": 0.0,
-            "total": 1.3753032613457117e-06
+            "cpu": 2.3360344465273563e-06,
+            "ram": 1.2609252684638103e-06,
+            "gpu": 3.566391741999936e-06,
+            "total": 7.163351456991103e-06
         },
         "efficiency": null
     },
     "forward": {
         "memory": {
             "unit": "MB",
-            "max_ram": 1104.769024,
+            "max_ram": 1095.380992,
             "max_global_vram": 1203.24096,
             "max_process_vram": 0.0,
             "max_reserved": 555.74528,
@@ -145,161 +145,163 @@
         },
         "latency": {
             "unit": "s",
-            "count": 130,
-            "total": 1.0037987489700317,
-            "mean": 0.007721528838231015,
-            "stdev": 0.00024111811726450617,
-            "p50": 0.007658495903015137,
-            "p90": 0.007870771360397339,
-            "p95": 0.00833643488883972,
-            "p99": 0.0087493630027771,
+            "count": 132,
+            "total": 1.0021448650360112,
+            "mean": 0.007592006553303112,
+            "stdev": 0.00023317350389960023,
+            "p50": 0.0075407359600067145,
+            "p90": 0.007846911907196046,
+            "p95": 0.007972348499298095,
+            "p99": 0.00828100625038147,
             "values": [
-                0.008416255950927735,
-                0.008787967681884766,
-                0.0084203519821167,
-                0.008455167770385743,
-                0.008898591995239258,
-                0.008002559661865234,
                 0.0077619199752807615,
-                0.007711743831634522,
-                0.007727136135101318,
-                0.008055808067321778,
-                0.0077844481468200685,
-                0.007756800174713135,
-                0.007788544178009033,
-                0.007742464065551758,
-                0.007815135955810546,
-                0.007737343788146973,
-                0.007650303840637207,
-                0.007651328086853027,
-                0.007709695816040039,
-                0.008233983993530274,
-                0.008359935760498047,
-                0.008654848098754882,
-                0.008307711601257324,
-                0.007741439819335938,
-                0.00774348783493042,
-                0.007700479984283447,
-                0.007696383953094482,
-                0.007654399871826172,
-                0.007540736198425293,
-                0.007607295989990235,
-                0.007529471874237061,
-                0.007678976058959961,
-                0.007671807765960693,
-                0.007692287921905518,
-                0.007600128173828125,
-                0.007565311908721924,
-                0.007555071830749512,
-                0.007575551986694336,
-                0.007589824199676513,
-                0.007618559837341309,
-                0.007549983978271484,
-                0.007656447887420655,
-                0.007642111778259277,
-                0.007655424118041992,
-                0.007622655868530274,
-                0.007527423858642578,
-                0.0075263681411743165,
-                0.007543807983398438,
-                0.007541823863983155,
-                0.0075335679054260255,
-                0.00753766393661499,
-                0.00754585599899292,
-                0.007553023815155029,
-                0.007555071830749512,
-                0.007556096076965332,
-                0.007566336154937744,
-                0.007541759967803955,
-                0.007548927783966064,
-                0.00753766393661499,
-                0.007565311908721924,
-                0.007505919933319092,
-                0.007565311908721924,
+                0.007778304100036621,
+                0.007654431819915771,
+                0.007688191890716553,
+                0.007666656017303467,
+                0.008409088134765624,
+                0.008000543594360351,
+                0.00794927978515625,
+                0.007885824203491211,
+                0.007848959922790527,
+                0.007799808025360107,
                 0.008068096160888672,
-                0.007690176010131836,
-                0.007716864109039307,
-                0.007708672046661377,
-                0.007714816093444824,
-                0.007663616180419922,
-                0.0075304961204528805,
-                0.007571455955505371,
-                0.007600128173828125,
-                0.0077547521591186525,
-                0.007664639949798584,
-                0.007660575866699219,
-                0.007639039993286132,
-                0.007670783996582031,
-                0.0076953921318054196,
-                0.007705599784851074,
-                0.007682047843933106,
-                0.007837696075439453,
                 0.008064000129699708,
-                0.007836671829223632,
-                0.0077281279563903805,
-                0.007709695816040039,
-                0.007706655979156494,
-                0.0076605439186096195,
-                0.007648255825042725,
-                0.007627871990203858,
-                0.00766153621673584,
-                0.007628799915313721,
-                0.00765337610244751,
-                0.00762063980102539,
+                0.007798783779144287,
+                0.007769087791442871,
+                0.007790592193603516,
+                0.00775164794921875,
+                0.007800767898559571,
+                0.00780185604095459,
+                0.00782537603378296,
+                0.00793497610092163,
+                0.0077916159629821775,
+                0.007811071872711181,
+                0.007575551986694336,
+                0.007552000045776367,
+                0.007539711952209473,
+                0.007529471874237061,
+                0.007476223945617676,
+                0.007488512039184571,
+                0.007481344223022461,
+                0.007542784214019775,
+                0.007516160011291504,
+                0.007508992195129394,
                 0.007676928043365478,
-                0.00764415979385376,
-                0.007641088008880615,
-                0.00769536018371582,
-                0.007705599784851074,
-                0.007669760227203369,
                 0.007659520149230957,
-                0.0076605439186096195,
-                0.00765337610244751,
-                0.007646207809448242,
-                0.007663616180419922,
-                0.007856128215789794,
-                0.007677951812744141,
-                0.007669760227203369,
-                0.007704576015472412,
-                0.007654335975646973,
-                0.0076605439186096195,
-                0.007656447887420655,
-                0.007679008007049561,
-                0.007658495903015137,
-                0.00765235185623169,
-                0.007632927894592285,
-                0.007669760227203369,
-                0.00759500789642334,
-                0.0076145601272583005,
-                0.007631872177124023,
-                0.007615488052368164,
-                0.007647232055664062,
-                0.007640031814575196,
-                0.007638016223907471,
-                0.007654399871826172,
-                0.007658495903015137,
-                0.007633920192718506,
-                0.007610367774963379,
-                0.007746560096740723,
-                0.007635968208312988,
-                0.007647232055664062,
-                0.007650303840637207
+                0.007416831970214844,
+                0.007428095817565918,
+                0.007548927783966064,
+                0.007774176120758057,
+                0.007897088050842285,
+                0.007846911907196046,
+                0.007798783779144287,
+                0.007775231838226319,
+                0.007832608222961425,
+                0.00781824016571045,
+                0.007828479766845703,
+                0.008013824462890624,
+                0.007748608112335205,
+                0.007749663829803467,
+                0.007840799808502196,
+                0.007781375885009765,
+                0.007730207920074463,
+                0.007846911907196046,
+                0.007783423900604248,
+                0.007696383953094482,
+                0.007730175971984863,
+                0.007756800174713135,
+                0.007417856216430664,
+                0.007529471874237061,
+                0.007475200176239013,
+                0.007388160228729248,
+                0.007472127914428711,
+                0.007482367992401123,
+                0.007445504188537597,
+                0.0074301438331604,
+                0.007879680156707763,
+                0.008162303924560547,
+                0.007568384170532226,
+                0.008334336280822753,
+                0.007641151905059814,
+                0.007524352073669433,
+                0.007541759967803955,
+                0.007531519889831543,
+                0.007709695816040039,
+                0.007772160053253174,
+                0.00774348783493042,
+                0.007733248233795166,
+                0.007703551769256592,
+                0.007684095859527588,
+                0.007624703884124756,
+                0.007531519889831543,
+                0.007464960098266602,
+                0.007345151901245117,
+                0.007171072006225586,
+                0.007094272136688232,
+                0.007247871875762939,
+                0.007588863849639893,
+                0.007583744049072265,
+                0.007478271961212158,
+                0.0075980801582336424,
+                0.0077107839584350584,
+                0.007819263935089112,
+                0.007479296207427978,
+                0.007402495861053467,
+                0.007354368209838867,
+                0.007379968166351319,
+                0.007372799873352051,
+                0.007387135982513428,
+                0.007407616138458252,
+                0.007409664154052734,
+                0.007385087966918945,
+                0.0074711041450500485,
+                0.007407584190368652,
+                0.007349279880523682,
+                0.00733081579208374,
+                0.007365632057189942,
+                0.007361536026000977,
+                0.007344128131866455,
+                0.007361536026000977,
+                0.007358496189117432,
+                0.007331840038299561,
+                0.0073768959045410155,
+                0.007356416225433349,
+                0.007359488010406494,
+                0.007367680072784424,
+                0.007394303798675537,
+                0.007362559795379638,
+                0.007326720237731933,
+                0.007359488010406494,
+                0.007368703842163086,
+                0.007322624206542969,
+                0.00734819221496582,
+                0.007321599960327148,
+                0.007352320194244385,
+                0.007371776103973389,
+                0.007367680072784424,
+                0.007407616138458252,
+                0.007395328044891358,
+                0.007370751857757568,
+                0.007383039951324463,
+                0.007327744007110596,
+                0.007372799873352051
             ]
         },
         "throughput": {
             "unit": "samples/s",
-            "value": 129.50803149873332
+            "value": 131.71748377442094
         },
         "energy": {
             "unit": "kWh",
-            "cpu": 8.813629265936013e-08,
-            "ram": 4.816472916833351e-08,
-            "gpu": 1.6159066425183412e-07,
-            "total": 2.9789168607952777e-07
+            "cpu": 8.622213070148848e-08,
+            "ram": 4.698878844934837e-08,
+            "gpu": 1.6834755818840427e-07,
+            "total": 3.015584773392411e-07
         },
         "efficiency": {
             "unit": "samples/kWh",
-            "value": 3356924.8378856443
+            "value": 3316106.4110130803
         }
     }
 }
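The summary statistics in the forward section are all derived from the raw values list, and the headline throughput and efficiency follow from them. Below is a minimal sketch recomputing these from a local copy of the file; the "report" nesting and numpy's default percentile interpolation are assumptions and may not match optimum-benchmark's own estimator bit-for-bit.

    # Sketch: recompute the forward-pass summary statistics from the raw
    # latency samples. The local path is hypothetical; numpy's default
    # (linear-interpolation) percentiles are an assumption.
    import json

    import numpy as np

    with open("benchmark.json") as f:  # hypothetical local copy of this file
        data = json.load(f)

    # The "report" wrapper is an assumption about the nesting; fall back
    # to the top level if it is absent.
    forward = data.get("report", data)["forward"]
    lat = np.array(forward["latency"]["values"])

    print("count:", lat.size)    # reported: 132
    print("total:", lat.sum())   # reported: 1.0021448650360112 s
    print("mean :", lat.mean())  # reported: 0.007592006553303112 s
    for p in (50, 90, 95, 99):
        print(f"p{p}:", np.percentile(lat, p))

    # Derived figures: throughput is samples per second of measured latency,
    # and the reported efficiency is numerically 1 / total energy in kWh,
    # which implies the energy figure is per processed sample.
    print("throughput:", lat.size / lat.sum())              # reported: 131.71748377442094
    print("efficiency:", 1.0 / forward["energy"]["total"])  # reported: 3316106.4110130803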
 
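Both revisions of the file can be fetched by commit hash to reproduce this comparison locally. A sketch assuming huggingface_hub follows; the repo_id is hypothetical since this page does not name the repository, and the short hashes from the header may need to be replaced by full SHAs.

    # Sketch: download both revisions of the benchmark file by commit hash
    # and print the headline numbers. The repo_id is hypothetical; the two
    # revision hashes are the ones shown in this commit header.
    import json

    from huggingface_hub import hf_hub_download

    FILENAME = (
        "cuda_inference_transformers_multiple-choice_FacebookAI/"
        "roberta-base/benchmark.json"
    )
    REPO_ID = "some-org/benchmark-results"  # hypothetical; not named on this page

    def load(revision: str) -> dict:
        path = hf_hub_download(
            repo_id=REPO_ID,
            filename=FILENAME,
            repo_type="dataset",  # assumption: results live in a dataset repo
            revision=revision,    # short hash; the full SHA may be required
        )
        with open(path) as f:
            return json.load(f)

    old, new = load("b6a1610"), load("eaf2c71")  # parent vs. this commit
    for tag, run in (("old", old), ("new", new)):
        fwd = run.get("report", run)["forward"]  # same nesting assumption as above
        print(tag, fwd["latency"]["mean"], fwd["throughput"]["value"])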