IlyasMoutawwakil (HF staff) committed
Commit fe48a5a · verified · 1 Parent(s): 3bdec52

Upload cuda_inference_transformers_token-classification_microsoft/deberta-v3-base/benchmark.json with huggingface_hub

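The commit message notes that the file was uploaded with huggingface_hub. As a rough sketch of how such an upload can be reproduced with `HfApi.upload_file` (the repo id below is a placeholder and the dataset repo type is an assumption, neither is taken from this commit):

```python
from huggingface_hub import HfApi

api = HfApi()

# Sketch only: repo_id is a placeholder and repo_type="dataset" is an assumption.
# path_in_repo matches the file touched by this commit.
api.upload_file(
    path_or_fileobj="benchmark.json",  # local copy of the results file
    path_in_repo="cuda_inference_transformers_token-classification_microsoft/deberta-v3-base/benchmark.json",
    repo_id="<namespace>/<repo>",
    repo_type="dataset",
    commit_message="Upload benchmark.json with huggingface_hub",
)
```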
cuda_inference_transformers_token-classification_microsoft/deberta-v3-base/benchmark.json CHANGED
@@ -3,7 +3,7 @@
   "name": "cuda_inference_transformers_token-classification_microsoft/deberta-v3-base",
   "backend": {
     "name": "pytorch",
-    "version": "2.6.0.dev20240917+cu124",
+    "version": "2.4.1+cu124",
     "_target_": "optimum_benchmark.backends.pytorch.backend.PyTorchBackend",
     "task": "token-classification",
     "library": "transformers",
@@ -104,7 +104,7 @@
   "load": {
     "memory": {
       "unit": "MB",
-      "max_ram": 810.647552,
+      "max_ram": 797.925376,
       "max_global_vram": 1410.859008,
       "max_process_vram": 0.0,
       "max_reserved": 773.849088,
@@ -113,31 +113,31 @@
     "latency": {
       "unit": "s",
       "count": 1,
-      "total": 8.5339462890625,
-      "mean": 8.5339462890625,
+      "total": 8.00678173828125,
+      "mean": 8.00678173828125,
       "stdev": 0.0,
-      "p50": 8.5339462890625,
-      "p90": 8.5339462890625,
-      "p95": 8.5339462890625,
-      "p99": 8.5339462890625,
+      "p50": 8.00678173828125,
+      "p90": 8.00678173828125,
+      "p95": 8.00678173828125,
+      "p99": 8.00678173828125,
       "values": [
-        8.5339462890625
+        8.00678173828125
       ]
     },
     "throughput": null,
     "energy": {
       "unit": "kWh",
-      "cpu": 9.491593944441472e-07,
-      "ram": 5.003901503139403e-07,
-      "gpu": 1.667223556003905e-06,
-      "total": 3.1167731007619927e-06
+      "cpu": 2.382359352082375e-06,
+      "ram": 1.2867832843096755e-06,
+      "gpu": 3.7077807440001956e-06,
+      "total": 7.376923380392246e-06
     },
     "efficiency": null
   },
   "forward": {
     "memory": {
       "unit": "MB",
-      "max_ram": 1197.899776,
+      "max_ram": 1187.414016,
       "max_global_vram": 1442.316288,
       "max_process_vram": 0.0,
       "max_reserved": 794.820608,
@@ -145,101 +145,103 @@
     },
     "latency": {
       "unit": "s",
-      "count": 70,
-      "total": 1.0121186580657962,
-      "mean": 0.014458837972368513,
-      "stdev": 0.0005042664629949893,
-      "p50": 0.014492671966552734,
-      "p90": 0.014622208213806153,
-      "p95": 0.015317299127578734,
-      "p99": 0.01637127145767212,
+      "count": 72,
+      "total": 1.01028134059906,
+      "mean": 0.014031685286098057,
+      "stdev": 0.00028237615654976337,
+      "p50": 0.01393665599822998,
+      "p90": 0.01418168296813965,
+      "p95": 0.014421094417572021,
+      "p99": 0.01532762106895447,
       "values": [
-        0.016545791625976563,
-        0.016292863845825196,
-        0.015917056083679198,
-        0.01572556781768799,
-        0.014105600357055664,
+        0.01548902416229248,
+        0.014932991981506348,
+        0.015261695861816407,
+        0.014311424255371094,
+        0.014166015625,
+        0.013956095695495606,
+        0.013964287757873535,
+        0.014015487670898438,
+        0.013936639785766602,
+        0.013911040306091308,
+        0.013969408035278321,
+        0.013917183876037598,
+        0.013880319595336914,
+        0.013965312004089356,
+        0.013894656181335448,
+        0.013919232368469238,
+        0.013974495887756348,
+        0.014163968086242675,
+        0.01394380760192871,
+        0.013906944274902343,
+        0.013926400184631347,
+        0.014114815711975098,
+        0.014000127792358399,
+        0.013923328399658203,
+        0.013914112091064454,
+        0.013910016059875489,
+        0.014149632453918457,
+        0.013942784309387207,
+        0.013932543754577637,
+        0.013872127532958984,
+        0.013939711570739746,
         0.013846528053283692,
-        0.013783040046691895,
-        0.013873184204101562,
-        0.013747200012207032,
-        0.013807711601257324,
-        0.01379532814025879,
-        0.013724736213684082,
-        0.013791232109069825,
-        0.013791232109069825,
-        0.01378713607788086,
-        0.013727744102478028,
-        0.014298111915588378,
-        0.014426048278808595,
-        0.014455807685852052,
-        0.014455807685852052,
-        0.014620672225952149,
-        0.014478272438049316,
-        0.014518272399902344,
-        0.01417728042602539,
-        0.01392131233215332,
-        0.013876223564147949,
-        0.014263296127319336,
-        0.014511199951171875,
-        0.014347264289855957,
-        0.01427353572845459,
-        0.014466048240661621,
-        0.014533663749694824,
-        0.014533632278442383,
-        0.01448960018157959,
-        0.014530559539794922,
-        0.014501888275146485,
-        0.014566399574279786,
-        0.014569472312927247,
-        0.014516223907470703,
-        0.014561280250549317,
-        0.014532447814941406,
-        0.01446399974822998,
-        0.014578720092773438,
-        0.014501888275146485,
-        0.014491647720336913,
-        0.014545920372009278,
-        0.01447321605682373,
-        0.01452025604248047,
-        0.014513152122497559,
-        0.014524415969848633,
-        0.01452236843109131,
-        0.014538751602172852,
-        0.014511103630065919,
-        0.014589952468872071,
-        0.014653440475463866,
-        0.01457254409790039,
-        0.014536704063415527,
-        0.014493696212768555,
-        0.01457049560546875,
-        0.014611455917358398,
-        0.014636032104492188,
-        0.014474240303039551,
-        0.014466143608093262,
-        0.014606335639953612,
-        0.014818304061889649,
-        0.01448857593536377,
-        0.014428159713745118,
-        0.014423040390014649,
-        0.01447321605682373,
-        0.014404607772827148
+        0.013944831848144532,
+        0.013920255661010742,
+        0.013907967567443847,
+        0.013944831848144532,
+        0.013922240257263184,
+        0.01386291217803955,
+        0.013900799751281738,
+        0.013917183876037598,
+        0.013864959716796875,
+        0.013877216339111329,
+        0.013880319595336914,
+        0.013907967567443847,
+        0.013834303855895996,
+        0.014108672142028808,
+        0.013873151779174805,
+        0.013912063598632812,
+        0.01387929630279541,
+        0.013911040306091308,
+        0.013917183876037598,
+        0.013964287757873535,
+        0.014065664291381836,
+        0.01418239974975586,
+        0.01455513572692871,
+        0.014269439697265626,
+        0.01417523193359375,
+        0.013965312004089356,
+        0.01399084758758545,
+        0.013922304153442382,
+        0.013996031761169434,
+        0.013982720375061035,
+        0.0140697603225708,
+        0.013915136337280273,
+        0.014024671554565429,
+        0.014142463684082032,
+        0.013933568000793458,
+        0.013929471969604493,
+        0.014240768432617188,
+        0.01393667221069336,
+        0.013924351692199707,
+        0.013855744361877441
       ]
     },
     "throughput": {
       "unit": "samples/s",
-      "value": 69.16185117441974
+      "value": 71.26727685310571
     },
     "energy": {
       "unit": "kWh",
-      "cpu": 1.7004712120031702e-07,
-      "ram": 9.286551879454572e-08,
-      "gpu": 3.431828142287057e-07,
-      "total": 6.060954542235685e-07
+      "cpu": 1.6522153233988893e-07,
+      "ram": 9.027629395572145e-08,
+      "gpu": 3.6466464358331924e-07,
+      "total": 6.201624698789297e-07
     },
     "efficiency": {
       "unit": "samples/kWh",
-      "value": 1649905.131331233
+      "value": 1612480.6781603917
     }
   }
 }
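The summary statistics in the forward section follow directly from the recorded per-iteration latencies. A minimal sketch that recomputes the mean latency and throughput from the values above, assuming the sections sit under a top-level "report" key as in optimum-benchmark output (adjust the keys if the layout differs):

```python
import json

# Path as shown in this commit.
path = "cuda_inference_transformers_token-classification_microsoft/deberta-v3-base/benchmark.json"
with open(path) as f:
    report = json.load(f)

# Assumption: forward-pass results live under report["report"]["forward"].
latency = report["report"]["forward"]["latency"]
values = latency["values"]

mean = sum(values) / len(values)                  # mean per-forward latency in seconds
throughput = latency["count"] / latency["total"]  # samples processed per second

print(f"mean latency: {mean:.6f} s, throughput: {throughput:.2f} samples/s")
```

For the new run in this diff, 72 iterations over 1.01028134059906 s give roughly 71.27 samples/s, matching the reported throughput of 71.26727685310571.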