Upload cuda_inference_transformers_multiple-choice_FacebookAI/roberta-base/benchmark.json with huggingface_hub
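The commit title above is the default message huggingface_hub generates when a file is pushed with its upload helpers. A minimal sketch of that kind of upload follows; the repo_id and repo_type are assumptions for illustration, since the target repository is not visible on this page.

from huggingface_hub import HfApi

api = HfApi()  # assumes a valid HF token is already configured (e.g. via `huggingface-cli login`)
api.upload_file(
    # local file produced by the benchmark run; the path mirrors the layout used in the repo
    path_or_fileobj="cuda_inference_transformers_multiple-choice_FacebookAI/roberta-base/benchmark.json",
    path_in_repo="cuda_inference_transformers_multiple-choice_FacebookAI/roberta-base/benchmark.json",
    repo_id="someuser/benchmark-results",  # hypothetical repo_id, not taken from this page
    repo_type="dataset",                   # assumption: benchmark dumps are usually stored in dataset repos
    # omitting commit_message lets huggingface_hub use its default,
    # "Upload <path_in_repo> with huggingface_hub", which matches the commit title above
)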
cuda_inference_transformers_multiple-choice_FacebookAI/roberta-base/benchmark.json
CHANGED
@@ -102,7 +102,7 @@
    "forward": {
        "memory": {
            "unit": "MB",
-            "max_ram": 907.
+            "max_ram": 907.943936,
            "max_global_vram": 1195.900928,
            "max_process_vram": 0.0,
            "max_reserved": 555.74528,
@@ -110,168 +110,166 @@
        },
        "latency": {
            "unit": "s",
-            "count":
-            "total":
-            "mean": 0.
-            "stdev": 0.
-            "p50": 0.
-            "p90": 0.
-            "p95": 0.
-            "p99": 0.
+            "count": 135,
+            "total": 1.002246529579163,
+            "mean": 0.007424048367253057,
+            "stdev": 0.0003563986058280127,
+            "p50": 0.007448575973510742,
+            "p90": 0.007803084754943848,
+            "p95": 0.007858790493011475,
+            "p99": 0.008373309879302977,
            "values": [
-                0.
-                0.
-                0.
-                0.
-                0.
-                0.
-                0.
-                0.
-                0.
-                0.
-                0.007221248149871826,
-                0.007243775844573975,
-                0.007243775844573975,
-                0.007200767993927002,
-                0.007226367950439453,
-                0.007271423816680909,
-                0.007234560012817383,
-                0.007271423816680909,
-                0.007222271919250488,
-                0.007237631797790528,
-                0.007251967906951904,
-                0.007279615879058838,
-                0.007294911861419678,
-                0.007245823860168457,
-                0.007189504146575928,
-                0.00722431993484497,
-                0.007237631797790528,
-                0.0072202239036560055,
-                0.007258111953735351,
-                0.007275519847869873,
-                0.00719155216217041,
-                0.007214079856872559,
-                0.007211008071899414,
-                0.0072672638893127445,
-                0.007197696208953858,
-                0.007269375801086426,
-                0.007319551944732666,
-                0.007207968235015869,
-                0.007229407787322998,
-                0.007216127872467041,
-                0.00719046401977539,
-                0.007176191806793213,
-                0.007200767993927002,
-                0.0072120318412780765,
-                0.0072334718704223635,
-                0.007265312194824219,
-                0.007187456130981445,
-                0.007226367950439453,
-                0.007217152118682861,
-                0.007305215835571289,
-                0.0071792640686035155,
-                0.007178239822387696,
-                0.007128064155578613,
-                0.00692633581161499,
-                0.006924287796020508,
-                0.0068915200233459475,
-                0.006908927917480469,
-                0.006882304191589355,
-                0.0068689918518066405,
-                0.006872064113616944,
-                0.0074793601036071775,
-                0.00725708818435669,
-                0.007192575931549072,
-                0.007180287837982178,
-                0.007231488227844239,
-                0.007332863807678222,
-                0.007829504013061523,
-                0.00729804801940918,
-                0.0072427520751953125,
-                0.0072468481063842774,
-                0.007211008071899414,
-                0.007221343994140625,
-                0.007209983825683594,
-                0.007217152118682861,
-                0.007214079856872559,
-                0.0071792640686035155,
-                0.007090176105499267,
-                0.0069959678649902345,
-                0.007205887794494629,
-                0.0073400321006774905,
-                0.007288832187652588,
-                0.007307263851165771,
-                0.00728985595703125,
-                0.007357439994812012,
-                0.007304192066192627,
-                0.007324672222137451,
-                0.007275519847869873,
-                0.007311359882354736,
-                0.007293951988220215,
-                0.007312384128570557,
-                0.007304192066192627,
-                0.007316480159759522,
-                0.007443456172943115,
-                0.007358463764190673,
-                0.007333888053894043,
-                0.007361536026000977,
-                0.007395328044891358,
+                0.007921664237976075,
+                0.007978047847747803,
+                0.00810905647277832,
+                0.00802406406402588,
+                0.0075939841270446775,
+                0.007611392021179199,
+                0.007623680114746094,
+                0.007450623989105225,
+                0.007452672004699707,
+                0.007449600219726562,
                0.0074414081573486324,
-                0.
-                0.
-                0.
-                0.
-                0.
-                0.
-                0.
-                0.
-                0.
-                0.
-                0.
-                0.
-                0.
-                0.
-                0.
-                0.
-                0.
-                0.
-                0.
-                0.
-                0.
-                0.
-                0.
-                0.
-                0.
-                0.
-                0.
-                0.
-                0.
-                0.
-                0.
-                0.
-                0.
-                0.
-                0.
-                0.
-                0.
-                0.
-                0.
+                0.007482367992401123,
+                0.007437312126159668,
+                0.00738099193572998,
+                0.007425087928771973,
+                0.007402495861053467,
+                0.007460864067077637,
+                0.007486464023590088,
+                0.0074659519195556644,
+                0.0074741759300231934,
+                0.007442431926727295,
+                0.007485439777374267,
+                0.0075008001327514645,
+                0.007477248191833496,
+                0.007480319976806641,
+                0.007512063980102539,
+                0.0074711041450500485,
+                0.007561215877532959,
+                0.009447487831115722,
+                0.007502848148345947,
+                0.007531519889831543,
+                0.007425024032592774,
+                0.007386112213134765,
+                0.0073573760986328125,
+                0.007391232013702393,
+                0.007386112213134765,
+                0.007411776065826416,
+                0.007445504188537597,
+                0.007418879985809326,
+                0.007494719982147216,
+                0.007463935852050781,
+                0.007486464023590088,
+                0.007475200176239013,
+                0.0074035201072692874,
+                0.007396351814270019,
+                0.007460864067077637,
+                0.007494688034057617,
+                0.0074403839111328125,
+                0.007411712169647216,
+                0.007415808200836181,
+                0.007827455997467042,
+                0.00773737621307373,
+                0.00774348783493042,
+                0.007788544178009033,
+                0.00785100793838501,
+                0.007753727912902832,
+                0.007721983909606934,
+                0.007805952072143555,
+                0.00774451208114624,
+                0.007727104187011719,
+                0.0077578239440917966,
+                0.007781375885009765,
+                0.007810080051422119,
+                0.007874559879302979,
+                0.00784281587600708,
+                0.007798783779144287,
+                0.008509440422058106,
+                0.007770112037658691,
+                0.007759871959686279,
+                0.007654399871826172,
+                0.007778304100036621,
+                0.007669760227203369,
+                0.007616511821746826,
+                0.007501823902130127,
+                0.00765337610244751,
+                0.0076308479309082035,
+                0.007442431926727295,
+                0.007357439994812012,
+                0.007663680076599121,
+                0.007614463806152344,
+                0.007635968208312988,
+                0.007711743831634522,
+                0.007797760009765625,
+                0.00785203218460083,
+                0.00781004810333252,
+                0.007524352073669433,
+                0.007502848148345947,
+                0.007448575973510742,
+                0.007611392021179199,
+                0.007231488227844239,
+                0.00709939193725586,
+                0.007051263809204102,
+                0.0070594558715820314,
+                0.0070553598403930665,
+                0.007051263809204102,
+                0.007108607769012451,
+                0.007030720233917236,
+                0.0070553598403930665,
+                0.007068672180175781,
+                0.007000063896179199,
+                0.007011328220367432,
+                0.007012351989746094,
+                0.007023615837097168,
+                0.006971392154693603,
+                0.007010240077972412,
+                0.007013376235961914,
+                0.007041024208068848,
+                0.0070225920677185055,
+                0.007030816078186035,
+                0.007041024208068848,
+                0.0070349440574645995,
+                0.007045119762420654,
+                0.007014400005340577,
+                0.007024640083312988,
+                0.007046144008636474,
+                0.007045119762420654,
+                0.007058432102203369,
+                0.007049215793609619,
+                0.007067647933959961,
+                0.007037951946258545,
+                0.007020544052124023,
+                0.007051263809204102,
+                0.007021567821502686,
+                0.007039999961853028,
+                0.007008255958557129,
+                0.007060480117797851,
+                0.007049215793609619,
+                0.007067647933959961,
+                0.007051263809204102,
+                0.007002111911773682,
+                0.007027711868286133,
+                0.007039008140563965,
+                0.007031807899475098,
+                0.007006207942962647,
+                0.0070225920677185055
            ]
        },
        "throughput": {
            "unit": "samples/s",
-            "value":
+            "value": 134.6973983104593
        },
        "energy": {
            "unit": "kWh",
-            "cpu": 8.
-            "ram": 4.
-            "gpu": 1.
-            "total": 2.
+            "cpu": 8.381258037541766e-08,
+            "ram": 4.581855671884267e-08,
+            "gpu": 1.5258423787324674e-07,
+            "total": 2.822153749675071e-07
        },
        "efficiency": {
            "unit": "samples/kWh",
-            "value":
+            "value": 3543393.0561548434
        }
    }
}
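The derived fields in the new report are internally consistent with one another. A minimal sanity-check sketch is below, using only the numbers visible in the diff above; reading efficiency as the reciprocal of the total energy is an observation from these numbers, not something the file itself states.

# Summary fields copied from the "+" side of the diff above.
count = 135
total = 1.002246529579163            # s, over all measured forward passes
mean = 0.007424048367253057          # s
throughput = 134.6973983104593       # samples/s
cpu, ram, gpu = 8.381258037541766e-08, 4.581855671884267e-08, 1.5258423787324674e-07
energy_total = 2.822153749675071e-07 # kWh
efficiency = 3543393.0561548434      # samples/kWh

assert abs(mean - total / count) < 1e-12          # mean latency = total / count
assert abs(throughput - count / total) < 1e-6     # one sample per forward pass
assert abs(energy_total - (cpu + ram + gpu)) < 1e-15
assert abs(efficiency - 1.0 / energy_total) < 1.0 # consistent with energy reported per sample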