Upload cuda_inference_transformers_text-classification_FacebookAI/roberta-base/benchmark.json with huggingface_hub
cuda_inference_transformers_text-classification_FacebookAI/roberta-base/benchmark.json
CHANGED
@@ -102,7 +102,7 @@
     "forward": {
         "memory": {
             "unit": "MB",
-            "max_ram":
+            "max_ram": 900.579328,
             "max_global_vram": 1195.900928,
             "max_process_vram": 0.0,
             "max_reserved": 555.74528,
@@ -110,177 +110,173 @@
         },
         "latency": {
             "unit": "s",
-            "count":
-            "total": 0.
-            "mean": 0.
-            "stdev": 0.
-            "p50": 0.
-            "p90": 0.
-            "p95": 0.
-            "p99": 0.
+            "count": 142,
+            "total": 0.9999707517623899,
+            "mean": 0.007042047547622465,
+            "stdev": 0.00023800417354936538,
+            "p50": 0.0071116797924041745,
+            "p90": 0.007287904167175293,
+            "p95": 0.00740480010509491,
+            "p99": 0.0076277146625518795,
             "values": [
-                0.
-                0.
-                0.
-                0.
-                0.
-                0.
+                0.00758784008026123,
+                0.007260159969329834,
+                0.007544832229614258,
+                0.00733900785446167,
+                0.007407616138458252,
+                0.0074301438331604,
+                0.0073359360694885255,
+                0.007296031951904297,
+                0.007288832187652588,
+                0.007221248149871826,
+                0.007197696208953858,
+                0.007206912040710449,
                 0.007201791763305664,
-                0.
-                0.
+                0.007120895862579346,
+                0.007080959796905518,
+                0.007164927959442138,
+                0.007164927959442138,
+                0.007166975975036621,
+                0.007189504146575928,
+                0.007168992042541504,
+                0.007172095775604248,
                 0.007165952205657959,
-                0.
-                0.007094272136688232,
-                0.007013376235961914,
-                0.006939648151397705,
-                0.006850560188293457,
-                0.007044095993041993,
-                0.007024640083312988,
-                0.007003136157989502,
-                0.007002111911773682,
-                0.007027711868286133,
-                0.0070266880989074704,
-                0.006948863983154297,
-                0.006937600135803222,
-                0.006984640121459961,
-                0.007017471790313721,
-                0.006988800048828125,
-                0.007013376235961914,
-                0.007005184173583984,
-                0.007049215793609619,
-                0.006967296123504638,
-                0.006982656002044678,
-                0.006960127830505371,
-                0.006971392154693603,
-                0.00698367977142334,
-                0.00693555212020874,
-                0.006872064113616944,
-                0.006949888229370117,
-                0.006931456089019775,
-                0.0069027838706970214,
-                0.006905824184417725,
-                0.007036928176879883,
-                0.007153664112091064,
-                0.007030784130096435,
-                0.007025599956512451,
-                0.006993919849395752,
-                0.007078911781311035,
-                0.006985727787017822,
-                0.006949888229370117,
-                0.006968319892883301,
-                0.007035903930664063,
-                0.0070266880989074704,
-                0.006951935768127441,
-                0.007020544052124023,
-                0.0069918718338012695,
-                0.0069959678649902345,
-                0.006973375797271728,
-                0.006969344139099121,
-                0.006959104061126709,
-                0.007074816226959229,
-                0.007054336071014404,
-                0.006929408073425293,
-                0.0069324798583984375,
-                0.0069253120422363285,
-                0.006940671920776367,
-                0.00694374418258667,
+                0.007110655784606934,
                 0.007180287837982178,
-                0.
-                0.
-                0.
-                0.
-                0.
-                0.
+                0.0071905279159545895,
+                0.007194623947143554,
+                0.0072325119972229,
+                0.007229440212249756,
+                0.007163904190063477,
+                0.007101471900939941,
+                0.007161856174468994,
+                0.007188479900360107,
+                0.007187456130981445,
+                0.00708403205871582,
+                0.007081984043121338,
+                0.007147520065307617,
+                0.007344128131866455,
+                0.0075796799659729,
+                0.007162879943847656,
+                0.007177216053009033,
+                0.007126016139984131,
+                0.007245855808258057,
+                0.007252992153167725,
+                0.007262207984924316,
+                0.007237631797790528,
+                0.007176191806793213,
+                0.0071495680809020995,
+                0.007155712127685547,
+                0.007201791763305664,
+                0.007244800090789795,
+                0.007202816009521484,
+                0.007147520065307617,
+                0.007110655784606934,
+                0.00707583999633789,
+                0.007122943878173828,
+                0.007066624164581299,
+                0.007128032207489013,
+                0.007226367950439453,
+                0.007390207767486572,
+                0.007122943878173828,
+                0.007105567932128906,
+                0.007112703800201416,
+                0.007163904190063477,
+                0.007734272003173828,
+                0.007341055870056152,
+                0.007208960056304932,
+                0.007279551982879638,
+                0.007231488227844239,
+                0.007227456092834473,
+                0.007251935958862305,
+                0.00740556812286377,
+                0.007176191806793213,
+                0.007233535766601563,
+                0.00709119987487793,
+                0.007234560012817383,
+                0.0070860800743103025,
+                0.007076863765716553,
+                0.006933472156524658,
                 0.007021567821502686,
-                0.
-                0.
-                0.
-                0.
-                0.
-                0.
-                0.
-                0.
-                0.
-                0.
-                0.
-                0.
+                0.0071198720932006835,
+                0.00693452787399292,
+                0.006782976150512696,
+                0.006790143966674805,
+                0.006849535942077637,
+                0.0067900800704956054,
+                0.007271423816680909,
+                0.007655424118041992,
+                0.007142399787902832,
+                0.007138304233551026,
+                0.007181280136108399,
+                0.006982656002044678,
+                0.006960127830505371,
+                0.006915071964263916,
+                0.0068689918518066405,
+                0.006822912216186523,
+                0.006812672138214112,
+                0.006765567779541016,
+                0.006750207901000976,
+                0.006820864200592041,
+                0.006770688056945801,
+                0.006730751991271973,
+                0.006731776237487793,
+                0.00676966381072998,
+                0.006748159885406494,
+                0.00675219202041626,
+                0.006770688056945801,
+                0.006772736072540283,
+                0.006767615795135498,
+                0.006796256065368652,
+                0.006752255916595459,
+                0.006776832103729248,
+                0.006738944053649902,
+                0.006822912216186523,
+                0.006754303932189941,
+                0.0067358717918396,
+                0.006776800155639648,
+                0.006796288013458252,
+                0.006731776237487793,
+                0.0067717118263244626,
+                0.006776832103729248,
+                0.006738944053649902,
+                0.006773759841918945,
+                0.006783999919891357,
+                0.006746111869812011,
+                0.006725632190704346,
                 0.00678604793548584,
-                0.
-                0.
-                0.
-                0.
-                0.0067491202354431155,
-                0.006881247997283935,
-                0.006948863983154297,
-                0.006612991809844971,
-                0.006639616012573242,
-                0.006687679767608643,
-                0.0066406397819519045,
-                0.006658112049102783,
-                0.006673408031463623,
-                0.006608895778656006,
-                0.006601727962493896,
+                0.006779903888702392,
+                0.006754303932189941,
+                0.006775872230529785,
+                0.006737919807434082,
                 0.006729728221893311,
-                0.
-                0.
-                0.
-                0.
-                0.
-                0.
-                0.
-                0.
-                0.
-                0.
-                0.
-                0.006599679946899414,
-                0.006593535900115967,
-                0.006583295822143555,
-                0.006594560146331787,
-                0.0065853757858276366,
-                0.00657203197479248,
-                0.006573056221008301,
-                0.0065413122177124024,
-                0.006589439868927002,
-                0.006643712043762207,
-                0.006559743881225586,
-                0.006556672096252441,
-                0.006562816143035889,
-                0.006549503803253174,
-                0.006555647850036621,
-                0.006527999877929688,
-                0.006597631931304931,
-                0.006525951862335205,
-                0.0065474557876586915,
-                0.0066979842185974124,
-                0.006568960189819336,
-                0.0065495362281799314,
-                0.006549503803253174,
-                0.006553599834442139,
-                0.006574111938476563,
-                0.0065792322158813475,
-                0.0065372161865234375,
-                0.006545407772064209,
-                0.0065443840026855465,
-                0.0065771517753601075,
-                0.00657203197479248,
-                0.006847487926483154,
-                0.006626304149627686
+                0.006759424209594727,
+                0.006779935836791992,
+                0.006772736072540283,
+                0.006762495994567871,
+                0.006772736072540283,
+                0.006773759841918945,
+                0.0067430720329284665,
+                0.0067717118263244626,
+                0.006783999919891357,
+                0.006749184131622315,
+                0.006774784088134766
             ]
         },
         "throughput": {
             "unit": "samples/s",
-            "value":
+            "value": 142.00415337121942
         },
         "energy": {
             "unit": "kWh",
-            "cpu": 7.
-            "ram": 4.
-            "gpu": 1.
-            "total": 2.
+            "cpu": 7.778227697184056e-08,
+            "ram": 4.2520409203693716e-08,
+            "gpu": 1.373911774509772e-07,
+            "total": 2.576938636265115e-07
         },
         "efficiency": {
             "unit": "samples/kWh",
-            "value":
+            "value": 3880573.584202027
         }
     }
 }
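For reference, a report file like this can be pushed to the Hub programmatically with huggingface_hub's HfApi.upload_file, which is what the commit title refers to. The sketch below is illustrative only: the repo_id, the local path, and the exact nesting of the "forward" section inside the JSON are assumptions, not details taken from this commit. It also sanity-checks the throughput figure, which is simply count / total from the latency block (142 / 0.9999707517623899 ≈ 142.004 samples/s, matching the value recorded above).

# Minimal sketch, not the exact script behind this commit.
# repo_id, the local path, and the JSON nesting below are assumptions.
import json

from huggingface_hub import HfApi

local_path = "benchmark.json"  # hypothetical local copy of the report shown above

# Recompute one derived figure: throughput = sample count / total measured latency.
with open(local_path) as f:
    report = json.load(f)
latency = report["forward"]["latency"]  # adjust if "forward" sits deeper in the file
print(latency["count"] / latency["total"])  # ~142.004 samples/s for the values above

# Upload the file with huggingface_hub, as the commit title describes.
api = HfApi()
api.upload_file(
    path_or_fileobj=local_path,
    path_in_repo="cuda_inference_transformers_text-classification_FacebookAI/roberta-base/benchmark.json",
    repo_id="<namespace>/<benchmark-repo>",  # placeholder
    repo_type="dataset",  # or "model", depending on where the reports live
)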