Upload cuda_inference_transformers_text-classification_FacebookAI/roberta-base/benchmark.json with huggingface_hub
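The commit title above indicates this result file was pushed with the huggingface_hub client. As a rough illustration only, a minimal upload sketch follows, assuming the standard HfApi.upload_file call; the repo_id is a placeholder, since the target repository is not named on this page.

from huggingface_hub import HfApi

api = HfApi()  # authentication comes from a cached login or the HF_TOKEN environment variable
api.upload_file(
    path_or_fileobj="benchmark.json",  # local file produced by the benchmark run
    path_in_repo="cuda_inference_transformers_text-classification_FacebookAI/roberta-base/benchmark.json",
    repo_id="<namespace>/<results-repo>",  # placeholder: the destination repo is not shown here
    repo_type="dataset",  # assumption: benchmark results are typically stored in a dataset repo
    commit_message="Upload cuda_inference_transformers_text-classification_FacebookAI/roberta-base/benchmark.json with huggingface_hub",
)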
cuda_inference_transformers_text-classification_FacebookAI/roberta-base/benchmark.json
CHANGED
@@ -104,7 +104,7 @@
     "forward": {
         "memory": {
             "unit": "MB",
-            "max_ram": 903.
+            "max_ram": 903.3728,
             "max_global_vram": 1195.900928,
             "max_process_vram": 0.0,
             "max_reserved": 555.74528,
@@ -112,161 +112,167 @@
         },
         "latency": {
             "unit": "s",
-            "count":
-            "total": 0.
-            "mean": 0.
-            "stdev": 0.
-            "p50": 0.
-            "p90": 0.
-            "p95": 0.
-            "p99": 0.
+            "count": 136,
+            "total": 0.9963351974487306,
+            "mean": 0.007325994098887724,
+            "stdev": 0.00038683093314804636,
+            "p50": 0.007311360120773316,
+            "p90": 0.007736320018768311,
+            "p95": 0.007940351843833923,
+            "p99": 0.00887997407913208,
             "values": [
-                0.
-                0.
-                0.
-                0.008054719924926759,
-                0.00800972843170166,
-                0.007980031967163086,
-                0.007862271785736084,
-                0.007864319801330566,
-                0.007845888137817383,
-                0.007813119888305664,
-                0.00778547191619873,
-                0.00781824016571045,
-                0.007904255867004394,
-                0.007919583797454834,
-                0.007889952182769775,
-                0.007654399871826172,
-                0.007903232097625732,
-                0.007867392063140868,
-                0.007882751941680909,
-                0.00784281587600708,
-                0.007899136066436767,
-                0.00782431983947754,
-                0.00783462381362915,
-                0.007864319801330566,
-                0.007788544178009033,
-                0.0077916159629821775,
-                0.007829504013061523,
-                0.007768064022064209,
-                0.00790015983581543,
-                0.007888895988464355,
-                0.00791756820678711,
-                0.007964672088623047,
-                0.0079267840385437,
-                0.0078919677734375,
-                0.007827455997467042,
-                0.007888895988464355,
-                0.007877567768096923,
+                0.008300543785095215,
+                0.00796569585800171,
+                0.007990272045135497,
                 0.007931903839111328,
-                0.
-                0.
-                0.
-                0.
-                0.007963647842407226,
-                0.007848959922790527,
-                0.007858176231384278,
-                0.007799808025360107,
-                0.007823359966278077,
-                0.007782400131225586,
-                0.007846911907196046,
-                0.007921664237976075,
-                0.007977983951568603,
-                0.007888927936553955,
-                0.007795711994171142,
-                0.007360511779785156,
-                0.0073431038856506346,
-                0.007453695774078369,
-                0.007311391830444336,
-                0.0072848000526428225,
-                0.007171072006225586,
-                0.007315455913543701,
-                0.0072499198913574215,
-                0.007064576148986816,
-                0.007282688140869141,
-                0.007524352073669433,
-                0.007674880027770996,
-                0.007734272003173828,
-                0.00775161600112915,
-                0.007769087791442871,
-                0.007822336196899414,
-                0.009207807540893554,
-                0.007716864109039307,
-                0.0074967041015625,
-                0.007450623989105225,
-                0.007401440143585205,
-                0.00744755220413208,
-                0.007479296207427978,
-                0.007460864067077637,
+                0.007861248016357422,
+                0.007896063804626464,
+                0.007536640167236328,
+                0.007419904232025146,
                 0.007408639907836914,
-                0.
-                0.
-                0.
-                0.
-                0.
-                0.007436287879943848,
-                0.007426047801971435,
-                0.007418879985809326,
-                0.007450623989105225,
-                0.0074035201072692874,
+                0.007437312126159668,
+                0.007325664043426514,
+                0.0073062400817871095,
+                0.007363584041595459,
+                0.00738099193572998,
                 0.007413760185241699,
-                0.
-                0.
-                0.
-                0.
-                0.
-                0.
-                0.
-                0.
-                0.
+                0.007458816051483155,
+                0.00748748779296875,
+                0.00738918399810791,
+                0.007430111885070801,
+                0.007673855781555176,
+                0.007751679897308349,
+                0.007741439819335938,
+                0.007709695816040039,
+                0.007702527999877929,
+                0.00841932773590088,
+                0.007717887878417969,
+                0.007629824161529541,
+                0.00761033582687378,
+                0.007669760227203369,
+                0.007650303840637207,
+                0.007731200218200684,
+                0.007725056171417236,
+                0.007775231838226319,
+                0.007480319976806641,
                 0.0074711041450500485,
-                0.
-                0.
-                0.
-                0.
-                0.00739737606048584,
-                0.007464960098266602,
-                0.007402560234069824,
-                0.007393280029296875,
-                0.00744652795791626,
-                0.0076267518997192385,
-                0.007450623989105225,
-                0.007385087966918945,
+                0.007366655826568603,
+                0.0073472318649291995,
+                0.007417856216430664,
+                0.00743833589553833,
                 0.007396351814270019,
-                0.
-                0.
-                0.
-                0.
-                0.007404543876647949,
-                0.007374847888946533,
-                0.007426047801971435,
+                0.007320576190948487,
+                0.007314432144165039,
+                0.007328767776489258,
+                0.0075632638931274416,
                 0.007457791805267334,
-                0.
-                0.
-                0.
-                0.
-                0.
-                0.
-                0.
-                0.
+                0.007332863807678222,
+                0.007316480159759522,
+                0.007354368209838867,
+                0.0073820161819458,
+                0.007336959838867187,
+                0.007365632057189942,
+                0.007469056129455566,
+                0.007477248191833496,
+                0.00739737606048584,
+                0.007372799873352051,
                 0.007408639907836914,
-                0.
+                0.007326720237731933,
+                0.007425024032592774,
+                0.007288832187652588,
+                0.007270400047302246,
+                0.007104479789733887,
+                0.007245823860168457,
+                0.00738918399810791,
+                0.007122943878173828,
+                0.007014400005340577,
+                0.0069918718338012695,
+                0.007078911781311035,
+                0.007184383869171143,
+                0.00880742359161377,
+                0.008919039726257324,
+                0.008995840072631836,
+                0.007761888027191162,
+                0.007550975799560547,
+                0.007497727870941162,
+                0.007334911823272705,
+                0.007271423816680909,
+                0.007288832187652588,
+                0.007287775993347168,
+                0.007332863807678222,
+                0.007252992153167725,
+                0.007308288097381592,
+                0.007291903972625732,
+                0.007271423816680909,
+                0.007657440185546875,
+                0.007245823860168457,
+                0.007259136199951172,
+                0.007268352031707764,
+                0.007266304016113281,
+                0.007329792022705078,
+                0.006998015880584717,
+                0.006977536201477051,
+                0.006969344139099121,
+                0.006993919849395752,
+                0.006960127830505371,
+                0.006998015880584717,
+                0.006980607986450196,
+                0.007006175994873047,
+                0.006969344139099121,
+                0.006957056045532227,
+                0.006980607986450196,
+                0.006985727787017822,
+                0.006965248107910156,
+                0.0069621758460998535,
+                0.006990848064422607,
+                0.0069764480590820315,
+                0.006949888229370117,
+                0.006964223861694336,
+                0.006982656002044678,
+                0.0069550080299377445,
+                0.006946815967559815,
+                0.00694374418258667,
+                0.006977536201477051,
+                0.006993919849395752,
+                0.006974431991577148,
+                0.006980607986450196,
+                0.006968255996704102,
+                0.006988800048828125,
+                0.006985727787017822,
+                0.006964223861694336,
+                0.006976511955261231,
+                0.006998015880584717,
+                0.006964223861694336,
+                0.006976511955261231,
+                0.006960063934326172,
+                0.006966271877288818,
+                0.006952960014343262,
+                0.006966271877288818,
+                0.006988800048828125,
+                0.006957056045532227,
+                0.007003136157989502,
+                0.0069519681930542,
+                0.0069816322326660156,
+                0.0069816322326660156,
+                0.0069621758460998535,
+                0.006957056045532227,
+                0.006972415924072266
             ]
         },
         "throughput": {
             "unit": "samples/s",
-            "value":
+            "value": 136.50024645144418
         },
         "energy": {
             "unit": "kWh",
-            "cpu": 8.
-            "ram": 4.
-            "gpu": 1.
-            "total": 2.
+            "cpu": 8.299775007698273e-08,
+            "ram": 4.521743116367171e-08,
+            "gpu": 1.4923815951388975e-07,
+            "total": 2.774533407545442e-07
         },
         "efficiency": {
             "unit": "samples/kWh",
-            "value":
+            "value": 3604209.620545439
         }
     }
 }
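As a sanity check, the updated summary fields in the new file are internally consistent: count equals the number of entries in values, mean equals total / count, and the throughput value equals count / total. The short sketch below recomputes these relations; it assumes the file is read from the path shown in this commit and that "forward" is a top-level key of the JSON, as the closing braces in the diff suggest.

import json

# Path as it appears in this commit; adjust to wherever the file is stored locally.
path = "cuda_inference_transformers_text-classification_FacebookAI/roberta-base/benchmark.json"

with open(path) as f:
    report = json.load(f)

latency = report["forward"]["latency"]  # assumes "forward" sits at the top level of the JSON
values = latency["values"]

assert latency["count"] == len(values)                             # 136 recorded forward passes
assert abs(latency["mean"] - latency["total"] / latency["count"]) < 1e-12
assert abs(sum(values) - latency["total"]) < 1e-6                  # total is (approximately) the sum of the samples

throughput = latency["count"] / latency["total"]                   # ~136.5 samples/s, matching the throughput value
print(f"throughput: {throughput:.8f} samples/s")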