Upload cuda_inference_transformers_text-classification_FacebookAI/roberta-base/benchmark.json with huggingface_hub
cuda_inference_transformers_text-classification_FacebookAI/roberta-base/benchmark.json
CHANGED
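For reference, a commit like this one can be produced programmatically with `huggingface_hub`. A minimal sketch follows; the `repo_id` and `repo_type` values are assumptions for illustration only and are not taken from this commit.

# Sketch: upload the refreshed benchmark file with huggingface_hub.
# repo_id and repo_type below are placeholders, not confirmed by this commit.
from huggingface_hub import HfApi

api = HfApi()  # picks up the token from `huggingface-cli login` by default
api.upload_file(
    path_or_fileobj="cuda_inference_transformers_text-classification_FacebookAI/roberta-base/benchmark.json",
    path_in_repo="cuda_inference_transformers_text-classification_FacebookAI/roberta-base/benchmark.json",
    repo_id="<namespace>/<benchmark-repo>",  # assumption: the target repo hosting these results
    repo_type="dataset",                     # assumption: benchmark results are typically stored in a dataset repo
    commit_message="Upload cuda_inference_transformers_text-classification_FacebookAI/roberta-base/benchmark.json with huggingface_hub",
)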
@@ -104,7 +104,7 @@
 104       "forward": {
 105         "memory": {
 106           "unit": "MB",
 107 -         "max_ram": 903.
 108           "max_global_vram": 1195.900928,
 109           "max_process_vram": 0.0,
 110           "max_reserved": 555.74528,
@@ -112,164 +112,161 @@
 112         },
 113         "latency": {
 114           "unit": "s",
 115 -         "count":
 116 -         "total": 0.
 117 -         "mean": 0.
 118 -         "stdev": 0.
 119 -         "p50": 0.
 120 -         "p90": 0.
 121 -         "p95": 0.
 122 -         "p99": 0.
 123           "values": [
 124 -           0.
 125 -           0.
 126 -           0.
 127 -           0.
 128 -           0.
 129 -           0.
 130 -           0.
 131 -           0.
 132 -           0.
 133 -           0.
 134 -           0.007639039993286132,
 135 -           0.007599103927612305,
 136 -           0.007825407981872558,
 137 -           0.008169471740722656,
 138             0.00778547191619873,
 139 -           0.
 140 -           0.
 141 -           0.
 142 -           0.
 143 -           0.
 144 -           0.
 145 -           0.
 146 -           0.
 147 -           0.
 148 -           0.
 149 -           0.
 150 -           0.
 151 -           0.
 152 -           0.
 153 -           0.
 154 -           0.
 155 -           0.007941120147705078,
 156 -           0.007822336196899414,
 157 -           0.007714816093444824,
 158 -           0.007682047843933106,
 159 -           0.007697408199310303,
 160 -           0.007738368034362793,
 161             0.007768064022064209,
 162 -           0.
 163 -           0.
 164 -           0.
 165 -           0.
 166 -           0.
 167 -           0.
 168 -           0.
 169 -           0.
 170 -           0.
 171             0.007674880027770996,
 172 -           0.
 173 -           0.
 174             0.007393280029296875,
 175 -           0.
 176 -           0.
 177 -           0.
 178 -           0.
 179 -           0.
 180 -           0.
 181 -           0.
 182 -           0.
 183 -           0.
 184 -           0.
 185 -           0.
 186 -           0.007400447845458984,
 187 -           0.007350272178649903,
 188 -           0.007302144050598145,
 189 -           0.0075038719177246095,
 190 -           0.007658495903015137,
 191 -           0.007693312168121338,
 192 -           0.007669760227203369,
 193 -           0.0077281279563903805,
 194 -           0.007542784214019775,
 195             0.007426047801971435,
 196 -           0.
 197 -           0.
 198 -           0.
 199 -           0.
 200 -           0.
 201 -           0.
 202 -           0.
 203 -           0.
 204 -           0.
 205 -           0.
 206 -           0.
 207 -           0.007278592109680176,
 208 -           0.007279615879058838,
 209 -           0.0076943359375,
 210 -           0.007322624206542969,
 211 -           0.007333888053894043,
 212 -           0.007288832187652588,
 213 -           0.0072765440940856935,
 214 -           0.007602176189422607,
 215 -           0.007334911823272705,
 216 -           0.00773529577255249,
 217 -           0.007328767776489258,
 218 -           0.007054336071014404,
 219 -           0.0072642240524291996,
 220 -           0.00733081579208374,
 221 -           0.0073062400817871095,
 222 -           0.0073021121025085445,
 223 -           0.007342080116271973,
 224 -           0.00727347183227539,
 225 -           0.007282688140869141,
 226 -           0.007314400196075439,
 227 -           0.007305215835571289,
 228 -           0.007237631797790528,
 229 -           0.0073062400817871095,
 230 -           0.007271423816680909,
 231 -           0.007300096035003662,
 232 -           0.007585792064666748,
 233 -           0.007602176189422607,
 234 -           0.007308288097381592,
 235 -           0.007364607810974121,
 236 -           0.007308288097381592,
 237 -           0.007273407936096191,
 238 -           0.007300096035003662,
 239 -           0.007299071788787842,
 240 -           0.007390207767486572,
 241 -           0.007247871875762939,
 242 -           0.0072540159225463864,
 243 -           0.007328767776489258,
 244 -           0.007311295986175537,
 245 -           0.007271423816680909,
 246 -           0.007309311866760254,
 247 -           0.007237631797790528,
 248 -           0.007280640125274658,
 249 -           0.007263232231140137,
 250 -           0.007294976234436036,
 251 -           0.007287871837615967,
 252 -           0.007375904083251953,
 253 -           0.00725708818435669,
 254 -           0.007280640125274658,
 255 -           0.0072765440940856935,
 256 -           0.007414783954620361
 257           ]
 258         },
 259         "throughput": {
 260           "unit": "samples/s",
 261 -         "value":
 262         },
 263         "energy": {
 264           "unit": "kWh",
 265 -         "cpu": 8.
 266 -         "ram": 4.
 267 -         "gpu": 1.
 268 -         "total": 2.
 269         },
 270         "efficiency": {
 271           "unit": "samples/kWh",
 272 -         "value":
 273         }
 274       }
 275     }
 104       "forward": {
 105         "memory": {
 106           "unit": "MB",
 107 +         "max_ram": 903.622656,
 108           "max_global_vram": 1195.900928,
 109           "max_process_vram": 0.0,
 110           "max_reserved": 555.74528,
 112         },
 113         "latency": {
 114           "unit": "s",
 115 +         "count": 130,
 116 +         "total": 0.9969550065994257,
 117 +         "mean": 0.007668884666149433,
 118 +         "stdev": 0.0004436874454334662,
 119 +         "p50": 0.0074803361892700196,
 120 +         "p90": 0.007922176218032838,
 121 +         "p95": 0.007996365022659302,
 122 +         "p99": 0.00982579184532165,
 123           "values": [
 124 +           0.010809344291687012,
 125 +           0.010078207969665527,
 126 +           0.00810598373413086,
 127 +           0.008054719924926759,
 128 +           0.00800972843170166,
 129 +           0.007980031967163086,
 130 +           0.007862271785736084,
 131 +           0.007864319801330566,
 132 +           0.007845888137817383,
 133 +           0.007813119888305664,
 134             0.00778547191619873,
 135 +           0.00781824016571045,
 136 +           0.007904255867004394,
 137 +           0.007919583797454834,
 138 +           0.007889952182769775,
 139 +           0.007654399871826172,
 140 +           0.007903232097625732,
 141 +           0.007867392063140868,
 142 +           0.007882751941680909,
 143 +           0.00784281587600708,
 144 +           0.007899136066436767,
 145 +           0.00782431983947754,
 146 +           0.00783462381362915,
 147 +           0.007864319801330566,
 148 +           0.007788544178009033,
 149 +           0.0077916159629821775,
 150 +           0.007829504013061523,
 151             0.007768064022064209,
 152 +           0.00790015983581543,
 153 +           0.007888895988464355,
 154 +           0.00791756820678711,
 155 +           0.007964672088623047,
 156 +           0.0079267840385437,
 157 +           0.0078919677734375,
 158 +           0.007827455997467042,
 159 +           0.007888895988464355,
 160 +           0.007877567768096923,
 161 +           0.007931903839111328,
 162 +           0.007824384212493896,
 163 +           0.007773183822631836,
 164 +           0.007786496162414551,
 165 +           0.008056832313537597,
 166 +           0.007963647842407226,
 167 +           0.007848959922790527,
 168 +           0.007858176231384278,
 169 +           0.007799808025360107,
 170 +           0.007823359966278077,
 171 +           0.007782400131225586,
 172 +           0.007846911907196046,
 173 +           0.007921664237976075,
 174 +           0.007977983951568603,
 175 +           0.007888927936553955,
 176 +           0.007795711994171142,
 177 +           0.007360511779785156,
 178 +           0.0073431038856506346,
 179 +           0.007453695774078369,
 180 +           0.007311391830444336,
 181 +           0.0072848000526428225,
 182 +           0.007171072006225586,
 183 +           0.007315455913543701,
 184 +           0.0072499198913574215,
 185 +           0.007064576148986816,
 186 +           0.007282688140869141,
 187 +           0.007524352073669433,
 188             0.007674880027770996,
 189 +           0.007734272003173828,
 190 +           0.00775161600112915,
 191 +           0.007769087791442871,
 192 +           0.007822336196899414,
 193 +           0.009207807540893554,
 194 +           0.007716864109039307,
 195 +           0.0074967041015625,
 196 +           0.007450623989105225,
 197 +           0.007401440143585205,
 198 +           0.00744755220413208,
 199 +           0.007479296207427978,
 200 +           0.007460864067077637,
 201 +           0.007408639907836914,
 202 +           0.007420928001403809,
 203 +           0.00739737606048584,
 204 +           0.007391232013702393,
 205 +           0.007433152198791504,
 206 +           0.0074403839111328125,
 207 +           0.007436287879943848,
 208 +           0.007426047801971435,
 209 +           0.007418879985809326,
 210 +           0.007450623989105225,
 211 +           0.0074035201072692874,
 212 +           0.007413760185241699,
 213 +           0.007444479942321777,
 214 +           0.007481376171112061,
 215 +           0.007464960098266602,
 216 +           0.00743833589553833,
 217 +           0.0074403839111328125,
 218 +           0.007408639907836914,
 219 +           0.007391232013702393,
 220 +           0.007457791805267334,
 221 +           0.00743936014175415,
 222 +           0.0074711041450500485,
 223 +           0.00744652795791626,
 224 +           0.007449600219726562,
 225 +           0.007423999786376953,
 226 +           0.007406527996063232,
 227 +           0.00739737606048584,
 228 +           0.007464960098266602,
 229 +           0.007402560234069824,
 230             0.007393280029296875,
 231 +           0.00744652795791626,
 232 +           0.0076267518997192385,
 233 +           0.007450623989105225,
 234 +           0.007385087966918945,
 235 +           0.007396351814270019,
 236 +           0.007402495861053467,
 237 +           0.007419904232025146,
 238 +           0.007435328006744385,
 239 +           0.0074403839111328125,
 240 +           0.007404543876647949,
 241 +           0.007374847888946533,
 242             0.007426047801971435,
 243 +           0.007457791805267334,
 244 +           0.007444479942321777,
 245 +           0.007417856216430664,
 246 +           0.007392255783081054,
 247 +           0.0073994240760803225,
 248 +           0.0073471999168395995,
 249 +           0.007386112213134765,
 250 +           0.007461887836456299,
 251 +           0.007525375843048096,
 252 +           0.007408639907836914,
 253 +           0.0074106879234313965
 254           ]
 255         },
 256         "throughput": {
 257           "unit": "samples/s",
 258 +         "value": 130.39705818161724
 259         },
 260         "energy": {
 261           "unit": "kWh",
 262 +         "cpu": 8.514930922833701e-08,
 263 +         "ram": 4.6547032335449026e-08,
 264 +         "gpu": 1.464901965571432e-07,
 265 +         "total": 2.781865381209292e-07
 266         },
 267         "efficiency": {
 268           "unit": "samples/kWh",
 269 +         "value": 3594710.2500168234
 270         }
 271       }
 272     }
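The aggregate fields added above follow from the raw latency list: throughput is the number of iterations divided by the total latency, and efficiency is the reciprocal of the total energy per sample. A minimal sketch of those relationships; the top-level placement of the "forward" section and the exact stdev/percentile conventions are assumptions, not confirmed by this diff.

# Sketch: recompute the aggregate fields of the "forward" section from the raw values.
import json
import statistics

with open("cuda_inference_transformers_text-classification_FacebookAI/roberta-base/benchmark.json") as f:
    report = json.load(f)

forward = report["forward"]  # assumption: "forward" sits at the top level; adjust to the file's actual nesting

latencies = forward["latency"]["values"]     # per-iteration forward latencies, in seconds
count = len(latencies)                       # -> 130
total = sum(latencies)                       # -> ~0.99696 s
mean = total / count                         # -> ~0.0076689 s
stdev = statistics.pstdev(latencies)         # spread around the mean; population vs. sample stdev is an assumption

throughput = count / total                   # samples/s -> ~130.397
efficiency = 1 / forward["energy"]["total"]  # samples/kWh -> ~3,594,710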