Upload cuda_inference_transformers_token-classification_microsoft/deberta-v3-base/benchmark.json with huggingface_hub
cuda_inference_transformers_token-classification_microsoft/deberta-v3-base/benchmark.json
CHANGED
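The commit message above is the default that huggingface_hub generates for a single-file upload, so the report was most likely pushed with HfApi.upload_file. A minimal sketch of such an upload, assuming a Python script run from the benchmark output directory; the target repo_id is a placeholder, since the dataset repository is not named in this view:

from huggingface_hub import HfApi

api = HfApi()  # picks up the token cached by `huggingface-cli login`

# Upload the local report; the default commit message becomes
# "Upload <path_in_repo> with huggingface_hub", matching the title above.
api.upload_file(
    path_or_fileobj="benchmark.json",  # local file produced by the benchmark run
    path_in_repo=(
        "cuda_inference_transformers_token-classification_"
        "microsoft/deberta-v3-base/benchmark.json"
    ),
    repo_id="<org>/<benchmark-dataset>",  # placeholder, actual repo not shown here
    repo_type="dataset",
)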
@@ -104,7 +104,7 @@
     "forward": {
         "memory": {
             "unit": "MB",
-            "max_ram": 975.
+            "max_ram": 975.48288,
             "max_global_vram": 1434.976256,
             "max_process_vram": 0.0,
             "max_reserved": 794.820608,
@@ -112,100 +112,103 @@
         },
         "latency": {
             "unit": "s",
-            "count":
-            "total": 1.
-            "mean": 0.
-            "stdev": 0.
-            "p50": 0.
-            "p90": 0.
-            "p95": 0.
-            "p99": 0.
+            "count": 72,
+            "total": 1.0098104333877562,
+            "mean": 0.014025144908163284,
+            "stdev": 0.0006319623182443735,
+            "p50": 0.01401036787033081,
+            "p90": 0.014835507202148438,
+            "p95": 0.015305215644836428,
+            "p99": 0.01583332491874695,
            "values": [
-                0.
-                0.
-                0.
-                0.
-                0.
+                0.015602687835693359,
+                0.01609212875366211,
+                0.015727616310119628,
+                0.015558655738830567,
+                0.014997504234313964,
+                0.014815232276916505,
                 0.014726143836975097,
-                0.
-                0.
-                0.
-                0.
-                0.
-                0.
-                0.
-                0.
-                0.
-                0.
-                0.
-                0.
-                0.
-                0.
-                0.
-                0.
-                0.014856191635131836,
-                0.01468518352508545,
-                0.014771200180053711,
-                0.01479475212097168,
-                0.015542271614074708,
-                0.01477222442626953,
-                0.014787584304809571,
-                0.014618623733520507,
-                0.014483455657958985,
-                0.014631999969482421,
-                0.014335040092468262,
-                0.014649344444274901,
-                0.01468825626373291,
-                0.014847999572753906,
-                0.01465446376800537,
-                0.01448857593536377,
-                0.014387200355529785,
-                0.01437491226196289,
-                0.014167039871215821,
-                0.014140416145324707,
-                0.014070783615112305,
-                0.014089216232299804,
-                0.014098431587219238,
-                0.014191616058349609,
-                0.014115839958190919,
-                0.014088191986083985,
-                0.014125056266784668,
-                0.014056415557861328,
-                0.014105600357055664,
-                0.014134271621704102,
-                0.014113792419433594,
-                0.014119935989379882,
-                0.0140830717086792,
-                0.014140416145324707,
-                0.014103520393371581,
-                0.014126079559326172,
-                0.014124032020568847,
-                0.01409331226348877,
-                0.014133248329162598,
+                0.014925824165344239,
+                0.014808064460754394,
+                0.014807040214538575,
+                0.014044159889221192,
+                0.01439027214050293,
+                0.014156800270080566,
+                0.014034943580627441,
+                0.014207039833068848,
+                0.013949952125549316,
+                0.014006272315979004,
+                0.014011391639709473,
+                0.014837759971618653,
+                0.014325759887695312,
+                0.01407692813873291,
+                0.014180352210998535,
+                0.014053376197814941,
                 0.014097408294677734,
-                0.
-                0.
-                0.
-                0.
-                0.
-                0.
-                0.
+                0.01406771183013916,
+                0.014323712348937988,
+                0.014082048416137695,
+                0.014009344100952148,
+                0.014012415885925293,
+                0.014332927703857423,
+                0.01430016040802002,
+                0.014064640045166015,
+                0.014090239524841308,
+                0.01399295997619629,
+                0.01388646411895752,
+                0.014045184135437011,
+                0.013751296043395997,
+                0.013615103721618652,
+                0.014081024169921874,
+                0.014106623649597168,
+                0.014477312088012695,
+                0.014518176078796387,
+                0.015097855567932129,
+                0.014008319854736329,
+                0.013524991989135742,
+                0.013521920204162598,
+                0.013533151626586915,
+                0.013526016235351563,
+                0.01349120044708252,
+                0.013357088088989258,
+                0.01335091209411621,
+                0.013428735733032226,
+                0.013545536041259765,
+                0.01355571174621582,
+                0.013500415802001953,
+                0.01348300838470459,
+                0.013528063774108886,
+                0.013516799926757812,
+                0.01346457576751709,
+                0.013533184051513672,
+                0.013487104415893555,
+                0.013508607864379883,
+                0.01337446403503418,
+                0.013355008125305176,
+                0.013355008125305176,
+                0.013424639701843261,
+                0.013363200187683106,
+                0.013404159545898438,
+                0.013337599754333495,
+                0.013343744277954102,
+                0.013361151695251466,
+                0.013337599754333495
            ]
         },
         "throughput": {
             "unit": "samples/s",
-            "value":
+            "value": 71.30051108548288
         },
         "energy": {
             "unit": "kWh",
-            "cpu": 1.
-            "ram":
-            "gpu": 3.
-            "total":
+            "cpu": 1.605848985928315e-07,
+            "ram": 8.777120406316621e-08,
+            "gpu": 3.3488014778378494e-07,
+            "total": 5.832362504397827e-07
         },
         "efficiency": {
             "unit": "samples/kWh",
-            "value":
+            "value": 1714571.0666063046
         }
     }
 }
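The new summary statistics are consistent with the raw per-step latencies in the values array: mean is total divided by count, and the reported throughput of 71.30051108548288 samples/s is count divided by total. A minimal sketch for re-deriving them from the uploaded file, assuming "forward" is reachable at the top level of the parsed JSON (adjust the access path to the file's actual nesting) and using plain NumPy percentiles, which may round slightly differently than the benchmark tool's own aggregation:

import json

import numpy as np

# Path as it appears in this commit; adjust to wherever the file was downloaded.
with open(
    "cuda_inference_transformers_token-classification_"
    "microsoft/deberta-v3-base/benchmark.json"
) as f:
    report = json.load(f)

# Assumption: "forward" sits at the top level of the parsed document.
latency = report["forward"]["latency"]
values = np.asarray(latency["values"])  # 72 per-step forward latencies in seconds

print("count:", values.size)     # 72
print("total:", values.sum())    # ~1.00981 s
print("mean :", values.mean())   # ~0.0140251 s
print("p50  :", np.percentile(values, 50))
print("p90  :", np.percentile(values, 90))

# Throughput in samples/s is the inverse of the mean per-sample latency.
print("samples/s:", values.size / values.sum())  # ~71.3005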