IlyasMoutawwakil HF staff committed on
Commit
081a203
·
verified ·
1 Parent(s): a4deae2

Upload cuda_inference_transformers_token-classification_microsoft/deberta-v3-base/benchmark.json with huggingface_hub

Browse files
cuda_inference_transformers_token-classification_microsoft/deberta-v3-base/benchmark.json CHANGED
@@ -104,7 +104,7 @@
104
  "forward": {
105
  "memory": {
106
  "unit": "MB",
107
- "max_ram": 976.048128,
108
  "max_global_vram": 1434.976256,
109
  "max_process_vram": 0.0,
110
  "max_reserved": 794.820608,
@@ -112,102 +112,101 @@
112
  },
113
  "latency": {
114
  "unit": "s",
115
- "count": 71,
116
- "total": 1.0009896955490114,
117
- "mean": 0.014098446416183259,
118
- "stdev": 0.0006384603254537008,
119
- "p50": 0.014302207946777343,
120
- "p90": 0.014630911827087402,
121
- "p95": 0.015238143920898437,
122
- "p99": 0.01611059169769287,
123
  "values": [
124
- 0.01595084762573242,
125
- 0.016483327865600587,
126
- 0.015924223899841307,
127
- 0.01489510440826416,
128
- 0.015516672134399414,
129
- 0.014889984130859376,
130
- 0.01495961570739746,
131
- 0.01375334358215332,
132
- 0.013577216148376465,
133
- 0.013564000129699707,
134
- 0.013637632369995116,
135
- 0.013596672058105469,
136
- 0.013575167655944824,
137
- 0.013502464294433594,
138
- 0.013526016235351563,
139
- 0.0134901762008667,
140
- 0.013545472145080567,
141
- 0.013562879562377929,
142
- 0.013607935905456543,
143
- 0.01357209587097168,
144
- 0.013522944450378417,
145
- 0.013522944450378417,
146
- 0.013513728141784668,
147
- 0.013554688453674316,
148
- 0.01354854393005371,
149
- 0.013495295524597169,
150
- 0.013554688453674316,
151
- 0.013536255836486816,
152
- 0.013537280082702637,
153
- 0.013533184051513672,
154
- 0.013478912353515626,
155
- 0.013537280082702637,
156
- 0.013517824172973633,
157
- 0.013504511833190918,
158
- 0.013527039527893067,
159
- 0.0135731201171875,
160
- 0.013526016235351563,
161
- 0.013515775680541992,
162
- 0.013527039527893067,
163
- 0.01358847999572754,
164
- 0.01413430404663086,
165
- 0.014583744049072266,
166
- 0.014547967910766601,
167
- 0.014630911827087402,
168
- 0.014499903678894043,
169
- 0.014415871620178223,
170
- 0.014400511741638184,
171
- 0.014389216423034668,
172
- 0.01438003158569336,
173
- 0.01439641571044922,
174
- 0.014323712348937988,
175
- 0.014331904411315918,
176
- 0.014354432106018066,
177
- 0.014361599922180175,
178
- 0.014352383613586426,
179
- 0.014313471794128419,
180
- 0.014323648452758788,
181
- 0.014313471794128419,
182
  0.014252032279968262,
183
- 0.014321663856506347,
184
- 0.014317567825317384,
185
- 0.014403583526611329,
186
- 0.014334976196289062,
187
- 0.014325728416442871,
188
- 0.014330880165100097,
189
- 0.014306303977966308,
190
- 0.014324735641479493,
191
- 0.014302207946777343,
192
- 0.014302207946777343,
193
- 0.014326784133911133,
194
- 0.014341119766235352
 
 
 
 
 
 
 
 
 
 
 
195
  ]
196
  },
197
  "throughput": {
198
  "unit": "samples/s",
199
- "value": 70.92980109156741
200
  },
201
  "energy": {
202
  "unit": "kWh",
203
- "cpu": 1.653465246896685e-07,
204
- "ram": 9.038774616184786e-08,
205
- "gpu": 3.504362371388536e-07,
206
- "total": 6.061705079903701e-07
207
  },
208
  "efficiency": {
209
  "unit": "samples/kWh",
210
- "value": 1649700.8462442164
211
  }
212
  }
213
  }
 
104
  "forward": {
105
  "memory": {
106
  "unit": "MB",
107
+ "max_ram": 976.09728,
108
  "max_global_vram": 1434.976256,
109
  "max_process_vram": 0.0,
110
  "max_reserved": 794.820608,
 
112
  },
113
  "latency": {
114
  "unit": "s",
115
+ "count": 70,
116
+ "total": 1.0049732198715209,
117
+ "mean": 0.014356760283878873,
118
+ "stdev": 0.0005069449326987283,
119
+ "p50": 0.014176256179809572,
120
+ "p90": 0.014902886390686036,
121
+ "p95": 0.015359041786193846,
122
+ "p99": 0.016185395374298098,
123
  "values": [
124
+ 0.015296544075012207,
125
+ 0.015410176277160645,
126
+ 0.01510707187652588,
127
+ 0.01490124797821045,
128
+ 0.014096384048461913,
129
+ 0.014072832107543945,
130
+ 0.014013440132141113,
131
+ 0.014226431846618653,
132
+ 0.01417523193359375,
133
+ 0.014279680252075195,
134
+ 0.014166015625,
135
+ 0.014194687843322755,
136
+ 0.014110719680786133,
137
+ 0.01417523193359375,
138
+ 0.014137344360351562,
139
+ 0.014078975677490235,
140
+ 0.014125056266784668,
141
+ 0.014464032173156739,
142
+ 0.014375935554504395,
143
+ 0.014260224342346191,
144
+ 0.014311424255371094,
145
+ 0.014268416404724121,
146
+ 0.014050304412841797,
147
+ 0.014303232192993164,
148
+ 0.014225407600402832,
149
+ 0.01407487964630127,
150
+ 0.014136320114135742,
151
+ 0.014467071533203125,
152
+ 0.014335040092468262,
153
+ 0.014097439765930175,
154
+ 0.01406668758392334,
155
+ 0.01491763210296631,
156
+ 0.01459712028503418,
157
+ 0.014526464462280274,
158
+ 0.014592000007629394,
159
+ 0.015723520278930665,
160
+ 0.01721343994140625,
161
+ 0.01546444797515869,
162
+ 0.014766079902648926,
163
+ 0.014264320373535156,
164
+ 0.014220288276672363,
165
+ 0.014145536422729492,
166
+ 0.014108672142028808,
167
+ 0.014125056266784668,
168
+ 0.014125056266784668,
169
+ 0.01426534366607666,
 
 
 
 
 
 
 
 
 
 
 
 
170
  0.014252032279968262,
171
+ 0.014897151947021485,
172
+ 0.01417728042602539,
173
+ 0.014160896301269531,
174
+ 0.014171135902404786,
175
+ 0.014114815711975098,
176
+ 0.014152704238891601,
177
+ 0.01451417636871338,
178
+ 0.014017536163330077,
179
+ 0.013896703720092773,
180
+ 0.013942784309387207,
181
+ 0.014303168296813966,
182
+ 0.014135295867919923,
183
+ 0.014082048416137695,
184
+ 0.014210047721862793,
185
+ 0.014194751739501953,
186
+ 0.014118911743164063,
187
+ 0.0140830717086792,
188
+ 0.014053376197814941,
189
+ 0.014156800270080566,
190
+ 0.014173184394836426,
191
+ 0.014101504325866699,
192
+ 0.014220288276672363,
193
+ 0.013785087585449218
194
  ]
195
  },
196
  "throughput": {
197
  "unit": "samples/s",
198
+ "value": 69.65359734555814
199
  },
200
  "energy": {
201
  "unit": "kWh",
202
+ "cpu": 1.7041056165619502e-07,
203
+ "ram": 9.309723819395305e-08,
204
+ "gpu": 3.547542520571448e-07,
205
+ "total": 6.182620519072929e-07
206
  },
207
  "efficiency": {
208
  "unit": "samples/kWh",
209
+ "value": 1617437.1319007427
210
  }
211
  }
212
  }