IlyasMoutawwakil (HF staff) committed
Commit fe3eb93 · verified · 1 Parent(s): 2733e5b

Upload cuda_inference_transformers_token-classification_microsoft/deberta-v3-base/benchmark.json with huggingface_hub

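For context, a file like this is usually pushed with the huggingface_hub client's upload_file call. The sketch below is illustrative only: the repo id and local path are placeholders, not taken from this commit; only the path_in_repo mirrors the file shown here.

```python
from huggingface_hub import HfApi

api = HfApi()  # picks up the token from `huggingface-cli login` by default

# Hypothetical repo id and local file name; adjust to the actual benchmark repo.
api.upload_file(
    path_or_fileobj="benchmark.json",
    path_in_repo="cuda_inference_transformers_token-classification_microsoft/deberta-v3-base/benchmark.json",
    repo_id="<namespace>/<benchmark-repo>",
    repo_type="dataset",
    commit_message="Upload cuda_inference_transformers_token-classification_microsoft/deberta-v3-base/benchmark.json with huggingface_hub",
)
```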
cuda_inference_transformers_token-classification_microsoft/deberta-v3-base/benchmark.json CHANGED
@@ -102,7 +102,7 @@
  "forward": {
  "memory": {
  "unit": "MB",
- "max_ram": 975.773696,
+ "max_ram": 975.577088,
  "max_global_vram": 1434.976256,
  "max_process_vram": 0.0,
  "max_reserved": 794.820608,
@@ -110,101 +110,102 @@
  },
  "latency": {
  "unit": "s",
- "count": 70,
- "total": 1.008237566947937,
- "mean": 0.014403393813541955,
- "stdev": 0.0008115701644229827,
- "p50": 0.014286335945129396,
- "p90": 0.01438126106262207,
- "p95": 0.014828236579895019,
- "p99": 0.01784698854446412,
+ "count": 71,
+ "total": 1.005086657524109,
+ "mean": 0.014156150105973365,
+ "stdev": 0.00031493793304409923,
+ "p50": 0.01408512020111084,
+ "p90": 0.01447327995300293,
+ "p95": 0.01473689603805542,
+ "p99": 0.015300403404235838,
  "values": [
- 0.020510719299316405,
- 0.01665023994445801,
- 0.014963711738586426,
- 0.015407072067260743,
- 0.013814784049987794,
- 0.01417523193359375,
- 0.014121983528137207,
- 0.014149632453918457,
- 0.014120960235595703,
- 0.014044159889221192,
- 0.014143487930297852,
- 0.014120927810668945,
- 0.01417523193359375,
- 0.014053376197814941,
- 0.01428889560699463,
- 0.014301183700561524,
- 0.014158847808837891,
- 0.014154911994934082,
- 0.014087167739868164,
- 0.014379008293151856,
- 0.014330880165100097,
- 0.014317567825317384,
- 0.014081024169921874,
- 0.01417728042602539,
- 0.01417728042602539,
- 0.014141440391540527,
- 0.01437388801574707,
- 0.014270463943481445,
- 0.014379008293151856,
- 0.014261247634887696,
- 0.014375935554504395,
- 0.014362624168395996,
- 0.014303168296813966,
- 0.014401535987854003,
- 0.014339072227478027,
- 0.014213120460510254,
- 0.014327808380126953,
- 0.014336992263793945,
- 0.014346240043640136,
- 0.01415167999267578,
- 0.014356479644775391,
- 0.014358528137207031,
- 0.014333951950073242,
- 0.014366720199584961,
- 0.014347264289855957,
- 0.014366720199584961,
- 0.014442496299743653,
- 0.01429196834564209,
- 0.014158847808837891,
- 0.01409126377105713,
- 0.014287872314453125,
- 0.014269439697265626,
- 0.014306336402893067,
- 0.014255104064941406,
- 0.014294015884399413,
- 0.01428889560699463,
- 0.0146626558303833,
- 0.014284799575805664,
- 0.014225407600402832,
+ 0.01498419189453125,
+ 0.015462400436401368,
+ 0.015230976104736327,
+ 0.014900223731994629,
+ 0.01449062442779541,
+ 0.014098431587219238,
+ 0.014229503631591797,
+ 0.014137344360351562,
+ 0.01408614444732666,
+ 0.014318592071533203,
+ 0.014095359802246094,
+ 0.014156800270080566,
+ 0.014210047721862793,
+ 0.01408512020111084,
+ 0.014109696388244629,
+ 0.014109696388244629,
+ 0.014103551864624024,
+ 0.013922304153442382,
+ 0.013993984222412109,
+ 0.01430627155303955,
+ 0.014070783615112305,
+ 0.014433279991149902,
  0.014278656005859374,
- 0.014249983787536622,
- 0.014307328224182129,
- 0.014299136161804199,
- 0.014178303718566895,
- 0.014233599662780762,
- 0.014235648155212402,
- 0.014222335815429688,
- 0.014197759628295899,
- 0.014303199768066406,
- 0.014253055572509766
+ 0.01407795238494873,
+ 0.014215104103088379,
+ 0.014150655746459961,
+ 0.014049280166625976,
+ 0.01405951976776123,
+ 0.014132224082946777,
+ 0.014247936248779297,
+ 0.013961215972900391,
+ 0.014078975677490235,
+ 0.013974528312683105,
+ 0.01421513557434082,
+ 0.014573568344116212,
+ 0.014401535987854003,
+ 0.014147583961486816,
+ 0.014073856353759765,
+ 0.013998080253601074,
+ 0.013926400184631347,
+ 0.014013440132141113,
+ 0.013737983703613281,
+ 0.013808639526367187,
+ 0.01447327995300293,
+ 0.014417920112609863,
+ 0.01455513572692871,
+ 0.014313471794128419,
+ 0.01428275203704834,
+ 0.014201855659484864,
+ 0.014118911743164063,
+ 0.013982720375061035,
+ 0.014033920288085937,
+ 0.01390287971496582,
+ 0.013535231590270995,
+ 0.01366323184967041,
+ 0.013989888191223144,
+ 0.014417887687683106,
+ 0.013982720375061035,
+ 0.013900799751281738,
+ 0.014057472229003906,
+ 0.014002176284790039,
+ 0.014052351951599122,
+ 0.013959168434143066,
+ 0.014007295608520508,
+ 0.014070783615112305,
+ 0.013882368087768555,
+ 0.014052351951599122,
+ 0.013985792160034179,
+ 0.013990912437438965,
+ 0.013792256355285644,
+ 0.01380352020263672
  ]
  },
  "throughput": {
  "unit": "samples/s",
- "value": 69.42808153032712
+ "value": 70.6406750786033
  },
  "energy": {
  "unit": "kWh",
- "cpu": 1.6748565649948958e-07,
- "ram": 9.137496564223293e-08,
- "gpu": 3.325354772957742e-07,
- "total": 5.913960994374968e-07
+ "cpu": 1.598879893620809e-07,
+ "ram": 8.741408084460999e-08,
+ "gpu": 3.2341507354666346e-07,
+ "total": 5.707171437533543e-07
  },
  "efficiency": {
  "unit": "samples/kWh",
- "value": 1690914.0945487206
+ "value": 1752181.4631735822
  }
  }
  }
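As a quick sanity check, the summary fields in this section can be re-derived from the raw per-iteration latencies and the energy breakdown. A minimal sketch follows; it assumes the "forward" section shown above sits at the top level of benchmark.json (the full nesting is not visible in this diff), so adjust the lookup as needed.

```python
import json

# Load the report and pull out the "forward" section shown in this diff.
# NOTE: the nesting around "forward" is an assumption; adjust to the real layout.
with open("benchmark.json") as f:
    forward = json.load(f)["forward"]

values = forward["latency"]["values"]
count = len(values)          # 71 after this commit
total = sum(values)          # ~1.00509 s, matches "total"
mean = total / count         # ~0.0141562 s, matches "mean"
throughput = count / total   # ~70.64 samples/s, matches "throughput.value"

energy = forward["energy"]
total_energy = energy["cpu"] + energy["ram"] + energy["gpu"]  # ~5.707e-07 kWh, matches "total"
efficiency = 1 / total_energy  # ~1.752e+06, numerically matches "efficiency.value"

print(count, mean, throughput, efficiency)
```

Note that the reported efficiency (1752181.46 samples/kWh) is numerically the reciprocal of the total energy in this file, which is how the numbers above reconcile.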