IlyasMoutawwakil HF staff committed on
Commit
d92c415
·
verified ·
1 Parent(s): 72b456e

Upload cuda_inference_transformers_token-classification_microsoft/deberta-v3-base/benchmark.json with huggingface_hub

Browse files
cuda_inference_transformers_token-classification_microsoft/deberta-v3-base/benchmark.json CHANGED
@@ -105,7 +105,7 @@
105
  "forward": {
106
  "memory": {
107
  "unit": "MB",
108
- "max_ram": 982.831104,
109
  "max_global_vram": 1434.976256,
110
  "max_process_vram": 0.0,
111
  "max_reserved": 794.820608,
@@ -114,104 +114,104 @@
114
  "latency": {
115
  "unit": "s",
116
  "count": 74,
117
- "total": 0.9982882900238037,
118
- "mean": 0.013490382297618968,
119
- "stdev": 0.00035699389510758804,
120
- "p50": 0.013436383724212646,
121
- "p90": 0.013837785816192627,
122
- "p95": 0.01429652452468872,
123
- "p99": 0.014615294561386109,
124
  "values": [
125
- 0.01467801570892334,
126
- 0.014507871627807617,
127
- 0.014420991897583007,
128
- 0.014592096328735352,
129
- 0.014006112098693848,
130
- 0.01352188777923584,
131
- 0.013643775939941406,
132
- 0.01355673599243164,
133
- 0.013428799629211426,
134
- 0.013505536079406738,
135
- 0.013422592163085938,
136
- 0.013377599716186524,
137
- 0.013624320030212403,
138
- 0.013324288368225098,
139
- 0.013297663688659669,
140
- 0.013278240203857422,
141
  0.013336576461791993,
142
- 0.01347379207611084,
143
- 0.013532223701477051,
 
 
 
 
 
 
 
 
 
 
 
 
144
  0.013462528228759766,
145
- 0.013550496101379395,
146
- 0.013410304069519043,
147
- 0.013301792144775391,
148
- 0.013242400169372559,
149
- 0.01326591968536377,
150
- 0.013396991729736327,
151
- 0.013179712295532227,
152
- 0.01347481632232666,
153
- 0.013765631675720215,
154
- 0.013834112167358399,
155
- 0.013546367645263672,
156
- 0.013280256271362305,
157
- 0.013222016334533692,
158
- 0.013327360153198242,
159
- 0.013271039962768554,
160
- 0.013602815628051757,
161
- 0.014229503631591797,
162
- 0.014032896041870118,
163
- 0.013546496391296388,
164
- 0.013560832023620606,
165
- 0.013429759979248047,
166
- 0.013419520378112794,
167
- 0.013345824241638184,
168
- 0.012782591819763184,
169
- 0.012999775886535645,
170
- 0.013128704071044921,
171
- 0.012957695960998536,
172
- 0.012893183708190918,
173
- 0.01291155242919922,
174
- 0.012934240341186523,
175
- 0.012900223731994629,
176
- 0.013373439788818359,
177
- 0.013839360237121581,
178
- 0.01381987190246582,
179
- 0.013644800186157227,
180
- 0.013649920463562011,
181
- 0.013461503982543945,
182
- 0.01348300838470459,
183
- 0.013395968437194825,
184
- 0.013441056251525878,
185
- 0.013372447967529297,
186
- 0.01358028793334961,
187
- 0.013527039527893067,
188
- 0.013451264381408692,
189
- 0.013373439788818359,
190
- 0.013485024452209473,
191
- 0.01344102382659912,
192
- 0.013418496131896973,
193
- 0.013413472175598145,
194
- 0.013431743621826172,
195
- 0.013547679901123048,
196
- 0.013586496353149414,
197
- 0.013417471885681152,
198
- 0.013396991729736327
199
  ]
200
  },
201
  "throughput": {
202
  "unit": "samples/s",
203
- "value": 74.1268837263788
204
  },
205
  "energy": {
206
  "unit": "kWh",
207
- "cpu": 1.5601429115298165e-07,
208
- "ram": 8.48876546085235e-08,
209
- "gpu": 3.338458659220762e-07,
210
- "total": 5.747478116835814e-07
211
  },
212
  "efficiency": {
213
  "unit": "samples/kWh",
214
- "value": 1739893.5318618224
215
  }
216
  }
217
  }
 
105
  "forward": {
106
  "memory": {
107
  "unit": "MB",
108
+ "max_ram": 982.654976,
109
  "max_global_vram": 1434.976256,
110
  "max_process_vram": 0.0,
111
  "max_reserved": 794.820608,
 
114
  "latency": {
115
  "unit": "s",
116
  "count": 74,
117
+ "total": 1.0009302740097046,
118
+ "mean": 0.013526084783914927,
119
+ "stdev": 0.0003743763872745608,
120
+ "p50": 0.013384704113006591,
121
+ "p90": 0.013966624164581299,
122
+ "p95": 0.014142054605484008,
123
+ "p99": 0.015228067684173584,
124
  "values": [
125
+ 0.01407795238494873,
126
+ 0.015234047889709473,
127
+ 0.014339072227478027,
128
+ 0.01408614444732666,
129
+ 0.013557760238647461,
130
+ 0.013416447639465333,
131
+ 0.013621248245239258,
132
+ 0.013279232025146484,
133
+ 0.013232128143310547,
134
+ 0.013379584312438965,
135
+ 0.013542400360107423,
136
+ 0.013356032371520997,
 
 
 
 
137
  0.013336576461791993,
138
+ 0.013749247550964355,
139
+ 0.013454336166381836,
140
+ 0.013443072319030762,
141
+ 0.013403136253356934,
142
+ 0.013369343757629394,
143
+ 0.013338624000549316,
144
+ 0.013304832458496094,
145
+ 0.013239295959472656,
146
+ 0.013606911659240722,
147
+ 0.013421567916870117,
148
+ 0.013570048332214356,
149
+ 0.01357209587097168,
150
+ 0.01334172821044922,
151
+ 0.013422592163085938,
152
  0.013462528228759766,
153
+ 0.013494272232055664,
154
+ 0.0135895357131958,
155
+ 0.013312000274658203,
156
+ 0.013493247985839844,
157
+ 0.013283328056335449,
158
+ 0.013298656463623047,
159
+ 0.013223967552185058,
160
+ 0.013419487953186034,
161
+ 0.013751296043395997,
162
+ 0.014009344100952148,
163
+ 0.015225855827331543,
164
+ 0.014016511917114258,
165
+ 0.014245887756347657,
166
+ 0.013866944313049316,
167
+ 0.013857791900634766,
168
+ 0.013351936340332032,
169
+ 0.013287424087524414,
170
+ 0.013218815803527833,
171
+ 0.013325311660766602,
172
+ 0.013389823913574218,
173
+ 0.01376460838317871,
174
+ 0.013614080429077148,
175
+ 0.01335091209411621,
176
+ 0.013424639701843261,
177
+ 0.01364684772491455,
178
+ 0.013365247726440429,
179
+ 0.013379584312438965,
180
+ 0.013371392250061035,
181
+ 0.013326335906982421,
182
+ 0.0132925443649292,
183
+ 0.013290495872497558,
184
+ 0.013342720031738281,
185
+ 0.013608960151672364,
186
+ 0.013273088455200196,
187
+ 0.013306879997253418,
188
+ 0.01333350372314453,
189
+ 0.013338624000549316,
190
+ 0.013348863601684571,
191
+ 0.013348863601684571,
192
+ 0.013416447639465333,
193
+ 0.013507583618164062,
194
+ 0.013308927536010743,
195
+ 0.013294591903686523,
196
+ 0.013279232025146484,
197
+ 0.013305855751037597,
198
+ 0.013270015716552735
 
 
 
 
 
 
 
 
199
  ]
200
  },
201
  "throughput": {
202
  "unit": "samples/s",
203
+ "value": 73.93122370408244
204
  },
205
  "energy": {
206
  "unit": "kWh",
207
+ "cpu": 1.535737573930681e-07,
208
+ "ram": 8.395961256333168e-08,
209
+ "gpu": 3.295301781538397e-07,
210
+ "total": 5.670635481102394e-07
211
  },
212
  "efficiency": {
213
  "unit": "samples/kWh",
214
+ "value": 1763470.7844165573
215
  }
216
  }
217
  }