IlyasMoutawwakil committed (verified)
Commit c10c9fa · 1 Parent(s): c3a9c94

Upload cuda_inference_transformers_token-classification_microsoft/deberta-v3-base/benchmark.json with huggingface_hub

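The commit message says the file was uploaded with huggingface_hub. Purely as a hedged sketch, this is how the same file could be pulled back down at this commit with that library; the repo_id is a placeholder (the commit page does not name the repository), and the revision may need to be the full commit hash rather than the short c10c9fa shown above.

import json
from huggingface_hub import hf_hub_download

# Placeholder repo_id: substitute the repository that actually hosts this file.
path = hf_hub_download(
    repo_id="<namespace>/<benchmark-repo>",
    filename="cuda_inference_transformers_token-classification_microsoft/deberta-v3-base/benchmark.json",
    revision="c10c9fa",  # commit shown above; the full hash is the safer choice
)

with open(path) as f:
    benchmark = json.load(f)

print(list(benchmark.keys()))  # inspect the top-level structure of the report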
cuda_inference_transformers_token-classification_microsoft/deberta-v3-base/benchmark.json CHANGED
@@ -73,7 +73,7 @@
     "environment": {
         "cpu": " AMD EPYC 7R32",
         "cpu_count": 16,
-        "cpu_ram_mb": 66697.285632,
+        "cpu_ram_mb": 66697.293824,
         "system": "Linux",
         "machine": "x86_64",
         "platform": "Linux-5.10.219-208.866.amzn2.x86_64-x86_64-with-glibc2.35",
@@ -104,7 +104,7 @@
     "forward": {
         "memory": {
             "unit": "MB",
-            "max_ram": 976.195584,
+            "max_ram": 976.584704,
             "max_global_vram": 1434.976256,
             "max_process_vram": 0.0,
             "max_reserved": 794.820608,
@@ -112,102 +112,103 @@
         },
         "latency": {
             "unit": "s",
-            "count": 71,
-            "total": 1.001166723251343,
-            "mean": 0.01410093976410342,
-            "stdev": 0.00042881216000027777,
-            "p50": 0.014079999923706055,
-            "p90": 0.014682111740112304,
-            "p95": 0.014846464157104493,
-            "p99": 0.015186943817138672,
+            "count": 72,
+            "total": 1.0034430694580079,
+            "mean": 0.013936709298027884,
+            "stdev": 0.000304540270814966,
+            "p50": 0.013830080032348632,
+            "p90": 0.014352793502807616,
+            "p95": 0.01448596487045288,
+            "p99": 0.014827571125030518,
             "values": [
-                0.015134719848632813,
-                0.015308799743652344,
-                0.014873600006103516,
-                0.015029248237609863,
-                0.014682111740112304,
-                0.01458892822265625,
-                0.014532608032226562,
-                0.014683135986328125,
-                0.01420083236694336,
-                0.014112768173217773,
-                0.014207967758178711,
-                0.014211008071899414,
-                0.014196736335754395,
-                0.014147583961486816,
-                0.014401535987854003,
-                0.01439027214050293,
-                0.014175200462341308,
-                0.014375935554504395,
-                0.01406156826019287,
-                0.014269439697265626,
-                0.01407795238494873,
-                0.014187520027160644,
-                0.014346240043640136,
-                0.01417728042602539,
-                0.014036992073059081,
-                0.014002176284790039,
-                0.014403583526611329,
-                0.014429183959960937,
-                0.014114815711975098,
-                0.014076864242553711,
-                0.013934592247009277,
-                0.014079999923706055,
-                0.01397760009765625,
-                0.013896703720092773,
-                0.013814784049987794,
-                0.014498815536499024,
-                0.014468095779418945,
-                0.014289919853210448,
-                0.014235648155212402,
-                0.01425100803375244,
-                0.01441590404510498,
-                0.013855744361877441,
-                0.013503487586975099,
-                0.01355673599243164,
-                0.013544447898864746,
-                0.013552672386169433,
-                0.013595647811889648,
-                0.0134901762008667,
-                0.014034943580627441,
-                0.014533632278442383,
-                0.014240768432617188,
-                0.014819328308105468,
-                0.013946880340576171,
-                0.013633536338806153,
-                0.013541376113891602,
-                0.013522944450378417,
+                0.014457856178283691,
+                0.0148602876663208,
+                0.014814208030700684,
+                0.014771200180053711,
+                0.014520319938659668,
+                0.014205951690673829,
+                0.0140830717086792,
+                0.014090239524841308,
+                0.013670399665832519,
+                0.013530112266540528,
+                0.01370019245147705,
+                0.01365401554107666,
+                0.014047103881835938,
+                0.013808639526367187,
+                0.01407590389251709,
+                0.01400115203857422,
+                0.013964320182800293,
+                0.014272416114807129,
+                0.014381055831909179,
+                0.014013407707214355,
+                0.013666208267211915,
+                0.013516799926757812,
+                0.013423616409301758,
+                0.013727744102478028,
+                0.01378598403930664,
+                0.013485055923461914,
+                0.013889535903930664,
+                0.014347264289855957,
+                0.014358528137207031,
+                0.014246912002563476,
+                0.014154751777648926,
+                0.014056447982788087,
+                0.013825023651123047,
+                0.013985792160034179,
+                0.013760512351989745,
+                0.013303808212280274,
+                0.013816831588745117,
+                0.014284799575805664,
+                0.014353407859802245,
+                0.014203904151916504,
+                0.01407692813873291,
+                0.014041119575500489,
+                0.013903871536254882,
+                0.014104576110839843,
+                0.013799424171447755,
+                0.014000255584716797,
+                0.013831040382385255,
+                0.01376972770690918,
+                0.013735936164855958,
+                0.01377791976928711,
+                0.01386393642425537,
+                0.013793279647827148,
+                0.013938688278198242,
+                0.013905920028686524,
+                0.013829119682312012,
+                0.01377791976928711,
+                0.013737983703613281,
+                0.013727775573730469,
+                0.013750271797180176,
+                0.013698047637939453,
+                0.013744128227233888,
+                0.013756575584411622,
+                0.013752320289611816,
+                0.013870944023132324,
+                0.013745152473449707,
+                0.013760640144348144,
+                0.013744128227233888,
+                0.013775872230529786,
+                0.013744128227233888,
+                0.013767680168151856,
                 0.01375641632080078,
-                0.013640704154968262,
-                0.013719552040100098,
-                0.01354751968383789,
-                0.01345638370513916,
-                0.014017536163330077,
-                0.014736384391784667,
-                0.013836288452148437,
-                0.013508607864379883,
-                0.013651968002319336,
-                0.013501440048217773,
-                0.013642751693725585,
-                0.013544447898864746,
-                0.013963264465332031,
-                0.013971455574035644
+                0.013846559524536133
             ]
         },
         "throughput": {
             "unit": "samples/s",
-            "value": 70.91725918478761
+            "value": 71.75294961067353
         },
         "energy": {
             "unit": "kWh",
-            "cpu": 1.618934890350422e-07,
-            "ram": 8.84347716010331e-08,
-            "gpu": 3.504056857296918e-07,
-            "total": 6.007339463657672e-07
+            "cpu": 1.5990795471050124e-07,
+            "ram": 8.74269723214373e-08,
+            "gpu": 3.3605582440000374e-07,
+            "total": 5.833907514319422e-07
         },
         "efficiency": {
             "unit": "samples/kWh",
-            "value": 1664630.417591106
+            "value": 1714116.9919911886
         }
     }
 }
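For orientation, the summary fields in the new version are consistent with the raw per-iteration latencies: throughput 71.75294961067353 samples/s matches count / total (72 / 1.0034430694580079 s), and efficiency 1714116.9919911886 samples/kWh matches 1 / total energy (1 / 5.833907514319422e-07 kWh). A minimal sketch that recomputes them, assuming the file is saved locally as benchmark.json and that the "forward" section sits at the top level (the diff does not show the full nesting); the percentile interpolation used by the benchmark tool is not stated here, so numpy's default is only an approximation.

import json
import numpy as np

with open("benchmark.json") as f:
    report = json.load(f)

# Adjust this lookup if "forward" is nested deeper in the full file.
forward = report["forward"]
values = np.array(forward["latency"]["values"])  # seconds per iteration

print("count :", values.size)     # 72 in the new version
print("total :", values.sum())    # ~1.0034 s
print("mean  :", values.mean())   # ~0.0139367 s
print("p50/p90/p95/p99:", np.percentile(values, [50, 90, 95, 99]))

print("throughput ~", values.size / values.sum())      # ~71.75 samples/s
print("efficiency ~", 1 / forward["energy"]["total"])  # ~1.714e6 samples/kWh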