IlyasMoutawwakil committed (verified)
Commit cd45a50 · 1 Parent(s): 1f7fcb2

Upload cuda_inference_transformers_token-classification_microsoft/deberta-v3-base/benchmark.json with huggingface_hub

cuda_inference_transformers_token-classification_microsoft/deberta-v3-base/benchmark.json CHANGED
@@ -73,7 +73,7 @@
     "environment": {
         "cpu": " AMD EPYC 7R32",
         "cpu_count": 16,
-        "cpu_ram_mb": 66697.29792,
+        "cpu_ram_mb": 66697.289728,
         "system": "Linux",
         "machine": "x86_64",
         "platform": "Linux-5.10.215-203.850.amzn2.x86_64-x86_64-with-glibc2.35",
@@ -104,7 +104,7 @@
     "forward": {
         "memory": {
             "unit": "MB",
-            "max_ram": 975.2576,
+            "max_ram": 975.261696,
             "max_global_vram": 1434.976256,
             "max_process_vram": 0.0,
             "max_reserved": 794.820608,
@@ -112,101 +112,104 @@
         },
         "latency": {
             "unit": "s",
-            "count": 70,
-            "total": 1.0083922910690306,
-            "mean": 0.014405604158129012,
-            "stdev": 0.0004794734939388193,
-            "p50": 0.014235648155212402,
-            "p90": 0.014927871894836426,
-            "p95": 0.015397478246688842,
-            "p99": 0.015928831815719607,
+            "count": 73,
+            "total": 1.0066957750320438,
+            "mean": 0.013790353082630734,
+            "stdev": 0.0004645362546856893,
+            "p50": 0.013499391555786134,
+            "p90": 0.014166835021972657,
+            "p95": 0.014752153778076168,
+            "p99": 0.015337635803222657,
             "values": [
-                0.016570432662963867,
-                0.015566847801208495,
-                0.01549516773223877,
-                0.015640576362609862,
-                0.014824447631835937,
-                0.014640128135681153,
-                0.014393407821655274,
-                0.014565376281738282,
-                0.014671872138977051,
-                0.014258175849914552,
-                0.014014464378356933,
-                0.014052351951599122,
-                0.014145536422729492,
-                0.014072832107543945,
-                0.014042112350463867,
-                0.014030816078186036,
-                0.014020607948303223,
-                0.013926400184631347,
-                0.013937664031982423,
-                0.01398681640625,
-                0.01417523193359375,
-                0.014321663856506347,
-                0.014213120460510254,
-                0.013982720375061035,
-                0.01415167999267578,
-                0.01397657585144043,
-                0.013947903633117676,
-                0.01387929630279541,
-                0.01417625617980957,
-                0.01388748836517334,
-                0.013949952125549316,
-                0.013922304153442382,
-                0.01470464038848877,
-                0.015278079986572265,
-                0.015205375671386719,
-                0.014920703887939453,
-                0.014812128067016601,
-                0.01499238395690918,
-                0.014680064201354981,
-                0.014793760299682617,
-                0.014520319938659668,
-                0.014552063941955566,
-                0.014633983612060546,
-                0.014803999900817872,
-                0.014805983543395997,
-                0.01435750389099121,
-                0.014278656005859374,
-                0.014236672401428223,
-                0.014279680252075195,
-                0.014299136161804199,
-                0.014248959541320801,
-                0.014244864463806153,
-                0.014178303718566895,
-                0.014216192245483398,
-                0.014160863876342774,
-                0.014211071968078613,
-                0.01466163158416748,
-                0.014203904151916504,
-                0.014222335815429688,
-                0.014229536056518555,
-                0.014234623908996581,
-                0.014224384307861328,
-                0.014241791725158692,
-                0.014213120460510254,
-                0.014206975936889648,
-                0.014255104064941406,
-                0.014222335815429688,
-                0.014262271881103515,
-                0.014169088363647461,
-                0.014189567565917969
+                0.014995455741882324,
+                0.015316991806030274,
+                0.015049792289733887,
+                0.01539072036743164,
+                0.013749247550964355,
+                0.013489151954650879,
+                0.013489151954650879,
+                0.013499391555786134,
+                0.013453311920166015,
+                0.013466624259948731,
+                0.013418496131896973,
+                0.013468671798706054,
+                0.013463552474975587,
+                0.013444095611572266,
+                0.013447168350219727,
+                0.013516799926757812,
+                0.013439007759094238,
+                0.013476863861083984,
+                0.013453311920166015,
+                0.013496319770812988,
+                0.013469696044921875,
+                0.013524991989135742,
+                0.013485055923461914,
+                0.01345740795135498,
+                0.013458432197570801,
+                0.013458432197570801,
+                0.013436927795410156,
+                0.01344825553894043,
+                0.013484031677246093,
+                0.013576160430908204,
+                0.01346560001373291,
+                0.013508607864379883,
+                0.013458432197570801,
+                0.013488127708435058,
+                0.013475839614868163,
+                0.013426655769348145,
+                0.013523039817810058,
+                0.013495295524597169,
+                0.013452287673950195,
+                0.013522944450378417,
+                0.01345638370513916,
+                0.013532159805297851,
+                0.013413375854492187,
+                0.01347379207611084,
+                0.01345740795135498,
+                0.013426688194274903,
+                0.013439999580383302,
+                0.01344921588897705,
+                0.013469696044921875,
+                0.013990943908691406,
+                0.014356479644775391,
+                0.014219264030456542,
+                0.014160896301269531,
+                0.014166015625,
+                0.01416198444366455,
+                0.014135295867919923,
+                0.014589952468872071,
+                0.014101504325866699,
+                0.014105600357055664,
+                0.014167039871215821,
+                0.014109696388244629,
+                0.014139391899108887,
+                0.014107680320739746,
+                0.01407590389251709,
+                0.014124032020568847,
+                0.014143487930297852,
+                0.014137344360351562,
+                0.014112768173217773,
+                0.014064640045166015,
+                0.014073856353759765,
+                0.014101504325866699,
+                0.01404419231414795,
+                0.014047231674194336
             ]
         },
         "throughput": {
             "unit": "samples/s",
-            "value": 69.41742873281055
+            "value": 72.51445949266687
         },
         "energy": {
             "unit": "kWh",
-            "cpu": 1.699695440511855e-07,
-            "ram": 9.288018814394101e-08,
-            "gpu": 3.4962726382857373e-07,
-            "total": 6.124769960237002e-07
+            "cpu": 1.6780827246921165e-07,
+            "ram": 9.171032538697791e-08,
+            "gpu": 3.5240638521127043e-07,
+            "total": 6.1192498306746e-07
         },
         "efficiency": {
             "unit": "samples/kWh",
-            "value": 1632714.3819150135
+            "value": 1634187.2413628153
         }
     }
 }
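
The derived fields in both the old and the new report are consistent with the raw measurements: throughput is the latency count divided by the total latency (73 / 1.00670 s ≈ 72.51 samples/s), the mean is total / count, and efficiency is the reciprocal of the total energy (1 / 6.119e-07 kWh ≈ 1 634 187 samples/kWh). Below is a minimal Python sketch of that sanity check; the local benchmark.json path and the "report"/"forward" nesting used in the usage example are assumptions, everything else comes from the fields shown in the diff.

import json
import math

def check_forward_metrics(forward: dict) -> None:
    """Recompute the derived metrics of a 'forward' report section from its
    raw latency and energy numbers and compare them with the stored values."""
    latency = forward["latency"]
    count = latency["count"]    # number of measured forward passes (70 before, 73 after)
    total_s = latency["total"]  # summed latency in seconds

    # mean latency = total / count
    assert math.isclose(latency["mean"], total_s / count, rel_tol=1e-6)

    # throughput [samples/s] = count / total latency, e.g. 73 / 1.0067 s ≈ 72.51
    assert math.isclose(forward["throughput"]["value"], count / total_s, rel_tol=1e-6)

    # efficiency [samples/kWh] matches 1 / total energy per sample,
    # e.g. 1 / 6.119e-07 kWh ≈ 1.634e6 samples/kWh
    assert math.isclose(forward["efficiency"]["value"],
                        1.0 / forward["energy"]["total"], rel_tol=1e-6)

# Hypothetical usage: 'benchmark.json' is a locally downloaded copy of the file
# touched by this commit; the "report"/"forward" nesting is an assumption here.
if __name__ == "__main__":
    with open("benchmark.json") as f:
        data = json.load(f)
    check_forward_metrics(data["report"]["forward"])
    print("stored throughput and efficiency match the raw latency/energy numbers")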