IlyasMoutawwakil (HF staff) committed · verified
Commit 12748b3 · 1 Parent(s): eb65f68

Upload cpu_inference_transformers_multiple-choice_FacebookAI/roberta-base/benchmark.json with huggingface_hub
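The commit message refers to the huggingface_hub client. As a rough sketch of that kind of upload (not taken from this commit; the repo_id and repo_type below are placeholders, not the actual target repository), HfApi.upload_file can push a single file with a commit message:

from huggingface_hub import HfApi

api = HfApi()  # authenticates via HF_TOKEN or a prior `huggingface-cli login`
api.upload_file(
    path_or_fileobj="benchmark.json",  # local result file
    path_in_repo="cpu_inference_transformers_multiple-choice_FacebookAI/roberta-base/benchmark.json",
    repo_id="some-user/benchmark-results",  # hypothetical repository
    repo_type="dataset",                    # assumption: results stored in a dataset repo
    commit_message="Upload cpu_inference_transformers_multiple-choice_FacebookAI/roberta-base/benchmark.json with huggingface_hub",
)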

cpu_inference_transformers_multiple-choice_FacebookAI/roberta-base/benchmark.json CHANGED
@@ -80,7 +80,7 @@
  "processor": "x86_64",
  "python_version": "3.10.14",
  "optimum_benchmark_version": "0.3.1",
- "optimum_benchmark_commit": "dcd8d39eb1efde5484c10fe67e911bfe5a0711c9",
+ "optimum_benchmark_commit": "8bf68d01ce920e01ece8935a1cc6d7e3f17c2d2b",
  "transformers_version": "4.42.4",
  "transformers_commit": null,
  "accelerate_version": "0.32.1",
@@ -99,7 +99,7 @@
  "load": {
  "memory": {
  "unit": "MB",
- "max_ram": 942.40768,
+ "max_ram": 941.99808,
  "max_global_vram": null,
  "max_process_vram": null,
  "max_reserved": null,
@@ -108,31 +108,31 @@
  "latency": {
  "unit": "s",
  "count": 1,
- "total": 4.33447696799999,
- "mean": 4.33447696799999,
+ "total": 4.243301096999971,
+ "mean": 4.243301096999971,
  "stdev": 0.0,
- "p50": 4.33447696799999,
- "p90": 4.33447696799999,
- "p95": 4.33447696799999,
- "p99": 4.33447696799999,
+ "p50": 4.243301096999971,
+ "p90": 4.243301096999971,
+ "p95": 4.243301096999971,
+ "p99": 4.243301096999971,
  "values": [
- 4.33447696799999
+ 4.243301096999971
  ]
  },
  "throughput": null,
  "energy": {
  "unit": "kWh",
- "cpu": 4.7421349419487856e-05,
- "ram": 1.981958696039783e-06,
+ "cpu": 4.742653237448799e-05,
+ "ram": 1.9822067545351275e-06,
  "gpu": 0,
- "total": 4.940330811552764e-05
+ "total": 4.9408739129023115e-05
  },
  "efficiency": null
  },
  "forward": {
  "memory": {
  "unit": "MB",
- "max_ram": 953.5488,
+ "max_ram": 953.1392,
  "max_global_vram": null,
  "max_process_vram": null,
  "max_reserved": null,
@@ -140,51 +140,57 @@
  },
  "latency": {
  "unit": "s",
- "count": 20,
- "total": 1.0509125060000883,
- "mean": 0.052545625300004416,
- "stdev": 0.009177589819514645,
- "p50": 0.04974745000001235,
- "p90": 0.05759523510001259,
- "p95": 0.06262408775004646,
- "p99": 0.0838372951500406,
+ "count": 26,
+ "total": 1.026963206000005,
+ "mean": 0.03949858484615403,
+ "stdev": 0.0009805407086175247,
+ "p50": 0.039306039500019097,
+ "p90": 0.03971452299998646,
+ "p95": 0.04178418475000001,
+ "p99": 0.042793993250000995,
  "values": [
- 0.05046450200001118,
- 0.05199875700003531,
- 0.048905503000014505,
- 0.049315901999989364,
- 0.04726157400000375,
- 0.047367602000008446,
- 0.04765023099997734,
- 0.045733241999982965,
- 0.05224745300000677,
- 0.053284563999966394,
- 0.049337392000040836,
- 0.04727748400000564,
- 0.05719154100000878,
- 0.05358853399997088,
- 0.050157507999983864,
- 0.053351269000017965,
- 0.04870411499996408,
- 0.04670625400001427,
- 0.061228482000046824,
- 0.08914059700003918
+ 0.039669501999981094,
+ 0.039532820999966134,
+ 0.03947681699997929,
+ 0.03956447999996726,
+ 0.039303344000018114,
+ 0.03915403700000297,
+ 0.03930873500002008,
+ 0.03885878699998102,
+ 0.039082432999975936,
+ 0.03920720500002517,
+ 0.03935729500000207,
+ 0.039486585000020114,
+ 0.0393253950000485,
+ 0.03926513400000431,
+ 0.03918684799998573,
+ 0.03922897600000397,
+ 0.03975954399999182,
+ 0.03929313500003673,
+ 0.039510570000004464,
+ 0.04245906500000274,
+ 0.03961248999996769,
+ 0.04290563600000041,
+ 0.03857626199999231,
+ 0.038866912999992564,
+ 0.038362815000027695,
+ 0.038608382000006713
  ]
  },
  "throughput": {
  "unit": "samples/s",
- "value": 19.031080024085583
+ "value": 25.317362733246625
  },
  "energy": {
  "unit": "kWh",
- "cpu": 1.7981423272026909e-06,
- "ram": 7.514710132299611e-08,
+ "cpu": 1.5139153370490443e-06,
+ "ram": 6.327007709951606e-08,
  "gpu": 0.0,
- "total": 1.8732894285256872e-06
+ "total": 1.5771854141485604e-06
  },
  "efficiency": {
  "unit": "samples/kWh",
- "value": 533820.3401847082
+ "value": 634040.8623039717
  }
  }
  }
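As a quick sanity check of the derived figures on the new ("+") side of the diff, the forward "mean" and "throughput.value" are consistent with dividing the total latency by the iteration count and vice versa. A standalone sketch follows; the formulas are assumptions that happen to reproduce the reported numbers, not a claim about how optimum-benchmark computes them internally:

# Values copied from the "+" side of the forward.latency block above.
count = 26
total_latency_s = 1.026963206000005

mean_latency_s = total_latency_s / count  # ~0.0394985848... (matches "mean")
throughput = count / total_latency_s      # ~25.3173627332... (matches "throughput.value")

print(mean_latency_s, throughput)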