IlyasMoutawwakil HF staff committed on
Commit
3d688f9
·
verified ·
1 Parent(s): 65f06ba

Upload cuda_training_transformers_multiple-choice_FacebookAI/roberta-base/benchmark.json with huggingface_hub

Browse files
cuda_training_transformers_multiple-choice_FacebookAI/roberta-base/benchmark.json CHANGED
@@ -110,7 +110,7 @@
110
  "overall": {
111
  "memory": {
112
  "unit": "MB",
113
- "max_ram": 1168.695296,
114
  "max_global_vram": 3379.03616,
115
  "max_process_vram": 0.0,
116
  "max_reserved": 2730.491904,
@@ -119,24 +119,24 @@
119
  "latency": {
120
  "unit": "s",
121
  "count": 5,
122
- "total": 0.9065400390625,
123
- "mean": 0.1813080078125,
124
- "stdev": 0.2611195198471168,
125
- "p50": 0.05045452880859375,
126
- "p90": 0.44293939514160163,
127
- "p95": 0.5732423721313475,
128
- "p99": 0.6774847537231445,
129
  "values": [
130
- 0.7035453491210938,
131
- 0.05203046417236328,
132
- 0.05045452880859375,
133
- 0.05024563217163086,
134
- 0.05026406478881836
135
  ]
136
  },
137
  "throughput": {
138
  "unit": "samples/s",
139
- "value": 55.1547618919376
140
  },
141
  "energy": null,
142
  "efficiency": null
@@ -144,7 +144,7 @@
144
  "warmup": {
145
  "memory": {
146
  "unit": "MB",
147
- "max_ram": 1168.695296,
148
  "max_global_vram": 3379.03616,
149
  "max_process_vram": 0.0,
150
  "max_reserved": 2730.491904,
@@ -153,21 +153,21 @@
153
  "latency": {
154
  "unit": "s",
155
  "count": 2,
156
- "total": 0.755575813293457,
157
- "mean": 0.3777879066467285,
158
- "stdev": 0.3257574424743653,
159
- "p50": 0.3777879066467285,
160
- "p90": 0.6383938606262207,
161
- "p95": 0.6709696048736572,
162
- "p99": 0.6970302002716064,
163
  "values": [
164
- 0.7035453491210938,
165
- 0.05203046417236328
166
  ]
167
  },
168
  "throughput": {
169
  "unit": "samples/s",
170
- "value": 10.58795141301445
171
  },
172
  "energy": null,
173
  "efficiency": null
@@ -175,7 +175,7 @@
175
  "train": {
176
  "memory": {
177
  "unit": "MB",
178
- "max_ram": 1168.695296,
179
  "max_global_vram": 3379.03616,
180
  "max_process_vram": 0.0,
181
  "max_reserved": 2730.491904,
@@ -184,22 +184,22 @@
184
  "latency": {
185
  "unit": "s",
186
  "count": 3,
187
- "total": 0.15096422576904298,
188
- "mean": 0.05032140858968099,
189
- "stdev": 9.443052068983951e-05,
190
- "p50": 0.05026406478881836,
191
- "p90": 0.05041643600463867,
192
- "p95": 0.05043548240661622,
193
- "p99": 0.05045071952819825,
194
  "values": [
195
- 0.05045452880859375,
196
- 0.05024563217163086,
197
- 0.05026406478881836
198
  ]
199
  },
200
  "throughput": {
201
  "unit": "samples/s",
202
- "value": 119.23354628094359
203
  },
204
  "energy": null,
205
  "efficiency": null
 
110
  "overall": {
111
  "memory": {
112
  "unit": "MB",
113
+ "max_ram": 1168.375808,
114
  "max_global_vram": 3379.03616,
115
  "max_process_vram": 0.0,
116
  "max_reserved": 2730.491904,
 
119
  "latency": {
120
  "unit": "s",
121
  "count": 5,
122
+ "total": 0.939579376220703,
123
+ "mean": 0.1879158752441406,
124
+ "stdev": 0.2742261756637174,
125
+ "p50": 0.050735103607177735,
126
+ "p90": 0.46260796508789065,
127
+ "p95": 0.5994872711181639,
128
+ "p99": 0.7089907159423827,
129
  "values": [
130
+ 0.7363665771484375,
131
+ 0.050735103607177735,
132
+ 0.049871871948242184,
133
+ 0.051970046997070314,
134
+ 0.05063577651977539
135
  ]
136
  },
137
  "throughput": {
138
  "unit": "samples/s",
139
+ "value": 53.21530172481694
140
  },
141
  "energy": null,
142
  "efficiency": null
 
144
  "warmup": {
145
  "memory": {
146
  "unit": "MB",
147
+ "max_ram": 1168.375808,
148
  "max_global_vram": 3379.03616,
149
  "max_process_vram": 0.0,
150
  "max_reserved": 2730.491904,
 
153
  "latency": {
154
  "unit": "s",
155
  "count": 2,
156
+ "total": 0.7871016807556152,
157
+ "mean": 0.3935508403778076,
158
+ "stdev": 0.3428157367706299,
159
+ "p50": 0.3935508403778076,
160
+ "p90": 0.6678034297943115,
161
+ "p95": 0.7020850034713745,
162
+ "p99": 0.7295102624130249,
163
  "values": [
164
+ 0.7363665771484375,
165
+ 0.050735103607177735
166
  ]
167
  },
168
  "throughput": {
169
  "unit": "samples/s",
170
+ "value": 10.163871067229872
171
  },
172
  "energy": null,
173
  "efficiency": null
 
175
  "train": {
176
  "memory": {
177
  "unit": "MB",
178
+ "max_ram": 1168.375808,
179
  "max_global_vram": 3379.03616,
180
  "max_process_vram": 0.0,
181
  "max_reserved": 2730.491904,
 
184
  "latency": {
185
  "unit": "s",
186
  "count": 3,
187
+ "total": 0.15247769546508788,
188
+ "mean": 0.05082589848836263,
189
+ "stdev": 0.0008670618609556631,
190
+ "p50": 0.05063577651977539,
191
+ "p90": 0.05170319290161133,
192
+ "p95": 0.05183661994934082,
193
+ "p99": 0.05194336158752442,
194
  "values": [
195
+ 0.049871871948242184,
196
+ 0.051970046997070314,
197
+ 0.05063577651977539
198
  ]
199
  },
200
  "throughput": {
201
  "unit": "samples/s",
202
+ "value": 118.05005279687859
203
  },
204
  "energy": null,
205
  "efficiency": null