IlyasMoutawwakil committed on
Commit 16a73e0 · verified · 1 Parent(s): 8ec9eb1

Upload cuda_training_transformers_multiple-choice_FacebookAI/roberta-base/benchmark.json with huggingface_hub
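For context, a result file like this is typically pushed with the huggingface_hub client. The snippet below is a minimal sketch of such an upload using HfApi.upload_file; the repo_id is a placeholder, since the target dataset repository is not named in this commit view.

```python
# Minimal sketch of uploading the benchmark report with huggingface_hub.
# The repo_id below is a placeholder; substitute the actual results repository.
from huggingface_hub import HfApi

api = HfApi()  # uses the token from `huggingface-cli login` or the HF_TOKEN env var

api.upload_file(
    path_or_fileobj="benchmark.json",  # local result file produced by the benchmark run
    path_in_repo="cuda_training_transformers_multiple-choice_FacebookAI/roberta-base/benchmark.json",
    repo_id="<username>/<benchmark-results-dataset>",  # placeholder, not stated in this commit
    repo_type="dataset",
    commit_message=(
        "Upload cuda_training_transformers_multiple-choice_FacebookAI/"
        "roberta-base/benchmark.json with huggingface_hub"
    ),
)
```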

cuda_training_transformers_multiple-choice_FacebookAI/roberta-base/benchmark.json CHANGED
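As a quick sanity check on the updated numbers in the diff below, the aggregated latency fields are plain summaries of the per-step "values" list. The following standalone sketch (NumPy only, not optimum-benchmark's own code) re-derives the new "overall" statistics; the percentile and standard-deviation conventions used here (linear interpolation, population std) are assumptions that happen to reproduce the reported figures.

```python
# Re-derive the aggregated latency statistics of the updated "overall" section
# from its raw per-step "values" list (values taken from the diff below).
import numpy as np

values = np.array([
    0.7153316040039063,   # first step; also listed as the first warmup step
    0.07693312072753906,
    0.07632998657226563,
    0.07624089813232422,
    0.07626649475097656,
])

summary = {
    "count": len(values),
    "total": float(np.sum(values)),           # ~1.0211021041870119 s
    "mean": float(np.mean(values)),           # ~0.20422042083740238 s
    "stdev": float(np.std(values)),           # ~0.2555557187528376 (population std)
    "p50": float(np.percentile(values, 50)),  # ~0.07632998657226563 s
    "p90": float(np.percentile(values, 90)),  # ~0.4599722106933595 s
    "p95": float(np.percentile(values, 95)),  # ~0.5876519073486327 s
    "p99": float(np.percentile(values, 99)),  # ~0.6897956646728516 s
}

for key, value in summary.items():
    print(f"{key}: {value}")
```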
@@ -107,7 +107,7 @@
 "overall": {
   "memory": {
     "unit": "MB",
-    "max_ram": 1107.050496,
+    "max_ram": 1106.57536,
     "max_global_vram": 3376.939008,
     "max_process_vram": 0.0,
     "max_reserved": 2730.491904,
@@ -116,24 +116,24 @@
   "latency": {
     "unit": "s",
     "count": 5,
-    "total": 0.9781933975219727,
-    "mean": 0.19563867950439454,
-    "stdev": 0.23772618513224528,
-    "p50": 0.0763719711303711,
-    "p90": 0.4339498901367188,
-    "p95": 0.5525192565917968,
-    "p99": 0.6473747497558594,
+    "total": 1.0211021041870119,
+    "mean": 0.20422042083740238,
+    "stdev": 0.2555557187528376,
+    "p50": 0.07632998657226563,
+    "p90": 0.4599722106933595,
+    "p95": 0.5876519073486327,
+    "p99": 0.6897956646728516,
     "values": [
-      0.671088623046875,
-      0.07824179077148438,
-      0.0763156509399414,
-      0.07617536163330078,
-      0.0763719711303711
+      0.7153316040039063,
+      0.07693312072753906,
+      0.07632998657226563,
+      0.07624089813232422,
+      0.07626649475097656
     ]
   },
   "throughput": {
     "unit": "samples/s",
-    "value": 51.11463656027884
+    "value": 48.966699603277526
   },
   "energy": null,
   "efficiency": null
@@ -141,7 +141,7 @@
 "warmup": {
   "memory": {
     "unit": "MB",
-    "max_ram": 1107.050496,
+    "max_ram": 1106.57536,
     "max_global_vram": 3376.939008,
     "max_process_vram": 0.0,
     "max_reserved": 2730.491904,
@@ -150,21 +150,21 @@
   "latency": {
     "unit": "s",
     "count": 2,
-    "total": 0.7493304138183594,
-    "mean": 0.3746652069091797,
-    "stdev": 0.2964234161376953,
-    "p50": 0.3746652069091797,
-    "p90": 0.611803939819336,
-    "p95": 0.6414462814331054,
-    "p99": 0.665160154724121,
+    "total": 0.7922647247314454,
+    "mean": 0.3961323623657227,
+    "stdev": 0.3191992416381836,
+    "p50": 0.3961323623657227,
+    "p90": 0.6514917556762696,
+    "p95": 0.6834116798400879,
+    "p99": 0.7089476191711426,
     "values": [
-      0.671088623046875,
-      0.07824179077148438
+      0.7153316040039063,
+      0.07693312072753906
     ]
   },
   "throughput": {
     "unit": "samples/s",
-    "value": 10.67619817969811
+    "value": 10.097634982690623
   },
   "energy": null,
   "efficiency": null
@@ -172,7 +172,7 @@
 "train": {
   "memory": {
     "unit": "MB",
-    "max_ram": 1107.050496,
+    "max_ram": 1106.57536,
     "max_global_vram": 3376.939008,
     "max_process_vram": 0.0,
     "max_reserved": 2730.491904,
@@ -181,22 +181,22 @@
   "latency": {
     "unit": "s",
     "count": 3,
-    "total": 0.2288629837036133,
-    "mean": 0.07628766123453777,
-    "stdev": 8.266958848733325e-05,
-    "p50": 0.0763156509399414,
-    "p90": 0.07636070709228517,
-    "p95": 0.07636633911132813,
-    "p99": 0.07637084472656251,
+    "total": 0.22883737945556643,
+    "mean": 0.07627912648518881,
+    "stdev": 3.745092846828414e-05,
+    "p50": 0.07626649475097656,
+    "p90": 0.07631728820800782,
+    "p95": 0.07632363739013671,
+    "p99": 0.07632871673583984,
     "values": [
-      0.0763156509399414,
-      0.07617536163330078,
-      0.0763719711303711
+      0.07632998657226563,
+      0.07624089813232422,
+      0.07626649475097656
     ]
   },
   "throughput": {
     "unit": "samples/s",
-    "value": 78.64967811182046
+    "value": 78.65847809839597
   },
   "energy": null,
   "efficiency": null