IlyasMoutawwakil (HF staff) committed · verified
Commit 30edce0 · 1 Parent(s): c8e65d2

Upload cuda_inference_transformers_text-classification_FacebookAI/roberta-base/benchmark_report.json with huggingface_hub
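
The report was pushed with huggingface_hub, so it can be fetched back programmatically. A minimal sketch, assuming the file lives in a Hub dataset repository (the repo_id below is a placeholder; it is not named on this page):

```python
from huggingface_hub import hf_hub_download

# Placeholder repo_id: the hosting repository is not shown on this commit page.
path = hf_hub_download(
    repo_id="<namespace>/<benchmark-results-repo>",
    repo_type="dataset",  # assumption: benchmark reports are typically stored in dataset repos
    revision="30edce0",   # the commit above
    filename="cuda_inference_transformers_text-classification_FacebookAI/roberta-base/benchmark_report.json",
)
print(path)  # local cache path of the downloaded JSON
```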

cuda_inference_transformers_text-classification_FacebookAI/roberta-base/benchmark_report.json CHANGED
@@ -2,7 +2,7 @@
 "forward": {
 "memory": {
 "unit": "MB",
- "max_ram": 903.348224,
 "max_global_vram": 1195.900928,
 "max_process_vram": 0.0,
 "max_reserved": 555.74528,
@@ -10,181 +10,175 @@
 },
 "latency": {
 "unit": "s",
- "count": 150,
- "total": 1.00095231628418,
- "mean": 0.0066730154418945305,
- "stdev": 0.0001377102196758533,
- "p50": 0.006644223928451538,
- "p90": 0.006847279930114746,
- "p95": 0.00689879047870636,
- "p99": 0.007091445865631102,
 "values": [
- 0.007425024032592774,
- 0.006897664070129395,
- 0.0069253120422363285,
- 0.006946847915649414,
- 0.006871039867401123,
- 0.006896639823913574,
- 0.006873087882995605,
- 0.006863840103149414,
- 0.006776832103729248,
- 0.006738944053649902,
- 0.006779903888702392,
 0.006688767910003662,
- 0.006657023906707763,
- 0.006667263984680176,
- 0.006670271873474121,
- 0.006845439910888672,
- 0.00682700777053833,
- 0.006796288013458252,
- 0.00674508810043335,
- 0.006972383975982666,
- 0.006763519763946534,
- 0.0067717118263244626,
- 0.0067758078575134275,
- 0.0067686400413513184,
- 0.006754303932189941,
- 0.006750207901000976,
- 0.006745120048522949,
- 0.006899712085723877,
- 0.006709248065948486,
- 0.006568064212799072,
- 0.0065616960525512694,
- 0.006748159885406494,
- 0.006677504062652588,
- 0.00672156810760498,
 0.006729728221893311,
- 0.006809535980224609,
- 0.006870016098022461,
- 0.006808576107025147,
- 0.006807519912719727,
- 0.006737919807434082,
- 0.006672383785247803,
- 0.006758399963378906,
- 0.006761472225189209,
 0.006715392112731934,
- 0.00672870397567749,
- 0.006700032234191895,
- 0.006612991809844971,
- 0.006599679946899414,
- 0.0067153282165527345,
- 0.006755328178405762,
- 0.0066457920074462894,
- 0.006669312000274658,
- 0.0066969280242919925,
 0.006665215969085693,
- 0.006669312000274658,
- 0.006716415882110595,
- 0.006707200050354004,
- 0.006755328178405762,
- 0.006718463897705078,
- 0.006730751991271973,
- 0.006724607944488525,
- 0.0066837120056152345,
- 0.006773759841918945,
- 0.006643712043762207,
- 0.006661119937896728,
- 0.006604800224304199,
- 0.006576128005981445,
- 0.00648086404800415,
- 0.006586368083953857,
- 0.006723584175109864,
- 0.006529983997344971,
- 0.0062975997924804685,
- 0.0065391998291015625,
- 0.0068249602317810056,
- 0.006681600093841553,
- 0.006699007987976074,
- 0.006927360057830811,
- 0.0068280320167541505,
- 0.006870016098022461,
- 0.007153664112091064,
- 0.006746111869812011,
- 0.0065812478065490725,
- 0.0066447358131408694,
- 0.006599679946899414,
- 0.0065474557876586915,
- 0.006623231887817383,
- 0.006624256134033203,
- 0.006646783828735352,
- 0.00662937593460083,
- 0.006595583915710449,
- 0.0065669121742248536,
- 0.006574079990386963,
- 0.006567935943603515,
- 0.006642687797546387,
- 0.006571072101593018,
- 0.006574079990386963,
- 0.0066109437942504885,
- 0.00658739185333252,
- 0.006565887928009034,
- 0.006574079990386963,
- 0.006619135856628418,
 0.006651904106140137,
- 0.006608895778656006,
- 0.006663167953491211,
- 0.006560768127441406,
- 0.006559743881225586,
- 0.006586368083953857,
- 0.006615039825439453,
- 0.006558720111846924,
- 0.00659660816192627,
- 0.006552576065063476,
- 0.006585343837738037,
- 0.00657203197479248,
- 0.0065474557876586915,
- 0.006585343837738037,
- 0.006612991809844971,
- 0.006567935943603515,
- 0.0065771517753601075,
- 0.006638591766357422,
- 0.00657203197479248,
- 0.006549503803253174,
- 0.00658739185333252,
- 0.006552576065063476,
- 0.00658022403717041,
- 0.0066427521705627445,
- 0.006563839912414551,
- 0.006583295822143555,
- 0.006555647850036621,
- 0.006558720111846924,
- 0.006568960189819336,
- 0.0065392317771911625,
- 0.006567935943603515,
- 0.0070266880989074704,
- 0.006602752208709717,
- 0.006585343837738037,
- 0.006497280120849609,
- 0.006650879859924317,
- 0.006526976108551025,
- 0.006486015796661377,
- 0.0065177597999572755,
- 0.0065064959526062015,
- 0.006506303787231446,
- 0.006583295822143555,
- 0.006595583915710449,
- 0.006568992137908935,
- 0.006552576065063476,
- 0.006642687797546387,
- 0.006565887928009034,
- 0.00657696008682251,
- 0.006556672096252441
 ]
 },
 "throughput": {
 "unit": "samples/s",
- "value": 149.85728846389284
 },
 "energy": {
 "unit": "kWh",
- "cpu": 7.65692055439009e-08,
- "ram": 4.1832954362359576e-08,
- "gpu": 1.5566499908387099e-07,
- "total": 2.7406715899013147e-07
 },
 "efficiency": {
 "unit": "samples/kWh",
- "value": 3648740.708973481
 }
 }
 }
 
 "forward": {
 "memory": {
 "unit": "MB",
+ "max_ram": 902.69696,
 "max_global_vram": 1195.900928,
 "max_process_vram": 0.0,
 "max_reserved": 555.74528,
 },
 "latency": {
 "unit": "s",
+ "count": 144,
+ "total": 0.9981359982490539,
+ "mean": 0.006931499987840652,
+ "stdev": 0.0002388507755670479,
+ "p50": 0.006790143966674805,
+ "p90": 0.007226572751998901,
+ "p95": 0.007278131270408631,
+ "p99": 0.007468676977157591,
 "values": [
+ 0.007582719802856445,
+ 0.007303167819976806,
+ 0.007307295799255371,
+ 0.007275519847869873,
+ 0.0073175039291381834,
+ 0.007310336112976074,
+ 0.007301119804382325,
+ 0.007244800090789795,
+ 0.0071792640686035155,
+ 0.007278592109680176,
+ 0.007222271919250488,
+ 0.007111680030822754,
+ 0.007080959796905518,
+ 0.007097343921661377,
+ 0.0072427520751953125,
+ 0.007228415966033935,
+ 0.007228415966033935,
+ 0.007143424034118652,
+ 0.007143424034118652,
+ 0.007209983825683594,
+ 0.007221248149871826,
+ 0.007158783912658692,
+ 0.007197663784027099,
+ 0.007108607769012451,
+ 0.007144447803497315,
+ 0.0071823358535766605,
+ 0.007202816009521484,
+ 0.0070830078125,
+ 0.006818816184997558,
+ 0.007102496147155761,
+ 0.007071743965148926,
+ 0.00714137601852417,
+ 0.0071485438346862796,
+ 0.007163904190063477,
+ 0.007268352031707764,
+ 0.007215104103088379,
+ 0.007152607917785644,
+ 0.007102464199066162,
+ 0.007102464199066162,
+ 0.007209983825683594,
+ 0.007171072006225586,
+ 0.007158783912658692,
+ 0.007739391803741455,
+ 0.007107583999633789,
+ 0.00710041618347168,
+ 0.00723967981338501,
+ 0.00708403205871582,
+ 0.007077888011932373,
+ 0.006616064071655273,
+ 0.006694911956787109,
 0.006688767910003662,
+ 0.006812672138214112,
+ 0.0071649918556213376,
+ 0.007211040019989014,
+ 0.007188479900360107,
+ 0.007181312084197998,
+ 0.00709119987487793,
+ 0.0071526398658752445,
+ 0.007107583999633789,
+ 0.007189536094665527,
+ 0.0070368962287902835,
+ 0.0069918718338012695,
+ 0.006945792198181152,
+ 0.0070860800743103025,
+ 0.0070522880554199216,
+ 0.0068351998329162595,
+ 0.006788095951080322,
+ 0.006792191982269287,
+ 0.006778880119323731,
+ 0.006860799789428711,
+ 0.007142399787902832,
+ 0.006993919849395752,
+ 0.0070225920677185055,
+ 0.0071526398658752445,
+ 0.007197696208953858,
+ 0.007023615837097168,
+ 0.006937600135803222,
+ 0.006739967823028564,
+ 0.006783999919891357,
+ 0.006762495994567871,
+ 0.006765567779541016,
+ 0.006780928134918213,
+ 0.006782976150512696,
+ 0.0067420158386230465,
+ 0.006773759841918945,
+ 0.006781951904296875,
+ 0.006719488143920899,
+ 0.006718495845794678,
+ 0.006713344097137451,
+ 0.006722591876983643,
+ 0.0067041277885437015,
+ 0.006717440128326416,
+ 0.006725632190704346,
+ 0.006719488143920899,
+ 0.006730751991271973,
+ 0.006702079772949219,
+ 0.006725632190704346,
+ 0.0067348480224609375,
 0.006729728221893311,
+ 0.006680575847625733,
+ 0.0067420158386230465,
+ 0.006716415882110595,
+ 0.006685696125030518,
+ 0.0066938881874084475,
+ 0.006713344097137451,
+ 0.006710271835327148,
+ 0.006696959972381592,
 0.006715392112731934,
+ 0.006691840171813965,
+ 0.0066979842185974124,
 0.006665215969085693,
+ 0.006692863941192627,
+ 0.006684671878814697,
+ 0.0067041277885437015,
+ 0.006709248065948486,
+ 0.006717440128326416,
+ 0.00672870397567749,
+ 0.006717440128326416,
 0.006651904106140137,
+ 0.006708223819732666,
+ 0.006702079772949219,
+ 0.006659071922302246,
+ 0.006766592025756836,
+ 0.006721536159515381,
+ 0.0067123198509216305,
+ 0.00669593620300293,
+ 0.006744063854217529,
+ 0.006706175804138184,
+ 0.006694911956787109,
+ 0.006719488143920899,
+ 0.006704160213470459,
+ 0.006677504062652588,
+ 0.0067041277885437015,
+ 0.006717440128326416,
+ 0.006683648109436035,
+ 0.0067338237762451176,
+ 0.006770688056945801,
+ 0.0066713600158691405,
+ 0.006725632190704346,
+ 0.006713344097137451,
+ 0.006699007987976074,
+ 0.006722559928894043,
+ 0.0067573761940002445,
+ 0.006709248065948486
 ]
 },
 "throughput": {
 "unit": "samples/s",
+ "value": 144.2689175148548
 },
 "energy": {
 "unit": "kWh",
+ "cpu": 7.554259986971933e-08,
+ "ram": 4.129726972205175e-08,
+ "gpu": 1.3920747159235592e-07,
+ "total": 2.5604734118412697e-07
 },
 "efficiency": {
 "unit": "samples/kWh",
+ "value": 3905527.764417936
 }
 }
 }
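
For readers comparing the old (-) and new (+) numbers: the aggregate latency, throughput, and efficiency fields are derived from the raw values array and the energy totals. A minimal verification sketch, assuming the JSON has been downloaded locally as benchmark_report.json (the exact stdev/percentile estimators used by the benchmark are assumptions here, so small numerical differences are expected):

```python
import json
import statistics

with open("benchmark_report.json") as f:  # assumed local filename
    report = json.load(f)

lat = report["forward"]["latency"]        # per-forward-pass latencies, in seconds
values = lat["values"]

assert lat["count"] == len(values)
print("total :", sum(values), "reported:", lat["total"])
print("mean  :", statistics.fmean(values), "reported:", lat["mean"])
# The report's stdev/percentiles may use a different estimator (population vs.
# sample stdev, interpolation choice), so expect small deviations here.
print("stdev :", statistics.pstdev(values), "reported:", lat["stdev"])

# Throughput (samples/s) matches count / total latency in this run.
print("throughput:", lat["count"] / lat["total"],
      "reported:", report["forward"]["throughput"]["value"])

# Efficiency (samples/kWh) is approximately the reciprocal of the total energy
# (in kWh) attributed to one forward pass.
print("efficiency:", 1.0 / report["forward"]["energy"]["total"],
      "reported:", report["forward"]["efficiency"]["value"])
```

With the new numbers in this commit, count / total gives 144 / 0.9981359982490539 ≈ 144.27 samples/s, matching the reported throughput of 144.2689175148548.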