IlyasMoutawwakil (HF staff) committed
Commit c89221a · verified · 1 parent: bb2d4cb

Upload cuda_inference_transformers_text-classification_FacebookAI/roberta-base/benchmark_report.json with huggingface_hub
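The commit message indicates the report was pushed programmatically with the huggingface_hub client rather than through the web UI. Below is a minimal sketch of one way such an upload can be done with huggingface_hub; the repo_id and repo_type values are placeholders/assumptions, since the destination repository is not named in this commit view.

    from huggingface_hub import HfApi

    api = HfApi()  # authentication comes from HF_TOKEN or the locally cached token

    api.upload_file(
        path_or_fileobj="benchmark_report.json",
        path_in_repo=(
            "cuda_inference_transformers_text-classification_FacebookAI/"
            "roberta-base/benchmark_report.json"
        ),
        repo_id="<namespace>/<benchmark-results-repo>",  # placeholder: actual repo not shown here
        repo_type="dataset",                             # assumption: benchmark dumps usually live in a dataset repo
        commit_message=(
            "Upload cuda_inference_transformers_text-classification_FacebookAI/"
            "roberta-base/benchmark_report.json with huggingface_hub"
        ),
    )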

cuda_inference_transformers_text-classification_FacebookAI/roberta-base/benchmark_report.json CHANGED
@@ -2,7 +2,7 @@
     "forward": {
         "memory": {
             "unit": "MB",
-            "max_ram": 902.36928,
+            "max_ram": 902.418432,
             "max_global_vram": 1195.900928,
             "max_process_vram": 0.0,
             "max_reserved": 555.74528,
@@ -10,190 +10,188 @@
         },
         "latency": {
             "unit": "s",
-            "count": 159,
-            "total": 0.9993454718589789,
-            "mean": 0.006285191646911813,
-            "stdev": 0.0002933341231399013,
-            "p50": 0.00617574405670166,
-            "p90": 0.006598860931396485,
-            "p95": 0.006929920053482056,
-            "p99": 0.0073845759010314925,
+            "count": 157,
+            "total": 1.0011073265075685,
+            "mean": 0.006376479786672411,
+            "stdev": 0.0001767695489235051,
+            "p50": 0.006434815883636475,
+            "p90": 0.006492159843444824,
+            "p95": 0.0065445887565612785,
+            "p99": 0.006815375518798828,
             "values": [
-                0.007458816051483155,
-                0.007613440036773681,
-                0.00733081579208374,
-                0.007011328220367432,
-                0.006952960014343262,
-                0.006881279945373535,
-                0.007006207942962647,
-                0.007001088142395019,
-                0.0068321280479431154,
-                0.006756351947784424,
-                0.0068853759765625,
-                0.0068884482383728025,
-                0.007174047946929931,
-                0.006299776077270508,
-                0.006149119853973388,
-                0.006141952037811279,
-                0.006124415874481201,
-                0.006100992202758789,
-                0.006176767826080322,
-                0.006119423866271972,
-                0.006278143882751465,
-                0.00657203197479248,
-                0.0063415999412536625,
-                0.00638976001739502,
-                0.00657203197479248,
-                0.006594560146331787,
-                0.0065136637687683106,
-                0.006339583873748779,
-                0.006384640216827392,
-                0.006616064071655273,
-                0.006338560104370118,
-                0.006370304107666015,
-                0.006486015796661377,
-                0.0063211522102355954,
-                0.006106112003326416,
-                0.00603545618057251,
-                0.006037504196166992,
-                0.0060109119415283204,
-                0.006057983875274659,
-                0.006038527965545654,
-                0.006050816059112549,
-                0.0060364799499511715,
-                0.006039552211761475,
-                0.00601910400390625,
-                0.006053887844085694,
-                0.006020095825195313,
-                0.006023168087005615,
-                0.006008831977844238,
-                0.0062197761535644535,
-                0.006030335903167725,
-                0.0060405759811401364,
-                0.006023168087005615,
-                0.006025216102600098,
-                0.0060414719581604005,
-                0.00616755199432373,
-                0.0060404157638549805,
-                0.0063201279640197755,
-                0.006329343795776367,
-                0.006308864116668702,
-                0.006366208076477051,
-                0.006278143882751465,
-                0.006325247764587402,
-                0.006419456005096436,
-                0.00639788818359375,
-                0.006409215927124024,
-                0.00630185604095459,
-                0.006437888145446777,
-                0.006408192157745361,
-                0.00637337589263916,
-                0.0063498239517211915,
-                0.006345727920532227,
-                0.006691840171813965,
-                0.006450175762176514,
-                0.006292479991912842,
-                0.006181888103485108,
-                0.006108160018920898,
-                0.006172671794891358,
-                0.006111231803894043,
-                0.006172671794891358,
-                0.0062259202003479,
-                0.006160319805145264,
-                0.006157311916351318,
-                0.006111231803894043,
-                0.006177663803100586,
-                0.006120448112487793,
-                0.006146048069000244,
-                0.00617574405670166,
-                0.0065484800338745115,
+                0.007571455955505371,
+                0.006818816184997558,
+                0.006623231887817383,
+                0.006500351905822754,
+                0.006482944011688232,
+                0.0064849920272827145,
+                0.006456319808959961,
+                0.006470655918121338,
+                0.006482944011688232,
+                0.006455296039581298,
+                0.006487040042877197,
+                0.006435840129852295,
+                0.006445055961608887,
+                0.006481919765472412,
+                0.0064471039772033695,
+                0.006462463855743408,
+                0.0064778242111206055,
+                0.006432767868041992,
+                0.006463488101959228,
+                0.0064553279876708985,
+                0.0064767999649047855,
+                0.006451200008392334,
+                0.006462463855743408,
+                0.00648089599609375,
+                0.006456319808959961,
+                0.00643891191482544,
+                0.0064767999649047855,
+                0.006458367824554443,
+                0.006478847980499268,
+                0.0064737281799316405,
                 0.006446080207824707,
+                0.006474783897399902,
+                0.006460415840148926,
+                0.00648089599609375,
+                0.00648089599609375,
+                0.006462463855743408,
+                0.0064778242111206055,
+                0.0065136318206787105,
                 0.006503424167633057,
-                0.006407167911529541,
-                0.006344704151153564,
-                0.006334368228912353,
-                0.00637440013885498,
-                0.006294432163238525,
-                0.0063712639808654785,
-                0.00658131217956543,
-                0.0064061121940612795,
                 0.006471680164337158,
-                0.00630079984664917,
-                0.0063170561790466305,
-                0.006348800182342529,
-                0.006304768085479737,
-                0.006338560104370118,
-                0.006303840160369873,
-                0.006317887783050537,
-                0.006310751914978027,
-                0.006303743839263916,
-                0.006306816101074219,
-                0.006257631778717041,
-                0.006334464073181153,
-                0.006284287929534912,
-                0.006303743839263916,
-                0.006059135913848877,
-                0.0060405759811401364,
-                0.006033440113067627,
-                0.0060405759811401364,
-                0.006025311946868897,
-                0.006927360057830811,
-                0.006387519836425781,
+                0.006462463855743408,
+                0.006451200008392334,
+                0.006466559886932373,
+                0.006458367824554443,
+                0.006479872226715088,
+                0.006472703933715821,
+                0.006452223777770996,
+                0.006479872226715088,
+                0.006463488101959228,
+                0.0064204797744750975,
+                0.006450175762176514,
+                0.006446080207824707,
+                0.006437888145446777,
+                0.00643891191482544,
+                0.00641539192199707,
+                0.006487040042877197,
+                0.00643071985244751,
+                0.006421504020690918,
+                0.006561791896820069,
+                0.006469632148742676,
+                0.006492159843444824,
+                0.006456319808959961,
+                0.006495232105255127,
+                0.006461440086364746,
+                0.006812672138214112,
+                0.006434815883636475,
+                0.006574079990386963,
+                0.006442944049835205,
+                0.006492159843444824,
+                0.006418432235717773,
+                0.006415359973907471,
+                0.006440959930419922,
+                0.006354944229125976,
+                0.006409215927124024,
+                0.006540287971496582,
+                0.006449151992797852,
+                0.006618112087249756,
+                0.006471680164337158,
+                0.006432767868041992,
+                0.006426623821258545,
+                0.006433792114257812,
+                0.006451136112213135,
+                0.006474783897399902,
+                0.006428671836853027,
+                0.006500351905822754,
+                0.006725632190704346,
+                0.006371327877044678,
+                0.006450175762176514,
+                0.006418432235717773,
+                0.0064767999649047855,
+                0.006459392070770263,
+                0.0064133119583129885,
+                0.0064215359687805175,
+                0.006384640216827392,
+                0.0062975997924804685,
+                0.00643891191482544,
+                0.006346752166748047,
+                0.0064471039772033695,
+                0.006415359973907471,
+                0.006401055812835693,
+                0.006418432235717773,
+                0.006203392028808594,
+                0.006206399917602539,
+                0.006198272228240966,
+                0.006187007904052734,
+                0.006510560035705566,
+                0.006307839870452881,
+                0.006218751907348633,
+                0.00619212818145752,
+                0.006325247764587402,
+                0.0061931519508361815,
+                0.006278143882751465,
+                0.006296576023101807,
+                0.006207488059997559,
+                0.006218751907348633,
+                0.006191103935241699,
+                0.00620851182937622,
+                0.006209536075592041,
+                0.006176767826080322,
+                0.006214655876159668,
+                0.0062044157981872555,
+                0.006210559844970703,
+                0.0061521921157836916,
+                0.006198272228240966,
+                0.0061224961280822755,
+                0.00613478422164917,
+                0.00612556791305542,
+                0.0061521921157836916,
+                0.006139904022216797,
+                0.006115327835083008,
                 0.006138879776000977,
-                0.006085631847381592,
-                0.006085631847381592,
-                0.006079487800598145,
-                0.006085631847381592,
-                0.006107135772705078,
-                0.00614518404006958,
-                0.006057983875274659,
-                0.006098944187164307,
-                0.00606108808517456,
-                0.006103040218353272,
-                0.0060661759376525876,
+                0.006130688190460205,
+                0.00613478422164917,
+                0.006116352081298828,
+                0.006137792110443115,
+                0.006196224212646485,
+                0.006220799922943115,
+                0.006198272228240966,
+                0.0062259202003479,
+                0.00622489595413208,
+                0.006182911872863769,
+                0.006202367782592774,
+                0.006194176197052002,
+                0.006213632106781006,
+                0.006228991985321045,
+                0.006189055919647217,
+                0.00618393611907959,
+                0.006210559844970703,
+                0.006202367782592774,
+                0.006184959888458252,
+                0.0061296639442443845,
+                0.006137856006622314,
+                0.006137856006622314,
+                0.006124544143676758,
                 0.006145023822784424,
-                0.006088704109191895,
-                0.006128608226776123,
-                0.0059985918998718265,
-                0.006052864074707031,
-                0.006034431934356689,
-                0.006049791812896729,
-                0.006079487800598145,
-                0.006013951778411865,
-                0.006049791812896729,
-                0.006100992202758789,
-                0.006123519897460937,
-                0.006114304065704346,
-                0.006114304065704346,
-                0.006090752124786377,
-                0.006104063987731933,
-                0.006061056137084961,
-                0.006103040218353272,
-                0.006067200183868408,
-                0.006120319843292236,
-                0.006086688041687011,
-                0.006104063987731933,
-                0.006061056137084961,
-                0.006111231803894043,
-                0.006076416015625,
-                0.006106112003326416,
-                0.006136832237243653
+                0.00612556791305542,
+                0.006146048069000244
             ]
         },
         "throughput": {
             "unit": "samples/s",
-            "value": 159.10413813576287
+            "value": 156.8263420343803
         },
         "energy": {
             "unit": "kWh",
-            "cpu": 7.284353318958763e-08,
-            "ram": 3.982356626236843e-08,
-            "gpu": 1.3450419758282108e-07,
-            "total": 2.471712970347771e-07
+            "cpu": 7.327386422085338e-08,
+            "ram": 4.0061591616665294e-08,
+            "gpu": 1.3564311262962752e-07,
+            "total": 2.489785684671462e-07
         },
         "efficiency": {
             "unit": "samples/kWh",
-            "value": 4045777.207938912
+            "value": 4016409.9510916513
         }
     }
 }
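For reference, the summary fields in the new report are mutually consistent: throughput (samples/s) matches count divided by total latency (157 / 1.0011073265075685 ≈ 156.83), and efficiency (samples/kWh) matches the reciprocal of the reported total energy (1 / 2.489785684671462e-07 ≈ 4.016e6), which suggests the energy figures are per sample. The sketch below re-derives those aggregates from a downloaded report; it assumes only the field layout visible in this diff, and the use of the population standard deviation is an assumption, not something stated in the file.

    import json
    import statistics

    # Load the report uploaded in this commit (same filename as in the commit message).
    with open("benchmark_report.json") as f:
        report = json.load(f)

    fwd = report["forward"]
    values = fwd["latency"]["values"]

    # Re-derive the latency aggregates from the raw per-iteration latencies.
    count = len(values)                # 157 in the new report
    total = sum(values)                # ~1.0011 s
    mean = total / count               # ~6.38 ms
    stdev = statistics.pstdev(values)  # assumption: population std-dev; the report does not say which convention it uses

    # Throughput is consistent with samples per second over the measured window.
    throughput = count / total         # ~156.83 samples/s

    # Efficiency is consistent with the reciprocal of the total energy (kWh) per sample.
    efficiency = 1 / fwd["energy"]["total"]  # ~4.016e6 samples/kWh

    print(count, round(mean, 6), round(throughput, 2), round(efficiency))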