IlyasMoutawwakil (HF staff) committed
Commit 7b1b580 · verified · 1 parent: 0a94b40

Upload cuda_inference_transformers_text-classification_FacebookAI/roberta-base/benchmark_report.json with huggingface_hub

cuda_inference_transformers_text-classification_FacebookAI/roberta-base/benchmark_report.json CHANGED
@@ -2,7 +2,7 @@
  "forward": {
  "memory": {
  "unit": "MB",
- "max_ram": 889.749504,
+ "max_ram": 889.925632,
  "max_global_vram": 1195.900928,
  "max_process_vram": 0.0,
  "max_reserved": 555.74528,
@@ -11,186 +11,186 @@
  "latency": {
  "unit": "s",
  "count": 156,
- "total": 0.9976697611808775,
- "mean": 0.006395318981928702,
- "stdev": 0.00022351439414524803,
- "p50": 0.00634009599685669,
- "p90": 0.006650880098342896,
- "p95": 0.0067164161205291745,
- "p99": 0.007249152135848994,
+ "total": 0.9961009263992305,
+ "mean": 0.006385262348713019,
+ "stdev": 0.00025829204945134317,
+ "p50": 0.00636620807647705,
+ "p90": 0.006680576086044311,
+ "p95": 0.006791936159133911,
+ "p99": 0.006908979249000549,
  "values": [
- 0.006970367908477783,
- 0.006602752208709717,
- 0.006673408031463623,
- 0.006624256134033203,
- 0.006520832061767578,
- 0.006567935943603515,
- 0.006569983959197998,
- 0.006452223777770996,
- 0.006409215927124024,
- 0.0064133119583129885,
- 0.006395904064178467,
- 0.006416384220123291,
- 0.006401023864746094,
- 0.006435840129852295,
- 0.0063272957801818845,
- 0.006322175979614258,
- 0.006341631889343262,
- 0.0063805441856384275,
- 0.006341631889343262,
- 0.006519904136657715,
- 0.006676479816436768,
- 0.006681600093841553,
- 0.006656000137329102,
- 0.006461440086364746,
+ 0.007081984043121338,
+ 0.0067123198509216305,
+ 0.0069253120422363285,
+ 0.0067358717918396,
+ 0.006791168212890625,
+ 0.006594560146331787,
+ 0.0068055038452148435,
+ 0.006895616054534912,
+ 0.006779903888702392,
+ 0.006809535980224609,
+ 0.006662144184112549,
+ 0.006684671878814697,
+ 0.006677504062652588,
+ 0.006591487884521485,
+ 0.006553599834442139,
+ 0.006545407772064209,
+ 0.0065781760215759275,
+ 0.00659660816192627,
+ 0.006652927875518798,
+ 0.006666240215301514,
+ 0.006650879859924317,
+ 0.006662144184112549,
+ 0.006627327919006347,
+ 0.006360064029693604,
+ 0.006448128223419189,
+ 0.0063569917678833006,
+ 0.0064245758056640625,
  0.006368256092071533,
- 0.0062873601913452145,
- 0.0064133119583129885,
- 0.006440959930419922,
- 0.006421472072601318,
- 0.00643891191482544,
- 0.006388735771179199,
- 0.006375423908233643,
  0.006364160060882569,
- 0.006402112007141113,
- 0.006604800224304199,
- 0.0064174079895019534,
- 0.006515615940093994,
- 0.006533152103424073,
- 0.006451168060302734,
- 0.006523903846740723,
- 0.006573056221008301,
- 0.0065443840026855465,
- 0.0067338237762451176,
- 0.006631423950195312,
- 0.006540287971496582,
- 0.006626304149627686,
- 0.006719488143920899,
- 0.0066344962120056155,
- 0.006663167953491211,
- 0.006673408031463623,
- 0.00658022403717041,
- 0.006624256134033203,
- 0.006358016014099121,
- 0.006415359973907471,
- 0.006558720111846924,
- 0.0063539199829101565,
- 0.006536191940307618,
- 0.006316031932830811,
- 0.006218751907348633,
- 0.006338560104370118,
- 0.006288383960723877,
- 0.006309887886047363,
- 0.006427648067474365,
- 0.0063569917678833006,
- 0.006375487804412842,
- 0.006715392112731934,
- 0.006729728221893311,
- 0.006552576065063476,
- 0.006493184089660644,
- 0.007090176105499267,
- 0.007443456172943115,
- 0.007606272220611572,
+ 0.006799295902252197,
+ 0.006586368083953857,
+ 0.00664572811126709,
+ 0.006660096168518067,
+ 0.006687744140625,
  0.006639616012573242,
- 0.006617087841033936,
- 0.006659071922302246,
- 0.006645760059356689,
- 0.0063805441856384275,
- 0.006446080207824707,
- 0.006364160060882569,
- 0.006394879817962646,
- 0.006487040042877197,
- 0.006590464115142822,
- 0.006378528118133545,
- 0.006288383960723877,
- 0.006127615928649902,
- 0.006141952037811279,
- 0.006293504238128662,
- 0.006289408206939697,
- 0.006213632106781006,
- 0.006041600227355957,
- 0.0059985918998718265,
- 0.006102015972137451,
- 0.006160384178161621,
- 0.006273087978363037,
- 0.006207488059997559,
- 0.006312960147857666,
- 0.006338560104370118,
- 0.006223872184753418,
- 0.006115359783172607,
- 0.006060031890869141,
- 0.006086656093597412,
- 0.006038527965545654,
- 0.006807551860809326,
+ 0.0065413122177124024,
+ 0.006463488101959228,
+ 0.006496255874633789,
+ 0.006546432018280029,
+ 0.006481919765472412,
+ 0.006535168170928955,
+ 0.006564864158630371,
+ 0.006515711784362793,
+ 0.006622208118438721,
+ 0.006633471965789795,
+ 0.006656000137329102,
+ 0.006700032234191895,
+ 0.006683648109436035,
+ 0.006630335807800293,
+ 0.006602752208709717,
+ 0.006660096168518067,
+ 0.00679423999786377,
+ 0.00662937593460083,
+ 0.006619135856628418,
+ 0.006558720111846924,
+ 0.006520832061767578,
+ 0.006491136074066162,
+ 0.0065474557876586915,
+ 0.006557695865631104,
+ 0.00653926420211792,
+ 0.006635519981384277,
+ 0.00653926420211792,
+ 0.006497280120849609,
+ 0.006551551818847656,
+ 0.0065710082054138185,
+ 0.00658739185333252,
+ 0.0068689918518066405,
+ 0.006614016056060791,
+ 0.006527999877929688,
+ 0.006356959819793701,
+ 0.0064133119583129885,
  0.006371327877044678,
- 0.006310912132263183,
- 0.006325247764587402,
- 0.006329343795776367,
- 0.00623308801651001,
- 0.006268928050994873,
- 0.006325247764587402,
- 0.006223872184753418,
- 0.006200319766998291,
- 0.006242303848266601,
- 0.006161407947540283,
- 0.006258687973022461,
- 0.006195199966430664,
- 0.006173664093017578,
- 0.006255616188049316,
- 0.006160384178161621,
+ 0.006396927833557129,
+ 0.006397952079772949,
+ 0.006558720111846924,
+ 0.006609856128692627,
+ 0.006641727924346924,
+ 0.0065146880149841305,
+ 0.006606880187988281,
+ 0.006535168170928955,
+ 0.006295551776885986,
+ 0.006172671794891358,
+ 0.006043647766113281,
+ 0.006220799922943115,
  0.006280191898345947,
- 0.006205440044403076,
- 0.006292479991912842,
- 0.006337535858154297,
- 0.006310912132263183,
- 0.006306816101074219,
- 0.006244351863861084,
+ 0.006095871925354004,
+ 0.006001664161682129,
+ 0.006007808208465576,
+ 0.006046720027923584,
+ 0.006097919940948486,
+ 0.006146048069000244,
+ 0.006148096084594727,
+ 0.006137856006622314,
+ 0.006218751907348633,
+ 0.006295551776885986,
  0.006294528007507324,
- 0.006310912132263183,
- 0.006248447895050049,
+ 0.006146048069000244,
+ 0.006043647766113281,
+ 0.0062566399574279785,
+ 0.0062269439697265625,
+ 0.006307839870452881,
  0.00628223991394043,
- 0.006367231845855713,
- 0.0062791681289672855,
- 0.0062780799865722655,
- 0.006194176197052002,
- 0.006195136070251465,
- 0.00632316780090332,
- 0.006253503799438477,
- 0.006318079948425293,
- 0.006211584091186524,
+ 0.006259712219238281,
+ 0.0062740478515625,
+ 0.006294528007507324,
+ 0.006296576023101807,
+ 0.006262784004211426,
+ 0.006292479991912842,
+ 0.0062638077735900875,
+ 0.006295551776885986,
+ 0.006319104194641113,
+ 0.006269951820373535,
+ 0.006275072097778321,
+ 0.006212607860565185,
  0.00619212818145752,
- 0.006221824169158936,
- 0.006116352081298828,
- 0.006251455783843994,
- 0.006170623779296875,
- 0.00623308801651001,
- 0.006310912132263183,
- 0.006195168018341065,
- 0.006268928050994873,
- 0.006299647808074951,
- 0.006268928050994873,
- 0.00632422399520874,
- 0.006244351863861084,
- 0.006280191898345947,
- 0.006318079948425293,
- 0.006276095867156982,
- 0.00626585578918457
+ 0.006178815841674804,
+ 0.00620851182937622,
+ 0.006209536075592041,
+ 0.006210559844970703,
+ 0.006209536075592041,
+ 0.006172671794891358,
+ 0.006262784004211426,
+ 0.0063190717697143555,
+ 0.006069248199462891,
+ 0.006045695781707764,
+ 0.005897215843200684,
+ 0.005884928226470947,
+ 0.005897215843200684,
+ 0.005899263858795166,
+ 0.005964799880981446,
+ 0.0059688959121704105,
+ 0.005986303806304932,
+ 0.005994495868682862,
+ 0.006029344081878662,
+ 0.00601804780960083,
+ 0.00601804780960083,
+ 0.005991424083709717,
+ 0.005974016189575195,
+ 0.005956607818603516,
+ 0.005971968173980713,
+ 0.005950463771820068,
+ 0.005984255790710449,
+ 0.006203392028808594,
+ 0.0062156801223754886,
+ 0.006181888103485108,
+ 0.006408192157745361,
+ 0.0062341117858886715,
+ 0.006170656204223633,
+ 0.006259712219238281,
+ 0.006237184047698975,
+ 0.006260735988616943,
+ 0.006223872184753418,
+ 0.00643174409866333,
+ 0.00628223991394043,
+ 0.0061972479820251464,
+ 0.006235136032104492
  ]
  },
  "throughput": {
  "unit": "samples/s",
- "value": 156.36436631631778
+ "value": 156.61063639797902
  },
  "energy": {
  "unit": "kWh",
- "cpu": 7.437935512926845e-08,
- "ram": 4.0496862412567234e-08,
- "gpu": 1.308889935999996e-07,
- "total": 2.457652111418353e-07
+ "cpu": 7.317123323965139e-08,
+ "ram": 3.996290985620968e-08,
+ "gpu": 1.3100662058029227e-07,
+ "total": 2.4414076367615334e-07
  },
  "efficiency": {
  "unit": "samples/kWh",
- "value": 4068924.1384244696
+ "value": 4095997.673401543
  }
  }
  }
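For anyone sanity-checking this report: the derived fields are arithmetically consistent with throughput = count / total latency (156 / 0.9961 s ≈ 156.61 samples/s) and efficiency = 1 / total energy (1 / 2.4414e-07 kWh ≈ 4.096e6 samples/kWh), which implies the reported energy is per forward pass. The Python sketch below recomputes both from a local copy of the JSON. The field names mirror this file; the relationships themselves are inferred from the numbers above rather than taken from optimum-benchmark's source, so treat it as an illustrative check, not the benchmark's actual implementation.

import json

# Path assumes a local checkout of this dataset repo.
path = "cuda_inference_transformers_text-classification_FacebookAI/roberta-base/benchmark_report.json"
with open(path) as f:
    report = json.load(f)

forward = report["forward"]
latency = forward["latency"]

# Throughput [samples/s]: number of measured forward passes divided by their summed latency.
throughput = latency["count"] / latency["total"]

# Efficiency [samples/kWh]: reciprocal of the total energy reported per forward pass.
efficiency = 1.0 / forward["energy"]["total"]

print(f"throughput ~ {throughput:.2f} samples/s")    # ~156.61 for the new report
print(f"efficiency ~ {efficiency:.1f} samples/kWh")  # ~4,095,998 for the new report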