IlyasMoutawwakil committed (verified)
Commit 5f46616 · 1 parent: 77db665

Upload cuda_inference_transformers_multiple-choice_FacebookAI/roberta-base/benchmark_report.json with huggingface_hub

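The commit message above is the default one that huggingface_hub generates when a file is pushed programmatically. As a minimal sketch (not the uploader's actual script), a call along these lines produces exactly this kind of commit; the repo_id is a placeholder and repo_type="dataset" is an assumption, since result files like this are commonly stored in dataset repos:

from huggingface_hub import HfApi

api = HfApi()  # picks up the token from `huggingface-cli login` or the HF_TOKEN env var

api.upload_file(
    path_or_fileobj="benchmark_report.json",
    path_in_repo="cuda_inference_transformers_multiple-choice_FacebookAI/roberta-base/benchmark_report.json",
    repo_id="<namespace>/<benchmark-results-repo>",  # placeholder: the target repo is not shown on this page
    repo_type="dataset",                             # assumption: adjust to the actual repo type
)

Leaving commit_message unset yields the default "Upload <path_in_repo> with huggingface_hub" message seen in this commit.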
cuda_inference_transformers_multiple-choice_FacebookAI/roberta-base/benchmark_report.json CHANGED
@@ -2,7 +2,7 @@
     "forward": {
         "memory": {
             "unit": "MB",
-            "max_ram": 908.619776,
+            "max_ram": 909.017088,
             "max_global_vram": 1195.900928,
             "max_process_vram": 0.0,
             "max_reserved": 555.74528,
@@ -10,170 +10,161 @@
         },
         "latency": {
             "unit": "s",
-            "count": 139,
-            "total": 0.999167808532715,
-            "mean": 0.007188257615343272,
-            "stdev": 0.0003418419015973708,
-            "p50": 0.007041024208068848,
-            "p90": 0.007641907119750977,
-            "p95": 0.008080998802185059,
-            "p99": 0.008395510120391846,
+            "count": 130,
+            "total": 0.9988422412872316,
+            "mean": 0.007683401856055626,
+            "stdev": 0.0005123954613422857,
+            "p50": 0.007456255912780762,
+            "p90": 0.007938048124313354,
+            "p95": 0.008009984302520753,
+            "p99": 0.010345533714294431,
             "values": [
-                0.00820736026763916,
-                0.008043519973754883,
-                0.008146944046020508,
-                0.008501312255859376,
-                0.008173567771911621,
-                0.008077312469482421,
-                0.007948287963867188,
-                0.008114175796508789,
-                0.008378368377685547,
-                0.007952383995056152,
-                0.007854080200195313,
+                0.010926079750061036,
+                0.010400768280029296,
+                0.010210304260253907,
+                0.00822169589996338,
+                0.008083456039428711,
+                0.00801689624786377,
+                0.007993343830108643,
+                0.007816192150115966,
+                0.007896063804626464,
+                0.007868415832519531,
+                0.007811071872711181,
+                0.00784281587600708,
+                0.007868415832519531,
+                0.007947264194488525,
+                0.0079267840385437,
+                0.007897056102752686,
+                0.00787660789489746,
+                0.007916543960571289,
+                0.007868383884429932,
+                0.007841760158538819,
+                0.007869440078735352,
+                0.007869440078735352,
+                0.007875584125518798,
+                0.007887872219085693,
+                0.007799808025360107,
+                0.007809023857116699,
+                0.007789567947387695,
+                0.007792640209197998,
+                0.007816192150115966,
+                0.007831552028656007,
+                0.007885824203491211,
+                0.007921664237976075,
+                0.007885824203491211,
+                0.007814144134521485,
+                0.007824384212493896,
+                0.007937024116516114,
+                0.0078438401222229,
                 0.007856128215789794,
-                0.007877632141113282,
-                0.00840601634979248,
-                0.007550975799560547,
-                0.007588863849639893,
-                0.007315455913543701,
+                0.007824384212493896,
+                0.007827455997467042,
+                0.007881728172302246,
+                0.007959551811218261,
+                0.007857151985168457,
+                0.007867392063140868,
+                0.00890880012512207,
+                0.007819263935089112,
+                0.0078438401222229,
+                0.007955455780029297,
+                0.007954432010650634,
+                0.007903232097625732,
+                0.007874559879302979,
+                0.00786947202682495,
+                0.007435264110565186,
+                0.007415808200836181,
+                0.007371776103973389,
+                0.007334911823272705,
+                0.007277567863464355,
                 0.007321599960327148,
-                0.0073134078979492185,
-                0.007316480159759522,
-                0.007245823860168457,
-                0.007277535915374756,
-                0.0072765440940856935,
-                0.007278592109680176,
-                0.00724889612197876,
-                0.007222271919250488,
-                0.007300096035003662,
-                0.007207935810089112,
-                0.007266304016113281,
-                0.007241727828979493,
-                0.007201791763305664,
-                0.0071823358535766605,
-                0.007189504146575928,
-                0.007262207984924316,
-                0.00722431993484497,
-                0.007238656044006348,
-                0.007221248149871826,
-                0.0072427520751953125,
-                0.007202816009521484,
-                0.007269375801086426,
-                0.007058432102203369,
-                0.006980607986450196,
-                0.007008255958557129,
-                0.007049215793609619,
-                0.007234560012817383,
-                0.007267327785491944,
-                0.0071823358535766605,
-                0.007211008071899414,
+                0.0074403839111328125,
+                0.007177216053009033,
+                0.007087103843688965,
+                0.007106560230255127,
                 0.007226367950439453,
-                0.0072325119972229,
-                0.0072540159225463864,
-                0.0072130560874938965,
-                0.007230463981628418,
-                0.007027711868286133,
-                0.007027711868286133,
-                0.007031807899475098,
-                0.006998015880584717,
-                0.007018496036529541,
-                0.00703276777267456,
-                0.007049215793609619,
-                0.00704307222366333,
-                0.007007232189178467,
-                0.007090112209320068,
-                0.007036928176879883,
-                0.007003136157989502,
-                0.007008255958557129,
-                0.0070522880554199216,
-                0.007053311824798584,
-                0.007041024208068848,
-                0.0070225920677185055,
-                0.007017471790313721,
-                0.0070348801612854,
-                0.007027711868286133,
-                0.006985727787017822,
-                0.007003136157989502,
-                0.007016448020935059,
-                0.007386112213134765,
-                0.007251967906951904,
-                0.007214079856872559,
-                0.007209983825683594,
-                0.007245823860168457,
-                0.007252992153167725,
-                0.007228415966033935,
-                0.007127039909362793,
-                0.007111711978912354,
-                0.007110655784606934,
-                0.007096320152282715,
-                0.007068672180175781,
-                0.007088064193725586,
-                0.006906879901885986,
-                0.006907904148101806,
-                0.006969344139099121,
-                0.007007232189178467,
-                0.006987775802612305,
-                0.006985727787017822,
-                0.0069723520278930666,
-                0.006938560009002686,
-                0.006956031799316406,
-                0.006960127830505371,
-                0.006936575889587402,
-                0.006979584217071533,
-                0.006979584217071533,
-                0.006986752033233643,
-                0.006952960014343262,
-                0.006952960014343262,
-                0.006971392154693603,
-                0.0069959678649902345,
-                0.006948895931243896,
-                0.006976511955261231,
-                0.006958079814910889,
-                0.0069632000923156735,
-                0.006960095882415772,
-                0.006964159965515137,
-                0.006991903781890869,
-                0.006975520133972168,
-                0.006939648151397705,
-                0.006980607986450196,
-                0.0069847040176391605,
-                0.006977536201477051,
-                0.006936575889587402,
-                0.006967296123504638,
-                0.006966271877288818,
-                0.006965248107910156,
-                0.006947840213775635,
-                0.006958144187927246,
-                0.0069847040176391605,
-                0.007014400005340577,
-                0.006956992149353028,
-                0.0069621758460998535,
-                0.006965248107910156,
-                0.006953983783721924,
-                0.006934559822082519,
-                0.006937600135803222,
-                0.0069550080299377445,
-                0.0069632320404052735,
-                0.006959104061126709,
-                0.006998015880584717,
-                0.006986752033233643,
-                0.006972415924072266
+                0.007575551986694336,
+                0.007778304100036621,
+                0.007773183822631836,
+                0.00789299201965332,
+                0.00800153636932373,
+                0.007746560096740723,
+                0.007543807983398438,
+                0.007482367992401123,
+                0.007456768035888672,
+                0.007455743789672851,
+                0.007472095966339111,
+                0.0074670081138610836,
+                0.007442431926727295,
+                0.0073994240760803225,
+                0.007442431926727295,
+                0.00742195177078247,
+                0.007417856216430664,
+                0.007401472091674805,
+                0.007420928001403809,
+                0.007412735939025879,
+                0.007448575973510742,
+                0.00739737606048584,
+                0.007418879985809326,
+                0.007469056129455566,
+                0.007443456172943115,
+                0.007395328044891358,
+                0.007420896053314209,
+                0.007427040100097656,
+                0.00742300796508789,
+                0.007432191848754883,
+                0.007423999786376953,
+                0.007434239864349365,
+                0.007413760185241699,
+                0.0074035201072692874,
+                0.0074035201072692874,
+                0.007394303798675537,
+                0.007404543876647949,
+                0.00743939208984375,
+                0.0074291200637817386,
+                0.007413760185241699,
+                0.007448575973510742,
+                0.007442431926727295,
+                0.00743833589553833,
+                0.007404543876647949,
+                0.007442431926727295,
+                0.00742195177078247,
+                0.007415808200836181,
+                0.007402495861053467,
+                0.007417856216430664,
+                0.007401472091674805,
+                0.007398399829864502,
+                0.007434207916259766,
+                0.0073994240760803225,
+                0.007412735939025879,
+                0.0074301438331604,
+                0.007423999786376953,
+                0.007404543876647949,
+                0.007391200065612793,
+                0.00743833589553833,
+                0.007417856216430664,
+                0.007418879985809326,
+                0.007450623989105225,
+                0.007468063831329346,
+                0.0074106879234313965,
+                0.007358463764190673,
+                0.007396351814270019,
+                0.0074301438331604
             ]
         },
         "throughput": {
             "unit": "samples/s",
-            "value": 139.11577095755567
+            "value": 130.15068308731713
         },
         "energy": {
             "unit": "kWh",
-            "cpu": 8.57844660823472e-08,
-            "ram": 4.689846807414724e-08,
-            "gpu": 1.5517725779709873e-07,
-            "total": 2.878601919535932e-07
+            "cpu": 8.757511774698895e-08,
+            "ram": 4.787732542873529e-08,
+            "gpu": 1.5560469964705714e-07,
+            "total": 2.9105714282278136e-07
         },
         "efficiency": {
             "unit": "samples/kWh",
-            "value": 3473908.612418396
+            "value": 3435751.448329441
         }
     }
 }
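For readers who want to sanity-check the derived fields against the raw latency samples, the numbers in both versions of the report are consistent with mean = total / count, throughput ≈ count / total, and efficiency ≈ 1 / energy.total (i.e. the energy entries behave like kWh per sample). The sketch below is only that arithmetic, not the benchmarking tool's own implementation; it assumes a local copy of the file, and the recomputed stdev is printed for comparison only, since the report does not say whether it uses a population or sample estimate.

import json
import statistics

# Assumes a local copy of the benchmark_report.json from this commit.
with open("benchmark_report.json") as f:
    forward = json.load(f)["forward"]

values = forward["latency"]["values"]
total = sum(values)

print(f"count      : {len(values)}")
print(f"total (s)  : {total:.6f}   (reported {forward['latency']['total']:.6f})")
print(f"mean (s)   : {total / len(values):.6f}   (reported {forward['latency']['mean']:.6f})")
print(f"stdev (s)  : {statistics.pstdev(values):.6f}   # recomputed, for comparison only")
# throughput ~= count / total latency (one sample per forward pass here)
print(f"throughput : {len(values) / total:.2f} samples/s   (reported {forward['throughput']['value']:.2f})")
# efficiency ~= 1 / energy.total, i.e. the energy entries act as kWh per sample
print(f"efficiency : {1 / forward['energy']['total']:.0f} samples/kWh   (reported {forward['efficiency']['value']:.0f})")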