IlyasMoutawwakil committed (verified)
Commit 30883e4
1 Parent(s): 3799efd

Upload cuda_inference_transformers_token-classification_microsoft/deberta-v3-base/benchmark_report.json with huggingface_hub
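The commit message above looks like the default message huggingface_hub generates for a programmatic upload. A minimal sketch of how a report like this might be pushed with huggingface_hub's upload_file (the repo_id and repo_type below are placeholders/assumptions, not taken from this commit view):

    from huggingface_hub import HfApi

    api = HfApi()
    api.upload_file(
        path_or_fileobj="benchmark_report.json",  # local file to push
        path_in_repo=(
            "cuda_inference_transformers_token-classification_microsoft/"
            "deberta-v3-base/benchmark_report.json"
        ),
        repo_id="<namespace>/<repo-name>",  # placeholder: the target repo is not shown in this view
        repo_type="dataset",                # assumption: benchmark reports are often stored in a dataset repo
    )
    # When commit_message is omitted, huggingface_hub defaults to
    # "Upload <path_in_repo> with huggingface_hub", which matches the message above.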

cuda_inference_transformers_token-classification_microsoft/deberta-v3-base/benchmark_report.json CHANGED
@@ -2,7 +2,7 @@
  "forward": {
  "memory": {
  "unit": "MB",
- "max_ram": 976.453632,
+ "max_ram": 976.048128,
  "max_global_vram": 1434.976256,
  "max_process_vram": 0.0,
  "max_reserved": 794.820608,
@@ -10,105 +10,102 @@
  },
  "latency": {
  "unit": "s",
- "count": 74,
- "total": 0.998737825393677,
- "mean": 0.01349645709991455,
- "stdev": 0.00039168357847122273,
- "p50": 0.013515264034271241,
- "p90": 0.01398937635421753,
- "p95": 0.014288925218582153,
- "p99": 0.014692100315093992,
+ "count": 71,
+ "total": 1.0009896955490114,
+ "mean": 0.014098446416183259,
+ "stdev": 0.0006384603254537008,
+ "p50": 0.014302207946777343,
+ "p90": 0.014630911827087402,
+ "p95": 0.015238143920898437,
+ "p99": 0.01611059169769287,
  "values": [
- 0.014370752334594726,
- 0.015021056175231933,
- 0.01442307186126709,
- 0.01457043170928955,
- 0.013313023567199708,
- 0.013138943672180176,
- 0.013065216064453124,
- 0.01307750415802002,
- 0.01308672046661377,
- 0.013093888282775879,
- 0.013040639877319337,
- 0.013071359634399414,
- 0.013124608039855956,
- 0.013055999755859376,
- 0.013073408126831054,
- 0.01303756809234619,
- 0.013206527709960938,
- 0.013184000015258789,
- 0.013143039703369141,
- 0.013220895767211914,
- 0.01307750415802002,
- 0.013042688369750977,
- 0.014244864463806153,
- 0.013533184051513672,
- 0.013530112266540528,
- 0.013503487586975099,
- 0.013495295524597169,
- 0.013514752388000489,
- 0.013501440048217773,
- 0.013487104415893555,
+ 0.01595084762573242,
+ 0.016483327865600587,
+ 0.015924223899841307,
+ 0.01489510440826416,
+ 0.015516672134399414,
+ 0.014889984130859376,
+ 0.01495961570739746,
+ 0.01375334358215332,
+ 0.013577216148376465,
+ 0.013564000129699707,
+ 0.013637632369995116,
+ 0.013596672058105469,
+ 0.013575167655944824,
+ 0.013502464294433594,
  0.013526016235351563,
- 0.013494272232055664,
- 0.013541376113891602,
- 0.013501472473144531,
- 0.013591551780700683,
- 0.013510656356811524,
+ 0.0134901762008667,
+ 0.013545472145080567,
+ 0.013562879562377929,
  0.013607935905456543,
- 0.013586432456970214,
- 0.013503487586975099,
- 0.013478912353515626,
- 0.014101504325866699,
- 0.014116864204406738,
- 0.013910016059875489,
- 0.013990912437438965,
- 0.013985792160034179,
- 0.013623295783996582,
- 0.013625344276428223,
- 0.01354751968383789,
- 0.013574144363403321,
- 0.01354751968383789,
- 0.013515775680541992,
- 0.013561856269836426,
- 0.013571071624755859,
- 0.013580224037170411,
- 0.01355059242248535,
+ 0.01357209587097168,
+ 0.013522944450378417,
+ 0.013522944450378417,
+ 0.013513728141784668,
+ 0.013554688453674316,
  0.01354854393005371,
- 0.013551615715026855,
+ 0.013495295524597169,
+ 0.013554688453674316,
+ 0.013536255836486816,
+ 0.013537280082702637,
  0.013533184051513672,
+ 0.013478912353515626,
+ 0.013537280082702637,
+ 0.013517824172973633,
+ 0.013504511833190918,
+ 0.013527039527893067,
+ 0.0135731201171875,
+ 0.013526016235351563,
  0.013515775680541992,
- 0.013541376113891602,
- 0.013521920204162598,
- 0.013554688453674316,
- 0.013492223739624023,
- 0.013590527534484862,
- 0.01358950424194336,
- 0.013500415802001953,
- 0.013578240394592284,
- 0.013493247985839844,
- 0.013513728141784668,
- 0.012965888023376464,
- 0.012999679565429688,
- 0.01293619155883789,
- 0.012956671714782715,
- 0.01296076774597168
+ 0.013527039527893067,
+ 0.01358847999572754,
+ 0.01413430404663086,
+ 0.014583744049072266,
+ 0.014547967910766601,
+ 0.014630911827087402,
+ 0.014499903678894043,
+ 0.014415871620178223,
+ 0.014400511741638184,
+ 0.014389216423034668,
+ 0.01438003158569336,
+ 0.01439641571044922,
+ 0.014323712348937988,
+ 0.014331904411315918,
+ 0.014354432106018066,
+ 0.014361599922180175,
+ 0.014352383613586426,
+ 0.014313471794128419,
+ 0.014323648452758788,
+ 0.014313471794128419,
+ 0.014252032279968262,
+ 0.014321663856506347,
+ 0.014317567825317384,
+ 0.014403583526611329,
+ 0.014334976196289062,
+ 0.014325728416442871,
+ 0.014330880165100097,
+ 0.014306303977966308,
+ 0.014324735641479493,
+ 0.014302207946777343,
+ 0.014302207946777343,
+ 0.014326784133911133,
+ 0.014341119766235352
  ]
  },
  "throughput": {
  "unit": "samples/s",
- "value": 74.09351895812208
+ "value": 70.92980109156741
  },
  "energy": {
  "unit": "kWh",
- "cpu": 1.6024605212388216e-07,
- "ram": 8.743550851076584e-08,
- "gpu": 3.3578545381333807e-07,
- "total": 5.834670144479861e-07
+ "cpu": 1.653465246896685e-07,
+ "ram": 9.038774616184786e-08,
+ "gpu": 3.504362371388536e-07,
+ "total": 6.061705079903701e-07
  },
  "efficiency": {
  "unit": "samples/kWh",
- "value": 1713892.9455096836
+ "value": 1649700.8462442164
  }
  }
  }
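
As a side note on how the derived fields relate to the raw measurements: the reported throughput matches count divided by the total latency, and the reported efficiency matches the reciprocal of the total energy, which suggests the energy figures are per forward pass. A quick check against the new values (this is an observation from the numbers in the diff, not a definition taken from the benchmarking code):

    # New-report values copied from the diff above.
    count = 71
    total_latency_s = 1.0009896955490114      # latency.total
    total_energy_kwh = 6.061705079903701e-07  # energy.total (appears to be per forward pass)

    throughput = count / total_latency_s   # ~70.92980109 samples/s, matches throughput.value
    efficiency = 1.0 / total_energy_kwh    # ~1649700.846 samples/kWh, matches efficiency.value
    print(throughput, efficiency)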