{
"config": {
"name": "cuda_inference_transformers_text-classification_FacebookAI/roberta-base",
"backend": {
"name": "pytorch",
"version": "2.4.1+cu124",
"_target_": "optimum_benchmark.backends.pytorch.backend.PyTorchBackend",
"task": "text-classification",
"library": "transformers",
"model_type": "roberta",
"model": "FacebookAI/roberta-base",
"processor": "FacebookAI/roberta-base",
"device": "cuda",
"device_ids": "0",
"seed": 42,
"inter_op_num_threads": null,
"intra_op_num_threads": null,
"model_kwargs": {},
"processor_kwargs": {},
"no_weights": true,
"device_map": null,
"torch_dtype": null,
"eval_mode": true,
"to_bettertransformer": false,
"low_cpu_mem_usage": null,
"attn_implementation": null,
"cache_implementation": null,
"autocast_enabled": false,
"autocast_dtype": null,
"torch_compile": false,
"torch_compile_target": "forward",
"torch_compile_config": {},
"quantization_scheme": null,
"quantization_config": {},
"deepspeed_inference": false,
"deepspeed_inference_config": {},
"peft_type": null,
"peft_config": {}
},
"scenario": {
"name": "inference",
"_target_": "optimum_benchmark.scenarios.inference.scenario.InferenceScenario",
"iterations": 1,
"duration": 1,
"warmup_runs": 1,
"input_shapes": {
"batch_size": 1,
"num_choices": 2,
"sequence_length": 2
},
"new_tokens": null,
"memory": true,
"latency": true,
"energy": true,
"forward_kwargs": {},
"generate_kwargs": {
"max_new_tokens": 2,
"min_new_tokens": 2
},
"call_kwargs": {
"num_inference_steps": 2
}
},
"launcher": {
"name": "process",
"_target_": "optimum_benchmark.launchers.process.launcher.ProcessLauncher",
"device_isolation": true,
"device_isolation_action": "error",
"numactl": false,
"numactl_kwargs": {},
"start_method": "spawn"
},
"environment": {
"cpu": " AMD EPYC 7R32",
"cpu_count": 16,
"cpu_ram_mb": 66697.261056,
"system": "Linux",
"machine": "x86_64",
"platform": "Linux-5.10.225-213.878.amzn2.x86_64-x86_64-with-glibc2.35",
"processor": "x86_64",
"python_version": "3.10.12",
"gpu": [
"NVIDIA A10G"
],
"gpu_count": 1,
"gpu_vram_mb": 24146608128,
"optimum_benchmark_version": "0.5.0",
"optimum_benchmark_commit": null,
"transformers_version": "4.44.2",
"transformers_commit": null,
"accelerate_version": "0.34.2",
"accelerate_commit": null,
"diffusers_version": "0.30.3",
"diffusers_commit": null,
"optimum_version": "1.22.0",
"optimum_commit": null,
"timm_version": "1.0.9",
"timm_commit": null,
"peft_version": "0.13.0",
"peft_commit": null
}
},
"report": {
"load": {
"memory": {
"unit": "MB",
"max_ram": 803.151872,
"max_global_vram": 1192.7552,
"max_process_vram": 0.0,
"max_reserved": 555.74528,
"max_allocated": 499.376128
},
"latency": {
"unit": "s",
"values": [
8.2791220703125
],
"count": 1,
"total": 8.2791220703125,
"mean": 8.2791220703125,
"p50": 8.2791220703125,
"p90": 8.2791220703125,
"p95": 8.2791220703125,
"p99": 8.2791220703125,
"stdev": 0,
"stdev_": 0
},
"throughput": null,
"energy": {
"unit": "kWh",
"cpu": 1.1417789333329154e-06,
"ram": 6.063624294151774e-07,
"gpu": 1.8286125739990639e-06,
"total": 3.5767539367471567e-06
},
"efficiency": null
},
"forward": {
"memory": {
"unit": "MB",
"max_ram": 1093.709824,
"max_global_vram": 1203.24096,
"max_process_vram": 0.0,
"max_reserved": 555.74528,
"max_allocated": 508.993024
},
"latency": {
"unit": "s",
"values": [
0.007478271961212158,
0.007111680030822754,
0.007142399787902832,
0.007109632015228271,
0.007036928176879883,
0.007010303974151612,
0.0070563840866088865,
0.00708403205871582,
0.007081984043121338,
0.007069695949554444,
0.007031807899475098,
0.006940671920776367,
0.006971392154693603,
0.0069959678649902345,
0.007006207942962647,
0.00704307222366333,
0.0070225920677185055,
0.007014400005340577,
0.0070256638526916505,
0.007192575931549072,
0.007197663784027099,
0.007315455913543701,
0.007287807941436767,
0.00739737606048584,
0.007071743965148926,
0.007051263809204102,
0.007041024208068848,
0.007184383869171143,
0.007093247890472412,
0.007205887794494629,
0.007103487968444824,
0.006961152076721191,
0.007009280204772949,
0.006996960163116455,
0.006931456089019775,
0.00693452787399292,
0.006947840213775635,
0.007097343921661377,
0.007015423774719238,
0.007130112171173096,
0.006892543792724609,
0.006967296123504638,
0.006993919849395752,
0.006994944095611572,
0.0069918718338012695,
0.006958079814910889,
0.007020544052124023,
0.006978559970855713,
0.007017471790313721,
0.0071402878761291505,
0.007064576148986816,
0.007168000221252442,
0.007080959796905518,
0.007011328220367432,
0.006961152076721191,
0.007004159927368164,
0.007013376235961914,
0.0069621758460998535,
0.006967296123504638,
0.00704201602935791,
0.006938623905181885,
0.006897664070129395,
0.006811647891998291,
0.006781951904296875,
0.006923264026641846,
0.007007232189178467,
0.006699007987976074,
0.006632448196411133,
0.0067348480224609375,
0.006841343879699707,
0.006908927917480469,
0.006856704235076904,
0.00695091199874878,
0.006990848064422607,
0.007046144008636474,
0.0069847040176391605,
0.007740416049957275,
0.007855103969573975,
0.007777247905731201,
0.007854080200195313,
0.007821311950683594,
0.0067543678283691405,
0.006459392070770263,
0.006652927875518798,
0.006672319889068604,
0.006694911956787109,
0.006573056221008301,
0.006616000175476074,
0.006609920024871826,
0.006626304149627686,
0.006624256134033203,
0.006621183872222901,
0.006567935943603515,
0.006654975891113281,
0.006609920024871826,
0.006602752208709717,
0.006633471965789795,
0.006627327919006347,
0.006599679946899414,
0.006684671878814697,
0.0066119680404663084,
0.006587423801422119,
0.0065474557876586915,
0.006505472183227539,
0.006466527938842773,
0.0064880638122558594,
0.006475776195526123,
0.00648089599609375,
0.006540287971496582,
0.006520832061767578,
0.0066406397819519045,
0.006645760059356689,
0.006585343837738037,
0.0066304001808166506,
0.006627327919006347,
0.006609920024871826,
0.006651904106140137,
0.006669312000274658,
0.006597631931304931,
0.006646783828735352,
0.006616064071655273,
0.006643712043762207,
0.006643775939941406,
0.006633471965789795,
0.006576128005981445,
0.006590464115142822,
0.006617087841033936,
0.006623231887817383,
0.006574079990386963,
0.006556672096252441,
0.00658022403717041,
0.006591487884521485,
0.006553599834442139,
0.007127039909362793,
0.006603775978088379,
0.006650879859924317,
0.00658022403717041,
0.006615104198455811,
0.006587456226348877,
0.006611936092376709,
0.006597631931304931,
0.0066590080261230465,
0.006583295822143555,
0.006612991809844971,
0.006586368083953857
],
"count": 145,
"total": 0.9957149109840391,
"mean": 0.006866999386096822,
"p50": 0.006908927917480469,
"p90": 0.0071415550231933595,
"p95": 0.007309926319122314,
"p99": 0.007839662170410157,
"stdev": 0.0002916613322388193,
"stdev_": 4.247289330319841
},
"throughput": {
"unit": "samples/s",
"value": 145.62401185365422
},
"energy": {
"unit": "kWh",
"cpu": 7.733070090638864e-08,
"ram": 4.2070456053528464e-08,
"gpu": 1.480989640779236e-07,
"total": 2.675001210378407e-07
},
"efficiency": {
"unit": "samples/kWh",
"value": 3738316.0655039083
}
}
}
}