{
"config": {
"name": "cuda_inference_transformers_text-classification_FacebookAI/roberta-base",
"backend": {
"name": "pytorch",
"version": "2.3.1+cu121",
"_target_": "optimum_benchmark.backends.pytorch.backend.PyTorchBackend",
"task": "text-classification",
"library": "transformers",
"model_type": "roberta",
"model": "FacebookAI/roberta-base",
"processor": "FacebookAI/roberta-base",
"device": "cuda",
"device_ids": "0",
"seed": 42,
"inter_op_num_threads": null,
"intra_op_num_threads": null,
"model_kwargs": {},
"processor_kwargs": {},
"no_weights": true,
"device_map": null,
"torch_dtype": null,
"eval_mode": true,
"to_bettertransformer": false,
"low_cpu_mem_usage": null,
"attn_implementation": null,
"cache_implementation": null,
"autocast_enabled": false,
"autocast_dtype": null,
"torch_compile": false,
"torch_compile_target": "forward",
"torch_compile_config": {},
"quantization_scheme": null,
"quantization_config": {},
"deepspeed_inference": false,
"deepspeed_inference_config": {},
"peft_type": null,
"peft_config": {}
},
"scenario": {
"name": "inference",
"_target_": "optimum_benchmark.scenarios.inference.scenario.InferenceScenario",
"iterations": 1,
"duration": 1,
"warmup_runs": 1,
"input_shapes": {
"batch_size": 1,
"num_choices": 2,
"sequence_length": 2
},
"new_tokens": null,
"memory": true,
"latency": true,
"energy": true,
"forward_kwargs": {},
"generate_kwargs": {
"max_new_tokens": 2,
"min_new_tokens": 2
},
"call_kwargs": {
"num_inference_steps": 2
}
},
"launcher": {
"name": "process",
"_target_": "optimum_benchmark.launchers.process.launcher.ProcessLauncher",
"device_isolation": true,
"device_isolation_action": "error",
"numactl": false,
"numactl_kwargs": {},
"start_method": "spawn"
},
"environment": {
"cpu": " AMD EPYC 7R32",
"cpu_count": 16,
"cpu_ram_mb": 66697.293824,
"system": "Linux",
"machine": "x86_64",
"platform": "Linux-5.10.219-208.866.amzn2.x86_64-x86_64-with-glibc2.35",
"processor": "x86_64",
"python_version": "3.10.12",
"gpu": [
"NVIDIA A10G"
],
"gpu_count": 1,
"gpu_vram_mb": 24146608128,
"optimum_benchmark_version": "0.3.1",
"optimum_benchmark_commit": null,
"transformers_version": "4.42.4",
"transformers_commit": null,
"accelerate_version": "0.32.1",
"accelerate_commit": null,
"diffusers_version": "0.29.2",
"diffusers_commit": null,
"optimum_version": null,
"optimum_commit": null,
"timm_version": "1.0.7",
"timm_commit": null,
"peft_version": null,
"peft_commit": null
}
},
"report": {
"load": {
"memory": {
"unit": "MB",
"max_ram": 720.617472,
"max_global_vram": 1185.415168,
"max_process_vram": 0.0,
"max_reserved": 555.74528,
"max_allocated": 499.374592
},
"latency": {
"unit": "s",
"count": 1,
"total": 7.16543994140625,
"mean": 7.16543994140625,
"stdev": 0.0,
"p50": 7.16543994140625,
"p90": 7.16543994140625,
"p95": 7.16543994140625,
"p99": 7.16543994140625,
"values": [
7.16543994140625
]
},
"throughput": null,
"energy": {
"unit": "kWh",
"cpu": 1.0426668657196893e-06,
"ram": 5.559172731071271e-07,
"gpu": 0.0,
"total": 1.5985841388268165e-06
},
"efficiency": null
},
"forward": {
"memory": {
"unit": "MB",
"max_ram": 913.072128,
"max_global_vram": 1195.900928,
"max_process_vram": 0.0,
"max_reserved": 555.74528,
"max_allocated": 508.993024
},
"latency": {
"unit": "s",
"count": 150,
"total": 0.9979014048576361,
"mean": 0.0066526760323842365,
"stdev": 0.0003213292370953852,
"p50": 0.006774271965026855,
"p90": 0.006972108936309814,
"p95": 0.007063654541969299,
"p99": 0.007259381718635559,
"values": [
0.0071823358535766605,
0.0070665922164917,
0.007062528133392334,
0.007093247890472412,
0.007064576148986816,
0.006978559970855713,
0.006951935768127441,
0.007029759883880615,
0.007271423816680909,
0.006894591808319092,
0.006899712085723877,
0.0069027838706970214,
0.00683622407913208,
0.006774784088134766,
0.0068321280479431154,
0.006845439910888672,
0.006864895820617676,
0.0068577280044555666,
0.006904863834381104,
0.006910975933074951,
0.006843391895294189,
0.006755328178405762,
0.006880256175994873,
0.007018496036529541,
0.006910975933074951,
0.006892543792724609,
0.006966271877288818,
0.006956031799316406,
0.006817791938781738,
0.006819839954376221,
0.006920191764831543,
0.006910975933074951,
0.006840320110321045,
0.00682700777053833,
0.006791232109069824,
0.0067573761940002445,
0.006799359798431396,
0.00687820816040039,
0.0068392958641052244,
0.006773759841918945,
0.006865920066833496,
0.007144480228424072,
0.006953983783721924,
0.006855679988861084,
0.006989823818206787,
0.0069212160110473635,
0.0067840638160705565,
0.006795231819152832,
0.006910975933074951,
0.006922239780426025,
0.006823935985565186,
0.006870016098022461,
0.006971392154693603,
0.006804448127746582,
0.0067717118263244626,
0.006823935985565186,
0.006847487926483154,
0.006853631973266602,
0.006817791938781738,
0.007629824161529541,
0.006883327960968018,
0.006797311782836914,
0.0067870402336120605,
0.006884352207183838,
0.006871039867401123,
0.006817791938781738,
0.006961152076721191,
0.006979584217071533,
0.007012351989746094,
0.0068853759765625,
0.006933504104614257,
0.0069027838706970214,
0.006900735855102539,
0.006766592025756836,
0.006815743923187256,
0.006940671920776367,
0.0067717118263244626,
0.006747136116027832,
0.006696959972381592,
0.006621183872222901,
0.006788032054901123,
0.006751232147216797,
0.006492159843444824,
0.006585343837738037,
0.006533120155334473,
0.006590464115142822,
0.006605823993682861,
0.006643712043762207,
0.0066826238632202144,
0.0067645440101623535,
0.006823935985565186,
0.0072468481063842774,
0.0066375679969787596,
0.006428671836853027,
0.006504447937011719,
0.0064174079895019534,
0.006457344055175781,
0.00642252779006958,
0.00645740795135498,
0.006463520050048828,
0.006385663986206055,
0.006403071880340576,
0.006418432235717773,
0.006355967998504639,
0.00643891191482544,
0.006345727920532227,
0.006398975849151611,
0.006401023864746094,
0.0063272957801818845,
0.00637440013885498,
0.006343679904937744,
0.006334464073181153,
0.006372352123260498,
0.006360064029693604,
0.006371327877044678,
0.006355967998504639,
0.006355967998504639,
0.006334464073181153,
0.006685696125030518,
0.006355967998504639,
0.006239232063293457,
0.0060364799499511715,
0.0060282878875732426,
0.006060031890869141,
0.006043647766113281,
0.006042623996734619,
0.006089759826660156,
0.006098944187164307,
0.006101984024047852,
0.006069248199462891,
0.006113279819488526,
0.006069248199462891,
0.006124544143676758,
0.006088704109191895,
0.0061296639442443845,
0.006087679862976075,
0.0061224961280822755,
0.006136832237243653,
0.006516736030578613,
0.006865920066833496,
0.006406144142150879,
0.006392864227294922,
0.006301695823669433,
0.006295551776885986,
0.006301695823669433,
0.006237152099609375,
0.0062975997924804685,
0.00626585578918457,
0.006308864116668702,
0.006254591941833496
]
},
"throughput": {
"unit": "samples/s",
"value": 150.31545127586986
},
"energy": {
"unit": "kWh",
"cpu": 7.135298157951121e-08,
"ram": 3.9009108953961225e-08,
"gpu": 1.385660412409685e-07,
"total": 2.489281317744409e-07
},
"efficiency": {
"unit": "samples/kWh",
"value": 4017223.737918547
}
}
}
}