{
"config": {
"name": "cuda_inference_transformers_image-classification_google/vit-base-patch16-224",
"backend": {
"name": "pytorch",
"version": "2.4.1+cu124",
"_target_": "optimum_benchmark.backends.pytorch.backend.PyTorchBackend",
"task": "image-classification",
"library": "transformers",
"model_type": "vit",
"model": "google/vit-base-patch16-224",
"processor": "google/vit-base-patch16-224",
"device": "cuda",
"device_ids": "0",
"seed": 42,
"inter_op_num_threads": null,
"intra_op_num_threads": null,
"model_kwargs": {},
"processor_kwargs": {},
"no_weights": true,
"device_map": null,
"torch_dtype": null,
"eval_mode": true,
"to_bettertransformer": false,
"low_cpu_mem_usage": null,
"attn_implementation": null,
"cache_implementation": null,
"autocast_enabled": false,
"autocast_dtype": null,
"torch_compile": false,
"torch_compile_target": "forward",
"torch_compile_config": {},
"quantization_scheme": null,
"quantization_config": {},
"deepspeed_inference": false,
"deepspeed_inference_config": {},
"peft_type": null,
"peft_config": {}
},
"scenario": {
"name": "inference",
"_target_": "optimum_benchmark.scenarios.inference.scenario.InferenceScenario",
"iterations": 1,
"duration": 1,
"warmup_runs": 1,
"input_shapes": {
"batch_size": 1,
"num_choices": 2,
"sequence_length": 2
},
"new_tokens": null,
"memory": true,
"latency": true,
"energy": true,
"forward_kwargs": {},
"generate_kwargs": {
"max_new_tokens": 2,
"min_new_tokens": 2
},
"call_kwargs": {
"num_inference_steps": 2
}
},
"launcher": {
"name": "process",
"_target_": "optimum_benchmark.launchers.process.launcher.ProcessLauncher",
"device_isolation": true,
"device_isolation_action": "error",
"numactl": false,
"numactl_kwargs": {},
"start_method": "spawn"
},
"environment": {
"cpu": " AMD EPYC 7R32",
"cpu_count": 16,
"cpu_ram_mb": 66697.261056,
"system": "Linux",
"machine": "x86_64",
"platform": "Linux-5.10.225-213.878.amzn2.x86_64-x86_64-with-glibc2.35",
"processor": "x86_64",
"python_version": "3.10.12",
"gpu": [
"NVIDIA A10G"
],
"gpu_count": 1,
"gpu_vram_mb": 24146608128,
"optimum_benchmark_version": "0.5.0",
"optimum_benchmark_commit": null,
"transformers_version": "4.45.1",
"transformers_commit": null,
"accelerate_version": "0.34.2",
"accelerate_commit": null,
"diffusers_version": "0.30.3",
"diffusers_commit": null,
"optimum_version": null,
"optimum_commit": null,
"timm_version": "1.0.9",
"timm_commit": null,
"peft_version": "0.13.0",
"peft_commit": null
}
},
"report": {
"load": {
"memory": {
"unit": "MB",
"max_ram": 844.30848,
"max_global_vram": 1037.565952,
"max_process_vram": 0.0,
"max_reserved": 400.556032,
"max_allocated": 346.271744
},
"latency": {
"unit": "s",
"values": [
0.26597171020507815
],
"count": 1,
"total": 0.26597171020507815,
"mean": 0.26597171020507815,
"p50": 0.26597171020507815,
"p90": 0.26597171020507815,
"p95": 0.26597171020507815,
"p99": 0.26597171020507815,
"stdev": 0,
"stdev_": 0
},
"throughput": null,
"energy": null,
"efficiency": null
},
"forward": {
"memory": {
"unit": "MB",
"max_ram": 1054.253056,
"max_global_vram": 1056.44032,
"max_process_vram": 0.0,
"max_reserved": 406.847488,
"max_allocated": 363.853824
},
"latency": {
"unit": "s",
"values": [
0.005449728012084961,
0.005056511878967285,
0.004986879825592041,
0.004928512096405029,
0.004936704158782959,
0.00499507188796997,
0.004964352130889893,
0.004921343803405762,
0.004932608127593994,
0.005147744178771973,
0.0050973758697509764,
0.005116928100585938,
0.005146624088287354,
0.00516812801361084,
0.005152768135070801,
0.005126143932342529,
0.0052111358642578124,
0.005094399929046631,
0.005130239963531494,
0.005124095916748047,
0.005130239963531494,
0.005258240222930908,
0.005157887935638428,
0.0051435518264770505,
0.0051066880226135255,
0.005139455795288086,
0.005174272060394287,
0.005134367942810058,
0.005178368091583252,
0.005101471900939941,
0.005180255889892578,
0.005096447944641113,
0.005080128192901611,
0.005184512138366699,
0.005119967937469483,
0.005088255882263183,
0.005086207866668701,
0.0051404800415039064,
0.005092351913452148,
0.005145599842071533,
0.005142655849456787,
0.00522547197341919,
0.005058559894561767,
0.0050769920349121095,
0.005114880084991455,
0.0051968002319335935,
0.0051157760620117185,
0.005153791904449463,
0.005091328144073487,
0.0051066560745239254,
0.00509830379486084,
0.005090303897857666,
0.005070847988128662,
0.005120992183685303,
0.005126143932342529,
0.005090303897857666,
0.005083136081695557,
0.005099520206451416,
0.005122047901153564,
0.005264383792877197,
0.005135359764099121,
0.005180416107177735,
0.005100543975830078,
0.005090303897857666,
0.005181439876556396,
0.00515174388885498,
0.005150720119476319,
0.00506879997253418,
0.005254144191741943,
0.005100543975830078,
0.005058559894561767,
0.005142528057098389,
0.005121024131774903,
0.005100543975830078,
0.005114880084991455,
0.005202943801879883,
0.0050852479934692385,
0.005099520206451416,
0.005284863948822022,
0.005088255882263183,
0.0050657281875610355,
0.0051036162376403805,
0.0052295680046081545,
0.005131264209747314,
0.0051404800415039064,
0.0051773757934570315,
0.005097472190856934,
0.005083136081695557,
0.00506982421875,
0.005134143829345703,
0.0051157760620117185,
0.0050841598510742185,
0.005193664073944092,
0.005078015804290771,
0.0051271038055419925,
0.005086207866668701,
0.005180416107177735,
0.005110688209533691,
0.005117951869964599,
0.0051660799980163576,
0.005098495960235596,
0.00510975980758667,
0.005091328144073487,
0.005155839920043945,
0.005117951869964599,
0.005138432025909424,
0.005144576072692871,
0.005117951869964599,
0.005081088066101074,
0.0051660799980163576,
0.005088255882263183,
0.005076096057891845,
0.00506879997253418,
0.005184512138366699,
0.0050841598510742185,
0.005203968048095703,
0.0051138558387756345,
0.005149695873260498,
0.005182464122772217,
0.005088255882263183,
0.005210112094879151,
0.005104640007019043,
0.005143680095672607,
0.0051435518264770505,
0.005130239963531494,
0.005154816150665284,
0.005203999996185303,
0.00521727991104126,
0.005121024131774903,
0.005048319816589355,
0.0051435518264770505,
0.0050954241752624516,
0.00511897611618042,
0.0053043198585510255,
0.0051333122253417966,
0.005100543975830078,
0.00510975980758667,
0.005173247814178467,
0.00510975980758667,
0.005131264209747314,
0.005085184097290039,
0.00508403205871582,
0.0051333122253417966,
0.005047296047210693,
0.005121024131774903,
0.005048319816589355,
0.0050769920349121095,
0.005038080215454102,
0.00509939193725586,
0.005411839962005615,
0.005049439907073974,
0.005090208053588867,
0.005235712051391602,
0.005065824031829834,
0.005040128231048584,
0.005090303897857666,
0.0051138558387756345,
0.005028863906860351,
0.005182464122772217,
0.005081088066101074,
0.005044223785400391,
0.005088255882263183,
0.005119999885559082,
0.005059584140777588,
0.00506060791015625,
0.005104640007019043,
0.005049344062805176,
0.00510975980758667,
0.005208064079284668,
0.005101568222045898,
0.005088160037994385,
0.005112832069396973,
0.005114880084991455,
0.005138432025909424,
0.0051682558059692385,
0.0050646719932556155,
0.005108736038208008,
0.005111807823181152,
0.005124256134033203,
0.005284832000732422,
0.005212160110473632,
0.00652185583114624,
0.005653503894805908,
0.0058787841796875,
0.005454847812652588,
0.006517792224884033,
0.006451200008392334,
0.005155839920043945,
0.005241856098175048,
0.005094399929046631,
0.005100575923919678,
0.0051435518264770505,
0.005185535907745361,
0.005125152111053467
],
"count": 194,
"total": 0.9995627188682555,
"mean": 0.005152385148805442,
"p50": 0.005117439985275269,
"p90": 0.005211852836608887,
"p95": 0.005284843182563782,
"p99": 0.0064558614635467524,
"stdev": 0.00019454586736779612,
"stdev_": 3.7758409309308085
},
"throughput": {
"unit": "samples/s",
"value": 194.08486965145565
},
"energy": {
"unit": "kWh",
"cpu": 5.99920308712115e-08,
"ram": 3.278352660282983e-08,
"gpu": 3.491318730202008e-07,
"total": 4.419074304942421e-07
},
"efficiency": {
"unit": "samples/kWh",
"value": 2262917.369100517
}
}
}
}