{
"config": {
"name": "cuda_inference_transformers_text-generation_openai-community/gpt2",
"backend": {
"name": "pytorch",
"version": "2.3.1+cu121",
"_target_": "optimum_benchmark.backends.pytorch.backend.PyTorchBackend",
"task": "text-generation",
"library": "transformers",
"model": "openai-community/gpt2",
"processor": "openai-community/gpt2",
"device": "cuda",
"device_ids": "0",
"seed": 42,
"inter_op_num_threads": null,
"intra_op_num_threads": null,
"model_kwargs": {},
"processor_kwargs": {},
"hub_kwargs": {},
"no_weights": true,
"device_map": null,
"torch_dtype": null,
"eval_mode": true,
"to_bettertransformer": false,
"low_cpu_mem_usage": null,
"attn_implementation": null,
"cache_implementation": null,
"autocast_enabled": false,
"autocast_dtype": null,
"torch_compile": false,
"torch_compile_target": "forward",
"torch_compile_config": {},
"quantization_scheme": null,
"quantization_config": {},
"deepspeed_inference": false,
"deepspeed_inference_config": {},
"peft_type": null,
"peft_config": {}
},
"scenario": {
"name": "inference",
"_target_": "optimum_benchmark.scenarios.inference.scenario.InferenceScenario",
"iterations": 1,
"duration": 1,
"warmup_runs": 1,
"input_shapes": {
"batch_size": 1,
"num_choices": 2,
"sequence_length": 2
},
"new_tokens": null,
"latency": true,
"memory": true,
"energy": true,
"forward_kwargs": {},
"generate_kwargs": {
"max_new_tokens": 2,
"min_new_tokens": 2
},
"call_kwargs": {
"num_inference_steps": 2
}
},
"launcher": {
"name": "process",
"_target_": "optimum_benchmark.launchers.process.launcher.ProcessLauncher",
"device_isolation": true,
"device_isolation_action": "error",
"numactl": false,
"numactl_kwargs": {},
"start_method": "spawn"
},
"environment": {
"cpu": " AMD EPYC 7R32",
"cpu_count": 16,
"cpu_ram_mb": 66697.293824,
"system": "Linux",
"machine": "x86_64",
"platform": "Linux-5.10.219-208.866.amzn2.x86_64-x86_64-with-glibc2.35",
"processor": "x86_64",
"python_version": "3.10.12",
"gpu": [
"NVIDIA A10G"
],
"gpu_count": 1,
"gpu_vram_mb": 24146608128,
"optimum_benchmark_version": "0.2.1",
"optimum_benchmark_commit": null,
"transformers_version": "4.42.3",
"transformers_commit": null,
"accelerate_version": "0.31.0",
"accelerate_commit": null,
"diffusers_version": "0.29.2",
"diffusers_commit": null,
"optimum_version": null,
"optimum_commit": null,
"timm_version": "1.0.7",
"timm_commit": null,
"peft_version": null,
"peft_commit": null
}
},
"report": {
"prefill": {
"memory": {
"unit": "MB",
"max_ram": 1041.85856,
"max_global_vram": 1365.77024,
"max_process_vram": 0.0,
"max_reserved": 725.614592,
"max_allocated": 522.210816
},
"latency": {
"unit": "s",
"count": 72,
"total": 0.4892470402717593,
"mean": 0.006795097781552208,
"stdev": 0.0004829188814168122,
"p50": 0.006814224004745483,
"p90": 0.007451107358932495,
"p95": 0.0075594175815582275,
"p99": 0.00775849470615387,
"values": [
0.007811295986175537,
0.00756547212600708,
0.007554463863372803,
0.007496928215026856,
0.00690934419631958,
0.00679414415359497,
0.00722163200378418,
0.007040224075317383,
0.006962687969207764,
0.006826911926269531,
0.006978623867034912,
0.0072698240280151365,
0.007221439838409424,
0.007111999988555908,
0.0071536960601806645,
0.007203904151916504,
0.007302048206329345,
0.007364319801330566,
0.007243167877197265,
0.007736927986145019,
0.007501855850219726,
0.007193376064300537,
0.007422048091888428,
0.007288191795349121,
0.007211872100830078,
0.006841279983520508,
0.007415552139282227,
0.007582816123962402,
0.007214879989624024,
0.00711843204498291,
0.006909791946411133,
0.00713801622390747,
0.00639788818359375,
0.0063311362266540525,
0.006965472221374512,
0.007104415893554688,
0.007454336166381836,
0.006914368152618408,
0.006801536083221435,
0.006847839832305908,
0.006619647979736328,
0.006648543834686279,
0.0066063680648803715,
0.006689216136932373,
0.006661952018737793,
0.006606207847595215,
0.006589695930480957,
0.0062659201622009275,
0.006249663829803467,
0.0062481918334960935,
0.00627785587310791,
0.006244895935058593,
0.006242688179016113,
0.0062401599884033205,
0.006273632049560547,
0.00629094409942627,
0.006273983955383301,
0.006220352172851562,
0.0062639999389648435,
0.006272575855255127,
0.006253439903259278,
0.006229248046875,
0.006261983871459961,
0.006304416179656982,
0.006259935855865479,
0.0062473278045654295,
0.0062349758148193355,
0.006271679878234863,
0.006264512062072754,
0.006229055881500244,
0.00622489595413208,
0.0062549118995666505
]
},
"throughput": {
"unit": "tokens/s",
"value": 294.3298336971302
},
"energy": {
"unit": "kWh",
"cpu": 8.120040892647464e-08,
"ram": 4.4355559189900894e-08,
"gpu": 1.5713863408219074e-07,
"total": 2.8269460219856624e-07
},
"efficiency": {
"unit": "tokens/kWh",
"value": 7074772.508727241
}
},
"decode": {
"memory": {
"unit": "MB",
"max_ram": 1041.85856,
"max_global_vram": 1365.77024,
"max_process_vram": 0.0,
"max_reserved": 725.614592,
"max_allocated": 522.43456
},
"latency": {
"unit": "s",
"count": 72,
"total": 0.516845953464508,
"mean": 0.007178416020340389,
"stdev": 0.0004988555460948412,
"p50": 0.0071613919734954835,
"p90": 0.007770425605773926,
"p95": 0.007864731097221375,
"p99": 0.007998770027160644,
"values": [
0.007898399829864502,
0.007927167892456054,
0.007958271980285644,
0.007632415771484375,
0.007322591781616211,
0.007126016139984131,
0.007695648193359375,
0.00744595193862915,
0.007319968223571777,
0.0073976640701293945,
0.007707456111907959,
0.007801919937133789,
0.007739903926849365,
0.007539072036743164,
0.007620160102844238,
0.007657375812530518,
0.007836448192596436,
0.0077703680992126465,
0.007757952213287353,
0.007739840030670166,
0.008097920417785644,
0.0076661758422851565,
0.007581823825836181,
0.007648896217346191,
0.0077471680641174314,
0.0077272639274597165,
0.007837183952331543,
0.007761631965637207,
0.007635647773742676,
0.007770431995391846,
0.007322976112365723,
0.007289728164672851,
0.006622111797332764,
0.006733695983886719,
0.007543488025665283,
0.007625216007232666,
0.007483424186706543,
0.007283679962158203,
0.007196767807006836,
0.007056096076965332,
0.007007359981536865,
0.0069647679328918456,
0.007009376049041748,
0.006962240219116211,
0.007020736217498779,
0.007002592086791992,
0.0069855680465698245,
0.00663753604888916,
0.006625343799591064,
0.00660752010345459,
0.006693088054656983,
0.006633887767791748,
0.006601791858673095,
0.006588960170745849,
0.006586719989776611,
0.006608895778656006,
0.006621600151062012,
0.006610720157623291,
0.006594816207885742,
0.006600895881652832,
0.00660422420501709,
0.0065998401641845705,
0.0066226239204406736,
0.006666336059570312,
0.0066128640174865725,
0.00661023998260498,
0.006628511905670166,
0.006605728149414063,
0.006598432064056397,
0.006612959861755371,
0.006612736225128174,
0.006581120014190674
]
},
"throughput": {
"unit": "tokens/s",
"value": 139.30649842060583
},
"energy": {
"unit": "kWh",
"cpu": 7.912640528349873e-08,
"ram": 4.299306531189219e-08,
"gpu": 1.4813568421114315e-07,
"total": 2.7025515480653406e-07
},
"efficiency": {
"unit": "tokens/kWh",
"value": 3700206.942272254
}
},
"per_token": {
"memory": null,
"latency": {
"unit": "s",
"count": 71,
"total": 0.9927280626296999,
"mean": 0.013982085389150698,
"stdev": 0.0009615587672331404,
"p50": 0.013969408035278321,
"p90": 0.015185919761657715,
"p95": 0.015461887836456299,
"p99": 0.015596339416503907,
"values": [
0.015527935981750488,
0.015538175582885743,
0.015211520195007324,
0.014292991638183594,
0.013969408035278321,
0.01489510440826416,
0.014532608032226562,
0.014341119766235352,
0.014248959541320801,
0.014693375587463378,
0.015121408462524414,
0.014993408203125,
0.014690303802490234,
0.014813183784484863,
0.014864383697509765,
0.015185919761657715,
0.01517568016052246,
0.01496780776977539,
0.015572992324829102,
0.015650815963745117,
0.014892031669616699,
0.015062015533447265,
0.01496780776977539,
0.014950400352478027,
0.014619647979736328,
0.01529139232635498,
0.01539583969116211,
0.014892031669616699,
0.014937088012695313,
0.014289919853210448,
0.014499839782714843,
0.013112319946289062,
0.01307545566558838,
0.014442496299743653,
0.014730239868164062,
0.015038463592529297,
0.014254079818725587,
0.014033920288085937,
0.0139683837890625,
0.013664256095886231,
0.013624320030212403,
0.013666303634643554,
0.01367244815826416,
0.013699071884155273,
0.013648896217346192,
0.01360588836669922,
0.012940287590026855,
0.012893247604370116,
0.012880831718444825,
0.012983296394348144,
0.012915712356567383,
0.01287987232208252,
0.01285632038116455,
0.012880895614624023,
0.012928000450134277,
0.012914688110351562,
0.012860416412353515,
0.012885024070739746,
0.012875743865966797,
0.012898303985595704,
0.01286246395111084,
0.012907520294189453,
0.012994560241699218,
0.012897279739379883,
0.012883968353271484,
0.012884032249450684,
0.012904383659362794,
0.012868608474731445,
0.01287782382965088,
0.01287168025970459,
0.01286143970489502
]
},
"throughput": {
"unit": "tokens/s",
"value": 71.5200896123795
},
"energy": null,
"efficiency": null
}
}
}
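
For context, here is a minimal sketch (not part of the benchmark artifact) of how a report like the one above could be loaded and summarized using only Python's standard library. The file name `benchmark.json` is an assumption taken from the upload path; the field names mirror the structure shown above.

```python
import json
import statistics

# Load the benchmark report produced by optimum-benchmark (file name assumed).
with open("benchmark.json") as f:
    data = json.load(f)

config = data["config"]
report = data["report"]

print(f"model: {config['backend']['model']} on {config['backend']['device']}")

# Recompute summary statistics from the raw per-run latency values and
# compare them with the aggregated fields stored in the report.
for section in ("prefill", "decode", "per_token"):
    latency = report[section]["latency"]
    values = latency["values"]
    mean = statistics.mean(values)
    print(
        f"{section}: n={len(values)} "
        f"mean={mean:.6f}s (reported {latency['mean']:.6f}s) "
        f"p90={latency['p90']:.6f}s"
    )
```

Running this against the report above should reproduce the stored `mean` values for the prefill, decode, and per_token sections from their raw `values` arrays.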