{
| "config": { | |
| "name": "pytorch-llama", | |
| "backend": { | |
| "name": "pytorch", | |
| "version": "2.2.0", | |
| "_target_": "optimum_benchmark.backends.pytorch.backend.PyTorchBackend", | |
| "task": "text-generation", | |
| "library": "transformers", | |
| "model": "meta-llama/Llama-2-7b-hf", | |
| "processor": "meta-llama/Llama-2-7b-hf", | |
| "device": "cuda", | |
| "device_ids": "0", | |
| "seed": 42, | |
| "inter_op_num_threads": null, | |
| "intra_op_num_threads": null, | |
| "model_kwargs": {}, | |
| "processor_kwargs": {}, | |
| "hub_kwargs": {}, | |
| "no_weights": true, | |
| "device_map": null, | |
| "torch_dtype": "float16", | |
| "eval_mode": true, | |
| "to_bettertransformer": false, | |
| "low_cpu_mem_usage": null, | |
| "attn_implementation": null, | |
| "cache_implementation": null, | |
| "autocast_enabled": false, | |
| "autocast_dtype": null, | |
| "torch_compile": false, | |
| "torch_compile_target": "forward", | |
| "torch_compile_config": {}, | |
| "quantization_scheme": null, | |
| "quantization_config": {}, | |
| "deepspeed_inference": false, | |
| "deepspeed_inference_config": {}, | |
| "tensor_parallel": false, | |
| "peft_type": null, | |
| "peft_config": {} | |
| }, | |
| "scenario": { | |
| "name": "inference", | |
| "_target_": "optimum_benchmark.scenarios.inference.scenario.InferenceScenario", | |
| "iterations": 10, | |
| "duration": 10, | |
| "warmup_runs": 10, | |
| "input_shapes": { | |
| "batch_size": 1, | |
| "num_choices": 2, | |
| "sequence_length": 128 | |
| }, | |
| "new_tokens": null, | |
| "latency": true, | |
| "memory": true, | |
| "energy": false, | |
| "forward_kwargs": {}, | |
| "generate_kwargs": { | |
| "max_new_tokens": 32, | |
| "min_new_tokens": 32 | |
| }, | |
| "call_kwargs": {} | |
| }, | |
| "launcher": { | |
| "name": "process", | |
| "_target_": "optimum_benchmark.launchers.process.launcher.ProcessLauncher", | |
| "device_isolation": true, | |
| "device_isolation_action": "warn", | |
| "numactl": false, | |
| "numactl_kwargs": {}, | |
| "start_method": "spawn" | |
| }, | |
| "environment": { | |
| "cpu": " AMD EPYC 7742 64-Core Processor", | |
| "cpu_count": 128, | |
| "cpu_ram_mb": 540671.627264, | |
| "system": "Linux", | |
| "machine": "x86_64", | |
| "platform": "Linux-5.4.0-166-generic-x86_64-with-glibc2.35", | |
| "processor": "x86_64", | |
| "python_version": "3.10.12", | |
| "gpu": [ | |
| "NVIDIA A100-SXM4-80GB", | |
| "NVIDIA A100-SXM4-80GB", | |
| "NVIDIA A100-SXM4-80GB", | |
| "NVIDIA DGX Display", | |
| "NVIDIA A100-SXM4-80GB" | |
| ], | |
| "gpu_count": 5, | |
| "gpu_vram_mb": 347892350976, | |
| "optimum_benchmark_version": "0.2.1", | |
| "optimum_benchmark_commit": "347e13ca9f7f904f55669603cfb9f0b6c7e8672c", | |
| "transformers_version": "4.41.1", | |
| "transformers_commit": null, | |
| "accelerate_version": "0.30.1", | |
| "accelerate_commit": null, | |
| "diffusers_version": null, | |
| "diffusers_commit": null, | |
| "optimum_version": "1.20.0", | |
| "optimum_commit": null, | |
| "timm_version": null, | |
| "timm_commit": null, | |
| "peft_version": "0.11.1", | |
| "peft_commit": null | |
| } | |
| }, | |
| "report": { | |
| "prefill": { | |
| "memory": { | |
| "unit": "MB", | |
| "max_ram": 1234.866176, | |
| "max_global_vram": 15218.966528, | |
| "max_process_vram": 14292.09088, | |
| "max_reserved": 13759.414272, | |
| "max_allocated": 13578.50368 | |
| }, | |
| "latency": { | |
| "unit": "s", | |
| "count": 11, | |
| "total": 0.33500819015502925, | |
| "mean": 0.03045529001409357, | |
| "stdev": 0.0006588511013156968, | |
| "p50": 0.030182367324829103, | |
| "p90": 0.031159807205200195, | |
| "p95": 0.031431151390075684, | |
| "p99": 0.03164822673797608, | |
| "values": [ | |
| 0.031702495574951174, | |
| 0.031159807205200195, | |
| 0.031116096496582032, | |
| 0.030891008377075195, | |
| 0.029737823486328124, | |
| 0.030182367324829103, | |
| 0.029910463333129883, | |
| 0.029730783462524415, | |
| 0.029977792739868163, | |
| 0.03075209617614746, | |
| 0.029847455978393556 | |
| ] | |
| }, | |
| "throughput": { | |
| "unit": "tokens/s", | |
| "value": 4202.882321618556 | |
| }, | |
| "energy": null, | |
| "efficiency": null | |
| }, | |
| "decode": { | |
| "memory": { | |
| "unit": "MB", | |
| "max_ram": 1234.866176, | |
| "max_global_vram": 15218.966528, | |
| "max_process_vram": 14292.09088, | |
| "max_reserved": 13759.414272, | |
| "max_allocated": 13657.83552 | |
| }, | |
| "latency": { | |
| "unit": "s", | |
| "count": 11, | |
| "total": 10.43467156982422, | |
| "mean": 0.9486065063476562, | |
| "stdev": 0.01526047085225363, | |
| "p50": 0.9407984619140625, | |
| "p90": 0.97138232421875, | |
| "p95": 0.973743408203125, | |
| "p99": 0.9756322753906249, | |
| "values": [ | |
| 0.9711124877929688, | |
| 0.9761044921875, | |
| 0.97138232421875, | |
| 0.9479179077148437, | |
| 0.9355850219726562, | |
| 0.9407984619140625, | |
| 0.9373528442382812, | |
| 0.9391262817382813, | |
| 0.9376884765625, | |
| 0.9420660400390625, | |
| 0.9355372314453125 | |
| ] | |
| }, | |
| "throughput": { | |
| "unit": "tokens/s", | |
| "value": 32.67951441673832 | |
| }, | |
| "energy": null, | |
| "efficiency": null | |
| }, | |
| "per_token": { | |
| "memory": null, | |
| "latency": { | |
| "unit": "s", | |
| "count": 340, | |
| "total": 10.706682880401612, | |
| "mean": 0.03149024376588709, | |
| "stdev": 0.005226840501727016, | |
| "p50": 0.030386688232421875, | |
| "p90": 0.031479091453552245, | |
| "p95": 0.03172869081497192, | |
| "p99": 0.06165116912841798, | |
| "values": [ | |
| 0.031096832275390625, | |
| 0.031197183609008788, | |
| 0.03119820785522461, | |
| 0.0324956169128418, | |
| 0.031323135375976564, | |
| 0.03099852752685547, | |
| 0.0309616641998291, | |
| 0.030835712432861328, | |
| 0.03092889595031738, | |
| 0.031127552032470703, | |
| 0.031077375411987306, | |
| 0.03099750328063965, | |
| 0.03116748809814453, | |
| 0.031264768600463864, | |
| 0.03121766471862793, | |
| 0.031512575149536134, | |
| 0.03155046463012695, | |
| 0.03127193641662598, | |
| 0.03137740707397461, | |
| 0.031318016052246093, | |
| 0.031083520889282228, | |
| 0.031302656173706055, | |
| 0.031308799743652346, | |
| 0.03145113563537598, | |
| 0.031145984649658204, | |
| 0.03143475151062012, | |
| 0.03202560043334961, | |
| 0.031510528564453126, | |
| 0.03160166358947754, | |
| 0.03172966384887695, | |
| 0.06313267135620117, | |
| 0.031355903625488284, | |
| 0.031513599395751955, | |
| 0.03172863960266113, | |
| 0.031476736068725586, | |
| 0.031301631927490234, | |
| 0.031285247802734374, | |
| 0.03180441665649414, | |
| 0.031335424423217774, | |
| 0.03159654426574707, | |
| 0.0321003532409668, | |
| 0.03156991958618164, | |
| 0.03125862312316895, | |
| 0.031821823120117186, | |
| 0.03144499206542969, | |
| 0.03134668731689453, | |
| 0.03141119956970215, | |
| 0.03141939163208008, | |
| 0.0313753604888916, | |
| 0.03169075202941894, | |
| 0.03159859275817871, | |
| 0.03139993667602539, | |
| 0.031425535202026365, | |
| 0.03139788818359375, | |
| 0.03139276885986328, | |
| 0.03158425521850586, | |
| 0.031442943572998046, | |
| 0.03124224090576172, | |
| 0.031152128219604492, | |
| 0.03142860794067383, | |
| 0.03126272010803223, | |
| 0.0630568962097168, | |
| 0.031357952117919925, | |
| 0.03132825660705566, | |
| 0.03153715133666992, | |
| 0.03180339241027832, | |
| 0.03128217506408691, | |
| 0.03149619293212891, | |
| 0.03154944038391113, | |
| 0.03142963218688965, | |
| 0.03132825660705566, | |
| 0.0315043830871582, | |
| 0.031357952117919925, | |
| 0.0313384952545166, | |
| 0.031491071701049804, | |
| 0.0312801284790039, | |
| 0.03136819267272949, | |
| 0.03147776031494141, | |
| 0.03127398490905762, | |
| 0.03127603149414063, | |
| 0.03131596755981445, | |
| 0.03142348861694336, | |
| 0.031122432708740235, | |
| 0.03118489646911621, | |
| 0.03119513511657715, | |
| 0.031124479293823244, | |
| 0.031162368774414063, | |
| 0.03141939163208008, | |
| 0.031116287231445314, | |
| 0.03098521614074707, | |
| 0.030971904754638672, | |
| 0.0309749755859375, | |
| 0.0625428466796875, | |
| 0.03123302459716797, | |
| 0.03116339111328125, | |
| 0.03105996894836426, | |
| 0.03112652778625488, | |
| 0.03105177688598633, | |
| 0.03124224090576172, | |
| 0.031357952117919925, | |
| 0.03102003288269043, | |
| 0.031194112777709962, | |
| 0.031110143661499022, | |
| 0.031120384216308594, | |
| 0.030507007598876954, | |
| 0.030040063858032227, | |
| 0.02999398422241211, | |
| 0.03017728042602539, | |
| 0.03033497619628906, | |
| 0.030110719680786133, | |
| 0.03014656066894531, | |
| 0.030319616317749022, | |
| 0.030220287322998047, | |
| 0.030262271881103517, | |
| 0.030086143493652344, | |
| 0.030511104583740234, | |
| 0.03042918395996094, | |
| 0.030107648849487304, | |
| 0.030087167739868165, | |
| 0.030271488189697264, | |
| 0.029895679473876953, | |
| 0.030098432540893554, | |
| 0.030031871795654298, | |
| 0.06032179260253906, | |
| 0.030287872314453124, | |
| 0.030098432540893554, | |
| 0.030029823303222656, | |
| 0.030059520721435546, | |
| 0.030464000701904297, | |
| 0.030095359802246095, | |
| 0.0301527042388916, | |
| 0.029862911224365234, | |
| 0.029873151779174805, | |
| 0.030091264724731445, | |
| 0.030089216232299806, | |
| 0.030003200531005858, | |
| 0.02993459129333496, | |
| 0.030130176544189452, | |
| 0.029748224258422853, | |
| 0.029886463165283202, | |
| 0.03018956756591797, | |
| 0.030261247634887696, | |
| 0.030340095520019532, | |
| 0.030119935989379884, | |
| 0.030309375762939454, | |
| 0.030213119506835938, | |
| 0.030272512435913085, | |
| 0.03061759948730469, | |
| 0.030527488708496094, | |
| 0.030301183700561524, | |
| 0.030448640823364258, | |
| 0.03003392028808594, | |
| 0.030062591552734375, | |
| 0.030526464462280273, | |
| 0.061087745666503906, | |
| 0.03015884780883789, | |
| 0.030912511825561522, | |
| 0.03056947135925293, | |
| 0.03060223960876465, | |
| 0.030638080596923828, | |
| 0.030611455917358397, | |
| 0.030336000442504882, | |
| 0.030229503631591798, | |
| 0.03014553642272949, | |
| 0.030243839263916016, | |
| 0.030235647201538086, | |
| 0.030290943145751953, | |
| 0.03015679931640625, | |
| 0.03018854331970215, | |
| 0.030074880599975585, | |
| 0.030461952209472655, | |
| 0.030406656265258788, | |
| 0.030277631759643556, | |
| 0.030512128829956055, | |
| 0.030680063247680665, | |
| 0.03034726333618164, | |
| 0.03017728042602539, | |
| 0.030204927444458008, | |
| 0.03042918395996094, | |
| 0.030247936248779295, | |
| 0.030741504669189453, | |
| 0.030059520721435546, | |
| 0.029854719161987304, | |
| 0.030014463424682617, | |
| 0.030126079559326172, | |
| 0.06049894332885742, | |
| 0.029965311050415038, | |
| 0.030015487670898438, | |
| 0.02998784065246582, | |
| 0.030046207427978516, | |
| 0.030295040130615233, | |
| 0.03057868766784668, | |
| 0.030320640563964843, | |
| 0.030422016143798827, | |
| 0.030369792938232422, | |
| 0.030302207946777345, | |
| 0.030043136596679686, | |
| 0.030053375244140625, | |
| 0.030261247634887696, | |
| 0.030275583267211914, | |
| 0.030083072662353515, | |
| 0.030461952209472655, | |
| 0.030441471099853516, | |
| 0.030353408813476562, | |
| 0.030483455657958985, | |
| 0.03059404754638672, | |
| 0.030055423736572266, | |
| 0.030070783615112305, | |
| 0.03040358352661133, | |
| 0.030264320373535155, | |
| 0.03013222312927246, | |
| 0.0299683837890625, | |
| 0.030243839263916016, | |
| 0.030249984741210937, | |
| 0.03001651191711426, | |
| 0.030029823303222656, | |
| 0.059888641357421876, | |
| 0.03001753616333008, | |
| 0.029764608383178712, | |
| 0.029944831848144532, | |
| 0.03001651191711426, | |
| 0.030426111221313477, | |
| 0.030289920806884765, | |
| 0.030502912521362304, | |
| 0.030296064376831053, | |
| 0.030243839263916016, | |
| 0.03021107292175293, | |
| 0.03038515281677246, | |
| 0.030325759887695314, | |
| 0.03013324737548828, | |
| 0.030298112869262695, | |
| 0.030091264724731445, | |
| 0.030101503372192383, | |
| 0.030494720458984374, | |
| 0.03124019241333008, | |
| 0.030106624603271483, | |
| 0.030243839263916016, | |
| 0.03037286376953125, | |
| 0.03038822364807129, | |
| 0.030242816925048828, | |
| 0.03055411148071289, | |
| 0.030455808639526367, | |
| 0.030277631759643556, | |
| 0.03040358352661133, | |
| 0.03016294479370117, | |
| 0.030443519592285157, | |
| 0.030571519851684572, | |
| 0.06034739303588867, | |
| 0.029904895782470704, | |
| 0.02979020881652832, | |
| 0.029686784744262694, | |
| 0.029671424865722655, | |
| 0.02978099250793457, | |
| 0.030090240478515624, | |
| 0.02979327964782715, | |
| 0.03014041519165039, | |
| 0.030555135726928712, | |
| 0.03020595169067383, | |
| 0.02992742347717285, | |
| 0.03039334487915039, | |
| 0.030282751083374023, | |
| 0.03021414375305176, | |
| 0.03015782356262207, | |
| 0.030361600875854492, | |
| 0.0305797119140625, | |
| 0.030542848587036132, | |
| 0.030229503631591798, | |
| 0.0301527042388916, | |
| 0.030036991119384765, | |
| 0.030142463684082032, | |
| 0.029864959716796875, | |
| 0.030054399490356445, | |
| 0.030091264724731445, | |
| 0.030079999923706056, | |
| 0.03172249603271484, | |
| 0.031091712951660157, | |
| 0.03078758430480957, | |
| 0.03100467109680176, | |
| 0.06201139068603516, | |
| 0.030689279556274415, | |
| 0.030862335205078126, | |
| 0.030870527267456056, | |
| 0.030942207336425782, | |
| 0.03095244789123535, | |
| 0.031246335983276367, | |
| 0.03105177688598633, | |
| 0.03039948844909668, | |
| 0.030280704498291015, | |
| 0.0301527042388916, | |
| 0.030233600616455077, | |
| 0.030253055572509766, | |
| 0.03016703987121582, | |
| 0.02993152046203613, | |
| 0.03021414375305176, | |
| 0.030287872314453124, | |
| 0.030684160232543944, | |
| 0.030259199142456054, | |
| 0.03059507179260254, | |
| 0.03003392028808594, | |
| 0.02994790458679199, | |
| 0.030301183700561524, | |
| 0.03039743995666504, | |
| 0.030268415451049805, | |
| 0.030088191986083986, | |
| 0.030006271362304687, | |
| 0.029838336944580077, | |
| 0.029953023910522462, | |
| 0.030046207427978516, | |
| 0.029873151779174805, | |
| 0.06027775955200195, | |
| 0.030036991119384765, | |
| 0.02996121597290039, | |
| 0.03014143943786621, | |
| 0.030332927703857423, | |
| 0.030111743927001954, | |
| 0.03013324737548828, | |
| 0.030115840911865234, | |
| 0.029995008468627928, | |
| 0.029920255661010742, | |
| 0.030074880599975585, | |
| 0.030060543060302734, | |
| 0.02997248077392578, | |
| 0.030176256179809572, | |
| 0.030043136596679686, | |
| 0.030502912521362304, | |
| 0.030072832107543947, | |
| 0.03039129638671875, | |
| 0.03037491226196289, | |
| 0.030227455139160156, | |
| 0.03034419250488281, | |
| 0.03039641571044922, | |
| 0.030243839263916016, | |
| 0.03021516799926758, | |
| 0.03019161605834961, | |
| 0.030123008728027343, | |
| 0.030216192245483397, | |
| 0.030498815536499024, | |
| 0.03014143943786621, | |
| 0.02996633529663086, | |
| 0.03014143943786621 | |
| ] | |
| }, | |
| "throughput": { | |
| "unit": "tokens/s", | |
| "value": 31.75586722778199 | |
| }, | |
| "energy": null, | |
| "efficiency": null | |
| } | |
| } | |
| } |
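The `config` block above maps onto optimum-benchmark's Python API. Below is a minimal sketch of how a comparable run could be reconstructed from it, assuming the classes exported by optimum-benchmark 0.2.x (`Benchmark`, `BenchmarkConfig`, `PyTorchConfig`, `ProcessConfig`, `InferenceConfig`); the field names are taken from the JSON, but the exact constructor signatures and serialization helpers should be checked against the pinned version (0.2.1).

```python
# Minimal sketch: rebuilding the benchmark run described by the "config" block.
# Assumes optimum-benchmark 0.2.x exposes these config classes at the top level;
# verify against version 0.2.1 before relying on the exact signatures.
from optimum_benchmark import Benchmark, BenchmarkConfig, InferenceConfig, ProcessConfig, PyTorchConfig

if __name__ == "__main__":
    backend_config = PyTorchConfig(
        model="meta-llama/Llama-2-7b-hf",
        device="cuda",
        device_ids="0",
        no_weights=True,          # benchmark randomly initialized weights instead of downloading them
        torch_dtype="float16",
    )
    scenario_config = InferenceConfig(
        latency=True,
        memory=True,
        input_shapes={"batch_size": 1, "sequence_length": 128},
        generate_kwargs={"max_new_tokens": 32, "min_new_tokens": 32},
    )
    launcher_config = ProcessConfig(device_isolation=True, start_method="spawn")

    benchmark_config = BenchmarkConfig(
        name="pytorch-llama",
        backend=backend_config,
        scenario=scenario_config,
        launcher=launcher_config,
    )
    report = Benchmark.launch(benchmark_config)
    # The resulting report serializes to JSON in the same shape as the "report"
    # block above (the exact save/export helper names vary by version).
```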
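For readers consuming these reports programmatically, the throughput figures can be re-derived from the latency statistics and the scenario's input shapes. The short check below (plain Python, file path assumed) reproduces the reported prefill and decode tokens/s for this file: prefill amortizes the 128-token prompt over the mean prefill latency, and decode amortizes the remaining 31 of the 32 generated tokens over the mean decode latency. These relations are inferred from how the numbers line up in this report, not from library documentation.

```python
# Cross-check the reported throughput against the raw latency statistics,
# using only the standard library.
import json

with open("pytorch-llama-2.2.0-float16.json") as f:  # path assumed
    data = json.load(f)

shapes = data["config"]["scenario"]["input_shapes"]
gen_kwargs = data["config"]["scenario"]["generate_kwargs"]
report = data["report"]

batch_size = shapes["batch_size"]            # 1
sequence_length = shapes["sequence_length"]  # 128
new_tokens = gen_kwargs["max_new_tokens"]    # 32

prefill_mean = report["prefill"]["latency"]["mean"]
decode_mean = report["decode"]["latency"]["mean"]

# Prompt tokens processed per second during prefill.
prefill_tps = batch_size * sequence_length / prefill_mean
# Generated tokens per second during decode (first new token is attributed to prefill).
decode_tps = batch_size * (new_tokens - 1) / decode_mean

print(f"prefill: {prefill_tps:.1f} tokens/s "
      f"(reported {report['prefill']['throughput']['value']:.1f})")
print(f"decode:  {decode_tps:.1f} tokens/s "
      f"(reported {report['decode']['throughput']['value']:.1f})")
```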