{
    "config": {
        "name": "cuda_inference_transformers_text-classification_FacebookAI/roberta-base",
        "backend": {
            "name": "pytorch",
            "version": "2.2.2+rocm5.7",
            "_target_": "optimum_benchmark.backends.pytorch.backend.PyTorchBackend",
            "task": "text-classification",
            "library": "transformers",
            "model": "FacebookAI/roberta-base",
            "processor": "FacebookAI/roberta-base",
            "device": "cuda",
            "device_ids": "0",
            "seed": 42,
            "inter_op_num_threads": null,
            "intra_op_num_threads": null,
            "model_kwargs": {},
            "processor_kwargs": {},
            "hub_kwargs": {},
            "no_weights": true,
            "device_map": null,
            "torch_dtype": null,
            "eval_mode": true,
            "to_bettertransformer": false,
            "low_cpu_mem_usage": null,
            "attn_implementation": null,
            "cache_implementation": null,
            "autocast_enabled": false,
            "autocast_dtype": null,
            "torch_compile": false,
            "torch_compile_target": "forward",
            "torch_compile_config": {},
            "quantization_scheme": null,
            "quantization_config": {},
            "deepspeed_inference": false,
            "deepspeed_inference_config": {},
            "peft_type": null,
            "peft_config": {}
        },
        "scenario": {
            "name": "inference",
            "_target_": "optimum_benchmark.scenarios.inference.scenario.InferenceScenario",
            "iterations": 1,
            "duration": 1,
            "warmup_runs": 1,
            "input_shapes": {
                "batch_size": 1,
                "num_choices": 2,
                "sequence_length": 2
            },
            "new_tokens": null,
            "latency": true,
            "memory": true,
            "energy": false,
            "forward_kwargs": {},
            "generate_kwargs": {
                "max_new_tokens": 2,
                "min_new_tokens": 2
            },
            "call_kwargs": {
                "num_inference_steps": 2
            }
        },
        "launcher": {
            "name": "process",
            "_target_": "optimum_benchmark.launchers.process.launcher.ProcessLauncher",
            "device_isolation": true,
            "device_isolation_action": "error",
            "numactl": false,
            "numactl_kwargs": {},
            "start_method": "spawn"
        },
        "environment": {
            "cpu": " AMD EPYC 7763 64-Core Processor",
            "cpu_count": 128,
            "cpu_ram_mb": 1082015.236096,
            "system": "Linux",
            "machine": "x86_64",
            "platform": "Linux-5.15.0-84-generic-x86_64-with-glibc2.35",
            "processor": "x86_64",
            "python_version": "3.10.12",
            "gpu": [
                "Advanced Micro Devices, Inc. [AMD/ATI]"
            ],
            "gpu_count": 1,
            "gpu_vram_mb": 68702699520,
            "optimum_benchmark_version": "0.2.1",
            "optimum_benchmark_commit": "3b8c49a169ebd79001b2a83fbf2b332612417102",
            "transformers_version": "4.42.3",
            "transformers_commit": null,
            "accelerate_version": "0.31.0",
            "accelerate_commit": null,
            "diffusers_version": "0.29.2",
            "diffusers_commit": null,
            "optimum_version": null,
            "optimum_commit": null,
            "timm_version": "1.0.7",
            "timm_commit": null,
            "peft_version": null,
            "peft_commit": null
        }
    },
    "report": {
        "forward": {
            "memory": {
                "unit": "MB",
                "max_ram": 1009.905664,
                "max_global_vram": 897.138688,
                "max_process_vram": 195782.180864,
                "max_reserved": 555.74528,
                "max_allocated": 499.443712
            },
            "latency": {
                "unit": "s",
                "count": 140,
                "total": 0.9992238569259644,
                "mean": 0.007137313263756888,
                "stdev": 0.0006227442120522962,
                "p50": 0.006814506530761719,
                "p90": 0.007902210140228271,
                "p95": 0.008003529357910155,
                "p99": 0.009309260616302484,
                "values": [
                    0.009456087112426757,
                    0.009468565940856934,
                    0.009079608917236328,
                    0.008036577224731445,
                    0.008018497467041016,
                    0.00801401710510254,
                    0.00800297737121582,
                    0.007981698036193847,
                    0.007909698009490967,
                    0.00794233798980713,
                    0.007817859172821046,
                    0.007829699039459228,
                    0.007881857872009278,
                    0.007912417888641358,
                    0.007898978233337402,
                    0.007901378154754638,
                    0.007883617877960205,
                    0.007853377819061279,
                    0.007878818988800048,
                    0.007847939014434814,
                    0.007961698055267334,
                    0.007945858001708984,
                    0.007900419235229493,
                    0.007892738819122315,
                    0.008969849586486816,
                    0.007810978889465332,
                    0.007608420848846435,
                    0.007575621128082275,
                    0.007582981109619141,
                    0.007572260856628418,
                    0.007542820930480957,
                    0.007507781028747559,
                    0.007535941123962402,
                    0.007516100883483887,
                    0.007528901100158691,
                    0.0075025010108947755,
                    0.007505701065063476,
                    0.007517221927642822,
                    0.007579781055450439,
                    0.007515621185302734,
                    0.007526181221008301,
                    0.007542981147766113,
                    0.0075293807983398435,
                    0.007508262157440186,
                    0.007499141216278076,
                    0.0075165810585021975,
                    0.007562981128692627,
                    0.007577381134033203,
                    0.0075380210876464845,
                    0.00751370096206665,
                    0.007522820949554443,
                    0.007531620979309082,
                    0.007387621879577637,
                    0.006944106101989746,
                    0.006728587150573731,
                    0.006620428085327148,
                    0.006630348205566406,
                    0.006658827781677246,
                    0.006628588199615478,
                    0.006608588218688965,
                    0.006614667892456055,
                    0.006641707897186279,
                    0.006640427112579346,
                    0.006638507843017578,
                    0.006674828052520752,
                    0.0066209077835083004,
                    0.0066245880126953125,
                    0.0066245880126953125,
                    0.0066293878555297855,
                    0.006798986911773682,
                    0.006617708206176758,
                    0.00658810806274414,
                    0.006622347831726074,
                    0.006616108894348145,
                    0.006623467922210693,
                    0.006624427795410157,
                    0.00663402795791626,
                    0.006848266124725342,
                    0.006632748126983642,
                    0.0066338682174682615,
                    0.006629228115081787,
                    0.0066362681388854984,
                    0.0066276278495788574,
                    0.006623467922210693,
                    0.00662746810913086,
                    0.006652586936950684,
                    0.006613708019256591,
                    0.00663354778289795,
                    0.006651308059692383,
                    0.007187304019927979,
                    0.0070804238319396974,
                    0.007057384014129638,
                    0.00704570484161377,
                    0.007540900230407715,
                    0.007566020965576172,
                    0.00733642292022705,
                    0.007087463855743408,
                    0.007017704963684082,
                    0.006962825775146484,
                    0.007042665004730225,
                    0.007106823921203613,
                    0.007033384799957275,
                    0.006884585857391357,
                    0.006618507862091064,
                    0.006651947975158692,
                    0.006630988121032715,
                    0.006599947929382324,
                    0.006606667995452881,
                    0.0065943489074707035,
                    0.006590349197387695,
                    0.006625547885894776,
                    0.006625708103179932,
                    0.0066228280067443844,
                    0.006606028079986572,
                    0.0066221880912780764,
                    0.006639947891235351,
                    0.006649548053741455,
                    0.00663354778289795,
                    0.006642347812652588,
                    0.006626987934112549,
                    0.006626187801361084,
                    0.006605709075927734,
                    0.006613708972930908,
                    0.006677867889404297,
                    0.0068300261497497555,
                    0.0066172280311584475,
                    0.006626028060913086,
                    0.006602828979492188,
                    0.006608908176422119,
                    0.006606828212738037,
                    0.006601707935333252,
                    0.006641228199005127,
                    0.006699306964874268,
                    0.0066551480293273926,
                    0.006640428066253662,
                    0.006641707897186279,
                    0.006636427879333496,
                    0.006613868236541748,
                    0.006625867843627929,
                    0.006605547904968262
                ]
            },
            "throughput": {
                "unit": "samples/s",
                "value": 140.10874443160242
            },
            "energy": null,
            "efficiency": null
        }
    }
}