{
  "config": {
    "name": "cuda_inference_transformers_text-classification_FacebookAI/roberta-base",
    "backend": {
      "name": "pytorch",
      "version": "2.2.2+rocm5.7",
      "_target_": "optimum_benchmark.backends.pytorch.backend.PyTorchBackend",
      "task": "text-classification",
      "library": "transformers",
      "model_type": "roberta",
      "model": "FacebookAI/roberta-base",
      "processor": "FacebookAI/roberta-base",
      "device": "cuda",
      "device_ids": "0",
      "seed": 42,
      "inter_op_num_threads": null,
      "intra_op_num_threads": null,
      "model_kwargs": {},
      "processor_kwargs": {},
      "hub_kwargs": {},
      "no_weights": true,
      "device_map": null,
      "torch_dtype": null,
      "eval_mode": true,
      "to_bettertransformer": false,
      "low_cpu_mem_usage": null,
      "attn_implementation": null,
      "cache_implementation": null,
      "autocast_enabled": false,
      "autocast_dtype": null,
      "torch_compile": false,
      "torch_compile_target": "forward",
      "torch_compile_config": {},
      "quantization_scheme": null,
      "quantization_config": {},
      "deepspeed_inference": false,
      "deepspeed_inference_config": {},
      "peft_type": null,
      "peft_config": {}
    },
    "scenario": {
      "name": "inference",
      "_target_": "optimum_benchmark.scenarios.inference.scenario.InferenceScenario",
      "iterations": 1,
      "duration": 1,
      "warmup_runs": 1,
      "input_shapes": {
        "batch_size": 1,
        "num_choices": 2,
        "sequence_length": 2
      },
      "new_tokens": null,
      "latency": true,
      "memory": true,
      "energy": false,
      "forward_kwargs": {},
      "generate_kwargs": {
        "max_new_tokens": 2,
        "min_new_tokens": 2
      },
      "call_kwargs": {
        "num_inference_steps": 2
      }
    },
    "launcher": {
      "name": "process",
      "_target_": "optimum_benchmark.launchers.process.launcher.ProcessLauncher",
      "device_isolation": true,
      "device_isolation_action": "error",
      "numactl": false,
      "numactl_kwargs": {},
      "start_method": "spawn"
    },
    "environment": {
      "cpu": " AMD EPYC 7763 64-Core Processor",
      "cpu_count": 128,
      "cpu_ram_mb": 1082015.236096,
      "system": "Linux",
      "machine": "x86_64",
      "platform": "Linux-5.15.0-84-generic-x86_64-with-glibc2.35",
      "processor": "x86_64",
      "python_version": "3.10.12",
      "gpu": [
        "Advanced Micro Devices, Inc. [AMD/ATI]"
      ],
      "gpu_count": 1,
      "gpu_vram_mb": 68702699520,
      "optimum_benchmark_version": "0.3.1",
      "optimum_benchmark_commit": "10c9775dced65978a85f54ff6905a3aea7d61cca",
      "transformers_version": "4.42.4",
      "transformers_commit": null,
      "accelerate_version": "0.32.1",
      "accelerate_commit": null,
      "diffusers_version": "0.29.2",
      "diffusers_commit": null,
      "optimum_version": null,
      "optimum_commit": null,
      "timm_version": "1.0.7",
      "timm_commit": null,
      "peft_version": null,
      "peft_commit": null
    }
  },
  "report": {
    "forward": {
      "memory": {
        "unit": "MB",
        "max_ram": 1014.112256,
        "max_global_vram": 897.114112,
        "max_process_vram": 211712.212992,
        "max_reserved": 555.74528,
        "max_allocated": 499.443712
      },
      "latency": {
        "unit": "s",
        "count": 125,
        "total": 0.9959651637077337,
        "mean": 0.007967721309661864,
        "stdev": 0.0004717385269373396,
        "p50": 0.008071015357971192,
        "p90": 0.008473188972473145,
        "p95": 0.00855974884033203,
        "p99": 0.009477614898681645,
        "values": [
          0.008145414352416992,
          0.008341894149780274,
          0.008391493797302247,
          0.008519332885742187,
          0.00860349178314209,
          0.008547332763671875,
          0.008540932655334472,
          0.008500452995300292,
          0.008145894050598144,
          0.007577735900878907,
          0.007479816913604736,
          0.007440455913543701,
          0.007449096202850342,
          0.007404137134552002,
          0.0073410959243774415,
          0.008256934165954589,
          0.00856285285949707,
          0.007971333980560302,
          0.0075852560997009275,
          0.008306214332580567,
          0.00832541275024414,
          0.008157894134521485,
          0.008173893928527833,
          0.008158534049987794,
          0.008416933059692383,
          0.008454532623291015,
          0.00846013355255127,
          0.008338213920593262,
          0.008389573097229003,
          0.00836781406402588,
          0.008297574043273926,
          0.008250534057617187,
          0.008323013305664062,
          0.008280613899230956,
          0.008297252655029296,
          0.008272454261779786,
          0.008295973777770997,
          0.008272133827209472,
          0.008412294387817383,
          0.008642533302307128,
          0.00874461269378662,
          0.00858253288269043,
          0.008481892585754394,
          0.01016908836364746,
          0.008545733451843262,
          0.008142695426940919,
          0.008115654945373535,
          0.008162054061889648,
          0.009709089279174805,
          0.008264774322509766,
          0.008161093711853027,
          0.008163813591003419,
          0.00818221378326416,
          0.008144133567810059,
          0.008127655029296875,
          0.008126694679260254,
          0.008131653785705566,
          0.008095494270324707,
          0.008091974258422851,
          0.008146854400634765,
          0.008112614631652831,
          0.008246213912963866,
          0.008051654815673829,
          0.008107173919677734,
          0.008038213729858398,
          0.008041415214538574,
          0.00806429386138916,
          0.008069574356079101,
          0.008123173713684081,
          0.008071015357971192,
          0.008144774436950684,
          0.00812701416015625,
          0.008134373664855957,
          0.008081893920898437,
          0.007542535781860351,
          0.007566696166992188,
          0.007188296794891357,
          0.007231337070465088,
          0.0072780570983886714,
          0.0072209367752075196,
          0.0072535772323608395,
          0.007441256046295166,
          0.007651976108551025,
          0.00768365478515625,
          0.007679815769195557,
          0.007666535854339599,
          0.007223817825317383,
          0.0072354979515075685,
          0.007183658123016357,
          0.007304296016693115,
          0.007652935981750489,
          0.007301897048950195,
          0.007157416820526123,
          0.007193256855010986,
          0.007875975131988525,
          0.007588615894317627,
          0.007595176219940185,
          0.0076532549858093265,
          0.007651016235351562,
          0.0076537361145019535,
          0.007716615200042725,
          0.007695335865020752,
          0.00760397481918335,
          0.007589416027069092,
          0.0080631742477417,
          0.008087333679199218,
          0.008123973846435546,
          0.0077038159370422365,
          0.007636775970458984,
          0.007693895816802979,
          0.007657576084136963,
          0.007610054969787597,
          0.007539495944976806,
          0.00756717586517334,
          0.0076422162055969235,
          0.00765069580078125,
          0.007680295944213867,
          0.00766285514831543,
          0.007663815021514892,
          0.007806856155395508,
          0.0077374157905578615,
          0.007685255050659179,
          0.007704936027526856,
          0.007681735038757324,
          0.007683656215667725
        ]
      },
      "throughput": {
        "unit": "samples/s",
        "value": 125.50639776862856
      },
      "energy": null,
      "efficiency": null
    }
  }
}