{
"config": {
"name": "cpu_inference_transformers_fill-mask_google-bert/bert-base-uncased",
"backend": {
"name": "pytorch",
"version": "2.3.1+cpu",
"_target_": "optimum_benchmark.backends.pytorch.backend.PyTorchBackend",
"task": "fill-mask",
"library": "transformers",
"model_type": "bert",
"model": "google-bert/bert-base-uncased",
"processor": "google-bert/bert-base-uncased",
"device": "cpu",
"device_ids": null,
"seed": 42,
"inter_op_num_threads": null,
"intra_op_num_threads": null,
"model_kwargs": {},
"processor_kwargs": {},
"no_weights": true,
"device_map": null,
"torch_dtype": null,
"eval_mode": true,
"to_bettertransformer": false,
"low_cpu_mem_usage": null,
"attn_implementation": null,
"cache_implementation": null,
"autocast_enabled": false,
"autocast_dtype": null,
"torch_compile": false,
"torch_compile_target": "forward",
"torch_compile_config": {},
"quantization_scheme": null,
"quantization_config": {},
"deepspeed_inference": false,
"deepspeed_inference_config": {},
"peft_type": null,
"peft_config": {}
},
"scenario": {
"name": "inference",
"_target_": "optimum_benchmark.scenarios.inference.scenario.InferenceScenario",
"iterations": 1,
"duration": 1,
"warmup_runs": 1,
"input_shapes": {
"batch_size": 1,
"num_choices": 2,
"sequence_length": 2
},
"new_tokens": null,
"memory": true,
"latency": true,
"energy": true,
"forward_kwargs": {},
"generate_kwargs": {
"max_new_tokens": 2,
"min_new_tokens": 2
},
"call_kwargs": {
"num_inference_steps": 2
}
},
"launcher": {
"name": "process",
"_target_": "optimum_benchmark.launchers.process.launcher.ProcessLauncher",
"device_isolation": false,
"device_isolation_action": "error",
"numactl": false,
"numactl_kwargs": {},
"start_method": "spawn"
},
"environment": {
"cpu": " AMD EPYC 7742 64-Core Processor",
"cpu_count": 128,
"cpu_ram_mb": 540671.660032,
"system": "Linux",
"machine": "x86_64",
"platform": "Linux-5.4.0-166-generic-x86_64-with-glibc2.35",
"processor": "x86_64",
"python_version": "3.10.12",
"optimum_benchmark_version": "0.4.0",
"optimum_benchmark_commit": null,
"transformers_version": "4.44.0",
"transformers_commit": null,
"accelerate_version": "0.33.0",
"accelerate_commit": null,
"diffusers_version": "0.30.0",
"diffusers_commit": null,
"optimum_version": null,
"optimum_commit": null,
"timm_version": "1.0.8",
"timm_commit": null,
"peft_version": "0.12.0",
"peft_commit": null
}
},
"report": {
"load": {
"memory": {
"unit": "MB",
"max_ram": 956.5184,
"max_global_vram": null,
"max_process_vram": null,
"max_reserved": null,
"max_allocated": null
},
"latency": {
"unit": "s",
"count": 1,
"total": 5.060118122026324,
"mean": 5.060118122026324,
"stdev": 0.0,
"p50": 5.060118122026324,
"p90": 5.060118122026324,
"p95": 5.060118122026324,
"p99": 5.060118122026324,
"values": [
5.060118122026324
]
},
"throughput": null,
"energy": {
"unit": "kWh",
"cpu": 5.270721286069602e-05,
"ram": 8.84559721890803e-05,
                    "gpu": 0.0,
"total": 0.00014116318504977631
},
"efficiency": null
},
"forward": {
"memory": {
"unit": "MB",
"max_ram": 866.750464,
"max_global_vram": null,
"max_process_vram": null,
"max_reserved": null,
"max_allocated": null
},
"latency": {
"unit": "s",
"count": 72,
"total": 1.005678259767592,
"mean": 0.013967753607883222,
"stdev": 0.0004886843289281364,
"p50": 0.014004338532686234,
"p90": 0.0144935367628932,
"p95": 0.014938362780958414,
"p99": 0.015165246101096274,
"values": [
0.014370780438184738,
0.014929007738828659,
0.014153111726045609,
0.014369908720254898,
0.013790772296488285,
0.01381617970764637,
0.014959964901208878,
0.014098750427365303,
0.014065638184547424,
0.014949796721339226,
0.014665953814983368,
0.014408290386199951,
0.014495604671537876,
0.015389650128781796,
0.014041203074157238,
0.015073588117957115,
0.014353647828102112,
0.014154563657939434,
0.014183458872139454,
0.013983534649014473,
0.014474925585091114,
0.014017948880791664,
0.014457612298429012,
0.014262687414884567,
0.014247548766434193,
0.014294416643679142,
0.01395251601934433,
0.014385919086635113,
0.014030912891030312,
0.014647028408944607,
0.014456741511821747,
0.014318753033876419,
0.013666810467839241,
0.013003306463360786,
0.01324289571493864,
0.013424897566437721,
0.014007849618792534,
0.013886784203350544,
0.01356973871588707,
0.013442880474030972,
0.013466916047036648,
0.013479058630764484,
0.014144276268780231,
0.013504386879503727,
0.013514935970306396,
0.013512711971998215,
0.013484789989888668,
0.013452778570353985,
0.013494557701051235,
0.013536275364458561,
0.013936275616288185,
0.013614563271403313,
0.013594084419310093,
0.013588945381343365,
0.013479079119861126,
0.013717835769057274,
0.013571111485362053,
0.013529453426599503,
0.014000827446579933,
0.01356938760727644,
0.013408997096121311,
0.01338641531765461,
0.013330521062016487,
0.013386826030910015,
0.013561554253101349,
0.01340545155107975,
0.01410538237541914,
0.014211362227797508,
0.014103279449045658,
0.014150678180158138,
0.014198146760463715,
0.014191814698278904
]
},
"throughput": {
"unit": "samples/s",
"value": 71.5934736589005
},
"energy": {
"unit": "kWh",
"cpu": 4.246568537751414e-07,
"ram": 7.125636966945459e-07,
"gpu": 0.0,
"total": 1.1372205504696873e-06
},
"efficiency": {
"unit": "samples/kWh",
"value": 879336.9057453162
}
}
}
}