Upload cuda_inference_transformers_text-classification_FacebookAI/roberta-base/benchmark.json with huggingface_hub
cuda_inference_transformers_text-classification_FacebookAI/roberta-base/benchmark.json CHANGED
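
The commit title states that the file was pushed with the huggingface_hub client. As a rough illustration only (the repository id and local path below are placeholders, not values taken from this commit), such an upload can be done with HfApi.upload_file:

# Minimal sketch: push a benchmark result file to a Hub dataset repo with huggingface_hub.
# repo_id and the local path are illustrative placeholders, not taken from this commit.
from huggingface_hub import HfApi

api = HfApi()
api.upload_file(
    path_or_fileobj="benchmark.json",  # local result file produced by the benchmark run
    path_in_repo="cuda_inference_transformers_text-classification_FacebookAI/roberta-base/benchmark.json",
    repo_id="<user-or-org>/benchmark-results",  # placeholder dataset repo
    repo_type="dataset",
    commit_message="Upload cuda_inference_transformers_text-classification_FacebookAI/roberta-base/benchmark.json with huggingface_hub",
)
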
@@ -6,17 +6,19 @@
     "version": "2.2.2",
     "_target_": "optimum_benchmark.backends.pytorch.backend.PyTorchBackend",
     "task": "text-classification",
-    "library": "transformers",
     "model": "FacebookAI/roberta-base",
-    "
+    "library": "transformers",
     "device": "cuda",
     "device_ids": "0",
     "seed": 42,
     "inter_op_num_threads": null,
     "intra_op_num_threads": null,
-    "
-
-
+    "hub_kwargs": {
+        "revision": "main",
+        "force_download": false,
+        "local_files_only": false,
+        "trust_remote_code": false
+    },
     "no_weights": true,
     "device_map": null,
     "torch_dtype": null,
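
The first hunk moves "library" after "model" and records an explicit hub_kwargs block. Those four keys correspond to keyword arguments that transformers accepts when loading a checkpoint from the Hub; as an illustration only (this is not code from the benchmark itself), the equivalent load call would look like:

# Illustration: how the recorded hub_kwargs map onto a transformers from_pretrained call.
# Not taken from this commit; shown only to explain the four keys added above.
from transformers import AutoModelForSequenceClassification

model = AutoModelForSequenceClassification.from_pretrained(
    "FacebookAI/roberta-base",
    revision="main",            # hub_kwargs["revision"]
    force_download=False,       # hub_kwargs["force_download"]
    local_files_only=False,     # hub_kwargs["local_files_only"]
    trust_remote_code=False,    # hub_kwargs["trust_remote_code"]
)
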
@@ -71,7 +73,7 @@
     "environment": {
         "cpu": " AMD EPYC 7R32",
         "cpu_count": 16,
-        "cpu_ram_mb": 66697.
+        "cpu_ram_mb": 66697.289728,
         "system": "Linux",
         "machine": "x86_64",
         "platform": "Linux-5.10.215-203.850.amzn2.x86_64-x86_64-with-glibc2.35",
@@ -86,7 +88,7 @@
     "optimum_benchmark_commit": null,
     "transformers_version": "4.40.2",
     "transformers_commit": null,
-    "accelerate_version": "0.30.
+    "accelerate_version": "0.30.1",
     "accelerate_commit": null,
     "diffusers_version": "0.27.2",
     "diffusers_commit": null,
@@ -102,7 +104,7 @@
     "forward": {
         "memory": {
             "unit": "MB",
-            "max_ram":
+            "max_ram": 889.749504,
             "max_global_vram": 1195.900928,
             "max_process_vram": 0.0,
             "max_reserved": 555.74528,
@@ -110,189 +112,187 @@
         },
         "latency": {
             "unit": "s",
-            "count":
-            "total": 0.
-            "mean": 0.
-            "stdev": 0.
-            "p50": 0.
-            "p90": 0.
-            "p95": 0.
-            "p99": 0.
+            "count": 156,
+            "total": 0.9976697611808775,
+            "mean": 0.006395318981928702,
+            "stdev": 0.00022351439414524803,
+            "p50": 0.00634009599685669,
+            "p90": 0.006650880098342896,
+            "p95": 0.0067164161205291745,
+            "p99": 0.007249152135848994,
             "values": [
                 ... (per-iteration latency samples in seconds; 156 entries in the updated file) ...
             ]
         },
         "throughput": {
             "unit": "samples/s",
-            "value":
+            "value": 156.36436631631778
         },
         "energy": {
             "unit": "kWh",
-            "cpu": 7.
-            "ram": 4.
-            "gpu": 1.
-            "total": 2.
+            "cpu": 7.437935512926845e-08,
+            "ram": 4.0496862412567234e-08,
+            "gpu": 1.308889935999996e-07,
+            "total": 2.457652111418353e-07
         },
         "efficiency": {
             "unit": "samples/kWh",
-            "value":
+            "value": 4068924.1384244696
         }
     }
 }
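
The updated summary block is internally consistent: throughput is the number of forward passes divided by their total latency (156 / 0.9976697611808775 ≈ 156.36 samples/s), and the count/total/mean/stdev/percentile fields summarize the raw "values" array. A small sketch of how these aggregates could be recomputed from the uploaded file, assuming numpy is available and using a placeholder repository id:

# Sketch: re-derive the latency summary and throughput from the raw samples in benchmark.json.
# The repo_id is a placeholder, not taken from this commit; numpy is assumed to be installed.
import json
import numpy as np
from huggingface_hub import hf_hub_download

path = hf_hub_download(
    repo_id="<user-or-org>/benchmark-results",  # placeholder dataset repo
    filename="cuda_inference_transformers_text-classification_FacebookAI/roberta-base/benchmark.json",
    repo_type="dataset",
)
with open(path) as f:
    data = json.load(f)

def find_key(node, key):
    # Depth-first search, since the exact nesting of the report is not shown in full above.
    if isinstance(node, dict):
        if key in node:
            return node[key]
        for child in node.values():
            found = find_key(child, key)
            if found is not None:
                return found
    return None

forward = find_key(data, "forward")                       # forward-pass report, as in the hunks above
values = np.asarray(forward["latency"]["values"], float)  # per-iteration latencies in seconds

print("count:", values.size)
print("total:", values.sum())
print("mean / stdev:", values.mean(), values.std())
print("p50 p90 p95 p99:", np.percentile(values, [50, 90, 95, 99]))
print("throughput (samples/s):", values.size / values.sum())  # 156 / 0.99767 ≈ 156.36
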