{
    "config": {
        "name": "2024-09-25-18-39-35",
        "backend": {
            "name": "pytorch",
            "version": "2.3.0",
            "_target_": "optimum_benchmark.backends.pytorch.backend.PyTorchBackend",
            "task": "feature-extraction",
            "library": "transformers",
            "model_type": "bert",
            "model": "bert-base-uncased",
            "processor": "bert-base-uncased",
            "device": "cpu",
            "device_ids": null,
            "seed": 42,
            "inter_op_num_threads": null,
            "intra_op_num_threads": null,
            "model_kwargs": {},
            "processor_kwargs": {},
            "no_weights": false,
            "device_map": null,
            "torch_dtype": "float32",
            "eval_mode": true,
            "to_bettertransformer": false,
            "low_cpu_mem_usage": null,
            "attn_implementation": null,
            "cache_implementation": null,
            "autocast_enabled": false,
            "autocast_dtype": null,
            "torch_compile": false,
            "torch_compile_target": "forward",
            "torch_compile_config": {},
            "quantization_scheme": null,
            "quantization_config": {},
            "deepspeed_inference": false,
            "deepspeed_inference_config": {},
            "peft_type": null,
            "peft_config": {}
        },
        "scenario": {
            "name": "inference",
            "_target_": "optimum_benchmark.scenarios.inference.scenario.InferenceScenario",
            "iterations": 10,
            "duration": 10,
            "warmup_runs": 10,
            "input_shapes": {
                "batch_size": 2,
                "num_choices": 2,
                "sequence_length": 16
            },
            "new_tokens": null,
            "memory": false,
            "latency": true,
            "energy": false,
            "forward_kwargs": {},
            "generate_kwargs": {},
            "call_kwargs": {}
        },
        "launcher": {
            "name": "process",
            "_target_": "optimum_benchmark.launchers.process.launcher.ProcessLauncher",
            "device_isolation": false,
            "device_isolation_action": null,
            "numactl": false,
            "numactl_kwargs": "",
            "start_method": "spawn"
        },
        "environment": {
            "cpu": " 12th Gen Intel(R) Core(TM) i9-12900H",
            "cpu_count": 20,
            "cpu_ram_mb": 8162.504704,
            "system": "Linux",
            "machine": "x86_64",
            "platform": "Linux-5.15.153.1-microsoft-standard-WSL2-x86_64-with-glibc2.35",
            "processor": "x86_64",
            "python_version": "3.9.16",
            "gpu": [
                "NVIDIA GeForce RTX 3070 Ti Laptop GPU"
            ],
            "gpu_count": 1,
            "gpu_vram_mb": 8589934592,
            "optimum_benchmark_version": "0.5.0",
            "optimum_benchmark_commit": null,
            "transformers_version": "4.39.3",
            "transformers_commit": null,
            "accelerate_version": "0.22.0",
            "accelerate_commit": null,
            "diffusers_version": "0.30.3",
            "diffusers_commit": null,
            "optimum_version": "1.22.0",
            "optimum_commit": null,
            "timm_version": null,
            "timm_commit": null,
            "peft_version": "0.5.0",
            "peft_commit": null
        }
    },
    "report": {
        "load": {
            "memory": null,
            "latency": {
                "unit": "s",
                "count": 1,
                "total": 0.4404991900009918,
                "mean": 0.4404991900009918,
                "stdev": 0.0,
                "p50": 0.4404991900009918,
                "p90": 0.4404991900009918,
                "p95": 0.4404991900009918,
                "p99": 0.4404991900009918,
                "values": [
                    0.4404991900009918
                ]
            },
            "throughput": null,
            "energy": null,
            "efficiency": null
        },
        "forward": {
            "memory": null,
            "latency": {
                "unit": "s",
                "count": 252,
                "total": 10.015229850869218,
                "mean": 0.03974297559868738,
                "stdev": 0.005742631877401999,
                "p50": 0.03893914049695013,
                "p90": 0.04605881119787227,
                "p95": 0.050126781397557336,
                "p99": 0.06282702282311224,
                "values": [
                    0.04899040599411819,
                    0.042815278997295536,
                    0.03908473299816251,
                    0.034543068999482784,
                    0.03309990699926857,
                    0.03336829700128874,
                    0.03398419899895089,
                    0.03334572399762692,
                    0.03200882300006924,
                    0.028675909001321997,
                    0.0317283900003531,
                    0.03353927299758652,
                    0.03402288099459838,
                    0.030377532995771617,
                    0.03241705399705097,
                    0.03226674299367005,
                    0.029186651001509745,
                    0.030224084002838936,
                    0.03152628299721982,
                    0.03340667100565042,
                    0.047515300997474696,
                    0.03685521199804498,
                    0.042165336999460123,
                    0.039128009993874,
                    0.03519364700332517,
                    0.033515962000819854,
                    0.03703717399912421,
                    0.035035058004723396,
                    0.034997150003619026,
                    0.043450605000543874,
                    0.036492041996098123,
                    0.041818154000793584,
                    0.03814117400179384,
                    0.037572378998447675,
                    0.03771013400546508,
                    0.03632617300172569,
                    0.04070184499869356,
                    0.037627613004588056,
                    0.035981411005195696,
                    0.0344188130038674,
                    0.03633717299817363,
                    0.037216611999610905,
                    0.03671217399823945,
                    0.03593159499723697,
                    0.03889387299568625,
                    0.03738952599815093,
                    0.03528978100075619,
                    0.04053263599780621,
                    0.03884881599515211,
                    0.039335531000688206,
                    0.03974449299857952,
                    0.04272111099999165,
                    0.044260508999286685,
                    0.04152893100399524,
                    0.03927340099471621,
                    0.03947146699647419,
                    0.03507376999914413,
                    0.03649390600185143,
                    0.035586470999987796,
                    0.033030379003321286,
                    0.03349058599997079,
                    0.03328011999838054,
                    0.03371976399648702,
                    0.03802555800211849,
                    0.03812191000179155,
                    0.042958723002811894,
                    0.038010979995306116,
                    0.03842924699711148,
                    0.03841162699973211,
                    0.0368064400026924,
                    0.039673095001489855,
                    0.03575109200028237,
                    0.0350475750019541,
                    0.03680976999748964,
                    0.04712750500038965,
                    0.033414691999496426,
                    0.03627826400042977,
                    0.03826910600037081,
                    0.041709138997248374,
                    0.037794324998685624,
                    0.032599181999103166,
                    0.036899897997500375,
                    0.03599880299589131,
                    0.0378871020002407,
                    0.03845229100261349,
                    0.039989371005503926,
                    0.03779220899741631,
                    0.034093426998879295,
                    0.035503802995663136,
                    0.03953923300286988,
                    0.046363528999791015,
                    0.04422000700287754,
                    0.0610588980052853,
                    0.06532843899913132,
                    0.05691179799759993,
                    0.040733919995545875,
                    0.03551298499951372,
                    0.03898440799821401,
                    0.037802769998961594,
                    0.04348975199536653,
                    0.04026014899864094,
                    0.04035071999533102,
                    0.04269444700184977,
                    0.042913719997159205,
                    0.03941228299663635,
                    0.03869527699862374,
                    0.0385083270011819,
                    0.044286676005867776,
                    0.054766806002589874,
                    0.050884810007119086,
                    0.04170769099437166,
                    0.040633188997162506,
                    0.036717862996738404,
                    0.04237594499863917,
                    0.04038509199745022,
                    0.040174151996325236,
                    0.03956635900249239,
                    0.034230775003379676,
                    0.03491230900544906,
                    0.03416358200047398,
                    0.033246597995457705,
                    0.038208524994843174,
                    0.03848343800200382,
                    0.04256212600012077,
                    0.04888024499814492,
                    0.035987479001050815,
                    0.038414025999372825,
                    0.03869335899798898,
                    0.035876326997822616,
                    0.03782076900097309,
                    0.03563289199519204,
                    0.03718385100364685,
                    0.03501259200129425,
                    0.03530150799633702,
                    0.041459805994236376,
                    0.0410386989969993,
                    0.043717997999920044,
                    0.040951710994704627,
                    0.041610517000663094,
                    0.044568072000402026,
                    0.04794325299735647,
                    0.047663778997957706,
                    0.040018599000177346,
                    0.04127529700053856,
                    0.03934888300136663,
                    0.044786430000385735,
                    0.0440426569984993,
                    0.04194546999497106,
                    0.051678251002158504,
                    0.05313485799706541,
                    0.048006814999098424,
                    0.0495544389996212,
                    0.03961917499691481,
                    0.04741559400281403,
                    0.05241631300305016,
                    0.03913883100176463,
                    0.044403815001714975,
                    0.042739953998534475,
                    0.04216369899950223,
                    0.04747926499840105,
                    0.045683753000048455,
                    0.07107584299956216,
                    0.048544610996032134,
                    0.0646673160008504,
                    0.050826310995034873,
                    0.05542013300146209,
                    0.04249467399495188,
                    0.042038725994643755,
                    0.04070095800125273,
                    0.04016656200110447,
                    0.040207598998676986,
                    0.04127512099512387,
                    0.04087191900180187,
                    0.045571795002615545,
                    0.040467371996783186,
                    0.03770307599916123,
                    0.05197845499787945,
                    0.04433975300344173,
                    0.041489372997602914,
                    0.041728105999936815,
                    0.04016000300180167,
                    0.0426388819978456,
                    0.04076775300200097,
                    0.042954156997438986,
                    0.04104360299970722,
                    0.041668146004667506,
                    0.039548977998492774,
                    0.039242856997589115,
                    0.03911634699761635,
                    0.04001419599808287,
                    0.04289905200130306,
                    0.039834432005591225,
                    0.03876793399831513,
                    0.041963249997934327,
                    0.041466156995738856,
                    0.039296247996389866,
                    0.0438131280025118,
                    0.0460004749984364,
                    0.03843835999578005,
                    0.04606529299780959,
                    0.04250747899641283,
                    0.04190356199978851,
                    0.0340156090023811,
                    0.03620198299904587,
                    0.037120389999472536,
                    0.032819624997500796,
                    0.03495316299813567,
                    0.03632776000449667,
                    0.03435257000091951,
                    0.035429786003078334,
                    0.0338895329987281,
                    0.039082054994651116,
                    0.03707176299940329,
                    0.03596253199793864,
                    0.03455382600077428,
                    0.035864934005076066,
                    0.03975507699942682,
                    0.03639991700038081,
                    0.03955922100431053,
                    0.03614759800257161,
                    0.0348923629935598,
                    0.03791975000058301,
                    0.03816556299716467,
                    0.04100097299669869,
                    0.038040118997741956,
                    0.03928876300051343,
                    0.038448222003353294,
                    0.04015361100027803,
                    0.038083574996562675,
                    0.04231058400182519,
                    0.03846807700028876,
                    0.038401525001972914,
                    0.039889068000775296,
                    0.042595484999765176,
                    0.03865843800303992,
                    0.037265788996592164,
                    0.0378204509979696,
                    0.035668928001541644,
                    0.038669847999699414,
                    0.0382528049958637,
                    0.03372610700171208,
                    0.035594289998698514,
                    0.036390797002241015,
                    0.03477477500564419,
                    0.03669727599481121,
                    0.03640892000112217,
                    0.04093301000102656,
                    0.0414314480003668,
                    0.04062247399997432,
                    0.04014685899892356,
                    0.03928500600159168,
                    0.03503117399668554
                ]
            },
            "throughput": {
                "unit": "samples/s",
                "value": 50.323358275822095
            },
            "energy": null,
            "efficiency": null
        }
    }
}