IlyasMoutawwakil (HF staff): Upload bert-base-uncased-feature-extraction-pytorch-2024-09-25-13-05-51/benchmark.json with huggingface_hub (commit 7daac2c, verified)
{
  "config": {
    "name": "bert-base-uncased-feature-extraction-pytorch-2024-09-25-13-05-51",
    "backend": {
      "name": "pytorch",
      "version": "2.3.0",
      "_target_": "optimum_benchmark.backends.pytorch.backend.PyTorchBackend",
      "task": "feature-extraction",
      "library": "transformers",
      "model_type": "bert",
      "model": "bert-base-uncased",
      "processor": "bert-base-uncased",
      "device": "cpu",
      "device_ids": null,
      "seed": 42,
      "inter_op_num_threads": null,
      "intra_op_num_threads": null,
      "model_kwargs": {},
      "processor_kwargs": {},
      "no_weights": false,
      "device_map": null,
      "torch_dtype": "float32",
      "eval_mode": true,
      "to_bettertransformer": false,
      "low_cpu_mem_usage": null,
      "attn_implementation": null,
      "cache_implementation": null,
      "autocast_enabled": false,
      "autocast_dtype": null,
      "torch_compile": false,
      "torch_compile_target": "forward",
      "torch_compile_config": {},
      "quantization_scheme": null,
      "quantization_config": {},
      "deepspeed_inference": false,
      "deepspeed_inference_config": {},
      "peft_type": null,
      "peft_config": {}
    },
    "scenario": {
      "name": "inference",
      "_target_": "optimum_benchmark.scenarios.inference.scenario.InferenceScenario",
      "iterations": 10,
      "duration": 10,
      "warmup_runs": 10,
      "input_shapes": {
        "batch_size": 2,
        "num_choices": 2,
        "sequence_length": 16
      },
      "new_tokens": null,
      "memory": false,
      "latency": true,
      "energy": false,
      "forward_kwargs": {},
      "generate_kwargs": {},
      "call_kwargs": {}
    },
    "launcher": {
      "name": "process",
      "_target_": "optimum_benchmark.launchers.process.launcher.ProcessLauncher",
      "device_isolation": false,
      "device_isolation_action": null,
      "numactl": false,
      "numactl_kwargs": "",
      "start_method": "spawn"
    },
    "environment": {
      "cpu": " 12th Gen Intel(R) Core(TM) i9-12900H",
      "cpu_count": 20,
      "cpu_ram_mb": 8162.504704,
      "system": "Linux",
      "machine": "x86_64",
      "platform": "Linux-5.15.153.1-microsoft-standard-WSL2-x86_64-with-glibc2.35",
      "processor": "x86_64",
      "python_version": "3.9.16",
      "gpu": [
        "NVIDIA GeForce RTX 3070 Ti Laptop GPU"
      ],
      "gpu_count": 1,
      "gpu_vram_mb": 8589934592,
      "optimum_benchmark_version": "0.5.0",
      "optimum_benchmark_commit": null,
      "transformers_version": "4.39.3",
      "transformers_commit": null,
      "accelerate_version": "0.22.0",
      "accelerate_commit": null,
      "diffusers_version": "0.30.3",
      "diffusers_commit": null,
      "optimum_version": "1.22.0",
      "optimum_commit": null,
      "timm_version": null,
      "timm_commit": null,
      "peft_version": "0.5.0",
      "peft_commit": null
    }
  },
  "report": {
    "load": {
      "memory": null,
      "latency": {
        "unit": "s",
        "count": 1,
        "total": 0.243447623000975,
        "mean": 0.243447623000975,
        "stdev": 0.0,
        "p50": 0.243447623000975,
        "p90": 0.243447623000975,
        "p95": 0.243447623000975,
        "p99": 0.243447623000975,
        "values": [
          0.243447623000975
        ]
      },
      "throughput": null,
      "energy": null,
      "efficiency": null
    },
    "forward": {
      "memory": null,
      "latency": {
        "unit": "s",
        "count": 251,
        "total": 10.025767425000595,
        "mean": 0.03994329651394659,
        "stdev": 0.006795598961452563,
        "p50": 0.03892265899958147,
        "p90": 0.046723460998691735,
        "p95": 0.049488473000565136,
        "p99": 0.056767529499666125,
        "values": [
          0.04140967999956047,
          0.04426534000049287,
          0.04335521500070172,
          0.040721633999055484,
          0.0360065040003974,
          0.037973784001223976,
          0.04303117900053621,
          0.04336507799962419,
          0.04171342300105607,
          0.041276488000221434,
          0.0356128589992295,
          0.03170641699944099,
          0.03539822200036724,
          0.04311604999929841,
          0.04647790800117946,
          0.037890878000325756,
          0.041082616000494454,
          0.03693612799906987,
          0.037327888001527754,
          0.038629034999758005,
          0.03502888499861001,
          0.03781679500025348,
          0.040340092000406,
          0.041923441998733324,
          0.040371756000240566,
          0.03646165799909795,
          0.036042644000190194,
          0.03979371599962178,
          0.03652547000092454,
          0.03392184900076245,
          0.036356615000840975,
          0.03358583899898804,
          0.037602166999931796,
          0.039822196999011794,
          0.04255021799872338,
          0.04128097899956629,
          0.041168256000673864,
          0.033279563000178314,
          0.034441255000274396,
          0.033010647999617504,
          0.03464433400040434,
          0.03915756700007478,
          0.03230477000033716,
          0.03352403600001708,
          0.03273983800136193,
          0.03899787299997115,
          0.034191196000392665,
          0.03291846399952192,
          0.03728161799881491,
          0.03874241599987727,
          0.036969364000469795,
          0.036796088999835774,
          0.047879998999633244,
          0.03528849100075604,
          0.0398191019994556,
          0.03806472099859093,
          0.03676271600124892,
          0.03661760799877811,
          0.038339115999406204,
          0.036128809000729234,
          0.034755548998873564,
          0.036648842000431614,
          0.0352679090010497,
          0.03331106599944178,
          0.03546584899959271,
          0.03424188000099093,
          0.03408887399928062,
          0.04515426200123329,
          0.041802026998993824,
          0.043356960000892286,
          0.03630857899952389,
          0.03299766900090617,
          0.038977047999651404,
          0.053462158000911586,
          0.041908918999979505,
          0.04597193299923674,
          0.0501673100006883,
          0.03997264999998151,
          0.08097437600008561,
          0.04790816100103257,
          0.04785499599893228,
          0.04033438300029957,
          0.03732819099968765,
          0.036809348999668146,
          0.04489737499898183,
          0.05129487399972277,
          0.04555075099960959,
          0.03893319800044992,
          0.034023103000436095,
          0.03357020399926114,
          0.03668714000014006,
          0.032928486998571316,
          0.040037583999946946,
          0.05231058000026678,
          0.044693247000395786,
          0.040996794999955455,
          0.03531728500092868,
          0.03595876600047632,
          0.034573072000057437,
          0.03645827899890719,
          0.04093122899939772,
          0.04630876699957298,
          0.04534182899988082,
          0.04195916299977398,
          0.0402147929999046,
          0.04531697600032203,
          0.03437311000016052,
          0.036727998000060325,
          0.04401456699997652,
          0.040451363998727174,
          0.03397782200045185,
          0.04020345599928987,
          0.03973420700094721,
          0.03689013100120064,
          0.04254719199889223,
          0.04168791099982627,
          0.034324019001360284,
          0.04248134400040726,
          0.040693760000067414,
          0.03597001900016039,
          0.036513775999992504,
          0.03342521999911696,
          0.04059156599942071,
          0.03050467200046114,
          0.03552321400093206,
          0.03837382999881811,
          0.03971367299891426,
          0.04159295300087251,
          0.03961526100101764,
          0.03492181799992977,
          0.033771775000786874,
          0.04892243099857296,
          0.04159555099977297,
          0.03360729799896944,
          0.03765634999945178,
          0.03422505999878922,
          0.032780204001028324,
          0.03427113200086751,
          0.03120312200007902,
          0.03583974399953149,
          0.03930401500110747,
          0.03858250899975246,
          0.037816969999767025,
          0.03552929900070012,
          0.0365712259990687,
          0.035685250999449636,
          0.04068836599981296,
          0.04059191499982262,
          0.037011681000876706,
          0.03239092600051663,
          0.03514259000075981,
          0.04012494900052843,
          0.04331657900002028,
          0.03713826600142056,
          0.0365737119991536,
          0.038559058999453555,
          0.03427029799968295,
          0.04049523899993801,
          0.0979738570003974,
          0.049913320999621646,
          0.04421495000133291,
          0.04336414799945487,
          0.044160793999253656,
          0.048388109000370605,
          0.05083520699918154,
          0.045202848999906564,
          0.047154052001133095,
          0.04335056499985512,
          0.04728519300078915,
          0.04454470800010313,
          0.0370318879995466,
          0.03302354699917487,
          0.037742174999948475,
          0.037081389000377385,
          0.03881162400102767,
          0.03888378599913267,
          0.03748597800040443,
          0.04126267200081202,
          0.03864905800037377,
          0.0432513169998856,
          0.041370564000317245,
          0.045687308000196936,
          0.035481097000229056,
          0.03420309800094401,
          0.034445744999175076,
          0.03357387899995956,
          0.03265024500069558,
          0.031059044998983154,
          0.03581253199990897,
          0.03545339999982389,
          0.03319400100008352,
          0.03616060700005619,
          0.037111376999746426,
          0.03151596099996823,
          0.0351212570003554,
          0.03803590800089296,
          0.0358772420004243,
          0.04753761699976167,
          0.03906237100090948,
          0.036951066000256105,
          0.040257472999655874,
          0.03727524999885645,
          0.0380510469985893,
          0.034669356000449625,
          0.045622498999364325,
          0.04536155399910058,
          0.04345013599959202,
          0.042764802999954554,
          0.0340042080006242,
          0.03985111299880373,
          0.03993925299982948,
          0.03480199499972514,
          0.032251153999823146,
          0.03212300399900414,
          0.035676232999321655,
          0.03478069300035713,
          0.03536861899920041,
          0.03295161200003349,
          0.03892265899958147,
          0.03990881500067189,
          0.03972912800054473,
          0.04009784099980607,
          0.045549349999419064,
          0.045431008000377915,
          0.05861185299909266,
          0.04916253400006099,
          0.042337075999967055,
          0.05395946700082277,
          0.05492320600023959,
          0.049643819000266376,
          0.050807732999601285,
          0.04685425900061091,
          0.046723460998691735,
          0.0493331270008639,
          0.0474537819991383,
          0.045676591000301414,
          0.04229911699985678,
          0.03895158200066362,
          0.04213146900110587,
          0.04149395300009928,
          0.038362896999387885,
          0.041494618000797345,
          0.038900188001207425,
          0.039899826000691974,
          0.04145743200024299,
          0.04558495699893683,
          0.045513078999647405,
          0.0452049800005625,
          0.043759135000073,
          0.04287937300068734,
          0.04591426700062584
        ]
      },
      "throughput": {
        "unit": "samples/s",
        "value": 50.070979977871396
      },
      "energy": null,
      "efficiency": null
    }
  }
}
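
Note: the "config" block above corresponds to optimum-benchmark's Python API (a backend, scenario, and launcher config wrapped in a BenchmarkConfig). A rough reproduction sketch follows; the class and argument names are taken from the config keys above and from optimum-benchmark 0.5.0, so treat them as assumptions rather than a verified script.

# Rough reproduction sketch (assumption: optimum-benchmark 0.5.0 Python API).
from optimum_benchmark import Benchmark, BenchmarkConfig, InferenceConfig, ProcessConfig, PyTorchConfig

backend_config = PyTorchConfig(
    model="bert-base-uncased",
    task="feature-extraction",
    device="cpu",
    torch_dtype="float32",
)
scenario_config = InferenceConfig(
    iterations=10,
    duration=10,
    warmup_runs=10,
    latency=True,
    memory=False,
    energy=False,
    input_shapes={"batch_size": 2, "num_choices": 2, "sequence_length": 16},
)
launcher_config = ProcessConfig(device_isolation=False, start_method="spawn")

benchmark_config = BenchmarkConfig(
    name="bert-base-uncased-feature-extraction-pytorch",
    backend=backend_config,
    scenario=scenario_config,
    launcher=launcher_config,
)

if __name__ == "__main__":
    benchmark_report = Benchmark.launch(benchmark_config)

The summary statistics and the reported throughput in "report.forward" follow directly from the raw per-call latencies and the configured batch size (samples/s = batch_size * count / total). A minimal sketch in plain Python, assuming the JSON above is saved locally as "benchmark.json" (hypothetical path), recomputes them:

# Minimal analysis sketch (assumption: the JSON above is saved as "benchmark.json").
import json
import statistics

with open("benchmark.json") as f:
    benchmark = json.load(f)

latency = benchmark["report"]["forward"]["latency"]
values = latency["values"]
batch_size = benchmark["config"]["scenario"]["input_shapes"]["batch_size"]

total = sum(values)                # ~10.03 s across 251 forward calls
mean = total / len(values)         # ~0.0399 s per call
spread = statistics.stdev(values)  # ~0.0068 s

# samples/s = batch_size * number_of_calls / total_time, which matches the
# ~50.07 samples/s reported in "report.forward.throughput".
throughput = batch_size * len(values) / total

print(f"mean latency: {mean:.4f} s (stdev {spread:.4f} s)")
print(f"throughput:   {throughput:.2f} samples/s")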