{
    "config": {
        "name": "bert-base-uncased-feature-extraction-pytorch-2024-09-25-12-57-16",
        "backend": {
            "name": "pytorch",
            "version": "2.3.0",
            "_target_": "optimum_benchmark.backends.pytorch.backend.PyTorchBackend",
            "task": "feature-extraction",
            "library": "transformers",
            "model_type": "bert",
            "model": "bert-base-uncased",
            "processor": "bert-base-uncased",
            "device": "cpu",
            "device_ids": null,
            "seed": 42,
            "inter_op_num_threads": null,
            "intra_op_num_threads": null,
            "model_kwargs": {},
            "processor_kwargs": {},
            "no_weights": false,
            "device_map": null,
            "torch_dtype": "float32",
            "eval_mode": true,
            "to_bettertransformer": false,
            "low_cpu_mem_usage": null,
            "attn_implementation": null,
            "cache_implementation": null,
            "autocast_enabled": false,
            "autocast_dtype": null,
            "torch_compile": false,
            "torch_compile_target": "forward",
            "torch_compile_config": {},
            "quantization_scheme": null,
            "quantization_config": {},
            "deepspeed_inference": false,
            "deepspeed_inference_config": {},
            "peft_type": null,
            "peft_config": {}
        },
        "scenario": {
            "name": "inference",
            "_target_": "optimum_benchmark.scenarios.inference.scenario.InferenceScenario",
            "iterations": 10,
            "duration": 10,
            "warmup_runs": 10,
            "input_shapes": {
                "batch_size": 2,
                "num_choices": 2,
                "sequence_length": 16
            },
            "new_tokens": null,
            "memory": false,
            "latency": true,
            "energy": false,
            "forward_kwargs": {},
            "generate_kwargs": {},
            "call_kwargs": {}
        },
        "launcher": {
            "name": "process",
            "_target_": "optimum_benchmark.launchers.process.launcher.ProcessLauncher",
            "device_isolation": false,
            "device_isolation_action": null,
            "numactl": false,
            "numactl_kwargs": {},
            "start_method": "spawn"
        },
        "environment": {
            "cpu": " 12th Gen Intel(R) Core(TM) i9-12900H",
            "cpu_count": 20,
            "cpu_ram_mb": 8162.504704,
            "system": "Linux",
            "machine": "x86_64",
            "platform": "Linux-5.15.153.1-microsoft-standard-WSL2-x86_64-with-glibc2.35",
            "processor": "x86_64",
            "python_version": "3.9.16",
            "gpu": [
                "NVIDIA GeForce RTX 3070 Ti Laptop GPU"
            ],
            "gpu_count": 1,
            "gpu_vram_mb": 8589934592,
            "optimum_benchmark_version": "0.5.0",
            "optimum_benchmark_commit": null,
            "transformers_version": "4.39.3",
            "transformers_commit": null,
            "accelerate_version": "0.22.0",
            "accelerate_commit": null,
            "diffusers_version": "0.30.3",
            "diffusers_commit": null,
            "optimum_version": "1.22.0",
            "optimum_commit": null,
            "timm_version": null,
            "timm_commit": null,
            "peft_version": "0.5.0",
            "peft_commit": null
        }
    },
    "report": {
        "load": {
            "memory": null,
            "latency": {
                "unit": "s",
                "count": 1,
                "total": 0.21163445799902547,
                "mean": 0.21163445799902547,
                "stdev": 0.0,
                "p50": 0.21163445799902547,
                "p90": 0.21163445799902547,
                "p95": 0.21163445799902547,
                "p99": 0.21163445799902547,
                "values": [
                    0.21163445799902547
                ]
            },
            "throughput": null,
            "energy": null,
            "efficiency": null
        },
        "forward": {
            "memory": null,
            "latency": {
                "unit": "s",
                "count": 227,
                "total": 10.001136720997238,
                "mean": 0.044057871017608975,
                "stdev": 0.004844031775589998,
                "p50": 0.043573724000452785,
                "p90": 0.048108165599114724,
                "p95": 0.04978989289993478,
                "p99": 0.05818846191985358,
                "values": [
                    0.03815077699982794,
                    0.04284172799998487,
                    0.04165696800009755,
                    0.04044770699874789,
                    0.04038155399939569,
                    0.034321953000471694,
                    0.03675424299945007,
                    0.04314277400044375,
                    0.036722482000186574,
                    0.03852093299974513,
                    0.0408456870009104,
                    0.036498060000667465,
                    0.0372420499988948,
                    0.034404009998979745,
                    0.040136239000275964,
                    0.03705203500067,
                    0.03852554900004179,
                    0.04217101700123749,
                    0.04277487000035762,
                    0.039127826001276844,
                    0.049819564999779686,
                    0.044688030999168404,
                    0.03840092300015385,
                    0.04430563200003235,
                    0.0382647589995031,
                    0.04258946500158345,
                    0.0450386439988506,
                    0.044456805000663735,
                    0.037400117000288446,
                    0.040190182999140234,
                    0.04339363000144658,
                    0.04294859000037832,
                    0.04767369099863572,
                    0.04336862899981497,
                    0.04187604599974293,
                    0.042637921000277856,
                    0.04073244000028353,
                    0.044239204999030335,
                    0.042724457000076654,
                    0.04458731599879684,
                    0.03983374300150899,
                    0.04573074799918686,
                    0.03954862400132697,
                    0.04209167699991667,
                    0.04336080600114656,
                    0.04572039500089886,
                    0.036588389999451465,
                    0.0442104069989,
                    0.04381239499889489,
                    0.043810007999127265,
                    0.0440916229999857,
                    0.045281479999175644,
                    0.04235721200166154,
                    0.04728463600076793,
                    0.0442924220005807,
                    0.043475515998579795,
                    0.040809688998706406,
                    0.04622284499964735,
                    0.045039089000056265,
                    0.0444460060007259,
                    0.042437644999154145,
                    0.0441015120013617,
                    0.043960131999483565,
                    0.041908679999323795,
                    0.04309701499914809,
                    0.04534891200091806,
                    0.04106510500059812,
                    0.04131245200005651,
                    0.04170111299936252,
                    0.04581119300019054,
                    0.050510524000856094,
                    0.045779194000715506,
                    0.044977478000873816,
                    0.041313117999379756,
                    0.042676826000388246,
                    0.04261777499959862,
                    0.04316627299886022,
                    0.041810534999967786,
                    0.0430216479999217,
                    0.0444446789988433,
                    0.0439651220003725,
                    0.04151693099993281,
                    0.043573724000452785,
                    0.04248226900017471,
                    0.04833243500070239,
                    0.04740680299983069,
                    0.04646829599914781,
                    0.04286772800151084,
                    0.041121788999589626,
                    0.04342508500121767,
                    0.04451153699847055,
                    0.047042807998877834,
                    0.0471968370002287,
                    0.04466002500157629,
                    0.04260592500031635,
                    0.04052886600038619,
                    0.04474269499951333,
                    0.0386294239997369,
                    0.04333555799894384,
                    0.039977865999389905,
                    0.0434575790004601,
                    0.04452780300016457,
                    0.04558389100020577,
                    0.0443559790001018,
                    0.042368737998913275,
                    0.042223829001159174,
                    0.04284816599829355,
                    0.04779753599905234,
                    0.04324114199880569,
                    0.044546933999299654,
                    0.04559323299872631,
                    0.0407937710006081,
                    0.0414077109999198,
                    0.04430668799977866,
                    0.045467121999536175,
                    0.044657099000687595,
                    0.040830649000781705,
                    0.048744277000878355,
                    0.03911088999848289,
                    0.04820981299963023,
                    0.04329602899997553,
                    0.048083117999340175,
                    0.03953064100096526,
                    0.041746609000256285,
                    0.04146359799960919,
                    0.039430951999747776,
                    0.0451678310000716,
                    0.04255272999944282,
                    0.0455301199999667,
                    0.042760743001053925,
                    0.046520261999830836,
                    0.048145736998776556,
                    0.04710020899983647,
                    0.04624329000034777,
                    0.04292400099984661,
                    0.03962627800137852,
                    0.042185195999991265,
                    0.03713873400010925,
                    0.04797647399936977,
                    0.04620297799920081,
                    0.0463772399998561,
                    0.04358592100106762,
                    0.04395960100009688,
                    0.042370221999590285,
                    0.042261589000190725,
                    0.044540737000716035,
                    0.043234405999101,
                    0.04224513099870819,
                    0.04498892199990223,
                    0.04354401099953975,
                    0.044581931999346125,
                    0.035908727999412804,
                    0.039222316998348106,
                    0.04530707699996128,
                    0.0462900010006706,
                    0.047450648999074474,
                    0.047666995000327006,
                    0.04095155200047884,
                    0.04207379299987224,
                    0.04358295300153259,
                    0.042259328000000096,
                    0.046603247999883024,
                    0.04221145999872533,
                    0.04364184299993212,
                    0.0453056820006168,
                    0.042234069000187446,
                    0.0452455149988964,
                    0.041958918000091217,
                    0.0423393639994174,
                    0.04288401200028602,
                    0.034834447000321234,
                    0.04106049300025916,
                    0.040114214998538955,
                    0.04513658800169651,
                    0.04512483700091252,
                    0.04431954299980134,
                    0.0411927539989847,
                    0.04544970899951295,
                    0.04303791400161572,
                    0.03683460100000957,
                    0.04517204799958563,
                    0.04363813100098923,
                    0.04235966000123881,
                    0.04058173699922918,
                    0.04720091799936199,
                    0.05605703799847106,
                    0.05380112800048664,
                    0.063203715999407,
                    0.08490149899989774,
                    0.05616209400068328,
                    0.04874078699867823,
                    0.05816296299963142,
                    0.04929178400016099,
                    0.04452332399887382,
                    0.04972065800029668,
                    0.05085855300058029,
                    0.04931387299984635,
                    0.046880676000000676,
                    0.044706668000799255,
                    0.043348294000679743,
                    0.04556250200039358,
                    0.05680821200076025,
                    0.04698229099994933,
                    0.047118067001065356,
                    0.04380583900092461,
                    0.04508574999999837,
                    0.045398220001516165,
                    0.05819742099993164,
                    0.053218049000861356,
                    0.047733970000990666,
                    0.04235678499935602,
                    0.044074357001591125,
                    0.049097696000899305,
                    0.04043576599906373,
                    0.041585630000554374,
                    0.04095260000030976,
                    0.04852376100097899,
                    0.04633160599951225,
                    0.042648540998925455,
                    0.043078263001007144,
                    0.046696336999957566,
                    0.04572774499865773,
                    0.046517083999788156,
                    0.04656116400110477,
                    0.04834544199911761,
                    0.047990360000767396,
                    0.04153126299934229
                ]
            },
            "throughput": {
                "unit": "samples/s",
                "value": 45.394839873234986
            },
            "energy": null,
            "efficiency": null
        }
    }
}