IlyasMoutawwakil
HF staff
Upload bert-base-uncased-feature-extraction-pytorch-2024-09-25-13-33-37/benchmark.json with huggingface_hub
991b02a
verified
{
  "config": {
    "name": "bert-base-uncased-feature-extraction-pytorch-2024-09-25-13-33-37",
    "backend": {
      "name": "pytorch",
      "version": "2.3.0",
      "_target_": "optimum_benchmark.backends.pytorch.backend.PyTorchBackend",
      "task": "feature-extraction",
      "library": "transformers",
      "model_type": "bert",
      "model": "bert-base-uncased",
      "processor": "bert-base-uncased",
      "device": "cpu",
      "device_ids": null,
      "seed": 42,
      "inter_op_num_threads": null,
      "intra_op_num_threads": null,
      "model_kwargs": {},
      "processor_kwargs": {},
      "no_weights": false,
      "device_map": null,
      "torch_dtype": "float32",
      "eval_mode": true,
      "to_bettertransformer": false,
      "low_cpu_mem_usage": null,
      "attn_implementation": null,
      "cache_implementation": null,
      "autocast_enabled": false,
      "autocast_dtype": null,
      "torch_compile": false,
      "torch_compile_target": "forward",
      "torch_compile_config": {},
      "quantization_scheme": null,
      "quantization_config": {},
      "deepspeed_inference": false,
      "deepspeed_inference_config": {},
      "peft_type": null,
      "peft_config": {}
    },
    "scenario": {
      "name": "inference",
      "_target_": "optimum_benchmark.scenarios.inference.scenario.InferenceScenario",
      "iterations": 10,
      "duration": 10,
      "warmup_runs": 10,
      "input_shapes": {
        "batch_size": 2,
        "num_choices": 2,
        "sequence_length": 16
      },
      "new_tokens": null,
      "memory": false,
      "latency": true,
      "energy": false,
      "forward_kwargs": {},
      "generate_kwargs": {},
      "call_kwargs": {}
    },
    "launcher": {
      "name": "process",
      "_target_": "optimum_benchmark.launchers.process.launcher.ProcessLauncher",
      "device_isolation": false,
      "device_isolation_action": null,
      "numactl": true,
      "numactl_kwargs": "",
      "start_method": "spawn"
    },
    "environment": {
      "cpu": " 12th Gen Intel(R) Core(TM) i9-12900H",
      "cpu_count": 20,
      "cpu_ram_mb": 8162.504704,
      "system": "Linux",
      "machine": "x86_64",
      "platform": "Linux-5.15.153.1-microsoft-standard-WSL2-x86_64-with-glibc2.35",
      "processor": "x86_64",
      "python_version": "3.9.16",
      "gpu": [
        "NVIDIA GeForce RTX 3070 Ti Laptop GPU"
      ],
      "gpu_count": 1,
      "gpu_vram_mb": 8589934592,
      "optimum_benchmark_version": "0.5.0",
      "optimum_benchmark_commit": null,
      "transformers_version": "4.39.3",
      "transformers_commit": null,
      "accelerate_version": "0.22.0",
      "accelerate_commit": null,
      "diffusers_version": "0.30.3",
      "diffusers_commit": null,
      "optimum_version": "1.22.0",
      "optimum_commit": null,
      "timm_version": null,
      "timm_commit": null,
      "peft_version": "0.5.0",
      "peft_commit": null
    }
  },
  "report": {
    "load": {
      "memory": null,
      "latency": {
        "unit": "s",
        "count": 1,
        "total": 0.27026245999877574,
        "mean": 0.27026245999877574,
        "stdev": 0.0,
        "p50": 0.27026245999877574,
        "p90": 0.27026245999877574,
        "p95": 0.27026245999877574,
        "p99": 0.27026245999877574,
        "values": [
          0.27026245999877574
        ]
      },
      "throughput": null,
      "energy": null,
      "efficiency": null
    },
    "forward": {
      "memory": null,
      "latency": {
        "unit": "s",
        "count": 239,
        "total": 9.995716405024723,
        "mean": 0.04182308119257206,
        "stdev": 0.005479080615945863,
        "p50": 0.04034890899856691,
        "p90": 0.04739702399892849,
        "p95": 0.04979351750007481,
        "p99": 0.06522586329876504,
        "values": [
          0.04883613899801276,
          0.04795146300239139,
          0.044728029002726544,
          0.04450069900121889,
          0.037102894002600806,
          0.04325066700039315,
          0.038864441001351224,
          0.04134699599671876,
          0.03634349999992992,
          0.04034890899856691,
          0.036186952002026374,
          0.03435628000079305,
          0.03859177399863256,
          0.04764142299973173,
          0.04620927599899005,
          0.04015466199780349,
          0.03909649599881959,
          0.04286497900102404,
          0.04790742299883277,
          0.04678403899742989,
          0.03972219299976132,
          0.039439162999769906,
          0.043716514999687206,
          0.04149892799978261,
          0.04485208400001284,
          0.045184098002209794,
          0.042145937997702276,
          0.04288213499967242,
          0.04364068299764767,
          0.039757909002219094,
          0.049521335000463296,
          0.05205518000002485,
          0.043305869003233965,
          0.045183808000729186,
          0.04119596100281342,
          0.04324858299878542,
          0.043353609999030596,
          0.04339353400064283,
          0.04313248099788325,
          0.04097406100117951,
          0.03751959200235433,
          0.04324551899844664,
          0.04211481400125194,
          0.04133345399895916,
          0.046108590002404526,
          0.048132077001355356,
          0.05006720300298184,
          0.04046721299891942,
          0.037020277999545215,
          0.039295552000112366,
          0.037684594997699605,
          0.03770215999975335,
          0.03719843099679565,
          0.03958193300059065,
          0.03832562300158315,
          0.03784008599905064,
          0.036885741999867605,
          0.039457221999327885,
          0.0356366749983863,
          0.03765420000127051,
          0.04518810700028553,
          0.043174895999982255,
          0.04014851199826808,
          0.04430357300225296,
          0.03990488699855632,
          0.045868396999139804,
          0.04408296199835604,
          0.04196070399848395,
          0.039426113002264174,
          0.04251481800019974,
          0.04414554299728479,
          0.04485624100198038,
          0.040009176002058666,
          0.04631545099982759,
          0.047560331997374306,
          0.04976310799975181,
          0.044739471002685605,
          0.04422163299750537,
          0.050801667999621714,
          0.04033712399905198,
          0.04787530300018261,
          0.07768884499819251,
          0.06992002600236447,
          0.06501595299778273,
          0.06535451799936709,
          0.06414465099805966,
          0.047107880000112345,
          0.049671882999973604,
          0.046817270002065925,
          0.048200277000432834,
          0.04289439399872208,
          0.042556159001833294,
          0.04148061800151481,
          0.04605946499941638,
          0.05406537900125841,
          0.047356196999317035,
          0.04112943599830032,
          0.04207716199744027,
          0.0457085369998822,
          0.0450342210024246,
          0.04537120600070921,
          0.043119087000377476,
          0.043988712001009844,
          0.05100539200066123,
          0.05300349000026472,
          0.050372056000924204,
          0.04656135999903199,
          0.044712947001244174,
          0.045107324000127846,
          0.042533060997811845,
          0.04268773600051645,
          0.04223857599936309,
          0.04091800500100362,
          0.04278873799921712,
          0.03869780099921627,
          0.038958564000495244,
          0.03877288600051543,
          0.03791242200168199,
          0.03849375099889585,
          0.04151796199948876,
          0.03599204299825942,
          0.049295237000478664,
          0.044613818998186616,
          0.03873952999856556,
          0.03621115100031602,
          0.037899742001172854,
          0.042580515997542534,
          0.03531684200061136,
          0.04093022900269716,
          0.04079395200096769,
          0.03776857300181291,
          0.03813702599654789,
          0.0394068409987085,
          0.03751699599888525,
          0.03893014999994193,
          0.037653745002899086,
          0.04032735699729528,
          0.03736954100168077,
          0.039967545999388676,
          0.03889614999934565,
          0.03969464500187314,
          0.035546210001484724,
          0.039777933998266235,
          0.04309199300041655,
          0.03653666600075667,
          0.035243919999629725,
          0.0374708820017986,
          0.036051042003236944,
          0.038411220000853064,
          0.037509626999963075,
          0.03522844100007205,
          0.03874467999776243,
          0.03969091800172464,
          0.03686645000198041,
          0.03710848000264377,
          0.04005178899751627,
          0.04064311199908843,
          0.03410557800089009,
          0.03902073199787992,
          0.044466652998380596,
          0.042854074999922886,
          0.036740889998327475,
          0.038410733999626245,
          0.03709079599866527,
          0.038994911999907345,
          0.03885987700050464,
          0.03638443300224026,
          0.03730879599970649,
          0.04184052100026747,
          0.03792752200024552,
          0.040056259000266436,
          0.03891962400302873,
          0.043371029998525046,
          0.0403801040010876,
          0.04565152899886016,
          0.039157361999969,
          0.04199136900206213,
          0.04456694500186131,
          0.04229618300087168,
          0.03871364599763183,
          0.03995152000061353,
          0.04022040400013793,
          0.03746177400171291,
          0.03833966399906785,
          0.04027781499826233,
          0.04140972600362147,
          0.0394521390007867,
          0.03857994999998482,
          0.03810556000098586,
          0.03915545800191467,
          0.03705566300050123,
          0.039858565000031376,
          0.03819382399888127,
          0.037734322002506815,
          0.03974019899760606,
          0.03827522700157715,
          0.03795622299730894,
          0.04207506799866678,
          0.0414760040002875,
          0.04025740799988853,
          0.04250739499912015,
          0.03630166800212464,
          0.03968086000168114,
          0.040627029000461334,
          0.037420855001983,
          0.038611437998042675,
          0.041941858999052783,
          0.03878899100163835,
          0.036525151001114864,
          0.040973220002342714,
          0.03875776300264988,
          0.04009454899642151,
          0.040104771996993804,
          0.042863520997343585,
          0.04007712000020547,
          0.04471383299824083,
          0.037383546001365175,
          0.04061252199971932,
          0.04164920499897562,
          0.03945347600165405,
          0.03817712000090978,
          0.0387453440016543,
          0.04242988600162789,
          0.035310049999679904,
          0.039323120003246004,
          0.04020754299926921,
          0.038065135002398165,
          0.03528401899893652,
          0.03543830000126036,
          0.042556922999210656,
          0.04281551500025671,
          0.0400575640014722,
          0.04024984200077597,
          0.039552648002427304,
          0.04168981000111671,
          0.04360315899975831,
          0.04440774799877545,
          0.04503937900153687,
          0.04018941500180517
        ]
      },
      "throughput": {
        "unit": "samples/s",
        "value": 47.820484358651406
      },
      "energy": null,
      "efficiency": null
    }
  }
}