poca-SoccerTwos / run_logs / timers.json
{
"name": "root",
"gauges": {
"SoccerTwos.Policy.Entropy.mean": {
"value": 2.4417953491210938,
"min": 2.4372684955596924,
"max": 3.2034013271331787,
"count": 351
},
"SoccerTwos.Policy.Entropy.sum": {
"value": 48992.1796875,
"min": 18580.30078125,
"max": 114449.1015625,
"count": 351
},
"SoccerTwos.Environment.EpisodeLength.mean": {
"value": 44.027027027027025,
"min": 43.07964601769911,
"max": 999.0,
"count": 351
},
"SoccerTwos.Environment.EpisodeLength.sum": {
"value": 19548.0,
"min": 3996.0,
"max": 32868.0,
"count": 351
},
"SoccerTwos.Step.mean": {
"value": 6089986.0,
"min": 2529737.0,
"max": 6089986.0,
"count": 357
},
"SoccerTwos.Step.sum": {
"value": 6089986.0,
"min": 2529737.0,
"max": 6089986.0,
"count": 357
},
"SoccerTwos.Policy.ExtrinsicBaselineEstimate.mean": {
"value": 0.047659702599048615,
"min": -0.06996490806341171,
"max": 0.17576617002487183,
"count": 357
},
"SoccerTwos.Policy.ExtrinsicBaselineEstimate.sum": {
"value": 10.580453872680664,
"min": -8.325823783874512,
"max": 26.9212646484375,
"count": 357
},
"SoccerTwos.Policy.ExtrinsicValueEstimate.mean": {
"value": 0.04913307726383209,
"min": -0.07348953932523727,
"max": 0.18069696426391602,
"count": 357
},
"SoccerTwos.Policy.ExtrinsicValueEstimate.sum": {
"value": 10.907543182373047,
"min": -8.745255470275879,
"max": 26.480384826660156,
"count": 357
},
"SoccerTwos.Environment.CumulativeReward.mean": {
"value": 0.0,
"min": 0.0,
"max": 0.0,
"count": 357
},
"SoccerTwos.Environment.CumulativeReward.sum": {
"value": 0.0,
"min": 0.0,
"max": 0.0,
"count": 357
},
"SoccerTwos.Policy.ExtrinsicReward.mean": {
"value": 0.08971351250871881,
"min": -0.6561125009320676,
"max": 0.49619725958941735,
"count": 357
},
"SoccerTwos.Policy.ExtrinsicReward.sum": {
"value": 19.916399776935577,
"min": -37.92640018463135,
"max": 62.96359956264496,
"count": 357
},
"SoccerTwos.Environment.GroupCumulativeReward.mean": {
"value": 0.08971351250871881,
"min": -0.6561125009320676,
"max": 0.49619725958941735,
"count": 357
},
"SoccerTwos.Environment.GroupCumulativeReward.sum": {
"value": 19.916399776935577,
"min": -37.92640018463135,
"max": 62.96359956264496,
"count": 357
},
"SoccerTwos.IsTraining.mean": {
"value": 1.0,
"min": 1.0,
"max": 1.0,
"count": 357
},
"SoccerTwos.IsTraining.sum": {
"value": 1.0,
"min": 1.0,
"max": 1.0,
"count": 357
},
"SoccerTwos.Losses.PolicyLoss.mean": {
"value": 0.013407751137856394,
"min": 0.01142314199047784,
"max": 0.02425788139613966,
"count": 164
},
"SoccerTwos.Losses.PolicyLoss.sum": {
"value": 0.013407751137856394,
"min": 0.01142314199047784,
"max": 0.02425788139613966,
"count": 164
},
"SoccerTwos.Losses.ValueLoss.mean": {
"value": 0.09530262673894564,
"min": 6.020134222239903e-08,
"max": 0.10161784266432126,
"count": 164
},
"SoccerTwos.Losses.ValueLoss.sum": {
"value": 0.09530262673894564,
"min": 6.020134222239903e-08,
"max": 0.10161784266432126,
"count": 164
},
"SoccerTwos.Losses.BaselineLoss.mean": {
"value": 0.09778229296207427,
"min": 1.1880357154344287e-07,
"max": 0.10410436963041624,
"count": 164
},
"SoccerTwos.Losses.BaselineLoss.sum": {
"value": 0.09778229296207427,
"min": 1.1880357154344287e-07,
"max": 0.10410436963041624,
"count": 164
},
"SoccerTwos.Policy.LearningRate.mean": {
"value": 0.0003,
"min": 0.0003,
"max": 0.0003,
"count": 164
},
"SoccerTwos.Policy.LearningRate.sum": {
"value": 0.0003,
"min": 0.0003,
"max": 0.0003,
"count": 164
},
"SoccerTwos.Policy.Epsilon.mean": {
"value": 0.20000000000000007,
"min": 0.19999999999999993,
"max": 0.20000000000000007,
"count": 164
},
"SoccerTwos.Policy.Epsilon.sum": {
"value": 0.20000000000000007,
"min": 0.19999999999999993,
"max": 0.20000000000000007,
"count": 164
},
"SoccerTwos.Policy.Beta.mean": {
"value": 0.005000000000000001,
"min": 0.005,
"max": 0.005000000000000001,
"count": 164
},
"SoccerTwos.Policy.Beta.sum": {
"value": 0.005000000000000001,
"min": 0.005,
"max": 0.005000000000000001,
"count": 164
},
"SoccerTwos.Self-play.ELO.mean": {
"value": 1393.6875639312832,
"min": 1175.020763080963,
"max": 1407.4624380333469,
"count": 257
},
"SoccerTwos.Self-play.ELO.sum": {
"value": 309398.63919274486,
"min": 2350.041526161926,
"max": 312828.9320947636,
"count": 257
}
},
"metadata": {
"timer_format_version": "0.1.0",
"start_time_seconds": "1726853619",
"python_version": "3.10.12 (main, Jul 5 2023, 18:54:27) [GCC 11.2.0]",
"command_line_arguments": "/home/loboronan/miniconda3/envs/unityrl/bin/mlagents-learn ./config/poca/SoccerTwos.yaml --env=./training-envs-executables/SoccerTwos.x86_64 --run-id=SoccerTwos --no-graphics --resume",
"mlagents_version": "1.1.0.dev0",
"mlagents_envs_version": "1.1.0.dev0",
"communication_protocol_version": "1.5.0",
"pytorch_version": "2.4.1+cu121",
"numpy_version": "1.23.5",
"end_time_seconds": "1726862084"
},
"total": 8003.731036661001,
"count": 1,
"self": 0.1642863410015707,
"children": {
"run_training.setup": {
"total": 0.01574568600000248,
"count": 1,
"self": 0.01574568600000248
},
"TrainerController.start_learning": {
"total": 8003.551004634,
"count": 1,
"self": 5.11542567806282,
"children": {
"TrainerController._reset_env": {
"total": 3.500413088002176,
"count": 19,
"self": 3.500413088002176
},
"TrainerController.advance": {
"total": 7994.509755703934,
"count": 235191,
"self": 5.586768086869597,
"children": {
"env_step": {
"total": 6542.052595024812,
"count": 235191,
"self": 4133.565731688963,
"children": {
"SubprocessEnvManager._take_step": {
"total": 2405.351513774882,
"count": 235191,
"self": 30.02797726479821,
"children": {
"TorchPolicy.evaluate": {
"total": 2375.3235365100836,
"count": 455834,
"self": 2375.3235365100836
}
}
},
"workers": {
"total": 3.1353495609669153,
"count": 235190,
"self": 0.0,
"children": {
"worker_root": {
"total": 7991.384502473039,
"count": 235190,
"is_parallel": true,
"self": 4415.152569256068,
"children": {
"run_training.setup": {
"total": 0.0,
"count": 0,
"is_parallel": true,
"self": 0.0,
"children": {
"steps_from_proto": {
"total": 0.0029765129999930195,
"count": 2,
"is_parallel": true,
"self": 0.0010946439999770519,
"children": {
"_process_rank_one_or_two_observation": {
"total": 0.0018818690000159677,
"count": 8,
"is_parallel": true,
"self": 0.0018818690000159677
}
}
},
"UnityEnvironment.step": {
"total": 0.02766064100001131,
"count": 1,
"is_parallel": true,
"self": 0.0006161299999973835,
"children": {
"UnityEnvironment._generate_step_input": {
"total": 0.00035528300000464696,
"count": 1,
"is_parallel": true,
"self": 0.00035528300000464696
},
"communicator.exchange": {
"total": 0.02512660099998243,
"count": 1,
"is_parallel": true,
"self": 0.02512660099998243
},
"steps_from_proto": {
"total": 0.0015626270000268505,
"count": 2,
"is_parallel": true,
"self": 0.00036540899998271925,
"children": {
"_process_rank_one_or_two_observation": {
"total": 0.0011972180000441313,
"count": 8,
"is_parallel": true,
"self": 0.0011972180000441313
}
}
}
}
}
}
},
"steps_from_proto": {
"total": 0.024961540998958753,
"count": 36,
"is_parallel": true,
"self": 0.005609905998994691,
"children": {
"_process_rank_one_or_two_observation": {
"total": 0.019351634999964062,
"count": 144,
"is_parallel": true,
"self": 0.019351634999964062
}
}
},
"UnityEnvironment.step": {
"total": 3576.2069716759725,
"count": 235189,
"is_parallel": true,
"self": 99.84989229992061,
"children": {
"UnityEnvironment._generate_step_input": {
"total": 72.77683382908867,
"count": 235189,
"is_parallel": true,
"self": 72.77683382908867
},
"communicator.exchange": {
"total": 3064.8591148571395,
"count": 235189,
"is_parallel": true,
"self": 3064.8591148571395
},
"steps_from_proto": {
"total": 338.7211306898232,
"count": 470378,
"is_parallel": true,
"self": 76.59657505344228,
"children": {
"_process_rank_one_or_two_observation": {
"total": 262.12455563638093,
"count": 1881512,
"is_parallel": true,
"self": 262.12455563638093
}
}
}
}
}
}
}
}
}
}
},
"trainer_advance": {
"total": 1446.8703925922532,
"count": 235190,
"self": 37.20655809023424,
"children": {
"process_trajectory": {
"total": 668.5377125160178,
"count": 235190,
"self": 666.3401519490178,
"children": {
"RLTrainer._checkpoint": {
"total": 2.1975605669999823,
"count": 7,
"self": 2.1975605669999823
}
}
},
"_update_policy": {
"total": 741.1261219860012,
"count": 164,
"self": 319.34207435801346,
"children": {
"TorchPOCAOptimizer.update": {
"total": 421.7840476279877,
"count": 5031,
"self": 421.7840476279877
}
}
}
}
}
}
},
"trainer_threads": {
"total": 9.180002962239087e-07,
"count": 1,
"self": 9.180002962239087e-07
},
"TrainerController._save_models": {
"total": 0.42540924599961727,
"count": 1,
"self": 0.013648916999954963,
"children": {
"RLTrainer._checkpoint": {
"total": 0.4117603289996623,
"count": 1,
"self": 0.4117603289996623
}
}
}
}
}
}
}
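
For reference, a minimal sketch of how a timers.json like the one above could be loaded and summarized with Python's standard json module. The file path is an assumption; adjust it to wherever this run's results directory lives.

import json

# Load the timers file produced by mlagents-learn (path is an assumption).
with open("run_logs/timers.json") as f:
    timers = json.load(f)

# Each gauge records the latest value plus its min/max and a sample count.
for name, gauge in timers["gauges"].items():
    print(f"{name}: value={gauge['value']:.4f} "
          f"(min={gauge['min']:.4f}, max={gauge['max']:.4f}, count={gauge['count']})")

# Total wall-clock time of the run, in seconds.
print("total seconds:", timers["total"])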