{
"name": "root",
"gauges": {
"SoccerTwos.Policy.Entropy.mean": {
"value": 1.5660171508789062,
"min": 1.5396820306777954,
"max": 1.880081295967102,
"count": 198
},
"SoccerTwos.Policy.Entropy.sum": {
"value": 31320.34375,
"min": 29534.00390625,
"max": 41144.2421875,
"count": 198
},
"SoccerTwos.Environment.EpisodeLength.mean": {
"value": 63.835443037974684,
"min": 42.70175438596491,
"max": 84.71666666666667,
"count": 198
},
"SoccerTwos.Environment.EpisodeLength.sum": {
"value": 20172.0,
"min": 16128.0,
"max": 20528.0,
"count": 198
},
"SoccerTwos.Self-play.ELO.mean": {
"value": 1417.6097296242715,
"min": 1380.403699591196,
"max": 1456.4866303781612,
"count": 198
},
"SoccerTwos.Self-play.ELO.sum": {
"value": 223982.3372806349,
"min": 167199.51708247018,
"max": 323254.78982444253,
"count": 198
},
"SoccerTwos.Step.mean": {
"value": 5529938.0,
"min": 3559972.0,
"max": 5529938.0,
"count": 198
},
"SoccerTwos.Step.sum": {
"value": 5529938.0,
"min": 3559972.0,
"max": 5529938.0,
"count": 198
},
"SoccerTwos.Policy.ExtrinsicBaselineEstimate.mean": {
"value": -0.051855020225048065,
"min": -0.14628547430038452,
"max": 0.0822330117225647,
"count": 198
},
"SoccerTwos.Policy.ExtrinsicBaselineEstimate.sum": {
"value": -8.193093299865723,
"min": -28.37938117980957,
"max": 16.035436630249023,
"count": 198
},
"SoccerTwos.Policy.ExtrinsicValueEstimate.mean": {
"value": -0.042027898132801056,
"min": -0.1427212804555893,
"max": 0.08749193698167801,
"count": 198
},
"SoccerTwos.Policy.ExtrinsicValueEstimate.sum": {
"value": -6.640408039093018,
"min": -27.68792724609375,
"max": 17.060928344726562,
"count": 198
},
"SoccerTwos.Environment.CumulativeReward.mean": {
"value": 0.0,
"min": 0.0,
"max": 0.0,
"count": 198
},
"SoccerTwos.Environment.CumulativeReward.sum": {
"value": 0.0,
"min": 0.0,
"max": 0.0,
"count": 198
},
"SoccerTwos.Policy.ExtrinsicReward.mean": {
"value": -0.09168860882143431,
"min": -0.3312575988769531,
"max": 0.3173333341425115,
"count": 198
},
"SoccerTwos.Policy.ExtrinsicReward.sum": {
"value": -14.486800193786621,
"min": -54.38579994440079,
"max": 52.360000133514404,
"count": 198
},
"SoccerTwos.Environment.GroupCumulativeReward.mean": {
"value": -0.09168860882143431,
"min": -0.3312575988769531,
"max": 0.3173333341425115,
"count": 198
},
"SoccerTwos.Environment.GroupCumulativeReward.sum": {
"value": -14.486800193786621,
"min": -54.38579994440079,
"max": 52.360000133514404,
"count": 198
},
"SoccerTwos.IsTraining.mean": {
"value": 1.0,
"min": 1.0,
"max": 1.0,
"count": 198
},
"SoccerTwos.IsTraining.sum": {
"value": 1.0,
"min": 1.0,
"max": 1.0,
"count": 198
},
"SoccerTwos.Losses.PolicyLoss.mean": {
"value": 0.026255231936617444,
"min": 0.01627497440543569,
"max": 0.03656857611301045,
"count": 191
},
"SoccerTwos.Losses.PolicyLoss.sum": {
"value": 0.026255231936617444,
"min": 0.01627497440543569,
"max": 0.03656857611301045,
"count": 191
},
"SoccerTwos.Losses.ValueLoss.mean": {
"value": 0.09653884669144948,
"min": 0.07564474120736123,
"max": 0.12114908628993565,
"count": 191
},
"SoccerTwos.Losses.ValueLoss.sum": {
"value": 0.09653884669144948,
"min": 0.07564474120736123,
"max": 0.12114908628993565,
"count": 191
},
"SoccerTwos.Losses.BaselineLoss.mean": {
"value": 0.10025102198123932,
"min": 0.07619098275899887,
"max": 0.1247354370576364,
"count": 191
},
"SoccerTwos.Losses.BaselineLoss.sum": {
"value": 0.10025102198123932,
"min": 0.07619098275899887,
"max": 0.1247354370576364,
"count": 191
},
"SoccerTwos.Policy.LearningRate.mean": {
"value": 0.0003,
"min": 0.0003,
"max": 0.0003,
"count": 191
},
"SoccerTwos.Policy.LearningRate.sum": {
"value": 0.0003,
"min": 0.0003,
"max": 0.0003,
"count": 191
},
"SoccerTwos.Policy.Epsilon.mean": {
"value": 0.20000000000000007,
"min": 0.2,
"max": 0.20000000000000007,
"count": 191
},
"SoccerTwos.Policy.Epsilon.sum": {
"value": 0.20000000000000007,
"min": 0.2,
"max": 0.20000000000000007,
"count": 191
},
"SoccerTwos.Policy.Beta.mean": {
"value": 0.005000000000000001,
"min": 0.005,
"max": 0.005000000000000001,
"count": 191
},
"SoccerTwos.Policy.Beta.sum": {
"value": 0.005000000000000001,
"min": 0.005,
"max": 0.005000000000000001,
"count": 191
}
},
"metadata": {
"timer_format_version": "0.1.0",
"start_time_seconds": "1732095102",
"python_version": "3.10.4 (tags/v3.10.4:9d38120, Mar 23 2022, 23:13:41) [MSC v.1929 64 bit (AMD64)]",
"command_line_arguments": "D:\\python projects\\ml-agents\\venv\\Scripts\\mlagents-learn ./config/poca/SoccerTwos.yaml --env=./training-envs-executables/SoccerTwos/SoccerTwos.exe --run-id=SoccerTwos --no-graphics --resume",
"mlagents_version": "1.1.0",
"mlagents_envs_version": "1.1.0",
"communication_protocol_version": "1.5.0",
"pytorch_version": "2.5.1+cpu",
"numpy_version": "1.23.5",
"end_time_seconds": "1732101370"
},
"total": 6268.3186942000175,
"count": 1,
"self": 10.264327400014736,
"children": {
"run_training.setup": {
"total": 0.3097142999758944,
"count": 1,
"self": 0.3097142999758944
},
"TrainerController.start_learning": {
"total": 6257.744652500027,
"count": 1,
"self": 3.813153204973787,
"children": {
"TrainerController._reset_env": {
"total": 11.315755600109696,
"count": 11,
"self": 11.315755600109696
},
"TrainerController.advance": {
"total": 6242.059609094984,
"count": 137126,
"self": 3.7370750026311725,
"children": {
"env_step": {
"total": 2836.0991703160107,
"count": 137126,
"self": 2218.9756366901565,
"children": {
"SubprocessEnvManager._take_step": {
"total": 614.819088418386,
"count": 137126,
"self": 22.754441584693268,
"children": {
"TorchPolicy.evaluate": {
"total": 592.0646468336927,
"count": 248380,
"self": 592.0646468336927
}
}
},
"workers": {
"total": 2.304445207468234,
"count": 137125,
"self": 0.0,
"children": {
"worker_root": {
"total": 6246.121647206368,
"count": 137125,
"is_parallel": true,
"self": 4446.1522600069875,
"children": {
"steps_from_proto": {
"total": 0.020320799783803523,
"count": 22,
"is_parallel": true,
"self": 0.0038775004213675857,
"children": {
"_process_rank_one_or_two_observation": {
"total": 0.016443299362435937,
"count": 88,
"is_parallel": true,
"self": 0.016443299362435937
}
}
},
"UnityEnvironment.step": {
"total": 1799.9490663995966,
"count": 137125,
"is_parallel": true,
"self": 95.17972115217708,
"children": {
"UnityEnvironment._generate_step_input": {
"total": 73.22999456862453,
"count": 137125,
"is_parallel": true,
"self": 73.22999456862453
},
"communicator.exchange": {
"total": 1345.3410510788672,
"count": 137125,
"is_parallel": true,
"self": 1345.3410510788672
},
"steps_from_proto": {
"total": 286.19829959992785,
"count": 274250,
"is_parallel": true,
"self": 55.273062570835464,
"children": {
"_process_rank_one_or_two_observation": {
"total": 230.9252370290924,
"count": 1097000,
"is_parallel": true,
"self": 230.9252370290924
}
}
}
}
}
}
}
}
}
}
},
"trainer_advance": {
"total": 3402.223363776342,
"count": 137125,
"self": 25.176138061331585,
"children": {
"process_trajectory": {
"total": 673.8780544148758,
"count": 137125,
"self": 669.0686950148083,
"children": {
"RLTrainer._checkpoint": {
"total": 4.809359400067478,
"count": 4,
"self": 4.809359400067478
}
}
},
"_update_policy": {
"total": 2703.1691713001346,
"count": 191,
"self": 285.2503954063868,
"children": {
"TorchPOCAOptimizer.update": {
"total": 2417.918775893748,
"count": 5562,
"self": 2417.918775893748
}
}
}
}
}
}
},
"trainer_threads": {
"total": 1.200009137392044e-06,
"count": 1,
"self": 1.200009137392044e-06
},
"TrainerController._save_models": {
"total": 0.5561333999503404,
"count": 1,
"self": 0.2464682999998331,
"children": {
"RLTrainer._checkpoint": {
"total": 0.3096650999505073,
"count": 1,
"self": 0.3096650999505073
}
}
}
}
}
}
}