poca-SoccerTwos/run_logs/timers.json
{
"name": "root",
"gauges": {
"SoccerTwos.Policy.Entropy.mean": {
"value": 1.1234384775161743,
"min": 1.1234384775161743,
"max": 3.295717477798462,
"count": 2500
},
"SoccerTwos.Policy.Entropy.sum": {
"value": 22217.119140625,
"min": 20717.66015625,
"max": 122826.21875,
"count": 2500
},
"SoccerTwos.Environment.EpisodeLength.mean": {
"value": 62.61538461538461,
"min": 49.96,
"max": 999.0,
"count": 2500
},
"SoccerTwos.Environment.EpisodeLength.sum": {
"value": 19536.0,
"min": 16496.0,
"max": 22876.0,
"count": 2500
},
"SoccerTwos.Self-play.ELO.mean": {
"value": 1825.7938991827937,
"min": 1185.2835797668943,
"max": 1856.6931527929742,
"count": 2466
},
"SoccerTwos.Self-play.ELO.sum": {
"value": 284823.8482725158,
"min": 2377.148258356644,
"max": 341921.96813963883,
"count": 2466
},
"SoccerTwos.Step.mean": {
"value": 24999932.0,
"min": 9656.0,
"max": 24999932.0,
"count": 2500
},
"SoccerTwos.Step.sum": {
"value": 24999932.0,
"min": 9656.0,
"max": 24999932.0,
"count": 2500
},
"SoccerTwos.Policy.ExtrinsicBaselineEstimate.mean": {
"value": -0.02974133938550949,
"min": -1.5241801738739014,
"max": 1.186009168624878,
"count": 2500
},
"SoccerTwos.Policy.ExtrinsicBaselineEstimate.sum": {
"value": -4.639648914337158,
"min": -19.358009338378906,
"max": 23.737735748291016,
"count": 2500
},
"SoccerTwos.Policy.ExtrinsicValueEstimate.mean": {
"value": -0.029097305610775948,
"min": -1.6131078004837036,
"max": 0.2732936143875122,
"count": 2500
},
"SoccerTwos.Policy.ExtrinsicValueEstimate.sum": {
"value": -4.539179801940918,
"min": -22.36490821838379,
"max": 23.503250122070312,
"count": 2500
},
"SoccerTwos.Environment.CumulativeReward.mean": {
"value": 0.0,
"min": 0.0,
"max": 0.0,
"count": 2500
},
"SoccerTwos.Environment.CumulativeReward.sum": {
"value": 0.0,
"min": 0.0,
"max": 0.0,
"count": 2500
},
"SoccerTwos.Policy.ExtrinsicReward.mean": {
"value": 0.04126153924526312,
"min": -0.6842105263157895,
"max": 0.6175052600733021,
"count": 2500
},
"SoccerTwos.Policy.ExtrinsicReward.sum": {
"value": 6.436800122261047,
"min": -64.91519975662231,
"max": 54.3716002702713,
"count": 2500
},
"SoccerTwos.Environment.GroupCumulativeReward.mean": {
"value": 0.04126153924526312,
"min": -0.6842105263157895,
"max": 0.6175052600733021,
"count": 2500
},
"SoccerTwos.Environment.GroupCumulativeReward.sum": {
"value": 6.436800122261047,
"min": -64.91519975662231,
"max": 54.3716002702713,
"count": 2500
},
"SoccerTwos.IsTraining.mean": {
"value": 1.0,
"min": 1.0,
"max": 1.0,
"count": 2500
},
"SoccerTwos.IsTraining.sum": {
"value": 1.0,
"min": 1.0,
"max": 1.0,
"count": 2500
},
"SoccerTwos.Losses.PolicyLoss.mean": {
"value": 0.035445474874368534,
"min": 0.029532684385382407,
"max": 0.04380931518039105,
"count": 932
},
"SoccerTwos.Losses.PolicyLoss.sum": {
"value": 0.035445474874368534,
"min": 0.029532684385382407,
"max": 0.04380931518039105,
"count": 932
},
"SoccerTwos.Losses.ValueLoss.mean": {
"value": 0.05445720377163245,
"min": 7.463254202079558e-07,
"max": 0.7107169893689644,
"count": 932
},
"SoccerTwos.Losses.ValueLoss.sum": {
"value": 0.05445720377163245,
"min": 7.463254202079558e-07,
"max": 0.7107169893689644,
"count": 932
},
"SoccerTwos.Losses.BaselineLoss.mean": {
"value": 0.05484463350895124,
"min": 7.923651948453599e-07,
"max": 6.126995172256079,
"count": 932
},
"SoccerTwos.Losses.BaselineLoss.sum": {
"value": 0.05484463350895124,
"min": 7.923651948453599e-07,
"max": 6.126995172256079,
"count": 932
},
"SoccerTwos.Policy.LearningRate.mean": {
"value": 4.2665994668000927e-07,
"min": 4.2665994668000927e-07,
"max": 0.0007991250561093682,
"count": 932
},
"SoccerTwos.Policy.LearningRate.sum": {
"value": 4.2665994668000927e-07,
"min": 4.2665994668000927e-07,
"max": 0.0007991250561093682,
"count": 932
},
"SoccerTwos.Policy.Epsilon.mean": {
"value": 0.10002788209440003,
"min": 0.10002788209440003,
"max": 0.15223480928544003,
"count": 932
},
"SoccerTwos.Policy.Epsilon.sum": {
"value": 0.10002788209440003,
"min": 0.10002788209440003,
"max": 0.15223480928544003,
"count": 932
},
"SoccerTwos.Policy.Beta.mean": {
"value": 1.2660668000000057e-05,
"min": 1.2660668000000057e-05,
"max": 0.0049945425368,
"count": 932
},
"SoccerTwos.Policy.Beta.sum": {
"value": 1.2660668000000057e-05,
"min": 1.2660668000000057e-05,
"max": 0.0049945425368,
"count": 932
}
},
"metadata": {
"timer_format_version": "0.1.0",
"start_time_seconds": "1719142258",
"python_version": "3.10.12 (main, Jul 5 2023, 18:54:27) [GCC 11.2.0]",
"command_line_arguments": "/home/gerard/anaconda3/envs/rl/bin/mlagents-learn ./config/poca/SoccerTwos.yaml --env=./training-envs-executables/SoccerTwos/SoccerTwos.x86_64 --run-id=SoccerTwos --no-graphics",
"mlagents_version": "1.1.0.dev0",
"mlagents_envs_version": "1.1.0.dev0",
"communication_protocol_version": "1.5.0",
"pytorch_version": "2.3.1+cu121",
"numpy_version": "1.23.5",
"end_time_seconds": "1719178175"
},
"total": 35916.95696124504,
"count": 1,
"self": 0.21920562500599772,
"children": {
"run_training.setup": {
"total": 0.01551381906028837,
"count": 1,
"self": 0.01551381906028837
},
"TrainerController.start_learning": {
"total": 35916.72224180098,
"count": 1,
"self": 23.849299312452786,
"children": {
"TrainerController._reset_env": {
"total": 3.327589773456566,
"count": 81,
"self": 3.327589773456566
},
"TrainerController.advance": {
"total": 35889.401946845115,
"count": 1696100,
"self": 23.199814443127252,
"children": {
"env_step": {
"total": 25058.3524640525,
"count": 1696100,
"self": 19046.92350125627,
"children": {
"SubprocessEnvManager._take_step": {
"total": 5995.742158797686,
"count": 1696100,
"self": 137.29762796347495,
"children": {
"TorchPolicy.evaluate": {
"total": 5858.444530834211,
"count": 3138294,
"self": 5858.444530834211
}
}
},
"workers": {
"total": 15.686803998542018,
"count": 1696100,
"self": 0.0,
"children": {
"worker_root": {
"total": 35883.286539411056,
"count": 1696100,
"is_parallel": true,
"self": 19771.91978485475,
"children": {
"run_training.setup": {
"total": 0.0,
"count": 0,
"is_parallel": true,
"self": 0.0,
"children": {
"steps_from_proto": {
"total": 0.0021088559878990054,
"count": 2,
"is_parallel": true,
"self": 0.0007412320701405406,
"children": {
"_process_rank_one_or_two_observation": {
"total": 0.0013676239177584648,
"count": 8,
"is_parallel": true,
"self": 0.0013676239177584648
}
}
},
"UnityEnvironment.step": {
"total": 0.020144177018664777,
"count": 1,
"is_parallel": true,
"self": 0.0004085621330887079,
"children": {
"UnityEnvironment._generate_step_input": {
"total": 0.00036455795634537935,
"count": 1,
"is_parallel": true,
"self": 0.00036455795634537935
},
"communicator.exchange": {
"total": 0.017962728976272047,
"count": 1,
"is_parallel": true,
"self": 0.017962728976272047
},
"steps_from_proto": {
"total": 0.0014083279529586434,
"count": 2,
"is_parallel": true,
"self": 0.00032167299650609493,
"children": {
"_process_rank_one_or_two_observation": {
"total": 0.0010866549564525485,
"count": 8,
"is_parallel": true,
"self": 0.0010866549564525485
}
}
}
}
}
}
},
"UnityEnvironment.step": {
"total": 16111.253339246032,
"count": 1696099,
"is_parallel": true,
"self": 757.80488253769,
"children": {
"UnityEnvironment._generate_step_input": {
"total": 451.1183807726484,
"count": 1696099,
"is_parallel": true,
"self": 451.1183807726484
},
"communicator.exchange": {
"total": 12717.078414023272,
"count": 1696099,
"is_parallel": true,
"self": 12717.078414023272
},
"steps_from_proto": {
"total": 2185.2516619124217,
"count": 3392198,
"is_parallel": true,
"self": 402.50320950429887,
"children": {
"_process_rank_one_or_two_observation": {
"total": 1782.7484524081228,
"count": 13568792,
"is_parallel": true,
"self": 1782.7484524081228
}
}
}
}
},
"steps_from_proto": {
"total": 0.11341531027574092,
"count": 160,
"is_parallel": true,
"self": 0.020305291982367635,
"children": {
"_process_rank_one_or_two_observation": {
"total": 0.09311001829337329,
"count": 640,
"is_parallel": true,
"self": 0.09311001829337329
}
}
}
}
}
}
}
}
},
"trainer_advance": {
"total": 10807.849668349489,
"count": 1696100,
"self": 173.37280472845305,
"children": {
"process_trajectory": {
"total": 3123.997487073415,
"count": 1696100,
"self": 3113.1751382783987,
"children": {
"RLTrainer._checkpoint": {
"total": 10.822348795016296,
"count": 50,
"self": 10.822348795016296
}
}
},
"_update_policy": {
"total": 7510.479376547621,
"count": 932,
"self": 2139.252104226616,
"children": {
"TorchPOCAOptimizer.update": {
"total": 5371.227272321004,
"count": 145728,
"self": 5371.227272321004
}
}
}
}
}
}
},
"trainer_threads": {
"total": 5.479669198393822e-07,
"count": 1,
"self": 5.479669198393822e-07
},
"TrainerController._save_models": {
"total": 0.14340532198548317,
"count": 1,
"self": 0.0011018469231203198,
"children": {
"RLTrainer._checkpoint": {
"total": 0.14230347506236285,
"count": 1,
"self": 0.14230347506236285
}
}
}
}
}
}
}
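
The snippet below is a minimal, optional sketch (not part of the log itself) showing one way to inspect this file with the Python standard library. It assumes the log is read from run_logs/timers.json, matching the repository path above, and uses only the "gauges", "value", "min", "max", "count", and "total" fields that appear in the log; adjust the path for your own checkout.

import json

# Load the timing/metrics log produced by mlagents-learn.
with open("run_logs/timers.json") as f:
    timers = json.load(f)

# Print the final value and observed range of every gauge recorded above.
for name, gauge in timers["gauges"].items():
    print(f'{name}: value={gauge["value"]:.4g} '
          f'(min={gauge["min"]:.4g}, max={gauge["max"]:.4g}, n={gauge["count"]})')

# Total wall-clock time of the run, in seconds.
print(f'total seconds: {timers["total"]:.1f}')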