poca-SoccerTwos / run_logs / timers.json
{
"name": "root",
"gauges": {
"SoccerTwos.Policy.Entropy.mean": {
"value": 1.991182565689087,
"min": 1.991182565689087,
"max": 2.3107833862304688,
"count": 20
},
"SoccerTwos.Policy.Entropy.sum": {
"value": 198863.390625,
"min": 198863.390625,
"max": 232557.25,
"count": 20
},
"SoccerTwos.Environment.EpisodeLength.mean": {
"value": 54.242290748898675,
"min": 47.715953307392994,
"max": 59.204819277108435,
"count": 20
},
"SoccerTwos.Environment.EpisodeLength.sum": {
"value": 98504.0,
"min": 97352.0,
"max": 98564.0,
"count": 20
},
"SoccerTwos.Self-play.ELO.mean": {
"value": 1596.8856160680812,
"min": 1520.4333709905238,
"max": 1596.8856160680812,
"count": 20
},
"SoccerTwos.Self-play.ELO.sum": {
"value": 1449972.1393898176,
"min": 1261959.6979221348,
"max": 1597547.9131219038,
"count": 20
},
"SoccerTwos.Step.mean": {
"value": 5999858.0,
"min": 5049956.0,
"max": 5999858.0,
"count": 20
},
"SoccerTwos.Step.sum": {
"value": 5999858.0,
"min": 5049956.0,
"max": 5999858.0,
"count": 20
},
"SoccerTwos.Policy.ExtrinsicBaselineEstimate.mean": {
"value": 0.01571565866470337,
"min": -0.03754736855626106,
"max": 0.0836557149887085,
"count": 20
},
"SoccerTwos.Policy.ExtrinsicBaselineEstimate.sum": {
"value": 14.269817352294922,
"min": -31.164316177368164,
"max": 80.19994354248047,
"count": 20
},
"SoccerTwos.Policy.ExtrinsicValueEstimate.mean": {
"value": 0.014159336686134338,
"min": -0.03675474971532822,
"max": 0.08358553797006607,
"count": 20
},
"SoccerTwos.Policy.ExtrinsicValueEstimate.sum": {
"value": 12.856678009033203,
"min": -30.50644302368164,
"max": 79.80325317382812,
"count": 20
},
"SoccerTwos.Environment.CumulativeReward.mean": {
"value": 0.0,
"min": 0.0,
"max": 0.0,
"count": 20
},
"SoccerTwos.Environment.CumulativeReward.sum": {
"value": 0.0,
"min": 0.0,
"max": 0.0,
"count": 20
},
"SoccerTwos.Policy.ExtrinsicReward.mean": {
"value": -0.06581629976827143,
"min": -0.07461106190375522,
"max": 0.12742424346667863,
"count": 20
},
"SoccerTwos.Policy.ExtrinsicReward.sum": {
"value": -59.761200189590454,
"min": -67.44839996099472,
"max": 117.74000096321106,
"count": 20
},
"SoccerTwos.Environment.GroupCumulativeReward.mean": {
"value": -0.06581629976827143,
"min": -0.07461106190375522,
"max": 0.12742424346667863,
"count": 20
},
"SoccerTwos.Environment.GroupCumulativeReward.sum": {
"value": -59.761200189590454,
"min": -67.44839996099472,
"max": 117.74000096321106,
"count": 20
},
"SoccerTwos.Losses.PolicyLoss.mean": {
"value": 0.017259779567151176,
"min": 0.013903003576948927,
"max": 0.018675298746287202,
"count": 20
},
"SoccerTwos.Losses.PolicyLoss.sum": {
"value": 0.03451955913430235,
"min": 0.027806007153897853,
"max": 0.05332428064575652,
"count": 20
},
"SoccerTwos.Losses.ValueLoss.mean": {
"value": 0.10991844969491163,
"min": 0.09613035536474653,
"max": 0.11699956779678664,
"count": 20
},
"SoccerTwos.Losses.ValueLoss.sum": {
"value": 0.21983689938982326,
"min": 0.19462583710749942,
"max": 0.33388328850269317,
"count": 20
},
"SoccerTwos.Losses.BaselineLoss.mean": {
"value": 0.11183060631155968,
"min": 0.09769865911867882,
"max": 0.11947158152858417,
"count": 20
},
"SoccerTwos.Losses.BaselineLoss.sum": {
"value": 0.22366121262311936,
"min": 0.19866952300071716,
"max": 0.33829459423820174,
"count": 20
},
"SoccerTwos.Policy.LearningRate.mean": {
"value": 0.0003,
"min": 0.0003,
"max": 0.0003,
"count": 20
},
"SoccerTwos.Policy.LearningRate.sum": {
"value": 0.0006,
"min": 0.0006,
"max": 0.0009,
"count": 20
},
"SoccerTwos.Policy.Epsilon.mean": {
"value": 0.20000000000000007,
"min": 0.20000000000000007,
"max": 0.20000000000000007,
"count": 20
},
"SoccerTwos.Policy.Epsilon.sum": {
"value": 0.40000000000000013,
"min": 0.40000000000000013,
"max": 0.6000000000000002,
"count": 20
},
"SoccerTwos.Policy.Beta.mean": {
"value": 0.005000000000000001,
"min": 0.005000000000000001,
"max": 0.005000000000000001,
"count": 20
},
"SoccerTwos.Policy.Beta.sum": {
"value": 0.010000000000000002,
"min": 0.010000000000000002,
"max": 0.015000000000000003,
"count": 20
},
"SoccerTwos.IsTraining.mean": {
"value": 1.0,
"min": 1.0,
"max": 1.0,
"count": 20
},
"SoccerTwos.IsTraining.sum": {
"value": 1.0,
"min": 1.0,
"max": 1.0,
"count": 20
}
},
"metadata": {
"timer_format_version": "0.1.0",
"start_time_seconds": "1688705682",
"python_version": "3.9.13 (tags/v3.9.13:6de2ca5, May 17 2022, 16:36:42) [MSC v.1929 64 bit (AMD64)]",
"command_line_arguments": "C:\\cygwin64\\home\\zmusc\\git_repos\\hf-deep-rl-course-unit7handson\\venv\\Scripts\\mlagents-learn ./config/poca/SoccerTwos.yaml --env=./training-envs-executables/SoccerTwos/SoccerTwos/SoccerTwos.exe --run-id=SoccerTwos --no-graphics --resume",
"mlagents_version": "0.31.0.dev0",
"mlagents_envs_version": "0.31.0.dev0",
"communication_protocol_version": "1.5.0",
"pytorch_version": "1.9.0+cu111",
"numpy_version": "1.21.2",
"end_time_seconds": "1688707031"
},
"total": 1350.0074677,
"count": 1,
"self": 0.21273450000012417,
"children": {
"run_training.setup": {
"total": 0.12336409999999987,
"count": 1,
"self": 0.12336409999999987
},
"TrainerController.start_learning": {
"total": 1349.6713691,
"count": 1,
"self": 1.165658700003405,
"children": {
"TrainerController._reset_env": {
"total": 2.8202560000002506,
"count": 7,
"self": 2.8202560000002506
},
"TrainerController.advance": {
"total": 1345.5291634999962,
"count": 70174,
"self": 1.0655464999508695,
"children": {
"env_step": {
"total": 951.0618612000178,
"count": 70174,
"self": 590.5109055000164,
"children": {
"SubprocessEnvManager._take_step": {
"total": 359.8672633000093,
"count": 70174,
"self": 5.474472699996113,
"children": {
"TorchPolicy.evaluate": {
"total": 354.3927906000132,
"count": 125362,
"self": 354.3927906000132
}
}
},
"workers": {
"total": 0.6836923999920055,
"count": 70174,
"self": 0.0,
"children": {
"worker_root": {
"total": 1346.2262785999642,
"count": 70174,
"is_parallel": true,
"self": 870.1020653999858,
"children": {
"steps_from_proto": {
"total": 0.007733100000117288,
"count": 12,
"is_parallel": true,
"self": 0.0017047000002894386,
"children": {
"_process_rank_one_or_two_observation": {
"total": 0.006028399999827849,
"count": 48,
"is_parallel": true,
"self": 0.006028399999827849
}
}
},
"UnityEnvironment.step": {
"total": 476.11648009997833,
"count": 70174,
"is_parallel": true,
"self": 27.08991029995542,
"children": {
"UnityEnvironment._generate_step_input": {
"total": 19.331992400010115,
"count": 70174,
"is_parallel": true,
"self": 19.331992400010115
},
"communicator.exchange": {
"total": 350.2684715000071,
"count": 70174,
"is_parallel": true,
"self": 350.2684715000071
},
"steps_from_proto": {
"total": 79.42610590000574,
"count": 140348,
"is_parallel": true,
"self": 17.727934800009173,
"children": {
"_process_rank_one_or_two_observation": {
"total": 61.698171099996564,
"count": 561392,
"is_parallel": true,
"self": 61.698171099996564
}
}
}
}
}
}
}
}
}
}
},
"trainer_advance": {
"total": 393.40175580002756,
"count": 70174,
"self": 7.691731500006881,
"children": {
"process_trajectory": {
"total": 213.01986200002028,
"count": 70174,
"self": 212.71671580002027,
"children": {
"RLTrainer._checkpoint": {
"total": 0.3031462000000147,
"count": 2,
"self": 0.3031462000000147
}
}
},
"_update_policy": {
"total": 172.6901623000004,
"count": 48,
"self": 110.52477350000055,
"children": {
"TorchPOCAOptimizer.update": {
"total": 62.165388799999846,
"count": 1440,
"self": 62.165388799999846
}
}
}
}
}
}
},
"trainer_threads": {
"total": 6.000000212225132e-07,
"count": 1,
"self": 6.000000212225132e-07
},
"TrainerController._save_models": {
"total": 0.156290300000137,
"count": 1,
"self": 0.02723340000011376,
"children": {
"RLTrainer._checkpoint": {
"total": 0.12905690000002323,
"count": 1,
"self": 0.12905690000002323
}
}
}
}
}
}
}
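The JSON above is the timers.json that ML-Agents writes for this run: a "gauges" map of training statistics (each with value/min/max/count), a "metadata" block describing the training setup, and a nested timer tree whose nodes record "total" wall-clock seconds, a call "count", "self" time, and "children". A minimal sketch for inspecting it locally, assuming the file has been saved as timers.json (the path and print formatting are illustrative, not part of the upload):

```python
import json

# Assumes this file was downloaded locally as "timers.json" (illustrative path).
with open("timers.json") as f:
    timers = json.load(f)

# Each gauge records value/min/max/count for one training statistic.
for name, gauge in timers["gauges"].items():
    print(f"{name}: value={gauge['value']}, min={gauge['min']}, "
          f"max={gauge['max']}, count={gauge['count']}")

# Walk the nested timer tree to see where wall-clock time was spent.
def walk(node, name="root", depth=0):
    print("  " * depth + f"{name}: total={node['total']:.2f}s, count={node['count']}")
    for child_name, child in node.get("children", {}).items():
        walk(child, child_name, depth + 1)

walk(timers)
```

Running the walk on this file would, for example, show that most of the ~1350 s run sits under TrainerController.advance, split between env_step and trainer_advance.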