{
"name": "root",
"gauges": {
"SoccerTwos.Policy.Entropy.mean": {
"value": 1.888009786605835,
"min": 1.8311636447906494,
"max": 1.959346890449524,
"count": 801
},
"SoccerTwos.Policy.Entropy.sum": {
"value": 39331.01953125,
"min": 1819.8912353515625,
"max": 48154.59375,
"count": 801
},
"SoccerTwos.Environment.EpisodeLength.mean": {
"value": 96.09615384615384,
"min": 25.5,
"max": 145.73529411764707,
"count": 801
},
"SoccerTwos.Environment.EpisodeLength.sum": {
"value": 19988.0,
"min": 204.0,
"max": 21540.0,
"count": 801
},
"SoccerTwos.Self-play.ELO.mean": {
"value": 1255.5466302384193,
"min": 1172.8204168941593,
"max": 1292.0871171030165,
"count": 801
},
"SoccerTwos.Self-play.ELO.sum": {
"value": 130576.84954479561,
"min": 4777.322614426512,
"max": 202045.14151784935,
"count": 801
},
"SoccerTwos.Step.mean": {
"value": 99999958.0,
"min": 91999974.0,
"max": 99999958.0,
"count": 801
},
"SoccerTwos.Step.sum": {
"value": 99999958.0,
"min": 91999974.0,
"max": 99999958.0,
"count": 801
},
"SoccerTwos.Policy.ExtrinsicBaselineEstimate.mean": {
"value": 0.011170216836035252,
"min": -0.10366028547286987,
"max": 0.27148541808128357,
"count": 801
},
"SoccerTwos.Policy.ExtrinsicBaselineEstimate.sum": {
"value": 1.1617025136947632,
"min": -9.909892082214355,
"max": 11.412410736083984,
"count": 801
},
"SoccerTwos.Policy.ExtrinsicValueEstimate.mean": {
"value": 0.010454973205924034,
"min": -0.10391616076231003,
"max": 0.25962936878204346,
"count": 801
},
"SoccerTwos.Policy.ExtrinsicValueEstimate.sum": {
"value": 1.0873172283172607,
"min": -10.197688102722168,
"max": 11.668359756469727,
"count": 801
},
"SoccerTwos.Environment.CumulativeReward.mean": {
"value": 0.0,
"min": 0.0,
"max": 0.0,
"count": 801
},
"SoccerTwos.Environment.CumulativeReward.sum": {
"value": 0.0,
"min": 0.0,
"max": 0.0,
"count": 801
},
"SoccerTwos.Policy.ExtrinsicReward.mean": {
"value": -0.0009730767745238084,
"min": -0.48741333617104426,
"max": 0.973800003528595,
"count": 801
},
"SoccerTwos.Policy.ExtrinsicReward.sum": {
"value": -0.10119998455047607,
"min": -51.632599890232086,
"max": 43.781599938869476,
"count": 801
},
"SoccerTwos.Environment.GroupCumulativeReward.mean": {
"value": -0.0009730767745238084,
"min": -0.48741333617104426,
"max": 0.973800003528595,
"count": 801
},
"SoccerTwos.Environment.GroupCumulativeReward.sum": {
"value": -0.10119998455047607,
"min": -51.632599890232086,
"max": 43.781599938869476,
"count": 801
},
"SoccerTwos.IsTraining.mean": {
"value": 1.0,
"min": 1.0,
"max": 1.0,
"count": 801
},
"SoccerTwos.IsTraining.sum": {
"value": 1.0,
"min": 1.0,
"max": 1.0,
"count": 801
},
"SoccerTwos.Losses.PolicyLoss.mean": {
"value": 0.03705909576549727,
"min": 0.0348788173470114,
"max": 0.04165394398982503,
"count": 387
},
"SoccerTwos.Losses.PolicyLoss.sum": {
"value": 0.03705909576549727,
"min": 0.0348788173470114,
"max": 0.04165394398982503,
"count": 387
},
"SoccerTwos.Losses.ValueLoss.mean": {
"value": 0.0351282373336809,
"min": 0.029210525937378405,
"max": 0.04823799559048244,
"count": 387
},
"SoccerTwos.Losses.ValueLoss.sum": {
"value": 0.0351282373336809,
"min": 0.029210525937378405,
"max": 0.04823799559048244,
"count": 387
},
"SoccerTwos.Losses.BaselineLoss.mean": {
"value": 0.035853533233915054,
"min": 0.0302964643442205,
"max": 0.04926804788410664,
"count": 387
},
"SoccerTwos.Losses.BaselineLoss.sum": {
"value": 0.035853533233915054,
"min": 0.0302964643442205,
"max": 0.04926804788410664,
"count": 387
},
"SoccerTwos.Policy.LearningRate.mean": {
"value": 0.00030000000000000014,
"min": 0.00030000000000000014,
"max": 0.00030000000000000014,
"count": 387
},
"SoccerTwos.Policy.LearningRate.sum": {
"value": 0.00030000000000000014,
"min": 0.00030000000000000014,
"max": 0.00030000000000000014,
"count": 387
},
"SoccerTwos.Policy.Epsilon.mean": {
"value": 0.15000000000000002,
"min": 0.15000000000000002,
"max": 0.15000000000000002,
"count": 387
},
"SoccerTwos.Policy.Epsilon.sum": {
"value": 0.15000000000000002,
"min": 0.15000000000000002,
"max": 0.15000000000000002,
"count": 387
},
"SoccerTwos.Policy.Beta.mean": {
"value": 0.007499999999999997,
"min": 0.007499999999999997,
"max": 0.007499999999999997,
"count": 387
},
"SoccerTwos.Policy.Beta.sum": {
"value": 0.007499999999999997,
"min": 0.007499999999999997,
"max": 0.007499999999999997,
"count": 387
}
},
"metadata": {
"timer_format_version": "0.1.0",
"start_time_seconds": "1682099556",
"python_version": "3.9.16 (main, Mar 8 2023, 10:39:24) [MSC v.1916 64 bit (AMD64)]",
"command_line_arguments": "E:\\ProgramData\\Anaconda3\\envs\\rl\\Scripts\\mlagents-learn ./config/poca/SoccerTwos.yaml --env=./training-envs-executables/SoccerTwos.exe --run-id=SoccerTwos --no-graphics --resume",
"mlagents_version": "0.31.0.dev0",
"mlagents_envs_version": "0.31.0.dev0",
"communication_protocol_version": "1.5.0",
"pytorch_version": "2.0.0+cpu",
"numpy_version": "1.21.2",
"end_time_seconds": "1682173797"
},
"total": 74241.41468059999,
"count": 1,
"self": 0.8190269999904558,
"children": {
"run_training.setup": {
"total": 0.1846551999999999,
"count": 1,
"self": 0.1846551999999999
},
"TrainerController.start_learning": {
"total": 74240.4109984,
"count": 1,
"self": 14.906384497313411,
"children": {
"TrainerController._reset_env": {
"total": 8.796456700028592,
"count": 41,
"self": 8.796456700028592
},
"TrainerController.advance": {
"total": 74216.50738250265,
"count": 537138,
"self": 13.548255704488838,
"children": {
"env_step": {
"total": 10851.157855802614,
"count": 537138,
"self": 8053.994533206816,
"children": {
"SubprocessEnvManager._take_step": {
"total": 2787.380743899268,
"count": 537138,
"self": 88.36784830023953,
"children": {
"TorchPolicy.evaluate": {
"total": 2699.0128955990285,
"count": 1006016,
"self": 2699.0128955990285
}
}
},
"workers": {
"total": 9.782578696529532,
"count": 537138,
"self": 0.0,
"children": {
"worker_root": {
"total": 74213.09890529854,
"count": 537138,
"is_parallel": true,
"self": 67847.0200776994,
"children": {
"steps_from_proto": {
"total": 0.09415409996984625,
"count": 82,
"is_parallel": true,
"self": 0.01869509999147656,
"children": {
"_process_rank_one_or_two_observation": {
"total": 0.07545899997836969,
"count": 328,
"is_parallel": true,
"self": 0.07545899997836969
}
}
},
"UnityEnvironment.step": {
"total": 6365.984673499165,
"count": 537138,
"is_parallel": true,
"self": 351.8846960898827,
"children": {
"UnityEnvironment._generate_step_input": {
"total": 303.0113294023722,
"count": 537138,
"is_parallel": true,
"self": 303.0113294023722
},
"communicator.exchange": {
"total": 4600.752456200797,
"count": 537138,
"is_parallel": true,
"self": 4600.752456200797
},
"steps_from_proto": {
"total": 1110.336191806114,
"count": 1074276,
"is_parallel": true,
"self": 217.09260710833303,
"children": {
"_process_rank_one_or_two_observation": {
"total": 893.2435846977809,
"count": 4297104,
"is_parallel": true,
"self": 893.2435846977809
}
}
}
}
}
}
}
}
}
}
},
"trainer_advance": {
"total": 63351.80127099555,
"count": 537138,
"self": 120.4491249943967,
"children": {
"process_trajectory": {
"total": 4088.742428101126,
"count": 537138,
"self": 4085.4002123011164,
"children": {
"RLTrainer._checkpoint": {
"total": 3.3422158000095674,
"count": 17,
"self": 3.3422158000095674
}
}
},
"_update_policy": {
"total": 59142.60971790003,
"count": 387,
"self": 3747.502850699464,
"children": {
"TorchPOCAOptimizer.update": {
"total": 55395.106867200564,
"count": 27090,
"self": 55395.106867200564
}
}
}
}
}
}
},
"trainer_threads": {
"total": 1.3999961083754897e-06,
"count": 1,
"self": 1.3999961083754897e-06
},
"TrainerController._save_models": {
"total": 0.2007733000064036,
"count": 1,
"self": 0.011347999999998137,
"children": {
"RLTrainer._checkpoint": {
"total": 0.18942530000640545,
"count": 1,
"self": 0.18942530000640545
}
}
}
}
}
}
}