{
"name": "root",
"gauges": {
"SoccerTwos.Policy.Entropy.mean": {
"value": 1.4756355285644531,
"min": 1.2978237867355347,
"max": 3.295745611190796,
"count": 5000
},
"SoccerTwos.Policy.Entropy.sum": {
"value": 31637.626953125,
"min": 23235.044921875,
"max": 116324.859375,
"count": 5000
},
"SoccerTwos.Environment.EpisodeLength.mean": {
"value": 85.56896551724138,
"min": 39.699186991869915,
"max": 999.0,
"count": 5000
},
"SoccerTwos.Environment.EpisodeLength.sum": {
"value": 19852.0,
"min": 15140.0,
"max": 27908.0,
"count": 5000
},
"SoccerTwos.Self-play.ELO.mean": {
"value": 1776.7649151177247,
"min": 1198.2180567289765,
"max": 1846.4434251951945,
"count": 4987
},
"SoccerTwos.Self-play.ELO.sum": {
"value": 206104.73015365607,
"min": 2396.5509993320493,
"max": 422802.5951249929,
"count": 4987
},
"SoccerTwos.Step.mean": {
"value": 49999926.0,
"min": 9970.0,
"max": 49999926.0,
"count": 5000
},
"SoccerTwos.Step.sum": {
"value": 49999926.0,
"min": 9970.0,
"max": 49999926.0,
"count": 5000
},
"SoccerTwos.Policy.ExtrinsicBaselineEstimate.mean": {
"value": -0.018933746963739395,
"min": -0.1372309774160385,
"max": 0.1623445749282837,
"count": 5000
},
"SoccerTwos.Policy.ExtrinsicBaselineEstimate.sum": {
"value": -2.196314573287964,
"min": -27.851730346679688,
"max": 27.091503143310547,
"count": 5000
},
"SoccerTwos.Policy.ExtrinsicValueEstimate.mean": {
"value": -0.01998467557132244,
"min": -0.14969651401042938,
"max": 0.16986478865146637,
"count": 5000
},
"SoccerTwos.Policy.ExtrinsicValueEstimate.sum": {
"value": -2.3182222843170166,
"min": -30.38839340209961,
"max": 26.94598960876465,
"count": 5000
},
"SoccerTwos.Environment.CumulativeReward.mean": {
"value": 0.0,
"min": 0.0,
"max": 0.0,
"count": 5000
},
"SoccerTwos.Environment.CumulativeReward.sum": {
"value": 0.0,
"min": 0.0,
"max": 0.0,
"count": 5000
},
"SoccerTwos.Policy.ExtrinsicReward.mean": {
"value": -0.01756551553463114,
"min": -0.6153846153846154,
"max": 0.5457272637974132,
"count": 5000
},
"SoccerTwos.Policy.ExtrinsicReward.sum": {
"value": -2.037599802017212,
"min": -75.49440014362335,
"max": 64.44439995288849,
"count": 5000
},
"SoccerTwos.Environment.GroupCumulativeReward.mean": {
"value": -0.01756551553463114,
"min": -0.6153846153846154,
"max": 0.5457272637974132,
"count": 5000
},
"SoccerTwos.Environment.GroupCumulativeReward.sum": {
"value": -2.037599802017212,
"min": -75.49440014362335,
"max": 64.44439995288849,
"count": 5000
},
"SoccerTwos.IsTraining.mean": {
"value": 1.0,
"min": 1.0,
"max": 1.0,
"count": 5000
},
"SoccerTwos.IsTraining.sum": {
"value": 1.0,
"min": 1.0,
"max": 1.0,
"count": 5000
},
"SoccerTwos.Losses.PolicyLoss.mean": {
"value": 0.013691785931587219,
"min": 0.009495296816627767,
"max": 0.025735475623514505,
"count": 2423
},
"SoccerTwos.Losses.PolicyLoss.sum": {
"value": 0.013691785931587219,
"min": 0.009495296816627767,
"max": 0.025735475623514505,
"count": 2423
},
"SoccerTwos.Losses.ValueLoss.mean": {
"value": 0.07996175289154053,
"min": 2.5397517856617923e-05,
"max": 0.13317935988307,
"count": 2423
},
"SoccerTwos.Losses.ValueLoss.sum": {
"value": 0.07996175289154053,
"min": 2.5397517856617923e-05,
"max": 0.13317935988307,
"count": 2423
},
"SoccerTwos.Losses.BaselineLoss.mean": {
"value": 0.08090781743327777,
"min": 2.6957952832162847e-05,
"max": 0.135737781226635,
"count": 2423
},
"SoccerTwos.Losses.BaselineLoss.sum": {
"value": 0.08090781743327777,
"min": 2.6957952832162847e-05,
"max": 0.135737781226635,
"count": 2423
},
"SoccerTwos.Policy.LearningRate.mean": {
"value": 0.0003,
"min": 0.0003,
"max": 0.0003,
"count": 2423
},
"SoccerTwos.Policy.LearningRate.sum": {
"value": 0.0003,
"min": 0.0003,
"max": 0.0003,
"count": 2423
},
"SoccerTwos.Policy.Epsilon.mean": {
"value": 0.20000000000000007,
"min": 0.20000000000000007,
"max": 0.20000000000000007,
"count": 2423
},
"SoccerTwos.Policy.Epsilon.sum": {
"value": 0.20000000000000007,
"min": 0.20000000000000007,
"max": 0.20000000000000007,
"count": 2423
},
"SoccerTwos.Policy.Beta.mean": {
"value": 0.005000000000000001,
"min": 0.005000000000000001,
"max": 0.005000000000000001,
"count": 2423
},
"SoccerTwos.Policy.Beta.sum": {
"value": 0.005000000000000001,
"min": 0.005000000000000001,
"max": 0.005000000000000001,
"count": 2423
}
},
"metadata": {
"timer_format_version": "0.1.0",
"start_time_seconds": "1716763793",
"python_version": "3.10.12 (main, Jul 5 2023, 15:02:25) [Clang 14.0.6 ]",
"command_line_arguments": "/opt/anaconda3/envs/rl/bin/mlagents-learn --force ./config/poca/SoccerTwos.yaml --env=./training-envs-executables/SoccerTwos/SoccerTwos.app --run-id=SoccerTwos-001 --no-graphics",
"mlagents_version": "1.1.0.dev0",
"mlagents_envs_version": "1.1.0.dev0",
"communication_protocol_version": "1.5.0",
"pytorch_version": "2.3.0",
"numpy_version": "1.23.5",
"end_time_seconds": "1716935012"
},
"total": 171219.27670237498,
"count": 1,
"self": 0.18242091697175056,
"children": {
"run_training.setup": {
"total": 0.013269540999317542,
"count": 1,
"self": 0.013269540999317542
},
"TrainerController.start_learning": {
"total": 171219.081011917,
"count": 1,
"self": 31.257770368101774,
"children": {
"TrainerController._reset_env": {
"total": 12.590659799958303,
"count": 250,
"self": 12.590659799958303
},
"TrainerController.advance": {
"total": 171175.13169974895,
"count": 3444336,
"self": 29.032848853355972,
"children": {
"env_step": {
"total": 138124.10667970876,
"count": 3444336,
"self": 133096.7276505906,
"children": {
"SubprocessEnvManager._take_step": {
"total": 5007.574257688111,
"count": 3444336,
"self": 150.42064816422862,
"children": {
"TorchPolicy.evaluate": {
"total": 4857.153609523883,
"count": 6282274,
"self": 4857.153609523883
}
}
},
"workers": {
"total": 19.80477143004464,
"count": 3444336,
"self": 0.0,
"children": {
"worker_root": {
"total": 171165.84883158753,
"count": 3444336,
"is_parallel": true,
"self": 42123.76268863637,
"children": {
"steps_from_proto": {
"total": 0.3698663835039042,
"count": 500,
"is_parallel": true,
"self": 0.04107995965750888,
"children": {
"_process_rank_one_or_two_observation": {
"total": 0.32878642384639534,
"count": 2000,
"is_parallel": true,
"self": 0.32878642384639534
}
}
},
"UnityEnvironment.step": {
"total": 129041.71627656765,
"count": 3444336,
"is_parallel": true,
"self": 357.02263938727265,
"children": {
"UnityEnvironment._generate_step_input": {
"total": 2273.3610816853106,
"count": 3444336,
"is_parallel": true,
"self": 2273.3610816853106
},
"communicator.exchange": {
"total": 121752.08375839358,
"count": 3444336,
"is_parallel": true,
"self": 121752.08375839358
},
"steps_from_proto": {
"total": 4659.248797101498,
"count": 6888672,
"is_parallel": true,
"self": 507.09123357024873,
"children": {
"_process_rank_one_or_two_observation": {
"total": 4152.157563531249,
"count": 27554688,
"is_parallel": true,
"self": 4152.157563531249
}
}
}
}
}
}
}
}
}
}
},
"trainer_advance": {
"total": 33021.99217118684,
"count": 3444336,
"self": 252.24339973379392,
"children": {
"process_trajectory": {
"total": 7260.486781825728,
"count": 3444336,
"self": 7250.712993118652,
"children": {
"RLTrainer._checkpoint": {
"total": 9.773788707076164,
"count": 100,
"self": 9.773788707076164
}
}
},
"_update_policy": {
"total": 25509.261989627314,
"count": 2423,
"self": 2962.7607473233365,
"children": {
"TorchPOCAOptimizer.update": {
"total": 22546.501242303977,
"count": 72690,
"self": 22546.501242303977
}
}
}
}
}
}
},
"trainer_threads": {
"total": 3.3300602808594704e-07,
"count": 1,
"self": 3.3300602808594704e-07
},
"TrainerController._save_models": {
"total": 0.100881666992791,
"count": 1,
"self": 0.0005530419875867665,
"children": {
"RLTrainer._checkpoint": {
"total": 0.10032862500520423,
"count": 1,
"self": 0.10032862500520423
}
}
}
}
}
}
}