poca-SoccerTwos / run_logs / timers.json
{
"name": "root",
"gauges": {
"SoccerTwos.Policy.Entropy.mean": {
"value": 2.0262696743011475,
"min": 1.9880521297454834,
"max": 3.295755624771118,
"count": 508
},
"SoccerTwos.Policy.Entropy.sum": {
"value": 40784.7578125,
"min": 25418.4921875,
"max": 120190.4140625,
"count": 508
},
"SoccerTwos.Environment.EpisodeLength.mean": {
"value": 47.95192307692308,
"min": 39.29032258064516,
"max": 999.0,
"count": 508
},
"SoccerTwos.Environment.EpisodeLength.sum": {
"value": 19948.0,
"min": 16120.0,
"max": 23200.0,
"count": 508
},
"SoccerTwos.Self-play.ELO.mean": {
"value": 1515.1394460438003,
"min": 1180.6211122271916,
"max": 1527.1639885210489,
"count": 479
},
"SoccerTwos.Self-play.ELO.sum": {
"value": 315149.0047771105,
"min": 2361.242224454383,
"max": 359637.58795954974,
"count": 479
},
"SoccerTwos.Step.mean": {
"value": 5079950.0,
"min": 9538.0,
"max": 5079950.0,
"count": 508
},
"SoccerTwos.Step.sum": {
"value": 5079950.0,
"min": 9538.0,
"max": 5079950.0,
"count": 508
},
"SoccerTwos.Policy.ExtrinsicBaselineEstimate.mean": {
"value": -0.02973540686070919,
"min": -0.09560553729534149,
"max": 0.2579193115234375,
"count": 508
},
"SoccerTwos.Policy.ExtrinsicBaselineEstimate.sum": {
"value": -6.155229091644287,
"min": -23.136539459228516,
"max": 35.22888946533203,
"count": 508
},
"SoccerTwos.Policy.ExtrinsicValueEstimate.mean": {
"value": -0.03305745869874954,
"min": -0.10276874899864197,
"max": 0.2554766535758972,
"count": 508
},
"SoccerTwos.Policy.ExtrinsicValueEstimate.sum": {
"value": -6.842893600463867,
"min": -24.870037078857422,
"max": 36.84461212158203,
"count": 508
},
"SoccerTwos.Environment.CumulativeReward.mean": {
"value": 0.0,
"min": 0.0,
"max": 0.0,
"count": 508
},
"SoccerTwos.Environment.CumulativeReward.sum": {
"value": 0.0,
"min": 0.0,
"max": 0.0,
"count": 508
},
"SoccerTwos.Policy.ExtrinsicReward.mean": {
"value": -0.14444927777645092,
"min": -0.4830588242586921,
"max": 0.5767661285015845,
"count": 508
},
"SoccerTwos.Policy.ExtrinsicReward.sum": {
"value": -29.901000499725342,
"min": -44.858599841594696,
"max": 71.51899993419647,
"count": 508
},
"SoccerTwos.Environment.GroupCumulativeReward.mean": {
"value": -0.14444927777645092,
"min": -0.4830588242586921,
"max": 0.5767661285015845,
"count": 508
},
"SoccerTwos.Environment.GroupCumulativeReward.sum": {
"value": -29.901000499725342,
"min": -44.858599841594696,
"max": 71.51899993419647,
"count": 508
},
"SoccerTwos.IsTraining.mean": {
"value": 1.0,
"min": 1.0,
"max": 1.0,
"count": 508
},
"SoccerTwos.IsTraining.sum": {
"value": 1.0,
"min": 1.0,
"max": 1.0,
"count": 508
},
"SoccerTwos.Losses.PolicyLoss.mean": {
"value": 0.01595897461908559,
"min": 0.011230257884350673,
"max": 0.023755209202257297,
"count": 243
},
"SoccerTwos.Losses.PolicyLoss.sum": {
"value": 0.01595897461908559,
"min": 0.011230257884350673,
"max": 0.023755209202257297,
"count": 243
},
"SoccerTwos.Losses.ValueLoss.mean": {
"value": 0.10269984404246012,
"min": 5.126551354805997e-06,
"max": 0.12066958571473757,
"count": 243
},
"SoccerTwos.Losses.ValueLoss.sum": {
"value": 0.10269984404246012,
"min": 5.126551354805997e-06,
"max": 0.12066958571473757,
"count": 243
},
"SoccerTwos.Losses.BaselineLoss.mean": {
"value": 0.1044114867846171,
"min": 6.205549493643048e-06,
"max": 0.12275022765000661,
"count": 243
},
"SoccerTwos.Losses.BaselineLoss.sum": {
"value": 0.1044114867846171,
"min": 6.205549493643048e-06,
"max": 0.12275022765000661,
"count": 243
},
"SoccerTwos.Policy.LearningRate.mean": {
"value": 0.0003,
"min": 0.0003,
"max": 0.0003,
"count": 243
},
"SoccerTwos.Policy.LearningRate.sum": {
"value": 0.0003,
"min": 0.0003,
"max": 0.0003,
"count": 243
},
"SoccerTwos.Policy.Epsilon.mean": {
"value": 0.20000000000000007,
"min": 0.20000000000000007,
"max": 0.20000000000000007,
"count": 243
},
"SoccerTwos.Policy.Epsilon.sum": {
"value": 0.20000000000000007,
"min": 0.20000000000000007,
"max": 0.20000000000000007,
"count": 243
},
"SoccerTwos.Policy.Beta.mean": {
"value": 0.005000000000000001,
"min": 0.005000000000000001,
"max": 0.005000000000000001,
"count": 243
},
"SoccerTwos.Policy.Beta.sum": {
"value": 0.005000000000000001,
"min": 0.005000000000000001,
"max": 0.005000000000000001,
"count": 243
}
},
"metadata": {
"timer_format_version": "0.1.0",
"start_time_seconds": "1720805995",
"python_version": "3.10.12 | packaged by Anaconda, Inc. | (main, Jul 5 2023, 19:01:18) [MSC v.1916 64 bit (AMD64)]",
"command_line_arguments": "\\\\?\\C:\\Users\\Ave\\miniconda3\\envs\\rl\\Scripts\\mlagents-learn ./config/poca/SoccerTwos.yaml --env=./training-envs-executables/SoccerTwos.exe --run-id=SoccerTwos --no-graphics",
"mlagents_version": "1.1.0.dev0",
"mlagents_envs_version": "1.1.0.dev0",
"communication_protocol_version": "1.5.0",
"pytorch_version": "2.3.1+cpu",
"numpy_version": "1.23.5",
"end_time_seconds": "1720818841"
},
"total": 12846.315494299983,
"count": 1,
"self": 0.5742558999918401,
"children": {
"run_training.setup": {
"total": 0.07667410001158714,
"count": 1,
"self": 0.07667410001158714
},
"TrainerController.start_learning": {
"total": 12845.66456429998,
"count": 1,
"self": 7.43605251115514,
"children": {
"TrainerController._reset_env": {
"total": 6.728150500217453,
"count": 24,
"self": 6.728150500217453
},
"TrainerController.advance": {
"total": 12831.357966088573,
"count": 345598,
"self": 6.89183339982992,
"children": {
"env_step": {
"total": 5336.416878271382,
"count": 345598,
"self": 4144.636669692758,
"children": {
"SubprocessEnvManager._take_step": {
"total": 1186.9340458841762,
"count": 345598,
"self": 39.76811819529394,
"children": {
"TorchPolicy.evaluate": {
"total": 1147.1659276888822,
"count": 644612,
"self": 1147.1659276888822
}
}
},
"workers": {
"total": 4.8461626944481395,
"count": 345597,
"self": 0.0,
"children": {
"worker_root": {
"total": 12832.510514082445,
"count": 345597,
"is_parallel": true,
"self": 9581.670501844841,
"children": {
"steps_from_proto": {
"total": 0.04172789986478165,
"count": 48,
"is_parallel": true,
"self": 0.007546900014858693,
"children": {
"_process_rank_one_or_two_observation": {
"total": 0.034180999849922955,
"count": 192,
"is_parallel": true,
"self": 0.034180999849922955
}
}
},
"UnityEnvironment.step": {
"total": 3250.7982843377395,
"count": 345597,
"is_parallel": true,
"self": 171.48967445077142,
"children": {
"UnityEnvironment._generate_step_input": {
"total": 165.6886538043036,
"count": 345597,
"is_parallel": true,
"self": 165.6886538043036
},
"communicator.exchange": {
"total": 2352.1890384132275,
"count": 345597,
"is_parallel": true,
"self": 2352.1890384132275
},
"steps_from_proto": {
"total": 561.4309176694369,
"count": 691194,
"is_parallel": true,
"self": 102.65535279654432,
"children": {
"_process_rank_one_or_two_observation": {
"total": 458.7755648728926,
"count": 2764776,
"is_parallel": true,
"self": 458.7755648728926
}
}
}
}
}
}
}
}
}
}
},
"trainer_advance": {
"total": 7488.049254417361,
"count": 345597,
"self": 57.0236196257174,
"children": {
"process_trajectory": {
"total": 1739.8856363912928,
"count": 345597,
"self": 1738.662033391418,
"children": {
"RLTrainer._checkpoint": {
"total": 1.2236029998748563,
"count": 10,
"self": 1.2236029998748563
}
}
},
"_update_policy": {
"total": 5691.139998400351,
"count": 244,
"self": 846.8035820008372,
"children": {
"TorchPOCAOptimizer.update": {
"total": 4844.336416399514,
"count": 7320,
"self": 4844.336416399514
}
}
}
}
}
}
},
"trainer_threads": {
"total": 1.00000761449337e-06,
"count": 1,
"self": 1.00000761449337e-06
},
"TrainerController._save_models": {
"total": 0.142394200025592,
"count": 1,
"self": 0.007007400039583445,
"children": {
"RLTrainer._checkpoint": {
"total": 0.13538679998600855,
"count": 1,
"self": 0.13538679998600855
}
}
}
}
}
}
}
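
For reference, a minimal sketch (assuming standard Python 3 with only the built-in json module, and an illustrative local path) of how one might load this timers.json and summarize the gauges and total wall-clock time recorded above:

    import json

    # Load the ML-Agents timer/gauge log produced by this run
    # (path is illustrative; adjust to where timers.json lives locally).
    with open("run_logs/timers.json") as f:
        timers = json.load(f)

    # Print the final value, range, and observation count for each gauge.
    for name, gauge in timers["gauges"].items():
        print(f"{name}: value={gauge['value']:.4f} "
              f"(min={gauge['min']:.4f}, max={gauge['max']:.4f}, "
              f"count={gauge['count']})")

    # Total wall-clock time of the run, in seconds.
    print("total seconds:", timers["total"])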