{
"name": "root",
"gauges": {
"Pyramids.Policy.Entropy.mean": {
"value": 0.36080846190452576,
"min": 0.36080846190452576,
"max": 1.492026686668396,
"count": 33
},
"Pyramids.Policy.Entropy.sum": {
"value": 10887.755859375,
"min": 10862.2333984375,
"max": 45262.12109375,
"count": 33
},
"Pyramids.Step.mean": {
"value": 989935.0,
"min": 29952.0,
"max": 989935.0,
"count": 33
},
"Pyramids.Step.sum": {
"value": 989935.0,
"min": 29952.0,
"max": 989935.0,
"count": 33
},
"Pyramids.Policy.ExtrinsicValueEstimate.mean": {
"value": 0.6282707452774048,
"min": -0.09663841873407364,
"max": 0.6517738103866577,
"count": 33
},
"Pyramids.Policy.ExtrinsicValueEstimate.sum": {
"value": 182.8267822265625,
"min": -23.386497497558594,
"max": 184.45199584960938,
"count": 33
},
"Pyramids.Policy.RndValueEstimate.mean": {
"value": 0.0017289967508986592,
"min": -0.014226950705051422,
"max": 0.41538336873054504,
"count": 33
},
"Pyramids.Policy.RndValueEstimate.sum": {
"value": 0.5031380653381348,
"min": -3.7416880130767822,
"max": 98.44586181640625,
"count": 33
},
"Pyramids.Losses.PolicyLoss.mean": {
"value": 0.07127154217480226,
"min": 0.06572784943481176,
"max": 0.07253702786248581,
"count": 33
},
"Pyramids.Losses.PolicyLoss.sum": {
"value": 0.9978015904472317,
"min": 0.48405769124798387,
"max": 1.082901483256137,
"count": 33
},
"Pyramids.Losses.ValueLoss.mean": {
"value": 0.016454836634446374,
"min": 0.0006503703630575881,
"max": 0.017475892354999845,
"count": 33
},
"Pyramids.Losses.ValueLoss.sum": {
"value": 0.23036771288224925,
"min": 0.008454814719748644,
"max": 0.2523642569673636,
"count": 33
},
"Pyramids.Policy.LearningRate.mean": {
"value": 7.370026114785708e-06,
"min": 7.370026114785708e-06,
"max": 0.00029515063018788575,
"count": 33
},
"Pyramids.Policy.LearningRate.sum": {
"value": 0.00010318036560699991,
"min": 0.00010318036560699991,
"max": 0.0035073788308738,
"count": 33
},
"Pyramids.Policy.Epsilon.mean": {
"value": 0.10245664285714284,
"min": 0.10245664285714284,
"max": 0.19838354285714285,
"count": 33
},
"Pyramids.Policy.Epsilon.sum": {
"value": 1.4343929999999998,
"min": 1.3886848,
"max": 2.5691262000000004,
"count": 33
},
"Pyramids.Policy.Beta.mean": {
"value": 0.0002554186214285713,
"min": 0.0002554186214285713,
"max": 0.00983851593142857,
"count": 33
},
"Pyramids.Policy.Beta.sum": {
"value": 0.003575860699999998,
"min": 0.003575860699999998,
"max": 0.11693570738,
"count": 33
},
"Pyramids.Losses.RNDLoss.mean": {
"value": 0.01007277611643076,
"min": 0.01000268291682005,
"max": 0.43067994713783264,
"count": 33
},
"Pyramids.Losses.RNDLoss.sum": {
"value": 0.14101886749267578,
"min": 0.14003756642341614,
"max": 3.0147595405578613,
"count": 33
},
"Pyramids.Environment.EpisodeLength.mean": {
"value": 295.25471698113205,
"min": 284.63,
"max": 999.0,
"count": 33
},
"Pyramids.Environment.EpisodeLength.sum": {
"value": 31297.0,
"min": 15984.0,
"max": 33496.0,
"count": 33
},
"Pyramids.Environment.CumulativeReward.mean": {
"value": 1.6466228403505825,
"min": -1.0000000521540642,
"max": 1.6714336463425419,
"count": 33
},
"Pyramids.Environment.CumulativeReward.sum": {
"value": 172.89539823681116,
"min": -28.760401651263237,
"max": 172.89539823681116,
"count": 33
},
"Pyramids.Policy.ExtrinsicReward.mean": {
"value": 1.6466228403505825,
"min": -1.0000000521540642,
"max": 1.6714336463425419,
"count": 33
},
"Pyramids.Policy.ExtrinsicReward.sum": {
"value": 172.89539823681116,
"min": -28.760401651263237,
"max": 172.89539823681116,
"count": 33
},
"Pyramids.Policy.RndReward.mean": {
"value": 0.03131883567298895,
"min": 0.03131883567298895,
"max": 9.808839224278927,
"count": 33
},
"Pyramids.Policy.RndReward.sum": {
"value": 3.2884777456638403,
"min": 3.019375595729798,
"max": 156.94142758846283,
"count": 33
},
"Pyramids.IsTraining.mean": {
"value": 1.0,
"min": 1.0,
"max": 1.0,
"count": 33
},
"Pyramids.IsTraining.sum": {
"value": 1.0,
"min": 1.0,
"max": 1.0,
"count": 33
}
},
"metadata": {
"timer_format_version": "0.1.0",
"start_time_seconds": "1709138675",
"python_version": "3.10.12 (main, Nov 20 2023, 15:14:05) [GCC 11.4.0]",
"command_line_arguments": "/usr/local/bin/mlagents-learn ./config/ppo/PyramidsRND.yaml --env=./training-envs-executables/linux/Pyramids/Pyramids --run-id=Pyramids Training --no-graphics",
"mlagents_version": "1.1.0.dev0",
"mlagents_envs_version": "1.1.0.dev0",
"communication_protocol_version": "1.5.0",
"pytorch_version": "2.2.1+cu121",
"numpy_version": "1.23.5",
"end_time_seconds": "1709140963"
},
"total": 2288.6852070520004,
"count": 1,
"self": 0.5357530810001663,
"children": {
"run_training.setup": {
"total": 0.050847814999997354,
"count": 1,
"self": 0.050847814999997354
},
"TrainerController.start_learning": {
"total": 2288.098606156,
"count": 1,
"self": 1.5783597919403292,
"children": {
"TrainerController._reset_env": {
"total": 3.7456265030000395,
"count": 1,
"self": 3.7456265030000395
},
"TrainerController.advance": {
"total": 2282.62415719906,
"count": 64077,
"self": 1.6699714160372423,
"children": {
"env_step": {
"total": 1644.5218062210006,
"count": 64077,
"self": 1499.128876419994,
"children": {
"SubprocessEnvManager._take_step": {
"total": 144.41877855503867,
"count": 64077,
"self": 5.1964316380432365,
"children": {
"TorchPolicy.evaluate": {
"total": 139.22234691699543,
"count": 62557,
"self": 139.22234691699543
}
}
},
"workers": {
"total": 0.9741512459679598,
"count": 64077,
"self": 0.0,
"children": {
"worker_root": {
"total": 2282.2798452719253,
"count": 64077,
"is_parallel": true,
"self": 911.648962206973,
"children": {
"run_training.setup": {
"total": 0.0,
"count": 0,
"is_parallel": true,
"self": 0.0,
"children": {
"steps_from_proto": {
"total": 0.006133458000022074,
"count": 1,
"is_parallel": true,
"self": 0.00424862399995618,
"children": {
"_process_rank_one_or_two_observation": {
"total": 0.0018848340000658936,
"count": 8,
"is_parallel": true,
"self": 0.0018848340000658936
}
}
},
"UnityEnvironment.step": {
"total": 0.05103326599999036,
"count": 1,
"is_parallel": true,
"self": 0.0006429049999496783,
"children": {
"UnityEnvironment._generate_step_input": {
"total": 0.0005362690000083603,
"count": 1,
"is_parallel": true,
"self": 0.0005362690000083603
},
"communicator.exchange": {
"total": 0.04800169200007076,
"count": 1,
"is_parallel": true,
"self": 0.04800169200007076
},
"steps_from_proto": {
"total": 0.0018523999999615626,
"count": 1,
"is_parallel": true,
"self": 0.00037941199991564645,
"children": {
"_process_rank_one_or_two_observation": {
"total": 0.0014729880000459161,
"count": 8,
"is_parallel": true,
"self": 0.0014729880000459161
}
}
}
}
}
}
},
"UnityEnvironment.step": {
"total": 1370.6308830649523,
"count": 64076,
"is_parallel": true,
"self": 36.92307292408418,
"children": {
"UnityEnvironment._generate_step_input": {
"total": 26.903799616936453,
"count": 64076,
"is_parallel": true,
"self": 26.903799616936453
},
"communicator.exchange": {
"total": 1197.30181012293,
"count": 64076,
"is_parallel": true,
"self": 1197.30181012293
},
"steps_from_proto": {
"total": 109.5022004010018,
"count": 64076,
"is_parallel": true,
"self": 22.83030750603689,
"children": {
"_process_rank_one_or_two_observation": {
"total": 86.67189289496491,
"count": 512608,
"is_parallel": true,
"self": 86.67189289496491
}
}
}
}
}
}
}
}
}
}
},
"trainer_advance": {
"total": 636.4323795620223,
"count": 64077,
"self": 3.03253870602839,
"children": {
"process_trajectory": {
"total": 132.35724074799305,
"count": 64077,
"self": 132.04011050699296,
"children": {
"RLTrainer._checkpoint": {
"total": 0.31713024100008624,
"count": 2,
"self": 0.31713024100008624
}
}
},
"_update_policy": {
"total": 501.0426001080009,
"count": 455,
"self": 292.6589765649935,
"children": {
"TorchPPOOptimizer.update": {
"total": 208.3836235430074,
"count": 22761,
"self": 208.3836235430074
}
}
}
}
}
}
},
"trainer_threads": {
"total": 1.5659998098271899e-06,
"count": 1,
"self": 1.5659998098271899e-06
},
"TrainerController._save_models": {
"total": 0.15046109600007185,
"count": 1,
"self": 0.0026089240000146674,
"children": {
"RLTrainer._checkpoint": {
"total": 0.14785217200005718,
"count": 1,
"self": 0.14785217200005718
}
}
}
}
}
}
}