{
"name": "root",
"gauges": {
"Pyramids.Policy.Entropy.mean": {
"value": 0.8404826521873474,
"min": 0.8404826521873474,
"max": 1.4598430395126343,
"count": 10
},
"Pyramids.Policy.Entropy.sum": {
"value": 25106.8984375,
"min": 25106.8984375,
"max": 44285.796875,
"count": 10
},
"Pyramids.Step.mean": {
"value": 299952.0,
"min": 29952.0,
"max": 299952.0,
"count": 10
},
"Pyramids.Step.sum": {
"value": 299952.0,
"min": 29952.0,
"max": 299952.0,
"count": 10
},
"Pyramids.Policy.ExtrinsicValueEstimate.mean": {
"value": -0.03310125693678856,
"min": -0.07654073089361191,
"max": 0.1605910360813141,
"count": 10
},
"Pyramids.Policy.ExtrinsicValueEstimate.sum": {
"value": -8.010503768920898,
"min": -18.44631576538086,
"max": 38.06007385253906,
"count": 10
},
"Pyramids.Policy.RndValueEstimate.mean": {
"value": 0.019918009638786316,
"min": 0.019918009638786316,
"max": 0.28890857100486755,
"count": 10
},
"Pyramids.Policy.RndValueEstimate.sum": {
"value": 4.8201584815979,
"min": 4.8201584815979,
"max": 68.47132873535156,
"count": 10
},
"Pyramids.Losses.PolicyLoss.mean": {
"value": 0.06773245448382904,
"min": 0.06645605675805881,
"max": 0.07163800704222076,
"count": 10
},
"Pyramids.Losses.PolicyLoss.sum": {
"value": 0.9482543627736065,
"min": 0.49019168771990046,
"max": 0.9815040180547381,
"count": 10
},
"Pyramids.Losses.ValueLoss.mean": {
"value": 0.0028364419568095494,
"min": 0.0004969434000239663,
"max": 0.006827787384967173,
"count": 10
},
"Pyramids.Losses.ValueLoss.sum": {
"value": 0.03971018739533369,
"min": 0.005963320800287596,
"max": 0.06437933459762338,
"count": 10
},
"Pyramids.Policy.LearningRate.mean": {
"value": 1.5029594990166666e-05,
"min": 1.5029594990166666e-05,
"max": 0.0002838354339596191,
"count": 10
},
"Pyramids.Policy.LearningRate.sum": {
"value": 0.00021041432986233333,
"min": 0.00021041432986233333,
"max": 0.0023461314179563336,
"count": 10
},
"Pyramids.Policy.Epsilon.mean": {
"value": 0.10500983333333334,
"min": 0.10500983333333334,
"max": 0.19461180952380958,
"count": 10
},
"Pyramids.Policy.Epsilon.sum": {
"value": 1.4701376666666668,
"min": 1.362282666666667,
"max": 2.03139,
"count": 10
},
"Pyramids.Policy.Beta.mean": {
"value": 0.00051048235,
"min": 0.00051048235,
"max": 0.00946171977142857,
"count": 10
},
"Pyramids.Policy.Beta.sum": {
"value": 0.007146752900000001,
"min": 0.007146752900000001,
"max": 0.07824616229999999,
"count": 10
},
"Pyramids.Losses.RNDLoss.mean": {
"value": 0.02987137995660305,
"min": 0.02987137995660305,
"max": 0.363151490688324,
"count": 10
},
"Pyramids.Losses.RNDLoss.sum": {
"value": 0.4181993305683136,
"min": 0.4181993305683136,
"max": 2.542060375213623,
"count": 10
},
"Pyramids.Environment.EpisodeLength.mean": {
"value": 953.8125,
"min": 882.1034482758621,
"max": 999.0,
"count": 10
},
"Pyramids.Environment.EpisodeLength.sum": {
"value": 30522.0,
"min": 15984.0,
"max": 32618.0,
"count": 10
},
"Pyramids.Environment.CumulativeReward.mean": {
"value": -0.5657613385588892,
"min": -1.0000000521540642,
"max": -0.26180694740394067,
"count": 10
},
"Pyramids.Environment.CumulativeReward.sum": {
"value": -17.538601495325565,
"min": -30.65000168979168,
"max": -7.592401474714279,
"count": 10
},
"Pyramids.Policy.ExtrinsicReward.mean": {
"value": -0.5657613385588892,
"min": -1.0000000521540642,
"max": -0.26180694740394067,
"count": 10
},
"Pyramids.Policy.ExtrinsicReward.sum": {
"value": -17.538601495325565,
"min": -30.65000168979168,
"max": -7.592401474714279,
"count": 10
},
"Pyramids.Policy.RndReward.mean": {
"value": 0.30023005015156684,
"min": 0.30023005015156684,
"max": 7.17590135615319,
"count": 10
},
"Pyramids.Policy.RndReward.sum": {
"value": 9.307131554698572,
"min": 8.91508346516639,
"max": 114.81442169845104,
"count": 10
},
"Pyramids.IsTraining.mean": {
"value": 1.0,
"min": 1.0,
"max": 1.0,
"count": 10
},
"Pyramids.IsTraining.sum": {
"value": 1.0,
"min": 1.0,
"max": 1.0,
"count": 10
}
},
"metadata": {
"timer_format_version": "0.1.0",
"start_time_seconds": "1676561680",
"python_version": "3.8.10 (default, Nov 14 2022, 12:59:47) \n[GCC 9.4.0]",
"command_line_arguments": "/usr/local/bin/mlagents-learn ./config/ppo/PyramidsRND.yaml --env=./training-envs-executables/linux/Pyramids/Pyramids --run-id=Pyramids Training --no-graphics",
"mlagents_version": "0.29.0.dev0",
"mlagents_envs_version": "0.29.0.dev0",
"communication_protocol_version": "1.5.0",
"pytorch_version": "1.8.1+cu102",
"numpy_version": "1.21.6",
"end_time_seconds": "1676562704"
},
"total": 1024.5575820230001,
"count": 1,
"self": 0.6995505899999443,
"children": {
"run_training.setup": {
"total": 0.1480947869999909,
"count": 1,
"self": 0.1480947869999909
},
"TrainerController.start_learning": {
"total": 1023.7099366460002,
"count": 1,
"self": 0.7865703300121822,
"children": {
"TrainerController._reset_env": {
"total": 5.383240442999977,
"count": 1,
"self": 5.383240442999977
},
"TrainerController.advance": {
"total": 1017.4169878609878,
"count": 18897,
"self": 0.8082753710023098,
"children": {
"env_step": {
"total": 619.0710352310152,
"count": 18897,
"self": 571.8386058710821,
"children": {
"SubprocessEnvManager._take_step": {
"total": 46.70313683396262,
"count": 18897,
"self": 2.2971994609656576,
"children": {
"TorchPolicy.evaluate": {
"total": 44.40593737299696,
"count": 18794,
"self": 9.849545270017188,
"children": {
"TorchPolicy.sample_actions": {
"total": 34.55639210297977,
"count": 18794,
"self": 34.55639210297977
}
}
}
}
},
"workers": {
"total": 0.5292925259705044,
"count": 18897,
"self": 0.0,
"children": {
"worker_root": {
"total": 1021.5677363160107,
"count": 18897,
"is_parallel": true,
"self": 507.5499073669962,
"children": {
"run_training.setup": {
"total": 0.0,
"count": 0,
"is_parallel": true,
"self": 0.0,
"children": {
"steps_from_proto": {
"total": 0.003455330999713624,
"count": 1,
"is_parallel": true,
"self": 0.0015149869996093912,
"children": {
"_process_rank_one_or_two_observation": {
"total": 0.0019403440001042327,
"count": 8,
"is_parallel": true,
"self": 0.0019403440001042327
}
}
},
"UnityEnvironment.step": {
"total": 0.07122465100019326,
"count": 1,
"is_parallel": true,
"self": 0.0006838459999016777,
"children": {
"UnityEnvironment._generate_step_input": {
"total": 0.0005478600000969891,
"count": 1,
"is_parallel": true,
"self": 0.0005478600000969891
},
"communicator.exchange": {
"total": 0.06766062799988504,
"count": 1,
"is_parallel": true,
"self": 0.06766062799988504
},
"steps_from_proto": {
"total": 0.0023323170003095584,
"count": 1,
"is_parallel": true,
"self": 0.0006167400001686474,
"children": {
"_process_rank_one_or_two_observation": {
"total": 0.001715577000140911,
"count": 8,
"is_parallel": true,
"self": 0.001715577000140911
}
}
}
}
}
}
},
"UnityEnvironment.step": {
"total": 514.0178289490145,
"count": 18896,
"is_parallel": true,
"self": 14.077046086012615,
"children": {
"UnityEnvironment._generate_step_input": {
"total": 8.6963265929935,
"count": 18896,
"is_parallel": true,
"self": 8.6963265929935
},
"communicator.exchange": {
"total": 449.5596541469986,
"count": 18896,
"is_parallel": true,
"self": 449.5596541469986
},
"steps_from_proto": {
"total": 41.68480212300983,
"count": 18896,
"is_parallel": true,
"self": 10.560656833954454,
"children": {
"_process_rank_one_or_two_observation": {
"total": 31.124145289055377,
"count": 151168,
"is_parallel": true,
"self": 31.124145289055377
}
}
}
}
}
}
}
}
}
}
},
"trainer_advance": {
"total": 397.53767725897023,
"count": 18897,
"self": 1.3109092859881457,
"children": {
"process_trajectory": {
"total": 74.87929048998103,
"count": 18897,
"self": 74.87929048998103
},
"_update_policy": {
"total": 321.34747748300106,
"count": 118,
"self": 87.390586292021,
"children": {
"TorchPPOOptimizer.update": {
"total": 233.95689119098006,
"count": 6882,
"self": 233.95689119098006
}
}
}
}
}
}
},
"trainer_threads": {
"total": 1.261999841517536e-06,
"count": 1,
"self": 1.261999841517536e-06
},
"TrainerController._save_models": {
"total": 0.12313675000041258,
"count": 1,
"self": 0.002378926000346837,
"children": {
"RLTrainer._checkpoint": {
"total": 0.12075782400006574,
"count": 1,
"self": 0.12075782400006574
}
}
}
}
}
}
}