{
    "name": "root",
    "gauges": {
        "Agent.Policy.Entropy.mean": {
            "value": 1.4345054626464844,
            "min": 1.4196497201919556,
            "max": 1.4345054626464844,
            "count": 200
        },
        "Agent.Policy.Entropy.sum": {
            "value": 8202.501953125,
            "min": 7451.7021484375,
            "max": 10131.6142578125,
            "count": 200
        },
        "Agent.DroneBasedReforestation.TreeDropCount.mean": {
            "value": 1.0,
            "min": 0.4444444444444444,
            "max": 1.4,
            "count": 200
        },
        "Agent.DroneBasedReforestation.TreeDropCount.sum": {
            "value": 15.0,
            "min": 8.0,
            "max": 22.0,
            "count": 200
        },
        "Agent.DroneBasedReforestation.RechargeEnergyCount.mean": {
            "value": 9.0,
            "min": 8.133333333333333,
            "max": 62.27777777777778,
            "count": 200
        },
        "Agent.DroneBasedReforestation.RechargeEnergyCount.sum": {
            "value": 135.0,
            "min": 122.0,
            "max": 1121.0,
            "count": 200
        },
        "Agent.DroneBasedReforestation.SaveLocationCount.mean": {
            "value": 0.0,
            "min": 0.0,
            "max": 0.0,
            "count": 200
        },
        "Agent.DroneBasedReforestation.SaveLocationCount.sum": {
            "value": 0.0,
            "min": 0.0,
            "max": 0.0,
            "count": 200
        },
        "Agent.DroneBasedReforestation.OutofEnergyCount.mean": {
            "value": 1.0,
            "min": 0.4,
            "max": 1.0,
            "count": 200
        },
        "Agent.DroneBasedReforestation.OutofEnergyCount.sum": {
            "value": 15.0,
            "min": 6.0,
            "max": 18.0,
            "count": 200
        },
        "Agent.DroneBasedReforestation.CumulativeDistanceUntilTreeDrop.mean": {
            "value": 89.16977411905924,
            "min": 13.588333129882812,
            "max": 122.56704436408148,
            "count": 200
        },
        "Agent.DroneBasedReforestation.CumulativeDistanceUntilTreeDrop.sum": {
            "value": 1337.5466117858887,
            "min": 203.8249969482422,
            "max": 2206.206798553467,
            "count": 200
        },
        "Agent.DroneBasedReforestation.CumulativeTreeDropReward.mean": {
            "value": 9.874763335908453,
            "min": 1.4282119909922282,
            "max": 10.545252879460653,
            "count": 200
        },
        "Agent.DroneBasedReforestation.CumulativeTreeDropReward.sum": {
            "value": 148.1214500386268,
            "min": 21.423179864883423,
            "max": 189.81455183029175,
            "count": 200
        },
        "Agent.DroneBasedReforestation.CumulativeDistanceReward.mean": {
            "value": 4.005469703674317,
            "min": 0.31344227658377755,
            "max": 4.538780053456624,
            "count": 200
        },
        "Agent.DroneBasedReforestation.CumulativeDistanceReward.sum": {
            "value": 60.082045555114746,
            "min": 5.455199956893921,
            "max": 81.69804096221924,
            "count": 200
        },
        "Agent.DroneBasedReforestation.CumulativeNormalizedDistanceUntilTreeDrop.mean": {
            "value": 0.40054696997006733,
            "min": 0.03134422832065158,
            "max": 0.4538780020342933,
            "count": 200
        },
        "Agent.DroneBasedReforestation.CumulativeNormalizedDistanceUntilTreeDrop.sum": {
            "value": 6.00820454955101,
            "min": 0.5455200001597404,
            "max": 8.169804036617279,
            "count": 200
        },
        "Agent.DroneBasedReforestation.CumulativeDistancetoExistingTrees.mean": {
            "value": 29.395476841926573,
            "min": 29.395476841926573,
            "max": 122.22820276684232,
            "count": 200
        },
        "Agent.DroneBasedReforestation.CumulativeDistancetoExistingTrees.sum": {
            "value": 440.9321526288986,
            "min": 440.9321526288986,
            "max": 2200.1076498031616,
            "count": 200
        },
        "Agent.Environment.LessonNumber.difficulty.mean": {
            "value": 0.0,
            "min": 0.0,
            "max": 0.0,
            "count": 200
        },
        "Agent.Environment.LessonNumber.difficulty.sum": {
            "value": 0.0,
            "min": 0.0,
            "max": 0.0,
            "count": 200
        },
        "Agent.Environment.LessonNumber.task.mean": {
            "value": 0.0,
            "min": 0.0,
            "max": 0.0,
            "count": 200
        },
        "Agent.Environment.LessonNumber.task.sum": {
            "value": 0.0,
            "min": 0.0,
            "max": 0.0,
            "count": 200
        },
        "Agent.Environment.EpisodeLength.mean": {
            "value": 379.2,
            "min": 345.3333333333333,
            "max": 399.0,
            "count": 200
        },
        "Agent.Environment.EpisodeLength.sum": {
            "value": 5688.0,
            "min": 5211.0,
            "max": 7128.0,
            "count": 200
        },
        "Agent.Step.mean": {
            "value": 1199728.0,
            "min": 5987.0,
            "max": 1199728.0,
            "count": 200
        },
        "Agent.Step.sum": {
            "value": 1199728.0,
            "min": 5987.0,
            "max": 1199728.0,
            "count": 200
        },
        "Agent.Policy.CuriosityValueEstimate.mean": {
            "value": 0.28683173656463623,
            "min": 0.02928275428712368,
            "max": 1.0453386306762695,
            "count": 200
        },
        "Agent.Policy.CuriosityValueEstimate.sum": {
            "value": 4.58930778503418,
            "min": 0.43924131989479065,
            "max": 17.770755767822266,
            "count": 200
        },
        "Agent.Policy.ExtrinsicValueEstimate.mean": {
            "value": 1.1851378679275513,
            "min": 0.06568823009729385,
            "max": 1.5239319801330566,
            "count": 200
        },
        "Agent.Policy.ExtrinsicValueEstimate.sum": {
            "value": 18.96220588684082,
            "min": 1.1166999340057373,
            "max": 24.382911682128906,
            "count": 200
        },
        "Agent.Environment.CumulativeReward.mean": {
            "value": 17.46530520915985,
            "min": -1.2342333793640137,
            "max": 17.736011838912965,
            "count": 200
        },
        "Agent.Environment.CumulativeReward.sum": {
            "value": 279.4448833465576,
            "min": -19.74773406982422,
            "max": 279.4448833465576,
            "count": 200
        },
        "Agent.Policy.CuriosityReward.mean": {
            "value": 1.102638527750969,
            "min": 0.0,
            "max": 13.09873253107071,
            "count": 200
        },
        "Agent.Policy.CuriosityReward.sum": {
            "value": 17.642216444015503,
            "min": 0.0,
            "max": 209.57972049713135,
            "count": 200
        },
        "Agent.Policy.ExtrinsicReward.mean": {
            "value": 15.718773733824492,
            "min": -1.1108101196587086,
            "max": 15.96240953207016,
            "count": 200
        },
        "Agent.Policy.ExtrinsicReward.sum": {
            "value": 251.50037974119186,
            "min": -17.772961914539337,
            "max": 251.50037974119186,
            "count": 200
        },
        "Agent.IsTraining.mean": {
            "value": 1.0,
            "min": 1.0,
            "max": 1.0,
            "count": 200
        },
        "Agent.IsTraining.sum": {
            "value": 1.0,
            "min": 1.0,
            "max": 1.0,
            "count": 200
        },
        "Agent.Losses.PolicyLoss.mean": {
            "value": 0.023352904590191664,
            "min": 0.015012246297879351,
            "max": 0.03334855157192106,
            "count": 135
        },
        "Agent.Losses.PolicyLoss.sum": {
            "value": 0.023352904590191664,
            "min": 0.015012246297879351,
            "max": 0.03334855157192106,
            "count": 135
        },
        "Agent.Losses.ValueLoss.mean": {
            "value": 0.6238656871848636,
            "min": 0.13406095250199238,
            "max": 1.3653589884440105,
            "count": 135
        },
        "Agent.Losses.ValueLoss.sum": {
            "value": 0.6238656871848636,
            "min": 0.13406095250199238,
            "max": 1.3653589884440105,
            "count": 135
        },
        "Agent.Policy.LearningRate.mean": {
            "value": 8.198497267500081e-07,
            "min": 8.198497267500081e-07,
            "max": 0.00029780325073225,
            "count": 135
        },
        "Agent.Policy.LearningRate.sum": {
            "value": 8.198497267500081e-07,
            "min": 8.198497267500081e-07,
            "max": 0.00029780325073225,
            "count": 135
        },
        "Agent.Policy.Epsilon.mean": {
            "value": 0.10027325,
            "min": 0.10027325,
            "max": 0.19926775000000005,
            "count": 135
        },
        "Agent.Policy.Epsilon.sum": {
            "value": 0.10027325,
            "min": 0.10027325,
            "max": 0.19926775000000005,
            "count": 135
        },
        "Agent.Policy.Beta.mean": {
            "value": 2.363517500000013e-05,
            "min": 2.363517500000013e-05,
            "max": 0.004963460725,
            "count": 135
        },
        "Agent.Policy.Beta.sum": {
            "value": 2.363517500000013e-05,
            "min": 2.363517500000013e-05,
            "max": 0.004963460725,
            "count": 135
        },
        "Agent.Losses.CuriosityForwardLoss.mean": {
            "value": 0.02761068429659914,
            "min": 0.02499390913483997,
            "max": 0.6027635087569555,
            "count": 135
        },
        "Agent.Losses.CuriosityForwardLoss.sum": {
            "value": 0.02761068429659914,
            "min": 0.02499390913483997,
            "max": 0.6027635087569555,
            "count": 135
        },
        "Agent.Losses.CuriosityInverseLoss.mean": {
            "value": 2.197030217559249,
            "min": 2.037269095579783,
            "max": 3.315477500359217,
            "count": 135
        },
        "Agent.Losses.CuriosityInverseLoss.sum": {
            "value": 2.197030217559249,
            "min": 2.037269095579783,
            "max": 3.315477500359217,
            "count": 135
        }
    },
    "metadata": {
        "timer_format_version": "0.1.0",
        "start_time_seconds": "1717630995",
        "python_version": "3.9.18 (main, Sep 11 2023, 14:09:26) [MSC v.1916 64 bit (AMD64)]",
        "command_line_arguments": "C:\\Users\\pdsie\\anaconda3\\envs\\mlagents20\\Scripts\\mlagents-learn c:/users/pdsie/documents/hivex/src/hivex/training/baseline/ml_agents/configs/mlagents/tmp/train/DroneBasedReforestation_difficulty_5_task_0_run_id_2_train.yaml --run-id=DroneBasedReforestation/train/DroneBasedReforestation_difficulty_5_task_0_run_id_2_train --base-port 5007",
        "mlagents_version": "0.30.0",
        "mlagents_envs_version": "0.30.0",
        "communication_protocol_version": "1.5.0",
        "pytorch_version": "1.7.1+cu110",
        "numpy_version": "1.21.0",
        "end_time_seconds": "1717634666"
    },
    "total": 3671.2171716999997,
    "count": 1,
    "self": 0.2712237999999161,
    "children": {
        "run_training.setup": {
            "total": 0.05108199999999996,
            "count": 1,
            "self": 0.05108199999999996
        },
        "TrainerController.start_learning": {
            "total": 3670.8948659,
            "count": 1,
            "self": 5.001083099995867,
            "children": {
                "TrainerController._reset_env": {
                    "total": 2.0102677,
                    "count": 1,
                    "self": 2.0102677
                },
                "TrainerController.advance": {
                    "total": 3663.7077979000037,
                    "count": 400988,
                    "self": 4.961417599907236,
                    "children": {
                        "env_step": {
                            "total": 3658.7463803000965,
                            "count": 400988,
                            "self": 1644.3199452001156,
                            "children": {
                                "SubprocessEnvManager._take_step": {
                                    "total": 2011.0064264999294,
                                    "count": 400988,
                                    "self": 10.466995599909751,
                                    "children": {
                                        "TorchPolicy.evaluate": {
                                            "total": 2000.5394309000196,
                                            "count": 400043,
                                            "self": 2000.5394309000196
                                        }
                                    }
                                },
                                "workers": {
                                    "total": 3.420008600051421,
                                    "count": 400988,
                                    "self": 0.0,
                                    "children": {
                                        "worker_root": {
                                            "total": 3662.342524000176,
                                            "count": 400988,
                                            "is_parallel": true,
                                            "self": 2222.298089800248,
                                            "children": {
                                                "steps_from_proto": {
                                                    "total": 0.006183199999999944,
                                                    "count": 1,
                                                    "is_parallel": true,
                                                    "self": 0.00010350000000003412,
                                                    "children": {
                                                        "_process_maybe_compressed_observation": {
                                                            "total": 0.006034399999999884,
                                                            "count": 2,
                                                            "is_parallel": true,
                                                            "self": 3.019999999986922e-05,
                                                            "children": {
                                                                "_observation_to_np_array": {
                                                                    "total": 0.006004200000000015,
                                                                    "count": 3,
                                                                    "is_parallel": true,
                                                                    "self": 3.019999999986922e-05,
                                                                    "children": {
                                                                        "process_pixels": {
                                                                            "total": 0.005974000000000146,
                                                                            "count": 3,
                                                                            "is_parallel": true,
                                                                            "self": 0.00023519999999987995,
                                                                            "children": {
                                                                                "image_decompress": {
                                                                                    "total": 0.005738800000000266,
                                                                                    "count": 3,
                                                                                    "is_parallel": true,
                                                                                    "self": 0.005738800000000266
                                                                                }
                                                                            }
                                                                        }
                                                                    }
                                                                }
                                                            }
                                                        },
                                                        "_process_rank_one_or_two_observation": {
                                                            "total": 4.5300000000025875e-05,
                                                            "count": 2,
                                                            "is_parallel": true,
                                                            "self": 4.5300000000025875e-05
                                                        }
                                                    }
                                                },
                                                "UnityEnvironment.step": {
                                                    "total": 1440.038250999928,
                                                    "count": 400988,
                                                    "is_parallel": true,
                                                    "self": 17.915697599831674,
                                                    "children": {
                                                        "UnityEnvironment._generate_step_input": {
                                                            "total": 19.406641000016506,
                                                            "count": 400988,
                                                            "is_parallel": true,
                                                            "self": 19.406641000016506
                                                        },
                                                        "communicator.exchange": {
                                                            "total": 1264.3674498000264,
                                                            "count": 400988,
                                                            "is_parallel": true,
                                                            "self": 1264.3674498000264
                                                        },
                                                        "steps_from_proto": {
                                                            "total": 138.3484626000537,
                                                            "count": 400988,
                                                            "is_parallel": true,
                                                            "self": 27.723966900038533,
                                                            "children": {
                                                                "_process_maybe_compressed_observation": {
                                                                    "total": 98.63544689990084,
                                                                    "count": 801976,
                                                                    "is_parallel": true,
                                                                    "self": 7.669849399944397,
                                                                    "children": {
                                                                        "_observation_to_np_array": {
                                                                            "total": 90.96559749995644,
                                                                            "count": 1203240,
                                                                            "is_parallel": true,
                                                                            "self": 7.956188699813637,
                                                                            "children": {
                                                                                "process_pixels": {
                                                                                    "total": 83.00940880014281,
                                                                                    "count": 1203240,
                                                                                    "is_parallel": true,
                                                                                    "self": 39.201315600020735,
                                                                                    "children": {
                                                                                        "image_decompress": {
                                                                                            "total": 43.80809320012207,
                                                                                            "count": 1203240,
                                                                                            "is_parallel": true,
                                                                                            "self": 43.80809320012207
                                                                                        }
                                                                                    }
                                                                                }
                                                                            }
                                                                        }
                                                                    }
                                                                },
                                                                "_process_rank_one_or_two_observation": {
                                                                    "total": 11.989048800114315,
                                                                    "count": 801976,
                                                                    "is_parallel": true,
                                                                    "self": 11.989048800114315
                                                                }
                                                            }
                                                        }
                                                    }
                                                }
                                            }
                                        }
                                    }
                                }
                            }
                        }
                    }
                },
                "trainer_threads": {
                    "total": 2.9199999971751822e-05,
                    "count": 1,
                    "self": 2.9199999971751822e-05,
                    "children": {
                        "thread_root": {
                            "total": 0.0,
                            "count": 0,
                            "is_parallel": true,
                            "self": 0.0,
                            "children": {
                                "trainer_advance": {
                                    "total": 3667.4640509000888,
                                    "count": 179820,
                                    "is_parallel": true,
                                    "self": 3.7331988001337777,
                                    "children": {
                                        "process_trajectory": {
                                            "total": 2924.198506799955,
                                            "count": 179820,
                                            "is_parallel": true,
                                            "self": 2923.8035963999555,
                                            "children": {
                                                "RLTrainer._checkpoint": {
                                                    "total": 0.39491039999961686,
                                                    "count": 2,
                                                    "is_parallel": true,
                                                    "self": 0.39491039999961686
                                                }
                                            }
                                        },
                                        "_update_policy": {
                                            "total": 739.5323453000001,
                                            "count": 135,
                                            "is_parallel": true,
                                            "self": 494.49920119999626,
                                            "children": {
                                                "TorchPPOOptimizer.update": {
                                                    "total": 245.0331441000038,
                                                    "count": 3393,
                                                    "is_parallel": true,
                                                    "self": 245.0331441000038
                                                }
                                            }
                                        }
                                    }
                                }
                            }
                        }
                    }
                },
                "TrainerController._save_models": {
                    "total": 0.17568800000026386,
                    "count": 1,
                    "self": 0.011633200000233046,
                    "children": {
                        "RLTrainer._checkpoint": {
                            "total": 0.1640548000000308,
                            "count": 1,
                            "self": 0.1640548000000308
                        }
                    }
                }
            }
        }
    }
}