ppo-Huggy / run_logs / training_status.json
{
"Huggy": {
"checkpoints": [
{
"steps": 499992,
"file_path": "results/Huggy/Huggy/Huggy-499992.onnx",
"reward": 3.8579931425493816,
"creation_time": 1670896056.1791737,
"auxillary_file_paths": [
"results/Huggy/Huggy/Huggy-499992.pt"
]
},
{
"steps": 999977,
"file_path": "results/Huggy/Huggy/Huggy-999977.onnx",
"reward": 3.823188080371005,
"creation_time": 1670896629.4844384,
"auxillary_file_paths": [
"results/Huggy/Huggy/Huggy-999977.pt"
]
},
{
"steps": 1499986,
"file_path": "results/Huggy/Huggy/Huggy-1499986.onnx",
"reward": 3.8552635293158275,
"creation_time": 1670897196.4167929,
"auxillary_file_paths": [
"results/Huggy/Huggy/Huggy-1499986.pt"
]
},
{
"steps": 1999933,
"file_path": "results/Huggy/Huggy/Huggy-1999933.onnx",
"reward": 3.441635570218486,
"creation_time": 1670897769.1821632,
"auxillary_file_paths": [
"results/Huggy/Huggy/Huggy-1999933.pt"
]
},
{
"steps": 2000609,
"file_path": "results/Huggy/Huggy/Huggy-2000609.onnx",
"reward": 3.4618291929364204,
"creation_time": 1670897769.3235085,
"auxillary_file_paths": [
"results/Huggy/Huggy/Huggy-2000609.pt"
]
}
],
"final_checkpoint": {
"steps": 2000609,
"file_path": "results/Huggy/Huggy.onnx",
"reward": 3.4618291929364204,
"creation_time": 1670897769.3235085,
"auxillary_file_paths": [
"results/Huggy/Huggy/Huggy-2000609.pt"
]
}
},
"metadata": {
"stats_format_version": "0.3.0",
"mlagents_version": "0.29.0.dev0",
"torch_version": "1.8.1+cu102"
}
}
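A minimal sketch of reading this file, assuming only the schema visible above (a behavior name keyed at the top level, a "checkpoints" list, and a "final_checkpoint" object, each with "steps", "file_path", "reward", and "creation_time" as a Unix timestamp). The relative path to the file is an assumption; adjust it to wherever run_logs lives in your checkout.

```python
# Sketch: summarize the checkpoints recorded in training_status.json.
# Assumes the schema shown above; the file path below is an assumption.
import json
from datetime import datetime, timezone

with open("run_logs/training_status.json") as f:
    status = json.load(f)

behavior = status["Huggy"]

# Print one line per intermediate checkpoint: step count, mean reward,
# creation time (UTC), and the exported ONNX path.
for ckpt in behavior["checkpoints"]:
    created = datetime.fromtimestamp(ckpt["creation_time"], tz=timezone.utc)
    print(f"{ckpt['steps']:>9} steps  reward={ckpt['reward']:.3f}  "
          f"created={created:%Y-%m-%d %H:%M:%S} UTC  {ckpt['file_path']}")

final = behavior["final_checkpoint"]
print(f"final: {final['steps']} steps, reward={final['reward']:.3f}, {final['file_path']}")
```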