ppo-Huggy / run_logs /training_status.json
{
    "Huggy": {
        "checkpoints": [
            {
                "steps": 199906,
                "file_path": "results/Huggy/Huggy/Huggy-199906.onnx",
                "reward": 3.1921703153186374,
                "creation_time": 1681722941.051303,
                "auxillary_file_paths": [
                    "results/Huggy/Huggy/Huggy-199906.pt"
                ]
            },
            {
                "steps": 399918,
                "file_path": "results/Huggy/Huggy/Huggy-399918.onnx",
                "reward": 3.6321657964161465,
                "creation_time": 1681723173.640026,
                "auxillary_file_paths": [
                    "results/Huggy/Huggy/Huggy-399918.pt"
                ]
            },
            {
                "steps": 599927,
                "file_path": "results/Huggy/Huggy/Huggy-599927.onnx",
                "reward": 4.508185029029846,
                "creation_time": 1681723405.3551793,
                "auxillary_file_paths": [
                    "results/Huggy/Huggy/Huggy-599927.pt"
                ]
            },
            {
                "steps": 799931,
                "file_path": "results/Huggy/Huggy/Huggy-799931.onnx",
                "reward": 3.7262496863557995,
                "creation_time": 1681723634.0334666,
                "auxillary_file_paths": [
                    "results/Huggy/Huggy/Huggy-799931.pt"
                ]
            },
            {
                "steps": 999987,
                "file_path": "results/Huggy/Huggy/Huggy-999987.onnx",
                "reward": 3.5972424098905527,
                "creation_time": 1681723865.5370688,
                "auxillary_file_paths": [
                    "results/Huggy/Huggy/Huggy-999987.pt"
                ]
            },
            {
                "steps": 1199886,
                "file_path": "results/Huggy/Huggy/Huggy-1199886.onnx",
                "reward": 3.208886135708202,
                "creation_time": 1681724098.4128115,
                "auxillary_file_paths": [
                    "results/Huggy/Huggy/Huggy-1199886.pt"
                ]
            },
            {
                "steps": 1399381,
                "file_path": "results/Huggy/Huggy/Huggy-1399381.onnx",
                "reward": 3.746830185417269,
                "creation_time": 1681724328.3366349,
                "auxillary_file_paths": [
                    "results/Huggy/Huggy/Huggy-1399381.pt"
                ]
            },
            {
                "steps": 1599922,
                "file_path": "results/Huggy/Huggy/Huggy-1599922.onnx",
                "reward": 3.703206221004585,
                "creation_time": 1681724563.477693,
                "auxillary_file_paths": [
                    "results/Huggy/Huggy/Huggy-1599922.pt"
                ]
            },
            {
                "steps": 1799857,
                "file_path": "results/Huggy/Huggy/Huggy-1799857.onnx",
                "reward": 3.5912203883180522,
                "creation_time": 1681724798.649161,
                "auxillary_file_paths": [
                    "results/Huggy/Huggy/Huggy-1799857.pt"
                ]
            },
            {
                "steps": 1999884,
                "file_path": "results/Huggy/Huggy/Huggy-1999884.onnx",
                "reward": 1.2311093012491863,
                "creation_time": 1681725027.7986414,
                "auxillary_file_paths": [
                    "results/Huggy/Huggy/Huggy-1999884.pt"
                ]
            },
            {
                "steps": 2000003,
                "file_path": "results/Huggy/Huggy/Huggy-2000003.onnx",
                "reward": 2.4501792192459106,
                "creation_time": 1681725027.922443,
                "auxillary_file_paths": [
                    "results/Huggy/Huggy/Huggy-2000003.pt"
                ]
            }
        ],
        "final_checkpoint": {
            "steps": 2000003,
            "file_path": "results/Huggy/Huggy.onnx",
            "reward": 2.4501792192459106,
            "creation_time": 1681725027.922443,
            "auxillary_file_paths": [
                "results/Huggy/Huggy/Huggy-2000003.pt"
            ]
        }
    },
    "metadata": {
        "stats_format_version": "0.3.0",
        "mlagents_version": "0.31.0.dev0",
        "torch_version": "1.11.0+cu102"
    }
}
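
This is the checkpoint history that Unity ML-Agents writes for the "Huggy" behavior: each entry records the step count, the exported .onnx policy path, the mean reward at checkpoint time, a Unix creation timestamp, and auxiliary files (the key is spelled "auxillary_file_paths" in ML-Agents' own output). Below is a minimal Python sketch for inspecting this file; it assumes the file sits at run_logs/training_status.json, as the repo path above suggests, and that the behavior name "Huggy" matches the top-level key.

```python
import json

# Assumed location, matching this repo's layout (run_logs/training_status.json).
with open("run_logs/training_status.json") as f:
    status = json.load(f)

# "Huggy" is the behavior name used as the top-level key in this file.
behavior = status["Huggy"]

# Print each checkpoint's step count, mean reward, and exported model path.
for ckpt in behavior["checkpoints"]:
    print(f"step {ckpt['steps']:>8}: reward {ckpt['reward']:.3f} -> {ckpt['file_path']}")

# The final checkpoint points at the model actually published as Huggy.onnx.
final = behavior["final_checkpoint"]
print(f"final model ({final['steps']} steps): {final['file_path']}")
```

Run against the JSON above, this lists eleven checkpoints (roughly every 200k steps up to 2,000,003) and the final exported model at results/Huggy/Huggy.onnx.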