ppo-Huggy/run_logs/training_status.json
{
    "Huggy": {
        "checkpoints": [
            {
                "steps": 199975,
                "file_path": "results/Huggy/Huggy/Huggy-199975.onnx",
                "reward": 3.1444546977678933,
                "creation_time": 1674303666.4276068,
                "auxillary_file_paths": [
                    "results/Huggy/Huggy/Huggy-199975.pt"
                ]
            },
            {
                "steps": 399919,
                "file_path": "results/Huggy/Huggy/Huggy-399919.onnx",
                "reward": 3.4301109313964844,
                "creation_time": 1674303881.369827,
                "auxillary_file_paths": [
                    "results/Huggy/Huggy/Huggy-399919.pt"
                ]
            },
            {
                "steps": 599977,
                "file_path": "results/Huggy/Huggy/Huggy-599977.onnx",
                "reward": 3.7271020692937515,
                "creation_time": 1674304098.5848424,
                "auxillary_file_paths": [
                    "results/Huggy/Huggy/Huggy-599977.pt"
                ]
            },
            {
                "steps": 799900,
                "file_path": "results/Huggy/Huggy/Huggy-799900.onnx",
                "reward": 3.5791341474839857,
                "creation_time": 1674304312.919451,
                "auxillary_file_paths": [
                    "results/Huggy/Huggy/Huggy-799900.pt"
                ]
            },
            {
                "steps": 999903,
                "file_path": "results/Huggy/Huggy/Huggy-999903.onnx",
                "reward": 3.786853778056609,
                "creation_time": 1674304531.6670897,
                "auxillary_file_paths": [
                    "results/Huggy/Huggy/Huggy-999903.pt"
                ]
            },
            {
                "steps": 1199999,
                "file_path": "results/Huggy/Huggy/Huggy-1199999.onnx",
                "reward": 3.966481728023953,
                "creation_time": 1674304752.262122,
                "auxillary_file_paths": [
                    "results/Huggy/Huggy/Huggy-1199999.pt"
                ]
            },
            {
                "steps": 1399993,
                "file_path": "results/Huggy/Huggy/Huggy-1399993.onnx",
                "reward": 3.761055851701516,
                "creation_time": 1674304967.6881702,
                "auxillary_file_paths": [
                    "results/Huggy/Huggy/Huggy-1399993.pt"
                ]
            },
            {
                "steps": 1599432,
                "file_path": "results/Huggy/Huggy/Huggy-1599432.onnx",
                "reward": 3.8917786496527054,
                "creation_time": 1674305187.6442745,
                "auxillary_file_paths": [
                    "results/Huggy/Huggy/Huggy-1599432.pt"
                ]
            },
            {
                "steps": 1799996,
                "file_path": "results/Huggy/Huggy/Huggy-1799996.onnx",
                "reward": 3.597949866841479,
                "creation_time": 1674305408.0228338,
                "auxillary_file_paths": [
                    "results/Huggy/Huggy/Huggy-1799996.pt"
                ]
            },
            {
                "steps": 1999981,
                "file_path": "results/Huggy/Huggy/Huggy-1999981.onnx",
                "reward": 5.28090778986613,
                "creation_time": 1674305627.8539972,
                "auxillary_file_paths": [
                    "results/Huggy/Huggy/Huggy-1999981.pt"
                ]
            },
            {
                "steps": 2000115,
                "file_path": "results/Huggy/Huggy/Huggy-2000115.onnx",
                "reward": 5.318100929260254,
                "creation_time": 1674305627.9749284,
                "auxillary_file_paths": [
                    "results/Huggy/Huggy/Huggy-2000115.pt"
                ]
            }
        ],
        "final_checkpoint": {
            "steps": 2000115,
            "file_path": "results/Huggy/Huggy.onnx",
            "reward": 5.318100929260254,
            "creation_time": 1674305627.9749284,
            "auxillary_file_paths": [
                "results/Huggy/Huggy/Huggy-2000115.pt"
            ]
        }
    },
    "metadata": {
        "stats_format_version": "0.3.0",
        "mlagents_version": "0.29.0.dev0",
        "torch_version": "1.8.1+cu102"
    }
}
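
For reference, a minimal sketch (an illustration, not part of the repository) of how this training_status.json could be inspected with Python's standard json module; the relative path below is an assumption based on the file location shown above:

```python
import json

# Load the ML-Agents training-status file written alongside the run logs.
# Path is an assumption; adjust to wherever the file actually lives.
with open("run_logs/training_status.json") as f:
    status = json.load(f)

huggy = status["Huggy"]
checkpoints = huggy["checkpoints"]

# Each checkpoint entry records the step count, the exported .onnx policy,
# the mean cumulative reward at save time, and a companion .pt file
# (listed under the "auxillary_file_paths" key, spelled as in the file).
best = max(checkpoints, key=lambda c: c["reward"])
print(f"{len(checkpoints)} checkpoints; best reward "
      f"{best['reward']:.3f} at step {best['steps']}")

final = huggy["final_checkpoint"]
print(f"final model: {final['file_path']} (reward {final['reward']:.3f})")
```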