{
"Huggy": {
"checkpoints": [
{
"steps": 199706,
"file_path": "results/Huggy/Huggy/Huggy-199706.onnx",
"reward": 3.1647442273403468,
"creation_time": 1682884600.30673,
"auxillary_file_paths": [
"results/Huggy/Huggy/Huggy-199706.pt"
]
},
{
"steps": 399959,
"file_path": "results/Huggy/Huggy/Huggy-399959.onnx",
"reward": 3.5830342173576355,
"creation_time": 1682884827.1910102,
"auxillary_file_paths": [
"results/Huggy/Huggy/Huggy-399959.pt"
]
},
{
"steps": 599942,
"file_path": "results/Huggy/Huggy/Huggy-599942.onnx",
"reward": 3.639671675090132,
"creation_time": 1682885058.9290917,
"auxillary_file_paths": [
"results/Huggy/Huggy/Huggy-599942.pt"
]
},
{
"steps": 799960,
"file_path": "results/Huggy/Huggy/Huggy-799960.onnx",
"reward": 4.092663955390453,
"creation_time": 1682885288.3152604,
"auxillary_file_paths": [
"results/Huggy/Huggy/Huggy-799960.pt"
]
},
{
"steps": 999970,
"file_path": "results/Huggy/Huggy/Huggy-999970.onnx",
"reward": 4.185395872592926,
"creation_time": 1682885526.8803065,
"auxillary_file_paths": [
"results/Huggy/Huggy/Huggy-999970.pt"
]
},
{
"steps": 1199913,
"file_path": "results/Huggy/Huggy/Huggy-1199913.onnx",
"reward": 3.732329269250234,
"creation_time": 1682885768.3068318,
"auxillary_file_paths": [
"results/Huggy/Huggy/Huggy-1199913.pt"
]
},
{
"steps": 1399951,
"file_path": "results/Huggy/Huggy/Huggy-1399951.onnx",
"reward": 4.299892770392554,
"creation_time": 1682886005.8656023,
"auxillary_file_paths": [
"results/Huggy/Huggy/Huggy-1399951.pt"
]
},
{
"steps": 1599995,
"file_path": "results/Huggy/Huggy/Huggy-1599995.onnx",
"reward": 3.8662159544470063,
"creation_time": 1682886239.2919555,
"auxillary_file_paths": [
"results/Huggy/Huggy/Huggy-1599995.pt"
]
},
{
"steps": 1799964,
"file_path": "results/Huggy/Huggy/Huggy-1799964.onnx",
"reward": 4.161931554865993,
"creation_time": 1682886477.0588534,
"auxillary_file_paths": [
"results/Huggy/Huggy/Huggy-1799964.pt"
]
},
{
"steps": 1999862,
"file_path": "results/Huggy/Huggy/Huggy-1999862.onnx",
"reward": 4.233895217577617,
"creation_time": 1682886715.9033043,
"auxillary_file_paths": [
"results/Huggy/Huggy/Huggy-1999862.pt"
]
},
{
"steps": 2000016,
"file_path": "results/Huggy/Huggy/Huggy-2000016.onnx",
"reward": 4.261112895451094,
"creation_time": 1682886716.0315745,
"auxillary_file_paths": [
"results/Huggy/Huggy/Huggy-2000016.pt"
]
}
],
"final_checkpoint": {
"steps": 2000016,
"file_path": "results/Huggy/Huggy.onnx",
"reward": 4.261112895451094,
"creation_time": 1682886716.0315745,
"auxillary_file_paths": [
"results/Huggy/Huggy/Huggy-2000016.pt"
]
}
},
"metadata": {
"stats_format_version": "0.3.0",
"mlagents_version": "0.31.0.dev0",
"torch_version": "1.11.0+cu102"
}
}