{
  "Huggy": {
    "checkpoints": [
      {
        "steps": 199954,
        "file_path": "results/Huggy2/Huggy/Huggy-199954.onnx",
        "reward": 3.6267115314091956,
        "creation_time": 1741709488.5799465,
        "auxillary_file_paths": [
          "results/Huggy2/Huggy/Huggy-199954.pt"
        ]
      },
      {
        "steps": 399885,
        "file_path": "results/Huggy2/Huggy/Huggy-399885.onnx",
        "reward": 3.7186213127324277,
        "creation_time": 1741709631.1758094,
        "auxillary_file_paths": [
          "results/Huggy2/Huggy/Huggy-399885.pt"
        ]
      },
      {
        "steps": 599909,
        "file_path": "results/Huggy2/Huggy/Huggy-599909.onnx",
        "reward": 3.6818997383117678,
        "creation_time": 1741709776.3961277,
        "auxillary_file_paths": [
          "results/Huggy2/Huggy/Huggy-599909.pt"
        ]
      },
      {
        "steps": 799890,
        "file_path": "results/Huggy2/Huggy/Huggy-799890.onnx",
        "reward": 3.8526050315602967,
        "creation_time": 1741709918.533103,
        "auxillary_file_paths": [
          "results/Huggy2/Huggy/Huggy-799890.pt"
        ]
      },
      {
        "steps": 999981,
        "file_path": "results/Huggy2/Huggy/Huggy-999981.onnx",
        "reward": 3.800258727577644,
        "creation_time": 1741710063.642485,
        "auxillary_file_paths": [
          "results/Huggy2/Huggy/Huggy-999981.pt"
        ]
      },
      {
        "steps": 1199881,
        "file_path": "results/Huggy2/Huggy/Huggy-1199881.onnx",
        "reward": 3.922735427246719,
        "creation_time": 1741710208.3737912,
        "auxillary_file_paths": [
          "results/Huggy2/Huggy/Huggy-1199881.pt"
        ]
      },
      {
        "steps": 1399969,
        "file_path": "results/Huggy2/Huggy/Huggy-1399969.onnx",
        "reward": 4.236039614677429,
        "creation_time": 1741710353.8131871,
        "auxillary_file_paths": [
          "results/Huggy2/Huggy/Huggy-1399969.pt"
        ]
      },
      {
        "steps": 1599959,
        "file_path": "results/Huggy2/Huggy/Huggy-1599959.onnx",
        "reward": 3.709990175984661,
        "creation_time": 1741710497.529161,
        "auxillary_file_paths": [
          "results/Huggy2/Huggy/Huggy-1599959.pt"
        ]
      },
      {
        "steps": 1799893,
        "file_path": "results/Huggy2/Huggy/Huggy-1799893.onnx",
        "reward": 3.738382253253344,
        "creation_time": 1741710642.7827778,
        "auxillary_file_paths": [
          "results/Huggy2/Huggy/Huggy-1799893.pt"
        ]
      },
      {
        "steps": 1999957,
        "file_path": "results/Huggy2/Huggy/Huggy-1999957.onnx",
        "reward": 4.000487691477725,
        "creation_time": 1741710790.5483606,
        "auxillary_file_paths": [
          "results/Huggy2/Huggy/Huggy-1999957.pt"
        ]
      },
      {
        "steps": 2000046,
        "file_path": "results/Huggy2/Huggy/Huggy-2000046.onnx",
        "reward": 4.075086176395416,
        "creation_time": 1741710790.6690514,
        "auxillary_file_paths": [
          "results/Huggy2/Huggy/Huggy-2000046.pt"
        ]
      }
    ],
    "final_checkpoint": {
      "steps": 2000046,
      "file_path": "results/Huggy2/Huggy.onnx",
      "reward": 4.075086176395416,
      "creation_time": 1741710790.6690514,
      "auxillary_file_paths": [
        "results/Huggy2/Huggy/Huggy-2000046.pt"
      ]
    }
  },
  "metadata": {
    "stats_format_version": "0.3.0",
    "mlagents_version": "1.2.0.dev0",
    "torch_version": "2.6.0+cu124"
  }
}