{
"Huggy": {
"checkpoints": [
{
"steps": 199938,
"file_path": "results/Huggy2/Huggy/Huggy-199938.onnx",
"reward": 3.2744047436220893,
"creation_time": 1717487107.9856656,
"auxillary_file_paths": [
"results/Huggy2/Huggy/Huggy-199938.pt"
]
},
{
"steps": 399912,
"file_path": "results/Huggy2/Huggy/Huggy-399912.onnx",
"reward": 3.9223035241282265,
"creation_time": 1717487347.7552824,
"auxillary_file_paths": [
"results/Huggy2/Huggy/Huggy-399912.pt"
]
},
{
"steps": 599927,
"file_path": "results/Huggy2/Huggy/Huggy-599927.onnx",
"reward": 4.357888240080613,
"creation_time": 1717487594.9046843,
"auxillary_file_paths": [
"results/Huggy2/Huggy/Huggy-599927.pt"
]
},
{
"steps": 799997,
"file_path": "results/Huggy2/Huggy/Huggy-799997.onnx",
"reward": 3.904463357546113,
"creation_time": 1717487834.6745498,
"auxillary_file_paths": [
"results/Huggy2/Huggy/Huggy-799997.pt"
]
},
{
"steps": 999700,
"file_path": "results/Huggy2/Huggy/Huggy-999700.onnx",
"reward": 3.559163364080282,
"creation_time": 1717488078.7136378,
"auxillary_file_paths": [
"results/Huggy2/Huggy/Huggy-999700.pt"
]
},
{
"steps": 1199955,
"file_path": "results/Huggy2/Huggy/Huggy-1199955.onnx",
"reward": 3.77784343787602,
"creation_time": 1717488324.869273,
"auxillary_file_paths": [
"results/Huggy2/Huggy/Huggy-1199955.pt"
]
},
{
"steps": 1399397,
"file_path": "results/Huggy2/Huggy/Huggy-1399397.onnx",
"reward": 3.969614332424846,
"creation_time": 1717488568.3406713,
"auxillary_file_paths": [
"results/Huggy2/Huggy/Huggy-1399397.pt"
]
},
{
"steps": 1599993,
"file_path": "results/Huggy2/Huggy/Huggy-1599993.onnx",
"reward": 3.883558148834981,
"creation_time": 1717488813.7081263,
"auxillary_file_paths": [
"results/Huggy2/Huggy/Huggy-1599993.pt"
]
},
{
"steps": 1799990,
"file_path": "results/Huggy2/Huggy/Huggy-1799990.onnx",
"reward": 3.8328578745832247,
"creation_time": 1717489061.1774359,
"auxillary_file_paths": [
"results/Huggy2/Huggy/Huggy-1799990.pt"
]
},
{
"steps": 1999953,
"file_path": "results/Huggy2/Huggy/Huggy-1999953.onnx",
"reward": 3.9163651525974275,
"creation_time": 1717489305.9676552,
"auxillary_file_paths": [
"results/Huggy2/Huggy/Huggy-1999953.pt"
]
},
{
"steps": 2000057,
"file_path": "results/Huggy2/Huggy/Huggy-2000057.onnx",
"reward": 3.9619204714184715,
"creation_time": 1717489306.0889575,
"auxillary_file_paths": [
"results/Huggy2/Huggy/Huggy-2000057.pt"
]
}
],
"final_checkpoint": {
"steps": 2000057,
"file_path": "results/Huggy2/Huggy.onnx",
"reward": 3.9619204714184715,
"creation_time": 1717489306.0889575,
"auxillary_file_paths": [
"results/Huggy2/Huggy/Huggy-2000057.pt"
]
}
},
"metadata": {
"stats_format_version": "0.3.0",
"mlagents_version": "1.1.0.dev0",
"torch_version": "2.3.0+cu121"
}
}