{
"Huggy": {
"checkpoints": [
{
"steps": 199718,
"file_path": "results/Huggy/Huggy/Huggy-199718.onnx",
"reward": 3.3635429367423058,
"creation_time": 1724097183.0497184,
"auxillary_file_paths": [
"results/Huggy/Huggy/Huggy-199718.pt"
]
},
{
"steps": 399887,
"file_path": "results/Huggy/Huggy/Huggy-399887.onnx",
"reward": 3.530230677733987,
"creation_time": 1724097417.904084,
"auxillary_file_paths": [
"results/Huggy/Huggy/Huggy-399887.pt"
]
},
{
"steps": 599944,
"file_path": "results/Huggy/Huggy/Huggy-599944.onnx",
"reward": 3.839178647994995,
"creation_time": 1724097656.887657,
"auxillary_file_paths": [
"results/Huggy/Huggy/Huggy-599944.pt"
]
},
{
"steps": 799960,
"file_path": "results/Huggy/Huggy/Huggy-799960.onnx",
"reward": 3.6240010769529776,
"creation_time": 1724097893.412152,
"auxillary_file_paths": [
"results/Huggy/Huggy/Huggy-799960.pt"
]
},
{
"steps": 999589,
"file_path": "results/Huggy/Huggy/Huggy-999589.onnx",
"reward": 3.9100323442150566,
"creation_time": 1724098136.3660781,
"auxillary_file_paths": [
"results/Huggy/Huggy/Huggy-999589.pt"
]
},
{
"steps": 1199914,
"file_path": "results/Huggy/Huggy/Huggy-1199914.onnx",
"reward": 3.88228909755021,
"creation_time": 1724098383.616469,
"auxillary_file_paths": [
"results/Huggy/Huggy/Huggy-1199914.pt"
]
},
{
"steps": 1399897,
"file_path": "results/Huggy/Huggy/Huggy-1399897.onnx",
"reward": 4.881224036216736,
"creation_time": 1724098627.2696922,
"auxillary_file_paths": [
"results/Huggy/Huggy/Huggy-1399897.pt"
]
},
{
"steps": 1599960,
"file_path": "results/Huggy/Huggy/Huggy-1599960.onnx",
"reward": 3.9023103459314865,
"creation_time": 1724098867.4479458,
"auxillary_file_paths": [
"results/Huggy/Huggy/Huggy-1599960.pt"
]
},
{
"steps": 1799940,
"file_path": "results/Huggy/Huggy/Huggy-1799940.onnx",
"reward": 4.0003840247510185,
"creation_time": 1724099110.7495985,
"auxillary_file_paths": [
"results/Huggy/Huggy/Huggy-1799940.pt"
]
},
{
"steps": 1999930,
"file_path": "results/Huggy/Huggy/Huggy-1999930.onnx",
"reward": 3.8655002066067286,
"creation_time": 1724099355.771384,
"auxillary_file_paths": [
"results/Huggy/Huggy/Huggy-1999930.pt"
]
},
{
"steps": 2000044,
"file_path": "results/Huggy/Huggy/Huggy-2000044.onnx",
"reward": 3.89092278984231,
"creation_time": 1724099355.886748,
"auxillary_file_paths": [
"results/Huggy/Huggy/Huggy-2000044.pt"
]
}
],
"final_checkpoint": {
"steps": 2000044,
"file_path": "results/Huggy/Huggy.onnx",
"reward": 3.89092278984231,
"creation_time": 1724099355.886748,
"auxillary_file_paths": [
"results/Huggy/Huggy/Huggy-2000044.pt"
]
}
},
"metadata": {
"stats_format_version": "0.3.0",
"mlagents_version": "1.1.0.dev0",
"torch_version": "2.3.1+cu121"
}
}