{
"Huggy": {
"checkpoints": [
{
"steps": 199813,
"file_path": "results/Huggy2/Huggy/Huggy-199813.onnx",
"reward": 3.7401781938292764,
"creation_time": 1714552335.137317,
"auxillary_file_paths": [
"results/Huggy2/Huggy/Huggy-199813.pt"
]
},
{
"steps": 399981,
"file_path": "results/Huggy2/Huggy/Huggy-399981.onnx",
"reward": 3.702498183321597,
"creation_time": 1714552795.308428,
"auxillary_file_paths": [
"results/Huggy2/Huggy/Huggy-399981.pt"
]
},
{
"steps": 599880,
"file_path": "results/Huggy2/Huggy/Huggy-599880.onnx",
"reward": 3.7436126031373678,
"creation_time": 1714553260.5058234,
"auxillary_file_paths": [
"results/Huggy2/Huggy/Huggy-599880.pt"
]
},
{
"steps": 799960,
"file_path": "results/Huggy2/Huggy/Huggy-799960.onnx",
"reward": 3.860156606514375,
"creation_time": 1714553711.8552895,
"auxillary_file_paths": [
"results/Huggy2/Huggy/Huggy-799960.pt"
]
},
{
"steps": 999883,
"file_path": "results/Huggy2/Huggy/Huggy-999883.onnx",
"reward": 3.713585154404716,
"creation_time": 1714554183.913034,
"auxillary_file_paths": [
"results/Huggy2/Huggy/Huggy-999883.pt"
]
},
{
"steps": 1199973,
"file_path": "results/Huggy2/Huggy/Huggy-1199973.onnx",
"reward": 3.868400384982427,
"creation_time": 1714554651.9829648,
"auxillary_file_paths": [
"results/Huggy2/Huggy/Huggy-1199973.pt"
]
},
{
"steps": 1399968,
"file_path": "results/Huggy2/Huggy/Huggy-1399968.onnx",
"reward": 3.89920664253739,
"creation_time": 1714555108.7570488,
"auxillary_file_paths": [
"results/Huggy2/Huggy/Huggy-1399968.pt"
]
},
{
"steps": 1599916,
"file_path": "results/Huggy2/Huggy/Huggy-1599916.onnx",
"reward": 3.890522776544094,
"creation_time": 1714555574.1958363,
"auxillary_file_paths": [
"results/Huggy2/Huggy/Huggy-1599916.pt"
]
},
{
"steps": 1799965,
"file_path": "results/Huggy2/Huggy/Huggy-1799965.onnx",
"reward": 3.6414907336235047,
"creation_time": 1714556035.8640313,
"auxillary_file_paths": [
"results/Huggy2/Huggy/Huggy-1799965.pt"
]
},
{
"steps": 1999973,
"file_path": "results/Huggy2/Huggy/Huggy-1999973.onnx",
"reward": 3.5337768944256616,
"creation_time": 1714556489.1057105,
"auxillary_file_paths": [
"results/Huggy2/Huggy/Huggy-1999973.pt"
]
},
{
"steps": 2000019,
"file_path": "results/Huggy2/Huggy/Huggy-2000019.onnx",
"reward": 3.530859401964006,
"creation_time": 1714556489.3443668,
"auxillary_file_paths": [
"results/Huggy2/Huggy/Huggy-2000019.pt"
]
}
],
"final_checkpoint": {
"steps": 2000019,
"file_path": "results/Huggy2/Huggy.onnx",
"reward": 3.530859401964006,
"creation_time": 1714556489.3443668,
"auxillary_file_paths": [
"results/Huggy2/Huggy/Huggy-2000019.pt"
]
}
},
"metadata": {
"stats_format_version": "0.3.0",
"mlagents_version": "1.1.0.dev0",
"torch_version": "2.2.1+cu121"
}
}