ppo-Huggy/run_logs/training_status.json
{
    "Huggy": {
        "checkpoints": [
            {
                "steps": 199766,
                "file_path": "results/Huggy/Huggy/Huggy-199766.onnx",
                "reward": 3.541225162045709,
                "creation_time": 1671110834.649562,
                "auxillary_file_paths": [
                    "results/Huggy/Huggy/Huggy-199766.pt"
                ]
            },
            {
                "steps": 399277,
                "file_path": "results/Huggy/Huggy/Huggy-399277.onnx",
                "reward": 3.9237190186977386,
                "creation_time": 1671111063.3519318,
                "auxillary_file_paths": [
                    "results/Huggy/Huggy/Huggy-399277.pt"
                ]
            },
            {
                "steps": 599988,
                "file_path": "results/Huggy/Huggy/Huggy-599988.onnx",
                "reward": 3.8004597676427743,
                "creation_time": 1671111295.4921846,
                "auxillary_file_paths": [
                    "results/Huggy/Huggy/Huggy-599988.pt"
                ]
            },
            {
                "steps": 799963,
                "file_path": "results/Huggy/Huggy/Huggy-799963.onnx",
                "reward": 3.569106745804455,
                "creation_time": 1671111522.0374362,
                "auxillary_file_paths": [
                    "results/Huggy/Huggy/Huggy-799963.pt"
                ]
            },
            {
                "steps": 999537,
                "file_path": "results/Huggy/Huggy/Huggy-999537.onnx",
                "reward": 3.64688536211064,
                "creation_time": 1671111753.6135793,
                "auxillary_file_paths": [
                    "results/Huggy/Huggy/Huggy-999537.pt"
                ]
            },
            {
                "steps": 1199979,
                "file_path": "results/Huggy/Huggy/Huggy-1199979.onnx",
                "reward": 4.364796297494755,
                "creation_time": 1671111986.5987017,
                "auxillary_file_paths": [
                    "results/Huggy/Huggy/Huggy-1199979.pt"
                ]
            },
            {
                "steps": 1399951,
                "file_path": "results/Huggy/Huggy/Huggy-1399951.onnx",
                "reward": 3.5816029367799107,
                "creation_time": 1671112211.4015474,
                "auxillary_file_paths": [
                    "results/Huggy/Huggy/Huggy-1399951.pt"
                ]
            },
            {
                "steps": 1599869,
                "file_path": "results/Huggy/Huggy/Huggy-1599869.onnx",
                "reward": 3.6893617201734474,
                "creation_time": 1671112441.4799156,
                "auxillary_file_paths": [
                    "results/Huggy/Huggy/Huggy-1599869.pt"
                ]
            },
            {
                "steps": 1799839,
                "file_path": "results/Huggy/Huggy/Huggy-1799839.onnx",
                "reward": 3.6981644800731113,
                "creation_time": 1671112672.8765786,
                "auxillary_file_paths": [
                    "results/Huggy/Huggy/Huggy-1799839.pt"
                ]
            },
            {
                "steps": 1999981,
                "file_path": "results/Huggy/Huggy/Huggy-1999981.onnx",
                "reward": 3.9224737882614136,
                "creation_time": 1671112906.412852,
                "auxillary_file_paths": [
                    "results/Huggy/Huggy/Huggy-1999981.pt"
                ]
            },
            {
                "steps": 2000068,
                "file_path": "results/Huggy/Huggy/Huggy-2000068.onnx",
                "reward": 4.02949104309082,
                "creation_time": 1671112906.5384781,
                "auxillary_file_paths": [
                    "results/Huggy/Huggy/Huggy-2000068.pt"
                ]
            }
        ],
        "final_checkpoint": {
            "steps": 2000068,
            "file_path": "results/Huggy/Huggy.onnx",
            "reward": 4.02949104309082,
            "creation_time": 1671112906.5384781,
            "auxillary_file_paths": [
                "results/Huggy/Huggy/Huggy-2000068.pt"
            ]
        }
    },
    "metadata": {
        "stats_format_version": "0.3.0",
        "mlagents_version": "0.29.0.dev0",
        "torch_version": "1.8.1+cu102"
    }
}
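
For reference, a minimal sketch of reading this file programmatically. It is an illustration, not part of the repository: it assumes the script runs from the repo root, next to the run_logs/ directory shown in the path above, and it uses only keys visible in the JSON (the misspelled key "auxillary_file_paths" is what the ML-Agents trainer itself writes, so it is kept verbatim).

import json
from datetime import datetime, timezone

# Load the status file that the ML-Agents trainer writes during training.
with open("run_logs/training_status.json") as f:
    status = json.load(f)

# Each checkpoint records the step count, the reward recorded at save time,
# the exported ONNX policy path, and a Unix-epoch creation timestamp.
for ckpt in status["Huggy"]["checkpoints"]:
    when = datetime.fromtimestamp(ckpt["creation_time"], tz=timezone.utc)
    print(f'{ckpt["steps"]:>9} steps  reward={ckpt["reward"]:.3f}  {when:%Y-%m-%d %H:%M}  {ckpt["file_path"]}')

# The final checkpoint repeats the last entry's steps and reward but points
# at the top-level export, results/Huggy/Huggy.onnx.
final = status["Huggy"]["final_checkpoint"]
print(f'final: {final["steps"]} steps, reward {final["reward"]:.3f}, {final["file_path"]}')

Run as-is, this lists the eleven intermediate checkpoints above (saved roughly every 200k steps up to the 2,000,068-step budget) followed by the final export.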