{
"Huggy": {
"checkpoints": [
{
"steps": 199981,
"file_path": "results/Huggy/Huggy/Huggy-199981.onnx",
"reward": 3.2060509003125706,
"creation_time": 1677072493.4453542,
"auxillary_file_paths": [
"results/Huggy/Huggy/Huggy-199981.pt"
]
},
{
"steps": 399346,
"file_path": "results/Huggy/Huggy/Huggy-399346.onnx",
"reward": 3.4386079907417297,
"creation_time": 1677072733.5467126,
"auxillary_file_paths": [
"results/Huggy/Huggy/Huggy-399346.pt"
]
},
{
"steps": 599989,
"file_path": "results/Huggy/Huggy/Huggy-599989.onnx",
"reward": 3.814567344529288,
"creation_time": 1677072975.061186,
"auxillary_file_paths": [
"results/Huggy/Huggy/Huggy-599989.pt"
]
},
{
"steps": 799507,
"file_path": "results/Huggy/Huggy/Huggy-799507.onnx",
"reward": 3.866514891994243,
"creation_time": 1677073214.6142077,
"auxillary_file_paths": [
"results/Huggy/Huggy/Huggy-799507.pt"
]
},
{
"steps": 999945,
"file_path": "results/Huggy/Huggy/Huggy-999945.onnx",
"reward": 3.80905831605196,
"creation_time": 1677073458.390472,
"auxillary_file_paths": [
"results/Huggy/Huggy/Huggy-999945.pt"
]
},
{
"steps": 1199999,
"file_path": "results/Huggy/Huggy/Huggy-1199999.onnx",
"reward": 3.802225105224117,
"creation_time": 1677073703.2517574,
"auxillary_file_paths": [
"results/Huggy/Huggy/Huggy-1199999.pt"
]
},
{
"steps": 1399610,
"file_path": "results/Huggy/Huggy/Huggy-1399610.onnx",
"reward": 3.8174512773614873,
"creation_time": 1677073949.3768167,
"auxillary_file_paths": [
"results/Huggy/Huggy/Huggy-1399610.pt"
]
},
{
"steps": 1599973,
"file_path": "results/Huggy/Huggy/Huggy-1599973.onnx",
"reward": 3.9010282688259337,
"creation_time": 1677074199.8401315,
"auxillary_file_paths": [
"results/Huggy/Huggy/Huggy-1599973.pt"
]
},
{
"steps": 1799545,
"file_path": "results/Huggy/Huggy/Huggy-1799545.onnx",
"reward": 3.772148502006959,
"creation_time": 1677074446.9440258,
"auxillary_file_paths": [
"results/Huggy/Huggy/Huggy-1799545.pt"
]
},
{
"steps": 1999953,
"file_path": "results/Huggy/Huggy/Huggy-1999953.onnx",
"reward": 4.062780046463013,
"creation_time": 1677074695.6556158,
"auxillary_file_paths": [
"results/Huggy/Huggy/Huggy-1999953.pt"
]
},
{
"steps": 2000023,
"file_path": "results/Huggy/Huggy/Huggy-2000023.onnx",
"reward": 4.076999687013172,
"creation_time": 1677074695.7728,
"auxillary_file_paths": [
"results/Huggy/Huggy/Huggy-2000023.pt"
]
}
],
"final_checkpoint": {
"steps": 2000023,
"file_path": "results/Huggy/Huggy.onnx",
"reward": 4.076999687013172,
"creation_time": 1677074695.7728,
"auxillary_file_paths": [
"results/Huggy/Huggy/Huggy-2000023.pt"
]
}
},
"metadata": {
"stats_format_version": "0.3.0",
"mlagents_version": "0.29.0.dev0",
"torch_version": "1.8.1+cu102"
}
}