{
"Huggy": {
"checkpoints": [
{
"steps": 199982,
"file_path": "results/Huggy2/Huggy/Huggy-199982.onnx",
"reward": 2.9644240220980858,
"creation_time": 1723283984.8956766,
"auxillary_file_paths": [
"results/Huggy2/Huggy/Huggy-199982.pt"
]
},
{
"steps": 399985,
"file_path": "results/Huggy2/Huggy/Huggy-399985.onnx",
"reward": 3.2140926160509626,
"creation_time": 1723284213.7249343,
"auxillary_file_paths": [
"results/Huggy2/Huggy/Huggy-399985.pt"
]
},
{
"steps": 599935,
"file_path": "results/Huggy2/Huggy/Huggy-599935.onnx",
"reward": 4.306666904025608,
"creation_time": 1723284446.8662877,
"auxillary_file_paths": [
"results/Huggy2/Huggy/Huggy-599935.pt"
]
},
{
"steps": 799997,
"file_path": "results/Huggy2/Huggy/Huggy-799997.onnx",
"reward": 3.605627469442509,
"creation_time": 1723284678.0679934,
"auxillary_file_paths": [
"results/Huggy2/Huggy/Huggy-799997.pt"
]
},
{
"steps": 999957,
"file_path": "results/Huggy2/Huggy/Huggy-999957.onnx",
"reward": 3.483904051283995,
"creation_time": 1723284913.6568747,
"auxillary_file_paths": [
"results/Huggy2/Huggy/Huggy-999957.pt"
]
},
{
"steps": 1199926,
"file_path": "results/Huggy2/Huggy/Huggy-1199926.onnx",
"reward": 3.7603145648793475,
"creation_time": 1723285148.3544753,
"auxillary_file_paths": [
"results/Huggy2/Huggy/Huggy-1199926.pt"
]
},
{
"steps": 1399934,
"file_path": "results/Huggy2/Huggy/Huggy-1399934.onnx",
"reward": 3.7720073129557354,
"creation_time": 1723285378.3938243,
"auxillary_file_paths": [
"results/Huggy2/Huggy/Huggy-1399934.pt"
]
},
{
"steps": 1599923,
"file_path": "results/Huggy2/Huggy/Huggy-1599923.onnx",
"reward": 3.3713954916367164,
"creation_time": 1723285611.0813172,
"auxillary_file_paths": [
"results/Huggy2/Huggy/Huggy-1599923.pt"
]
},
{
"steps": 1799969,
"file_path": "results/Huggy2/Huggy/Huggy-1799969.onnx",
"reward": 2.9794031849929263,
"creation_time": 1723285843.6777487,
"auxillary_file_paths": [
"results/Huggy2/Huggy/Huggy-1799969.pt"
]
},
{
"steps": 1999992,
"file_path": "results/Huggy2/Huggy/Huggy-1999992.onnx",
"reward": 3.102344039413664,
"creation_time": 1723286073.9808037,
"auxillary_file_paths": [
"results/Huggy2/Huggy/Huggy-1999992.pt"
]
},
{
"steps": 2000092,
"file_path": "results/Huggy2/Huggy/Huggy-2000092.onnx",
"reward": 3.1184438467025757,
"creation_time": 1723286074.1010938,
"auxillary_file_paths": [
"results/Huggy2/Huggy/Huggy-2000092.pt"
]
}
],
"final_checkpoint": {
"steps": 2000092,
"file_path": "results/Huggy2/Huggy.onnx",
"reward": 3.1184438467025757,
"creation_time": 1723286074.1010938,
"auxillary_file_paths": [
"results/Huggy2/Huggy/Huggy-2000092.pt"
]
}
},
"metadata": {
"stats_format_version": "0.3.0",
"mlagents_version": "1.1.0.dev0",
"torch_version": "2.3.1+cu121"
}
}