{
"Huggy": {
"checkpoints": [
{
"steps": 199930,
"file_path": "results/Huggy2/Huggy/Huggy-199930.onnx",
"reward": 3.222682249718818,
"creation_time": 1733504503.6281466,
"auxillary_file_paths": [
"results/Huggy2/Huggy/Huggy-199930.pt"
]
},
{
"steps": 399899,
"file_path": "results/Huggy2/Huggy/Huggy-399899.onnx",
"reward": 3.533149515760356,
"creation_time": 1733504746.8490074,
"auxillary_file_paths": [
"results/Huggy2/Huggy/Huggy-399899.pt"
]
},
{
"steps": 599932,
"file_path": "results/Huggy2/Huggy/Huggy-599932.onnx",
"reward": 3.8931154949324474,
"creation_time": 1733504987.8645298,
"auxillary_file_paths": [
"results/Huggy2/Huggy/Huggy-599932.pt"
]
},
{
"steps": 799883,
"file_path": "results/Huggy2/Huggy/Huggy-799883.onnx",
"reward": 3.7794327543627833,
"creation_time": 1733505231.3760803,
"auxillary_file_paths": [
"results/Huggy2/Huggy/Huggy-799883.pt"
]
},
{
"steps": 999997,
"file_path": "results/Huggy2/Huggy/Huggy-999997.onnx",
"reward": 3.864470323630139,
"creation_time": 1733505475.432701,
"auxillary_file_paths": [
"results/Huggy2/Huggy/Huggy-999997.pt"
]
},
{
"steps": 1199992,
"file_path": "results/Huggy2/Huggy/Huggy-1199992.onnx",
"reward": 3.6126409899104726,
"creation_time": 1733505739.2549548,
"auxillary_file_paths": [
"results/Huggy2/Huggy/Huggy-1199992.pt"
]
},
{
"steps": 1399954,
"file_path": "results/Huggy2/Huggy/Huggy-1399954.onnx",
"reward": 3.8995903788828383,
"creation_time": 1733505989.3111284,
"auxillary_file_paths": [
"results/Huggy2/Huggy/Huggy-1399954.pt"
]
},
{
"steps": 1599969,
"file_path": "results/Huggy2/Huggy/Huggy-1599969.onnx",
"reward": 4.05907917948243,
"creation_time": 1733506232.0228374,
"auxillary_file_paths": [
"results/Huggy2/Huggy/Huggy-1599969.pt"
]
},
{
"steps": 1799933,
"file_path": "results/Huggy2/Huggy/Huggy-1799933.onnx",
"reward": 4.09589813714442,
"creation_time": 1733506475.0663812,
"auxillary_file_paths": [
"results/Huggy2/Huggy/Huggy-1799933.pt"
]
},
{
"steps": 1999808,
"file_path": "results/Huggy2/Huggy/Huggy-1999808.onnx",
"reward": 4.147406352193732,
"creation_time": 1733506715.8090005,
"auxillary_file_paths": [
"results/Huggy2/Huggy/Huggy-1999808.pt"
]
},
{
"steps": 2000051,
"file_path": "results/Huggy2/Huggy/Huggy-2000051.onnx",
"reward": 4.252123928070068,
"creation_time": 1733506715.9357355,
"auxillary_file_paths": [
"results/Huggy2/Huggy/Huggy-2000051.pt"
]
}
],
"final_checkpoint": {
"steps": 2000051,
"file_path": "results/Huggy2/Huggy.onnx",
"reward": 4.252123928070068,
"creation_time": 1733506715.9357355,
"auxillary_file_paths": [
"results/Huggy2/Huggy/Huggy-2000051.pt"
]
}
},
"metadata": {
"stats_format_version": "0.3.0",
"mlagents_version": "1.2.0.dev0",
"torch_version": "2.5.1+cu121"
}
}