{
"Huggy": {
"checkpoints": [
{
"steps": 199850,
"file_path": "results/Huggy2/Huggy/Huggy-199850.onnx",
"reward": 3.2039250034552356,
"creation_time": 1717871903.0668008,
"auxillary_file_paths": [
"results/Huggy2/Huggy/Huggy-199850.pt"
]
},
{
"steps": 399670,
"file_path": "results/Huggy2/Huggy/Huggy-399670.onnx",
"reward": 3.655000385690908,
"creation_time": 1717872148.4509168,
"auxillary_file_paths": [
"results/Huggy2/Huggy/Huggy-399670.pt"
]
},
{
"steps": 599935,
"file_path": "results/Huggy2/Huggy/Huggy-599935.onnx",
"reward": 3.4884164772535624,
"creation_time": 1717872394.0693867,
"auxillary_file_paths": [
"results/Huggy2/Huggy/Huggy-599935.pt"
]
},
{
"steps": 799991,
"file_path": "results/Huggy2/Huggy/Huggy-799991.onnx",
"reward": 3.8825783547090023,
"creation_time": 1717872636.08221,
"auxillary_file_paths": [
"results/Huggy2/Huggy/Huggy-799991.pt"
]
},
{
"steps": 999922,
"file_path": "results/Huggy2/Huggy/Huggy-999922.onnx",
"reward": 4.148203771689842,
"creation_time": 1717872885.6567893,
"auxillary_file_paths": [
"results/Huggy2/Huggy/Huggy-999922.pt"
]
},
{
"steps": 1199768,
"file_path": "results/Huggy2/Huggy/Huggy-1199768.onnx",
"reward": 3.763288672531352,
"creation_time": 1717873136.1288233,
"auxillary_file_paths": [
"results/Huggy2/Huggy/Huggy-1199768.pt"
]
},
{
"steps": 1399954,
"file_path": "results/Huggy2/Huggy/Huggy-1399954.onnx",
"reward": 4.1698837487593945,
"creation_time": 1717873385.7657018,
"auxillary_file_paths": [
"results/Huggy2/Huggy/Huggy-1399954.pt"
]
},
{
"steps": 1599935,
"file_path": "results/Huggy2/Huggy/Huggy-1599935.onnx",
"reward": 3.7479288841317766,
"creation_time": 1717873630.6262963,
"auxillary_file_paths": [
"results/Huggy2/Huggy/Huggy-1599935.pt"
]
},
{
"steps": 1799979,
"file_path": "results/Huggy2/Huggy/Huggy-1799979.onnx",
"reward": 3.725676856628836,
"creation_time": 1717873879.9551628,
"auxillary_file_paths": [
"results/Huggy2/Huggy/Huggy-1799979.pt"
]
},
{
"steps": 1999685,
"file_path": "results/Huggy2/Huggy/Huggy-1999685.onnx",
"reward": 4.01280415058136,
"creation_time": 1717874126.2530968,
"auxillary_file_paths": [
"results/Huggy2/Huggy/Huggy-1999685.pt"
]
},
{
"steps": 2000435,
"file_path": "results/Huggy2/Huggy/Huggy-2000435.onnx",
"reward": 3.3570622503757477,
"creation_time": 1717874126.4745827,
"auxillary_file_paths": [
"results/Huggy2/Huggy/Huggy-2000435.pt"
]
}
],
"final_checkpoint": {
"steps": 2000435,
"file_path": "results/Huggy2/Huggy.onnx",
"reward": 3.3570622503757477,
"creation_time": 1717874126.4745827,
"auxillary_file_paths": [
"results/Huggy2/Huggy/Huggy-2000435.pt"
]
}
},
"metadata": {
"stats_format_version": "0.3.0",
"mlagents_version": "1.1.0.dev0",
"torch_version": "2.3.0+cu121"
}
}