{
"Huggy": {
"checkpoints": [
{
"steps": 7399945,
"file_path": "results/HuggyPRO/Huggy/Huggy-7399945.onnx",
"reward": 3.051377342215606,
"creation_time": 1688016222.80325,
"auxillary_file_paths": [
"results/HuggyPRO/Huggy/Huggy-7399945.pt"
]
},
{
"steps": 7599393,
"file_path": "results/HuggyPRO/Huggy/Huggy-7599393.onnx",
"reward": 3.369081986245028,
"creation_time": 1688016389.0007834,
"auxillary_file_paths": [
"results/HuggyPRO/Huggy/Huggy-7599393.pt"
]
},
{
"steps": 7799720,
"file_path": "results/HuggyPRO/Huggy/Huggy-7799720.onnx",
"reward": 3.480793364627181,
"creation_time": 1688016557.888164,
"auxillary_file_paths": [
"results/HuggyPRO/Huggy/Huggy-7799720.pt"
]
},
{
"steps": 7999988,
"file_path": "results/HuggyPRO/Huggy/Huggy-7999988.onnx",
"reward": 5.588488578796387,
"creation_time": 1688016724.8431225,
"auxillary_file_paths": [
"results/HuggyPRO/Huggy/Huggy-7999988.pt"
]
},
{
"steps": 8199885,
"file_path": "results/HuggyPRO/Huggy/Huggy-8199885.onnx",
"reward": 2.912739634227294,
"creation_time": 1688016890.0908227,
"auxillary_file_paths": [
"results/HuggyPRO/Huggy/Huggy-8199885.pt"
]
},
{
"steps": 8399813,
"file_path": "results/HuggyPRO/Huggy/Huggy-8399813.onnx",
"reward": 3.5065758538246157,
"creation_time": 1688017056.9512923,
"auxillary_file_paths": [
"results/HuggyPRO/Huggy/Huggy-8399813.pt"
]
},
{
"steps": 8599477,
"file_path": "results/HuggyPRO/Huggy/Huggy-8599477.onnx",
"reward": 2.6361040584743023,
"creation_time": 1688017219.7248666,
"auxillary_file_paths": [
"results/HuggyPRO/Huggy/Huggy-8599477.pt"
]
},
{
"steps": 8799487,
"file_path": "results/HuggyPRO/Huggy/Huggy-8799487.onnx",
"reward": 2.2121368527412413,
"creation_time": 1688017385.8512013,
"auxillary_file_paths": [
"results/HuggyPRO/Huggy/Huggy-8799487.pt"
]
},
{
"steps": 8999281,
"file_path": "results/HuggyPRO/Huggy/Huggy-8999281.onnx",
"reward": 1.2284660264849663,
"creation_time": 1688017551.598415,
"auxillary_file_paths": [
"results/HuggyPRO/Huggy/Huggy-8999281.pt"
]
},
{
"steps": 9199305,
"file_path": "results/HuggyPRO/Huggy/Huggy-9199305.onnx",
"reward": 2.5108944786631544,
"creation_time": 1688017715.3146114,
"auxillary_file_paths": [
"results/HuggyPRO/Huggy/Huggy-9199305.pt"
]
},
{
"steps": 9399351,
"file_path": "results/HuggyPRO/Huggy/Huggy-9399351.onnx",
"reward": 2.969933057427406,
"creation_time": 1688017880.364168,
"auxillary_file_paths": [
"results/HuggyPRO/Huggy/Huggy-9399351.pt"
]
},
{
"steps": 9599935,
"file_path": "results/HuggyPRO/Huggy/Huggy-9599935.onnx",
"reward": 2.386953830718994,
"creation_time": 1688018046.6176262,
"auxillary_file_paths": [
"results/HuggyPRO/Huggy/Huggy-9599935.pt"
]
},
{
"steps": 9799549,
"file_path": "results/HuggyPRO/Huggy/Huggy-9799549.onnx",
"reward": 2.764477152181299,
"creation_time": 1688018208.6878228,
"auxillary_file_paths": [
"results/HuggyPRO/Huggy/Huggy-9799549.pt"
]
},
{
"steps": 9999578,
"file_path": "results/HuggyPRO/Huggy/Huggy-9999578.onnx",
"reward": 1.930848617106676,
"creation_time": 1688018376.3925066,
"auxillary_file_paths": [
"results/HuggyPRO/Huggy/Huggy-9999578.pt"
]
},
{
"steps": 10000328,
"file_path": "results/HuggyPRO/Huggy/Huggy-10000328.onnx",
"reward": 1.8292621707215029,
"creation_time": 1688018376.5172257,
"auxillary_file_paths": [
"results/HuggyPRO/Huggy/Huggy-10000328.pt"
]
}
],
"final_checkpoint": {
"steps": 10000328,
"file_path": "results/HuggyPRO/Huggy.onnx",
"reward": 1.8292621707215029,
"creation_time": 1688018376.5172257,
"auxillary_file_paths": [
"results/HuggyPRO/Huggy/Huggy-10000328.pt"
]
}
},
"metadata": {
"stats_format_version": "0.3.0",
"mlagents_version": "0.31.0.dev0",
"torch_version": "2.0.1"
}
}