ppo-Huggy/run_logs/training_status.json
{
    "Huggy": {
        "checkpoints": [
            {
                "steps": 199840,
                "file_path": "results/Huggy2/Huggy/Huggy-199840.onnx",
                "reward": 3.6024819814552695,
                "creation_time": 1718284976.9707735,
                "auxillary_file_paths": [
                    "results/Huggy2/Huggy/Huggy-199840.pt"
                ]
            },
            {
                "steps": 399980,
                "file_path": "results/Huggy2/Huggy/Huggy-399980.onnx",
                "reward": 3.948912371482168,
                "creation_time": 1718285242.9143195,
                "auxillary_file_paths": [
                    "results/Huggy2/Huggy/Huggy-399980.pt"
                ]
            },
            {
                "steps": 599878,
                "file_path": "results/Huggy2/Huggy/Huggy-599878.onnx",
                "reward": 3.603855522473653,
                "creation_time": 1718285514.544988,
                "auxillary_file_paths": [
                    "results/Huggy2/Huggy/Huggy-599878.pt"
                ]
            },
            {
                "steps": 799865,
                "file_path": "results/Huggy2/Huggy/Huggy-799865.onnx",
                "reward": 3.9040440122286477,
                "creation_time": 1718285782.5615563,
                "auxillary_file_paths": [
                    "results/Huggy2/Huggy/Huggy-799865.pt"
                ]
            },
            {
                "steps": 999935,
                "file_path": "results/Huggy2/Huggy/Huggy-999935.onnx",
                "reward": 3.838298751402271,
                "creation_time": 1718286058.9532766,
                "auxillary_file_paths": [
                    "results/Huggy2/Huggy/Huggy-999935.pt"
                ]
            },
            {
                "steps": 1199979,
                "file_path": "results/Huggy2/Huggy/Huggy-1199979.onnx",
                "reward": 3.6147878741564816,
                "creation_time": 1718286331.4477627,
                "auxillary_file_paths": [
                    "results/Huggy2/Huggy/Huggy-1199979.pt"
                ]
            },
            {
                "steps": 1399463,
                "file_path": "results/Huggy2/Huggy/Huggy-1399463.onnx",
                "reward": 3.8247948364472726,
                "creation_time": 1718286599.7015295,
                "auxillary_file_paths": [
                    "results/Huggy2/Huggy/Huggy-1399463.pt"
                ]
            },
            {
                "steps": 1599921,
                "file_path": "results/Huggy2/Huggy/Huggy-1599921.onnx",
                "reward": 3.6310946292178645,
                "creation_time": 1718286873.9699352,
                "auxillary_file_paths": [
                    "results/Huggy2/Huggy/Huggy-1599921.pt"
                ]
            },
            {
                "steps": 1799988,
                "file_path": "results/Huggy2/Huggy/Huggy-1799988.onnx",
                "reward": 3.5145926354452968,
                "creation_time": 1718287154.945475,
                "auxillary_file_paths": [
                    "results/Huggy2/Huggy/Huggy-1799988.pt"
                ]
            },
            {
                "steps": 1999937,
                "file_path": "results/Huggy2/Huggy/Huggy-1999937.onnx",
                "reward": 2.679570496082306,
                "creation_time": 1718287443.9985478,
                "auxillary_file_paths": [
                    "results/Huggy2/Huggy/Huggy-1999937.pt"
                ]
            },
            {
                "steps": 2000033,
                "file_path": "results/Huggy2/Huggy/Huggy-2000033.onnx",
                "reward": 3.580238858858744,
                "creation_time": 1718287444.1432247,
                "auxillary_file_paths": [
                    "results/Huggy2/Huggy/Huggy-2000033.pt"
                ]
            }
        ],
        "final_checkpoint": {
            "steps": 2000033,
            "file_path": "results/Huggy2/Huggy.onnx",
            "reward": 3.580238858858744,
            "creation_time": 1718287444.1432247,
            "auxillary_file_paths": [
                "results/Huggy2/Huggy/Huggy-2000033.pt"
            ]
        }
    },
    "metadata": {
        "stats_format_version": "0.3.0",
        "mlagents_version": "1.1.0.dev0",
        "torch_version": "2.3.0+cu121"
    }
}
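For reference, a minimal sketch of how this status file can be inspected, assuming it is read from the relative path shown above (adjust the path to your local checkout); the key names ("Huggy", "checkpoints", "final_checkpoint", "steps", "file_path", "reward") come directly from the JSON.

```python
import json

# Load the ML-Agents training status file (the path is an assumption based on
# the repository layout; point it at wherever the file lives locally).
with open("run_logs/training_status.json") as f:
    status = json.load(f)

huggy = status["Huggy"]

# The checkpoint exported at the end of training.
final = huggy["final_checkpoint"]
print(f"final: {final['file_path']} at {final['steps']} steps, reward {final['reward']:.3f}")

# The intermediate checkpoint with the highest recorded mean reward.
best = max(huggy["checkpoints"], key=lambda c: c["reward"])
print(f"best:  {best['file_path']} at {best['steps']} steps, reward {best['reward']:.3f}")
```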