{
"Huggy": {
"checkpoints": [
{
"steps": 199819,
"file_path": "results/Huggy/Huggy/Huggy-199819.onnx",
"reward": 3.337421376915539,
"creation_time": 1699322003.3094811,
"auxillary_file_paths": [
"results/Huggy/Huggy/Huggy-199819.pt"
]
},
{
"steps": 399793,
"file_path": "results/Huggy/Huggy/Huggy-399793.onnx",
"reward": 3.7106046199798586,
"creation_time": 1699322259.322604,
"auxillary_file_paths": [
"results/Huggy/Huggy/Huggy-399793.pt"
]
},
{
"steps": 599966,
"file_path": "results/Huggy/Huggy/Huggy-599966.onnx",
"reward": 3.451304389880254,
"creation_time": 1699322519.0601273,
"auxillary_file_paths": [
"results/Huggy/Huggy/Huggy-599966.pt"
]
},
{
"steps": 799953,
"file_path": "results/Huggy/Huggy/Huggy-799953.onnx",
"reward": 3.792570505850017,
"creation_time": 1699322779.1972964,
"auxillary_file_paths": [
"results/Huggy/Huggy/Huggy-799953.pt"
]
},
{
"steps": 999738,
"file_path": "results/Huggy/Huggy/Huggy-999738.onnx",
"reward": 3.811092725753784,
"creation_time": 1699323042.3664627,
"auxillary_file_paths": [
"results/Huggy/Huggy/Huggy-999738.pt"
]
},
{
"steps": 1199995,
"file_path": "results/Huggy/Huggy/Huggy-1199995.onnx",
"reward": 3.4988154447392414,
"creation_time": 1699323304.4036744,
"auxillary_file_paths": [
"results/Huggy/Huggy/Huggy-1199995.pt"
]
},
{
"steps": 1399906,
"file_path": "results/Huggy/Huggy/Huggy-1399906.onnx",
"reward": 4.407717335224151,
"creation_time": 1699323559.9910123,
"auxillary_file_paths": [
"results/Huggy/Huggy/Huggy-1399906.pt"
]
},
{
"steps": 1599968,
"file_path": "results/Huggy/Huggy/Huggy-1599968.onnx",
"reward": 3.737314440863473,
"creation_time": 1699323812.3728187,
"auxillary_file_paths": [
"results/Huggy/Huggy/Huggy-1599968.pt"
]
},
{
"steps": 1799938,
"file_path": "results/Huggy/Huggy/Huggy-1799938.onnx",
"reward": 3.77162748728043,
"creation_time": 1699324067.9461794,
"auxillary_file_paths": [
"results/Huggy/Huggy/Huggy-1799938.pt"
]
},
{
"steps": 1999987,
"file_path": "results/Huggy/Huggy/Huggy-1999987.onnx",
"reward": 3.371283725688332,
"creation_time": 1699324328.9041693,
"auxillary_file_paths": [
"results/Huggy/Huggy/Huggy-1999987.pt"
]
},
{
"steps": 2000044,
"file_path": "results/Huggy/Huggy/Huggy-2000044.onnx",
"reward": 3.3291737735271454,
"creation_time": 1699324329.0578003,
"auxillary_file_paths": [
"results/Huggy/Huggy/Huggy-2000044.pt"
]
}
],
"final_checkpoint": {
"steps": 2000044,
"file_path": "results/Huggy/Huggy.onnx",
"reward": 3.3291737735271454,
"creation_time": 1699324329.0578003,
"auxillary_file_paths": [
"results/Huggy/Huggy/Huggy-2000044.pt"
]
}
},
"metadata": {
"stats_format_version": "0.3.0",
"mlagents_version": "1.1.0.dev0",
"torch_version": "2.1.0+cu118"
}
}