{
"Huggy": {
"checkpoints": [
{
"steps": 199926,
"file_path": "results/Huggy/Huggy/Huggy-199926.onnx",
"reward": 3.4332784596416683,
"creation_time": 1677494408.252362,
"auxillary_file_paths": [
"results/Huggy/Huggy/Huggy-199926.pt"
]
},
{
"steps": 399894,
"file_path": "results/Huggy/Huggy/Huggy-399894.onnx",
"reward": 4.136766782691402,
"creation_time": 1677494674.6457944,
"auxillary_file_paths": [
"results/Huggy/Huggy/Huggy-399894.pt"
]
},
{
"steps": 599993,
"file_path": "results/Huggy/Huggy/Huggy-599993.onnx",
"reward": 3.759084076985069,
"creation_time": 1677494941.602738,
"auxillary_file_paths": [
"results/Huggy/Huggy/Huggy-599993.pt"
]
},
{
"steps": 799927,
"file_path": "results/Huggy/Huggy/Huggy-799927.onnx",
"reward": 3.9291552003377523,
"creation_time": 1677495204.8071759,
"auxillary_file_paths": [
"results/Huggy/Huggy/Huggy-799927.pt"
]
},
{
"steps": 999895,
"file_path": "results/Huggy/Huggy/Huggy-999895.onnx",
"reward": 3.734996803361794,
"creation_time": 1677495469.1417296,
"auxillary_file_paths": [
"results/Huggy/Huggy/Huggy-999895.pt"
]
},
{
"steps": 1199976,
"file_path": "results/Huggy/Huggy/Huggy-1199976.onnx",
"reward": 3.7349527464972603,
"creation_time": 1677495732.5219183,
"auxillary_file_paths": [
"results/Huggy/Huggy/Huggy-1199976.pt"
]
},
{
"steps": 1399973,
"file_path": "results/Huggy/Huggy/Huggy-1399973.onnx",
"reward": 3.6906628715243315,
"creation_time": 1677495997.4254048,
"auxillary_file_paths": [
"results/Huggy/Huggy/Huggy-1399973.pt"
]
},
{
"steps": 1599944,
"file_path": "results/Huggy/Huggy/Huggy-1599944.onnx",
"reward": 3.657820500103773,
"creation_time": 1677496260.4192994,
"auxillary_file_paths": [
"results/Huggy/Huggy/Huggy-1599944.pt"
]
},
{
"steps": 1799891,
"file_path": "results/Huggy/Huggy/Huggy-1799891.onnx",
"reward": 3.6638246386550195,
"creation_time": 1677496523.8809922,
"auxillary_file_paths": [
"results/Huggy/Huggy/Huggy-1799891.pt"
]
},
{
"steps": 1999913,
"file_path": "results/Huggy/Huggy/Huggy-1999913.onnx",
"reward": 3.65679143998358,
"creation_time": 1677496784.9831135,
"auxillary_file_paths": [
"results/Huggy/Huggy/Huggy-1999913.pt"
]
},
{
"steps": 2000010,
"file_path": "results/Huggy/Huggy/Huggy-2000010.onnx",
"reward": 3.67054723968822,
"creation_time": 1677496785.105913,
"auxillary_file_paths": [
"results/Huggy/Huggy/Huggy-2000010.pt"
]
}
],
"final_checkpoint": {
"steps": 2000010,
"file_path": "results/Huggy/Huggy.onnx",
"reward": 3.67054723968822,
"creation_time": 1677496785.105913,
"auxillary_file_paths": [
"results/Huggy/Huggy/Huggy-2000010.pt"
]
}
},
"metadata": {
"stats_format_version": "0.3.0",
"mlagents_version": "0.29.0.dev0",
"torch_version": "1.8.1+cu102"
}
}