ppo-Huggy/run_logs/training_status.json
{
"Huggy": {
"checkpoints": [
{
"steps": 199969,
"file_path": "results/Huggy/Huggy/Huggy-199969.onnx",
"reward": 3.3431186856645527,
"creation_time": 1688018435.8009386,
"auxillary_file_paths": [
"results/Huggy/Huggy/Huggy-199969.pt"
]
},
{
"steps": 399901,
"file_path": "results/Huggy/Huggy/Huggy-399901.onnx",
"reward": 3.5392983446658497,
"creation_time": 1688018685.3301342,
"auxillary_file_paths": [
"results/Huggy/Huggy/Huggy-399901.pt"
]
},
{
"steps": 599844,
"file_path": "results/Huggy/Huggy/Huggy-599844.onnx",
"reward": 4.313225620671322,
"creation_time": 1688018937.632465,
"auxillary_file_paths": [
"results/Huggy/Huggy/Huggy-599844.pt"
]
},
{
"steps": 799992,
"file_path": "results/Huggy/Huggy/Huggy-799992.onnx",
"reward": 3.6806233512661444,
"creation_time": 1688019186.5337977,
"auxillary_file_paths": [
"results/Huggy/Huggy/Huggy-799992.pt"
]
},
{
"steps": 999943,
"file_path": "results/Huggy/Huggy/Huggy-999943.onnx",
"reward": 3.667519268414653,
"creation_time": 1688019444.0340302,
"auxillary_file_paths": [
"results/Huggy/Huggy/Huggy-999943.pt"
]
},
{
"steps": 1199980,
"file_path": "results/Huggy/Huggy/Huggy-1199980.onnx",
"reward": 3.584860695190117,
"creation_time": 1688019708.3815672,
"auxillary_file_paths": [
"results/Huggy/Huggy/Huggy-1199980.pt"
]
},
{
"steps": 1399930,
"file_path": "results/Huggy/Huggy/Huggy-1399930.onnx",
"reward": 3.933383627635677,
"creation_time": 1688019973.258727,
"auxillary_file_paths": [
"results/Huggy/Huggy/Huggy-1399930.pt"
]
},
{
"steps": 1599978,
"file_path": "results/Huggy/Huggy/Huggy-1599978.onnx",
"reward": 3.8725905675441026,
"creation_time": 1688020239.7567468,
"auxillary_file_paths": [
"results/Huggy/Huggy/Huggy-1599978.pt"
]
},
{
"steps": 1799954,
"file_path": "results/Huggy/Huggy/Huggy-1799954.onnx",
"reward": 3.6303456338743367,
"creation_time": 1688020506.8207421,
"auxillary_file_paths": [
"results/Huggy/Huggy/Huggy-1799954.pt"
]
},
{
"steps": 1999938,
"file_path": "results/Huggy/Huggy/Huggy-1999938.onnx",
"reward": 3.761007796634327,
"creation_time": 1688020772.5607166,
"auxillary_file_paths": [
"results/Huggy/Huggy/Huggy-1999938.pt"
]
},
{
"steps": 2000062,
"file_path": "results/Huggy/Huggy/Huggy-2000062.onnx",
"reward": 3.873745348142541,
"creation_time": 1688020772.690065,
"auxillary_file_paths": [
"results/Huggy/Huggy/Huggy-2000062.pt"
]
}
],
"final_checkpoint": {
"steps": 2000062,
"file_path": "results/Huggy/Huggy.onnx",
"reward": 3.873745348142541,
"creation_time": 1688020772.690065,
"auxillary_file_paths": [
"results/Huggy/Huggy/Huggy-2000062.pt"
]
}
},
"metadata": {
"stats_format_version": "0.3.0",
"mlagents_version": "0.31.0.dev0",
"torch_version": "1.11.0+cu102"
}
}
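
For reference, a minimal Python sketch (not part of the repository) of how this training_status.json might be inspected with the standard library; the relative path is an assumption based on the file's location in the repo:

import json

# Load the ML-Agents training status log written during PPO training.
with open("run_logs/training_status.json") as f:
    status = json.load(f)

# Summarize the periodic checkpoints recorded for the "Huggy" behavior.
huggy = status["Huggy"]
for ckpt in huggy["checkpoints"]:
    print(f"step {ckpt['steps']:>8}: mean reward {ckpt['reward']:.3f}")

# The final exported model and the versions used for the run.
final = huggy["final_checkpoint"]
print(f"final model: {final['file_path']} (reward {final['reward']:.3f})")
print("ml-agents version:", status["metadata"]["mlagents_version"])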