ppo-Huggy / run_logs / training_status.json
{
    "Huggy": {
        "checkpoints": [
            {
                "steps": 199785,
                "file_path": "results/Huggy/Huggy/Huggy-199785.onnx",
                "reward": 3.6534304120323875,
                "creation_time": 1671686308.6945987,
                "auxillary_file_paths": [
                    "results/Huggy/Huggy/Huggy-199785.pt"
                ]
            },
            {
                "steps": 399984,
                "file_path": "results/Huggy/Huggy/Huggy-399984.onnx",
                "reward": 3.882966278230443,
                "creation_time": 1671686529.6811311,
                "auxillary_file_paths": [
                    "results/Huggy/Huggy/Huggy-399984.pt"
                ]
            },
            {
                "steps": 599973,
                "file_path": "results/Huggy/Huggy/Huggy-599973.onnx",
                "reward": 4.128737533092499,
                "creation_time": 1671686750.7006006,
                "auxillary_file_paths": [
                    "results/Huggy/Huggy/Huggy-599973.pt"
                ]
            },
            {
                "steps": 799928,
                "file_path": "results/Huggy/Huggy/Huggy-799928.onnx",
                "reward": 4.005162583043178,
                "creation_time": 1671686968.7998211,
                "auxillary_file_paths": [
                    "results/Huggy/Huggy/Huggy-799928.pt"
                ]
            },
            {
                "steps": 999969,
                "file_path": "results/Huggy/Huggy/Huggy-999969.onnx",
                "reward": 3.937776027082586,
                "creation_time": 1671687191.8054585,
                "auxillary_file_paths": [
                    "results/Huggy/Huggy/Huggy-999969.pt"
                ]
            },
            {
                "steps": 1199945,
                "file_path": "results/Huggy/Huggy/Huggy-1199945.onnx",
                "reward": 3.7550404760572644,
                "creation_time": 1671687414.544934,
                "auxillary_file_paths": [
                    "results/Huggy/Huggy/Huggy-1199945.pt"
                ]
            },
            {
                "steps": 1399965,
                "file_path": "results/Huggy/Huggy/Huggy-1399965.onnx",
                "reward": 3.9156710147857665,
                "creation_time": 1671687639.350574,
                "auxillary_file_paths": [
                    "results/Huggy/Huggy/Huggy-1399965.pt"
                ]
            },
            {
                "steps": 1599961,
                "file_path": "results/Huggy/Huggy/Huggy-1599961.onnx",
                "reward": 4.077137245258815,
                "creation_time": 1671687860.3365452,
                "auxillary_file_paths": [
                    "results/Huggy/Huggy/Huggy-1599961.pt"
                ]
            },
            {
                "steps": 1799988,
                "file_path": "results/Huggy/Huggy/Huggy-1799988.onnx",
                "reward": 4.108326592427843,
                "creation_time": 1671688083.071282,
                "auxillary_file_paths": [
                    "results/Huggy/Huggy/Huggy-1799988.pt"
                ]
            },
            {
                "steps": 1999899,
                "file_path": "results/Huggy/Huggy/Huggy-1999899.onnx",
                "reward": 3.9551266898980013,
                "creation_time": 1671688306.7327611,
                "auxillary_file_paths": [
                    "results/Huggy/Huggy/Huggy-1999899.pt"
                ]
            },
            {
                "steps": 2000014,
                "file_path": "results/Huggy/Huggy/Huggy-2000014.onnx",
                "reward": 3.979792726834615,
                "creation_time": 1671688306.857536,
                "auxillary_file_paths": [
                    "results/Huggy/Huggy/Huggy-2000014.pt"
                ]
            }
        ],
        "final_checkpoint": {
            "steps": 2000014,
            "file_path": "results/Huggy/Huggy.onnx",
            "reward": 3.979792726834615,
            "creation_time": 1671688306.857536,
            "auxillary_file_paths": [
                "results/Huggy/Huggy/Huggy-2000014.pt"
            ]
        }
    },
    "metadata": {
        "stats_format_version": "0.3.0",
        "mlagents_version": "0.29.0.dev0",
        "torch_version": "1.8.1+cu102"
    }
}
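
A minimal sketch of reading this checkpoint log, assuming the file above is saved locally as training_status.json (the filename and local path are assumptions; the keys "Huggy", "checkpoints", "steps", "reward", "file_path", and "final_checkpoint" come from the file itself):

import json

# Load the ML-Agents training status written during the ppo-Huggy run.
with open("training_status.json") as f:
    status = json.load(f)

huggy = status["Huggy"]

# Print each intermediate checkpoint's step count, mean reward, and ONNX path.
for ckpt in huggy["checkpoints"]:
    print(f"step {ckpt['steps']:>8}  reward {ckpt['reward']:.3f}  -> {ckpt['file_path']}")

# The final checkpoint is the exported policy (results/Huggy/Huggy.onnx).
final = huggy["final_checkpoint"]
print(f"final: step {final['steps']}  reward {final['reward']:.3f}  -> {final['file_path']}")

Run against this file, the loop would list the eleven checkpoints from step 199785 to step 2000014, with mean rewards hovering between roughly 3.65 and 4.13.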