{
"Huggy": {
"checkpoints": [
{
"steps": 199907,
"file_path": "results/Huggy2/Huggy/Huggy-199907.onnx",
"reward": 3.2482561424926475,
"creation_time": 1714897923.7660048,
"auxillary_file_paths": [
"results/Huggy2/Huggy/Huggy-199907.pt"
]
},
{
"steps": 399925,
"file_path": "results/Huggy2/Huggy/Huggy-399925.onnx",
"reward": 3.550834097114264,
"creation_time": 1714898170.537273,
"auxillary_file_paths": [
"results/Huggy2/Huggy/Huggy-399925.pt"
]
},
{
"steps": 599938,
"file_path": "results/Huggy2/Huggy/Huggy-599938.onnx",
"reward": 2.708030621210734,
"creation_time": 1714898417.8497162,
"auxillary_file_paths": [
"results/Huggy2/Huggy/Huggy-599938.pt"
]
},
{
"steps": 799979,
"file_path": "results/Huggy2/Huggy/Huggy-799979.onnx",
"reward": 3.762150787526653,
"creation_time": 1714898662.3898547,
"auxillary_file_paths": [
"results/Huggy2/Huggy/Huggy-799979.pt"
]
},
{
"steps": 999951,
"file_path": "results/Huggy2/Huggy/Huggy-999951.onnx",
"reward": 3.7849496285849753,
"creation_time": 1714898909.7734892,
"auxillary_file_paths": [
"results/Huggy2/Huggy/Huggy-999951.pt"
]
},
{
"steps": 1199914,
"file_path": "results/Huggy2/Huggy/Huggy-1199914.onnx",
"reward": 3.4459999590589288,
"creation_time": 1714899156.3520024,
"auxillary_file_paths": [
"results/Huggy2/Huggy/Huggy-1199914.pt"
]
},
{
"steps": 1399980,
"file_path": "results/Huggy2/Huggy/Huggy-1399980.onnx",
"reward": 3.713604661692744,
"creation_time": 1714899399.9150436,
"auxillary_file_paths": [
"results/Huggy2/Huggy/Huggy-1399980.pt"
]
},
{
"steps": 1599986,
"file_path": "results/Huggy2/Huggy/Huggy-1599986.onnx",
"reward": 3.6386796565003734,
"creation_time": 1714899646.1893287,
"auxillary_file_paths": [
"results/Huggy2/Huggy/Huggy-1599986.pt"
]
},
{
"steps": 1799983,
"file_path": "results/Huggy2/Huggy/Huggy-1799983.onnx",
"reward": 3.82738499660961,
"creation_time": 1714899898.4596636,
"auxillary_file_paths": [
"results/Huggy2/Huggy/Huggy-1799983.pt"
]
},
{
"steps": 1999808,
"file_path": "results/Huggy2/Huggy/Huggy-1999808.onnx",
"reward": 4.101602216561635,
"creation_time": 1714900149.90733,
"auxillary_file_paths": [
"results/Huggy2/Huggy/Huggy-1999808.pt"
]
},
{
"steps": 2000558,
"file_path": "results/Huggy2/Huggy/Huggy-2000558.onnx",
"reward": 3.932853725941285,
"creation_time": 1714900150.0523603,
"auxillary_file_paths": [
"results/Huggy2/Huggy/Huggy-2000558.pt"
]
}
],
"final_checkpoint": {
"steps": 2000558,
"file_path": "results/Huggy2/Huggy.onnx",
"reward": 3.932853725941285,
"creation_time": 1714900150.0523603,
"auxillary_file_paths": [
"results/Huggy2/Huggy/Huggy-2000558.pt"
]
}
},
"metadata": {
"stats_format_version": "0.3.0",
"mlagents_version": "1.1.0.dev0",
"torch_version": "2.2.1+cu121"
}
}
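
For reference, a minimal sketch of how one might read this ML-Agents `training_status.json` and list the checkpoint rewards recorded above. The file path and the behavior-name layout are assumptions based on the structure shown here, not a documented ML-Agents API:

```python
import json

# Path is an assumption; adjust to wherever the run logs live.
with open("run_logs/training_status.json") as f:
    status = json.load(f)

# Top-level keys are behavior names (here "Huggy") plus a "metadata" block.
for name, behavior in status.items():
    if name == "metadata":
        continue
    for ckpt in behavior["checkpoints"]:
        print(f"{name}: step {ckpt['steps']:>8}  reward {ckpt['reward']:.3f}")
    final = behavior["final_checkpoint"]
    print(f"{name} final: step {final['steps']}  reward {final['reward']:.3f}")
```

Run against this file, the loop would print one line per saved checkpoint (every ~200k steps up to 2,000,558) followed by the final checkpoint, whose reward matches the last entry in the list.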