{ "Huggy": { "checkpoints": [ { "steps": 499882, "file_path": "results/Huggy/Huggy/Huggy-499882.onnx", "reward": 4.030668764501004, "creation_time": 1670786231.051118, "auxillary_file_paths": [ "results/Huggy/Huggy/Huggy-499882.pt" ] }, { "steps": 999923, "file_path": "results/Huggy/Huggy/Huggy-999923.onnx", "reward": 3.9878892971758257, "creation_time": 1670786825.6281378, "auxillary_file_paths": [ "results/Huggy/Huggy/Huggy-999923.pt" ] }, { "steps": 1499909, "file_path": "results/Huggy/Huggy/Huggy-1499909.onnx", "reward": 3.7668952918821765, "creation_time": 1670787405.4718544, "auxillary_file_paths": [ "results/Huggy/Huggy/Huggy-1499909.pt" ] }, { "steps": 1999918, "file_path": "results/Huggy/Huggy/Huggy-1999918.onnx", "reward": 2.8792127597899664, "creation_time": 1670787985.6289582, "auxillary_file_paths": [ "results/Huggy/Huggy/Huggy-1999918.pt" ] }, { "steps": 2000001, "file_path": "results/Huggy/Huggy/Huggy-2000001.onnx", "reward": 2.9935929721051995, "creation_time": 1670787985.7499921, "auxillary_file_paths": [ "results/Huggy/Huggy/Huggy-2000001.pt" ] } ], "final_checkpoint": { "steps": 2000001, "file_path": "results/Huggy/Huggy.onnx", "reward": 2.9935929721051995, "creation_time": 1670787985.7499921, "auxillary_file_paths": [ "results/Huggy/Huggy/Huggy-2000001.pt" ] } }, "metadata": { "stats_format_version": "0.3.0", "mlagents_version": "0.29.0.dev0", "torch_version": "1.8.1+cu102" } }