{
    "Huggy": {
        "checkpoints": [
            {
                "steps": 199718,
                "file_path": "results/Huggy/Huggy/Huggy-199718.onnx",
                "reward": 3.3635429367423058,
                "creation_time": 1724097183.0497184,
                "auxillary_file_paths": [
                    "results/Huggy/Huggy/Huggy-199718.pt"
                ]
            },
            {
                "steps": 399887,
                "file_path": "results/Huggy/Huggy/Huggy-399887.onnx",
                "reward": 3.530230677733987,
                "creation_time": 1724097417.904084,
                "auxillary_file_paths": [
                    "results/Huggy/Huggy/Huggy-399887.pt"
                ]
            },
            {
                "steps": 599944,
                "file_path": "results/Huggy/Huggy/Huggy-599944.onnx",
                "reward": 3.839178647994995,
                "creation_time": 1724097656.887657,
                "auxillary_file_paths": [
                    "results/Huggy/Huggy/Huggy-599944.pt"
                ]
            },
            {
                "steps": 799960,
                "file_path": "results/Huggy/Huggy/Huggy-799960.onnx",
                "reward": 3.6240010769529776,
                "creation_time": 1724097893.412152,
                "auxillary_file_paths": [
                    "results/Huggy/Huggy/Huggy-799960.pt"
                ]
            },
            {
                "steps": 999589,
                "file_path": "results/Huggy/Huggy/Huggy-999589.onnx",
                "reward": 3.9100323442150566,
                "creation_time": 1724098136.3660781,
                "auxillary_file_paths": [
                    "results/Huggy/Huggy/Huggy-999589.pt"
                ]
            },
            {
                "steps": 1199914,
                "file_path": "results/Huggy/Huggy/Huggy-1199914.onnx",
                "reward": 3.88228909755021,
                "creation_time": 1724098383.616469,
                "auxillary_file_paths": [
                    "results/Huggy/Huggy/Huggy-1199914.pt"
                ]
            },
            {
                "steps": 1399897,
                "file_path": "results/Huggy/Huggy/Huggy-1399897.onnx",
                "reward": 4.881224036216736,
                "creation_time": 1724098627.2696922,
                "auxillary_file_paths": [
                    "results/Huggy/Huggy/Huggy-1399897.pt"
                ]
            },
            {
                "steps": 1599960,
                "file_path": "results/Huggy/Huggy/Huggy-1599960.onnx",
                "reward": 3.9023103459314865,
                "creation_time": 1724098867.4479458,
                "auxillary_file_paths": [
                    "results/Huggy/Huggy/Huggy-1599960.pt"
                ]
            },
            {
                "steps": 1799940,
                "file_path": "results/Huggy/Huggy/Huggy-1799940.onnx",
                "reward": 4.0003840247510185,
                "creation_time": 1724099110.7495985,
                "auxillary_file_paths": [
                    "results/Huggy/Huggy/Huggy-1799940.pt"
                ]
            },
            {
                "steps": 1999930,
                "file_path": "results/Huggy/Huggy/Huggy-1999930.onnx",
                "reward": 3.8655002066067286,
                "creation_time": 1724099355.771384,
                "auxillary_file_paths": [
                    "results/Huggy/Huggy/Huggy-1999930.pt"
                ]
            },
            {
                "steps": 2000044,
                "file_path": "results/Huggy/Huggy/Huggy-2000044.onnx",
                "reward": 3.89092278984231,
                "creation_time": 1724099355.886748,
                "auxillary_file_paths": [
                    "results/Huggy/Huggy/Huggy-2000044.pt"
                ]
            }
        ],
        "final_checkpoint": {
            "steps": 2000044,
            "file_path": "results/Huggy/Huggy.onnx",
            "reward": 3.89092278984231,
            "creation_time": 1724099355.886748,
            "auxillary_file_paths": [
                "results/Huggy/Huggy/Huggy-2000044.pt"
            ]
        }
    },
    "metadata": {
        "stats_format_version": "0.3.0",
        "mlagents_version": "1.1.0.dev0",
        "torch_version": "2.3.1+cu121"
    }
}