jacobthebanana committed
Commit 0705032 • 1 Parent(s): ed3cb15

Saving weights and logs of step 143142

Files changed:
- eval_results.json +0 -2
- events.out.tfevents.1657347033.t1v-n-f83092c9-w-0.1799015.0.v2 +3 -0
- events.out.tfevents.1657483363.t1v-n-f83092c9-w-0.1968957.0.v2 +3 -0
- events.out.tfevents.1657564667.t1v-n-f83092c9-w-0.2073685.0.v2 +3 -0
- events.out.tfevents.1657565171.t1v-n-f83092c9-w-0.2078193.0.v2 +3 -0
- flax_model.msgpack +1 -1
- logs/2022-07-11-roberta-base.txt +286 -0
eval_results.json CHANGED
@@ -1,2 +0,0 @@
-{
-    "eval_loss":
events.out.tfevents.1657347033.t1v-n-f83092c9-w-0.1799015.0.v2 ADDED
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:6fc2eab760d3486c36bc89b190bcb05810abb69ea8934da7fd09545d11425ac2
+size 64190028
events.out.tfevents.1657483363.t1v-n-f83092c9-w-0.1968957.0.v2 ADDED
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:31a9dd9ee783c66e1615f8f25b6030c8913d7ac5641322588f9913221b8bead5
+size 64190027
events.out.tfevents.1657564667.t1v-n-f83092c9-w-0.2073685.0.v2 ADDED
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:2038c91894c5feff48d27bf0eebd33aa3de6a99bf861555b96e9c2c4222f19ee
+size 151135
events.out.tfevents.1657565171.t1v-n-f83092c9-w-0.2078193.0.v2 ADDED
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:10ec26219c58ccb8a68790dd3b3468e1b89f2c46312d89685e893640b5eb962d
+size 21396183
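
The four events.out.tfevents.* files are TensorBoard event logs written during training, apparently one per (re)start of the training process, judging by the distinct timestamps and PIDs embedded in the file names. A minimal sketch of inspecting one of them, assuming tensorboard is installed and the actual binary has been fetched with git lfs pull (this code is not part of the commit):

# Sketch only: read scalar summaries from one of the event files above.
from tensorboard.backend.event_processing.event_accumulator import EventAccumulator

acc = EventAccumulator("events.out.tfevents.1657347033.t1v-n-f83092c9-w-0.1799015.0.v2")
acc.Reload()                  # parse the event records from disk
print(acc.Tags()["scalars"])  # scalar tags logged during training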
flax_model.msgpack CHANGED
@@ -1,3 +1,3 @@
 version https://git-lfs.github.com/spec/v1
-oid sha256:
+oid sha256:86f99255f50293823fe440fb30efd297809cb58ae96ae011d2f0a5d13797254d
 size 498589677
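
Note that every binary artifact in this commit is stored through Git LFS: the repository itself only tracks the three-line text pointers shown in the diffs (spec version, sha256 object ID, byte size), not the binaries themselves. A minimal sketch of reading such a pointer, assuming the checkout still contains the raw pointer text rather than the smudged binary (code not part of the commit):

# Sketch only: parse a Git LFS pointer file
# (https://git-lfs.github.com/spec/v1) into its key/value fields.
def read_lfs_pointer(path: str) -> dict:
    fields = {}
    with open(path) as f:
        for line in f:
            key, _, value = line.strip().partition(" ")
            fields[key] = value  # e.g. "oid" -> "sha256:86f9...254d"
    return fields

pointer = read_lfs_pointer("flax_model.msgpack")
print(pointer["oid"])   # sha256 digest of the actual weights file
print(pointer["size"])  # "498589677" bytes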
logs/2022-07-11-roberta-base.txt ADDED
@@ -0,0 +1,286 @@
+Step... (500/437388 | Training Loss: 0.000404452090151608, Learning Rate: 1.9977182091679424e-05)
+Step... (1000/437388 | Training Loss: 0.0006339488318189979, Learning Rate: 1.9954319213866256e-05)
+Step... (1500/437388 | Training Loss: 9.516239515505731e-05, Learning Rate: 1.993145633605309e-05)
+Step... (2000/437388 | Training Loss: 0.00023419701028615236, Learning Rate: 1.990859345823992e-05)
+Step... (2500/437388 | Training Loss: 0.00013391178799793124, Learning Rate: 1.9885730580426753e-05)
+Step... (3000/437388 | Training Loss: 0.0026682857424020767, Learning Rate: 1.9862867702613585e-05)
+Step... (3500/437388 | Training Loss: 7.625477883266285e-05, Learning Rate: 1.9840004824800417e-05)
+Step... (4000/437388 | Training Loss: 2.2841697500552982e-05, Learning Rate: 1.9817140127997845e-05)
+Step... (4500/437388 | Training Loss: 4.078699566889554e-05, Learning Rate: 1.979427906917408e-05)
+Step... (5000/437388 | Training Loss: 0.0001533488102722913, Learning Rate: 1.9771416191360913e-05)
+Step... (5500/437388 | Training Loss: -7.60231387175736e-07, Learning Rate: 1.9748553313547745e-05)
+Step... (6000/437388 | Training Loss: 6.25096436124295e-05, Learning Rate: 1.9725688616745174e-05)
+Step... (6500/437388 | Training Loss: -5.6734875215624925e-06, Learning Rate: 1.9702825738932006e-05)
+Step... (7000/437388 | Training Loss: 2.8425463369785575e-06, Learning Rate: 1.9679962861118838e-05)
+Step... (7500/437388 | Training Loss: -1.0971100437018322e-06, Learning Rate: 1.965709998330567e-05)
+Step... (8000/437388 | Training Loss: -3.7226100175757892e-06, Learning Rate: 1.9634237105492502e-05)
+Step... (8500/437388 | Training Loss: -9.15726377570536e-06, Learning Rate: 1.9611374227679335e-05)
+Step... (9000/437388 | Training Loss: 0.00028566704713739455, Learning Rate: 1.9588511349866167e-05)
+Step... (9500/437388 | Training Loss: -2.5296901640103897e-06, Learning Rate: 1.9565648472053e-05)
+Step... (10000/437388 | Training Loss: 4.72695282951463e-05, Learning Rate: 1.954278559423983e-05)
+Step... (10500/437388 | Training Loss: 2.3120959667721763e-05, Learning Rate: 1.9519922716426663e-05)
+Step... (11000/437388 | Training Loss: 1.824241735448595e-05, Learning Rate: 1.9497059838613495e-05)
+Step... (11500/437388 | Training Loss: 2.931378548964858e-05, Learning Rate: 1.9474195141810924e-05)
+Step... (12000/437388 | Training Loss: -5.238102403382072e-06, Learning Rate: 1.9451332263997756e-05)
+Step... (12500/437388 | Training Loss: 4.2583487811498344e-05, Learning Rate: 1.942847120517399e-05)
+Step... (13000/437388 | Training Loss: -6.542205483128782e-06, Learning Rate: 1.9405608327360824e-05)
+Step... (13500/437388 | Training Loss: 1.2718789548671339e-05, Learning Rate: 1.9382743630558252e-05)
+Step... (14000/437388 | Training Loss: -7.005878615018446e-06, Learning Rate: 1.9359880752745084e-05)
+Step... (14500/437388 | Training Loss: -7.702277798671275e-06, Learning Rate: 1.9337017874931917e-05)
+Step... (15000/437388 | Training Loss: 1.781577338988427e-07, Learning Rate: 1.931415499711875e-05)
+Step... (15500/437388 | Training Loss: -5.5821979003667366e-06, Learning Rate: 1.929129211930558e-05)
+Step... (16000/437388 | Training Loss: 4.509760856308276e-06, Learning Rate: 1.9268429241492413e-05)
+Step... (16500/437388 | Training Loss: -3.0017952667549253e-05, Learning Rate: 1.9245566363679245e-05)
+Step... (17000/437388 | Training Loss: -6.2593853726866655e-06, Learning Rate: 1.9222703485866077e-05)
+Step... (17500/437388 | Training Loss: -4.433305548445787e-06, Learning Rate: 1.919984060805291e-05)
+Step... (18000/437388 | Training Loss: -7.731199730187654e-06, Learning Rate: 1.917697773023974e-05)
+Step... (18500/437388 | Training Loss: -8.37458446767414e-06, Learning Rate: 1.9154114852426574e-05)
+Step... (19000/437388 | Training Loss: -5.067255642643431e-06, Learning Rate: 1.9131251974613406e-05)
+Step... (19500/437388 | Training Loss: 6.776190730306553e-06, Learning Rate: 1.9108387277810834e-05)
+Step... (20000/437388 | Training Loss: -7.4958870754926465e-06, Learning Rate: 1.908552621898707e-05)
+Step... (20500/437388 | Training Loss: -5.895290541957365e-06, Learning Rate: 1.9062663341173902e-05)
+Step... (21000/437388 | Training Loss: -6.40215785097098e-06, Learning Rate: 1.9039800463360734e-05)
+Step... (21500/437388 | Training Loss: 3.1176207357930252e-06, Learning Rate: 1.9016935766558163e-05)
+Step... (22000/437388 | Training Loss: -3.3340879781462718e-06, Learning Rate: 1.8994072888744995e-05)
+Step... (22500/437388 | Training Loss: -5.5457257985835895e-06, Learning Rate: 1.8971210010931827e-05)
+Step... (23000/437388 | Training Loss: 2.9257062124088407e-06, Learning Rate: 1.894834713311866e-05)
+Step... (23500/437388 | Training Loss: -5.124312338011805e-06, Learning Rate: 1.892548425530549e-05)
+Step... (24000/437388 | Training Loss: -1.773118128767237e-06, Learning Rate: 1.8902621377492324e-05)
+Step... (24500/437388 | Training Loss: -5.4094566621643025e-06, Learning Rate: 1.8879758499679156e-05)
+Step... (25000/437388 | Training Loss: -3.4828276511689182e-06, Learning Rate: 1.8856895621865988e-05)
+Step... (25500/437388 | Training Loss: -4.534229901764775e-06, Learning Rate: 1.883403274405282e-05)
+Step... (26000/437388 | Training Loss: -8.387982234125957e-06, Learning Rate: 1.8811169866239652e-05)
+Step... (26500/437388 | Training Loss: -5.165531547390856e-06, Learning Rate: 1.8788306988426484e-05)
+Step... (27000/437388 | Training Loss: -6.8571389419957995e-06, Learning Rate: 1.8765442291623913e-05)
+Step... (27500/437388 | Training Loss: -7.399700734822545e-06, Learning Rate: 1.8742579413810745e-05)
+Step... (28000/437388 | Training Loss: -2.5704005111037986e-06, Learning Rate: 1.871971835498698e-05)
+Step... (28500/437388 | Training Loss: -6.7595292421174236e-06, Learning Rate: 1.8696855477173813e-05)
+Step... (29000/437388 | Training Loss: -6.415868483600207e-06, Learning Rate: 1.867399078037124e-05)
+Step... (29500/437388 | Training Loss: -1.9005401554750279e-06, Learning Rate: 1.8651127902558073e-05)
+Step... (30000/437388 | Training Loss: -5.97090274823131e-06, Learning Rate: 1.8628265024744906e-05)
+Step... (30500/437388 | Training Loss: -4.2556366679491475e-06, Learning Rate: 1.8605402146931738e-05)
+Step... (31000/437388 | Training Loss: -5.529957434191601e-06, Learning Rate: 1.858253926911857e-05)
+Step... (31500/437388 | Training Loss: -9.014551324071363e-06, Learning Rate: 1.8559676391305402e-05)
+Step... (32000/437388 | Training Loss: -7.940849172882736e-06, Learning Rate: 1.8536813513492234e-05)
+Step... (32500/437388 | Training Loss: -5.163095920579508e-06, Learning Rate: 1.8513950635679066e-05)
+Step... (33000/437388 | Training Loss: -3.989764081779867e-06, Learning Rate: 1.84910877578659e-05)
+Step... (33500/437388 | Training Loss: -4.6107038542686496e-07, Learning Rate: 1.846822488005273e-05)
+Step... (34000/437388 | Training Loss: -5.280284767650301e-06, Learning Rate: 1.8445362002239563e-05)
+Step... (34500/437388 | Training Loss: -7.053782155708177e-06, Learning Rate: 1.8422499124426395e-05)
+Step... (35000/437388 | Training Loss: -8.036065992200747e-06, Learning Rate: 1.8399634427623823e-05)
+Step... (35500/437388 | Training Loss: -3.392528924450744e-06, Learning Rate: 1.8376771549810655e-05)
+Step... (36000/437388 | Training Loss: -3.18766433338169e-06, Learning Rate: 1.835391049098689e-05)
+Step... (36500/437388 | Training Loss: -5.097432222100906e-06, Learning Rate: 1.8331047613173723e-05)
+Step... (37000/437388 | Training Loss: -5.385624717746396e-06, Learning Rate: 1.8308182916371152e-05)
+Step... (37500/437388 | Training Loss: 2.5853719307633583e-06, Learning Rate: 1.8285320038557984e-05)
+Step... (38000/437388 | Training Loss: -4.720991000795038e-06, Learning Rate: 1.8262457160744816e-05)
+Step... (38500/437388 | Training Loss: -5.782361768069677e-06, Learning Rate: 1.8239594282931648e-05)
+Step... (39000/437388 | Training Loss: -1.1970331570410053e-06, Learning Rate: 1.821673140511848e-05)
+Step... (39500/437388 | Training Loss: -6.66629148327047e-06, Learning Rate: 1.8193868527305312e-05)
+Step... (40000/437388 | Training Loss: -3.6221426853444427e-06, Learning Rate: 1.8171005649492145e-05)
+Step... (40500/437388 | Training Loss: -5.708717253583018e-06, Learning Rate: 1.8148142771678977e-05)
+Step... (41000/437388 | Training Loss: -7.3245118983322755e-06, Learning Rate: 1.812527989386581e-05)
+Step... (41500/437388 | Training Loss: -8.422952305409126e-06, Learning Rate: 1.810241701605264e-05)
+Step... (42000/437388 | Training Loss: 6.409215984604089e-06, Learning Rate: 1.8079554138239473e-05)
+Step... (42500/437388 | Training Loss: -7.179387466749176e-06, Learning Rate: 1.80566894414369e-05)
+Step... (43000/437388 | Training Loss: 4.655762040783884e-06, Learning Rate: 1.8033828382613137e-05)
+Step... (43500/437388 | Training Loss: -6.4161577029153705e-06, Learning Rate: 1.801096550479997e-05)
+Step... (44000/437388 | Training Loss: -4.054519195051398e-06, Learning Rate: 1.79881026269868e-05)
+Step... (44500/437388 | Training Loss: -6.536903129017446e-06, Learning Rate: 1.796523793018423e-05)
+Step... (45000/437388 | Training Loss: -6.904315341671463e-06, Learning Rate: 1.7942375052371062e-05)
+Step... (45500/437388 | Training Loss: -6.406115062418394e-06, Learning Rate: 1.7919512174557894e-05)
+Step... (46000/437388 | Training Loss: -7.637635462742765e-06, Learning Rate: 1.7896649296744727e-05)
+Step... (46500/437388 | Training Loss: -7.8167386163841e-06, Learning Rate: 1.787378641893156e-05)
+Step... (47000/437388 | Training Loss: -4.7537982936773915e-06, Learning Rate: 1.785092354111839e-05)
+Step... (47500/437388 | Training Loss: -6.590669727302156e-06, Learning Rate: 1.7828060663305223e-05)
+Step... (48000/437388 | Training Loss: -6.844691597507335e-06, Learning Rate: 1.7805197785492055e-05)
+Step... (48500/437388 | Training Loss: -5.927687197981868e-06, Learning Rate: 1.7782334907678887e-05)
+Step... (49000/437388 | Training Loss: -3.12098063659505e-06, Learning Rate: 1.775947202986572e-05)
+Step... (49500/437388 | Training Loss: -5.57123212274746e-06, Learning Rate: 1.773660915205255e-05)
+Step... (50000/437388 | Training Loss: -7.441677098540822e-06, Learning Rate: 1.7713746274239384e-05)
+Step... (50500/437388 | Training Loss: -6.516596386063611e-06, Learning Rate: 1.7690881577436812e-05)
+Step... (51000/437388 | Training Loss: -4.794296728505287e-06, Learning Rate: 1.7668020518613048e-05)
+Step... (51500/437388 | Training Loss: -6.038122592144646e-06, Learning Rate: 1.764515764079988e-05)
+Step... (52000/437388 | Training Loss: -5.160728505870793e-06, Learning Rate: 1.7622294762986712e-05)
+Step... (52500/437388 | Training Loss: -6.0460461099864915e-06, Learning Rate: 1.759943006618414e-05)
+Step... (53000/437388 | Training Loss: -6.9430088842636906e-06, Learning Rate: 1.7576567188370973e-05)
+Step... (53500/437388 | Training Loss: -6.698157903883839e-06, Learning Rate: 1.7553704310557805e-05)
+Step... (54000/437388 | Training Loss: 2.8181757443235256e-06, Learning Rate: 1.7530841432744637e-05)
+Step... (54500/437388 | Training Loss: -4.628562692232663e-06, Learning Rate: 1.750797855493147e-05)
+Step... (55000/437388 | Training Loss: -7.183040906966198e-06, Learning Rate: 1.74851156771183e-05)
+Step... (55500/437388 | Training Loss: -6.296264018601505e-06, Learning Rate: 1.7462252799305134e-05)
+Step... (56000/437388 | Training Loss: -5.903525561734568e-06, Learning Rate: 1.7439389921491966e-05)
+Step... (56500/437388 | Training Loss: -5.366368895920459e-06, Learning Rate: 1.7416527043678798e-05)
+Step... (57000/437388 | Training Loss: 7.901328444859246e-07, Learning Rate: 1.739366416586563e-05)
+Step... (57500/437388 | Training Loss: -5.448615866043838e-06, Learning Rate: 1.7370801288052462e-05)
+Step... (58000/437388 | Training Loss: 3.7966519812471233e-06, Learning Rate: 1.734793659124989e-05)
+Step... (58500/437388 | Training Loss: -5.885602149646729e-06, Learning Rate: 1.7325073713436723e-05)
+Step... (59000/437388 | Training Loss: -3.6664628169091884e-06, Learning Rate: 1.730221265461296e-05)
+Step... (59500/437388 | Training Loss: -1.0451103662489913e-05, Learning Rate: 1.727934977679979e-05)
+Step... (60000/437388 | Training Loss: -4.403611001180252e-06, Learning Rate: 1.725648507999722e-05)
+Step... (60500/437388 | Training Loss: -6.125190338934772e-06, Learning Rate: 1.723362220218405e-05)
+Step... (61000/437388 | Training Loss: -5.913780114497058e-06, Learning Rate: 1.7210759324370883e-05)
+Step... (61500/437388 | Training Loss: -5.16153249918716e-06, Learning Rate: 1.7187896446557716e-05)
+Step... (62000/437388 | Training Loss: -6.906470844114665e-06, Learning Rate: 1.7165033568744548e-05)
+Step... (62500/437388 | Training Loss: -6.748471605533268e-06, Learning Rate: 1.714217069093138e-05)
+Step... (63000/437388 | Training Loss: -6.144377948658075e-06, Learning Rate: 1.7119307813118212e-05)
+Step... (63500/437388 | Training Loss: -7.6089936555945314e-06, Learning Rate: 1.7096444935305044e-05)
+Step... (64000/437388 | Training Loss: -6.796373781980947e-06, Learning Rate: 1.7073582057491876e-05)
+Step... (64500/437388 | Training Loss: -1.0295361789758317e-05, Learning Rate: 1.705071917967871e-05)
+Step... (65000/437388 | Training Loss: -6.661573934252374e-06, Learning Rate: 1.702785630186554e-05)
+Step... (65500/437388 | Training Loss: -6.960311111470219e-06, Learning Rate: 1.7004993424052373e-05)
+Step... (66000/437388 | Training Loss: -8.780200005276129e-06, Learning Rate: 1.69821287272498e-05)
+Step... (66500/437388 | Training Loss: -7.80259051680332e-06, Learning Rate: 1.6959265849436633e-05)
+Step... (67000/437388 | Training Loss: -5.190452611714136e-06, Learning Rate: 1.693640479061287e-05)
+Step... (67500/437388 | Training Loss: -6.170910637592897e-06, Learning Rate: 1.69135419127997e-05)
+Step... (68000/437388 | Training Loss: -5.451665401778882e-06, Learning Rate: 1.689067721599713e-05)
+Step... (68500/437388 | Training Loss: -8.33153444546042e-06, Learning Rate: 1.6867814338183962e-05)
+Step... (69000/437388 | Training Loss: -8.836791494104546e-06, Learning Rate: 1.6844951460370794e-05)
+Step... (69500/437388 | Training Loss: -7.965396434883587e-06, Learning Rate: 1.6822088582557626e-05)
+Step... (70000/437388 | Training Loss: -7.993636245373636e-06, Learning Rate: 1.6799225704744458e-05)
+Step... (70500/437388 | Training Loss: -7.944276148919016e-06, Learning Rate: 1.677636282693129e-05)
+Step... (71000/437388 | Training Loss: -9.243218300980516e-06, Learning Rate: 1.6753499949118122e-05)
+Step... (71500/437388 | Training Loss: -8.733037248020992e-06, Learning Rate: 1.6730637071304955e-05)
+Step... (72000/437388 | Training Loss: -8.596071893407498e-06, Learning Rate: 1.6707774193491787e-05)
+Step... (72500/437388 | Training Loss: -8.663163498567883e-06, Learning Rate: 1.668491131567862e-05)
+Step... (73000/437388 | Training Loss: -9.036175470100716e-06, Learning Rate: 1.666204843786545e-05)
+Step... (73500/437388 | Training Loss: -9.52473419602029e-06, Learning Rate: 1.663918374106288e-05)
+Step... (74000/437388 | Training Loss: -8.939436156651936e-06, Learning Rate: 1.6616322682239115e-05)
+Step... (74500/437388 | Training Loss: -9.713441613712348e-06, Learning Rate: 1.6593457985436544e-05)
+Step... (75000/437388 | Training Loss: -9.852315997704864e-06, Learning Rate: 1.657059692661278e-05)
+Step... (75500/437388 | Training Loss: -1.306520243815612e-05, Learning Rate: 1.6547732229810208e-05)
+Step... (76000/437388 | Training Loss: -9.346391379949637e-06, Learning Rate: 1.652486935199704e-05)
+Step... (76500/437388 | Training Loss: -9.491493983659893e-06, Learning Rate: 1.6502006474183872e-05)
+Step... (77000/437388 | Training Loss: -9.394856533617713e-06, Learning Rate: 1.6479143596370704e-05)
+Step... (77500/437388 | Training Loss: -9.725947165861726e-06, Learning Rate: 1.6456280718557537e-05)
+Step... (78000/437388 | Training Loss: -9.789433534024283e-06, Learning Rate: 1.643341784074437e-05)
+Step... (78500/437388 | Training Loss: -9.560701073496602e-06, Learning Rate: 1.64105549629312e-05)
+Step... (79000/437388 | Training Loss: -9.880196557787713e-06, Learning Rate: 1.6387692085118033e-05)
+Step... (79500/437388 | Training Loss: -9.761097317095846e-06, Learning Rate: 1.6364829207304865e-05)
+Step... (80000/437388 | Training Loss: -9.930587111739442e-06, Learning Rate: 1.6341966329491697e-05)
+Step... (80500/437388 | Training Loss: -9.742347174324095e-06, Learning Rate: 1.631910345167853e-05)
+Step... (81000/437388 | Training Loss: -9.903991667670198e-06, Learning Rate: 1.629624057386536e-05)
+Step... (81500/437388 | Training Loss: -9.69627762970049e-06, Learning Rate: 1.627337587706279e-05)
+Step... (82000/437388 | Training Loss: -9.94446236290969e-06, Learning Rate: 1.6250514818239026e-05)
+Step... (82500/437388 | Training Loss: -9.852805305854417e-06, Learning Rate: 1.6227650121436454e-05)
+Step... (83000/437388 | Training Loss: -9.919120202539489e-06, Learning Rate: 1.620478906261269e-05)
+Step... (83500/437388 | Training Loss: -9.893648893921636e-06, Learning Rate: 1.618192436581012e-05)
+Step... (84000/437388 | Training Loss: -9.873612725641578e-06, Learning Rate: 1.615906148799695e-05)
+Step... (84500/437388 | Training Loss: -9.74766953731887e-06, Learning Rate: 1.6136198610183783e-05)
+Step... (85000/437388 | Training Loss: -9.893889910017606e-06, Learning Rate: 1.6113335732370615e-05)
+Step... (85500/437388 | Training Loss: -9.625448910810519e-06, Learning Rate: 1.6090472854557447e-05)
+Step... (86000/437388 | Training Loss: -9.15635609999299e-06, Learning Rate: 1.606760997674428e-05)
+Step... (86500/437388 | Training Loss: -9.932768080034293e-06, Learning Rate: 1.604474709893111e-05)
+Step... (87000/437388 | Training Loss: -9.676634363131598e-06, Learning Rate: 1.6021884221117944e-05)
+Step... (87500/437388 | Training Loss: -1.0389778253738768e-05, Learning Rate: 1.5999021343304776e-05)
+Step... (88000/437388 | Training Loss: -9.875341675069649e-06, Learning Rate: 1.5976158465491608e-05)
+Step... (88500/437388 | Training Loss: -9.83661630016286e-06, Learning Rate: 1.595329558767844e-05)
+Step... (89000/437388 | Training Loss: -9.88773626886541e-06, Learning Rate: 1.593043089087587e-05)
+Step... (89500/437388 | Training Loss: -1.005065132630989e-05, Learning Rate: 1.59075680130627e-05)
+Step... (90000/437388 | Training Loss: -8.606668416177854e-06, Learning Rate: 1.5884706954238936e-05)
+Step... (90500/437388 | Training Loss: -9.940247764461674e-06, Learning Rate: 1.586184407642577e-05)
+Step... (91000/437388 | Training Loss: -9.934198715200182e-06, Learning Rate: 1.5838979379623197e-05)
+Step... (91500/437388 | Training Loss: -9.929631232807878e-06, Learning Rate: 1.581611650181003e-05)
+Step... (92000/437388 | Training Loss: -9.918112482409924e-06, Learning Rate: 1.579325362399686e-05)
+Step... (92500/437388 | Training Loss: -9.936074093275238e-06, Learning Rate: 1.5770390746183693e-05)
+Step... (93000/437388 | Training Loss: -9.881726327876095e-06, Learning Rate: 1.5747527868370526e-05)
+Step... (93500/437388 | Training Loss: -9.933090041158721e-06, Learning Rate: 1.5724664990557358e-05)
+Step... (94000/437388 | Training Loss: -9.976016372093e-06, Learning Rate: 1.570180211274419e-05)
+Step... (94500/437388 | Training Loss: -9.939818482962437e-06, Learning Rate: 1.5678939234931022e-05)
+Step... (95000/437388 | Training Loss: -9.946526915882714e-06, Learning Rate: 1.5656076357117854e-05)
+Step... (95500/437388 | Training Loss: -9.949440936907195e-06, Learning Rate: 1.5633213479304686e-05)
+Step... (96000/437388 | Training Loss: -1.0023382856161334e-05, Learning Rate: 1.561035060149152e-05)
+Step... (96500/437388 | Training Loss: -9.958578630175907e-06, Learning Rate: 1.558748772367835e-05)
+Step... (97000/437388 | Training Loss: -9.940342351910658e-06, Learning Rate: 1.556462302687578e-05)
+Step... (97500/437388 | Training Loss: -9.942121323547326e-06, Learning Rate: 1.554176014906261e-05)
+Step... (98000/437388 | Training Loss: -9.938939911080524e-06, Learning Rate: 1.5518899090238847e-05)
+Step... (98500/437388 | Training Loss: -9.912233508657664e-06, Learning Rate: 1.549603621242568e-05)
+Step... (99000/437388 | Training Loss: -9.934283298207447e-06, Learning Rate: 1.5473171515623108e-05)
+Step... (99500/437388 | Training Loss: -9.91287743090652e-06, Learning Rate: 1.545030863780994e-05)
+Step... (100000/437388 | Training Loss: -9.90589614957571e-06, Learning Rate: 1.5427445759996772e-05)
+Step... (100500/437388 | Training Loss: -9.936301466950681e-06, Learning Rate: 1.5404582882183604e-05)
+Step... (101000/437388 | Training Loss: -9.946139471139759e-06, Learning Rate: 1.5381720004370436e-05)
+Step... (101500/437388 | Training Loss: -9.907307685352862e-06, Learning Rate: 1.5358857126557268e-05)
+Step... (102000/437388 | Training Loss: -9.979352398659103e-06, Learning Rate: 1.53359942487441e-05)
+Step... (102500/437388 | Training Loss: -9.954264896805398e-06, Learning Rate: 1.5313131370930932e-05)
+Step... (103000/437388 | Training Loss: -9.955115274351556e-06, Learning Rate: 1.5290268493117765e-05)
+Step... (103500/437388 | Training Loss: -9.925246558850631e-06, Learning Rate: 1.5267405615304597e-05)
+Step... (104000/437388 | Training Loss: -9.947918442776427e-06, Learning Rate: 1.5244541827996727e-05)
+Step... (104500/437388 | Training Loss: -9.944302291842178e-06, Learning Rate: 1.522167895018356e-05)
+Step... (105000/437388 | Training Loss: -9.955795576388482e-06, Learning Rate: 1.5198816981865093e-05)
+Step... (105500/437388 | Training Loss: -9.942156793840695e-06, Learning Rate: 1.5175952285062522e-05)
+Step... (106000/437388 | Training Loss: -9.936501555785071e-06, Learning Rate: 1.5153090316744056e-05)
+Step... (106500/437388 | Training Loss: -9.950296771421563e-06, Learning Rate: 1.5130227438930888e-05)
+Step... (107000/437388 | Training Loss: -9.797213351703249e-06, Learning Rate: 1.510736456111772e-05)
+Step... (107500/437388 | Training Loss: -9.955976565834135e-06, Learning Rate: 1.508450077380985e-05)
+Step... (108000/437388 | Training Loss: -9.953116204997059e-06, Learning Rate: 1.5061637895996682e-05)
+Step... (108500/437388 | Training Loss: -9.957444490282796e-06, Learning Rate: 1.5038775018183514e-05)
+Step... (109000/437388 | Training Loss: -9.821238563745283e-06, Learning Rate: 1.5015911230875645e-05)
+Step... (109500/437388 | Training Loss: -9.946938916982617e-06, Learning Rate: 1.499305017205188e-05)
+Step... (110000/437388 | Training Loss: -9.967616279027425e-06, Learning Rate: 1.4970185475249309e-05)
+Step... (110500/437388 | Training Loss: -9.965857316274196e-06, Learning Rate: 1.4947323506930843e-05)
+Step... (111000/437388 | Training Loss: -9.944502380676568e-06, Learning Rate: 1.4924460629117675e-05)
+Step... (111500/437388 | Training Loss: -9.968231097445823e-06, Learning Rate: 1.4901596841809805e-05)
+Step... (112000/437388 | Training Loss: -9.950863386620767e-06, Learning Rate: 1.4878733963996638e-05)
+Step... (112500/437388 | Training Loss: -9.969849088520277e-06, Learning Rate: 1.485587108618347e-05)
+Step... (113000/437388 | Training Loss: -9.920340744429268e-06, Learning Rate: 1.4833009117865004e-05)
+Step... (113500/437388 | Training Loss: -9.993400453822687e-06, Learning Rate: 1.4810144421062432e-05)
+Step... (114000/437388 | Training Loss: -9.957962902262807e-06, Learning Rate: 1.4787282452743966e-05)
+Step... (114500/437388 | Training Loss: -9.957393558579497e-06, Learning Rate: 1.4764419574930798e-05)
+Step... (115000/437388 | Training Loss: -9.974402928492054e-06, Learning Rate: 1.474155669711763e-05)
+Step... (115500/437388 | Training Loss: -9.969589882530272e-06, Learning Rate: 1.471869290980976e-05)
+Step... (116000/437388 | Training Loss: -9.969175152946264e-06, Learning Rate: 1.4695830031996593e-05)
+Step... (116500/437388 | Training Loss: -9.960567695088685e-06, Learning Rate: 1.4672968063678127e-05)
+Step... (117000/437388 | Training Loss: -9.816652891458943e-06, Learning Rate: 1.4650103366875555e-05)
+Step... (117500/437388 | Training Loss: -9.960506758943666e-06, Learning Rate: 1.462724139855709e-05)
+Step... (118000/437388 | Training Loss: -9.966113793780096e-06, Learning Rate: 1.4604378520743921e-05)
+Step... (118500/437388 | Training Loss: -9.95407117443392e-06, Learning Rate: 1.4581515642930754e-05)
+Step... (119000/437388 | Training Loss: -9.978957677958533e-06, Learning Rate: 1.4558651855622884e-05)
+Step... (119500/437388 | Training Loss: -9.961753676179796e-06, Learning Rate: 1.4535788977809716e-05)
+Step... (120000/437388 | Training Loss: -9.842288818617817e-06, Learning Rate: 1.451292700949125e-05)
+Step... (120500/437388 | Training Loss: -9.98788709694054e-06, Learning Rate: 1.449006322218338e-05)
+Step... (121000/437388 | Training Loss: -9.951994798029773e-06, Learning Rate: 1.4467201253864914e-05)
+Step... (121500/437388 | Training Loss: -9.9709377536783e-06, Learning Rate: 1.4444336557062343e-05)
+Step... (122000/437388 | Training Loss: -9.959147064364515e-06, Learning Rate: 1.4421474588743877e-05)
+Step... (122500/437388 | Training Loss: -9.685417353466619e-06, Learning Rate: 1.4398611710930709e-05)
+Step... (123000/437388 | Training Loss: -9.959947419702075e-06, Learning Rate: 1.4375747923622839e-05)
+Step... (123500/437388 | Training Loss: -9.967046935344115e-06, Learning Rate: 1.4352885045809671e-05)
+Step... (124000/437388 | Training Loss: -9.98882569547277e-06, Learning Rate: 1.4330022167996503e-05)
+Step... (124500/437388 | Training Loss: -9.963216143660247e-06, Learning Rate: 1.4307160199678037e-05)
+Step... (125000/437388 | Training Loss: -9.949379091267474e-06, Learning Rate: 1.4284295502875466e-05)
+Step... (125500/437388 | Training Loss: -9.868399502011016e-06, Learning Rate: 1.4261433534557e-05)
+Step... (126000/437388 | Training Loss: -9.719717127154581e-06, Learning Rate: 1.4238570656743832e-05)
+Step... (126500/437388 | Training Loss: -9.97211282083299e-06, Learning Rate: 1.4215707778930664e-05)
+Step... (127000/437388 | Training Loss: -9.905468687065877e-06, Learning Rate: 1.4192843991622794e-05)
+Step... (127500/437388 | Training Loss: -9.968744052457623e-06, Learning Rate: 1.4169981113809627e-05)
+Step... (128000/437388 | Training Loss: -9.980949471355416e-06, Learning Rate: 1.414711914549116e-05)
+Step... (128500/437388 | Training Loss: -9.967592632165179e-06, Learning Rate: 1.4124254448688589e-05)
+Step... (129000/437388 | Training Loss: -9.979932656278834e-06, Learning Rate: 1.4101392480370123e-05)
+Step... (129500/437388 | Training Loss: -9.441755537409335e-06, Learning Rate: 1.4078528693062253e-05)
+Step... (130000/437388 | Training Loss: -9.931108252203558e-06, Learning Rate: 1.4055666724743787e-05)
+Step... (130500/437388 | Training Loss: -9.979726200981531e-06, Learning Rate: 1.403280384693062e-05)
+Step... (131000/437388 | Training Loss: -9.972496627597138e-06, Learning Rate: 1.400994005962275e-05)
+Step... (131500/437388 | Training Loss: -9.980236427509226e-06, Learning Rate: 1.3987077181809582e-05)
+Step... (132000/437388 | Training Loss: -9.980353752325755e-06, Learning Rate: 1.3964214303996414e-05)
+Step... (132500/437388 | Training Loss: -9.980927643482573e-06, Learning Rate: 1.3941352335677948e-05)
+Step... (133000/437388 | Training Loss: -9.694988875708077e-06, Learning Rate: 1.3918487638875376e-05)
+Step... (133500/437388 | Training Loss: -9.752675396157429e-06, Learning Rate: 1.389562567055691e-05)
+Step... (134000/437388 | Training Loss: -9.952703294402454e-06, Learning Rate: 1.3872762792743742e-05)
+Step... (134500/437388 | Training Loss: -9.968332960852422e-06, Learning Rate: 1.3849899005435873e-05)
+Step... (135000/437388 | Training Loss: -9.869803761830553e-06, Learning Rate: 1.3827036127622705e-05)
+Step... (135500/437388 | Training Loss: -9.96959897747729e-06, Learning Rate: 1.3804173249809537e-05)
+Step... (136000/437388 | Training Loss: -9.982661140384153e-06, Learning Rate: 1.3781311281491071e-05)
+Step... (136500/437388 | Training Loss: -9.980454706237651e-06, Learning Rate: 1.37584465846885e-05)
+Step... (137000/437388 | Training Loss: -9.969302482204512e-06, Learning Rate: 1.3735584616370033e-05)
+Step... (137500/437388 | Training Loss: -9.979442438634578e-06, Learning Rate: 1.3712721738556866e-05)
+Step... (138000/437388 | Training Loss: -9.97651386569487e-06, Learning Rate: 1.3689858860743698e-05)
+Step... (138500/437388 | Training Loss: -9.985508768295404e-06, Learning Rate: 1.3666995073435828e-05)
+Step... (139000/437388 | Training Loss: -9.726589269121177e-06, Learning Rate: 1.364413219562266e-05)
+Step... (139500/437388 | Training Loss: -9.964971468434669e-06, Learning Rate: 1.3621270227304194e-05)
+Step... (140000/437388 | Training Loss: -9.96951166598592e-06, Learning Rate: 1.3598405530501623e-05)
+Step... (140500/437388 | Training Loss: -9.984936696127988e-06, Learning Rate: 1.3575543562183157e-05)
+Step... (141000/437388 | Training Loss: -9.892564776237123e-06, Learning Rate: 1.3552679774875287e-05)
+Step... (141500/437388 | Training Loss: -9.977737136068754e-06, Learning Rate: 1.3529817806556821e-05)
+Step... (142000/437388 | Training Loss: -9.97265669866465e-06, Learning Rate: 1.3506954928743653e-05)
+Step... (142500/437388 | Training Loss: -9.97040478978306e-06, Learning Rate: 1.3484091141435783e-05)
+Step... (143000/437388 | Training Loss: -9.97698589344509e-06, Learning Rate: 1.3461228263622615e-05)
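
Two remarks on the log, backed by the small sketch below (the parse_log_line helper is hypothetical, not part of the repository). First, the logged learning rate matches a linear decay from 2e-5 to 0 over the full 437,388 steps. Second, the training loss hovers around -1e-5 for most of the run; a plain cross-entropy cannot be negative, so the logged quantity is presumably something other than the raw masked-LM loss, or the logging is off.

import re

# Hypothetical helper: parse one log line into (step, training_loss, learning_rate).
LINE_RE = re.compile(
    r"Step\.\.\. \((?P<step>\d+)/437388 \| Training Loss: (?P<loss>[-0-9.e]+), "
    r"Learning Rate: (?P<lr>[-0-9.e]+)\)"
)

def parse_log_line(line):
    m = LINE_RE.search(line)
    return int(m["step"]), float(m["loss"]), float(m["lr"])

step, loss, lr = parse_log_line(
    "Step... (500/437388 | Training Loss: 0.000404452090151608, "
    "Learning Rate: 1.9977182091679424e-05)"
)
# The logged rate agrees with a linear decay from 2e-5 to 0 over 437,388 steps
# (an inference from the logged values, not something stated in the commit):
assert abs(lr - 2e-5 * (1 - step / 437388)) < 1e-9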