diff --git a/config.json b/config.json
new file mode 100644
index 0000000000000000000000000000000000000000..32e5f508d07378286598a4173c52035f1c5414f8
--- /dev/null
+++ b/config.json
@@ -0,0 +1,45 @@
+{
+ "_name_or_path": "lmsys/vicuna-13b-v1.5",
+ "architectures": [
+ "LlavaLlamaForCausalLM"
+ ],
+ "attention_bias": false,
+ "attention_dropout": 0.0,
+ "bos_token_id": 1,
+ "eos_token_id": 2,
+ "freeze_mm_mlp_adapter": false,
+ "hidden_act": "silu",
+ "hidden_size": 5120,
+ "image_aspect_ratio": "pad",
+ "initializer_range": 0.02,
+ "intermediate_size": 13824,
+ "max_length": 4096,
+ "max_position_embeddings": 4096,
+ "mm_hidden_size": 1024,
+ "mm_patch_merge_type": "flat",
+ "mm_projector_lr": null,
+ "mm_projector_type": "mlp2x_gelu",
+ "mm_use_im_patch_token": false,
+ "mm_use_im_start_end": false,
+ "mm_vision_select_feature": "patch",
+ "mm_vision_select_layer": -2,
+ "mm_vision_tower": "openai/clip-vit-large-patch14-336",
+ "model_type": "llava_llama",
+ "num_attention_heads": 40,
+ "num_hidden_layers": 40,
+ "num_key_value_heads": 40,
+ "pad_token_id": 0,
+ "pretraining_tp": 1,
+ "rms_norm_eps": 1e-05,
+ "rope_scaling": null,
+ "rope_theta": 10000.0,
+ "tie_word_embeddings": false,
+ "tokenizer_model_max_length": 1560,
+ "tokenizer_padding_side": "right",
+ "torch_dtype": "bfloat16",
+ "transformers_version": "4.36.2",
+ "tune_mm_mlp_adapter": false,
+ "use_cache": true,
+ "use_mm_proj": true,
+ "vocab_size": 32000
+}
diff --git a/headcut_mask_3p67/0.pth b/headcut_mask_3p67/0.pth
new file mode 100644
index 0000000000000000000000000000000000000000..8ec9bab63c4bd1fb0839cbf77c052a685f06f0be
--- /dev/null
+++ b/headcut_mask_3p67/0.pth
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:8b9e56ed6127a980308a442909092f02a52cb4d87bfc24532aa6272685fe4247
+size 1086
diff --git a/headcut_mask_3p67/1.pth b/headcut_mask_3p67/1.pth
new file mode 100644
index 0000000000000000000000000000000000000000..6ddaa47c57e69098e6def45e3e85d7897289b87f
--- /dev/null
+++ b/headcut_mask_3p67/1.pth
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:424a0b376990caa5d86f6353e3c0206532903814a51a9846ee87f522aad04bbf
+size 1086
diff --git a/headcut_mask_3p67/10.pth b/headcut_mask_3p67/10.pth
new file mode 100644
index 0000000000000000000000000000000000000000..375f8df29fd8c340702f8b1f1057fd3fec46a8b6
--- /dev/null
+++ b/headcut_mask_3p67/10.pth
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:2bb9611827c68bd7cd953db744e33e38512dbec375b173f89772eadc102bbc64
+size 1155
diff --git a/headcut_mask_3p67/11.pth b/headcut_mask_3p67/11.pth
new file mode 100644
index 0000000000000000000000000000000000000000..2d60371e64fe928f8cdc3439d1e50031c35aa3fc
--- /dev/null
+++ b/headcut_mask_3p67/11.pth
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:84f46bf6288508158790660d90d2c542e0f8015788db75d2eb134fc696e5bea3
+size 1155
diff --git a/headcut_mask_3p67/12.pth b/headcut_mask_3p67/12.pth
new file mode 100644
index 0000000000000000000000000000000000000000..f1852d01e590a6fb56d1d8c410ee768eb8464ec2
--- /dev/null
+++ b/headcut_mask_3p67/12.pth
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:8dfcc4b3c6c5cffbd6b811f2a7ab396aa25255c9de4c2e5921c0c07a0ce8c7bd
+size 1155
diff --git a/headcut_mask_3p67/13.pth b/headcut_mask_3p67/13.pth
new file mode 100644
index 0000000000000000000000000000000000000000..48623021007ea84256dbc384e8f7872973609586
--- /dev/null
+++ b/headcut_mask_3p67/13.pth
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:223dd3a8636ca048241f6237545ecbbadcfc74b349ba2f6fdea916a808f29dd0
+size 1155
diff --git a/headcut_mask_3p67/14.pth b/headcut_mask_3p67/14.pth
new file mode 100644
index 0000000000000000000000000000000000000000..2f4a4e4010eb6a74e4bcd49183dc460720a7295d
--- /dev/null
+++ b/headcut_mask_3p67/14.pth
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:eab1f1767e6e0bb97df5cd9e7379c169ad514a2b0ea341ab88010ceb3308532e
+size 1155
diff --git a/headcut_mask_3p67/15.pth b/headcut_mask_3p67/15.pth
new file mode 100644
index 0000000000000000000000000000000000000000..e902837669100b34d51cb2e251610f916f298d56
--- /dev/null
+++ b/headcut_mask_3p67/15.pth
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:d72e6a062da7986efc37e7ffc3ddac0ecc27cda0d03c448810b0fddb9d19a2e0
+size 1155
diff --git a/headcut_mask_3p67/16.pth b/headcut_mask_3p67/16.pth
new file mode 100644
index 0000000000000000000000000000000000000000..c007e245c9c324e4d5f0fc2ccf79ae501806b911
--- /dev/null
+++ b/headcut_mask_3p67/16.pth
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:6a8d71f6d290269f451a97c055191b6754589fe9c845a8788d08a9c6c3555b97
+size 1155
diff --git a/headcut_mask_3p67/17.pth b/headcut_mask_3p67/17.pth
new file mode 100644
index 0000000000000000000000000000000000000000..f82603a82c7ac0d10984320018d7ff90c280d7d3
--- /dev/null
+++ b/headcut_mask_3p67/17.pth
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:990c4174d0cc904366efc9ebea2075038131023eacc56ac73b89bec77c68a4a2
+size 1155
diff --git a/headcut_mask_3p67/18.pth b/headcut_mask_3p67/18.pth
new file mode 100644
index 0000000000000000000000000000000000000000..9a9020e9d1a2e41128261440444825c63a4c7f5f
--- /dev/null
+++ b/headcut_mask_3p67/18.pth
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:a1aeb9a6f68c255355fce36d6877e9295fcc7372f913b9821fa327fc2d72d82d
+size 1155
diff --git a/headcut_mask_3p67/19.pth b/headcut_mask_3p67/19.pth
new file mode 100644
index 0000000000000000000000000000000000000000..7a4018cf7373ff8d42442d47b106cd2700a2a50f
--- /dev/null
+++ b/headcut_mask_3p67/19.pth
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:1c60bef878d8883962eecabe6bdc4e464b215156e682fe0319bf48f151410e96
+size 1155
diff --git a/headcut_mask_3p67/2.pth b/headcut_mask_3p67/2.pth
new file mode 100644
index 0000000000000000000000000000000000000000..ee94f7553f45acbf97d2a360693bf8833594088e
--- /dev/null
+++ b/headcut_mask_3p67/2.pth
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:d595e7d7c1d7f7e6ab277a82804eaf49ca7ecb3f451f80ca46b987bd48b48e30
+size 1086
diff --git a/headcut_mask_3p67/20.pth b/headcut_mask_3p67/20.pth
new file mode 100644
index 0000000000000000000000000000000000000000..a2598e2589f75389e381770e26769978d067d5c6
--- /dev/null
+++ b/headcut_mask_3p67/20.pth
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:9d816d5f933cedfc2959d51076c0a3ca05b80a462773a322eb1ec9a18c0fa732
+size 1155
diff --git a/headcut_mask_3p67/21.pth b/headcut_mask_3p67/21.pth
new file mode 100644
index 0000000000000000000000000000000000000000..f2750f4586bbe05a81b9c9194f2192f95bb5e3ec
--- /dev/null
+++ b/headcut_mask_3p67/21.pth
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:524f0cd620492ed32560761c16af3e0b545a8be4ae9dfc0d03a99c004e85f1fd
+size 1155
diff --git a/headcut_mask_3p67/22.pth b/headcut_mask_3p67/22.pth
new file mode 100644
index 0000000000000000000000000000000000000000..6af40b716bc72e109dfc270a84feffc41474101b
--- /dev/null
+++ b/headcut_mask_3p67/22.pth
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:031fbf160201f90d59dcf7c3b0f47a440718b1fa4f98808cd9e9fde40821713a
+size 1155
diff --git a/headcut_mask_3p67/23.pth b/headcut_mask_3p67/23.pth
new file mode 100644
index 0000000000000000000000000000000000000000..96193649ac06b7348c252a6cbb778f5160f669f4
--- /dev/null
+++ b/headcut_mask_3p67/23.pth
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:eaca8a9ddcfde6cb93dc7725ab06523d619426928a6dc8acedb1ebefdf2e0812
+size 1155
diff --git a/headcut_mask_3p67/24.pth b/headcut_mask_3p67/24.pth
new file mode 100644
index 0000000000000000000000000000000000000000..6617b363dd73ba871b28f95538e87b2f3fd2a3e6
--- /dev/null
+++ b/headcut_mask_3p67/24.pth
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:8232aa68127a7a1494120ce7da98dd988c8d057c7509b3f171d79ace114073b3
+size 1155
diff --git a/headcut_mask_3p67/25.pth b/headcut_mask_3p67/25.pth
new file mode 100644
index 0000000000000000000000000000000000000000..eb132f6fc93649531fb97d04146420d85636ff5e
--- /dev/null
+++ b/headcut_mask_3p67/25.pth
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:bafe13ed42077a63a15e7e06b8903f5650858e35c7f0702f8807797af90aee77
+size 1155
diff --git a/headcut_mask_3p67/26.pth b/headcut_mask_3p67/26.pth
new file mode 100644
index 0000000000000000000000000000000000000000..85986119792c1d6c7beb7a09a200291255e9c676
--- /dev/null
+++ b/headcut_mask_3p67/26.pth
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:e05a0c7ee4c1d819437faf49a7dec8d802d0a332ece890eaea34512c8b12c91d
+size 1155
diff --git a/headcut_mask_3p67/27.pth b/headcut_mask_3p67/27.pth
new file mode 100644
index 0000000000000000000000000000000000000000..269c3414efd20a8d9d8f39d749aeba03259f5c6f
--- /dev/null
+++ b/headcut_mask_3p67/27.pth
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:420c29fc5e1eeb102cb54d60aa9018e57e4eafc3fc0af705d42c298d0c65b801
+size 1155
diff --git a/headcut_mask_3p67/28.pth b/headcut_mask_3p67/28.pth
new file mode 100644
index 0000000000000000000000000000000000000000..7b35405ba1d30360efe0a8a41e66c4c550437967
--- /dev/null
+++ b/headcut_mask_3p67/28.pth
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:b913ebb397e7e9056c9cacd977aab668462cce09b044152a36466c68eaeb0890
+size 1155
diff --git a/headcut_mask_3p67/29.pth b/headcut_mask_3p67/29.pth
new file mode 100644
index 0000000000000000000000000000000000000000..3b49ba3f5cdd3302d78558e22b2987e20389ee35
--- /dev/null
+++ b/headcut_mask_3p67/29.pth
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:587fb7c05256d2c96db21cfb258f9425e102c132014101091ae54ce81583ae19
+size 1155
diff --git a/headcut_mask_3p67/3.pth b/headcut_mask_3p67/3.pth
new file mode 100644
index 0000000000000000000000000000000000000000..126d9a019125fae57d885164b2406ae3cf6ee32d
--- /dev/null
+++ b/headcut_mask_3p67/3.pth
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:4271bcca623f7aa8057d3af35166258e80a19ed4989c25ec9f2da9563922f319
+size 1086
diff --git a/headcut_mask_3p67/30.pth b/headcut_mask_3p67/30.pth
new file mode 100644
index 0000000000000000000000000000000000000000..76618a055a26ee3b4065de0ee9f1a25915e01ce1
--- /dev/null
+++ b/headcut_mask_3p67/30.pth
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:7e2e58ba07180eb45d1dce42707bc0c82d04786f6759f2741f7993b5b4c8a510
+size 1155
diff --git a/headcut_mask_3p67/31.pth b/headcut_mask_3p67/31.pth
new file mode 100644
index 0000000000000000000000000000000000000000..e153612129106c3a36abd689e5af6a8590b6d39b
--- /dev/null
+++ b/headcut_mask_3p67/31.pth
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:ecf13c5e773db32dc2988b034f3cc8c811776aad5ee778e230cb7acd3db38426
+size 1155
diff --git a/headcut_mask_3p67/32.pth b/headcut_mask_3p67/32.pth
new file mode 100644
index 0000000000000000000000000000000000000000..fd94813d589dfa35756fd555ad7fd2acd415079e
--- /dev/null
+++ b/headcut_mask_3p67/32.pth
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:dcb39d0a7f3d32871cde5848627dcbb2b40f2654e5cd2715af19b6dbb4b36932
+size 1155
diff --git a/headcut_mask_3p67/33.pth b/headcut_mask_3p67/33.pth
new file mode 100644
index 0000000000000000000000000000000000000000..11cdd299a9ebf3a78e8cb4033a7b6ed06fceb1c6
--- /dev/null
+++ b/headcut_mask_3p67/33.pth
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:d6ba2e384a7389ec3b2ba5b738d5ae2b86721aa0c734ed982a70bd97566c5031
+size 1155
diff --git a/headcut_mask_3p67/34.pth b/headcut_mask_3p67/34.pth
new file mode 100644
index 0000000000000000000000000000000000000000..3c75d2d74e0207a0a2fe49e2da40272ef7666179
--- /dev/null
+++ b/headcut_mask_3p67/34.pth
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:36602961f4780500b4af89602758a816eea7515716b486670e78e265f3228db5
+size 1155
diff --git a/headcut_mask_3p67/35.pth b/headcut_mask_3p67/35.pth
new file mode 100644
index 0000000000000000000000000000000000000000..dfef644d87146c4cb3eadd4b8f4a7dfb23856343
--- /dev/null
+++ b/headcut_mask_3p67/35.pth
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:aa2d4a17b06969a9338de44a05ff6dc272329b3f3774538f82daa8750f45dc81
+size 1155
diff --git a/headcut_mask_3p67/36.pth b/headcut_mask_3p67/36.pth
new file mode 100644
index 0000000000000000000000000000000000000000..938874ed93454c77486be8bbded1f5a72c261835
--- /dev/null
+++ b/headcut_mask_3p67/36.pth
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:58c14d3476676fa0f07ee8d9697a08fd9acf64e53df8b9f3c8f8bea98343d88e
+size 1155
diff --git a/headcut_mask_3p67/37.pth b/headcut_mask_3p67/37.pth
new file mode 100644
index 0000000000000000000000000000000000000000..c454331515839d15eda1fb6b1046d63b0d891ff5
--- /dev/null
+++ b/headcut_mask_3p67/37.pth
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:6c4f3ba785ed7ec0be36e6cf388e08d98559e1088d000b9689d98aaad5b0f341
+size 1155
diff --git a/headcut_mask_3p67/38.pth b/headcut_mask_3p67/38.pth
new file mode 100644
index 0000000000000000000000000000000000000000..f882d6d6ecaee6eb25ac465cf6f902e42f37e830
--- /dev/null
+++ b/headcut_mask_3p67/38.pth
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:65230cf1f3d7be3ff6a6f7aad691d9add7d9132a4328002417c0b246f533daa0
+size 1155
diff --git a/headcut_mask_3p67/39.pth b/headcut_mask_3p67/39.pth
new file mode 100644
index 0000000000000000000000000000000000000000..8270ff11817acbe946aaa8807aed64c51cdad48c
--- /dev/null
+++ b/headcut_mask_3p67/39.pth
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:b205ba9c0de51ddf420ad5d398bfe27d50dcff5119af47658a4762ccc4160f68
+size 1155
diff --git a/headcut_mask_3p67/4.pth b/headcut_mask_3p67/4.pth
new file mode 100644
index 0000000000000000000000000000000000000000..35c826007e27397e4f8fea4c68cc3093a26cad7a
--- /dev/null
+++ b/headcut_mask_3p67/4.pth
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:5d14812e6aa11cb8321f58df2cae98fb83c97bdafd9ffd5d6ddef1a913324e1d
+size 1086
diff --git a/headcut_mask_3p67/5.pth b/headcut_mask_3p67/5.pth
new file mode 100644
index 0000000000000000000000000000000000000000..7330739efef7f54c1a59c26c05d0825ccfa63f7f
--- /dev/null
+++ b/headcut_mask_3p67/5.pth
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:d613713664cc03ea59215eb9d19cc8ca1fb52410754d9845ec6b79bed4ea3449
+size 1086
diff --git a/headcut_mask_3p67/6.pth b/headcut_mask_3p67/6.pth
new file mode 100644
index 0000000000000000000000000000000000000000..09c80a89e59c2b3308fdbbe896e338e51d3ebd01
--- /dev/null
+++ b/headcut_mask_3p67/6.pth
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:c4ad6a2c9cdafd520ffedd5d90f2f8ec36bfddabf9d6e77d3bb02848b60a6e6e
+size 1086
diff --git a/headcut_mask_3p67/7.pth b/headcut_mask_3p67/7.pth
new file mode 100644
index 0000000000000000000000000000000000000000..ec9b93259f5bcba3a45d38e61edbe3ceeac2346d
--- /dev/null
+++ b/headcut_mask_3p67/7.pth
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:e117c779be6f6e9d74f644029273e86d575e63a741f7979196d976fc1ca44c62
+size 1086
diff --git a/headcut_mask_3p67/8.pth b/headcut_mask_3p67/8.pth
new file mode 100644
index 0000000000000000000000000000000000000000..974f54d1975849537da464a379eb2f605c226786
--- /dev/null
+++ b/headcut_mask_3p67/8.pth
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:22e124d901c05bd9d19011e199fcb3abf2730bbcca97e32111fb11bb87e4fffc
+size 1086
diff --git a/headcut_mask_3p67/9.pth b/headcut_mask_3p67/9.pth
new file mode 100644
index 0000000000000000000000000000000000000000..1aa232342d9702151492da25b94ac18fec501d2b
--- /dev/null
+++ b/headcut_mask_3p67/9.pth
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:9dd8aa2037ac9d45e951d2777ef747f7841c6daf8aeac5ba5bd69ee3f9a360bd
+size 1086
diff --git a/model-00001-of-00006.safetensors b/model-00001-of-00006.safetensors
new file mode 100644
index 0000000000000000000000000000000000000000..d6cd0ff058755188cdc9666628b00877572e9fbb
--- /dev/null
+++ b/model-00001-of-00006.safetensors
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:d559fa01c34da0aecb7c273f3a6cf55addc5264bd99d62a4f87ec7de5aec2396
+size 4978265800
diff --git a/model-00002-of-00006.safetensors b/model-00002-of-00006.safetensors
new file mode 100644
index 0000000000000000000000000000000000000000..17e2e366cf87adb090f6e785a064c0e2df41a761
--- /dev/null
+++ b/model-00002-of-00006.safetensors
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:98d8c39720cc027c4b4e40592cfabe8d5fe2a4a556483b4aa5dfbdf73e69f10d
+size 4970422232
diff --git a/model-00003-of-00006.safetensors b/model-00003-of-00006.safetensors
new file mode 100644
index 0000000000000000000000000000000000000000..5a6d656eed81ffbc61e0c8114903a34eab190f2c
--- /dev/null
+++ b/model-00003-of-00006.safetensors
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:1d9321a1f58401a4165ed04cbe9593d664e6b288fc8d10d895e5cdac4f17dcd0
+size 4970422256
diff --git a/model-00004-of-00006.safetensors b/model-00004-of-00006.safetensors
new file mode 100644
index 0000000000000000000000000000000000000000..f7509f9afc4ab0aad3ec8a849259623755e8ef23
--- /dev/null
+++ b/model-00004-of-00006.safetensors
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:b2dbecacf52532c933f168748fe35bbcf512bae73e8137002bdf175ed94b7d45
+size 4933701504
diff --git a/model-00005-of-00006.safetensors b/model-00005-of-00006.safetensors
new file mode 100644
index 0000000000000000000000000000000000000000..5c1d04dcb035e3880a6567bfeef3d7cdeeb365b8
--- /dev/null
+++ b/model-00005-of-00006.safetensors
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:45fb012f1c7d53f1771f868550908ec18af182aaf822647d4eb94be0edbc5ff7
+size 4933722216
diff --git a/model-00006-of-00006.safetensors b/model-00006-of-00006.safetensors
new file mode 100644
index 0000000000000000000000000000000000000000..6bfd252fff0f57bba871af7340bc8841f6474eeb
--- /dev/null
+++ b/model-00006-of-00006.safetensors
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:3f5a5e9db88af642a1b044b716a65748c1374c193bef5261b8b2b9ca12cbbbaa
+size 1915248664
diff --git a/model.safetensors.index.json b/model.safetensors.index.json
new file mode 100644
index 0000000000000000000000000000000000000000..0b710ea4003a96fb2d14b8f7775e12bcfcfb7c2a
--- /dev/null
+++ b/model.safetensors.index.json
@@ -0,0 +1,765 @@
+{
+ "metadata": {
+ "total_size": 26701678592
+ },
+ "weight_map": {
+ "lm_head.weight": "model-00006-of-00006.safetensors",
+ "model.embed_tokens.weight": "model-00001-of-00006.safetensors",
+ "model.layers.0.input_layernorm.weight": "model-00001-of-00006.safetensors",
+ "model.layers.0.mlp.down_proj.weight": "model-00001-of-00006.safetensors",
+ "model.layers.0.mlp.gate_proj.weight": "model-00001-of-00006.safetensors",
+ "model.layers.0.mlp.up_proj.weight": "model-00001-of-00006.safetensors",
+ "model.layers.0.post_attention_layernorm.weight": "model-00001-of-00006.safetensors",
+ "model.layers.0.self_attn.k_proj.weight": "model-00001-of-00006.safetensors",
+ "model.layers.0.self_attn.o_proj.weight": "model-00001-of-00006.safetensors",
+ "model.layers.0.self_attn.q_proj.weight": "model-00001-of-00006.safetensors",
+ "model.layers.0.self_attn.v_proj.weight": "model-00001-of-00006.safetensors",
+ "model.layers.1.input_layernorm.weight": "model-00001-of-00006.safetensors",
+ "model.layers.1.mlp.down_proj.weight": "model-00001-of-00006.safetensors",
+ "model.layers.1.mlp.gate_proj.weight": "model-00001-of-00006.safetensors",
+ "model.layers.1.mlp.up_proj.weight": "model-00001-of-00006.safetensors",
+ "model.layers.1.post_attention_layernorm.weight": "model-00001-of-00006.safetensors",
+ "model.layers.1.self_attn.k_proj.weight": "model-00001-of-00006.safetensors",
+ "model.layers.1.self_attn.o_proj.weight": "model-00001-of-00006.safetensors",
+ "model.layers.1.self_attn.q_proj.weight": "model-00001-of-00006.safetensors",
+ "model.layers.1.self_attn.v_proj.weight": "model-00001-of-00006.safetensors",
+ "model.layers.10.input_layernorm.weight": "model-00002-of-00006.safetensors",
+ "model.layers.10.mlp.down_proj.weight": "model-00002-of-00006.safetensors",
+ "model.layers.10.mlp.gate_proj.weight": "model-00002-of-00006.safetensors",
+ "model.layers.10.mlp.up_proj.weight": "model-00002-of-00006.safetensors",
+ "model.layers.10.post_attention_layernorm.weight": "model-00002-of-00006.safetensors",
+ "model.layers.10.self_attn.k_proj.weight": "model-00002-of-00006.safetensors",
+ "model.layers.10.self_attn.o_proj.weight": "model-00002-of-00006.safetensors",
+ "model.layers.10.self_attn.q_proj.weight": "model-00002-of-00006.safetensors",
+ "model.layers.10.self_attn.v_proj.weight": "model-00002-of-00006.safetensors",
+ "model.layers.11.input_layernorm.weight": "model-00002-of-00006.safetensors",
+ "model.layers.11.mlp.down_proj.weight": "model-00002-of-00006.safetensors",
+ "model.layers.11.mlp.gate_proj.weight": "model-00002-of-00006.safetensors",
+ "model.layers.11.mlp.up_proj.weight": "model-00002-of-00006.safetensors",
+ "model.layers.11.post_attention_layernorm.weight": "model-00002-of-00006.safetensors",
+ "model.layers.11.self_attn.k_proj.weight": "model-00002-of-00006.safetensors",
+ "model.layers.11.self_attn.o_proj.weight": "model-00002-of-00006.safetensors",
+ "model.layers.11.self_attn.q_proj.weight": "model-00002-of-00006.safetensors",
+ "model.layers.11.self_attn.v_proj.weight": "model-00002-of-00006.safetensors",
+ "model.layers.12.input_layernorm.weight": "model-00002-of-00006.safetensors",
+ "model.layers.12.mlp.down_proj.weight": "model-00002-of-00006.safetensors",
+ "model.layers.12.mlp.gate_proj.weight": "model-00002-of-00006.safetensors",
+ "model.layers.12.mlp.up_proj.weight": "model-00002-of-00006.safetensors",
+ "model.layers.12.post_attention_layernorm.weight": "model-00002-of-00006.safetensors",
+ "model.layers.12.self_attn.k_proj.weight": "model-00002-of-00006.safetensors",
+ "model.layers.12.self_attn.o_proj.weight": "model-00002-of-00006.safetensors",
+ "model.layers.12.self_attn.q_proj.weight": "model-00002-of-00006.safetensors",
+ "model.layers.12.self_attn.v_proj.weight": "model-00002-of-00006.safetensors",
+ "model.layers.13.input_layernorm.weight": "model-00002-of-00006.safetensors",
+ "model.layers.13.mlp.down_proj.weight": "model-00002-of-00006.safetensors",
+ "model.layers.13.mlp.gate_proj.weight": "model-00002-of-00006.safetensors",
+ "model.layers.13.mlp.up_proj.weight": "model-00002-of-00006.safetensors",
+ "model.layers.13.post_attention_layernorm.weight": "model-00002-of-00006.safetensors",
+ "model.layers.13.self_attn.k_proj.weight": "model-00002-of-00006.safetensors",
+ "model.layers.13.self_attn.o_proj.weight": "model-00002-of-00006.safetensors",
+ "model.layers.13.self_attn.q_proj.weight": "model-00002-of-00006.safetensors",
+ "model.layers.13.self_attn.v_proj.weight": "model-00002-of-00006.safetensors",
+ "model.layers.14.input_layernorm.weight": "model-00002-of-00006.safetensors",
+ "model.layers.14.mlp.down_proj.weight": "model-00002-of-00006.safetensors",
+ "model.layers.14.mlp.gate_proj.weight": "model-00002-of-00006.safetensors",
+ "model.layers.14.mlp.up_proj.weight": "model-00002-of-00006.safetensors",
+ "model.layers.14.post_attention_layernorm.weight": "model-00002-of-00006.safetensors",
+ "model.layers.14.self_attn.k_proj.weight": "model-00002-of-00006.safetensors",
+ "model.layers.14.self_attn.o_proj.weight": "model-00002-of-00006.safetensors",
+ "model.layers.14.self_attn.q_proj.weight": "model-00002-of-00006.safetensors",
+ "model.layers.14.self_attn.v_proj.weight": "model-00002-of-00006.safetensors",
+ "model.layers.15.input_layernorm.weight": "model-00003-of-00006.safetensors",
+ "model.layers.15.mlp.down_proj.weight": "model-00003-of-00006.safetensors",
+ "model.layers.15.mlp.gate_proj.weight": "model-00003-of-00006.safetensors",
+ "model.layers.15.mlp.up_proj.weight": "model-00003-of-00006.safetensors",
+ "model.layers.15.post_attention_layernorm.weight": "model-00003-of-00006.safetensors",
+ "model.layers.15.self_attn.k_proj.weight": "model-00002-of-00006.safetensors",
+ "model.layers.15.self_attn.o_proj.weight": "model-00003-of-00006.safetensors",
+ "model.layers.15.self_attn.q_proj.weight": "model-00002-of-00006.safetensors",
+ "model.layers.15.self_attn.v_proj.weight": "model-00003-of-00006.safetensors",
+ "model.layers.16.input_layernorm.weight": "model-00003-of-00006.safetensors",
+ "model.layers.16.mlp.down_proj.weight": "model-00003-of-00006.safetensors",
+ "model.layers.16.mlp.gate_proj.weight": "model-00003-of-00006.safetensors",
+ "model.layers.16.mlp.up_proj.weight": "model-00003-of-00006.safetensors",
+ "model.layers.16.post_attention_layernorm.weight": "model-00003-of-00006.safetensors",
+ "model.layers.16.self_attn.k_proj.weight": "model-00003-of-00006.safetensors",
+ "model.layers.16.self_attn.o_proj.weight": "model-00003-of-00006.safetensors",
+ "model.layers.16.self_attn.q_proj.weight": "model-00003-of-00006.safetensors",
+ "model.layers.16.self_attn.v_proj.weight": "model-00003-of-00006.safetensors",
+ "model.layers.17.input_layernorm.weight": "model-00003-of-00006.safetensors",
+ "model.layers.17.mlp.down_proj.weight": "model-00003-of-00006.safetensors",
+ "model.layers.17.mlp.gate_proj.weight": "model-00003-of-00006.safetensors",
+ "model.layers.17.mlp.up_proj.weight": "model-00003-of-00006.safetensors",
+ "model.layers.17.post_attention_layernorm.weight": "model-00003-of-00006.safetensors",
+ "model.layers.17.self_attn.k_proj.weight": "model-00003-of-00006.safetensors",
+ "model.layers.17.self_attn.o_proj.weight": "model-00003-of-00006.safetensors",
+ "model.layers.17.self_attn.q_proj.weight": "model-00003-of-00006.safetensors",
+ "model.layers.17.self_attn.v_proj.weight": "model-00003-of-00006.safetensors",
+ "model.layers.18.input_layernorm.weight": "model-00003-of-00006.safetensors",
+ "model.layers.18.mlp.down_proj.weight": "model-00003-of-00006.safetensors",
+ "model.layers.18.mlp.gate_proj.weight": "model-00003-of-00006.safetensors",
+ "model.layers.18.mlp.up_proj.weight": "model-00003-of-00006.safetensors",
+ "model.layers.18.post_attention_layernorm.weight": "model-00003-of-00006.safetensors",
+ "model.layers.18.self_attn.k_proj.weight": "model-00003-of-00006.safetensors",
+ "model.layers.18.self_attn.o_proj.weight": "model-00003-of-00006.safetensors",
+ "model.layers.18.self_attn.q_proj.weight": "model-00003-of-00006.safetensors",
+ "model.layers.18.self_attn.v_proj.weight": "model-00003-of-00006.safetensors",
+ "model.layers.19.input_layernorm.weight": "model-00003-of-00006.safetensors",
+ "model.layers.19.mlp.down_proj.weight": "model-00003-of-00006.safetensors",
+ "model.layers.19.mlp.gate_proj.weight": "model-00003-of-00006.safetensors",
+ "model.layers.19.mlp.up_proj.weight": "model-00003-of-00006.safetensors",
+ "model.layers.19.post_attention_layernorm.weight": "model-00003-of-00006.safetensors",
+ "model.layers.19.self_attn.k_proj.weight": "model-00003-of-00006.safetensors",
+ "model.layers.19.self_attn.o_proj.weight": "model-00003-of-00006.safetensors",
+ "model.layers.19.self_attn.q_proj.weight": "model-00003-of-00006.safetensors",
+ "model.layers.19.self_attn.v_proj.weight": "model-00003-of-00006.safetensors",
+ "model.layers.2.input_layernorm.weight": "model-00001-of-00006.safetensors",
+ "model.layers.2.mlp.down_proj.weight": "model-00001-of-00006.safetensors",
+ "model.layers.2.mlp.gate_proj.weight": "model-00001-of-00006.safetensors",
+ "model.layers.2.mlp.up_proj.weight": "model-00001-of-00006.safetensors",
+ "model.layers.2.post_attention_layernorm.weight": "model-00001-of-00006.safetensors",
+ "model.layers.2.self_attn.k_proj.weight": "model-00001-of-00006.safetensors",
+ "model.layers.2.self_attn.o_proj.weight": "model-00001-of-00006.safetensors",
+ "model.layers.2.self_attn.q_proj.weight": "model-00001-of-00006.safetensors",
+ "model.layers.2.self_attn.v_proj.weight": "model-00001-of-00006.safetensors",
+ "model.layers.20.input_layernorm.weight": "model-00003-of-00006.safetensors",
+ "model.layers.20.mlp.down_proj.weight": "model-00003-of-00006.safetensors",
+ "model.layers.20.mlp.gate_proj.weight": "model-00003-of-00006.safetensors",
+ "model.layers.20.mlp.up_proj.weight": "model-00003-of-00006.safetensors",
+ "model.layers.20.post_attention_layernorm.weight": "model-00003-of-00006.safetensors",
+ "model.layers.20.self_attn.k_proj.weight": "model-00003-of-00006.safetensors",
+ "model.layers.20.self_attn.o_proj.weight": "model-00003-of-00006.safetensors",
+ "model.layers.20.self_attn.q_proj.weight": "model-00003-of-00006.safetensors",
+ "model.layers.20.self_attn.v_proj.weight": "model-00003-of-00006.safetensors",
+ "model.layers.21.input_layernorm.weight": "model-00003-of-00006.safetensors",
+ "model.layers.21.mlp.down_proj.weight": "model-00003-of-00006.safetensors",
+ "model.layers.21.mlp.gate_proj.weight": "model-00003-of-00006.safetensors",
+ "model.layers.21.mlp.up_proj.weight": "model-00003-of-00006.safetensors",
+ "model.layers.21.post_attention_layernorm.weight": "model-00003-of-00006.safetensors",
+ "model.layers.21.self_attn.k_proj.weight": "model-00003-of-00006.safetensors",
+ "model.layers.21.self_attn.o_proj.weight": "model-00003-of-00006.safetensors",
+ "model.layers.21.self_attn.q_proj.weight": "model-00003-of-00006.safetensors",
+ "model.layers.21.self_attn.v_proj.weight": "model-00003-of-00006.safetensors",
+ "model.layers.22.input_layernorm.weight": "model-00003-of-00006.safetensors",
+ "model.layers.22.mlp.down_proj.weight": "model-00003-of-00006.safetensors",
+ "model.layers.22.mlp.gate_proj.weight": "model-00003-of-00006.safetensors",
+ "model.layers.22.mlp.up_proj.weight": "model-00003-of-00006.safetensors",
+ "model.layers.22.post_attention_layernorm.weight": "model-00003-of-00006.safetensors",
+ "model.layers.22.self_attn.k_proj.weight": "model-00003-of-00006.safetensors",
+ "model.layers.22.self_attn.o_proj.weight": "model-00003-of-00006.safetensors",
+ "model.layers.22.self_attn.q_proj.weight": "model-00003-of-00006.safetensors",
+ "model.layers.22.self_attn.v_proj.weight": "model-00003-of-00006.safetensors",
+ "model.layers.23.input_layernorm.weight": "model-00004-of-00006.safetensors",
+ "model.layers.23.mlp.down_proj.weight": "model-00004-of-00006.safetensors",
+ "model.layers.23.mlp.gate_proj.weight": "model-00004-of-00006.safetensors",
+ "model.layers.23.mlp.up_proj.weight": "model-00004-of-00006.safetensors",
+ "model.layers.23.post_attention_layernorm.weight": "model-00004-of-00006.safetensors",
+ "model.layers.23.self_attn.k_proj.weight": "model-00004-of-00006.safetensors",
+ "model.layers.23.self_attn.o_proj.weight": "model-00004-of-00006.safetensors",
+ "model.layers.23.self_attn.q_proj.weight": "model-00004-of-00006.safetensors",
+ "model.layers.23.self_attn.v_proj.weight": "model-00004-of-00006.safetensors",
+ "model.layers.24.input_layernorm.weight": "model-00004-of-00006.safetensors",
+ "model.layers.24.mlp.down_proj.weight": "model-00004-of-00006.safetensors",
+ "model.layers.24.mlp.gate_proj.weight": "model-00004-of-00006.safetensors",
+ "model.layers.24.mlp.up_proj.weight": "model-00004-of-00006.safetensors",
+ "model.layers.24.post_attention_layernorm.weight": "model-00004-of-00006.safetensors",
+ "model.layers.24.self_attn.k_proj.weight": "model-00004-of-00006.safetensors",
+ "model.layers.24.self_attn.o_proj.weight": "model-00004-of-00006.safetensors",
+ "model.layers.24.self_attn.q_proj.weight": "model-00004-of-00006.safetensors",
+ "model.layers.24.self_attn.v_proj.weight": "model-00004-of-00006.safetensors",
+ "model.layers.25.input_layernorm.weight": "model-00004-of-00006.safetensors",
+ "model.layers.25.mlp.down_proj.weight": "model-00004-of-00006.safetensors",
+ "model.layers.25.mlp.gate_proj.weight": "model-00004-of-00006.safetensors",
+ "model.layers.25.mlp.up_proj.weight": "model-00004-of-00006.safetensors",
+ "model.layers.25.post_attention_layernorm.weight": "model-00004-of-00006.safetensors",
+ "model.layers.25.self_attn.k_proj.weight": "model-00004-of-00006.safetensors",
+ "model.layers.25.self_attn.o_proj.weight": "model-00004-of-00006.safetensors",
+ "model.layers.25.self_attn.q_proj.weight": "model-00004-of-00006.safetensors",
+ "model.layers.25.self_attn.v_proj.weight": "model-00004-of-00006.safetensors",
+ "model.layers.26.input_layernorm.weight": "model-00004-of-00006.safetensors",
+ "model.layers.26.mlp.down_proj.weight": "model-00004-of-00006.safetensors",
+ "model.layers.26.mlp.gate_proj.weight": "model-00004-of-00006.safetensors",
+ "model.layers.26.mlp.up_proj.weight": "model-00004-of-00006.safetensors",
+ "model.layers.26.post_attention_layernorm.weight": "model-00004-of-00006.safetensors",
+ "model.layers.26.self_attn.k_proj.weight": "model-00004-of-00006.safetensors",
+ "model.layers.26.self_attn.o_proj.weight": "model-00004-of-00006.safetensors",
+ "model.layers.26.self_attn.q_proj.weight": "model-00004-of-00006.safetensors",
+ "model.layers.26.self_attn.v_proj.weight": "model-00004-of-00006.safetensors",
+ "model.layers.27.input_layernorm.weight": "model-00004-of-00006.safetensors",
+ "model.layers.27.mlp.down_proj.weight": "model-00004-of-00006.safetensors",
+ "model.layers.27.mlp.gate_proj.weight": "model-00004-of-00006.safetensors",
+ "model.layers.27.mlp.up_proj.weight": "model-00004-of-00006.safetensors",
+ "model.layers.27.post_attention_layernorm.weight": "model-00004-of-00006.safetensors",
+ "model.layers.27.self_attn.k_proj.weight": "model-00004-of-00006.safetensors",
+ "model.layers.27.self_attn.o_proj.weight": "model-00004-of-00006.safetensors",
+ "model.layers.27.self_attn.q_proj.weight": "model-00004-of-00006.safetensors",
+ "model.layers.27.self_attn.v_proj.weight": "model-00004-of-00006.safetensors",
+ "model.layers.28.input_layernorm.weight": "model-00004-of-00006.safetensors",
+ "model.layers.28.mlp.down_proj.weight": "model-00004-of-00006.safetensors",
+ "model.layers.28.mlp.gate_proj.weight": "model-00004-of-00006.safetensors",
+ "model.layers.28.mlp.up_proj.weight": "model-00004-of-00006.safetensors",
+ "model.layers.28.post_attention_layernorm.weight": "model-00004-of-00006.safetensors",
+ "model.layers.28.self_attn.k_proj.weight": "model-00004-of-00006.safetensors",
+ "model.layers.28.self_attn.o_proj.weight": "model-00004-of-00006.safetensors",
+ "model.layers.28.self_attn.q_proj.weight": "model-00004-of-00006.safetensors",
+ "model.layers.28.self_attn.v_proj.weight": "model-00004-of-00006.safetensors",
+ "model.layers.29.input_layernorm.weight": "model-00004-of-00006.safetensors",
+ "model.layers.29.mlp.down_proj.weight": "model-00004-of-00006.safetensors",
+ "model.layers.29.mlp.gate_proj.weight": "model-00004-of-00006.safetensors",
+ "model.layers.29.mlp.up_proj.weight": "model-00004-of-00006.safetensors",
+ "model.layers.29.post_attention_layernorm.weight": "model-00004-of-00006.safetensors",
+ "model.layers.29.self_attn.k_proj.weight": "model-00004-of-00006.safetensors",
+ "model.layers.29.self_attn.o_proj.weight": "model-00004-of-00006.safetensors",
+ "model.layers.29.self_attn.q_proj.weight": "model-00004-of-00006.safetensors",
+ "model.layers.29.self_attn.v_proj.weight": "model-00004-of-00006.safetensors",
+ "model.layers.3.input_layernorm.weight": "model-00001-of-00006.safetensors",
+ "model.layers.3.mlp.down_proj.weight": "model-00001-of-00006.safetensors",
+ "model.layers.3.mlp.gate_proj.weight": "model-00001-of-00006.safetensors",
+ "model.layers.3.mlp.up_proj.weight": "model-00001-of-00006.safetensors",
+ "model.layers.3.post_attention_layernorm.weight": "model-00001-of-00006.safetensors",
+ "model.layers.3.self_attn.k_proj.weight": "model-00001-of-00006.safetensors",
+ "model.layers.3.self_attn.o_proj.weight": "model-00001-of-00006.safetensors",
+ "model.layers.3.self_attn.q_proj.weight": "model-00001-of-00006.safetensors",
+ "model.layers.3.self_attn.v_proj.weight": "model-00001-of-00006.safetensors",
+ "model.layers.30.input_layernorm.weight": "model-00005-of-00006.safetensors",
+ "model.layers.30.mlp.down_proj.weight": "model-00005-of-00006.safetensors",
+ "model.layers.30.mlp.gate_proj.weight": "model-00004-of-00006.safetensors",
+ "model.layers.30.mlp.up_proj.weight": "model-00004-of-00006.safetensors",
+ "model.layers.30.post_attention_layernorm.weight": "model-00005-of-00006.safetensors",
+ "model.layers.30.self_attn.k_proj.weight": "model-00004-of-00006.safetensors",
+ "model.layers.30.self_attn.o_proj.weight": "model-00004-of-00006.safetensors",
+ "model.layers.30.self_attn.q_proj.weight": "model-00004-of-00006.safetensors",
+ "model.layers.30.self_attn.v_proj.weight": "model-00004-of-00006.safetensors",
+ "model.layers.31.input_layernorm.weight": "model-00005-of-00006.safetensors",
+ "model.layers.31.mlp.down_proj.weight": "model-00005-of-00006.safetensors",
+ "model.layers.31.mlp.gate_proj.weight": "model-00005-of-00006.safetensors",
+ "model.layers.31.mlp.up_proj.weight": "model-00005-of-00006.safetensors",
+ "model.layers.31.post_attention_layernorm.weight": "model-00005-of-00006.safetensors",
+ "model.layers.31.self_attn.k_proj.weight": "model-00005-of-00006.safetensors",
+ "model.layers.31.self_attn.o_proj.weight": "model-00005-of-00006.safetensors",
+ "model.layers.31.self_attn.q_proj.weight": "model-00005-of-00006.safetensors",
+ "model.layers.31.self_attn.v_proj.weight": "model-00005-of-00006.safetensors",
+ "model.layers.32.input_layernorm.weight": "model-00005-of-00006.safetensors",
+ "model.layers.32.mlp.down_proj.weight": "model-00005-of-00006.safetensors",
+ "model.layers.32.mlp.gate_proj.weight": "model-00005-of-00006.safetensors",
+ "model.layers.32.mlp.up_proj.weight": "model-00005-of-00006.safetensors",
+ "model.layers.32.post_attention_layernorm.weight": "model-00005-of-00006.safetensors",
+ "model.layers.32.self_attn.k_proj.weight": "model-00005-of-00006.safetensors",
+ "model.layers.32.self_attn.o_proj.weight": "model-00005-of-00006.safetensors",
+ "model.layers.32.self_attn.q_proj.weight": "model-00005-of-00006.safetensors",
+ "model.layers.32.self_attn.v_proj.weight": "model-00005-of-00006.safetensors",
+ "model.layers.33.input_layernorm.weight": "model-00005-of-00006.safetensors",
+ "model.layers.33.mlp.down_proj.weight": "model-00005-of-00006.safetensors",
+ "model.layers.33.mlp.gate_proj.weight": "model-00005-of-00006.safetensors",
+ "model.layers.33.mlp.up_proj.weight": "model-00005-of-00006.safetensors",
+ "model.layers.33.post_attention_layernorm.weight": "model-00005-of-00006.safetensors",
+ "model.layers.33.self_attn.k_proj.weight": "model-00005-of-00006.safetensors",
+ "model.layers.33.self_attn.o_proj.weight": "model-00005-of-00006.safetensors",
+ "model.layers.33.self_attn.q_proj.weight": "model-00005-of-00006.safetensors",
+ "model.layers.33.self_attn.v_proj.weight": "model-00005-of-00006.safetensors",
+ "model.layers.34.input_layernorm.weight": "model-00005-of-00006.safetensors",
+ "model.layers.34.mlp.down_proj.weight": "model-00005-of-00006.safetensors",
+ "model.layers.34.mlp.gate_proj.weight": "model-00005-of-00006.safetensors",
+ "model.layers.34.mlp.up_proj.weight": "model-00005-of-00006.safetensors",
+ "model.layers.34.post_attention_layernorm.weight": "model-00005-of-00006.safetensors",
+ "model.layers.34.self_attn.k_proj.weight": "model-00005-of-00006.safetensors",
+ "model.layers.34.self_attn.o_proj.weight": "model-00005-of-00006.safetensors",
+ "model.layers.34.self_attn.q_proj.weight": "model-00005-of-00006.safetensors",
+ "model.layers.34.self_attn.v_proj.weight": "model-00005-of-00006.safetensors",
+ "model.layers.35.input_layernorm.weight": "model-00005-of-00006.safetensors",
+ "model.layers.35.mlp.down_proj.weight": "model-00005-of-00006.safetensors",
+ "model.layers.35.mlp.gate_proj.weight": "model-00005-of-00006.safetensors",
+ "model.layers.35.mlp.up_proj.weight": "model-00005-of-00006.safetensors",
+ "model.layers.35.post_attention_layernorm.weight": "model-00005-of-00006.safetensors",
+ "model.layers.35.self_attn.k_proj.weight": "model-00005-of-00006.safetensors",
+ "model.layers.35.self_attn.o_proj.weight": "model-00005-of-00006.safetensors",
+ "model.layers.35.self_attn.q_proj.weight": "model-00005-of-00006.safetensors",
+ "model.layers.35.self_attn.v_proj.weight": "model-00005-of-00006.safetensors",
+ "model.layers.36.input_layernorm.weight": "model-00005-of-00006.safetensors",
+ "model.layers.36.mlp.down_proj.weight": "model-00005-of-00006.safetensors",
+ "model.layers.36.mlp.gate_proj.weight": "model-00005-of-00006.safetensors",
+ "model.layers.36.mlp.up_proj.weight": "model-00005-of-00006.safetensors",
+ "model.layers.36.post_attention_layernorm.weight": "model-00005-of-00006.safetensors",
+ "model.layers.36.self_attn.k_proj.weight": "model-00005-of-00006.safetensors",
+ "model.layers.36.self_attn.o_proj.weight": "model-00005-of-00006.safetensors",
+ "model.layers.36.self_attn.q_proj.weight": "model-00005-of-00006.safetensors",
+ "model.layers.36.self_attn.v_proj.weight": "model-00005-of-00006.safetensors",
+ "model.layers.37.input_layernorm.weight": "model-00005-of-00006.safetensors",
+ "model.layers.37.mlp.down_proj.weight": "model-00005-of-00006.safetensors",
+ "model.layers.37.mlp.gate_proj.weight": "model-00005-of-00006.safetensors",
+ "model.layers.37.mlp.up_proj.weight": "model-00005-of-00006.safetensors",
+ "model.layers.37.post_attention_layernorm.weight": "model-00005-of-00006.safetensors",
+ "model.layers.37.self_attn.k_proj.weight": "model-00005-of-00006.safetensors",
+ "model.layers.37.self_attn.o_proj.weight": "model-00005-of-00006.safetensors",
+ "model.layers.37.self_attn.q_proj.weight": "model-00005-of-00006.safetensors",
+ "model.layers.37.self_attn.v_proj.weight": "model-00005-of-00006.safetensors",
+ "model.layers.38.input_layernorm.weight": "model-00006-of-00006.safetensors",
+ "model.layers.38.mlp.down_proj.weight": "model-00006-of-00006.safetensors",
+ "model.layers.38.mlp.gate_proj.weight": "model-00005-of-00006.safetensors",
+ "model.layers.38.mlp.up_proj.weight": "model-00006-of-00006.safetensors",
+ "model.layers.38.post_attention_layernorm.weight": "model-00006-of-00006.safetensors",
+ "model.layers.38.self_attn.k_proj.weight": "model-00005-of-00006.safetensors",
+ "model.layers.38.self_attn.o_proj.weight": "model-00005-of-00006.safetensors",
+ "model.layers.38.self_attn.q_proj.weight": "model-00005-of-00006.safetensors",
+ "model.layers.38.self_attn.v_proj.weight": "model-00005-of-00006.safetensors",
+ "model.layers.39.input_layernorm.weight": "model-00006-of-00006.safetensors",
+ "model.layers.39.mlp.down_proj.weight": "model-00006-of-00006.safetensors",
+ "model.layers.39.mlp.gate_proj.weight": "model-00006-of-00006.safetensors",
+ "model.layers.39.mlp.up_proj.weight": "model-00006-of-00006.safetensors",
+ "model.layers.39.post_attention_layernorm.weight": "model-00006-of-00006.safetensors",
+ "model.layers.39.self_attn.k_proj.weight": "model-00006-of-00006.safetensors",
+ "model.layers.39.self_attn.o_proj.weight": "model-00006-of-00006.safetensors",
+ "model.layers.39.self_attn.q_proj.weight": "model-00006-of-00006.safetensors",
+ "model.layers.39.self_attn.v_proj.weight": "model-00006-of-00006.safetensors",
+ "model.layers.4.input_layernorm.weight": "model-00001-of-00006.safetensors",
+ "model.layers.4.mlp.down_proj.weight": "model-00001-of-00006.safetensors",
+ "model.layers.4.mlp.gate_proj.weight": "model-00001-of-00006.safetensors",
+ "model.layers.4.mlp.up_proj.weight": "model-00001-of-00006.safetensors",
+ "model.layers.4.post_attention_layernorm.weight": "model-00001-of-00006.safetensors",
+ "model.layers.4.self_attn.k_proj.weight": "model-00001-of-00006.safetensors",
+ "model.layers.4.self_attn.o_proj.weight": "model-00001-of-00006.safetensors",
+ "model.layers.4.self_attn.q_proj.weight": "model-00001-of-00006.safetensors",
+ "model.layers.4.self_attn.v_proj.weight": "model-00001-of-00006.safetensors",
+ "model.layers.5.input_layernorm.weight": "model-00001-of-00006.safetensors",
+ "model.layers.5.mlp.down_proj.weight": "model-00001-of-00006.safetensors",
+ "model.layers.5.mlp.gate_proj.weight": "model-00001-of-00006.safetensors",
+ "model.layers.5.mlp.up_proj.weight": "model-00001-of-00006.safetensors",
+ "model.layers.5.post_attention_layernorm.weight": "model-00001-of-00006.safetensors",
+ "model.layers.5.self_attn.k_proj.weight": "model-00001-of-00006.safetensors",
+ "model.layers.5.self_attn.o_proj.weight": "model-00001-of-00006.safetensors",
+ "model.layers.5.self_attn.q_proj.weight": "model-00001-of-00006.safetensors",
+ "model.layers.5.self_attn.v_proj.weight": "model-00001-of-00006.safetensors",
+ "model.layers.6.input_layernorm.weight": "model-00001-of-00006.safetensors",
+ "model.layers.6.mlp.down_proj.weight": "model-00001-of-00006.safetensors",
+ "model.layers.6.mlp.gate_proj.weight": "model-00001-of-00006.safetensors",
+ "model.layers.6.mlp.up_proj.weight": "model-00001-of-00006.safetensors",
+ "model.layers.6.post_attention_layernorm.weight": "model-00001-of-00006.safetensors",
+ "model.layers.6.self_attn.k_proj.weight": "model-00001-of-00006.safetensors",
+ "model.layers.6.self_attn.o_proj.weight": "model-00001-of-00006.safetensors",
+ "model.layers.6.self_attn.q_proj.weight": "model-00001-of-00006.safetensors",
+ "model.layers.6.self_attn.v_proj.weight": "model-00001-of-00006.safetensors",
+ "model.layers.7.input_layernorm.weight": "model-00002-of-00006.safetensors",
+ "model.layers.7.mlp.down_proj.weight": "model-00002-of-00006.safetensors",
+ "model.layers.7.mlp.gate_proj.weight": "model-00002-of-00006.safetensors",
+ "model.layers.7.mlp.up_proj.weight": "model-00002-of-00006.safetensors",
+ "model.layers.7.post_attention_layernorm.weight": "model-00002-of-00006.safetensors",
+ "model.layers.7.self_attn.k_proj.weight": "model-00001-of-00006.safetensors",
+ "model.layers.7.self_attn.o_proj.weight": "model-00001-of-00006.safetensors",
+ "model.layers.7.self_attn.q_proj.weight": "model-00001-of-00006.safetensors",
+ "model.layers.7.self_attn.v_proj.weight": "model-00001-of-00006.safetensors",
+ "model.layers.8.input_layernorm.weight": "model-00002-of-00006.safetensors",
+ "model.layers.8.mlp.down_proj.weight": "model-00002-of-00006.safetensors",
+ "model.layers.8.mlp.gate_proj.weight": "model-00002-of-00006.safetensors",
+ "model.layers.8.mlp.up_proj.weight": "model-00002-of-00006.safetensors",
+ "model.layers.8.post_attention_layernorm.weight": "model-00002-of-00006.safetensors",
+ "model.layers.8.self_attn.k_proj.weight": "model-00002-of-00006.safetensors",
+ "model.layers.8.self_attn.o_proj.weight": "model-00002-of-00006.safetensors",
+ "model.layers.8.self_attn.q_proj.weight": "model-00002-of-00006.safetensors",
+ "model.layers.8.self_attn.v_proj.weight": "model-00002-of-00006.safetensors",
+ "model.layers.9.input_layernorm.weight": "model-00002-of-00006.safetensors",
+ "model.layers.9.mlp.down_proj.weight": "model-00002-of-00006.safetensors",
+ "model.layers.9.mlp.gate_proj.weight": "model-00002-of-00006.safetensors",
+ "model.layers.9.mlp.up_proj.weight": "model-00002-of-00006.safetensors",
+ "model.layers.9.post_attention_layernorm.weight": "model-00002-of-00006.safetensors",
+ "model.layers.9.self_attn.k_proj.weight": "model-00002-of-00006.safetensors",
+ "model.layers.9.self_attn.o_proj.weight": "model-00002-of-00006.safetensors",
+ "model.layers.9.self_attn.q_proj.weight": "model-00002-of-00006.safetensors",
+ "model.layers.9.self_attn.v_proj.weight": "model-00002-of-00006.safetensors",
+ "model.mm_projector.0.bias": "model-00006-of-00006.safetensors",
+ "model.mm_projector.0.weight": "model-00006-of-00006.safetensors",
+ "model.mm_projector.2.bias": "model-00006-of-00006.safetensors",
+ "model.mm_projector.2.weight": "model-00006-of-00006.safetensors",
+ "model.norm.weight": "model-00006-of-00006.safetensors",
+ "model.vision_tower.vision_tower.vision_model.embeddings.class_embedding": "model-00006-of-00006.safetensors",
+ "model.vision_tower.vision_tower.vision_model.embeddings.patch_embedding.weight": "model-00006-of-00006.safetensors",
+ "model.vision_tower.vision_tower.vision_model.embeddings.position_embedding.weight": "model-00006-of-00006.safetensors",
+ "model.vision_tower.vision_tower.vision_model.encoder.layers.0.layer_norm1.bias": "model-00006-of-00006.safetensors",
+ "model.vision_tower.vision_tower.vision_model.encoder.layers.0.layer_norm1.weight": "model-00006-of-00006.safetensors",
+ "model.vision_tower.vision_tower.vision_model.encoder.layers.0.layer_norm2.bias": "model-00006-of-00006.safetensors",
+ "model.vision_tower.vision_tower.vision_model.encoder.layers.0.layer_norm2.weight": "model-00006-of-00006.safetensors",
+ "model.vision_tower.vision_tower.vision_model.encoder.layers.0.mlp.fc1.bias": "model-00006-of-00006.safetensors",
+ "model.vision_tower.vision_tower.vision_model.encoder.layers.0.mlp.fc1.weight": "model-00006-of-00006.safetensors",
+ "model.vision_tower.vision_tower.vision_model.encoder.layers.0.mlp.fc2.bias": "model-00006-of-00006.safetensors",
+ "model.vision_tower.vision_tower.vision_model.encoder.layers.0.mlp.fc2.weight": "model-00006-of-00006.safetensors",
+ "model.vision_tower.vision_tower.vision_model.encoder.layers.0.self_attn.k_proj.bias": "model-00006-of-00006.safetensors",
+ "model.vision_tower.vision_tower.vision_model.encoder.layers.0.self_attn.k_proj.weight": "model-00006-of-00006.safetensors",
+ "model.vision_tower.vision_tower.vision_model.encoder.layers.0.self_attn.out_proj.bias": "model-00006-of-00006.safetensors",
+ "model.vision_tower.vision_tower.vision_model.encoder.layers.0.self_attn.out_proj.weight": "model-00006-of-00006.safetensors",
+ "model.vision_tower.vision_tower.vision_model.encoder.layers.0.self_attn.q_proj.bias": "model-00006-of-00006.safetensors",
+ "model.vision_tower.vision_tower.vision_model.encoder.layers.0.self_attn.q_proj.weight": "model-00006-of-00006.safetensors",
+ "model.vision_tower.vision_tower.vision_model.encoder.layers.0.self_attn.v_proj.bias": "model-00006-of-00006.safetensors",
+ "model.vision_tower.vision_tower.vision_model.encoder.layers.0.self_attn.v_proj.weight": "model-00006-of-00006.safetensors",
+ "model.vision_tower.vision_tower.vision_model.encoder.layers.1.layer_norm1.bias": "model-00006-of-00006.safetensors",
+ "model.vision_tower.vision_tower.vision_model.encoder.layers.1.layer_norm1.weight": "model-00006-of-00006.safetensors",
+ "model.vision_tower.vision_tower.vision_model.encoder.layers.1.layer_norm2.bias": "model-00006-of-00006.safetensors",
+ "model.vision_tower.vision_tower.vision_model.encoder.layers.1.layer_norm2.weight": "model-00006-of-00006.safetensors",
+ "model.vision_tower.vision_tower.vision_model.encoder.layers.1.mlp.fc1.bias": "model-00006-of-00006.safetensors",
+ "model.vision_tower.vision_tower.vision_model.encoder.layers.1.mlp.fc1.weight": "model-00006-of-00006.safetensors",
+ "model.vision_tower.vision_tower.vision_model.encoder.layers.1.mlp.fc2.bias": "model-00006-of-00006.safetensors",
+ "model.vision_tower.vision_tower.vision_model.encoder.layers.1.mlp.fc2.weight": "model-00006-of-00006.safetensors",
+ "model.vision_tower.vision_tower.vision_model.encoder.layers.1.self_attn.k_proj.bias": "model-00006-of-00006.safetensors",
+ "model.vision_tower.vision_tower.vision_model.encoder.layers.1.self_attn.k_proj.weight": "model-00006-of-00006.safetensors",
+ "model.vision_tower.vision_tower.vision_model.encoder.layers.1.self_attn.out_proj.bias": "model-00006-of-00006.safetensors",
+ "model.vision_tower.vision_tower.vision_model.encoder.layers.1.self_attn.out_proj.weight": "model-00006-of-00006.safetensors",
+ "model.vision_tower.vision_tower.vision_model.encoder.layers.1.self_attn.q_proj.bias": "model-00006-of-00006.safetensors",
+ "model.vision_tower.vision_tower.vision_model.encoder.layers.1.self_attn.q_proj.weight": "model-00006-of-00006.safetensors",
+ "model.vision_tower.vision_tower.vision_model.encoder.layers.1.self_attn.v_proj.bias": "model-00006-of-00006.safetensors",
+ "model.vision_tower.vision_tower.vision_model.encoder.layers.1.self_attn.v_proj.weight": "model-00006-of-00006.safetensors",
+ "model.vision_tower.vision_tower.vision_model.encoder.layers.10.layer_norm1.bias": "model-00006-of-00006.safetensors",
+ "model.vision_tower.vision_tower.vision_model.encoder.layers.10.layer_norm1.weight": "model-00006-of-00006.safetensors",
+ "model.vision_tower.vision_tower.vision_model.encoder.layers.10.layer_norm2.bias": "model-00006-of-00006.safetensors",
+ "model.vision_tower.vision_tower.vision_model.encoder.layers.10.layer_norm2.weight": "model-00006-of-00006.safetensors",
+ "model.vision_tower.vision_tower.vision_model.encoder.layers.10.mlp.fc1.bias": "model-00006-of-00006.safetensors",
+ "model.vision_tower.vision_tower.vision_model.encoder.layers.10.mlp.fc1.weight": "model-00006-of-00006.safetensors",
+ "model.vision_tower.vision_tower.vision_model.encoder.layers.10.mlp.fc2.bias": "model-00006-of-00006.safetensors",
+ "model.vision_tower.vision_tower.vision_model.encoder.layers.10.mlp.fc2.weight": "model-00006-of-00006.safetensors",
+ "model.vision_tower.vision_tower.vision_model.encoder.layers.10.self_attn.k_proj.bias": "model-00006-of-00006.safetensors",
+ "model.vision_tower.vision_tower.vision_model.encoder.layers.10.self_attn.k_proj.weight": "model-00006-of-00006.safetensors",
+ "model.vision_tower.vision_tower.vision_model.encoder.layers.10.self_attn.out_proj.bias": "model-00006-of-00006.safetensors",
+ "model.vision_tower.vision_tower.vision_model.encoder.layers.10.self_attn.out_proj.weight": "model-00006-of-00006.safetensors",
+ "model.vision_tower.vision_tower.vision_model.encoder.layers.10.self_attn.q_proj.bias": "model-00006-of-00006.safetensors",
+ "model.vision_tower.vision_tower.vision_model.encoder.layers.10.self_attn.q_proj.weight": "model-00006-of-00006.safetensors",
+ "model.vision_tower.vision_tower.vision_model.encoder.layers.10.self_attn.v_proj.bias": "model-00006-of-00006.safetensors",
+ "model.vision_tower.vision_tower.vision_model.encoder.layers.10.self_attn.v_proj.weight": "model-00006-of-00006.safetensors",
+ "model.vision_tower.vision_tower.vision_model.encoder.layers.11.layer_norm1.bias": "model-00006-of-00006.safetensors",
+ "model.vision_tower.vision_tower.vision_model.encoder.layers.11.layer_norm1.weight": "model-00006-of-00006.safetensors",
+ "model.vision_tower.vision_tower.vision_model.encoder.layers.11.layer_norm2.bias": "model-00006-of-00006.safetensors",
+ "model.vision_tower.vision_tower.vision_model.encoder.layers.11.layer_norm2.weight": "model-00006-of-00006.safetensors",
+ "model.vision_tower.vision_tower.vision_model.encoder.layers.11.mlp.fc1.bias": "model-00006-of-00006.safetensors",
+ "model.vision_tower.vision_tower.vision_model.encoder.layers.11.mlp.fc1.weight": "model-00006-of-00006.safetensors",
+ "model.vision_tower.vision_tower.vision_model.encoder.layers.11.mlp.fc2.bias": "model-00006-of-00006.safetensors",
+ "model.vision_tower.vision_tower.vision_model.encoder.layers.11.mlp.fc2.weight": "model-00006-of-00006.safetensors",
+ "model.vision_tower.vision_tower.vision_model.encoder.layers.11.self_attn.k_proj.bias": "model-00006-of-00006.safetensors",
+ "model.vision_tower.vision_tower.vision_model.encoder.layers.11.self_attn.k_proj.weight": "model-00006-of-00006.safetensors",
+ "model.vision_tower.vision_tower.vision_model.encoder.layers.11.self_attn.out_proj.bias": "model-00006-of-00006.safetensors",
+ "model.vision_tower.vision_tower.vision_model.encoder.layers.11.self_attn.out_proj.weight": "model-00006-of-00006.safetensors",
+ "model.vision_tower.vision_tower.vision_model.encoder.layers.11.self_attn.q_proj.bias": "model-00006-of-00006.safetensors",
+ "model.vision_tower.vision_tower.vision_model.encoder.layers.11.self_attn.q_proj.weight": "model-00006-of-00006.safetensors",
+ "model.vision_tower.vision_tower.vision_model.encoder.layers.11.self_attn.v_proj.bias": "model-00006-of-00006.safetensors",
+ "model.vision_tower.vision_tower.vision_model.encoder.layers.11.self_attn.v_proj.weight": "model-00006-of-00006.safetensors",
+ "model.vision_tower.vision_tower.vision_model.encoder.layers.12.layer_norm1.bias": "model-00006-of-00006.safetensors",
+ "model.vision_tower.vision_tower.vision_model.encoder.layers.12.layer_norm1.weight": "model-00006-of-00006.safetensors",
+ "model.vision_tower.vision_tower.vision_model.encoder.layers.12.layer_norm2.bias": "model-00006-of-00006.safetensors",
+ "model.vision_tower.vision_tower.vision_model.encoder.layers.12.layer_norm2.weight": "model-00006-of-00006.safetensors",
+ "model.vision_tower.vision_tower.vision_model.encoder.layers.12.mlp.fc1.bias": "model-00006-of-00006.safetensors",
+ "model.vision_tower.vision_tower.vision_model.encoder.layers.12.mlp.fc1.weight": "model-00006-of-00006.safetensors",
+ "model.vision_tower.vision_tower.vision_model.encoder.layers.12.mlp.fc2.bias": "model-00006-of-00006.safetensors",
+ "model.vision_tower.vision_tower.vision_model.encoder.layers.12.mlp.fc2.weight": "model-00006-of-00006.safetensors",
+ "model.vision_tower.vision_tower.vision_model.encoder.layers.12.self_attn.k_proj.bias": "model-00006-of-00006.safetensors",
+ "model.vision_tower.vision_tower.vision_model.encoder.layers.12.self_attn.k_proj.weight": "model-00006-of-00006.safetensors",
+ "model.vision_tower.vision_tower.vision_model.encoder.layers.12.self_attn.out_proj.bias": "model-00006-of-00006.safetensors",
+ "model.vision_tower.vision_tower.vision_model.encoder.layers.12.self_attn.out_proj.weight": "model-00006-of-00006.safetensors",
+ "model.vision_tower.vision_tower.vision_model.encoder.layers.12.self_attn.q_proj.bias": "model-00006-of-00006.safetensors",
+ "model.vision_tower.vision_tower.vision_model.encoder.layers.12.self_attn.q_proj.weight": "model-00006-of-00006.safetensors",
+ "model.vision_tower.vision_tower.vision_model.encoder.layers.12.self_attn.v_proj.bias": "model-00006-of-00006.safetensors",
+ "model.vision_tower.vision_tower.vision_model.encoder.layers.12.self_attn.v_proj.weight": "model-00006-of-00006.safetensors",
+ "model.vision_tower.vision_tower.vision_model.encoder.layers.13.layer_norm1.bias": "model-00006-of-00006.safetensors",
+ "model.vision_tower.vision_tower.vision_model.encoder.layers.13.layer_norm1.weight": "model-00006-of-00006.safetensors",
+ "model.vision_tower.vision_tower.vision_model.encoder.layers.13.layer_norm2.bias": "model-00006-of-00006.safetensors",
+ "model.vision_tower.vision_tower.vision_model.encoder.layers.13.layer_norm2.weight": "model-00006-of-00006.safetensors",
+ "model.vision_tower.vision_tower.vision_model.encoder.layers.13.mlp.fc1.bias": "model-00006-of-00006.safetensors",
+ "model.vision_tower.vision_tower.vision_model.encoder.layers.13.mlp.fc1.weight": "model-00006-of-00006.safetensors",
+ "model.vision_tower.vision_tower.vision_model.encoder.layers.13.mlp.fc2.bias": "model-00006-of-00006.safetensors",
+ "model.vision_tower.vision_tower.vision_model.encoder.layers.13.mlp.fc2.weight": "model-00006-of-00006.safetensors",
+ "model.vision_tower.vision_tower.vision_model.encoder.layers.13.self_attn.k_proj.bias": "model-00006-of-00006.safetensors",
+ "model.vision_tower.vision_tower.vision_model.encoder.layers.13.self_attn.k_proj.weight": "model-00006-of-00006.safetensors",
+ "model.vision_tower.vision_tower.vision_model.encoder.layers.13.self_attn.out_proj.bias": "model-00006-of-00006.safetensors",
+ "model.vision_tower.vision_tower.vision_model.encoder.layers.13.self_attn.out_proj.weight": "model-00006-of-00006.safetensors",
+ "model.vision_tower.vision_tower.vision_model.encoder.layers.13.self_attn.q_proj.bias": "model-00006-of-00006.safetensors",
+ "model.vision_tower.vision_tower.vision_model.encoder.layers.13.self_attn.q_proj.weight": "model-00006-of-00006.safetensors",
+ "model.vision_tower.vision_tower.vision_model.encoder.layers.13.self_attn.v_proj.bias": "model-00006-of-00006.safetensors",
+ "model.vision_tower.vision_tower.vision_model.encoder.layers.13.self_attn.v_proj.weight": "model-00006-of-00006.safetensors",
+ "model.vision_tower.vision_tower.vision_model.encoder.layers.14.layer_norm1.bias": "model-00006-of-00006.safetensors",
+ "model.vision_tower.vision_tower.vision_model.encoder.layers.14.layer_norm1.weight": "model-00006-of-00006.safetensors",
+ "model.vision_tower.vision_tower.vision_model.encoder.layers.14.layer_norm2.bias": "model-00006-of-00006.safetensors",
+ "model.vision_tower.vision_tower.vision_model.encoder.layers.14.layer_norm2.weight": "model-00006-of-00006.safetensors",
+ "model.vision_tower.vision_tower.vision_model.encoder.layers.14.mlp.fc1.bias": "model-00006-of-00006.safetensors",
+ "model.vision_tower.vision_tower.vision_model.encoder.layers.14.mlp.fc1.weight": "model-00006-of-00006.safetensors",
+ "model.vision_tower.vision_tower.vision_model.encoder.layers.14.mlp.fc2.bias": "model-00006-of-00006.safetensors",
+ "model.vision_tower.vision_tower.vision_model.encoder.layers.14.mlp.fc2.weight": "model-00006-of-00006.safetensors",
+ "model.vision_tower.vision_tower.vision_model.encoder.layers.14.self_attn.k_proj.bias": "model-00006-of-00006.safetensors",
+ "model.vision_tower.vision_tower.vision_model.encoder.layers.14.self_attn.k_proj.weight": "model-00006-of-00006.safetensors",
+ "model.vision_tower.vision_tower.vision_model.encoder.layers.14.self_attn.out_proj.bias": "model-00006-of-00006.safetensors",
+ "model.vision_tower.vision_tower.vision_model.encoder.layers.14.self_attn.out_proj.weight": "model-00006-of-00006.safetensors",
+ "model.vision_tower.vision_tower.vision_model.encoder.layers.14.self_attn.q_proj.bias": "model-00006-of-00006.safetensors",
+ "model.vision_tower.vision_tower.vision_model.encoder.layers.14.self_attn.q_proj.weight": "model-00006-of-00006.safetensors",
+ "model.vision_tower.vision_tower.vision_model.encoder.layers.14.self_attn.v_proj.bias": "model-00006-of-00006.safetensors",
+ "model.vision_tower.vision_tower.vision_model.encoder.layers.14.self_attn.v_proj.weight": "model-00006-of-00006.safetensors",
+ "model.vision_tower.vision_tower.vision_model.encoder.layers.15.layer_norm1.bias": "model-00006-of-00006.safetensors",
+ "model.vision_tower.vision_tower.vision_model.encoder.layers.15.layer_norm1.weight": "model-00006-of-00006.safetensors",
+ "model.vision_tower.vision_tower.vision_model.encoder.layers.15.layer_norm2.bias": "model-00006-of-00006.safetensors",
+ "model.vision_tower.vision_tower.vision_model.encoder.layers.15.layer_norm2.weight": "model-00006-of-00006.safetensors",
+ "model.vision_tower.vision_tower.vision_model.encoder.layers.15.mlp.fc1.bias": "model-00006-of-00006.safetensors",
+ "model.vision_tower.vision_tower.vision_model.encoder.layers.15.mlp.fc1.weight": "model-00006-of-00006.safetensors",
+ "model.vision_tower.vision_tower.vision_model.encoder.layers.15.mlp.fc2.bias": "model-00006-of-00006.safetensors",
+ "model.vision_tower.vision_tower.vision_model.encoder.layers.15.mlp.fc2.weight": "model-00006-of-00006.safetensors",
+ "model.vision_tower.vision_tower.vision_model.encoder.layers.15.self_attn.k_proj.bias": "model-00006-of-00006.safetensors",
+ "model.vision_tower.vision_tower.vision_model.encoder.layers.15.self_attn.k_proj.weight": "model-00006-of-00006.safetensors",
+ "model.vision_tower.vision_tower.vision_model.encoder.layers.15.self_attn.out_proj.bias": "model-00006-of-00006.safetensors",
+ "model.vision_tower.vision_tower.vision_model.encoder.layers.15.self_attn.out_proj.weight": "model-00006-of-00006.safetensors",
+ "model.vision_tower.vision_tower.vision_model.encoder.layers.15.self_attn.q_proj.bias": "model-00006-of-00006.safetensors",
+ "model.vision_tower.vision_tower.vision_model.encoder.layers.15.self_attn.q_proj.weight": "model-00006-of-00006.safetensors",
+ "model.vision_tower.vision_tower.vision_model.encoder.layers.15.self_attn.v_proj.bias": "model-00006-of-00006.safetensors",
+ "model.vision_tower.vision_tower.vision_model.encoder.layers.15.self_attn.v_proj.weight": "model-00006-of-00006.safetensors",
+ "model.vision_tower.vision_tower.vision_model.encoder.layers.16.layer_norm1.bias": "model-00006-of-00006.safetensors",
+ "model.vision_tower.vision_tower.vision_model.encoder.layers.16.layer_norm1.weight": "model-00006-of-00006.safetensors",
+ "model.vision_tower.vision_tower.vision_model.encoder.layers.16.layer_norm2.bias": "model-00006-of-00006.safetensors",
+ "model.vision_tower.vision_tower.vision_model.encoder.layers.16.layer_norm2.weight": "model-00006-of-00006.safetensors",
+ "model.vision_tower.vision_tower.vision_model.encoder.layers.16.mlp.fc1.bias": "model-00006-of-00006.safetensors",
+ "model.vision_tower.vision_tower.vision_model.encoder.layers.16.mlp.fc1.weight": "model-00006-of-00006.safetensors",
+ "model.vision_tower.vision_tower.vision_model.encoder.layers.16.mlp.fc2.bias": "model-00006-of-00006.safetensors",
+ "model.vision_tower.vision_tower.vision_model.encoder.layers.16.mlp.fc2.weight": "model-00006-of-00006.safetensors",
+ "model.vision_tower.vision_tower.vision_model.encoder.layers.16.self_attn.k_proj.bias": "model-00006-of-00006.safetensors",
+ "model.vision_tower.vision_tower.vision_model.encoder.layers.16.self_attn.k_proj.weight": "model-00006-of-00006.safetensors",
+ "model.vision_tower.vision_tower.vision_model.encoder.layers.16.self_attn.out_proj.bias": "model-00006-of-00006.safetensors",
+ "model.vision_tower.vision_tower.vision_model.encoder.layers.16.self_attn.out_proj.weight": "model-00006-of-00006.safetensors",
+ "model.vision_tower.vision_tower.vision_model.encoder.layers.16.self_attn.q_proj.bias": "model-00006-of-00006.safetensors",
+ "model.vision_tower.vision_tower.vision_model.encoder.layers.16.self_attn.q_proj.weight": "model-00006-of-00006.safetensors",
+ "model.vision_tower.vision_tower.vision_model.encoder.layers.16.self_attn.v_proj.bias": "model-00006-of-00006.safetensors",
+ "model.vision_tower.vision_tower.vision_model.encoder.layers.16.self_attn.v_proj.weight": "model-00006-of-00006.safetensors",
+ "model.vision_tower.vision_tower.vision_model.encoder.layers.17.layer_norm1.bias": "model-00006-of-00006.safetensors",
+ "model.vision_tower.vision_tower.vision_model.encoder.layers.17.layer_norm1.weight": "model-00006-of-00006.safetensors",
+ "model.vision_tower.vision_tower.vision_model.encoder.layers.17.layer_norm2.bias": "model-00006-of-00006.safetensors",
+ "model.vision_tower.vision_tower.vision_model.encoder.layers.17.layer_norm2.weight": "model-00006-of-00006.safetensors",
+ "model.vision_tower.vision_tower.vision_model.encoder.layers.17.mlp.fc1.bias": "model-00006-of-00006.safetensors",
+ "model.vision_tower.vision_tower.vision_model.encoder.layers.17.mlp.fc1.weight": "model-00006-of-00006.safetensors",
+ "model.vision_tower.vision_tower.vision_model.encoder.layers.17.mlp.fc2.bias": "model-00006-of-00006.safetensors",
+ "model.vision_tower.vision_tower.vision_model.encoder.layers.17.mlp.fc2.weight": "model-00006-of-00006.safetensors",
+ "model.vision_tower.vision_tower.vision_model.encoder.layers.17.self_attn.k_proj.bias": "model-00006-of-00006.safetensors",
+ "model.vision_tower.vision_tower.vision_model.encoder.layers.17.self_attn.k_proj.weight": "model-00006-of-00006.safetensors",
+ "model.vision_tower.vision_tower.vision_model.encoder.layers.17.self_attn.out_proj.bias": "model-00006-of-00006.safetensors",
+ "model.vision_tower.vision_tower.vision_model.encoder.layers.17.self_attn.out_proj.weight": "model-00006-of-00006.safetensors",
+ "model.vision_tower.vision_tower.vision_model.encoder.layers.17.self_attn.q_proj.bias": "model-00006-of-00006.safetensors",
+ "model.vision_tower.vision_tower.vision_model.encoder.layers.17.self_attn.q_proj.weight": "model-00006-of-00006.safetensors",
+ "model.vision_tower.vision_tower.vision_model.encoder.layers.17.self_attn.v_proj.bias": "model-00006-of-00006.safetensors",
+ "model.vision_tower.vision_tower.vision_model.encoder.layers.17.self_attn.v_proj.weight": "model-00006-of-00006.safetensors",
+ "model.vision_tower.vision_tower.vision_model.encoder.layers.18.layer_norm1.bias": "model-00006-of-00006.safetensors",
+ "model.vision_tower.vision_tower.vision_model.encoder.layers.18.layer_norm1.weight": "model-00006-of-00006.safetensors",
+ "model.vision_tower.vision_tower.vision_model.encoder.layers.18.layer_norm2.bias": "model-00006-of-00006.safetensors",
+ "model.vision_tower.vision_tower.vision_model.encoder.layers.18.layer_norm2.weight": "model-00006-of-00006.safetensors",
+ "model.vision_tower.vision_tower.vision_model.encoder.layers.18.mlp.fc1.bias": "model-00006-of-00006.safetensors",
+ "model.vision_tower.vision_tower.vision_model.encoder.layers.18.mlp.fc1.weight": "model-00006-of-00006.safetensors",
+ "model.vision_tower.vision_tower.vision_model.encoder.layers.18.mlp.fc2.bias": "model-00006-of-00006.safetensors",
+ "model.vision_tower.vision_tower.vision_model.encoder.layers.18.mlp.fc2.weight": "model-00006-of-00006.safetensors",
+ "model.vision_tower.vision_tower.vision_model.encoder.layers.18.self_attn.k_proj.bias": "model-00006-of-00006.safetensors",
+ "model.vision_tower.vision_tower.vision_model.encoder.layers.18.self_attn.k_proj.weight": "model-00006-of-00006.safetensors",
+ "model.vision_tower.vision_tower.vision_model.encoder.layers.18.self_attn.out_proj.bias": "model-00006-of-00006.safetensors",
+ "model.vision_tower.vision_tower.vision_model.encoder.layers.18.self_attn.out_proj.weight": "model-00006-of-00006.safetensors",
+ "model.vision_tower.vision_tower.vision_model.encoder.layers.18.self_attn.q_proj.bias": "model-00006-of-00006.safetensors",
+ "model.vision_tower.vision_tower.vision_model.encoder.layers.18.self_attn.q_proj.weight": "model-00006-of-00006.safetensors",
+ "model.vision_tower.vision_tower.vision_model.encoder.layers.18.self_attn.v_proj.bias": "model-00006-of-00006.safetensors",
+ "model.vision_tower.vision_tower.vision_model.encoder.layers.18.self_attn.v_proj.weight": "model-00006-of-00006.safetensors",
+ "model.vision_tower.vision_tower.vision_model.encoder.layers.19.layer_norm1.bias": "model-00006-of-00006.safetensors",
+ "model.vision_tower.vision_tower.vision_model.encoder.layers.19.layer_norm1.weight": "model-00006-of-00006.safetensors",
+ "model.vision_tower.vision_tower.vision_model.encoder.layers.19.layer_norm2.bias": "model-00006-of-00006.safetensors",
+ "model.vision_tower.vision_tower.vision_model.encoder.layers.19.layer_norm2.weight": "model-00006-of-00006.safetensors",
+ "model.vision_tower.vision_tower.vision_model.encoder.layers.19.mlp.fc1.bias": "model-00006-of-00006.safetensors",
+ "model.vision_tower.vision_tower.vision_model.encoder.layers.19.mlp.fc1.weight": "model-00006-of-00006.safetensors",
+ "model.vision_tower.vision_tower.vision_model.encoder.layers.19.mlp.fc2.bias": "model-00006-of-00006.safetensors",
+ "model.vision_tower.vision_tower.vision_model.encoder.layers.19.mlp.fc2.weight": "model-00006-of-00006.safetensors",
+ "model.vision_tower.vision_tower.vision_model.encoder.layers.19.self_attn.k_proj.bias": "model-00006-of-00006.safetensors",
+ "model.vision_tower.vision_tower.vision_model.encoder.layers.19.self_attn.k_proj.weight": "model-00006-of-00006.safetensors",
+ "model.vision_tower.vision_tower.vision_model.encoder.layers.19.self_attn.out_proj.bias": "model-00006-of-00006.safetensors",
+ "model.vision_tower.vision_tower.vision_model.encoder.layers.19.self_attn.out_proj.weight": "model-00006-of-00006.safetensors",
+ "model.vision_tower.vision_tower.vision_model.encoder.layers.19.self_attn.q_proj.bias": "model-00006-of-00006.safetensors",
+ "model.vision_tower.vision_tower.vision_model.encoder.layers.19.self_attn.q_proj.weight": "model-00006-of-00006.safetensors",
+ "model.vision_tower.vision_tower.vision_model.encoder.layers.19.self_attn.v_proj.bias": "model-00006-of-00006.safetensors",
+ "model.vision_tower.vision_tower.vision_model.encoder.layers.19.self_attn.v_proj.weight": "model-00006-of-00006.safetensors",
+ "model.vision_tower.vision_tower.vision_model.encoder.layers.2.layer_norm1.bias": "model-00006-of-00006.safetensors",
+ "model.vision_tower.vision_tower.vision_model.encoder.layers.2.layer_norm1.weight": "model-00006-of-00006.safetensors",
+ "model.vision_tower.vision_tower.vision_model.encoder.layers.2.layer_norm2.bias": "model-00006-of-00006.safetensors",
+ "model.vision_tower.vision_tower.vision_model.encoder.layers.2.layer_norm2.weight": "model-00006-of-00006.safetensors",
+ "model.vision_tower.vision_tower.vision_model.encoder.layers.2.mlp.fc1.bias": "model-00006-of-00006.safetensors",
+ "model.vision_tower.vision_tower.vision_model.encoder.layers.2.mlp.fc1.weight": "model-00006-of-00006.safetensors",
+ "model.vision_tower.vision_tower.vision_model.encoder.layers.2.mlp.fc2.bias": "model-00006-of-00006.safetensors",
+ "model.vision_tower.vision_tower.vision_model.encoder.layers.2.mlp.fc2.weight": "model-00006-of-00006.safetensors",
+ "model.vision_tower.vision_tower.vision_model.encoder.layers.2.self_attn.k_proj.bias": "model-00006-of-00006.safetensors",
+ "model.vision_tower.vision_tower.vision_model.encoder.layers.2.self_attn.k_proj.weight": "model-00006-of-00006.safetensors",
+ "model.vision_tower.vision_tower.vision_model.encoder.layers.2.self_attn.out_proj.bias": "model-00006-of-00006.safetensors",
+ "model.vision_tower.vision_tower.vision_model.encoder.layers.2.self_attn.out_proj.weight": "model-00006-of-00006.safetensors",
+ "model.vision_tower.vision_tower.vision_model.encoder.layers.2.self_attn.q_proj.bias": "model-00006-of-00006.safetensors",
+ "model.vision_tower.vision_tower.vision_model.encoder.layers.2.self_attn.q_proj.weight": "model-00006-of-00006.safetensors",
+ "model.vision_tower.vision_tower.vision_model.encoder.layers.2.self_attn.v_proj.bias": "model-00006-of-00006.safetensors",
+ "model.vision_tower.vision_tower.vision_model.encoder.layers.2.self_attn.v_proj.weight": "model-00006-of-00006.safetensors",
+ "model.vision_tower.vision_tower.vision_model.encoder.layers.20.layer_norm1.bias": "model-00006-of-00006.safetensors",
+ "model.vision_tower.vision_tower.vision_model.encoder.layers.20.layer_norm1.weight": "model-00006-of-00006.safetensors",
+ "model.vision_tower.vision_tower.vision_model.encoder.layers.20.layer_norm2.bias": "model-00006-of-00006.safetensors",
+ "model.vision_tower.vision_tower.vision_model.encoder.layers.20.layer_norm2.weight": "model-00006-of-00006.safetensors",
+ "model.vision_tower.vision_tower.vision_model.encoder.layers.20.mlp.fc1.bias": "model-00006-of-00006.safetensors",
+ "model.vision_tower.vision_tower.vision_model.encoder.layers.20.mlp.fc1.weight": "model-00006-of-00006.safetensors",
+ "model.vision_tower.vision_tower.vision_model.encoder.layers.20.mlp.fc2.bias": "model-00006-of-00006.safetensors",
+ "model.vision_tower.vision_tower.vision_model.encoder.layers.20.mlp.fc2.weight": "model-00006-of-00006.safetensors",
+ "model.vision_tower.vision_tower.vision_model.encoder.layers.20.self_attn.k_proj.bias": "model-00006-of-00006.safetensors",
+ "model.vision_tower.vision_tower.vision_model.encoder.layers.20.self_attn.k_proj.weight": "model-00006-of-00006.safetensors",
+ "model.vision_tower.vision_tower.vision_model.encoder.layers.20.self_attn.out_proj.bias": "model-00006-of-00006.safetensors",
+ "model.vision_tower.vision_tower.vision_model.encoder.layers.20.self_attn.out_proj.weight": "model-00006-of-00006.safetensors",
+ "model.vision_tower.vision_tower.vision_model.encoder.layers.20.self_attn.q_proj.bias": "model-00006-of-00006.safetensors",
+ "model.vision_tower.vision_tower.vision_model.encoder.layers.20.self_attn.q_proj.weight": "model-00006-of-00006.safetensors",
+ "model.vision_tower.vision_tower.vision_model.encoder.layers.20.self_attn.v_proj.bias": "model-00006-of-00006.safetensors",
+ "model.vision_tower.vision_tower.vision_model.encoder.layers.20.self_attn.v_proj.weight": "model-00006-of-00006.safetensors",
+ "model.vision_tower.vision_tower.vision_model.encoder.layers.21.layer_norm1.bias": "model-00006-of-00006.safetensors",
+ "model.vision_tower.vision_tower.vision_model.encoder.layers.21.layer_norm1.weight": "model-00006-of-00006.safetensors",
+ "model.vision_tower.vision_tower.vision_model.encoder.layers.21.layer_norm2.bias": "model-00006-of-00006.safetensors",
+ "model.vision_tower.vision_tower.vision_model.encoder.layers.21.layer_norm2.weight": "model-00006-of-00006.safetensors",
+ "model.vision_tower.vision_tower.vision_model.encoder.layers.21.mlp.fc1.bias": "model-00006-of-00006.safetensors",
+ "model.vision_tower.vision_tower.vision_model.encoder.layers.21.mlp.fc1.weight": "model-00006-of-00006.safetensors",
+ "model.vision_tower.vision_tower.vision_model.encoder.layers.21.mlp.fc2.bias": "model-00006-of-00006.safetensors",
+ "model.vision_tower.vision_tower.vision_model.encoder.layers.21.mlp.fc2.weight": "model-00006-of-00006.safetensors",
+ "model.vision_tower.vision_tower.vision_model.encoder.layers.21.self_attn.k_proj.bias": "model-00006-of-00006.safetensors",
+ "model.vision_tower.vision_tower.vision_model.encoder.layers.21.self_attn.k_proj.weight": "model-00006-of-00006.safetensors",
+ "model.vision_tower.vision_tower.vision_model.encoder.layers.21.self_attn.out_proj.bias": "model-00006-of-00006.safetensors",
+ "model.vision_tower.vision_tower.vision_model.encoder.layers.21.self_attn.out_proj.weight": "model-00006-of-00006.safetensors",
+ "model.vision_tower.vision_tower.vision_model.encoder.layers.21.self_attn.q_proj.bias": "model-00006-of-00006.safetensors",
+ "model.vision_tower.vision_tower.vision_model.encoder.layers.21.self_attn.q_proj.weight": "model-00006-of-00006.safetensors",
+ "model.vision_tower.vision_tower.vision_model.encoder.layers.21.self_attn.v_proj.bias": "model-00006-of-00006.safetensors",
+ "model.vision_tower.vision_tower.vision_model.encoder.layers.21.self_attn.v_proj.weight": "model-00006-of-00006.safetensors",
+ "model.vision_tower.vision_tower.vision_model.encoder.layers.22.layer_norm1.bias": "model-00006-of-00006.safetensors",
+ "model.vision_tower.vision_tower.vision_model.encoder.layers.22.layer_norm1.weight": "model-00006-of-00006.safetensors",
+ "model.vision_tower.vision_tower.vision_model.encoder.layers.22.layer_norm2.bias": "model-00006-of-00006.safetensors",
+ "model.vision_tower.vision_tower.vision_model.encoder.layers.22.layer_norm2.weight": "model-00006-of-00006.safetensors",
+ "model.vision_tower.vision_tower.vision_model.encoder.layers.22.mlp.fc1.bias": "model-00006-of-00006.safetensors",
+ "model.vision_tower.vision_tower.vision_model.encoder.layers.22.mlp.fc1.weight": "model-00006-of-00006.safetensors",
+ "model.vision_tower.vision_tower.vision_model.encoder.layers.22.mlp.fc2.bias": "model-00006-of-00006.safetensors",
+ "model.vision_tower.vision_tower.vision_model.encoder.layers.22.mlp.fc2.weight": "model-00006-of-00006.safetensors",
+ "model.vision_tower.vision_tower.vision_model.encoder.layers.22.self_attn.k_proj.bias": "model-00006-of-00006.safetensors",
+ "model.vision_tower.vision_tower.vision_model.encoder.layers.22.self_attn.k_proj.weight": "model-00006-of-00006.safetensors",
+ "model.vision_tower.vision_tower.vision_model.encoder.layers.22.self_attn.out_proj.bias": "model-00006-of-00006.safetensors",
+ "model.vision_tower.vision_tower.vision_model.encoder.layers.22.self_attn.out_proj.weight": "model-00006-of-00006.safetensors",
+ "model.vision_tower.vision_tower.vision_model.encoder.layers.22.self_attn.q_proj.bias": "model-00006-of-00006.safetensors",
+ "model.vision_tower.vision_tower.vision_model.encoder.layers.22.self_attn.q_proj.weight": "model-00006-of-00006.safetensors",
+ "model.vision_tower.vision_tower.vision_model.encoder.layers.22.self_attn.v_proj.bias": "model-00006-of-00006.safetensors",
+ "model.vision_tower.vision_tower.vision_model.encoder.layers.22.self_attn.v_proj.weight": "model-00006-of-00006.safetensors",
+ "model.vision_tower.vision_tower.vision_model.encoder.layers.23.layer_norm1.bias": "model-00006-of-00006.safetensors",
+ "model.vision_tower.vision_tower.vision_model.encoder.layers.23.layer_norm1.weight": "model-00006-of-00006.safetensors",
+ "model.vision_tower.vision_tower.vision_model.encoder.layers.23.layer_norm2.bias": "model-00006-of-00006.safetensors",
+ "model.vision_tower.vision_tower.vision_model.encoder.layers.23.layer_norm2.weight": "model-00006-of-00006.safetensors",
+ "model.vision_tower.vision_tower.vision_model.encoder.layers.23.mlp.fc1.bias": "model-00006-of-00006.safetensors",
+ "model.vision_tower.vision_tower.vision_model.encoder.layers.23.mlp.fc1.weight": "model-00006-of-00006.safetensors",
+ "model.vision_tower.vision_tower.vision_model.encoder.layers.23.mlp.fc2.bias": "model-00006-of-00006.safetensors",
+ "model.vision_tower.vision_tower.vision_model.encoder.layers.23.mlp.fc2.weight": "model-00006-of-00006.safetensors",
+ "model.vision_tower.vision_tower.vision_model.encoder.layers.23.self_attn.k_proj.bias": "model-00006-of-00006.safetensors",
+ "model.vision_tower.vision_tower.vision_model.encoder.layers.23.self_attn.k_proj.weight": "model-00006-of-00006.safetensors",
+ "model.vision_tower.vision_tower.vision_model.encoder.layers.23.self_attn.out_proj.bias": "model-00006-of-00006.safetensors",
+ "model.vision_tower.vision_tower.vision_model.encoder.layers.23.self_attn.out_proj.weight": "model-00006-of-00006.safetensors",
+ "model.vision_tower.vision_tower.vision_model.encoder.layers.23.self_attn.q_proj.bias": "model-00006-of-00006.safetensors",
+ "model.vision_tower.vision_tower.vision_model.encoder.layers.23.self_attn.q_proj.weight": "model-00006-of-00006.safetensors",
+ "model.vision_tower.vision_tower.vision_model.encoder.layers.23.self_attn.v_proj.bias": "model-00006-of-00006.safetensors",
+ "model.vision_tower.vision_tower.vision_model.encoder.layers.23.self_attn.v_proj.weight": "model-00006-of-00006.safetensors",
+ "model.vision_tower.vision_tower.vision_model.encoder.layers.3.layer_norm1.bias": "model-00006-of-00006.safetensors",
+ "model.vision_tower.vision_tower.vision_model.encoder.layers.3.layer_norm1.weight": "model-00006-of-00006.safetensors",
+ "model.vision_tower.vision_tower.vision_model.encoder.layers.3.layer_norm2.bias": "model-00006-of-00006.safetensors",
+ "model.vision_tower.vision_tower.vision_model.encoder.layers.3.layer_norm2.weight": "model-00006-of-00006.safetensors",
+ "model.vision_tower.vision_tower.vision_model.encoder.layers.3.mlp.fc1.bias": "model-00006-of-00006.safetensors",
+ "model.vision_tower.vision_tower.vision_model.encoder.layers.3.mlp.fc1.weight": "model-00006-of-00006.safetensors",
+ "model.vision_tower.vision_tower.vision_model.encoder.layers.3.mlp.fc2.bias": "model-00006-of-00006.safetensors",
+ "model.vision_tower.vision_tower.vision_model.encoder.layers.3.mlp.fc2.weight": "model-00006-of-00006.safetensors",
+ "model.vision_tower.vision_tower.vision_model.encoder.layers.3.self_attn.k_proj.bias": "model-00006-of-00006.safetensors",
+ "model.vision_tower.vision_tower.vision_model.encoder.layers.3.self_attn.k_proj.weight": "model-00006-of-00006.safetensors",
+ "model.vision_tower.vision_tower.vision_model.encoder.layers.3.self_attn.out_proj.bias": "model-00006-of-00006.safetensors",
+ "model.vision_tower.vision_tower.vision_model.encoder.layers.3.self_attn.out_proj.weight": "model-00006-of-00006.safetensors",
+ "model.vision_tower.vision_tower.vision_model.encoder.layers.3.self_attn.q_proj.bias": "model-00006-of-00006.safetensors",
+ "model.vision_tower.vision_tower.vision_model.encoder.layers.3.self_attn.q_proj.weight": "model-00006-of-00006.safetensors",
+ "model.vision_tower.vision_tower.vision_model.encoder.layers.3.self_attn.v_proj.bias": "model-00006-of-00006.safetensors",
+ "model.vision_tower.vision_tower.vision_model.encoder.layers.3.self_attn.v_proj.weight": "model-00006-of-00006.safetensors",
+ "model.vision_tower.vision_tower.vision_model.encoder.layers.4.layer_norm1.bias": "model-00006-of-00006.safetensors",
+ "model.vision_tower.vision_tower.vision_model.encoder.layers.4.layer_norm1.weight": "model-00006-of-00006.safetensors",
+ "model.vision_tower.vision_tower.vision_model.encoder.layers.4.layer_norm2.bias": "model-00006-of-00006.safetensors",
+ "model.vision_tower.vision_tower.vision_model.encoder.layers.4.layer_norm2.weight": "model-00006-of-00006.safetensors",
+ "model.vision_tower.vision_tower.vision_model.encoder.layers.4.mlp.fc1.bias": "model-00006-of-00006.safetensors",
+ "model.vision_tower.vision_tower.vision_model.encoder.layers.4.mlp.fc1.weight": "model-00006-of-00006.safetensors",
+ "model.vision_tower.vision_tower.vision_model.encoder.layers.4.mlp.fc2.bias": "model-00006-of-00006.safetensors",
+ "model.vision_tower.vision_tower.vision_model.encoder.layers.4.mlp.fc2.weight": "model-00006-of-00006.safetensors",
+ "model.vision_tower.vision_tower.vision_model.encoder.layers.4.self_attn.k_proj.bias": "model-00006-of-00006.safetensors",
+ "model.vision_tower.vision_tower.vision_model.encoder.layers.4.self_attn.k_proj.weight": "model-00006-of-00006.safetensors",
+ "model.vision_tower.vision_tower.vision_model.encoder.layers.4.self_attn.out_proj.bias": "model-00006-of-00006.safetensors",
+ "model.vision_tower.vision_tower.vision_model.encoder.layers.4.self_attn.out_proj.weight": "model-00006-of-00006.safetensors",
+ "model.vision_tower.vision_tower.vision_model.encoder.layers.4.self_attn.q_proj.bias": "model-00006-of-00006.safetensors",
+ "model.vision_tower.vision_tower.vision_model.encoder.layers.4.self_attn.q_proj.weight": "model-00006-of-00006.safetensors",
+ "model.vision_tower.vision_tower.vision_model.encoder.layers.4.self_attn.v_proj.bias": "model-00006-of-00006.safetensors",
+ "model.vision_tower.vision_tower.vision_model.encoder.layers.4.self_attn.v_proj.weight": "model-00006-of-00006.safetensors",
+ "model.vision_tower.vision_tower.vision_model.encoder.layers.5.layer_norm1.bias": "model-00006-of-00006.safetensors",
+ "model.vision_tower.vision_tower.vision_model.encoder.layers.5.layer_norm1.weight": "model-00006-of-00006.safetensors",
+ "model.vision_tower.vision_tower.vision_model.encoder.layers.5.layer_norm2.bias": "model-00006-of-00006.safetensors",
+ "model.vision_tower.vision_tower.vision_model.encoder.layers.5.layer_norm2.weight": "model-00006-of-00006.safetensors",
+ "model.vision_tower.vision_tower.vision_model.encoder.layers.5.mlp.fc1.bias": "model-00006-of-00006.safetensors",
+ "model.vision_tower.vision_tower.vision_model.encoder.layers.5.mlp.fc1.weight": "model-00006-of-00006.safetensors",
+ "model.vision_tower.vision_tower.vision_model.encoder.layers.5.mlp.fc2.bias": "model-00006-of-00006.safetensors",
+ "model.vision_tower.vision_tower.vision_model.encoder.layers.5.mlp.fc2.weight": "model-00006-of-00006.safetensors",
+ "model.vision_tower.vision_tower.vision_model.encoder.layers.5.self_attn.k_proj.bias": "model-00006-of-00006.safetensors",
+ "model.vision_tower.vision_tower.vision_model.encoder.layers.5.self_attn.k_proj.weight": "model-00006-of-00006.safetensors",
+ "model.vision_tower.vision_tower.vision_model.encoder.layers.5.self_attn.out_proj.bias": "model-00006-of-00006.safetensors",
+ "model.vision_tower.vision_tower.vision_model.encoder.layers.5.self_attn.out_proj.weight": "model-00006-of-00006.safetensors",
+ "model.vision_tower.vision_tower.vision_model.encoder.layers.5.self_attn.q_proj.bias": "model-00006-of-00006.safetensors",
+ "model.vision_tower.vision_tower.vision_model.encoder.layers.5.self_attn.q_proj.weight": "model-00006-of-00006.safetensors",
+ "model.vision_tower.vision_tower.vision_model.encoder.layers.5.self_attn.v_proj.bias": "model-00006-of-00006.safetensors",
+ "model.vision_tower.vision_tower.vision_model.encoder.layers.5.self_attn.v_proj.weight": "model-00006-of-00006.safetensors",
+ "model.vision_tower.vision_tower.vision_model.encoder.layers.6.layer_norm1.bias": "model-00006-of-00006.safetensors",
+ "model.vision_tower.vision_tower.vision_model.encoder.layers.6.layer_norm1.weight": "model-00006-of-00006.safetensors",
+ "model.vision_tower.vision_tower.vision_model.encoder.layers.6.layer_norm2.bias": "model-00006-of-00006.safetensors",
+ "model.vision_tower.vision_tower.vision_model.encoder.layers.6.layer_norm2.weight": "model-00006-of-00006.safetensors",
+ "model.vision_tower.vision_tower.vision_model.encoder.layers.6.mlp.fc1.bias": "model-00006-of-00006.safetensors",
+ "model.vision_tower.vision_tower.vision_model.encoder.layers.6.mlp.fc1.weight": "model-00006-of-00006.safetensors",
+ "model.vision_tower.vision_tower.vision_model.encoder.layers.6.mlp.fc2.bias": "model-00006-of-00006.safetensors",
+ "model.vision_tower.vision_tower.vision_model.encoder.layers.6.mlp.fc2.weight": "model-00006-of-00006.safetensors",
+ "model.vision_tower.vision_tower.vision_model.encoder.layers.6.self_attn.k_proj.bias": "model-00006-of-00006.safetensors",
+ "model.vision_tower.vision_tower.vision_model.encoder.layers.6.self_attn.k_proj.weight": "model-00006-of-00006.safetensors",
+ "model.vision_tower.vision_tower.vision_model.encoder.layers.6.self_attn.out_proj.bias": "model-00006-of-00006.safetensors",
+ "model.vision_tower.vision_tower.vision_model.encoder.layers.6.self_attn.out_proj.weight": "model-00006-of-00006.safetensors",
+ "model.vision_tower.vision_tower.vision_model.encoder.layers.6.self_attn.q_proj.bias": "model-00006-of-00006.safetensors",
+ "model.vision_tower.vision_tower.vision_model.encoder.layers.6.self_attn.q_proj.weight": "model-00006-of-00006.safetensors",
+ "model.vision_tower.vision_tower.vision_model.encoder.layers.6.self_attn.v_proj.bias": "model-00006-of-00006.safetensors",
+ "model.vision_tower.vision_tower.vision_model.encoder.layers.6.self_attn.v_proj.weight": "model-00006-of-00006.safetensors",
+ "model.vision_tower.vision_tower.vision_model.encoder.layers.7.layer_norm1.bias": "model-00006-of-00006.safetensors",
+ "model.vision_tower.vision_tower.vision_model.encoder.layers.7.layer_norm1.weight": "model-00006-of-00006.safetensors",
+ "model.vision_tower.vision_tower.vision_model.encoder.layers.7.layer_norm2.bias": "model-00006-of-00006.safetensors",
+ "model.vision_tower.vision_tower.vision_model.encoder.layers.7.layer_norm2.weight": "model-00006-of-00006.safetensors",
+ "model.vision_tower.vision_tower.vision_model.encoder.layers.7.mlp.fc1.bias": "model-00006-of-00006.safetensors",
+ "model.vision_tower.vision_tower.vision_model.encoder.layers.7.mlp.fc1.weight": "model-00006-of-00006.safetensors",
+ "model.vision_tower.vision_tower.vision_model.encoder.layers.7.mlp.fc2.bias": "model-00006-of-00006.safetensors",
+ "model.vision_tower.vision_tower.vision_model.encoder.layers.7.mlp.fc2.weight": "model-00006-of-00006.safetensors",
+ "model.vision_tower.vision_tower.vision_model.encoder.layers.7.self_attn.k_proj.bias": "model-00006-of-00006.safetensors",
+ "model.vision_tower.vision_tower.vision_model.encoder.layers.7.self_attn.k_proj.weight": "model-00006-of-00006.safetensors",
+ "model.vision_tower.vision_tower.vision_model.encoder.layers.7.self_attn.out_proj.bias": "model-00006-of-00006.safetensors",
+ "model.vision_tower.vision_tower.vision_model.encoder.layers.7.self_attn.out_proj.weight": "model-00006-of-00006.safetensors",
+ "model.vision_tower.vision_tower.vision_model.encoder.layers.7.self_attn.q_proj.bias": "model-00006-of-00006.safetensors",
+ "model.vision_tower.vision_tower.vision_model.encoder.layers.7.self_attn.q_proj.weight": "model-00006-of-00006.safetensors",
+ "model.vision_tower.vision_tower.vision_model.encoder.layers.7.self_attn.v_proj.bias": "model-00006-of-00006.safetensors",
+ "model.vision_tower.vision_tower.vision_model.encoder.layers.7.self_attn.v_proj.weight": "model-00006-of-00006.safetensors",
+ "model.vision_tower.vision_tower.vision_model.encoder.layers.8.layer_norm1.bias": "model-00006-of-00006.safetensors",
+ "model.vision_tower.vision_tower.vision_model.encoder.layers.8.layer_norm1.weight": "model-00006-of-00006.safetensors",
+ "model.vision_tower.vision_tower.vision_model.encoder.layers.8.layer_norm2.bias": "model-00006-of-00006.safetensors",
+ "model.vision_tower.vision_tower.vision_model.encoder.layers.8.layer_norm2.weight": "model-00006-of-00006.safetensors",
+ "model.vision_tower.vision_tower.vision_model.encoder.layers.8.mlp.fc1.bias": "model-00006-of-00006.safetensors",
+ "model.vision_tower.vision_tower.vision_model.encoder.layers.8.mlp.fc1.weight": "model-00006-of-00006.safetensors",
+ "model.vision_tower.vision_tower.vision_model.encoder.layers.8.mlp.fc2.bias": "model-00006-of-00006.safetensors",
+ "model.vision_tower.vision_tower.vision_model.encoder.layers.8.mlp.fc2.weight": "model-00006-of-00006.safetensors",
+ "model.vision_tower.vision_tower.vision_model.encoder.layers.8.self_attn.k_proj.bias": "model-00006-of-00006.safetensors",
+ "model.vision_tower.vision_tower.vision_model.encoder.layers.8.self_attn.k_proj.weight": "model-00006-of-00006.safetensors",
+ "model.vision_tower.vision_tower.vision_model.encoder.layers.8.self_attn.out_proj.bias": "model-00006-of-00006.safetensors",
+ "model.vision_tower.vision_tower.vision_model.encoder.layers.8.self_attn.out_proj.weight": "model-00006-of-00006.safetensors",
+ "model.vision_tower.vision_tower.vision_model.encoder.layers.8.self_attn.q_proj.bias": "model-00006-of-00006.safetensors",
+ "model.vision_tower.vision_tower.vision_model.encoder.layers.8.self_attn.q_proj.weight": "model-00006-of-00006.safetensors",
+ "model.vision_tower.vision_tower.vision_model.encoder.layers.8.self_attn.v_proj.bias": "model-00006-of-00006.safetensors",
+ "model.vision_tower.vision_tower.vision_model.encoder.layers.8.self_attn.v_proj.weight": "model-00006-of-00006.safetensors",
+ "model.vision_tower.vision_tower.vision_model.encoder.layers.9.layer_norm1.bias": "model-00006-of-00006.safetensors",
+ "model.vision_tower.vision_tower.vision_model.encoder.layers.9.layer_norm1.weight": "model-00006-of-00006.safetensors",
+ "model.vision_tower.vision_tower.vision_model.encoder.layers.9.layer_norm2.bias": "model-00006-of-00006.safetensors",
+ "model.vision_tower.vision_tower.vision_model.encoder.layers.9.layer_norm2.weight": "model-00006-of-00006.safetensors",
+ "model.vision_tower.vision_tower.vision_model.encoder.layers.9.mlp.fc1.bias": "model-00006-of-00006.safetensors",
+ "model.vision_tower.vision_tower.vision_model.encoder.layers.9.mlp.fc1.weight": "model-00006-of-00006.safetensors",
+ "model.vision_tower.vision_tower.vision_model.encoder.layers.9.mlp.fc2.bias": "model-00006-of-00006.safetensors",
+ "model.vision_tower.vision_tower.vision_model.encoder.layers.9.mlp.fc2.weight": "model-00006-of-00006.safetensors",
+ "model.vision_tower.vision_tower.vision_model.encoder.layers.9.self_attn.k_proj.bias": "model-00006-of-00006.safetensors",
+ "model.vision_tower.vision_tower.vision_model.encoder.layers.9.self_attn.k_proj.weight": "model-00006-of-00006.safetensors",
+ "model.vision_tower.vision_tower.vision_model.encoder.layers.9.self_attn.out_proj.bias": "model-00006-of-00006.safetensors",
+ "model.vision_tower.vision_tower.vision_model.encoder.layers.9.self_attn.out_proj.weight": "model-00006-of-00006.safetensors",
+ "model.vision_tower.vision_tower.vision_model.encoder.layers.9.self_attn.q_proj.bias": "model-00006-of-00006.safetensors",
+ "model.vision_tower.vision_tower.vision_model.encoder.layers.9.self_attn.q_proj.weight": "model-00006-of-00006.safetensors",
+ "model.vision_tower.vision_tower.vision_model.encoder.layers.9.self_attn.v_proj.bias": "model-00006-of-00006.safetensors",
+ "model.vision_tower.vision_tower.vision_model.encoder.layers.9.self_attn.v_proj.weight": "model-00006-of-00006.safetensors",
+ "model.vision_tower.vision_tower.vision_model.post_layernorm.bias": "model-00006-of-00006.safetensors",
+ "model.vision_tower.vision_tower.vision_model.post_layernorm.weight": "model-00006-of-00006.safetensors",
+ "model.vision_tower.vision_tower.vision_model.pre_layrnorm.bias": "model-00006-of-00006.safetensors",
+ "model.vision_tower.vision_tower.vision_model.pre_layrnorm.weight": "model-00006-of-00006.safetensors"
+ }
+}
diff --git a/modeling_llama.py b/modeling_llama.py
new file mode 100644
index 0000000000000000000000000000000000000000..182dbcc62106eb4392fe106011a6b5f955238637
--- /dev/null
+++ b/modeling_llama.py
@@ -0,0 +1,1483 @@
+# coding=utf-8
+# Copyright 2022 EleutherAI and the HuggingFace Inc. team. All rights reserved.
+#
+# This code is based on EleutherAI's GPT-NeoX library and the GPT-NeoX
+# and OPT implementations in this library. It has been modified from its
+# original forms to accommodate minor architectural differences compared
+# to GPT-NeoX and OPT used by the Meta AI team that trained the model.
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+""" PyTorch LLaMA model."""
+import math
+import warnings
+from typing import List, Optional, Tuple, Union
+
+import torch
+import torch.nn.functional as F
+import torch.utils.checkpoint
+from torch import nn
+from torch.nn import BCEWithLogitsLoss, CrossEntropyLoss, MSELoss
+
+from ...activations import ACT2FN
+from ...cache_utils import Cache, DynamicCache
+from ...modeling_attn_mask_utils import (
+ AttentionMaskConverter,
+ _prepare_4d_attention_mask,
+ _prepare_4d_causal_attention_mask,
+ _prepare_4d_causal_attention_mask_for_sdpa,
+)
+from ...modeling_outputs import BaseModelOutputWithPast, CausalLMOutputWithPast, SequenceClassifierOutputWithPast
+from ...modeling_utils import PreTrainedModel
+from ...pytorch_utils import ALL_LAYERNORM_LAYERS, is_torch_greater_or_equal_than_1_13
+from ...utils import (
+ add_start_docstrings,
+ add_start_docstrings_to_model_forward,
+ is_flash_attn_2_available,
+ is_flash_attn_greater_or_equal_2_10,
+ logging,
+ replace_return_docstrings,
+)
+from ...utils.import_utils import is_torch_fx_available
+from .configuration_llama import LlamaConfig
+
+
+if is_flash_attn_2_available():
+ from flash_attn import flash_attn_func, flash_attn_varlen_func
+ from flash_attn.bert_padding import index_first_axis, pad_input, unpad_input # noqa
+
+
+# This makes `_prepare_4d_causal_attention_mask` a leaf function in the FX graph.
+# It means that the function will not be traced through and simply appear as a node in the graph.
+if is_torch_fx_available():
+ if not is_torch_greater_or_equal_than_1_13:
+ import torch.fx
+
+ _prepare_4d_causal_attention_mask = torch.fx.wrap(_prepare_4d_causal_attention_mask)
+
+
+logger = logging.get_logger(__name__)
+
+_CONFIG_FOR_DOC = "LlamaConfig"
+
+# Module-level counter mutated by LlamaAttention.forward during single-token decode
+# steps: it accumulates, per layer, the number of mask entries > 0 (heads kept for the
+# image-token columns) and is reset once layer_idx == 39 has been processed.
+# NOTE(review): global mutable state — not safe across concurrent model instances.
+count_drop_head = 0
+def _get_unpad_data(attention_mask):
+ seqlens_in_batch = attention_mask.sum(dim=-1, dtype=torch.int32)
+ indices = torch.nonzero(attention_mask.flatten(), as_tuple=False).flatten()
+ max_seqlen_in_batch = seqlens_in_batch.max().item()
+ cu_seqlens = F.pad(torch.cumsum(seqlens_in_batch, dim=0, dtype=torch.torch.int32), (1, 0))
+ return (
+ indices,
+ cu_seqlens,
+ max_seqlen_in_batch,
+ )
+
+
+def _expand_mask(mask: torch.Tensor, dtype: torch.dtype, tgt_len: Optional[int] = None):
+ warnings.warn(
+ "Calling `transformers.models.llama.modeling_llama._prepare_4d_attention_mask` is deprecated and will be removed in v4.37. Use `transformers.modeling_attn_mask_utils._prepare_4d_attention_mask"
+ )
+ return _prepare_4d_attention_mask(mask=mask, dtype=dtype, tgt_len=tgt_len)
+
+
+def _make_causal_mask(
+ input_ids_shape: torch.Size, dtype: torch.dtype, device: torch.device, past_key_values_length: int = 0
+):
+ warnings.warn(
+ "Calling `transformers.models.llama.modeling_llama._make_causal_mask` is deprecated and will be removed in v4.37. Use `transformers.models.llama.modeling_llama.AttentionMaskConverter._make_causal_mask"
+ )
+ return AttentionMaskConverter._make_causal_mask(
+ input_ids_shape=input_ids_shape, dtype=dtype, device=device, past_key_values_length=past_key_values_length
+ )
+
+
+class LlamaRMSNorm(nn.Module):
+ def __init__(self, hidden_size, eps=1e-6):
+ """
+ LlamaRMSNorm is equivalent to T5LayerNorm
+ """
+ super().__init__()
+ self.weight = nn.Parameter(torch.ones(hidden_size))
+ self.variance_epsilon = eps
+
+ def forward(self, hidden_states):
+ input_dtype = hidden_states.dtype
+ hidden_states = hidden_states.to(torch.float32)
+ variance = hidden_states.pow(2).mean(-1, keepdim=True)
+ hidden_states = hidden_states * torch.rsqrt(variance + self.variance_epsilon)
+ return self.weight * hidden_states.to(input_dtype)
+
+
+ALL_LAYERNORM_LAYERS.append(LlamaRMSNorm)
+
+
+class LlamaRotaryEmbedding(nn.Module):
+ def __init__(self, dim, max_position_embeddings=2048, base=10000, device=None):
+ super().__init__()
+
+ self.dim = dim
+ self.max_position_embeddings = max_position_embeddings
+ self.base = base
+ inv_freq = 1.0 / (self.base ** (torch.arange(0, self.dim, 2).float().to(device) / self.dim))
+ self.register_buffer("inv_freq", inv_freq, persistent=False)
+
+ # Build here to make `torch.jit.trace` work.
+ self._set_cos_sin_cache(
+ seq_len=max_position_embeddings, device=self.inv_freq.device, dtype=torch.get_default_dtype()
+ )
+
+ def _set_cos_sin_cache(self, seq_len, device, dtype):
+ self.max_seq_len_cached = seq_len
+ t = torch.arange(self.max_seq_len_cached, device=device, dtype=self.inv_freq.dtype)
+
+ freqs = torch.outer(t, self.inv_freq)
+ # Different from paper, but it uses a different permutation in order to obtain the same calculation
+ emb = torch.cat((freqs, freqs), dim=-1)
+ self.register_buffer("cos_cached", emb.cos().to(dtype), persistent=False)
+ self.register_buffer("sin_cached", emb.sin().to(dtype), persistent=False)
+
+ def forward(self, x, seq_len=None):
+ # x: [bs, num_attention_heads, seq_len, head_size]
+ if seq_len > self.max_seq_len_cached:
+ self._set_cos_sin_cache(seq_len=seq_len, device=x.device, dtype=x.dtype)
+
+ return (
+ self.cos_cached[:seq_len].to(dtype=x.dtype),
+ self.sin_cached[:seq_len].to(dtype=x.dtype),
+ )
+
+
+class LlamaLinearScalingRotaryEmbedding(LlamaRotaryEmbedding):
+ """LlamaRotaryEmbedding extended with linear scaling. Credits to the Reddit user /u/kaiokendev"""
+
+ def __init__(self, dim, max_position_embeddings=2048, base=10000, device=None, scaling_factor=1.0):
+ self.scaling_factor = scaling_factor
+ super().__init__(dim, max_position_embeddings, base, device)
+
+ def _set_cos_sin_cache(self, seq_len, device, dtype):
+ self.max_seq_len_cached = seq_len
+ t = torch.arange(self.max_seq_len_cached, device=device, dtype=self.inv_freq.dtype)
+ t = t / self.scaling_factor
+
+ freqs = torch.outer(t, self.inv_freq)
+ # Different from paper, but it uses a different permutation in order to obtain the same calculation
+ emb = torch.cat((freqs, freqs), dim=-1)
+ self.register_buffer("cos_cached", emb.cos().to(dtype), persistent=False)
+ self.register_buffer("sin_cached", emb.sin().to(dtype), persistent=False)
+
+
+class LlamaDynamicNTKScalingRotaryEmbedding(LlamaRotaryEmbedding):
+ """LlamaRotaryEmbedding extended with Dynamic NTK scaling. Credits to the Reddit users /u/bloc97 and /u/emozilla"""
+
+ def __init__(self, dim, max_position_embeddings=2048, base=10000, device=None, scaling_factor=1.0):
+ self.scaling_factor = scaling_factor
+ super().__init__(dim, max_position_embeddings, base, device)
+
+ def _set_cos_sin_cache(self, seq_len, device, dtype):
+ self.max_seq_len_cached = seq_len
+
+ if seq_len > self.max_position_embeddings:
+ base = self.base * (
+ (self.scaling_factor * seq_len / self.max_position_embeddings) - (self.scaling_factor - 1)
+ ) ** (self.dim / (self.dim - 2))
+ inv_freq = 1.0 / (base ** (torch.arange(0, self.dim, 2).float().to(device) / self.dim))
+ self.register_buffer("inv_freq", inv_freq, persistent=False)
+
+ t = torch.arange(self.max_seq_len_cached, device=device, dtype=self.inv_freq.dtype)
+
+ freqs = torch.outer(t, self.inv_freq)
+ # Different from paper, but it uses a different permutation in order to obtain the same calculation
+ emb = torch.cat((freqs, freqs), dim=-1)
+ self.register_buffer("cos_cached", emb.cos().to(dtype), persistent=False)
+ self.register_buffer("sin_cached", emb.sin().to(dtype), persistent=False)
+
+
+def rotate_half(x):
+ """Rotates half the hidden dims of the input."""
+ x1 = x[..., : x.shape[-1] // 2]
+ x2 = x[..., x.shape[-1] // 2 :]
+ return torch.cat((-x2, x1), dim=-1)
+
+
+def apply_rotary_pos_emb(q, k, cos, sin, position_ids, unsqueeze_dim=1):
+ """Applies Rotary Position Embedding to the query and key tensors.
+
+ Args:
+ q (`torch.Tensor`): The query tensor.
+ k (`torch.Tensor`): The key tensor.
+ cos (`torch.Tensor`): The cosine part of the rotary embedding.
+ sin (`torch.Tensor`): The sine part of the rotary embedding.
+ position_ids (`torch.Tensor`):
+ The position indices of the tokens corresponding to the query and key tensors. For example, this can be
+ used to pass offsetted position ids when working with a KV-cache.
+ unsqueeze_dim (`int`, *optional*, defaults to 1):
+ The 'unsqueeze_dim' argument specifies the dimension along which to unsqueeze cos[position_ids] and
+ sin[position_ids] so that they can be properly broadcasted to the dimensions of q and k. For example, note
+ that cos[position_ids] and sin[position_ids] have the shape [batch_size, seq_len, head_dim]. Then, if q and
+ k have the shape [batch_size, heads, seq_len, head_dim], then setting unsqueeze_dim=1 makes
+ cos[position_ids] and sin[position_ids] broadcastable to the shapes of q and k. Similarly, if q and k have
+ the shape [batch_size, seq_len, heads, head_dim], then set unsqueeze_dim=2.
+ Returns:
+ `tuple(torch.Tensor)` comprising of the query and key tensors rotated using the Rotary Position Embedding.
+ """
+ cos = cos[position_ids].unsqueeze(unsqueeze_dim)
+ sin = sin[position_ids].unsqueeze(unsqueeze_dim)
+ q_embed = (q * cos) + (rotate_half(q) * sin)
+ k_embed = (k * cos) + (rotate_half(k) * sin)
+ return q_embed, k_embed
+
+
+class LlamaMLP(nn.Module):
+ def __init__(self, config):
+ super().__init__()
+ self.config = config
+ self.hidden_size = config.hidden_size
+ self.intermediate_size = config.intermediate_size
+ self.gate_proj = nn.Linear(self.hidden_size, self.intermediate_size, bias=False)
+ self.up_proj = nn.Linear(self.hidden_size, self.intermediate_size, bias=False)
+ self.down_proj = nn.Linear(self.intermediate_size, self.hidden_size, bias=False)
+ self.act_fn = ACT2FN[config.hidden_act]
+
+ def forward(self, x):
+ if self.config.pretraining_tp > 1:
+ slice = self.intermediate_size // self.config.pretraining_tp
+ gate_proj_slices = self.gate_proj.weight.split(slice, dim=0)
+ up_proj_slices = self.up_proj.weight.split(slice, dim=0)
+ down_proj_slices = self.down_proj.weight.split(slice, dim=1)
+
+ gate_proj = torch.cat(
+ [F.linear(x, gate_proj_slices[i]) for i in range(self.config.pretraining_tp)], dim=-1
+ )
+ up_proj = torch.cat([F.linear(x, up_proj_slices[i]) for i in range(self.config.pretraining_tp)], dim=-1)
+
+ intermediate_states = (self.act_fn(gate_proj) * up_proj).split(slice, dim=2)
+ down_proj = [
+ F.linear(intermediate_states[i], down_proj_slices[i]) for i in range(self.config.pretraining_tp)
+ ]
+ down_proj = sum(down_proj)
+ else:
+ down_proj = self.down_proj(self.act_fn(self.gate_proj(x)) * self.up_proj(x))
+
+ return down_proj
+
+
+def repeat_kv(hidden_states: torch.Tensor, n_rep: int) -> torch.Tensor:
+ """
+ This is the equivalent of torch.repeat_interleave(x, dim=1, repeats=n_rep). The hidden states go from (batch,
+ num_key_value_heads, seqlen, head_dim) to (batch, num_attention_heads, seqlen, head_dim)
+ """
+ batch, num_key_value_heads, slen, head_dim = hidden_states.shape
+ if n_rep == 1:
+ return hidden_states
+ hidden_states = hidden_states[:, :, None, :, :].expand(batch, num_key_value_heads, n_rep, slen, head_dim)
+ return hidden_states.reshape(batch, num_key_value_heads * n_rep, slen, head_dim)
+
+
+class LlamaAttention(nn.Module):
+ """Multi-headed attention from 'Attention Is All You Need' paper"""
+
+ def __init__(self, config: LlamaConfig, layer_idx: Optional[int] = None):
+ super().__init__()
+ self.config = config
+ self.layer_idx = layer_idx
+ if layer_idx is None:
+ logger.warning_once(
+ f"Instantiating {self.__class__.__name__} without passing `layer_idx` is not recommended and will "
+ "to errors during the forward call, if caching is used. Please make sure to provide a `layer_idx` "
+ "when creating this class."
+ )
+
+ self.attention_dropout = config.attention_dropout
+ self.hidden_size = config.hidden_size
+ self.num_heads = config.num_attention_heads
+ self.head_dim = self.hidden_size // self.num_heads
+ self.num_key_value_heads = config.num_key_value_heads
+ self.num_key_value_groups = self.num_heads // self.num_key_value_heads
+ self.max_position_embeddings = config.max_position_embeddings
+ self.rope_theta = config.rope_theta
+ self.is_causal = True
+
+ if (self.head_dim * self.num_heads) != self.hidden_size:
+ raise ValueError(
+ f"hidden_size must be divisible by num_heads (got `hidden_size`: {self.hidden_size}"
+ f" and `num_heads`: {self.num_heads})."
+ )
+
+ self.q_proj = nn.Linear(self.hidden_size, self.num_heads * self.head_dim, bias=config.attention_bias)
+ self.k_proj = nn.Linear(self.hidden_size, self.num_key_value_heads * self.head_dim, bias=config.attention_bias)
+ self.v_proj = nn.Linear(self.hidden_size, self.num_key_value_heads * self.head_dim, bias=config.attention_bias)
+ self.o_proj = nn.Linear(self.num_heads * self.head_dim, self.hidden_size, bias=config.attention_bias)
+ self._init_rope()
+ self.mask = torch.load('headcut_mask_3p67/'+str(self.layer_idx)+'.pth')
+ def _init_rope(self):
+ if self.config.rope_scaling is None:
+ self.rotary_emb = LlamaRotaryEmbedding(
+ self.head_dim,
+ max_position_embeddings=self.max_position_embeddings,
+ base=self.rope_theta,
+ )
+ else:
+ scaling_type = self.config.rope_scaling["type"]
+ scaling_factor = self.config.rope_scaling["factor"]
+ if scaling_type == "linear":
+ self.rotary_emb = LlamaLinearScalingRotaryEmbedding(
+ self.head_dim,
+ max_position_embeddings=self.max_position_embeddings,
+ scaling_factor=scaling_factor,
+ base=self.rope_theta,
+ )
+ elif scaling_type == "dynamic":
+ self.rotary_emb = LlamaDynamicNTKScalingRotaryEmbedding(
+ self.head_dim,
+ max_position_embeddings=self.max_position_embeddings,
+ scaling_factor=scaling_factor,
+ base=self.rope_theta,
+ )
+ else:
+ raise ValueError(f"Unknown RoPE scaling type {scaling_type}")
+
+ def _shape(self, tensor: torch.Tensor, seq_len: int, bsz: int):
+ return tensor.view(bsz, seq_len, self.num_heads, self.head_dim).transpose(1, 2).contiguous()
+
+ def forward(
+ self,
+ hidden_states: torch.Tensor,
+ attention_mask: Optional[torch.Tensor] = None,
+ position_ids: Optional[torch.LongTensor] = None,
+ past_key_value: Optional[Cache] = None,
+ output_attentions: bool = False,
+ use_cache: bool = False,
+ **kwargs,
+ ) -> Tuple[torch.Tensor, Optional[torch.Tensor], Optional[Tuple[torch.Tensor]]]:
+ if "padding_mask" in kwargs:
+ warnings.warn(
+ "Passing `padding_mask` is deprecated and will be removed in v4.37. Please make sure use `attention_mask` instead.`"
+ )
+
+ bsz, q_len, _ = hidden_states.size()
+
+ if self.config.pretraining_tp > 1:
+ key_value_slicing = (self.num_key_value_heads * self.head_dim) // self.config.pretraining_tp
+ query_slices = self.q_proj.weight.split(
+ (self.num_heads * self.head_dim) // self.config.pretraining_tp, dim=0
+ )
+ key_slices = self.k_proj.weight.split(key_value_slicing, dim=0)
+ value_slices = self.v_proj.weight.split(key_value_slicing, dim=0)
+
+ query_states = [F.linear(hidden_states, query_slices[i]) for i in range(self.config.pretraining_tp)]
+ query_states = torch.cat(query_states, dim=-1)
+
+ key_states = [F.linear(hidden_states, key_slices[i]) for i in range(self.config.pretraining_tp)]
+ key_states = torch.cat(key_states, dim=-1)
+
+ value_states = [F.linear(hidden_states, value_slices[i]) for i in range(self.config.pretraining_tp)]
+ value_states = torch.cat(value_states, dim=-1)
+
+ else:
+ query_states = self.q_proj(hidden_states)
+ key_states = self.k_proj(hidden_states)
+ value_states = self.v_proj(hidden_states)
+
+ query_states = query_states.view(bsz, q_len, self.num_heads, self.head_dim).transpose(1, 2)
+ key_states = key_states.view(bsz, q_len, self.num_key_value_heads, self.head_dim).transpose(1, 2)
+ value_states = value_states.view(bsz, q_len, self.num_key_value_heads, self.head_dim).transpose(1, 2)
+
+ kv_seq_len = key_states.shape[-2]
+ if past_key_value is not None:
+ if self.layer_idx is None:
+ raise ValueError(
+ f"The cache structure has changed since version v4.36. If you are using {self.__class__.__name__} "
+ "for auto-regressive decoding with k/v caching, please make sure to initialize the attention class "
+ "with a layer index."
+ )
+ kv_seq_len += past_key_value.get_usable_length(kv_seq_len, self.layer_idx)
+ cos, sin = self.rotary_emb(value_states, seq_len=kv_seq_len)
+ query_states, key_states = apply_rotary_pos_emb(query_states, key_states, cos, sin, position_ids)
+
+ if past_key_value is not None:
+ cache_kwargs = {"sin": sin, "cos": cos} # Specific to RoPE models
+ key_states, value_states = past_key_value.update(key_states, value_states, self.layer_idx, cache_kwargs)
+
+ key_states = repeat_kv(key_states, self.num_key_value_groups)
+ value_states = repeat_kv(value_states, self.num_key_value_groups)
+ #attn = "local"
+ attn = "original"
+ # print("layer_index",self.layer_idx)
+ '''
+ offset=35
+ if self.layer_idx<=10 or self.layer_idx>=20: # > 37
+ attention_mask = attention_mask.clone()
+ attention_mask[:,:,offset+576:,offset:offset+576]=float('-inf')
+ if attention_mask.shape[2]==1:
+ attention_mask[:,:,:,offset:offset+576]=float('-inf')
+ attention_mask = attention_mask.clone()
+ #print(attention_mask)
+ # print(value_states.shape)
+ '''
+ if attn == "original":
+ import time
+ start = time.time()
+ attn_weights = torch.matmul(query_states, key_states.transpose(2, 3)) / math.sqrt(self.head_dim) #torch.Size([16, 40, 1752, 1752])
+ if attn_weights.size() != (bsz, self.num_heads, q_len, kv_seq_len):
+ raise ValueError(
+ f"Attention weights should be of size {(bsz, self.num_heads, q_len, kv_seq_len)}, but is"
+ f" {attn_weights.size()}"
+ )
+
+ if attention_mask is not None:
+ if attention_mask.size() != (bsz, 1, q_len, kv_seq_len):
+ raise ValueError(
+ f"Attention mask should be of size {(bsz, 1, q_len, kv_seq_len)}, but is {attention_mask.size()}"
+ )
+ attn_weights = attn_weights + attention_mask
+
+ # upcast attention to fp32
+ attn_weights = nn.functional.softmax(attn_weights, dim=-1, dtype=torch.float32).to(query_states.dtype)
+ attn_weights = nn.functional.dropout(attn_weights, p=self.attention_dropout, training=self.training)
+ #import pdb; pdb.set_trace()
+ #if self.layer_idx ==20:
+ # import pdb; pdb.set_trace()
+ # mask = torch.load('temp/'+str(self.layer_idx)+'.pth')
+
+
+ #import pdb; pdb.set_trace()
+ if attn_weights.shape[2]>1:
+ '''
+ res_v = torch.sum( attn_weights[0,:,35+576:,35:35+576],dim=[1,2])
+ res_t = torch.sum( attn_weights[0,:,35+576:,35+576:],dim=[1,2])
+ res = res_v/res_t
+ print(self.layer_idx,len(res[res<0.3]))
+ '''
+ # mask = torch.load('temp/'+str(self.layer_idx)+'.pth')
+ mask = self.mask.unsqueeze(1).unsqueeze(1).unsqueeze(0)
+
+ attn_weights[:,:,:,35:35+576] = attn_weights[:,:,:,35:35+576]*mask.cuda()
+ else:
+ '''
+ res_v = torch.sum( attn_weights[0,:,:,35:35+576],dim=[1,2])
+ res_t = torch.sum( attn_weights[0,:,:,35+576:],dim=[1,2])
+ res_s = torch.sum( attn_weights[0,:,:,:35],dim=[1,2])
+ res = res_v/(res_t+res_s)
+ #import pdb; pdb.set_trace()
+
+ mask = res>0.1
+ mask = torch.unsqueeze(mask,dim=1)
+ mask =mask.repeat(1,620-35)
+ # torch.save(mask, 'temp/'+str(self.layer_idx)+'.pth')
+ '''
+ # mask = torch.load('temp/'+str(self.layer_idx)+'.pth')
+ mask = self.mask.unsqueeze(1).unsqueeze(1)
+ attn_weights[0,:,:1,35:35+576] = attn_weights[0,:,:1,35:35+576]*mask.cuda()
+ global count_drop_head
+ count_drop_head+= int(len(mask[mask>0])/mask.shape[1])
+ if self.layer_idx==39:
+ # print(count_drop_head/40)
+ count_drop_head=0
+ #print(self.layer_idx, res)
+ # sorted_tensor, indices = torch.sort(res)
+ # print(self.layer_idx, "sorted", sorted_tensor)
+ # print(attn_weights[:,:,35:35+576,35:35+576].sum())
+ attn_output = torch.matmul(attn_weights, value_states)
+ end = time.time()
+ #print(end-start)
+
+ if attn_output.size() != (bsz, self.num_heads, q_len, self.head_dim):
+ raise ValueError(
+ f"`attn_output` should be of size {(bsz, self.num_heads, q_len, self.head_dim)}, but is"
+ f" {attn_output.size()}"
+ )
+
+ attn_output = attn_output.transpose(1, 2).contiguous()
+
+ attn_output = attn_output.reshape(bsz, q_len, self.hidden_size)
+
+ if self.config.pretraining_tp > 1:
+ attn_output = attn_output.split(self.hidden_size // self.config.pretraining_tp, dim=2)
+ o_proj_slices = self.o_proj.weight.split(self.hidden_size // self.config.pretraining_tp, dim=1)
+ attn_output = sum([F.linear(attn_output[i], o_proj_slices[i]) for i in range(self.config.pretraining_tp)])
+ else:
+ attn_output = self.o_proj(attn_output)
+
+ if not output_attentions:
+ attn_weights = None
+
+ return attn_output, attn_weights, past_key_value
+
+
class LlamaFlashAttention2(LlamaAttention):
    """
    Llama flash attention module. This module inherits from `LlamaAttention` as the weights of the module stays
    untouched. The only required change would be on the forward pass where it needs to correctly call the public API of
    flash attention and deal with padding tokens in case the input contains any of them.

    NOTE(review): the eager `LlamaAttention.forward` in this file scales selected
    attention columns (the image-token span) by `self.mask`; this flash path performs
    no such masking, so that experiment is silently skipped whenever flash attention
    is selected — confirm this is intended.
    """

    def __init__(self, *args, **kwargs):
        super().__init__(*args, **kwargs)

        # TODO: Should be removed once Flash Attention for RoCm is bumped to 2.1.
        # flash_attn<2.1 generates top-left aligned causal mask, while what is needed here is bottom-right alignement, that was made default for flash_attn>=2.1. This attribute is used to handle this difference. Reference: https://github.com/Dao-AILab/flash-attention/releases/tag/v2.1.0.
        # Beware that with flash_attn<2.1, using q_seqlen != k_seqlen (except for the case q_seqlen == 1) produces a wrong mask (top-left).
        self._flash_attn_uses_top_left_mask = not is_flash_attn_greater_or_equal_2_10()

    def forward(
        self,
        hidden_states: torch.Tensor,
        attention_mask: Optional[torch.LongTensor] = None,
        position_ids: Optional[torch.LongTensor] = None,
        past_key_value: Optional[Cache] = None,
        output_attentions: bool = False,
        use_cache: bool = False,
        **kwargs,
    ) -> Tuple[torch.Tensor, Optional[torch.Tensor], Optional[Tuple[torch.Tensor]]]:
        # LlamaFlashAttention2 attention does not support output_attentions
        if "padding_mask" in kwargs:
            warnings.warn(
                "Passing `padding_mask` is deprecated and will be removed in v4.37. Please make sure use `attention_mask` instead.`"
            )

            # overwrite attention_mask with padding_mask
            attention_mask = kwargs.pop("padding_mask")

        output_attentions = False

        bsz, q_len, _ = hidden_states.size()

        # Project the hidden states into per-head query/key/value tensors.
        query_states = self.q_proj(hidden_states)
        key_states = self.k_proj(hidden_states)
        value_states = self.v_proj(hidden_states)

        # Flash attention requires the input to have the shape
        # batch_size x seq_length x head_dim x hidden_dim
        # therefore we just need to keep the original shape
        query_states = query_states.view(bsz, q_len, self.num_heads, self.head_dim).transpose(1, 2)
        key_states = key_states.view(bsz, q_len, self.num_key_value_heads, self.head_dim).transpose(1, 2)
        value_states = value_states.view(bsz, q_len, self.num_key_value_heads, self.head_dim).transpose(1, 2)

        # Total key/value length, including any cached prefix for this layer.
        kv_seq_len = key_states.shape[-2]
        if past_key_value is not None:
            kv_seq_len += past_key_value.get_usable_length(kv_seq_len, self.layer_idx)
        # Rotary position embeddings are applied to q/k before caching.
        cos, sin = self.rotary_emb(value_states, seq_len=kv_seq_len)
        query_states, key_states = apply_rotary_pos_emb(query_states, key_states, cos, sin, position_ids)

        if past_key_value is not None:
            cache_kwargs = {"sin": sin, "cos": cos}  # Specific to RoPE models
            key_states, value_states = past_key_value.update(key_states, value_states, self.layer_idx, cache_kwargs)

        # TODO: These transpose are quite inefficient but Flash Attention requires the layout [batch_size, sequence_length, num_heads, head_dim]. We would need to refactor the KV cache
        # to be able to avoid many of these transpose/reshape/view.
        query_states = query_states.transpose(1, 2)
        key_states = key_states.transpose(1, 2)
        value_states = value_states.transpose(1, 2)

        dropout_rate = self.attention_dropout if self.training else 0.0

        # In PEFT, usually we cast the layer norms in float32 for training stability reasons
        # therefore the input hidden states gets silently casted in float32. Hence, we need
        # cast them back in the correct dtype just to be sure everything works as expected.
        # This might slowdown training & inference so it is recommended to not cast the LayerNorms
        # in fp32. (LlamaRMSNorm handles it correctly)

        input_dtype = query_states.dtype
        if input_dtype == torch.float32:
            # Handle the case where the model is quantized
            if hasattr(self.config, "_pre_quantization_dtype"):
                target_dtype = self.config._pre_quantization_dtype
            else:
                target_dtype = self.q_proj.weight.dtype

            logger.warning_once(
                f"The input hidden states seems to be silently casted in float32, this might be related to"
                f" the fact you have upcasted embedding or layer norm layers in float32. We will cast back the input in"
                f" {target_dtype}."
            )

            query_states = query_states.to(target_dtype)
            key_states = key_states.to(target_dtype)
            value_states = value_states.to(target_dtype)

        attn_output = self._flash_attention_forward(
            query_states, key_states, value_states, attention_mask, q_len, dropout=dropout_rate
        )

        # Merge heads back into the hidden dimension and project out.
        attn_output = attn_output.reshape(bsz, q_len, self.hidden_size).contiguous()
        attn_output = self.o_proj(attn_output)

        if not output_attentions:
            attn_weights = None

        return attn_output, attn_weights, past_key_value

    def _flash_attention_forward(
        self, query_states, key_states, value_states, attention_mask, query_length, dropout=0.0, softmax_scale=None
    ):
        """
        Calls the forward method of Flash Attention - if the input hidden states contain at least one padding token
        first unpad the input, then computes the attention scores and pad the final attention scores.

        Args:
            query_states (`torch.Tensor`):
                Input query states to be passed to Flash Attention API
            key_states (`torch.Tensor`):
                Input key states to be passed to Flash Attention API
            value_states (`torch.Tensor`):
                Input value states to be passed to Flash Attention API
            attention_mask (`torch.Tensor`):
                The padding mask - corresponds to a tensor of size `(batch_size, seq_len)` where 0 stands for the
                position of padding tokens and 1 for the position of non-padding tokens.
            dropout (`int`, *optional*):
                Attention dropout
            softmax_scale (`float`, *optional*):
                The scaling of QK^T before applying softmax. Default to 1 / sqrt(head_dim)
        """
        if not self._flash_attn_uses_top_left_mask:
            causal = self.is_causal
        else:
            # TODO: Remove the `query_length != 1` check once Flash Attention for RoCm is bumped to 2.1. For details, please see the comment in LlamaFlashAttention2 __init__.
            causal = self.is_causal and query_length != 1

        # Contains at least one padding token in the sequence
        if attention_mask is not None:
            batch_size = query_states.shape[0]
            query_states, key_states, value_states, indices_q, cu_seq_lens, max_seq_lens = self._upad_input(
                query_states, key_states, value_states, attention_mask, query_length
            )

            cu_seqlens_q, cu_seqlens_k = cu_seq_lens
            max_seqlen_in_batch_q, max_seqlen_in_batch_k = max_seq_lens

            # Variable-length kernel: operates on the flattened, unpadded tokens.
            attn_output_unpad = flash_attn_varlen_func(
                query_states,
                key_states,
                value_states,
                cu_seqlens_q=cu_seqlens_q,
                cu_seqlens_k=cu_seqlens_k,
                max_seqlen_q=max_seqlen_in_batch_q,
                max_seqlen_k=max_seqlen_in_batch_k,
                dropout_p=dropout,
                softmax_scale=softmax_scale,
                causal=causal,
            )

            # Re-insert the padding positions so the caller sees a dense tensor.
            attn_output = pad_input(attn_output_unpad, indices_q, batch_size, query_length)
        else:
            attn_output = flash_attn_func(
                query_states, key_states, value_states, dropout, softmax_scale=softmax_scale, causal=causal
            )

        return attn_output

    def _upad_input(self, query_layer, key_layer, value_layer, attention_mask, query_length):
        """
        Strip padding tokens from q/k/v according to `attention_mask` so the varlen
        flash-attn kernel can skip them; returns the flattened layers plus cumulative
        sequence lengths and per-batch max lengths for queries and keys.
        """
        indices_k, cu_seqlens_k, max_seqlen_in_batch_k = _get_unpad_data(attention_mask)
        batch_size, kv_seq_len, num_key_value_heads, head_dim = key_layer.shape

        key_layer = index_first_axis(
            key_layer.reshape(batch_size * kv_seq_len, num_key_value_heads, head_dim), indices_k
        )
        value_layer = index_first_axis(
            value_layer.reshape(batch_size * kv_seq_len, num_key_value_heads, head_dim), indices_k
        )
        if query_length == kv_seq_len:
            # Prefill: queries share the key layout exactly.
            query_layer = index_first_axis(
                query_layer.reshape(batch_size * kv_seq_len, self.num_heads, head_dim), indices_k
            )
            cu_seqlens_q = cu_seqlens_k
            max_seqlen_in_batch_q = max_seqlen_in_batch_k
            indices_q = indices_k
        elif query_length == 1:
            # Single-token decode: one query per batch element.
            max_seqlen_in_batch_q = 1
            cu_seqlens_q = torch.arange(
                batch_size + 1, dtype=torch.int32, device=query_layer.device
            )  # There is a memcpy here, that is very bad.
            indices_q = cu_seqlens_q[:-1]
            query_layer = query_layer.squeeze(1)
        else:
            # The -q_len: slice assumes left padding.
            attention_mask = attention_mask[:, -query_length:]
            query_layer, indices_q, cu_seqlens_q, max_seqlen_in_batch_q = unpad_input(query_layer, attention_mask)

        return (
            query_layer,
            key_layer,
            value_layer,
            indices_q,
            (cu_seqlens_q, cu_seqlens_k),
            (max_seqlen_in_batch_q, max_seqlen_in_batch_k),
        )
+
+
class LlamaSdpaAttention(LlamaAttention):
    """
    Llama attention module using torch.nn.functional.scaled_dot_product_attention. This module inherits from
    `LlamaAttention` as the weights of the module stays untouched. The only changes are on the forward pass to adapt to
    SDPA API.
    """

    # Adapted from LlamaAttention.forward
    def forward(
        self,
        hidden_states: torch.Tensor,
        attention_mask: Optional[torch.Tensor] = None,
        position_ids: Optional[torch.LongTensor] = None,
        past_key_value: Optional[Cache] = None,
        output_attentions: bool = False,
        use_cache: bool = False,
    ) -> Tuple[torch.Tensor, Optional[torch.Tensor], Optional[Tuple[torch.Tensor]]]:
        # SDPA cannot return attention probabilities, so defer to the eager
        # implementation whenever the caller asks for them.
        if output_attentions:
            # TODO: Improve this warning with e.g. `model.config.attn_implementation = "manual"` once this is implemented.
            logger.warning_once(
                "LlamaModel is using LlamaSdpaAttention, but `torch.nn.functional.scaled_dot_product_attention` does not support `output_attentions=True`. Falling back to the manual attention implementation, "
                'but specifying the manual implementation will be required from Transformers version v5.0.0 onwards. This warning can be removed using the argument `attn_implementation="eager"` when loading the model.'
            )
            return super().forward(
                hidden_states=hidden_states,
                attention_mask=attention_mask,
                position_ids=position_ids,
                past_key_value=past_key_value,
                output_attentions=output_attentions,
                use_cache=use_cache,
            )

        bsz, q_len, _ = hidden_states.size()

        # Project and split into per-head tensors of shape (bsz, heads, q_len, head_dim).
        q = self.q_proj(hidden_states).view(bsz, q_len, self.num_heads, self.head_dim).transpose(1, 2)
        k = self.k_proj(hidden_states).view(bsz, q_len, self.num_key_value_heads, self.head_dim).transpose(1, 2)
        v = self.v_proj(hidden_states).view(bsz, q_len, self.num_key_value_heads, self.head_dim).transpose(1, 2)

        # Key/value length includes any cached prefix for this layer.
        kv_seq_len = k.shape[-2]
        if past_key_value is not None:
            kv_seq_len += past_key_value.get_usable_length(kv_seq_len, self.layer_idx)

        # Rotary embeddings are applied before the cache update.
        cos, sin = self.rotary_emb(v, seq_len=kv_seq_len)
        q, k = apply_rotary_pos_emb(q, k, cos, sin, position_ids)

        if past_key_value is not None:
            # sin/cos are cache kwargs specific to RoPE models.
            k, v = past_key_value.update(k, v, self.layer_idx, {"sin": sin, "cos": cos})

        # Expand grouped key/value heads to the full query-head count.
        k = repeat_kv(k, self.num_key_value_groups)
        v = repeat_kv(v, self.num_key_value_groups)

        if attention_mask is not None and attention_mask.size() != (bsz, 1, q_len, kv_seq_len):
            raise ValueError(
                f"Attention mask should be of size {(bsz, 1, q_len, kv_seq_len)}, but is {attention_mask.size()}"
            )

        # SDPA with memory-efficient backend is currently (torch==2.1.2) bugged with non-contiguous inputs with custom attn_mask,
        # Reference: https://github.com/pytorch/pytorch/issues/112577.
        if attention_mask is not None and q.device.type == "cuda":
            q = q.contiguous()
            k = k.contiguous()
            v = v.contiguous()

        attn_output = torch.nn.functional.scaled_dot_product_attention(
            q,
            k,
            v,
            attn_mask=attention_mask,
            dropout_p=self.attention_dropout if self.training else 0.0,
            # The q_len > 1 is necessary to match with AttentionMaskConverter.to_causal_4d that does not create a causal mask in case q_len == 1.
            is_causal=self.is_causal and attention_mask is None and q_len > 1,
        )

        # Merge heads back into the hidden dimension and apply the output projection.
        attn_output = attn_output.transpose(1, 2).contiguous().reshape(bsz, q_len, self.hidden_size)
        return self.o_proj(attn_output), None, past_key_value
+
+
# Dispatch table from `config._attn_implementation` to the attention class used
# by each decoder layer. NOTE(review): LlamaDecoderLayer.__init__ below forces
# the implementation to "eager", so the other two entries are effectively
# unreachable in this file.
LLAMA_ATTENTION_CLASSES = {
    "eager": LlamaAttention,
    "flash_attention_2": LlamaFlashAttention2,
    "sdpa": LlamaSdpaAttention,
}
+
+
class LlamaDecoderLayer(nn.Module):
    # One pre-norm transformer block: self-attention then MLP, each wrapped in a
    # residual connection.
    def __init__(self, config: LlamaConfig, layer_idx: int):
        super().__init__()
        self.hidden_size = config.hidden_size
        # NOTE(review): forces the eager attention class for every layer — the
        # custom per-head masking lives in LlamaAttention.forward and would be
        # skipped by the flash/sdpa paths. Side effect: this mutates the shared
        # config object, so any code reading config._attn_implementation after
        # layer construction (e.g. LlamaModel.__init__) also sees "eager".
        config._attn_implementation="eager"
        self.self_attn = LLAMA_ATTENTION_CLASSES[config._attn_implementation](config=config, layer_idx=layer_idx)

        self.mlp = LlamaMLP(config)
        self.input_layernorm = LlamaRMSNorm(config.hidden_size, eps=config.rms_norm_eps)
        self.post_attention_layernorm = LlamaRMSNorm(config.hidden_size, eps=config.rms_norm_eps)
    def forward(
        self,
        hidden_states: torch.Tensor,
        attention_mask: Optional[torch.Tensor] = None,
        position_ids: Optional[torch.LongTensor] = None,
        past_key_value: Optional[Tuple[torch.Tensor]] = None,
        output_attentions: Optional[bool] = False,
        use_cache: Optional[bool] = False,
        **kwargs,
    ) -> Tuple[torch.FloatTensor, Optional[Tuple[torch.FloatTensor, torch.FloatTensor]]]:
        """
        Args:
            hidden_states (`torch.FloatTensor`): input to the layer of shape `(batch, seq_len, embed_dim)`
            attention_mask (`torch.FloatTensor`, *optional*):
                attention mask of size `(batch_size, sequence_length)` if flash attention is used or `(batch_size, 1,
                query_sequence_length, key_sequence_length)` if default attention is used.
            output_attentions (`bool`, *optional*):
                Whether or not to return the attentions tensors of all attention layers. See `attentions` under
                returned tensors for more detail.
            use_cache (`bool`, *optional*):
                If set to `True`, `past_key_values` key value states are returned and can be used to speed up decoding
                (see `past_key_values`).
            past_key_value (`Tuple(torch.FloatTensor)`, *optional*): cached past key and value projection states
        """
        if "padding_mask" in kwargs:
            warnings.warn(
                "Passing `padding_mask` is deprecated and will be removed in v4.37. Please make sure use `attention_mask` instead.`"
            )
        residual = hidden_states

        hidden_states = self.input_layernorm(hidden_states)
        # Disabled experiment (dead string literal, kept for reference): masks
        # self-attention among positions 0..575 except the diagonal — appears to
        # target the 576 image-patch tokens; confirm before re-enabling.
        '''
        attention_mask = attention_mask.clone()
        if attention_mask.shape[2]!=1:
            attention_mask[:,:,:576,:576]= -65504.0 # -1000000000000.0 -65504.0

        for i in range(576):
            if attention_mask.shape[2]!=1:
                attention_mask[:,:,i,i]=0.0

        # import pdb; pdb.set_trace()
        attention_mask=attention_mask.clone()
        '''
        # Self Attention
        hidden_states, self_attn_weights, present_key_value = self.self_attn(
            hidden_states=hidden_states,
            attention_mask=attention_mask,
            position_ids=position_ids,
            past_key_value=past_key_value,
            output_attentions=output_attentions,
            use_cache=use_cache,
            **kwargs,
        )
        hidden_states = residual + hidden_states

        # Fully Connected
        residual = hidden_states
        hidden_states = self.post_attention_layernorm(hidden_states)
        hidden_states = self.mlp(hidden_states)
        hidden_states = residual + hidden_states

        outputs = (hidden_states,)

        if output_attentions:
            outputs += (self_attn_weights,)

        if use_cache:
            outputs += (present_key_value,)

        return outputs
+
+
# Shared class-level docstring fragment, injected via @add_start_docstrings below.
LLAMA_START_DOCSTRING = r"""
    This model inherits from [`PreTrainedModel`]. Check the superclass documentation for the generic methods the
    library implements for all its model (such as downloading or saving, resizing the input embeddings, pruning heads
    etc.)

    This model is also a PyTorch [torch.nn.Module](https://pytorch.org/docs/stable/nn.html#torch.nn.Module) subclass.
    Use it as a regular PyTorch Module and refer to the PyTorch documentation for all matter related to general usage
    and behavior.

    Parameters:
        config ([`LlamaConfig`]):
            Model configuration class with all the parameters of the model. Initializing with a config file does not
            load the weights associated with the model, only the configuration. Check out the
            [`~PreTrainedModel.from_pretrained`] method to load the model weights.
"""
+
+
@add_start_docstrings(
    "The bare LLaMA Model outputting raw hidden-states without any specific head on top.",
    LLAMA_START_DOCSTRING,
)
class LlamaPreTrainedModel(PreTrainedModel):
    """Base class wiring `LlamaConfig`, weight initialization, and feature flags into `PreTrainedModel`."""

    config_class = LlamaConfig
    base_model_prefix = "model"
    supports_gradient_checkpointing = True
    _no_split_modules = ["LlamaDecoderLayer"]
    _skip_keys_device_placement = "past_key_values"
    _supports_flash_attn_2 = True
    _supports_sdpa = True
    _supports_cache_class = True

    def _init_weights(self, module):
        # Weights ~ Normal(0, initializer_range); biases and the padding-token
        # embedding row are zeroed.
        std = self.config.initializer_range
        if isinstance(module, nn.Embedding):
            module.weight.data.normal_(mean=0.0, std=std)
            if module.padding_idx is not None:
                module.weight.data[module.padding_idx].zero_()
        elif isinstance(module, nn.Linear):
            module.weight.data.normal_(mean=0.0, std=std)
            if module.bias is not None:
                module.bias.data.zero_()
+
# forward() argument documentation, injected via @add_start_docstrings_to_model_forward.
LLAMA_INPUTS_DOCSTRING = r"""
    Args:
        input_ids (`torch.LongTensor` of shape `(batch_size, sequence_length)`):
            Indices of input sequence tokens in the vocabulary. Padding will be ignored by default should you provide
            it.

            Indices can be obtained using [`AutoTokenizer`]. See [`PreTrainedTokenizer.encode`] and
            [`PreTrainedTokenizer.__call__`] for details.

            [What are input IDs?](../glossary#input-ids)
        attention_mask (`torch.Tensor` of shape `(batch_size, sequence_length)`, *optional*):
            Mask to avoid performing attention on padding token indices. Mask values selected in `[0, 1]`:

            - 1 for tokens that are **not masked**,
            - 0 for tokens that are **masked**.

            [What are attention masks?](../glossary#attention-mask)

            Indices can be obtained using [`AutoTokenizer`]. See [`PreTrainedTokenizer.encode`] and
            [`PreTrainedTokenizer.__call__`] for details.

            If `past_key_values` is used, optionally only the last `input_ids` have to be input (see
            `past_key_values`).

            If you want to change padding behavior, you should read [`modeling_opt._prepare_decoder_attention_mask`]
            and modify to your needs. See diagram 1 in [the paper](https://arxiv.org/abs/1910.13461) for more
            information on the default strategy.

            - 1 indicates the head is **not masked**,
            - 0 indicates the head is **masked**.
        position_ids (`torch.LongTensor` of shape `(batch_size, sequence_length)`, *optional*):
            Indices of positions of each input sequence tokens in the position embeddings. Selected in the range `[0,
            config.n_positions - 1]`.

            [What are position IDs?](../glossary#position-ids)
        past_key_values (`Cache` or `tuple(tuple(torch.FloatTensor))`, *optional*):
            Pre-computed hidden-states (key and values in the self-attention blocks and in the cross-attention
            blocks) that can be used to speed up sequential decoding. This typically consists in the `past_key_values`
            returned by the model at a previous stage of decoding, when `use_cache=True` or `config.use_cache=True`.

            Two formats are allowed:
            - a [`~cache_utils.Cache`] instance;
            - Tuple of `tuple(torch.FloatTensor)` of length `config.n_layers`, with each tuple having 2 tensors of
            shape `(batch_size, num_heads, sequence_length, embed_size_per_head)`). This is also known as the legacy
            cache format.

            The model will output the same cache format that is fed as input. If no `past_key_values` are passed, the
            legacy cache format will be returned.

            If `past_key_values` are used, the user can optionally input only the last `input_ids` (those that don't
            have their past key value states given to this model) of shape `(batch_size, 1)` instead of all `input_ids`
            of shape `(batch_size, sequence_length)`.
        inputs_embeds (`torch.FloatTensor` of shape `(batch_size, sequence_length, hidden_size)`, *optional*):
            Optionally, instead of passing `input_ids` you can choose to directly pass an embedded representation. This
            is useful if you want more control over how to convert `input_ids` indices into associated vectors than the
            model's internal embedding lookup matrix.
        use_cache (`bool`, *optional*):
            If set to `True`, `past_key_values` key value states are returned and can be used to speed up decoding (see
            `past_key_values`).
        output_attentions (`bool`, *optional*):
            Whether or not to return the attentions tensors of all attention layers. See `attentions` under returned
            tensors for more detail.
        output_hidden_states (`bool`, *optional*):
            Whether or not to return the hidden states of all layers. See `hidden_states` under returned tensors for
            more detail.
        return_dict (`bool`, *optional*):
            Whether or not to return a [`~utils.ModelOutput`] instead of a plain tuple.
"""
+
+
@add_start_docstrings(
    "The bare LLaMA Model outputting raw hidden-states without any specific head on top.",
    LLAMA_START_DOCSTRING,
)
class LlamaModel(LlamaPreTrainedModel):
    """
    Transformer decoder consisting of *config.num_hidden_layers* layers. Each layer is a [`LlamaDecoderLayer`]

    Args:
        config: LlamaConfig
    """

    def __init__(self, config: LlamaConfig):
        super().__init__(config)
        self.padding_idx = config.pad_token_id
        self.vocab_size = config.vocab_size

        self.embed_tokens = nn.Embedding(config.vocab_size, config.hidden_size, self.padding_idx)
        self.layers = nn.ModuleList(
            [LlamaDecoderLayer(config, layer_idx) for layer_idx in range(config.num_hidden_layers)]
        )
        # NOTE(review): LlamaDecoderLayer.__init__ overwrites
        # config._attn_implementation with "eager" while the layers above are
        # built, so both flags below evaluate against "eager" and end up False;
        # forward() therefore always takes the 4D causal-mask branch.
        self._use_sdpa = config._attn_implementation == "sdpa"
        self._use_flash_attention_2 = config._attn_implementation == "flash_attention_2"
        self.norm = LlamaRMSNorm(config.hidden_size, eps=config.rms_norm_eps)

        self.gradient_checkpointing = False
        # Initialize weights and apply final processing
        self.post_init()

    def get_input_embeddings(self):
        return self.embed_tokens

    def set_input_embeddings(self, value):
        self.embed_tokens = value

    @add_start_docstrings_to_model_forward(LLAMA_INPUTS_DOCSTRING)
    def forward(
        self,
        input_ids: torch.LongTensor = None,
        attention_mask: Optional[torch.Tensor] = None,
        position_ids: Optional[torch.LongTensor] = None,
        past_key_values: Optional[List[torch.FloatTensor]] = None,
        inputs_embeds: Optional[torch.FloatTensor] = None,
        use_cache: Optional[bool] = None,
        output_attentions: Optional[bool] = None,
        output_hidden_states: Optional[bool] = None,
        return_dict: Optional[bool] = None,
    ) -> Union[Tuple, BaseModelOutputWithPast]:
        """
        Embed `input_ids` (or take `inputs_embeds` directly), build the attention
        mask appropriate for the configured attention implementation, run every
        decoder layer, and apply the final RMSNorm.

        Returns a `BaseModelOutputWithPast` (or an equivalent tuple when
        `return_dict=False`) carrying the last hidden state, the updated KV
        cache, and optionally all hidden states / attentions.
        """
        output_attentions = output_attentions if output_attentions is not None else self.config.output_attentions
        output_hidden_states = (
            output_hidden_states if output_hidden_states is not None else self.config.output_hidden_states
        )
        use_cache = use_cache if use_cache is not None else self.config.use_cache

        return_dict = return_dict if return_dict is not None else self.config.use_return_dict

        # retrieve input_ids and inputs_embeds
        if input_ids is not None and inputs_embeds is not None:
            raise ValueError("You cannot specify both input_ids and inputs_embeds at the same time")
        elif input_ids is not None:
            batch_size, seq_length = input_ids.shape[:2]
        elif inputs_embeds is not None:
            batch_size, seq_length = inputs_embeds.shape[:2]
        else:
            raise ValueError("You have to specify either input_ids or inputs_embeds")

        if self.gradient_checkpointing and self.training:
            if use_cache:
                logger.warning_once(
                    "`use_cache=True` is incompatible with gradient checkpointing. Setting `use_cache=False`..."
                )
                use_cache = False

        # Convert a legacy tuple cache into a Cache object if needed and find how
        # many positions are already cached.
        past_key_values_length = 0
        if use_cache:
            use_legacy_cache = not isinstance(past_key_values, Cache)
            if use_legacy_cache:
                past_key_values = DynamicCache.from_legacy_cache(past_key_values)
            past_key_values_length = past_key_values.get_usable_length(seq_length)

        if position_ids is None:
            device = input_ids.device if input_ids is not None else inputs_embeds.device
            position_ids = torch.arange(
                past_key_values_length, seq_length + past_key_values_length, dtype=torch.long, device=device
            )
            position_ids = position_ids.unsqueeze(0)

        if inputs_embeds is None:
            inputs_embeds = self.embed_tokens(input_ids)

        if self._use_flash_attention_2:
            # 2d mask is passed through the layers
            attention_mask = attention_mask if (attention_mask is not None and 0 in attention_mask) else None
        elif self._use_sdpa and not output_attentions:
            # output_attentions=True can not be supported when using SDPA, and we fall back on
            # the manual implementation that requires a 4D causal mask in all cases.
            attention_mask = _prepare_4d_causal_attention_mask_for_sdpa(
                attention_mask,
                (batch_size, seq_length),
                inputs_embeds,
                past_key_values_length,
            )
        else:
            # 4d mask is passed through the layers
            attention_mask = _prepare_4d_causal_attention_mask(
                attention_mask, (batch_size, seq_length), inputs_embeds, past_key_values_length
            )

        # embed positions
        hidden_states = inputs_embeds

        # decoder layers
        all_hidden_states = () if output_hidden_states else None
        all_self_attns = () if output_attentions else None
        next_decoder_cache = None

        for decoder_layer in self.layers:
            if output_hidden_states:
                all_hidden_states += (hidden_states,)

            if self.gradient_checkpointing and self.training:
                layer_outputs = self._gradient_checkpointing_func(
                    decoder_layer.__call__,
                    hidden_states,
                    attention_mask,
                    position_ids,
                    past_key_values,
                    output_attentions,
                    use_cache,
                )
            else:
                layer_outputs = decoder_layer(
                    hidden_states,
                    attention_mask=attention_mask,
                    position_ids=position_ids,
                    past_key_value=past_key_values,
                    output_attentions=output_attentions,
                    use_cache=use_cache,
                )

            hidden_states = layer_outputs[0]

            if use_cache:
                next_decoder_cache = layer_outputs[2 if output_attentions else 1]

            if output_attentions:
                all_self_attns += (layer_outputs[1],)

        hidden_states = self.norm(hidden_states)

        # add hidden states from the last decoder layer
        if output_hidden_states:
            all_hidden_states += (hidden_states,)

        next_cache = None
        if use_cache:
            next_cache = next_decoder_cache.to_legacy_cache() if use_legacy_cache else next_decoder_cache
        if not return_dict:
            return tuple(v for v in [hidden_states, next_cache, all_hidden_states, all_self_attns] if v is not None)
        return BaseModelOutputWithPast(
            last_hidden_state=hidden_states,
            past_key_values=next_cache,
            hidden_states=all_hidden_states,
            attentions=all_self_attns,
        )
+
+
+class LlamaForCausalLM(LlamaPreTrainedModel):
+ _tied_weights_keys = ["lm_head.weight"]
+
    def __init__(self, config):
        super().__init__(config)
        self.model = LlamaModel(config)
        self.vocab_size = config.vocab_size
        # Projection from final hidden states to vocabulary logits; listed in
        # _tied_weights_keys, so weight tying (if configured) is handled by post_init.
        self.lm_head = nn.Linear(config.hidden_size, config.vocab_size, bias=False)

        # Initialize weights and apply final processing
        self.post_init()
+
    # Standard HF accessors used by embedding-resize and weight-tying utilities.
    def get_input_embeddings(self):
        return self.model.embed_tokens

    def set_input_embeddings(self, value):
        self.model.embed_tokens = value

    def get_output_embeddings(self):
        return self.lm_head

    def set_output_embeddings(self, new_embeddings):
        self.lm_head = new_embeddings

    def set_decoder(self, decoder):
        # Replaces the entire LlamaModel backbone.
        self.model = decoder

    def get_decoder(self):
        return self.model
+
    @add_start_docstrings_to_model_forward(LLAMA_INPUTS_DOCSTRING)
    @replace_return_docstrings(output_type=CausalLMOutputWithPast, config_class=_CONFIG_FOR_DOC)
    def forward(
        self,
        input_ids: torch.LongTensor = None,
        attention_mask: Optional[torch.Tensor] = None,
        position_ids: Optional[torch.LongTensor] = None,
        past_key_values: Optional[List[torch.FloatTensor]] = None,
        inputs_embeds: Optional[torch.FloatTensor] = None,
        labels: Optional[torch.LongTensor] = None,
        use_cache: Optional[bool] = None,
        output_attentions: Optional[bool] = None,
        output_hidden_states: Optional[bool] = None,
        return_dict: Optional[bool] = None,
    ) -> Union[Tuple, CausalLMOutputWithPast]:
        r"""
        Args:
            labels (`torch.LongTensor` of shape `(batch_size, sequence_length)`, *optional*):
                Labels for computing the masked language modeling loss. Indices should either be in `[0, ...,
                config.vocab_size]` or -100 (see `input_ids` docstring). Tokens with indices set to `-100` are ignored
                (masked), the loss is only computed for the tokens with labels in `[0, ..., config.vocab_size]`.

        Returns:

        Example:

        ```python
        >>> from transformers import AutoTokenizer, LlamaForCausalLM

        >>> model = LlamaForCausalLM.from_pretrained(PATH_TO_CONVERTED_WEIGHTS)
        >>> tokenizer = AutoTokenizer.from_pretrained(PATH_TO_CONVERTED_TOKENIZER)

        >>> prompt = "Hey, are you conscious? Can you talk to me?"
        >>> inputs = tokenizer(prompt, return_tensors="pt")

        >>> # Generate
        >>> generate_ids = model.generate(inputs.input_ids, max_length=30)
        >>> tokenizer.batch_decode(generate_ids, skip_special_tokens=True, clean_up_tokenization_spaces=False)[0]
        "Hey, are you conscious? Can you talk to me?\nI'm not conscious, but I can talk to you."
        ```"""
        output_attentions = output_attentions if output_attentions is not None else self.config.output_attentions
        output_hidden_states = (
            output_hidden_states if output_hidden_states is not None else self.config.output_hidden_states
        )
        return_dict = return_dict if return_dict is not None else self.config.use_return_dict

        # decoder outputs consists of (dec_features, layer_state, dec_hidden, dec_attn)
        outputs = self.model(
            input_ids=input_ids,
            attention_mask=attention_mask,
            position_ids=position_ids,
            past_key_values=past_key_values,
            inputs_embeds=inputs_embeds,
            use_cache=use_cache,
            output_attentions=output_attentions,
            output_hidden_states=output_hidden_states,
            return_dict=return_dict,
        )

        hidden_states = outputs[0]
        if self.config.pretraining_tp > 1:
            # Tensor-parallel-style sliced lm_head: split the weight along the
            # vocab dimension, project each slice, then concatenate the logits.
            lm_head_slices = self.lm_head.weight.split(self.vocab_size // self.config.pretraining_tp, dim=0)
            logits = [F.linear(hidden_states, lm_head_slices[i]) for i in range(self.config.pretraining_tp)]
            logits = torch.cat(logits, dim=-1)
        else:
            logits = self.lm_head(hidden_states)
        # Upcast to fp32 for numerically stable loss computation / sampling.
        logits = logits.float()

        loss = None
        if labels is not None:
            # Shift so that tokens < n predict n
            shift_logits = logits[..., :-1, :].contiguous()
            shift_labels = labels[..., 1:].contiguous()
            # Flatten the tokens
            loss_fct = CrossEntropyLoss()
            shift_logits = shift_logits.view(-1, self.config.vocab_size)
            shift_labels = shift_labels.view(-1)
            # Enable model parallelism
            shift_labels = shift_labels.to(shift_logits.device)
            loss = loss_fct(shift_logits, shift_labels)

        if not return_dict:
            output = (logits,) + outputs[1:]
            return (loss,) + output if loss is not None else output

        return CausalLMOutputWithPast(
            loss=loss,
            logits=logits,
            past_key_values=outputs.past_key_values,
            hidden_states=outputs.hidden_states,
            attentions=outputs.attentions,
        )
+
+ # Build the kwargs for one generation step: drop tokens already covered by the
+ # KV cache, derive position_ids from the attention mask, and route inputs_embeds
+ # only on the first (cache-less) step.
+ def prepare_inputs_for_generation(
+ self, input_ids, past_key_values=None, attention_mask=None, inputs_embeds=None, **kwargs
+ ):
+ if past_key_values is not None:
+ # New-style Cache object vs. legacy tuple-of-tuples cache.
+ if isinstance(past_key_values, Cache):
+ cache_length = past_key_values.get_seq_length()
+ past_length = past_key_values.seen_tokens
+ max_cache_length = past_key_values.get_max_length()
+ else:
+ # Legacy cache: length is the key tensor's sequence dim; no size cap.
+ cache_length = past_length = past_key_values[0][0].shape[2]
+ max_cache_length = None
+
+ # Keep only the unprocessed tokens:
+ # 1 - If the length of the attention_mask exceeds the length of input_ids, then we are in a setting where
+ # some of the inputs are exclusivelly passed as part of the cache (e.g. when passing input_embeds as
+ # input)
+ if attention_mask is not None and attention_mask.shape[1] > input_ids.shape[1]:
+ input_ids = input_ids[:, -(attention_mask.shape[1] - past_length) :]
+ # 2 - If the past_length is smaller than input_ids', then input_ids holds all input tokens. We can discard
+ # input_ids based on the past_length.
+ elif past_length < input_ids.shape[1]:
+ input_ids = input_ids[:, past_length:]
+ # 3 - Otherwise (past_length >= input_ids.shape[1]), let's assume input_ids only has unprocessed tokens.
+
+ # If we are about to go beyond the maximum cache length, we need to crop the input attention mask.
+ if (
+ max_cache_length is not None
+ and attention_mask is not None
+ and cache_length + input_ids.shape[1] > max_cache_length
+ ):
+ attention_mask = attention_mask[:, -max_cache_length:]
+
+ position_ids = kwargs.get("position_ids", None)
+ if attention_mask is not None and position_ids is None:
+ # create position_ids on the fly for batch generation
+ # (cumsum over the mask gives 0-based positions; masked slots are set to 1,
+ # then only the positions for the still-unprocessed tokens are kept).
+ position_ids = attention_mask.long().cumsum(-1) - 1
+ position_ids.masked_fill_(attention_mask == 0, 1)
+ if past_key_values:
+ position_ids = position_ids[:, -input_ids.shape[1] :]
+
+ # if `inputs_embeds` are passed, we only want to use them in the 1st generation step
+ if inputs_embeds is not None and past_key_values is None:
+ model_inputs = {"inputs_embeds": inputs_embeds}
+ else:
+ model_inputs = {"input_ids": input_ids}
+
+ model_inputs.update(
+ {
+ "position_ids": position_ids,
+ "past_key_values": past_key_values,
+ "use_cache": kwargs.get("use_cache"),
+ "attention_mask": attention_mask,
+ }
+ )
+ return model_inputs
+
+ @staticmethod
+ def _reorder_cache(past_key_values, beam_idx):
+ # Reorder every layer's cached tensors along the batch axis so the KV cache
+ # follows the beams selected at this step of beam search.
+ reordered_past = ()
+ for layer_past in past_key_values:
+ reordered_past += (
+ tuple(past_state.index_select(0, beam_idx.to(past_state.device)) for past_state in layer_past),
+ )
+ return reordered_past
+
+
+@add_start_docstrings(
+ """
+ The LLaMa Model transformer with a sequence classification head on top (linear layer).
+
+ [`LlamaForSequenceClassification`] uses the last token in order to do the classification, as other causal models
+ (e.g. GPT-2) do.
+
+ Since it does classification on the last token, it requires to know the position of the last token. If a
+ `pad_token_id` is defined in the configuration, it finds the last token that is not a padding token in each row. If
+ no `pad_token_id` is defined, it simply takes the last value in each row of the batch. Since it cannot guess the
+ padding tokens when `inputs_embeds` are passed instead of `input_ids`, it does the same (take the last value in
+ each row of the batch).
+ """,
+ LLAMA_START_DOCSTRING,
+)
+class LlamaForSequenceClassification(LlamaPreTrainedModel):
+ def __init__(self, config):
+ super().__init__(config)
+ self.num_labels = config.num_labels
+ self.model = LlamaModel(config)
+ # Classification head: projects the pooled hidden state to num_labels (no bias).
+ self.score = nn.Linear(config.hidden_size, self.num_labels, bias=False)
+
+ # Initialize weights and apply final processing
+ self.post_init()
+
+ # HF accessor: expose the decoder's token-embedding module.
+ def get_input_embeddings(self):
+ return self.model.embed_tokens
+
+ # HF accessor: replace the decoder's token-embedding module.
+ def set_input_embeddings(self, value):
+ self.model.embed_tokens = value
+
+ @add_start_docstrings_to_model_forward(LLAMA_INPUTS_DOCSTRING)
+ def forward(
+ self,
+ input_ids: torch.LongTensor = None,
+ attention_mask: Optional[torch.Tensor] = None,
+ position_ids: Optional[torch.LongTensor] = None,
+ past_key_values: Optional[List[torch.FloatTensor]] = None,
+ inputs_embeds: Optional[torch.FloatTensor] = None,
+ labels: Optional[torch.LongTensor] = None,
+ use_cache: Optional[bool] = None,
+ output_attentions: Optional[bool] = None,
+ output_hidden_states: Optional[bool] = None,
+ return_dict: Optional[bool] = None,
+ ) -> Union[Tuple, SequenceClassifierOutputWithPast]:
+ r"""
+ labels (`torch.LongTensor` of shape `(batch_size,)`, *optional*):
+ Labels for computing the sequence classification/regression loss. Indices should be in `[0, ...,
+ config.num_labels - 1]`. If `config.num_labels == 1` a regression loss is computed (Mean-Square loss), If
+ `config.num_labels > 1` a classification loss is computed (Cross-Entropy).
+ """
+ return_dict = return_dict if return_dict is not None else self.config.use_return_dict
+
+ transformer_outputs = self.model(
+ input_ids,
+ attention_mask=attention_mask,
+ position_ids=position_ids,
+ past_key_values=past_key_values,
+ inputs_embeds=inputs_embeds,
+ use_cache=use_cache,
+ output_attentions=output_attentions,
+ output_hidden_states=output_hidden_states,
+ return_dict=return_dict,
+ )
+ # Per-token logits over all positions; one row per sequence is selected below.
+ hidden_states = transformer_outputs[0]
+ logits = self.score(hidden_states)
+
+ if input_ids is not None:
+ batch_size = input_ids.shape[0]
+ else:
+ batch_size = inputs_embeds.shape[0]
+
+ if self.config.pad_token_id is None and batch_size != 1:
+ raise ValueError("Cannot handle batch sizes > 1 if no padding token is defined.")
+ if self.config.pad_token_id is None:
+ sequence_lengths = -1
+ else:
+ if input_ids is not None:
+ # Index of the last non-pad token per row: first pad position minus one.
+ # NOTE(review): if a row contains no pad token, argmax returns 0 and this
+ # yields -1, i.e. the last position — appears intentional, but confirm.
+ sequence_lengths = (torch.eq(input_ids, self.config.pad_token_id).int().argmax(-1) - 1).to(
+ logits.device
+ )
+ else:
+ sequence_lengths = -1
+
+ # Pool: pick each sequence's last "real" token's logits.
+ pooled_logits = logits[torch.arange(batch_size, device=logits.device), sequence_lengths]
+
+ loss = None
+ if labels is not None:
+ # Move labels to the logits device (supports model parallelism).
+ labels = labels.to(logits.device)
+ # Infer the problem type once from num_labels and the label dtype, then
+ # cache it on the config for subsequent calls.
+ if self.config.problem_type is None:
+ if self.num_labels == 1:
+ self.config.problem_type = "regression"
+ elif self.num_labels > 1 and (labels.dtype == torch.long or labels.dtype == torch.int):
+ self.config.problem_type = "single_label_classification"
+ else:
+ self.config.problem_type = "multi_label_classification"
+
+ if self.config.problem_type == "regression":
+ loss_fct = MSELoss()
+ if self.num_labels == 1:
+ loss = loss_fct(pooled_logits.squeeze(), labels.squeeze())
+ else:
+ loss = loss_fct(pooled_logits, labels)
+ elif self.config.problem_type == "single_label_classification":
+ loss_fct = CrossEntropyLoss()
+ loss = loss_fct(pooled_logits.view(-1, self.num_labels), labels.view(-1))
+ elif self.config.problem_type == "multi_label_classification":
+ loss_fct = BCEWithLogitsLoss()
+ loss = loss_fct(pooled_logits, labels)
+ # Legacy tuple return: (loss?, pooled_logits, *remaining decoder outputs).
+ if not return_dict:
+ output = (pooled_logits,) + transformer_outputs[1:]
+ return ((loss,) + output) if loss is not None else output
+
+ return SequenceClassifierOutputWithPast(
+ loss=loss,
+ logits=pooled_logits,
+ past_key_values=transformer_outputs.past_key_values,
+ hidden_states=transformer_outputs.hidden_states,
+ attentions=transformer_outputs.attentions,
+ )
diff --git a/special_tokens_map.json b/special_tokens_map.json
new file mode 100644
index 0000000000000000000000000000000000000000..14761dcf1466dc232bd41de9c21d4c617b15755e
--- /dev/null
+++ b/special_tokens_map.json
@@ -0,0 +1,24 @@
+{
+ "bos_token": {
+ "content": "<s>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false
+ },
+ "eos_token": {
+ "content": "</s>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false
+ },
+ "pad_token": "<unk>",
+ "unk_token": {
+ "content": "",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false
+ }
+}
diff --git a/tokenizer.model b/tokenizer.model
new file mode 100644
index 0000000000000000000000000000000000000000..6c00c742ce03c627d6cd5b795984876fa49fa899
--- /dev/null
+++ b/tokenizer.model
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:9e556afd44213b6bd1be2b850ebbbd98f5481437a8021afaf58ee7fb1818d347
+size 499723
diff --git a/tokenizer_config.json b/tokenizer_config.json
new file mode 100644
index 0000000000000000000000000000000000000000..1ede259689f08e4dad55a405cb964626437a7591
--- /dev/null
+++ b/tokenizer_config.json
@@ -0,0 +1,42 @@
+{
+ "add_bos_token": true,
+ "add_eos_token": false,
+ "added_tokens_decoder": {
+ "0": {
+ "content": "<unk>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "1": {
+ "content": "<s>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "2": {
+ "content": "</s>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ }
+ },
+ "bos_token": "<s>",
+ "clean_up_tokenization_spaces": false,
+ "eos_token": "</s>",
+ "legacy": false,
+ "model_max_length": 1560,
+ "pad_token": "<unk>",
+ "padding_side": "right",
+ "sp_model_kwargs": {},
+ "spaces_between_special_tokens": false,
+ "tokenizer_class": "LlamaTokenizer",
+ "unk_token": "<unk>",
+ "use_default_system_prompt": false
+}
diff --git a/trainer_state.json b/trainer_state.json
new file mode 100644
index 0000000000000000000000000000000000000000..47e7db60750135734b745ff592970f7eaa785eea
--- /dev/null
+++ b/trainer_state.json
@@ -0,0 +1,31200 @@
+{
+ "best_metric": null,
+ "best_model_checkpoint": null,
+ "epoch": 1.0,
+ "eval_steps": 500,
+ "global_step": 5195,
+ "is_hyper_param_search": false,
+ "is_local_process_zero": true,
+ "is_world_process_zero": true,
+ "log_history": [
+ {
+ "epoch": 0.0,
+ "learning_rate": 1.282051282051282e-07,
+ "loss": 1.4715,
+ "step": 1
+ },
+ {
+ "epoch": 0.0,
+ "learning_rate": 2.564102564102564e-07,
+ "loss": 1.491,
+ "step": 2
+ },
+ {
+ "epoch": 0.0,
+ "learning_rate": 3.846153846153847e-07,
+ "loss": 1.3857,
+ "step": 3
+ },
+ {
+ "epoch": 0.0,
+ "learning_rate": 5.128205128205128e-07,
+ "loss": 1.4981,
+ "step": 4
+ },
+ {
+ "epoch": 0.0,
+ "learning_rate": 6.41025641025641e-07,
+ "loss": 1.451,
+ "step": 5
+ },
+ {
+ "epoch": 0.0,
+ "learning_rate": 7.692307692307694e-07,
+ "loss": 1.4382,
+ "step": 6
+ },
+ {
+ "epoch": 0.0,
+ "learning_rate": 8.974358974358975e-07,
+ "loss": 1.4591,
+ "step": 7
+ },
+ {
+ "epoch": 0.0,
+ "learning_rate": 1.0256410256410257e-06,
+ "loss": 1.4096,
+ "step": 8
+ },
+ {
+ "epoch": 0.0,
+ "learning_rate": 1.153846153846154e-06,
+ "loss": 1.401,
+ "step": 9
+ },
+ {
+ "epoch": 0.0,
+ "learning_rate": 1.282051282051282e-06,
+ "loss": 1.3837,
+ "step": 10
+ },
+ {
+ "epoch": 0.0,
+ "learning_rate": 1.4102564102564104e-06,
+ "loss": 1.359,
+ "step": 11
+ },
+ {
+ "epoch": 0.0,
+ "learning_rate": 1.5384615384615387e-06,
+ "loss": 1.3611,
+ "step": 12
+ },
+ {
+ "epoch": 0.0,
+ "learning_rate": 1.6666666666666667e-06,
+ "loss": 1.3102,
+ "step": 13
+ },
+ {
+ "epoch": 0.0,
+ "learning_rate": 1.794871794871795e-06,
+ "loss": 1.2844,
+ "step": 14
+ },
+ {
+ "epoch": 0.0,
+ "learning_rate": 1.9230769230769234e-06,
+ "loss": 1.2287,
+ "step": 15
+ },
+ {
+ "epoch": 0.0,
+ "learning_rate": 2.0512820512820513e-06,
+ "loss": 1.2925,
+ "step": 16
+ },
+ {
+ "epoch": 0.0,
+ "learning_rate": 2.1794871794871797e-06,
+ "loss": 1.2597,
+ "step": 17
+ },
+ {
+ "epoch": 0.0,
+ "learning_rate": 2.307692307692308e-06,
+ "loss": 1.1856,
+ "step": 18
+ },
+ {
+ "epoch": 0.0,
+ "learning_rate": 2.435897435897436e-06,
+ "loss": 1.1592,
+ "step": 19
+ },
+ {
+ "epoch": 0.0,
+ "learning_rate": 2.564102564102564e-06,
+ "loss": 1.1085,
+ "step": 20
+ },
+ {
+ "epoch": 0.0,
+ "learning_rate": 2.6923076923076923e-06,
+ "loss": 1.1725,
+ "step": 21
+ },
+ {
+ "epoch": 0.0,
+ "learning_rate": 2.8205128205128207e-06,
+ "loss": 1.2145,
+ "step": 22
+ },
+ {
+ "epoch": 0.0,
+ "learning_rate": 2.948717948717949e-06,
+ "loss": 1.2148,
+ "step": 23
+ },
+ {
+ "epoch": 0.0,
+ "learning_rate": 3.0769230769230774e-06,
+ "loss": 1.1553,
+ "step": 24
+ },
+ {
+ "epoch": 0.0,
+ "learning_rate": 3.205128205128206e-06,
+ "loss": 1.1441,
+ "step": 25
+ },
+ {
+ "epoch": 0.01,
+ "learning_rate": 3.3333333333333333e-06,
+ "loss": 1.2483,
+ "step": 26
+ },
+ {
+ "epoch": 0.01,
+ "learning_rate": 3.4615384615384617e-06,
+ "loss": 1.1238,
+ "step": 27
+ },
+ {
+ "epoch": 0.01,
+ "learning_rate": 3.58974358974359e-06,
+ "loss": 1.1185,
+ "step": 28
+ },
+ {
+ "epoch": 0.01,
+ "learning_rate": 3.7179487179487184e-06,
+ "loss": 1.0634,
+ "step": 29
+ },
+ {
+ "epoch": 0.01,
+ "learning_rate": 3.846153846153847e-06,
+ "loss": 1.084,
+ "step": 30
+ },
+ {
+ "epoch": 0.01,
+ "learning_rate": 3.974358974358974e-06,
+ "loss": 1.0823,
+ "step": 31
+ },
+ {
+ "epoch": 0.01,
+ "learning_rate": 4.102564102564103e-06,
+ "loss": 1.0636,
+ "step": 32
+ },
+ {
+ "epoch": 0.01,
+ "learning_rate": 4.230769230769231e-06,
+ "loss": 1.0135,
+ "step": 33
+ },
+ {
+ "epoch": 0.01,
+ "learning_rate": 4.358974358974359e-06,
+ "loss": 1.0225,
+ "step": 34
+ },
+ {
+ "epoch": 0.01,
+ "learning_rate": 4.487179487179488e-06,
+ "loss": 1.0208,
+ "step": 35
+ },
+ {
+ "epoch": 0.01,
+ "learning_rate": 4.615384615384616e-06,
+ "loss": 4.3336,
+ "step": 36
+ },
+ {
+ "epoch": 0.01,
+ "learning_rate": 4.743589743589744e-06,
+ "loss": 1.08,
+ "step": 37
+ },
+ {
+ "epoch": 0.01,
+ "learning_rate": 4.871794871794872e-06,
+ "loss": 1.0742,
+ "step": 38
+ },
+ {
+ "epoch": 0.01,
+ "learning_rate": 5e-06,
+ "loss": 1.0457,
+ "step": 39
+ },
+ {
+ "epoch": 0.01,
+ "learning_rate": 5.128205128205128e-06,
+ "loss": 1.0427,
+ "step": 40
+ },
+ {
+ "epoch": 0.01,
+ "learning_rate": 5.256410256410257e-06,
+ "loss": 1.0599,
+ "step": 41
+ },
+ {
+ "epoch": 0.01,
+ "learning_rate": 5.384615384615385e-06,
+ "loss": 1.0781,
+ "step": 42
+ },
+ {
+ "epoch": 0.01,
+ "learning_rate": 5.512820512820514e-06,
+ "loss": 1.0193,
+ "step": 43
+ },
+ {
+ "epoch": 0.01,
+ "learning_rate": 5.641025641025641e-06,
+ "loss": 0.9767,
+ "step": 44
+ },
+ {
+ "epoch": 0.01,
+ "learning_rate": 5.769230769230769e-06,
+ "loss": 1.0332,
+ "step": 45
+ },
+ {
+ "epoch": 0.01,
+ "learning_rate": 5.897435897435898e-06,
+ "loss": 0.9968,
+ "step": 46
+ },
+ {
+ "epoch": 0.01,
+ "learning_rate": 6.025641025641026e-06,
+ "loss": 0.9327,
+ "step": 47
+ },
+ {
+ "epoch": 0.01,
+ "learning_rate": 6.153846153846155e-06,
+ "loss": 1.0183,
+ "step": 48
+ },
+ {
+ "epoch": 0.01,
+ "learning_rate": 6.282051282051282e-06,
+ "loss": 1.0013,
+ "step": 49
+ },
+ {
+ "epoch": 0.01,
+ "learning_rate": 6.410256410256412e-06,
+ "loss": 3.5057,
+ "step": 50
+ },
+ {
+ "epoch": 0.01,
+ "learning_rate": 6.538461538461539e-06,
+ "loss": 0.9982,
+ "step": 51
+ },
+ {
+ "epoch": 0.01,
+ "learning_rate": 6.666666666666667e-06,
+ "loss": 0.9786,
+ "step": 52
+ },
+ {
+ "epoch": 0.01,
+ "learning_rate": 6.794871794871796e-06,
+ "loss": 1.0033,
+ "step": 53
+ },
+ {
+ "epoch": 0.01,
+ "learning_rate": 6.923076923076923e-06,
+ "loss": 0.9968,
+ "step": 54
+ },
+ {
+ "epoch": 0.01,
+ "learning_rate": 7.051282051282053e-06,
+ "loss": 1.0369,
+ "step": 55
+ },
+ {
+ "epoch": 0.01,
+ "learning_rate": 7.17948717948718e-06,
+ "loss": 0.894,
+ "step": 56
+ },
+ {
+ "epoch": 0.01,
+ "learning_rate": 7.307692307692308e-06,
+ "loss": 0.9616,
+ "step": 57
+ },
+ {
+ "epoch": 0.01,
+ "learning_rate": 7.435897435897437e-06,
+ "loss": 0.9738,
+ "step": 58
+ },
+ {
+ "epoch": 0.01,
+ "learning_rate": 7.564102564102564e-06,
+ "loss": 0.9695,
+ "step": 59
+ },
+ {
+ "epoch": 0.01,
+ "learning_rate": 7.692307692307694e-06,
+ "loss": 0.9637,
+ "step": 60
+ },
+ {
+ "epoch": 0.01,
+ "learning_rate": 7.820512820512822e-06,
+ "loss": 0.9757,
+ "step": 61
+ },
+ {
+ "epoch": 0.01,
+ "learning_rate": 7.948717948717949e-06,
+ "loss": 0.9546,
+ "step": 62
+ },
+ {
+ "epoch": 0.01,
+ "learning_rate": 8.076923076923077e-06,
+ "loss": 0.9701,
+ "step": 63
+ },
+ {
+ "epoch": 0.01,
+ "learning_rate": 8.205128205128205e-06,
+ "loss": 0.9771,
+ "step": 64
+ },
+ {
+ "epoch": 0.01,
+ "learning_rate": 8.333333333333334e-06,
+ "loss": 1.007,
+ "step": 65
+ },
+ {
+ "epoch": 0.01,
+ "learning_rate": 8.461538461538462e-06,
+ "loss": 1.0072,
+ "step": 66
+ },
+ {
+ "epoch": 0.01,
+ "learning_rate": 8.58974358974359e-06,
+ "loss": 0.9828,
+ "step": 67
+ },
+ {
+ "epoch": 0.01,
+ "learning_rate": 8.717948717948719e-06,
+ "loss": 1.0001,
+ "step": 68
+ },
+ {
+ "epoch": 0.01,
+ "learning_rate": 8.846153846153847e-06,
+ "loss": 0.9775,
+ "step": 69
+ },
+ {
+ "epoch": 0.01,
+ "learning_rate": 8.974358974358976e-06,
+ "loss": 1.0071,
+ "step": 70
+ },
+ {
+ "epoch": 0.01,
+ "learning_rate": 9.102564102564104e-06,
+ "loss": 0.9132,
+ "step": 71
+ },
+ {
+ "epoch": 0.01,
+ "learning_rate": 9.230769230769232e-06,
+ "loss": 0.9507,
+ "step": 72
+ },
+ {
+ "epoch": 0.01,
+ "learning_rate": 9.358974358974359e-06,
+ "loss": 0.9136,
+ "step": 73
+ },
+ {
+ "epoch": 0.01,
+ "learning_rate": 9.487179487179487e-06,
+ "loss": 0.9109,
+ "step": 74
+ },
+ {
+ "epoch": 0.01,
+ "learning_rate": 9.615384615384616e-06,
+ "loss": 0.9334,
+ "step": 75
+ },
+ {
+ "epoch": 0.01,
+ "learning_rate": 9.743589743589744e-06,
+ "loss": 0.9329,
+ "step": 76
+ },
+ {
+ "epoch": 0.01,
+ "learning_rate": 9.871794871794872e-06,
+ "loss": 1.0024,
+ "step": 77
+ },
+ {
+ "epoch": 0.02,
+ "learning_rate": 1e-05,
+ "loss": 0.9483,
+ "step": 78
+ },
+ {
+ "epoch": 0.02,
+ "learning_rate": 1.012820512820513e-05,
+ "loss": 0.969,
+ "step": 79
+ },
+ {
+ "epoch": 0.02,
+ "learning_rate": 1.0256410256410256e-05,
+ "loss": 0.9532,
+ "step": 80
+ },
+ {
+ "epoch": 0.02,
+ "learning_rate": 1.0384615384615386e-05,
+ "loss": 0.9433,
+ "step": 81
+ },
+ {
+ "epoch": 0.02,
+ "learning_rate": 1.0512820512820514e-05,
+ "loss": 0.9562,
+ "step": 82
+ },
+ {
+ "epoch": 0.02,
+ "learning_rate": 1.0641025641025643e-05,
+ "loss": 0.8785,
+ "step": 83
+ },
+ {
+ "epoch": 0.02,
+ "learning_rate": 1.076923076923077e-05,
+ "loss": 3.2122,
+ "step": 84
+ },
+ {
+ "epoch": 0.02,
+ "learning_rate": 1.0897435897435898e-05,
+ "loss": 0.9635,
+ "step": 85
+ },
+ {
+ "epoch": 0.02,
+ "learning_rate": 1.1025641025641028e-05,
+ "loss": 0.8607,
+ "step": 86
+ },
+ {
+ "epoch": 0.02,
+ "learning_rate": 1.1153846153846154e-05,
+ "loss": 0.9525,
+ "step": 87
+ },
+ {
+ "epoch": 0.02,
+ "learning_rate": 1.1282051282051283e-05,
+ "loss": 0.9491,
+ "step": 88
+ },
+ {
+ "epoch": 0.02,
+ "learning_rate": 1.1410256410256411e-05,
+ "loss": 0.9165,
+ "step": 89
+ },
+ {
+ "epoch": 0.02,
+ "learning_rate": 1.1538461538461538e-05,
+ "loss": 0.9879,
+ "step": 90
+ },
+ {
+ "epoch": 0.02,
+ "learning_rate": 1.1666666666666668e-05,
+ "loss": 0.974,
+ "step": 91
+ },
+ {
+ "epoch": 0.02,
+ "learning_rate": 1.1794871794871796e-05,
+ "loss": 0.9685,
+ "step": 92
+ },
+ {
+ "epoch": 0.02,
+ "learning_rate": 1.1923076923076925e-05,
+ "loss": 0.9597,
+ "step": 93
+ },
+ {
+ "epoch": 0.02,
+ "learning_rate": 1.2051282051282051e-05,
+ "loss": 0.9608,
+ "step": 94
+ },
+ {
+ "epoch": 0.02,
+ "learning_rate": 1.217948717948718e-05,
+ "loss": 0.9395,
+ "step": 95
+ },
+ {
+ "epoch": 0.02,
+ "learning_rate": 1.230769230769231e-05,
+ "loss": 0.8969,
+ "step": 96
+ },
+ {
+ "epoch": 0.02,
+ "learning_rate": 1.2435897435897436e-05,
+ "loss": 0.8574,
+ "step": 97
+ },
+ {
+ "epoch": 0.02,
+ "learning_rate": 1.2564102564102565e-05,
+ "loss": 3.2338,
+ "step": 98
+ },
+ {
+ "epoch": 0.02,
+ "learning_rate": 1.2692307692307693e-05,
+ "loss": 0.9971,
+ "step": 99
+ },
+ {
+ "epoch": 0.02,
+ "learning_rate": 1.2820512820512823e-05,
+ "loss": 0.9778,
+ "step": 100
+ },
+ {
+ "epoch": 0.02,
+ "learning_rate": 1.294871794871795e-05,
+ "loss": 0.8861,
+ "step": 101
+ },
+ {
+ "epoch": 0.02,
+ "learning_rate": 1.3076923076923078e-05,
+ "loss": 1.0107,
+ "step": 102
+ },
+ {
+ "epoch": 0.02,
+ "learning_rate": 1.3205128205128207e-05,
+ "loss": 3.1329,
+ "step": 103
+ },
+ {
+ "epoch": 0.02,
+ "learning_rate": 1.3333333333333333e-05,
+ "loss": 0.9429,
+ "step": 104
+ },
+ {
+ "epoch": 0.02,
+ "learning_rate": 1.3461538461538463e-05,
+ "loss": 0.9252,
+ "step": 105
+ },
+ {
+ "epoch": 0.02,
+ "learning_rate": 1.3589743589743592e-05,
+ "loss": 0.9298,
+ "step": 106
+ },
+ {
+ "epoch": 0.02,
+ "learning_rate": 1.3717948717948718e-05,
+ "loss": 0.9961,
+ "step": 107
+ },
+ {
+ "epoch": 0.02,
+ "learning_rate": 1.3846153846153847e-05,
+ "loss": 2.8413,
+ "step": 108
+ },
+ {
+ "epoch": 0.02,
+ "learning_rate": 1.3974358974358975e-05,
+ "loss": 0.9349,
+ "step": 109
+ },
+ {
+ "epoch": 0.02,
+ "learning_rate": 1.4102564102564105e-05,
+ "loss": 0.9491,
+ "step": 110
+ },
+ {
+ "epoch": 0.02,
+ "learning_rate": 1.4230769230769232e-05,
+ "loss": 0.9278,
+ "step": 111
+ },
+ {
+ "epoch": 0.02,
+ "learning_rate": 1.435897435897436e-05,
+ "loss": 0.9079,
+ "step": 112
+ },
+ {
+ "epoch": 0.02,
+ "learning_rate": 1.4487179487179489e-05,
+ "loss": 0.914,
+ "step": 113
+ },
+ {
+ "epoch": 0.02,
+ "learning_rate": 1.4615384615384615e-05,
+ "loss": 0.8572,
+ "step": 114
+ },
+ {
+ "epoch": 0.02,
+ "learning_rate": 1.4743589743589745e-05,
+ "loss": 0.8608,
+ "step": 115
+ },
+ {
+ "epoch": 0.02,
+ "learning_rate": 1.4871794871794874e-05,
+ "loss": 2.8901,
+ "step": 116
+ },
+ {
+ "epoch": 0.02,
+ "learning_rate": 1.5000000000000002e-05,
+ "loss": 0.9161,
+ "step": 117
+ },
+ {
+ "epoch": 0.02,
+ "learning_rate": 1.5128205128205129e-05,
+ "loss": 0.9586,
+ "step": 118
+ },
+ {
+ "epoch": 0.02,
+ "learning_rate": 1.5256410256410257e-05,
+ "loss": 0.8921,
+ "step": 119
+ },
+ {
+ "epoch": 0.02,
+ "learning_rate": 1.5384615384615387e-05,
+ "loss": 0.9607,
+ "step": 120
+ },
+ {
+ "epoch": 0.02,
+ "learning_rate": 1.5512820512820516e-05,
+ "loss": 0.866,
+ "step": 121
+ },
+ {
+ "epoch": 0.02,
+ "learning_rate": 1.5641025641025644e-05,
+ "loss": 0.967,
+ "step": 122
+ },
+ {
+ "epoch": 0.02,
+ "learning_rate": 1.576923076923077e-05,
+ "loss": 0.9094,
+ "step": 123
+ },
+ {
+ "epoch": 0.02,
+ "learning_rate": 1.5897435897435897e-05,
+ "loss": 0.8653,
+ "step": 124
+ },
+ {
+ "epoch": 0.02,
+ "learning_rate": 1.602564102564103e-05,
+ "loss": 0.9287,
+ "step": 125
+ },
+ {
+ "epoch": 0.02,
+ "learning_rate": 1.6153846153846154e-05,
+ "loss": 0.8918,
+ "step": 126
+ },
+ {
+ "epoch": 0.02,
+ "learning_rate": 1.6282051282051282e-05,
+ "loss": 0.83,
+ "step": 127
+ },
+ {
+ "epoch": 0.02,
+ "learning_rate": 1.641025641025641e-05,
+ "loss": 0.9351,
+ "step": 128
+ },
+ {
+ "epoch": 0.02,
+ "learning_rate": 1.653846153846154e-05,
+ "loss": 0.9001,
+ "step": 129
+ },
+ {
+ "epoch": 0.03,
+ "learning_rate": 1.6666666666666667e-05,
+ "loss": 0.9137,
+ "step": 130
+ },
+ {
+ "epoch": 0.03,
+ "learning_rate": 1.6794871794871796e-05,
+ "loss": 0.8886,
+ "step": 131
+ },
+ {
+ "epoch": 0.03,
+ "learning_rate": 1.6923076923076924e-05,
+ "loss": 0.9526,
+ "step": 132
+ },
+ {
+ "epoch": 0.03,
+ "learning_rate": 1.7051282051282053e-05,
+ "loss": 0.9424,
+ "step": 133
+ },
+ {
+ "epoch": 0.03,
+ "learning_rate": 1.717948717948718e-05,
+ "loss": 0.9128,
+ "step": 134
+ },
+ {
+ "epoch": 0.03,
+ "learning_rate": 1.730769230769231e-05,
+ "loss": 0.8978,
+ "step": 135
+ },
+ {
+ "epoch": 0.03,
+ "learning_rate": 1.7435897435897438e-05,
+ "loss": 0.9465,
+ "step": 136
+ },
+ {
+ "epoch": 0.03,
+ "learning_rate": 1.7564102564102566e-05,
+ "loss": 0.9033,
+ "step": 137
+ },
+ {
+ "epoch": 0.03,
+ "learning_rate": 1.7692307692307694e-05,
+ "loss": 0.9853,
+ "step": 138
+ },
+ {
+ "epoch": 0.03,
+ "learning_rate": 1.7820512820512823e-05,
+ "loss": 0.9438,
+ "step": 139
+ },
+ {
+ "epoch": 0.03,
+ "learning_rate": 1.794871794871795e-05,
+ "loss": 0.9394,
+ "step": 140
+ },
+ {
+ "epoch": 0.03,
+ "learning_rate": 1.807692307692308e-05,
+ "loss": 0.9071,
+ "step": 141
+ },
+ {
+ "epoch": 0.03,
+ "learning_rate": 1.8205128205128208e-05,
+ "loss": 0.9308,
+ "step": 142
+ },
+ {
+ "epoch": 0.03,
+ "learning_rate": 1.8333333333333333e-05,
+ "loss": 0.935,
+ "step": 143
+ },
+ {
+ "epoch": 0.03,
+ "learning_rate": 1.8461538461538465e-05,
+ "loss": 0.8907,
+ "step": 144
+ },
+ {
+ "epoch": 0.03,
+ "learning_rate": 1.8589743589743593e-05,
+ "loss": 0.9061,
+ "step": 145
+ },
+ {
+ "epoch": 0.03,
+ "learning_rate": 1.8717948717948718e-05,
+ "loss": 0.9679,
+ "step": 146
+ },
+ {
+ "epoch": 0.03,
+ "learning_rate": 1.8846153846153846e-05,
+ "loss": 0.931,
+ "step": 147
+ },
+ {
+ "epoch": 0.03,
+ "learning_rate": 1.8974358974358975e-05,
+ "loss": 0.8938,
+ "step": 148
+ },
+ {
+ "epoch": 0.03,
+ "learning_rate": 1.9102564102564106e-05,
+ "loss": 0.9108,
+ "step": 149
+ },
+ {
+ "epoch": 0.03,
+ "learning_rate": 1.923076923076923e-05,
+ "loss": 2.6512,
+ "step": 150
+ },
+ {
+ "epoch": 0.03,
+ "learning_rate": 1.935897435897436e-05,
+ "loss": 0.9489,
+ "step": 151
+ },
+ {
+ "epoch": 0.03,
+ "learning_rate": 1.9487179487179488e-05,
+ "loss": 0.9132,
+ "step": 152
+ },
+ {
+ "epoch": 0.03,
+ "learning_rate": 1.9615384615384617e-05,
+ "loss": 0.8711,
+ "step": 153
+ },
+ {
+ "epoch": 0.03,
+ "learning_rate": 1.9743589743589745e-05,
+ "loss": 2.5829,
+ "step": 154
+ },
+ {
+ "epoch": 0.03,
+ "learning_rate": 1.9871794871794873e-05,
+ "loss": 0.9284,
+ "step": 155
+ },
+ {
+ "epoch": 0.03,
+ "learning_rate": 2e-05,
+ "loss": 0.9421,
+ "step": 156
+ },
+ {
+ "epoch": 0.03,
+ "learning_rate": 1.999999805651578e-05,
+ "loss": 0.8888,
+ "step": 157
+ },
+ {
+ "epoch": 0.03,
+ "learning_rate": 1.9999992226063874e-05,
+ "loss": 0.8539,
+ "step": 158
+ },
+ {
+ "epoch": 0.03,
+ "learning_rate": 1.9999982508646547e-05,
+ "loss": 0.8966,
+ "step": 159
+ },
+ {
+ "epoch": 0.03,
+ "learning_rate": 1.9999968904267578e-05,
+ "loss": 0.9534,
+ "step": 160
+ },
+ {
+ "epoch": 0.03,
+ "learning_rate": 1.999995141293225e-05,
+ "loss": 0.9165,
+ "step": 161
+ },
+ {
+ "epoch": 0.03,
+ "learning_rate": 1.999993003464737e-05,
+ "loss": 2.667,
+ "step": 162
+ },
+ {
+ "epoch": 0.03,
+ "learning_rate": 1.9999904769421242e-05,
+ "loss": 0.9234,
+ "step": 163
+ },
+ {
+ "epoch": 0.03,
+ "learning_rate": 1.999987561726369e-05,
+ "loss": 0.917,
+ "step": 164
+ },
+ {
+ "epoch": 0.03,
+ "learning_rate": 1.999984257818604e-05,
+ "loss": 0.9647,
+ "step": 165
+ },
+ {
+ "epoch": 0.03,
+ "learning_rate": 1.9999805652201144e-05,
+ "loss": 2.4527,
+ "step": 166
+ },
+ {
+ "epoch": 0.03,
+ "learning_rate": 1.999976483932334e-05,
+ "loss": 0.9487,
+ "step": 167
+ },
+ {
+ "epoch": 0.03,
+ "learning_rate": 1.9999720139568508e-05,
+ "loss": 0.925,
+ "step": 168
+ },
+ {
+ "epoch": 0.03,
+ "learning_rate": 1.9999671552954013e-05,
+ "loss": 0.9104,
+ "step": 169
+ },
+ {
+ "epoch": 0.03,
+ "learning_rate": 1.999961907949874e-05,
+ "loss": 0.9418,
+ "step": 170
+ },
+ {
+ "epoch": 0.03,
+ "learning_rate": 1.999956271922309e-05,
+ "loss": 0.9397,
+ "step": 171
+ },
+ {
+ "epoch": 0.03,
+ "learning_rate": 1.9999502472148968e-05,
+ "loss": 0.833,
+ "step": 172
+ },
+ {
+ "epoch": 0.03,
+ "learning_rate": 1.999943833829979e-05,
+ "loss": 0.9171,
+ "step": 173
+ },
+ {
+ "epoch": 0.03,
+ "learning_rate": 1.999937031770049e-05,
+ "loss": 0.9068,
+ "step": 174
+ },
+ {
+ "epoch": 0.03,
+ "learning_rate": 1.99992984103775e-05,
+ "loss": 0.9174,
+ "step": 175
+ },
+ {
+ "epoch": 0.03,
+ "learning_rate": 1.9999222616358778e-05,
+ "loss": 0.9062,
+ "step": 176
+ },
+ {
+ "epoch": 0.03,
+ "learning_rate": 1.9999142935673778e-05,
+ "loss": 0.9674,
+ "step": 177
+ },
+ {
+ "epoch": 0.03,
+ "learning_rate": 1.9999059368353474e-05,
+ "loss": 0.9047,
+ "step": 178
+ },
+ {
+ "epoch": 0.03,
+ "learning_rate": 1.999897191443035e-05,
+ "loss": 0.9285,
+ "step": 179
+ },
+ {
+ "epoch": 0.03,
+ "learning_rate": 1.9998880573938396e-05,
+ "loss": 0.949,
+ "step": 180
+ },
+ {
+ "epoch": 0.03,
+ "learning_rate": 1.9998785346913123e-05,
+ "loss": 0.928,
+ "step": 181
+ },
+ {
+ "epoch": 0.04,
+ "learning_rate": 1.9998686233391537e-05,
+ "loss": 0.9573,
+ "step": 182
+ },
+ {
+ "epoch": 0.04,
+ "learning_rate": 1.9998583233412162e-05,
+ "loss": 0.9193,
+ "step": 183
+ },
+ {
+ "epoch": 0.04,
+ "learning_rate": 1.9998476347015042e-05,
+ "loss": 0.9167,
+ "step": 184
+ },
+ {
+ "epoch": 0.04,
+ "learning_rate": 1.999836557424172e-05,
+ "loss": 0.9052,
+ "step": 185
+ },
+ {
+ "epoch": 0.04,
+ "learning_rate": 1.9998250915135255e-05,
+ "loss": 0.9266,
+ "step": 186
+ },
+ {
+ "epoch": 0.04,
+ "learning_rate": 1.9998132369740207e-05,
+ "loss": 0.9519,
+ "step": 187
+ },
+ {
+ "epoch": 0.04,
+ "learning_rate": 1.999800993810266e-05,
+ "loss": 0.9358,
+ "step": 188
+ },
+ {
+ "epoch": 0.04,
+ "learning_rate": 1.9997883620270203e-05,
+ "loss": 0.9314,
+ "step": 189
+ },
+ {
+ "epoch": 0.04,
+ "learning_rate": 1.9997753416291935e-05,
+ "loss": 0.9292,
+ "step": 190
+ },
+ {
+ "epoch": 0.04,
+ "learning_rate": 1.9997619326218465e-05,
+ "loss": 0.8789,
+ "step": 191
+ },
+ {
+ "epoch": 0.04,
+ "learning_rate": 1.999748135010191e-05,
+ "loss": 0.8901,
+ "step": 192
+ },
+ {
+ "epoch": 0.04,
+ "learning_rate": 1.999733948799591e-05,
+ "loss": 0.9568,
+ "step": 193
+ },
+ {
+ "epoch": 0.04,
+ "learning_rate": 1.99971937399556e-05,
+ "loss": 0.9219,
+ "step": 194
+ },
+ {
+ "epoch": 0.04,
+ "learning_rate": 1.999704410603763e-05,
+ "loss": 0.9421,
+ "step": 195
+ },
+ {
+ "epoch": 0.04,
+ "learning_rate": 1.9996890586300168e-05,
+ "loss": 0.852,
+ "step": 196
+ },
+ {
+ "epoch": 0.04,
+ "learning_rate": 1.999673318080288e-05,
+ "loss": 0.9013,
+ "step": 197
+ },
+ {
+ "epoch": 0.04,
+ "learning_rate": 1.9996571889606956e-05,
+ "loss": 0.9765,
+ "step": 198
+ },
+ {
+ "epoch": 0.04,
+ "learning_rate": 1.999640671277508e-05,
+ "loss": 0.8804,
+ "step": 199
+ },
+ {
+ "epoch": 0.04,
+ "learning_rate": 1.999623765037147e-05,
+ "loss": 0.8883,
+ "step": 200
+ },
+ {
+ "epoch": 0.04,
+ "learning_rate": 1.999606470246183e-05,
+ "loss": 2.4217,
+ "step": 201
+ },
+ {
+ "epoch": 0.04,
+ "learning_rate": 1.9995887869113382e-05,
+ "loss": 0.9569,
+ "step": 202
+ },
+ {
+ "epoch": 0.04,
+ "learning_rate": 1.999570715039487e-05,
+ "loss": 0.9408,
+ "step": 203
+ },
+ {
+ "epoch": 0.04,
+ "learning_rate": 1.9995522546376528e-05,
+ "loss": 0.9467,
+ "step": 204
+ },
+ {
+ "epoch": 0.04,
+ "learning_rate": 1.9995334057130117e-05,
+ "loss": 0.8541,
+ "step": 205
+ },
+ {
+ "epoch": 0.04,
+ "learning_rate": 1.9995141682728904e-05,
+ "loss": 0.938,
+ "step": 206
+ },
+ {
+ "epoch": 0.04,
+ "learning_rate": 1.999494542324766e-05,
+ "loss": 0.9554,
+ "step": 207
+ },
+ {
+ "epoch": 0.04,
+ "learning_rate": 1.9994745278762676e-05,
+ "loss": 0.9275,
+ "step": 208
+ },
+ {
+ "epoch": 0.04,
+ "learning_rate": 1.999454124935174e-05,
+ "loss": 0.8765,
+ "step": 209
+ },
+ {
+ "epoch": 0.04,
+ "learning_rate": 1.9994333335094162e-05,
+ "loss": 0.9015,
+ "step": 210
+ },
+ {
+ "epoch": 0.04,
+ "learning_rate": 1.9994121536070757e-05,
+ "loss": 0.9289,
+ "step": 211
+ },
+ {
+ "epoch": 0.04,
+ "learning_rate": 1.999390585236385e-05,
+ "loss": 0.8851,
+ "step": 212
+ },
+ {
+ "epoch": 0.04,
+ "learning_rate": 1.9993686284057283e-05,
+ "loss": 0.9212,
+ "step": 213
+ },
+ {
+ "epoch": 0.04,
+ "learning_rate": 1.9993462831236386e-05,
+ "loss": 0.9385,
+ "step": 214
+ },
+ {
+ "epoch": 0.04,
+ "learning_rate": 1.999323549398803e-05,
+ "loss": 0.9351,
+ "step": 215
+ },
+ {
+ "epoch": 0.04,
+ "learning_rate": 1.9993004272400576e-05,
+ "loss": 0.8561,
+ "step": 216
+ },
+ {
+ "epoch": 0.04,
+ "learning_rate": 1.9992769166563894e-05,
+ "loss": 0.9246,
+ "step": 217
+ },
+ {
+ "epoch": 0.04,
+ "learning_rate": 1.9992530176569372e-05,
+ "loss": 0.926,
+ "step": 218
+ },
+ {
+ "epoch": 0.04,
+ "learning_rate": 1.999228730250991e-05,
+ "loss": 0.9129,
+ "step": 219
+ },
+ {
+ "epoch": 0.04,
+ "learning_rate": 1.9992040544479904e-05,
+ "loss": 2.8827,
+ "step": 220
+ },
+ {
+ "epoch": 0.04,
+ "learning_rate": 1.9991789902575273e-05,
+ "loss": 0.8951,
+ "step": 221
+ },
+ {
+ "epoch": 0.04,
+ "learning_rate": 1.9991535376893435e-05,
+ "loss": 2.6192,
+ "step": 222
+ },
+ {
+ "epoch": 0.04,
+ "learning_rate": 1.9991276967533332e-05,
+ "loss": 0.9647,
+ "step": 223
+ },
+ {
+ "epoch": 0.04,
+ "learning_rate": 1.99910146745954e-05,
+ "loss": 0.8961,
+ "step": 224
+ },
+ {
+ "epoch": 0.04,
+ "learning_rate": 1.9990748498181597e-05,
+ "loss": 2.2728,
+ "step": 225
+ },
+ {
+ "epoch": 0.04,
+ "learning_rate": 1.999047843839538e-05,
+ "loss": 0.9361,
+ "step": 226
+ },
+ {
+ "epoch": 0.04,
+ "learning_rate": 1.999020449534172e-05,
+ "loss": 0.8825,
+ "step": 227
+ },
+ {
+ "epoch": 0.04,
+ "learning_rate": 1.9989926669127105e-05,
+ "loss": 0.8717,
+ "step": 228
+ },
+ {
+ "epoch": 0.04,
+ "learning_rate": 1.9989644959859514e-05,
+ "loss": 0.9278,
+ "step": 229
+ },
+ {
+ "epoch": 0.04,
+ "learning_rate": 1.9989359367648457e-05,
+ "loss": 0.9299,
+ "step": 230
+ },
+ {
+ "epoch": 0.04,
+ "learning_rate": 1.998906989260494e-05,
+ "loss": 0.9534,
+ "step": 231
+ },
+ {
+ "epoch": 0.04,
+ "learning_rate": 1.9988776534841477e-05,
+ "loss": 0.9126,
+ "step": 232
+ },
+ {
+ "epoch": 0.04,
+ "learning_rate": 1.9988479294472098e-05,
+ "loss": 0.9336,
+ "step": 233
+ },
+ {
+ "epoch": 0.05,
+ "learning_rate": 1.9988178171612337e-05,
+ "loss": 0.8693,
+ "step": 234
+ },
+ {
+ "epoch": 0.05,
+ "learning_rate": 1.9987873166379245e-05,
+ "loss": 0.9025,
+ "step": 235
+ },
+ {
+ "epoch": 0.05,
+ "learning_rate": 1.998756427889137e-05,
+ "loss": 0.8997,
+ "step": 236
+ },
+ {
+ "epoch": 0.05,
+ "learning_rate": 1.998725150926878e-05,
+ "loss": 0.89,
+ "step": 237
+ },
+ {
+ "epoch": 0.05,
+ "learning_rate": 1.9986934857633053e-05,
+ "loss": 2.393,
+ "step": 238
+ },
+ {
+ "epoch": 0.05,
+ "learning_rate": 1.9986614324107255e-05,
+ "loss": 0.8828,
+ "step": 239
+ },
+ {
+ "epoch": 0.05,
+ "learning_rate": 1.9986289908815986e-05,
+ "loss": 2.6581,
+ "step": 240
+ },
+ {
+ "epoch": 0.05,
+ "learning_rate": 1.998596161188535e-05,
+ "loss": 0.9248,
+ "step": 241
+ },
+ {
+ "epoch": 0.05,
+ "learning_rate": 1.9985629433442944e-05,
+ "loss": 0.9042,
+ "step": 242
+ },
+ {
+ "epoch": 0.05,
+ "learning_rate": 1.9985293373617892e-05,
+ "loss": 0.9174,
+ "step": 243
+ },
+ {
+ "epoch": 0.05,
+ "learning_rate": 1.998495343254082e-05,
+ "loss": 0.9341,
+ "step": 244
+ },
+ {
+ "epoch": 0.05,
+ "learning_rate": 1.9984609610343857e-05,
+ "loss": 0.942,
+ "step": 245
+ },
+ {
+ "epoch": 0.05,
+ "learning_rate": 1.9984261907160653e-05,
+ "loss": 0.8998,
+ "step": 246
+ },
+ {
+ "epoch": 0.05,
+ "learning_rate": 1.9983910323126346e-05,
+ "loss": 0.8785,
+ "step": 247
+ },
+ {
+ "epoch": 0.05,
+ "learning_rate": 1.998355485837761e-05,
+ "loss": 0.8845,
+ "step": 248
+ },
+ {
+ "epoch": 0.05,
+ "learning_rate": 1.9983195513052605e-05,
+ "loss": 0.9347,
+ "step": 249
+ },
+ {
+ "epoch": 0.05,
+ "learning_rate": 1.998283228729101e-05,
+ "loss": 0.9176,
+ "step": 250
+ },
+ {
+ "epoch": 0.05,
+ "learning_rate": 1.9982465181234007e-05,
+ "loss": 0.9586,
+ "step": 251
+ },
+ {
+ "epoch": 0.05,
+ "learning_rate": 1.9982094195024294e-05,
+ "loss": 0.8903,
+ "step": 252
+ },
+ {
+ "epoch": 0.05,
+ "learning_rate": 1.9981719328806065e-05,
+ "loss": 0.9348,
+ "step": 253
+ },
+ {
+ "epoch": 0.05,
+ "learning_rate": 1.9981340582725038e-05,
+ "loss": 0.9763,
+ "step": 254
+ },
+ {
+ "epoch": 0.05,
+ "learning_rate": 1.9980957956928425e-05,
+ "loss": 2.5989,
+ "step": 255
+ },
+ {
+ "epoch": 0.05,
+ "learning_rate": 1.998057145156495e-05,
+ "loss": 0.9226,
+ "step": 256
+ },
+ {
+ "epoch": 0.05,
+ "learning_rate": 1.998018106678485e-05,
+ "loss": 0.891,
+ "step": 257
+ },
+ {
+ "epoch": 0.05,
+ "learning_rate": 1.9979786802739863e-05,
+ "loss": 0.9653,
+ "step": 258
+ },
+ {
+ "epoch": 0.05,
+ "learning_rate": 1.9979388659583246e-05,
+ "loss": 0.8909,
+ "step": 259
+ },
+ {
+ "epoch": 0.05,
+ "learning_rate": 1.9978986637469747e-05,
+ "loss": 0.9247,
+ "step": 260
+ },
+ {
+ "epoch": 0.05,
+ "learning_rate": 1.9978580736555633e-05,
+ "loss": 0.9055,
+ "step": 261
+ },
+ {
+ "epoch": 0.05,
+ "learning_rate": 1.9978170956998677e-05,
+ "loss": 0.8504,
+ "step": 262
+ },
+ {
+ "epoch": 0.05,
+ "learning_rate": 1.9977757298958164e-05,
+ "loss": 0.9052,
+ "step": 263
+ },
+ {
+ "epoch": 0.05,
+ "learning_rate": 1.9977339762594868e-05,
+ "loss": 0.9661,
+ "step": 264
+ },
+ {
+ "epoch": 0.05,
+ "learning_rate": 1.99769183480711e-05,
+ "loss": 0.952,
+ "step": 265
+ },
+ {
+ "epoch": 0.05,
+ "learning_rate": 1.9976493055550655e-05,
+ "loss": 0.8744,
+ "step": 266
+ },
+ {
+ "epoch": 0.05,
+ "learning_rate": 1.997606388519884e-05,
+ "loss": 0.8904,
+ "step": 267
+ },
+ {
+ "epoch": 0.05,
+ "learning_rate": 1.997563083718248e-05,
+ "loss": 0.8815,
+ "step": 268
+ },
+ {
+ "epoch": 0.05,
+ "learning_rate": 1.997519391166989e-05,
+ "loss": 2.3681,
+ "step": 269
+ },
+ {
+ "epoch": 0.05,
+ "learning_rate": 1.997475310883091e-05,
+ "loss": 0.8941,
+ "step": 270
+ },
+ {
+ "epoch": 0.05,
+ "learning_rate": 1.997430842883688e-05,
+ "loss": 0.9192,
+ "step": 271
+ },
+ {
+ "epoch": 0.05,
+ "learning_rate": 1.9973859871860635e-05,
+ "loss": 0.9162,
+ "step": 272
+ },
+ {
+ "epoch": 0.05,
+ "learning_rate": 1.9973407438076533e-05,
+ "loss": 0.9254,
+ "step": 273
+ },
+ {
+ "epoch": 0.05,
+ "learning_rate": 1.9972951127660434e-05,
+ "loss": 0.8783,
+ "step": 274
+ },
+ {
+ "epoch": 0.05,
+ "learning_rate": 1.9972490940789708e-05,
+ "loss": 0.902,
+ "step": 275
+ },
+ {
+ "epoch": 0.05,
+ "learning_rate": 1.9972026877643223e-05,
+ "loss": 0.9517,
+ "step": 276
+ },
+ {
+ "epoch": 0.05,
+ "learning_rate": 1.997155893840136e-05,
+ "loss": 0.9182,
+ "step": 277
+ },
+ {
+ "epoch": 0.05,
+ "learning_rate": 1.9971087123246008e-05,
+ "loss": 0.8874,
+ "step": 278
+ },
+ {
+ "epoch": 0.05,
+ "learning_rate": 1.9970611432360556e-05,
+ "loss": 2.3284,
+ "step": 279
+ },
+ {
+ "epoch": 0.05,
+ "learning_rate": 1.997013186592991e-05,
+ "loss": 0.8889,
+ "step": 280
+ },
+ {
+ "epoch": 0.05,
+ "learning_rate": 1.9969648424140467e-05,
+ "loss": 0.8681,
+ "step": 281
+ },
+ {
+ "epoch": 0.05,
+ "learning_rate": 1.9969161107180146e-05,
+ "loss": 2.4504,
+ "step": 282
+ },
+ {
+ "epoch": 0.05,
+ "learning_rate": 1.9968669915238364e-05,
+ "loss": 0.8838,
+ "step": 283
+ },
+ {
+ "epoch": 0.05,
+ "learning_rate": 1.9968174848506044e-05,
+ "loss": 0.9488,
+ "step": 284
+ },
+ {
+ "epoch": 0.05,
+ "learning_rate": 1.9967675907175617e-05,
+ "loss": 0.9117,
+ "step": 285
+ },
+ {
+ "epoch": 0.06,
+ "learning_rate": 1.9967173091441022e-05,
+ "loss": 0.8812,
+ "step": 286
+ },
+ {
+ "epoch": 0.06,
+ "learning_rate": 1.9966666401497707e-05,
+ "loss": 0.9091,
+ "step": 287
+ },
+ {
+ "epoch": 0.06,
+ "learning_rate": 1.9966155837542606e-05,
+ "loss": 0.9209,
+ "step": 288
+ },
+ {
+ "epoch": 0.06,
+ "learning_rate": 1.9965641399774187e-05,
+ "loss": 0.9242,
+ "step": 289
+ },
+ {
+ "epoch": 0.06,
+ "learning_rate": 1.9965123088392407e-05,
+ "loss": 0.9248,
+ "step": 290
+ },
+ {
+ "epoch": 0.06,
+ "learning_rate": 1.9964600903598724e-05,
+ "loss": 0.8664,
+ "step": 291
+ },
+ {
+ "epoch": 0.06,
+ "learning_rate": 1.9964074845596125e-05,
+ "loss": 0.8702,
+ "step": 292
+ },
+ {
+ "epoch": 0.06,
+ "learning_rate": 1.9963544914589073e-05,
+ "loss": 0.9963,
+ "step": 293
+ },
+ {
+ "epoch": 0.06,
+ "learning_rate": 1.996301111078356e-05,
+ "loss": 0.9096,
+ "step": 294
+ },
+ {
+ "epoch": 0.06,
+ "learning_rate": 1.9962473434387068e-05,
+ "loss": 0.8576,
+ "step": 295
+ },
+ {
+ "epoch": 0.06,
+ "learning_rate": 1.996193188560859e-05,
+ "loss": 0.9213,
+ "step": 296
+ },
+ {
+ "epoch": 0.06,
+ "learning_rate": 1.9961386464658626e-05,
+ "loss": 0.9229,
+ "step": 297
+ },
+ {
+ "epoch": 0.06,
+ "learning_rate": 1.9960837171749183e-05,
+ "loss": 0.9147,
+ "step": 298
+ },
+ {
+ "epoch": 0.06,
+ "learning_rate": 1.9960284007093764e-05,
+ "loss": 0.8636,
+ "step": 299
+ },
+ {
+ "epoch": 0.06,
+ "learning_rate": 1.995972697090738e-05,
+ "loss": 0.9169,
+ "step": 300
+ },
+ {
+ "epoch": 0.06,
+ "learning_rate": 1.9959166063406563e-05,
+ "loss": 0.8673,
+ "step": 301
+ },
+ {
+ "epoch": 0.06,
+ "learning_rate": 1.995860128480932e-05,
+ "loss": 0.8394,
+ "step": 302
+ },
+ {
+ "epoch": 0.06,
+ "learning_rate": 1.9958032635335187e-05,
+ "loss": 0.9012,
+ "step": 303
+ },
+ {
+ "epoch": 0.06,
+ "learning_rate": 1.9957460115205192e-05,
+ "loss": 0.8976,
+ "step": 304
+ },
+ {
+ "epoch": 0.06,
+ "learning_rate": 1.995688372464188e-05,
+ "loss": 0.8455,
+ "step": 305
+ },
+ {
+ "epoch": 0.06,
+ "learning_rate": 1.9956303463869285e-05,
+ "loss": 0.9157,
+ "step": 306
+ },
+ {
+ "epoch": 0.06,
+ "learning_rate": 1.995571933311295e-05,
+ "loss": 0.9118,
+ "step": 307
+ },
+ {
+ "epoch": 0.06,
+ "learning_rate": 1.9955131332599934e-05,
+ "loss": 0.9537,
+ "step": 308
+ },
+ {
+ "epoch": 0.06,
+ "learning_rate": 1.995453946255878e-05,
+ "loss": 0.9398,
+ "step": 309
+ },
+ {
+ "epoch": 0.06,
+ "learning_rate": 1.9953943723219553e-05,
+ "loss": 0.8884,
+ "step": 310
+ },
+ {
+ "epoch": 0.06,
+ "learning_rate": 1.9953344114813816e-05,
+ "loss": 0.8945,
+ "step": 311
+ },
+ {
+ "epoch": 0.06,
+ "learning_rate": 1.995274063757463e-05,
+ "loss": 0.9177,
+ "step": 312
+ },
+ {
+ "epoch": 0.06,
+ "learning_rate": 1.995213329173657e-05,
+ "loss": 0.8921,
+ "step": 313
+ },
+ {
+ "epoch": 0.06,
+ "learning_rate": 1.995152207753571e-05,
+ "loss": 2.3773,
+ "step": 314
+ },
+ {
+ "epoch": 0.06,
+ "learning_rate": 1.9950906995209617e-05,
+ "loss": 0.8849,
+ "step": 315
+ },
+ {
+ "epoch": 0.06,
+ "learning_rate": 1.995028804499738e-05,
+ "loss": 0.8814,
+ "step": 316
+ },
+ {
+ "epoch": 0.06,
+ "learning_rate": 1.9949665227139583e-05,
+ "loss": 2.2589,
+ "step": 317
+ },
+ {
+ "epoch": 0.06,
+ "learning_rate": 1.9949038541878312e-05,
+ "loss": 0.9221,
+ "step": 318
+ },
+ {
+ "epoch": 0.06,
+ "learning_rate": 1.9948407989457156e-05,
+ "loss": 0.8814,
+ "step": 319
+ },
+ {
+ "epoch": 0.06,
+ "learning_rate": 1.994777357012121e-05,
+ "loss": 0.859,
+ "step": 320
+ },
+ {
+ "epoch": 0.06,
+ "learning_rate": 1.994713528411707e-05,
+ "loss": 2.2627,
+ "step": 321
+ },
+ {
+ "epoch": 0.06,
+ "learning_rate": 1.9946493131692837e-05,
+ "loss": 2.1708,
+ "step": 322
+ },
+ {
+ "epoch": 0.06,
+ "learning_rate": 1.9945847113098112e-05,
+ "loss": 0.9061,
+ "step": 323
+ },
+ {
+ "epoch": 0.06,
+ "learning_rate": 1.9945197228584006e-05,
+ "loss": 0.8706,
+ "step": 324
+ },
+ {
+ "epoch": 0.06,
+ "learning_rate": 1.994454347840312e-05,
+ "loss": 0.9008,
+ "step": 325
+ },
+ {
+ "epoch": 0.06,
+ "learning_rate": 1.9943885862809566e-05,
+ "loss": 0.8927,
+ "step": 326
+ },
+ {
+ "epoch": 0.06,
+ "learning_rate": 1.994322438205896e-05,
+ "loss": 0.9191,
+ "step": 327
+ },
+ {
+ "epoch": 0.06,
+ "learning_rate": 1.9942559036408413e-05,
+ "loss": 0.8505,
+ "step": 328
+ },
+ {
+ "epoch": 0.06,
+ "learning_rate": 1.9941889826116546e-05,
+ "loss": 0.9509,
+ "step": 329
+ },
+ {
+ "epoch": 0.06,
+ "learning_rate": 1.9941216751443485e-05,
+ "loss": 0.8894,
+ "step": 330
+ },
+ {
+ "epoch": 0.06,
+ "learning_rate": 1.994053981265084e-05,
+ "loss": 0.871,
+ "step": 331
+ },
+ {
+ "epoch": 0.06,
+ "learning_rate": 1.993985901000174e-05,
+ "loss": 0.8624,
+ "step": 332
+ },
+ {
+ "epoch": 0.06,
+ "learning_rate": 1.9939174343760813e-05,
+ "loss": 0.8688,
+ "step": 333
+ },
+ {
+ "epoch": 0.06,
+ "learning_rate": 1.9938485814194182e-05,
+ "loss": 2.4733,
+ "step": 334
+ },
+ {
+ "epoch": 0.06,
+ "learning_rate": 1.9937793421569482e-05,
+ "loss": 0.9278,
+ "step": 335
+ },
+ {
+ "epoch": 0.06,
+ "learning_rate": 1.993709716615584e-05,
+ "loss": 0.8676,
+ "step": 336
+ },
+ {
+ "epoch": 0.06,
+ "learning_rate": 1.993639704822389e-05,
+ "loss": 0.8876,
+ "step": 337
+ },
+ {
+ "epoch": 0.07,
+ "learning_rate": 1.9935693068045765e-05,
+ "loss": 0.917,
+ "step": 338
+ },
+ {
+ "epoch": 0.07,
+ "learning_rate": 1.99349852258951e-05,
+ "loss": 0.9018,
+ "step": 339
+ },
+ {
+ "epoch": 0.07,
+ "learning_rate": 1.9934273522047028e-05,
+ "loss": 0.8855,
+ "step": 340
+ },
+ {
+ "epoch": 0.07,
+ "learning_rate": 1.9933557956778192e-05,
+ "loss": 0.8936,
+ "step": 341
+ },
+ {
+ "epoch": 0.07,
+ "learning_rate": 1.9932838530366726e-05,
+ "loss": 0.9035,
+ "step": 342
+ },
+ {
+ "epoch": 0.07,
+ "learning_rate": 1.993211524309227e-05,
+ "loss": 0.8944,
+ "step": 343
+ },
+ {
+ "epoch": 0.07,
+ "learning_rate": 1.9931388095235964e-05,
+ "loss": 0.8541,
+ "step": 344
+ },
+ {
+ "epoch": 0.07,
+ "learning_rate": 1.9930657087080446e-05,
+ "loss": 0.9378,
+ "step": 345
+ },
+ {
+ "epoch": 0.07,
+ "learning_rate": 1.9929922218909855e-05,
+ "loss": 0.8988,
+ "step": 346
+ },
+ {
+ "epoch": 0.07,
+ "learning_rate": 1.9929183491009837e-05,
+ "loss": 0.8937,
+ "step": 347
+ },
+ {
+ "epoch": 0.07,
+ "learning_rate": 1.9928440903667532e-05,
+ "loss": 0.8783,
+ "step": 348
+ },
+ {
+ "epoch": 0.07,
+ "learning_rate": 1.992769445717158e-05,
+ "loss": 0.9128,
+ "step": 349
+ },
+ {
+ "epoch": 0.07,
+ "learning_rate": 1.9926944151812124e-05,
+ "loss": 0.855,
+ "step": 350
+ },
+ {
+ "epoch": 0.07,
+ "learning_rate": 1.99261899878808e-05,
+ "loss": 0.8777,
+ "step": 351
+ },
+ {
+ "epoch": 0.07,
+ "learning_rate": 1.9925431965670752e-05,
+ "loss": 0.8686,
+ "step": 352
+ },
+ {
+ "epoch": 0.07,
+ "learning_rate": 1.9924670085476626e-05,
+ "loss": 0.9014,
+ "step": 353
+ },
+ {
+ "epoch": 0.07,
+ "learning_rate": 1.9923904347594558e-05,
+ "loss": 0.8633,
+ "step": 354
+ },
+ {
+ "epoch": 0.07,
+ "learning_rate": 1.9923134752322187e-05,
+ "loss": 0.8612,
+ "step": 355
+ },
+ {
+ "epoch": 0.07,
+ "learning_rate": 1.9922361299958654e-05,
+ "loss": 0.8516,
+ "step": 356
+ },
+ {
+ "epoch": 0.07,
+ "learning_rate": 1.99215839908046e-05,
+ "loss": 0.8701,
+ "step": 357
+ },
+ {
+ "epoch": 0.07,
+ "learning_rate": 1.9920802825162153e-05,
+ "loss": 0.8995,
+ "step": 358
+ },
+ {
+ "epoch": 0.07,
+ "learning_rate": 1.992001780333496e-05,
+ "loss": 0.9227,
+ "step": 359
+ },
+ {
+ "epoch": 0.07,
+ "learning_rate": 1.9919228925628152e-05,
+ "loss": 0.8678,
+ "step": 360
+ },
+ {
+ "epoch": 0.07,
+ "learning_rate": 1.9918436192348362e-05,
+ "loss": 0.9593,
+ "step": 361
+ },
+ {
+ "epoch": 0.07,
+ "learning_rate": 1.991763960380373e-05,
+ "loss": 0.8926,
+ "step": 362
+ },
+ {
+ "epoch": 0.07,
+ "learning_rate": 1.9916839160303878e-05,
+ "loss": 0.9093,
+ "step": 363
+ },
+ {
+ "epoch": 0.07,
+ "learning_rate": 1.9916034862159943e-05,
+ "loss": 0.9356,
+ "step": 364
+ },
+ {
+ "epoch": 0.07,
+ "learning_rate": 1.991522670968455e-05,
+ "loss": 0.9201,
+ "step": 365
+ },
+ {
+ "epoch": 0.07,
+ "learning_rate": 1.9914414703191823e-05,
+ "loss": 0.8757,
+ "step": 366
+ },
+ {
+ "epoch": 0.07,
+ "learning_rate": 1.9913598842997394e-05,
+ "loss": 0.9001,
+ "step": 367
+ },
+ {
+ "epoch": 0.07,
+ "learning_rate": 1.9912779129418377e-05,
+ "loss": 0.8995,
+ "step": 368
+ },
+ {
+ "epoch": 0.07,
+ "learning_rate": 1.991195556277339e-05,
+ "loss": 0.8323,
+ "step": 369
+ },
+ {
+ "epoch": 0.07,
+ "learning_rate": 1.991112814338257e-05,
+ "loss": 0.889,
+ "step": 370
+ },
+ {
+ "epoch": 0.07,
+ "learning_rate": 1.9910296871567508e-05,
+ "loss": 0.916,
+ "step": 371
+ },
+ {
+ "epoch": 0.07,
+ "learning_rate": 1.990946174765133e-05,
+ "loss": 0.8501,
+ "step": 372
+ },
+ {
+ "epoch": 0.07,
+ "learning_rate": 1.990862277195864e-05,
+ "loss": 2.7066,
+ "step": 373
+ },
+ {
+ "epoch": 0.07,
+ "learning_rate": 1.9907779944815554e-05,
+ "loss": 0.9214,
+ "step": 374
+ },
+ {
+ "epoch": 0.07,
+ "learning_rate": 1.9906933266549665e-05,
+ "loss": 0.9269,
+ "step": 375
+ },
+ {
+ "epoch": 0.07,
+ "learning_rate": 1.9906082737490086e-05,
+ "loss": 0.9524,
+ "step": 376
+ },
+ {
+ "epoch": 0.07,
+ "learning_rate": 1.9905228357967403e-05,
+ "loss": 0.8736,
+ "step": 377
+ },
+ {
+ "epoch": 0.07,
+ "learning_rate": 1.990437012831372e-05,
+ "loss": 0.884,
+ "step": 378
+ },
+ {
+ "epoch": 0.07,
+ "learning_rate": 1.9903508048862626e-05,
+ "loss": 0.8941,
+ "step": 379
+ },
+ {
+ "epoch": 0.07,
+ "learning_rate": 1.9902642119949203e-05,
+ "loss": 0.9201,
+ "step": 380
+ },
+ {
+ "epoch": 0.07,
+ "learning_rate": 1.9901772341910043e-05,
+ "loss": 2.5242,
+ "step": 381
+ },
+ {
+ "epoch": 0.07,
+ "learning_rate": 1.9900898715083218e-05,
+ "loss": 0.8817,
+ "step": 382
+ },
+ {
+ "epoch": 0.07,
+ "learning_rate": 1.9900021239808312e-05,
+ "loss": 0.901,
+ "step": 383
+ },
+ {
+ "epoch": 0.07,
+ "learning_rate": 1.989913991642639e-05,
+ "loss": 0.8847,
+ "step": 384
+ },
+ {
+ "epoch": 0.07,
+ "learning_rate": 1.9898254745280024e-05,
+ "loss": 2.4684,
+ "step": 385
+ },
+ {
+ "epoch": 0.07,
+ "learning_rate": 1.9897365726713277e-05,
+ "loss": 0.9136,
+ "step": 386
+ },
+ {
+ "epoch": 0.07,
+ "learning_rate": 1.9896472861071704e-05,
+ "loss": 0.9159,
+ "step": 387
+ },
+ {
+ "epoch": 0.07,
+ "learning_rate": 1.989557614870236e-05,
+ "loss": 0.9223,
+ "step": 388
+ },
+ {
+ "epoch": 0.07,
+ "learning_rate": 1.98946755899538e-05,
+ "loss": 0.9004,
+ "step": 389
+ },
+ {
+ "epoch": 0.08,
+ "learning_rate": 1.989377118517606e-05,
+ "loss": 0.79,
+ "step": 390
+ },
+ {
+ "epoch": 0.08,
+ "learning_rate": 1.9892862934720685e-05,
+ "loss": 0.9284,
+ "step": 391
+ },
+ {
+ "epoch": 0.08,
+ "learning_rate": 1.9891950838940708e-05,
+ "loss": 0.8883,
+ "step": 392
+ },
+ {
+ "epoch": 0.08,
+ "learning_rate": 1.9891034898190657e-05,
+ "loss": 0.9104,
+ "step": 393
+ },
+ {
+ "epoch": 0.08,
+ "learning_rate": 1.9890115112826556e-05,
+ "loss": 0.8942,
+ "step": 394
+ },
+ {
+ "epoch": 0.08,
+ "learning_rate": 1.988919148320592e-05,
+ "loss": 0.8627,
+ "step": 395
+ },
+ {
+ "epoch": 0.08,
+ "learning_rate": 1.9888264009687765e-05,
+ "loss": 0.9196,
+ "step": 396
+ },
+ {
+ "epoch": 0.08,
+ "learning_rate": 1.9887332692632596e-05,
+ "loss": 2.224,
+ "step": 397
+ },
+ {
+ "epoch": 0.08,
+ "learning_rate": 1.988639753240241e-05,
+ "loss": 0.9412,
+ "step": 398
+ },
+ {
+ "epoch": 0.08,
+ "learning_rate": 1.9885458529360704e-05,
+ "loss": 2.226,
+ "step": 399
+ },
+ {
+ "epoch": 0.08,
+ "learning_rate": 1.9884515683872464e-05,
+ "loss": 0.9191,
+ "step": 400
+ },
+ {
+ "epoch": 0.08,
+ "learning_rate": 1.988356899630417e-05,
+ "loss": 0.8795,
+ "step": 401
+ },
+ {
+ "epoch": 0.08,
+ "learning_rate": 1.9882618467023804e-05,
+ "loss": 0.8568,
+ "step": 402
+ },
+ {
+ "epoch": 0.08,
+ "learning_rate": 1.988166409640082e-05,
+ "loss": 0.8616,
+ "step": 403
+ },
+ {
+ "epoch": 0.08,
+ "learning_rate": 1.988070588480619e-05,
+ "loss": 0.9114,
+ "step": 404
+ },
+ {
+ "epoch": 0.08,
+ "learning_rate": 1.9879743832612367e-05,
+ "loss": 0.9165,
+ "step": 405
+ },
+ {
+ "epoch": 0.08,
+ "learning_rate": 1.9878777940193294e-05,
+ "loss": 0.9183,
+ "step": 406
+ },
+ {
+ "epoch": 0.08,
+ "learning_rate": 1.9877808207924406e-05,
+ "loss": 0.9264,
+ "step": 407
+ },
+ {
+ "epoch": 0.08,
+ "learning_rate": 1.9876834636182644e-05,
+ "loss": 0.8964,
+ "step": 408
+ },
+ {
+ "epoch": 0.08,
+ "learning_rate": 1.987585722534643e-05,
+ "loss": 2.1586,
+ "step": 409
+ },
+ {
+ "epoch": 0.08,
+ "learning_rate": 1.9874875975795677e-05,
+ "loss": 0.8004,
+ "step": 410
+ },
+ {
+ "epoch": 0.08,
+ "learning_rate": 1.9873890887911796e-05,
+ "loss": 0.8615,
+ "step": 411
+ },
+ {
+ "epoch": 0.08,
+ "learning_rate": 1.9872901962077687e-05,
+ "loss": 0.8753,
+ "step": 412
+ },
+ {
+ "epoch": 0.08,
+ "learning_rate": 1.9871909198677744e-05,
+ "loss": 0.8499,
+ "step": 413
+ },
+ {
+ "epoch": 0.08,
+ "learning_rate": 1.9870912598097847e-05,
+ "loss": 0.8729,
+ "step": 414
+ },
+ {
+ "epoch": 0.08,
+ "learning_rate": 1.986991216072538e-05,
+ "loss": 0.8856,
+ "step": 415
+ },
+ {
+ "epoch": 0.08,
+ "learning_rate": 1.9868907886949197e-05,
+ "loss": 0.8347,
+ "step": 416
+ },
+ {
+ "epoch": 0.08,
+ "learning_rate": 1.9867899777159665e-05,
+ "loss": 0.8403,
+ "step": 417
+ },
+ {
+ "epoch": 0.08,
+ "learning_rate": 1.9866887831748633e-05,
+ "loss": 0.9432,
+ "step": 418
+ },
+ {
+ "epoch": 0.08,
+ "learning_rate": 1.9865872051109435e-05,
+ "loss": 0.9856,
+ "step": 419
+ },
+ {
+ "epoch": 0.08,
+ "learning_rate": 1.986485243563691e-05,
+ "loss": 0.8538,
+ "step": 420
+ },
+ {
+ "epoch": 0.08,
+ "learning_rate": 1.9863828985727373e-05,
+ "loss": 0.9194,
+ "step": 421
+ },
+ {
+ "epoch": 0.08,
+ "learning_rate": 1.9862801701778637e-05,
+ "loss": 0.9017,
+ "step": 422
+ },
+ {
+ "epoch": 0.08,
+ "learning_rate": 1.986177058419001e-05,
+ "loss": 0.8562,
+ "step": 423
+ },
+ {
+ "epoch": 0.08,
+ "learning_rate": 1.9860735633362273e-05,
+ "loss": 0.8679,
+ "step": 424
+ },
+ {
+ "epoch": 0.08,
+ "learning_rate": 1.9859696849697718e-05,
+ "loss": 0.8983,
+ "step": 425
+ },
+ {
+ "epoch": 0.08,
+ "learning_rate": 1.9858654233600107e-05,
+ "loss": 0.885,
+ "step": 426
+ },
+ {
+ "epoch": 0.08,
+ "learning_rate": 1.9857607785474713e-05,
+ "loss": 0.9417,
+ "step": 427
+ },
+ {
+ "epoch": 0.08,
+ "learning_rate": 1.985655750572828e-05,
+ "loss": 2.3205,
+ "step": 428
+ },
+ {
+ "epoch": 0.08,
+ "learning_rate": 1.985550339476905e-05,
+ "loss": 2.2915,
+ "step": 429
+ },
+ {
+ "epoch": 0.08,
+ "learning_rate": 1.9854445453006753e-05,
+ "loss": 0.9068,
+ "step": 430
+ },
+ {
+ "epoch": 0.08,
+ "learning_rate": 1.985338368085261e-05,
+ "loss": 0.8349,
+ "step": 431
+ },
+ {
+ "epoch": 0.08,
+ "learning_rate": 1.9852318078719324e-05,
+ "loss": 0.8665,
+ "step": 432
+ },
+ {
+ "epoch": 0.08,
+ "learning_rate": 1.985124864702109e-05,
+ "loss": 0.9272,
+ "step": 433
+ },
+ {
+ "epoch": 0.08,
+ "learning_rate": 1.9850175386173596e-05,
+ "loss": 2.1325,
+ "step": 434
+ },
+ {
+ "epoch": 0.08,
+ "learning_rate": 1.984909829659402e-05,
+ "loss": 0.8765,
+ "step": 435
+ },
+ {
+ "epoch": 0.08,
+ "learning_rate": 1.9848017378701018e-05,
+ "loss": 0.9026,
+ "step": 436
+ },
+ {
+ "epoch": 0.08,
+ "learning_rate": 1.9846932632914733e-05,
+ "loss": 0.8801,
+ "step": 437
+ },
+ {
+ "epoch": 0.08,
+ "learning_rate": 1.9845844059656814e-05,
+ "loss": 0.8453,
+ "step": 438
+ },
+ {
+ "epoch": 0.08,
+ "learning_rate": 1.984475165935038e-05,
+ "loss": 0.8812,
+ "step": 439
+ },
+ {
+ "epoch": 0.08,
+ "learning_rate": 1.9843655432420045e-05,
+ "loss": 0.8965,
+ "step": 440
+ },
+ {
+ "epoch": 0.08,
+ "learning_rate": 1.9842555379291907e-05,
+ "loss": 0.8934,
+ "step": 441
+ },
+ {
+ "epoch": 0.09,
+ "learning_rate": 1.9841451500393552e-05,
+ "loss": 0.8806,
+ "step": 442
+ },
+ {
+ "epoch": 0.09,
+ "learning_rate": 1.984034379615406e-05,
+ "loss": 0.8848,
+ "step": 443
+ },
+ {
+ "epoch": 0.09,
+ "learning_rate": 1.9839232267003988e-05,
+ "loss": 0.8571,
+ "step": 444
+ },
+ {
+ "epoch": 0.09,
+ "learning_rate": 1.9838116913375386e-05,
+ "loss": 0.928,
+ "step": 445
+ },
+ {
+ "epoch": 0.09,
+ "learning_rate": 1.9836997735701788e-05,
+ "loss": 0.8896,
+ "step": 446
+ },
+ {
+ "epoch": 0.09,
+ "learning_rate": 1.983587473441821e-05,
+ "loss": 0.8484,
+ "step": 447
+ },
+ {
+ "epoch": 0.09,
+ "learning_rate": 1.9834747909961166e-05,
+ "loss": 0.8917,
+ "step": 448
+ },
+ {
+ "epoch": 0.09,
+ "learning_rate": 1.9833617262768644e-05,
+ "loss": 2.278,
+ "step": 449
+ },
+ {
+ "epoch": 0.09,
+ "learning_rate": 1.983248279328013e-05,
+ "loss": 0.8912,
+ "step": 450
+ },
+ {
+ "epoch": 0.09,
+ "learning_rate": 1.9831344501936578e-05,
+ "loss": 0.8804,
+ "step": 451
+ },
+ {
+ "epoch": 0.09,
+ "learning_rate": 1.983020238918045e-05,
+ "loss": 0.88,
+ "step": 452
+ },
+ {
+ "epoch": 0.09,
+ "learning_rate": 1.982905645545567e-05,
+ "loss": 0.8896,
+ "step": 453
+ },
+ {
+ "epoch": 0.09,
+ "learning_rate": 1.9827906701207667e-05,
+ "loss": 0.8869,
+ "step": 454
+ },
+ {
+ "epoch": 0.09,
+ "learning_rate": 1.9826753126883346e-05,
+ "loss": 0.9485,
+ "step": 455
+ },
+ {
+ "epoch": 0.09,
+ "learning_rate": 1.9825595732931092e-05,
+ "loss": 0.8552,
+ "step": 456
+ },
+ {
+ "epoch": 0.09,
+ "learning_rate": 1.9824434519800788e-05,
+ "loss": 0.9151,
+ "step": 457
+ },
+ {
+ "epoch": 0.09,
+ "learning_rate": 1.9823269487943787e-05,
+ "loss": 0.9058,
+ "step": 458
+ },
+ {
+ "epoch": 0.09,
+ "learning_rate": 1.982210063781294e-05,
+ "loss": 0.8899,
+ "step": 459
+ },
+ {
+ "epoch": 0.09,
+ "learning_rate": 1.9820927969862573e-05,
+ "loss": 2.187,
+ "step": 460
+ },
+ {
+ "epoch": 0.09,
+ "learning_rate": 1.9819751484548496e-05,
+ "loss": 2.1938,
+ "step": 461
+ },
+ {
+ "epoch": 0.09,
+ "learning_rate": 1.9818571182328003e-05,
+ "loss": 0.8661,
+ "step": 462
+ },
+ {
+ "epoch": 0.09,
+ "learning_rate": 1.981738706365988e-05,
+ "loss": 0.8885,
+ "step": 463
+ },
+ {
+ "epoch": 0.09,
+ "learning_rate": 1.9816199129004387e-05,
+ "loss": 0.9885,
+ "step": 464
+ },
+ {
+ "epoch": 0.09,
+ "learning_rate": 1.9815007378823273e-05,
+ "loss": 0.9154,
+ "step": 465
+ },
+ {
+ "epoch": 0.09,
+ "learning_rate": 1.981381181357976e-05,
+ "loss": 0.8881,
+ "step": 466
+ },
+ {
+ "epoch": 0.09,
+ "learning_rate": 1.9812612433738572e-05,
+ "loss": 0.8978,
+ "step": 467
+ },
+ {
+ "epoch": 0.09,
+ "learning_rate": 1.9811409239765896e-05,
+ "loss": 0.841,
+ "step": 468
+ },
+ {
+ "epoch": 0.09,
+ "learning_rate": 1.981020223212941e-05,
+ "loss": 0.8902,
+ "step": 469
+ },
+ {
+ "epoch": 0.09,
+ "learning_rate": 1.9808991411298276e-05,
+ "loss": 0.8813,
+ "step": 470
+ },
+ {
+ "epoch": 0.09,
+ "learning_rate": 1.9807776777743136e-05,
+ "loss": 0.9064,
+ "step": 471
+ },
+ {
+ "epoch": 0.09,
+ "learning_rate": 1.9806558331936118e-05,
+ "loss": 0.8918,
+ "step": 472
+ },
+ {
+ "epoch": 0.09,
+ "learning_rate": 1.9805336074350817e-05,
+ "loss": 0.8283,
+ "step": 473
+ },
+ {
+ "epoch": 0.09,
+ "learning_rate": 1.9804110005462332e-05,
+ "loss": 0.9719,
+ "step": 474
+ },
+ {
+ "epoch": 0.09,
+ "learning_rate": 1.9802880125747227e-05,
+ "loss": 0.931,
+ "step": 475
+ },
+ {
+ "epoch": 0.09,
+ "learning_rate": 1.9801646435683556e-05,
+ "loss": 0.8596,
+ "step": 476
+ },
+ {
+ "epoch": 0.09,
+ "learning_rate": 1.9800408935750846e-05,
+ "loss": 2.2522,
+ "step": 477
+ },
+ {
+ "epoch": 0.09,
+ "learning_rate": 1.979916762643011e-05,
+ "loss": 2.3196,
+ "step": 478
+ },
+ {
+ "epoch": 0.09,
+ "learning_rate": 1.9797922508203843e-05,
+ "loss": 0.8702,
+ "step": 479
+ },
+ {
+ "epoch": 0.09,
+ "learning_rate": 1.979667358155602e-05,
+ "loss": 0.8356,
+ "step": 480
+ },
+ {
+ "epoch": 0.09,
+ "learning_rate": 1.9795420846972088e-05,
+ "loss": 0.8778,
+ "step": 481
+ },
+ {
+ "epoch": 0.09,
+ "learning_rate": 1.9794164304938985e-05,
+ "loss": 0.8719,
+ "step": 482
+ },
+ {
+ "epoch": 0.09,
+ "learning_rate": 1.979290395594513e-05,
+ "loss": 0.8731,
+ "step": 483
+ },
+ {
+ "epoch": 0.09,
+ "learning_rate": 1.9791639800480408e-05,
+ "loss": 2.2153,
+ "step": 484
+ },
+ {
+ "epoch": 0.09,
+ "learning_rate": 1.97903718390362e-05,
+ "loss": 0.9212,
+ "step": 485
+ },
+ {
+ "epoch": 0.09,
+ "learning_rate": 1.9789100072105353e-05,
+ "loss": 0.8142,
+ "step": 486
+ },
+ {
+ "epoch": 0.09,
+ "learning_rate": 1.97878245001822e-05,
+ "loss": 0.9199,
+ "step": 487
+ },
+ {
+ "epoch": 0.09,
+ "learning_rate": 1.9786545123762556e-05,
+ "loss": 0.9506,
+ "step": 488
+ },
+ {
+ "epoch": 0.09,
+ "learning_rate": 1.9785261943343702e-05,
+ "loss": 0.8306,
+ "step": 489
+ },
+ {
+ "epoch": 0.09,
+ "learning_rate": 1.9783974959424412e-05,
+ "loss": 0.8832,
+ "step": 490
+ },
+ {
+ "epoch": 0.09,
+ "learning_rate": 1.9782684172504933e-05,
+ "loss": 0.8651,
+ "step": 491
+ },
+ {
+ "epoch": 0.09,
+ "learning_rate": 1.978138958308699e-05,
+ "loss": 0.9126,
+ "step": 492
+ },
+ {
+ "epoch": 0.09,
+ "learning_rate": 1.9780091191673784e-05,
+ "loss": 0.8705,
+ "step": 493
+ },
+ {
+ "epoch": 0.1,
+ "learning_rate": 1.9778788998769994e-05,
+ "loss": 0.895,
+ "step": 494
+ },
+ {
+ "epoch": 0.1,
+ "learning_rate": 1.9777483004881785e-05,
+ "loss": 0.8644,
+ "step": 495
+ },
+ {
+ "epoch": 0.1,
+ "learning_rate": 1.9776173210516783e-05,
+ "loss": 2.1701,
+ "step": 496
+ },
+ {
+ "epoch": 0.1,
+ "learning_rate": 1.9774859616184106e-05,
+ "loss": 0.9324,
+ "step": 497
+ },
+ {
+ "epoch": 0.1,
+ "learning_rate": 1.977354222239435e-05,
+ "loss": 0.9036,
+ "step": 498
+ },
+ {
+ "epoch": 0.1,
+ "learning_rate": 1.977222102965957e-05,
+ "loss": 0.8742,
+ "step": 499
+ },
+ {
+ "epoch": 0.1,
+ "learning_rate": 1.9770896038493317e-05,
+ "loss": 0.8798,
+ "step": 500
+ },
+ {
+ "epoch": 0.1,
+ "learning_rate": 1.976956724941061e-05,
+ "loss": 2.2773,
+ "step": 501
+ },
+ {
+ "epoch": 0.1,
+ "learning_rate": 1.9768234662927942e-05,
+ "loss": 0.9096,
+ "step": 502
+ },
+ {
+ "epoch": 0.1,
+ "learning_rate": 1.976689827956329e-05,
+ "loss": 2.0418,
+ "step": 503
+ },
+ {
+ "epoch": 0.1,
+ "learning_rate": 1.97655580998361e-05,
+ "loss": 0.8622,
+ "step": 504
+ },
+ {
+ "epoch": 0.1,
+ "learning_rate": 1.976421412426729e-05,
+ "loss": 2.0362,
+ "step": 505
+ },
+ {
+ "epoch": 0.1,
+ "learning_rate": 1.976286635337927e-05,
+ "loss": 0.8618,
+ "step": 506
+ },
+ {
+ "epoch": 0.1,
+ "learning_rate": 1.9761514787695903e-05,
+ "loss": 0.8195,
+ "step": 507
+ },
+ {
+ "epoch": 0.1,
+ "learning_rate": 1.9760159427742547e-05,
+ "loss": 0.9455,
+ "step": 508
+ },
+ {
+ "epoch": 0.1,
+ "learning_rate": 1.9758800274046022e-05,
+ "loss": 0.8624,
+ "step": 509
+ },
+ {
+ "epoch": 0.1,
+ "learning_rate": 1.9757437327134627e-05,
+ "loss": 1.9268,
+ "step": 510
+ },
+ {
+ "epoch": 0.1,
+ "learning_rate": 1.9756070587538136e-05,
+ "loss": 0.923,
+ "step": 511
+ },
+ {
+ "epoch": 0.1,
+ "learning_rate": 1.9754700055787798e-05,
+ "loss": 0.8727,
+ "step": 512
+ },
+ {
+ "epoch": 0.1,
+ "learning_rate": 1.975332573241633e-05,
+ "loss": 2.0775,
+ "step": 513
+ },
+ {
+ "epoch": 0.1,
+ "learning_rate": 1.975194761795793e-05,
+ "loss": 0.8691,
+ "step": 514
+ },
+ {
+ "epoch": 0.1,
+ "learning_rate": 1.975056571294827e-05,
+ "loss": 0.8607,
+ "step": 515
+ },
+ {
+ "epoch": 0.1,
+ "learning_rate": 1.9749180017924484e-05,
+ "loss": 0.8794,
+ "step": 516
+ },
+ {
+ "epoch": 0.1,
+ "learning_rate": 1.9747790533425192e-05,
+ "loss": 0.8425,
+ "step": 517
+ },
+ {
+ "epoch": 0.1,
+ "learning_rate": 1.9746397259990485e-05,
+ "loss": 0.8417,
+ "step": 518
+ },
+ {
+ "epoch": 0.1,
+ "learning_rate": 1.974500019816192e-05,
+ "loss": 0.9344,
+ "step": 519
+ },
+ {
+ "epoch": 0.1,
+ "learning_rate": 1.974359934848253e-05,
+ "loss": 0.8074,
+ "step": 520
+ },
+ {
+ "epoch": 0.1,
+ "learning_rate": 1.9742194711496823e-05,
+ "loss": 0.9141,
+ "step": 521
+ },
+ {
+ "epoch": 0.1,
+ "learning_rate": 1.9740786287750778e-05,
+ "loss": 0.8646,
+ "step": 522
+ },
+ {
+ "epoch": 0.1,
+ "learning_rate": 1.9739374077791843e-05,
+ "loss": 0.9109,
+ "step": 523
+ },
+ {
+ "epoch": 0.1,
+ "learning_rate": 1.9737958082168936e-05,
+ "loss": 0.9403,
+ "step": 524
+ },
+ {
+ "epoch": 0.1,
+ "learning_rate": 1.9736538301432457e-05,
+ "loss": 0.8529,
+ "step": 525
+ },
+ {
+ "epoch": 0.1,
+ "learning_rate": 1.9735114736134266e-05,
+ "loss": 0.8636,
+ "step": 526
+ },
+ {
+ "epoch": 0.1,
+ "learning_rate": 1.97336873868277e-05,
+ "loss": 0.8864,
+ "step": 527
+ },
+ {
+ "epoch": 0.1,
+ "learning_rate": 1.9732256254067567e-05,
+ "loss": 0.9097,
+ "step": 528
+ },
+ {
+ "epoch": 0.1,
+ "learning_rate": 1.973082133841014e-05,
+ "loss": 0.9274,
+ "step": 529
+ },
+ {
+ "epoch": 0.1,
+ "learning_rate": 1.9729382640413163e-05,
+ "loss": 0.8982,
+ "step": 530
+ },
+ {
+ "epoch": 0.1,
+ "learning_rate": 1.9727940160635862e-05,
+ "loss": 0.8219,
+ "step": 531
+ },
+ {
+ "epoch": 0.1,
+ "learning_rate": 1.972649389963892e-05,
+ "loss": 0.8965,
+ "step": 532
+ },
+ {
+ "epoch": 0.1,
+ "learning_rate": 1.9725043857984493e-05,
+ "loss": 0.9066,
+ "step": 533
+ },
+ {
+ "epoch": 0.1,
+ "learning_rate": 1.972359003623621e-05,
+ "loss": 2.0273,
+ "step": 534
+ },
+ {
+ "epoch": 0.1,
+ "learning_rate": 1.9722132434959163e-05,
+ "loss": 0.8955,
+ "step": 535
+ },
+ {
+ "epoch": 0.1,
+ "learning_rate": 1.9720671054719927e-05,
+ "loss": 0.9072,
+ "step": 536
+ },
+ {
+ "epoch": 0.1,
+ "learning_rate": 1.9719205896086522e-05,
+ "loss": 0.8935,
+ "step": 537
+ },
+ {
+ "epoch": 0.1,
+ "learning_rate": 1.971773695962846e-05,
+ "loss": 0.8417,
+ "step": 538
+ },
+ {
+ "epoch": 0.1,
+ "learning_rate": 1.971626424591671e-05,
+ "loss": 0.8562,
+ "step": 539
+ },
+ {
+ "epoch": 0.1,
+ "learning_rate": 1.971478775552371e-05,
+ "loss": 0.8834,
+ "step": 540
+ },
+ {
+ "epoch": 0.1,
+ "learning_rate": 1.9713307489023368e-05,
+ "loss": 0.8216,
+ "step": 541
+ },
+ {
+ "epoch": 0.1,
+ "learning_rate": 1.9711823446991056e-05,
+ "loss": 0.8705,
+ "step": 542
+ },
+ {
+ "epoch": 0.1,
+ "learning_rate": 1.9710335630003623e-05,
+ "loss": 0.8728,
+ "step": 543
+ },
+ {
+ "epoch": 0.1,
+ "learning_rate": 1.9708844038639373e-05,
+ "loss": 0.8557,
+ "step": 544
+ },
+ {
+ "epoch": 0.1,
+ "learning_rate": 1.9707348673478083e-05,
+ "loss": 1.9381,
+ "step": 545
+ },
+ {
+ "epoch": 0.11,
+ "learning_rate": 1.9705849535101e-05,
+ "loss": 0.8903,
+ "step": 546
+ },
+ {
+ "epoch": 0.11,
+ "learning_rate": 1.9704346624090833e-05,
+ "loss": 0.8862,
+ "step": 547
+ },
+ {
+ "epoch": 0.11,
+ "learning_rate": 1.9702839941031758e-05,
+ "loss": 0.9135,
+ "step": 548
+ },
+ {
+ "epoch": 0.11,
+ "learning_rate": 1.970132948650942e-05,
+ "loss": 0.9116,
+ "step": 549
+ },
+ {
+ "epoch": 0.11,
+ "learning_rate": 1.9699815261110925e-05,
+ "loss": 0.8216,
+ "step": 550
+ },
+ {
+ "epoch": 0.11,
+ "learning_rate": 1.9698297265424848e-05,
+ "loss": 0.9355,
+ "step": 551
+ },
+ {
+ "epoch": 0.11,
+ "learning_rate": 1.9696775500041227e-05,
+ "loss": 0.8924,
+ "step": 552
+ },
+ {
+ "epoch": 0.11,
+ "learning_rate": 1.9695249965551574e-05,
+ "loss": 0.9105,
+ "step": 553
+ },
+ {
+ "epoch": 0.11,
+ "learning_rate": 1.969372066254886e-05,
+ "loss": 0.8302,
+ "step": 554
+ },
+ {
+ "epoch": 0.11,
+ "learning_rate": 1.969218759162751e-05,
+ "loss": 0.8756,
+ "step": 555
+ },
+ {
+ "epoch": 0.11,
+ "learning_rate": 1.9690650753383434e-05,
+ "loss": 2.2122,
+ "step": 556
+ },
+ {
+ "epoch": 0.11,
+ "learning_rate": 1.968911014841399e-05,
+ "loss": 0.8221,
+ "step": 557
+ },
+ {
+ "epoch": 0.11,
+ "learning_rate": 1.9687565777318008e-05,
+ "loss": 0.8695,
+ "step": 558
+ },
+ {
+ "epoch": 0.11,
+ "learning_rate": 1.9686017640695783e-05,
+ "loss": 0.8514,
+ "step": 559
+ },
+ {
+ "epoch": 0.11,
+ "learning_rate": 1.9684465739149066e-05,
+ "loss": 0.8399,
+ "step": 560
+ },
+ {
+ "epoch": 0.11,
+ "learning_rate": 1.968291007328108e-05,
+ "loss": 0.8971,
+ "step": 561
+ },
+ {
+ "epoch": 0.11,
+ "learning_rate": 1.968135064369651e-05,
+ "loss": 0.8107,
+ "step": 562
+ },
+ {
+ "epoch": 0.11,
+ "learning_rate": 1.9679787451001492e-05,
+ "loss": 0.9083,
+ "step": 563
+ },
+ {
+ "epoch": 0.11,
+ "learning_rate": 1.967822049580364e-05,
+ "loss": 0.9752,
+ "step": 564
+ },
+ {
+ "epoch": 0.11,
+ "learning_rate": 1.9676649778712023e-05,
+ "loss": 0.8837,
+ "step": 565
+ },
+ {
+ "epoch": 0.11,
+ "learning_rate": 1.9675075300337177e-05,
+ "loss": 0.9044,
+ "step": 566
+ },
+ {
+ "epoch": 0.11,
+ "learning_rate": 1.9673497061291096e-05,
+ "loss": 0.8906,
+ "step": 567
+ },
+ {
+ "epoch": 0.11,
+ "learning_rate": 1.9671915062187236e-05,
+ "loss": 0.9388,
+ "step": 568
+ },
+ {
+ "epoch": 0.11,
+ "learning_rate": 1.9670329303640508e-05,
+ "loss": 0.8719,
+ "step": 569
+ },
+ {
+ "epoch": 0.11,
+ "learning_rate": 1.9668739786267305e-05,
+ "loss": 0.9038,
+ "step": 570
+ },
+ {
+ "epoch": 0.11,
+ "learning_rate": 1.9667146510685457e-05,
+ "loss": 0.8419,
+ "step": 571
+ },
+ {
+ "epoch": 0.11,
+ "learning_rate": 1.966554947751427e-05,
+ "loss": 0.8479,
+ "step": 572
+ },
+ {
+ "epoch": 0.11,
+ "learning_rate": 1.96639486873745e-05,
+ "loss": 0.8749,
+ "step": 573
+ },
+ {
+ "epoch": 0.11,
+ "learning_rate": 1.9662344140888373e-05,
+ "loss": 0.9301,
+ "step": 574
+ },
+ {
+ "epoch": 0.11,
+ "learning_rate": 1.9660735838679575e-05,
+ "loss": 0.8432,
+ "step": 575
+ },
+ {
+ "epoch": 0.11,
+ "learning_rate": 1.9659123781373243e-05,
+ "loss": 0.8843,
+ "step": 576
+ },
+ {
+ "epoch": 0.11,
+ "learning_rate": 1.9657507969595974e-05,
+ "loss": 0.8868,
+ "step": 577
+ },
+ {
+ "epoch": 0.11,
+ "learning_rate": 1.965588840397584e-05,
+ "loss": 0.8831,
+ "step": 578
+ },
+ {
+ "epoch": 0.11,
+ "learning_rate": 1.9654265085142356e-05,
+ "loss": 0.9226,
+ "step": 579
+ },
+ {
+ "epoch": 0.11,
+ "learning_rate": 1.96526380137265e-05,
+ "loss": 0.8238,
+ "step": 580
+ },
+ {
+ "epoch": 0.11,
+ "learning_rate": 1.965100719036071e-05,
+ "loss": 0.8839,
+ "step": 581
+ },
+ {
+ "epoch": 0.11,
+ "learning_rate": 1.9649372615678883e-05,
+ "loss": 0.8648,
+ "step": 582
+ },
+ {
+ "epoch": 0.11,
+ "learning_rate": 1.964773429031637e-05,
+ "loss": 0.9046,
+ "step": 583
+ },
+ {
+ "epoch": 0.11,
+ "learning_rate": 1.9646092214909985e-05,
+ "loss": 0.9295,
+ "step": 584
+ },
+ {
+ "epoch": 0.11,
+ "learning_rate": 1.9644446390098e-05,
+ "loss": 0.8591,
+ "step": 585
+ },
+ {
+ "epoch": 0.11,
+ "learning_rate": 1.9642796816520134e-05,
+ "loss": 0.8256,
+ "step": 586
+ },
+ {
+ "epoch": 0.11,
+ "learning_rate": 1.964114349481758e-05,
+ "loss": 0.9197,
+ "step": 587
+ },
+ {
+ "epoch": 0.11,
+ "learning_rate": 1.9639486425632978e-05,
+ "loss": 0.8519,
+ "step": 588
+ },
+ {
+ "epoch": 0.11,
+ "learning_rate": 1.963782560961042e-05,
+ "loss": 0.8848,
+ "step": 589
+ },
+ {
+ "epoch": 0.11,
+ "learning_rate": 1.9636161047395464e-05,
+ "loss": 0.8409,
+ "step": 590
+ },
+ {
+ "epoch": 0.11,
+ "learning_rate": 1.9634492739635117e-05,
+ "loss": 0.8821,
+ "step": 591
+ },
+ {
+ "epoch": 0.11,
+ "learning_rate": 1.963282068697785e-05,
+ "loss": 0.8602,
+ "step": 592
+ },
+ {
+ "epoch": 0.11,
+ "learning_rate": 1.9631144890073577e-05,
+ "loss": 0.8957,
+ "step": 593
+ },
+ {
+ "epoch": 0.11,
+ "learning_rate": 1.9629465349573686e-05,
+ "loss": 2.1188,
+ "step": 594
+ },
+ {
+ "epoch": 0.11,
+ "learning_rate": 1.9627782066131e-05,
+ "loss": 0.7983,
+ "step": 595
+ },
+ {
+ "epoch": 0.11,
+ "learning_rate": 1.9626095040399807e-05,
+ "loss": 0.8505,
+ "step": 596
+ },
+ {
+ "epoch": 0.11,
+ "learning_rate": 1.962440427303585e-05,
+ "loss": 0.9251,
+ "step": 597
+ },
+ {
+ "epoch": 0.12,
+ "learning_rate": 1.9622709764696323e-05,
+ "loss": 0.8512,
+ "step": 598
+ },
+ {
+ "epoch": 0.12,
+ "learning_rate": 1.9621011516039882e-05,
+ "loss": 0.9174,
+ "step": 599
+ },
+ {
+ "epoch": 0.12,
+ "learning_rate": 1.9619309527726624e-05,
+ "loss": 0.84,
+ "step": 600
+ },
+ {
+ "epoch": 0.12,
+ "learning_rate": 1.9617603800418113e-05,
+ "loss": 0.8534,
+ "step": 601
+ },
+ {
+ "epoch": 0.12,
+ "learning_rate": 1.961589433477735e-05,
+ "loss": 0.8736,
+ "step": 602
+ },
+ {
+ "epoch": 0.12,
+ "learning_rate": 1.9614181131468815e-05,
+ "loss": 0.8835,
+ "step": 603
+ },
+ {
+ "epoch": 0.12,
+ "learning_rate": 1.961246419115841e-05,
+ "loss": 0.8491,
+ "step": 604
+ },
+ {
+ "epoch": 0.12,
+ "learning_rate": 1.9610743514513507e-05,
+ "loss": 0.8787,
+ "step": 605
+ },
+ {
+ "epoch": 0.12,
+ "learning_rate": 1.960901910220293e-05,
+ "loss": 0.9005,
+ "step": 606
+ },
+ {
+ "epoch": 0.12,
+ "learning_rate": 1.9607290954896952e-05,
+ "loss": 0.9102,
+ "step": 607
+ },
+ {
+ "epoch": 0.12,
+ "learning_rate": 1.96055590732673e-05,
+ "loss": 0.8503,
+ "step": 608
+ },
+ {
+ "epoch": 0.12,
+ "learning_rate": 1.9603823457987153e-05,
+ "loss": 0.8691,
+ "step": 609
+ },
+ {
+ "epoch": 0.12,
+ "learning_rate": 1.9602084109731132e-05,
+ "loss": 0.9273,
+ "step": 610
+ },
+ {
+ "epoch": 0.12,
+ "learning_rate": 1.9600341029175323e-05,
+ "loss": 0.9245,
+ "step": 611
+ },
+ {
+ "epoch": 0.12,
+ "learning_rate": 1.959859421699725e-05,
+ "loss": 0.9015,
+ "step": 612
+ },
+ {
+ "epoch": 0.12,
+ "learning_rate": 1.95968436738759e-05,
+ "loss": 0.8847,
+ "step": 613
+ },
+ {
+ "epoch": 0.12,
+ "learning_rate": 1.9595089400491697e-05,
+ "loss": 0.856,
+ "step": 614
+ },
+ {
+ "epoch": 0.12,
+ "learning_rate": 1.9593331397526524e-05,
+ "loss": 0.7942,
+ "step": 615
+ },
+ {
+ "epoch": 0.12,
+ "learning_rate": 1.959156966566371e-05,
+ "loss": 0.8934,
+ "step": 616
+ },
+ {
+ "epoch": 0.12,
+ "learning_rate": 1.9589804205588043e-05,
+ "loss": 0.9177,
+ "step": 617
+ },
+ {
+ "epoch": 0.12,
+ "learning_rate": 1.958803501798574e-05,
+ "loss": 0.8901,
+ "step": 618
+ },
+ {
+ "epoch": 0.12,
+ "learning_rate": 1.9586262103544484e-05,
+ "loss": 0.8884,
+ "step": 619
+ },
+ {
+ "epoch": 0.12,
+ "learning_rate": 1.9584485462953404e-05,
+ "loss": 2.0931,
+ "step": 620
+ },
+ {
+ "epoch": 0.12,
+ "learning_rate": 1.9582705096903068e-05,
+ "loss": 0.8792,
+ "step": 621
+ },
+ {
+ "epoch": 0.12,
+ "learning_rate": 1.9580921006085506e-05,
+ "loss": 0.8693,
+ "step": 622
+ },
+ {
+ "epoch": 0.12,
+ "learning_rate": 1.957913319119418e-05,
+ "loss": 2.0109,
+ "step": 623
+ },
+ {
+ "epoch": 0.12,
+ "learning_rate": 1.9577341652924016e-05,
+ "loss": 2.1153,
+ "step": 624
+ },
+ {
+ "epoch": 0.12,
+ "learning_rate": 1.9575546391971376e-05,
+ "loss": 0.8229,
+ "step": 625
+ },
+ {
+ "epoch": 0.12,
+ "learning_rate": 1.957374740903407e-05,
+ "loss": 0.8667,
+ "step": 626
+ },
+ {
+ "epoch": 0.12,
+ "learning_rate": 1.957194470481136e-05,
+ "loss": 0.9419,
+ "step": 627
+ },
+ {
+ "epoch": 0.12,
+ "learning_rate": 1.957013828000395e-05,
+ "loss": 0.9232,
+ "step": 628
+ },
+ {
+ "epoch": 0.12,
+ "learning_rate": 1.9568328135313995e-05,
+ "loss": 0.8608,
+ "step": 629
+ },
+ {
+ "epoch": 0.12,
+ "learning_rate": 1.9566514271445086e-05,
+ "loss": 0.896,
+ "step": 630
+ },
+ {
+ "epoch": 0.12,
+ "learning_rate": 1.9564696689102274e-05,
+ "loss": 0.8773,
+ "step": 631
+ },
+ {
+ "epoch": 0.12,
+ "learning_rate": 1.956287538899204e-05,
+ "loss": 0.9829,
+ "step": 632
+ },
+ {
+ "epoch": 0.12,
+ "learning_rate": 1.9561050371822325e-05,
+ "loss": 0.9084,
+ "step": 633
+ },
+ {
+ "epoch": 0.12,
+ "learning_rate": 1.9559221638302502e-05,
+ "loss": 0.9247,
+ "step": 634
+ },
+ {
+ "epoch": 0.12,
+ "learning_rate": 1.9557389189143392e-05,
+ "loss": 1.9523,
+ "step": 635
+ },
+ {
+ "epoch": 0.12,
+ "learning_rate": 1.955555302505727e-05,
+ "loss": 0.8928,
+ "step": 636
+ },
+ {
+ "epoch": 0.12,
+ "learning_rate": 1.955371314675784e-05,
+ "loss": 0.8833,
+ "step": 637
+ },
+ {
+ "epoch": 0.12,
+ "learning_rate": 1.9551869554960263e-05,
+ "loss": 0.9029,
+ "step": 638
+ },
+ {
+ "epoch": 0.12,
+ "learning_rate": 1.955002225038113e-05,
+ "loss": 0.9104,
+ "step": 639
+ },
+ {
+ "epoch": 0.12,
+ "learning_rate": 1.954817123373849e-05,
+ "loss": 0.8972,
+ "step": 640
+ },
+ {
+ "epoch": 0.12,
+ "learning_rate": 1.9546316505751823e-05,
+ "loss": 0.9438,
+ "step": 641
+ },
+ {
+ "epoch": 0.12,
+ "learning_rate": 1.9544458067142056e-05,
+ "loss": 1.9818,
+ "step": 642
+ },
+ {
+ "epoch": 0.12,
+ "learning_rate": 1.954259591863156e-05,
+ "loss": 0.8804,
+ "step": 643
+ },
+ {
+ "epoch": 0.12,
+ "learning_rate": 1.9540730060944142e-05,
+ "loss": 0.8838,
+ "step": 644
+ },
+ {
+ "epoch": 0.12,
+ "learning_rate": 1.953886049480506e-05,
+ "loss": 0.8327,
+ "step": 645
+ },
+ {
+ "epoch": 0.12,
+ "learning_rate": 1.953698722094101e-05,
+ "loss": 0.8597,
+ "step": 646
+ },
+ {
+ "epoch": 0.12,
+ "learning_rate": 1.953511024008012e-05,
+ "loss": 0.8605,
+ "step": 647
+ },
+ {
+ "epoch": 0.12,
+ "learning_rate": 1.9533229552951972e-05,
+ "loss": 1.8238,
+ "step": 648
+ },
+ {
+ "epoch": 0.12,
+ "learning_rate": 1.9531345160287582e-05,
+ "loss": 0.8499,
+ "step": 649
+ },
+ {
+ "epoch": 0.13,
+ "learning_rate": 1.9529457062819408e-05,
+ "loss": 1.8133,
+ "step": 650
+ },
+ {
+ "epoch": 0.13,
+ "learning_rate": 1.9527565261281346e-05,
+ "loss": 0.975,
+ "step": 651
+ },
+ {
+ "epoch": 0.13,
+ "learning_rate": 1.9525669756408737e-05,
+ "loss": 0.8545,
+ "step": 652
+ },
+ {
+ "epoch": 0.13,
+ "learning_rate": 1.9523770548938352e-05,
+ "loss": 0.8604,
+ "step": 653
+ },
+ {
+ "epoch": 0.13,
+ "learning_rate": 1.9521867639608412e-05,
+ "loss": 0.8803,
+ "step": 654
+ },
+ {
+ "epoch": 0.13,
+ "learning_rate": 1.951996102915857e-05,
+ "loss": 0.8892,
+ "step": 655
+ },
+ {
+ "epoch": 0.13,
+ "learning_rate": 1.9518050718329918e-05,
+ "loss": 0.8221,
+ "step": 656
+ },
+ {
+ "epoch": 0.13,
+ "learning_rate": 1.951613670786499e-05,
+ "loss": 0.8612,
+ "step": 657
+ },
+ {
+ "epoch": 0.13,
+ "learning_rate": 1.9514218998507754e-05,
+ "loss": 0.863,
+ "step": 658
+ },
+ {
+ "epoch": 0.13,
+ "learning_rate": 1.951229759100362e-05,
+ "loss": 0.8964,
+ "step": 659
+ },
+ {
+ "epoch": 0.13,
+ "learning_rate": 1.951037248609943e-05,
+ "loss": 0.7953,
+ "step": 660
+ },
+ {
+ "epoch": 0.13,
+ "learning_rate": 1.9508443684543468e-05,
+ "loss": 0.8492,
+ "step": 661
+ },
+ {
+ "epoch": 0.13,
+ "learning_rate": 1.9506511187085455e-05,
+ "loss": 0.9282,
+ "step": 662
+ },
+ {
+ "epoch": 0.13,
+ "learning_rate": 1.950457499447654e-05,
+ "loss": 1.9091,
+ "step": 663
+ },
+ {
+ "epoch": 0.13,
+ "learning_rate": 1.9502635107469326e-05,
+ "loss": 0.8868,
+ "step": 664
+ },
+ {
+ "epoch": 0.13,
+ "learning_rate": 1.950069152681783e-05,
+ "loss": 0.8996,
+ "step": 665
+ },
+ {
+ "epoch": 0.13,
+ "learning_rate": 1.9498744253277522e-05,
+ "loss": 0.9014,
+ "step": 666
+ },
+ {
+ "epoch": 0.13,
+ "learning_rate": 1.9496793287605294e-05,
+ "loss": 0.8978,
+ "step": 667
+ },
+ {
+ "epoch": 0.13,
+ "learning_rate": 1.949483863055949e-05,
+ "loss": 0.8086,
+ "step": 668
+ },
+ {
+ "epoch": 0.13,
+ "learning_rate": 1.9492880282899876e-05,
+ "loss": 0.9433,
+ "step": 669
+ },
+ {
+ "epoch": 0.13,
+ "learning_rate": 1.949091824538765e-05,
+ "loss": 0.8898,
+ "step": 670
+ },
+ {
+ "epoch": 0.13,
+ "learning_rate": 1.9488952518785455e-05,
+ "loss": 0.8698,
+ "step": 671
+ },
+ {
+ "epoch": 0.13,
+ "learning_rate": 1.948698310385736e-05,
+ "loss": 0.9142,
+ "step": 672
+ },
+ {
+ "epoch": 0.13,
+ "learning_rate": 1.9485010001368878e-05,
+ "loss": 0.867,
+ "step": 673
+ },
+ {
+ "epoch": 0.13,
+ "learning_rate": 1.9483033212086936e-05,
+ "loss": 0.8496,
+ "step": 674
+ },
+ {
+ "epoch": 0.13,
+ "learning_rate": 1.9481052736779914e-05,
+ "loss": 0.8595,
+ "step": 675
+ },
+ {
+ "epoch": 0.13,
+ "learning_rate": 1.947906857621761e-05,
+ "loss": 0.802,
+ "step": 676
+ },
+ {
+ "epoch": 0.13,
+ "learning_rate": 1.9477080731171267e-05,
+ "loss": 0.8684,
+ "step": 677
+ },
+ {
+ "epoch": 0.13,
+ "learning_rate": 1.947508920241355e-05,
+ "loss": 0.8849,
+ "step": 678
+ },
+ {
+ "epoch": 0.13,
+ "learning_rate": 1.9473093990718567e-05,
+ "loss": 0.8859,
+ "step": 679
+ },
+ {
+ "epoch": 0.13,
+ "learning_rate": 1.9471095096861843e-05,
+ "loss": 0.9454,
+ "step": 680
+ },
+ {
+ "epoch": 0.13,
+ "learning_rate": 1.9469092521620342e-05,
+ "loss": 0.8724,
+ "step": 681
+ },
+ {
+ "epoch": 0.13,
+ "learning_rate": 1.946708626577246e-05,
+ "loss": 0.8913,
+ "step": 682
+ },
+ {
+ "epoch": 0.13,
+ "learning_rate": 1.9465076330098025e-05,
+ "loss": 0.9397,
+ "step": 683
+ },
+ {
+ "epoch": 0.13,
+ "learning_rate": 1.946306271537829e-05,
+ "loss": 0.8886,
+ "step": 684
+ },
+ {
+ "epoch": 0.13,
+ "learning_rate": 1.9461045422395943e-05,
+ "loss": 0.8837,
+ "step": 685
+ },
+ {
+ "epoch": 0.13,
+ "learning_rate": 1.9459024451935097e-05,
+ "loss": 0.8523,
+ "step": 686
+ },
+ {
+ "epoch": 0.13,
+ "learning_rate": 1.9456999804781298e-05,
+ "loss": 0.912,
+ "step": 687
+ },
+ {
+ "epoch": 0.13,
+ "learning_rate": 1.9454971481721515e-05,
+ "loss": 0.853,
+ "step": 688
+ },
+ {
+ "epoch": 0.13,
+ "learning_rate": 1.945293948354416e-05,
+ "loss": 0.8732,
+ "step": 689
+ },
+ {
+ "epoch": 0.13,
+ "learning_rate": 1.945090381103906e-05,
+ "loss": 0.8319,
+ "step": 690
+ },
+ {
+ "epoch": 0.13,
+ "learning_rate": 1.9448864464997474e-05,
+ "loss": 0.9067,
+ "step": 691
+ },
+ {
+ "epoch": 0.13,
+ "learning_rate": 1.9446821446212087e-05,
+ "loss": 0.8413,
+ "step": 692
+ },
+ {
+ "epoch": 0.13,
+ "learning_rate": 1.944477475547702e-05,
+ "loss": 0.8596,
+ "step": 693
+ },
+ {
+ "epoch": 0.13,
+ "learning_rate": 1.9442724393587806e-05,
+ "loss": 1.9811,
+ "step": 694
+ },
+ {
+ "epoch": 0.13,
+ "learning_rate": 1.9440670361341422e-05,
+ "loss": 0.8762,
+ "step": 695
+ },
+ {
+ "epoch": 0.13,
+ "learning_rate": 1.9438612659536265e-05,
+ "loss": 0.8925,
+ "step": 696
+ },
+ {
+ "epoch": 0.13,
+ "learning_rate": 1.943655128897215e-05,
+ "loss": 0.8583,
+ "step": 697
+ },
+ {
+ "epoch": 0.13,
+ "learning_rate": 1.943448625045033e-05,
+ "loss": 0.855,
+ "step": 698
+ },
+ {
+ "epoch": 0.13,
+ "learning_rate": 1.943241754477348e-05,
+ "loss": 0.8727,
+ "step": 699
+ },
+ {
+ "epoch": 0.13,
+ "learning_rate": 1.9430345172745694e-05,
+ "loss": 0.9028,
+ "step": 700
+ },
+ {
+ "epoch": 0.13,
+ "learning_rate": 1.9428269135172503e-05,
+ "loss": 0.8321,
+ "step": 701
+ },
+ {
+ "epoch": 0.14,
+ "learning_rate": 1.9426189432860853e-05,
+ "loss": 0.8855,
+ "step": 702
+ },
+ {
+ "epoch": 0.14,
+ "learning_rate": 1.9424106066619117e-05,
+ "loss": 0.8865,
+ "step": 703
+ },
+ {
+ "epoch": 0.14,
+ "learning_rate": 1.9422019037257093e-05,
+ "loss": 0.9184,
+ "step": 704
+ },
+ {
+ "epoch": 0.14,
+ "learning_rate": 1.9419928345586007e-05,
+ "loss": 0.8623,
+ "step": 705
+ },
+ {
+ "epoch": 0.14,
+ "learning_rate": 1.9417833992418495e-05,
+ "loss": 0.8802,
+ "step": 706
+ },
+ {
+ "epoch": 0.14,
+ "learning_rate": 1.9415735978568636e-05,
+ "loss": 0.8327,
+ "step": 707
+ },
+ {
+ "epoch": 0.14,
+ "learning_rate": 1.9413634304851918e-05,
+ "loss": 0.901,
+ "step": 708
+ },
+ {
+ "epoch": 0.14,
+ "learning_rate": 1.9411528972085248e-05,
+ "loss": 0.887,
+ "step": 709
+ },
+ {
+ "epoch": 0.14,
+ "learning_rate": 1.9409419981086966e-05,
+ "loss": 0.8469,
+ "step": 710
+ },
+ {
+ "epoch": 0.14,
+ "learning_rate": 1.940730733267684e-05,
+ "loss": 0.852,
+ "step": 711
+ },
+ {
+ "epoch": 0.14,
+ "learning_rate": 1.9405191027676036e-05,
+ "loss": 0.8505,
+ "step": 712
+ },
+ {
+ "epoch": 0.14,
+ "learning_rate": 1.940307106690716e-05,
+ "loss": 0.8865,
+ "step": 713
+ },
+ {
+ "epoch": 0.14,
+ "learning_rate": 1.9400947451194242e-05,
+ "loss": 0.8612,
+ "step": 714
+ },
+ {
+ "epoch": 0.14,
+ "learning_rate": 1.9398820181362713e-05,
+ "loss": 0.8542,
+ "step": 715
+ },
+ {
+ "epoch": 0.14,
+ "learning_rate": 1.939668925823944e-05,
+ "loss": 0.8302,
+ "step": 716
+ },
+ {
+ "epoch": 0.14,
+ "learning_rate": 1.9394554682652705e-05,
+ "loss": 0.8421,
+ "step": 717
+ },
+ {
+ "epoch": 0.14,
+ "learning_rate": 1.9392416455432216e-05,
+ "loss": 0.8609,
+ "step": 718
+ },
+ {
+ "epoch": 0.14,
+ "learning_rate": 1.939027457740909e-05,
+ "loss": 0.927,
+ "step": 719
+ },
+ {
+ "epoch": 0.14,
+ "learning_rate": 1.938812904941587e-05,
+ "loss": 0.8404,
+ "step": 720
+ },
+ {
+ "epoch": 0.14,
+ "learning_rate": 1.9385979872286516e-05,
+ "loss": 0.8801,
+ "step": 721
+ },
+ {
+ "epoch": 0.14,
+ "learning_rate": 1.9383827046856405e-05,
+ "loss": 0.8907,
+ "step": 722
+ },
+ {
+ "epoch": 0.14,
+ "learning_rate": 1.9381670573962338e-05,
+ "loss": 0.8869,
+ "step": 723
+ },
+ {
+ "epoch": 0.14,
+ "learning_rate": 1.937951045444252e-05,
+ "loss": 0.8716,
+ "step": 724
+ },
+ {
+ "epoch": 0.14,
+ "learning_rate": 1.937734668913659e-05,
+ "loss": 0.8687,
+ "step": 725
+ },
+ {
+ "epoch": 0.14,
+ "learning_rate": 1.93751792788856e-05,
+ "loss": 0.8647,
+ "step": 726
+ },
+ {
+ "epoch": 0.14,
+ "learning_rate": 1.937300822453201e-05,
+ "loss": 0.8559,
+ "step": 727
+ },
+ {
+ "epoch": 0.14,
+ "learning_rate": 1.93708335269197e-05,
+ "loss": 0.8217,
+ "step": 728
+ },
+ {
+ "epoch": 0.14,
+ "learning_rate": 1.936865518689397e-05,
+ "loss": 0.9501,
+ "step": 729
+ },
+ {
+ "epoch": 0.14,
+ "learning_rate": 1.9366473205301536e-05,
+ "loss": 0.8201,
+ "step": 730
+ },
+ {
+ "epoch": 0.14,
+ "learning_rate": 1.9364287582990526e-05,
+ "loss": 0.9139,
+ "step": 731
+ },
+ {
+ "epoch": 0.14,
+ "learning_rate": 1.9362098320810486e-05,
+ "loss": 0.8904,
+ "step": 732
+ },
+ {
+ "epoch": 0.14,
+ "learning_rate": 1.935990541961237e-05,
+ "loss": 2.0239,
+ "step": 733
+ },
+ {
+ "epoch": 0.14,
+ "learning_rate": 1.9357708880248563e-05,
+ "loss": 0.8731,
+ "step": 734
+ },
+ {
+ "epoch": 0.14,
+ "learning_rate": 1.9355508703572837e-05,
+ "loss": 0.918,
+ "step": 735
+ },
+ {
+ "epoch": 0.14,
+ "learning_rate": 1.935330489044041e-05,
+ "loss": 0.829,
+ "step": 736
+ },
+ {
+ "epoch": 0.14,
+ "learning_rate": 1.9351097441707885e-05,
+ "loss": 0.8486,
+ "step": 737
+ },
+ {
+ "epoch": 0.14,
+ "learning_rate": 1.9348886358233295e-05,
+ "loss": 0.8167,
+ "step": 738
+ },
+ {
+ "epoch": 0.14,
+ "learning_rate": 1.934667164087608e-05,
+ "loss": 0.8382,
+ "step": 739
+ },
+ {
+ "epoch": 0.14,
+ "learning_rate": 1.9344453290497097e-05,
+ "loss": 0.8167,
+ "step": 740
+ },
+ {
+ "epoch": 0.14,
+ "learning_rate": 1.934223130795861e-05,
+ "loss": 0.8882,
+ "step": 741
+ },
+ {
+ "epoch": 0.14,
+ "learning_rate": 1.9340005694124294e-05,
+ "loss": 0.8594,
+ "step": 742
+ },
+ {
+ "epoch": 0.14,
+ "learning_rate": 1.9337776449859243e-05,
+ "loss": 0.8371,
+ "step": 743
+ },
+ {
+ "epoch": 0.14,
+ "learning_rate": 1.9335543576029952e-05,
+ "loss": 0.8908,
+ "step": 744
+ },
+ {
+ "epoch": 0.14,
+ "learning_rate": 1.9333307073504336e-05,
+ "loss": 0.7918,
+ "step": 745
+ },
+ {
+ "epoch": 0.14,
+ "learning_rate": 1.9331066943151714e-05,
+ "loss": 0.8961,
+ "step": 746
+ },
+ {
+ "epoch": 0.14,
+ "learning_rate": 1.9328823185842816e-05,
+ "loss": 1.9781,
+ "step": 747
+ },
+ {
+ "epoch": 0.14,
+ "learning_rate": 1.932657580244979e-05,
+ "loss": 0.8739,
+ "step": 748
+ },
+ {
+ "epoch": 0.14,
+ "learning_rate": 1.932432479384618e-05,
+ "loss": 0.8734,
+ "step": 749
+ },
+ {
+ "epoch": 0.14,
+ "learning_rate": 1.9322070160906946e-05,
+ "loss": 1.851,
+ "step": 750
+ },
+ {
+ "epoch": 0.14,
+ "learning_rate": 1.9319811904508464e-05,
+ "loss": 0.8423,
+ "step": 751
+ },
+ {
+ "epoch": 0.14,
+ "learning_rate": 1.9317550025528505e-05,
+ "loss": 0.8527,
+ "step": 752
+ },
+ {
+ "epoch": 0.14,
+ "learning_rate": 1.9315284524846257e-05,
+ "loss": 0.8168,
+ "step": 753
+ },
+ {
+ "epoch": 0.15,
+ "learning_rate": 1.931301540334231e-05,
+ "loss": 0.9058,
+ "step": 754
+ },
+ {
+ "epoch": 0.15,
+ "learning_rate": 1.9310742661898666e-05,
+ "loss": 0.8983,
+ "step": 755
+ },
+ {
+ "epoch": 0.15,
+ "learning_rate": 1.9308466301398734e-05,
+ "loss": 0.8756,
+ "step": 756
+ },
+ {
+ "epoch": 0.15,
+ "learning_rate": 1.9306186322727325e-05,
+ "loss": 0.8961,
+ "step": 757
+ },
+ {
+ "epoch": 0.15,
+ "learning_rate": 1.930390272677066e-05,
+ "loss": 0.8812,
+ "step": 758
+ },
+ {
+ "epoch": 0.15,
+ "learning_rate": 1.930161551441637e-05,
+ "loss": 0.8903,
+ "step": 759
+ },
+ {
+ "epoch": 0.15,
+ "learning_rate": 1.929932468655348e-05,
+ "loss": 0.8206,
+ "step": 760
+ },
+ {
+ "epoch": 0.15,
+ "learning_rate": 1.9297030244072436e-05,
+ "loss": 0.8757,
+ "step": 761
+ },
+ {
+ "epoch": 0.15,
+ "learning_rate": 1.929473218786507e-05,
+ "loss": 0.8794,
+ "step": 762
+ },
+ {
+ "epoch": 0.15,
+ "learning_rate": 1.9292430518824642e-05,
+ "loss": 0.8377,
+ "step": 763
+ },
+ {
+ "epoch": 0.15,
+ "learning_rate": 1.9290125237845793e-05,
+ "loss": 0.83,
+ "step": 764
+ },
+ {
+ "epoch": 0.15,
+ "learning_rate": 1.928781634582458e-05,
+ "loss": 0.8953,
+ "step": 765
+ },
+ {
+ "epoch": 0.15,
+ "learning_rate": 1.9285503843658468e-05,
+ "loss": 0.8498,
+ "step": 766
+ },
+ {
+ "epoch": 0.15,
+ "learning_rate": 1.9283187732246315e-05,
+ "loss": 0.8585,
+ "step": 767
+ },
+ {
+ "epoch": 0.15,
+ "learning_rate": 1.9280868012488386e-05,
+ "loss": 0.8535,
+ "step": 768
+ },
+ {
+ "epoch": 0.15,
+ "learning_rate": 1.9278544685286347e-05,
+ "loss": 0.8518,
+ "step": 769
+ },
+ {
+ "epoch": 0.15,
+ "learning_rate": 1.9276217751543274e-05,
+ "loss": 0.8809,
+ "step": 770
+ },
+ {
+ "epoch": 0.15,
+ "learning_rate": 1.927388721216363e-05,
+ "loss": 0.8461,
+ "step": 771
+ },
+ {
+ "epoch": 0.15,
+ "learning_rate": 1.92715530680533e-05,
+ "loss": 0.9018,
+ "step": 772
+ },
+ {
+ "epoch": 0.15,
+ "learning_rate": 1.9269215320119547e-05,
+ "loss": 0.8726,
+ "step": 773
+ },
+ {
+ "epoch": 0.15,
+ "learning_rate": 1.9266873969271053e-05,
+ "loss": 0.8377,
+ "step": 774
+ },
+ {
+ "epoch": 0.15,
+ "learning_rate": 1.926452901641789e-05,
+ "loss": 0.8281,
+ "step": 775
+ },
+ {
+ "epoch": 0.15,
+ "learning_rate": 1.9262180462471538e-05,
+ "loss": 0.8872,
+ "step": 776
+ },
+ {
+ "epoch": 0.15,
+ "learning_rate": 1.925982830834487e-05,
+ "loss": 0.8049,
+ "step": 777
+ },
+ {
+ "epoch": 0.15,
+ "learning_rate": 1.925747255495216e-05,
+ "loss": 0.9033,
+ "step": 778
+ },
+ {
+ "epoch": 0.15,
+ "learning_rate": 1.9255113203209083e-05,
+ "loss": 0.8644,
+ "step": 779
+ },
+ {
+ "epoch": 0.15,
+ "learning_rate": 1.9252750254032712e-05,
+ "loss": 0.8596,
+ "step": 780
+ },
+ {
+ "epoch": 0.15,
+ "learning_rate": 1.925038370834152e-05,
+ "loss": 0.9318,
+ "step": 781
+ },
+ {
+ "epoch": 0.15,
+ "learning_rate": 1.924801356705537e-05,
+ "loss": 0.9098,
+ "step": 782
+ },
+ {
+ "epoch": 0.15,
+ "learning_rate": 1.924563983109553e-05,
+ "loss": 0.8574,
+ "step": 783
+ },
+ {
+ "epoch": 0.15,
+ "learning_rate": 1.924326250138467e-05,
+ "loss": 0.908,
+ "step": 784
+ },
+ {
+ "epoch": 0.15,
+ "learning_rate": 1.9240881578846838e-05,
+ "loss": 0.8794,
+ "step": 785
+ },
+ {
+ "epoch": 0.15,
+ "learning_rate": 1.9238497064407505e-05,
+ "loss": 0.8415,
+ "step": 786
+ },
+ {
+ "epoch": 0.15,
+ "learning_rate": 1.9236108958993517e-05,
+ "loss": 0.8942,
+ "step": 787
+ },
+ {
+ "epoch": 0.15,
+ "learning_rate": 1.9233717263533124e-05,
+ "loss": 0.887,
+ "step": 788
+ },
+ {
+ "epoch": 0.15,
+ "learning_rate": 1.923132197895597e-05,
+ "loss": 0.9117,
+ "step": 789
+ },
+ {
+ "epoch": 0.15,
+ "learning_rate": 1.9228923106193098e-05,
+ "loss": 0.8875,
+ "step": 790
+ },
+ {
+ "epoch": 0.15,
+ "learning_rate": 1.9226520646176937e-05,
+ "loss": 0.8072,
+ "step": 791
+ },
+ {
+ "epoch": 0.15,
+ "learning_rate": 1.922411459984132e-05,
+ "loss": 0.8604,
+ "step": 792
+ },
+ {
+ "epoch": 0.15,
+ "learning_rate": 1.9221704968121467e-05,
+ "loss": 0.8764,
+ "step": 793
+ },
+ {
+ "epoch": 0.15,
+ "learning_rate": 1.9219291751953993e-05,
+ "loss": 0.8258,
+ "step": 794
+ },
+ {
+ "epoch": 0.15,
+ "learning_rate": 1.9216874952276914e-05,
+ "loss": 0.8293,
+ "step": 795
+ },
+ {
+ "epoch": 0.15,
+ "learning_rate": 1.9214454570029624e-05,
+ "loss": 2.0036,
+ "step": 796
+ },
+ {
+ "epoch": 0.15,
+ "learning_rate": 1.9212030606152922e-05,
+ "loss": 0.9099,
+ "step": 797
+ },
+ {
+ "epoch": 0.15,
+ "learning_rate": 1.9209603061588997e-05,
+ "loss": 0.8709,
+ "step": 798
+ },
+ {
+ "epoch": 0.15,
+ "learning_rate": 1.9207171937281423e-05,
+ "loss": 0.8771,
+ "step": 799
+ },
+ {
+ "epoch": 0.15,
+ "learning_rate": 1.9204737234175178e-05,
+ "loss": 0.8718,
+ "step": 800
+ },
+ {
+ "epoch": 0.15,
+ "learning_rate": 1.9202298953216613e-05,
+ "loss": 0.8508,
+ "step": 801
+ },
+ {
+ "epoch": 0.15,
+ "learning_rate": 1.919985709535349e-05,
+ "loss": 0.8956,
+ "step": 802
+ },
+ {
+ "epoch": 0.15,
+ "learning_rate": 1.9197411661534945e-05,
+ "loss": 0.8416,
+ "step": 803
+ },
+ {
+ "epoch": 0.15,
+ "learning_rate": 1.9194962652711515e-05,
+ "loss": 0.8845,
+ "step": 804
+ },
+ {
+ "epoch": 0.15,
+ "learning_rate": 1.919251006983512e-05,
+ "loss": 0.828,
+ "step": 805
+ },
+ {
+ "epoch": 0.16,
+ "learning_rate": 1.919005391385907e-05,
+ "loss": 2.0,
+ "step": 806
+ },
+ {
+ "epoch": 0.16,
+ "learning_rate": 1.918759418573806e-05,
+ "loss": 0.8745,
+ "step": 807
+ },
+ {
+ "epoch": 0.16,
+ "learning_rate": 1.9185130886428188e-05,
+ "loss": 0.8803,
+ "step": 808
+ },
+ {
+ "epoch": 0.16,
+ "learning_rate": 1.918266401688693e-05,
+ "loss": 0.8094,
+ "step": 809
+ },
+ {
+ "epoch": 0.16,
+ "learning_rate": 1.9180193578073146e-05,
+ "loss": 0.8225,
+ "step": 810
+ },
+ {
+ "epoch": 0.16,
+ "learning_rate": 1.917771957094709e-05,
+ "loss": 0.8184,
+ "step": 811
+ },
+ {
+ "epoch": 0.16,
+ "learning_rate": 1.91752419964704e-05,
+ "loss": 0.8828,
+ "step": 812
+ },
+ {
+ "epoch": 0.16,
+ "learning_rate": 1.91727608556061e-05,
+ "loss": 0.8603,
+ "step": 813
+ },
+ {
+ "epoch": 0.16,
+ "learning_rate": 1.9170276149318606e-05,
+ "loss": 0.8326,
+ "step": 814
+ },
+ {
+ "epoch": 0.16,
+ "learning_rate": 1.916778787857371e-05,
+ "loss": 0.8389,
+ "step": 815
+ },
+ {
+ "epoch": 0.16,
+ "learning_rate": 1.9165296044338597e-05,
+ "loss": 0.8178,
+ "step": 816
+ },
+ {
+ "epoch": 0.16,
+ "learning_rate": 1.9162800647581837e-05,
+ "loss": 0.8665,
+ "step": 817
+ },
+ {
+ "epoch": 0.16,
+ "learning_rate": 1.9160301689273382e-05,
+ "loss": 0.8591,
+ "step": 818
+ },
+ {
+ "epoch": 0.16,
+ "learning_rate": 1.915779917038457e-05,
+ "loss": 0.897,
+ "step": 819
+ },
+ {
+ "epoch": 0.16,
+ "learning_rate": 1.915529309188812e-05,
+ "loss": 0.8462,
+ "step": 820
+ },
+ {
+ "epoch": 0.16,
+ "learning_rate": 1.9152783454758133e-05,
+ "loss": 0.8587,
+ "step": 821
+ },
+ {
+ "epoch": 0.16,
+ "learning_rate": 1.915027025997011e-05,
+ "loss": 0.89,
+ "step": 822
+ },
+ {
+ "epoch": 0.16,
+ "learning_rate": 1.9147753508500907e-05,
+ "loss": 1.9879,
+ "step": 823
+ },
+ {
+ "epoch": 0.16,
+ "learning_rate": 1.9145233201328786e-05,
+ "loss": 0.8877,
+ "step": 824
+ },
+ {
+ "epoch": 0.16,
+ "learning_rate": 1.914270933943338e-05,
+ "loss": 0.8262,
+ "step": 825
+ },
+ {
+ "epoch": 0.16,
+ "learning_rate": 1.9140181923795706e-05,
+ "loss": 0.8673,
+ "step": 826
+ },
+ {
+ "epoch": 0.16,
+ "learning_rate": 1.9137650955398163e-05,
+ "loss": 0.865,
+ "step": 827
+ },
+ {
+ "epoch": 0.16,
+ "learning_rate": 1.9135116435224535e-05,
+ "loss": 1.8647,
+ "step": 828
+ },
+ {
+ "epoch": 0.16,
+ "learning_rate": 1.9132578364259973e-05,
+ "loss": 0.8592,
+ "step": 829
+ },
+ {
+ "epoch": 0.16,
+ "learning_rate": 1.9130036743491023e-05,
+ "loss": 0.8648,
+ "step": 830
+ },
+ {
+ "epoch": 0.16,
+ "learning_rate": 1.9127491573905603e-05,
+ "loss": 0.8433,
+ "step": 831
+ },
+ {
+ "epoch": 0.16,
+ "learning_rate": 1.9124942856493013e-05,
+ "loss": 0.7873,
+ "step": 832
+ },
+ {
+ "epoch": 0.16,
+ "learning_rate": 1.912239059224393e-05,
+ "loss": 0.9177,
+ "step": 833
+ },
+ {
+ "epoch": 0.16,
+ "learning_rate": 1.9119834782150416e-05,
+ "loss": 0.8094,
+ "step": 834
+ },
+ {
+ "epoch": 0.16,
+ "learning_rate": 1.9117275427205904e-05,
+ "loss": 0.9025,
+ "step": 835
+ },
+ {
+ "epoch": 0.16,
+ "learning_rate": 1.9114712528405202e-05,
+ "loss": 0.8593,
+ "step": 836
+ },
+ {
+ "epoch": 0.16,
+ "learning_rate": 1.9112146086744504e-05,
+ "loss": 0.8279,
+ "step": 837
+ },
+ {
+ "epoch": 0.16,
+ "learning_rate": 1.910957610322138e-05,
+ "loss": 0.8588,
+ "step": 838
+ },
+ {
+ "epoch": 0.16,
+ "learning_rate": 1.9107002578834778e-05,
+ "loss": 0.8506,
+ "step": 839
+ },
+ {
+ "epoch": 0.16,
+ "learning_rate": 1.9104425514585005e-05,
+ "loss": 0.8724,
+ "step": 840
+ },
+ {
+ "epoch": 0.16,
+ "learning_rate": 1.910184491147377e-05,
+ "loss": 0.8339,
+ "step": 841
+ },
+ {
+ "epoch": 0.16,
+ "learning_rate": 1.9099260770504143e-05,
+ "loss": 0.858,
+ "step": 842
+ },
+ {
+ "epoch": 0.16,
+ "learning_rate": 1.9096673092680572e-05,
+ "loss": 0.8862,
+ "step": 843
+ },
+ {
+ "epoch": 0.16,
+ "learning_rate": 1.9094081879008873e-05,
+ "loss": 0.8468,
+ "step": 844
+ },
+ {
+ "epoch": 0.16,
+ "learning_rate": 1.909148713049625e-05,
+ "loss": 0.8562,
+ "step": 845
+ },
+ {
+ "epoch": 0.16,
+ "learning_rate": 1.908888884815127e-05,
+ "loss": 0.8522,
+ "step": 846
+ },
+ {
+ "epoch": 0.16,
+ "learning_rate": 1.9086287032983878e-05,
+ "loss": 0.8307,
+ "step": 847
+ },
+ {
+ "epoch": 0.16,
+ "learning_rate": 1.908368168600539e-05,
+ "loss": 0.8709,
+ "step": 848
+ },
+ {
+ "epoch": 0.16,
+ "learning_rate": 1.9081072808228497e-05,
+ "loss": 0.8176,
+ "step": 849
+ },
+ {
+ "epoch": 0.16,
+ "learning_rate": 1.9078460400667263e-05,
+ "loss": 0.829,
+ "step": 850
+ },
+ {
+ "epoch": 0.16,
+ "learning_rate": 1.9075844464337117e-05,
+ "loss": 0.8459,
+ "step": 851
+ },
+ {
+ "epoch": 0.16,
+ "learning_rate": 1.9073225000254876e-05,
+ "loss": 0.8469,
+ "step": 852
+ },
+ {
+ "epoch": 0.16,
+ "learning_rate": 1.9070602009438707e-05,
+ "loss": 0.8751,
+ "step": 853
+ },
+ {
+ "epoch": 0.16,
+ "learning_rate": 1.9067975492908167e-05,
+ "loss": 0.8625,
+ "step": 854
+ },
+ {
+ "epoch": 0.16,
+ "learning_rate": 1.9065345451684166e-05,
+ "loss": 0.8598,
+ "step": 855
+ },
+ {
+ "epoch": 0.16,
+ "learning_rate": 1.9062711886788997e-05,
+ "loss": 0.8253,
+ "step": 856
+ },
+ {
+ "epoch": 0.16,
+ "learning_rate": 1.9060074799246316e-05,
+ "loss": 0.9365,
+ "step": 857
+ },
+ {
+ "epoch": 0.17,
+ "learning_rate": 1.9057434190081154e-05,
+ "loss": 0.8732,
+ "step": 858
+ },
+ {
+ "epoch": 0.17,
+ "learning_rate": 1.9054790060319908e-05,
+ "loss": 0.8479,
+ "step": 859
+ },
+ {
+ "epoch": 0.17,
+ "learning_rate": 1.905214241099034e-05,
+ "loss": 0.8697,
+ "step": 860
+ },
+ {
+ "epoch": 0.17,
+ "learning_rate": 1.904949124312158e-05,
+ "loss": 0.8957,
+ "step": 861
+ },
+ {
+ "epoch": 0.17,
+ "learning_rate": 1.9046836557744135e-05,
+ "loss": 0.8531,
+ "step": 862
+ },
+ {
+ "epoch": 0.17,
+ "learning_rate": 1.904417835588987e-05,
+ "loss": 0.8828,
+ "step": 863
+ },
+ {
+ "epoch": 0.17,
+ "learning_rate": 1.9041516638592025e-05,
+ "loss": 0.8692,
+ "step": 864
+ },
+ {
+ "epoch": 0.17,
+ "learning_rate": 1.903885140688519e-05,
+ "loss": 0.8396,
+ "step": 865
+ },
+ {
+ "epoch": 0.17,
+ "learning_rate": 1.903618266180534e-05,
+ "loss": 0.852,
+ "step": 866
+ },
+ {
+ "epoch": 0.17,
+ "learning_rate": 1.9033510404389806e-05,
+ "loss": 0.8906,
+ "step": 867
+ },
+ {
+ "epoch": 0.17,
+ "learning_rate": 1.9030834635677285e-05,
+ "loss": 0.8855,
+ "step": 868
+ },
+ {
+ "epoch": 0.17,
+ "learning_rate": 1.9028155356707843e-05,
+ "loss": 0.8598,
+ "step": 869
+ },
+ {
+ "epoch": 0.17,
+ "learning_rate": 1.9025472568522906e-05,
+ "loss": 0.8597,
+ "step": 870
+ },
+ {
+ "epoch": 0.17,
+ "learning_rate": 1.902278627216526e-05,
+ "loss": 0.7999,
+ "step": 871
+ },
+ {
+ "epoch": 0.17,
+ "learning_rate": 1.902009646867907e-05,
+ "loss": 1.7423,
+ "step": 872
+ },
+ {
+ "epoch": 0.17,
+ "learning_rate": 1.9017403159109844e-05,
+ "loss": 0.866,
+ "step": 873
+ },
+ {
+ "epoch": 0.17,
+ "learning_rate": 1.9014706344504467e-05,
+ "loss": 0.8374,
+ "step": 874
+ },
+ {
+ "epoch": 0.17,
+ "learning_rate": 1.901200602591119e-05,
+ "loss": 0.8578,
+ "step": 875
+ },
+ {
+ "epoch": 0.17,
+ "learning_rate": 1.9009302204379604e-05,
+ "loss": 0.8213,
+ "step": 876
+ },
+ {
+ "epoch": 0.17,
+ "learning_rate": 1.900659488096068e-05,
+ "loss": 0.8102,
+ "step": 877
+ },
+ {
+ "epoch": 0.17,
+ "learning_rate": 1.9003884056706758e-05,
+ "loss": 0.8773,
+ "step": 878
+ },
+ {
+ "epoch": 0.17,
+ "learning_rate": 1.900116973267151e-05,
+ "loss": 0.837,
+ "step": 879
+ },
+ {
+ "epoch": 0.17,
+ "learning_rate": 1.899845190991e-05,
+ "loss": 0.9111,
+ "step": 880
+ },
+ {
+ "epoch": 0.17,
+ "learning_rate": 1.8995730589478628e-05,
+ "loss": 0.8565,
+ "step": 881
+ },
+ {
+ "epoch": 0.17,
+ "learning_rate": 1.8993005772435162e-05,
+ "loss": 0.8207,
+ "step": 882
+ },
+ {
+ "epoch": 0.17,
+ "learning_rate": 1.8990277459838734e-05,
+ "loss": 0.8645,
+ "step": 883
+ },
+ {
+ "epoch": 0.17,
+ "learning_rate": 1.898754565274983e-05,
+ "loss": 0.8795,
+ "step": 884
+ },
+ {
+ "epoch": 0.17,
+ "learning_rate": 1.8984810352230295e-05,
+ "loss": 0.8706,
+ "step": 885
+ },
+ {
+ "epoch": 0.17,
+ "learning_rate": 1.898207155934333e-05,
+ "loss": 0.8597,
+ "step": 886
+ },
+ {
+ "epoch": 0.17,
+ "learning_rate": 1.8979329275153492e-05,
+ "loss": 0.8078,
+ "step": 887
+ },
+ {
+ "epoch": 0.17,
+ "learning_rate": 1.8976583500726706e-05,
+ "loss": 0.8416,
+ "step": 888
+ },
+ {
+ "epoch": 0.17,
+ "learning_rate": 1.897383423713024e-05,
+ "loss": 0.8651,
+ "step": 889
+ },
+ {
+ "epoch": 0.17,
+ "learning_rate": 1.897108148543273e-05,
+ "loss": 0.8698,
+ "step": 890
+ },
+ {
+ "epoch": 0.17,
+ "learning_rate": 1.896832524670415e-05,
+ "loss": 0.8997,
+ "step": 891
+ },
+ {
+ "epoch": 0.17,
+ "learning_rate": 1.8965565522015856e-05,
+ "loss": 0.8408,
+ "step": 892
+ },
+ {
+ "epoch": 0.17,
+ "learning_rate": 1.8962802312440533e-05,
+ "loss": 0.839,
+ "step": 893
+ },
+ {
+ "epoch": 0.17,
+ "learning_rate": 1.896003561905224e-05,
+ "loss": 1.7908,
+ "step": 894
+ },
+ {
+ "epoch": 0.17,
+ "learning_rate": 1.8957265442926376e-05,
+ "loss": 0.8504,
+ "step": 895
+ },
+ {
+ "epoch": 0.17,
+ "learning_rate": 1.8954491785139703e-05,
+ "loss": 0.8284,
+ "step": 896
+ },
+ {
+ "epoch": 0.17,
+ "learning_rate": 1.8951714646770332e-05,
+ "loss": 0.8635,
+ "step": 897
+ },
+ {
+ "epoch": 0.17,
+ "learning_rate": 1.8948934028897725e-05,
+ "loss": 0.8793,
+ "step": 898
+ },
+ {
+ "epoch": 0.17,
+ "learning_rate": 1.8946149932602708e-05,
+ "loss": 0.9216,
+ "step": 899
+ },
+ {
+ "epoch": 0.17,
+ "learning_rate": 1.8943362358967442e-05,
+ "loss": 0.8784,
+ "step": 900
+ },
+ {
+ "epoch": 0.17,
+ "learning_rate": 1.8940571309075445e-05,
+ "loss": 0.8246,
+ "step": 901
+ },
+ {
+ "epoch": 0.17,
+ "learning_rate": 1.8937776784011603e-05,
+ "loss": 0.7751,
+ "step": 902
+ },
+ {
+ "epoch": 0.17,
+ "learning_rate": 1.893497878486213e-05,
+ "loss": 0.8965,
+ "step": 903
+ },
+ {
+ "epoch": 0.17,
+ "learning_rate": 1.8932177312714595e-05,
+ "loss": 0.8838,
+ "step": 904
+ },
+ {
+ "epoch": 0.17,
+ "learning_rate": 1.8929372368657933e-05,
+ "loss": 0.8169,
+ "step": 905
+ },
+ {
+ "epoch": 0.17,
+ "learning_rate": 1.8926563953782407e-05,
+ "loss": 0.833,
+ "step": 906
+ },
+ {
+ "epoch": 0.17,
+ "learning_rate": 1.8923752069179646e-05,
+ "loss": 0.8662,
+ "step": 907
+ },
+ {
+ "epoch": 0.17,
+ "learning_rate": 1.8920936715942614e-05,
+ "loss": 0.8803,
+ "step": 908
+ },
+ {
+ "epoch": 0.17,
+ "learning_rate": 1.8918117895165636e-05,
+ "loss": 0.9285,
+ "step": 909
+ },
+ {
+ "epoch": 0.18,
+ "learning_rate": 1.8915295607944374e-05,
+ "loss": 0.8695,
+ "step": 910
+ },
+ {
+ "epoch": 0.18,
+ "learning_rate": 1.8912469855375847e-05,
+ "loss": 0.8262,
+ "step": 911
+ },
+ {
+ "epoch": 0.18,
+ "learning_rate": 1.8909640638558415e-05,
+ "loss": 0.8702,
+ "step": 912
+ },
+ {
+ "epoch": 0.18,
+ "learning_rate": 1.890680795859178e-05,
+ "loss": 0.8553,
+ "step": 913
+ },
+ {
+ "epoch": 0.18,
+ "learning_rate": 1.8903971816577e-05,
+ "loss": 0.7744,
+ "step": 914
+ },
+ {
+ "epoch": 0.18,
+ "learning_rate": 1.8901132213616477e-05,
+ "loss": 0.8519,
+ "step": 915
+ },
+ {
+ "epoch": 0.18,
+ "learning_rate": 1.8898289150813954e-05,
+ "loss": 0.8275,
+ "step": 916
+ },
+ {
+ "epoch": 0.18,
+ "learning_rate": 1.8895442629274516e-05,
+ "loss": 0.8741,
+ "step": 917
+ },
+ {
+ "epoch": 0.18,
+ "learning_rate": 1.88925926501046e-05,
+ "loss": 0.7955,
+ "step": 918
+ },
+ {
+ "epoch": 0.18,
+ "learning_rate": 1.8889739214411987e-05,
+ "loss": 0.8977,
+ "step": 919
+ },
+ {
+ "epoch": 0.18,
+ "learning_rate": 1.8886882323305796e-05,
+ "loss": 0.8997,
+ "step": 920
+ },
+ {
+ "epoch": 0.18,
+ "learning_rate": 1.8884021977896493e-05,
+ "loss": 0.809,
+ "step": 921
+ },
+ {
+ "epoch": 0.18,
+ "learning_rate": 1.888115817929588e-05,
+ "loss": 0.8396,
+ "step": 922
+ },
+ {
+ "epoch": 0.18,
+ "learning_rate": 1.887829092861711e-05,
+ "loss": 0.8637,
+ "step": 923
+ },
+ {
+ "epoch": 0.18,
+ "learning_rate": 1.8875420226974676e-05,
+ "loss": 0.9193,
+ "step": 924
+ },
+ {
+ "epoch": 0.18,
+ "learning_rate": 1.887254607548441e-05,
+ "loss": 0.8592,
+ "step": 925
+ },
+ {
+ "epoch": 0.18,
+ "learning_rate": 1.8869668475263484e-05,
+ "loss": 0.8746,
+ "step": 926
+ },
+ {
+ "epoch": 0.18,
+ "learning_rate": 1.886678742743041e-05,
+ "loss": 0.854,
+ "step": 927
+ },
+ {
+ "epoch": 0.18,
+ "learning_rate": 1.886390293310505e-05,
+ "loss": 0.9089,
+ "step": 928
+ },
+ {
+ "epoch": 0.18,
+ "learning_rate": 1.886101499340859e-05,
+ "loss": 0.9015,
+ "step": 929
+ },
+ {
+ "epoch": 0.18,
+ "learning_rate": 1.8858123609463565e-05,
+ "loss": 0.8472,
+ "step": 930
+ },
+ {
+ "epoch": 0.18,
+ "learning_rate": 1.885522878239385e-05,
+ "loss": 0.8878,
+ "step": 931
+ },
+ {
+ "epoch": 0.18,
+ "learning_rate": 1.885233051332465e-05,
+ "loss": 0.8702,
+ "step": 932
+ },
+ {
+ "epoch": 0.18,
+ "learning_rate": 1.884942880338252e-05,
+ "loss": 0.8413,
+ "step": 933
+ },
+ {
+ "epoch": 0.18,
+ "learning_rate": 1.884652365369534e-05,
+ "loss": 1.9238,
+ "step": 934
+ },
+ {
+ "epoch": 0.18,
+ "learning_rate": 1.884361506539233e-05,
+ "loss": 0.8753,
+ "step": 935
+ },
+ {
+ "epoch": 0.18,
+ "learning_rate": 1.8840703039604058e-05,
+ "loss": 0.8783,
+ "step": 936
+ },
+ {
+ "epoch": 0.18,
+ "learning_rate": 1.883778757746241e-05,
+ "loss": 0.8666,
+ "step": 937
+ },
+ {
+ "epoch": 0.18,
+ "learning_rate": 1.8834868680100623e-05,
+ "loss": 0.8095,
+ "step": 938
+ },
+ {
+ "epoch": 0.18,
+ "learning_rate": 1.8831946348653263e-05,
+ "loss": 0.867,
+ "step": 939
+ },
+ {
+ "epoch": 0.18,
+ "learning_rate": 1.882902058425623e-05,
+ "loss": 0.8048,
+ "step": 940
+ },
+ {
+ "epoch": 0.18,
+ "learning_rate": 1.8826091388046753e-05,
+ "loss": 0.8042,
+ "step": 941
+ },
+ {
+ "epoch": 0.18,
+ "learning_rate": 1.8823158761163415e-05,
+ "loss": 0.8386,
+ "step": 942
+ },
+ {
+ "epoch": 0.18,
+ "learning_rate": 1.8820222704746106e-05,
+ "loss": 1.8891,
+ "step": 943
+ },
+ {
+ "epoch": 0.18,
+ "learning_rate": 1.8817283219936066e-05,
+ "loss": 0.8651,
+ "step": 944
+ },
+ {
+ "epoch": 0.18,
+ "learning_rate": 1.8814340307875866e-05,
+ "loss": 0.9014,
+ "step": 945
+ },
+ {
+ "epoch": 0.18,
+ "learning_rate": 1.8811393969709405e-05,
+ "loss": 0.9171,
+ "step": 946
+ },
+ {
+ "epoch": 0.18,
+ "learning_rate": 1.8808444206581918e-05,
+ "loss": 0.8447,
+ "step": 947
+ },
+ {
+ "epoch": 0.18,
+ "learning_rate": 1.880549101963996e-05,
+ "loss": 0.9168,
+ "step": 948
+ },
+ {
+ "epoch": 0.18,
+ "learning_rate": 1.8802534410031436e-05,
+ "loss": 0.8727,
+ "step": 949
+ },
+ {
+ "epoch": 0.18,
+ "learning_rate": 1.8799574378905567e-05,
+ "loss": 0.817,
+ "step": 950
+ },
+ {
+ "epoch": 0.18,
+ "learning_rate": 1.87966109274129e-05,
+ "loss": 0.8332,
+ "step": 951
+ },
+ {
+ "epoch": 0.18,
+ "learning_rate": 1.879364405670533e-05,
+ "loss": 0.8275,
+ "step": 952
+ },
+ {
+ "epoch": 0.18,
+ "learning_rate": 1.8790673767936068e-05,
+ "loss": 0.9103,
+ "step": 953
+ },
+ {
+ "epoch": 0.18,
+ "learning_rate": 1.8787700062259652e-05,
+ "loss": 0.8516,
+ "step": 954
+ },
+ {
+ "epoch": 0.18,
+ "learning_rate": 1.8784722940831956e-05,
+ "loss": 0.8814,
+ "step": 955
+ },
+ {
+ "epoch": 0.18,
+ "learning_rate": 1.8781742404810176e-05,
+ "loss": 0.8656,
+ "step": 956
+ },
+ {
+ "epoch": 0.18,
+ "learning_rate": 1.8778758455352836e-05,
+ "loss": 0.8612,
+ "step": 957
+ },
+ {
+ "epoch": 0.18,
+ "learning_rate": 1.877577109361979e-05,
+ "loss": 0.839,
+ "step": 958
+ },
+ {
+ "epoch": 0.18,
+ "learning_rate": 1.8772780320772214e-05,
+ "loss": 0.8666,
+ "step": 959
+ },
+ {
+ "epoch": 0.18,
+ "learning_rate": 1.876978613797261e-05,
+ "loss": 0.7871,
+ "step": 960
+ },
+ {
+ "epoch": 0.18,
+ "learning_rate": 1.8766788546384815e-05,
+ "loss": 0.8567,
+ "step": 961
+ },
+ {
+ "epoch": 0.19,
+ "learning_rate": 1.8763787547173976e-05,
+ "loss": 0.8126,
+ "step": 962
+ },
+ {
+ "epoch": 0.19,
+ "learning_rate": 1.8760783141506573e-05,
+ "loss": 0.9003,
+ "step": 963
+ },
+ {
+ "epoch": 0.19,
+ "learning_rate": 1.8757775330550413e-05,
+ "loss": 0.8303,
+ "step": 964
+ },
+ {
+ "epoch": 0.19,
+ "learning_rate": 1.8754764115474616e-05,
+ "loss": 0.8538,
+ "step": 965
+ },
+ {
+ "epoch": 0.19,
+ "learning_rate": 1.8751749497449642e-05,
+ "loss": 0.8058,
+ "step": 966
+ },
+ {
+ "epoch": 0.19,
+ "learning_rate": 1.8748731477647253e-05,
+ "loss": 0.8297,
+ "step": 967
+ },
+ {
+ "epoch": 0.19,
+ "learning_rate": 1.874571005724055e-05,
+ "loss": 0.8169,
+ "step": 968
+ },
+ {
+ "epoch": 0.19,
+ "learning_rate": 1.8742685237403946e-05,
+ "loss": 0.8926,
+ "step": 969
+ },
+ {
+ "epoch": 0.19,
+ "learning_rate": 1.8739657019313183e-05,
+ "loss": 0.8506,
+ "step": 970
+ },
+ {
+ "epoch": 0.19,
+ "learning_rate": 1.8736625404145316e-05,
+ "loss": 0.8649,
+ "step": 971
+ },
+ {
+ "epoch": 0.19,
+ "learning_rate": 1.8733590393078723e-05,
+ "loss": 0.8535,
+ "step": 972
+ },
+ {
+ "epoch": 0.19,
+ "learning_rate": 1.8730551987293112e-05,
+ "loss": 0.8369,
+ "step": 973
+ },
+ {
+ "epoch": 0.19,
+ "learning_rate": 1.872751018796949e-05,
+ "loss": 0.8594,
+ "step": 974
+ },
+ {
+ "epoch": 0.19,
+ "learning_rate": 1.8724464996290203e-05,
+ "loss": 0.8346,
+ "step": 975
+ },
+ {
+ "epoch": 0.19,
+ "learning_rate": 1.8721416413438906e-05,
+ "loss": 0.8633,
+ "step": 976
+ },
+ {
+ "epoch": 0.19,
+ "learning_rate": 1.871836444060057e-05,
+ "loss": 0.8637,
+ "step": 977
+ },
+ {
+ "epoch": 0.19,
+ "learning_rate": 1.8715309078961494e-05,
+ "loss": 0.8714,
+ "step": 978
+ },
+ {
+ "epoch": 0.19,
+ "learning_rate": 1.871225032970928e-05,
+ "loss": 0.9083,
+ "step": 979
+ },
+ {
+ "epoch": 0.19,
+ "learning_rate": 1.8709188194032854e-05,
+ "loss": 0.8289,
+ "step": 980
+ },
+ {
+ "epoch": 0.19,
+ "learning_rate": 1.8706122673122464e-05,
+ "loss": 0.8584,
+ "step": 981
+ },
+ {
+ "epoch": 0.19,
+ "learning_rate": 1.870305376816967e-05,
+ "loss": 0.8928,
+ "step": 982
+ },
+ {
+ "epoch": 0.19,
+ "learning_rate": 1.8699981480367335e-05,
+ "loss": 0.8339,
+ "step": 983
+ },
+ {
+ "epoch": 0.19,
+ "learning_rate": 1.869690581090966e-05,
+ "loss": 0.8881,
+ "step": 984
+ },
+ {
+ "epoch": 0.19,
+ "learning_rate": 1.869382676099214e-05,
+ "loss": 1.7877,
+ "step": 985
+ },
+ {
+ "epoch": 0.19,
+ "learning_rate": 1.8690744331811592e-05,
+ "loss": 0.872,
+ "step": 986
+ },
+ {
+ "epoch": 0.19,
+ "learning_rate": 1.868765852456615e-05,
+ "loss": 0.7828,
+ "step": 987
+ },
+ {
+ "epoch": 0.19,
+ "learning_rate": 1.8684569340455256e-05,
+ "loss": 0.9042,
+ "step": 988
+ },
+ {
+ "epoch": 0.19,
+ "learning_rate": 1.8681476780679668e-05,
+ "loss": 0.8865,
+ "step": 989
+ },
+ {
+ "epoch": 0.19,
+ "learning_rate": 1.8678380846441457e-05,
+ "loss": 0.8155,
+ "step": 990
+ },
+ {
+ "epoch": 0.19,
+ "learning_rate": 1.867528153894399e-05,
+ "loss": 0.8594,
+ "step": 991
+ },
+ {
+ "epoch": 0.19,
+ "learning_rate": 1.8672178859391968e-05,
+ "loss": 0.7884,
+ "step": 992
+ },
+ {
+ "epoch": 0.19,
+ "learning_rate": 1.8669072808991394e-05,
+ "loss": 1.8222,
+ "step": 993
+ },
+ {
+ "epoch": 0.19,
+ "learning_rate": 1.8665963388949577e-05,
+ "loss": 1.8262,
+ "step": 994
+ },
+ {
+ "epoch": 0.19,
+ "learning_rate": 1.8662850600475138e-05,
+ "loss": 0.8107,
+ "step": 995
+ },
+ {
+ "epoch": 0.19,
+ "learning_rate": 1.8659734444778014e-05,
+ "loss": 0.9152,
+ "step": 996
+ },
+ {
+ "epoch": 0.19,
+ "learning_rate": 1.8656614923069433e-05,
+ "loss": 0.8458,
+ "step": 997
+ },
+ {
+ "epoch": 0.19,
+ "learning_rate": 1.8653492036561952e-05,
+ "loss": 0.8629,
+ "step": 998
+ },
+ {
+ "epoch": 0.19,
+ "learning_rate": 1.8650365786469425e-05,
+ "loss": 0.898,
+ "step": 999
+ },
+ {
+ "epoch": 0.19,
+ "learning_rate": 1.864723617400702e-05,
+ "loss": 0.8968,
+ "step": 1000
+ },
+ {
+ "epoch": 0.19,
+ "learning_rate": 1.86441032003912e-05,
+ "loss": 0.8836,
+ "step": 1001
+ },
+ {
+ "epoch": 0.19,
+ "learning_rate": 1.8640966866839746e-05,
+ "loss": 0.8187,
+ "step": 1002
+ },
+ {
+ "epoch": 0.19,
+ "learning_rate": 1.863782717457174e-05,
+ "loss": 1.8772,
+ "step": 1003
+ },
+ {
+ "epoch": 0.19,
+ "learning_rate": 1.863468412480757e-05,
+ "loss": 0.8107,
+ "step": 1004
+ },
+ {
+ "epoch": 0.19,
+ "learning_rate": 1.8631537718768927e-05,
+ "loss": 0.9199,
+ "step": 1005
+ },
+ {
+ "epoch": 0.19,
+ "learning_rate": 1.862838795767882e-05,
+ "loss": 0.8908,
+ "step": 1006
+ },
+ {
+ "epoch": 0.19,
+ "learning_rate": 1.862523484276154e-05,
+ "loss": 0.8966,
+ "step": 1007
+ },
+ {
+ "epoch": 0.19,
+ "learning_rate": 1.8622078375242696e-05,
+ "loss": 0.8979,
+ "step": 1008
+ },
+ {
+ "epoch": 0.19,
+ "learning_rate": 1.86189185563492e-05,
+ "loss": 0.8301,
+ "step": 1009
+ },
+ {
+ "epoch": 0.19,
+ "learning_rate": 1.8615755387309257e-05,
+ "loss": 0.838,
+ "step": 1010
+ },
+ {
+ "epoch": 0.19,
+ "learning_rate": 1.8612588869352387e-05,
+ "loss": 0.8735,
+ "step": 1011
+ },
+ {
+ "epoch": 0.19,
+ "learning_rate": 1.86094190037094e-05,
+ "loss": 0.8798,
+ "step": 1012
+ },
+ {
+ "epoch": 0.19,
+ "learning_rate": 1.860624579161242e-05,
+ "loss": 0.9072,
+ "step": 1013
+ },
+ {
+ "epoch": 0.2,
+ "learning_rate": 1.860306923429486e-05,
+ "loss": 0.837,
+ "step": 1014
+ },
+ {
+ "epoch": 0.2,
+ "learning_rate": 1.8599889332991438e-05,
+ "loss": 0.9098,
+ "step": 1015
+ },
+ {
+ "epoch": 0.2,
+ "learning_rate": 1.8596706088938172e-05,
+ "loss": 0.8586,
+ "step": 1016
+ },
+ {
+ "epoch": 0.2,
+ "learning_rate": 1.8593519503372378e-05,
+ "loss": 0.8595,
+ "step": 1017
+ },
+ {
+ "epoch": 0.2,
+ "learning_rate": 1.8590329577532674e-05,
+ "loss": 0.8073,
+ "step": 1018
+ },
+ {
+ "epoch": 0.2,
+ "learning_rate": 1.8587136312658972e-05,
+ "loss": 0.802,
+ "step": 1019
+ },
+ {
+ "epoch": 0.2,
+ "learning_rate": 1.8583939709992486e-05,
+ "loss": 0.8625,
+ "step": 1020
+ },
+ {
+ "epoch": 0.2,
+ "learning_rate": 1.8580739770775723e-05,
+ "loss": 0.8449,
+ "step": 1021
+ },
+ {
+ "epoch": 0.2,
+ "learning_rate": 1.8577536496252492e-05,
+ "loss": 0.8828,
+ "step": 1022
+ },
+ {
+ "epoch": 0.2,
+ "learning_rate": 1.857432988766789e-05,
+ "loss": 0.8971,
+ "step": 1023
+ },
+ {
+ "epoch": 0.2,
+ "learning_rate": 1.8571119946268327e-05,
+ "loss": 0.8234,
+ "step": 1024
+ },
+ {
+ "epoch": 0.2,
+ "learning_rate": 1.8567906673301484e-05,
+ "loss": 0.8028,
+ "step": 1025
+ },
+ {
+ "epoch": 0.2,
+ "learning_rate": 1.8564690070016358e-05,
+ "loss": 0.8711,
+ "step": 1026
+ },
+ {
+ "epoch": 0.2,
+ "learning_rate": 1.856147013766323e-05,
+ "loss": 0.8088,
+ "step": 1027
+ },
+ {
+ "epoch": 0.2,
+ "learning_rate": 1.855824687749368e-05,
+ "loss": 0.9014,
+ "step": 1028
+ },
+ {
+ "epoch": 0.2,
+ "learning_rate": 1.8555020290760572e-05,
+ "loss": 0.8771,
+ "step": 1029
+ },
+ {
+ "epoch": 0.2,
+ "learning_rate": 1.855179037871808e-05,
+ "loss": 0.8165,
+ "step": 1030
+ },
+ {
+ "epoch": 0.2,
+ "learning_rate": 1.8548557142621656e-05,
+ "loss": 0.8953,
+ "step": 1031
+ },
+ {
+ "epoch": 0.2,
+ "learning_rate": 1.8545320583728044e-05,
+ "loss": 0.8323,
+ "step": 1032
+ },
+ {
+ "epoch": 0.2,
+ "learning_rate": 1.8542080703295293e-05,
+ "loss": 0.9192,
+ "step": 1033
+ },
+ {
+ "epoch": 0.2,
+ "learning_rate": 1.8538837502582727e-05,
+ "loss": 0.8773,
+ "step": 1034
+ },
+ {
+ "epoch": 0.2,
+ "learning_rate": 1.8535590982850972e-05,
+ "loss": 0.8896,
+ "step": 1035
+ },
+ {
+ "epoch": 0.2,
+ "learning_rate": 1.853234114536194e-05,
+ "loss": 0.8545,
+ "step": 1036
+ },
+ {
+ "epoch": 0.2,
+ "learning_rate": 1.852908799137883e-05,
+ "loss": 0.8561,
+ "step": 1037
+ },
+ {
+ "epoch": 0.2,
+ "learning_rate": 1.8525831522166135e-05,
+ "loss": 0.8434,
+ "step": 1038
+ },
+ {
+ "epoch": 0.2,
+ "learning_rate": 1.852257173898963e-05,
+ "loss": 0.8554,
+ "step": 1039
+ },
+ {
+ "epoch": 0.2,
+ "learning_rate": 1.851930864311639e-05,
+ "loss": 0.7747,
+ "step": 1040
+ },
+ {
+ "epoch": 0.2,
+ "learning_rate": 1.8516042235814767e-05,
+ "loss": 0.9023,
+ "step": 1041
+ },
+ {
+ "epoch": 0.2,
+ "learning_rate": 1.85127725183544e-05,
+ "loss": 0.8577,
+ "step": 1042
+ },
+ {
+ "epoch": 0.2,
+ "learning_rate": 1.8509499492006216e-05,
+ "loss": 0.8616,
+ "step": 1043
+ },
+ {
+ "epoch": 0.2,
+ "learning_rate": 1.8506223158042442e-05,
+ "loss": 0.8272,
+ "step": 1044
+ },
+ {
+ "epoch": 0.2,
+ "learning_rate": 1.8502943517736566e-05,
+ "loss": 0.8076,
+ "step": 1045
+ },
+ {
+ "epoch": 0.2,
+ "learning_rate": 1.8499660572363377e-05,
+ "loss": 0.7438,
+ "step": 1046
+ },
+ {
+ "epoch": 0.2,
+ "learning_rate": 1.849637432319895e-05,
+ "loss": 0.8486,
+ "step": 1047
+ },
+ {
+ "epoch": 0.2,
+ "learning_rate": 1.8493084771520635e-05,
+ "loss": 0.8493,
+ "step": 1048
+ },
+ {
+ "epoch": 0.2,
+ "learning_rate": 1.8489791918607075e-05,
+ "loss": 0.8088,
+ "step": 1049
+ },
+ {
+ "epoch": 0.2,
+ "learning_rate": 1.8486495765738187e-05,
+ "loss": 0.8345,
+ "step": 1050
+ },
+ {
+ "epoch": 0.2,
+ "learning_rate": 1.8483196314195174e-05,
+ "loss": 0.9013,
+ "step": 1051
+ },
+ {
+ "epoch": 0.2,
+ "learning_rate": 1.8479893565260525e-05,
+ "loss": 0.797,
+ "step": 1052
+ },
+ {
+ "epoch": 0.2,
+ "learning_rate": 1.8476587520218013e-05,
+ "loss": 0.8801,
+ "step": 1053
+ },
+ {
+ "epoch": 0.2,
+ "learning_rate": 1.847327818035268e-05,
+ "loss": 0.8436,
+ "step": 1054
+ },
+ {
+ "epoch": 0.2,
+ "learning_rate": 1.846996554695086e-05,
+ "loss": 0.8206,
+ "step": 1055
+ },
+ {
+ "epoch": 0.2,
+ "learning_rate": 1.846664962130016e-05,
+ "loss": 0.8285,
+ "step": 1056
+ },
+ {
+ "epoch": 0.2,
+ "learning_rate": 1.846333040468947e-05,
+ "loss": 0.8749,
+ "step": 1057
+ },
+ {
+ "epoch": 0.2,
+ "learning_rate": 1.846000789840896e-05,
+ "loss": 0.9032,
+ "step": 1058
+ },
+ {
+ "epoch": 0.2,
+ "learning_rate": 1.845668210375008e-05,
+ "loss": 0.91,
+ "step": 1059
+ },
+ {
+ "epoch": 0.2,
+ "learning_rate": 1.8453353022005557e-05,
+ "loss": 1.8954,
+ "step": 1060
+ },
+ {
+ "epoch": 0.2,
+ "learning_rate": 1.8450020654469388e-05,
+ "loss": 0.834,
+ "step": 1061
+ },
+ {
+ "epoch": 0.2,
+ "learning_rate": 1.844668500243686e-05,
+ "loss": 0.8378,
+ "step": 1062
+ },
+ {
+ "epoch": 0.2,
+ "learning_rate": 1.8443346067204525e-05,
+ "loss": 0.8826,
+ "step": 1063
+ },
+ {
+ "epoch": 0.2,
+ "learning_rate": 1.8440003850070222e-05,
+ "loss": 0.8121,
+ "step": 1064
+ },
+ {
+ "epoch": 0.21,
+ "learning_rate": 1.8436658352333056e-05,
+ "loss": 0.8299,
+ "step": 1065
+ },
+ {
+ "epoch": 0.21,
+ "learning_rate": 1.8433309575293414e-05,
+ "loss": 0.8593,
+ "step": 1066
+ },
+ {
+ "epoch": 0.21,
+ "learning_rate": 1.8429957520252953e-05,
+ "loss": 0.8542,
+ "step": 1067
+ },
+ {
+ "epoch": 0.21,
+ "learning_rate": 1.842660218851461e-05,
+ "loss": 0.7941,
+ "step": 1068
+ },
+ {
+ "epoch": 0.21,
+ "learning_rate": 1.842324358138259e-05,
+ "loss": 0.825,
+ "step": 1069
+ },
+ {
+ "epoch": 0.21,
+ "learning_rate": 1.8419881700162365e-05,
+ "loss": 0.8745,
+ "step": 1070
+ },
+ {
+ "epoch": 0.21,
+ "learning_rate": 1.84165165461607e-05,
+ "loss": 0.8386,
+ "step": 1071
+ },
+ {
+ "epoch": 0.21,
+ "learning_rate": 1.8413148120685616e-05,
+ "loss": 0.9064,
+ "step": 1072
+ },
+ {
+ "epoch": 0.21,
+ "learning_rate": 1.8409776425046404e-05,
+ "loss": 0.868,
+ "step": 1073
+ },
+ {
+ "epoch": 0.21,
+ "learning_rate": 1.840640146055364e-05,
+ "loss": 0.8705,
+ "step": 1074
+ },
+ {
+ "epoch": 0.21,
+ "learning_rate": 1.8403023228519155e-05,
+ "loss": 0.8762,
+ "step": 1075
+ },
+ {
+ "epoch": 0.21,
+ "learning_rate": 1.8399641730256057e-05,
+ "loss": 0.8368,
+ "step": 1076
+ },
+ {
+ "epoch": 0.21,
+ "learning_rate": 1.839625696707873e-05,
+ "loss": 0.8745,
+ "step": 1077
+ },
+ {
+ "epoch": 0.21,
+ "learning_rate": 1.8392868940302815e-05,
+ "loss": 0.8563,
+ "step": 1078
+ },
+ {
+ "epoch": 0.21,
+ "learning_rate": 1.838947765124523e-05,
+ "loss": 0.8857,
+ "step": 1079
+ },
+ {
+ "epoch": 0.21,
+ "learning_rate": 1.8386083101224158e-05,
+ "loss": 0.7878,
+ "step": 1080
+ },
+ {
+ "epoch": 0.21,
+ "learning_rate": 1.838268529155905e-05,
+ "loss": 0.8264,
+ "step": 1081
+ },
+ {
+ "epoch": 0.21,
+ "learning_rate": 1.8379284223570624e-05,
+ "loss": 0.8511,
+ "step": 1082
+ },
+ {
+ "epoch": 0.21,
+ "learning_rate": 1.8375879898580862e-05,
+ "loss": 0.837,
+ "step": 1083
+ },
+ {
+ "epoch": 0.21,
+ "learning_rate": 1.8372472317913018e-05,
+ "loss": 0.8903,
+ "step": 1084
+ },
+ {
+ "epoch": 0.21,
+ "learning_rate": 1.8369061482891606e-05,
+ "loss": 0.8835,
+ "step": 1085
+ },
+ {
+ "epoch": 0.21,
+ "learning_rate": 1.8365647394842404e-05,
+ "loss": 0.8752,
+ "step": 1086
+ },
+ {
+ "epoch": 0.21,
+ "learning_rate": 1.8362230055092463e-05,
+ "loss": 0.7988,
+ "step": 1087
+ },
+ {
+ "epoch": 0.21,
+ "learning_rate": 1.835880946497009e-05,
+ "loss": 0.9097,
+ "step": 1088
+ },
+ {
+ "epoch": 0.21,
+ "learning_rate": 1.8355385625804854e-05,
+ "loss": 0.7838,
+ "step": 1089
+ },
+ {
+ "epoch": 0.21,
+ "learning_rate": 1.8351958538927598e-05,
+ "loss": 0.8854,
+ "step": 1090
+ },
+ {
+ "epoch": 0.21,
+ "learning_rate": 1.834852820567041e-05,
+ "loss": 0.8473,
+ "step": 1091
+ },
+ {
+ "epoch": 0.21,
+ "learning_rate": 1.8345094627366657e-05,
+ "loss": 0.871,
+ "step": 1092
+ },
+ {
+ "epoch": 0.21,
+ "learning_rate": 1.8341657805350957e-05,
+ "loss": 0.8566,
+ "step": 1093
+ },
+ {
+ "epoch": 0.21,
+ "learning_rate": 1.8338217740959197e-05,
+ "loss": 0.8475,
+ "step": 1094
+ },
+ {
+ "epoch": 0.21,
+ "learning_rate": 1.8334774435528514e-05,
+ "loss": 0.8956,
+ "step": 1095
+ },
+ {
+ "epoch": 0.21,
+ "learning_rate": 1.833132789039731e-05,
+ "loss": 0.8986,
+ "step": 1096
+ },
+ {
+ "epoch": 0.21,
+ "learning_rate": 1.8327878106905242e-05,
+ "loss": 0.8859,
+ "step": 1097
+ },
+ {
+ "epoch": 0.21,
+ "learning_rate": 1.8324425086393243e-05,
+ "loss": 0.8271,
+ "step": 1098
+ },
+ {
+ "epoch": 0.21,
+ "learning_rate": 1.8320968830203482e-05,
+ "loss": 0.8638,
+ "step": 1099
+ },
+ {
+ "epoch": 0.21,
+ "learning_rate": 1.8317509339679392e-05,
+ "loss": 0.8239,
+ "step": 1100
+ },
+ {
+ "epoch": 0.21,
+ "learning_rate": 1.8314046616165675e-05,
+ "loss": 0.8214,
+ "step": 1101
+ },
+ {
+ "epoch": 0.21,
+ "learning_rate": 1.8310580661008272e-05,
+ "loss": 0.8217,
+ "step": 1102
+ },
+ {
+ "epoch": 0.21,
+ "learning_rate": 1.8307111475554396e-05,
+ "loss": 0.8505,
+ "step": 1103
+ },
+ {
+ "epoch": 0.21,
+ "learning_rate": 1.83036390611525e-05,
+ "loss": 0.8585,
+ "step": 1104
+ },
+ {
+ "epoch": 0.21,
+ "learning_rate": 1.8300163419152316e-05,
+ "loss": 0.8566,
+ "step": 1105
+ },
+ {
+ "epoch": 0.21,
+ "learning_rate": 1.8296684550904794e-05,
+ "loss": 0.8324,
+ "step": 1106
+ },
+ {
+ "epoch": 0.21,
+ "learning_rate": 1.8293202457762177e-05,
+ "loss": 0.8306,
+ "step": 1107
+ },
+ {
+ "epoch": 0.21,
+ "learning_rate": 1.828971714107793e-05,
+ "loss": 0.8884,
+ "step": 1108
+ },
+ {
+ "epoch": 0.21,
+ "learning_rate": 1.82862286022068e-05,
+ "loss": 0.8544,
+ "step": 1109
+ },
+ {
+ "epoch": 0.21,
+ "learning_rate": 1.828273684250476e-05,
+ "loss": 0.8451,
+ "step": 1110
+ },
+ {
+ "epoch": 0.21,
+ "learning_rate": 1.8279241863329043e-05,
+ "loss": 0.8479,
+ "step": 1111
+ },
+ {
+ "epoch": 0.21,
+ "learning_rate": 1.8275743666038144e-05,
+ "loss": 0.9265,
+ "step": 1112
+ },
+ {
+ "epoch": 0.21,
+ "learning_rate": 1.82722422519918e-05,
+ "loss": 0.8745,
+ "step": 1113
+ },
+ {
+ "epoch": 0.21,
+ "learning_rate": 1.8268737622550996e-05,
+ "loss": 0.8622,
+ "step": 1114
+ },
+ {
+ "epoch": 0.21,
+ "learning_rate": 1.8265229779077976e-05,
+ "loss": 0.8766,
+ "step": 1115
+ },
+ {
+ "epoch": 0.21,
+ "learning_rate": 1.8261718722936225e-05,
+ "loss": 0.9092,
+ "step": 1116
+ },
+ {
+ "epoch": 0.22,
+ "learning_rate": 1.8258204455490475e-05,
+ "loss": 0.8282,
+ "step": 1117
+ },
+ {
+ "epoch": 0.22,
+ "learning_rate": 1.8254686978106717e-05,
+ "loss": 0.8842,
+ "step": 1118
+ },
+ {
+ "epoch": 0.22,
+ "learning_rate": 1.825116629215218e-05,
+ "loss": 0.8078,
+ "step": 1119
+ },
+ {
+ "epoch": 0.22,
+ "learning_rate": 1.8247642398995347e-05,
+ "loss": 0.8122,
+ "step": 1120
+ },
+ {
+ "epoch": 0.22,
+ "learning_rate": 1.824411530000594e-05,
+ "loss": 0.8708,
+ "step": 1121
+ },
+ {
+ "epoch": 0.22,
+ "learning_rate": 1.8240584996554932e-05,
+ "loss": 0.8435,
+ "step": 1122
+ },
+ {
+ "epoch": 0.22,
+ "learning_rate": 1.8237051490014545e-05,
+ "loss": 0.86,
+ "step": 1123
+ },
+ {
+ "epoch": 0.22,
+ "learning_rate": 1.8233514781758233e-05,
+ "loss": 0.901,
+ "step": 1124
+ },
+ {
+ "epoch": 0.22,
+ "learning_rate": 1.822997487316071e-05,
+ "loss": 1.8032,
+ "step": 1125
+ },
+ {
+ "epoch": 0.22,
+ "learning_rate": 1.822643176559793e-05,
+ "loss": 0.8724,
+ "step": 1126
+ },
+ {
+ "epoch": 0.22,
+ "learning_rate": 1.8222885460447083e-05,
+ "loss": 0.8357,
+ "step": 1127
+ },
+ {
+ "epoch": 0.22,
+ "learning_rate": 1.8219335959086606e-05,
+ "loss": 0.8654,
+ "step": 1128
+ },
+ {
+ "epoch": 0.22,
+ "learning_rate": 1.821578326289618e-05,
+ "loss": 0.852,
+ "step": 1129
+ },
+ {
+ "epoch": 0.22,
+ "learning_rate": 1.821222737325673e-05,
+ "loss": 0.8593,
+ "step": 1130
+ },
+ {
+ "epoch": 0.22,
+ "learning_rate": 1.8208668291550414e-05,
+ "loss": 0.9151,
+ "step": 1131
+ },
+ {
+ "epoch": 0.22,
+ "learning_rate": 1.820510601916064e-05,
+ "loss": 0.8508,
+ "step": 1132
+ },
+ {
+ "epoch": 0.22,
+ "learning_rate": 1.820154055747205e-05,
+ "loss": 0.8512,
+ "step": 1133
+ },
+ {
+ "epoch": 0.22,
+ "learning_rate": 1.8197971907870528e-05,
+ "loss": 0.8896,
+ "step": 1134
+ },
+ {
+ "epoch": 0.22,
+ "learning_rate": 1.8194400071743196e-05,
+ "loss": 0.8246,
+ "step": 1135
+ },
+ {
+ "epoch": 0.22,
+ "learning_rate": 1.8190825050478414e-05,
+ "loss": 0.9053,
+ "step": 1136
+ },
+ {
+ "epoch": 0.22,
+ "learning_rate": 1.8187246845465787e-05,
+ "loss": 0.8408,
+ "step": 1137
+ },
+ {
+ "epoch": 0.22,
+ "learning_rate": 1.8183665458096147e-05,
+ "loss": 0.7969,
+ "step": 1138
+ },
+ {
+ "epoch": 0.22,
+ "learning_rate": 1.8180080889761573e-05,
+ "loss": 0.8399,
+ "step": 1139
+ },
+ {
+ "epoch": 0.22,
+ "learning_rate": 1.8176493141855368e-05,
+ "loss": 0.8567,
+ "step": 1140
+ },
+ {
+ "epoch": 0.22,
+ "learning_rate": 1.8172902215772084e-05,
+ "loss": 0.8328,
+ "step": 1141
+ },
+ {
+ "epoch": 0.22,
+ "learning_rate": 1.81693081129075e-05,
+ "loss": 0.8845,
+ "step": 1142
+ },
+ {
+ "epoch": 0.22,
+ "learning_rate": 1.8165710834658635e-05,
+ "loss": 0.8854,
+ "step": 1143
+ },
+ {
+ "epoch": 0.22,
+ "learning_rate": 1.8162110382423738e-05,
+ "loss": 0.881,
+ "step": 1144
+ },
+ {
+ "epoch": 0.22,
+ "learning_rate": 1.8158506757602296e-05,
+ "loss": 0.8746,
+ "step": 1145
+ },
+ {
+ "epoch": 0.22,
+ "learning_rate": 1.815489996159502e-05,
+ "loss": 0.8301,
+ "step": 1146
+ },
+ {
+ "epoch": 0.22,
+ "learning_rate": 1.8151289995803866e-05,
+ "loss": 0.7774,
+ "step": 1147
+ },
+ {
+ "epoch": 0.22,
+ "learning_rate": 1.8147676861632014e-05,
+ "loss": 1.8419,
+ "step": 1148
+ },
+ {
+ "epoch": 0.22,
+ "learning_rate": 1.8144060560483878e-05,
+ "loss": 0.8321,
+ "step": 1149
+ },
+ {
+ "epoch": 0.22,
+ "learning_rate": 1.8140441093765103e-05,
+ "loss": 0.876,
+ "step": 1150
+ },
+ {
+ "epoch": 0.22,
+ "learning_rate": 1.813681846288257e-05,
+ "loss": 0.8403,
+ "step": 1151
+ },
+ {
+ "epoch": 0.22,
+ "learning_rate": 1.8133192669244373e-05,
+ "loss": 0.7902,
+ "step": 1152
+ },
+ {
+ "epoch": 0.22,
+ "learning_rate": 1.8129563714259854e-05,
+ "loss": 0.864,
+ "step": 1153
+ },
+ {
+ "epoch": 0.22,
+ "learning_rate": 1.8125931599339575e-05,
+ "loss": 0.7606,
+ "step": 1154
+ },
+ {
+ "epoch": 0.22,
+ "learning_rate": 1.8122296325895327e-05,
+ "loss": 0.8309,
+ "step": 1155
+ },
+ {
+ "epoch": 0.22,
+ "learning_rate": 1.811865789534013e-05,
+ "loss": 0.788,
+ "step": 1156
+ },
+ {
+ "epoch": 0.22,
+ "learning_rate": 1.811501630908823e-05,
+ "loss": 0.8859,
+ "step": 1157
+ },
+ {
+ "epoch": 0.22,
+ "learning_rate": 1.8111371568555102e-05,
+ "loss": 0.8283,
+ "step": 1158
+ },
+ {
+ "epoch": 0.22,
+ "learning_rate": 1.8107723675157442e-05,
+ "loss": 0.8749,
+ "step": 1159
+ },
+ {
+ "epoch": 0.22,
+ "learning_rate": 1.8104072630313176e-05,
+ "loss": 0.776,
+ "step": 1160
+ },
+ {
+ "epoch": 0.22,
+ "learning_rate": 1.8100418435441454e-05,
+ "loss": 0.8932,
+ "step": 1161
+ },
+ {
+ "epoch": 0.22,
+ "learning_rate": 1.809676109196265e-05,
+ "loss": 0.8657,
+ "step": 1162
+ },
+ {
+ "epoch": 0.22,
+ "learning_rate": 1.809310060129836e-05,
+ "loss": 1.8987,
+ "step": 1163
+ },
+ {
+ "epoch": 0.22,
+ "learning_rate": 1.8089436964871407e-05,
+ "loss": 0.8831,
+ "step": 1164
+ },
+ {
+ "epoch": 0.22,
+ "learning_rate": 1.8085770184105837e-05,
+ "loss": 0.8574,
+ "step": 1165
+ },
+ {
+ "epoch": 0.22,
+ "learning_rate": 1.808210026042691e-05,
+ "loss": 0.8898,
+ "step": 1166
+ },
+ {
+ "epoch": 0.22,
+ "learning_rate": 1.807842719526112e-05,
+ "loss": 0.8305,
+ "step": 1167
+ },
+ {
+ "epoch": 0.22,
+ "learning_rate": 1.8074750990036176e-05,
+ "loss": 0.915,
+ "step": 1168
+ },
+ {
+ "epoch": 0.23,
+ "learning_rate": 1.8071071646181e-05,
+ "loss": 0.8682,
+ "step": 1169
+ },
+ {
+ "epoch": 0.23,
+ "learning_rate": 1.8067389165125752e-05,
+ "loss": 0.909,
+ "step": 1170
+ },
+ {
+ "epoch": 0.23,
+ "learning_rate": 1.8063703548301787e-05,
+ "loss": 0.7659,
+ "step": 1171
+ },
+ {
+ "epoch": 0.23,
+ "learning_rate": 1.8060014797141705e-05,
+ "loss": 0.8886,
+ "step": 1172
+ },
+ {
+ "epoch": 0.23,
+ "learning_rate": 1.8056322913079303e-05,
+ "loss": 0.8451,
+ "step": 1173
+ },
+ {
+ "epoch": 0.23,
+ "learning_rate": 1.8052627897549614e-05,
+ "loss": 0.8756,
+ "step": 1174
+ },
+ {
+ "epoch": 0.23,
+ "learning_rate": 1.8048929751988873e-05,
+ "loss": 0.8779,
+ "step": 1175
+ },
+ {
+ "epoch": 0.23,
+ "learning_rate": 1.8045228477834536e-05,
+ "loss": 0.8537,
+ "step": 1176
+ },
+ {
+ "epoch": 0.23,
+ "learning_rate": 1.8041524076525285e-05,
+ "loss": 0.8551,
+ "step": 1177
+ },
+ {
+ "epoch": 0.23,
+ "learning_rate": 1.8037816549500996e-05,
+ "loss": 0.8875,
+ "step": 1178
+ },
+ {
+ "epoch": 0.23,
+ "learning_rate": 1.8034105898202785e-05,
+ "loss": 0.8643,
+ "step": 1179
+ },
+ {
+ "epoch": 0.23,
+ "learning_rate": 1.8030392124072963e-05,
+ "loss": 0.845,
+ "step": 1180
+ },
+ {
+ "epoch": 0.23,
+ "learning_rate": 1.8026675228555066e-05,
+ "loss": 1.7516,
+ "step": 1181
+ },
+ {
+ "epoch": 0.23,
+ "learning_rate": 1.802295521309384e-05,
+ "loss": 0.8958,
+ "step": 1182
+ },
+ {
+ "epoch": 0.23,
+ "learning_rate": 1.801923207913524e-05,
+ "loss": 0.8256,
+ "step": 1183
+ },
+ {
+ "epoch": 0.23,
+ "learning_rate": 1.8015505828126438e-05,
+ "loss": 0.9215,
+ "step": 1184
+ },
+ {
+ "epoch": 0.23,
+ "learning_rate": 1.801177646151582e-05,
+ "loss": 0.8475,
+ "step": 1185
+ },
+ {
+ "epoch": 0.23,
+ "learning_rate": 1.8008043980752967e-05,
+ "loss": 0.8053,
+ "step": 1186
+ },
+ {
+ "epoch": 0.23,
+ "learning_rate": 1.8004308387288697e-05,
+ "loss": 1.8174,
+ "step": 1187
+ },
+ {
+ "epoch": 0.23,
+ "learning_rate": 1.8000569682575016e-05,
+ "loss": 0.8164,
+ "step": 1188
+ },
+ {
+ "epoch": 0.23,
+ "learning_rate": 1.7996827868065145e-05,
+ "loss": 0.8403,
+ "step": 1189
+ },
+ {
+ "epoch": 0.23,
+ "learning_rate": 1.7993082945213522e-05,
+ "loss": 0.8428,
+ "step": 1190
+ },
+ {
+ "epoch": 0.23,
+ "learning_rate": 1.798933491547578e-05,
+ "loss": 0.8677,
+ "step": 1191
+ },
+ {
+ "epoch": 0.23,
+ "learning_rate": 1.798558378030877e-05,
+ "loss": 0.8296,
+ "step": 1192
+ },
+ {
+ "epoch": 0.23,
+ "learning_rate": 1.7981829541170547e-05,
+ "loss": 0.8454,
+ "step": 1193
+ },
+ {
+ "epoch": 0.23,
+ "learning_rate": 1.797807219952037e-05,
+ "loss": 0.8547,
+ "step": 1194
+ },
+ {
+ "epoch": 0.23,
+ "learning_rate": 1.797431175681871e-05,
+ "loss": 0.7966,
+ "step": 1195
+ },
+ {
+ "epoch": 0.23,
+ "learning_rate": 1.797054821452723e-05,
+ "loss": 0.8561,
+ "step": 1196
+ },
+ {
+ "epoch": 0.23,
+ "learning_rate": 1.7966781574108815e-05,
+ "loss": 0.8459,
+ "step": 1197
+ },
+ {
+ "epoch": 0.23,
+ "learning_rate": 1.7963011837027544e-05,
+ "loss": 0.8423,
+ "step": 1198
+ },
+ {
+ "epoch": 0.23,
+ "learning_rate": 1.79592390047487e-05,
+ "loss": 0.8452,
+ "step": 1199
+ },
+ {
+ "epoch": 0.23,
+ "learning_rate": 1.7955463078738777e-05,
+ "loss": 0.8832,
+ "step": 1200
+ },
+ {
+ "epoch": 0.23,
+ "learning_rate": 1.7951684060465456e-05,
+ "loss": 0.8104,
+ "step": 1201
+ },
+ {
+ "epoch": 0.23,
+ "learning_rate": 1.794790195139764e-05,
+ "loss": 0.8145,
+ "step": 1202
+ },
+ {
+ "epoch": 0.23,
+ "learning_rate": 1.7944116753005416e-05,
+ "loss": 0.8953,
+ "step": 1203
+ },
+ {
+ "epoch": 0.23,
+ "learning_rate": 1.794032846676008e-05,
+ "loss": 0.8428,
+ "step": 1204
+ },
+ {
+ "epoch": 0.23,
+ "learning_rate": 1.7936537094134127e-05,
+ "loss": 0.834,
+ "step": 1205
+ },
+ {
+ "epoch": 0.23,
+ "learning_rate": 1.7932742636601252e-05,
+ "loss": 0.8944,
+ "step": 1206
+ },
+ {
+ "epoch": 0.23,
+ "learning_rate": 1.792894509563635e-05,
+ "loss": 0.84,
+ "step": 1207
+ },
+ {
+ "epoch": 0.23,
+ "learning_rate": 1.792514447271551e-05,
+ "loss": 0.8788,
+ "step": 1208
+ },
+ {
+ "epoch": 0.23,
+ "learning_rate": 1.7921340769316024e-05,
+ "loss": 0.9051,
+ "step": 1209
+ },
+ {
+ "epoch": 0.23,
+ "learning_rate": 1.791753398691638e-05,
+ "loss": 1.8386,
+ "step": 1210
+ },
+ {
+ "epoch": 0.23,
+ "learning_rate": 1.7913724126996263e-05,
+ "loss": 0.8356,
+ "step": 1211
+ },
+ {
+ "epoch": 0.23,
+ "learning_rate": 1.7909911191036548e-05,
+ "loss": 0.8676,
+ "step": 1212
+ },
+ {
+ "epoch": 0.23,
+ "learning_rate": 1.7906095180519317e-05,
+ "loss": 0.8033,
+ "step": 1213
+ },
+ {
+ "epoch": 0.23,
+ "learning_rate": 1.790227609692784e-05,
+ "loss": 0.829,
+ "step": 1214
+ },
+ {
+ "epoch": 0.23,
+ "learning_rate": 1.7898453941746585e-05,
+ "loss": 0.8496,
+ "step": 1215
+ },
+ {
+ "epoch": 0.23,
+ "learning_rate": 1.7894628716461205e-05,
+ "loss": 0.8423,
+ "step": 1216
+ },
+ {
+ "epoch": 0.23,
+ "learning_rate": 1.7890800422558562e-05,
+ "loss": 0.8678,
+ "step": 1217
+ },
+ {
+ "epoch": 0.23,
+ "learning_rate": 1.788696906152669e-05,
+ "loss": 0.9093,
+ "step": 1218
+ },
+ {
+ "epoch": 0.23,
+ "learning_rate": 1.788313463485484e-05,
+ "loss": 0.8698,
+ "step": 1219
+ },
+ {
+ "epoch": 0.23,
+ "learning_rate": 1.787929714403343e-05,
+ "loss": 0.8843,
+ "step": 1220
+ },
+ {
+ "epoch": 0.24,
+ "learning_rate": 1.787545659055409e-05,
+ "loss": 0.7939,
+ "step": 1221
+ },
+ {
+ "epoch": 0.24,
+ "learning_rate": 1.7871612975909623e-05,
+ "loss": 0.8241,
+ "step": 1222
+ },
+ {
+ "epoch": 0.24,
+ "learning_rate": 1.786776630159404e-05,
+ "loss": 0.8665,
+ "step": 1223
+ },
+ {
+ "epoch": 0.24,
+ "learning_rate": 1.786391656910252e-05,
+ "loss": 0.8686,
+ "step": 1224
+ },
+ {
+ "epoch": 0.24,
+ "learning_rate": 1.786006377993145e-05,
+ "loss": 0.8307,
+ "step": 1225
+ },
+ {
+ "epoch": 0.24,
+ "learning_rate": 1.785620793557839e-05,
+ "loss": 0.8649,
+ "step": 1226
+ },
+ {
+ "epoch": 0.24,
+ "learning_rate": 1.78523490375421e-05,
+ "loss": 0.8615,
+ "step": 1227
+ },
+ {
+ "epoch": 0.24,
+ "learning_rate": 1.784848708732252e-05,
+ "loss": 0.8072,
+ "step": 1228
+ },
+ {
+ "epoch": 0.24,
+ "learning_rate": 1.7844622086420775e-05,
+ "loss": 0.8663,
+ "step": 1229
+ },
+ {
+ "epoch": 0.24,
+ "learning_rate": 1.7840754036339185e-05,
+ "loss": 0.8678,
+ "step": 1230
+ },
+ {
+ "epoch": 0.24,
+ "learning_rate": 1.783688293858124e-05,
+ "loss": 0.8877,
+ "step": 1231
+ },
+ {
+ "epoch": 0.24,
+ "learning_rate": 1.7833008794651632e-05,
+ "loss": 0.7926,
+ "step": 1232
+ },
+ {
+ "epoch": 0.24,
+ "learning_rate": 1.7829131606056228e-05,
+ "loss": 0.8355,
+ "step": 1233
+ },
+ {
+ "epoch": 0.24,
+ "learning_rate": 1.7825251374302073e-05,
+ "loss": 0.808,
+ "step": 1234
+ },
+ {
+ "epoch": 0.24,
+ "learning_rate": 1.7821368100897404e-05,
+ "loss": 0.8718,
+ "step": 1235
+ },
+ {
+ "epoch": 0.24,
+ "learning_rate": 1.781748178735164e-05,
+ "loss": 0.8748,
+ "step": 1236
+ },
+ {
+ "epoch": 0.24,
+ "learning_rate": 1.781359243517537e-05,
+ "loss": 0.8689,
+ "step": 1237
+ },
+ {
+ "epoch": 0.24,
+ "learning_rate": 1.7809700045880383e-05,
+ "loss": 0.8604,
+ "step": 1238
+ },
+ {
+ "epoch": 0.24,
+ "learning_rate": 1.7805804620979633e-05,
+ "loss": 0.7991,
+ "step": 1239
+ },
+ {
+ "epoch": 0.24,
+ "learning_rate": 1.780190616198726e-05,
+ "loss": 0.8309,
+ "step": 1240
+ },
+ {
+ "epoch": 0.24,
+ "learning_rate": 1.7798004670418587e-05,
+ "loss": 0.8856,
+ "step": 1241
+ },
+ {
+ "epoch": 0.24,
+ "learning_rate": 1.7794100147790105e-05,
+ "loss": 0.9274,
+ "step": 1242
+ },
+ {
+ "epoch": 0.24,
+ "learning_rate": 1.7790192595619494e-05,
+ "loss": 0.8363,
+ "step": 1243
+ },
+ {
+ "epoch": 0.24,
+ "learning_rate": 1.7786282015425605e-05,
+ "loss": 0.8763,
+ "step": 1244
+ },
+ {
+ "epoch": 0.24,
+ "learning_rate": 1.7782368408728468e-05,
+ "loss": 1.7543,
+ "step": 1245
+ },
+ {
+ "epoch": 0.24,
+ "learning_rate": 1.7778451777049288e-05,
+ "loss": 0.8383,
+ "step": 1246
+ },
+ {
+ "epoch": 0.24,
+ "learning_rate": 1.7774532121910452e-05,
+ "loss": 0.8198,
+ "step": 1247
+ },
+ {
+ "epoch": 0.24,
+ "learning_rate": 1.7770609444835517e-05,
+ "loss": 0.8555,
+ "step": 1248
+ },
+ {
+ "epoch": 0.24,
+ "learning_rate": 1.776668374734921e-05,
+ "loss": 0.8394,
+ "step": 1249
+ },
+ {
+ "epoch": 0.24,
+ "learning_rate": 1.7762755030977444e-05,
+ "loss": 0.8871,
+ "step": 1250
+ },
+ {
+ "epoch": 0.24,
+ "learning_rate": 1.7758823297247293e-05,
+ "loss": 0.818,
+ "step": 1251
+ },
+ {
+ "epoch": 0.24,
+ "learning_rate": 1.7754888547687013e-05,
+ "loss": 0.8558,
+ "step": 1252
+ },
+ {
+ "epoch": 0.24,
+ "learning_rate": 1.7750950783826027e-05,
+ "loss": 0.8184,
+ "step": 1253
+ },
+ {
+ "epoch": 0.24,
+ "learning_rate": 1.7747010007194934e-05,
+ "loss": 0.8478,
+ "step": 1254
+ },
+ {
+ "epoch": 0.24,
+ "learning_rate": 1.7743066219325497e-05,
+ "loss": 0.8309,
+ "step": 1255
+ },
+ {
+ "epoch": 0.24,
+ "learning_rate": 1.7739119421750657e-05,
+ "loss": 0.8312,
+ "step": 1256
+ },
+ {
+ "epoch": 0.24,
+ "learning_rate": 1.7735169616004522e-05,
+ "loss": 0.8239,
+ "step": 1257
+ },
+ {
+ "epoch": 0.24,
+ "learning_rate": 1.7731216803622367e-05,
+ "loss": 0.9024,
+ "step": 1258
+ },
+ {
+ "epoch": 0.24,
+ "learning_rate": 1.7727260986140642e-05,
+ "loss": 0.9217,
+ "step": 1259
+ },
+ {
+ "epoch": 0.24,
+ "learning_rate": 1.7723302165096953e-05,
+ "loss": 0.8395,
+ "step": 1260
+ },
+ {
+ "epoch": 0.24,
+ "learning_rate": 1.771934034203009e-05,
+ "loss": 0.8614,
+ "step": 1261
+ },
+ {
+ "epoch": 0.24,
+ "learning_rate": 1.7715375518479995e-05,
+ "loss": 0.8729,
+ "step": 1262
+ },
+ {
+ "epoch": 0.24,
+ "learning_rate": 1.7711407695987783e-05,
+ "loss": 1.7704,
+ "step": 1263
+ },
+ {
+ "epoch": 0.24,
+ "learning_rate": 1.7707436876095736e-05,
+ "loss": 0.8176,
+ "step": 1264
+ },
+ {
+ "epoch": 0.24,
+ "learning_rate": 1.77034630603473e-05,
+ "loss": 0.8964,
+ "step": 1265
+ },
+ {
+ "epoch": 0.24,
+ "learning_rate": 1.7699486250287083e-05,
+ "loss": 0.8538,
+ "step": 1266
+ },
+ {
+ "epoch": 0.24,
+ "learning_rate": 1.769550644746086e-05,
+ "loss": 0.7981,
+ "step": 1267
+ },
+ {
+ "epoch": 0.24,
+ "learning_rate": 1.7691523653415562e-05,
+ "loss": 0.8831,
+ "step": 1268
+ },
+ {
+ "epoch": 0.24,
+ "learning_rate": 1.7687537869699296e-05,
+ "loss": 0.8137,
+ "step": 1269
+ },
+ {
+ "epoch": 0.24,
+ "learning_rate": 1.768354909786132e-05,
+ "loss": 0.8422,
+ "step": 1270
+ },
+ {
+ "epoch": 0.24,
+ "learning_rate": 1.7679557339452057e-05,
+ "loss": 0.8776,
+ "step": 1271
+ },
+ {
+ "epoch": 0.24,
+ "learning_rate": 1.7675562596023095e-05,
+ "loss": 0.8741,
+ "step": 1272
+ },
+ {
+ "epoch": 0.25,
+ "learning_rate": 1.767156486912717e-05,
+ "loss": 0.8671,
+ "step": 1273
+ },
+ {
+ "epoch": 0.25,
+ "learning_rate": 1.7667564160318195e-05,
+ "loss": 0.7862,
+ "step": 1274
+ },
+ {
+ "epoch": 0.25,
+ "learning_rate": 1.7663560471151227e-05,
+ "loss": 0.8761,
+ "step": 1275
+ },
+ {
+ "epoch": 0.25,
+ "learning_rate": 1.7659553803182486e-05,
+ "loss": 0.852,
+ "step": 1276
+ },
+ {
+ "epoch": 0.25,
+ "learning_rate": 1.7655544157969355e-05,
+ "loss": 0.8378,
+ "step": 1277
+ },
+ {
+ "epoch": 0.25,
+ "learning_rate": 1.765153153707037e-05,
+ "loss": 0.8167,
+ "step": 1278
+ },
+ {
+ "epoch": 0.25,
+ "learning_rate": 1.7647515942045227e-05,
+ "loss": 0.844,
+ "step": 1279
+ },
+ {
+ "epoch": 0.25,
+ "learning_rate": 1.7643497374454767e-05,
+ "loss": 0.8278,
+ "step": 1280
+ },
+ {
+ "epoch": 0.25,
+ "learning_rate": 1.7639475835861e-05,
+ "loss": 0.8179,
+ "step": 1281
+ },
+ {
+ "epoch": 0.25,
+ "learning_rate": 1.7635451327827087e-05,
+ "loss": 0.8621,
+ "step": 1282
+ },
+ {
+ "epoch": 0.25,
+ "learning_rate": 1.7631423851917334e-05,
+ "loss": 0.8506,
+ "step": 1283
+ },
+ {
+ "epoch": 0.25,
+ "learning_rate": 1.7627393409697217e-05,
+ "loss": 0.7965,
+ "step": 1284
+ },
+ {
+ "epoch": 0.25,
+ "learning_rate": 1.762336000273335e-05,
+ "loss": 0.844,
+ "step": 1285
+ },
+ {
+ "epoch": 0.25,
+ "learning_rate": 1.761932363259351e-05,
+ "loss": 0.8444,
+ "step": 1286
+ },
+ {
+ "epoch": 0.25,
+ "learning_rate": 1.761528430084662e-05,
+ "loss": 0.8392,
+ "step": 1287
+ },
+ {
+ "epoch": 0.25,
+ "learning_rate": 1.7611242009062754e-05,
+ "loss": 0.7927,
+ "step": 1288
+ },
+ {
+ "epoch": 0.25,
+ "learning_rate": 1.760719675881314e-05,
+ "loss": 0.8191,
+ "step": 1289
+ },
+ {
+ "epoch": 0.25,
+ "learning_rate": 1.7603148551670152e-05,
+ "loss": 0.8652,
+ "step": 1290
+ },
+ {
+ "epoch": 0.25,
+ "learning_rate": 1.7599097389207313e-05,
+ "loss": 0.8502,
+ "step": 1291
+ },
+ {
+ "epoch": 0.25,
+ "learning_rate": 1.7595043272999306e-05,
+ "loss": 0.8202,
+ "step": 1292
+ },
+ {
+ "epoch": 0.25,
+ "learning_rate": 1.7590986204621943e-05,
+ "loss": 0.8186,
+ "step": 1293
+ },
+ {
+ "epoch": 0.25,
+ "learning_rate": 1.75869261856522e-05,
+ "loss": 0.8169,
+ "step": 1294
+ },
+ {
+ "epoch": 0.25,
+ "learning_rate": 1.7582863217668192e-05,
+ "loss": 0.811,
+ "step": 1295
+ },
+ {
+ "epoch": 0.25,
+ "learning_rate": 1.757879730224918e-05,
+ "loss": 0.8408,
+ "step": 1296
+ },
+ {
+ "epoch": 0.25,
+ "learning_rate": 1.7574728440975574e-05,
+ "loss": 0.8646,
+ "step": 1297
+ },
+ {
+ "epoch": 0.25,
+ "learning_rate": 1.757065663542893e-05,
+ "loss": 0.8249,
+ "step": 1298
+ },
+ {
+ "epoch": 0.25,
+ "learning_rate": 1.756658188719194e-05,
+ "loss": 0.819,
+ "step": 1299
+ },
+ {
+ "epoch": 0.25,
+ "learning_rate": 1.7562504197848452e-05,
+ "loss": 0.8411,
+ "step": 1300
+ },
+ {
+ "epoch": 0.25,
+ "learning_rate": 1.7558423568983446e-05,
+ "loss": 0.8301,
+ "step": 1301
+ },
+ {
+ "epoch": 0.25,
+ "learning_rate": 1.7554340002183053e-05,
+ "loss": 0.8325,
+ "step": 1302
+ },
+ {
+ "epoch": 0.25,
+ "learning_rate": 1.755025349903454e-05,
+ "loss": 0.8784,
+ "step": 1303
+ },
+ {
+ "epoch": 0.25,
+ "learning_rate": 1.7546164061126322e-05,
+ "loss": 0.9125,
+ "step": 1304
+ },
+ {
+ "epoch": 0.25,
+ "learning_rate": 1.7542071690047947e-05,
+ "loss": 0.8356,
+ "step": 1305
+ },
+ {
+ "epoch": 0.25,
+ "learning_rate": 1.7537976387390107e-05,
+ "loss": 0.8738,
+ "step": 1306
+ },
+ {
+ "epoch": 0.25,
+ "learning_rate": 1.7533878154744634e-05,
+ "loss": 0.8039,
+ "step": 1307
+ },
+ {
+ "epoch": 0.25,
+ "learning_rate": 1.75297769937045e-05,
+ "loss": 0.815,
+ "step": 1308
+ },
+ {
+ "epoch": 0.25,
+ "learning_rate": 1.7525672905863808e-05,
+ "loss": 0.8537,
+ "step": 1309
+ },
+ {
+ "epoch": 0.25,
+ "learning_rate": 1.7521565892817808e-05,
+ "loss": 0.8326,
+ "step": 1310
+ },
+ {
+ "epoch": 0.25,
+ "learning_rate": 1.751745595616288e-05,
+ "loss": 0.8746,
+ "step": 1311
+ },
+ {
+ "epoch": 0.25,
+ "learning_rate": 1.751334309749655e-05,
+ "loss": 0.814,
+ "step": 1312
+ },
+ {
+ "epoch": 0.25,
+ "learning_rate": 1.7509227318417465e-05,
+ "loss": 0.8988,
+ "step": 1313
+ },
+ {
+ "epoch": 0.25,
+ "learning_rate": 1.7505108620525425e-05,
+ "loss": 0.842,
+ "step": 1314
+ },
+ {
+ "epoch": 0.25,
+ "learning_rate": 1.7500987005421345e-05,
+ "loss": 0.8591,
+ "step": 1315
+ },
+ {
+ "epoch": 0.25,
+ "learning_rate": 1.7496862474707287e-05,
+ "loss": 0.833,
+ "step": 1316
+ },
+ {
+ "epoch": 0.25,
+ "learning_rate": 1.7492735029986442e-05,
+ "loss": 0.8228,
+ "step": 1317
+ },
+ {
+ "epoch": 0.25,
+ "learning_rate": 1.7488604672863144e-05,
+ "loss": 0.7992,
+ "step": 1318
+ },
+ {
+ "epoch": 0.25,
+ "learning_rate": 1.7484471404942836e-05,
+ "loss": 1.8106,
+ "step": 1319
+ },
+ {
+ "epoch": 0.25,
+ "learning_rate": 1.7480335227832115e-05,
+ "loss": 0.8568,
+ "step": 1320
+ },
+ {
+ "epoch": 0.25,
+ "learning_rate": 1.7476196143138695e-05,
+ "loss": 0.8067,
+ "step": 1321
+ },
+ {
+ "epoch": 0.25,
+ "learning_rate": 1.7472054152471432e-05,
+ "loss": 0.8333,
+ "step": 1322
+ },
+ {
+ "epoch": 0.25,
+ "learning_rate": 1.74679092574403e-05,
+ "loss": 0.8012,
+ "step": 1323
+ },
+ {
+ "epoch": 0.25,
+ "learning_rate": 1.7463761459656402e-05,
+ "loss": 0.8462,
+ "step": 1324
+ },
+ {
+ "epoch": 0.26,
+ "learning_rate": 1.7459610760731984e-05,
+ "loss": 0.8184,
+ "step": 1325
+ },
+ {
+ "epoch": 0.26,
+ "learning_rate": 1.74554571622804e-05,
+ "loss": 0.8853,
+ "step": 1326
+ },
+ {
+ "epoch": 0.26,
+ "learning_rate": 1.745130066591615e-05,
+ "loss": 0.801,
+ "step": 1327
+ },
+ {
+ "epoch": 0.26,
+ "learning_rate": 1.7447141273254845e-05,
+ "loss": 0.8147,
+ "step": 1328
+ },
+ {
+ "epoch": 0.26,
+ "learning_rate": 1.7442978985913227e-05,
+ "loss": 0.8099,
+ "step": 1329
+ },
+ {
+ "epoch": 0.26,
+ "learning_rate": 1.7438813805509168e-05,
+ "loss": 1.6249,
+ "step": 1330
+ },
+ {
+ "epoch": 0.26,
+ "learning_rate": 1.743464573366166e-05,
+ "loss": 0.7834,
+ "step": 1331
+ },
+ {
+ "epoch": 0.26,
+ "learning_rate": 1.7430474771990816e-05,
+ "loss": 0.9115,
+ "step": 1332
+ },
+ {
+ "epoch": 0.26,
+ "learning_rate": 1.7426300922117877e-05,
+ "loss": 0.7822,
+ "step": 1333
+ },
+ {
+ "epoch": 0.26,
+ "learning_rate": 1.7422124185665203e-05,
+ "loss": 0.8433,
+ "step": 1334
+ },
+ {
+ "epoch": 0.26,
+ "learning_rate": 1.7417944564256285e-05,
+ "loss": 0.8887,
+ "step": 1335
+ },
+ {
+ "epoch": 0.26,
+ "learning_rate": 1.741376205951572e-05,
+ "loss": 0.8394,
+ "step": 1336
+ },
+ {
+ "epoch": 0.26,
+ "learning_rate": 1.7409576673069247e-05,
+ "loss": 0.8774,
+ "step": 1337
+ },
+ {
+ "epoch": 0.26,
+ "learning_rate": 1.7405388406543697e-05,
+ "loss": 0.8054,
+ "step": 1338
+ },
+ {
+ "epoch": 0.26,
+ "learning_rate": 1.7401197261567048e-05,
+ "loss": 0.8064,
+ "step": 1339
+ },
+ {
+ "epoch": 0.26,
+ "learning_rate": 1.739700323976838e-05,
+ "loss": 1.7548,
+ "step": 1340
+ },
+ {
+ "epoch": 0.26,
+ "learning_rate": 1.7392806342777897e-05,
+ "loss": 0.8788,
+ "step": 1341
+ },
+ {
+ "epoch": 0.26,
+ "learning_rate": 1.7388606572226912e-05,
+ "loss": 1.6834,
+ "step": 1342
+ },
+ {
+ "epoch": 0.26,
+ "learning_rate": 1.7384403929747877e-05,
+ "loss": 0.898,
+ "step": 1343
+ },
+ {
+ "epoch": 0.26,
+ "learning_rate": 1.7380198416974338e-05,
+ "loss": 0.7786,
+ "step": 1344
+ },
+ {
+ "epoch": 0.26,
+ "learning_rate": 1.737599003554096e-05,
+ "loss": 0.8113,
+ "step": 1345
+ },
+ {
+ "epoch": 0.26,
+ "learning_rate": 1.7371778787083538e-05,
+ "loss": 0.7925,
+ "step": 1346
+ },
+ {
+ "epoch": 0.26,
+ "learning_rate": 1.7367564673238962e-05,
+ "loss": 1.5851,
+ "step": 1347
+ },
+ {
+ "epoch": 0.26,
+ "learning_rate": 1.736334769564525e-05,
+ "loss": 0.8199,
+ "step": 1348
+ },
+ {
+ "epoch": 0.26,
+ "learning_rate": 1.7359127855941523e-05,
+ "loss": 0.8393,
+ "step": 1349
+ },
+ {
+ "epoch": 0.26,
+ "learning_rate": 1.7354905155768027e-05,
+ "loss": 0.8417,
+ "step": 1350
+ },
+ {
+ "epoch": 0.26,
+ "learning_rate": 1.7350679596766108e-05,
+ "loss": 0.8919,
+ "step": 1351
+ },
+ {
+ "epoch": 0.26,
+ "learning_rate": 1.7346451180578222e-05,
+ "loss": 0.8561,
+ "step": 1352
+ },
+ {
+ "epoch": 0.26,
+ "learning_rate": 1.734221990884795e-05,
+ "loss": 0.8676,
+ "step": 1353
+ },
+ {
+ "epoch": 0.26,
+ "learning_rate": 1.733798578321997e-05,
+ "loss": 0.8004,
+ "step": 1354
+ },
+ {
+ "epoch": 0.26,
+ "learning_rate": 1.7333748805340072e-05,
+ "loss": 0.851,
+ "step": 1355
+ },
+ {
+ "epoch": 0.26,
+ "learning_rate": 1.7329508976855162e-05,
+ "loss": 0.8417,
+ "step": 1356
+ },
+ {
+ "epoch": 0.26,
+ "learning_rate": 1.7325266299413237e-05,
+ "loss": 0.7874,
+ "step": 1357
+ },
+ {
+ "epoch": 0.26,
+ "learning_rate": 1.732102077466342e-05,
+ "loss": 0.854,
+ "step": 1358
+ },
+ {
+ "epoch": 0.26,
+ "learning_rate": 1.7316772404255934e-05,
+ "loss": 0.8809,
+ "step": 1359
+ },
+ {
+ "epoch": 0.26,
+ "learning_rate": 1.7312521189842108e-05,
+ "loss": 0.8877,
+ "step": 1360
+ },
+ {
+ "epoch": 0.26,
+ "learning_rate": 1.730826713307437e-05,
+ "loss": 1.806,
+ "step": 1361
+ },
+ {
+ "epoch": 0.26,
+ "learning_rate": 1.7304010235606256e-05,
+ "loss": 0.9063,
+ "step": 1362
+ },
+ {
+ "epoch": 0.26,
+ "learning_rate": 1.729975049909242e-05,
+ "loss": 0.8165,
+ "step": 1363
+ },
+ {
+ "epoch": 0.26,
+ "learning_rate": 1.7295487925188598e-05,
+ "loss": 0.8667,
+ "step": 1364
+ },
+ {
+ "epoch": 0.26,
+ "learning_rate": 1.7291222515551643e-05,
+ "loss": 0.8482,
+ "step": 1365
+ },
+ {
+ "epoch": 0.26,
+ "learning_rate": 1.7286954271839507e-05,
+ "loss": 0.8336,
+ "step": 1366
+ },
+ {
+ "epoch": 0.26,
+ "learning_rate": 1.728268319571124e-05,
+ "loss": 0.8388,
+ "step": 1367
+ },
+ {
+ "epoch": 0.26,
+ "learning_rate": 1.7278409288827002e-05,
+ "loss": 0.8516,
+ "step": 1368
+ },
+ {
+ "epoch": 0.26,
+ "learning_rate": 1.7274132552848042e-05,
+ "loss": 0.7786,
+ "step": 1369
+ },
+ {
+ "epoch": 0.26,
+ "learning_rate": 1.7269852989436713e-05,
+ "loss": 0.8013,
+ "step": 1370
+ },
+ {
+ "epoch": 0.26,
+ "learning_rate": 1.726557060025647e-05,
+ "loss": 0.7433,
+ "step": 1371
+ },
+ {
+ "epoch": 0.26,
+ "learning_rate": 1.7261285386971863e-05,
+ "loss": 0.8963,
+ "step": 1372
+ },
+ {
+ "epoch": 0.26,
+ "learning_rate": 1.725699735124854e-05,
+ "loss": 0.8206,
+ "step": 1373
+ },
+ {
+ "epoch": 0.26,
+ "learning_rate": 1.7252706494753254e-05,
+ "loss": 0.8627,
+ "step": 1374
+ },
+ {
+ "epoch": 0.26,
+ "learning_rate": 1.7248412819153835e-05,
+ "loss": 0.8815,
+ "step": 1375
+ },
+ {
+ "epoch": 0.26,
+ "learning_rate": 1.724411632611923e-05,
+ "loss": 0.8526,
+ "step": 1376
+ },
+ {
+ "epoch": 0.27,
+ "learning_rate": 1.7239817017319473e-05,
+ "loss": 0.8652,
+ "step": 1377
+ },
+ {
+ "epoch": 0.27,
+ "learning_rate": 1.7235514894425687e-05,
+ "loss": 0.8723,
+ "step": 1378
+ },
+ {
+ "epoch": 0.27,
+ "learning_rate": 1.7231209959110097e-05,
+ "loss": 0.7981,
+ "step": 1379
+ },
+ {
+ "epoch": 0.27,
+ "learning_rate": 1.7226902213046016e-05,
+ "loss": 0.8708,
+ "step": 1380
+ },
+ {
+ "epoch": 0.27,
+ "learning_rate": 1.722259165790785e-05,
+ "loss": 0.9044,
+ "step": 1381
+ },
+ {
+ "epoch": 0.27,
+ "learning_rate": 1.72182782953711e-05,
+ "loss": 0.8484,
+ "step": 1382
+ },
+ {
+ "epoch": 0.27,
+ "learning_rate": 1.7213962127112355e-05,
+ "loss": 0.8652,
+ "step": 1383
+ },
+ {
+ "epoch": 0.27,
+ "learning_rate": 1.7209643154809298e-05,
+ "loss": 0.8913,
+ "step": 1384
+ },
+ {
+ "epoch": 0.27,
+ "learning_rate": 1.72053213801407e-05,
+ "loss": 0.8035,
+ "step": 1385
+ },
+ {
+ "epoch": 0.27,
+ "learning_rate": 1.7200996804786423e-05,
+ "loss": 0.8028,
+ "step": 1386
+ },
+ {
+ "epoch": 0.27,
+ "learning_rate": 1.719666943042741e-05,
+ "loss": 0.8526,
+ "step": 1387
+ },
+ {
+ "epoch": 0.27,
+ "learning_rate": 1.71923392587457e-05,
+ "loss": 0.8902,
+ "step": 1388
+ },
+ {
+ "epoch": 0.27,
+ "learning_rate": 1.718800629142442e-05,
+ "loss": 0.8539,
+ "step": 1389
+ },
+ {
+ "epoch": 0.27,
+ "learning_rate": 1.718367053014778e-05,
+ "loss": 0.8222,
+ "step": 1390
+ },
+ {
+ "epoch": 0.27,
+ "learning_rate": 1.717933197660107e-05,
+ "loss": 0.7978,
+ "step": 1391
+ },
+ {
+ "epoch": 0.27,
+ "learning_rate": 1.7174990632470682e-05,
+ "loss": 0.8444,
+ "step": 1392
+ },
+ {
+ "epoch": 0.27,
+ "learning_rate": 1.717064649944408e-05,
+ "loss": 0.8333,
+ "step": 1393
+ },
+ {
+ "epoch": 0.27,
+ "learning_rate": 1.7166299579209806e-05,
+ "loss": 1.8398,
+ "step": 1394
+ },
+ {
+ "epoch": 0.27,
+ "learning_rate": 1.716194987345751e-05,
+ "loss": 0.7629,
+ "step": 1395
+ },
+ {
+ "epoch": 0.27,
+ "learning_rate": 1.715759738387789e-05,
+ "loss": 0.8554,
+ "step": 1396
+ },
+ {
+ "epoch": 0.27,
+ "learning_rate": 1.7153242112162763e-05,
+ "loss": 0.8713,
+ "step": 1397
+ },
+ {
+ "epoch": 0.27,
+ "learning_rate": 1.7148884060004997e-05,
+ "loss": 0.8026,
+ "step": 1398
+ },
+ {
+ "epoch": 0.27,
+ "learning_rate": 1.714452322909856e-05,
+ "loss": 0.8585,
+ "step": 1399
+ },
+ {
+ "epoch": 0.27,
+ "learning_rate": 1.714015962113849e-05,
+ "loss": 0.8849,
+ "step": 1400
+ },
+ {
+ "epoch": 0.27,
+ "learning_rate": 1.713579323782091e-05,
+ "loss": 0.8276,
+ "step": 1401
+ },
+ {
+ "epoch": 0.27,
+ "learning_rate": 1.7131424080843016e-05,
+ "loss": 0.8763,
+ "step": 1402
+ },
+ {
+ "epoch": 0.27,
+ "learning_rate": 1.7127052151903087e-05,
+ "loss": 0.876,
+ "step": 1403
+ },
+ {
+ "epoch": 0.27,
+ "learning_rate": 1.7122677452700477e-05,
+ "loss": 0.8984,
+ "step": 1404
+ },
+ {
+ "epoch": 0.27,
+ "learning_rate": 1.7118299984935622e-05,
+ "loss": 0.8012,
+ "step": 1405
+ },
+ {
+ "epoch": 0.27,
+ "learning_rate": 1.7113919750310026e-05,
+ "loss": 0.8115,
+ "step": 1406
+ },
+ {
+ "epoch": 0.27,
+ "learning_rate": 1.7109536750526273e-05,
+ "loss": 0.8273,
+ "step": 1407
+ },
+ {
+ "epoch": 0.27,
+ "learning_rate": 1.710515098728802e-05,
+ "loss": 0.8964,
+ "step": 1408
+ },
+ {
+ "epoch": 0.27,
+ "learning_rate": 1.7100762462300005e-05,
+ "loss": 0.878,
+ "step": 1409
+ },
+ {
+ "epoch": 0.27,
+ "learning_rate": 1.7096371177268026e-05,
+ "loss": 0.902,
+ "step": 1410
+ },
+ {
+ "epoch": 0.27,
+ "learning_rate": 1.7091977133898967e-05,
+ "loss": 0.8371,
+ "step": 1411
+ },
+ {
+ "epoch": 0.27,
+ "learning_rate": 1.7087580333900778e-05,
+ "loss": 0.8558,
+ "step": 1412
+ },
+ {
+ "epoch": 0.27,
+ "learning_rate": 1.7083180778982477e-05,
+ "loss": 0.8564,
+ "step": 1413
+ },
+ {
+ "epoch": 0.27,
+ "learning_rate": 1.7078778470854163e-05,
+ "loss": 0.8061,
+ "step": 1414
+ },
+ {
+ "epoch": 0.27,
+ "learning_rate": 1.7074373411226998e-05,
+ "loss": 0.8164,
+ "step": 1415
+ },
+ {
+ "epoch": 0.27,
+ "learning_rate": 1.706996560181321e-05,
+ "loss": 0.8727,
+ "step": 1416
+ },
+ {
+ "epoch": 0.27,
+ "learning_rate": 1.706555504432611e-05,
+ "loss": 0.836,
+ "step": 1417
+ },
+ {
+ "epoch": 0.27,
+ "learning_rate": 1.706114174048006e-05,
+ "loss": 0.8605,
+ "step": 1418
+ },
+ {
+ "epoch": 0.27,
+ "learning_rate": 1.7056725691990496e-05,
+ "loss": 0.8855,
+ "step": 1419
+ },
+ {
+ "epoch": 0.27,
+ "learning_rate": 1.7052306900573926e-05,
+ "loss": 0.8402,
+ "step": 1420
+ },
+ {
+ "epoch": 0.27,
+ "learning_rate": 1.704788536794792e-05,
+ "loss": 0.8197,
+ "step": 1421
+ },
+ {
+ "epoch": 0.27,
+ "learning_rate": 1.7043461095831113e-05,
+ "loss": 0.8182,
+ "step": 1422
+ },
+ {
+ "epoch": 0.27,
+ "learning_rate": 1.7039034085943206e-05,
+ "loss": 0.8335,
+ "step": 1423
+ },
+ {
+ "epoch": 0.27,
+ "learning_rate": 1.7034604340004967e-05,
+ "loss": 0.7831,
+ "step": 1424
+ },
+ {
+ "epoch": 0.27,
+ "learning_rate": 1.7030171859738217e-05,
+ "loss": 0.8162,
+ "step": 1425
+ },
+ {
+ "epoch": 0.27,
+ "learning_rate": 1.7025736646865852e-05,
+ "loss": 0.8223,
+ "step": 1426
+ },
+ {
+ "epoch": 0.27,
+ "learning_rate": 1.7021298703111824e-05,
+ "loss": 0.8487,
+ "step": 1427
+ },
+ {
+ "epoch": 0.27,
+ "learning_rate": 1.7016858030201154e-05,
+ "loss": 0.8462,
+ "step": 1428
+ },
+ {
+ "epoch": 0.28,
+ "learning_rate": 1.7012414629859905e-05,
+ "loss": 0.8741,
+ "step": 1429
+ },
+ {
+ "epoch": 0.28,
+ "learning_rate": 1.700796850381522e-05,
+ "loss": 0.8595,
+ "step": 1430
+ },
+ {
+ "epoch": 0.28,
+ "learning_rate": 1.70035196537953e-05,
+ "loss": 0.8784,
+ "step": 1431
+ },
+ {
+ "epoch": 0.28,
+ "learning_rate": 1.6999068081529385e-05,
+ "loss": 0.7547,
+ "step": 1432
+ },
+ {
+ "epoch": 0.28,
+ "learning_rate": 1.6994613788747797e-05,
+ "loss": 0.8337,
+ "step": 1433
+ },
+ {
+ "epoch": 0.28,
+ "learning_rate": 1.6990156777181903e-05,
+ "loss": 0.8824,
+ "step": 1434
+ },
+ {
+ "epoch": 0.28,
+ "learning_rate": 1.698569704856413e-05,
+ "loss": 0.848,
+ "step": 1435
+ },
+ {
+ "epoch": 0.28,
+ "learning_rate": 1.698123460462796e-05,
+ "loss": 0.8109,
+ "step": 1436
+ },
+ {
+ "epoch": 0.28,
+ "learning_rate": 1.697676944710793e-05,
+ "loss": 1.7661,
+ "step": 1437
+ },
+ {
+ "epoch": 0.28,
+ "learning_rate": 1.697230157773963e-05,
+ "loss": 0.8823,
+ "step": 1438
+ },
+ {
+ "epoch": 0.28,
+ "learning_rate": 1.6967830998259715e-05,
+ "loss": 0.8503,
+ "step": 1439
+ },
+ {
+ "epoch": 0.28,
+ "learning_rate": 1.696335771040588e-05,
+ "loss": 0.8708,
+ "step": 1440
+ },
+ {
+ "epoch": 0.28,
+ "learning_rate": 1.6958881715916878e-05,
+ "loss": 0.8191,
+ "step": 1441
+ },
+ {
+ "epoch": 0.28,
+ "learning_rate": 1.695440301653251e-05,
+ "loss": 0.8157,
+ "step": 1442
+ },
+ {
+ "epoch": 0.28,
+ "learning_rate": 1.6949921613993644e-05,
+ "loss": 0.8638,
+ "step": 1443
+ },
+ {
+ "epoch": 0.28,
+ "learning_rate": 1.694543751004217e-05,
+ "loss": 0.8387,
+ "step": 1444
+ },
+ {
+ "epoch": 0.28,
+ "learning_rate": 1.6940950706421062e-05,
+ "loss": 0.8661,
+ "step": 1445
+ },
+ {
+ "epoch": 0.28,
+ "learning_rate": 1.6936461204874318e-05,
+ "loss": 0.8051,
+ "step": 1446
+ },
+ {
+ "epoch": 0.28,
+ "learning_rate": 1.6931969007146988e-05,
+ "loss": 0.7785,
+ "step": 1447
+ },
+ {
+ "epoch": 0.28,
+ "learning_rate": 1.6927474114985186e-05,
+ "loss": 0.8714,
+ "step": 1448
+ },
+ {
+ "epoch": 0.28,
+ "learning_rate": 1.692297653013606e-05,
+ "loss": 0.8966,
+ "step": 1449
+ },
+ {
+ "epoch": 0.28,
+ "learning_rate": 1.69184762543478e-05,
+ "loss": 0.8205,
+ "step": 1450
+ },
+ {
+ "epoch": 0.28,
+ "learning_rate": 1.6913973289369657e-05,
+ "loss": 0.8285,
+ "step": 1451
+ },
+ {
+ "epoch": 0.28,
+ "learning_rate": 1.690946763695191e-05,
+ "loss": 0.8379,
+ "step": 1452
+ },
+ {
+ "epoch": 0.28,
+ "learning_rate": 1.69049592988459e-05,
+ "loss": 0.7809,
+ "step": 1453
+ },
+ {
+ "epoch": 0.28,
+ "learning_rate": 1.6900448276804004e-05,
+ "loss": 0.8077,
+ "step": 1454
+ },
+ {
+ "epoch": 0.28,
+ "learning_rate": 1.6895934572579637e-05,
+ "loss": 0.8302,
+ "step": 1455
+ },
+ {
+ "epoch": 0.28,
+ "learning_rate": 1.6891418187927265e-05,
+ "loss": 0.8465,
+ "step": 1456
+ },
+ {
+ "epoch": 0.28,
+ "learning_rate": 1.6886899124602392e-05,
+ "loss": 0.8203,
+ "step": 1457
+ },
+ {
+ "epoch": 0.28,
+ "learning_rate": 1.6882377384361562e-05,
+ "loss": 0.8165,
+ "step": 1458
+ },
+ {
+ "epoch": 0.28,
+ "learning_rate": 1.6877852968962363e-05,
+ "loss": 0.8386,
+ "step": 1459
+ },
+ {
+ "epoch": 0.28,
+ "learning_rate": 1.687332588016342e-05,
+ "loss": 0.8612,
+ "step": 1460
+ },
+ {
+ "epoch": 0.28,
+ "learning_rate": 1.6868796119724396e-05,
+ "loss": 0.7903,
+ "step": 1461
+ },
+ {
+ "epoch": 0.28,
+ "learning_rate": 1.6864263689406004e-05,
+ "loss": 0.9177,
+ "step": 1462
+ },
+ {
+ "epoch": 0.28,
+ "learning_rate": 1.685972859096997e-05,
+ "loss": 0.8649,
+ "step": 1463
+ },
+ {
+ "epoch": 0.28,
+ "learning_rate": 1.6855190826179088e-05,
+ "loss": 0.8463,
+ "step": 1464
+ },
+ {
+ "epoch": 0.28,
+ "learning_rate": 1.6850650396797163e-05,
+ "loss": 0.8863,
+ "step": 1465
+ },
+ {
+ "epoch": 0.28,
+ "learning_rate": 1.6846107304589047e-05,
+ "loss": 0.8522,
+ "step": 1466
+ },
+ {
+ "epoch": 0.28,
+ "learning_rate": 1.6841561551320628e-05,
+ "loss": 0.8844,
+ "step": 1467
+ },
+ {
+ "epoch": 0.28,
+ "learning_rate": 1.6837013138758824e-05,
+ "loss": 0.8606,
+ "step": 1468
+ },
+ {
+ "epoch": 0.28,
+ "learning_rate": 1.6832462068671588e-05,
+ "loss": 0.8243,
+ "step": 1469
+ },
+ {
+ "epoch": 0.28,
+ "learning_rate": 1.682790834282791e-05,
+ "loss": 0.7835,
+ "step": 1470
+ },
+ {
+ "epoch": 0.28,
+ "learning_rate": 1.6823351962997807e-05,
+ "loss": 0.7769,
+ "step": 1471
+ },
+ {
+ "epoch": 0.28,
+ "learning_rate": 1.6818792930952327e-05,
+ "loss": 0.8679,
+ "step": 1472
+ },
+ {
+ "epoch": 0.28,
+ "learning_rate": 1.6814231248463556e-05,
+ "loss": 0.939,
+ "step": 1473
+ },
+ {
+ "epoch": 0.28,
+ "learning_rate": 1.6809666917304603e-05,
+ "loss": 0.8642,
+ "step": 1474
+ },
+ {
+ "epoch": 0.28,
+ "learning_rate": 1.680509993924961e-05,
+ "loss": 0.886,
+ "step": 1475
+ },
+ {
+ "epoch": 0.28,
+ "learning_rate": 1.6800530316073748e-05,
+ "loss": 0.7648,
+ "step": 1476
+ },
+ {
+ "epoch": 0.28,
+ "learning_rate": 1.6795958049553208e-05,
+ "loss": 0.7842,
+ "step": 1477
+ },
+ {
+ "epoch": 0.28,
+ "learning_rate": 1.6791383141465222e-05,
+ "loss": 0.8959,
+ "step": 1478
+ },
+ {
+ "epoch": 0.28,
+ "learning_rate": 1.6786805593588045e-05,
+ "loss": 0.8031,
+ "step": 1479
+ },
+ {
+ "epoch": 0.28,
+ "learning_rate": 1.678222540770095e-05,
+ "loss": 0.8405,
+ "step": 1480
+ },
+ {
+ "epoch": 0.29,
+ "learning_rate": 1.6777642585584243e-05,
+ "loss": 0.8619,
+ "step": 1481
+ },
+ {
+ "epoch": 0.29,
+ "learning_rate": 1.677305712901925e-05,
+ "loss": 0.8663,
+ "step": 1482
+ },
+ {
+ "epoch": 0.29,
+ "learning_rate": 1.676846903978833e-05,
+ "loss": 0.7846,
+ "step": 1483
+ },
+ {
+ "epoch": 0.29,
+ "learning_rate": 1.6763878319674853e-05,
+ "loss": 0.8108,
+ "step": 1484
+ },
+ {
+ "epoch": 0.29,
+ "learning_rate": 1.6759284970463215e-05,
+ "loss": 0.8314,
+ "step": 1485
+ },
+ {
+ "epoch": 0.29,
+ "learning_rate": 1.6754688993938843e-05,
+ "loss": 0.9031,
+ "step": 1486
+ },
+ {
+ "epoch": 0.29,
+ "learning_rate": 1.6750090391888175e-05,
+ "loss": 0.8558,
+ "step": 1487
+ },
+ {
+ "epoch": 0.29,
+ "learning_rate": 1.6745489166098672e-05,
+ "loss": 0.9226,
+ "step": 1488
+ },
+ {
+ "epoch": 0.29,
+ "learning_rate": 1.6740885318358824e-05,
+ "loss": 0.8329,
+ "step": 1489
+ },
+ {
+ "epoch": 0.29,
+ "learning_rate": 1.6736278850458122e-05,
+ "loss": 0.8743,
+ "step": 1490
+ },
+ {
+ "epoch": 0.29,
+ "learning_rate": 1.6731669764187087e-05,
+ "loss": 0.8657,
+ "step": 1491
+ },
+ {
+ "epoch": 0.29,
+ "learning_rate": 1.6727058061337264e-05,
+ "loss": 0.8845,
+ "step": 1492
+ },
+ {
+ "epoch": 0.29,
+ "learning_rate": 1.6722443743701194e-05,
+ "loss": 0.8871,
+ "step": 1493
+ },
+ {
+ "epoch": 0.29,
+ "learning_rate": 1.671782681307246e-05,
+ "loss": 0.7874,
+ "step": 1494
+ },
+ {
+ "epoch": 0.29,
+ "learning_rate": 1.671320727124565e-05,
+ "loss": 0.8655,
+ "step": 1495
+ },
+ {
+ "epoch": 0.29,
+ "learning_rate": 1.6708585120016353e-05,
+ "loss": 0.8773,
+ "step": 1496
+ },
+ {
+ "epoch": 0.29,
+ "learning_rate": 1.670396036118119e-05,
+ "loss": 0.7845,
+ "step": 1497
+ },
+ {
+ "epoch": 0.29,
+ "learning_rate": 1.6699332996537795e-05,
+ "loss": 0.8221,
+ "step": 1498
+ },
+ {
+ "epoch": 0.29,
+ "learning_rate": 1.6694703027884802e-05,
+ "loss": 0.8959,
+ "step": 1499
+ },
+ {
+ "epoch": 0.29,
+ "learning_rate": 1.669007045702187e-05,
+ "loss": 0.8668,
+ "step": 1500
+ },
+ {
+ "epoch": 0.29,
+ "learning_rate": 1.668543528574967e-05,
+ "loss": 1.8623,
+ "step": 1501
+ },
+ {
+ "epoch": 0.29,
+ "learning_rate": 1.6680797515869868e-05,
+ "loss": 0.8455,
+ "step": 1502
+ },
+ {
+ "epoch": 0.29,
+ "learning_rate": 1.6676157149185152e-05,
+ "loss": 0.8584,
+ "step": 1503
+ },
+ {
+ "epoch": 0.29,
+ "learning_rate": 1.6671514187499226e-05,
+ "loss": 0.7923,
+ "step": 1504
+ },
+ {
+ "epoch": 0.29,
+ "learning_rate": 1.6666868632616786e-05,
+ "loss": 0.845,
+ "step": 1505
+ },
+ {
+ "epoch": 0.29,
+ "learning_rate": 1.6662220486343553e-05,
+ "loss": 0.8557,
+ "step": 1506
+ },
+ {
+ "epoch": 0.29,
+ "learning_rate": 1.6657569750486238e-05,
+ "loss": 0.8348,
+ "step": 1507
+ },
+ {
+ "epoch": 0.29,
+ "learning_rate": 1.665291642685257e-05,
+ "loss": 0.9038,
+ "step": 1508
+ },
+ {
+ "epoch": 0.29,
+ "learning_rate": 1.6648260517251286e-05,
+ "loss": 0.9114,
+ "step": 1509
+ },
+ {
+ "epoch": 0.29,
+ "learning_rate": 1.6643602023492116e-05,
+ "loss": 0.8913,
+ "step": 1510
+ },
+ {
+ "epoch": 0.29,
+ "learning_rate": 1.6638940947385806e-05,
+ "loss": 0.7826,
+ "step": 1511
+ },
+ {
+ "epoch": 0.29,
+ "learning_rate": 1.6634277290744105e-05,
+ "loss": 0.8305,
+ "step": 1512
+ },
+ {
+ "epoch": 0.29,
+ "learning_rate": 1.6629611055379754e-05,
+ "loss": 1.6209,
+ "step": 1513
+ },
+ {
+ "epoch": 0.29,
+ "learning_rate": 1.662494224310651e-05,
+ "loss": 0.8451,
+ "step": 1514
+ },
+ {
+ "epoch": 0.29,
+ "learning_rate": 1.662027085573912e-05,
+ "loss": 1.7254,
+ "step": 1515
+ },
+ {
+ "epoch": 0.29,
+ "learning_rate": 1.6615596895093342e-05,
+ "loss": 0.7847,
+ "step": 1516
+ },
+ {
+ "epoch": 0.29,
+ "learning_rate": 1.6610920362985932e-05,
+ "loss": 0.8087,
+ "step": 1517
+ },
+ {
+ "epoch": 0.29,
+ "learning_rate": 1.6606241261234637e-05,
+ "loss": 0.8333,
+ "step": 1518
+ },
+ {
+ "epoch": 0.29,
+ "learning_rate": 1.660155959165821e-05,
+ "loss": 0.8033,
+ "step": 1519
+ },
+ {
+ "epoch": 0.29,
+ "learning_rate": 1.6596875356076403e-05,
+ "loss": 0.8543,
+ "step": 1520
+ },
+ {
+ "epoch": 0.29,
+ "learning_rate": 1.659218855630997e-05,
+ "loss": 0.8511,
+ "step": 1521
+ },
+ {
+ "epoch": 0.29,
+ "learning_rate": 1.6587499194180644e-05,
+ "loss": 0.8544,
+ "step": 1522
+ },
+ {
+ "epoch": 0.29,
+ "learning_rate": 1.6582807271511172e-05,
+ "loss": 0.8746,
+ "step": 1523
+ },
+ {
+ "epoch": 0.29,
+ "learning_rate": 1.657811279012529e-05,
+ "loss": 0.8305,
+ "step": 1524
+ },
+ {
+ "epoch": 0.29,
+ "learning_rate": 1.6573415751847722e-05,
+ "loss": 0.8515,
+ "step": 1525
+ },
+ {
+ "epoch": 0.29,
+ "learning_rate": 1.6568716158504194e-05,
+ "loss": 0.8408,
+ "step": 1526
+ },
+ {
+ "epoch": 0.29,
+ "learning_rate": 1.656401401192143e-05,
+ "loss": 0.8428,
+ "step": 1527
+ },
+ {
+ "epoch": 0.29,
+ "learning_rate": 1.655930931392713e-05,
+ "loss": 0.8743,
+ "step": 1528
+ },
+ {
+ "epoch": 0.29,
+ "learning_rate": 1.6554602066350003e-05,
+ "loss": 0.7795,
+ "step": 1529
+ },
+ {
+ "epoch": 0.29,
+ "learning_rate": 1.654989227101974e-05,
+ "loss": 0.8939,
+ "step": 1530
+ },
+ {
+ "epoch": 0.29,
+ "learning_rate": 1.6545179929767017e-05,
+ "loss": 0.8193,
+ "step": 1531
+ },
+ {
+ "epoch": 0.29,
+ "learning_rate": 1.6540465044423513e-05,
+ "loss": 0.8758,
+ "step": 1532
+ },
+ {
+ "epoch": 0.3,
+ "learning_rate": 1.6535747616821884e-05,
+ "loss": 0.8739,
+ "step": 1533
+ },
+ {
+ "epoch": 0.3,
+ "learning_rate": 1.6531027648795784e-05,
+ "loss": 0.8514,
+ "step": 1534
+ },
+ {
+ "epoch": 0.3,
+ "learning_rate": 1.652630514217985e-05,
+ "loss": 0.8472,
+ "step": 1535
+ },
+ {
+ "epoch": 0.3,
+ "learning_rate": 1.65215800988097e-05,
+ "loss": 0.8152,
+ "step": 1536
+ },
+ {
+ "epoch": 0.3,
+ "learning_rate": 1.6516852520521946e-05,
+ "loss": 0.83,
+ "step": 1537
+ },
+ {
+ "epoch": 0.3,
+ "learning_rate": 1.6512122409154184e-05,
+ "loss": 0.8183,
+ "step": 1538
+ },
+ {
+ "epoch": 0.3,
+ "learning_rate": 1.6507389766544997e-05,
+ "loss": 1.8315,
+ "step": 1539
+ },
+ {
+ "epoch": 0.3,
+ "learning_rate": 1.6502654594533938e-05,
+ "loss": 0.7601,
+ "step": 1540
+ },
+ {
+ "epoch": 0.3,
+ "learning_rate": 1.649791689496156e-05,
+ "loss": 0.8073,
+ "step": 1541
+ },
+ {
+ "epoch": 0.3,
+ "learning_rate": 1.6493176669669396e-05,
+ "loss": 0.8772,
+ "step": 1542
+ },
+ {
+ "epoch": 0.3,
+ "learning_rate": 1.648843392049995e-05,
+ "loss": 0.8429,
+ "step": 1543
+ },
+ {
+ "epoch": 0.3,
+ "learning_rate": 1.6483688649296714e-05,
+ "loss": 0.8025,
+ "step": 1544
+ },
+ {
+ "epoch": 0.3,
+ "learning_rate": 1.6478940857904163e-05,
+ "loss": 0.8581,
+ "step": 1545
+ },
+ {
+ "epoch": 0.3,
+ "learning_rate": 1.6474190548167744e-05,
+ "loss": 0.8801,
+ "step": 1546
+ },
+ {
+ "epoch": 0.3,
+ "learning_rate": 1.6469437721933892e-05,
+ "loss": 0.8358,
+ "step": 1547
+ },
+ {
+ "epoch": 0.3,
+ "learning_rate": 1.6464682381050015e-05,
+ "loss": 0.8258,
+ "step": 1548
+ },
+ {
+ "epoch": 0.3,
+ "learning_rate": 1.6459924527364496e-05,
+ "loss": 0.8601,
+ "step": 1549
+ },
+ {
+ "epoch": 0.3,
+ "learning_rate": 1.64551641627267e-05,
+ "loss": 0.8507,
+ "step": 1550
+ },
+ {
+ "epoch": 0.3,
+ "learning_rate": 1.645040128898697e-05,
+ "loss": 0.8482,
+ "step": 1551
+ },
+ {
+ "epoch": 0.3,
+ "learning_rate": 1.6445635907996612e-05,
+ "loss": 0.8066,
+ "step": 1552
+ },
+ {
+ "epoch": 0.3,
+ "learning_rate": 1.6440868021607915e-05,
+ "loss": 0.8264,
+ "step": 1553
+ },
+ {
+ "epoch": 0.3,
+ "learning_rate": 1.6436097631674148e-05,
+ "loss": 0.888,
+ "step": 1554
+ },
+ {
+ "epoch": 0.3,
+ "learning_rate": 1.643132474004954e-05,
+ "loss": 0.7981,
+ "step": 1555
+ },
+ {
+ "epoch": 0.3,
+ "learning_rate": 1.64265493485893e-05,
+ "loss": 0.8505,
+ "step": 1556
+ },
+ {
+ "epoch": 0.3,
+ "learning_rate": 1.6421771459149615e-05,
+ "loss": 0.8453,
+ "step": 1557
+ },
+ {
+ "epoch": 0.3,
+ "learning_rate": 1.6416991073587628e-05,
+ "loss": 0.8966,
+ "step": 1558
+ },
+ {
+ "epoch": 0.3,
+ "learning_rate": 1.641220819376146e-05,
+ "loss": 0.8158,
+ "step": 1559
+ },
+ {
+ "epoch": 0.3,
+ "learning_rate": 1.6407422821530207e-05,
+ "loss": 0.874,
+ "step": 1560
+ },
+ {
+ "epoch": 0.3,
+ "learning_rate": 1.640263495875392e-05,
+ "loss": 0.8357,
+ "step": 1561
+ },
+ {
+ "epoch": 0.3,
+ "learning_rate": 1.639784460729363e-05,
+ "loss": 0.8826,
+ "step": 1562
+ },
+ {
+ "epoch": 0.3,
+ "learning_rate": 1.6393051769011333e-05,
+ "loss": 0.8547,
+ "step": 1563
+ },
+ {
+ "epoch": 0.3,
+ "learning_rate": 1.6388256445769987e-05,
+ "loss": 0.8261,
+ "step": 1564
+ },
+ {
+ "epoch": 0.3,
+ "learning_rate": 1.6383458639433522e-05,
+ "loss": 1.8858,
+ "step": 1565
+ },
+ {
+ "epoch": 0.3,
+ "learning_rate": 1.637865835186683e-05,
+ "loss": 0.9009,
+ "step": 1566
+ },
+ {
+ "epoch": 0.3,
+ "learning_rate": 1.6373855584935765e-05,
+ "loss": 0.8377,
+ "step": 1567
+ },
+ {
+ "epoch": 0.3,
+ "learning_rate": 1.6369050340507152e-05,
+ "loss": 0.85,
+ "step": 1568
+ },
+ {
+ "epoch": 0.3,
+ "learning_rate": 1.6364242620448767e-05,
+ "loss": 0.893,
+ "step": 1569
+ },
+ {
+ "epoch": 0.3,
+ "learning_rate": 1.6359432426629363e-05,
+ "loss": 0.8388,
+ "step": 1570
+ },
+ {
+ "epoch": 0.3,
+ "learning_rate": 1.635461976091864e-05,
+ "loss": 0.8559,
+ "step": 1571
+ },
+ {
+ "epoch": 0.3,
+ "learning_rate": 1.6349804625187275e-05,
+ "loss": 0.8778,
+ "step": 1572
+ },
+ {
+ "epoch": 0.3,
+ "learning_rate": 1.634498702130689e-05,
+ "loss": 0.7816,
+ "step": 1573
+ },
+ {
+ "epoch": 0.3,
+ "learning_rate": 1.634016695115007e-05,
+ "loss": 0.9,
+ "step": 1574
+ },
+ {
+ "epoch": 0.3,
+ "learning_rate": 1.633534441659037e-05,
+ "loss": 0.8755,
+ "step": 1575
+ },
+ {
+ "epoch": 0.3,
+ "learning_rate": 1.633051941950228e-05,
+ "loss": 0.8531,
+ "step": 1576
+ },
+ {
+ "epoch": 0.3,
+ "learning_rate": 1.632569196176128e-05,
+ "loss": 0.7804,
+ "step": 1577
+ },
+ {
+ "epoch": 0.3,
+ "learning_rate": 1.632086204524377e-05,
+ "loss": 0.8402,
+ "step": 1578
+ },
+ {
+ "epoch": 0.3,
+ "learning_rate": 1.6316029671827137e-05,
+ "loss": 0.8618,
+ "step": 1579
+ },
+ {
+ "epoch": 0.3,
+ "learning_rate": 1.6311194843389697e-05,
+ "loss": 0.7932,
+ "step": 1580
+ },
+ {
+ "epoch": 0.3,
+ "learning_rate": 1.6306357561810743e-05,
+ "loss": 0.8246,
+ "step": 1581
+ },
+ {
+ "epoch": 0.3,
+ "learning_rate": 1.63015178289705e-05,
+ "loss": 0.8525,
+ "step": 1582
+ },
+ {
+ "epoch": 0.3,
+ "learning_rate": 1.6296675646750168e-05,
+ "loss": 0.8044,
+ "step": 1583
+ },
+ {
+ "epoch": 0.3,
+ "learning_rate": 1.629183101703188e-05,
+ "loss": 0.8145,
+ "step": 1584
+ },
+ {
+ "epoch": 0.31,
+ "learning_rate": 1.6286983941698737e-05,
+ "loss": 0.8282,
+ "step": 1585
+ },
+ {
+ "epoch": 0.31,
+ "learning_rate": 1.6282134422634778e-05,
+ "loss": 0.8566,
+ "step": 1586
+ },
+ {
+ "epoch": 0.31,
+ "learning_rate": 1.627728246172499e-05,
+ "loss": 0.8895,
+ "step": 1587
+ },
+ {
+ "epoch": 0.31,
+ "learning_rate": 1.6272428060855316e-05,
+ "loss": 0.8158,
+ "step": 1588
+ },
+ {
+ "epoch": 0.31,
+ "learning_rate": 1.626757122191265e-05,
+ "loss": 0.8273,
+ "step": 1589
+ },
+ {
+ "epoch": 0.31,
+ "learning_rate": 1.6262711946784835e-05,
+ "loss": 0.7935,
+ "step": 1590
+ },
+ {
+ "epoch": 0.31,
+ "learning_rate": 1.6257850237360647e-05,
+ "loss": 0.8395,
+ "step": 1591
+ },
+ {
+ "epoch": 0.31,
+ "learning_rate": 1.625298609552982e-05,
+ "loss": 0.8705,
+ "step": 1592
+ },
+ {
+ "epoch": 0.31,
+ "learning_rate": 1.6248119523183032e-05,
+ "loss": 0.8016,
+ "step": 1593
+ },
+ {
+ "epoch": 0.31,
+ "learning_rate": 1.6243250522211903e-05,
+ "loss": 0.805,
+ "step": 1594
+ },
+ {
+ "epoch": 0.31,
+ "learning_rate": 1.6238379094509e-05,
+ "loss": 0.8285,
+ "step": 1595
+ },
+ {
+ "epoch": 0.31,
+ "learning_rate": 1.623350524196783e-05,
+ "loss": 0.8699,
+ "step": 1596
+ },
+ {
+ "epoch": 0.31,
+ "learning_rate": 1.6228628966482844e-05,
+ "loss": 0.8717,
+ "step": 1597
+ },
+ {
+ "epoch": 0.31,
+ "learning_rate": 1.6223750269949433e-05,
+ "loss": 0.8302,
+ "step": 1598
+ },
+ {
+ "epoch": 0.31,
+ "learning_rate": 1.6218869154263934e-05,
+ "loss": 0.8422,
+ "step": 1599
+ },
+ {
+ "epoch": 0.31,
+ "learning_rate": 1.6213985621323624e-05,
+ "loss": 0.8615,
+ "step": 1600
+ },
+ {
+ "epoch": 0.31,
+ "learning_rate": 1.6209099673026706e-05,
+ "loss": 0.8357,
+ "step": 1601
+ },
+ {
+ "epoch": 0.31,
+ "learning_rate": 1.620421131127235e-05,
+ "loss": 0.7996,
+ "step": 1602
+ },
+ {
+ "epoch": 0.31,
+ "learning_rate": 1.6199320537960627e-05,
+ "loss": 0.7845,
+ "step": 1603
+ },
+ {
+ "epoch": 0.31,
+ "learning_rate": 1.6194427354992578e-05,
+ "loss": 0.8044,
+ "step": 1604
+ },
+ {
+ "epoch": 0.31,
+ "learning_rate": 1.6189531764270167e-05,
+ "loss": 0.864,
+ "step": 1605
+ },
+ {
+ "epoch": 0.31,
+ "learning_rate": 1.6184633767696292e-05,
+ "loss": 0.7957,
+ "step": 1606
+ },
+ {
+ "epoch": 0.31,
+ "learning_rate": 1.617973336717479e-05,
+ "loss": 0.8188,
+ "step": 1607
+ },
+ {
+ "epoch": 0.31,
+ "learning_rate": 1.6174830564610426e-05,
+ "loss": 0.8218,
+ "step": 1608
+ },
+ {
+ "epoch": 0.31,
+ "learning_rate": 1.6169925361908912e-05,
+ "loss": 0.9085,
+ "step": 1609
+ },
+ {
+ "epoch": 0.31,
+ "learning_rate": 1.6165017760976878e-05,
+ "loss": 0.8224,
+ "step": 1610
+ },
+ {
+ "epoch": 0.31,
+ "learning_rate": 1.61601077637219e-05,
+ "loss": 0.8329,
+ "step": 1611
+ },
+ {
+ "epoch": 0.31,
+ "learning_rate": 1.6155195372052473e-05,
+ "loss": 0.8712,
+ "step": 1612
+ },
+ {
+ "epoch": 0.31,
+ "learning_rate": 1.615028058787803e-05,
+ "loss": 0.8563,
+ "step": 1613
+ },
+ {
+ "epoch": 0.31,
+ "learning_rate": 1.6145363413108927e-05,
+ "loss": 0.8338,
+ "step": 1614
+ },
+ {
+ "epoch": 0.31,
+ "learning_rate": 1.6140443849656466e-05,
+ "loss": 0.8401,
+ "step": 1615
+ },
+ {
+ "epoch": 0.31,
+ "learning_rate": 1.6135521899432854e-05,
+ "loss": 0.8838,
+ "step": 1616
+ },
+ {
+ "epoch": 0.31,
+ "learning_rate": 1.6130597564351245e-05,
+ "loss": 0.802,
+ "step": 1617
+ },
+ {
+ "epoch": 0.31,
+ "learning_rate": 1.6125670846325707e-05,
+ "loss": 0.8218,
+ "step": 1618
+ },
+ {
+ "epoch": 0.31,
+ "learning_rate": 1.612074174727125e-05,
+ "loss": 0.849,
+ "step": 1619
+ },
+ {
+ "epoch": 0.31,
+ "learning_rate": 1.6115810269103783e-05,
+ "loss": 0.8457,
+ "step": 1620
+ },
+ {
+ "epoch": 0.31,
+ "learning_rate": 1.6110876413740168e-05,
+ "loss": 0.8197,
+ "step": 1621
+ },
+ {
+ "epoch": 0.31,
+ "learning_rate": 1.6105940183098176e-05,
+ "loss": 0.8825,
+ "step": 1622
+ },
+ {
+ "epoch": 0.31,
+ "learning_rate": 1.6101001579096504e-05,
+ "loss": 0.8639,
+ "step": 1623
+ },
+ {
+ "epoch": 0.31,
+ "learning_rate": 1.6096060603654775e-05,
+ "loss": 1.6711,
+ "step": 1624
+ },
+ {
+ "epoch": 0.31,
+ "learning_rate": 1.6091117258693524e-05,
+ "loss": 1.7162,
+ "step": 1625
+ },
+ {
+ "epoch": 0.31,
+ "learning_rate": 1.6086171546134216e-05,
+ "loss": 0.8373,
+ "step": 1626
+ },
+ {
+ "epoch": 0.31,
+ "learning_rate": 1.6081223467899236e-05,
+ "loss": 0.8117,
+ "step": 1627
+ },
+ {
+ "epoch": 0.31,
+ "learning_rate": 1.6076273025911886e-05,
+ "loss": 0.8525,
+ "step": 1628
+ },
+ {
+ "epoch": 0.31,
+ "learning_rate": 1.6071320222096387e-05,
+ "loss": 0.7917,
+ "step": 1629
+ },
+ {
+ "epoch": 0.31,
+ "learning_rate": 1.6066365058377877e-05,
+ "loss": 0.8782,
+ "step": 1630
+ },
+ {
+ "epoch": 0.31,
+ "learning_rate": 1.606140753668241e-05,
+ "loss": 0.8134,
+ "step": 1631
+ },
+ {
+ "epoch": 0.31,
+ "learning_rate": 1.605644765893697e-05,
+ "loss": 1.6413,
+ "step": 1632
+ },
+ {
+ "epoch": 0.31,
+ "learning_rate": 1.6051485427069434e-05,
+ "loss": 0.8495,
+ "step": 1633
+ },
+ {
+ "epoch": 0.31,
+ "learning_rate": 1.604652084300861e-05,
+ "loss": 0.7985,
+ "step": 1634
+ },
+ {
+ "epoch": 0.31,
+ "learning_rate": 1.6041553908684214e-05,
+ "loss": 0.8142,
+ "step": 1635
+ },
+ {
+ "epoch": 0.31,
+ "learning_rate": 1.603658462602688e-05,
+ "loss": 0.8213,
+ "step": 1636
+ },
+ {
+ "epoch": 0.32,
+ "learning_rate": 1.6031612996968157e-05,
+ "loss": 0.8295,
+ "step": 1637
+ },
+ {
+ "epoch": 0.32,
+ "learning_rate": 1.602663902344049e-05,
+ "loss": 0.7917,
+ "step": 1638
+ },
+ {
+ "epoch": 0.32,
+ "learning_rate": 1.602166270737726e-05,
+ "loss": 0.8858,
+ "step": 1639
+ },
+ {
+ "epoch": 0.32,
+ "learning_rate": 1.6016684050712735e-05,
+ "loss": 0.8878,
+ "step": 1640
+ },
+ {
+ "epoch": 0.32,
+ "learning_rate": 1.6011703055382107e-05,
+ "loss": 0.7884,
+ "step": 1641
+ },
+ {
+ "epoch": 0.32,
+ "learning_rate": 1.6006719723321477e-05,
+ "loss": 0.783,
+ "step": 1642
+ },
+ {
+ "epoch": 0.32,
+ "learning_rate": 1.6001734056467844e-05,
+ "loss": 0.8655,
+ "step": 1643
+ },
+ {
+ "epoch": 0.32,
+ "learning_rate": 1.5996746056759123e-05,
+ "loss": 0.7917,
+ "step": 1644
+ },
+ {
+ "epoch": 0.32,
+ "learning_rate": 1.5991755726134134e-05,
+ "loss": 0.8301,
+ "step": 1645
+ },
+ {
+ "epoch": 0.32,
+ "learning_rate": 1.5986763066532608e-05,
+ "loss": 1.6918,
+ "step": 1646
+ },
+ {
+ "epoch": 0.32,
+ "learning_rate": 1.598176807989517e-05,
+ "loss": 0.8099,
+ "step": 1647
+ },
+ {
+ "epoch": 0.32,
+ "learning_rate": 1.5976770768163352e-05,
+ "loss": 0.8558,
+ "step": 1648
+ },
+ {
+ "epoch": 0.32,
+ "learning_rate": 1.59717711332796e-05,
+ "loss": 0.7908,
+ "step": 1649
+ },
+ {
+ "epoch": 0.32,
+ "learning_rate": 1.5966769177187254e-05,
+ "loss": 0.787,
+ "step": 1650
+ },
+ {
+ "epoch": 0.32,
+ "learning_rate": 1.5961764901830563e-05,
+ "loss": 0.8302,
+ "step": 1651
+ },
+ {
+ "epoch": 0.32,
+ "learning_rate": 1.5956758309154664e-05,
+ "loss": 0.853,
+ "step": 1652
+ },
+ {
+ "epoch": 0.32,
+ "learning_rate": 1.595174940110561e-05,
+ "loss": 0.885,
+ "step": 1653
+ },
+ {
+ "epoch": 0.32,
+ "learning_rate": 1.594673817963035e-05,
+ "loss": 0.84,
+ "step": 1654
+ },
+ {
+ "epoch": 0.32,
+ "learning_rate": 1.5941724646676725e-05,
+ "loss": 0.8177,
+ "step": 1655
+ },
+ {
+ "epoch": 0.32,
+ "learning_rate": 1.5936708804193478e-05,
+ "loss": 0.844,
+ "step": 1656
+ },
+ {
+ "epoch": 0.32,
+ "learning_rate": 1.5931690654130255e-05,
+ "loss": 0.8786,
+ "step": 1657
+ },
+ {
+ "epoch": 0.32,
+ "learning_rate": 1.5926670198437596e-05,
+ "loss": 0.8393,
+ "step": 1658
+ },
+ {
+ "epoch": 0.32,
+ "learning_rate": 1.5921647439066933e-05,
+ "loss": 0.8444,
+ "step": 1659
+ },
+ {
+ "epoch": 0.32,
+ "learning_rate": 1.59166223779706e-05,
+ "loss": 0.8338,
+ "step": 1660
+ },
+ {
+ "epoch": 0.32,
+ "learning_rate": 1.5911595017101817e-05,
+ "loss": 0.7944,
+ "step": 1661
+ },
+ {
+ "epoch": 0.32,
+ "learning_rate": 1.590656535841471e-05,
+ "loss": 0.8133,
+ "step": 1662
+ },
+ {
+ "epoch": 0.32,
+ "learning_rate": 1.5901533403864286e-05,
+ "loss": 0.8527,
+ "step": 1663
+ },
+ {
+ "epoch": 0.32,
+ "learning_rate": 1.5896499155406453e-05,
+ "loss": 0.8175,
+ "step": 1664
+ },
+ {
+ "epoch": 0.32,
+ "learning_rate": 1.589146261499801e-05,
+ "loss": 0.8081,
+ "step": 1665
+ },
+ {
+ "epoch": 0.32,
+ "learning_rate": 1.5886423784596635e-05,
+ "loss": 0.851,
+ "step": 1666
+ },
+ {
+ "epoch": 0.32,
+ "learning_rate": 1.5881382666160917e-05,
+ "loss": 0.8288,
+ "step": 1667
+ },
+ {
+ "epoch": 0.32,
+ "learning_rate": 1.5876339261650313e-05,
+ "loss": 1.6439,
+ "step": 1668
+ },
+ {
+ "epoch": 0.32,
+ "learning_rate": 1.5871293573025188e-05,
+ "loss": 0.8485,
+ "step": 1669
+ },
+ {
+ "epoch": 0.32,
+ "learning_rate": 1.5866245602246775e-05,
+ "loss": 0.8494,
+ "step": 1670
+ },
+ {
+ "epoch": 0.32,
+ "learning_rate": 1.586119535127721e-05,
+ "loss": 0.8036,
+ "step": 1671
+ },
+ {
+ "epoch": 0.32,
+ "learning_rate": 1.585614282207951e-05,
+ "loss": 1.6353,
+ "step": 1672
+ },
+ {
+ "epoch": 0.32,
+ "learning_rate": 1.5851088016617575e-05,
+ "loss": 0.8154,
+ "step": 1673
+ },
+ {
+ "epoch": 0.32,
+ "learning_rate": 1.5846030936856193e-05,
+ "loss": 0.8676,
+ "step": 1674
+ },
+ {
+ "epoch": 0.32,
+ "learning_rate": 1.5840971584761037e-05,
+ "loss": 0.8256,
+ "step": 1675
+ },
+ {
+ "epoch": 0.32,
+ "learning_rate": 1.5835909962298654e-05,
+ "loss": 0.8608,
+ "step": 1676
+ },
+ {
+ "epoch": 0.32,
+ "learning_rate": 1.5830846071436492e-05,
+ "loss": 0.8425,
+ "step": 1677
+ },
+ {
+ "epoch": 0.32,
+ "learning_rate": 1.582577991414286e-05,
+ "loss": 0.8412,
+ "step": 1678
+ },
+ {
+ "epoch": 0.32,
+ "learning_rate": 1.5820711492386963e-05,
+ "loss": 0.9138,
+ "step": 1679
+ },
+ {
+ "epoch": 0.32,
+ "learning_rate": 1.5815640808138876e-05,
+ "loss": 0.7383,
+ "step": 1680
+ },
+ {
+ "epoch": 0.32,
+ "learning_rate": 1.581056786336956e-05,
+ "loss": 0.7589,
+ "step": 1681
+ },
+ {
+ "epoch": 0.32,
+ "learning_rate": 1.5805492660050852e-05,
+ "loss": 0.7716,
+ "step": 1682
+ },
+ {
+ "epoch": 0.32,
+ "learning_rate": 1.580041520015547e-05,
+ "loss": 0.8291,
+ "step": 1683
+ },
+ {
+ "epoch": 0.32,
+ "learning_rate": 1.5795335485657003e-05,
+ "loss": 0.8508,
+ "step": 1684
+ },
+ {
+ "epoch": 0.32,
+ "learning_rate": 1.5790253518529923e-05,
+ "loss": 0.8638,
+ "step": 1685
+ },
+ {
+ "epoch": 0.32,
+ "learning_rate": 1.5785169300749576e-05,
+ "loss": 0.7749,
+ "step": 1686
+ },
+ {
+ "epoch": 0.32,
+ "learning_rate": 1.578008283429217e-05,
+ "loss": 0.8065,
+ "step": 1687
+ },
+ {
+ "epoch": 0.32,
+ "learning_rate": 1.5774994121134816e-05,
+ "loss": 0.7763,
+ "step": 1688
+ },
+ {
+ "epoch": 0.33,
+ "learning_rate": 1.5769903163255468e-05,
+ "loss": 0.8204,
+ "step": 1689
+ },
+ {
+ "epoch": 0.33,
+ "learning_rate": 1.576480996263297e-05,
+ "loss": 0.8114,
+ "step": 1690
+ },
+ {
+ "epoch": 0.33,
+ "learning_rate": 1.5759714521247036e-05,
+ "loss": 1.5978,
+ "step": 1691
+ },
+ {
+ "epoch": 0.33,
+ "learning_rate": 1.575461684107824e-05,
+ "loss": 0.8252,
+ "step": 1692
+ },
+ {
+ "epoch": 0.33,
+ "learning_rate": 1.574951692410804e-05,
+ "loss": 0.8237,
+ "step": 1693
+ },
+ {
+ "epoch": 0.33,
+ "learning_rate": 1.5744414772318756e-05,
+ "loss": 0.7976,
+ "step": 1694
+ },
+ {
+ "epoch": 0.33,
+ "learning_rate": 1.5739310387693578e-05,
+ "loss": 0.8221,
+ "step": 1695
+ },
+ {
+ "epoch": 0.33,
+ "learning_rate": 1.5734203772216564e-05,
+ "loss": 0.851,
+ "step": 1696
+ },
+ {
+ "epoch": 0.33,
+ "learning_rate": 1.5729094927872642e-05,
+ "loss": 0.864,
+ "step": 1697
+ },
+ {
+ "epoch": 0.33,
+ "learning_rate": 1.5723983856647595e-05,
+ "loss": 0.8408,
+ "step": 1698
+ },
+ {
+ "epoch": 0.33,
+ "learning_rate": 1.5718870560528096e-05,
+ "loss": 0.8395,
+ "step": 1699
+ },
+ {
+ "epoch": 0.33,
+ "learning_rate": 1.5713755041501652e-05,
+ "loss": 0.8535,
+ "step": 1700
+ },
+ {
+ "epoch": 0.33,
+ "learning_rate": 1.5708637301556657e-05,
+ "loss": 0.8563,
+ "step": 1701
+ },
+ {
+ "epoch": 0.33,
+ "learning_rate": 1.5703517342682357e-05,
+ "loss": 0.833,
+ "step": 1702
+ },
+ {
+ "epoch": 0.33,
+ "learning_rate": 1.5698395166868867e-05,
+ "loss": 0.8983,
+ "step": 1703
+ },
+ {
+ "epoch": 0.33,
+ "learning_rate": 1.5693270776107157e-05,
+ "loss": 0.746,
+ "step": 1704
+ },
+ {
+ "epoch": 0.33,
+ "learning_rate": 1.5688144172389067e-05,
+ "loss": 0.8429,
+ "step": 1705
+ },
+ {
+ "epoch": 0.33,
+ "learning_rate": 1.5683015357707282e-05,
+ "loss": 0.7763,
+ "step": 1706
+ },
+ {
+ "epoch": 0.33,
+ "learning_rate": 1.567788433405537e-05,
+ "loss": 0.8275,
+ "step": 1707
+ },
+ {
+ "epoch": 0.33,
+ "learning_rate": 1.5672751103427733e-05,
+ "loss": 0.8128,
+ "step": 1708
+ },
+ {
+ "epoch": 0.33,
+ "learning_rate": 1.566761566781964e-05,
+ "loss": 0.8845,
+ "step": 1709
+ },
+ {
+ "epoch": 0.33,
+ "learning_rate": 1.5662478029227225e-05,
+ "loss": 0.8889,
+ "step": 1710
+ },
+ {
+ "epoch": 0.33,
+ "learning_rate": 1.5657338189647472e-05,
+ "loss": 0.8327,
+ "step": 1711
+ },
+ {
+ "epoch": 0.33,
+ "learning_rate": 1.5652196151078218e-05,
+ "loss": 0.854,
+ "step": 1712
+ },
+ {
+ "epoch": 0.33,
+ "learning_rate": 1.5647051915518154e-05,
+ "loss": 0.8242,
+ "step": 1713
+ },
+ {
+ "epoch": 0.33,
+ "learning_rate": 1.5641905484966834e-05,
+ "loss": 0.8747,
+ "step": 1714
+ },
+ {
+ "epoch": 0.33,
+ "learning_rate": 1.5636756861424656e-05,
+ "loss": 0.8797,
+ "step": 1715
+ },
+ {
+ "epoch": 0.33,
+ "learning_rate": 1.5631606046892877e-05,
+ "loss": 1.6012,
+ "step": 1716
+ },
+ {
+ "epoch": 0.33,
+ "learning_rate": 1.5626453043373598e-05,
+ "loss": 0.8517,
+ "step": 1717
+ },
+ {
+ "epoch": 0.33,
+ "learning_rate": 1.5621297852869778e-05,
+ "loss": 0.8197,
+ "step": 1718
+ },
+ {
+ "epoch": 0.33,
+ "learning_rate": 1.561614047738522e-05,
+ "loss": 0.825,
+ "step": 1719
+ },
+ {
+ "epoch": 0.33,
+ "learning_rate": 1.5610980918924586e-05,
+ "loss": 0.8355,
+ "step": 1720
+ },
+ {
+ "epoch": 0.33,
+ "learning_rate": 1.5605819179493374e-05,
+ "loss": 0.8437,
+ "step": 1721
+ },
+ {
+ "epoch": 0.33,
+ "learning_rate": 1.5600655261097938e-05,
+ "loss": 0.7772,
+ "step": 1722
+ },
+ {
+ "epoch": 0.33,
+ "learning_rate": 1.5595489165745476e-05,
+ "loss": 0.8435,
+ "step": 1723
+ },
+ {
+ "epoch": 0.33,
+ "learning_rate": 1.5590320895444035e-05,
+ "loss": 0.7808,
+ "step": 1724
+ },
+ {
+ "epoch": 0.33,
+ "learning_rate": 1.5585150452202502e-05,
+ "loss": 0.8486,
+ "step": 1725
+ },
+ {
+ "epoch": 0.33,
+ "learning_rate": 1.5579977838030617e-05,
+ "loss": 0.8476,
+ "step": 1726
+ },
+ {
+ "epoch": 0.33,
+ "learning_rate": 1.557480305493895e-05,
+ "loss": 0.8439,
+ "step": 1727
+ },
+ {
+ "epoch": 0.33,
+ "learning_rate": 1.5569626104938934e-05,
+ "loss": 0.8229,
+ "step": 1728
+ },
+ {
+ "epoch": 0.33,
+ "learning_rate": 1.556444699004283e-05,
+ "loss": 0.8184,
+ "step": 1729
+ },
+ {
+ "epoch": 0.33,
+ "learning_rate": 1.5559265712263737e-05,
+ "loss": 0.82,
+ "step": 1730
+ },
+ {
+ "epoch": 0.33,
+ "learning_rate": 1.5554082273615608e-05,
+ "loss": 0.8273,
+ "step": 1731
+ },
+ {
+ "epoch": 0.33,
+ "learning_rate": 1.5548896676113223e-05,
+ "loss": 0.87,
+ "step": 1732
+ },
+ {
+ "epoch": 0.33,
+ "learning_rate": 1.5543708921772217e-05,
+ "loss": 0.7973,
+ "step": 1733
+ },
+ {
+ "epoch": 0.33,
+ "learning_rate": 1.5538519012609043e-05,
+ "loss": 0.8263,
+ "step": 1734
+ },
+ {
+ "epoch": 0.33,
+ "learning_rate": 1.553332695064101e-05,
+ "loss": 0.8861,
+ "step": 1735
+ },
+ {
+ "epoch": 0.33,
+ "learning_rate": 1.5528132737886252e-05,
+ "loss": 0.8313,
+ "step": 1736
+ },
+ {
+ "epoch": 0.33,
+ "learning_rate": 1.5522936376363747e-05,
+ "loss": 0.8094,
+ "step": 1737
+ },
+ {
+ "epoch": 0.33,
+ "learning_rate": 1.55177378680933e-05,
+ "loss": 0.8757,
+ "step": 1738
+ },
+ {
+ "epoch": 0.33,
+ "learning_rate": 1.551253721509556e-05,
+ "loss": 0.7988,
+ "step": 1739
+ },
+ {
+ "epoch": 0.33,
+ "learning_rate": 1.5507334419392e-05,
+ "loss": 0.7982,
+ "step": 1740
+ },
+ {
+ "epoch": 0.34,
+ "learning_rate": 1.5502129483004927e-05,
+ "loss": 0.8332,
+ "step": 1741
+ },
+ {
+ "epoch": 0.34,
+ "learning_rate": 1.5496922407957492e-05,
+ "loss": 0.8338,
+ "step": 1742
+ },
+ {
+ "epoch": 0.34,
+ "learning_rate": 1.549171319627366e-05,
+ "loss": 0.8083,
+ "step": 1743
+ },
+ {
+ "epoch": 0.34,
+ "learning_rate": 1.5486501849978244e-05,
+ "loss": 0.8386,
+ "step": 1744
+ },
+ {
+ "epoch": 0.34,
+ "learning_rate": 1.548128837109687e-05,
+ "loss": 0.8316,
+ "step": 1745
+ },
+ {
+ "epoch": 0.34,
+ "learning_rate": 1.547607276165601e-05,
+ "loss": 0.7864,
+ "step": 1746
+ },
+ {
+ "epoch": 0.34,
+ "learning_rate": 1.547085502368294e-05,
+ "loss": 0.8184,
+ "step": 1747
+ },
+ {
+ "epoch": 0.34,
+ "learning_rate": 1.5465635159205798e-05,
+ "loss": 1.5694,
+ "step": 1748
+ },
+ {
+ "epoch": 0.34,
+ "learning_rate": 1.546041317025351e-05,
+ "loss": 0.8193,
+ "step": 1749
+ },
+ {
+ "epoch": 0.34,
+ "learning_rate": 1.5455189058855858e-05,
+ "loss": 0.8109,
+ "step": 1750
+ },
+ {
+ "epoch": 0.34,
+ "learning_rate": 1.5449962827043433e-05,
+ "loss": 0.8041,
+ "step": 1751
+ },
+ {
+ "epoch": 0.34,
+ "learning_rate": 1.5444734476847658e-05,
+ "loss": 0.8263,
+ "step": 1752
+ },
+ {
+ "epoch": 0.34,
+ "learning_rate": 1.5439504010300776e-05,
+ "loss": 0.8267,
+ "step": 1753
+ },
+ {
+ "epoch": 0.34,
+ "learning_rate": 1.543427142943585e-05,
+ "loss": 1.6475,
+ "step": 1754
+ },
+ {
+ "epoch": 0.34,
+ "learning_rate": 1.5429036736286767e-05,
+ "loss": 0.8478,
+ "step": 1755
+ },
+ {
+ "epoch": 0.34,
+ "learning_rate": 1.542379993288824e-05,
+ "loss": 0.8272,
+ "step": 1756
+ },
+ {
+ "epoch": 0.34,
+ "learning_rate": 1.5418561021275792e-05,
+ "loss": 0.8087,
+ "step": 1757
+ },
+ {
+ "epoch": 0.34,
+ "learning_rate": 1.541332000348578e-05,
+ "loss": 0.8896,
+ "step": 1758
+ },
+ {
+ "epoch": 0.34,
+ "learning_rate": 1.5408076881555364e-05,
+ "loss": 0.847,
+ "step": 1759
+ },
+ {
+ "epoch": 0.34,
+ "learning_rate": 1.540283165752253e-05,
+ "loss": 0.8583,
+ "step": 1760
+ },
+ {
+ "epoch": 0.34,
+ "learning_rate": 1.5397584333426083e-05,
+ "loss": 0.8147,
+ "step": 1761
+ },
+ {
+ "epoch": 0.34,
+ "learning_rate": 1.539233491130564e-05,
+ "loss": 0.8321,
+ "step": 1762
+ },
+ {
+ "epoch": 0.34,
+ "learning_rate": 1.5387083393201633e-05,
+ "loss": 0.8511,
+ "step": 1763
+ },
+ {
+ "epoch": 0.34,
+ "learning_rate": 1.5381829781155307e-05,
+ "loss": 0.8575,
+ "step": 1764
+ },
+ {
+ "epoch": 0.34,
+ "learning_rate": 1.5376574077208737e-05,
+ "loss": 0.7854,
+ "step": 1765
+ },
+ {
+ "epoch": 0.34,
+ "learning_rate": 1.5371316283404786e-05,
+ "loss": 0.8679,
+ "step": 1766
+ },
+ {
+ "epoch": 0.34,
+ "learning_rate": 1.536605640178715e-05,
+ "loss": 0.8858,
+ "step": 1767
+ },
+ {
+ "epoch": 0.34,
+ "learning_rate": 1.5360794434400322e-05,
+ "loss": 0.8047,
+ "step": 1768
+ },
+ {
+ "epoch": 0.34,
+ "learning_rate": 1.5355530383289616e-05,
+ "loss": 0.796,
+ "step": 1769
+ },
+ {
+ "epoch": 0.34,
+ "learning_rate": 1.5350264250501154e-05,
+ "loss": 0.8237,
+ "step": 1770
+ },
+ {
+ "epoch": 0.34,
+ "learning_rate": 1.534499603808186e-05,
+ "loss": 0.8459,
+ "step": 1771
+ },
+ {
+ "epoch": 0.34,
+ "learning_rate": 1.533972574807947e-05,
+ "loss": 1.5925,
+ "step": 1772
+ },
+ {
+ "epoch": 0.34,
+ "learning_rate": 1.533445338254254e-05,
+ "loss": 0.8181,
+ "step": 1773
+ },
+ {
+ "epoch": 0.34,
+ "learning_rate": 1.5329178943520407e-05,
+ "loss": 1.4691,
+ "step": 1774
+ },
+ {
+ "epoch": 0.34,
+ "learning_rate": 1.5323902433063246e-05,
+ "loss": 0.8129,
+ "step": 1775
+ },
+ {
+ "epoch": 0.34,
+ "learning_rate": 1.5318623853222006e-05,
+ "loss": 0.81,
+ "step": 1776
+ },
+ {
+ "epoch": 0.34,
+ "learning_rate": 1.5313343206048458e-05,
+ "loss": 0.8448,
+ "step": 1777
+ },
+ {
+ "epoch": 0.34,
+ "learning_rate": 1.5308060493595173e-05,
+ "loss": 0.8518,
+ "step": 1778
+ },
+ {
+ "epoch": 0.34,
+ "learning_rate": 1.5302775717915528e-05,
+ "loss": 0.859,
+ "step": 1779
+ },
+ {
+ "epoch": 0.34,
+ "learning_rate": 1.5297488881063696e-05,
+ "loss": 0.758,
+ "step": 1780
+ },
+ {
+ "epoch": 0.34,
+ "learning_rate": 1.5292199985094652e-05,
+ "loss": 0.8031,
+ "step": 1781
+ },
+ {
+ "epoch": 0.34,
+ "learning_rate": 1.5286909032064174e-05,
+ "loss": 0.807,
+ "step": 1782
+ },
+ {
+ "epoch": 0.34,
+ "learning_rate": 1.5281616024028844e-05,
+ "loss": 0.832,
+ "step": 1783
+ },
+ {
+ "epoch": 0.34,
+ "learning_rate": 1.527632096304603e-05,
+ "loss": 0.8136,
+ "step": 1784
+ },
+ {
+ "epoch": 0.34,
+ "learning_rate": 1.5271023851173908e-05,
+ "loss": 0.828,
+ "step": 1785
+ },
+ {
+ "epoch": 0.34,
+ "learning_rate": 1.5265724690471454e-05,
+ "loss": 1.6493,
+ "step": 1786
+ },
+ {
+ "epoch": 0.34,
+ "learning_rate": 1.5260423482998427e-05,
+ "loss": 0.8882,
+ "step": 1787
+ },
+ {
+ "epoch": 0.34,
+ "learning_rate": 1.5255120230815394e-05,
+ "loss": 0.8552,
+ "step": 1788
+ },
+ {
+ "epoch": 0.34,
+ "learning_rate": 1.5249814935983713e-05,
+ "loss": 0.8342,
+ "step": 1789
+ },
+ {
+ "epoch": 0.34,
+ "learning_rate": 1.5244507600565532e-05,
+ "loss": 1.6562,
+ "step": 1790
+ },
+ {
+ "epoch": 0.34,
+ "learning_rate": 1.5239198226623796e-05,
+ "loss": 0.8741,
+ "step": 1791
+ },
+ {
+ "epoch": 0.34,
+ "learning_rate": 1.5233886816222247e-05,
+ "loss": 0.73,
+ "step": 1792
+ },
+ {
+ "epoch": 0.35,
+ "learning_rate": 1.522857337142541e-05,
+ "loss": 0.8176,
+ "step": 1793
+ },
+ {
+ "epoch": 0.35,
+ "learning_rate": 1.5223257894298602e-05,
+ "loss": 0.8138,
+ "step": 1794
+ },
+ {
+ "epoch": 0.35,
+ "learning_rate": 1.5217940386907933e-05,
+ "loss": 0.8376,
+ "step": 1795
+ },
+ {
+ "epoch": 0.35,
+ "learning_rate": 1.5212620851320305e-05,
+ "loss": 0.8724,
+ "step": 1796
+ },
+ {
+ "epoch": 0.35,
+ "learning_rate": 1.52072992896034e-05,
+ "loss": 0.8471,
+ "step": 1797
+ },
+ {
+ "epoch": 0.35,
+ "learning_rate": 1.5201975703825698e-05,
+ "loss": 0.8481,
+ "step": 1798
+ },
+ {
+ "epoch": 0.35,
+ "learning_rate": 1.5196650096056451e-05,
+ "loss": 0.7695,
+ "step": 1799
+ },
+ {
+ "epoch": 0.35,
+ "learning_rate": 1.5191322468365716e-05,
+ "loss": 0.8064,
+ "step": 1800
+ },
+ {
+ "epoch": 0.35,
+ "learning_rate": 1.5185992822824318e-05,
+ "loss": 0.8679,
+ "step": 1801
+ },
+ {
+ "epoch": 0.35,
+ "learning_rate": 1.5180661161503877e-05,
+ "loss": 0.7404,
+ "step": 1802
+ },
+ {
+ "epoch": 0.35,
+ "learning_rate": 1.5175327486476792e-05,
+ "loss": 0.8415,
+ "step": 1803
+ },
+ {
+ "epoch": 0.35,
+ "learning_rate": 1.5169991799816247e-05,
+ "loss": 1.6459,
+ "step": 1804
+ },
+ {
+ "epoch": 0.35,
+ "learning_rate": 1.5164654103596204e-05,
+ "loss": 0.8807,
+ "step": 1805
+ },
+ {
+ "epoch": 0.35,
+ "learning_rate": 1.5159314399891406e-05,
+ "loss": 0.8816,
+ "step": 1806
+ },
+ {
+ "epoch": 0.35,
+ "learning_rate": 1.5153972690777386e-05,
+ "loss": 0.8265,
+ "step": 1807
+ },
+ {
+ "epoch": 0.35,
+ "learning_rate": 1.5148628978330444e-05,
+ "loss": 0.8393,
+ "step": 1808
+ },
+ {
+ "epoch": 0.35,
+ "learning_rate": 1.514328326462767e-05,
+ "loss": 0.8426,
+ "step": 1809
+ },
+ {
+ "epoch": 0.35,
+ "learning_rate": 1.513793555174692e-05,
+ "loss": 0.828,
+ "step": 1810
+ },
+ {
+ "epoch": 0.35,
+ "learning_rate": 1.5132585841766835e-05,
+ "loss": 0.7768,
+ "step": 1811
+ },
+ {
+ "epoch": 0.35,
+ "learning_rate": 1.5127234136766832e-05,
+ "loss": 0.8207,
+ "step": 1812
+ },
+ {
+ "epoch": 0.35,
+ "learning_rate": 1.5121880438827101e-05,
+ "loss": 0.8221,
+ "step": 1813
+ },
+ {
+ "epoch": 0.35,
+ "learning_rate": 1.5116524750028607e-05,
+ "loss": 0.821,
+ "step": 1814
+ },
+ {
+ "epoch": 0.35,
+ "learning_rate": 1.5111167072453091e-05,
+ "loss": 0.7764,
+ "step": 1815
+ },
+ {
+ "epoch": 0.35,
+ "learning_rate": 1.5105807408183059e-05,
+ "loss": 1.5924,
+ "step": 1816
+ },
+ {
+ "epoch": 0.35,
+ "learning_rate": 1.5100445759301805e-05,
+ "loss": 0.8031,
+ "step": 1817
+ },
+ {
+ "epoch": 0.35,
+ "learning_rate": 1.5095082127893377e-05,
+ "loss": 0.8367,
+ "step": 1818
+ },
+ {
+ "epoch": 0.35,
+ "learning_rate": 1.508971651604261e-05,
+ "loss": 0.8099,
+ "step": 1819
+ },
+ {
+ "epoch": 0.35,
+ "learning_rate": 1.508434892583509e-05,
+ "loss": 0.8778,
+ "step": 1820
+ },
+ {
+ "epoch": 0.35,
+ "learning_rate": 1.5078979359357194e-05,
+ "loss": 0.8771,
+ "step": 1821
+ },
+ {
+ "epoch": 0.35,
+ "learning_rate": 1.5073607818696043e-05,
+ "loss": 1.7129,
+ "step": 1822
+ },
+ {
+ "epoch": 0.35,
+ "learning_rate": 1.5068234305939546e-05,
+ "loss": 0.8056,
+ "step": 1823
+ },
+ {
+ "epoch": 0.35,
+ "learning_rate": 1.5062858823176374e-05,
+ "loss": 0.8486,
+ "step": 1824
+ },
+ {
+ "epoch": 0.35,
+ "learning_rate": 1.5057481372495948e-05,
+ "loss": 0.7933,
+ "step": 1825
+ },
+ {
+ "epoch": 0.35,
+ "learning_rate": 1.5052101955988478e-05,
+ "loss": 0.7819,
+ "step": 1826
+ },
+ {
+ "epoch": 0.35,
+ "learning_rate": 1.5046720575744917e-05,
+ "loss": 1.5546,
+ "step": 1827
+ },
+ {
+ "epoch": 0.35,
+ "learning_rate": 1.5041337233856998e-05,
+ "loss": 0.8569,
+ "step": 1828
+ },
+ {
+ "epoch": 0.35,
+ "learning_rate": 1.5035951932417205e-05,
+ "loss": 1.4716,
+ "step": 1829
+ },
+ {
+ "epoch": 0.35,
+ "learning_rate": 1.503056467351879e-05,
+ "loss": 0.8125,
+ "step": 1830
+ },
+ {
+ "epoch": 0.35,
+ "learning_rate": 1.5025175459255761e-05,
+ "loss": 0.8321,
+ "step": 1831
+ },
+ {
+ "epoch": 0.35,
+ "learning_rate": 1.5019784291722888e-05,
+ "loss": 0.7877,
+ "step": 1832
+ },
+ {
+ "epoch": 0.35,
+ "learning_rate": 1.50143911730157e-05,
+ "loss": 0.8137,
+ "step": 1833
+ },
+ {
+ "epoch": 0.35,
+ "learning_rate": 1.5008996105230493e-05,
+ "loss": 0.8091,
+ "step": 1834
+ },
+ {
+ "epoch": 0.35,
+ "learning_rate": 1.5003599090464307e-05,
+ "loss": 0.8049,
+ "step": 1835
+ },
+ {
+ "epoch": 0.35,
+ "learning_rate": 1.4998200130814941e-05,
+ "loss": 0.8711,
+ "step": 1836
+ },
+ {
+ "epoch": 0.35,
+ "learning_rate": 1.4992799228380959e-05,
+ "loss": 0.833,
+ "step": 1837
+ },
+ {
+ "epoch": 0.35,
+ "learning_rate": 1.4987396385261676e-05,
+ "loss": 0.7466,
+ "step": 1838
+ },
+ {
+ "epoch": 0.35,
+ "learning_rate": 1.4981991603557155e-05,
+ "loss": 0.8551,
+ "step": 1839
+ },
+ {
+ "epoch": 0.35,
+ "learning_rate": 1.4976584885368221e-05,
+ "loss": 0.767,
+ "step": 1840
+ },
+ {
+ "epoch": 0.35,
+ "learning_rate": 1.4971176232796447e-05,
+ "loss": 0.7866,
+ "step": 1841
+ },
+ {
+ "epoch": 0.35,
+ "learning_rate": 1.4965765647944161e-05,
+ "loss": 0.8463,
+ "step": 1842
+ },
+ {
+ "epoch": 0.35,
+ "learning_rate": 1.4960353132914436e-05,
+ "loss": 0.8995,
+ "step": 1843
+ },
+ {
+ "epoch": 0.35,
+ "learning_rate": 1.4954938689811105e-05,
+ "loss": 0.8723,
+ "step": 1844
+ },
+ {
+ "epoch": 0.36,
+ "learning_rate": 1.494952232073874e-05,
+ "loss": 0.8527,
+ "step": 1845
+ },
+ {
+ "epoch": 0.36,
+ "learning_rate": 1.4944104027802668e-05,
+ "loss": 0.7739,
+ "step": 1846
+ },
+ {
+ "epoch": 0.36,
+ "learning_rate": 1.4938683813108968e-05,
+ "loss": 0.8038,
+ "step": 1847
+ },
+ {
+ "epoch": 0.36,
+ "learning_rate": 1.493326167876445e-05,
+ "loss": 0.8898,
+ "step": 1848
+ },
+ {
+ "epoch": 0.36,
+ "learning_rate": 1.492783762687669e-05,
+ "loss": 0.8127,
+ "step": 1849
+ },
+ {
+ "epoch": 0.36,
+ "learning_rate": 1.4922411659553993e-05,
+ "loss": 0.8014,
+ "step": 1850
+ },
+ {
+ "epoch": 0.36,
+ "learning_rate": 1.4916983778905418e-05,
+ "loss": 0.8183,
+ "step": 1851
+ },
+ {
+ "epoch": 0.36,
+ "learning_rate": 1.4911553987040767e-05,
+ "loss": 0.8254,
+ "step": 1852
+ },
+ {
+ "epoch": 0.36,
+ "learning_rate": 1.4906122286070582e-05,
+ "loss": 0.8353,
+ "step": 1853
+ },
+ {
+ "epoch": 0.36,
+ "learning_rate": 1.4900688678106146e-05,
+ "loss": 0.8231,
+ "step": 1854
+ },
+ {
+ "epoch": 0.36,
+ "learning_rate": 1.4895253165259487e-05,
+ "loss": 0.8217,
+ "step": 1855
+ },
+ {
+ "epoch": 0.36,
+ "learning_rate": 1.4889815749643371e-05,
+ "loss": 0.7719,
+ "step": 1856
+ },
+ {
+ "epoch": 0.36,
+ "learning_rate": 1.4884376433371304e-05,
+ "loss": 0.7798,
+ "step": 1857
+ },
+ {
+ "epoch": 0.36,
+ "learning_rate": 1.4878935218557531e-05,
+ "loss": 0.8071,
+ "step": 1858
+ },
+ {
+ "epoch": 0.36,
+ "learning_rate": 1.4873492107317039e-05,
+ "loss": 0.8403,
+ "step": 1859
+ },
+ {
+ "epoch": 0.36,
+ "learning_rate": 1.4868047101765541e-05,
+ "loss": 0.8202,
+ "step": 1860
+ },
+ {
+ "epoch": 0.36,
+ "learning_rate": 1.4862600204019497e-05,
+ "loss": 0.8582,
+ "step": 1861
+ },
+ {
+ "epoch": 0.36,
+ "learning_rate": 1.4857151416196103e-05,
+ "loss": 0.813,
+ "step": 1862
+ },
+ {
+ "epoch": 0.36,
+ "learning_rate": 1.485170074041328e-05,
+ "loss": 0.8285,
+ "step": 1863
+ },
+ {
+ "epoch": 0.36,
+ "learning_rate": 1.484624817878969e-05,
+ "loss": 0.7269,
+ "step": 1864
+ },
+ {
+ "epoch": 0.36,
+ "learning_rate": 1.4840793733444727e-05,
+ "loss": 0.8196,
+ "step": 1865
+ },
+ {
+ "epoch": 0.36,
+ "learning_rate": 1.4835337406498516e-05,
+ "loss": 0.8893,
+ "step": 1866
+ },
+ {
+ "epoch": 0.36,
+ "learning_rate": 1.4829879200071916e-05,
+ "loss": 0.8496,
+ "step": 1867
+ },
+ {
+ "epoch": 0.36,
+ "learning_rate": 1.4824419116286514e-05,
+ "loss": 1.6035,
+ "step": 1868
+ },
+ {
+ "epoch": 0.36,
+ "learning_rate": 1.4818957157264624e-05,
+ "loss": 0.7866,
+ "step": 1869
+ },
+ {
+ "epoch": 0.36,
+ "learning_rate": 1.4813493325129296e-05,
+ "loss": 1.4976,
+ "step": 1870
+ },
+ {
+ "epoch": 0.36,
+ "learning_rate": 1.48080276220043e-05,
+ "loss": 0.8122,
+ "step": 1871
+ },
+ {
+ "epoch": 0.36,
+ "learning_rate": 1.4802560050014145e-05,
+ "loss": 0.7893,
+ "step": 1872
+ },
+ {
+ "epoch": 0.36,
+ "learning_rate": 1.4797090611284053e-05,
+ "loss": 0.8726,
+ "step": 1873
+ },
+ {
+ "epoch": 0.36,
+ "learning_rate": 1.479161930793998e-05,
+ "loss": 0.8398,
+ "step": 1874
+ },
+ {
+ "epoch": 0.36,
+ "learning_rate": 1.4786146142108601e-05,
+ "loss": 0.8703,
+ "step": 1875
+ },
+ {
+ "epoch": 0.36,
+ "learning_rate": 1.4780671115917324e-05,
+ "loss": 0.7602,
+ "step": 1876
+ },
+ {
+ "epoch": 0.36,
+ "learning_rate": 1.477519423149427e-05,
+ "loss": 1.5956,
+ "step": 1877
+ },
+ {
+ "epoch": 0.36,
+ "learning_rate": 1.4769715490968289e-05,
+ "loss": 0.8035,
+ "step": 1878
+ },
+ {
+ "epoch": 0.36,
+ "learning_rate": 1.4764234896468946e-05,
+ "loss": 0.8652,
+ "step": 1879
+ },
+ {
+ "epoch": 0.36,
+ "learning_rate": 1.4758752450126536e-05,
+ "loss": 0.8354,
+ "step": 1880
+ },
+ {
+ "epoch": 0.36,
+ "learning_rate": 1.4753268154072067e-05,
+ "loss": 0.7582,
+ "step": 1881
+ },
+ {
+ "epoch": 0.36,
+ "learning_rate": 1.4747782010437264e-05,
+ "loss": 0.8843,
+ "step": 1882
+ },
+ {
+ "epoch": 0.36,
+ "learning_rate": 1.4742294021354578e-05,
+ "loss": 0.8662,
+ "step": 1883
+ },
+ {
+ "epoch": 0.36,
+ "learning_rate": 1.4736804188957174e-05,
+ "loss": 0.8013,
+ "step": 1884
+ },
+ {
+ "epoch": 0.36,
+ "learning_rate": 1.473131251537893e-05,
+ "loss": 0.8872,
+ "step": 1885
+ },
+ {
+ "epoch": 0.36,
+ "learning_rate": 1.472581900275444e-05,
+ "loss": 0.8489,
+ "step": 1886
+ },
+ {
+ "epoch": 0.36,
+ "learning_rate": 1.4720323653219017e-05,
+ "loss": 0.7683,
+ "step": 1887
+ },
+ {
+ "epoch": 0.36,
+ "learning_rate": 1.471482646890869e-05,
+ "loss": 0.8206,
+ "step": 1888
+ },
+ {
+ "epoch": 0.36,
+ "learning_rate": 1.4709327451960191e-05,
+ "loss": 0.8173,
+ "step": 1889
+ },
+ {
+ "epoch": 0.36,
+ "learning_rate": 1.4703826604510978e-05,
+ "loss": 0.8034,
+ "step": 1890
+ },
+ {
+ "epoch": 0.36,
+ "learning_rate": 1.4698323928699203e-05,
+ "loss": 0.7785,
+ "step": 1891
+ },
+ {
+ "epoch": 0.36,
+ "learning_rate": 1.4692819426663744e-05,
+ "loss": 0.8248,
+ "step": 1892
+ },
+ {
+ "epoch": 0.36,
+ "learning_rate": 1.4687313100544188e-05,
+ "loss": 0.8237,
+ "step": 1893
+ },
+ {
+ "epoch": 0.36,
+ "learning_rate": 1.4681804952480817e-05,
+ "loss": 0.7935,
+ "step": 1894
+ },
+ {
+ "epoch": 0.36,
+ "learning_rate": 1.4676294984614641e-05,
+ "loss": 0.7987,
+ "step": 1895
+ },
+ {
+ "epoch": 0.36,
+ "learning_rate": 1.467078319908736e-05,
+ "loss": 0.8014,
+ "step": 1896
+ },
+ {
+ "epoch": 0.37,
+ "learning_rate": 1.466526959804139e-05,
+ "loss": 0.831,
+ "step": 1897
+ },
+ {
+ "epoch": 0.37,
+ "learning_rate": 1.4659754183619847e-05,
+ "loss": 0.8585,
+ "step": 1898
+ },
+ {
+ "epoch": 0.37,
+ "learning_rate": 1.4654236957966558e-05,
+ "loss": 0.8191,
+ "step": 1899
+ },
+ {
+ "epoch": 0.37,
+ "learning_rate": 1.4648717923226057e-05,
+ "loss": 0.8552,
+ "step": 1900
+ },
+ {
+ "epoch": 0.37,
+ "learning_rate": 1.4643197081543566e-05,
+ "loss": 0.8626,
+ "step": 1901
+ },
+ {
+ "epoch": 0.37,
+ "learning_rate": 1.4637674435065024e-05,
+ "loss": 0.74,
+ "step": 1902
+ },
+ {
+ "epoch": 0.37,
+ "learning_rate": 1.4632149985937063e-05,
+ "loss": 0.8355,
+ "step": 1903
+ },
+ {
+ "epoch": 0.37,
+ "learning_rate": 1.462662373630702e-05,
+ "loss": 0.8614,
+ "step": 1904
+ },
+ {
+ "epoch": 0.37,
+ "learning_rate": 1.4621095688322935e-05,
+ "loss": 0.8329,
+ "step": 1905
+ },
+ {
+ "epoch": 0.37,
+ "learning_rate": 1.4615565844133539e-05,
+ "loss": 0.7703,
+ "step": 1906
+ },
+ {
+ "epoch": 0.37,
+ "learning_rate": 1.4610034205888263e-05,
+ "loss": 0.7793,
+ "step": 1907
+ },
+ {
+ "epoch": 0.37,
+ "learning_rate": 1.4604500775737241e-05,
+ "loss": 0.817,
+ "step": 1908
+ },
+ {
+ "epoch": 0.37,
+ "learning_rate": 1.4598965555831297e-05,
+ "loss": 0.8367,
+ "step": 1909
+ },
+ {
+ "epoch": 0.37,
+ "learning_rate": 1.4593428548321957e-05,
+ "loss": 0.9066,
+ "step": 1910
+ },
+ {
+ "epoch": 0.37,
+ "learning_rate": 1.4587889755361437e-05,
+ "loss": 1.7026,
+ "step": 1911
+ },
+ {
+ "epoch": 0.37,
+ "learning_rate": 1.4582349179102643e-05,
+ "loss": 0.812,
+ "step": 1912
+ },
+ {
+ "epoch": 0.37,
+ "learning_rate": 1.4576806821699188e-05,
+ "loss": 0.813,
+ "step": 1913
+ },
+ {
+ "epoch": 0.37,
+ "learning_rate": 1.4571262685305365e-05,
+ "loss": 0.8543,
+ "step": 1914
+ },
+ {
+ "epoch": 0.37,
+ "learning_rate": 1.4565716772076161e-05,
+ "loss": 0.7935,
+ "step": 1915
+ },
+ {
+ "epoch": 0.37,
+ "learning_rate": 1.4560169084167259e-05,
+ "loss": 1.5397,
+ "step": 1916
+ },
+ {
+ "epoch": 0.37,
+ "learning_rate": 1.4554619623735023e-05,
+ "loss": 0.8571,
+ "step": 1917
+ },
+ {
+ "epoch": 0.37,
+ "learning_rate": 1.4549068392936514e-05,
+ "loss": 0.8299,
+ "step": 1918
+ },
+ {
+ "epoch": 0.37,
+ "learning_rate": 1.4543515393929472e-05,
+ "loss": 0.8604,
+ "step": 1919
+ },
+ {
+ "epoch": 0.37,
+ "learning_rate": 1.4537960628872338e-05,
+ "loss": 0.8108,
+ "step": 1920
+ },
+ {
+ "epoch": 0.37,
+ "learning_rate": 1.453240409992423e-05,
+ "loss": 0.7789,
+ "step": 1921
+ },
+ {
+ "epoch": 0.37,
+ "learning_rate": 1.4526845809244952e-05,
+ "loss": 0.7876,
+ "step": 1922
+ },
+ {
+ "epoch": 0.37,
+ "learning_rate": 1.4521285758994994e-05,
+ "loss": 0.7993,
+ "step": 1923
+ },
+ {
+ "epoch": 0.37,
+ "learning_rate": 1.4515723951335527e-05,
+ "loss": 0.8486,
+ "step": 1924
+ },
+ {
+ "epoch": 0.37,
+ "learning_rate": 1.4510160388428414e-05,
+ "loss": 0.824,
+ "step": 1925
+ },
+ {
+ "epoch": 0.37,
+ "learning_rate": 1.4504595072436192e-05,
+ "loss": 1.6338,
+ "step": 1926
+ },
+ {
+ "epoch": 0.37,
+ "learning_rate": 1.449902800552208e-05,
+ "loss": 0.8537,
+ "step": 1927
+ },
+ {
+ "epoch": 0.37,
+ "learning_rate": 1.4493459189849976e-05,
+ "loss": 0.8373,
+ "step": 1928
+ },
+ {
+ "epoch": 0.37,
+ "learning_rate": 1.448788862758447e-05,
+ "loss": 0.8667,
+ "step": 1929
+ },
+ {
+ "epoch": 0.37,
+ "learning_rate": 1.4482316320890815e-05,
+ "loss": 0.8325,
+ "step": 1930
+ },
+ {
+ "epoch": 0.37,
+ "learning_rate": 1.4476742271934954e-05,
+ "loss": 0.7851,
+ "step": 1931
+ },
+ {
+ "epoch": 0.37,
+ "learning_rate": 1.44711664828835e-05,
+ "loss": 0.7906,
+ "step": 1932
+ },
+ {
+ "epoch": 0.37,
+ "learning_rate": 1.4465588955903745e-05,
+ "loss": 0.8536,
+ "step": 1933
+ },
+ {
+ "epoch": 0.37,
+ "learning_rate": 1.4460009693163651e-05,
+ "loss": 0.775,
+ "step": 1934
+ },
+ {
+ "epoch": 0.37,
+ "learning_rate": 1.4454428696831868e-05,
+ "loss": 0.8306,
+ "step": 1935
+ },
+ {
+ "epoch": 0.37,
+ "learning_rate": 1.4448845969077706e-05,
+ "loss": 0.8112,
+ "step": 1936
+ },
+ {
+ "epoch": 0.37,
+ "learning_rate": 1.4443261512071155e-05,
+ "loss": 0.8217,
+ "step": 1937
+ },
+ {
+ "epoch": 0.37,
+ "learning_rate": 1.443767532798288e-05,
+ "loss": 0.7701,
+ "step": 1938
+ },
+ {
+ "epoch": 0.37,
+ "learning_rate": 1.4432087418984207e-05,
+ "loss": 1.4883,
+ "step": 1939
+ },
+ {
+ "epoch": 0.37,
+ "learning_rate": 1.4426497787247143e-05,
+ "loss": 0.7719,
+ "step": 1940
+ },
+ {
+ "epoch": 0.37,
+ "learning_rate": 1.4420906434944355e-05,
+ "loss": 0.8353,
+ "step": 1941
+ },
+ {
+ "epoch": 0.37,
+ "learning_rate": 1.4415313364249188e-05,
+ "loss": 0.8249,
+ "step": 1942
+ },
+ {
+ "epoch": 0.37,
+ "learning_rate": 1.4409718577335654e-05,
+ "loss": 0.8051,
+ "step": 1943
+ },
+ {
+ "epoch": 0.37,
+ "learning_rate": 1.4404122076378425e-05,
+ "loss": 0.8345,
+ "step": 1944
+ },
+ {
+ "epoch": 0.37,
+ "learning_rate": 1.439852386355284e-05,
+ "loss": 0.8472,
+ "step": 1945
+ },
+ {
+ "epoch": 0.37,
+ "learning_rate": 1.439292394103491e-05,
+ "loss": 0.8487,
+ "step": 1946
+ },
+ {
+ "epoch": 0.37,
+ "learning_rate": 1.4387322311001308e-05,
+ "loss": 0.7806,
+ "step": 1947
+ },
+ {
+ "epoch": 0.37,
+ "learning_rate": 1.4381718975629373e-05,
+ "loss": 0.8233,
+ "step": 1948
+ },
+ {
+ "epoch": 0.38,
+ "learning_rate": 1.4376113937097098e-05,
+ "loss": 0.8293,
+ "step": 1949
+ },
+ {
+ "epoch": 0.38,
+ "learning_rate": 1.4370507197583145e-05,
+ "loss": 0.843,
+ "step": 1950
+ },
+ {
+ "epoch": 0.38,
+ "learning_rate": 1.4364898759266837e-05,
+ "loss": 1.5072,
+ "step": 1951
+ },
+ {
+ "epoch": 0.38,
+ "learning_rate": 1.4359288624328158e-05,
+ "loss": 0.8861,
+ "step": 1952
+ },
+ {
+ "epoch": 0.38,
+ "learning_rate": 1.4353676794947747e-05,
+ "loss": 0.8289,
+ "step": 1953
+ },
+ {
+ "epoch": 0.38,
+ "learning_rate": 1.4348063273306905e-05,
+ "loss": 0.8005,
+ "step": 1954
+ },
+ {
+ "epoch": 0.38,
+ "learning_rate": 1.4342448061587587e-05,
+ "loss": 0.8366,
+ "step": 1955
+ },
+ {
+ "epoch": 0.38,
+ "learning_rate": 1.4336831161972418e-05,
+ "loss": 0.8077,
+ "step": 1956
+ },
+ {
+ "epoch": 0.38,
+ "learning_rate": 1.4331212576644657e-05,
+ "loss": 0.8273,
+ "step": 1957
+ },
+ {
+ "epoch": 0.38,
+ "learning_rate": 1.432559230778824e-05,
+ "loss": 0.8331,
+ "step": 1958
+ },
+ {
+ "epoch": 0.38,
+ "learning_rate": 1.4319970357587742e-05,
+ "loss": 0.795,
+ "step": 1959
+ },
+ {
+ "epoch": 0.38,
+ "learning_rate": 1.43143467282284e-05,
+ "loss": 1.5681,
+ "step": 1960
+ },
+ {
+ "epoch": 0.38,
+ "learning_rate": 1.43087214218961e-05,
+ "loss": 0.8219,
+ "step": 1961
+ },
+ {
+ "epoch": 0.38,
+ "learning_rate": 1.430309444077738e-05,
+ "loss": 0.8305,
+ "step": 1962
+ },
+ {
+ "epoch": 0.38,
+ "learning_rate": 1.4297465787059427e-05,
+ "loss": 0.762,
+ "step": 1963
+ },
+ {
+ "epoch": 0.38,
+ "learning_rate": 1.429183546293009e-05,
+ "loss": 0.8358,
+ "step": 1964
+ },
+ {
+ "epoch": 0.38,
+ "learning_rate": 1.4286203470577853e-05,
+ "loss": 0.8652,
+ "step": 1965
+ },
+ {
+ "epoch": 0.38,
+ "learning_rate": 1.4280569812191852e-05,
+ "loss": 0.8174,
+ "step": 1966
+ },
+ {
+ "epoch": 0.38,
+ "learning_rate": 1.427493448996187e-05,
+ "loss": 0.8241,
+ "step": 1967
+ },
+ {
+ "epoch": 0.38,
+ "learning_rate": 1.4269297506078348e-05,
+ "loss": 0.872,
+ "step": 1968
+ },
+ {
+ "epoch": 0.38,
+ "learning_rate": 1.426365886273236e-05,
+ "loss": 1.5384,
+ "step": 1969
+ },
+ {
+ "epoch": 0.38,
+ "learning_rate": 1.4258018562115623e-05,
+ "loss": 0.8711,
+ "step": 1970
+ },
+ {
+ "epoch": 0.38,
+ "learning_rate": 1.425237660642051e-05,
+ "loss": 0.8128,
+ "step": 1971
+ },
+ {
+ "epoch": 0.38,
+ "learning_rate": 1.424673299784003e-05,
+ "loss": 0.8254,
+ "step": 1972
+ },
+ {
+ "epoch": 0.38,
+ "learning_rate": 1.4241087738567837e-05,
+ "loss": 0.8231,
+ "step": 1973
+ },
+ {
+ "epoch": 0.38,
+ "learning_rate": 1.423544083079822e-05,
+ "loss": 0.8292,
+ "step": 1974
+ },
+ {
+ "epoch": 0.38,
+ "learning_rate": 1.4229792276726122e-05,
+ "loss": 0.7113,
+ "step": 1975
+ },
+ {
+ "epoch": 0.38,
+ "learning_rate": 1.4224142078547112e-05,
+ "loss": 0.8272,
+ "step": 1976
+ },
+ {
+ "epoch": 0.38,
+ "learning_rate": 1.4218490238457407e-05,
+ "loss": 0.8636,
+ "step": 1977
+ },
+ {
+ "epoch": 0.38,
+ "learning_rate": 1.4212836758653862e-05,
+ "loss": 0.787,
+ "step": 1978
+ },
+ {
+ "epoch": 0.38,
+ "learning_rate": 1.4207181641333962e-05,
+ "loss": 0.8249,
+ "step": 1979
+ },
+ {
+ "epoch": 0.38,
+ "learning_rate": 1.4201524888695835e-05,
+ "loss": 0.8117,
+ "step": 1980
+ },
+ {
+ "epoch": 0.38,
+ "learning_rate": 1.4195866502938245e-05,
+ "loss": 0.8545,
+ "step": 1981
+ },
+ {
+ "epoch": 0.38,
+ "learning_rate": 1.4190206486260585e-05,
+ "loss": 0.8354,
+ "step": 1982
+ },
+ {
+ "epoch": 0.38,
+ "learning_rate": 1.4184544840862888e-05,
+ "loss": 0.8486,
+ "step": 1983
+ },
+ {
+ "epoch": 0.38,
+ "learning_rate": 1.4178881568945816e-05,
+ "loss": 0.83,
+ "step": 1984
+ },
+ {
+ "epoch": 0.38,
+ "learning_rate": 1.4173216672710665e-05,
+ "loss": 0.7718,
+ "step": 1985
+ },
+ {
+ "epoch": 0.38,
+ "learning_rate": 1.4167550154359365e-05,
+ "loss": 0.8457,
+ "step": 1986
+ },
+ {
+ "epoch": 0.38,
+ "learning_rate": 1.4161882016094475e-05,
+ "loss": 0.7612,
+ "step": 1987
+ },
+ {
+ "epoch": 0.38,
+ "learning_rate": 1.4156212260119176e-05,
+ "loss": 0.8438,
+ "step": 1988
+ },
+ {
+ "epoch": 0.38,
+ "learning_rate": 1.4150540888637287e-05,
+ "loss": 0.8576,
+ "step": 1989
+ },
+ {
+ "epoch": 0.38,
+ "learning_rate": 1.4144867903853254e-05,
+ "loss": 0.8016,
+ "step": 1990
+ },
+ {
+ "epoch": 0.38,
+ "learning_rate": 1.4139193307972148e-05,
+ "loss": 0.7674,
+ "step": 1991
+ },
+ {
+ "epoch": 0.38,
+ "learning_rate": 1.4133517103199666e-05,
+ "loss": 0.8558,
+ "step": 1992
+ },
+ {
+ "epoch": 0.38,
+ "learning_rate": 1.4127839291742129e-05,
+ "loss": 0.8396,
+ "step": 1993
+ },
+ {
+ "epoch": 0.38,
+ "learning_rate": 1.412215987580649e-05,
+ "loss": 0.8536,
+ "step": 1994
+ },
+ {
+ "epoch": 0.38,
+ "learning_rate": 1.4116478857600311e-05,
+ "loss": 1.4713,
+ "step": 1995
+ },
+ {
+ "epoch": 0.38,
+ "learning_rate": 1.4110796239331791e-05,
+ "loss": 0.8204,
+ "step": 1996
+ },
+ {
+ "epoch": 0.38,
+ "learning_rate": 1.410511202320975e-05,
+ "loss": 0.8432,
+ "step": 1997
+ },
+ {
+ "epoch": 0.38,
+ "learning_rate": 1.4099426211443619e-05,
+ "loss": 0.8468,
+ "step": 1998
+ },
+ {
+ "epoch": 0.38,
+ "learning_rate": 1.4093738806243458e-05,
+ "loss": 0.8294,
+ "step": 1999
+ },
+ {
+ "epoch": 0.38,
+ "learning_rate": 1.4088049809819939e-05,
+ "loss": 0.7814,
+ "step": 2000
+ },
+ {
+ "epoch": 0.39,
+ "learning_rate": 1.4082359224384361e-05,
+ "loss": 1.4731,
+ "step": 2001
+ },
+ {
+ "epoch": 0.39,
+ "learning_rate": 1.4076667052148635e-05,
+ "loss": 0.8233,
+ "step": 2002
+ },
+ {
+ "epoch": 0.39,
+ "learning_rate": 1.4070973295325294e-05,
+ "loss": 0.8542,
+ "step": 2003
+ },
+ {
+ "epoch": 0.39,
+ "learning_rate": 1.4065277956127478e-05,
+ "loss": 0.792,
+ "step": 2004
+ },
+ {
+ "epoch": 0.39,
+ "learning_rate": 1.405958103676895e-05,
+ "loss": 0.8438,
+ "step": 2005
+ },
+ {
+ "epoch": 0.39,
+ "learning_rate": 1.4053882539464083e-05,
+ "loss": 0.842,
+ "step": 2006
+ },
+ {
+ "epoch": 0.39,
+ "learning_rate": 1.404818246642787e-05,
+ "loss": 0.7613,
+ "step": 2007
+ },
+ {
+ "epoch": 0.39,
+ "learning_rate": 1.4042480819875907e-05,
+ "loss": 0.8467,
+ "step": 2008
+ },
+ {
+ "epoch": 0.39,
+ "learning_rate": 1.4036777602024405e-05,
+ "loss": 0.783,
+ "step": 2009
+ },
+ {
+ "epoch": 0.39,
+ "learning_rate": 1.4031072815090188e-05,
+ "loss": 0.8016,
+ "step": 2010
+ },
+ {
+ "epoch": 0.39,
+ "learning_rate": 1.402536646129069e-05,
+ "loss": 1.3952,
+ "step": 2011
+ },
+ {
+ "epoch": 0.39,
+ "learning_rate": 1.4019658542843952e-05,
+ "loss": 0.806,
+ "step": 2012
+ },
+ {
+ "epoch": 0.39,
+ "learning_rate": 1.4013949061968626e-05,
+ "loss": 0.8161,
+ "step": 2013
+ },
+ {
+ "epoch": 0.39,
+ "learning_rate": 1.4008238020883965e-05,
+ "loss": 0.8789,
+ "step": 2014
+ },
+ {
+ "epoch": 0.39,
+ "learning_rate": 1.4002525421809831e-05,
+ "loss": 0.8288,
+ "step": 2015
+ },
+ {
+ "epoch": 0.39,
+ "learning_rate": 1.3996811266966704e-05,
+ "loss": 0.8349,
+ "step": 2016
+ },
+ {
+ "epoch": 0.39,
+ "learning_rate": 1.3991095558575645e-05,
+ "loss": 0.8191,
+ "step": 2017
+ },
+ {
+ "epoch": 0.39,
+ "learning_rate": 1.3985378298858341e-05,
+ "loss": 0.7958,
+ "step": 2018
+ },
+ {
+ "epoch": 0.39,
+ "learning_rate": 1.3979659490037065e-05,
+ "loss": 0.8051,
+ "step": 2019
+ },
+ {
+ "epoch": 0.39,
+ "learning_rate": 1.397393913433471e-05,
+ "loss": 0.8177,
+ "step": 2020
+ },
+ {
+ "epoch": 0.39,
+ "learning_rate": 1.396821723397475e-05,
+ "loss": 0.8265,
+ "step": 2021
+ },
+ {
+ "epoch": 0.39,
+ "learning_rate": 1.3962493791181275e-05,
+ "loss": 0.8376,
+ "step": 2022
+ },
+ {
+ "epoch": 0.39,
+ "learning_rate": 1.395676880817897e-05,
+ "loss": 0.8216,
+ "step": 2023
+ },
+ {
+ "epoch": 0.39,
+ "learning_rate": 1.3951042287193116e-05,
+ "loss": 0.8566,
+ "step": 2024
+ },
+ {
+ "epoch": 0.39,
+ "learning_rate": 1.3945314230449594e-05,
+ "loss": 0.8183,
+ "step": 2025
+ },
+ {
+ "epoch": 0.39,
+ "learning_rate": 1.393958464017488e-05,
+ "loss": 0.7981,
+ "step": 2026
+ },
+ {
+ "epoch": 0.39,
+ "learning_rate": 1.3933853518596048e-05,
+ "loss": 0.8454,
+ "step": 2027
+ },
+ {
+ "epoch": 0.39,
+ "learning_rate": 1.392812086794077e-05,
+ "loss": 0.8355,
+ "step": 2028
+ },
+ {
+ "epoch": 0.39,
+ "learning_rate": 1.3922386690437307e-05,
+ "loss": 0.8407,
+ "step": 2029
+ },
+ {
+ "epoch": 0.39,
+ "learning_rate": 1.3916650988314516e-05,
+ "loss": 0.7655,
+ "step": 2030
+ },
+ {
+ "epoch": 0.39,
+ "learning_rate": 1.3910913763801843e-05,
+ "loss": 0.8622,
+ "step": 2031
+ },
+ {
+ "epoch": 0.39,
+ "learning_rate": 1.3905175019129338e-05,
+ "loss": 1.5037,
+ "step": 2032
+ },
+ {
+ "epoch": 0.39,
+ "learning_rate": 1.3899434756527623e-05,
+ "loss": 0.8237,
+ "step": 2033
+ },
+ {
+ "epoch": 0.39,
+ "learning_rate": 1.3893692978227924e-05,
+ "loss": 0.8214,
+ "step": 2034
+ },
+ {
+ "epoch": 0.39,
+ "learning_rate": 1.3887949686462055e-05,
+ "loss": 0.8267,
+ "step": 2035
+ },
+ {
+ "epoch": 0.39,
+ "learning_rate": 1.388220488346241e-05,
+ "loss": 1.4523,
+ "step": 2036
+ },
+ {
+ "epoch": 0.39,
+ "learning_rate": 1.387645857146198e-05,
+ "loss": 0.8219,
+ "step": 2037
+ },
+ {
+ "epoch": 0.39,
+ "learning_rate": 1.3870710752694334e-05,
+ "loss": 0.8004,
+ "step": 2038
+ },
+ {
+ "epoch": 0.39,
+ "learning_rate": 1.3864961429393632e-05,
+ "loss": 0.8027,
+ "step": 2039
+ },
+ {
+ "epoch": 0.39,
+ "learning_rate": 1.3859210603794626e-05,
+ "loss": 0.8222,
+ "step": 2040
+ },
+ {
+ "epoch": 0.39,
+ "learning_rate": 1.3853458278132637e-05,
+ "loss": 0.8066,
+ "step": 2041
+ },
+ {
+ "epoch": 0.39,
+ "learning_rate": 1.3847704454643571e-05,
+ "loss": 0.8311,
+ "step": 2042
+ },
+ {
+ "epoch": 0.39,
+ "learning_rate": 1.3841949135563928e-05,
+ "loss": 0.8129,
+ "step": 2043
+ },
+ {
+ "epoch": 0.39,
+ "learning_rate": 1.3836192323130782e-05,
+ "loss": 0.8017,
+ "step": 2044
+ },
+ {
+ "epoch": 0.39,
+ "learning_rate": 1.3830434019581786e-05,
+ "loss": 0.7915,
+ "step": 2045
+ },
+ {
+ "epoch": 0.39,
+ "learning_rate": 1.3824674227155175e-05,
+ "loss": 0.8273,
+ "step": 2046
+ },
+ {
+ "epoch": 0.39,
+ "learning_rate": 1.381891294808976e-05,
+ "loss": 0.7652,
+ "step": 2047
+ },
+ {
+ "epoch": 0.39,
+ "learning_rate": 1.3813150184624936e-05,
+ "loss": 0.8004,
+ "step": 2048
+ },
+ {
+ "epoch": 0.39,
+ "learning_rate": 1.3807385939000668e-05,
+ "loss": 0.8169,
+ "step": 2049
+ },
+ {
+ "epoch": 0.39,
+ "learning_rate": 1.38016202134575e-05,
+ "loss": 0.8429,
+ "step": 2050
+ },
+ {
+ "epoch": 0.39,
+ "learning_rate": 1.3795853010236555e-05,
+ "loss": 0.8712,
+ "step": 2051
+ },
+ {
+ "epoch": 0.39,
+ "learning_rate": 1.379008433157952e-05,
+ "loss": 0.7793,
+ "step": 2052
+ },
+ {
+ "epoch": 0.4,
+ "learning_rate": 1.3784314179728668e-05,
+ "loss": 0.8289,
+ "step": 2053
+ },
+ {
+ "epoch": 0.4,
+ "learning_rate": 1.377854255692684e-05,
+ "loss": 0.8071,
+ "step": 2054
+ },
+ {
+ "epoch": 0.4,
+ "learning_rate": 1.377276946541744e-05,
+ "loss": 0.8841,
+ "step": 2055
+ },
+ {
+ "epoch": 0.4,
+ "learning_rate": 1.376699490744446e-05,
+ "loss": 0.8563,
+ "step": 2056
+ },
+ {
+ "epoch": 0.4,
+ "learning_rate": 1.3761218885252443e-05,
+ "loss": 0.81,
+ "step": 2057
+ },
+ {
+ "epoch": 0.4,
+ "learning_rate": 1.3755441401086517e-05,
+ "loss": 0.7953,
+ "step": 2058
+ },
+ {
+ "epoch": 0.4,
+ "learning_rate": 1.374966245719237e-05,
+ "loss": 0.8403,
+ "step": 2059
+ },
+ {
+ "epoch": 0.4,
+ "learning_rate": 1.3743882055816257e-05,
+ "loss": 0.821,
+ "step": 2060
+ },
+ {
+ "epoch": 0.4,
+ "learning_rate": 1.3738100199205007e-05,
+ "loss": 0.8011,
+ "step": 2061
+ },
+ {
+ "epoch": 0.4,
+ "learning_rate": 1.3732316889606005e-05,
+ "loss": 0.8251,
+ "step": 2062
+ },
+ {
+ "epoch": 0.4,
+ "learning_rate": 1.3726532129267206e-05,
+ "loss": 0.8257,
+ "step": 2063
+ },
+ {
+ "epoch": 0.4,
+ "learning_rate": 1.3720745920437128e-05,
+ "loss": 0.884,
+ "step": 2064
+ },
+ {
+ "epoch": 0.4,
+ "learning_rate": 1.3714958265364855e-05,
+ "loss": 0.8138,
+ "step": 2065
+ },
+ {
+ "epoch": 0.4,
+ "learning_rate": 1.3709169166300028e-05,
+ "loss": 0.8531,
+ "step": 2066
+ },
+ {
+ "epoch": 0.4,
+ "learning_rate": 1.3703378625492852e-05,
+ "loss": 0.8633,
+ "step": 2067
+ },
+ {
+ "epoch": 0.4,
+ "learning_rate": 1.369758664519409e-05,
+ "loss": 0.8609,
+ "step": 2068
+ },
+ {
+ "epoch": 0.4,
+ "learning_rate": 1.3691793227655066e-05,
+ "loss": 0.8702,
+ "step": 2069
+ },
+ {
+ "epoch": 0.4,
+ "learning_rate": 1.3685998375127669e-05,
+ "loss": 0.8105,
+ "step": 2070
+ },
+ {
+ "epoch": 0.4,
+ "learning_rate": 1.3680202089864331e-05,
+ "loss": 0.8124,
+ "step": 2071
+ },
+ {
+ "epoch": 0.4,
+ "learning_rate": 1.367440437411806e-05,
+ "loss": 0.8124,
+ "step": 2072
+ },
+ {
+ "epoch": 0.4,
+ "learning_rate": 1.3668605230142403e-05,
+ "loss": 0.8047,
+ "step": 2073
+ },
+ {
+ "epoch": 0.4,
+ "learning_rate": 1.3662804660191469e-05,
+ "loss": 0.7949,
+ "step": 2074
+ },
+ {
+ "epoch": 0.4,
+ "learning_rate": 1.3657002666519925e-05,
+ "loss": 0.7802,
+ "step": 2075
+ },
+ {
+ "epoch": 0.4,
+ "learning_rate": 1.3651199251382983e-05,
+ "loss": 0.755,
+ "step": 2076
+ },
+ {
+ "epoch": 0.4,
+ "learning_rate": 1.3645394417036414e-05,
+ "loss": 0.858,
+ "step": 2077
+ },
+ {
+ "epoch": 0.4,
+ "learning_rate": 1.3639588165736544e-05,
+ "loss": 0.8697,
+ "step": 2078
+ },
+ {
+ "epoch": 0.4,
+ "learning_rate": 1.3633780499740237e-05,
+ "loss": 0.8493,
+ "step": 2079
+ },
+ {
+ "epoch": 0.4,
+ "learning_rate": 1.3627971421304916e-05,
+ "loss": 0.823,
+ "step": 2080
+ },
+ {
+ "epoch": 0.4,
+ "learning_rate": 1.3622160932688553e-05,
+ "loss": 0.7997,
+ "step": 2081
+ },
+ {
+ "epoch": 0.4,
+ "learning_rate": 1.3616349036149668e-05,
+ "loss": 0.8064,
+ "step": 2082
+ },
+ {
+ "epoch": 0.4,
+ "learning_rate": 1.3610535733947325e-05,
+ "loss": 0.8588,
+ "step": 2083
+ },
+ {
+ "epoch": 0.4,
+ "learning_rate": 1.3604721028341137e-05,
+ "loss": 0.8451,
+ "step": 2084
+ },
+ {
+ "epoch": 0.4,
+ "learning_rate": 1.3598904921591261e-05,
+ "loss": 0.8,
+ "step": 2085
+ },
+ {
+ "epoch": 0.4,
+ "learning_rate": 1.3593087415958397e-05,
+ "loss": 0.7998,
+ "step": 2086
+ },
+ {
+ "epoch": 0.4,
+ "learning_rate": 1.3587268513703795e-05,
+ "loss": 0.8446,
+ "step": 2087
+ },
+ {
+ "epoch": 0.4,
+ "learning_rate": 1.3581448217089243e-05,
+ "loss": 0.7675,
+ "step": 2088
+ },
+ {
+ "epoch": 0.4,
+ "learning_rate": 1.3575626528377073e-05,
+ "loss": 1.4518,
+ "step": 2089
+ },
+ {
+ "epoch": 0.4,
+ "learning_rate": 1.3569803449830152e-05,
+ "loss": 0.8245,
+ "step": 2090
+ },
+ {
+ "epoch": 0.4,
+ "learning_rate": 1.3563978983711899e-05,
+ "loss": 0.817,
+ "step": 2091
+ },
+ {
+ "epoch": 0.4,
+ "learning_rate": 1.355815313228626e-05,
+ "loss": 0.8268,
+ "step": 2092
+ },
+ {
+ "epoch": 0.4,
+ "learning_rate": 1.3552325897817728e-05,
+ "loss": 0.8245,
+ "step": 2093
+ },
+ {
+ "epoch": 0.4,
+ "learning_rate": 1.3546497282571331e-05,
+ "loss": 0.7846,
+ "step": 2094
+ },
+ {
+ "epoch": 0.4,
+ "learning_rate": 1.3540667288812633e-05,
+ "loss": 0.7669,
+ "step": 2095
+ },
+ {
+ "epoch": 0.4,
+ "learning_rate": 1.3534835918807731e-05,
+ "loss": 0.7768,
+ "step": 2096
+ },
+ {
+ "epoch": 0.4,
+ "learning_rate": 1.3529003174823265e-05,
+ "loss": 0.8183,
+ "step": 2097
+ },
+ {
+ "epoch": 0.4,
+ "learning_rate": 1.35231690591264e-05,
+ "loss": 0.8425,
+ "step": 2098
+ },
+ {
+ "epoch": 0.4,
+ "learning_rate": 1.3517333573984842e-05,
+ "loss": 0.8123,
+ "step": 2099
+ },
+ {
+ "epoch": 0.4,
+ "learning_rate": 1.3511496721666823e-05,
+ "loss": 0.7654,
+ "step": 2100
+ },
+ {
+ "epoch": 0.4,
+ "learning_rate": 1.350565850444111e-05,
+ "loss": 0.8453,
+ "step": 2101
+ },
+ {
+ "epoch": 0.4,
+ "learning_rate": 1.3499818924577e-05,
+ "loss": 0.7931,
+ "step": 2102
+ },
+ {
+ "epoch": 0.4,
+ "learning_rate": 1.3493977984344317e-05,
+ "loss": 0.8105,
+ "step": 2103
+ },
+ {
+ "epoch": 0.41,
+ "learning_rate": 1.348813568601342e-05,
+ "loss": 0.8463,
+ "step": 2104
+ },
+ {
+ "epoch": 0.41,
+ "learning_rate": 1.348229203185519e-05,
+ "loss": 0.8329,
+ "step": 2105
+ },
+ {
+ "epoch": 0.41,
+ "learning_rate": 1.3476447024141038e-05,
+ "loss": 0.8123,
+ "step": 2106
+ },
+ {
+ "epoch": 0.41,
+ "learning_rate": 1.3470600665142894e-05,
+ "loss": 0.8507,
+ "step": 2107
+ },
+ {
+ "epoch": 0.41,
+ "learning_rate": 1.3464752957133226e-05,
+ "loss": 0.8071,
+ "step": 2108
+ },
+ {
+ "epoch": 0.41,
+ "learning_rate": 1.3458903902385018e-05,
+ "loss": 0.778,
+ "step": 2109
+ },
+ {
+ "epoch": 0.41,
+ "learning_rate": 1.3453053503171782e-05,
+ "loss": 0.8046,
+ "step": 2110
+ },
+ {
+ "epoch": 0.41,
+ "learning_rate": 1.3447201761767543e-05,
+ "loss": 0.8273,
+ "step": 2111
+ },
+ {
+ "epoch": 0.41,
+ "learning_rate": 1.344134868044686e-05,
+ "loss": 0.7741,
+ "step": 2112
+ },
+ {
+ "epoch": 0.41,
+ "learning_rate": 1.3435494261484804e-05,
+ "loss": 0.8587,
+ "step": 2113
+ },
+ {
+ "epoch": 0.41,
+ "learning_rate": 1.342963850715697e-05,
+ "loss": 0.8456,
+ "step": 2114
+ },
+ {
+ "epoch": 0.41,
+ "learning_rate": 1.3423781419739472e-05,
+ "loss": 0.8138,
+ "step": 2115
+ },
+ {
+ "epoch": 0.41,
+ "learning_rate": 1.341792300150894e-05,
+ "loss": 0.8785,
+ "step": 2116
+ },
+ {
+ "epoch": 0.41,
+ "learning_rate": 1.3412063254742527e-05,
+ "loss": 0.8028,
+ "step": 2117
+ },
+ {
+ "epoch": 0.41,
+ "learning_rate": 1.340620218171789e-05,
+ "loss": 0.8446,
+ "step": 2118
+ },
+ {
+ "epoch": 0.41,
+ "learning_rate": 1.340033978471322e-05,
+ "loss": 0.7998,
+ "step": 2119
+ },
+ {
+ "epoch": 0.41,
+ "learning_rate": 1.3394476066007202e-05,
+ "loss": 0.8054,
+ "step": 2120
+ },
+ {
+ "epoch": 0.41,
+ "learning_rate": 1.3388611027879052e-05,
+ "loss": 0.7546,
+ "step": 2121
+ },
+ {
+ "epoch": 0.41,
+ "learning_rate": 1.3382744672608489e-05,
+ "loss": 0.8334,
+ "step": 2122
+ },
+ {
+ "epoch": 0.41,
+ "learning_rate": 1.3376877002475746e-05,
+ "loss": 0.8111,
+ "step": 2123
+ },
+ {
+ "epoch": 0.41,
+ "learning_rate": 1.3371008019761568e-05,
+ "loss": 0.8494,
+ "step": 2124
+ },
+ {
+ "epoch": 0.41,
+ "learning_rate": 1.3365137726747213e-05,
+ "loss": 0.8299,
+ "step": 2125
+ },
+ {
+ "epoch": 0.41,
+ "learning_rate": 1.3359266125714443e-05,
+ "loss": 0.8418,
+ "step": 2126
+ },
+ {
+ "epoch": 0.41,
+ "learning_rate": 1.3353393218945532e-05,
+ "loss": 0.8157,
+ "step": 2127
+ },
+ {
+ "epoch": 0.41,
+ "learning_rate": 1.3347519008723258e-05,
+ "loss": 1.6095,
+ "step": 2128
+ },
+ {
+ "epoch": 0.41,
+ "learning_rate": 1.3341643497330911e-05,
+ "loss": 0.8543,
+ "step": 2129
+ },
+ {
+ "epoch": 0.41,
+ "learning_rate": 1.333576668705228e-05,
+ "loss": 0.8448,
+ "step": 2130
+ },
+ {
+ "epoch": 0.41,
+ "learning_rate": 1.3329888580171666e-05,
+ "loss": 0.8678,
+ "step": 2131
+ },
+ {
+ "epoch": 0.41,
+ "learning_rate": 1.3324009178973872e-05,
+ "loss": 0.7885,
+ "step": 2132
+ },
+ {
+ "epoch": 0.41,
+ "learning_rate": 1.3318128485744197e-05,
+ "loss": 0.8342,
+ "step": 2133
+ },
+ {
+ "epoch": 0.41,
+ "learning_rate": 1.3312246502768453e-05,
+ "loss": 0.8349,
+ "step": 2134
+ },
+ {
+ "epoch": 0.41,
+ "learning_rate": 1.3306363232332944e-05,
+ "loss": 0.8206,
+ "step": 2135
+ },
+ {
+ "epoch": 0.41,
+ "learning_rate": 1.3300478676724482e-05,
+ "loss": 0.7821,
+ "step": 2136
+ },
+ {
+ "epoch": 0.41,
+ "learning_rate": 1.3294592838230375e-05,
+ "loss": 0.7722,
+ "step": 2137
+ },
+ {
+ "epoch": 0.41,
+ "learning_rate": 1.3288705719138429e-05,
+ "loss": 0.7724,
+ "step": 2138
+ },
+ {
+ "epoch": 0.41,
+ "learning_rate": 1.3282817321736946e-05,
+ "loss": 0.8028,
+ "step": 2139
+ },
+ {
+ "epoch": 0.41,
+ "learning_rate": 1.3276927648314732e-05,
+ "loss": 0.7467,
+ "step": 2140
+ },
+ {
+ "epoch": 0.41,
+ "learning_rate": 1.3271036701161082e-05,
+ "loss": 1.5157,
+ "step": 2141
+ },
+ {
+ "epoch": 0.41,
+ "learning_rate": 1.326514448256579e-05,
+ "loss": 0.7712,
+ "step": 2142
+ },
+ {
+ "epoch": 0.41,
+ "learning_rate": 1.3259250994819141e-05,
+ "loss": 0.8354,
+ "step": 2143
+ },
+ {
+ "epoch": 0.41,
+ "learning_rate": 1.3253356240211917e-05,
+ "loss": 0.7842,
+ "step": 2144
+ },
+ {
+ "epoch": 0.41,
+ "learning_rate": 1.324746022103539e-05,
+ "loss": 0.8298,
+ "step": 2145
+ },
+ {
+ "epoch": 0.41,
+ "learning_rate": 1.324156293958132e-05,
+ "loss": 0.8105,
+ "step": 2146
+ },
+ {
+ "epoch": 0.41,
+ "learning_rate": 1.3235664398141968e-05,
+ "loss": 0.8203,
+ "step": 2147
+ },
+ {
+ "epoch": 0.41,
+ "learning_rate": 1.3229764599010077e-05,
+ "loss": 0.7636,
+ "step": 2148
+ },
+ {
+ "epoch": 0.41,
+ "learning_rate": 1.3223863544478875e-05,
+ "loss": 0.8246,
+ "step": 2149
+ },
+ {
+ "epoch": 0.41,
+ "learning_rate": 1.3217961236842089e-05,
+ "loss": 0.9116,
+ "step": 2150
+ },
+ {
+ "epoch": 0.41,
+ "learning_rate": 1.3212057678393926e-05,
+ "loss": 0.7909,
+ "step": 2151
+ },
+ {
+ "epoch": 0.41,
+ "learning_rate": 1.320615287142908e-05,
+ "loss": 0.8605,
+ "step": 2152
+ },
+ {
+ "epoch": 0.41,
+ "learning_rate": 1.3200246818242733e-05,
+ "loss": 0.8387,
+ "step": 2153
+ },
+ {
+ "epoch": 0.41,
+ "learning_rate": 1.3194339521130544e-05,
+ "loss": 0.8246,
+ "step": 2154
+ },
+ {
+ "epoch": 0.41,
+ "learning_rate": 1.3188430982388664e-05,
+ "loss": 0.8535,
+ "step": 2155
+ },
+ {
+ "epoch": 0.42,
+ "learning_rate": 1.3182521204313726e-05,
+ "loss": 0.8546,
+ "step": 2156
+ },
+ {
+ "epoch": 0.42,
+ "learning_rate": 1.3176610189202836e-05,
+ "loss": 0.8126,
+ "step": 2157
+ },
+ {
+ "epoch": 0.42,
+ "learning_rate": 1.3170697939353593e-05,
+ "loss": 0.7746,
+ "step": 2158
+ },
+ {
+ "epoch": 0.42,
+ "learning_rate": 1.3164784457064067e-05,
+ "loss": 0.8113,
+ "step": 2159
+ },
+ {
+ "epoch": 0.42,
+ "learning_rate": 1.3158869744632807e-05,
+ "loss": 1.5595,
+ "step": 2160
+ },
+ {
+ "epoch": 0.42,
+ "learning_rate": 1.315295380435885e-05,
+ "loss": 0.7639,
+ "step": 2161
+ },
+ {
+ "epoch": 0.42,
+ "learning_rate": 1.3147036638541698e-05,
+ "loss": 0.8094,
+ "step": 2162
+ },
+ {
+ "epoch": 0.42,
+ "learning_rate": 1.3141118249481335e-05,
+ "loss": 0.8076,
+ "step": 2163
+ },
+ {
+ "epoch": 0.42,
+ "learning_rate": 1.3135198639478222e-05,
+ "loss": 0.7325,
+ "step": 2164
+ },
+ {
+ "epoch": 0.42,
+ "learning_rate": 1.3129277810833292e-05,
+ "loss": 0.7562,
+ "step": 2165
+ },
+ {
+ "epoch": 0.42,
+ "learning_rate": 1.3123355765847952e-05,
+ "loss": 0.8102,
+ "step": 2166
+ },
+ {
+ "epoch": 0.42,
+ "learning_rate": 1.3117432506824083e-05,
+ "loss": 0.7741,
+ "step": 2167
+ },
+ {
+ "epoch": 0.42,
+ "learning_rate": 1.3111508036064037e-05,
+ "loss": 0.8393,
+ "step": 2168
+ },
+ {
+ "epoch": 0.42,
+ "learning_rate": 1.3105582355870636e-05,
+ "loss": 0.8461,
+ "step": 2169
+ },
+ {
+ "epoch": 0.42,
+ "learning_rate": 1.3099655468547174e-05,
+ "loss": 0.8486,
+ "step": 2170
+ },
+ {
+ "epoch": 0.42,
+ "learning_rate": 1.3093727376397413e-05,
+ "loss": 0.7992,
+ "step": 2171
+ },
+ {
+ "epoch": 0.42,
+ "learning_rate": 1.3087798081725587e-05,
+ "loss": 1.5695,
+ "step": 2172
+ },
+ {
+ "epoch": 0.42,
+ "learning_rate": 1.3081867586836388e-05,
+ "loss": 0.7966,
+ "step": 2173
+ },
+ {
+ "epoch": 0.42,
+ "learning_rate": 1.3075935894034984e-05,
+ "loss": 0.8765,
+ "step": 2174
+ },
+ {
+ "epoch": 0.42,
+ "learning_rate": 1.3070003005627008e-05,
+ "loss": 0.7865,
+ "step": 2175
+ },
+ {
+ "epoch": 0.42,
+ "learning_rate": 1.3064068923918551e-05,
+ "loss": 1.4331,
+ "step": 2176
+ },
+ {
+ "epoch": 0.42,
+ "learning_rate": 1.3058133651216172e-05,
+ "loss": 0.7614,
+ "step": 2177
+ },
+ {
+ "epoch": 0.42,
+ "learning_rate": 1.3052197189826892e-05,
+ "loss": 0.7823,
+ "step": 2178
+ },
+ {
+ "epoch": 0.42,
+ "learning_rate": 1.3046259542058198e-05,
+ "loss": 0.8641,
+ "step": 2179
+ },
+ {
+ "epoch": 0.42,
+ "learning_rate": 1.3040320710218035e-05,
+ "loss": 0.8162,
+ "step": 2180
+ },
+ {
+ "epoch": 0.42,
+ "learning_rate": 1.3034380696614806e-05,
+ "loss": 1.3658,
+ "step": 2181
+ },
+ {
+ "epoch": 0.42,
+ "learning_rate": 1.3028439503557376e-05,
+ "loss": 0.8345,
+ "step": 2182
+ },
+ {
+ "epoch": 0.42,
+ "learning_rate": 1.3022497133355066e-05,
+ "loss": 0.7922,
+ "step": 2183
+ },
+ {
+ "epoch": 0.42,
+ "learning_rate": 1.3016553588317662e-05,
+ "loss": 1.4329,
+ "step": 2184
+ },
+ {
+ "epoch": 0.42,
+ "learning_rate": 1.3010608870755395e-05,
+ "loss": 0.8166,
+ "step": 2185
+ },
+ {
+ "epoch": 0.42,
+ "learning_rate": 1.3004662982978964e-05,
+ "loss": 0.7827,
+ "step": 2186
+ },
+ {
+ "epoch": 0.42,
+ "learning_rate": 1.2998715927299513e-05,
+ "loss": 0.8293,
+ "step": 2187
+ },
+ {
+ "epoch": 0.42,
+ "learning_rate": 1.2992767706028641e-05,
+ "loss": 0.8503,
+ "step": 2188
+ },
+ {
+ "epoch": 0.42,
+ "learning_rate": 1.298681832147841e-05,
+ "loss": 0.7854,
+ "step": 2189
+ },
+ {
+ "epoch": 0.42,
+ "learning_rate": 1.2980867775961324e-05,
+ "loss": 0.8536,
+ "step": 2190
+ },
+ {
+ "epoch": 0.42,
+ "learning_rate": 1.2974916071790337e-05,
+ "loss": 1.3462,
+ "step": 2191
+ },
+ {
+ "epoch": 0.42,
+ "learning_rate": 1.2968963211278865e-05,
+ "loss": 0.8058,
+ "step": 2192
+ },
+ {
+ "epoch": 0.42,
+ "learning_rate": 1.2963009196740762e-05,
+ "loss": 0.798,
+ "step": 2193
+ },
+ {
+ "epoch": 0.42,
+ "learning_rate": 1.2957054030490332e-05,
+ "loss": 0.8304,
+ "step": 2194
+ },
+ {
+ "epoch": 0.42,
+ "learning_rate": 1.2951097714842331e-05,
+ "loss": 0.7906,
+ "step": 2195
+ },
+ {
+ "epoch": 0.42,
+ "learning_rate": 1.2945140252111967e-05,
+ "loss": 1.4169,
+ "step": 2196
+ },
+ {
+ "epoch": 0.42,
+ "learning_rate": 1.293918164461488e-05,
+ "loss": 0.7907,
+ "step": 2197
+ },
+ {
+ "epoch": 0.42,
+ "learning_rate": 1.293322189466716e-05,
+ "loss": 0.8019,
+ "step": 2198
+ },
+ {
+ "epoch": 0.42,
+ "learning_rate": 1.2927261004585347e-05,
+ "loss": 1.3729,
+ "step": 2199
+ },
+ {
+ "epoch": 0.42,
+ "learning_rate": 1.2921298976686421e-05,
+ "loss": 0.8585,
+ "step": 2200
+ },
+ {
+ "epoch": 0.42,
+ "learning_rate": 1.2915335813287802e-05,
+ "loss": 0.8877,
+ "step": 2201
+ },
+ {
+ "epoch": 0.42,
+ "learning_rate": 1.2909371516707355e-05,
+ "loss": 1.3763,
+ "step": 2202
+ },
+ {
+ "epoch": 0.42,
+ "learning_rate": 1.2903406089263375e-05,
+ "loss": 0.8311,
+ "step": 2203
+ },
+ {
+ "epoch": 0.42,
+ "learning_rate": 1.2897439533274615e-05,
+ "loss": 0.7956,
+ "step": 2204
+ },
+ {
+ "epoch": 0.42,
+ "learning_rate": 1.2891471851060251e-05,
+ "loss": 0.8031,
+ "step": 2205
+ },
+ {
+ "epoch": 0.42,
+ "learning_rate": 1.2885503044939903e-05,
+ "loss": 0.8064,
+ "step": 2206
+ },
+ {
+ "epoch": 0.42,
+ "learning_rate": 1.2879533117233628e-05,
+ "loss": 0.7661,
+ "step": 2207
+ },
+ {
+ "epoch": 0.43,
+ "learning_rate": 1.2873562070261918e-05,
+ "loss": 0.8124,
+ "step": 2208
+ },
+ {
+ "epoch": 0.43,
+ "learning_rate": 1.28675899063457e-05,
+ "loss": 0.8199,
+ "step": 2209
+ },
+ {
+ "epoch": 0.43,
+ "learning_rate": 1.2861616627806334e-05,
+ "loss": 0.8269,
+ "step": 2210
+ },
+ {
+ "epoch": 0.43,
+ "learning_rate": 1.2855642236965615e-05,
+ "loss": 0.8122,
+ "step": 2211
+ },
+ {
+ "epoch": 0.43,
+ "learning_rate": 1.2849666736145774e-05,
+ "loss": 0.8029,
+ "step": 2212
+ },
+ {
+ "epoch": 0.43,
+ "learning_rate": 1.284369012766946e-05,
+ "loss": 0.8263,
+ "step": 2213
+ },
+ {
+ "epoch": 0.43,
+ "learning_rate": 1.2837712413859773e-05,
+ "loss": 0.8007,
+ "step": 2214
+ },
+ {
+ "epoch": 0.43,
+ "learning_rate": 1.2831733597040224e-05,
+ "loss": 0.8125,
+ "step": 2215
+ },
+ {
+ "epoch": 0.43,
+ "learning_rate": 1.2825753679534762e-05,
+ "loss": 0.8223,
+ "step": 2216
+ },
+ {
+ "epoch": 0.43,
+ "learning_rate": 1.2819772663667762e-05,
+ "loss": 0.79,
+ "step": 2217
+ },
+ {
+ "epoch": 0.43,
+ "learning_rate": 1.281379055176403e-05,
+ "loss": 0.867,
+ "step": 2218
+ },
+ {
+ "epoch": 0.43,
+ "learning_rate": 1.2807807346148786e-05,
+ "loss": 1.4071,
+ "step": 2219
+ },
+ {
+ "epoch": 0.43,
+ "learning_rate": 1.2801823049147689e-05,
+ "loss": 0.8119,
+ "step": 2220
+ },
+ {
+ "epoch": 0.43,
+ "learning_rate": 1.2795837663086812e-05,
+ "loss": 0.8368,
+ "step": 2221
+ },
+ {
+ "epoch": 0.43,
+ "learning_rate": 1.2789851190292664e-05,
+ "loss": 0.8273,
+ "step": 2222
+ },
+ {
+ "epoch": 0.43,
+ "learning_rate": 1.2783863633092159e-05,
+ "loss": 0.8272,
+ "step": 2223
+ },
+ {
+ "epoch": 0.43,
+ "learning_rate": 1.2777874993812646e-05,
+ "loss": 0.8475,
+ "step": 2224
+ },
+ {
+ "epoch": 0.43,
+ "learning_rate": 1.277188527478189e-05,
+ "loss": 0.762,
+ "step": 2225
+ },
+ {
+ "epoch": 0.43,
+ "learning_rate": 1.2765894478328073e-05,
+ "loss": 0.8076,
+ "step": 2226
+ },
+ {
+ "epoch": 0.43,
+ "learning_rate": 1.2759902606779804e-05,
+ "loss": 0.7958,
+ "step": 2227
+ },
+ {
+ "epoch": 0.43,
+ "learning_rate": 1.2753909662466099e-05,
+ "loss": 0.8331,
+ "step": 2228
+ },
+ {
+ "epoch": 0.43,
+ "learning_rate": 1.2747915647716403e-05,
+ "loss": 0.8075,
+ "step": 2229
+ },
+ {
+ "epoch": 0.43,
+ "learning_rate": 1.2741920564860562e-05,
+ "loss": 0.8216,
+ "step": 2230
+ },
+ {
+ "epoch": 0.43,
+ "learning_rate": 1.2735924416228853e-05,
+ "loss": 1.4252,
+ "step": 2231
+ },
+ {
+ "epoch": 0.43,
+ "learning_rate": 1.2729927204151958e-05,
+ "loss": 0.8264,
+ "step": 2232
+ },
+ {
+ "epoch": 0.43,
+ "learning_rate": 1.2723928930960972e-05,
+ "loss": 0.8397,
+ "step": 2233
+ },
+ {
+ "epoch": 0.43,
+ "learning_rate": 1.2717929598987406e-05,
+ "loss": 0.7639,
+ "step": 2234
+ },
+ {
+ "epoch": 0.43,
+ "learning_rate": 1.2711929210563187e-05,
+ "loss": 0.8296,
+ "step": 2235
+ },
+ {
+ "epoch": 0.43,
+ "learning_rate": 1.2705927768020638e-05,
+ "loss": 0.791,
+ "step": 2236
+ },
+ {
+ "epoch": 0.43,
+ "learning_rate": 1.2699925273692507e-05,
+ "loss": 0.7577,
+ "step": 2237
+ },
+ {
+ "epoch": 0.43,
+ "learning_rate": 1.269392172991194e-05,
+ "loss": 0.8188,
+ "step": 2238
+ },
+ {
+ "epoch": 0.43,
+ "learning_rate": 1.26879171390125e-05,
+ "loss": 0.7987,
+ "step": 2239
+ },
+ {
+ "epoch": 0.43,
+ "learning_rate": 1.2681911503328153e-05,
+ "loss": 0.7623,
+ "step": 2240
+ },
+ {
+ "epoch": 0.43,
+ "learning_rate": 1.2675904825193264e-05,
+ "loss": 0.8523,
+ "step": 2241
+ },
+ {
+ "epoch": 0.43,
+ "learning_rate": 1.2669897106942615e-05,
+ "loss": 0.8383,
+ "step": 2242
+ },
+ {
+ "epoch": 0.43,
+ "learning_rate": 1.2663888350911389e-05,
+ "loss": 0.8324,
+ "step": 2243
+ },
+ {
+ "epoch": 0.43,
+ "learning_rate": 1.2657878559435166e-05,
+ "loss": 0.855,
+ "step": 2244
+ },
+ {
+ "epoch": 0.43,
+ "learning_rate": 1.2651867734849935e-05,
+ "loss": 0.8265,
+ "step": 2245
+ },
+ {
+ "epoch": 0.43,
+ "learning_rate": 1.2645855879492084e-05,
+ "loss": 0.8886,
+ "step": 2246
+ },
+ {
+ "epoch": 0.43,
+ "learning_rate": 1.2639842995698402e-05,
+ "loss": 0.8413,
+ "step": 2247
+ },
+ {
+ "epoch": 0.43,
+ "learning_rate": 1.263382908580608e-05,
+ "loss": 0.8602,
+ "step": 2248
+ },
+ {
+ "epoch": 0.43,
+ "learning_rate": 1.2627814152152702e-05,
+ "loss": 0.8541,
+ "step": 2249
+ },
+ {
+ "epoch": 0.43,
+ "learning_rate": 1.2621798197076259e-05,
+ "loss": 0.836,
+ "step": 2250
+ },
+ {
+ "epoch": 0.43,
+ "learning_rate": 1.261578122291513e-05,
+ "loss": 0.8026,
+ "step": 2251
+ },
+ {
+ "epoch": 0.43,
+ "learning_rate": 1.2609763232008092e-05,
+ "loss": 0.8664,
+ "step": 2252
+ },
+ {
+ "epoch": 0.43,
+ "learning_rate": 1.2603744226694322e-05,
+ "loss": 0.8095,
+ "step": 2253
+ },
+ {
+ "epoch": 0.43,
+ "learning_rate": 1.2597724209313392e-05,
+ "loss": 0.774,
+ "step": 2254
+ },
+ {
+ "epoch": 0.43,
+ "learning_rate": 1.2591703182205257e-05,
+ "loss": 0.868,
+ "step": 2255
+ },
+ {
+ "epoch": 0.43,
+ "learning_rate": 1.2585681147710276e-05,
+ "loss": 0.7662,
+ "step": 2256
+ },
+ {
+ "epoch": 0.43,
+ "learning_rate": 1.257965810816919e-05,
+ "loss": 0.8033,
+ "step": 2257
+ },
+ {
+ "epoch": 0.43,
+ "learning_rate": 1.257363406592314e-05,
+ "loss": 0.7772,
+ "step": 2258
+ },
+ {
+ "epoch": 0.43,
+ "learning_rate": 1.2567609023313648e-05,
+ "loss": 0.7474,
+ "step": 2259
+ },
+ {
+ "epoch": 0.44,
+ "learning_rate": 1.2561582982682634e-05,
+ "loss": 0.8176,
+ "step": 2260
+ },
+ {
+ "epoch": 0.44,
+ "learning_rate": 1.25555559463724e-05,
+ "loss": 0.7517,
+ "step": 2261
+ },
+ {
+ "epoch": 0.44,
+ "learning_rate": 1.2549527916725632e-05,
+ "loss": 0.7591,
+ "step": 2262
+ },
+ {
+ "epoch": 0.44,
+ "learning_rate": 1.2543498896085409e-05,
+ "loss": 0.7999,
+ "step": 2263
+ },
+ {
+ "epoch": 0.44,
+ "learning_rate": 1.253746888679519e-05,
+ "loss": 0.7495,
+ "step": 2264
+ },
+ {
+ "epoch": 0.44,
+ "learning_rate": 1.2531437891198825e-05,
+ "loss": 0.8399,
+ "step": 2265
+ },
+ {
+ "epoch": 0.44,
+ "learning_rate": 1.2525405911640542e-05,
+ "loss": 0.7863,
+ "step": 2266
+ },
+ {
+ "epoch": 0.44,
+ "learning_rate": 1.251937295046495e-05,
+ "loss": 0.8071,
+ "step": 2267
+ },
+ {
+ "epoch": 0.44,
+ "learning_rate": 1.251333901001704e-05,
+ "loss": 0.8048,
+ "step": 2268
+ },
+ {
+ "epoch": 0.44,
+ "learning_rate": 1.2507304092642191e-05,
+ "loss": 1.4082,
+ "step": 2269
+ },
+ {
+ "epoch": 0.44,
+ "learning_rate": 1.2501268200686153e-05,
+ "loss": 0.8152,
+ "step": 2270
+ },
+ {
+ "epoch": 0.44,
+ "learning_rate": 1.249523133649506e-05,
+ "loss": 0.829,
+ "step": 2271
+ },
+ {
+ "epoch": 0.44,
+ "learning_rate": 1.248919350241542e-05,
+ "loss": 0.8693,
+ "step": 2272
+ },
+ {
+ "epoch": 0.44,
+ "learning_rate": 1.2483154700794122e-05,
+ "loss": 0.8201,
+ "step": 2273
+ },
+ {
+ "epoch": 0.44,
+ "learning_rate": 1.2477114933978429e-05,
+ "loss": 0.8002,
+ "step": 2274
+ },
+ {
+ "epoch": 0.44,
+ "learning_rate": 1.2471074204315975e-05,
+ "loss": 0.81,
+ "step": 2275
+ },
+ {
+ "epoch": 0.44,
+ "learning_rate": 1.2465032514154778e-05,
+ "loss": 0.7809,
+ "step": 2276
+ },
+ {
+ "epoch": 0.44,
+ "learning_rate": 1.2458989865843223e-05,
+ "loss": 0.8241,
+ "step": 2277
+ },
+ {
+ "epoch": 0.44,
+ "learning_rate": 1.2452946261730069e-05,
+ "loss": 0.812,
+ "step": 2278
+ },
+ {
+ "epoch": 0.44,
+ "learning_rate": 1.2446901704164441e-05,
+ "loss": 0.8027,
+ "step": 2279
+ },
+ {
+ "epoch": 0.44,
+ "learning_rate": 1.244085619549584e-05,
+ "loss": 0.8353,
+ "step": 2280
+ },
+ {
+ "epoch": 0.44,
+ "learning_rate": 1.2434809738074144e-05,
+ "loss": 0.8479,
+ "step": 2281
+ },
+ {
+ "epoch": 0.44,
+ "learning_rate": 1.2428762334249584e-05,
+ "loss": 0.8409,
+ "step": 2282
+ },
+ {
+ "epoch": 0.44,
+ "learning_rate": 1.2422713986372772e-05,
+ "loss": 1.4932,
+ "step": 2283
+ },
+ {
+ "epoch": 0.44,
+ "learning_rate": 1.2416664696794675e-05,
+ "loss": 0.8269,
+ "step": 2284
+ },
+ {
+ "epoch": 0.44,
+ "learning_rate": 1.241061446786664e-05,
+ "loss": 0.787,
+ "step": 2285
+ },
+ {
+ "epoch": 0.44,
+ "learning_rate": 1.2404563301940366e-05,
+ "loss": 0.8458,
+ "step": 2286
+ },
+ {
+ "epoch": 0.44,
+ "learning_rate": 1.239851120136793e-05,
+ "loss": 0.7956,
+ "step": 2287
+ },
+ {
+ "epoch": 0.44,
+ "learning_rate": 1.2392458168501755e-05,
+ "loss": 0.8396,
+ "step": 2288
+ },
+ {
+ "epoch": 0.44,
+ "learning_rate": 1.238640420569464e-05,
+ "loss": 0.8343,
+ "step": 2289
+ },
+ {
+ "epoch": 0.44,
+ "learning_rate": 1.2380349315299744e-05,
+ "loss": 0.8175,
+ "step": 2290
+ },
+ {
+ "epoch": 0.44,
+ "learning_rate": 1.237429349967058e-05,
+ "loss": 0.828,
+ "step": 2291
+ },
+ {
+ "epoch": 0.44,
+ "learning_rate": 1.2368236761161023e-05,
+ "loss": 0.8628,
+ "step": 2292
+ },
+ {
+ "epoch": 0.44,
+ "learning_rate": 1.2362179102125315e-05,
+ "loss": 0.8096,
+ "step": 2293
+ },
+ {
+ "epoch": 0.44,
+ "learning_rate": 1.2356120524918045e-05,
+ "loss": 0.7894,
+ "step": 2294
+ },
+ {
+ "epoch": 0.44,
+ "learning_rate": 1.2350061031894159e-05,
+ "loss": 0.8139,
+ "step": 2295
+ },
+ {
+ "epoch": 0.44,
+ "learning_rate": 1.2344000625408964e-05,
+ "loss": 0.8311,
+ "step": 2296
+ },
+ {
+ "epoch": 0.44,
+ "learning_rate": 1.2337939307818127e-05,
+ "loss": 0.7878,
+ "step": 2297
+ },
+ {
+ "epoch": 0.44,
+ "learning_rate": 1.2331877081477661e-05,
+ "loss": 0.8472,
+ "step": 2298
+ },
+ {
+ "epoch": 0.44,
+ "learning_rate": 1.232581394874393e-05,
+ "loss": 0.8502,
+ "step": 2299
+ },
+ {
+ "epoch": 0.44,
+ "learning_rate": 1.2319749911973658e-05,
+ "loss": 0.8055,
+ "step": 2300
+ },
+ {
+ "epoch": 0.44,
+ "learning_rate": 1.2313684973523914e-05,
+ "loss": 0.8065,
+ "step": 2301
+ },
+ {
+ "epoch": 0.44,
+ "learning_rate": 1.2307619135752123e-05,
+ "loss": 0.8161,
+ "step": 2302
+ },
+ {
+ "epoch": 0.44,
+ "learning_rate": 1.2301552401016058e-05,
+ "loss": 0.7736,
+ "step": 2303
+ },
+ {
+ "epoch": 0.44,
+ "learning_rate": 1.2295484771673838e-05,
+ "loss": 0.778,
+ "step": 2304
+ },
+ {
+ "epoch": 0.44,
+ "learning_rate": 1.228941625008393e-05,
+ "loss": 0.8154,
+ "step": 2305
+ },
+ {
+ "epoch": 0.44,
+ "learning_rate": 1.2283346838605148e-05,
+ "loss": 0.8406,
+ "step": 2306
+ },
+ {
+ "epoch": 0.44,
+ "learning_rate": 1.227727653959666e-05,
+ "loss": 0.7576,
+ "step": 2307
+ },
+ {
+ "epoch": 0.44,
+ "learning_rate": 1.2271205355417966e-05,
+ "loss": 0.7654,
+ "step": 2308
+ },
+ {
+ "epoch": 0.44,
+ "learning_rate": 1.2265133288428919e-05,
+ "loss": 0.8159,
+ "step": 2309
+ },
+ {
+ "epoch": 0.44,
+ "learning_rate": 1.225906034098971e-05,
+ "loss": 0.8119,
+ "step": 2310
+ },
+ {
+ "epoch": 0.44,
+ "learning_rate": 1.2252986515460878e-05,
+ "loss": 0.8275,
+ "step": 2311
+ },
+ {
+ "epoch": 0.45,
+ "learning_rate": 1.2246911814203295e-05,
+ "loss": 0.8664,
+ "step": 2312
+ },
+ {
+ "epoch": 0.45,
+ "learning_rate": 1.224083623957818e-05,
+ "loss": 0.7816,
+ "step": 2313
+ },
+ {
+ "epoch": 0.45,
+ "learning_rate": 1.2234759793947092e-05,
+ "loss": 0.8186,
+ "step": 2314
+ },
+ {
+ "epoch": 0.45,
+ "learning_rate": 1.2228682479671927e-05,
+ "loss": 0.7581,
+ "step": 2315
+ },
+ {
+ "epoch": 0.45,
+ "learning_rate": 1.2222604299114911e-05,
+ "loss": 0.7987,
+ "step": 2316
+ },
+ {
+ "epoch": 0.45,
+ "learning_rate": 1.2216525254638618e-05,
+ "loss": 0.8287,
+ "step": 2317
+ },
+ {
+ "epoch": 0.45,
+ "learning_rate": 1.2210445348605954e-05,
+ "loss": 0.8238,
+ "step": 2318
+ },
+ {
+ "epoch": 0.45,
+ "learning_rate": 1.2204364583380164e-05,
+ "loss": 0.8154,
+ "step": 2319
+ },
+ {
+ "epoch": 0.45,
+ "learning_rate": 1.219828296132481e-05,
+ "loss": 0.8225,
+ "step": 2320
+ },
+ {
+ "epoch": 0.45,
+ "learning_rate": 1.2192200484803812e-05,
+ "loss": 0.7518,
+ "step": 2321
+ },
+ {
+ "epoch": 0.45,
+ "learning_rate": 1.2186117156181401e-05,
+ "loss": 0.8165,
+ "step": 2322
+ },
+ {
+ "epoch": 0.45,
+ "learning_rate": 1.2180032977822149e-05,
+ "loss": 0.8216,
+ "step": 2323
+ },
+ {
+ "epoch": 0.45,
+ "learning_rate": 1.2173947952090961e-05,
+ "loss": 0.8351,
+ "step": 2324
+ },
+ {
+ "epoch": 0.45,
+ "learning_rate": 1.2167862081353063e-05,
+ "loss": 0.8333,
+ "step": 2325
+ },
+ {
+ "epoch": 0.45,
+ "learning_rate": 1.2161775367974019e-05,
+ "loss": 0.8646,
+ "step": 2326
+ },
+ {
+ "epoch": 0.45,
+ "learning_rate": 1.2155687814319705e-05,
+ "loss": 0.7792,
+ "step": 2327
+ },
+ {
+ "epoch": 0.45,
+ "learning_rate": 1.2149599422756347e-05,
+ "loss": 0.8777,
+ "step": 2328
+ },
+ {
+ "epoch": 0.45,
+ "learning_rate": 1.2143510195650473e-05,
+ "loss": 0.8441,
+ "step": 2329
+ },
+ {
+ "epoch": 0.45,
+ "learning_rate": 1.2137420135368951e-05,
+ "loss": 0.8,
+ "step": 2330
+ },
+ {
+ "epoch": 0.45,
+ "learning_rate": 1.2131329244278969e-05,
+ "loss": 0.8349,
+ "step": 2331
+ },
+ {
+ "epoch": 0.45,
+ "learning_rate": 1.2125237524748037e-05,
+ "loss": 0.8006,
+ "step": 2332
+ },
+ {
+ "epoch": 0.45,
+ "learning_rate": 1.2119144979143984e-05,
+ "loss": 0.8532,
+ "step": 2333
+ },
+ {
+ "epoch": 0.45,
+ "learning_rate": 1.2113051609834966e-05,
+ "loss": 1.5857,
+ "step": 2334
+ },
+ {
+ "epoch": 0.45,
+ "learning_rate": 1.2106957419189453e-05,
+ "loss": 0.8363,
+ "step": 2335
+ },
+ {
+ "epoch": 0.45,
+ "learning_rate": 1.2100862409576243e-05,
+ "loss": 0.7795,
+ "step": 2336
+ },
+ {
+ "epoch": 0.45,
+ "learning_rate": 1.2094766583364445e-05,
+ "loss": 0.8689,
+ "step": 2337
+ },
+ {
+ "epoch": 0.45,
+ "learning_rate": 1.2088669942923486e-05,
+ "loss": 0.7989,
+ "step": 2338
+ },
+ {
+ "epoch": 0.45,
+ "learning_rate": 1.2082572490623113e-05,
+ "loss": 0.8502,
+ "step": 2339
+ },
+ {
+ "epoch": 0.45,
+ "learning_rate": 1.2076474228833383e-05,
+ "loss": 0.7805,
+ "step": 2340
+ },
+ {
+ "epoch": 0.45,
+ "learning_rate": 1.2070375159924674e-05,
+ "loss": 0.787,
+ "step": 2341
+ },
+ {
+ "epoch": 0.45,
+ "learning_rate": 1.2064275286267675e-05,
+ "loss": 0.811,
+ "step": 2342
+ },
+ {
+ "epoch": 0.45,
+ "learning_rate": 1.2058174610233384e-05,
+ "loss": 0.7782,
+ "step": 2343
+ },
+ {
+ "epoch": 0.45,
+ "learning_rate": 1.2052073134193118e-05,
+ "loss": 0.8083,
+ "step": 2344
+ },
+ {
+ "epoch": 0.45,
+ "learning_rate": 1.2045970860518504e-05,
+ "loss": 0.793,
+ "step": 2345
+ },
+ {
+ "epoch": 0.45,
+ "learning_rate": 1.203986779158147e-05,
+ "loss": 0.8163,
+ "step": 2346
+ },
+ {
+ "epoch": 0.45,
+ "learning_rate": 1.2033763929754265e-05,
+ "loss": 0.7506,
+ "step": 2347
+ },
+ {
+ "epoch": 0.45,
+ "learning_rate": 1.2027659277409436e-05,
+ "loss": 0.7951,
+ "step": 2348
+ },
+ {
+ "epoch": 0.45,
+ "learning_rate": 1.2021553836919848e-05,
+ "loss": 1.5493,
+ "step": 2349
+ },
+ {
+ "epoch": 0.45,
+ "learning_rate": 1.2015447610658661e-05,
+ "loss": 0.8187,
+ "step": 2350
+ },
+ {
+ "epoch": 0.45,
+ "learning_rate": 1.200934060099935e-05,
+ "loss": 0.8126,
+ "step": 2351
+ },
+ {
+ "epoch": 0.45,
+ "learning_rate": 1.2003232810315688e-05,
+ "loss": 0.8474,
+ "step": 2352
+ },
+ {
+ "epoch": 0.45,
+ "learning_rate": 1.1997124240981754e-05,
+ "loss": 0.8095,
+ "step": 2353
+ },
+ {
+ "epoch": 0.45,
+ "learning_rate": 1.199101489537193e-05,
+ "loss": 0.8213,
+ "step": 2354
+ },
+ {
+ "epoch": 0.45,
+ "learning_rate": 1.1984904775860897e-05,
+ "loss": 0.8405,
+ "step": 2355
+ },
+ {
+ "epoch": 0.45,
+ "learning_rate": 1.1978793884823643e-05,
+ "loss": 1.4552,
+ "step": 2356
+ },
+ {
+ "epoch": 0.45,
+ "learning_rate": 1.1972682224635454e-05,
+ "loss": 0.7795,
+ "step": 2357
+ },
+ {
+ "epoch": 0.45,
+ "learning_rate": 1.196656979767191e-05,
+ "loss": 0.8382,
+ "step": 2358
+ },
+ {
+ "epoch": 0.45,
+ "learning_rate": 1.1960456606308887e-05,
+ "loss": 0.8542,
+ "step": 2359
+ },
+ {
+ "epoch": 0.45,
+ "learning_rate": 1.195434265292257e-05,
+ "loss": 0.8119,
+ "step": 2360
+ },
+ {
+ "epoch": 0.45,
+ "learning_rate": 1.194822793988943e-05,
+ "loss": 0.8208,
+ "step": 2361
+ },
+ {
+ "epoch": 0.45,
+ "learning_rate": 1.1942112469586241e-05,
+ "loss": 1.4211,
+ "step": 2362
+ },
+ {
+ "epoch": 0.45,
+ "learning_rate": 1.1935996244390064e-05,
+ "loss": 0.8531,
+ "step": 2363
+ },
+ {
+ "epoch": 0.46,
+ "learning_rate": 1.1929879266678256e-05,
+ "loss": 0.7558,
+ "step": 2364
+ },
+ {
+ "epoch": 0.46,
+ "learning_rate": 1.1923761538828465e-05,
+ "loss": 0.8345,
+ "step": 2365
+ },
+ {
+ "epoch": 0.46,
+ "learning_rate": 1.1917643063218638e-05,
+ "loss": 1.3651,
+ "step": 2366
+ },
+ {
+ "epoch": 0.46,
+ "learning_rate": 1.1911523842227002e-05,
+ "loss": 0.8019,
+ "step": 2367
+ },
+ {
+ "epoch": 0.46,
+ "learning_rate": 1.190540387823208e-05,
+ "loss": 0.7863,
+ "step": 2368
+ },
+ {
+ "epoch": 0.46,
+ "learning_rate": 1.1899283173612686e-05,
+ "loss": 0.8035,
+ "step": 2369
+ },
+ {
+ "epoch": 0.46,
+ "learning_rate": 1.1893161730747917e-05,
+ "loss": 0.7937,
+ "step": 2370
+ },
+ {
+ "epoch": 0.46,
+ "learning_rate": 1.1887039552017157e-05,
+ "loss": 0.8266,
+ "step": 2371
+ },
+ {
+ "epoch": 0.46,
+ "learning_rate": 1.1880916639800076e-05,
+ "loss": 0.8127,
+ "step": 2372
+ },
+ {
+ "epoch": 0.46,
+ "learning_rate": 1.1874792996476634e-05,
+ "loss": 1.3498,
+ "step": 2373
+ },
+ {
+ "epoch": 0.46,
+ "learning_rate": 1.186866862442707e-05,
+ "loss": 0.8405,
+ "step": 2374
+ },
+ {
+ "epoch": 0.46,
+ "learning_rate": 1.1862543526031914e-05,
+ "loss": 0.72,
+ "step": 2375
+ },
+ {
+ "epoch": 0.46,
+ "learning_rate": 1.1856417703671962e-05,
+ "loss": 0.8073,
+ "step": 2376
+ },
+ {
+ "epoch": 0.46,
+ "learning_rate": 1.1850291159728307e-05,
+ "loss": 0.8296,
+ "step": 2377
+ },
+ {
+ "epoch": 0.46,
+ "learning_rate": 1.1844163896582321e-05,
+ "loss": 0.7904,
+ "step": 2378
+ },
+ {
+ "epoch": 0.46,
+ "learning_rate": 1.1838035916615645e-05,
+ "loss": 0.8316,
+ "step": 2379
+ },
+ {
+ "epoch": 0.46,
+ "learning_rate": 1.1831907222210212e-05,
+ "loss": 0.7923,
+ "step": 2380
+ },
+ {
+ "epoch": 0.46,
+ "learning_rate": 1.182577781574822e-05,
+ "loss": 0.8617,
+ "step": 2381
+ },
+ {
+ "epoch": 0.46,
+ "learning_rate": 1.1819647699612153e-05,
+ "loss": 0.7661,
+ "step": 2382
+ },
+ {
+ "epoch": 0.46,
+ "learning_rate": 1.181351687618477e-05,
+ "loss": 0.8285,
+ "step": 2383
+ },
+ {
+ "epoch": 0.46,
+ "learning_rate": 1.1807385347849098e-05,
+ "loss": 0.8331,
+ "step": 2384
+ },
+ {
+ "epoch": 0.46,
+ "learning_rate": 1.1801253116988447e-05,
+ "loss": 0.8112,
+ "step": 2385
+ },
+ {
+ "epoch": 0.46,
+ "learning_rate": 1.179512018598639e-05,
+ "loss": 0.7849,
+ "step": 2386
+ },
+ {
+ "epoch": 0.46,
+ "learning_rate": 1.1788986557226788e-05,
+ "loss": 0.8304,
+ "step": 2387
+ },
+ {
+ "epoch": 0.46,
+ "learning_rate": 1.1782852233093754e-05,
+ "loss": 0.8272,
+ "step": 2388
+ },
+ {
+ "epoch": 0.46,
+ "learning_rate": 1.177671721597168e-05,
+ "loss": 0.7969,
+ "step": 2389
+ },
+ {
+ "epoch": 0.46,
+ "learning_rate": 1.1770581508245235e-05,
+ "loss": 0.7804,
+ "step": 2390
+ },
+ {
+ "epoch": 0.46,
+ "learning_rate": 1.1764445112299347e-05,
+ "loss": 0.7757,
+ "step": 2391
+ },
+ {
+ "epoch": 0.46,
+ "learning_rate": 1.1758308030519208e-05,
+ "loss": 1.3863,
+ "step": 2392
+ },
+ {
+ "epoch": 0.46,
+ "learning_rate": 1.1752170265290287e-05,
+ "loss": 0.8083,
+ "step": 2393
+ },
+ {
+ "epoch": 0.46,
+ "learning_rate": 1.1746031818998314e-05,
+ "loss": 0.7964,
+ "step": 2394
+ },
+ {
+ "epoch": 0.46,
+ "learning_rate": 1.1739892694029285e-05,
+ "loss": 0.8189,
+ "step": 2395
+ },
+ {
+ "epoch": 0.46,
+ "learning_rate": 1.1733752892769457e-05,
+ "loss": 0.844,
+ "step": 2396
+ },
+ {
+ "epoch": 0.46,
+ "learning_rate": 1.1727612417605348e-05,
+ "loss": 0.7763,
+ "step": 2397
+ },
+ {
+ "epoch": 0.46,
+ "learning_rate": 1.1721471270923746e-05,
+ "loss": 1.3661,
+ "step": 2398
+ },
+ {
+ "epoch": 0.46,
+ "learning_rate": 1.171532945511169e-05,
+ "loss": 1.3483,
+ "step": 2399
+ },
+ {
+ "epoch": 0.46,
+ "learning_rate": 1.1709186972556494e-05,
+ "loss": 0.8528,
+ "step": 2400
+ },
+ {
+ "epoch": 0.46,
+ "learning_rate": 1.1703043825645715e-05,
+ "loss": 0.8288,
+ "step": 2401
+ },
+ {
+ "epoch": 0.46,
+ "learning_rate": 1.1696900016767173e-05,
+ "loss": 0.8654,
+ "step": 2402
+ },
+ {
+ "epoch": 0.46,
+ "learning_rate": 1.169075554830895e-05,
+ "loss": 0.8181,
+ "step": 2403
+ },
+ {
+ "epoch": 0.46,
+ "learning_rate": 1.1684610422659382e-05,
+ "loss": 0.8018,
+ "step": 2404
+ },
+ {
+ "epoch": 0.46,
+ "learning_rate": 1.1678464642207057e-05,
+ "loss": 0.7686,
+ "step": 2405
+ },
+ {
+ "epoch": 0.46,
+ "learning_rate": 1.1672318209340824e-05,
+ "loss": 0.8024,
+ "step": 2406
+ },
+ {
+ "epoch": 0.46,
+ "learning_rate": 1.1666171126449779e-05,
+ "loss": 0.8473,
+ "step": 2407
+ },
+ {
+ "epoch": 0.46,
+ "learning_rate": 1.1660023395923279e-05,
+ "loss": 0.7687,
+ "step": 2408
+ },
+ {
+ "epoch": 0.46,
+ "learning_rate": 1.165387502015092e-05,
+ "loss": 0.843,
+ "step": 2409
+ },
+ {
+ "epoch": 0.46,
+ "learning_rate": 1.1647726001522562e-05,
+ "loss": 0.8503,
+ "step": 2410
+ },
+ {
+ "epoch": 0.46,
+ "learning_rate": 1.1641576342428305e-05,
+ "loss": 0.8179,
+ "step": 2411
+ },
+ {
+ "epoch": 0.46,
+ "learning_rate": 1.1635426045258508e-05,
+ "loss": 0.8424,
+ "step": 2412
+ },
+ {
+ "epoch": 0.46,
+ "learning_rate": 1.1629275112403766e-05,
+ "loss": 0.8023,
+ "step": 2413
+ },
+ {
+ "epoch": 0.46,
+ "learning_rate": 1.1623123546254931e-05,
+ "loss": 0.8562,
+ "step": 2414
+ },
+ {
+ "epoch": 0.46,
+ "learning_rate": 1.1616971349203091e-05,
+ "loss": 0.7001,
+ "step": 2415
+ },
+ {
+ "epoch": 0.47,
+ "learning_rate": 1.1610818523639598e-05,
+ "loss": 0.7984,
+ "step": 2416
+ },
+ {
+ "epoch": 0.47,
+ "learning_rate": 1.1604665071956023e-05,
+ "loss": 0.8417,
+ "step": 2417
+ },
+ {
+ "epoch": 0.47,
+ "learning_rate": 1.1598510996544201e-05,
+ "loss": 0.8332,
+ "step": 2418
+ },
+ {
+ "epoch": 0.47,
+ "learning_rate": 1.1592356299796198e-05,
+ "loss": 0.7805,
+ "step": 2419
+ },
+ {
+ "epoch": 0.47,
+ "learning_rate": 1.1586200984104323e-05,
+ "loss": 0.7974,
+ "step": 2420
+ },
+ {
+ "epoch": 0.47,
+ "learning_rate": 1.1580045051861138e-05,
+ "loss": 0.7853,
+ "step": 2421
+ },
+ {
+ "epoch": 0.47,
+ "learning_rate": 1.1573888505459423e-05,
+ "loss": 0.8308,
+ "step": 2422
+ },
+ {
+ "epoch": 0.47,
+ "learning_rate": 1.1567731347292217e-05,
+ "loss": 0.8602,
+ "step": 2423
+ },
+ {
+ "epoch": 0.47,
+ "learning_rate": 1.156157357975278e-05,
+ "loss": 0.8004,
+ "step": 2424
+ },
+ {
+ "epoch": 0.47,
+ "learning_rate": 1.1555415205234623e-05,
+ "loss": 0.8161,
+ "step": 2425
+ },
+ {
+ "epoch": 0.47,
+ "learning_rate": 1.1549256226131482e-05,
+ "loss": 0.8056,
+ "step": 2426
+ },
+ {
+ "epoch": 0.47,
+ "learning_rate": 1.1543096644837335e-05,
+ "loss": 0.8042,
+ "step": 2427
+ },
+ {
+ "epoch": 0.47,
+ "learning_rate": 1.1536936463746395e-05,
+ "loss": 0.8271,
+ "step": 2428
+ },
+ {
+ "epoch": 0.47,
+ "learning_rate": 1.1530775685253101e-05,
+ "loss": 0.8295,
+ "step": 2429
+ },
+ {
+ "epoch": 0.47,
+ "learning_rate": 1.1524614311752128e-05,
+ "loss": 0.7569,
+ "step": 2430
+ },
+ {
+ "epoch": 0.47,
+ "learning_rate": 1.1518452345638383e-05,
+ "loss": 0.8464,
+ "step": 2431
+ },
+ {
+ "epoch": 0.47,
+ "learning_rate": 1.1512289789307003e-05,
+ "loss": 0.8437,
+ "step": 2432
+ },
+ {
+ "epoch": 0.47,
+ "learning_rate": 1.1506126645153355e-05,
+ "loss": 0.8194,
+ "step": 2433
+ },
+ {
+ "epoch": 0.47,
+ "learning_rate": 1.1499962915573035e-05,
+ "loss": 0.772,
+ "step": 2434
+ },
+ {
+ "epoch": 0.47,
+ "learning_rate": 1.1493798602961862e-05,
+ "loss": 0.7859,
+ "step": 2435
+ },
+ {
+ "epoch": 0.47,
+ "learning_rate": 1.1487633709715884e-05,
+ "loss": 0.801,
+ "step": 2436
+ },
+ {
+ "epoch": 0.47,
+ "learning_rate": 1.1481468238231378e-05,
+ "loss": 0.8083,
+ "step": 2437
+ },
+ {
+ "epoch": 0.47,
+ "learning_rate": 1.1475302190904847e-05,
+ "loss": 0.7512,
+ "step": 2438
+ },
+ {
+ "epoch": 0.47,
+ "learning_rate": 1.146913557013301e-05,
+ "loss": 0.7876,
+ "step": 2439
+ },
+ {
+ "epoch": 0.47,
+ "learning_rate": 1.1462968378312808e-05,
+ "loss": 0.816,
+ "step": 2440
+ },
+ {
+ "epoch": 0.47,
+ "learning_rate": 1.1456800617841417e-05,
+ "loss": 0.7904,
+ "step": 2441
+ },
+ {
+ "epoch": 0.47,
+ "learning_rate": 1.1450632291116226e-05,
+ "loss": 0.7401,
+ "step": 2442
+ },
+ {
+ "epoch": 0.47,
+ "learning_rate": 1.1444463400534837e-05,
+ "loss": 0.8093,
+ "step": 2443
+ },
+ {
+ "epoch": 0.47,
+ "learning_rate": 1.1438293948495086e-05,
+ "loss": 0.7766,
+ "step": 2444
+ },
+ {
+ "epoch": 0.47,
+ "learning_rate": 1.1432123937395015e-05,
+ "loss": 0.6873,
+ "step": 2445
+ },
+ {
+ "epoch": 0.47,
+ "learning_rate": 1.142595336963289e-05,
+ "loss": 0.7735,
+ "step": 2446
+ },
+ {
+ "epoch": 0.47,
+ "learning_rate": 1.141978224760719e-05,
+ "loss": 0.7314,
+ "step": 2447
+ },
+ {
+ "epoch": 0.47,
+ "learning_rate": 1.1413610573716607e-05,
+ "loss": 0.8259,
+ "step": 2448
+ },
+ {
+ "epoch": 0.47,
+ "learning_rate": 1.140743835036006e-05,
+ "loss": 0.7565,
+ "step": 2449
+ },
+ {
+ "epoch": 0.47,
+ "learning_rate": 1.1401265579936669e-05,
+ "loss": 0.8235,
+ "step": 2450
+ },
+ {
+ "epoch": 0.47,
+ "learning_rate": 1.1395092264845764e-05,
+ "loss": 1.3823,
+ "step": 2451
+ },
+ {
+ "epoch": 0.47,
+ "learning_rate": 1.13889184074869e-05,
+ "loss": 0.7666,
+ "step": 2452
+ },
+ {
+ "epoch": 0.47,
+ "learning_rate": 1.1382744010259835e-05,
+ "loss": 0.8227,
+ "step": 2453
+ },
+ {
+ "epoch": 0.47,
+ "learning_rate": 1.1376569075564537e-05,
+ "loss": 0.8207,
+ "step": 2454
+ },
+ {
+ "epoch": 0.47,
+ "learning_rate": 1.1370393605801184e-05,
+ "loss": 0.8312,
+ "step": 2455
+ },
+ {
+ "epoch": 0.47,
+ "learning_rate": 1.136421760337016e-05,
+ "loss": 0.8081,
+ "step": 2456
+ },
+ {
+ "epoch": 0.47,
+ "learning_rate": 1.135804107067206e-05,
+ "loss": 0.8029,
+ "step": 2457
+ },
+ {
+ "epoch": 0.47,
+ "learning_rate": 1.1351864010107678e-05,
+ "loss": 0.88,
+ "step": 2458
+ },
+ {
+ "epoch": 0.47,
+ "learning_rate": 1.1345686424078027e-05,
+ "loss": 0.8238,
+ "step": 2459
+ },
+ {
+ "epoch": 0.47,
+ "learning_rate": 1.1339508314984306e-05,
+ "loss": 0.8231,
+ "step": 2460
+ },
+ {
+ "epoch": 0.47,
+ "learning_rate": 1.1333329685227934e-05,
+ "loss": 0.763,
+ "step": 2461
+ },
+ {
+ "epoch": 0.47,
+ "learning_rate": 1.1327150537210517e-05,
+ "loss": 0.8346,
+ "step": 2462
+ },
+ {
+ "epoch": 0.47,
+ "learning_rate": 1.1320970873333876e-05,
+ "loss": 0.8342,
+ "step": 2463
+ },
+ {
+ "epoch": 0.47,
+ "learning_rate": 1.1314790696000023e-05,
+ "loss": 0.8044,
+ "step": 2464
+ },
+ {
+ "epoch": 0.47,
+ "learning_rate": 1.1308610007611178e-05,
+ "loss": 0.8126,
+ "step": 2465
+ },
+ {
+ "epoch": 0.47,
+ "learning_rate": 1.1302428810569755e-05,
+ "loss": 0.8002,
+ "step": 2466
+ },
+ {
+ "epoch": 0.47,
+ "learning_rate": 1.1296247107278362e-05,
+ "loss": 0.8228,
+ "step": 2467
+ },
+ {
+ "epoch": 0.48,
+ "learning_rate": 1.1290064900139808e-05,
+ "loss": 0.8533,
+ "step": 2468
+ },
+ {
+ "epoch": 0.48,
+ "learning_rate": 1.1283882191557099e-05,
+ "loss": 0.7716,
+ "step": 2469
+ },
+ {
+ "epoch": 0.48,
+ "learning_rate": 1.1277698983933431e-05,
+ "loss": 0.8439,
+ "step": 2470
+ },
+ {
+ "epoch": 0.48,
+ "learning_rate": 1.1271515279672205e-05,
+ "loss": 0.8005,
+ "step": 2471
+ },
+ {
+ "epoch": 0.48,
+ "learning_rate": 1.1265331081177002e-05,
+ "loss": 0.8298,
+ "step": 2472
+ },
+ {
+ "epoch": 0.48,
+ "learning_rate": 1.1259146390851598e-05,
+ "loss": 0.8076,
+ "step": 2473
+ },
+ {
+ "epoch": 0.48,
+ "learning_rate": 1.1252961211099966e-05,
+ "loss": 0.8009,
+ "step": 2474
+ },
+ {
+ "epoch": 0.48,
+ "learning_rate": 1.1246775544326268e-05,
+ "loss": 0.8164,
+ "step": 2475
+ },
+ {
+ "epoch": 0.48,
+ "learning_rate": 1.1240589392934849e-05,
+ "loss": 0.8065,
+ "step": 2476
+ },
+ {
+ "epoch": 0.48,
+ "learning_rate": 1.123440275933025e-05,
+ "loss": 0.8344,
+ "step": 2477
+ },
+ {
+ "epoch": 0.48,
+ "learning_rate": 1.122821564591719e-05,
+ "loss": 0.768,
+ "step": 2478
+ },
+ {
+ "epoch": 0.48,
+ "learning_rate": 1.1222028055100584e-05,
+ "loss": 0.8849,
+ "step": 2479
+ },
+ {
+ "epoch": 0.48,
+ "learning_rate": 1.1215839989285536e-05,
+ "loss": 0.8047,
+ "step": 2480
+ },
+ {
+ "epoch": 0.48,
+ "learning_rate": 1.1209651450877315e-05,
+ "loss": 0.7738,
+ "step": 2481
+ },
+ {
+ "epoch": 0.48,
+ "learning_rate": 1.1203462442281394e-05,
+ "loss": 0.7968,
+ "step": 2482
+ },
+ {
+ "epoch": 0.48,
+ "learning_rate": 1.119727296590342e-05,
+ "loss": 0.7821,
+ "step": 2483
+ },
+ {
+ "epoch": 0.48,
+ "learning_rate": 1.1191083024149225e-05,
+ "loss": 0.7917,
+ "step": 2484
+ },
+ {
+ "epoch": 0.48,
+ "learning_rate": 1.1184892619424815e-05,
+ "loss": 0.817,
+ "step": 2485
+ },
+ {
+ "epoch": 0.48,
+ "learning_rate": 1.117870175413638e-05,
+ "loss": 0.8327,
+ "step": 2486
+ },
+ {
+ "epoch": 0.48,
+ "learning_rate": 1.1172510430690298e-05,
+ "loss": 0.8247,
+ "step": 2487
+ },
+ {
+ "epoch": 0.48,
+ "learning_rate": 1.116631865149311e-05,
+ "loss": 0.7822,
+ "step": 2488
+ },
+ {
+ "epoch": 0.48,
+ "learning_rate": 1.1160126418951542e-05,
+ "loss": 0.783,
+ "step": 2489
+ },
+ {
+ "epoch": 0.48,
+ "learning_rate": 1.1153933735472498e-05,
+ "loss": 0.8412,
+ "step": 2490
+ },
+ {
+ "epoch": 0.48,
+ "learning_rate": 1.1147740603463051e-05,
+ "loss": 0.7695,
+ "step": 2491
+ },
+ {
+ "epoch": 0.48,
+ "learning_rate": 1.1141547025330454e-05,
+ "loss": 0.78,
+ "step": 2492
+ },
+ {
+ "epoch": 0.48,
+ "learning_rate": 1.1135353003482132e-05,
+ "loss": 0.801,
+ "step": 2493
+ },
+ {
+ "epoch": 0.48,
+ "learning_rate": 1.112915854032568e-05,
+ "loss": 0.8129,
+ "step": 2494
+ },
+ {
+ "epoch": 0.48,
+ "learning_rate": 1.1122963638268864e-05,
+ "loss": 0.7529,
+ "step": 2495
+ },
+ {
+ "epoch": 0.48,
+ "learning_rate": 1.1116768299719628e-05,
+ "loss": 0.8157,
+ "step": 2496
+ },
+ {
+ "epoch": 0.48,
+ "learning_rate": 1.1110572527086079e-05,
+ "loss": 0.781,
+ "step": 2497
+ },
+ {
+ "epoch": 0.48,
+ "learning_rate": 1.1104376322776493e-05,
+ "loss": 0.7924,
+ "step": 2498
+ },
+ {
+ "epoch": 0.48,
+ "learning_rate": 1.1098179689199316e-05,
+ "loss": 0.7902,
+ "step": 2499
+ },
+ {
+ "epoch": 0.48,
+ "learning_rate": 1.1091982628763155e-05,
+ "loss": 0.8619,
+ "step": 2500
+ },
+ {
+ "epoch": 0.48,
+ "learning_rate": 1.10857851438768e-05,
+ "loss": 0.7954,
+ "step": 2501
+ },
+ {
+ "epoch": 0.48,
+ "learning_rate": 1.107958723694918e-05,
+ "loss": 0.7698,
+ "step": 2502
+ },
+ {
+ "epoch": 0.48,
+ "learning_rate": 1.1073388910389411e-05,
+ "loss": 0.7949,
+ "step": 2503
+ },
+ {
+ "epoch": 0.48,
+ "learning_rate": 1.1067190166606758e-05,
+ "loss": 0.7733,
+ "step": 2504
+ },
+ {
+ "epoch": 0.48,
+ "learning_rate": 1.1060991008010658e-05,
+ "loss": 0.7923,
+ "step": 2505
+ },
+ {
+ "epoch": 0.48,
+ "learning_rate": 1.10547914370107e-05,
+ "loss": 0.7978,
+ "step": 2506
+ },
+ {
+ "epoch": 0.48,
+ "learning_rate": 1.1048591456016638e-05,
+ "loss": 0.8287,
+ "step": 2507
+ },
+ {
+ "epoch": 0.48,
+ "learning_rate": 1.1042391067438385e-05,
+ "loss": 0.8159,
+ "step": 2508
+ },
+ {
+ "epoch": 0.48,
+ "learning_rate": 1.1036190273686019e-05,
+ "loss": 0.8437,
+ "step": 2509
+ },
+ {
+ "epoch": 0.48,
+ "learning_rate": 1.1029989077169761e-05,
+ "loss": 0.8031,
+ "step": 2510
+ },
+ {
+ "epoch": 0.48,
+ "learning_rate": 1.1023787480299996e-05,
+ "loss": 0.8409,
+ "step": 2511
+ },
+ {
+ "epoch": 0.48,
+ "learning_rate": 1.101758548548727e-05,
+ "loss": 0.8082,
+ "step": 2512
+ },
+ {
+ "epoch": 0.48,
+ "learning_rate": 1.1011383095142279e-05,
+ "loss": 0.831,
+ "step": 2513
+ },
+ {
+ "epoch": 0.48,
+ "learning_rate": 1.1005180311675869e-05,
+ "loss": 0.8049,
+ "step": 2514
+ },
+ {
+ "epoch": 0.48,
+ "learning_rate": 1.0998977137499044e-05,
+ "loss": 0.7548,
+ "step": 2515
+ },
+ {
+ "epoch": 0.48,
+ "learning_rate": 1.0992773575022962e-05,
+ "loss": 0.7563,
+ "step": 2516
+ },
+ {
+ "epoch": 0.48,
+ "learning_rate": 1.0986569626658918e-05,
+ "loss": 0.7963,
+ "step": 2517
+ },
+ {
+ "epoch": 0.48,
+ "learning_rate": 1.0980365294818377e-05,
+ "loss": 1.479,
+ "step": 2518
+ },
+ {
+ "epoch": 0.48,
+ "learning_rate": 1.097416058191294e-05,
+ "loss": 0.8155,
+ "step": 2519
+ },
+ {
+ "epoch": 0.49,
+ "learning_rate": 1.096795549035436e-05,
+ "loss": 0.8546,
+ "step": 2520
+ },
+ {
+ "epoch": 0.49,
+ "learning_rate": 1.0961750022554533e-05,
+ "loss": 0.7771,
+ "step": 2521
+ },
+ {
+ "epoch": 0.49,
+ "learning_rate": 1.095554418092551e-05,
+ "loss": 0.8379,
+ "step": 2522
+ },
+ {
+ "epoch": 0.49,
+ "learning_rate": 1.0949337967879478e-05,
+ "loss": 0.8849,
+ "step": 2523
+ },
+ {
+ "epoch": 0.49,
+ "learning_rate": 1.0943131385828773e-05,
+ "loss": 0.7753,
+ "step": 2524
+ },
+ {
+ "epoch": 0.49,
+ "learning_rate": 1.0936924437185879e-05,
+ "loss": 0.8314,
+ "step": 2525
+ },
+ {
+ "epoch": 0.49,
+ "learning_rate": 1.093071712436341e-05,
+ "loss": 0.7968,
+ "step": 2526
+ },
+ {
+ "epoch": 0.49,
+ "learning_rate": 1.092450944977413e-05,
+ "loss": 0.7854,
+ "step": 2527
+ },
+ {
+ "epoch": 0.49,
+ "learning_rate": 1.0918301415830945e-05,
+ "loss": 0.8069,
+ "step": 2528
+ },
+ {
+ "epoch": 0.49,
+ "learning_rate": 1.09120930249469e-05,
+ "loss": 0.8283,
+ "step": 2529
+ },
+ {
+ "epoch": 0.49,
+ "learning_rate": 1.0905884279535174e-05,
+ "loss": 1.4572,
+ "step": 2530
+ },
+ {
+ "epoch": 0.49,
+ "learning_rate": 1.0899675182009086e-05,
+ "loss": 0.7944,
+ "step": 2531
+ },
+ {
+ "epoch": 0.49,
+ "learning_rate": 1.0893465734782094e-05,
+ "loss": 0.8359,
+ "step": 2532
+ },
+ {
+ "epoch": 0.49,
+ "learning_rate": 1.0887255940267792e-05,
+ "loss": 0.8327,
+ "step": 2533
+ },
+ {
+ "epoch": 0.49,
+ "learning_rate": 1.0881045800879904e-05,
+ "loss": 0.8221,
+ "step": 2534
+ },
+ {
+ "epoch": 0.49,
+ "learning_rate": 1.0874835319032295e-05,
+ "loss": 0.7967,
+ "step": 2535
+ },
+ {
+ "epoch": 0.49,
+ "learning_rate": 1.086862449713896e-05,
+ "loss": 0.7771,
+ "step": 2536
+ },
+ {
+ "epoch": 0.49,
+ "learning_rate": 1.0862413337614021e-05,
+ "loss": 0.7781,
+ "step": 2537
+ },
+ {
+ "epoch": 0.49,
+ "learning_rate": 1.0856201842871741e-05,
+ "loss": 0.811,
+ "step": 2538
+ },
+ {
+ "epoch": 0.49,
+ "learning_rate": 1.0849990015326508e-05,
+ "loss": 1.3475,
+ "step": 2539
+ },
+ {
+ "epoch": 0.49,
+ "learning_rate": 1.0843777857392836e-05,
+ "loss": 0.8179,
+ "step": 2540
+ },
+ {
+ "epoch": 0.49,
+ "learning_rate": 1.0837565371485375e-05,
+ "loss": 0.8175,
+ "step": 2541
+ },
+ {
+ "epoch": 0.49,
+ "learning_rate": 1.0831352560018897e-05,
+ "loss": 1.4904,
+ "step": 2542
+ },
+ {
+ "epoch": 0.49,
+ "learning_rate": 1.0825139425408304e-05,
+ "loss": 0.8488,
+ "step": 2543
+ },
+ {
+ "epoch": 0.49,
+ "learning_rate": 1.0818925970068617e-05,
+ "loss": 0.8,
+ "step": 2544
+ },
+ {
+ "epoch": 0.49,
+ "learning_rate": 1.0812712196414989e-05,
+ "loss": 0.8336,
+ "step": 2545
+ },
+ {
+ "epoch": 0.49,
+ "learning_rate": 1.08064981068627e-05,
+ "loss": 0.8003,
+ "step": 2546
+ },
+ {
+ "epoch": 0.49,
+ "learning_rate": 1.080028370382714e-05,
+ "loss": 0.7873,
+ "step": 2547
+ },
+ {
+ "epoch": 0.49,
+ "learning_rate": 1.079406898972383e-05,
+ "loss": 0.8068,
+ "step": 2548
+ },
+ {
+ "epoch": 0.49,
+ "learning_rate": 1.0787853966968408e-05,
+ "loss": 0.7887,
+ "step": 2549
+ },
+ {
+ "epoch": 0.49,
+ "learning_rate": 1.0781638637976636e-05,
+ "loss": 0.7707,
+ "step": 2550
+ },
+ {
+ "epoch": 0.49,
+ "learning_rate": 1.0775423005164394e-05,
+ "loss": 0.7942,
+ "step": 2551
+ },
+ {
+ "epoch": 0.49,
+ "learning_rate": 1.0769207070947675e-05,
+ "loss": 0.7946,
+ "step": 2552
+ },
+ {
+ "epoch": 0.49,
+ "learning_rate": 1.0762990837742598e-05,
+ "loss": 0.8177,
+ "step": 2553
+ },
+ {
+ "epoch": 0.49,
+ "learning_rate": 1.0756774307965385e-05,
+ "loss": 0.8143,
+ "step": 2554
+ },
+ {
+ "epoch": 0.49,
+ "learning_rate": 1.075055748403239e-05,
+ "loss": 0.8135,
+ "step": 2555
+ },
+ {
+ "epoch": 0.49,
+ "learning_rate": 1.0744340368360069e-05,
+ "loss": 0.8248,
+ "step": 2556
+ },
+ {
+ "epoch": 0.49,
+ "learning_rate": 1.0738122963364996e-05,
+ "loss": 0.7779,
+ "step": 2557
+ },
+ {
+ "epoch": 0.49,
+ "learning_rate": 1.0731905271463858e-05,
+ "loss": 0.8235,
+ "step": 2558
+ },
+ {
+ "epoch": 0.49,
+ "learning_rate": 1.0725687295073449e-05,
+ "loss": 1.3706,
+ "step": 2559
+ },
+ {
+ "epoch": 0.49,
+ "learning_rate": 1.071946903661068e-05,
+ "loss": 0.7585,
+ "step": 2560
+ },
+ {
+ "epoch": 0.49,
+ "learning_rate": 1.0713250498492565e-05,
+ "loss": 0.8093,
+ "step": 2561
+ },
+ {
+ "epoch": 0.49,
+ "learning_rate": 1.0707031683136232e-05,
+ "loss": 0.7831,
+ "step": 2562
+ },
+ {
+ "epoch": 0.49,
+ "learning_rate": 1.0700812592958918e-05,
+ "loss": 0.7987,
+ "step": 2563
+ },
+ {
+ "epoch": 0.49,
+ "learning_rate": 1.0694593230377959e-05,
+ "loss": 0.7956,
+ "step": 2564
+ },
+ {
+ "epoch": 0.49,
+ "learning_rate": 1.0688373597810801e-05,
+ "loss": 0.7642,
+ "step": 2565
+ },
+ {
+ "epoch": 0.49,
+ "learning_rate": 1.0682153697675e-05,
+ "loss": 0.8168,
+ "step": 2566
+ },
+ {
+ "epoch": 0.49,
+ "learning_rate": 1.067593353238821e-05,
+ "loss": 0.7948,
+ "step": 2567
+ },
+ {
+ "epoch": 0.49,
+ "learning_rate": 1.0669713104368189e-05,
+ "loss": 0.7808,
+ "step": 2568
+ },
+ {
+ "epoch": 0.49,
+ "learning_rate": 1.0663492416032799e-05,
+ "loss": 0.819,
+ "step": 2569
+ },
+ {
+ "epoch": 0.49,
+ "learning_rate": 1.0657271469799998e-05,
+ "loss": 0.7869,
+ "step": 2570
+ },
+ {
+ "epoch": 0.49,
+ "learning_rate": 1.0651050268087853e-05,
+ "loss": 0.733,
+ "step": 2571
+ },
+ {
+ "epoch": 0.5,
+ "learning_rate": 1.064482881331452e-05,
+ "loss": 0.8114,
+ "step": 2572
+ },
+ {
+ "epoch": 0.5,
+ "learning_rate": 1.0638607107898266e-05,
+ "loss": 0.7261,
+ "step": 2573
+ },
+ {
+ "epoch": 0.5,
+ "learning_rate": 1.0632385154257445e-05,
+ "loss": 0.8392,
+ "step": 2574
+ },
+ {
+ "epoch": 0.5,
+ "learning_rate": 1.0626162954810508e-05,
+ "loss": 0.7418,
+ "step": 2575
+ },
+ {
+ "epoch": 0.5,
+ "learning_rate": 1.0619940511976006e-05,
+ "loss": 0.8591,
+ "step": 2576
+ },
+ {
+ "epoch": 0.5,
+ "learning_rate": 1.0613717828172584e-05,
+ "loss": 0.8045,
+ "step": 2577
+ },
+ {
+ "epoch": 0.5,
+ "learning_rate": 1.060749490581898e-05,
+ "loss": 0.7388,
+ "step": 2578
+ },
+ {
+ "epoch": 0.5,
+ "learning_rate": 1.0601271747334025e-05,
+ "loss": 0.7336,
+ "step": 2579
+ },
+ {
+ "epoch": 0.5,
+ "learning_rate": 1.0595048355136634e-05,
+ "loss": 1.3689,
+ "step": 2580
+ },
+ {
+ "epoch": 0.5,
+ "learning_rate": 1.058882473164583e-05,
+ "loss": 0.8408,
+ "step": 2581
+ },
+ {
+ "epoch": 0.5,
+ "learning_rate": 1.0582600879280709e-05,
+ "loss": 0.8795,
+ "step": 2582
+ },
+ {
+ "epoch": 0.5,
+ "learning_rate": 1.0576376800460464e-05,
+ "loss": 0.7892,
+ "step": 2583
+ },
+ {
+ "epoch": 0.5,
+ "learning_rate": 1.0570152497604378e-05,
+ "loss": 0.7991,
+ "step": 2584
+ },
+ {
+ "epoch": 0.5,
+ "learning_rate": 1.0563927973131816e-05,
+ "loss": 0.8263,
+ "step": 2585
+ },
+ {
+ "epoch": 0.5,
+ "learning_rate": 1.0557703229462229e-05,
+ "loss": 0.7934,
+ "step": 2586
+ },
+ {
+ "epoch": 0.5,
+ "learning_rate": 1.0551478269015154e-05,
+ "loss": 0.8438,
+ "step": 2587
+ },
+ {
+ "epoch": 0.5,
+ "learning_rate": 1.054525309421022e-05,
+ "loss": 0.8051,
+ "step": 2588
+ },
+ {
+ "epoch": 0.5,
+ "learning_rate": 1.0539027707467126e-05,
+ "loss": 1.3724,
+ "step": 2589
+ },
+ {
+ "epoch": 0.5,
+ "learning_rate": 1.0532802111205667e-05,
+ "loss": 0.8084,
+ "step": 2590
+ },
+ {
+ "epoch": 0.5,
+ "learning_rate": 1.0526576307845706e-05,
+ "loss": 0.8064,
+ "step": 2591
+ },
+ {
+ "epoch": 0.5,
+ "learning_rate": 1.0520350299807196e-05,
+ "loss": 0.8103,
+ "step": 2592
+ },
+ {
+ "epoch": 0.5,
+ "learning_rate": 1.0514124089510167e-05,
+ "loss": 0.8188,
+ "step": 2593
+ },
+ {
+ "epoch": 0.5,
+ "learning_rate": 1.0507897679374729e-05,
+ "loss": 0.8822,
+ "step": 2594
+ },
+ {
+ "epoch": 0.5,
+ "learning_rate": 1.0501671071821064e-05,
+ "loss": 0.7909,
+ "step": 2595
+ },
+ {
+ "epoch": 0.5,
+ "learning_rate": 1.0495444269269434e-05,
+ "loss": 0.8185,
+ "step": 2596
+ },
+ {
+ "epoch": 0.5,
+ "learning_rate": 1.0489217274140185e-05,
+ "loss": 0.7584,
+ "step": 2597
+ },
+ {
+ "epoch": 0.5,
+ "learning_rate": 1.0482990088853721e-05,
+ "loss": 0.8072,
+ "step": 2598
+ },
+ {
+ "epoch": 0.5,
+ "learning_rate": 1.0476762715830538e-05,
+ "loss": 0.8226,
+ "step": 2599
+ },
+ {
+ "epoch": 0.5,
+ "learning_rate": 1.0470535157491189e-05,
+ "loss": 0.803,
+ "step": 2600
+ },
+ {
+ "epoch": 0.5,
+ "learning_rate": 1.046430741625631e-05,
+ "loss": 0.7861,
+ "step": 2601
+ },
+ {
+ "epoch": 0.5,
+ "learning_rate": 1.0458079494546605e-05,
+ "loss": 0.8536,
+ "step": 2602
+ },
+ {
+ "epoch": 0.5,
+ "learning_rate": 1.0451851394782844e-05,
+ "loss": 0.794,
+ "step": 2603
+ },
+ {
+ "epoch": 0.5,
+ "learning_rate": 1.044562311938587e-05,
+ "loss": 0.8183,
+ "step": 2604
+ },
+ {
+ "epoch": 0.5,
+ "learning_rate": 1.0439394670776598e-05,
+ "loss": 0.7972,
+ "step": 2605
+ },
+ {
+ "epoch": 0.5,
+ "learning_rate": 1.0433166051376007e-05,
+ "loss": 0.8253,
+ "step": 2606
+ },
+ {
+ "epoch": 0.5,
+ "learning_rate": 1.0426937263605135e-05,
+ "loss": 0.8289,
+ "step": 2607
+ },
+ {
+ "epoch": 0.5,
+ "learning_rate": 1.0420708309885096e-05,
+ "loss": 0.7848,
+ "step": 2608
+ },
+ {
+ "epoch": 0.5,
+ "learning_rate": 1.0414479192637065e-05,
+ "loss": 0.7619,
+ "step": 2609
+ },
+ {
+ "epoch": 0.5,
+ "learning_rate": 1.040824991428228e-05,
+ "loss": 0.8076,
+ "step": 2610
+ },
+ {
+ "epoch": 0.5,
+ "learning_rate": 1.0402020477242043e-05,
+ "loss": 0.858,
+ "step": 2611
+ },
+ {
+ "epoch": 0.5,
+ "learning_rate": 1.0395790883937718e-05,
+ "loss": 0.753,
+ "step": 2612
+ },
+ {
+ "epoch": 0.5,
+ "learning_rate": 1.038956113679072e-05,
+ "loss": 0.8197,
+ "step": 2613
+ },
+ {
+ "epoch": 0.5,
+ "learning_rate": 1.0383331238222539e-05,
+ "loss": 0.7958,
+ "step": 2614
+ },
+ {
+ "epoch": 0.5,
+ "learning_rate": 1.0377101190654716e-05,
+ "loss": 0.8188,
+ "step": 2615
+ },
+ {
+ "epoch": 0.5,
+ "learning_rate": 1.0370870996508849e-05,
+ "loss": 0.8358,
+ "step": 2616
+ },
+ {
+ "epoch": 0.5,
+ "learning_rate": 1.03646406582066e-05,
+ "loss": 0.76,
+ "step": 2617
+ },
+ {
+ "epoch": 0.5,
+ "learning_rate": 1.0358410178169673e-05,
+ "loss": 0.8385,
+ "step": 2618
+ },
+ {
+ "epoch": 0.5,
+ "learning_rate": 1.0352179558819844e-05,
+ "loss": 0.7664,
+ "step": 2619
+ },
+ {
+ "epoch": 0.5,
+ "learning_rate": 1.0345948802578932e-05,
+ "loss": 0.8307,
+ "step": 2620
+ },
+ {
+ "epoch": 0.5,
+ "learning_rate": 1.0339717911868808e-05,
+ "loss": 0.8024,
+ "step": 2621
+ },
+ {
+ "epoch": 0.5,
+ "learning_rate": 1.0333486889111408e-05,
+ "loss": 0.8101,
+ "step": 2622
+ },
+ {
+ "epoch": 0.5,
+ "learning_rate": 1.0327255736728707e-05,
+ "loss": 0.7777,
+ "step": 2623
+ },
+ {
+ "epoch": 0.51,
+ "learning_rate": 1.0321024457142732e-05,
+ "loss": 0.795,
+ "step": 2624
+ },
+ {
+ "epoch": 0.51,
+ "learning_rate": 1.0314793052775563e-05,
+ "loss": 0.8482,
+ "step": 2625
+ },
+ {
+ "epoch": 0.51,
+ "learning_rate": 1.0308561526049327e-05,
+ "loss": 0.7572,
+ "step": 2626
+ },
+ {
+ "epoch": 0.51,
+ "learning_rate": 1.0302329879386203e-05,
+ "loss": 1.3093,
+ "step": 2627
+ },
+ {
+ "epoch": 0.51,
+ "learning_rate": 1.0296098115208408e-05,
+ "loss": 0.834,
+ "step": 2628
+ },
+ {
+ "epoch": 0.51,
+ "learning_rate": 1.0289866235938208e-05,
+ "loss": 0.8352,
+ "step": 2629
+ },
+ {
+ "epoch": 0.51,
+ "learning_rate": 1.0283634243997917e-05,
+ "loss": 0.8238,
+ "step": 2630
+ },
+ {
+ "epoch": 0.51,
+ "learning_rate": 1.0277402141809891e-05,
+ "loss": 0.8225,
+ "step": 2631
+ },
+ {
+ "epoch": 0.51,
+ "learning_rate": 1.027116993179653e-05,
+ "loss": 0.8036,
+ "step": 2632
+ },
+ {
+ "epoch": 0.51,
+ "learning_rate": 1.0264937616380274e-05,
+ "loss": 0.7977,
+ "step": 2633
+ },
+ {
+ "epoch": 0.51,
+ "learning_rate": 1.0258705197983598e-05,
+ "loss": 0.7948,
+ "step": 2634
+ },
+ {
+ "epoch": 0.51,
+ "learning_rate": 1.0252472679029028e-05,
+ "loss": 0.7632,
+ "step": 2635
+ },
+ {
+ "epoch": 0.51,
+ "learning_rate": 1.0246240061939128e-05,
+ "loss": 0.8038,
+ "step": 2636
+ },
+ {
+ "epoch": 0.51,
+ "learning_rate": 1.0240007349136489e-05,
+ "loss": 0.8068,
+ "step": 2637
+ },
+ {
+ "epoch": 0.51,
+ "learning_rate": 1.0233774543043753e-05,
+ "loss": 0.7927,
+ "step": 2638
+ },
+ {
+ "epoch": 0.51,
+ "learning_rate": 1.022754164608359e-05,
+ "loss": 1.3129,
+ "step": 2639
+ },
+ {
+ "epoch": 0.51,
+ "learning_rate": 1.0221308660678708e-05,
+ "loss": 0.8112,
+ "step": 2640
+ },
+ {
+ "epoch": 0.51,
+ "learning_rate": 1.0215075589251846e-05,
+ "loss": 0.8205,
+ "step": 2641
+ },
+ {
+ "epoch": 0.51,
+ "learning_rate": 1.020884243422578e-05,
+ "loss": 0.8345,
+ "step": 2642
+ },
+ {
+ "epoch": 0.51,
+ "learning_rate": 1.0202609198023324e-05,
+ "loss": 0.8114,
+ "step": 2643
+ },
+ {
+ "epoch": 0.51,
+ "learning_rate": 1.0196375883067309e-05,
+ "loss": 0.8227,
+ "step": 2644
+ },
+ {
+ "epoch": 0.51,
+ "learning_rate": 1.0190142491780608e-05,
+ "loss": 0.8405,
+ "step": 2645
+ },
+ {
+ "epoch": 0.51,
+ "learning_rate": 1.0183909026586118e-05,
+ "loss": 0.8283,
+ "step": 2646
+ },
+ {
+ "epoch": 0.51,
+ "learning_rate": 1.0177675489906773e-05,
+ "loss": 0.7871,
+ "step": 2647
+ },
+ {
+ "epoch": 0.51,
+ "learning_rate": 1.0171441884165524e-05,
+ "loss": 0.8091,
+ "step": 2648
+ },
+ {
+ "epoch": 0.51,
+ "learning_rate": 1.016520821178536e-05,
+ "loss": 0.7876,
+ "step": 2649
+ },
+ {
+ "epoch": 0.51,
+ "learning_rate": 1.015897447518928e-05,
+ "loss": 0.7895,
+ "step": 2650
+ },
+ {
+ "epoch": 0.51,
+ "learning_rate": 1.0152740676800326e-05,
+ "loss": 0.8351,
+ "step": 2651
+ },
+ {
+ "epoch": 0.51,
+ "learning_rate": 1.014650681904155e-05,
+ "loss": 0.8216,
+ "step": 2652
+ },
+ {
+ "epoch": 0.51,
+ "learning_rate": 1.014027290433604e-05,
+ "loss": 0.8282,
+ "step": 2653
+ },
+ {
+ "epoch": 0.51,
+ "learning_rate": 1.0134038935106892e-05,
+ "loss": 0.7689,
+ "step": 2654
+ },
+ {
+ "epoch": 0.51,
+ "learning_rate": 1.0127804913777235e-05,
+ "loss": 0.7819,
+ "step": 2655
+ },
+ {
+ "epoch": 0.51,
+ "learning_rate": 1.012157084277021e-05,
+ "loss": 0.8065,
+ "step": 2656
+ },
+ {
+ "epoch": 0.51,
+ "learning_rate": 1.0115336724508984e-05,
+ "loss": 0.8574,
+ "step": 2657
+ },
+ {
+ "epoch": 0.51,
+ "learning_rate": 1.0109102561416732e-05,
+ "loss": 0.7351,
+ "step": 2658
+ },
+ {
+ "epoch": 0.51,
+ "learning_rate": 1.0102868355916662e-05,
+ "loss": 0.7974,
+ "step": 2659
+ },
+ {
+ "epoch": 0.51,
+ "learning_rate": 1.0096634110431989e-05,
+ "loss": 0.7762,
+ "step": 2660
+ },
+ {
+ "epoch": 0.51,
+ "learning_rate": 1.0090399827385942e-05,
+ "loss": 0.7998,
+ "step": 2661
+ },
+ {
+ "epoch": 0.51,
+ "learning_rate": 1.0084165509201764e-05,
+ "loss": 0.8414,
+ "step": 2662
+ },
+ {
+ "epoch": 0.51,
+ "learning_rate": 1.0077931158302718e-05,
+ "loss": 0.8146,
+ "step": 2663
+ },
+ {
+ "epoch": 0.51,
+ "learning_rate": 1.0071696777112076e-05,
+ "loss": 0.8235,
+ "step": 2664
+ },
+ {
+ "epoch": 0.51,
+ "learning_rate": 1.0065462368053127e-05,
+ "loss": 0.823,
+ "step": 2665
+ },
+ {
+ "epoch": 0.51,
+ "learning_rate": 1.0059227933549163e-05,
+ "loss": 0.7725,
+ "step": 2666
+ },
+ {
+ "epoch": 0.51,
+ "learning_rate": 1.0052993476023485e-05,
+ "loss": 0.8385,
+ "step": 2667
+ },
+ {
+ "epoch": 0.51,
+ "learning_rate": 1.0046758997899409e-05,
+ "loss": 0.821,
+ "step": 2668
+ },
+ {
+ "epoch": 0.51,
+ "learning_rate": 1.0040524501600259e-05,
+ "loss": 0.7842,
+ "step": 2669
+ },
+ {
+ "epoch": 0.51,
+ "learning_rate": 1.0034289989549364e-05,
+ "loss": 0.8497,
+ "step": 2670
+ },
+ {
+ "epoch": 0.51,
+ "learning_rate": 1.002805546417006e-05,
+ "loss": 0.7666,
+ "step": 2671
+ },
+ {
+ "epoch": 0.51,
+ "learning_rate": 1.0021820927885684e-05,
+ "loss": 0.8077,
+ "step": 2672
+ },
+ {
+ "epoch": 0.51,
+ "learning_rate": 1.001558638311958e-05,
+ "loss": 0.8067,
+ "step": 2673
+ },
+ {
+ "epoch": 0.51,
+ "learning_rate": 1.0009351832295103e-05,
+ "loss": 0.8182,
+ "step": 2674
+ },
+ {
+ "epoch": 0.51,
+ "learning_rate": 1.0003117277835593e-05,
+ "loss": 0.8125,
+ "step": 2675
+ },
+ {
+ "epoch": 0.52,
+ "learning_rate": 9.99688272216441e-06,
+ "loss": 1.3756,
+ "step": 2676
+ },
+ {
+ "epoch": 0.52,
+ "learning_rate": 9.990648167704904e-06,
+ "loss": 0.8025,
+ "step": 2677
+ },
+ {
+ "epoch": 0.52,
+ "learning_rate": 9.984413616880422e-06,
+ "loss": 0.823,
+ "step": 2678
+ },
+ {
+ "epoch": 0.52,
+ "learning_rate": 9.97817907211432e-06,
+ "loss": 0.7877,
+ "step": 2679
+ },
+ {
+ "epoch": 0.52,
+ "learning_rate": 9.971944535829943e-06,
+ "loss": 1.3349,
+ "step": 2680
+ },
+ {
+ "epoch": 0.52,
+ "learning_rate": 9.965710010450637e-06,
+ "loss": 1.328,
+ "step": 2681
+ },
+ {
+ "epoch": 0.52,
+ "learning_rate": 9.959475498399743e-06,
+ "loss": 0.7768,
+ "step": 2682
+ },
+ {
+ "epoch": 0.52,
+ "learning_rate": 9.953241002100593e-06,
+ "loss": 0.8324,
+ "step": 2683
+ },
+ {
+ "epoch": 0.52,
+ "learning_rate": 9.94700652397652e-06,
+ "loss": 0.749,
+ "step": 2684
+ },
+ {
+ "epoch": 0.52,
+ "learning_rate": 9.940772066450842e-06,
+ "loss": 0.778,
+ "step": 2685
+ },
+ {
+ "epoch": 0.52,
+ "learning_rate": 9.934537631946875e-06,
+ "loss": 0.6959,
+ "step": 2686
+ },
+ {
+ "epoch": 0.52,
+ "learning_rate": 9.928303222887925e-06,
+ "loss": 0.7837,
+ "step": 2687
+ },
+ {
+ "epoch": 0.52,
+ "learning_rate": 9.922068841697284e-06,
+ "loss": 0.8089,
+ "step": 2688
+ },
+ {
+ "epoch": 0.52,
+ "learning_rate": 9.915834490798239e-06,
+ "loss": 1.3238,
+ "step": 2689
+ },
+ {
+ "epoch": 0.52,
+ "learning_rate": 9.909600172614062e-06,
+ "loss": 0.8041,
+ "step": 2690
+ },
+ {
+ "epoch": 0.52,
+ "learning_rate": 9.903365889568015e-06,
+ "loss": 0.7887,
+ "step": 2691
+ },
+ {
+ "epoch": 0.52,
+ "learning_rate": 9.897131644083338e-06,
+ "loss": 0.8017,
+ "step": 2692
+ },
+ {
+ "epoch": 0.52,
+ "learning_rate": 9.890897438583271e-06,
+ "loss": 0.8013,
+ "step": 2693
+ },
+ {
+ "epoch": 0.52,
+ "learning_rate": 9.884663275491023e-06,
+ "loss": 0.789,
+ "step": 2694
+ },
+ {
+ "epoch": 0.52,
+ "learning_rate": 9.878429157229792e-06,
+ "loss": 0.7431,
+ "step": 2695
+ },
+ {
+ "epoch": 0.52,
+ "learning_rate": 9.872195086222768e-06,
+ "loss": 0.7232,
+ "step": 2696
+ },
+ {
+ "epoch": 0.52,
+ "learning_rate": 9.86596106489311e-06,
+ "loss": 0.7764,
+ "step": 2697
+ },
+ {
+ "epoch": 0.52,
+ "learning_rate": 9.859727095663962e-06,
+ "loss": 0.8183,
+ "step": 2698
+ },
+ {
+ "epoch": 0.52,
+ "learning_rate": 9.853493180958449e-06,
+ "loss": 0.7589,
+ "step": 2699
+ },
+ {
+ "epoch": 0.52,
+ "learning_rate": 9.847259323199674e-06,
+ "loss": 0.7882,
+ "step": 2700
+ },
+ {
+ "epoch": 0.52,
+ "learning_rate": 9.841025524810724e-06,
+ "loss": 0.7997,
+ "step": 2701
+ },
+ {
+ "epoch": 0.52,
+ "learning_rate": 9.834791788214645e-06,
+ "loss": 0.7209,
+ "step": 2702
+ },
+ {
+ "epoch": 0.52,
+ "learning_rate": 9.828558115834477e-06,
+ "loss": 0.8503,
+ "step": 2703
+ },
+ {
+ "epoch": 0.52,
+ "learning_rate": 9.82232451009323e-06,
+ "loss": 0.7985,
+ "step": 2704
+ },
+ {
+ "epoch": 0.52,
+ "learning_rate": 9.816090973413885e-06,
+ "loss": 0.7912,
+ "step": 2705
+ },
+ {
+ "epoch": 0.52,
+ "learning_rate": 9.809857508219395e-06,
+ "loss": 0.8438,
+ "step": 2706
+ },
+ {
+ "epoch": 0.52,
+ "learning_rate": 9.803624116932695e-06,
+ "loss": 0.7951,
+ "step": 2707
+ },
+ {
+ "epoch": 0.52,
+ "learning_rate": 9.79739080197668e-06,
+ "loss": 0.8036,
+ "step": 2708
+ },
+ {
+ "epoch": 0.52,
+ "learning_rate": 9.79115756577422e-06,
+ "loss": 0.767,
+ "step": 2709
+ },
+ {
+ "epoch": 0.52,
+ "learning_rate": 9.784924410748159e-06,
+ "loss": 0.8279,
+ "step": 2710
+ },
+ {
+ "epoch": 0.52,
+ "learning_rate": 9.778691339321297e-06,
+ "loss": 0.7439,
+ "step": 2711
+ },
+ {
+ "epoch": 0.52,
+ "learning_rate": 9.772458353916413e-06,
+ "loss": 0.8204,
+ "step": 2712
+ },
+ {
+ "epoch": 0.52,
+ "learning_rate": 9.766225456956248e-06,
+ "loss": 0.8565,
+ "step": 2713
+ },
+ {
+ "epoch": 0.52,
+ "learning_rate": 9.759992650863513e-06,
+ "loss": 0.8241,
+ "step": 2714
+ },
+ {
+ "epoch": 0.52,
+ "learning_rate": 9.753759938060875e-06,
+ "loss": 0.8002,
+ "step": 2715
+ },
+ {
+ "epoch": 0.52,
+ "learning_rate": 9.747527320970974e-06,
+ "loss": 0.7275,
+ "step": 2716
+ },
+ {
+ "epoch": 0.52,
+ "learning_rate": 9.741294802016403e-06,
+ "loss": 0.8299,
+ "step": 2717
+ },
+ {
+ "epoch": 0.52,
+ "learning_rate": 9.735062383619731e-06,
+ "loss": 0.7883,
+ "step": 2718
+ },
+ {
+ "epoch": 0.52,
+ "learning_rate": 9.728830068203472e-06,
+ "loss": 0.8062,
+ "step": 2719
+ },
+ {
+ "epoch": 0.52,
+ "learning_rate": 9.722597858190112e-06,
+ "loss": 0.7878,
+ "step": 2720
+ },
+ {
+ "epoch": 0.52,
+ "learning_rate": 9.716365756002086e-06,
+ "loss": 0.7148,
+ "step": 2721
+ },
+ {
+ "epoch": 0.52,
+ "learning_rate": 9.710133764061795e-06,
+ "loss": 0.8883,
+ "step": 2722
+ },
+ {
+ "epoch": 0.52,
+ "learning_rate": 9.703901884791594e-06,
+ "loss": 0.8265,
+ "step": 2723
+ },
+ {
+ "epoch": 0.52,
+ "learning_rate": 9.6976701206138e-06,
+ "loss": 0.806,
+ "step": 2724
+ },
+ {
+ "epoch": 0.52,
+ "learning_rate": 9.691438473950674e-06,
+ "loss": 0.7874,
+ "step": 2725
+ },
+ {
+ "epoch": 0.52,
+ "learning_rate": 9.685206947224437e-06,
+ "loss": 0.7912,
+ "step": 2726
+ },
+ {
+ "epoch": 0.52,
+ "learning_rate": 9.678975542857275e-06,
+ "loss": 0.7579,
+ "step": 2727
+ },
+ {
+ "epoch": 0.53,
+ "learning_rate": 9.6727442632713e-06,
+ "loss": 0.7589,
+ "step": 2728
+ },
+ {
+ "epoch": 0.53,
+ "learning_rate": 9.666513110888595e-06,
+ "loss": 0.7848,
+ "step": 2729
+ },
+ {
+ "epoch": 0.53,
+ "learning_rate": 9.660282088131193e-06,
+ "loss": 0.8149,
+ "step": 2730
+ },
+ {
+ "epoch": 0.53,
+ "learning_rate": 9.654051197421073e-06,
+ "loss": 0.8901,
+ "step": 2731
+ },
+ {
+ "epoch": 0.53,
+ "learning_rate": 9.647820441180159e-06,
+ "loss": 0.7844,
+ "step": 2732
+ },
+ {
+ "epoch": 0.53,
+ "learning_rate": 9.641589821830326e-06,
+ "loss": 0.7956,
+ "step": 2733
+ },
+ {
+ "epoch": 0.53,
+ "learning_rate": 9.635359341793404e-06,
+ "loss": 0.7724,
+ "step": 2734
+ },
+ {
+ "epoch": 0.53,
+ "learning_rate": 9.629129003491153e-06,
+ "loss": 0.8569,
+ "step": 2735
+ },
+ {
+ "epoch": 0.53,
+ "learning_rate": 9.622898809345289e-06,
+ "loss": 0.8116,
+ "step": 2736
+ },
+ {
+ "epoch": 0.53,
+ "learning_rate": 9.616668761777465e-06,
+ "loss": 0.7914,
+ "step": 2737
+ },
+ {
+ "epoch": 0.53,
+ "learning_rate": 9.610438863209284e-06,
+ "loss": 0.8362,
+ "step": 2738
+ },
+ {
+ "epoch": 0.53,
+ "learning_rate": 9.604209116062286e-06,
+ "loss": 0.8204,
+ "step": 2739
+ },
+ {
+ "epoch": 0.53,
+ "learning_rate": 9.59797952275796e-06,
+ "loss": 0.8028,
+ "step": 2740
+ },
+ {
+ "epoch": 0.53,
+ "learning_rate": 9.59175008571772e-06,
+ "loss": 0.7854,
+ "step": 2741
+ },
+ {
+ "epoch": 0.53,
+ "learning_rate": 9.585520807362936e-06,
+ "loss": 0.8089,
+ "step": 2742
+ },
+ {
+ "epoch": 0.53,
+ "learning_rate": 9.579291690114903e-06,
+ "loss": 0.801,
+ "step": 2743
+ },
+ {
+ "epoch": 0.53,
+ "learning_rate": 9.573062736394868e-06,
+ "loss": 0.7886,
+ "step": 2744
+ },
+ {
+ "epoch": 0.53,
+ "learning_rate": 9.566833948623999e-06,
+ "loss": 0.8356,
+ "step": 2745
+ },
+ {
+ "epoch": 0.53,
+ "learning_rate": 9.560605329223406e-06,
+ "loss": 0.8171,
+ "step": 2746
+ },
+ {
+ "epoch": 0.53,
+ "learning_rate": 9.554376880614133e-06,
+ "loss": 0.7986,
+ "step": 2747
+ },
+ {
+ "epoch": 0.53,
+ "learning_rate": 9.54814860521716e-06,
+ "loss": 0.8701,
+ "step": 2748
+ },
+ {
+ "epoch": 0.53,
+ "learning_rate": 9.5419205054534e-06,
+ "loss": 0.8156,
+ "step": 2749
+ },
+ {
+ "epoch": 0.53,
+ "learning_rate": 9.535692583743691e-06,
+ "loss": 0.8201,
+ "step": 2750
+ },
+ {
+ "epoch": 0.53,
+ "learning_rate": 9.529464842508813e-06,
+ "loss": 0.7353,
+ "step": 2751
+ },
+ {
+ "epoch": 0.53,
+ "learning_rate": 9.523237284169469e-06,
+ "loss": 0.7944,
+ "step": 2752
+ },
+ {
+ "epoch": 0.53,
+ "learning_rate": 9.517009911146282e-06,
+ "loss": 0.8205,
+ "step": 2753
+ },
+ {
+ "epoch": 0.53,
+ "learning_rate": 9.51078272585982e-06,
+ "loss": 0.7705,
+ "step": 2754
+ },
+ {
+ "epoch": 0.53,
+ "learning_rate": 9.504555730730567e-06,
+ "loss": 1.3912,
+ "step": 2755
+ },
+ {
+ "epoch": 0.53,
+ "learning_rate": 9.49832892817894e-06,
+ "loss": 0.8136,
+ "step": 2756
+ },
+ {
+ "epoch": 0.53,
+ "learning_rate": 9.492102320625275e-06,
+ "loss": 0.8263,
+ "step": 2757
+ },
+ {
+ "epoch": 0.53,
+ "learning_rate": 9.485875910489835e-06,
+ "loss": 0.7837,
+ "step": 2758
+ },
+ {
+ "epoch": 0.53,
+ "learning_rate": 9.479649700192805e-06,
+ "loss": 0.8108,
+ "step": 2759
+ },
+ {
+ "epoch": 0.53,
+ "learning_rate": 9.473423692154299e-06,
+ "loss": 0.827,
+ "step": 2760
+ },
+ {
+ "epoch": 0.53,
+ "learning_rate": 9.467197888794338e-06,
+ "loss": 0.8268,
+ "step": 2761
+ },
+ {
+ "epoch": 0.53,
+ "learning_rate": 9.460972292532876e-06,
+ "loss": 0.7982,
+ "step": 2762
+ },
+ {
+ "epoch": 0.53,
+ "learning_rate": 9.454746905789785e-06,
+ "loss": 0.767,
+ "step": 2763
+ },
+ {
+ "epoch": 0.53,
+ "learning_rate": 9.448521730984848e-06,
+ "loss": 0.8283,
+ "step": 2764
+ },
+ {
+ "epoch": 0.53,
+ "learning_rate": 9.442296770537775e-06,
+ "loss": 0.7894,
+ "step": 2765
+ },
+ {
+ "epoch": 0.53,
+ "learning_rate": 9.436072026868186e-06,
+ "loss": 0.7279,
+ "step": 2766
+ },
+ {
+ "epoch": 0.53,
+ "learning_rate": 9.429847502395623e-06,
+ "loss": 0.7667,
+ "step": 2767
+ },
+ {
+ "epoch": 0.53,
+ "learning_rate": 9.423623199539536e-06,
+ "loss": 0.7563,
+ "step": 2768
+ },
+ {
+ "epoch": 0.53,
+ "learning_rate": 9.417399120719296e-06,
+ "loss": 0.7294,
+ "step": 2769
+ },
+ {
+ "epoch": 0.53,
+ "learning_rate": 9.411175268354174e-06,
+ "loss": 0.8583,
+ "step": 2770
+ },
+ {
+ "epoch": 0.53,
+ "learning_rate": 9.404951644863367e-06,
+ "loss": 0.8191,
+ "step": 2771
+ },
+ {
+ "epoch": 0.53,
+ "learning_rate": 9.398728252665978e-06,
+ "loss": 1.2408,
+ "step": 2772
+ },
+ {
+ "epoch": 0.53,
+ "learning_rate": 9.392505094181023e-06,
+ "loss": 0.8301,
+ "step": 2773
+ },
+ {
+ "epoch": 0.53,
+ "learning_rate": 9.386282171827417e-06,
+ "loss": 0.7375,
+ "step": 2774
+ },
+ {
+ "epoch": 0.53,
+ "learning_rate": 9.380059488023995e-06,
+ "loss": 0.7588,
+ "step": 2775
+ },
+ {
+ "epoch": 0.53,
+ "learning_rate": 9.373837045189494e-06,
+ "loss": 0.8237,
+ "step": 2776
+ },
+ {
+ "epoch": 0.53,
+ "learning_rate": 9.36761484574256e-06,
+ "loss": 0.8353,
+ "step": 2777
+ },
+ {
+ "epoch": 0.53,
+ "learning_rate": 9.361392892101735e-06,
+ "loss": 0.7639,
+ "step": 2778
+ },
+ {
+ "epoch": 0.53,
+ "learning_rate": 9.355171186685483e-06,
+ "loss": 0.7314,
+ "step": 2779
+ },
+ {
+ "epoch": 0.54,
+ "learning_rate": 9.34894973191215e-06,
+ "loss": 0.8213,
+ "step": 2780
+ },
+ {
+ "epoch": 0.54,
+ "learning_rate": 9.342728530200005e-06,
+ "loss": 0.8365,
+ "step": 2781
+ },
+ {
+ "epoch": 0.54,
+ "learning_rate": 9.336507583967204e-06,
+ "loss": 0.8282,
+ "step": 2782
+ },
+ {
+ "epoch": 0.54,
+ "learning_rate": 9.330286895631814e-06,
+ "loss": 0.7968,
+ "step": 2783
+ },
+ {
+ "epoch": 0.54,
+ "learning_rate": 9.324066467611792e-06,
+ "loss": 1.2637,
+ "step": 2784
+ },
+ {
+ "epoch": 0.54,
+ "learning_rate": 9.317846302325e-06,
+ "loss": 0.7585,
+ "step": 2785
+ },
+ {
+ "epoch": 0.54,
+ "learning_rate": 9.311626402189202e-06,
+ "loss": 0.7363,
+ "step": 2786
+ },
+ {
+ "epoch": 0.54,
+ "learning_rate": 9.305406769622046e-06,
+ "loss": 0.7812,
+ "step": 2787
+ },
+ {
+ "epoch": 0.54,
+ "learning_rate": 9.299187407041087e-06,
+ "loss": 0.8045,
+ "step": 2788
+ },
+ {
+ "epoch": 0.54,
+ "learning_rate": 9.29296831686377e-06,
+ "loss": 0.7645,
+ "step": 2789
+ },
+ {
+ "epoch": 0.54,
+ "learning_rate": 9.286749501507438e-06,
+ "loss": 0.8475,
+ "step": 2790
+ },
+ {
+ "epoch": 0.54,
+ "learning_rate": 9.280530963389324e-06,
+ "loss": 0.7968,
+ "step": 2791
+ },
+ {
+ "epoch": 0.54,
+ "learning_rate": 9.274312704926553e-06,
+ "loss": 0.8028,
+ "step": 2792
+ },
+ {
+ "epoch": 0.54,
+ "learning_rate": 9.268094728536144e-06,
+ "loss": 0.82,
+ "step": 2793
+ },
+ {
+ "epoch": 0.54,
+ "learning_rate": 9.261877036635007e-06,
+ "loss": 0.7725,
+ "step": 2794
+ },
+ {
+ "epoch": 0.54,
+ "learning_rate": 9.255659631639934e-06,
+ "loss": 0.8018,
+ "step": 2795
+ },
+ {
+ "epoch": 0.54,
+ "learning_rate": 9.249442515967614e-06,
+ "loss": 0.8472,
+ "step": 2796
+ },
+ {
+ "epoch": 0.54,
+ "learning_rate": 9.243225692034617e-06,
+ "loss": 0.752,
+ "step": 2797
+ },
+ {
+ "epoch": 0.54,
+ "learning_rate": 9.237009162257407e-06,
+ "loss": 0.7229,
+ "step": 2798
+ },
+ {
+ "epoch": 0.54,
+ "learning_rate": 9.230792929052326e-06,
+ "loss": 0.78,
+ "step": 2799
+ },
+ {
+ "epoch": 0.54,
+ "learning_rate": 9.22457699483561e-06,
+ "loss": 0.7959,
+ "step": 2800
+ },
+ {
+ "epoch": 0.54,
+ "learning_rate": 9.218361362023365e-06,
+ "loss": 0.7893,
+ "step": 2801
+ },
+ {
+ "epoch": 0.54,
+ "learning_rate": 9.212146033031593e-06,
+ "loss": 0.8494,
+ "step": 2802
+ },
+ {
+ "epoch": 0.54,
+ "learning_rate": 9.205931010276174e-06,
+ "loss": 0.8163,
+ "step": 2803
+ },
+ {
+ "epoch": 0.54,
+ "learning_rate": 9.199716296172864e-06,
+ "loss": 0.7644,
+ "step": 2804
+ },
+ {
+ "epoch": 0.54,
+ "learning_rate": 9.193501893137302e-06,
+ "loss": 1.2571,
+ "step": 2805
+ },
+ {
+ "epoch": 0.54,
+ "learning_rate": 9.187287803585013e-06,
+ "loss": 0.7978,
+ "step": 2806
+ },
+ {
+ "epoch": 0.54,
+ "learning_rate": 9.181074029931386e-06,
+ "loss": 0.7711,
+ "step": 2807
+ },
+ {
+ "epoch": 0.54,
+ "learning_rate": 9.1748605745917e-06,
+ "loss": 0.7961,
+ "step": 2808
+ },
+ {
+ "epoch": 0.54,
+ "learning_rate": 9.168647439981104e-06,
+ "loss": 0.7365,
+ "step": 2809
+ },
+ {
+ "epoch": 0.54,
+ "learning_rate": 9.162434628514627e-06,
+ "loss": 0.8205,
+ "step": 2810
+ },
+ {
+ "epoch": 0.54,
+ "learning_rate": 9.156222142607167e-06,
+ "loss": 0.7842,
+ "step": 2811
+ },
+ {
+ "epoch": 0.54,
+ "learning_rate": 9.150009984673497e-06,
+ "loss": 0.8216,
+ "step": 2812
+ },
+ {
+ "epoch": 0.54,
+ "learning_rate": 9.143798157128262e-06,
+ "loss": 0.8078,
+ "step": 2813
+ },
+ {
+ "epoch": 0.54,
+ "learning_rate": 9.13758666238598e-06,
+ "loss": 0.7648,
+ "step": 2814
+ },
+ {
+ "epoch": 0.54,
+ "learning_rate": 9.131375502861042e-06,
+ "loss": 0.7441,
+ "step": 2815
+ },
+ {
+ "epoch": 0.54,
+ "learning_rate": 9.125164680967706e-06,
+ "loss": 0.8086,
+ "step": 2816
+ },
+ {
+ "epoch": 0.54,
+ "learning_rate": 9.118954199120096e-06,
+ "loss": 0.7872,
+ "step": 2817
+ },
+ {
+ "epoch": 0.54,
+ "learning_rate": 9.11274405973221e-06,
+ "loss": 0.8249,
+ "step": 2818
+ },
+ {
+ "epoch": 0.54,
+ "learning_rate": 9.10653426521791e-06,
+ "loss": 0.7994,
+ "step": 2819
+ },
+ {
+ "epoch": 0.54,
+ "learning_rate": 9.100324817990917e-06,
+ "loss": 0.7911,
+ "step": 2820
+ },
+ {
+ "epoch": 0.54,
+ "learning_rate": 9.09411572046483e-06,
+ "loss": 0.7608,
+ "step": 2821
+ },
+ {
+ "epoch": 0.54,
+ "learning_rate": 9.087906975053105e-06,
+ "loss": 0.7814,
+ "step": 2822
+ },
+ {
+ "epoch": 0.54,
+ "learning_rate": 9.081698584169058e-06,
+ "loss": 0.8191,
+ "step": 2823
+ },
+ {
+ "epoch": 0.54,
+ "learning_rate": 9.075490550225873e-06,
+ "loss": 0.8544,
+ "step": 2824
+ },
+ {
+ "epoch": 0.54,
+ "learning_rate": 9.069282875636593e-06,
+ "loss": 0.8143,
+ "step": 2825
+ },
+ {
+ "epoch": 0.54,
+ "learning_rate": 9.063075562814125e-06,
+ "loss": 0.8062,
+ "step": 2826
+ },
+ {
+ "epoch": 0.54,
+ "learning_rate": 9.056868614171227e-06,
+ "loss": 0.7386,
+ "step": 2827
+ },
+ {
+ "epoch": 0.54,
+ "learning_rate": 9.050662032120527e-06,
+ "loss": 0.777,
+ "step": 2828
+ },
+ {
+ "epoch": 0.54,
+ "learning_rate": 9.044455819074493e-06,
+ "loss": 0.8095,
+ "step": 2829
+ },
+ {
+ "epoch": 0.54,
+ "learning_rate": 9.038249977445469e-06,
+ "loss": 1.2888,
+ "step": 2830
+ },
+ {
+ "epoch": 0.54,
+ "learning_rate": 9.032044509645643e-06,
+ "loss": 0.7722,
+ "step": 2831
+ },
+ {
+ "epoch": 0.55,
+ "learning_rate": 9.025839418087063e-06,
+ "loss": 0.7983,
+ "step": 2832
+ },
+ {
+ "epoch": 0.55,
+ "learning_rate": 9.019634705181624e-06,
+ "loss": 0.8176,
+ "step": 2833
+ },
+ {
+ "epoch": 0.55,
+ "learning_rate": 9.013430373341082e-06,
+ "loss": 0.772,
+ "step": 2834
+ },
+ {
+ "epoch": 0.55,
+ "learning_rate": 9.00722642497704e-06,
+ "loss": 0.8331,
+ "step": 2835
+ },
+ {
+ "epoch": 0.55,
+ "learning_rate": 9.001022862500959e-06,
+ "loss": 0.8053,
+ "step": 2836
+ },
+ {
+ "epoch": 0.55,
+ "learning_rate": 8.994819688324134e-06,
+ "loss": 0.8266,
+ "step": 2837
+ },
+ {
+ "epoch": 0.55,
+ "learning_rate": 8.988616904857726e-06,
+ "loss": 0.742,
+ "step": 2838
+ },
+ {
+ "epoch": 0.55,
+ "learning_rate": 8.982414514512732e-06,
+ "loss": 0.7606,
+ "step": 2839
+ },
+ {
+ "epoch": 0.55,
+ "learning_rate": 8.976212519700007e-06,
+ "loss": 0.794,
+ "step": 2840
+ },
+ {
+ "epoch": 0.55,
+ "learning_rate": 8.970010922830244e-06,
+ "loss": 0.7591,
+ "step": 2841
+ },
+ {
+ "epoch": 0.55,
+ "learning_rate": 8.963809726313983e-06,
+ "loss": 0.8041,
+ "step": 2842
+ },
+ {
+ "epoch": 0.55,
+ "learning_rate": 8.957608932561615e-06,
+ "loss": 0.799,
+ "step": 2843
+ },
+ {
+ "epoch": 0.55,
+ "learning_rate": 8.951408543983363e-06,
+ "loss": 0.8343,
+ "step": 2844
+ },
+ {
+ "epoch": 0.55,
+ "learning_rate": 8.945208562989306e-06,
+ "loss": 0.8112,
+ "step": 2845
+ },
+ {
+ "epoch": 0.55,
+ "learning_rate": 8.939008991989347e-06,
+ "loss": 0.7825,
+ "step": 2846
+ },
+ {
+ "epoch": 0.55,
+ "learning_rate": 8.932809833393245e-06,
+ "loss": 0.8778,
+ "step": 2847
+ },
+ {
+ "epoch": 0.55,
+ "learning_rate": 8.926611089610592e-06,
+ "loss": 0.8062,
+ "step": 2848
+ },
+ {
+ "epoch": 0.55,
+ "learning_rate": 8.920412763050823e-06,
+ "loss": 0.7708,
+ "step": 2849
+ },
+ {
+ "epoch": 0.55,
+ "learning_rate": 8.914214856123205e-06,
+ "loss": 1.3593,
+ "step": 2850
+ },
+ {
+ "epoch": 0.55,
+ "learning_rate": 8.908017371236843e-06,
+ "loss": 0.7897,
+ "step": 2851
+ },
+ {
+ "epoch": 0.55,
+ "learning_rate": 8.901820310800686e-06,
+ "loss": 0.8305,
+ "step": 2852
+ },
+ {
+ "epoch": 0.55,
+ "learning_rate": 8.895623677223512e-06,
+ "loss": 0.7865,
+ "step": 2853
+ },
+ {
+ "epoch": 0.55,
+ "learning_rate": 8.889427472913925e-06,
+ "loss": 0.7754,
+ "step": 2854
+ },
+ {
+ "epoch": 0.55,
+ "learning_rate": 8.883231700280376e-06,
+ "loss": 0.8067,
+ "step": 2855
+ },
+ {
+ "epoch": 0.55,
+ "learning_rate": 8.877036361731138e-06,
+ "loss": 0.85,
+ "step": 2856
+ },
+ {
+ "epoch": 0.55,
+ "learning_rate": 8.870841459674325e-06,
+ "loss": 0.7949,
+ "step": 2857
+ },
+ {
+ "epoch": 0.55,
+ "learning_rate": 8.864646996517871e-06,
+ "loss": 0.8646,
+ "step": 2858
+ },
+ {
+ "epoch": 0.55,
+ "learning_rate": 8.858452974669547e-06,
+ "loss": 0.7696,
+ "step": 2859
+ },
+ {
+ "epoch": 0.55,
+ "learning_rate": 8.85225939653695e-06,
+ "loss": 0.7748,
+ "step": 2860
+ },
+ {
+ "epoch": 0.55,
+ "learning_rate": 8.846066264527504e-06,
+ "loss": 0.7586,
+ "step": 2861
+ },
+ {
+ "epoch": 0.55,
+ "learning_rate": 8.839873581048462e-06,
+ "loss": 0.78,
+ "step": 2862
+ },
+ {
+ "epoch": 0.55,
+ "learning_rate": 8.833681348506894e-06,
+ "loss": 0.7954,
+ "step": 2863
+ },
+ {
+ "epoch": 0.55,
+ "learning_rate": 8.827489569309705e-06,
+ "loss": 0.7575,
+ "step": 2864
+ },
+ {
+ "epoch": 0.55,
+ "learning_rate": 8.821298245863621e-06,
+ "loss": 0.8047,
+ "step": 2865
+ },
+ {
+ "epoch": 0.55,
+ "learning_rate": 8.81510738057519e-06,
+ "loss": 0.8135,
+ "step": 2866
+ },
+ {
+ "epoch": 0.55,
+ "learning_rate": 8.808916975850778e-06,
+ "loss": 0.8078,
+ "step": 2867
+ },
+ {
+ "epoch": 0.55,
+ "learning_rate": 8.80272703409658e-06,
+ "loss": 0.7785,
+ "step": 2868
+ },
+ {
+ "epoch": 0.55,
+ "learning_rate": 8.796537557718608e-06,
+ "loss": 0.7388,
+ "step": 2869
+ },
+ {
+ "epoch": 0.55,
+ "learning_rate": 8.790348549122689e-06,
+ "loss": 0.7856,
+ "step": 2870
+ },
+ {
+ "epoch": 0.55,
+ "learning_rate": 8.78416001071447e-06,
+ "loss": 0.8094,
+ "step": 2871
+ },
+ {
+ "epoch": 0.55,
+ "learning_rate": 8.777971944899417e-06,
+ "loss": 0.754,
+ "step": 2872
+ },
+ {
+ "epoch": 0.55,
+ "learning_rate": 8.771784354082812e-06,
+ "loss": 0.797,
+ "step": 2873
+ },
+ {
+ "epoch": 0.55,
+ "learning_rate": 8.765597240669755e-06,
+ "loss": 0.7877,
+ "step": 2874
+ },
+ {
+ "epoch": 0.55,
+ "learning_rate": 8.759410607065154e-06,
+ "loss": 0.8112,
+ "step": 2875
+ },
+ {
+ "epoch": 0.55,
+ "learning_rate": 8.753224455673734e-06,
+ "loss": 0.839,
+ "step": 2876
+ },
+ {
+ "epoch": 0.55,
+ "learning_rate": 8.747038788900033e-06,
+ "loss": 0.7578,
+ "step": 2877
+ },
+ {
+ "epoch": 0.55,
+ "learning_rate": 8.740853609148402e-06,
+ "loss": 0.8096,
+ "step": 2878
+ },
+ {
+ "epoch": 0.55,
+ "learning_rate": 8.734668918823003e-06,
+ "loss": 0.7659,
+ "step": 2879
+ },
+ {
+ "epoch": 0.55,
+ "learning_rate": 8.728484720327797e-06,
+ "loss": 0.7669,
+ "step": 2880
+ },
+ {
+ "epoch": 0.55,
+ "learning_rate": 8.72230101606657e-06,
+ "loss": 0.772,
+ "step": 2881
+ },
+ {
+ "epoch": 0.55,
+ "learning_rate": 8.716117808442905e-06,
+ "loss": 0.7559,
+ "step": 2882
+ },
+ {
+ "epoch": 0.55,
+ "learning_rate": 8.709935099860194e-06,
+ "loss": 0.8116,
+ "step": 2883
+ },
+ {
+ "epoch": 0.56,
+ "learning_rate": 8.70375289272164e-06,
+ "loss": 0.7873,
+ "step": 2884
+ },
+ {
+ "epoch": 0.56,
+ "learning_rate": 8.697571189430247e-06,
+ "loss": 0.8337,
+ "step": 2885
+ },
+ {
+ "epoch": 0.56,
+ "learning_rate": 8.691389992388822e-06,
+ "loss": 0.8265,
+ "step": 2886
+ },
+ {
+ "epoch": 0.56,
+ "learning_rate": 8.68520930399998e-06,
+ "loss": 0.7904,
+ "step": 2887
+ },
+ {
+ "epoch": 0.56,
+ "learning_rate": 8.679029126666129e-06,
+ "loss": 0.7702,
+ "step": 2888
+ },
+ {
+ "epoch": 0.56,
+ "learning_rate": 8.672849462789487e-06,
+ "loss": 0.7531,
+ "step": 2889
+ },
+ {
+ "epoch": 0.56,
+ "learning_rate": 8.66667031477207e-06,
+ "loss": 0.8202,
+ "step": 2890
+ },
+ {
+ "epoch": 0.56,
+ "learning_rate": 8.660491685015697e-06,
+ "loss": 0.7714,
+ "step": 2891
+ },
+ {
+ "epoch": 0.56,
+ "learning_rate": 8.654313575921975e-06,
+ "loss": 0.8459,
+ "step": 2892
+ },
+ {
+ "epoch": 0.56,
+ "learning_rate": 8.648135989892322e-06,
+ "loss": 0.7553,
+ "step": 2893
+ },
+ {
+ "epoch": 0.56,
+ "learning_rate": 8.641958929327942e-06,
+ "loss": 0.8233,
+ "step": 2894
+ },
+ {
+ "epoch": 0.56,
+ "learning_rate": 8.635782396629844e-06,
+ "loss": 0.7823,
+ "step": 2895
+ },
+ {
+ "epoch": 0.56,
+ "learning_rate": 8.62960639419882e-06,
+ "loss": 0.7784,
+ "step": 2896
+ },
+ {
+ "epoch": 0.56,
+ "learning_rate": 8.623430924435465e-06,
+ "loss": 0.7511,
+ "step": 2897
+ },
+ {
+ "epoch": 0.56,
+ "learning_rate": 8.617255989740169e-06,
+ "loss": 0.7975,
+ "step": 2898
+ },
+ {
+ "epoch": 0.56,
+ "learning_rate": 8.611081592513103e-06,
+ "loss": 0.7601,
+ "step": 2899
+ },
+ {
+ "epoch": 0.56,
+ "learning_rate": 8.604907735154239e-06,
+ "loss": 0.8105,
+ "step": 2900
+ },
+ {
+ "epoch": 0.56,
+ "learning_rate": 8.598734420063335e-06,
+ "loss": 0.8206,
+ "step": 2901
+ },
+ {
+ "epoch": 0.56,
+ "learning_rate": 8.592561649639942e-06,
+ "loss": 0.8912,
+ "step": 2902
+ },
+ {
+ "epoch": 0.56,
+ "learning_rate": 8.586389426283393e-06,
+ "loss": 0.7742,
+ "step": 2903
+ },
+ {
+ "epoch": 0.56,
+ "learning_rate": 8.580217752392817e-06,
+ "loss": 0.7975,
+ "step": 2904
+ },
+ {
+ "epoch": 0.56,
+ "learning_rate": 8.574046630367114e-06,
+ "loss": 0.7939,
+ "step": 2905
+ },
+ {
+ "epoch": 0.56,
+ "learning_rate": 8.567876062604989e-06,
+ "loss": 0.7736,
+ "step": 2906
+ },
+ {
+ "epoch": 0.56,
+ "learning_rate": 8.561706051504917e-06,
+ "loss": 0.7582,
+ "step": 2907
+ },
+ {
+ "epoch": 0.56,
+ "learning_rate": 8.555536599465166e-06,
+ "loss": 0.7766,
+ "step": 2908
+ },
+ {
+ "epoch": 0.56,
+ "learning_rate": 8.549367708883777e-06,
+ "loss": 0.8044,
+ "step": 2909
+ },
+ {
+ "epoch": 0.56,
+ "learning_rate": 8.543199382158583e-06,
+ "loss": 0.7675,
+ "step": 2910
+ },
+ {
+ "epoch": 0.56,
+ "learning_rate": 8.53703162168719e-06,
+ "loss": 0.7563,
+ "step": 2911
+ },
+ {
+ "epoch": 0.56,
+ "learning_rate": 8.530864429866996e-06,
+ "loss": 1.3686,
+ "step": 2912
+ },
+ {
+ "epoch": 0.56,
+ "learning_rate": 8.524697809095156e-06,
+ "loss": 0.6935,
+ "step": 2913
+ },
+ {
+ "epoch": 0.56,
+ "learning_rate": 8.518531761768623e-06,
+ "loss": 0.7884,
+ "step": 2914
+ },
+ {
+ "epoch": 0.56,
+ "learning_rate": 8.512366290284119e-06,
+ "loss": 0.7486,
+ "step": 2915
+ },
+ {
+ "epoch": 0.56,
+ "learning_rate": 8.506201397038142e-06,
+ "loss": 0.7468,
+ "step": 2916
+ },
+ {
+ "epoch": 0.56,
+ "learning_rate": 8.500037084426968e-06,
+ "loss": 0.7433,
+ "step": 2917
+ },
+ {
+ "epoch": 0.56,
+ "learning_rate": 8.493873354846647e-06,
+ "loss": 0.8552,
+ "step": 2918
+ },
+ {
+ "epoch": 0.56,
+ "learning_rate": 8.487710210692999e-06,
+ "loss": 0.741,
+ "step": 2919
+ },
+ {
+ "epoch": 0.56,
+ "learning_rate": 8.481547654361619e-06,
+ "loss": 0.7786,
+ "step": 2920
+ },
+ {
+ "epoch": 0.56,
+ "learning_rate": 8.475385688247877e-06,
+ "loss": 0.7863,
+ "step": 2921
+ },
+ {
+ "epoch": 0.56,
+ "learning_rate": 8.469224314746904e-06,
+ "loss": 0.8006,
+ "step": 2922
+ },
+ {
+ "epoch": 0.56,
+ "learning_rate": 8.463063536253607e-06,
+ "loss": 0.794,
+ "step": 2923
+ },
+ {
+ "epoch": 0.56,
+ "learning_rate": 8.456903355162667e-06,
+ "loss": 0.7329,
+ "step": 2924
+ },
+ {
+ "epoch": 0.56,
+ "learning_rate": 8.450743773868521e-06,
+ "loss": 0.8645,
+ "step": 2925
+ },
+ {
+ "epoch": 0.56,
+ "learning_rate": 8.44458479476538e-06,
+ "loss": 1.4479,
+ "step": 2926
+ },
+ {
+ "epoch": 0.56,
+ "learning_rate": 8.438426420247221e-06,
+ "loss": 0.7929,
+ "step": 2927
+ },
+ {
+ "epoch": 0.56,
+ "learning_rate": 8.432268652707787e-06,
+ "loss": 0.8403,
+ "step": 2928
+ },
+ {
+ "epoch": 0.56,
+ "learning_rate": 8.426111494540578e-06,
+ "loss": 0.7474,
+ "step": 2929
+ },
+ {
+ "epoch": 0.56,
+ "learning_rate": 8.419954948138867e-06,
+ "loss": 0.8167,
+ "step": 2930
+ },
+ {
+ "epoch": 0.56,
+ "learning_rate": 8.413799015895679e-06,
+ "loss": 0.8305,
+ "step": 2931
+ },
+ {
+ "epoch": 0.56,
+ "learning_rate": 8.407643700203806e-06,
+ "loss": 0.7602,
+ "step": 2932
+ },
+ {
+ "epoch": 0.56,
+ "learning_rate": 8.401489003455802e-06,
+ "loss": 1.2792,
+ "step": 2933
+ },
+ {
+ "epoch": 0.56,
+ "learning_rate": 8.395334928043982e-06,
+ "loss": 0.8034,
+ "step": 2934
+ },
+ {
+ "epoch": 0.56,
+ "learning_rate": 8.389181476360407e-06,
+ "loss": 0.8422,
+ "step": 2935
+ },
+ {
+ "epoch": 0.57,
+ "learning_rate": 8.383028650796908e-06,
+ "loss": 0.8239,
+ "step": 2936
+ },
+ {
+ "epoch": 0.57,
+ "learning_rate": 8.37687645374507e-06,
+ "loss": 0.769,
+ "step": 2937
+ },
+ {
+ "epoch": 0.57,
+ "learning_rate": 8.370724887596237e-06,
+ "loss": 0.8292,
+ "step": 2938
+ },
+ {
+ "epoch": 0.57,
+ "learning_rate": 8.364573954741495e-06,
+ "loss": 0.8327,
+ "step": 2939
+ },
+ {
+ "epoch": 0.57,
+ "learning_rate": 8.358423657571698e-06,
+ "loss": 0.797,
+ "step": 2940
+ },
+ {
+ "epoch": 0.57,
+ "learning_rate": 8.352273998477442e-06,
+ "loss": 1.2814,
+ "step": 2941
+ },
+ {
+ "epoch": 0.57,
+ "learning_rate": 8.346124979849082e-06,
+ "loss": 0.731,
+ "step": 2942
+ },
+ {
+ "epoch": 0.57,
+ "learning_rate": 8.339976604076724e-06,
+ "loss": 0.8337,
+ "step": 2943
+ },
+ {
+ "epoch": 0.57,
+ "learning_rate": 8.33382887355022e-06,
+ "loss": 0.7568,
+ "step": 2944
+ },
+ {
+ "epoch": 0.57,
+ "learning_rate": 8.327681790659179e-06,
+ "loss": 0.703,
+ "step": 2945
+ },
+ {
+ "epoch": 0.57,
+ "learning_rate": 8.321535357792947e-06,
+ "loss": 0.8045,
+ "step": 2946
+ },
+ {
+ "epoch": 0.57,
+ "learning_rate": 8.315389577340625e-06,
+ "loss": 1.2868,
+ "step": 2947
+ },
+ {
+ "epoch": 0.57,
+ "learning_rate": 8.309244451691055e-06,
+ "loss": 0.7617,
+ "step": 2948
+ },
+ {
+ "epoch": 0.57,
+ "learning_rate": 8.30309998323283e-06,
+ "loss": 0.7849,
+ "step": 2949
+ },
+ {
+ "epoch": 0.57,
+ "learning_rate": 8.296956174354288e-06,
+ "loss": 0.8032,
+ "step": 2950
+ },
+ {
+ "epoch": 0.57,
+ "learning_rate": 8.290813027443508e-06,
+ "loss": 0.7593,
+ "step": 2951
+ },
+ {
+ "epoch": 0.57,
+ "learning_rate": 8.28467054488831e-06,
+ "loss": 0.8377,
+ "step": 2952
+ },
+ {
+ "epoch": 0.57,
+ "learning_rate": 8.278528729076256e-06,
+ "loss": 0.7263,
+ "step": 2953
+ },
+ {
+ "epoch": 0.57,
+ "learning_rate": 8.272387582394652e-06,
+ "loss": 0.7796,
+ "step": 2954
+ },
+ {
+ "epoch": 0.57,
+ "learning_rate": 8.266247107230548e-06,
+ "loss": 0.8101,
+ "step": 2955
+ },
+ {
+ "epoch": 0.57,
+ "learning_rate": 8.260107305970716e-06,
+ "loss": 1.3075,
+ "step": 2956
+ },
+ {
+ "epoch": 0.57,
+ "learning_rate": 8.253968181001688e-06,
+ "loss": 0.7981,
+ "step": 2957
+ },
+ {
+ "epoch": 0.57,
+ "learning_rate": 8.247829734709716e-06,
+ "loss": 0.8025,
+ "step": 2958
+ },
+ {
+ "epoch": 0.57,
+ "learning_rate": 8.241691969480794e-06,
+ "loss": 1.3402,
+ "step": 2959
+ },
+ {
+ "epoch": 0.57,
+ "learning_rate": 8.235554887700656e-06,
+ "loss": 0.8279,
+ "step": 2960
+ },
+ {
+ "epoch": 0.57,
+ "learning_rate": 8.229418491754766e-06,
+ "loss": 0.7726,
+ "step": 2961
+ },
+ {
+ "epoch": 0.57,
+ "learning_rate": 8.22328278402832e-06,
+ "loss": 0.7381,
+ "step": 2962
+ },
+ {
+ "epoch": 0.57,
+ "learning_rate": 8.217147766906253e-06,
+ "loss": 0.8307,
+ "step": 2963
+ },
+ {
+ "epoch": 0.57,
+ "learning_rate": 8.211013442773218e-06,
+ "loss": 0.7832,
+ "step": 2964
+ },
+ {
+ "epoch": 0.57,
+ "learning_rate": 8.204879814013612e-06,
+ "loss": 0.843,
+ "step": 2965
+ },
+ {
+ "epoch": 0.57,
+ "learning_rate": 8.198746883011555e-06,
+ "loss": 1.2771,
+ "step": 2966
+ },
+ {
+ "epoch": 0.57,
+ "learning_rate": 8.192614652150905e-06,
+ "loss": 0.8048,
+ "step": 2967
+ },
+ {
+ "epoch": 0.57,
+ "learning_rate": 8.186483123815233e-06,
+ "loss": 1.3677,
+ "step": 2968
+ },
+ {
+ "epoch": 0.57,
+ "learning_rate": 8.180352300387849e-06,
+ "loss": 0.771,
+ "step": 2969
+ },
+ {
+ "epoch": 0.57,
+ "learning_rate": 8.174222184251781e-06,
+ "loss": 0.8077,
+ "step": 2970
+ },
+ {
+ "epoch": 0.57,
+ "learning_rate": 8.168092777789793e-06,
+ "loss": 0.7857,
+ "step": 2971
+ },
+ {
+ "epoch": 0.57,
+ "learning_rate": 8.161964083384357e-06,
+ "loss": 0.7871,
+ "step": 2972
+ },
+ {
+ "epoch": 0.57,
+ "learning_rate": 8.155836103417684e-06,
+ "loss": 0.7237,
+ "step": 2973
+ },
+ {
+ "epoch": 0.57,
+ "learning_rate": 8.149708840271694e-06,
+ "loss": 0.7881,
+ "step": 2974
+ },
+ {
+ "epoch": 0.57,
+ "learning_rate": 8.143582296328041e-06,
+ "loss": 1.3039,
+ "step": 2975
+ },
+ {
+ "epoch": 0.57,
+ "learning_rate": 8.13745647396809e-06,
+ "loss": 0.7868,
+ "step": 2976
+ },
+ {
+ "epoch": 0.57,
+ "learning_rate": 8.131331375572931e-06,
+ "loss": 0.8031,
+ "step": 2977
+ },
+ {
+ "epoch": 0.57,
+ "learning_rate": 8.125207003523368e-06,
+ "loss": 0.8303,
+ "step": 2978
+ },
+ {
+ "epoch": 0.57,
+ "learning_rate": 8.119083360199926e-06,
+ "loss": 0.8214,
+ "step": 2979
+ },
+ {
+ "epoch": 0.57,
+ "learning_rate": 8.11296044798285e-06,
+ "loss": 0.7854,
+ "step": 2980
+ },
+ {
+ "epoch": 0.57,
+ "learning_rate": 8.106838269252088e-06,
+ "loss": 0.8601,
+ "step": 2981
+ },
+ {
+ "epoch": 0.57,
+ "learning_rate": 8.100716826387316e-06,
+ "loss": 0.8163,
+ "step": 2982
+ },
+ {
+ "epoch": 0.57,
+ "learning_rate": 8.094596121767923e-06,
+ "loss": 0.7406,
+ "step": 2983
+ },
+ {
+ "epoch": 0.57,
+ "learning_rate": 8.088476157773002e-06,
+ "loss": 0.7713,
+ "step": 2984
+ },
+ {
+ "epoch": 0.57,
+ "learning_rate": 8.082356936781365e-06,
+ "loss": 0.8311,
+ "step": 2985
+ },
+ {
+ "epoch": 0.57,
+ "learning_rate": 8.076238461171536e-06,
+ "loss": 0.7915,
+ "step": 2986
+ },
+ {
+ "epoch": 0.57,
+ "learning_rate": 8.070120733321746e-06,
+ "loss": 0.8176,
+ "step": 2987
+ },
+ {
+ "epoch": 0.58,
+ "learning_rate": 8.064003755609941e-06,
+ "loss": 0.7989,
+ "step": 2988
+ },
+ {
+ "epoch": 0.58,
+ "learning_rate": 8.057887530413764e-06,
+ "loss": 0.8027,
+ "step": 2989
+ },
+ {
+ "epoch": 0.58,
+ "learning_rate": 8.051772060110572e-06,
+ "loss": 0.8603,
+ "step": 2990
+ },
+ {
+ "epoch": 0.58,
+ "learning_rate": 8.045657347077434e-06,
+ "loss": 0.8168,
+ "step": 2991
+ },
+ {
+ "epoch": 0.58,
+ "learning_rate": 8.039543393691116e-06,
+ "loss": 0.8022,
+ "step": 2992
+ },
+ {
+ "epoch": 0.58,
+ "learning_rate": 8.033430202328094e-06,
+ "loss": 0.8319,
+ "step": 2993
+ },
+ {
+ "epoch": 0.58,
+ "learning_rate": 8.02731777536455e-06,
+ "loss": 0.8167,
+ "step": 2994
+ },
+ {
+ "epoch": 0.58,
+ "learning_rate": 8.021206115176357e-06,
+ "loss": 0.7668,
+ "step": 2995
+ },
+ {
+ "epoch": 0.58,
+ "learning_rate": 8.015095224139104e-06,
+ "loss": 0.8056,
+ "step": 2996
+ },
+ {
+ "epoch": 0.58,
+ "learning_rate": 8.008985104628076e-06,
+ "loss": 0.7956,
+ "step": 2997
+ },
+ {
+ "epoch": 0.58,
+ "learning_rate": 8.002875759018251e-06,
+ "loss": 0.7826,
+ "step": 2998
+ },
+ {
+ "epoch": 0.58,
+ "learning_rate": 7.996767189684315e-06,
+ "loss": 0.7673,
+ "step": 2999
+ },
+ {
+ "epoch": 0.58,
+ "learning_rate": 7.990659399000654e-06,
+ "loss": 0.8133,
+ "step": 3000
+ },
+ {
+ "epoch": 0.58,
+ "learning_rate": 7.98455238934134e-06,
+ "loss": 0.742,
+ "step": 3001
+ },
+ {
+ "epoch": 0.58,
+ "learning_rate": 7.978446163080155e-06,
+ "loss": 0.7956,
+ "step": 3002
+ },
+ {
+ "epoch": 0.58,
+ "learning_rate": 7.972340722590564e-06,
+ "loss": 0.8175,
+ "step": 3003
+ },
+ {
+ "epoch": 0.58,
+ "learning_rate": 7.966236070245737e-06,
+ "loss": 0.8302,
+ "step": 3004
+ },
+ {
+ "epoch": 0.58,
+ "learning_rate": 7.960132208418533e-06,
+ "loss": 0.7484,
+ "step": 3005
+ },
+ {
+ "epoch": 0.58,
+ "learning_rate": 7.954029139481501e-06,
+ "loss": 0.7502,
+ "step": 3006
+ },
+ {
+ "epoch": 0.58,
+ "learning_rate": 7.947926865806884e-06,
+ "loss": 0.7719,
+ "step": 3007
+ },
+ {
+ "epoch": 0.58,
+ "learning_rate": 7.941825389766618e-06,
+ "loss": 0.7853,
+ "step": 3008
+ },
+ {
+ "epoch": 0.58,
+ "learning_rate": 7.935724713732328e-06,
+ "loss": 0.8296,
+ "step": 3009
+ },
+ {
+ "epoch": 0.58,
+ "learning_rate": 7.929624840075329e-06,
+ "loss": 0.789,
+ "step": 3010
+ },
+ {
+ "epoch": 0.58,
+ "learning_rate": 7.92352577116662e-06,
+ "loss": 0.8031,
+ "step": 3011
+ },
+ {
+ "epoch": 0.58,
+ "learning_rate": 7.917427509376889e-06,
+ "loss": 0.7772,
+ "step": 3012
+ },
+ {
+ "epoch": 0.58,
+ "learning_rate": 7.911330057076514e-06,
+ "loss": 0.8169,
+ "step": 3013
+ },
+ {
+ "epoch": 0.58,
+ "learning_rate": 7.905233416635558e-06,
+ "loss": 0.7848,
+ "step": 3014
+ },
+ {
+ "epoch": 0.58,
+ "learning_rate": 7.899137590423758e-06,
+ "loss": 0.7502,
+ "step": 3015
+ },
+ {
+ "epoch": 0.58,
+ "learning_rate": 7.89304258081055e-06,
+ "loss": 0.7433,
+ "step": 3016
+ },
+ {
+ "epoch": 0.58,
+ "learning_rate": 7.886948390165038e-06,
+ "loss": 0.8532,
+ "step": 3017
+ },
+ {
+ "epoch": 0.58,
+ "learning_rate": 7.88085502085602e-06,
+ "loss": 0.7856,
+ "step": 3018
+ },
+ {
+ "epoch": 0.58,
+ "learning_rate": 7.874762475251966e-06,
+ "loss": 0.8122,
+ "step": 3019
+ },
+ {
+ "epoch": 0.58,
+ "learning_rate": 7.868670755721033e-06,
+ "loss": 0.8116,
+ "step": 3020
+ },
+ {
+ "epoch": 0.58,
+ "learning_rate": 7.86257986463105e-06,
+ "loss": 1.3661,
+ "step": 3021
+ },
+ {
+ "epoch": 0.58,
+ "learning_rate": 7.856489804349532e-06,
+ "loss": 0.7881,
+ "step": 3022
+ },
+ {
+ "epoch": 0.58,
+ "learning_rate": 7.850400577243658e-06,
+ "loss": 1.2617,
+ "step": 3023
+ },
+ {
+ "epoch": 0.58,
+ "learning_rate": 7.844312185680298e-06,
+ "loss": 0.7286,
+ "step": 3024
+ },
+ {
+ "epoch": 0.58,
+ "learning_rate": 7.838224632025986e-06,
+ "loss": 0.8107,
+ "step": 3025
+ },
+ {
+ "epoch": 0.58,
+ "learning_rate": 7.83213791864694e-06,
+ "loss": 0.8551,
+ "step": 3026
+ },
+ {
+ "epoch": 0.58,
+ "learning_rate": 7.826052047909042e-06,
+ "loss": 0.8143,
+ "step": 3027
+ },
+ {
+ "epoch": 0.58,
+ "learning_rate": 7.819967022177853e-06,
+ "loss": 0.7619,
+ "step": 3028
+ },
+ {
+ "epoch": 0.58,
+ "learning_rate": 7.8138828438186e-06,
+ "loss": 0.8136,
+ "step": 3029
+ },
+ {
+ "epoch": 0.58,
+ "learning_rate": 7.807799515196188e-06,
+ "loss": 0.8521,
+ "step": 3030
+ },
+ {
+ "epoch": 0.58,
+ "learning_rate": 7.801717038675192e-06,
+ "loss": 0.802,
+ "step": 3031
+ },
+ {
+ "epoch": 0.58,
+ "learning_rate": 7.795635416619843e-06,
+ "loss": 0.8392,
+ "step": 3032
+ },
+ {
+ "epoch": 0.58,
+ "learning_rate": 7.789554651394047e-06,
+ "loss": 0.783,
+ "step": 3033
+ },
+ {
+ "epoch": 0.58,
+ "learning_rate": 7.783474745361384e-06,
+ "loss": 0.7438,
+ "step": 3034
+ },
+ {
+ "epoch": 0.58,
+ "learning_rate": 7.777395700885092e-06,
+ "loss": 0.838,
+ "step": 3035
+ },
+ {
+ "epoch": 0.58,
+ "learning_rate": 7.771317520328077e-06,
+ "loss": 0.84,
+ "step": 3036
+ },
+ {
+ "epoch": 0.58,
+ "learning_rate": 7.76524020605291e-06,
+ "loss": 0.8544,
+ "step": 3037
+ },
+ {
+ "epoch": 0.58,
+ "learning_rate": 7.75916376042182e-06,
+ "loss": 0.6826,
+ "step": 3038
+ },
+ {
+ "epoch": 0.58,
+ "learning_rate": 7.753088185796712e-06,
+ "loss": 0.7726,
+ "step": 3039
+ },
+ {
+ "epoch": 0.59,
+ "learning_rate": 7.747013484539127e-06,
+ "loss": 0.8081,
+ "step": 3040
+ },
+ {
+ "epoch": 0.59,
+ "learning_rate": 7.740939659010292e-06,
+ "loss": 0.8141,
+ "step": 3041
+ },
+ {
+ "epoch": 0.59,
+ "learning_rate": 7.734866711571083e-06,
+ "loss": 0.8129,
+ "step": 3042
+ },
+ {
+ "epoch": 0.59,
+ "learning_rate": 7.728794644582035e-06,
+ "loss": 1.3064,
+ "step": 3043
+ },
+ {
+ "epoch": 0.59,
+ "learning_rate": 7.722723460403342e-06,
+ "loss": 0.8188,
+ "step": 3044
+ },
+ {
+ "epoch": 0.59,
+ "learning_rate": 7.716653161394852e-06,
+ "loss": 0.8181,
+ "step": 3045
+ },
+ {
+ "epoch": 0.59,
+ "learning_rate": 7.710583749916072e-06,
+ "loss": 0.7714,
+ "step": 3046
+ },
+ {
+ "epoch": 0.59,
+ "learning_rate": 7.704515228326169e-06,
+ "loss": 0.8172,
+ "step": 3047
+ },
+ {
+ "epoch": 0.59,
+ "learning_rate": 7.698447598983946e-06,
+ "loss": 0.7997,
+ "step": 3048
+ },
+ {
+ "epoch": 0.59,
+ "learning_rate": 7.692380864247882e-06,
+ "loss": 0.8065,
+ "step": 3049
+ },
+ {
+ "epoch": 0.59,
+ "learning_rate": 7.68631502647609e-06,
+ "loss": 0.8489,
+ "step": 3050
+ },
+ {
+ "epoch": 0.59,
+ "learning_rate": 7.680250088026345e-06,
+ "loss": 0.7746,
+ "step": 3051
+ },
+ {
+ "epoch": 0.59,
+ "learning_rate": 7.674186051256071e-06,
+ "loss": 0.7638,
+ "step": 3052
+ },
+ {
+ "epoch": 0.59,
+ "learning_rate": 7.668122918522342e-06,
+ "loss": 0.8087,
+ "step": 3053
+ },
+ {
+ "epoch": 0.59,
+ "learning_rate": 7.662060692181873e-06,
+ "loss": 1.2318,
+ "step": 3054
+ },
+ {
+ "epoch": 0.59,
+ "learning_rate": 7.655999374591034e-06,
+ "loss": 0.7751,
+ "step": 3055
+ },
+ {
+ "epoch": 0.59,
+ "learning_rate": 7.649938968105846e-06,
+ "loss": 0.8091,
+ "step": 3056
+ },
+ {
+ "epoch": 0.59,
+ "learning_rate": 7.643879475081961e-06,
+ "loss": 1.3881,
+ "step": 3057
+ },
+ {
+ "epoch": 0.59,
+ "learning_rate": 7.637820897874688e-06,
+ "loss": 0.8194,
+ "step": 3058
+ },
+ {
+ "epoch": 0.59,
+ "learning_rate": 7.631763238838979e-06,
+ "loss": 0.8175,
+ "step": 3059
+ },
+ {
+ "epoch": 0.59,
+ "learning_rate": 7.625706500329424e-06,
+ "loss": 0.7796,
+ "step": 3060
+ },
+ {
+ "epoch": 0.59,
+ "learning_rate": 7.6196506847002586e-06,
+ "loss": 0.8404,
+ "step": 3061
+ },
+ {
+ "epoch": 0.59,
+ "learning_rate": 7.61359579430536e-06,
+ "loss": 0.8196,
+ "step": 3062
+ },
+ {
+ "epoch": 0.59,
+ "learning_rate": 7.607541831498247e-06,
+ "loss": 0.7912,
+ "step": 3063
+ },
+ {
+ "epoch": 0.59,
+ "learning_rate": 7.601488798632075e-06,
+ "loss": 0.8063,
+ "step": 3064
+ },
+ {
+ "epoch": 0.59,
+ "learning_rate": 7.595436698059637e-06,
+ "loss": 0.7772,
+ "step": 3065
+ },
+ {
+ "epoch": 0.59,
+ "learning_rate": 7.589385532133365e-06,
+ "loss": 0.7315,
+ "step": 3066
+ },
+ {
+ "epoch": 0.59,
+ "learning_rate": 7.583335303205328e-06,
+ "loss": 0.7602,
+ "step": 3067
+ },
+ {
+ "epoch": 0.59,
+ "learning_rate": 7.5772860136272315e-06,
+ "loss": 0.7564,
+ "step": 3068
+ },
+ {
+ "epoch": 0.59,
+ "learning_rate": 7.57123766575042e-06,
+ "loss": 0.8303,
+ "step": 3069
+ },
+ {
+ "epoch": 0.59,
+ "learning_rate": 7.565190261925859e-06,
+ "loss": 0.7911,
+ "step": 3070
+ },
+ {
+ "epoch": 0.59,
+ "learning_rate": 7.55914380450416e-06,
+ "loss": 0.8327,
+ "step": 3071
+ },
+ {
+ "epoch": 0.59,
+ "learning_rate": 7.553098295835562e-06,
+ "loss": 0.7519,
+ "step": 3072
+ },
+ {
+ "epoch": 0.59,
+ "learning_rate": 7.547053738269937e-06,
+ "loss": 0.8126,
+ "step": 3073
+ },
+ {
+ "epoch": 0.59,
+ "learning_rate": 7.54101013415678e-06,
+ "loss": 1.3388,
+ "step": 3074
+ },
+ {
+ "epoch": 0.59,
+ "learning_rate": 7.534967485845225e-06,
+ "loss": 0.8474,
+ "step": 3075
+ },
+ {
+ "epoch": 0.59,
+ "learning_rate": 7.528925795684028e-06,
+ "loss": 0.7867,
+ "step": 3076
+ },
+ {
+ "epoch": 0.59,
+ "learning_rate": 7.522885066021575e-06,
+ "loss": 0.8267,
+ "step": 3077
+ },
+ {
+ "epoch": 0.59,
+ "learning_rate": 7.51684529920588e-06,
+ "loss": 0.752,
+ "step": 3078
+ },
+ {
+ "epoch": 0.59,
+ "learning_rate": 7.510806497584582e-06,
+ "loss": 0.7891,
+ "step": 3079
+ },
+ {
+ "epoch": 0.59,
+ "learning_rate": 7.504768663504942e-06,
+ "loss": 0.798,
+ "step": 3080
+ },
+ {
+ "epoch": 0.59,
+ "learning_rate": 7.498731799313852e-06,
+ "loss": 0.7997,
+ "step": 3081
+ },
+ {
+ "epoch": 0.59,
+ "learning_rate": 7.492695907357812e-06,
+ "loss": 0.8045,
+ "step": 3082
+ },
+ {
+ "epoch": 0.59,
+ "learning_rate": 7.486660989982962e-06,
+ "loss": 0.8025,
+ "step": 3083
+ },
+ {
+ "epoch": 0.59,
+ "learning_rate": 7.480627049535054e-06,
+ "loss": 0.8083,
+ "step": 3084
+ },
+ {
+ "epoch": 0.59,
+ "learning_rate": 7.474594088359459e-06,
+ "loss": 0.843,
+ "step": 3085
+ },
+ {
+ "epoch": 0.59,
+ "learning_rate": 7.468562108801176e-06,
+ "loss": 0.8193,
+ "step": 3086
+ },
+ {
+ "epoch": 0.59,
+ "learning_rate": 7.46253111320481e-06,
+ "loss": 0.7638,
+ "step": 3087
+ },
+ {
+ "epoch": 0.59,
+ "learning_rate": 7.456501103914593e-06,
+ "loss": 0.8147,
+ "step": 3088
+ },
+ {
+ "epoch": 0.59,
+ "learning_rate": 7.450472083274369e-06,
+ "loss": 1.2536,
+ "step": 3089
+ },
+ {
+ "epoch": 0.59,
+ "learning_rate": 7.4444440536276055e-06,
+ "loss": 0.7941,
+ "step": 3090
+ },
+ {
+ "epoch": 0.59,
+ "learning_rate": 7.438417017317369e-06,
+ "loss": 0.7885,
+ "step": 3091
+ },
+ {
+ "epoch": 0.6,
+ "learning_rate": 7.4323909766863555e-06,
+ "loss": 1.2565,
+ "step": 3092
+ },
+ {
+ "epoch": 0.6,
+ "learning_rate": 7.426365934076865e-06,
+ "loss": 0.8464,
+ "step": 3093
+ },
+ {
+ "epoch": 0.6,
+ "learning_rate": 7.4203418918308135e-06,
+ "loss": 0.8042,
+ "step": 3094
+ },
+ {
+ "epoch": 0.6,
+ "learning_rate": 7.414318852289728e-06,
+ "loss": 0.7503,
+ "step": 3095
+ },
+ {
+ "epoch": 0.6,
+ "learning_rate": 7.408296817794747e-06,
+ "loss": 0.8062,
+ "step": 3096
+ },
+ {
+ "epoch": 0.6,
+ "learning_rate": 7.402275790686611e-06,
+ "loss": 0.7742,
+ "step": 3097
+ },
+ {
+ "epoch": 0.6,
+ "learning_rate": 7.396255773305681e-06,
+ "loss": 0.8116,
+ "step": 3098
+ },
+ {
+ "epoch": 0.6,
+ "learning_rate": 7.390236767991912e-06,
+ "loss": 0.744,
+ "step": 3099
+ },
+ {
+ "epoch": 0.6,
+ "learning_rate": 7.384218777084875e-06,
+ "loss": 0.682,
+ "step": 3100
+ },
+ {
+ "epoch": 0.6,
+ "learning_rate": 7.378201802923743e-06,
+ "loss": 1.2287,
+ "step": 3101
+ },
+ {
+ "epoch": 0.6,
+ "learning_rate": 7.372185847847301e-06,
+ "loss": 0.7931,
+ "step": 3102
+ },
+ {
+ "epoch": 0.6,
+ "learning_rate": 7.366170914193923e-06,
+ "loss": 0.7825,
+ "step": 3103
+ },
+ {
+ "epoch": 0.6,
+ "learning_rate": 7.360157004301599e-06,
+ "loss": 0.787,
+ "step": 3104
+ },
+ {
+ "epoch": 0.6,
+ "learning_rate": 7.354144120507918e-06,
+ "loss": 0.7552,
+ "step": 3105
+ },
+ {
+ "epoch": 0.6,
+ "learning_rate": 7.34813226515007e-06,
+ "loss": 0.7599,
+ "step": 3106
+ },
+ {
+ "epoch": 0.6,
+ "learning_rate": 7.342121440564837e-06,
+ "loss": 0.8039,
+ "step": 3107
+ },
+ {
+ "epoch": 0.6,
+ "learning_rate": 7.336111649088615e-06,
+ "loss": 0.7387,
+ "step": 3108
+ },
+ {
+ "epoch": 0.6,
+ "learning_rate": 7.330102893057387e-06,
+ "loss": 0.8352,
+ "step": 3109
+ },
+ {
+ "epoch": 0.6,
+ "learning_rate": 7.324095174806738e-06,
+ "loss": 0.8128,
+ "step": 3110
+ },
+ {
+ "epoch": 0.6,
+ "learning_rate": 7.318088496671851e-06,
+ "loss": 0.7426,
+ "step": 3111
+ },
+ {
+ "epoch": 0.6,
+ "learning_rate": 7.312082860987501e-06,
+ "loss": 0.7676,
+ "step": 3112
+ },
+ {
+ "epoch": 0.6,
+ "learning_rate": 7.306078270088061e-06,
+ "loss": 0.7718,
+ "step": 3113
+ },
+ {
+ "epoch": 0.6,
+ "learning_rate": 7.300074726307496e-06,
+ "loss": 0.7564,
+ "step": 3114
+ },
+ {
+ "epoch": 0.6,
+ "learning_rate": 7.2940722319793665e-06,
+ "loss": 0.7869,
+ "step": 3115
+ },
+ {
+ "epoch": 0.6,
+ "learning_rate": 7.288070789436819e-06,
+ "loss": 0.7903,
+ "step": 3116
+ },
+ {
+ "epoch": 0.6,
+ "learning_rate": 7.282070401012595e-06,
+ "loss": 0.7586,
+ "step": 3117
+ },
+ {
+ "epoch": 0.6,
+ "learning_rate": 7.276071069039033e-06,
+ "loss": 0.7463,
+ "step": 3118
+ },
+ {
+ "epoch": 0.6,
+ "learning_rate": 7.270072795848047e-06,
+ "loss": 0.813,
+ "step": 3119
+ },
+ {
+ "epoch": 0.6,
+ "learning_rate": 7.264075583771149e-06,
+ "loss": 0.7592,
+ "step": 3120
+ },
+ {
+ "epoch": 0.6,
+ "learning_rate": 7.258079435139439e-06,
+ "loss": 0.8319,
+ "step": 3121
+ },
+ {
+ "epoch": 0.6,
+ "learning_rate": 7.2520843522836014e-06,
+ "loss": 0.779,
+ "step": 3122
+ },
+ {
+ "epoch": 0.6,
+ "learning_rate": 7.246090337533904e-06,
+ "loss": 0.8433,
+ "step": 3123
+ },
+ {
+ "epoch": 0.6,
+ "learning_rate": 7.240097393220201e-06,
+ "loss": 0.8026,
+ "step": 3124
+ },
+ {
+ "epoch": 0.6,
+ "learning_rate": 7.23410552167193e-06,
+ "loss": 0.7965,
+ "step": 3125
+ },
+ {
+ "epoch": 0.6,
+ "learning_rate": 7.228114725218114e-06,
+ "loss": 0.7686,
+ "step": 3126
+ },
+ {
+ "epoch": 0.6,
+ "learning_rate": 7.222125006187357e-06,
+ "loss": 0.8242,
+ "step": 3127
+ },
+ {
+ "epoch": 0.6,
+ "learning_rate": 7.216136366907844e-06,
+ "loss": 0.7833,
+ "step": 3128
+ },
+ {
+ "epoch": 0.6,
+ "learning_rate": 7.21014880970734e-06,
+ "loss": 0.8288,
+ "step": 3129
+ },
+ {
+ "epoch": 0.6,
+ "learning_rate": 7.204162336913187e-06,
+ "loss": 1.1994,
+ "step": 3130
+ },
+ {
+ "epoch": 0.6,
+ "learning_rate": 7.198176950852314e-06,
+ "loss": 0.8218,
+ "step": 3131
+ },
+ {
+ "epoch": 0.6,
+ "learning_rate": 7.192192653851219e-06,
+ "loss": 0.7499,
+ "step": 3132
+ },
+ {
+ "epoch": 0.6,
+ "learning_rate": 7.186209448235974e-06,
+ "loss": 0.7875,
+ "step": 3133
+ },
+ {
+ "epoch": 0.6,
+ "learning_rate": 7.18022733633224e-06,
+ "loss": 0.7659,
+ "step": 3134
+ },
+ {
+ "epoch": 0.6,
+ "learning_rate": 7.1742463204652415e-06,
+ "loss": 0.7979,
+ "step": 3135
+ },
+ {
+ "epoch": 0.6,
+ "learning_rate": 7.1682664029597785e-06,
+ "loss": 0.7493,
+ "step": 3136
+ },
+ {
+ "epoch": 0.6,
+ "learning_rate": 7.1622875861402285e-06,
+ "loss": 0.7917,
+ "step": 3137
+ },
+ {
+ "epoch": 0.6,
+ "learning_rate": 7.156309872330538e-06,
+ "loss": 0.8401,
+ "step": 3138
+ },
+ {
+ "epoch": 0.6,
+ "learning_rate": 7.1503332638542285e-06,
+ "loss": 0.796,
+ "step": 3139
+ },
+ {
+ "epoch": 0.6,
+ "learning_rate": 7.144357763034386e-06,
+ "loss": 0.797,
+ "step": 3140
+ },
+ {
+ "epoch": 0.6,
+ "learning_rate": 7.13838337219367e-06,
+ "loss": 0.8209,
+ "step": 3141
+ },
+ {
+ "epoch": 0.6,
+ "learning_rate": 7.132410093654304e-06,
+ "loss": 0.8351,
+ "step": 3142
+ },
+ {
+ "epoch": 0.61,
+ "learning_rate": 7.126437929738085e-06,
+ "loss": 0.7836,
+ "step": 3143
+ },
+ {
+ "epoch": 0.61,
+ "learning_rate": 7.120466882766374e-06,
+ "loss": 0.7656,
+ "step": 3144
+ },
+ {
+ "epoch": 0.61,
+ "learning_rate": 7.1144969550601e-06,
+ "loss": 0.7588,
+ "step": 3145
+ },
+ {
+ "epoch": 0.61,
+ "learning_rate": 7.1085281489397515e-06,
+ "loss": 1.2276,
+ "step": 3146
+ },
+ {
+ "epoch": 0.61,
+ "learning_rate": 7.102560466725387e-06,
+ "loss": 0.8374,
+ "step": 3147
+ },
+ {
+ "epoch": 0.61,
+ "learning_rate": 7.096593910736624e-06,
+ "loss": 0.8092,
+ "step": 3148
+ },
+ {
+ "epoch": 0.61,
+ "learning_rate": 7.090628483292652e-06,
+ "loss": 0.7733,
+ "step": 3149
+ },
+ {
+ "epoch": 0.61,
+ "learning_rate": 7.0846641867122e-06,
+ "loss": 0.795,
+ "step": 3150
+ },
+ {
+ "epoch": 0.61,
+ "learning_rate": 7.078701023313582e-06,
+ "loss": 0.7292,
+ "step": 3151
+ },
+ {
+ "epoch": 0.61,
+ "learning_rate": 7.072738995414654e-06,
+ "loss": 0.7212,
+ "step": 3152
+ },
+ {
+ "epoch": 0.61,
+ "learning_rate": 7.066778105332842e-06,
+ "loss": 0.7629,
+ "step": 3153
+ },
+ {
+ "epoch": 0.61,
+ "learning_rate": 7.060818355385124e-06,
+ "loss": 0.758,
+ "step": 3154
+ },
+ {
+ "epoch": 0.61,
+ "learning_rate": 7.054859747888037e-06,
+ "loss": 0.7815,
+ "step": 3155
+ },
+ {
+ "epoch": 0.61,
+ "learning_rate": 7.0489022851576685e-06,
+ "loss": 0.7896,
+ "step": 3156
+ },
+ {
+ "epoch": 0.61,
+ "learning_rate": 7.042945969509673e-06,
+ "loss": 0.713,
+ "step": 3157
+ },
+ {
+ "epoch": 0.61,
+ "learning_rate": 7.036990803259245e-06,
+ "loss": 0.7787,
+ "step": 3158
+ },
+ {
+ "epoch": 0.61,
+ "learning_rate": 7.0310367887211394e-06,
+ "loss": 0.7607,
+ "step": 3159
+ },
+ {
+ "epoch": 0.61,
+ "learning_rate": 7.0250839282096635e-06,
+ "loss": 0.7696,
+ "step": 3160
+ },
+ {
+ "epoch": 0.61,
+ "learning_rate": 7.019132224038681e-06,
+ "loss": 0.7015,
+ "step": 3161
+ },
+ {
+ "epoch": 0.61,
+ "learning_rate": 7.013181678521593e-06,
+ "loss": 0.8395,
+ "step": 3162
+ },
+ {
+ "epoch": 0.61,
+ "learning_rate": 7.0072322939713585e-06,
+ "loss": 0.7697,
+ "step": 3163
+ },
+ {
+ "epoch": 0.61,
+ "learning_rate": 7.0012840727004895e-06,
+ "loss": 0.7864,
+ "step": 3164
+ },
+ {
+ "epoch": 0.61,
+ "learning_rate": 6.9953370170210374e-06,
+ "loss": 0.7093,
+ "step": 3165
+ },
+ {
+ "epoch": 0.61,
+ "learning_rate": 6.989391129244607e-06,
+ "loss": 0.813,
+ "step": 3166
+ },
+ {
+ "epoch": 0.61,
+ "learning_rate": 6.983446411682343e-06,
+ "loss": 0.7543,
+ "step": 3167
+ },
+ {
+ "epoch": 0.61,
+ "learning_rate": 6.9775028666449365e-06,
+ "loss": 0.7954,
+ "step": 3168
+ },
+ {
+ "epoch": 0.61,
+ "learning_rate": 6.971560496442627e-06,
+ "loss": 0.7716,
+ "step": 3169
+ },
+ {
+ "epoch": 0.61,
+ "learning_rate": 6.965619303385196e-06,
+ "loss": 0.8074,
+ "step": 3170
+ },
+ {
+ "epoch": 0.61,
+ "learning_rate": 6.959679289781968e-06,
+ "loss": 1.201,
+ "step": 3171
+ },
+ {
+ "epoch": 0.61,
+ "learning_rate": 6.953740457941803e-06,
+ "loss": 0.6806,
+ "step": 3172
+ },
+ {
+ "epoch": 0.61,
+ "learning_rate": 6.9478028101731086e-06,
+ "loss": 0.8593,
+ "step": 3173
+ },
+ {
+ "epoch": 0.61,
+ "learning_rate": 6.941866348783835e-06,
+ "loss": 0.7543,
+ "step": 3174
+ },
+ {
+ "epoch": 0.61,
+ "learning_rate": 6.9359310760814545e-06,
+ "loss": 0.806,
+ "step": 3175
+ },
+ {
+ "epoch": 0.61,
+ "learning_rate": 6.929996994372995e-06,
+ "loss": 0.8486,
+ "step": 3176
+ },
+ {
+ "epoch": 0.61,
+ "learning_rate": 6.924064105965018e-06,
+ "loss": 0.738,
+ "step": 3177
+ },
+ {
+ "epoch": 0.61,
+ "learning_rate": 6.918132413163614e-06,
+ "loss": 0.827,
+ "step": 3178
+ },
+ {
+ "epoch": 0.61,
+ "learning_rate": 6.912201918274415e-06,
+ "loss": 0.8038,
+ "step": 3179
+ },
+ {
+ "epoch": 0.61,
+ "learning_rate": 6.906272623602587e-06,
+ "loss": 0.8206,
+ "step": 3180
+ },
+ {
+ "epoch": 0.61,
+ "learning_rate": 6.900344531452825e-06,
+ "loss": 0.7769,
+ "step": 3181
+ },
+ {
+ "epoch": 0.61,
+ "learning_rate": 6.894417644129368e-06,
+ "loss": 0.8019,
+ "step": 3182
+ },
+ {
+ "epoch": 0.61,
+ "learning_rate": 6.888491963935968e-06,
+ "loss": 0.8207,
+ "step": 3183
+ },
+ {
+ "epoch": 0.61,
+ "learning_rate": 6.882567493175921e-06,
+ "loss": 0.7762,
+ "step": 3184
+ },
+ {
+ "epoch": 0.61,
+ "learning_rate": 6.876644234152051e-06,
+ "loss": 0.7737,
+ "step": 3185
+ },
+ {
+ "epoch": 0.61,
+ "learning_rate": 6.8707221891667105e-06,
+ "loss": 0.8196,
+ "step": 3186
+ },
+ {
+ "epoch": 0.61,
+ "learning_rate": 6.86480136052178e-06,
+ "loss": 0.7699,
+ "step": 3187
+ },
+ {
+ "epoch": 0.61,
+ "learning_rate": 6.8588817505186685e-06,
+ "loss": 0.7713,
+ "step": 3188
+ },
+ {
+ "epoch": 0.61,
+ "learning_rate": 6.852963361458305e-06,
+ "loss": 0.8062,
+ "step": 3189
+ },
+ {
+ "epoch": 0.61,
+ "learning_rate": 6.847046195641151e-06,
+ "loss": 0.7726,
+ "step": 3190
+ },
+ {
+ "epoch": 0.61,
+ "learning_rate": 6.841130255367196e-06,
+ "loss": 0.8163,
+ "step": 3191
+ },
+ {
+ "epoch": 0.61,
+ "learning_rate": 6.835215542935938e-06,
+ "loss": 0.7837,
+ "step": 3192
+ },
+ {
+ "epoch": 0.61,
+ "learning_rate": 6.82930206064641e-06,
+ "loss": 1.2956,
+ "step": 3193
+ },
+ {
+ "epoch": 0.61,
+ "learning_rate": 6.823389810797167e-06,
+ "loss": 0.7646,
+ "step": 3194
+ },
+ {
+ "epoch": 0.62,
+ "learning_rate": 6.817478795686279e-06,
+ "loss": 0.8191,
+ "step": 3195
+ },
+ {
+ "epoch": 0.62,
+ "learning_rate": 6.811569017611337e-06,
+ "loss": 0.8378,
+ "step": 3196
+ },
+ {
+ "epoch": 0.62,
+ "learning_rate": 6.805660478869458e-06,
+ "loss": 0.7783,
+ "step": 3197
+ },
+ {
+ "epoch": 0.62,
+ "learning_rate": 6.799753181757271e-06,
+ "loss": 0.7941,
+ "step": 3198
+ },
+ {
+ "epoch": 0.62,
+ "learning_rate": 6.793847128570923e-06,
+ "loss": 0.8533,
+ "step": 3199
+ },
+ {
+ "epoch": 0.62,
+ "learning_rate": 6.787942321606078e-06,
+ "loss": 0.7582,
+ "step": 3200
+ },
+ {
+ "epoch": 0.62,
+ "learning_rate": 6.782038763157915e-06,
+ "loss": 0.7461,
+ "step": 3201
+ },
+ {
+ "epoch": 0.62,
+ "learning_rate": 6.776136455521128e-06,
+ "loss": 0.7666,
+ "step": 3202
+ },
+ {
+ "epoch": 0.62,
+ "learning_rate": 6.770235400989926e-06,
+ "loss": 0.7886,
+ "step": 3203
+ },
+ {
+ "epoch": 0.62,
+ "learning_rate": 6.764335601858034e-06,
+ "loss": 0.7448,
+ "step": 3204
+ },
+ {
+ "epoch": 0.62,
+ "learning_rate": 6.758437060418681e-06,
+ "loss": 0.7178,
+ "step": 3205
+ },
+ {
+ "epoch": 0.62,
+ "learning_rate": 6.752539778964613e-06,
+ "loss": 0.8347,
+ "step": 3206
+ },
+ {
+ "epoch": 0.62,
+ "learning_rate": 6.746643759788083e-06,
+ "loss": 0.7434,
+ "step": 3207
+ },
+ {
+ "epoch": 0.62,
+ "learning_rate": 6.740749005180862e-06,
+ "loss": 0.8373,
+ "step": 3208
+ },
+ {
+ "epoch": 0.62,
+ "learning_rate": 6.734855517434212e-06,
+ "loss": 0.8214,
+ "step": 3209
+ },
+ {
+ "epoch": 0.62,
+ "learning_rate": 6.728963298838921e-06,
+ "loss": 0.7664,
+ "step": 3210
+ },
+ {
+ "epoch": 0.62,
+ "learning_rate": 6.72307235168527e-06,
+ "loss": 0.7887,
+ "step": 3211
+ },
+ {
+ "epoch": 0.62,
+ "learning_rate": 6.717182678263055e-06,
+ "loss": 0.8002,
+ "step": 3212
+ },
+ {
+ "epoch": 0.62,
+ "learning_rate": 6.711294280861573e-06,
+ "loss": 0.8349,
+ "step": 3213
+ },
+ {
+ "epoch": 0.62,
+ "learning_rate": 6.705407161769628e-06,
+ "loss": 0.8555,
+ "step": 3214
+ },
+ {
+ "epoch": 0.62,
+ "learning_rate": 6.69952132327552e-06,
+ "loss": 0.764,
+ "step": 3215
+ },
+ {
+ "epoch": 0.62,
+ "learning_rate": 6.69363676766706e-06,
+ "loss": 0.7223,
+ "step": 3216
+ },
+ {
+ "epoch": 0.62,
+ "learning_rate": 6.687753497231552e-06,
+ "loss": 0.84,
+ "step": 3217
+ },
+ {
+ "epoch": 0.62,
+ "learning_rate": 6.681871514255807e-06,
+ "loss": 1.2321,
+ "step": 3218
+ },
+ {
+ "epoch": 0.62,
+ "learning_rate": 6.675990821026131e-06,
+ "loss": 0.7783,
+ "step": 3219
+ },
+ {
+ "epoch": 0.62,
+ "learning_rate": 6.6701114198283355e-06,
+ "loss": 0.7807,
+ "step": 3220
+ },
+ {
+ "epoch": 0.62,
+ "learning_rate": 6.664233312947721e-06,
+ "loss": 0.7744,
+ "step": 3221
+ },
+ {
+ "epoch": 0.62,
+ "learning_rate": 6.658356502669091e-06,
+ "loss": 0.8368,
+ "step": 3222
+ },
+ {
+ "epoch": 0.62,
+ "learning_rate": 6.652480991276742e-06,
+ "loss": 0.8511,
+ "step": 3223
+ },
+ {
+ "epoch": 0.62,
+ "learning_rate": 6.6466067810544676e-06,
+ "loss": 0.7922,
+ "step": 3224
+ },
+ {
+ "epoch": 0.62,
+ "learning_rate": 6.64073387428556e-06,
+ "loss": 0.8364,
+ "step": 3225
+ },
+ {
+ "epoch": 0.62,
+ "learning_rate": 6.6348622732527915e-06,
+ "loss": 0.7785,
+ "step": 3226
+ },
+ {
+ "epoch": 0.62,
+ "learning_rate": 6.628991980238435e-06,
+ "loss": 0.7841,
+ "step": 3227
+ },
+ {
+ "epoch": 0.62,
+ "learning_rate": 6.623122997524257e-06,
+ "loss": 0.7941,
+ "step": 3228
+ },
+ {
+ "epoch": 0.62,
+ "learning_rate": 6.617255327391514e-06,
+ "loss": 1.3055,
+ "step": 3229
+ },
+ {
+ "epoch": 0.62,
+ "learning_rate": 6.611388972120949e-06,
+ "loss": 0.7787,
+ "step": 3230
+ },
+ {
+ "epoch": 0.62,
+ "learning_rate": 6.6055239339928e-06,
+ "loss": 0.8154,
+ "step": 3231
+ },
+ {
+ "epoch": 0.62,
+ "learning_rate": 6.599660215286783e-06,
+ "loss": 0.7486,
+ "step": 3232
+ },
+ {
+ "epoch": 0.62,
+ "learning_rate": 6.593797818282113e-06,
+ "loss": 0.7708,
+ "step": 3233
+ },
+ {
+ "epoch": 0.62,
+ "learning_rate": 6.5879367452574775e-06,
+ "loss": 0.7423,
+ "step": 3234
+ },
+ {
+ "epoch": 0.62,
+ "learning_rate": 6.582076998491061e-06,
+ "loss": 0.7577,
+ "step": 3235
+ },
+ {
+ "epoch": 0.62,
+ "learning_rate": 6.5762185802605294e-06,
+ "loss": 0.8116,
+ "step": 3236
+ },
+ {
+ "epoch": 0.62,
+ "learning_rate": 6.5703614928430336e-06,
+ "loss": 0.7769,
+ "step": 3237
+ },
+ {
+ "epoch": 0.62,
+ "learning_rate": 6.564505738515199e-06,
+ "loss": 0.7769,
+ "step": 3238
+ },
+ {
+ "epoch": 0.62,
+ "learning_rate": 6.558651319553142e-06,
+ "loss": 0.7812,
+ "step": 3239
+ },
+ {
+ "epoch": 0.62,
+ "learning_rate": 6.552798238232457e-06,
+ "loss": 0.7855,
+ "step": 3240
+ },
+ {
+ "epoch": 0.62,
+ "learning_rate": 6.54694649682822e-06,
+ "loss": 0.7678,
+ "step": 3241
+ },
+ {
+ "epoch": 0.62,
+ "learning_rate": 6.541096097614983e-06,
+ "loss": 1.2276,
+ "step": 3242
+ },
+ {
+ "epoch": 0.62,
+ "learning_rate": 6.535247042866777e-06,
+ "loss": 0.7492,
+ "step": 3243
+ },
+ {
+ "epoch": 0.62,
+ "learning_rate": 6.529399334857109e-06,
+ "loss": 0.7894,
+ "step": 3244
+ },
+ {
+ "epoch": 0.62,
+ "learning_rate": 6.523552975858967e-06,
+ "loss": 0.7713,
+ "step": 3245
+ },
+ {
+ "epoch": 0.62,
+ "learning_rate": 6.517707968144812e-06,
+ "loss": 0.7926,
+ "step": 3246
+ },
+ {
+ "epoch": 0.63,
+ "learning_rate": 6.511864313986582e-06,
+ "loss": 1.2593,
+ "step": 3247
+ },
+ {
+ "epoch": 0.63,
+ "learning_rate": 6.506022015655684e-06,
+ "loss": 0.7911,
+ "step": 3248
+ },
+ {
+ "epoch": 0.63,
+ "learning_rate": 6.500181075423001e-06,
+ "loss": 0.7839,
+ "step": 3249
+ },
+ {
+ "epoch": 0.63,
+ "learning_rate": 6.494341495558894e-06,
+ "loss": 0.7949,
+ "step": 3250
+ },
+ {
+ "epoch": 0.63,
+ "learning_rate": 6.4885032783331804e-06,
+ "loss": 0.7981,
+ "step": 3251
+ },
+ {
+ "epoch": 0.63,
+ "learning_rate": 6.482666426015161e-06,
+ "loss": 0.8281,
+ "step": 3252
+ },
+ {
+ "epoch": 0.63,
+ "learning_rate": 6.476830940873603e-06,
+ "loss": 0.817,
+ "step": 3253
+ },
+ {
+ "epoch": 0.63,
+ "learning_rate": 6.470996825176739e-06,
+ "loss": 0.7674,
+ "step": 3254
+ },
+ {
+ "epoch": 0.63,
+ "learning_rate": 6.46516408119227e-06,
+ "loss": 0.8722,
+ "step": 3255
+ },
+ {
+ "epoch": 0.63,
+ "learning_rate": 6.4593327111873695e-06,
+ "loss": 0.7702,
+ "step": 3256
+ },
+ {
+ "epoch": 0.63,
+ "learning_rate": 6.45350271742867e-06,
+ "loss": 0.8265,
+ "step": 3257
+ },
+ {
+ "epoch": 0.63,
+ "learning_rate": 6.447674102182275e-06,
+ "loss": 0.7932,
+ "step": 3258
+ },
+ {
+ "epoch": 0.63,
+ "learning_rate": 6.441846867713744e-06,
+ "loss": 0.8147,
+ "step": 3259
+ },
+ {
+ "epoch": 0.63,
+ "learning_rate": 6.4360210162881054e-06,
+ "loss": 0.8068,
+ "step": 3260
+ },
+ {
+ "epoch": 0.63,
+ "learning_rate": 6.430196550169849e-06,
+ "loss": 0.8043,
+ "step": 3261
+ },
+ {
+ "epoch": 0.63,
+ "learning_rate": 6.42437347162293e-06,
+ "loss": 0.7847,
+ "step": 3262
+ },
+ {
+ "epoch": 0.63,
+ "learning_rate": 6.41855178291076e-06,
+ "loss": 0.7224,
+ "step": 3263
+ },
+ {
+ "epoch": 0.63,
+ "learning_rate": 6.412731486296206e-06,
+ "loss": 0.7411,
+ "step": 3264
+ },
+ {
+ "epoch": 0.63,
+ "learning_rate": 6.406912584041605e-06,
+ "loss": 0.8164,
+ "step": 3265
+ },
+ {
+ "epoch": 0.63,
+ "learning_rate": 6.401095078408741e-06,
+ "loss": 0.8539,
+ "step": 3266
+ },
+ {
+ "epoch": 0.63,
+ "learning_rate": 6.395278971658867e-06,
+ "loss": 0.7896,
+ "step": 3267
+ },
+ {
+ "epoch": 0.63,
+ "learning_rate": 6.389464266052679e-06,
+ "loss": 0.7946,
+ "step": 3268
+ },
+ {
+ "epoch": 0.63,
+ "learning_rate": 6.383650963850336e-06,
+ "loss": 0.8126,
+ "step": 3269
+ },
+ {
+ "epoch": 0.63,
+ "learning_rate": 6.377839067311449e-06,
+ "loss": 0.8013,
+ "step": 3270
+ },
+ {
+ "epoch": 0.63,
+ "learning_rate": 6.3720285786950865e-06,
+ "loss": 0.801,
+ "step": 3271
+ },
+ {
+ "epoch": 0.63,
+ "learning_rate": 6.366219500259766e-06,
+ "loss": 0.7789,
+ "step": 3272
+ },
+ {
+ "epoch": 0.63,
+ "learning_rate": 6.360411834263458e-06,
+ "loss": 0.7212,
+ "step": 3273
+ },
+ {
+ "epoch": 0.63,
+ "learning_rate": 6.354605582963587e-06,
+ "loss": 0.8059,
+ "step": 3274
+ },
+ {
+ "epoch": 0.63,
+ "learning_rate": 6.348800748617022e-06,
+ "loss": 0.8392,
+ "step": 3275
+ },
+ {
+ "epoch": 0.63,
+ "learning_rate": 6.342997333480081e-06,
+ "loss": 0.7685,
+ "step": 3276
+ },
+ {
+ "epoch": 0.63,
+ "learning_rate": 6.337195339808535e-06,
+ "loss": 0.7439,
+ "step": 3277
+ },
+ {
+ "epoch": 0.63,
+ "learning_rate": 6.3313947698576006e-06,
+ "loss": 0.7743,
+ "step": 3278
+ },
+ {
+ "epoch": 0.63,
+ "learning_rate": 6.3255956258819415e-06,
+ "loss": 0.7796,
+ "step": 3279
+ },
+ {
+ "epoch": 0.63,
+ "learning_rate": 6.3197979101356696e-06,
+ "loss": 0.7503,
+ "step": 3280
+ },
+ {
+ "epoch": 0.63,
+ "learning_rate": 6.314001624872335e-06,
+ "loss": 0.8123,
+ "step": 3281
+ },
+ {
+ "epoch": 0.63,
+ "learning_rate": 6.308206772344935e-06,
+ "loss": 0.8353,
+ "step": 3282
+ },
+ {
+ "epoch": 0.63,
+ "learning_rate": 6.302413354805912e-06,
+ "loss": 0.844,
+ "step": 3283
+ },
+ {
+ "epoch": 0.63,
+ "learning_rate": 6.296621374507154e-06,
+ "loss": 0.7907,
+ "step": 3284
+ },
+ {
+ "epoch": 0.63,
+ "learning_rate": 6.290830833699975e-06,
+ "loss": 0.81,
+ "step": 3285
+ },
+ {
+ "epoch": 0.63,
+ "learning_rate": 6.285041734635149e-06,
+ "loss": 1.1693,
+ "step": 3286
+ },
+ {
+ "epoch": 0.63,
+ "learning_rate": 6.279254079562873e-06,
+ "loss": 0.8037,
+ "step": 3287
+ },
+ {
+ "epoch": 0.63,
+ "learning_rate": 6.273467870732797e-06,
+ "loss": 0.7661,
+ "step": 3288
+ },
+ {
+ "epoch": 0.63,
+ "learning_rate": 6.267683110393996e-06,
+ "loss": 0.7325,
+ "step": 3289
+ },
+ {
+ "epoch": 0.63,
+ "learning_rate": 6.261899800794996e-06,
+ "loss": 0.7569,
+ "step": 3290
+ },
+ {
+ "epoch": 0.63,
+ "learning_rate": 6.2561179441837435e-06,
+ "loss": 0.8028,
+ "step": 3291
+ },
+ {
+ "epoch": 0.63,
+ "learning_rate": 6.250337542807636e-06,
+ "loss": 0.7797,
+ "step": 3292
+ },
+ {
+ "epoch": 0.63,
+ "learning_rate": 6.244558598913487e-06,
+ "loss": 0.7832,
+ "step": 3293
+ },
+ {
+ "epoch": 0.63,
+ "learning_rate": 6.238781114747561e-06,
+ "loss": 0.7869,
+ "step": 3294
+ },
+ {
+ "epoch": 0.63,
+ "learning_rate": 6.233005092555544e-06,
+ "loss": 0.849,
+ "step": 3295
+ },
+ {
+ "epoch": 0.63,
+ "learning_rate": 6.2272305345825624e-06,
+ "loss": 0.7782,
+ "step": 3296
+ },
+ {
+ "epoch": 0.63,
+ "learning_rate": 6.221457443073164e-06,
+ "loss": 0.7506,
+ "step": 3297
+ },
+ {
+ "epoch": 0.63,
+ "learning_rate": 6.215685820271332e-06,
+ "loss": 1.1714,
+ "step": 3298
+ },
+ {
+ "epoch": 0.64,
+ "learning_rate": 6.20991566842048e-06,
+ "loss": 0.8022,
+ "step": 3299
+ },
+ {
+ "epoch": 0.64,
+ "learning_rate": 6.204146989763449e-06,
+ "loss": 0.8058,
+ "step": 3300
+ },
+ {
+ "epoch": 0.64,
+ "learning_rate": 6.198379786542503e-06,
+ "loss": 0.7978,
+ "step": 3301
+ },
+ {
+ "epoch": 0.64,
+ "learning_rate": 6.192614060999337e-06,
+ "loss": 0.8206,
+ "step": 3302
+ },
+ {
+ "epoch": 0.64,
+ "learning_rate": 6.1868498153750685e-06,
+ "loss": 0.7693,
+ "step": 3303
+ },
+ {
+ "epoch": 0.64,
+ "learning_rate": 6.181087051910242e-06,
+ "loss": 0.7712,
+ "step": 3304
+ },
+ {
+ "epoch": 0.64,
+ "learning_rate": 6.175325772844828e-06,
+ "loss": 0.7441,
+ "step": 3305
+ },
+ {
+ "epoch": 0.64,
+ "learning_rate": 6.1695659804182175e-06,
+ "loss": 0.8179,
+ "step": 3306
+ },
+ {
+ "epoch": 0.64,
+ "learning_rate": 6.16380767686922e-06,
+ "loss": 0.7219,
+ "step": 3307
+ },
+ {
+ "epoch": 0.64,
+ "learning_rate": 6.158050864436073e-06,
+ "loss": 0.8213,
+ "step": 3308
+ },
+ {
+ "epoch": 0.64,
+ "learning_rate": 6.152295545356434e-06,
+ "loss": 0.7851,
+ "step": 3309
+ },
+ {
+ "epoch": 0.64,
+ "learning_rate": 6.14654172186737e-06,
+ "loss": 1.2747,
+ "step": 3310
+ },
+ {
+ "epoch": 0.64,
+ "learning_rate": 6.1407893962053765e-06,
+ "loss": 0.748,
+ "step": 3311
+ },
+ {
+ "epoch": 0.64,
+ "learning_rate": 6.1350385706063685e-06,
+ "loss": 0.8223,
+ "step": 3312
+ },
+ {
+ "epoch": 0.64,
+ "learning_rate": 6.12928924730567e-06,
+ "loss": 0.7863,
+ "step": 3313
+ },
+ {
+ "epoch": 0.64,
+ "learning_rate": 6.123541428538025e-06,
+ "loss": 0.7289,
+ "step": 3314
+ },
+ {
+ "epoch": 0.64,
+ "learning_rate": 6.117795116537593e-06,
+ "loss": 0.7651,
+ "step": 3315
+ },
+ {
+ "epoch": 0.64,
+ "learning_rate": 6.11205031353795e-06,
+ "loss": 0.798,
+ "step": 3316
+ },
+ {
+ "epoch": 0.64,
+ "learning_rate": 6.10630702177208e-06,
+ "loss": 0.7876,
+ "step": 3317
+ },
+ {
+ "epoch": 0.64,
+ "learning_rate": 6.1005652434723825e-06,
+ "loss": 0.8331,
+ "step": 3318
+ },
+ {
+ "epoch": 0.64,
+ "learning_rate": 6.094824980870669e-06,
+ "loss": 0.7982,
+ "step": 3319
+ },
+ {
+ "epoch": 0.64,
+ "learning_rate": 6.089086236198159e-06,
+ "loss": 0.7532,
+ "step": 3320
+ },
+ {
+ "epoch": 0.64,
+ "learning_rate": 6.083349011685487e-06,
+ "loss": 0.7917,
+ "step": 3321
+ },
+ {
+ "epoch": 0.64,
+ "learning_rate": 6.077613309562697e-06,
+ "loss": 0.8158,
+ "step": 3322
+ },
+ {
+ "epoch": 0.64,
+ "learning_rate": 6.0718791320592325e-06,
+ "loss": 0.7868,
+ "step": 3323
+ },
+ {
+ "epoch": 0.64,
+ "learning_rate": 6.066146481403953e-06,
+ "loss": 0.7998,
+ "step": 3324
+ },
+ {
+ "epoch": 0.64,
+ "learning_rate": 6.060415359825123e-06,
+ "loss": 0.8345,
+ "step": 3325
+ },
+ {
+ "epoch": 0.64,
+ "learning_rate": 6.0546857695504115e-06,
+ "loss": 0.7784,
+ "step": 3326
+ },
+ {
+ "epoch": 0.64,
+ "learning_rate": 6.048957712806888e-06,
+ "loss": 0.7713,
+ "step": 3327
+ },
+ {
+ "epoch": 0.64,
+ "learning_rate": 6.043231191821033e-06,
+ "loss": 0.7978,
+ "step": 3328
+ },
+ {
+ "epoch": 0.64,
+ "learning_rate": 6.037506208818728e-06,
+ "loss": 0.7693,
+ "step": 3329
+ },
+ {
+ "epoch": 0.64,
+ "learning_rate": 6.031782766025253e-06,
+ "loss": 0.8119,
+ "step": 3330
+ },
+ {
+ "epoch": 0.64,
+ "learning_rate": 6.026060865665293e-06,
+ "loss": 0.7267,
+ "step": 3331
+ },
+ {
+ "epoch": 0.64,
+ "learning_rate": 6.020340509962935e-06,
+ "loss": 0.7751,
+ "step": 3332
+ },
+ {
+ "epoch": 0.64,
+ "learning_rate": 6.014621701141663e-06,
+ "loss": 0.7099,
+ "step": 3333
+ },
+ {
+ "epoch": 0.64,
+ "learning_rate": 6.008904441424359e-06,
+ "loss": 0.837,
+ "step": 3334
+ },
+ {
+ "epoch": 0.64,
+ "learning_rate": 6.003188733033303e-06,
+ "loss": 0.7929,
+ "step": 3335
+ },
+ {
+ "epoch": 0.64,
+ "learning_rate": 5.997474578190171e-06,
+ "loss": 0.8319,
+ "step": 3336
+ },
+ {
+ "epoch": 0.64,
+ "learning_rate": 5.9917619791160395e-06,
+ "loss": 0.714,
+ "step": 3337
+ },
+ {
+ "epoch": 0.64,
+ "learning_rate": 5.986050938031378e-06,
+ "loss": 0.798,
+ "step": 3338
+ },
+ {
+ "epoch": 0.64,
+ "learning_rate": 5.980341457156051e-06,
+ "loss": 0.7709,
+ "step": 3339
+ },
+ {
+ "epoch": 0.64,
+ "learning_rate": 5.974633538709311e-06,
+ "loss": 0.7851,
+ "step": 3340
+ },
+ {
+ "epoch": 0.64,
+ "learning_rate": 5.968927184909812e-06,
+ "loss": 0.8099,
+ "step": 3341
+ },
+ {
+ "epoch": 0.64,
+ "learning_rate": 5.963222397975595e-06,
+ "loss": 0.8006,
+ "step": 3342
+ },
+ {
+ "epoch": 0.64,
+ "learning_rate": 5.957519180124097e-06,
+ "loss": 0.8174,
+ "step": 3343
+ },
+ {
+ "epoch": 0.64,
+ "learning_rate": 5.951817533572133e-06,
+ "loss": 0.7964,
+ "step": 3344
+ },
+ {
+ "epoch": 0.64,
+ "learning_rate": 5.9461174605359185e-06,
+ "loss": 1.2692,
+ "step": 3345
+ },
+ {
+ "epoch": 0.64,
+ "learning_rate": 5.940418963231051e-06,
+ "loss": 0.8142,
+ "step": 3346
+ },
+ {
+ "epoch": 0.64,
+ "learning_rate": 5.934722043872524e-06,
+ "loss": 0.7955,
+ "step": 3347
+ },
+ {
+ "epoch": 0.64,
+ "learning_rate": 5.929026704674707e-06,
+ "loss": 0.7889,
+ "step": 3348
+ },
+ {
+ "epoch": 0.64,
+ "learning_rate": 5.923332947851365e-06,
+ "loss": 0.7832,
+ "step": 3349
+ },
+ {
+ "epoch": 0.64,
+ "learning_rate": 5.917640775615641e-06,
+ "loss": 0.7532,
+ "step": 3350
+ },
+ {
+ "epoch": 0.65,
+ "learning_rate": 5.911950190180067e-06,
+ "loss": 0.7575,
+ "step": 3351
+ },
+ {
+ "epoch": 0.65,
+ "learning_rate": 5.9062611937565485e-06,
+ "loss": 0.761,
+ "step": 3352
+ },
+ {
+ "epoch": 0.65,
+ "learning_rate": 5.900573788556384e-06,
+ "loss": 0.8281,
+ "step": 3353
+ },
+ {
+ "epoch": 0.65,
+ "learning_rate": 5.894887976790251e-06,
+ "loss": 0.7861,
+ "step": 3354
+ },
+ {
+ "epoch": 0.65,
+ "learning_rate": 5.8892037606682094e-06,
+ "loss": 0.8481,
+ "step": 3355
+ },
+ {
+ "epoch": 0.65,
+ "learning_rate": 5.883521142399691e-06,
+ "loss": 0.796,
+ "step": 3356
+ },
+ {
+ "epoch": 0.65,
+ "learning_rate": 5.877840124193513e-06,
+ "loss": 0.7548,
+ "step": 3357
+ },
+ {
+ "epoch": 0.65,
+ "learning_rate": 5.87216070825787e-06,
+ "loss": 0.8459,
+ "step": 3358
+ },
+ {
+ "epoch": 0.65,
+ "learning_rate": 5.866482896800336e-06,
+ "loss": 0.8163,
+ "step": 3359
+ },
+ {
+ "epoch": 0.65,
+ "learning_rate": 5.860806692027854e-06,
+ "loss": 0.777,
+ "step": 3360
+ },
+ {
+ "epoch": 0.65,
+ "learning_rate": 5.85513209614675e-06,
+ "loss": 0.7938,
+ "step": 3361
+ },
+ {
+ "epoch": 0.65,
+ "learning_rate": 5.8494591113627165e-06,
+ "loss": 0.7921,
+ "step": 3362
+ },
+ {
+ "epoch": 0.65,
+ "learning_rate": 5.843787739880827e-06,
+ "loss": 1.3007,
+ "step": 3363
+ },
+ {
+ "epoch": 0.65,
+ "learning_rate": 5.8381179839055305e-06,
+ "loss": 0.7872,
+ "step": 3364
+ },
+ {
+ "epoch": 0.65,
+ "learning_rate": 5.8324498456406344e-06,
+ "loss": 0.7864,
+ "step": 3365
+ },
+ {
+ "epoch": 0.65,
+ "learning_rate": 5.826783327289336e-06,
+ "loss": 0.7826,
+ "step": 3366
+ },
+ {
+ "epoch": 0.65,
+ "learning_rate": 5.8211184310541835e-06,
+ "loss": 0.8082,
+ "step": 3367
+ },
+ {
+ "epoch": 0.65,
+ "learning_rate": 5.815455159137118e-06,
+ "loss": 0.7421,
+ "step": 3368
+ },
+ {
+ "epoch": 0.65,
+ "learning_rate": 5.809793513739421e-06,
+ "loss": 1.2202,
+ "step": 3369
+ },
+ {
+ "epoch": 0.65,
+ "learning_rate": 5.804133497061759e-06,
+ "loss": 0.8003,
+ "step": 3370
+ },
+ {
+ "epoch": 0.65,
+ "learning_rate": 5.798475111304168e-06,
+ "loss": 0.7949,
+ "step": 3371
+ },
+ {
+ "epoch": 0.65,
+ "learning_rate": 5.7928183586660395e-06,
+ "loss": 0.8154,
+ "step": 3372
+ },
+ {
+ "epoch": 0.65,
+ "learning_rate": 5.787163241346141e-06,
+ "loss": 0.7718,
+ "step": 3373
+ },
+ {
+ "epoch": 0.65,
+ "learning_rate": 5.781509761542594e-06,
+ "loss": 0.789,
+ "step": 3374
+ },
+ {
+ "epoch": 0.65,
+ "learning_rate": 5.7758579214528875e-06,
+ "loss": 0.7981,
+ "step": 3375
+ },
+ {
+ "epoch": 0.65,
+ "learning_rate": 5.770207723273881e-06,
+ "loss": 0.7888,
+ "step": 3376
+ },
+ {
+ "epoch": 0.65,
+ "learning_rate": 5.764559169201783e-06,
+ "loss": 0.7438,
+ "step": 3377
+ },
+ {
+ "epoch": 0.65,
+ "learning_rate": 5.75891226143217e-06,
+ "loss": 1.1786,
+ "step": 3378
+ },
+ {
+ "epoch": 0.65,
+ "learning_rate": 5.753267002159972e-06,
+ "loss": 0.7542,
+ "step": 3379
+ },
+ {
+ "epoch": 0.65,
+ "learning_rate": 5.747623393579494e-06,
+ "loss": 0.7929,
+ "step": 3380
+ },
+ {
+ "epoch": 0.65,
+ "learning_rate": 5.741981437884381e-06,
+ "loss": 0.7156,
+ "step": 3381
+ },
+ {
+ "epoch": 0.65,
+ "learning_rate": 5.736341137267642e-06,
+ "loss": 0.7691,
+ "step": 3382
+ },
+ {
+ "epoch": 0.65,
+ "learning_rate": 5.730702493921653e-06,
+ "loss": 0.7917,
+ "step": 3383
+ },
+ {
+ "epoch": 0.65,
+ "learning_rate": 5.72506551003813e-06,
+ "loss": 0.8229,
+ "step": 3384
+ },
+ {
+ "epoch": 0.65,
+ "learning_rate": 5.719430187808153e-06,
+ "loss": 1.1332,
+ "step": 3385
+ },
+ {
+ "epoch": 0.65,
+ "learning_rate": 5.7137965294221534e-06,
+ "loss": 0.7686,
+ "step": 3386
+ },
+ {
+ "epoch": 0.65,
+ "learning_rate": 5.708164537069911e-06,
+ "loss": 0.7924,
+ "step": 3387
+ },
+ {
+ "epoch": 0.65,
+ "learning_rate": 5.702534212940575e-06,
+ "loss": 0.7499,
+ "step": 3388
+ },
+ {
+ "epoch": 0.65,
+ "learning_rate": 5.696905559222623e-06,
+ "loss": 0.7555,
+ "step": 3389
+ },
+ {
+ "epoch": 0.65,
+ "learning_rate": 5.691278578103905e-06,
+ "loss": 1.2469,
+ "step": 3390
+ },
+ {
+ "epoch": 0.65,
+ "learning_rate": 5.685653271771605e-06,
+ "loss": 0.7656,
+ "step": 3391
+ },
+ {
+ "epoch": 0.65,
+ "learning_rate": 5.680029642412259e-06,
+ "loss": 0.7681,
+ "step": 3392
+ },
+ {
+ "epoch": 0.65,
+ "learning_rate": 5.6744076922117665e-06,
+ "loss": 0.7544,
+ "step": 3393
+ },
+ {
+ "epoch": 0.65,
+ "learning_rate": 5.668787423355344e-06,
+ "loss": 0.7927,
+ "step": 3394
+ },
+ {
+ "epoch": 0.65,
+ "learning_rate": 5.663168838027587e-06,
+ "loss": 0.8166,
+ "step": 3395
+ },
+ {
+ "epoch": 0.65,
+ "learning_rate": 5.657551938412416e-06,
+ "loss": 0.7901,
+ "step": 3396
+ },
+ {
+ "epoch": 0.65,
+ "learning_rate": 5.651936726693099e-06,
+ "loss": 0.7953,
+ "step": 3397
+ },
+ {
+ "epoch": 0.65,
+ "learning_rate": 5.6463232050522575e-06,
+ "loss": 0.7999,
+ "step": 3398
+ },
+ {
+ "epoch": 0.65,
+ "learning_rate": 5.640711375671843e-06,
+ "loss": 0.7804,
+ "step": 3399
+ },
+ {
+ "epoch": 0.65,
+ "learning_rate": 5.635101240733164e-06,
+ "loss": 0.8252,
+ "step": 3400
+ },
+ {
+ "epoch": 0.65,
+ "learning_rate": 5.629492802416858e-06,
+ "loss": 0.773,
+ "step": 3401
+ },
+ {
+ "epoch": 0.65,
+ "learning_rate": 5.623886062902906e-06,
+ "loss": 1.252,
+ "step": 3402
+ },
+ {
+ "epoch": 0.66,
+ "learning_rate": 5.618281024370632e-06,
+ "loss": 0.7961,
+ "step": 3403
+ },
+ {
+ "epoch": 0.66,
+ "learning_rate": 5.612677688998691e-06,
+ "loss": 0.8008,
+ "step": 3404
+ },
+ {
+ "epoch": 0.66,
+ "learning_rate": 5.607076058965092e-06,
+ "loss": 0.7885,
+ "step": 3405
+ },
+ {
+ "epoch": 0.66,
+ "learning_rate": 5.601476136447165e-06,
+ "loss": 0.8022,
+ "step": 3406
+ },
+ {
+ "epoch": 0.66,
+ "learning_rate": 5.595877923621579e-06,
+ "loss": 0.8144,
+ "step": 3407
+ },
+ {
+ "epoch": 0.66,
+ "learning_rate": 5.5902814226643485e-06,
+ "loss": 0.7569,
+ "step": 3408
+ },
+ {
+ "epoch": 0.66,
+ "learning_rate": 5.584686635750809e-06,
+ "loss": 0.7945,
+ "step": 3409
+ },
+ {
+ "epoch": 0.66,
+ "learning_rate": 5.579093565055649e-06,
+ "loss": 0.8089,
+ "step": 3410
+ },
+ {
+ "epoch": 0.66,
+ "learning_rate": 5.573502212752861e-06,
+ "loss": 0.7674,
+ "step": 3411
+ },
+ {
+ "epoch": 0.66,
+ "learning_rate": 5.567912581015797e-06,
+ "loss": 0.8384,
+ "step": 3412
+ },
+ {
+ "epoch": 0.66,
+ "learning_rate": 5.562324672017125e-06,
+ "loss": 0.7565,
+ "step": 3413
+ },
+ {
+ "epoch": 0.66,
+ "learning_rate": 5.556738487928845e-06,
+ "loss": 0.778,
+ "step": 3414
+ },
+ {
+ "epoch": 0.66,
+ "learning_rate": 5.551154030922297e-06,
+ "loss": 0.7405,
+ "step": 3415
+ },
+ {
+ "epoch": 0.66,
+ "learning_rate": 5.5455713031681366e-06,
+ "loss": 0.7746,
+ "step": 3416
+ },
+ {
+ "epoch": 0.66,
+ "learning_rate": 5.539990306836349e-06,
+ "loss": 0.8146,
+ "step": 3417
+ },
+ {
+ "epoch": 0.66,
+ "learning_rate": 5.534411044096259e-06,
+ "loss": 0.7815,
+ "step": 3418
+ },
+ {
+ "epoch": 0.66,
+ "learning_rate": 5.528833517116504e-06,
+ "loss": 0.8176,
+ "step": 3419
+ },
+ {
+ "epoch": 0.66,
+ "learning_rate": 5.5232577280650505e-06,
+ "loss": 0.8247,
+ "step": 3420
+ },
+ {
+ "epoch": 0.66,
+ "learning_rate": 5.5176836791091846e-06,
+ "loss": 0.7207,
+ "step": 3421
+ },
+ {
+ "epoch": 0.66,
+ "learning_rate": 5.512111372415533e-06,
+ "loss": 0.8296,
+ "step": 3422
+ },
+ {
+ "epoch": 0.66,
+ "learning_rate": 5.5065408101500275e-06,
+ "loss": 0.7691,
+ "step": 3423
+ },
+ {
+ "epoch": 0.66,
+ "learning_rate": 5.500971994477924e-06,
+ "loss": 0.7595,
+ "step": 3424
+ },
+ {
+ "epoch": 0.66,
+ "learning_rate": 5.495404927563812e-06,
+ "loss": 0.8123,
+ "step": 3425
+ },
+ {
+ "epoch": 0.66,
+ "learning_rate": 5.489839611571586e-06,
+ "loss": 0.7915,
+ "step": 3426
+ },
+ {
+ "epoch": 0.66,
+ "learning_rate": 5.484276048664477e-06,
+ "loss": 1.198,
+ "step": 3427
+ },
+ {
+ "epoch": 0.66,
+ "learning_rate": 5.4787142410050086e-06,
+ "loss": 0.8015,
+ "step": 3428
+ },
+ {
+ "epoch": 0.66,
+ "learning_rate": 5.47315419075505e-06,
+ "loss": 0.7762,
+ "step": 3429
+ },
+ {
+ "epoch": 0.66,
+ "learning_rate": 5.467595900075773e-06,
+ "loss": 0.7752,
+ "step": 3430
+ },
+ {
+ "epoch": 0.66,
+ "learning_rate": 5.462039371127661e-06,
+ "loss": 1.2129,
+ "step": 3431
+ },
+ {
+ "epoch": 0.66,
+ "learning_rate": 5.456484606070529e-06,
+ "loss": 0.7934,
+ "step": 3432
+ },
+ {
+ "epoch": 0.66,
+ "learning_rate": 5.4509316070634925e-06,
+ "loss": 1.2001,
+ "step": 3433
+ },
+ {
+ "epoch": 0.66,
+ "learning_rate": 5.445380376264979e-06,
+ "loss": 0.7522,
+ "step": 3434
+ },
+ {
+ "epoch": 0.66,
+ "learning_rate": 5.439830915832745e-06,
+ "loss": 0.7491,
+ "step": 3435
+ },
+ {
+ "epoch": 0.66,
+ "learning_rate": 5.434283227923842e-06,
+ "loss": 0.754,
+ "step": 3436
+ },
+ {
+ "epoch": 0.66,
+ "learning_rate": 5.428737314694639e-06,
+ "loss": 0.7394,
+ "step": 3437
+ },
+ {
+ "epoch": 0.66,
+ "learning_rate": 5.423193178300813e-06,
+ "loss": 1.1703,
+ "step": 3438
+ },
+ {
+ "epoch": 0.66,
+ "learning_rate": 5.4176508208973574e-06,
+ "loss": 0.7737,
+ "step": 3439
+ },
+ {
+ "epoch": 0.66,
+ "learning_rate": 5.4121102446385685e-06,
+ "loss": 0.7759,
+ "step": 3440
+ },
+ {
+ "epoch": 0.66,
+ "learning_rate": 5.406571451678045e-06,
+ "loss": 0.8139,
+ "step": 3441
+ },
+ {
+ "epoch": 0.66,
+ "learning_rate": 5.401034444168705e-06,
+ "loss": 0.8469,
+ "step": 3442
+ },
+ {
+ "epoch": 0.66,
+ "learning_rate": 5.395499224262763e-06,
+ "loss": 0.7667,
+ "step": 3443
+ },
+ {
+ "epoch": 0.66,
+ "learning_rate": 5.3899657941117426e-06,
+ "loss": 0.8383,
+ "step": 3444
+ },
+ {
+ "epoch": 0.66,
+ "learning_rate": 5.384434155866467e-06,
+ "loss": 0.7785,
+ "step": 3445
+ },
+ {
+ "epoch": 0.66,
+ "learning_rate": 5.378904311677067e-06,
+ "loss": 0.7699,
+ "step": 3446
+ },
+ {
+ "epoch": 0.66,
+ "learning_rate": 5.373376263692982e-06,
+ "loss": 0.7526,
+ "step": 3447
+ },
+ {
+ "epoch": 0.66,
+ "learning_rate": 5.367850014062937e-06,
+ "loss": 0.7674,
+ "step": 3448
+ },
+ {
+ "epoch": 0.66,
+ "learning_rate": 5.362325564934978e-06,
+ "loss": 0.8091,
+ "step": 3449
+ },
+ {
+ "epoch": 0.66,
+ "learning_rate": 5.356802918456437e-06,
+ "loss": 0.8256,
+ "step": 3450
+ },
+ {
+ "epoch": 0.66,
+ "learning_rate": 5.351282076773945e-06,
+ "loss": 0.7566,
+ "step": 3451
+ },
+ {
+ "epoch": 0.66,
+ "learning_rate": 5.34576304203344e-06,
+ "loss": 0.7329,
+ "step": 3452
+ },
+ {
+ "epoch": 0.66,
+ "learning_rate": 5.340245816380156e-06,
+ "loss": 0.7439,
+ "step": 3453
+ },
+ {
+ "epoch": 0.66,
+ "learning_rate": 5.334730401958616e-06,
+ "loss": 0.7612,
+ "step": 3454
+ },
+ {
+ "epoch": 0.67,
+ "learning_rate": 5.329216800912646e-06,
+ "loss": 0.8292,
+ "step": 3455
+ },
+ {
+ "epoch": 0.67,
+ "learning_rate": 5.323705015385362e-06,
+ "loss": 0.7332,
+ "step": 3456
+ },
+ {
+ "epoch": 0.67,
+ "learning_rate": 5.318195047519184e-06,
+ "loss": 0.7816,
+ "step": 3457
+ },
+ {
+ "epoch": 0.67,
+ "learning_rate": 5.312686899455813e-06,
+ "loss": 0.7606,
+ "step": 3458
+ },
+ {
+ "epoch": 0.67,
+ "learning_rate": 5.3071805733362555e-06,
+ "loss": 0.7752,
+ "step": 3459
+ },
+ {
+ "epoch": 0.67,
+ "learning_rate": 5.3016760713008e-06,
+ "loss": 0.8364,
+ "step": 3460
+ },
+ {
+ "epoch": 0.67,
+ "learning_rate": 5.296173395489028e-06,
+ "loss": 0.7654,
+ "step": 3461
+ },
+ {
+ "epoch": 0.67,
+ "learning_rate": 5.290672548039812e-06,
+ "loss": 0.8211,
+ "step": 3462
+ },
+ {
+ "epoch": 0.67,
+ "learning_rate": 5.285173531091312e-06,
+ "loss": 0.7538,
+ "step": 3463
+ },
+ {
+ "epoch": 0.67,
+ "learning_rate": 5.279676346780984e-06,
+ "loss": 0.7916,
+ "step": 3464
+ },
+ {
+ "epoch": 0.67,
+ "learning_rate": 5.274180997245564e-06,
+ "loss": 0.7772,
+ "step": 3465
+ },
+ {
+ "epoch": 0.67,
+ "learning_rate": 5.268687484621073e-06,
+ "loss": 0.8265,
+ "step": 3466
+ },
+ {
+ "epoch": 0.67,
+ "learning_rate": 5.263195811042829e-06,
+ "loss": 1.2034,
+ "step": 3467
+ },
+ {
+ "epoch": 0.67,
+ "learning_rate": 5.2577059786454196e-06,
+ "loss": 0.8587,
+ "step": 3468
+ },
+ {
+ "epoch": 0.67,
+ "learning_rate": 5.25221798956274e-06,
+ "loss": 0.852,
+ "step": 3469
+ },
+ {
+ "epoch": 0.67,
+ "learning_rate": 5.246731845927937e-06,
+ "loss": 0.825,
+ "step": 3470
+ },
+ {
+ "epoch": 0.67,
+ "learning_rate": 5.2412475498734675e-06,
+ "loss": 0.7756,
+ "step": 3471
+ },
+ {
+ "epoch": 0.67,
+ "learning_rate": 5.235765103531058e-06,
+ "loss": 0.7683,
+ "step": 3472
+ },
+ {
+ "epoch": 0.67,
+ "learning_rate": 5.230284509031716e-06,
+ "loss": 0.8596,
+ "step": 3473
+ },
+ {
+ "epoch": 0.67,
+ "learning_rate": 5.224805768505734e-06,
+ "loss": 0.7327,
+ "step": 3474
+ },
+ {
+ "epoch": 0.67,
+ "learning_rate": 5.2193288840826775e-06,
+ "loss": 0.8407,
+ "step": 3475
+ },
+ {
+ "epoch": 0.67,
+ "learning_rate": 5.2138538578914e-06,
+ "loss": 0.7562,
+ "step": 3476
+ },
+ {
+ "epoch": 0.67,
+ "learning_rate": 5.208380692060023e-06,
+ "loss": 0.7735,
+ "step": 3477
+ },
+ {
+ "epoch": 0.67,
+ "learning_rate": 5.202909388715951e-06,
+ "loss": 0.7524,
+ "step": 3478
+ },
+ {
+ "epoch": 0.67,
+ "learning_rate": 5.1974399499858595e-06,
+ "loss": 0.7437,
+ "step": 3479
+ },
+ {
+ "epoch": 0.67,
+ "learning_rate": 5.1919723779957e-06,
+ "loss": 0.7753,
+ "step": 3480
+ },
+ {
+ "epoch": 0.67,
+ "learning_rate": 5.186506674870708e-06,
+ "loss": 0.7868,
+ "step": 3481
+ },
+ {
+ "epoch": 0.67,
+ "learning_rate": 5.181042842735381e-06,
+ "loss": 0.7918,
+ "step": 3482
+ },
+ {
+ "epoch": 0.67,
+ "learning_rate": 5.175580883713489e-06,
+ "loss": 0.718,
+ "step": 3483
+ },
+ {
+ "epoch": 0.67,
+ "learning_rate": 5.170120799928087e-06,
+ "loss": 0.7346,
+ "step": 3484
+ },
+ {
+ "epoch": 0.67,
+ "learning_rate": 5.164662593501484e-06,
+ "loss": 0.8014,
+ "step": 3485
+ },
+ {
+ "epoch": 0.67,
+ "learning_rate": 5.159206266555278e-06,
+ "loss": 0.785,
+ "step": 3486
+ },
+ {
+ "epoch": 0.67,
+ "learning_rate": 5.1537518212103125e-06,
+ "loss": 0.8027,
+ "step": 3487
+ },
+ {
+ "epoch": 0.67,
+ "learning_rate": 5.148299259586723e-06,
+ "loss": 0.7592,
+ "step": 3488
+ },
+ {
+ "epoch": 0.67,
+ "learning_rate": 5.142848583803901e-06,
+ "loss": 0.7568,
+ "step": 3489
+ },
+ {
+ "epoch": 0.67,
+ "learning_rate": 5.137399795980502e-06,
+ "loss": 0.7384,
+ "step": 3490
+ },
+ {
+ "epoch": 0.67,
+ "learning_rate": 5.1319528982344614e-06,
+ "loss": 0.7632,
+ "step": 3491
+ },
+ {
+ "epoch": 0.67,
+ "learning_rate": 5.126507892682966e-06,
+ "loss": 0.7111,
+ "step": 3492
+ },
+ {
+ "epoch": 0.67,
+ "learning_rate": 5.121064781442469e-06,
+ "loss": 0.8269,
+ "step": 3493
+ },
+ {
+ "epoch": 0.67,
+ "learning_rate": 5.115623566628698e-06,
+ "loss": 0.7632,
+ "step": 3494
+ },
+ {
+ "epoch": 0.67,
+ "learning_rate": 5.110184250356633e-06,
+ "loss": 0.8052,
+ "step": 3495
+ },
+ {
+ "epoch": 0.67,
+ "learning_rate": 5.104746834740518e-06,
+ "loss": 0.7971,
+ "step": 3496
+ },
+ {
+ "epoch": 0.67,
+ "learning_rate": 5.099311321893856e-06,
+ "loss": 0.7642,
+ "step": 3497
+ },
+ {
+ "epoch": 0.67,
+ "learning_rate": 5.09387771392942e-06,
+ "loss": 0.7829,
+ "step": 3498
+ },
+ {
+ "epoch": 0.67,
+ "learning_rate": 5.088446012959236e-06,
+ "loss": 1.2205,
+ "step": 3499
+ },
+ {
+ "epoch": 0.67,
+ "learning_rate": 5.083016221094582e-06,
+ "loss": 0.7645,
+ "step": 3500
+ },
+ {
+ "epoch": 0.67,
+ "learning_rate": 5.077588340446009e-06,
+ "loss": 0.7319,
+ "step": 3501
+ },
+ {
+ "epoch": 0.67,
+ "learning_rate": 5.0721623731233145e-06,
+ "loss": 0.8052,
+ "step": 3502
+ },
+ {
+ "epoch": 0.67,
+ "learning_rate": 5.066738321235554e-06,
+ "loss": 0.7852,
+ "step": 3503
+ },
+ {
+ "epoch": 0.67,
+ "learning_rate": 5.061316186891039e-06,
+ "loss": 0.7405,
+ "step": 3504
+ },
+ {
+ "epoch": 0.67,
+ "learning_rate": 5.055895972197334e-06,
+ "loss": 0.8299,
+ "step": 3505
+ },
+ {
+ "epoch": 0.67,
+ "learning_rate": 5.050477679261264e-06,
+ "loss": 0.7705,
+ "step": 3506
+ },
+ {
+ "epoch": 0.68,
+ "learning_rate": 5.045061310188897e-06,
+ "loss": 0.7861,
+ "step": 3507
+ },
+ {
+ "epoch": 0.68,
+ "learning_rate": 5.0396468670855654e-06,
+ "loss": 0.7343,
+ "step": 3508
+ },
+ {
+ "epoch": 0.68,
+ "learning_rate": 5.034234352055843e-06,
+ "loss": 0.8264,
+ "step": 3509
+ },
+ {
+ "epoch": 0.68,
+ "learning_rate": 5.0288237672035526e-06,
+ "loss": 0.8062,
+ "step": 3510
+ },
+ {
+ "epoch": 0.68,
+ "learning_rate": 5.02341511463178e-06,
+ "loss": 0.7801,
+ "step": 3511
+ },
+ {
+ "epoch": 0.68,
+ "learning_rate": 5.018008396442847e-06,
+ "loss": 0.8022,
+ "step": 3512
+ },
+ {
+ "epoch": 0.68,
+ "learning_rate": 5.012603614738328e-06,
+ "loss": 0.773,
+ "step": 3513
+ },
+ {
+ "epoch": 0.68,
+ "learning_rate": 5.007200771619045e-06,
+ "loss": 0.7735,
+ "step": 3514
+ },
+ {
+ "epoch": 0.68,
+ "learning_rate": 5.001799869185061e-06,
+ "loss": 0.7994,
+ "step": 3515
+ },
+ {
+ "epoch": 0.68,
+ "learning_rate": 4.996400909535698e-06,
+ "loss": 0.784,
+ "step": 3516
+ },
+ {
+ "epoch": 0.68,
+ "learning_rate": 4.991003894769508e-06,
+ "loss": 0.7299,
+ "step": 3517
+ },
+ {
+ "epoch": 0.68,
+ "learning_rate": 4.9856088269842995e-06,
+ "loss": 0.7633,
+ "step": 3518
+ },
+ {
+ "epoch": 0.68,
+ "learning_rate": 4.980215708277116e-06,
+ "loss": 0.7783,
+ "step": 3519
+ },
+ {
+ "epoch": 0.68,
+ "learning_rate": 4.974824540744245e-06,
+ "loss": 0.7461,
+ "step": 3520
+ },
+ {
+ "epoch": 0.68,
+ "learning_rate": 4.969435326481217e-06,
+ "loss": 0.7304,
+ "step": 3521
+ },
+ {
+ "epoch": 0.68,
+ "learning_rate": 4.964048067582797e-06,
+ "loss": 0.7875,
+ "step": 3522
+ },
+ {
+ "epoch": 0.68,
+ "learning_rate": 4.958662766143006e-06,
+ "loss": 0.7967,
+ "step": 3523
+ },
+ {
+ "epoch": 0.68,
+ "learning_rate": 4.953279424255083e-06,
+ "loss": 1.1518,
+ "step": 3524
+ },
+ {
+ "epoch": 0.68,
+ "learning_rate": 4.947898044011525e-06,
+ "loss": 1.1688,
+ "step": 3525
+ },
+ {
+ "epoch": 0.68,
+ "learning_rate": 4.942518627504054e-06,
+ "loss": 0.6801,
+ "step": 3526
+ },
+ {
+ "epoch": 0.68,
+ "learning_rate": 4.9371411768236286e-06,
+ "loss": 0.7548,
+ "step": 3527
+ },
+ {
+ "epoch": 0.68,
+ "learning_rate": 4.931765694060457e-06,
+ "loss": 0.7243,
+ "step": 3528
+ },
+ {
+ "epoch": 0.68,
+ "learning_rate": 4.92639218130396e-06,
+ "loss": 0.7922,
+ "step": 3529
+ },
+ {
+ "epoch": 0.68,
+ "learning_rate": 4.921020640642812e-06,
+ "loss": 0.7405,
+ "step": 3530
+ },
+ {
+ "epoch": 0.68,
+ "learning_rate": 4.9156510741649135e-06,
+ "loss": 0.8494,
+ "step": 3531
+ },
+ {
+ "epoch": 0.68,
+ "learning_rate": 4.9102834839573934e-06,
+ "loss": 0.7862,
+ "step": 3532
+ },
+ {
+ "epoch": 0.68,
+ "learning_rate": 4.904917872106625e-06,
+ "loss": 1.3618,
+ "step": 3533
+ },
+ {
+ "epoch": 0.68,
+ "learning_rate": 4.899554240698195e-06,
+ "loss": 0.7915,
+ "step": 3534
+ },
+ {
+ "epoch": 0.68,
+ "learning_rate": 4.894192591816942e-06,
+ "loss": 0.7906,
+ "step": 3535
+ },
+ {
+ "epoch": 0.68,
+ "learning_rate": 4.888832927546914e-06,
+ "loss": 0.7345,
+ "step": 3536
+ },
+ {
+ "epoch": 0.68,
+ "learning_rate": 4.883475249971396e-06,
+ "loss": 0.7532,
+ "step": 3537
+ },
+ {
+ "epoch": 0.68,
+ "learning_rate": 4.878119561172904e-06,
+ "loss": 0.8169,
+ "step": 3538
+ },
+ {
+ "epoch": 0.68,
+ "learning_rate": 4.87276586323317e-06,
+ "loss": 1.1021,
+ "step": 3539
+ },
+ {
+ "epoch": 0.68,
+ "learning_rate": 4.867414158233167e-06,
+ "loss": 0.7322,
+ "step": 3540
+ },
+ {
+ "epoch": 0.68,
+ "learning_rate": 4.862064448253084e-06,
+ "loss": 0.7942,
+ "step": 3541
+ },
+ {
+ "epoch": 0.68,
+ "learning_rate": 4.856716735372332e-06,
+ "loss": 0.8073,
+ "step": 3542
+ },
+ {
+ "epoch": 0.68,
+ "learning_rate": 4.851371021669556e-06,
+ "loss": 0.7717,
+ "step": 3543
+ },
+ {
+ "epoch": 0.68,
+ "learning_rate": 4.846027309222614e-06,
+ "loss": 0.8102,
+ "step": 3544
+ },
+ {
+ "epoch": 0.68,
+ "learning_rate": 4.840685600108598e-06,
+ "loss": 0.7298,
+ "step": 3545
+ },
+ {
+ "epoch": 0.68,
+ "learning_rate": 4.835345896403801e-06,
+ "loss": 0.7568,
+ "step": 3546
+ },
+ {
+ "epoch": 0.68,
+ "learning_rate": 4.8300082001837575e-06,
+ "loss": 0.7444,
+ "step": 3547
+ },
+ {
+ "epoch": 0.68,
+ "learning_rate": 4.824672513523212e-06,
+ "loss": 0.8101,
+ "step": 3548
+ },
+ {
+ "epoch": 0.68,
+ "learning_rate": 4.8193388384961235e-06,
+ "loss": 0.7763,
+ "step": 3549
+ },
+ {
+ "epoch": 0.68,
+ "learning_rate": 4.814007177175683e-06,
+ "loss": 0.769,
+ "step": 3550
+ },
+ {
+ "epoch": 0.68,
+ "learning_rate": 4.808677531634288e-06,
+ "loss": 0.7484,
+ "step": 3551
+ },
+ {
+ "epoch": 0.68,
+ "learning_rate": 4.803349903943549e-06,
+ "loss": 0.7823,
+ "step": 3552
+ },
+ {
+ "epoch": 0.68,
+ "learning_rate": 4.798024296174306e-06,
+ "loss": 0.7498,
+ "step": 3553
+ },
+ {
+ "epoch": 0.68,
+ "learning_rate": 4.792700710396603e-06,
+ "loss": 0.7134,
+ "step": 3554
+ },
+ {
+ "epoch": 0.68,
+ "learning_rate": 4.787379148679699e-06,
+ "loss": 0.7813,
+ "step": 3555
+ },
+ {
+ "epoch": 0.68,
+ "learning_rate": 4.7820596130920685e-06,
+ "loss": 0.7607,
+ "step": 3556
+ },
+ {
+ "epoch": 0.68,
+ "learning_rate": 4.776742105701402e-06,
+ "loss": 0.7761,
+ "step": 3557
+ },
+ {
+ "epoch": 0.68,
+ "learning_rate": 4.771426628574594e-06,
+ "loss": 0.7654,
+ "step": 3558
+ },
+ {
+ "epoch": 0.69,
+ "learning_rate": 4.766113183777753e-06,
+ "loss": 1.2137,
+ "step": 3559
+ },
+ {
+ "epoch": 0.69,
+ "learning_rate": 4.7608017733762035e-06,
+ "loss": 0.7815,
+ "step": 3560
+ },
+ {
+ "epoch": 0.69,
+ "learning_rate": 4.7554923994344674e-06,
+ "loss": 0.7753,
+ "step": 3561
+ },
+ {
+ "epoch": 0.69,
+ "learning_rate": 4.750185064016292e-06,
+ "loss": 0.8187,
+ "step": 3562
+ },
+ {
+ "epoch": 0.69,
+ "learning_rate": 4.744879769184611e-06,
+ "loss": 0.7833,
+ "step": 3563
+ },
+ {
+ "epoch": 0.69,
+ "learning_rate": 4.739576517001575e-06,
+ "loss": 0.7443,
+ "step": 3564
+ },
+ {
+ "epoch": 0.69,
+ "learning_rate": 4.73427530952855e-06,
+ "loss": 0.7718,
+ "step": 3565
+ },
+ {
+ "epoch": 0.69,
+ "learning_rate": 4.728976148826091e-06,
+ "loss": 0.8064,
+ "step": 3566
+ },
+ {
+ "epoch": 0.69,
+ "learning_rate": 4.723679036953972e-06,
+ "loss": 0.7667,
+ "step": 3567
+ },
+ {
+ "epoch": 0.69,
+ "learning_rate": 4.71838397597116e-06,
+ "loss": 0.7994,
+ "step": 3568
+ },
+ {
+ "epoch": 0.69,
+ "learning_rate": 4.713090967935825e-06,
+ "loss": 1.2296,
+ "step": 3569
+ },
+ {
+ "epoch": 0.69,
+ "learning_rate": 4.707800014905351e-06,
+ "loss": 0.7746,
+ "step": 3570
+ },
+ {
+ "epoch": 0.69,
+ "learning_rate": 4.702511118936308e-06,
+ "loss": 0.7535,
+ "step": 3571
+ },
+ {
+ "epoch": 0.69,
+ "learning_rate": 4.6972242820844774e-06,
+ "loss": 0.7706,
+ "step": 3572
+ },
+ {
+ "epoch": 0.69,
+ "learning_rate": 4.691939506404828e-06,
+ "loss": 0.7406,
+ "step": 3573
+ },
+ {
+ "epoch": 0.69,
+ "learning_rate": 4.686656793951545e-06,
+ "loss": 0.7923,
+ "step": 3574
+ },
+ {
+ "epoch": 0.69,
+ "learning_rate": 4.681376146777999e-06,
+ "loss": 0.7851,
+ "step": 3575
+ },
+ {
+ "epoch": 0.69,
+ "learning_rate": 4.676097566936756e-06,
+ "loss": 0.7777,
+ "step": 3576
+ },
+ {
+ "epoch": 0.69,
+ "learning_rate": 4.670821056479592e-06,
+ "loss": 0.7658,
+ "step": 3577
+ },
+ {
+ "epoch": 0.69,
+ "learning_rate": 4.6655466174574635e-06,
+ "loss": 0.7867,
+ "step": 3578
+ },
+ {
+ "epoch": 0.69,
+ "learning_rate": 4.6602742519205325e-06,
+ "loss": 0.7735,
+ "step": 3579
+ },
+ {
+ "epoch": 0.69,
+ "learning_rate": 4.655003961918146e-06,
+ "loss": 0.7854,
+ "step": 3580
+ },
+ {
+ "epoch": 0.69,
+ "learning_rate": 4.64973574949885e-06,
+ "loss": 0.8049,
+ "step": 3581
+ },
+ {
+ "epoch": 0.69,
+ "learning_rate": 4.644469616710386e-06,
+ "loss": 0.7743,
+ "step": 3582
+ },
+ {
+ "epoch": 0.69,
+ "learning_rate": 4.639205565599678e-06,
+ "loss": 1.1762,
+ "step": 3583
+ },
+ {
+ "epoch": 0.69,
+ "learning_rate": 4.633943598212852e-06,
+ "loss": 0.7495,
+ "step": 3584
+ },
+ {
+ "epoch": 0.69,
+ "learning_rate": 4.628683716595216e-06,
+ "loss": 0.7587,
+ "step": 3585
+ },
+ {
+ "epoch": 0.69,
+ "learning_rate": 4.623425922791264e-06,
+ "loss": 0.8094,
+ "step": 3586
+ },
+ {
+ "epoch": 0.69,
+ "learning_rate": 4.618170218844692e-06,
+ "loss": 0.7301,
+ "step": 3587
+ },
+ {
+ "epoch": 0.69,
+ "learning_rate": 4.612916606798371e-06,
+ "loss": 0.7073,
+ "step": 3588
+ },
+ {
+ "epoch": 0.69,
+ "learning_rate": 4.607665088694365e-06,
+ "loss": 0.7758,
+ "step": 3589
+ },
+ {
+ "epoch": 0.69,
+ "learning_rate": 4.602415666573922e-06,
+ "loss": 0.7894,
+ "step": 3590
+ },
+ {
+ "epoch": 0.69,
+ "learning_rate": 4.597168342477472e-06,
+ "loss": 0.7814,
+ "step": 3591
+ },
+ {
+ "epoch": 0.69,
+ "learning_rate": 4.59192311844464e-06,
+ "loss": 0.8355,
+ "step": 3592
+ },
+ {
+ "epoch": 0.69,
+ "learning_rate": 4.5866799965142205e-06,
+ "loss": 0.8139,
+ "step": 3593
+ },
+ {
+ "epoch": 0.69,
+ "learning_rate": 4.581438978724208e-06,
+ "loss": 0.7556,
+ "step": 3594
+ },
+ {
+ "epoch": 0.69,
+ "learning_rate": 4.576200067111763e-06,
+ "loss": 0.7997,
+ "step": 3595
+ },
+ {
+ "epoch": 0.69,
+ "learning_rate": 4.570963263713236e-06,
+ "loss": 0.8076,
+ "step": 3596
+ },
+ {
+ "epoch": 0.69,
+ "learning_rate": 4.565728570564156e-06,
+ "loss": 1.3216,
+ "step": 3597
+ },
+ {
+ "epoch": 0.69,
+ "learning_rate": 4.560495989699226e-06,
+ "loss": 0.7301,
+ "step": 3598
+ },
+ {
+ "epoch": 0.69,
+ "learning_rate": 4.555265523152344e-06,
+ "loss": 0.7988,
+ "step": 3599
+ },
+ {
+ "epoch": 0.69,
+ "learning_rate": 4.55003717295657e-06,
+ "loss": 0.8051,
+ "step": 3600
+ },
+ {
+ "epoch": 0.69,
+ "learning_rate": 4.544810941144143e-06,
+ "loss": 0.723,
+ "step": 3601
+ },
+ {
+ "epoch": 0.69,
+ "learning_rate": 4.539586829746492e-06,
+ "loss": 0.7805,
+ "step": 3602
+ },
+ {
+ "epoch": 0.69,
+ "learning_rate": 4.534364840794204e-06,
+ "loss": 0.7817,
+ "step": 3603
+ },
+ {
+ "epoch": 0.69,
+ "learning_rate": 4.5291449763170614e-06,
+ "loss": 0.7671,
+ "step": 3604
+ },
+ {
+ "epoch": 0.69,
+ "learning_rate": 4.5239272383439936e-06,
+ "loss": 0.7664,
+ "step": 3605
+ },
+ {
+ "epoch": 0.69,
+ "learning_rate": 4.518711628903131e-06,
+ "loss": 0.8106,
+ "step": 3606
+ },
+ {
+ "epoch": 0.69,
+ "learning_rate": 4.51349815002176e-06,
+ "loss": 0.7789,
+ "step": 3607
+ },
+ {
+ "epoch": 0.69,
+ "learning_rate": 4.50828680372634e-06,
+ "loss": 0.7439,
+ "step": 3608
+ },
+ {
+ "epoch": 0.69,
+ "learning_rate": 4.503077592042512e-06,
+ "loss": 0.8354,
+ "step": 3609
+ },
+ {
+ "epoch": 0.69,
+ "learning_rate": 4.497870516995073e-06,
+ "loss": 0.7589,
+ "step": 3610
+ },
+ {
+ "epoch": 0.7,
+ "learning_rate": 4.492665580608005e-06,
+ "loss": 0.7646,
+ "step": 3611
+ },
+ {
+ "epoch": 0.7,
+ "learning_rate": 4.4874627849044435e-06,
+ "loss": 1.1448,
+ "step": 3612
+ },
+ {
+ "epoch": 0.7,
+ "learning_rate": 4.482262131906702e-06,
+ "loss": 0.8069,
+ "step": 3613
+ },
+ {
+ "epoch": 0.7,
+ "learning_rate": 4.477063623636258e-06,
+ "loss": 0.7554,
+ "step": 3614
+ },
+ {
+ "epoch": 0.7,
+ "learning_rate": 4.471867262113749e-06,
+ "loss": 0.7882,
+ "step": 3615
+ },
+ {
+ "epoch": 0.7,
+ "learning_rate": 4.466673049358993e-06,
+ "loss": 0.8075,
+ "step": 3616
+ },
+ {
+ "epoch": 0.7,
+ "learning_rate": 4.46148098739096e-06,
+ "loss": 0.6988,
+ "step": 3617
+ },
+ {
+ "epoch": 0.7,
+ "learning_rate": 4.456291078227787e-06,
+ "loss": 0.7794,
+ "step": 3618
+ },
+ {
+ "epoch": 0.7,
+ "learning_rate": 4.451103323886778e-06,
+ "loss": 1.2143,
+ "step": 3619
+ },
+ {
+ "epoch": 0.7,
+ "learning_rate": 4.445917726384395e-06,
+ "loss": 0.7502,
+ "step": 3620
+ },
+ {
+ "epoch": 0.7,
+ "learning_rate": 4.440734287736268e-06,
+ "loss": 0.7789,
+ "step": 3621
+ },
+ {
+ "epoch": 0.7,
+ "learning_rate": 4.435553009957174e-06,
+ "loss": 0.8071,
+ "step": 3622
+ },
+ {
+ "epoch": 0.7,
+ "learning_rate": 4.4303738950610675e-06,
+ "loss": 0.7945,
+ "step": 3623
+ },
+ {
+ "epoch": 0.7,
+ "learning_rate": 4.425196945061052e-06,
+ "loss": 0.7286,
+ "step": 3624
+ },
+ {
+ "epoch": 0.7,
+ "learning_rate": 4.420022161969386e-06,
+ "loss": 0.7643,
+ "step": 3625
+ },
+ {
+ "epoch": 0.7,
+ "learning_rate": 4.414849547797501e-06,
+ "loss": 1.1842,
+ "step": 3626
+ },
+ {
+ "epoch": 0.7,
+ "learning_rate": 4.40967910455597e-06,
+ "loss": 0.755,
+ "step": 3627
+ },
+ {
+ "epoch": 0.7,
+ "learning_rate": 4.404510834254526e-06,
+ "loss": 0.7559,
+ "step": 3628
+ },
+ {
+ "epoch": 0.7,
+ "learning_rate": 4.399344738902066e-06,
+ "loss": 0.7498,
+ "step": 3629
+ },
+ {
+ "epoch": 0.7,
+ "learning_rate": 4.39418082050663e-06,
+ "loss": 0.7952,
+ "step": 3630
+ },
+ {
+ "epoch": 0.7,
+ "learning_rate": 4.38901908107542e-06,
+ "loss": 0.7815,
+ "step": 3631
+ },
+ {
+ "epoch": 0.7,
+ "learning_rate": 4.383859522614781e-06,
+ "loss": 0.8042,
+ "step": 3632
+ },
+ {
+ "epoch": 0.7,
+ "learning_rate": 4.378702147130226e-06,
+ "loss": 0.8159,
+ "step": 3633
+ },
+ {
+ "epoch": 0.7,
+ "learning_rate": 4.3735469566264065e-06,
+ "loss": 0.7689,
+ "step": 3634
+ },
+ {
+ "epoch": 0.7,
+ "learning_rate": 4.368393953107125e-06,
+ "loss": 0.7759,
+ "step": 3635
+ },
+ {
+ "epoch": 0.7,
+ "learning_rate": 4.363243138575345e-06,
+ "loss": 0.7637,
+ "step": 3636
+ },
+ {
+ "epoch": 0.7,
+ "learning_rate": 4.3580945150331685e-06,
+ "loss": 0.8008,
+ "step": 3637
+ },
+ {
+ "epoch": 0.7,
+ "learning_rate": 4.35294808448185e-06,
+ "loss": 0.8099,
+ "step": 3638
+ },
+ {
+ "epoch": 0.7,
+ "learning_rate": 4.347803848921789e-06,
+ "loss": 0.7553,
+ "step": 3639
+ },
+ {
+ "epoch": 0.7,
+ "learning_rate": 4.342661810352531e-06,
+ "loss": 0.82,
+ "step": 3640
+ },
+ {
+ "epoch": 0.7,
+ "learning_rate": 4.337521970772778e-06,
+ "loss": 1.2579,
+ "step": 3641
+ },
+ {
+ "epoch": 0.7,
+ "learning_rate": 4.332384332180362e-06,
+ "loss": 0.7633,
+ "step": 3642
+ },
+ {
+ "epoch": 0.7,
+ "learning_rate": 4.327248896572272e-06,
+ "loss": 0.7949,
+ "step": 3643
+ },
+ {
+ "epoch": 0.7,
+ "learning_rate": 4.322115665944634e-06,
+ "loss": 0.7602,
+ "step": 3644
+ },
+ {
+ "epoch": 0.7,
+ "learning_rate": 4.3169846422927154e-06,
+ "loss": 0.8052,
+ "step": 3645
+ },
+ {
+ "epoch": 0.7,
+ "learning_rate": 4.3118558276109345e-06,
+ "loss": 0.794,
+ "step": 3646
+ },
+ {
+ "epoch": 0.7,
+ "learning_rate": 4.3067292238928435e-06,
+ "loss": 0.7818,
+ "step": 3647
+ },
+ {
+ "epoch": 0.7,
+ "learning_rate": 4.301604833131136e-06,
+ "loss": 0.807,
+ "step": 3648
+ },
+ {
+ "epoch": 0.7,
+ "learning_rate": 4.296482657317646e-06,
+ "loss": 0.8045,
+ "step": 3649
+ },
+ {
+ "epoch": 0.7,
+ "learning_rate": 4.291362698443344e-06,
+ "loss": 0.7658,
+ "step": 3650
+ },
+ {
+ "epoch": 0.7,
+ "learning_rate": 4.28624495849835e-06,
+ "loss": 0.8008,
+ "step": 3651
+ },
+ {
+ "epoch": 0.7,
+ "learning_rate": 4.281129439471906e-06,
+ "loss": 0.7214,
+ "step": 3652
+ },
+ {
+ "epoch": 0.7,
+ "learning_rate": 4.276016143352404e-06,
+ "loss": 0.8214,
+ "step": 3653
+ },
+ {
+ "epoch": 0.7,
+ "learning_rate": 4.270905072127362e-06,
+ "loss": 0.8076,
+ "step": 3654
+ },
+ {
+ "epoch": 0.7,
+ "learning_rate": 4.265796227783441e-06,
+ "loss": 0.8055,
+ "step": 3655
+ },
+ {
+ "epoch": 0.7,
+ "learning_rate": 4.2606896123064274e-06,
+ "loss": 0.7083,
+ "step": 3656
+ },
+ {
+ "epoch": 0.7,
+ "learning_rate": 4.255585227681247e-06,
+ "loss": 0.8015,
+ "step": 3657
+ },
+ {
+ "epoch": 0.7,
+ "learning_rate": 4.2504830758919646e-06,
+ "loss": 1.2403,
+ "step": 3658
+ },
+ {
+ "epoch": 0.7,
+ "learning_rate": 4.245383158921764e-06,
+ "loss": 0.7738,
+ "step": 3659
+ },
+ {
+ "epoch": 0.7,
+ "learning_rate": 4.2402854787529665e-06,
+ "loss": 0.7779,
+ "step": 3660
+ },
+ {
+ "epoch": 0.7,
+ "learning_rate": 4.235190037367031e-06,
+ "loss": 0.7597,
+ "step": 3661
+ },
+ {
+ "epoch": 0.7,
+ "learning_rate": 4.2300968367445304e-06,
+ "loss": 0.8004,
+ "step": 3662
+ },
+ {
+ "epoch": 0.71,
+ "learning_rate": 4.225005878865185e-06,
+ "loss": 0.7702,
+ "step": 3663
+ },
+ {
+ "epoch": 0.71,
+ "learning_rate": 4.219917165707829e-06,
+ "loss": 0.7906,
+ "step": 3664
+ },
+ {
+ "epoch": 0.71,
+ "learning_rate": 4.214830699250431e-06,
+ "loss": 0.7192,
+ "step": 3665
+ },
+ {
+ "epoch": 0.71,
+ "learning_rate": 4.209746481470081e-06,
+ "loss": 0.786,
+ "step": 3666
+ },
+ {
+ "epoch": 0.71,
+ "learning_rate": 4.204664514342999e-06,
+ "loss": 0.8222,
+ "step": 3667
+ },
+ {
+ "epoch": 0.71,
+ "learning_rate": 4.199584799844534e-06,
+ "loss": 0.7611,
+ "step": 3668
+ },
+ {
+ "epoch": 0.71,
+ "learning_rate": 4.1945073399491486e-06,
+ "loss": 0.7692,
+ "step": 3669
+ },
+ {
+ "epoch": 0.71,
+ "learning_rate": 4.189432136630442e-06,
+ "loss": 0.8179,
+ "step": 3670
+ },
+ {
+ "epoch": 0.71,
+ "learning_rate": 4.184359191861128e-06,
+ "loss": 0.7855,
+ "step": 3671
+ },
+ {
+ "epoch": 0.71,
+ "learning_rate": 4.179288507613042e-06,
+ "loss": 0.794,
+ "step": 3672
+ },
+ {
+ "epoch": 0.71,
+ "learning_rate": 4.1742200858571445e-06,
+ "loss": 0.7401,
+ "step": 3673
+ },
+ {
+ "epoch": 0.71,
+ "learning_rate": 4.169153928563511e-06,
+ "loss": 0.8011,
+ "step": 3674
+ },
+ {
+ "epoch": 0.71,
+ "learning_rate": 4.164090037701347e-06,
+ "loss": 0.7393,
+ "step": 3675
+ },
+ {
+ "epoch": 0.71,
+ "learning_rate": 4.159028415238968e-06,
+ "loss": 0.7674,
+ "step": 3676
+ },
+ {
+ "epoch": 0.71,
+ "learning_rate": 4.153969063143808e-06,
+ "loss": 0.7984,
+ "step": 3677
+ },
+ {
+ "epoch": 0.71,
+ "learning_rate": 4.148911983382428e-06,
+ "loss": 0.7981,
+ "step": 3678
+ },
+ {
+ "epoch": 0.71,
+ "learning_rate": 4.143857177920491e-06,
+ "loss": 0.8025,
+ "step": 3679
+ },
+ {
+ "epoch": 0.71,
+ "learning_rate": 4.138804648722795e-06,
+ "loss": 0.774,
+ "step": 3680
+ },
+ {
+ "epoch": 0.71,
+ "learning_rate": 4.133754397753229e-06,
+ "loss": 0.7561,
+ "step": 3681
+ },
+ {
+ "epoch": 0.71,
+ "learning_rate": 4.128706426974818e-06,
+ "loss": 0.8003,
+ "step": 3682
+ },
+ {
+ "epoch": 0.71,
+ "learning_rate": 4.1236607383496904e-06,
+ "loss": 0.8248,
+ "step": 3683
+ },
+ {
+ "epoch": 0.71,
+ "learning_rate": 4.118617333839085e-06,
+ "loss": 0.7808,
+ "step": 3684
+ },
+ {
+ "epoch": 0.71,
+ "learning_rate": 4.113576215403366e-06,
+ "loss": 0.7355,
+ "step": 3685
+ },
+ {
+ "epoch": 0.71,
+ "learning_rate": 4.108537385001995e-06,
+ "loss": 0.8139,
+ "step": 3686
+ },
+ {
+ "epoch": 0.71,
+ "learning_rate": 4.103500844593546e-06,
+ "loss": 0.7873,
+ "step": 3687
+ },
+ {
+ "epoch": 0.71,
+ "learning_rate": 4.098466596135715e-06,
+ "loss": 0.7456,
+ "step": 3688
+ },
+ {
+ "epoch": 0.71,
+ "learning_rate": 4.093434641585293e-06,
+ "loss": 1.2071,
+ "step": 3689
+ },
+ {
+ "epoch": 0.71,
+ "learning_rate": 4.088404982898186e-06,
+ "loss": 0.7186,
+ "step": 3690
+ },
+ {
+ "epoch": 0.71,
+ "learning_rate": 4.083377622029403e-06,
+ "loss": 1.1454,
+ "step": 3691
+ },
+ {
+ "epoch": 0.71,
+ "learning_rate": 4.078352560933069e-06,
+ "loss": 0.7708,
+ "step": 3692
+ },
+ {
+ "epoch": 0.71,
+ "learning_rate": 4.0733298015624075e-06,
+ "loss": 0.8385,
+ "step": 3693
+ },
+ {
+ "epoch": 0.71,
+ "learning_rate": 4.068309345869745e-06,
+ "loss": 0.7841,
+ "step": 3694
+ },
+ {
+ "epoch": 0.71,
+ "learning_rate": 4.063291195806524e-06,
+ "loss": 0.7708,
+ "step": 3695
+ },
+ {
+ "epoch": 0.71,
+ "learning_rate": 4.05827535332328e-06,
+ "loss": 0.8143,
+ "step": 3696
+ },
+ {
+ "epoch": 0.71,
+ "learning_rate": 4.053261820369655e-06,
+ "loss": 1.2651,
+ "step": 3697
+ },
+ {
+ "epoch": 0.71,
+ "learning_rate": 4.0482505988943935e-06,
+ "loss": 0.7654,
+ "step": 3698
+ },
+ {
+ "epoch": 0.71,
+ "learning_rate": 4.043241690845338e-06,
+ "loss": 0.7638,
+ "step": 3699
+ },
+ {
+ "epoch": 0.71,
+ "learning_rate": 4.038235098169442e-06,
+ "loss": 1.2481,
+ "step": 3700
+ },
+ {
+ "epoch": 0.71,
+ "learning_rate": 4.033230822812745e-06,
+ "loss": 0.7241,
+ "step": 3701
+ },
+ {
+ "epoch": 0.71,
+ "learning_rate": 4.028228866720402e-06,
+ "loss": 0.7779,
+ "step": 3702
+ },
+ {
+ "epoch": 0.71,
+ "learning_rate": 4.023229231836651e-06,
+ "loss": 0.7745,
+ "step": 3703
+ },
+ {
+ "epoch": 0.71,
+ "learning_rate": 4.0182319201048335e-06,
+ "loss": 0.7861,
+ "step": 3704
+ },
+ {
+ "epoch": 0.71,
+ "learning_rate": 4.0132369334673945e-06,
+ "loss": 0.7656,
+ "step": 3705
+ },
+ {
+ "epoch": 0.71,
+ "learning_rate": 4.008244273865867e-06,
+ "loss": 0.7561,
+ "step": 3706
+ },
+ {
+ "epoch": 0.71,
+ "learning_rate": 4.00325394324088e-06,
+ "loss": 0.7804,
+ "step": 3707
+ },
+ {
+ "epoch": 0.71,
+ "learning_rate": 3.998265943532161e-06,
+ "loss": 0.7667,
+ "step": 3708
+ },
+ {
+ "epoch": 0.71,
+ "learning_rate": 3.993280276678526e-06,
+ "loss": 0.7488,
+ "step": 3709
+ },
+ {
+ "epoch": 0.71,
+ "learning_rate": 3.988296944617894e-06,
+ "loss": 0.7918,
+ "step": 3710
+ },
+ {
+ "epoch": 0.71,
+ "learning_rate": 3.983315949287265e-06,
+ "loss": 0.7725,
+ "step": 3711
+ },
+ {
+ "epoch": 0.71,
+ "learning_rate": 3.978337292622743e-06,
+ "loss": 0.7325,
+ "step": 3712
+ },
+ {
+ "epoch": 0.71,
+ "learning_rate": 3.973360976559512e-06,
+ "loss": 1.1583,
+ "step": 3713
+ },
+ {
+ "epoch": 0.71,
+ "learning_rate": 3.96838700303185e-06,
+ "loss": 0.7517,
+ "step": 3714
+ },
+ {
+ "epoch": 0.72,
+ "learning_rate": 3.963415373973124e-06,
+ "loss": 0.7855,
+ "step": 3715
+ },
+ {
+ "epoch": 0.72,
+ "learning_rate": 3.958446091315789e-06,
+ "loss": 0.7581,
+ "step": 3716
+ },
+ {
+ "epoch": 0.72,
+ "learning_rate": 3.953479156991396e-06,
+ "loss": 0.7725,
+ "step": 3717
+ },
+ {
+ "epoch": 0.72,
+ "learning_rate": 3.9485145729305685e-06,
+ "loss": 0.8016,
+ "step": 3718
+ },
+ {
+ "epoch": 0.72,
+ "learning_rate": 3.943552341063034e-06,
+ "loss": 0.7736,
+ "step": 3719
+ },
+ {
+ "epoch": 0.72,
+ "learning_rate": 3.93859246331759e-06,
+ "loss": 0.811,
+ "step": 3720
+ },
+ {
+ "epoch": 0.72,
+ "learning_rate": 3.933634941622124e-06,
+ "loss": 0.7528,
+ "step": 3721
+ },
+ {
+ "epoch": 0.72,
+ "learning_rate": 3.928679777903616e-06,
+ "loss": 0.7651,
+ "step": 3722
+ },
+ {
+ "epoch": 0.72,
+ "learning_rate": 3.923726974088117e-06,
+ "loss": 0.7606,
+ "step": 3723
+ },
+ {
+ "epoch": 0.72,
+ "learning_rate": 3.918776532100768e-06,
+ "loss": 0.7438,
+ "step": 3724
+ },
+ {
+ "epoch": 0.72,
+ "learning_rate": 3.913828453865789e-06,
+ "loss": 0.7513,
+ "step": 3725
+ },
+ {
+ "epoch": 0.72,
+ "learning_rate": 3.90888274130648e-06,
+ "loss": 0.7986,
+ "step": 3726
+ },
+ {
+ "epoch": 0.72,
+ "learning_rate": 3.903939396345231e-06,
+ "loss": 0.7707,
+ "step": 3727
+ },
+ {
+ "epoch": 0.72,
+ "learning_rate": 3.898998420903497e-06,
+ "loss": 0.7589,
+ "step": 3728
+ },
+ {
+ "epoch": 0.72,
+ "learning_rate": 3.894059816901826e-06,
+ "loss": 0.7243,
+ "step": 3729
+ },
+ {
+ "epoch": 0.72,
+ "learning_rate": 3.889123586259835e-06,
+ "loss": 1.193,
+ "step": 3730
+ },
+ {
+ "epoch": 0.72,
+ "learning_rate": 3.884189730896221e-06,
+ "loss": 0.7372,
+ "step": 3731
+ },
+ {
+ "epoch": 0.72,
+ "learning_rate": 3.879258252728758e-06,
+ "loss": 0.7483,
+ "step": 3732
+ },
+ {
+ "epoch": 0.72,
+ "learning_rate": 3.874329153674293e-06,
+ "loss": 0.7588,
+ "step": 3733
+ },
+ {
+ "epoch": 0.72,
+ "learning_rate": 3.869402435648758e-06,
+ "loss": 0.8353,
+ "step": 3734
+ },
+ {
+ "epoch": 0.72,
+ "learning_rate": 3.864478100567149e-06,
+ "loss": 0.7483,
+ "step": 3735
+ },
+ {
+ "epoch": 0.72,
+ "learning_rate": 3.859556150343536e-06,
+ "loss": 0.8023,
+ "step": 3736
+ },
+ {
+ "epoch": 0.72,
+ "learning_rate": 3.854636586891073e-06,
+ "loss": 0.7645,
+ "step": 3737
+ },
+ {
+ "epoch": 0.72,
+ "learning_rate": 3.849719412121971e-06,
+ "loss": 0.7518,
+ "step": 3738
+ },
+ {
+ "epoch": 0.72,
+ "learning_rate": 3.844804627947528e-06,
+ "loss": 0.7771,
+ "step": 3739
+ },
+ {
+ "epoch": 0.72,
+ "learning_rate": 3.839892236278102e-06,
+ "loss": 0.8175,
+ "step": 3740
+ },
+ {
+ "epoch": 0.72,
+ "learning_rate": 3.8349822390231225e-06,
+ "loss": 0.8161,
+ "step": 3741
+ },
+ {
+ "epoch": 0.72,
+ "learning_rate": 3.830074638091092e-06,
+ "loss": 0.7913,
+ "step": 3742
+ },
+ {
+ "epoch": 0.72,
+ "learning_rate": 3.825169435389574e-06,
+ "loss": 0.7363,
+ "step": 3743
+ },
+ {
+ "epoch": 0.72,
+ "learning_rate": 3.820266632825214e-06,
+ "loss": 0.7889,
+ "step": 3744
+ },
+ {
+ "epoch": 0.72,
+ "learning_rate": 3.815366232303711e-06,
+ "loss": 1.1392,
+ "step": 3745
+ },
+ {
+ "epoch": 0.72,
+ "learning_rate": 3.8104682357298327e-06,
+ "loss": 0.7672,
+ "step": 3746
+ },
+ {
+ "epoch": 0.72,
+ "learning_rate": 3.8055726450074215e-06,
+ "loss": 0.7748,
+ "step": 3747
+ },
+ {
+ "epoch": 0.72,
+ "learning_rate": 3.800679462039375e-06,
+ "loss": 0.7266,
+ "step": 3748
+ },
+ {
+ "epoch": 0.72,
+ "learning_rate": 3.7957886887276574e-06,
+ "loss": 0.788,
+ "step": 3749
+ },
+ {
+ "epoch": 0.72,
+ "learning_rate": 3.7909003269732926e-06,
+ "loss": 0.7722,
+ "step": 3750
+ },
+ {
+ "epoch": 0.72,
+ "learning_rate": 3.7860143786763805e-06,
+ "loss": 0.7807,
+ "step": 3751
+ },
+ {
+ "epoch": 0.72,
+ "learning_rate": 3.7811308457360683e-06,
+ "loss": 0.8154,
+ "step": 3752
+ },
+ {
+ "epoch": 0.72,
+ "learning_rate": 3.7762497300505676e-06,
+ "loss": 0.8175,
+ "step": 3753
+ },
+ {
+ "epoch": 0.72,
+ "learning_rate": 3.771371033517159e-06,
+ "loss": 1.2248,
+ "step": 3754
+ },
+ {
+ "epoch": 0.72,
+ "learning_rate": 3.766494758032171e-06,
+ "loss": 1.1719,
+ "step": 3755
+ },
+ {
+ "epoch": 0.72,
+ "learning_rate": 3.7616209054910047e-06,
+ "loss": 0.7511,
+ "step": 3756
+ },
+ {
+ "epoch": 0.72,
+ "learning_rate": 3.756749477788101e-06,
+ "loss": 0.7274,
+ "step": 3757
+ },
+ {
+ "epoch": 0.72,
+ "learning_rate": 3.7518804768169704e-06,
+ "loss": 0.8364,
+ "step": 3758
+ },
+ {
+ "epoch": 0.72,
+ "learning_rate": 3.7470139044701824e-06,
+ "loss": 0.7805,
+ "step": 3759
+ },
+ {
+ "epoch": 0.72,
+ "learning_rate": 3.7421497626393545e-06,
+ "loss": 0.7024,
+ "step": 3760
+ },
+ {
+ "epoch": 0.72,
+ "learning_rate": 3.737288053215168e-06,
+ "loss": 0.7524,
+ "step": 3761
+ },
+ {
+ "epoch": 0.72,
+ "learning_rate": 3.7324287780873503e-06,
+ "loss": 0.7883,
+ "step": 3762
+ },
+ {
+ "epoch": 0.72,
+ "learning_rate": 3.727571939144685e-06,
+ "loss": 0.7595,
+ "step": 3763
+ },
+ {
+ "epoch": 0.72,
+ "learning_rate": 3.7227175382750145e-06,
+ "loss": 0.7885,
+ "step": 3764
+ },
+ {
+ "epoch": 0.72,
+ "learning_rate": 3.7178655773652283e-06,
+ "loss": 0.7672,
+ "step": 3765
+ },
+ {
+ "epoch": 0.72,
+ "learning_rate": 3.7130160583012666e-06,
+ "loss": 1.2901,
+ "step": 3766
+ },
+ {
+ "epoch": 0.73,
+ "learning_rate": 3.708168982968118e-06,
+ "loss": 0.7626,
+ "step": 3767
+ },
+ {
+ "epoch": 0.73,
+ "learning_rate": 3.703324353249833e-06,
+ "loss": 0.8268,
+ "step": 3768
+ },
+ {
+ "epoch": 0.73,
+ "learning_rate": 3.698482171029502e-06,
+ "loss": 0.7568,
+ "step": 3769
+ },
+ {
+ "epoch": 0.73,
+ "learning_rate": 3.69364243818926e-06,
+ "loss": 0.7475,
+ "step": 3770
+ },
+ {
+ "epoch": 0.73,
+ "learning_rate": 3.6888051566103044e-06,
+ "loss": 0.7538,
+ "step": 3771
+ },
+ {
+ "epoch": 0.73,
+ "learning_rate": 3.6839703281728677e-06,
+ "loss": 0.8194,
+ "step": 3772
+ },
+ {
+ "epoch": 0.73,
+ "learning_rate": 3.679137954756232e-06,
+ "loss": 0.7866,
+ "step": 3773
+ },
+ {
+ "epoch": 0.73,
+ "learning_rate": 3.674308038238725e-06,
+ "loss": 0.8196,
+ "step": 3774
+ },
+ {
+ "epoch": 0.73,
+ "learning_rate": 3.6694805804977183e-06,
+ "loss": 0.731,
+ "step": 3775
+ },
+ {
+ "epoch": 0.73,
+ "learning_rate": 3.6646555834096355e-06,
+ "loss": 0.7629,
+ "step": 3776
+ },
+ {
+ "epoch": 0.73,
+ "learning_rate": 3.659833048849929e-06,
+ "loss": 0.8304,
+ "step": 3777
+ },
+ {
+ "epoch": 0.73,
+ "learning_rate": 3.655012978693113e-06,
+ "loss": 0.7851,
+ "step": 3778
+ },
+ {
+ "epoch": 0.73,
+ "learning_rate": 3.6501953748127283e-06,
+ "loss": 0.7865,
+ "step": 3779
+ },
+ {
+ "epoch": 0.73,
+ "learning_rate": 3.6453802390813586e-06,
+ "loss": 0.7215,
+ "step": 3780
+ },
+ {
+ "epoch": 0.73,
+ "learning_rate": 3.640567573370639e-06,
+ "loss": 0.7821,
+ "step": 3781
+ },
+ {
+ "epoch": 0.73,
+ "learning_rate": 3.6357573795512347e-06,
+ "loss": 0.8062,
+ "step": 3782
+ },
+ {
+ "epoch": 0.73,
+ "learning_rate": 3.630949659492853e-06,
+ "loss": 0.7739,
+ "step": 3783
+ },
+ {
+ "epoch": 0.73,
+ "learning_rate": 3.6261444150642387e-06,
+ "loss": 0.815,
+ "step": 3784
+ },
+ {
+ "epoch": 0.73,
+ "learning_rate": 3.621341648133172e-06,
+ "loss": 0.8086,
+ "step": 3785
+ },
+ {
+ "epoch": 0.73,
+ "learning_rate": 3.61654136056648e-06,
+ "loss": 0.7513,
+ "step": 3786
+ },
+ {
+ "epoch": 0.73,
+ "learning_rate": 3.611743554230013e-06,
+ "loss": 0.7267,
+ "step": 3787
+ },
+ {
+ "epoch": 0.73,
+ "learning_rate": 3.6069482309886695e-06,
+ "loss": 0.782,
+ "step": 3788
+ },
+ {
+ "epoch": 0.73,
+ "learning_rate": 3.6021553927063725e-06,
+ "loss": 0.7625,
+ "step": 3789
+ },
+ {
+ "epoch": 0.73,
+ "learning_rate": 3.597365041246085e-06,
+ "loss": 0.7173,
+ "step": 3790
+ },
+ {
+ "epoch": 0.73,
+ "learning_rate": 3.5925771784697993e-06,
+ "loss": 0.7725,
+ "step": 3791
+ },
+ {
+ "epoch": 0.73,
+ "learning_rate": 3.587791806238541e-06,
+ "loss": 0.7973,
+ "step": 3792
+ },
+ {
+ "epoch": 0.73,
+ "learning_rate": 3.583008926412375e-06,
+ "loss": 0.7634,
+ "step": 3793
+ },
+ {
+ "epoch": 0.73,
+ "learning_rate": 3.578228540850388e-06,
+ "loss": 0.7802,
+ "step": 3794
+ },
+ {
+ "epoch": 0.73,
+ "learning_rate": 3.5734506514106983e-06,
+ "loss": 0.7853,
+ "step": 3795
+ },
+ {
+ "epoch": 0.73,
+ "learning_rate": 3.5686752599504624e-06,
+ "loss": 0.7848,
+ "step": 3796
+ },
+ {
+ "epoch": 0.73,
+ "learning_rate": 3.563902368325853e-06,
+ "loss": 0.7624,
+ "step": 3797
+ },
+ {
+ "epoch": 0.73,
+ "learning_rate": 3.559131978392086e-06,
+ "loss": 0.7735,
+ "step": 3798
+ },
+ {
+ "epoch": 0.73,
+ "learning_rate": 3.5543640920033928e-06,
+ "loss": 0.728,
+ "step": 3799
+ },
+ {
+ "epoch": 0.73,
+ "learning_rate": 3.549598711013035e-06,
+ "loss": 0.8054,
+ "step": 3800
+ },
+ {
+ "epoch": 0.73,
+ "learning_rate": 3.5448358372733018e-06,
+ "loss": 0.778,
+ "step": 3801
+ },
+ {
+ "epoch": 0.73,
+ "learning_rate": 3.540075472635505e-06,
+ "loss": 0.7995,
+ "step": 3802
+ },
+ {
+ "epoch": 0.73,
+ "learning_rate": 3.5353176189499873e-06,
+ "loss": 0.7497,
+ "step": 3803
+ },
+ {
+ "epoch": 0.73,
+ "learning_rate": 3.5305622780661074e-06,
+ "loss": 0.7593,
+ "step": 3804
+ },
+ {
+ "epoch": 0.73,
+ "learning_rate": 3.525809451832257e-06,
+ "loss": 0.793,
+ "step": 3805
+ },
+ {
+ "epoch": 0.73,
+ "learning_rate": 3.5210591420958408e-06,
+ "loss": 0.7881,
+ "step": 3806
+ },
+ {
+ "epoch": 0.73,
+ "learning_rate": 3.5163113507032898e-06,
+ "loss": 0.7067,
+ "step": 3807
+ },
+ {
+ "epoch": 0.73,
+ "learning_rate": 3.511566079500055e-06,
+ "loss": 0.7512,
+ "step": 3808
+ },
+ {
+ "epoch": 0.73,
+ "learning_rate": 3.5068233303306056e-06,
+ "loss": 0.8103,
+ "step": 3809
+ },
+ {
+ "epoch": 0.73,
+ "learning_rate": 3.5020831050384395e-06,
+ "loss": 0.7544,
+ "step": 3810
+ },
+ {
+ "epoch": 0.73,
+ "learning_rate": 3.497345405466065e-06,
+ "loss": 0.7686,
+ "step": 3811
+ },
+ {
+ "epoch": 0.73,
+ "learning_rate": 3.492610233455006e-06,
+ "loss": 0.7914,
+ "step": 3812
+ },
+ {
+ "epoch": 0.73,
+ "learning_rate": 3.4878775908458174e-06,
+ "loss": 0.7679,
+ "step": 3813
+ },
+ {
+ "epoch": 0.73,
+ "learning_rate": 3.483147479478054e-06,
+ "loss": 0.7607,
+ "step": 3814
+ },
+ {
+ "epoch": 0.73,
+ "learning_rate": 3.478419901190305e-06,
+ "loss": 0.831,
+ "step": 3815
+ },
+ {
+ "epoch": 0.73,
+ "learning_rate": 3.4736948578201534e-06,
+ "loss": 0.7982,
+ "step": 3816
+ },
+ {
+ "epoch": 0.73,
+ "learning_rate": 3.4689723512042183e-06,
+ "loss": 0.7883,
+ "step": 3817
+ },
+ {
+ "epoch": 0.73,
+ "learning_rate": 3.4642523831781206e-06,
+ "loss": 0.7926,
+ "step": 3818
+ },
+ {
+ "epoch": 0.74,
+ "learning_rate": 3.4595349555764914e-06,
+ "loss": 0.774,
+ "step": 3819
+ },
+ {
+ "epoch": 0.74,
+ "learning_rate": 3.454820070232987e-06,
+ "loss": 0.7732,
+ "step": 3820
+ },
+ {
+ "epoch": 0.74,
+ "learning_rate": 3.4501077289802665e-06,
+ "loss": 0.6924,
+ "step": 3821
+ },
+ {
+ "epoch": 0.74,
+ "learning_rate": 3.4453979336499975e-06,
+ "loss": 0.7507,
+ "step": 3822
+ },
+ {
+ "epoch": 0.74,
+ "learning_rate": 3.4406906860728705e-06,
+ "loss": 0.7889,
+ "step": 3823
+ },
+ {
+ "epoch": 0.74,
+ "learning_rate": 3.435985988078574e-06,
+ "loss": 0.7364,
+ "step": 3824
+ },
+ {
+ "epoch": 0.74,
+ "learning_rate": 3.4312838414958084e-06,
+ "loss": 0.759,
+ "step": 3825
+ },
+ {
+ "epoch": 0.74,
+ "learning_rate": 3.4265842481522817e-06,
+ "loss": 0.7948,
+ "step": 3826
+ },
+ {
+ "epoch": 0.74,
+ "learning_rate": 3.4218872098747147e-06,
+ "loss": 0.7182,
+ "step": 3827
+ },
+ {
+ "epoch": 0.74,
+ "learning_rate": 3.4171927284888307e-06,
+ "loss": 0.7387,
+ "step": 3828
+ },
+ {
+ "epoch": 0.74,
+ "learning_rate": 3.4125008058193564e-06,
+ "loss": 0.7649,
+ "step": 3829
+ },
+ {
+ "epoch": 0.74,
+ "learning_rate": 3.4078114436900323e-06,
+ "loss": 0.7541,
+ "step": 3830
+ },
+ {
+ "epoch": 0.74,
+ "learning_rate": 3.4031246439235965e-06,
+ "loss": 0.7561,
+ "step": 3831
+ },
+ {
+ "epoch": 0.74,
+ "learning_rate": 3.398440408341792e-06,
+ "loss": 0.7613,
+ "step": 3832
+ },
+ {
+ "epoch": 0.74,
+ "learning_rate": 3.393758738765368e-06,
+ "loss": 0.7598,
+ "step": 3833
+ },
+ {
+ "epoch": 0.74,
+ "learning_rate": 3.389079637014071e-06,
+ "loss": 0.7703,
+ "step": 3834
+ },
+ {
+ "epoch": 0.74,
+ "learning_rate": 3.3844031049066594e-06,
+ "loss": 0.7657,
+ "step": 3835
+ },
+ {
+ "epoch": 0.74,
+ "learning_rate": 3.3797291442608805e-06,
+ "loss": 1.1791,
+ "step": 3836
+ },
+ {
+ "epoch": 0.74,
+ "learning_rate": 3.3750577568934937e-06,
+ "loss": 0.7566,
+ "step": 3837
+ },
+ {
+ "epoch": 0.74,
+ "learning_rate": 3.3703889446202497e-06,
+ "loss": 0.7534,
+ "step": 3838
+ },
+ {
+ "epoch": 0.74,
+ "learning_rate": 3.3657227092558975e-06,
+ "loss": 0.7816,
+ "step": 3839
+ },
+ {
+ "epoch": 0.74,
+ "learning_rate": 3.3610590526141938e-06,
+ "loss": 0.7949,
+ "step": 3840
+ },
+ {
+ "epoch": 0.74,
+ "learning_rate": 3.356397976507887e-06,
+ "loss": 0.7049,
+ "step": 3841
+ },
+ {
+ "epoch": 0.74,
+ "learning_rate": 3.3517394827487194e-06,
+ "loss": 0.8039,
+ "step": 3842
+ },
+ {
+ "epoch": 0.74,
+ "learning_rate": 3.347083573147434e-06,
+ "loss": 0.7637,
+ "step": 3843
+ },
+ {
+ "epoch": 0.74,
+ "learning_rate": 3.342430249513765e-06,
+ "loss": 0.7894,
+ "step": 3844
+ },
+ {
+ "epoch": 0.74,
+ "learning_rate": 3.3377795136564515e-06,
+ "loss": 0.7709,
+ "step": 3845
+ },
+ {
+ "epoch": 0.74,
+ "learning_rate": 3.3331313673832132e-06,
+ "loss": 0.786,
+ "step": 3846
+ },
+ {
+ "epoch": 0.74,
+ "learning_rate": 3.328485812500776e-06,
+ "loss": 0.7129,
+ "step": 3847
+ },
+ {
+ "epoch": 0.74,
+ "learning_rate": 3.323842850814849e-06,
+ "loss": 0.7689,
+ "step": 3848
+ },
+ {
+ "epoch": 0.74,
+ "learning_rate": 3.3192024841301373e-06,
+ "loss": 0.7487,
+ "step": 3849
+ },
+ {
+ "epoch": 0.74,
+ "learning_rate": 3.314564714250337e-06,
+ "loss": 0.7567,
+ "step": 3850
+ },
+ {
+ "epoch": 0.74,
+ "learning_rate": 3.3099295429781296e-06,
+ "loss": 0.76,
+ "step": 3851
+ },
+ {
+ "epoch": 0.74,
+ "learning_rate": 3.3052969721152007e-06,
+ "loss": 0.7373,
+ "step": 3852
+ },
+ {
+ "epoch": 0.74,
+ "learning_rate": 3.300667003462208e-06,
+ "loss": 0.7361,
+ "step": 3853
+ },
+ {
+ "epoch": 0.74,
+ "learning_rate": 3.2960396388188122e-06,
+ "loss": 0.8288,
+ "step": 3854
+ },
+ {
+ "epoch": 0.74,
+ "learning_rate": 3.291414879983652e-06,
+ "loss": 0.7207,
+ "step": 3855
+ },
+ {
+ "epoch": 0.74,
+ "learning_rate": 3.2867927287543535e-06,
+ "loss": 1.1389,
+ "step": 3856
+ },
+ {
+ "epoch": 0.74,
+ "learning_rate": 3.282173186927539e-06,
+ "loss": 0.7355,
+ "step": 3857
+ },
+ {
+ "epoch": 0.74,
+ "learning_rate": 3.277556256298806e-06,
+ "loss": 0.785,
+ "step": 3858
+ },
+ {
+ "epoch": 0.74,
+ "learning_rate": 3.2729419386627414e-06,
+ "loss": 0.8176,
+ "step": 3859
+ },
+ {
+ "epoch": 0.74,
+ "learning_rate": 3.2683302358129164e-06,
+ "loss": 0.8204,
+ "step": 3860
+ },
+ {
+ "epoch": 0.74,
+ "learning_rate": 3.263721149541881e-06,
+ "loss": 0.7912,
+ "step": 3861
+ },
+ {
+ "epoch": 0.74,
+ "learning_rate": 3.25911468164118e-06,
+ "loss": 0.7486,
+ "step": 3862
+ },
+ {
+ "epoch": 0.74,
+ "learning_rate": 3.2545108339013254e-06,
+ "loss": 0.7406,
+ "step": 3863
+ },
+ {
+ "epoch": 0.74,
+ "learning_rate": 3.2499096081118254e-06,
+ "loss": 0.7887,
+ "step": 3864
+ },
+ {
+ "epoch": 0.74,
+ "learning_rate": 3.245311006061159e-06,
+ "loss": 0.6952,
+ "step": 3865
+ },
+ {
+ "epoch": 0.74,
+ "learning_rate": 3.240715029536787e-06,
+ "loss": 0.7667,
+ "step": 3866
+ },
+ {
+ "epoch": 0.74,
+ "learning_rate": 3.2361216803251528e-06,
+ "loss": 0.8074,
+ "step": 3867
+ },
+ {
+ "epoch": 0.74,
+ "learning_rate": 3.231530960211672e-06,
+ "loss": 0.7723,
+ "step": 3868
+ },
+ {
+ "epoch": 0.74,
+ "learning_rate": 3.2269428709807505e-06,
+ "loss": 0.7505,
+ "step": 3869
+ },
+ {
+ "epoch": 0.74,
+ "learning_rate": 3.22235741441576e-06,
+ "loss": 0.7503,
+ "step": 3870
+ },
+ {
+ "epoch": 0.75,
+ "learning_rate": 3.217774592299051e-06,
+ "loss": 0.8255,
+ "step": 3871
+ },
+ {
+ "epoch": 0.75,
+ "learning_rate": 3.213194406411957e-06,
+ "loss": 0.8293,
+ "step": 3872
+ },
+ {
+ "epoch": 0.75,
+ "learning_rate": 3.2086168585347756e-06,
+ "loss": 0.7731,
+ "step": 3873
+ },
+ {
+ "epoch": 0.75,
+ "learning_rate": 3.204041950446792e-06,
+ "loss": 0.8047,
+ "step": 3874
+ },
+ {
+ "epoch": 0.75,
+ "learning_rate": 3.1994696839262562e-06,
+ "loss": 0.7575,
+ "step": 3875
+ },
+ {
+ "epoch": 0.75,
+ "learning_rate": 3.194900060750393e-06,
+ "loss": 0.717,
+ "step": 3876
+ },
+ {
+ "epoch": 0.75,
+ "learning_rate": 3.1903330826954006e-06,
+ "loss": 0.7603,
+ "step": 3877
+ },
+ {
+ "epoch": 0.75,
+ "learning_rate": 3.1857687515364445e-06,
+ "loss": 0.7718,
+ "step": 3878
+ },
+ {
+ "epoch": 0.75,
+ "learning_rate": 3.181207069047674e-06,
+ "loss": 0.7577,
+ "step": 3879
+ },
+ {
+ "epoch": 0.75,
+ "learning_rate": 3.1766480370021967e-06,
+ "loss": 0.7884,
+ "step": 3880
+ },
+ {
+ "epoch": 0.75,
+ "learning_rate": 3.1720916571720915e-06,
+ "loss": 0.7499,
+ "step": 3881
+ },
+ {
+ "epoch": 0.75,
+ "learning_rate": 3.167537931328414e-06,
+ "loss": 0.7345,
+ "step": 3882
+ },
+ {
+ "epoch": 0.75,
+ "learning_rate": 3.162986861241181e-06,
+ "loss": 0.7921,
+ "step": 3883
+ },
+ {
+ "epoch": 0.75,
+ "learning_rate": 3.1584384486793774e-06,
+ "loss": 0.8441,
+ "step": 3884
+ },
+ {
+ "epoch": 0.75,
+ "learning_rate": 3.153892695410956e-06,
+ "loss": 0.7605,
+ "step": 3885
+ },
+ {
+ "epoch": 0.75,
+ "learning_rate": 3.1493496032028416e-06,
+ "loss": 0.7107,
+ "step": 3886
+ },
+ {
+ "epoch": 0.75,
+ "learning_rate": 3.1448091738209165e-06,
+ "loss": 0.7327,
+ "step": 3887
+ },
+ {
+ "epoch": 0.75,
+ "learning_rate": 3.140271409030029e-06,
+ "loss": 0.7479,
+ "step": 3888
+ },
+ {
+ "epoch": 0.75,
+ "learning_rate": 3.1357363105939997e-06,
+ "loss": 0.7568,
+ "step": 3889
+ },
+ {
+ "epoch": 0.75,
+ "learning_rate": 3.131203880275604e-06,
+ "loss": 0.7774,
+ "step": 3890
+ },
+ {
+ "epoch": 0.75,
+ "learning_rate": 3.1266741198365834e-06,
+ "loss": 1.2203,
+ "step": 3891
+ },
+ {
+ "epoch": 0.75,
+ "learning_rate": 3.1221470310376423e-06,
+ "loss": 0.7699,
+ "step": 3892
+ },
+ {
+ "epoch": 0.75,
+ "learning_rate": 3.1176226156384405e-06,
+ "loss": 0.8065,
+ "step": 3893
+ },
+ {
+ "epoch": 0.75,
+ "learning_rate": 3.1131008753976123e-06,
+ "loss": 0.7619,
+ "step": 3894
+ },
+ {
+ "epoch": 0.75,
+ "learning_rate": 3.1085818120727363e-06,
+ "loss": 0.735,
+ "step": 3895
+ },
+ {
+ "epoch": 0.75,
+ "learning_rate": 3.104065427420364e-06,
+ "loss": 0.7266,
+ "step": 3896
+ },
+ {
+ "epoch": 0.75,
+ "learning_rate": 3.0995517231959994e-06,
+ "loss": 1.2077,
+ "step": 3897
+ },
+ {
+ "epoch": 0.75,
+ "learning_rate": 3.0950407011540996e-06,
+ "loss": 0.6905,
+ "step": 3898
+ },
+ {
+ "epoch": 0.75,
+ "learning_rate": 3.0905323630480912e-06,
+ "loss": 0.7742,
+ "step": 3899
+ },
+ {
+ "epoch": 0.75,
+ "learning_rate": 3.0860267106303487e-06,
+ "loss": 0.7729,
+ "step": 3900
+ },
+ {
+ "epoch": 0.75,
+ "learning_rate": 3.081523745652204e-06,
+ "loss": 0.8238,
+ "step": 3901
+ },
+ {
+ "epoch": 0.75,
+ "learning_rate": 3.0770234698639434e-06,
+ "loss": 0.8042,
+ "step": 3902
+ },
+ {
+ "epoch": 0.75,
+ "learning_rate": 3.072525885014814e-06,
+ "loss": 0.7633,
+ "step": 3903
+ },
+ {
+ "epoch": 0.75,
+ "learning_rate": 3.068030992853013e-06,
+ "loss": 0.8117,
+ "step": 3904
+ },
+ {
+ "epoch": 0.75,
+ "learning_rate": 3.0635387951256846e-06,
+ "loss": 0.783,
+ "step": 3905
+ },
+ {
+ "epoch": 0.75,
+ "learning_rate": 3.0590492935789396e-06,
+ "loss": 0.7768,
+ "step": 3906
+ },
+ {
+ "epoch": 0.75,
+ "learning_rate": 3.054562489957831e-06,
+ "loss": 0.6831,
+ "step": 3907
+ },
+ {
+ "epoch": 0.75,
+ "learning_rate": 3.050078386006362e-06,
+ "loss": 0.7998,
+ "step": 3908
+ },
+ {
+ "epoch": 0.75,
+ "learning_rate": 3.0455969834674926e-06,
+ "loss": 0.7605,
+ "step": 3909
+ },
+ {
+ "epoch": 0.75,
+ "learning_rate": 3.0411182840831255e-06,
+ "loss": 0.8155,
+ "step": 3910
+ },
+ {
+ "epoch": 0.75,
+ "learning_rate": 3.036642289594124e-06,
+ "loss": 0.7727,
+ "step": 3911
+ },
+ {
+ "epoch": 0.75,
+ "learning_rate": 3.032169001740285e-06,
+ "loss": 0.7584,
+ "step": 3912
+ },
+ {
+ "epoch": 0.75,
+ "learning_rate": 3.02769842226037e-06,
+ "loss": 0.7378,
+ "step": 3913
+ },
+ {
+ "epoch": 0.75,
+ "learning_rate": 3.0232305528920748e-06,
+ "loss": 0.849,
+ "step": 3914
+ },
+ {
+ "epoch": 0.75,
+ "learning_rate": 3.018765395372042e-06,
+ "loss": 0.8129,
+ "step": 3915
+ },
+ {
+ "epoch": 0.75,
+ "learning_rate": 3.0143029514358725e-06,
+ "loss": 0.7725,
+ "step": 3916
+ },
+ {
+ "epoch": 0.75,
+ "learning_rate": 3.0098432228180994e-06,
+ "loss": 0.7661,
+ "step": 3917
+ },
+ {
+ "epoch": 0.75,
+ "learning_rate": 3.005386211252207e-06,
+ "loss": 0.7841,
+ "step": 3918
+ },
+ {
+ "epoch": 0.75,
+ "learning_rate": 3.00093191847062e-06,
+ "loss": 0.7934,
+ "step": 3919
+ },
+ {
+ "epoch": 0.75,
+ "learning_rate": 2.9964803462047055e-06,
+ "loss": 0.7461,
+ "step": 3920
+ },
+ {
+ "epoch": 0.75,
+ "learning_rate": 2.992031496184781e-06,
+ "loss": 0.7471,
+ "step": 3921
+ },
+ {
+ "epoch": 0.75,
+ "learning_rate": 2.9875853701400947e-06,
+ "loss": 0.7719,
+ "step": 3922
+ },
+ {
+ "epoch": 0.76,
+ "learning_rate": 2.9831419697988486e-06,
+ "loss": 1.2252,
+ "step": 3923
+ },
+ {
+ "epoch": 0.76,
+ "learning_rate": 2.9787012968881755e-06,
+ "loss": 0.8063,
+ "step": 3924
+ },
+ {
+ "epoch": 0.76,
+ "learning_rate": 2.9742633531341493e-06,
+ "loss": 0.7244,
+ "step": 3925
+ },
+ {
+ "epoch": 0.76,
+ "learning_rate": 2.9698281402617856e-06,
+ "loss": 0.756,
+ "step": 3926
+ },
+ {
+ "epoch": 0.76,
+ "learning_rate": 2.965395659995035e-06,
+ "loss": 0.7426,
+ "step": 3927
+ },
+ {
+ "epoch": 0.76,
+ "learning_rate": 2.9609659140567947e-06,
+ "loss": 0.7456,
+ "step": 3928
+ },
+ {
+ "epoch": 0.76,
+ "learning_rate": 2.9565389041688896e-06,
+ "loss": 0.8143,
+ "step": 3929
+ },
+ {
+ "epoch": 0.76,
+ "learning_rate": 2.952114632052081e-06,
+ "loss": 0.7991,
+ "step": 3930
+ },
+ {
+ "epoch": 0.76,
+ "learning_rate": 2.9476930994260764e-06,
+ "loss": 0.7508,
+ "step": 3931
+ },
+ {
+ "epoch": 0.76,
+ "learning_rate": 2.943274308009505e-06,
+ "loss": 1.155,
+ "step": 3932
+ },
+ {
+ "epoch": 0.76,
+ "learning_rate": 2.9388582595199442e-06,
+ "loss": 0.8017,
+ "step": 3933
+ },
+ {
+ "epoch": 0.76,
+ "learning_rate": 2.934444955673893e-06,
+ "loss": 0.8083,
+ "step": 3934
+ },
+ {
+ "epoch": 0.76,
+ "learning_rate": 2.9300343981867906e-06,
+ "loss": 0.8033,
+ "step": 3935
+ },
+ {
+ "epoch": 0.76,
+ "learning_rate": 2.925626588773005e-06,
+ "loss": 0.7796,
+ "step": 3936
+ },
+ {
+ "epoch": 0.76,
+ "learning_rate": 2.9212215291458368e-06,
+ "loss": 0.7691,
+ "step": 3937
+ },
+ {
+ "epoch": 0.76,
+ "learning_rate": 2.9168192210175238e-06,
+ "loss": 1.1556,
+ "step": 3938
+ },
+ {
+ "epoch": 0.76,
+ "learning_rate": 2.9124196660992255e-06,
+ "loss": 0.7673,
+ "step": 3939
+ },
+ {
+ "epoch": 0.76,
+ "learning_rate": 2.9080228661010333e-06,
+ "loss": 0.7739,
+ "step": 3940
+ },
+ {
+ "epoch": 0.76,
+ "learning_rate": 2.903628822731975e-06,
+ "loss": 0.7754,
+ "step": 3941
+ },
+ {
+ "epoch": 0.76,
+ "learning_rate": 2.899237537699998e-06,
+ "loss": 0.7095,
+ "step": 3942
+ },
+ {
+ "epoch": 0.76,
+ "learning_rate": 2.894849012711982e-06,
+ "loss": 0.7502,
+ "step": 3943
+ },
+ {
+ "epoch": 0.76,
+ "learning_rate": 2.8904632494737283e-06,
+ "loss": 0.7175,
+ "step": 3944
+ },
+ {
+ "epoch": 0.76,
+ "learning_rate": 2.8860802496899765e-06,
+ "loss": 0.7819,
+ "step": 3945
+ },
+ {
+ "epoch": 0.76,
+ "learning_rate": 2.8817000150643813e-06,
+ "loss": 0.7557,
+ "step": 3946
+ },
+ {
+ "epoch": 0.76,
+ "learning_rate": 2.8773225472995224e-06,
+ "loss": 1.1978,
+ "step": 3947
+ },
+ {
+ "epoch": 0.76,
+ "learning_rate": 2.8729478480969154e-06,
+ "loss": 0.776,
+ "step": 3948
+ },
+ {
+ "epoch": 0.76,
+ "learning_rate": 2.8685759191569852e-06,
+ "loss": 0.7326,
+ "step": 3949
+ },
+ {
+ "epoch": 0.76,
+ "learning_rate": 2.8642067621790926e-06,
+ "loss": 0.7735,
+ "step": 3950
+ },
+ {
+ "epoch": 0.76,
+ "learning_rate": 2.859840378861515e-06,
+ "loss": 0.7393,
+ "step": 3951
+ },
+ {
+ "epoch": 0.76,
+ "learning_rate": 2.855476770901442e-06,
+ "loss": 0.7098,
+ "step": 3952
+ },
+ {
+ "epoch": 0.76,
+ "learning_rate": 2.851115939995005e-06,
+ "loss": 0.7916,
+ "step": 3953
+ },
+ {
+ "epoch": 0.76,
+ "learning_rate": 2.8467578878372394e-06,
+ "loss": 0.7565,
+ "step": 3954
+ },
+ {
+ "epoch": 0.76,
+ "learning_rate": 2.84240261612211e-06,
+ "loss": 0.7774,
+ "step": 3955
+ },
+ {
+ "epoch": 0.76,
+ "learning_rate": 2.8380501265424963e-06,
+ "loss": 1.2595,
+ "step": 3956
+ },
+ {
+ "epoch": 0.76,
+ "learning_rate": 2.833700420790193e-06,
+ "loss": 0.7177,
+ "step": 3957
+ },
+ {
+ "epoch": 0.76,
+ "learning_rate": 2.8293535005559237e-06,
+ "loss": 0.731,
+ "step": 3958
+ },
+ {
+ "epoch": 0.76,
+ "learning_rate": 2.82500936752932e-06,
+ "loss": 0.7912,
+ "step": 3959
+ },
+ {
+ "epoch": 0.76,
+ "learning_rate": 2.8206680233989314e-06,
+ "loss": 0.7582,
+ "step": 3960
+ },
+ {
+ "epoch": 0.76,
+ "learning_rate": 2.8163294698522224e-06,
+ "loss": 0.6975,
+ "step": 3961
+ },
+ {
+ "epoch": 0.76,
+ "learning_rate": 2.811993708575581e-06,
+ "loss": 0.7846,
+ "step": 3962
+ },
+ {
+ "epoch": 0.76,
+ "learning_rate": 2.807660741254302e-06,
+ "loss": 0.7952,
+ "step": 3963
+ },
+ {
+ "epoch": 0.76,
+ "learning_rate": 2.803330569572591e-06,
+ "loss": 0.7562,
+ "step": 3964
+ },
+ {
+ "epoch": 0.76,
+ "learning_rate": 2.79900319521358e-06,
+ "loss": 0.746,
+ "step": 3965
+ },
+ {
+ "epoch": 0.76,
+ "learning_rate": 2.794678619859301e-06,
+ "loss": 0.6961,
+ "step": 3966
+ },
+ {
+ "epoch": 0.76,
+ "learning_rate": 2.7903568451907036e-06,
+ "loss": 0.7253,
+ "step": 3967
+ },
+ {
+ "epoch": 0.76,
+ "learning_rate": 2.7860378728876493e-06,
+ "loss": 0.8343,
+ "step": 3968
+ },
+ {
+ "epoch": 0.76,
+ "learning_rate": 2.7817217046289037e-06,
+ "loss": 1.1623,
+ "step": 3969
+ },
+ {
+ "epoch": 0.76,
+ "learning_rate": 2.777408342092155e-06,
+ "loss": 0.7688,
+ "step": 3970
+ },
+ {
+ "epoch": 0.76,
+ "learning_rate": 2.7730977869539878e-06,
+ "loss": 0.6961,
+ "step": 3971
+ },
+ {
+ "epoch": 0.76,
+ "learning_rate": 2.768790040889906e-06,
+ "loss": 0.7522,
+ "step": 3972
+ },
+ {
+ "epoch": 0.76,
+ "learning_rate": 2.764485105574316e-06,
+ "loss": 0.8109,
+ "step": 3973
+ },
+ {
+ "epoch": 0.76,
+ "learning_rate": 2.760182982680528e-06,
+ "loss": 0.7617,
+ "step": 3974
+ },
+ {
+ "epoch": 0.77,
+ "learning_rate": 2.7558836738807704e-06,
+ "loss": 0.7466,
+ "step": 3975
+ },
+ {
+ "epoch": 0.77,
+ "learning_rate": 2.751587180846167e-06,
+ "loss": 0.8031,
+ "step": 3976
+ },
+ {
+ "epoch": 0.77,
+ "learning_rate": 2.747293505246752e-06,
+ "loss": 0.7094,
+ "step": 3977
+ },
+ {
+ "epoch": 0.77,
+ "learning_rate": 2.7430026487514628e-06,
+ "loss": 0.7432,
+ "step": 3978
+ },
+ {
+ "epoch": 0.77,
+ "learning_rate": 2.7387146130281394e-06,
+ "loss": 0.7835,
+ "step": 3979
+ },
+ {
+ "epoch": 0.77,
+ "learning_rate": 2.734429399743533e-06,
+ "loss": 0.7724,
+ "step": 3980
+ },
+ {
+ "epoch": 0.77,
+ "learning_rate": 2.7301470105632875e-06,
+ "loss": 0.8196,
+ "step": 3981
+ },
+ {
+ "epoch": 0.77,
+ "learning_rate": 2.725867447151961e-06,
+ "loss": 0.7518,
+ "step": 3982
+ },
+ {
+ "epoch": 0.77,
+ "learning_rate": 2.721590711173e-06,
+ "loss": 0.7661,
+ "step": 3983
+ },
+ {
+ "epoch": 0.77,
+ "learning_rate": 2.717316804288761e-06,
+ "loss": 0.7786,
+ "step": 3984
+ },
+ {
+ "epoch": 0.77,
+ "learning_rate": 2.713045728160496e-06,
+ "loss": 0.7328,
+ "step": 3985
+ },
+ {
+ "epoch": 0.77,
+ "learning_rate": 2.708777484448358e-06,
+ "loss": 0.7499,
+ "step": 3986
+ },
+ {
+ "epoch": 0.77,
+ "learning_rate": 2.704512074811405e-06,
+ "loss": 0.8226,
+ "step": 3987
+ },
+ {
+ "epoch": 0.77,
+ "learning_rate": 2.7002495009075856e-06,
+ "loss": 0.768,
+ "step": 3988
+ },
+ {
+ "epoch": 0.77,
+ "learning_rate": 2.6959897643937448e-06,
+ "loss": 0.782,
+ "step": 3989
+ },
+ {
+ "epoch": 0.77,
+ "learning_rate": 2.691732866925636e-06,
+ "loss": 0.7476,
+ "step": 3990
+ },
+ {
+ "epoch": 0.77,
+ "learning_rate": 2.687478810157895e-06,
+ "loss": 0.7623,
+ "step": 3991
+ },
+ {
+ "epoch": 0.77,
+ "learning_rate": 2.6832275957440658e-06,
+ "loss": 0.8096,
+ "step": 3992
+ },
+ {
+ "epoch": 0.77,
+ "learning_rate": 2.67897922533658e-06,
+ "loss": 0.6826,
+ "step": 3993
+ },
+ {
+ "epoch": 0.77,
+ "learning_rate": 2.674733700586766e-06,
+ "loss": 0.8375,
+ "step": 3994
+ },
+ {
+ "epoch": 0.77,
+ "learning_rate": 2.6704910231448446e-06,
+ "loss": 0.7539,
+ "step": 3995
+ },
+ {
+ "epoch": 0.77,
+ "learning_rate": 2.666251194659929e-06,
+ "loss": 0.7722,
+ "step": 3996
+ },
+ {
+ "epoch": 0.77,
+ "learning_rate": 2.6620142167800335e-06,
+ "loss": 0.7059,
+ "step": 3997
+ },
+ {
+ "epoch": 0.77,
+ "learning_rate": 2.65778009115205e-06,
+ "loss": 0.7642,
+ "step": 3998
+ },
+ {
+ "epoch": 0.77,
+ "learning_rate": 2.6535488194217786e-06,
+ "loss": 0.7813,
+ "step": 3999
+ },
+ {
+ "epoch": 0.77,
+ "learning_rate": 2.649320403233897e-06,
+ "loss": 0.7971,
+ "step": 4000
+ },
+ {
+ "epoch": 0.77,
+ "learning_rate": 2.6450948442319767e-06,
+ "loss": 0.7571,
+ "step": 4001
+ },
+ {
+ "epoch": 0.77,
+ "learning_rate": 2.6408721440584793e-06,
+ "loss": 0.8291,
+ "step": 4002
+ },
+ {
+ "epoch": 0.77,
+ "learning_rate": 2.6366523043547534e-06,
+ "loss": 0.8367,
+ "step": 4003
+ },
+ {
+ "epoch": 0.77,
+ "learning_rate": 2.6324353267610413e-06,
+ "loss": 0.6772,
+ "step": 4004
+ },
+ {
+ "epoch": 0.77,
+ "learning_rate": 2.6282212129164676e-06,
+ "loss": 1.1704,
+ "step": 4005
+ },
+ {
+ "epoch": 0.77,
+ "learning_rate": 2.624009964459041e-06,
+ "loss": 0.7266,
+ "step": 4006
+ },
+ {
+ "epoch": 0.77,
+ "learning_rate": 2.619801583025666e-06,
+ "loss": 0.7687,
+ "step": 4007
+ },
+ {
+ "epoch": 0.77,
+ "learning_rate": 2.615596070252123e-06,
+ "loss": 0.7544,
+ "step": 4008
+ },
+ {
+ "epoch": 0.77,
+ "learning_rate": 2.6113934277730866e-06,
+ "loss": 0.8181,
+ "step": 4009
+ },
+ {
+ "epoch": 0.77,
+ "learning_rate": 2.607193657222108e-06,
+ "loss": 0.8149,
+ "step": 4010
+ },
+ {
+ "epoch": 0.77,
+ "learning_rate": 2.602996760231624e-06,
+ "loss": 0.764,
+ "step": 4011
+ },
+ {
+ "epoch": 0.77,
+ "learning_rate": 2.5988027384329562e-06,
+ "loss": 0.757,
+ "step": 4012
+ },
+ {
+ "epoch": 0.77,
+ "learning_rate": 2.594611593456303e-06,
+ "loss": 0.75,
+ "step": 4013
+ },
+ {
+ "epoch": 0.77,
+ "learning_rate": 2.5904233269307566e-06,
+ "loss": 0.7562,
+ "step": 4014
+ },
+ {
+ "epoch": 0.77,
+ "learning_rate": 2.5862379404842795e-06,
+ "loss": 0.7887,
+ "step": 4015
+ },
+ {
+ "epoch": 0.77,
+ "learning_rate": 2.5820554357437166e-06,
+ "loss": 0.7729,
+ "step": 4016
+ },
+ {
+ "epoch": 0.77,
+ "learning_rate": 2.5778758143347983e-06,
+ "loss": 0.7553,
+ "step": 4017
+ },
+ {
+ "epoch": 0.77,
+ "learning_rate": 2.5736990778821268e-06,
+ "loss": 0.805,
+ "step": 4018
+ },
+ {
+ "epoch": 0.77,
+ "learning_rate": 2.5695252280091888e-06,
+ "loss": 0.7339,
+ "step": 4019
+ },
+ {
+ "epoch": 0.77,
+ "learning_rate": 2.5653542663383426e-06,
+ "loss": 0.832,
+ "step": 4020
+ },
+ {
+ "epoch": 0.77,
+ "learning_rate": 2.5611861944908334e-06,
+ "loss": 0.7799,
+ "step": 4021
+ },
+ {
+ "epoch": 0.77,
+ "learning_rate": 2.5570210140867747e-06,
+ "loss": 0.7611,
+ "step": 4022
+ },
+ {
+ "epoch": 0.77,
+ "learning_rate": 2.5528587267451554e-06,
+ "loss": 0.8132,
+ "step": 4023
+ },
+ {
+ "epoch": 0.77,
+ "learning_rate": 2.5486993340838505e-06,
+ "loss": 0.7385,
+ "step": 4024
+ },
+ {
+ "epoch": 0.77,
+ "learning_rate": 2.5445428377196e-06,
+ "loss": 0.779,
+ "step": 4025
+ },
+ {
+ "epoch": 0.77,
+ "learning_rate": 2.5403892392680197e-06,
+ "loss": 1.1896,
+ "step": 4026
+ },
+ {
+ "epoch": 0.78,
+ "learning_rate": 2.5362385403436006e-06,
+ "loss": 0.7439,
+ "step": 4027
+ },
+ {
+ "epoch": 0.78,
+ "learning_rate": 2.532090742559704e-06,
+ "loss": 0.769,
+ "step": 4028
+ },
+ {
+ "epoch": 0.78,
+ "learning_rate": 2.5279458475285714e-06,
+ "loss": 0.7591,
+ "step": 4029
+ },
+ {
+ "epoch": 0.78,
+ "learning_rate": 2.5238038568613045e-06,
+ "loss": 1.1012,
+ "step": 4030
+ },
+ {
+ "epoch": 0.78,
+ "learning_rate": 2.5196647721678878e-06,
+ "loss": 0.7534,
+ "step": 4031
+ },
+ {
+ "epoch": 0.78,
+ "learning_rate": 2.5155285950571675e-06,
+ "loss": 0.786,
+ "step": 4032
+ },
+ {
+ "epoch": 0.78,
+ "learning_rate": 2.5113953271368584e-06,
+ "loss": 0.771,
+ "step": 4033
+ },
+ {
+ "epoch": 0.78,
+ "learning_rate": 2.5072649700135575e-06,
+ "loss": 0.7706,
+ "step": 4034
+ },
+ {
+ "epoch": 0.78,
+ "learning_rate": 2.503137525292716e-06,
+ "loss": 0.8177,
+ "step": 4035
+ },
+ {
+ "epoch": 0.78,
+ "learning_rate": 2.49901299457866e-06,
+ "loss": 1.2941,
+ "step": 4036
+ },
+ {
+ "epoch": 0.78,
+ "learning_rate": 2.494891379474581e-06,
+ "loss": 0.786,
+ "step": 4037
+ },
+ {
+ "epoch": 0.78,
+ "learning_rate": 2.490772681582535e-06,
+ "loss": 0.8324,
+ "step": 4038
+ },
+ {
+ "epoch": 0.78,
+ "learning_rate": 2.4866569025034527e-06,
+ "loss": 0.7648,
+ "step": 4039
+ },
+ {
+ "epoch": 0.78,
+ "learning_rate": 2.4825440438371186e-06,
+ "loss": 0.8472,
+ "step": 4040
+ },
+ {
+ "epoch": 0.78,
+ "learning_rate": 2.478434107182194e-06,
+ "loss": 0.7997,
+ "step": 4041
+ },
+ {
+ "epoch": 0.78,
+ "learning_rate": 2.4743270941361953e-06,
+ "loss": 0.7554,
+ "step": 4042
+ },
+ {
+ "epoch": 0.78,
+ "learning_rate": 2.4702230062955055e-06,
+ "loss": 0.841,
+ "step": 4043
+ },
+ {
+ "epoch": 0.78,
+ "learning_rate": 2.46612184525537e-06,
+ "loss": 0.7515,
+ "step": 4044
+ },
+ {
+ "epoch": 0.78,
+ "learning_rate": 2.462023612609895e-06,
+ "loss": 0.8043,
+ "step": 4045
+ },
+ {
+ "epoch": 0.78,
+ "learning_rate": 2.4579283099520556e-06,
+ "loss": 0.7837,
+ "step": 4046
+ },
+ {
+ "epoch": 0.78,
+ "learning_rate": 2.4538359388736775e-06,
+ "loss": 0.7656,
+ "step": 4047
+ },
+ {
+ "epoch": 0.78,
+ "learning_rate": 2.4497465009654597e-06,
+ "loss": 0.757,
+ "step": 4048
+ },
+ {
+ "epoch": 0.78,
+ "learning_rate": 2.4456599978169495e-06,
+ "loss": 0.7207,
+ "step": 4049
+ },
+ {
+ "epoch": 0.78,
+ "learning_rate": 2.4415764310165546e-06,
+ "loss": 0.791,
+ "step": 4050
+ },
+ {
+ "epoch": 0.78,
+ "learning_rate": 2.4374958021515505e-06,
+ "loss": 0.7416,
+ "step": 4051
+ },
+ {
+ "epoch": 0.78,
+ "learning_rate": 2.4334181128080626e-06,
+ "loss": 0.8293,
+ "step": 4052
+ },
+ {
+ "epoch": 0.78,
+ "learning_rate": 2.429343364571075e-06,
+ "loss": 0.7254,
+ "step": 4053
+ },
+ {
+ "epoch": 0.78,
+ "learning_rate": 2.4252715590244303e-06,
+ "loss": 0.7358,
+ "step": 4054
+ },
+ {
+ "epoch": 0.78,
+ "learning_rate": 2.421202697750822e-06,
+ "loss": 0.7926,
+ "step": 4055
+ },
+ {
+ "epoch": 0.78,
+ "learning_rate": 2.4171367823318115e-06,
+ "loss": 0.7814,
+ "step": 4056
+ },
+ {
+ "epoch": 0.78,
+ "learning_rate": 2.4130738143478005e-06,
+ "loss": 0.7611,
+ "step": 4057
+ },
+ {
+ "epoch": 0.78,
+ "learning_rate": 2.409013795378058e-06,
+ "loss": 0.7754,
+ "step": 4058
+ },
+ {
+ "epoch": 0.78,
+ "learning_rate": 2.404956727000698e-06,
+ "loss": 0.7809,
+ "step": 4059
+ },
+ {
+ "epoch": 0.78,
+ "learning_rate": 2.400902610792688e-06,
+ "loss": 0.7669,
+ "step": 4060
+ },
+ {
+ "epoch": 0.78,
+ "learning_rate": 2.396851448329853e-06,
+ "loss": 0.839,
+ "step": 4061
+ },
+ {
+ "epoch": 0.78,
+ "learning_rate": 2.392803241186862e-06,
+ "loss": 0.7573,
+ "step": 4062
+ },
+ {
+ "epoch": 0.78,
+ "learning_rate": 2.3887579909372472e-06,
+ "loss": 0.8159,
+ "step": 4063
+ },
+ {
+ "epoch": 0.78,
+ "learning_rate": 2.3847156991533825e-06,
+ "loss": 0.7657,
+ "step": 4064
+ },
+ {
+ "epoch": 0.78,
+ "learning_rate": 2.3806763674064893e-06,
+ "loss": 0.748,
+ "step": 4065
+ },
+ {
+ "epoch": 0.78,
+ "learning_rate": 2.3766399972666498e-06,
+ "loss": 0.7531,
+ "step": 4066
+ },
+ {
+ "epoch": 0.78,
+ "learning_rate": 2.3726065903027816e-06,
+ "loss": 0.7887,
+ "step": 4067
+ },
+ {
+ "epoch": 0.78,
+ "learning_rate": 2.3685761480826653e-06,
+ "loss": 1.1255,
+ "step": 4068
+ },
+ {
+ "epoch": 0.78,
+ "learning_rate": 2.364548672172916e-06,
+ "loss": 0.7625,
+ "step": 4069
+ },
+ {
+ "epoch": 0.78,
+ "learning_rate": 2.360524164139002e-06,
+ "loss": 1.2141,
+ "step": 4070
+ },
+ {
+ "epoch": 0.78,
+ "learning_rate": 2.3565026255452363e-06,
+ "loss": 0.8026,
+ "step": 4071
+ },
+ {
+ "epoch": 0.78,
+ "learning_rate": 2.352484057954776e-06,
+ "loss": 0.7819,
+ "step": 4072
+ },
+ {
+ "epoch": 0.78,
+ "learning_rate": 2.3484684629296307e-06,
+ "loss": 0.7882,
+ "step": 4073
+ },
+ {
+ "epoch": 0.78,
+ "learning_rate": 2.3444558420306464e-06,
+ "loss": 0.7319,
+ "step": 4074
+ },
+ {
+ "epoch": 0.78,
+ "learning_rate": 2.340446196817515e-06,
+ "loss": 1.144,
+ "step": 4075
+ },
+ {
+ "epoch": 0.78,
+ "learning_rate": 2.336439528848776e-06,
+ "loss": 0.7421,
+ "step": 4076
+ },
+ {
+ "epoch": 0.78,
+ "learning_rate": 2.332435839681808e-06,
+ "loss": 0.738,
+ "step": 4077
+ },
+ {
+ "epoch": 0.78,
+ "learning_rate": 2.3284351308728325e-06,
+ "loss": 0.7585,
+ "step": 4078
+ },
+ {
+ "epoch": 0.79,
+ "learning_rate": 2.324437403976908e-06,
+ "loss": 0.7964,
+ "step": 4079
+ },
+ {
+ "epoch": 0.79,
+ "learning_rate": 2.3204426605479448e-06,
+ "loss": 1.1395,
+ "step": 4080
+ },
+ {
+ "epoch": 0.79,
+ "learning_rate": 2.3164509021386828e-06,
+ "loss": 0.7808,
+ "step": 4081
+ },
+ {
+ "epoch": 0.79,
+ "learning_rate": 2.3124621303007055e-06,
+ "loss": 0.7552,
+ "step": 4082
+ },
+ {
+ "epoch": 0.79,
+ "learning_rate": 2.308476346584441e-06,
+ "loss": 0.7837,
+ "step": 4083
+ },
+ {
+ "epoch": 0.79,
+ "learning_rate": 2.3044935525391454e-06,
+ "loss": 0.7692,
+ "step": 4084
+ },
+ {
+ "epoch": 0.79,
+ "learning_rate": 2.3005137497129193e-06,
+ "loss": 0.7567,
+ "step": 4085
+ },
+ {
+ "epoch": 0.79,
+ "learning_rate": 2.2965369396527048e-06,
+ "loss": 0.788,
+ "step": 4086
+ },
+ {
+ "epoch": 0.79,
+ "learning_rate": 2.2925631239042666e-06,
+ "loss": 0.7714,
+ "step": 4087
+ },
+ {
+ "epoch": 0.79,
+ "learning_rate": 2.2885923040122214e-06,
+ "loss": 0.8239,
+ "step": 4088
+ },
+ {
+ "epoch": 0.79,
+ "learning_rate": 2.2846244815200082e-06,
+ "loss": 0.7266,
+ "step": 4089
+ },
+ {
+ "epoch": 0.79,
+ "learning_rate": 2.280659657969914e-06,
+ "loss": 0.769,
+ "step": 4090
+ },
+ {
+ "epoch": 0.79,
+ "learning_rate": 2.27669783490305e-06,
+ "loss": 0.8103,
+ "step": 4091
+ },
+ {
+ "epoch": 0.79,
+ "learning_rate": 2.272739013859361e-06,
+ "loss": 0.8667,
+ "step": 4092
+ },
+ {
+ "epoch": 0.79,
+ "learning_rate": 2.268783196377634e-06,
+ "loss": 0.8043,
+ "step": 4093
+ },
+ {
+ "epoch": 0.79,
+ "learning_rate": 2.26483038399548e-06,
+ "loss": 0.7723,
+ "step": 4094
+ },
+ {
+ "epoch": 0.79,
+ "learning_rate": 2.2608805782493448e-06,
+ "loss": 0.7534,
+ "step": 4095
+ },
+ {
+ "epoch": 0.79,
+ "learning_rate": 2.256933780674504e-06,
+ "loss": 0.7555,
+ "step": 4096
+ },
+ {
+ "epoch": 0.79,
+ "learning_rate": 2.252989992805068e-06,
+ "loss": 0.8078,
+ "step": 4097
+ },
+ {
+ "epoch": 0.79,
+ "learning_rate": 2.2490492161739753e-06,
+ "loss": 0.7448,
+ "step": 4098
+ },
+ {
+ "epoch": 0.79,
+ "learning_rate": 2.245111452312987e-06,
+ "loss": 0.7474,
+ "step": 4099
+ },
+ {
+ "epoch": 0.79,
+ "learning_rate": 2.2411767027527076e-06,
+ "loss": 0.7199,
+ "step": 4100
+ },
+ {
+ "epoch": 0.79,
+ "learning_rate": 2.2372449690225585e-06,
+ "loss": 0.8119,
+ "step": 4101
+ },
+ {
+ "epoch": 0.79,
+ "learning_rate": 2.2333162526507923e-06,
+ "loss": 0.7598,
+ "step": 4102
+ },
+ {
+ "epoch": 0.79,
+ "learning_rate": 2.229390555164488e-06,
+ "loss": 0.7779,
+ "step": 4103
+ },
+ {
+ "epoch": 0.79,
+ "learning_rate": 2.225467878089549e-06,
+ "loss": 0.8051,
+ "step": 4104
+ },
+ {
+ "epoch": 0.79,
+ "learning_rate": 2.2215482229507145e-06,
+ "loss": 0.7832,
+ "step": 4105
+ },
+ {
+ "epoch": 0.79,
+ "learning_rate": 2.217631591271535e-06,
+ "loss": 0.803,
+ "step": 4106
+ },
+ {
+ "epoch": 0.79,
+ "learning_rate": 2.2137179845743986e-06,
+ "loss": 0.7715,
+ "step": 4107
+ },
+ {
+ "epoch": 0.79,
+ "learning_rate": 2.2098074043805094e-06,
+ "loss": 0.8064,
+ "step": 4108
+ },
+ {
+ "epoch": 0.79,
+ "learning_rate": 2.205899852209895e-06,
+ "loss": 0.7473,
+ "step": 4109
+ },
+ {
+ "epoch": 0.79,
+ "learning_rate": 2.2019953295814146e-06,
+ "loss": 0.7468,
+ "step": 4110
+ },
+ {
+ "epoch": 0.79,
+ "learning_rate": 2.1980938380127394e-06,
+ "loss": 0.7713,
+ "step": 4111
+ },
+ {
+ "epoch": 0.79,
+ "learning_rate": 2.1941953790203686e-06,
+ "loss": 0.772,
+ "step": 4112
+ },
+ {
+ "epoch": 0.79,
+ "learning_rate": 2.19029995411962e-06,
+ "loss": 0.7275,
+ "step": 4113
+ },
+ {
+ "epoch": 0.79,
+ "learning_rate": 2.1864075648246296e-06,
+ "loss": 0.7303,
+ "step": 4114
+ },
+ {
+ "epoch": 0.79,
+ "learning_rate": 2.1825182126483646e-06,
+ "loss": 0.8494,
+ "step": 4115
+ },
+ {
+ "epoch": 0.79,
+ "learning_rate": 2.1786318991025956e-06,
+ "loss": 0.8386,
+ "step": 4116
+ },
+ {
+ "epoch": 0.79,
+ "learning_rate": 2.174748625697928e-06,
+ "loss": 0.697,
+ "step": 4117
+ },
+ {
+ "epoch": 0.79,
+ "learning_rate": 2.1708683939437745e-06,
+ "loss": 0.7129,
+ "step": 4118
+ },
+ {
+ "epoch": 0.79,
+ "learning_rate": 2.166991205348369e-06,
+ "loss": 0.7632,
+ "step": 4119
+ },
+ {
+ "epoch": 0.79,
+ "learning_rate": 2.1631170614187613e-06,
+ "loss": 0.706,
+ "step": 4120
+ },
+ {
+ "epoch": 0.79,
+ "learning_rate": 2.1592459636608187e-06,
+ "loss": 0.7304,
+ "step": 4121
+ },
+ {
+ "epoch": 0.79,
+ "learning_rate": 2.1553779135792274e-06,
+ "loss": 0.7582,
+ "step": 4122
+ },
+ {
+ "epoch": 0.79,
+ "learning_rate": 2.1515129126774856e-06,
+ "loss": 0.7418,
+ "step": 4123
+ },
+ {
+ "epoch": 0.79,
+ "learning_rate": 2.1476509624579035e-06,
+ "loss": 0.7345,
+ "step": 4124
+ },
+ {
+ "epoch": 0.79,
+ "learning_rate": 2.1437920644216138e-06,
+ "loss": 0.7348,
+ "step": 4125
+ },
+ {
+ "epoch": 0.79,
+ "learning_rate": 2.1399362200685535e-06,
+ "loss": 0.696,
+ "step": 4126
+ },
+ {
+ "epoch": 0.79,
+ "learning_rate": 2.1360834308974822e-06,
+ "loss": 0.7189,
+ "step": 4127
+ },
+ {
+ "epoch": 0.79,
+ "learning_rate": 2.132233698405963e-06,
+ "loss": 0.7379,
+ "step": 4128
+ },
+ {
+ "epoch": 0.79,
+ "learning_rate": 2.128387024090377e-06,
+ "loss": 0.7461,
+ "step": 4129
+ },
+ {
+ "epoch": 0.79,
+ "learning_rate": 2.1245434094459137e-06,
+ "loss": 0.7608,
+ "step": 4130
+ },
+ {
+ "epoch": 0.8,
+ "learning_rate": 2.12070285596657e-06,
+ "loss": 0.7706,
+ "step": 4131
+ },
+ {
+ "epoch": 0.8,
+ "learning_rate": 2.1168653651451633e-06,
+ "loss": 1.1693,
+ "step": 4132
+ },
+ {
+ "epoch": 0.8,
+ "learning_rate": 2.1130309384733116e-06,
+ "loss": 0.8052,
+ "step": 4133
+ },
+ {
+ "epoch": 0.8,
+ "learning_rate": 2.109199577441441e-06,
+ "loss": 0.783,
+ "step": 4134
+ },
+ {
+ "epoch": 0.8,
+ "learning_rate": 2.1053712835387963e-06,
+ "loss": 0.8266,
+ "step": 4135
+ },
+ {
+ "epoch": 0.8,
+ "learning_rate": 2.101546058253419e-06,
+ "loss": 0.7965,
+ "step": 4136
+ },
+ {
+ "epoch": 0.8,
+ "learning_rate": 2.097723903072162e-06,
+ "loss": 0.7499,
+ "step": 4137
+ },
+ {
+ "epoch": 0.8,
+ "learning_rate": 2.0939048194806833e-06,
+ "loss": 0.7818,
+ "step": 4138
+ },
+ {
+ "epoch": 0.8,
+ "learning_rate": 2.090088808963454e-06,
+ "loss": 0.7977,
+ "step": 4139
+ },
+ {
+ "epoch": 0.8,
+ "learning_rate": 2.0862758730037425e-06,
+ "loss": 0.6959,
+ "step": 4140
+ },
+ {
+ "epoch": 0.8,
+ "learning_rate": 2.0824660130836215e-06,
+ "loss": 1.1799,
+ "step": 4141
+ },
+ {
+ "epoch": 0.8,
+ "learning_rate": 2.078659230683978e-06,
+ "loss": 0.7175,
+ "step": 4142
+ },
+ {
+ "epoch": 0.8,
+ "learning_rate": 2.0748555272844904e-06,
+ "loss": 0.7767,
+ "step": 4143
+ },
+ {
+ "epoch": 0.8,
+ "learning_rate": 2.071054904363652e-06,
+ "loss": 0.7983,
+ "step": 4144
+ },
+ {
+ "epoch": 0.8,
+ "learning_rate": 2.0672573633987493e-06,
+ "loss": 0.8174,
+ "step": 4145
+ },
+ {
+ "epoch": 0.8,
+ "learning_rate": 2.063462905865875e-06,
+ "loss": 0.6972,
+ "step": 4146
+ },
+ {
+ "epoch": 0.8,
+ "learning_rate": 2.059671533239924e-06,
+ "loss": 0.7484,
+ "step": 4147
+ },
+ {
+ "epoch": 0.8,
+ "learning_rate": 2.0558832469945853e-06,
+ "loss": 0.7582,
+ "step": 4148
+ },
+ {
+ "epoch": 0.8,
+ "learning_rate": 2.0520980486023623e-06,
+ "loss": 0.7655,
+ "step": 4149
+ },
+ {
+ "epoch": 0.8,
+ "learning_rate": 2.0483159395345454e-06,
+ "loss": 0.7791,
+ "step": 4150
+ },
+ {
+ "epoch": 0.8,
+ "learning_rate": 2.0445369212612255e-06,
+ "loss": 0.7414,
+ "step": 4151
+ },
+ {
+ "epoch": 0.8,
+ "learning_rate": 2.0407609952513008e-06,
+ "loss": 0.7668,
+ "step": 4152
+ },
+ {
+ "epoch": 0.8,
+ "learning_rate": 2.036988162972459e-06,
+ "loss": 0.8031,
+ "step": 4153
+ },
+ {
+ "epoch": 0.8,
+ "learning_rate": 2.0332184258911893e-06,
+ "loss": 0.751,
+ "step": 4154
+ },
+ {
+ "epoch": 0.8,
+ "learning_rate": 2.0294517854727724e-06,
+ "loss": 0.7332,
+ "step": 4155
+ },
+ {
+ "epoch": 0.8,
+ "learning_rate": 2.0256882431812953e-06,
+ "loss": 0.742,
+ "step": 4156
+ },
+ {
+ "epoch": 0.8,
+ "learning_rate": 2.021927800479633e-06,
+ "loss": 0.8245,
+ "step": 4157
+ },
+ {
+ "epoch": 0.8,
+ "learning_rate": 2.018170458829454e-06,
+ "loss": 0.7538,
+ "step": 4158
+ },
+ {
+ "epoch": 0.8,
+ "learning_rate": 2.0144162196912308e-06,
+ "loss": 0.792,
+ "step": 4159
+ },
+ {
+ "epoch": 0.8,
+ "learning_rate": 2.0106650845242224e-06,
+ "loss": 0.7403,
+ "step": 4160
+ },
+ {
+ "epoch": 0.8,
+ "learning_rate": 2.0069170547864803e-06,
+ "loss": 0.7098,
+ "step": 4161
+ },
+ {
+ "epoch": 0.8,
+ "learning_rate": 2.0031721319348596e-06,
+ "loss": 0.7661,
+ "step": 4162
+ },
+ {
+ "epoch": 0.8,
+ "learning_rate": 1.9994303174249887e-06,
+ "loss": 0.7877,
+ "step": 4163
+ },
+ {
+ "epoch": 0.8,
+ "learning_rate": 1.995691612711308e-06,
+ "loss": 0.7428,
+ "step": 4164
+ },
+ {
+ "epoch": 0.8,
+ "learning_rate": 1.9919560192470346e-06,
+ "loss": 0.7383,
+ "step": 4165
+ },
+ {
+ "epoch": 0.8,
+ "learning_rate": 1.9882235384841864e-06,
+ "loss": 0.7764,
+ "step": 4166
+ },
+ {
+ "epoch": 0.8,
+ "learning_rate": 1.9844941718735653e-06,
+ "loss": 0.7643,
+ "step": 4167
+ },
+ {
+ "epoch": 0.8,
+ "learning_rate": 1.9807679208647625e-06,
+ "loss": 0.7959,
+ "step": 4168
+ },
+ {
+ "epoch": 0.8,
+ "learning_rate": 1.9770447869061625e-06,
+ "loss": 1.2125,
+ "step": 4169
+ },
+ {
+ "epoch": 0.8,
+ "learning_rate": 1.9733247714449354e-06,
+ "loss": 0.8095,
+ "step": 4170
+ },
+ {
+ "epoch": 0.8,
+ "learning_rate": 1.9696078759270386e-06,
+ "loss": 0.724,
+ "step": 4171
+ },
+ {
+ "epoch": 0.8,
+ "learning_rate": 1.9658941017972187e-06,
+ "loss": 0.8154,
+ "step": 4172
+ },
+ {
+ "epoch": 0.8,
+ "learning_rate": 1.9621834504990044e-06,
+ "loss": 0.7682,
+ "step": 4173
+ },
+ {
+ "epoch": 0.8,
+ "learning_rate": 1.9584759234747187e-06,
+ "loss": 0.7696,
+ "step": 4174
+ },
+ {
+ "epoch": 0.8,
+ "learning_rate": 1.9547715221654617e-06,
+ "loss": 0.8234,
+ "step": 4175
+ },
+ {
+ "epoch": 0.8,
+ "learning_rate": 1.951070248011128e-06,
+ "loss": 0.7091,
+ "step": 4176
+ },
+ {
+ "epoch": 0.8,
+ "learning_rate": 1.9473721024503865e-06,
+ "loss": 0.7834,
+ "step": 4177
+ },
+ {
+ "epoch": 0.8,
+ "learning_rate": 1.943677086920698e-06,
+ "loss": 0.7356,
+ "step": 4178
+ },
+ {
+ "epoch": 0.8,
+ "learning_rate": 1.9399852028583e-06,
+ "loss": 0.7115,
+ "step": 4179
+ },
+ {
+ "epoch": 0.8,
+ "learning_rate": 1.9362964516982143e-06,
+ "loss": 0.7398,
+ "step": 4180
+ },
+ {
+ "epoch": 0.8,
+ "learning_rate": 1.9326108348742545e-06,
+ "loss": 1.2088,
+ "step": 4181
+ },
+ {
+ "epoch": 0.81,
+ "learning_rate": 1.928928353819003e-06,
+ "loss": 0.7729,
+ "step": 4182
+ },
+ {
+ "epoch": 0.81,
+ "learning_rate": 1.9252490099638265e-06,
+ "loss": 0.723,
+ "step": 4183
+ },
+ {
+ "epoch": 0.81,
+ "learning_rate": 1.9215728047388804e-06,
+ "loss": 0.7785,
+ "step": 4184
+ },
+ {
+ "epoch": 0.81,
+ "learning_rate": 1.917899739573088e-06,
+ "loss": 0.8018,
+ "step": 4185
+ },
+ {
+ "epoch": 0.81,
+ "learning_rate": 1.914229815894164e-06,
+ "loss": 0.7813,
+ "step": 4186
+ },
+ {
+ "epoch": 0.81,
+ "learning_rate": 1.910563035128593e-06,
+ "loss": 0.786,
+ "step": 4187
+ },
+ {
+ "epoch": 0.81,
+ "learning_rate": 1.9068993987016415e-06,
+ "loss": 0.7803,
+ "step": 4188
+ },
+ {
+ "epoch": 0.81,
+ "learning_rate": 1.9032389080373537e-06,
+ "loss": 0.7771,
+ "step": 4189
+ },
+ {
+ "epoch": 0.81,
+ "learning_rate": 1.899581564558548e-06,
+ "loss": 0.7189,
+ "step": 4190
+ },
+ {
+ "epoch": 0.81,
+ "learning_rate": 1.8959273696868264e-06,
+ "loss": 0.7228,
+ "step": 4191
+ },
+ {
+ "epoch": 0.81,
+ "learning_rate": 1.892276324842558e-06,
+ "loss": 0.7754,
+ "step": 4192
+ },
+ {
+ "epoch": 0.81,
+ "learning_rate": 1.8886284314449e-06,
+ "loss": 0.827,
+ "step": 4193
+ },
+ {
+ "epoch": 0.81,
+ "learning_rate": 1.8849836909117713e-06,
+ "loss": 0.763,
+ "step": 4194
+ },
+ {
+ "epoch": 0.81,
+ "learning_rate": 1.8813421046598723e-06,
+ "loss": 0.8162,
+ "step": 4195
+ },
+ {
+ "epoch": 0.81,
+ "learning_rate": 1.8777036741046761e-06,
+ "loss": 0.7067,
+ "step": 4196
+ },
+ {
+ "epoch": 0.81,
+ "learning_rate": 1.874068400660427e-06,
+ "loss": 0.7933,
+ "step": 4197
+ },
+ {
+ "epoch": 0.81,
+ "learning_rate": 1.8704362857401492e-06,
+ "loss": 0.7951,
+ "step": 4198
+ },
+ {
+ "epoch": 0.81,
+ "learning_rate": 1.866807330755631e-06,
+ "loss": 0.781,
+ "step": 4199
+ },
+ {
+ "epoch": 0.81,
+ "learning_rate": 1.8631815371174344e-06,
+ "loss": 0.785,
+ "step": 4200
+ },
+ {
+ "epoch": 0.81,
+ "learning_rate": 1.8595589062348985e-06,
+ "loss": 0.7712,
+ "step": 4201
+ },
+ {
+ "epoch": 0.81,
+ "learning_rate": 1.8559394395161223e-06,
+ "loss": 0.7723,
+ "step": 4202
+ },
+ {
+ "epoch": 0.81,
+ "learning_rate": 1.8523231383679885e-06,
+ "loss": 0.7592,
+ "step": 4203
+ },
+ {
+ "epoch": 0.81,
+ "learning_rate": 1.8487100041961382e-06,
+ "loss": 0.784,
+ "step": 4204
+ },
+ {
+ "epoch": 0.81,
+ "learning_rate": 1.8451000384049845e-06,
+ "loss": 0.8096,
+ "step": 4205
+ },
+ {
+ "epoch": 0.81,
+ "learning_rate": 1.8414932423977105e-06,
+ "loss": 0.7743,
+ "step": 4206
+ },
+ {
+ "epoch": 0.81,
+ "learning_rate": 1.837889617576264e-06,
+ "loss": 0.75,
+ "step": 4207
+ },
+ {
+ "epoch": 0.81,
+ "learning_rate": 1.8342891653413676e-06,
+ "loss": 0.7579,
+ "step": 4208
+ },
+ {
+ "epoch": 0.81,
+ "learning_rate": 1.8306918870925028e-06,
+ "loss": 0.7385,
+ "step": 4209
+ },
+ {
+ "epoch": 0.81,
+ "learning_rate": 1.827097784227917e-06,
+ "loss": 0.763,
+ "step": 4210
+ },
+ {
+ "epoch": 0.81,
+ "learning_rate": 1.8235068581446348e-06,
+ "loss": 0.7595,
+ "step": 4211
+ },
+ {
+ "epoch": 0.81,
+ "learning_rate": 1.8199191102384317e-06,
+ "loss": 0.7465,
+ "step": 4212
+ },
+ {
+ "epoch": 0.81,
+ "learning_rate": 1.8163345419038559e-06,
+ "loss": 0.801,
+ "step": 4213
+ },
+ {
+ "epoch": 0.81,
+ "learning_rate": 1.8127531545342147e-06,
+ "loss": 0.7562,
+ "step": 4214
+ },
+ {
+ "epoch": 0.81,
+ "learning_rate": 1.809174949521587e-06,
+ "loss": 0.759,
+ "step": 4215
+ },
+ {
+ "epoch": 0.81,
+ "learning_rate": 1.8055999282568092e-06,
+ "loss": 0.7798,
+ "step": 4216
+ },
+ {
+ "epoch": 0.81,
+ "learning_rate": 1.8020280921294752e-06,
+ "loss": 0.7762,
+ "step": 4217
+ },
+ {
+ "epoch": 0.81,
+ "learning_rate": 1.7984594425279534e-06,
+ "loss": 0.7141,
+ "step": 4218
+ },
+ {
+ "epoch": 0.81,
+ "learning_rate": 1.7948939808393628e-06,
+ "loss": 0.818,
+ "step": 4219
+ },
+ {
+ "epoch": 0.81,
+ "learning_rate": 1.7913317084495862e-06,
+ "loss": 0.747,
+ "step": 4220
+ },
+ {
+ "epoch": 0.81,
+ "learning_rate": 1.7877726267432749e-06,
+ "loss": 1.1651,
+ "step": 4221
+ },
+ {
+ "epoch": 0.81,
+ "learning_rate": 1.784216737103821e-06,
+ "loss": 0.7747,
+ "step": 4222
+ },
+ {
+ "epoch": 0.81,
+ "learning_rate": 1.7806640409133969e-06,
+ "loss": 0.6959,
+ "step": 4223
+ },
+ {
+ "epoch": 0.81,
+ "learning_rate": 1.7771145395529188e-06,
+ "loss": 1.1366,
+ "step": 4224
+ },
+ {
+ "epoch": 0.81,
+ "learning_rate": 1.7735682344020722e-06,
+ "loss": 0.7501,
+ "step": 4225
+ },
+ {
+ "epoch": 0.81,
+ "learning_rate": 1.770025126839291e-06,
+ "loss": 0.7161,
+ "step": 4226
+ },
+ {
+ "epoch": 0.81,
+ "learning_rate": 1.766485218241768e-06,
+ "loss": 0.7524,
+ "step": 4227
+ },
+ {
+ "epoch": 0.81,
+ "learning_rate": 1.7629485099854604e-06,
+ "loss": 0.7233,
+ "step": 4228
+ },
+ {
+ "epoch": 0.81,
+ "learning_rate": 1.7594150034450709e-06,
+ "loss": 0.8027,
+ "step": 4229
+ },
+ {
+ "epoch": 0.81,
+ "learning_rate": 1.7558846999940648e-06,
+ "loss": 0.7496,
+ "step": 4230
+ },
+ {
+ "epoch": 0.81,
+ "learning_rate": 1.7523576010046585e-06,
+ "loss": 0.7694,
+ "step": 4231
+ },
+ {
+ "epoch": 0.81,
+ "learning_rate": 1.7488337078478212e-06,
+ "loss": 0.7784,
+ "step": 4232
+ },
+ {
+ "epoch": 0.81,
+ "learning_rate": 1.7453130218932858e-06,
+ "loss": 0.7786,
+ "step": 4233
+ },
+ {
+ "epoch": 0.82,
+ "learning_rate": 1.7417955445095258e-06,
+ "loss": 0.7795,
+ "step": 4234
+ },
+ {
+ "epoch": 0.82,
+ "learning_rate": 1.7382812770637791e-06,
+ "loss": 0.7721,
+ "step": 4235
+ },
+ {
+ "epoch": 0.82,
+ "learning_rate": 1.7347702209220264e-06,
+ "loss": 0.7209,
+ "step": 4236
+ },
+ {
+ "epoch": 0.82,
+ "learning_rate": 1.7312623774490057e-06,
+ "loss": 0.7568,
+ "step": 4237
+ },
+ {
+ "epoch": 0.82,
+ "learning_rate": 1.7277577480082042e-06,
+ "loss": 1.1669,
+ "step": 4238
+ },
+ {
+ "epoch": 0.82,
+ "learning_rate": 1.7242563339618578e-06,
+ "loss": 0.8169,
+ "step": 4239
+ },
+ {
+ "epoch": 0.82,
+ "learning_rate": 1.7207581366709603e-06,
+ "loss": 0.7435,
+ "step": 4240
+ },
+ {
+ "epoch": 0.82,
+ "learning_rate": 1.7172631574952448e-06,
+ "loss": 0.7282,
+ "step": 4241
+ },
+ {
+ "epoch": 0.82,
+ "learning_rate": 1.7137713977932024e-06,
+ "loss": 0.7334,
+ "step": 4242
+ },
+ {
+ "epoch": 0.82,
+ "learning_rate": 1.7102828589220689e-06,
+ "loss": 0.8034,
+ "step": 4243
+ },
+ {
+ "epoch": 0.82,
+ "learning_rate": 1.7067975422378247e-06,
+ "loss": 0.7057,
+ "step": 4244
+ },
+ {
+ "epoch": 0.82,
+ "learning_rate": 1.7033154490952064e-06,
+ "loss": 0.6886,
+ "step": 4245
+ },
+ {
+ "epoch": 0.82,
+ "learning_rate": 1.6998365808476891e-06,
+ "loss": 0.7539,
+ "step": 4246
+ },
+ {
+ "epoch": 0.82,
+ "learning_rate": 1.6963609388475e-06,
+ "loss": 0.7731,
+ "step": 4247
+ },
+ {
+ "epoch": 0.82,
+ "learning_rate": 1.6928885244456084e-06,
+ "loss": 0.7239,
+ "step": 4248
+ },
+ {
+ "epoch": 0.82,
+ "learning_rate": 1.689419338991729e-06,
+ "loss": 0.777,
+ "step": 4249
+ },
+ {
+ "epoch": 0.82,
+ "learning_rate": 1.685953383834329e-06,
+ "loss": 0.7917,
+ "step": 4250
+ },
+ {
+ "epoch": 0.82,
+ "learning_rate": 1.682490660320608e-06,
+ "loss": 1.1064,
+ "step": 4251
+ },
+ {
+ "epoch": 0.82,
+ "learning_rate": 1.6790311697965212e-06,
+ "loss": 0.7321,
+ "step": 4252
+ },
+ {
+ "epoch": 0.82,
+ "learning_rate": 1.675574913606759e-06,
+ "loss": 0.7212,
+ "step": 4253
+ },
+ {
+ "epoch": 0.82,
+ "learning_rate": 1.672121893094758e-06,
+ "loss": 0.7969,
+ "step": 4254
+ },
+ {
+ "epoch": 0.82,
+ "learning_rate": 1.6686721096026959e-06,
+ "loss": 0.6972,
+ "step": 4255
+ },
+ {
+ "epoch": 0.82,
+ "learning_rate": 1.6652255644714888e-06,
+ "loss": 0.7476,
+ "step": 4256
+ },
+ {
+ "epoch": 0.82,
+ "learning_rate": 1.661782259040805e-06,
+ "loss": 0.7291,
+ "step": 4257
+ },
+ {
+ "epoch": 0.82,
+ "learning_rate": 1.658342194649043e-06,
+ "loss": 1.158,
+ "step": 4258
+ },
+ {
+ "epoch": 0.82,
+ "learning_rate": 1.654905372633343e-06,
+ "loss": 1.2205,
+ "step": 4259
+ },
+ {
+ "epoch": 0.82,
+ "learning_rate": 1.651471794329591e-06,
+ "loss": 0.697,
+ "step": 4260
+ },
+ {
+ "epoch": 0.82,
+ "learning_rate": 1.6480414610724038e-06,
+ "loss": 0.745,
+ "step": 4261
+ },
+ {
+ "epoch": 0.82,
+ "learning_rate": 1.644614374195146e-06,
+ "loss": 0.7669,
+ "step": 4262
+ },
+ {
+ "epoch": 0.82,
+ "learning_rate": 1.6411905350299118e-06,
+ "loss": 0.8075,
+ "step": 4263
+ },
+ {
+ "epoch": 0.82,
+ "learning_rate": 1.637769944907539e-06,
+ "loss": 0.754,
+ "step": 4264
+ },
+ {
+ "epoch": 0.82,
+ "learning_rate": 1.6343526051575976e-06,
+ "loss": 0.8274,
+ "step": 4265
+ },
+ {
+ "epoch": 0.82,
+ "learning_rate": 1.630938517108397e-06,
+ "loss": 0.7374,
+ "step": 4266
+ },
+ {
+ "epoch": 0.82,
+ "learning_rate": 1.6275276820869845e-06,
+ "loss": 0.7227,
+ "step": 4267
+ },
+ {
+ "epoch": 0.82,
+ "learning_rate": 1.6241201014191399e-06,
+ "loss": 0.8375,
+ "step": 4268
+ },
+ {
+ "epoch": 0.82,
+ "learning_rate": 1.620715776429378e-06,
+ "loss": 0.7446,
+ "step": 4269
+ },
+ {
+ "epoch": 0.82,
+ "learning_rate": 1.6173147084409513e-06,
+ "loss": 0.727,
+ "step": 4270
+ },
+ {
+ "epoch": 0.82,
+ "learning_rate": 1.6139168987758437e-06,
+ "loss": 0.7956,
+ "step": 4271
+ },
+ {
+ "epoch": 0.82,
+ "learning_rate": 1.6105223487547727e-06,
+ "loss": 0.7856,
+ "step": 4272
+ },
+ {
+ "epoch": 0.82,
+ "learning_rate": 1.6071310596971856e-06,
+ "loss": 0.736,
+ "step": 4273
+ },
+ {
+ "epoch": 0.82,
+ "learning_rate": 1.6037430329212722e-06,
+ "loss": 0.7586,
+ "step": 4274
+ },
+ {
+ "epoch": 0.82,
+ "learning_rate": 1.6003582697439447e-06,
+ "loss": 0.7571,
+ "step": 4275
+ },
+ {
+ "epoch": 0.82,
+ "learning_rate": 1.5969767714808481e-06,
+ "loss": 0.7461,
+ "step": 4276
+ },
+ {
+ "epoch": 0.82,
+ "learning_rate": 1.5935985394463637e-06,
+ "loss": 0.7967,
+ "step": 4277
+ },
+ {
+ "epoch": 0.82,
+ "learning_rate": 1.5902235749535977e-06,
+ "loss": 0.8176,
+ "step": 4278
+ },
+ {
+ "epoch": 0.82,
+ "learning_rate": 1.586851879314386e-06,
+ "loss": 0.7909,
+ "step": 4279
+ },
+ {
+ "epoch": 0.82,
+ "learning_rate": 1.5834834538393029e-06,
+ "loss": 0.8185,
+ "step": 4280
+ },
+ {
+ "epoch": 0.82,
+ "learning_rate": 1.5801182998376363e-06,
+ "loss": 0.7672,
+ "step": 4281
+ },
+ {
+ "epoch": 0.82,
+ "learning_rate": 1.5767564186174167e-06,
+ "loss": 0.7617,
+ "step": 4282
+ },
+ {
+ "epoch": 0.82,
+ "learning_rate": 1.573397811485392e-06,
+ "loss": 0.7495,
+ "step": 4283
+ },
+ {
+ "epoch": 0.82,
+ "learning_rate": 1.5700424797470482e-06,
+ "loss": 0.7221,
+ "step": 4284
+ },
+ {
+ "epoch": 0.82,
+ "learning_rate": 1.5666904247065885e-06,
+ "loss": 0.7233,
+ "step": 4285
+ },
+ {
+ "epoch": 0.83,
+ "learning_rate": 1.563341647666945e-06,
+ "loss": 0.7206,
+ "step": 4286
+ },
+ {
+ "epoch": 0.83,
+ "learning_rate": 1.55999614992978e-06,
+ "loss": 0.7307,
+ "step": 4287
+ },
+ {
+ "epoch": 0.83,
+ "learning_rate": 1.5566539327954766e-06,
+ "loss": 0.7443,
+ "step": 4288
+ },
+ {
+ "epoch": 0.83,
+ "learning_rate": 1.5533149975631445e-06,
+ "loss": 0.7909,
+ "step": 4289
+ },
+ {
+ "epoch": 0.83,
+ "learning_rate": 1.5499793455306133e-06,
+ "loss": 0.8103,
+ "step": 4290
+ },
+ {
+ "epoch": 0.83,
+ "learning_rate": 1.5466469779944459e-06,
+ "loss": 0.7773,
+ "step": 4291
+ },
+ {
+ "epoch": 0.83,
+ "learning_rate": 1.5433178962499207e-06,
+ "loss": 0.7647,
+ "step": 4292
+ },
+ {
+ "epoch": 0.83,
+ "learning_rate": 1.5399921015910402e-06,
+ "loss": 0.7596,
+ "step": 4293
+ },
+ {
+ "epoch": 0.83,
+ "learning_rate": 1.5366695953105326e-06,
+ "loss": 0.7778,
+ "step": 4294
+ },
+ {
+ "epoch": 0.83,
+ "learning_rate": 1.5333503786998449e-06,
+ "loss": 0.7797,
+ "step": 4295
+ },
+ {
+ "epoch": 0.83,
+ "learning_rate": 1.5300344530491428e-06,
+ "loss": 0.7874,
+ "step": 4296
+ },
+ {
+ "epoch": 0.83,
+ "learning_rate": 1.5267218196473254e-06,
+ "loss": 0.7831,
+ "step": 4297
+ },
+ {
+ "epoch": 0.83,
+ "learning_rate": 1.5234124797819905e-06,
+ "loss": 0.7554,
+ "step": 4298
+ },
+ {
+ "epoch": 0.83,
+ "learning_rate": 1.5201064347394768e-06,
+ "loss": 0.7508,
+ "step": 4299
+ },
+ {
+ "epoch": 0.83,
+ "learning_rate": 1.5168036858048274e-06,
+ "loss": 0.764,
+ "step": 4300
+ },
+ {
+ "epoch": 0.83,
+ "learning_rate": 1.5135042342618178e-06,
+ "loss": 0.7583,
+ "step": 4301
+ },
+ {
+ "epoch": 0.83,
+ "learning_rate": 1.5102080813929298e-06,
+ "loss": 0.7656,
+ "step": 4302
+ },
+ {
+ "epoch": 0.83,
+ "learning_rate": 1.5069152284793653e-06,
+ "loss": 0.7693,
+ "step": 4303
+ },
+ {
+ "epoch": 0.83,
+ "learning_rate": 1.5036256768010516e-06,
+ "loss": 0.7772,
+ "step": 4304
+ },
+ {
+ "epoch": 0.83,
+ "learning_rate": 1.5003394276366245e-06,
+ "loss": 0.7493,
+ "step": 4305
+ },
+ {
+ "epoch": 0.83,
+ "learning_rate": 1.4970564822634382e-06,
+ "loss": 0.775,
+ "step": 4306
+ },
+ {
+ "epoch": 0.83,
+ "learning_rate": 1.4937768419575626e-06,
+ "loss": 0.7958,
+ "step": 4307
+ },
+ {
+ "epoch": 0.83,
+ "learning_rate": 1.4905005079937829e-06,
+ "loss": 0.7744,
+ "step": 4308
+ },
+ {
+ "epoch": 0.83,
+ "learning_rate": 1.4872274816456034e-06,
+ "loss": 0.7577,
+ "step": 4309
+ },
+ {
+ "epoch": 0.83,
+ "learning_rate": 1.4839577641852342e-06,
+ "loss": 0.7954,
+ "step": 4310
+ },
+ {
+ "epoch": 0.83,
+ "learning_rate": 1.48069135688361e-06,
+ "loss": 0.7297,
+ "step": 4311
+ },
+ {
+ "epoch": 0.83,
+ "learning_rate": 1.4774282610103695e-06,
+ "loss": 0.7431,
+ "step": 4312
+ },
+ {
+ "epoch": 0.83,
+ "learning_rate": 1.4741684778338672e-06,
+ "loss": 0.7869,
+ "step": 4313
+ },
+ {
+ "epoch": 0.83,
+ "learning_rate": 1.4709120086211726e-06,
+ "loss": 0.8037,
+ "step": 4314
+ },
+ {
+ "epoch": 0.83,
+ "learning_rate": 1.467658854638062e-06,
+ "loss": 0.7895,
+ "step": 4315
+ },
+ {
+ "epoch": 0.83,
+ "learning_rate": 1.4644090171490298e-06,
+ "loss": 0.7474,
+ "step": 4316
+ },
+ {
+ "epoch": 0.83,
+ "learning_rate": 1.4611624974172756e-06,
+ "loss": 0.7732,
+ "step": 4317
+ },
+ {
+ "epoch": 0.83,
+ "learning_rate": 1.4579192967047085e-06,
+ "loss": 0.8465,
+ "step": 4318
+ },
+ {
+ "epoch": 0.83,
+ "learning_rate": 1.4546794162719568e-06,
+ "loss": 0.7058,
+ "step": 4319
+ },
+ {
+ "epoch": 0.83,
+ "learning_rate": 1.4514428573783456e-06,
+ "loss": 0.7829,
+ "step": 4320
+ },
+ {
+ "epoch": 0.83,
+ "learning_rate": 1.4482096212819208e-06,
+ "loss": 0.7101,
+ "step": 4321
+ },
+ {
+ "epoch": 0.83,
+ "learning_rate": 1.444979709239428e-06,
+ "loss": 0.7683,
+ "step": 4322
+ },
+ {
+ "epoch": 0.83,
+ "learning_rate": 1.4417531225063242e-06,
+ "loss": 0.7438,
+ "step": 4323
+ },
+ {
+ "epoch": 0.83,
+ "learning_rate": 1.4385298623367727e-06,
+ "loss": 0.8318,
+ "step": 4324
+ },
+ {
+ "epoch": 0.83,
+ "learning_rate": 1.4353099299836437e-06,
+ "loss": 0.7563,
+ "step": 4325
+ },
+ {
+ "epoch": 0.83,
+ "learning_rate": 1.4320933266985182e-06,
+ "loss": 0.7113,
+ "step": 4326
+ },
+ {
+ "epoch": 0.83,
+ "learning_rate": 1.4288800537316772e-06,
+ "loss": 0.8123,
+ "step": 4327
+ },
+ {
+ "epoch": 0.83,
+ "learning_rate": 1.4256701123321082e-06,
+ "loss": 0.7616,
+ "step": 4328
+ },
+ {
+ "epoch": 0.83,
+ "learning_rate": 1.42246350374751e-06,
+ "loss": 0.7565,
+ "step": 4329
+ },
+ {
+ "epoch": 0.83,
+ "learning_rate": 1.4192602292242786e-06,
+ "loss": 0.7498,
+ "step": 4330
+ },
+ {
+ "epoch": 0.83,
+ "learning_rate": 1.416060290007517e-06,
+ "loss": 0.7557,
+ "step": 4331
+ },
+ {
+ "epoch": 0.83,
+ "learning_rate": 1.4128636873410283e-06,
+ "loss": 0.7683,
+ "step": 4332
+ },
+ {
+ "epoch": 0.83,
+ "learning_rate": 1.4096704224673285e-06,
+ "loss": 0.7659,
+ "step": 4333
+ },
+ {
+ "epoch": 0.83,
+ "learning_rate": 1.4064804966276247e-06,
+ "loss": 0.7466,
+ "step": 4334
+ },
+ {
+ "epoch": 0.83,
+ "learning_rate": 1.4032939110618294e-06,
+ "loss": 0.7758,
+ "step": 4335
+ },
+ {
+ "epoch": 0.83,
+ "learning_rate": 1.400110667008564e-06,
+ "loss": 0.7854,
+ "step": 4336
+ },
+ {
+ "epoch": 0.83,
+ "learning_rate": 1.3969307657051412e-06,
+ "loss": 0.7504,
+ "step": 4337
+ },
+ {
+ "epoch": 0.84,
+ "learning_rate": 1.3937542083875811e-06,
+ "loss": 0.7996,
+ "step": 4338
+ },
+ {
+ "epoch": 0.84,
+ "learning_rate": 1.3905809962906002e-06,
+ "loss": 0.7713,
+ "step": 4339
+ },
+ {
+ "epoch": 0.84,
+ "learning_rate": 1.3874111306476168e-06,
+ "loss": 0.7833,
+ "step": 4340
+ },
+ {
+ "epoch": 0.84,
+ "learning_rate": 1.3842446126907472e-06,
+ "loss": 0.7829,
+ "step": 4341
+ },
+ {
+ "epoch": 0.84,
+ "learning_rate": 1.3810814436508046e-06,
+ "loss": 0.8074,
+ "step": 4342
+ },
+ {
+ "epoch": 0.84,
+ "learning_rate": 1.3779216247573068e-06,
+ "loss": 0.8084,
+ "step": 4343
+ },
+ {
+ "epoch": 0.84,
+ "learning_rate": 1.3747651572384636e-06,
+ "loss": 0.8085,
+ "step": 4344
+ },
+ {
+ "epoch": 0.84,
+ "learning_rate": 1.371612042321182e-06,
+ "loss": 0.7424,
+ "step": 4345
+ },
+ {
+ "epoch": 0.84,
+ "learning_rate": 1.3684622812310723e-06,
+ "loss": 0.7949,
+ "step": 4346
+ },
+ {
+ "epoch": 0.84,
+ "learning_rate": 1.3653158751924334e-06,
+ "loss": 0.7521,
+ "step": 4347
+ },
+ {
+ "epoch": 0.84,
+ "learning_rate": 1.3621728254282652e-06,
+ "loss": 0.7842,
+ "step": 4348
+ },
+ {
+ "epoch": 0.84,
+ "learning_rate": 1.3590331331602568e-06,
+ "loss": 0.7513,
+ "step": 4349
+ },
+ {
+ "epoch": 0.84,
+ "learning_rate": 1.3558967996088034e-06,
+ "loss": 0.7677,
+ "step": 4350
+ },
+ {
+ "epoch": 0.84,
+ "learning_rate": 1.3527638259929843e-06,
+ "loss": 0.7938,
+ "step": 4351
+ },
+ {
+ "epoch": 0.84,
+ "learning_rate": 1.3496342135305752e-06,
+ "loss": 0.7119,
+ "step": 4352
+ },
+ {
+ "epoch": 0.84,
+ "learning_rate": 1.3465079634380496e-06,
+ "loss": 0.7645,
+ "step": 4353
+ },
+ {
+ "epoch": 0.84,
+ "learning_rate": 1.34338507693057e-06,
+ "loss": 0.7676,
+ "step": 4354
+ },
+ {
+ "epoch": 0.84,
+ "learning_rate": 1.3402655552219902e-06,
+ "loss": 0.7324,
+ "step": 4355
+ },
+ {
+ "epoch": 0.84,
+ "learning_rate": 1.3371493995248642e-06,
+ "loss": 0.7595,
+ "step": 4356
+ },
+ {
+ "epoch": 0.84,
+ "learning_rate": 1.3340366110504243e-06,
+ "loss": 0.7257,
+ "step": 4357
+ },
+ {
+ "epoch": 0.84,
+ "learning_rate": 1.3309271910086074e-06,
+ "loss": 0.7587,
+ "step": 4358
+ },
+ {
+ "epoch": 0.84,
+ "learning_rate": 1.327821140608031e-06,
+ "loss": 0.8,
+ "step": 4359
+ },
+ {
+ "epoch": 0.84,
+ "learning_rate": 1.3247184610560115e-06,
+ "loss": 0.7929,
+ "step": 4360
+ },
+ {
+ "epoch": 0.84,
+ "learning_rate": 1.3216191535585487e-06,
+ "loss": 0.7163,
+ "step": 4361
+ },
+ {
+ "epoch": 0.84,
+ "learning_rate": 1.3185232193203311e-06,
+ "loss": 1.1385,
+ "step": 4362
+ },
+ {
+ "epoch": 0.84,
+ "learning_rate": 1.3154306595447441e-06,
+ "loss": 0.7905,
+ "step": 4363
+ },
+ {
+ "epoch": 0.84,
+ "learning_rate": 1.3123414754338515e-06,
+ "loss": 0.809,
+ "step": 4364
+ },
+ {
+ "epoch": 0.84,
+ "learning_rate": 1.3092556681884106e-06,
+ "loss": 0.74,
+ "step": 4365
+ },
+ {
+ "epoch": 0.84,
+ "learning_rate": 1.3061732390078652e-06,
+ "loss": 0.8002,
+ "step": 4366
+ },
+ {
+ "epoch": 0.84,
+ "learning_rate": 1.3030941890903437e-06,
+ "loss": 0.782,
+ "step": 4367
+ },
+ {
+ "epoch": 0.84,
+ "learning_rate": 1.3000185196326665e-06,
+ "loss": 0.7173,
+ "step": 4368
+ },
+ {
+ "epoch": 0.84,
+ "learning_rate": 1.2969462318303338e-06,
+ "loss": 0.805,
+ "step": 4369
+ },
+ {
+ "epoch": 0.84,
+ "learning_rate": 1.2938773268775373e-06,
+ "loss": 0.7501,
+ "step": 4370
+ },
+ {
+ "epoch": 0.84,
+ "learning_rate": 1.290811805967148e-06,
+ "loss": 0.7668,
+ "step": 4371
+ },
+ {
+ "epoch": 0.84,
+ "learning_rate": 1.2877496702907232e-06,
+ "loss": 0.7077,
+ "step": 4372
+ },
+ {
+ "epoch": 0.84,
+ "learning_rate": 1.2846909210385116e-06,
+ "loss": 0.7661,
+ "step": 4373
+ },
+ {
+ "epoch": 0.84,
+ "learning_rate": 1.2816355593994312e-06,
+ "loss": 0.7427,
+ "step": 4374
+ },
+ {
+ "epoch": 0.84,
+ "learning_rate": 1.2785835865610974e-06,
+ "loss": 0.7742,
+ "step": 4375
+ },
+ {
+ "epoch": 0.84,
+ "learning_rate": 1.2755350037097991e-06,
+ "loss": 0.8013,
+ "step": 4376
+ },
+ {
+ "epoch": 0.84,
+ "learning_rate": 1.2724898120305107e-06,
+ "loss": 0.7678,
+ "step": 4377
+ },
+ {
+ "epoch": 0.84,
+ "learning_rate": 1.269448012706892e-06,
+ "loss": 0.7956,
+ "step": 4378
+ },
+ {
+ "epoch": 0.84,
+ "learning_rate": 1.2664096069212762e-06,
+ "loss": 0.7323,
+ "step": 4379
+ },
+ {
+ "epoch": 0.84,
+ "learning_rate": 1.2633745958546862e-06,
+ "loss": 0.7753,
+ "step": 4380
+ },
+ {
+ "epoch": 0.84,
+ "learning_rate": 1.2603429806868206e-06,
+ "loss": 0.7454,
+ "step": 4381
+ },
+ {
+ "epoch": 0.84,
+ "learning_rate": 1.2573147625960569e-06,
+ "loss": 0.8148,
+ "step": 4382
+ },
+ {
+ "epoch": 0.84,
+ "learning_rate": 1.2542899427594545e-06,
+ "loss": 0.7909,
+ "step": 4383
+ },
+ {
+ "epoch": 0.84,
+ "learning_rate": 1.2512685223527487e-06,
+ "loss": 0.7919,
+ "step": 4384
+ },
+ {
+ "epoch": 0.84,
+ "learning_rate": 1.2482505025503611e-06,
+ "loss": 0.7648,
+ "step": 4385
+ },
+ {
+ "epoch": 0.84,
+ "learning_rate": 1.245235884525383e-06,
+ "loss": 0.7794,
+ "step": 4386
+ },
+ {
+ "epoch": 0.84,
+ "learning_rate": 1.2422246694495898e-06,
+ "loss": 0.7718,
+ "step": 4387
+ },
+ {
+ "epoch": 0.84,
+ "learning_rate": 1.2392168584934283e-06,
+ "loss": 0.749,
+ "step": 4388
+ },
+ {
+ "epoch": 0.84,
+ "learning_rate": 1.2362124528260278e-06,
+ "loss": 0.7535,
+ "step": 4389
+ },
+ {
+ "epoch": 0.85,
+ "learning_rate": 1.2332114536151895e-06,
+ "loss": 0.7363,
+ "step": 4390
+ },
+ {
+ "epoch": 0.85,
+ "learning_rate": 1.2302138620273907e-06,
+ "loss": 0.7678,
+ "step": 4391
+ },
+ {
+ "epoch": 0.85,
+ "learning_rate": 1.227219679227789e-06,
+ "loss": 0.8405,
+ "step": 4392
+ },
+ {
+ "epoch": 0.85,
+ "learning_rate": 1.2242289063802137e-06,
+ "loss": 0.7608,
+ "step": 4393
+ },
+ {
+ "epoch": 0.85,
+ "learning_rate": 1.221241544647165e-06,
+ "loss": 0.7368,
+ "step": 4394
+ },
+ {
+ "epoch": 0.85,
+ "learning_rate": 1.2182575951898257e-06,
+ "loss": 0.7547,
+ "step": 4395
+ },
+ {
+ "epoch": 0.85,
+ "learning_rate": 1.215277059168044e-06,
+ "loss": 0.868,
+ "step": 4396
+ },
+ {
+ "epoch": 0.85,
+ "learning_rate": 1.2122999377403478e-06,
+ "loss": 0.8214,
+ "step": 4397
+ },
+ {
+ "epoch": 0.85,
+ "learning_rate": 1.2093262320639333e-06,
+ "loss": 0.8005,
+ "step": 4398
+ },
+ {
+ "epoch": 0.85,
+ "learning_rate": 1.2063559432946715e-06,
+ "loss": 0.7045,
+ "step": 4399
+ },
+ {
+ "epoch": 0.85,
+ "learning_rate": 1.2033890725871022e-06,
+ "loss": 0.8685,
+ "step": 4400
+ },
+ {
+ "epoch": 0.85,
+ "learning_rate": 1.2004256210944377e-06,
+ "loss": 1.0964,
+ "step": 4401
+ },
+ {
+ "epoch": 0.85,
+ "learning_rate": 1.1974655899685671e-06,
+ "loss": 0.7989,
+ "step": 4402
+ },
+ {
+ "epoch": 0.85,
+ "learning_rate": 1.1945089803600419e-06,
+ "loss": 0.753,
+ "step": 4403
+ },
+ {
+ "epoch": 0.85,
+ "learning_rate": 1.1915557934180855e-06,
+ "loss": 0.7606,
+ "step": 4404
+ },
+ {
+ "epoch": 0.85,
+ "learning_rate": 1.1886060302905956e-06,
+ "loss": 0.7804,
+ "step": 4405
+ },
+ {
+ "epoch": 0.85,
+ "learning_rate": 1.1856596921241359e-06,
+ "loss": 0.7899,
+ "step": 4406
+ },
+ {
+ "epoch": 0.85,
+ "learning_rate": 1.1827167800639362e-06,
+ "loss": 0.7469,
+ "step": 4407
+ },
+ {
+ "epoch": 0.85,
+ "learning_rate": 1.1797772952538967e-06,
+ "loss": 0.6897,
+ "step": 4408
+ },
+ {
+ "epoch": 0.85,
+ "learning_rate": 1.176841238836589e-06,
+ "loss": 0.7825,
+ "step": 4409
+ },
+ {
+ "epoch": 0.85,
+ "learning_rate": 1.1739086119532472e-06,
+ "loss": 0.7578,
+ "step": 4410
+ },
+ {
+ "epoch": 0.85,
+ "learning_rate": 1.1709794157437738e-06,
+ "loss": 0.7736,
+ "step": 4411
+ },
+ {
+ "epoch": 0.85,
+ "learning_rate": 1.1680536513467389e-06,
+ "loss": 0.7445,
+ "step": 4412
+ },
+ {
+ "epoch": 0.85,
+ "learning_rate": 1.1651313198993786e-06,
+ "loss": 0.7601,
+ "step": 4413
+ },
+ {
+ "epoch": 0.85,
+ "learning_rate": 1.1622124225375909e-06,
+ "loss": 0.8155,
+ "step": 4414
+ },
+ {
+ "epoch": 0.85,
+ "learning_rate": 1.1592969603959469e-06,
+ "loss": 0.7956,
+ "step": 4415
+ },
+ {
+ "epoch": 0.85,
+ "learning_rate": 1.1563849346076716e-06,
+ "loss": 0.781,
+ "step": 4416
+ },
+ {
+ "epoch": 0.85,
+ "learning_rate": 1.1534763463046661e-06,
+ "loss": 0.7438,
+ "step": 4417
+ },
+ {
+ "epoch": 0.85,
+ "learning_rate": 1.1505711966174838e-06,
+ "loss": 0.7693,
+ "step": 4418
+ },
+ {
+ "epoch": 0.85,
+ "learning_rate": 1.1476694866753513e-06,
+ "loss": 0.7517,
+ "step": 4419
+ },
+ {
+ "epoch": 0.85,
+ "learning_rate": 1.1447712176061543e-06,
+ "loss": 0.6971,
+ "step": 4420
+ },
+ {
+ "epoch": 0.85,
+ "learning_rate": 1.141876390536436e-06,
+ "loss": 0.8003,
+ "step": 4421
+ },
+ {
+ "epoch": 0.85,
+ "learning_rate": 1.1389850065914133e-06,
+ "loss": 0.7862,
+ "step": 4422
+ },
+ {
+ "epoch": 0.85,
+ "learning_rate": 1.1360970668949534e-06,
+ "loss": 0.7135,
+ "step": 4423
+ },
+ {
+ "epoch": 0.85,
+ "learning_rate": 1.1332125725695918e-06,
+ "loss": 0.7954,
+ "step": 4424
+ },
+ {
+ "epoch": 0.85,
+ "learning_rate": 1.130331524736521e-06,
+ "loss": 0.7662,
+ "step": 4425
+ },
+ {
+ "epoch": 0.85,
+ "learning_rate": 1.1274539245155925e-06,
+ "loss": 0.8178,
+ "step": 4426
+ },
+ {
+ "epoch": 0.85,
+ "learning_rate": 1.124579773025326e-06,
+ "loss": 0.7961,
+ "step": 4427
+ },
+ {
+ "epoch": 0.85,
+ "learning_rate": 1.1217090713828904e-06,
+ "loss": 0.7904,
+ "step": 4428
+ },
+ {
+ "epoch": 0.85,
+ "learning_rate": 1.1188418207041218e-06,
+ "loss": 0.756,
+ "step": 4429
+ },
+ {
+ "epoch": 0.85,
+ "learning_rate": 1.1159780221035111e-06,
+ "loss": 0.7082,
+ "step": 4430
+ },
+ {
+ "epoch": 0.85,
+ "learning_rate": 1.1131176766942042e-06,
+ "loss": 0.7458,
+ "step": 4431
+ },
+ {
+ "epoch": 0.85,
+ "learning_rate": 1.1102607855880155e-06,
+ "loss": 0.7534,
+ "step": 4432
+ },
+ {
+ "epoch": 0.85,
+ "learning_rate": 1.1074073498953996e-06,
+ "loss": 0.7838,
+ "step": 4433
+ },
+ {
+ "epoch": 0.85,
+ "learning_rate": 1.1045573707254864e-06,
+ "loss": 0.8127,
+ "step": 4434
+ },
+ {
+ "epoch": 0.85,
+ "learning_rate": 1.1017108491860485e-06,
+ "loss": 0.7601,
+ "step": 4435
+ },
+ {
+ "epoch": 0.85,
+ "learning_rate": 1.098867786383524e-06,
+ "loss": 0.7085,
+ "step": 4436
+ },
+ {
+ "epoch": 0.85,
+ "learning_rate": 1.0960281834230002e-06,
+ "loss": 0.7722,
+ "step": 4437
+ },
+ {
+ "epoch": 0.85,
+ "learning_rate": 1.0931920414082197e-06,
+ "loss": 0.8074,
+ "step": 4438
+ },
+ {
+ "epoch": 0.85,
+ "learning_rate": 1.0903593614415875e-06,
+ "loss": 0.8213,
+ "step": 4439
+ },
+ {
+ "epoch": 0.85,
+ "learning_rate": 1.0875301446241536e-06,
+ "loss": 0.7998,
+ "step": 4440
+ },
+ {
+ "epoch": 0.85,
+ "learning_rate": 1.0847043920556266e-06,
+ "loss": 0.7988,
+ "step": 4441
+ },
+ {
+ "epoch": 0.86,
+ "learning_rate": 1.0818821048343665e-06,
+ "loss": 0.8343,
+ "step": 4442
+ },
+ {
+ "epoch": 0.86,
+ "learning_rate": 1.0790632840573867e-06,
+ "loss": 0.7209,
+ "step": 4443
+ },
+ {
+ "epoch": 0.86,
+ "learning_rate": 1.0762479308203567e-06,
+ "loss": 0.751,
+ "step": 4444
+ },
+ {
+ "epoch": 0.86,
+ "learning_rate": 1.073436046217593e-06,
+ "loss": 0.7193,
+ "step": 4445
+ },
+ {
+ "epoch": 0.86,
+ "learning_rate": 1.0706276313420693e-06,
+ "loss": 0.82,
+ "step": 4446
+ },
+ {
+ "epoch": 0.86,
+ "learning_rate": 1.0678226872854058e-06,
+ "loss": 0.7157,
+ "step": 4447
+ },
+ {
+ "epoch": 0.86,
+ "learning_rate": 1.0650212151378736e-06,
+ "loss": 0.7799,
+ "step": 4448
+ },
+ {
+ "epoch": 0.86,
+ "learning_rate": 1.0622232159884004e-06,
+ "loss": 0.783,
+ "step": 4449
+ },
+ {
+ "epoch": 0.86,
+ "learning_rate": 1.0594286909245543e-06,
+ "loss": 0.7774,
+ "step": 4450
+ },
+ {
+ "epoch": 0.86,
+ "learning_rate": 1.056637641032563e-06,
+ "loss": 0.7644,
+ "step": 4451
+ },
+ {
+ "epoch": 0.86,
+ "learning_rate": 1.0538500673972974e-06,
+ "loss": 0.7453,
+ "step": 4452
+ },
+ {
+ "epoch": 0.86,
+ "learning_rate": 1.0510659711022753e-06,
+ "loss": 0.7789,
+ "step": 4453
+ },
+ {
+ "epoch": 0.86,
+ "learning_rate": 1.048285353229671e-06,
+ "loss": 0.8127,
+ "step": 4454
+ },
+ {
+ "epoch": 0.86,
+ "learning_rate": 1.045508214860298e-06,
+ "loss": 0.7193,
+ "step": 4455
+ },
+ {
+ "epoch": 0.86,
+ "learning_rate": 1.0427345570736246e-06,
+ "loss": 0.7436,
+ "step": 4456
+ },
+ {
+ "epoch": 0.86,
+ "learning_rate": 1.0399643809477623e-06,
+ "loss": 0.7292,
+ "step": 4457
+ },
+ {
+ "epoch": 0.86,
+ "learning_rate": 1.037197687559467e-06,
+ "loss": 0.6826,
+ "step": 4458
+ },
+ {
+ "epoch": 0.86,
+ "learning_rate": 1.0344344779841475e-06,
+ "loss": 0.8215,
+ "step": 4459
+ },
+ {
+ "epoch": 0.86,
+ "learning_rate": 1.0316747532958493e-06,
+ "loss": 0.756,
+ "step": 4460
+ },
+ {
+ "epoch": 0.86,
+ "learning_rate": 1.0289185145672743e-06,
+ "loss": 1.1297,
+ "step": 4461
+ },
+ {
+ "epoch": 0.86,
+ "learning_rate": 1.026165762869762e-06,
+ "loss": 0.7722,
+ "step": 4462
+ },
+ {
+ "epoch": 0.86,
+ "learning_rate": 1.0234164992732954e-06,
+ "loss": 0.8167,
+ "step": 4463
+ },
+ {
+ "epoch": 0.86,
+ "learning_rate": 1.0206707248465086e-06,
+ "loss": 1.1845,
+ "step": 4464
+ },
+ {
+ "epoch": 0.86,
+ "learning_rate": 1.0179284406566736e-06,
+ "loss": 0.7551,
+ "step": 4465
+ },
+ {
+ "epoch": 0.86,
+ "learning_rate": 1.0151896477697086e-06,
+ "loss": 0.7157,
+ "step": 4466
+ },
+ {
+ "epoch": 0.86,
+ "learning_rate": 1.0124543472501713e-06,
+ "loss": 0.6979,
+ "step": 4467
+ },
+ {
+ "epoch": 0.86,
+ "learning_rate": 1.0097225401612675e-06,
+ "loss": 0.7708,
+ "step": 4468
+ },
+ {
+ "epoch": 0.86,
+ "learning_rate": 1.0069942275648403e-06,
+ "loss": 0.7571,
+ "step": 4469
+ },
+ {
+ "epoch": 0.86,
+ "learning_rate": 1.0042694105213747e-06,
+ "loss": 0.8047,
+ "step": 4470
+ },
+ {
+ "epoch": 0.86,
+ "learning_rate": 1.0015480900900021e-06,
+ "loss": 0.8037,
+ "step": 4471
+ },
+ {
+ "epoch": 0.86,
+ "learning_rate": 9.988302673284877e-07,
+ "loss": 0.788,
+ "step": 4472
+ },
+ {
+ "epoch": 0.86,
+ "learning_rate": 9.961159432932443e-07,
+ "loss": 0.8062,
+ "step": 4473
+ },
+ {
+ "epoch": 0.86,
+ "learning_rate": 9.9340511903932e-07,
+ "loss": 0.7699,
+ "step": 4474
+ },
+ {
+ "epoch": 0.86,
+ "learning_rate": 9.906977956204e-07,
+ "loss": 0.7527,
+ "step": 4475
+ },
+ {
+ "epoch": 0.86,
+ "learning_rate": 9.879939740888155e-07,
+ "loss": 0.7756,
+ "step": 4476
+ },
+ {
+ "epoch": 0.86,
+ "learning_rate": 9.852936554955328e-07,
+ "loss": 0.7755,
+ "step": 4477
+ },
+ {
+ "epoch": 0.86,
+ "learning_rate": 9.825968408901576e-07,
+ "loss": 0.773,
+ "step": 4478
+ },
+ {
+ "epoch": 0.86,
+ "learning_rate": 9.799035313209337e-07,
+ "loss": 1.1851,
+ "step": 4479
+ },
+ {
+ "epoch": 0.86,
+ "learning_rate": 9.772137278347394e-07,
+ "loss": 0.7206,
+ "step": 4480
+ },
+ {
+ "epoch": 0.86,
+ "learning_rate": 9.745274314770968e-07,
+ "loss": 0.792,
+ "step": 4481
+ },
+ {
+ "epoch": 0.86,
+ "learning_rate": 9.71844643292159e-07,
+ "loss": 0.7739,
+ "step": 4482
+ },
+ {
+ "epoch": 0.86,
+ "learning_rate": 9.69165364322716e-07,
+ "loss": 0.7835,
+ "step": 4483
+ },
+ {
+ "epoch": 0.86,
+ "learning_rate": 9.66489595610195e-07,
+ "loss": 0.7586,
+ "step": 4484
+ },
+ {
+ "epoch": 0.86,
+ "learning_rate": 9.638173381946626e-07,
+ "loss": 0.8065,
+ "step": 4485
+ },
+ {
+ "epoch": 0.86,
+ "learning_rate": 9.611485931148123e-07,
+ "loss": 0.7804,
+ "step": 4486
+ },
+ {
+ "epoch": 0.86,
+ "learning_rate": 9.58483361407978e-07,
+ "loss": 0.8061,
+ "step": 4487
+ },
+ {
+ "epoch": 0.86,
+ "learning_rate": 9.558216441101297e-07,
+ "loss": 0.7622,
+ "step": 4488
+ },
+ {
+ "epoch": 0.86,
+ "learning_rate": 9.531634422558656e-07,
+ "loss": 0.8041,
+ "step": 4489
+ },
+ {
+ "epoch": 0.86,
+ "learning_rate": 9.505087568784199e-07,
+ "loss": 0.8144,
+ "step": 4490
+ },
+ {
+ "epoch": 0.86,
+ "learning_rate": 9.478575890096664e-07,
+ "loss": 0.7413,
+ "step": 4491
+ },
+ {
+ "epoch": 0.86,
+ "learning_rate": 9.452099396800962e-07,
+ "loss": 0.7487,
+ "step": 4492
+ },
+ {
+ "epoch": 0.86,
+ "learning_rate": 9.425658099188484e-07,
+ "loss": 0.7619,
+ "step": 4493
+ },
+ {
+ "epoch": 0.87,
+ "learning_rate": 9.399252007536863e-07,
+ "loss": 0.7861,
+ "step": 4494
+ },
+ {
+ "epoch": 0.87,
+ "learning_rate": 9.372881132110068e-07,
+ "loss": 0.7508,
+ "step": 4495
+ },
+ {
+ "epoch": 0.87,
+ "learning_rate": 9.346545483158387e-07,
+ "loss": 0.753,
+ "step": 4496
+ },
+ {
+ "epoch": 0.87,
+ "learning_rate": 9.320245070918366e-07,
+ "loss": 0.7463,
+ "step": 4497
+ },
+ {
+ "epoch": 0.87,
+ "learning_rate": 9.293979905612938e-07,
+ "loss": 0.7698,
+ "step": 4498
+ },
+ {
+ "epoch": 0.87,
+ "learning_rate": 9.267749997451259e-07,
+ "loss": 0.7616,
+ "step": 4499
+ },
+ {
+ "epoch": 0.87,
+ "learning_rate": 9.241555356628828e-07,
+ "loss": 0.7521,
+ "step": 4500
+ },
+ {
+ "epoch": 0.87,
+ "learning_rate": 9.215395993327414e-07,
+ "loss": 0.7657,
+ "step": 4501
+ },
+ {
+ "epoch": 0.87,
+ "learning_rate": 9.189271917715048e-07,
+ "loss": 0.7622,
+ "step": 4502
+ },
+ {
+ "epoch": 0.87,
+ "learning_rate": 9.16318313994613e-07,
+ "loss": 0.7621,
+ "step": 4503
+ },
+ {
+ "epoch": 0.87,
+ "learning_rate": 9.137129670161238e-07,
+ "loss": 0.6591,
+ "step": 4504
+ },
+ {
+ "epoch": 0.87,
+ "learning_rate": 9.111111518487314e-07,
+ "loss": 0.7403,
+ "step": 4505
+ },
+ {
+ "epoch": 0.87,
+ "learning_rate": 9.085128695037515e-07,
+ "loss": 0.7654,
+ "step": 4506
+ },
+ {
+ "epoch": 0.87,
+ "learning_rate": 9.059181209911272e-07,
+ "loss": 0.716,
+ "step": 4507
+ },
+ {
+ "epoch": 0.87,
+ "learning_rate": 9.033269073194318e-07,
+ "loss": 0.7541,
+ "step": 4508
+ },
+ {
+ "epoch": 0.87,
+ "learning_rate": 9.007392294958572e-07,
+ "loss": 0.8126,
+ "step": 4509
+ },
+ {
+ "epoch": 0.87,
+ "learning_rate": 8.981550885262302e-07,
+ "loss": 0.7906,
+ "step": 4510
+ },
+ {
+ "epoch": 0.87,
+ "learning_rate": 8.955744854149961e-07,
+ "loss": 0.7956,
+ "step": 4511
+ },
+ {
+ "epoch": 0.87,
+ "learning_rate": 8.92997421165227e-07,
+ "loss": 0.72,
+ "step": 4512
+ },
+ {
+ "epoch": 0.87,
+ "learning_rate": 8.904238967786205e-07,
+ "loss": 0.7832,
+ "step": 4513
+ },
+ {
+ "epoch": 0.87,
+ "learning_rate": 8.878539132554953e-07,
+ "loss": 0.7732,
+ "step": 4514
+ },
+ {
+ "epoch": 0.87,
+ "learning_rate": 8.852874715948001e-07,
+ "loss": 0.8034,
+ "step": 4515
+ },
+ {
+ "epoch": 0.87,
+ "learning_rate": 8.827245727941003e-07,
+ "loss": 0.7721,
+ "step": 4516
+ },
+ {
+ "epoch": 0.87,
+ "learning_rate": 8.801652178495857e-07,
+ "loss": 0.7653,
+ "step": 4517
+ },
+ {
+ "epoch": 0.87,
+ "learning_rate": 8.776094077560704e-07,
+ "loss": 0.7759,
+ "step": 4518
+ },
+ {
+ "epoch": 0.87,
+ "learning_rate": 8.750571435069888e-07,
+ "loss": 0.7366,
+ "step": 4519
+ },
+ {
+ "epoch": 0.87,
+ "learning_rate": 8.725084260943995e-07,
+ "loss": 0.7412,
+ "step": 4520
+ },
+ {
+ "epoch": 0.87,
+ "learning_rate": 8.699632565089811e-07,
+ "loss": 0.7048,
+ "step": 4521
+ },
+ {
+ "epoch": 0.87,
+ "learning_rate": 8.674216357400289e-07,
+ "loss": 0.7702,
+ "step": 4522
+ },
+ {
+ "epoch": 0.87,
+ "learning_rate": 8.648835647754694e-07,
+ "loss": 0.7517,
+ "step": 4523
+ },
+ {
+ "epoch": 0.87,
+ "learning_rate": 8.623490446018379e-07,
+ "loss": 0.7644,
+ "step": 4524
+ },
+ {
+ "epoch": 0.87,
+ "learning_rate": 8.598180762042974e-07,
+ "loss": 0.7684,
+ "step": 4525
+ },
+ {
+ "epoch": 0.87,
+ "learning_rate": 8.572906605666231e-07,
+ "loss": 0.7492,
+ "step": 4526
+ },
+ {
+ "epoch": 0.87,
+ "learning_rate": 8.547667986712182e-07,
+ "loss": 0.7849,
+ "step": 4527
+ },
+ {
+ "epoch": 0.87,
+ "learning_rate": 8.522464914990969e-07,
+ "loss": 0.7438,
+ "step": 4528
+ },
+ {
+ "epoch": 0.87,
+ "learning_rate": 8.497297400298954e-07,
+ "loss": 0.7944,
+ "step": 4529
+ },
+ {
+ "epoch": 0.87,
+ "learning_rate": 8.472165452418679e-07,
+ "loss": 0.7652,
+ "step": 4530
+ },
+ {
+ "epoch": 0.87,
+ "learning_rate": 8.447069081118842e-07,
+ "loss": 0.7667,
+ "step": 4531
+ },
+ {
+ "epoch": 0.87,
+ "learning_rate": 8.422008296154328e-07,
+ "loss": 0.7339,
+ "step": 4532
+ },
+ {
+ "epoch": 0.87,
+ "learning_rate": 8.396983107266199e-07,
+ "loss": 0.8204,
+ "step": 4533
+ },
+ {
+ "epoch": 0.87,
+ "learning_rate": 8.371993524181643e-07,
+ "loss": 0.7517,
+ "step": 4534
+ },
+ {
+ "epoch": 0.87,
+ "learning_rate": 8.347039556614045e-07,
+ "loss": 0.8065,
+ "step": 4535
+ },
+ {
+ "epoch": 0.87,
+ "learning_rate": 8.322121214262924e-07,
+ "loss": 0.7672,
+ "step": 4536
+ },
+ {
+ "epoch": 0.87,
+ "learning_rate": 8.297238506813965e-07,
+ "loss": 0.7056,
+ "step": 4537
+ },
+ {
+ "epoch": 0.87,
+ "learning_rate": 8.272391443939021e-07,
+ "loss": 0.772,
+ "step": 4538
+ },
+ {
+ "epoch": 0.87,
+ "learning_rate": 8.24758003529601e-07,
+ "loss": 0.7746,
+ "step": 4539
+ },
+ {
+ "epoch": 0.87,
+ "learning_rate": 8.222804290529108e-07,
+ "loss": 0.7414,
+ "step": 4540
+ },
+ {
+ "epoch": 0.87,
+ "learning_rate": 8.198064219268553e-07,
+ "loss": 0.7508,
+ "step": 4541
+ },
+ {
+ "epoch": 0.87,
+ "learning_rate": 8.173359831130711e-07,
+ "loss": 0.7978,
+ "step": 4542
+ },
+ {
+ "epoch": 0.87,
+ "learning_rate": 8.148691135718112e-07,
+ "loss": 0.6886,
+ "step": 4543
+ },
+ {
+ "epoch": 0.87,
+ "learning_rate": 8.124058142619407e-07,
+ "loss": 0.7473,
+ "step": 4544
+ },
+ {
+ "epoch": 0.87,
+ "learning_rate": 8.099460861409359e-07,
+ "loss": 1.1156,
+ "step": 4545
+ },
+ {
+ "epoch": 0.88,
+ "learning_rate": 8.074899301648842e-07,
+ "loss": 0.8139,
+ "step": 4546
+ },
+ {
+ "epoch": 0.88,
+ "learning_rate": 8.050373472884876e-07,
+ "loss": 0.7859,
+ "step": 4547
+ },
+ {
+ "epoch": 0.88,
+ "learning_rate": 8.025883384650568e-07,
+ "loss": 0.7619,
+ "step": 4548
+ },
+ {
+ "epoch": 0.88,
+ "learning_rate": 8.001429046465104e-07,
+ "loss": 0.7518,
+ "step": 4549
+ },
+ {
+ "epoch": 0.88,
+ "learning_rate": 7.97701046783389e-07,
+ "loss": 0.7918,
+ "step": 4550
+ },
+ {
+ "epoch": 0.88,
+ "learning_rate": 7.952627658248257e-07,
+ "loss": 0.7681,
+ "step": 4551
+ },
+ {
+ "epoch": 0.88,
+ "learning_rate": 7.928280627185781e-07,
+ "loss": 0.8077,
+ "step": 4552
+ },
+ {
+ "epoch": 0.88,
+ "learning_rate": 7.903969384110044e-07,
+ "loss": 0.7835,
+ "step": 4553
+ },
+ {
+ "epoch": 0.88,
+ "learning_rate": 7.87969393847079e-07,
+ "loss": 0.773,
+ "step": 4554
+ },
+ {
+ "epoch": 0.88,
+ "learning_rate": 7.85545429970379e-07,
+ "loss": 0.7996,
+ "step": 4555
+ },
+ {
+ "epoch": 0.88,
+ "learning_rate": 7.831250477230889e-07,
+ "loss": 0.8026,
+ "step": 4556
+ },
+ {
+ "epoch": 0.88,
+ "learning_rate": 7.807082480460082e-07,
+ "loss": 0.7977,
+ "step": 4557
+ },
+ {
+ "epoch": 0.88,
+ "learning_rate": 7.782950318785365e-07,
+ "loss": 0.797,
+ "step": 4558
+ },
+ {
+ "epoch": 0.88,
+ "learning_rate": 7.758854001586835e-07,
+ "loss": 0.794,
+ "step": 4559
+ },
+ {
+ "epoch": 0.88,
+ "learning_rate": 7.734793538230656e-07,
+ "loss": 0.7694,
+ "step": 4560
+ },
+ {
+ "epoch": 0.88,
+ "learning_rate": 7.710768938069046e-07,
+ "loss": 0.7377,
+ "step": 4561
+ },
+ {
+ "epoch": 0.88,
+ "learning_rate": 7.686780210440315e-07,
+ "loss": 0.8234,
+ "step": 4562
+ },
+ {
+ "epoch": 0.88,
+ "learning_rate": 7.662827364668768e-07,
+ "loss": 0.7606,
+ "step": 4563
+ },
+ {
+ "epoch": 0.88,
+ "learning_rate": 7.638910410064848e-07,
+ "loss": 0.7332,
+ "step": 4564
+ },
+ {
+ "epoch": 0.88,
+ "learning_rate": 7.615029355924974e-07,
+ "loss": 0.7446,
+ "step": 4565
+ },
+ {
+ "epoch": 0.88,
+ "learning_rate": 7.591184211531622e-07,
+ "loss": 0.796,
+ "step": 4566
+ },
+ {
+ "epoch": 0.88,
+ "learning_rate": 7.567374986153375e-07,
+ "loss": 0.7554,
+ "step": 4567
+ },
+ {
+ "epoch": 0.88,
+ "learning_rate": 7.543601689044733e-07,
+ "loss": 0.6957,
+ "step": 4568
+ },
+ {
+ "epoch": 0.88,
+ "learning_rate": 7.519864329446358e-07,
+ "loss": 0.7727,
+ "step": 4569
+ },
+ {
+ "epoch": 0.88,
+ "learning_rate": 7.49616291658486e-07,
+ "loss": 0.7071,
+ "step": 4570
+ },
+ {
+ "epoch": 0.88,
+ "learning_rate": 7.472497459672901e-07,
+ "loss": 0.8275,
+ "step": 4571
+ },
+ {
+ "epoch": 0.88,
+ "learning_rate": 7.44886796790919e-07,
+ "loss": 0.8304,
+ "step": 4572
+ },
+ {
+ "epoch": 0.88,
+ "learning_rate": 7.425274450478415e-07,
+ "loss": 1.1019,
+ "step": 4573
+ },
+ {
+ "epoch": 0.88,
+ "learning_rate": 7.401716916551316e-07,
+ "loss": 0.7257,
+ "step": 4574
+ },
+ {
+ "epoch": 0.88,
+ "learning_rate": 7.378195375284636e-07,
+ "loss": 0.6697,
+ "step": 4575
+ },
+ {
+ "epoch": 0.88,
+ "learning_rate": 7.354709835821116e-07,
+ "loss": 0.8061,
+ "step": 4576
+ },
+ {
+ "epoch": 0.88,
+ "learning_rate": 7.331260307289501e-07,
+ "loss": 0.7684,
+ "step": 4577
+ },
+ {
+ "epoch": 0.88,
+ "learning_rate": 7.307846798804552e-07,
+ "loss": 0.7804,
+ "step": 4578
+ },
+ {
+ "epoch": 0.88,
+ "learning_rate": 7.284469319467036e-07,
+ "loss": 0.7711,
+ "step": 4579
+ },
+ {
+ "epoch": 0.88,
+ "learning_rate": 7.261127878363694e-07,
+ "loss": 0.7838,
+ "step": 4580
+ },
+ {
+ "epoch": 0.88,
+ "learning_rate": 7.237822484567292e-07,
+ "loss": 1.1084,
+ "step": 4581
+ },
+ {
+ "epoch": 0.88,
+ "learning_rate": 7.214553147136549e-07,
+ "loss": 0.7475,
+ "step": 4582
+ },
+ {
+ "epoch": 0.88,
+ "learning_rate": 7.191319875116165e-07,
+ "loss": 0.7546,
+ "step": 4583
+ },
+ {
+ "epoch": 0.88,
+ "learning_rate": 7.168122677536893e-07,
+ "loss": 1.0881,
+ "step": 4584
+ },
+ {
+ "epoch": 0.88,
+ "learning_rate": 7.144961563415342e-07,
+ "loss": 0.7993,
+ "step": 4585
+ },
+ {
+ "epoch": 0.88,
+ "learning_rate": 7.121836541754212e-07,
+ "loss": 0.7406,
+ "step": 4586
+ },
+ {
+ "epoch": 0.88,
+ "learning_rate": 7.098747621542102e-07,
+ "loss": 1.2514,
+ "step": 4587
+ },
+ {
+ "epoch": 0.88,
+ "learning_rate": 7.075694811753609e-07,
+ "loss": 0.7607,
+ "step": 4588
+ },
+ {
+ "epoch": 0.88,
+ "learning_rate": 7.052678121349299e-07,
+ "loss": 0.7363,
+ "step": 4589
+ },
+ {
+ "epoch": 0.88,
+ "learning_rate": 7.029697559275662e-07,
+ "loss": 0.7613,
+ "step": 4590
+ },
+ {
+ "epoch": 0.88,
+ "learning_rate": 7.006753134465194e-07,
+ "loss": 0.8137,
+ "step": 4591
+ },
+ {
+ "epoch": 0.88,
+ "learning_rate": 6.98384485583632e-07,
+ "loss": 0.7246,
+ "step": 4592
+ },
+ {
+ "epoch": 0.88,
+ "learning_rate": 6.960972732293414e-07,
+ "loss": 0.7559,
+ "step": 4593
+ },
+ {
+ "epoch": 0.88,
+ "learning_rate": 6.938136772726778e-07,
+ "loss": 0.75,
+ "step": 4594
+ },
+ {
+ "epoch": 0.88,
+ "learning_rate": 6.915336986012677e-07,
+ "loss": 0.7862,
+ "step": 4595
+ },
+ {
+ "epoch": 0.88,
+ "learning_rate": 6.892573381013357e-07,
+ "loss": 0.6902,
+ "step": 4596
+ },
+ {
+ "epoch": 0.88,
+ "learning_rate": 6.869845966576927e-07,
+ "loss": 0.6926,
+ "step": 4597
+ },
+ {
+ "epoch": 0.89,
+ "learning_rate": 6.847154751537443e-07,
+ "loss": 0.7889,
+ "step": 4598
+ },
+ {
+ "epoch": 0.89,
+ "learning_rate": 6.824499744714952e-07,
+ "loss": 0.7974,
+ "step": 4599
+ },
+ {
+ "epoch": 0.89,
+ "learning_rate": 6.801880954915374e-07,
+ "loss": 0.7645,
+ "step": 4600
+ },
+ {
+ "epoch": 0.89,
+ "learning_rate": 6.779298390930544e-07,
+ "loss": 0.7612,
+ "step": 4601
+ },
+ {
+ "epoch": 0.89,
+ "learning_rate": 6.75675206153823e-07,
+ "loss": 1.1987,
+ "step": 4602
+ },
+ {
+ "epoch": 0.89,
+ "learning_rate": 6.734241975502143e-07,
+ "loss": 0.7571,
+ "step": 4603
+ },
+ {
+ "epoch": 0.89,
+ "learning_rate": 6.711768141571861e-07,
+ "loss": 0.7602,
+ "step": 4604
+ },
+ {
+ "epoch": 0.89,
+ "learning_rate": 6.689330568482888e-07,
+ "loss": 0.7528,
+ "step": 4605
+ },
+ {
+ "epoch": 0.89,
+ "learning_rate": 6.666929264956679e-07,
+ "loss": 0.7906,
+ "step": 4606
+ },
+ {
+ "epoch": 0.89,
+ "learning_rate": 6.644564239700513e-07,
+ "loss": 0.716,
+ "step": 4607
+ },
+ {
+ "epoch": 0.89,
+ "learning_rate": 6.622235501407593e-07,
+ "loss": 0.7562,
+ "step": 4608
+ },
+ {
+ "epoch": 0.89,
+ "learning_rate": 6.599943058757085e-07,
+ "loss": 0.7796,
+ "step": 4609
+ },
+ {
+ "epoch": 0.89,
+ "learning_rate": 6.577686920413928e-07,
+ "loss": 0.7489,
+ "step": 4610
+ },
+ {
+ "epoch": 0.89,
+ "learning_rate": 6.555467095029045e-07,
+ "loss": 0.7001,
+ "step": 4611
+ },
+ {
+ "epoch": 0.89,
+ "learning_rate": 6.533283591239203e-07,
+ "loss": 0.7781,
+ "step": 4612
+ },
+ {
+ "epoch": 0.89,
+ "learning_rate": 6.511136417667075e-07,
+ "loss": 0.7276,
+ "step": 4613
+ },
+ {
+ "epoch": 0.89,
+ "learning_rate": 6.489025582921183e-07,
+ "loss": 0.7551,
+ "step": 4614
+ },
+ {
+ "epoch": 0.89,
+ "learning_rate": 6.46695109559593e-07,
+ "loss": 0.7311,
+ "step": 4615
+ },
+ {
+ "epoch": 0.89,
+ "learning_rate": 6.444912964271633e-07,
+ "loss": 0.7558,
+ "step": 4616
+ },
+ {
+ "epoch": 0.89,
+ "learning_rate": 6.422911197514414e-07,
+ "loss": 0.6831,
+ "step": 4617
+ },
+ {
+ "epoch": 0.89,
+ "learning_rate": 6.400945803876302e-07,
+ "loss": 0.8161,
+ "step": 4618
+ },
+ {
+ "epoch": 0.89,
+ "learning_rate": 6.379016791895176e-07,
+ "loss": 0.7969,
+ "step": 4619
+ },
+ {
+ "epoch": 0.89,
+ "learning_rate": 6.357124170094753e-07,
+ "loss": 0.7763,
+ "step": 4620
+ },
+ {
+ "epoch": 0.89,
+ "learning_rate": 6.335267946984658e-07,
+ "loss": 0.7893,
+ "step": 4621
+ },
+ {
+ "epoch": 0.89,
+ "learning_rate": 6.313448131060307e-07,
+ "loss": 0.7989,
+ "step": 4622
+ },
+ {
+ "epoch": 0.89,
+ "learning_rate": 6.291664730803026e-07,
+ "loss": 0.7556,
+ "step": 4623
+ },
+ {
+ "epoch": 0.89,
+ "learning_rate": 6.269917754679944e-07,
+ "loss": 0.7515,
+ "step": 4624
+ },
+ {
+ "epoch": 0.89,
+ "learning_rate": 6.248207211144008e-07,
+ "loss": 0.726,
+ "step": 4625
+ },
+ {
+ "epoch": 0.89,
+ "learning_rate": 6.226533108634102e-07,
+ "loss": 1.1801,
+ "step": 4626
+ },
+ {
+ "epoch": 0.89,
+ "learning_rate": 6.204895455574811e-07,
+ "loss": 0.7409,
+ "step": 4627
+ },
+ {
+ "epoch": 0.89,
+ "learning_rate": 6.183294260376671e-07,
+ "loss": 0.7798,
+ "step": 4628
+ },
+ {
+ "epoch": 0.89,
+ "learning_rate": 6.161729531435967e-07,
+ "loss": 1.1696,
+ "step": 4629
+ },
+ {
+ "epoch": 0.89,
+ "learning_rate": 6.140201277134872e-07,
+ "loss": 0.7746,
+ "step": 4630
+ },
+ {
+ "epoch": 0.89,
+ "learning_rate": 6.118709505841324e-07,
+ "loss": 0.7912,
+ "step": 4631
+ },
+ {
+ "epoch": 0.89,
+ "learning_rate": 6.097254225909122e-07,
+ "loss": 0.6999,
+ "step": 4632
+ },
+ {
+ "epoch": 0.89,
+ "learning_rate": 6.075835445677869e-07,
+ "loss": 0.7169,
+ "step": 4633
+ },
+ {
+ "epoch": 0.89,
+ "learning_rate": 6.054453173472962e-07,
+ "loss": 0.6762,
+ "step": 4634
+ },
+ {
+ "epoch": 0.89,
+ "learning_rate": 6.033107417605633e-07,
+ "loss": 0.7405,
+ "step": 4635
+ },
+ {
+ "epoch": 0.89,
+ "learning_rate": 6.011798186372919e-07,
+ "loss": 0.7764,
+ "step": 4636
+ },
+ {
+ "epoch": 0.89,
+ "learning_rate": 5.990525488057608e-07,
+ "loss": 0.778,
+ "step": 4637
+ },
+ {
+ "epoch": 0.89,
+ "learning_rate": 5.969289330928385e-07,
+ "loss": 0.7419,
+ "step": 4638
+ },
+ {
+ "epoch": 0.89,
+ "learning_rate": 5.948089723239636e-07,
+ "loss": 0.7824,
+ "step": 4639
+ },
+ {
+ "epoch": 0.89,
+ "learning_rate": 5.926926673231626e-07,
+ "loss": 0.706,
+ "step": 4640
+ },
+ {
+ "epoch": 0.89,
+ "learning_rate": 5.905800189130329e-07,
+ "loss": 0.8175,
+ "step": 4641
+ },
+ {
+ "epoch": 0.89,
+ "learning_rate": 5.884710279147543e-07,
+ "loss": 0.7387,
+ "step": 4642
+ },
+ {
+ "epoch": 0.89,
+ "learning_rate": 5.863656951480878e-07,
+ "loss": 0.7525,
+ "step": 4643
+ },
+ {
+ "epoch": 0.89,
+ "learning_rate": 5.842640214313667e-07,
+ "loss": 0.7324,
+ "step": 4644
+ },
+ {
+ "epoch": 0.89,
+ "learning_rate": 5.821660075815061e-07,
+ "loss": 0.7166,
+ "step": 4645
+ },
+ {
+ "epoch": 0.89,
+ "learning_rate": 5.800716544139972e-07,
+ "loss": 0.7557,
+ "step": 4646
+ },
+ {
+ "epoch": 0.89,
+ "learning_rate": 5.779809627429078e-07,
+ "loss": 0.7651,
+ "step": 4647
+ },
+ {
+ "epoch": 0.89,
+ "learning_rate": 5.758939333808855e-07,
+ "loss": 0.6884,
+ "step": 4648
+ },
+ {
+ "epoch": 0.89,
+ "learning_rate": 5.73810567139148e-07,
+ "loss": 0.7441,
+ "step": 4649
+ },
+ {
+ "epoch": 0.9,
+ "learning_rate": 5.717308648274977e-07,
+ "loss": 0.8059,
+ "step": 4650
+ },
+ {
+ "epoch": 0.9,
+ "learning_rate": 5.696548272543068e-07,
+ "loss": 0.7809,
+ "step": 4651
+ },
+ {
+ "epoch": 0.9,
+ "learning_rate": 5.67582455226523e-07,
+ "loss": 0.7389,
+ "step": 4652
+ },
+ {
+ "epoch": 0.9,
+ "learning_rate": 5.655137495496721e-07,
+ "loss": 1.1157,
+ "step": 4653
+ },
+ {
+ "epoch": 0.9,
+ "learning_rate": 5.634487110278519e-07,
+ "loss": 0.7814,
+ "step": 4654
+ },
+ {
+ "epoch": 0.9,
+ "learning_rate": 5.613873404637382e-07,
+ "loss": 0.6713,
+ "step": 4655
+ },
+ {
+ "epoch": 0.9,
+ "learning_rate": 5.5932963865858e-07,
+ "loss": 0.751,
+ "step": 4656
+ },
+ {
+ "epoch": 0.9,
+ "learning_rate": 5.572756064121953e-07,
+ "loss": 0.7541,
+ "step": 4657
+ },
+ {
+ "epoch": 0.9,
+ "learning_rate": 5.552252445229855e-07,
+ "loss": 0.794,
+ "step": 4658
+ },
+ {
+ "epoch": 0.9,
+ "learning_rate": 5.531785537879141e-07,
+ "loss": 0.7252,
+ "step": 4659
+ },
+ {
+ "epoch": 0.9,
+ "learning_rate": 5.511355350025305e-07,
+ "loss": 0.8045,
+ "step": 4660
+ },
+ {
+ "epoch": 0.9,
+ "learning_rate": 5.490961889609425e-07,
+ "loss": 0.7728,
+ "step": 4661
+ },
+ {
+ "epoch": 0.9,
+ "learning_rate": 5.470605164558407e-07,
+ "loss": 0.7674,
+ "step": 4662
+ },
+ {
+ "epoch": 0.9,
+ "learning_rate": 5.450285182784853e-07,
+ "loss": 0.788,
+ "step": 4663
+ },
+ {
+ "epoch": 0.9,
+ "learning_rate": 5.430001952187048e-07,
+ "loss": 0.792,
+ "step": 4664
+ },
+ {
+ "epoch": 0.9,
+ "learning_rate": 5.409755480649059e-07,
+ "loss": 0.811,
+ "step": 4665
+ },
+ {
+ "epoch": 0.9,
+ "learning_rate": 5.389545776040583e-07,
+ "loss": 0.7225,
+ "step": 4666
+ },
+ {
+ "epoch": 0.9,
+ "learning_rate": 5.369372846217103e-07,
+ "loss": 0.762,
+ "step": 4667
+ },
+ {
+ "epoch": 0.9,
+ "learning_rate": 5.349236699019766e-07,
+ "loss": 0.8235,
+ "step": 4668
+ },
+ {
+ "epoch": 0.9,
+ "learning_rate": 5.329137342275403e-07,
+ "loss": 0.7511,
+ "step": 4669
+ },
+ {
+ "epoch": 0.9,
+ "learning_rate": 5.309074783796608e-07,
+ "loss": 0.7654,
+ "step": 4670
+ },
+ {
+ "epoch": 0.9,
+ "learning_rate": 5.289049031381599e-07,
+ "loss": 0.7704,
+ "step": 4671
+ },
+ {
+ "epoch": 0.9,
+ "learning_rate": 5.269060092814348e-07,
+ "loss": 0.7361,
+ "step": 4672
+ },
+ {
+ "epoch": 0.9,
+ "learning_rate": 5.249107975864487e-07,
+ "loss": 0.7309,
+ "step": 4673
+ },
+ {
+ "epoch": 0.9,
+ "learning_rate": 5.229192688287332e-07,
+ "loss": 0.7879,
+ "step": 4674
+ },
+ {
+ "epoch": 0.9,
+ "learning_rate": 5.209314237823904e-07,
+ "loss": 0.7898,
+ "step": 4675
+ },
+ {
+ "epoch": 0.9,
+ "learning_rate": 5.189472632200898e-07,
+ "loss": 0.7388,
+ "step": 4676
+ },
+ {
+ "epoch": 0.9,
+ "learning_rate": 5.169667879130657e-07,
+ "loss": 0.8108,
+ "step": 4677
+ },
+ {
+ "epoch": 0.9,
+ "learning_rate": 5.149899986311246e-07,
+ "loss": 0.684,
+ "step": 4678
+ },
+ {
+ "epoch": 0.9,
+ "learning_rate": 5.130168961426385e-07,
+ "loss": 0.7448,
+ "step": 4679
+ },
+ {
+ "epoch": 0.9,
+ "learning_rate": 5.110474812145461e-07,
+ "loss": 1.1064,
+ "step": 4680
+ },
+ {
+ "epoch": 0.9,
+ "learning_rate": 5.090817546123505e-07,
+ "loss": 0.81,
+ "step": 4681
+ },
+ {
+ "epoch": 0.9,
+ "learning_rate": 5.071197171001263e-07,
+ "loss": 0.779,
+ "step": 4682
+ },
+ {
+ "epoch": 0.9,
+ "learning_rate": 5.051613694405111e-07,
+ "loss": 0.7706,
+ "step": 4683
+ },
+ {
+ "epoch": 0.9,
+ "learning_rate": 5.032067123947049e-07,
+ "loss": 0.8051,
+ "step": 4684
+ },
+ {
+ "epoch": 0.9,
+ "learning_rate": 5.012557467224833e-07,
+ "loss": 0.7972,
+ "step": 4685
+ },
+ {
+ "epoch": 0.9,
+ "learning_rate": 4.993084731821729e-07,
+ "loss": 0.7211,
+ "step": 4686
+ },
+ {
+ "epoch": 0.9,
+ "learning_rate": 4.973648925306773e-07,
+ "loss": 0.6975,
+ "step": 4687
+ },
+ {
+ "epoch": 0.9,
+ "learning_rate": 4.954250055234589e-07,
+ "loss": 0.7778,
+ "step": 4688
+ },
+ {
+ "epoch": 0.9,
+ "learning_rate": 4.934888129145476e-07,
+ "loss": 0.7934,
+ "step": 4689
+ },
+ {
+ "epoch": 0.9,
+ "learning_rate": 4.915563154565328e-07,
+ "loss": 0.7524,
+ "step": 4690
+ },
+ {
+ "epoch": 0.9,
+ "learning_rate": 4.896275139005702e-07,
+ "loss": 0.7969,
+ "step": 4691
+ },
+ {
+ "epoch": 0.9,
+ "learning_rate": 4.877024089963822e-07,
+ "loss": 0.7842,
+ "step": 4692
+ },
+ {
+ "epoch": 0.9,
+ "learning_rate": 4.85781001492247e-07,
+ "loss": 0.7524,
+ "step": 4693
+ },
+ {
+ "epoch": 0.9,
+ "learning_rate": 4.838632921350128e-07,
+ "loss": 0.8081,
+ "step": 4694
+ },
+ {
+ "epoch": 0.9,
+ "learning_rate": 4.819492816700844e-07,
+ "loss": 0.7782,
+ "step": 4695
+ },
+ {
+ "epoch": 0.9,
+ "learning_rate": 4.800389708414321e-07,
+ "loss": 0.7572,
+ "step": 4696
+ },
+ {
+ "epoch": 0.9,
+ "learning_rate": 4.7813236039159e-07,
+ "loss": 0.7401,
+ "step": 4697
+ },
+ {
+ "epoch": 0.9,
+ "learning_rate": 4.762294510616483e-07,
+ "loss": 0.7527,
+ "step": 4698
+ },
+ {
+ "epoch": 0.9,
+ "learning_rate": 4.7433024359126443e-07,
+ "loss": 0.8097,
+ "step": 4699
+ },
+ {
+ "epoch": 0.9,
+ "learning_rate": 4.7243473871865455e-07,
+ "loss": 0.7532,
+ "step": 4700
+ },
+ {
+ "epoch": 0.9,
+ "learning_rate": 4.705429371805925e-07,
+ "loss": 0.765,
+ "step": 4701
+ },
+ {
+ "epoch": 0.91,
+ "learning_rate": 4.6865483971242023e-07,
+ "loss": 0.7283,
+ "step": 4702
+ },
+ {
+ "epoch": 0.91,
+ "learning_rate": 4.6677044704802944e-07,
+ "loss": 0.7707,
+ "step": 4703
+ },
+ {
+ "epoch": 0.91,
+ "learning_rate": 4.6488975991988207e-07,
+ "loss": 0.7193,
+ "step": 4704
+ },
+ {
+ "epoch": 0.91,
+ "learning_rate": 4.6301277905899445e-07,
+ "loss": 0.7822,
+ "step": 4705
+ },
+ {
+ "epoch": 0.91,
+ "learning_rate": 4.611395051949408e-07,
+ "loss": 1.1104,
+ "step": 4706
+ },
+ {
+ "epoch": 0.91,
+ "learning_rate": 4.592699390558597e-07,
+ "loss": 0.7954,
+ "step": 4707
+ },
+ {
+ "epoch": 0.91,
+ "learning_rate": 4.574040813684433e-07,
+ "loss": 0.7769,
+ "step": 4708
+ },
+ {
+ "epoch": 0.91,
+ "learning_rate": 4.555419328579469e-07,
+ "loss": 0.7937,
+ "step": 4709
+ },
+ {
+ "epoch": 0.91,
+ "learning_rate": 4.536834942481805e-07,
+ "loss": 0.7365,
+ "step": 4710
+ },
+ {
+ "epoch": 0.91,
+ "learning_rate": 4.5182876626151286e-07,
+ "loss": 0.7334,
+ "step": 4711
+ },
+ {
+ "epoch": 0.91,
+ "learning_rate": 4.4997774961887176e-07,
+ "loss": 0.7395,
+ "step": 4712
+ },
+ {
+ "epoch": 0.91,
+ "learning_rate": 4.4813044503973945e-07,
+ "loss": 0.7637,
+ "step": 4713
+ },
+ {
+ "epoch": 0.91,
+ "learning_rate": 4.462868532421616e-07,
+ "loss": 0.7508,
+ "step": 4714
+ },
+ {
+ "epoch": 0.91,
+ "learning_rate": 4.444469749427316e-07,
+ "loss": 0.6475,
+ "step": 4715
+ },
+ {
+ "epoch": 0.91,
+ "learning_rate": 4.426108108566085e-07,
+ "loss": 0.7528,
+ "step": 4716
+ },
+ {
+ "epoch": 0.91,
+ "learning_rate": 4.4077836169750145e-07,
+ "loss": 0.7892,
+ "step": 4717
+ },
+ {
+ "epoch": 0.91,
+ "learning_rate": 4.3894962817767614e-07,
+ "loss": 0.7653,
+ "step": 4718
+ },
+ {
+ "epoch": 0.91,
+ "learning_rate": 4.3712461100796077e-07,
+ "loss": 0.8256,
+ "step": 4719
+ },
+ {
+ "epoch": 0.91,
+ "learning_rate": 4.353033108977278e-07,
+ "loss": 0.7667,
+ "step": 4720
+ },
+ {
+ "epoch": 0.91,
+ "learning_rate": 4.334857285549143e-07,
+ "loss": 0.7907,
+ "step": 4721
+ },
+ {
+ "epoch": 0.91,
+ "learning_rate": 4.316718646860074e-07,
+ "loss": 0.761,
+ "step": 4722
+ },
+ {
+ "epoch": 0.91,
+ "learning_rate": 4.2986171999605087e-07,
+ "loss": 1.1828,
+ "step": 4723
+ },
+ {
+ "epoch": 0.91,
+ "learning_rate": 4.280552951886419e-07,
+ "loss": 0.7728,
+ "step": 4724
+ },
+ {
+ "epoch": 0.91,
+ "learning_rate": 4.26252590965931e-07,
+ "loss": 0.7723,
+ "step": 4725
+ },
+ {
+ "epoch": 0.91,
+ "learning_rate": 4.2445360802862657e-07,
+ "loss": 0.7773,
+ "step": 4726
+ },
+ {
+ "epoch": 0.91,
+ "learning_rate": 4.22658347075986e-07,
+ "loss": 0.7896,
+ "step": 4727
+ },
+ {
+ "epoch": 0.91,
+ "learning_rate": 4.208668088058221e-07,
+ "loss": 0.7951,
+ "step": 4728
+ },
+ {
+ "epoch": 0.91,
+ "learning_rate": 4.190789939144979e-07,
+ "loss": 0.7517,
+ "step": 4729
+ },
+ {
+ "epoch": 0.91,
+ "learning_rate": 4.172949030969331e-07,
+ "loss": 0.8029,
+ "step": 4730
+ },
+ {
+ "epoch": 0.91,
+ "learning_rate": 4.155145370465985e-07,
+ "loss": 1.0775,
+ "step": 4731
+ },
+ {
+ "epoch": 0.91,
+ "learning_rate": 4.137378964555172e-07,
+ "loss": 0.7684,
+ "step": 4732
+ },
+ {
+ "epoch": 0.91,
+ "learning_rate": 4.119649820142613e-07,
+ "loss": 0.7873,
+ "step": 4733
+ },
+ {
+ "epoch": 0.91,
+ "learning_rate": 4.1019579441195945e-07,
+ "loss": 0.7405,
+ "step": 4734
+ },
+ {
+ "epoch": 0.91,
+ "learning_rate": 4.0843033433628945e-07,
+ "loss": 0.7343,
+ "step": 4735
+ },
+ {
+ "epoch": 0.91,
+ "learning_rate": 4.0666860247347785e-07,
+ "loss": 0.7396,
+ "step": 4736
+ },
+ {
+ "epoch": 0.91,
+ "learning_rate": 4.049105995083058e-07,
+ "loss": 0.7615,
+ "step": 4737
+ },
+ {
+ "epoch": 0.91,
+ "learning_rate": 4.031563261241034e-07,
+ "loss": 0.7863,
+ "step": 4738
+ },
+ {
+ "epoch": 0.91,
+ "learning_rate": 4.014057830027518e-07,
+ "loss": 0.7238,
+ "step": 4739
+ },
+ {
+ "epoch": 0.91,
+ "learning_rate": 3.99658970824679e-07,
+ "loss": 0.7597,
+ "step": 4740
+ },
+ {
+ "epoch": 0.91,
+ "learning_rate": 3.9791589026886845e-07,
+ "loss": 0.776,
+ "step": 4741
+ },
+ {
+ "epoch": 0.91,
+ "learning_rate": 3.961765420128494e-07,
+ "loss": 0.7111,
+ "step": 4742
+ },
+ {
+ "epoch": 0.91,
+ "learning_rate": 3.9444092673269875e-07,
+ "loss": 0.7524,
+ "step": 4743
+ },
+ {
+ "epoch": 0.91,
+ "learning_rate": 3.927090451030491e-07,
+ "loss": 0.7378,
+ "step": 4744
+ },
+ {
+ "epoch": 0.91,
+ "learning_rate": 3.909808977970719e-07,
+ "loss": 0.7127,
+ "step": 4745
+ },
+ {
+ "epoch": 0.91,
+ "learning_rate": 3.8925648548649664e-07,
+ "loss": 0.7414,
+ "step": 4746
+ },
+ {
+ "epoch": 0.91,
+ "learning_rate": 3.875358088415937e-07,
+ "loss": 0.6555,
+ "step": 4747
+ },
+ {
+ "epoch": 0.91,
+ "learning_rate": 3.858188685311892e-07,
+ "loss": 0.7733,
+ "step": 4748
+ },
+ {
+ "epoch": 0.91,
+ "learning_rate": 3.8410566522264824e-07,
+ "loss": 0.7587,
+ "step": 4749
+ },
+ {
+ "epoch": 0.91,
+ "learning_rate": 3.823961995818892e-07,
+ "loss": 0.751,
+ "step": 4750
+ },
+ {
+ "epoch": 0.91,
+ "learning_rate": 3.806904722733762e-07,
+ "loss": 0.7781,
+ "step": 4751
+ },
+ {
+ "epoch": 0.91,
+ "learning_rate": 3.78988483960121e-07,
+ "loss": 0.7999,
+ "step": 4752
+ },
+ {
+ "epoch": 0.91,
+ "learning_rate": 3.772902353036789e-07,
+ "loss": 0.7527,
+ "step": 4753
+ },
+ {
+ "epoch": 0.92,
+ "learning_rate": 3.7559572696415416e-07,
+ "loss": 0.7361,
+ "step": 4754
+ },
+ {
+ "epoch": 0.92,
+ "learning_rate": 3.7390495960019645e-07,
+ "loss": 0.7853,
+ "step": 4755
+ },
+ {
+ "epoch": 0.92,
+ "learning_rate": 3.722179338690046e-07,
+ "loss": 0.7431,
+ "step": 4756
+ },
+ {
+ "epoch": 0.92,
+ "learning_rate": 3.705346504263163e-07,
+ "loss": 0.8205,
+ "step": 4757
+ },
+ {
+ "epoch": 0.92,
+ "learning_rate": 3.688551099264226e-07,
+ "loss": 0.7805,
+ "step": 4758
+ },
+ {
+ "epoch": 0.92,
+ "learning_rate": 3.671793130221524e-07,
+ "loss": 0.7659,
+ "step": 4759
+ },
+ {
+ "epoch": 0.92,
+ "learning_rate": 3.6550726036488237e-07,
+ "loss": 0.7716,
+ "step": 4760
+ },
+ {
+ "epoch": 0.92,
+ "learning_rate": 3.638389526045383e-07,
+ "loss": 0.7377,
+ "step": 4761
+ },
+ {
+ "epoch": 0.92,
+ "learning_rate": 3.621743903895814e-07,
+ "loss": 0.8091,
+ "step": 4762
+ },
+ {
+ "epoch": 0.92,
+ "learning_rate": 3.6051357436702407e-07,
+ "loss": 0.6896,
+ "step": 4763
+ },
+ {
+ "epoch": 0.92,
+ "learning_rate": 3.5885650518241887e-07,
+ "loss": 0.723,
+ "step": 4764
+ },
+ {
+ "epoch": 0.92,
+ "learning_rate": 3.57203183479865e-07,
+ "loss": 0.7756,
+ "step": 4765
+ },
+ {
+ "epoch": 0.92,
+ "learning_rate": 3.5555360990200294e-07,
+ "loss": 0.7799,
+ "step": 4766
+ },
+ {
+ "epoch": 0.92,
+ "learning_rate": 3.5390778509001523e-07,
+ "loss": 1.2161,
+ "step": 4767
+ },
+ {
+ "epoch": 0.92,
+ "learning_rate": 3.5226570968363237e-07,
+ "loss": 0.7506,
+ "step": 4768
+ },
+ {
+ "epoch": 0.92,
+ "learning_rate": 3.506273843211205e-07,
+ "loss": 0.7443,
+ "step": 4769
+ },
+ {
+ "epoch": 0.92,
+ "learning_rate": 3.489928096392925e-07,
+ "loss": 0.7975,
+ "step": 4770
+ },
+ {
+ "epoch": 0.92,
+ "learning_rate": 3.4736198627350224e-07,
+ "loss": 0.7475,
+ "step": 4771
+ },
+ {
+ "epoch": 0.92,
+ "learning_rate": 3.457349148576461e-07,
+ "loss": 0.7985,
+ "step": 4772
+ },
+ {
+ "epoch": 0.92,
+ "learning_rate": 3.4411159602416145e-07,
+ "loss": 0.7988,
+ "step": 4773
+ },
+ {
+ "epoch": 0.92,
+ "learning_rate": 3.4249203040402577e-07,
+ "loss": 0.7741,
+ "step": 4774
+ },
+ {
+ "epoch": 0.92,
+ "learning_rate": 3.4087621862676114e-07,
+ "loss": 0.7619,
+ "step": 4775
+ },
+ {
+ "epoch": 0.92,
+ "learning_rate": 3.392641613204295e-07,
+ "loss": 0.7687,
+ "step": 4776
+ },
+ {
+ "epoch": 0.92,
+ "learning_rate": 3.3765585911162744e-07,
+ "loss": 0.7857,
+ "step": 4777
+ },
+ {
+ "epoch": 0.92,
+ "learning_rate": 3.3605131262550386e-07,
+ "loss": 0.7377,
+ "step": 4778
+ },
+ {
+ "epoch": 0.92,
+ "learning_rate": 3.344505224857353e-07,
+ "loss": 0.7998,
+ "step": 4779
+ },
+ {
+ "epoch": 0.92,
+ "learning_rate": 3.328534893145463e-07,
+ "loss": 0.7669,
+ "step": 4780
+ },
+ {
+ "epoch": 0.92,
+ "learning_rate": 3.312602137326981e-07,
+ "loss": 0.7633,
+ "step": 4781
+ },
+ {
+ "epoch": 0.92,
+ "learning_rate": 3.296706963594909e-07,
+ "loss": 0.6904,
+ "step": 4782
+ },
+ {
+ "epoch": 0.92,
+ "learning_rate": 3.2808493781276705e-07,
+ "loss": 0.8073,
+ "step": 4783
+ },
+ {
+ "epoch": 0.92,
+ "learning_rate": 3.265029387089047e-07,
+ "loss": 0.7899,
+ "step": 4784
+ },
+ {
+ "epoch": 0.92,
+ "learning_rate": 3.2492469966282305e-07,
+ "loss": 0.7862,
+ "step": 4785
+ },
+ {
+ "epoch": 0.92,
+ "learning_rate": 3.233502212879769e-07,
+ "loss": 0.8562,
+ "step": 4786
+ },
+ {
+ "epoch": 0.92,
+ "learning_rate": 3.2177950419636226e-07,
+ "loss": 0.7456,
+ "step": 4787
+ },
+ {
+ "epoch": 0.92,
+ "learning_rate": 3.2021254899851194e-07,
+ "loss": 0.7469,
+ "step": 4788
+ },
+ {
+ "epoch": 0.92,
+ "learning_rate": 3.1864935630349424e-07,
+ "loss": 0.7605,
+ "step": 4789
+ },
+ {
+ "epoch": 0.92,
+ "learning_rate": 3.170899267189209e-07,
+ "loss": 0.7804,
+ "step": 4790
+ },
+ {
+ "epoch": 0.92,
+ "learning_rate": 3.1553426085093487e-07,
+ "loss": 0.7048,
+ "step": 4791
+ },
+ {
+ "epoch": 0.92,
+ "learning_rate": 3.139823593042191e-07,
+ "loss": 0.8102,
+ "step": 4792
+ },
+ {
+ "epoch": 0.92,
+ "learning_rate": 3.124342226819932e-07,
+ "loss": 0.7944,
+ "step": 4793
+ },
+ {
+ "epoch": 0.92,
+ "learning_rate": 3.108898515860115e-07,
+ "loss": 0.8147,
+ "step": 4794
+ },
+ {
+ "epoch": 0.92,
+ "learning_rate": 3.093492466165693e-07,
+ "loss": 0.759,
+ "step": 4795
+ },
+ {
+ "epoch": 0.92,
+ "learning_rate": 3.0781240837249095e-07,
+ "loss": 0.7334,
+ "step": 4796
+ },
+ {
+ "epoch": 0.92,
+ "learning_rate": 3.06279337451143e-07,
+ "loss": 0.7624,
+ "step": 4797
+ },
+ {
+ "epoch": 0.92,
+ "learning_rate": 3.0475003444842553e-07,
+ "loss": 0.7868,
+ "step": 4798
+ },
+ {
+ "epoch": 0.92,
+ "learning_rate": 3.032244999587719e-07,
+ "loss": 0.759,
+ "step": 4799
+ },
+ {
+ "epoch": 0.92,
+ "learning_rate": 3.017027345751555e-07,
+ "loss": 0.7866,
+ "step": 4800
+ },
+ {
+ "epoch": 0.92,
+ "learning_rate": 3.0018473888907994e-07,
+ "loss": 0.7725,
+ "step": 4801
+ },
+ {
+ "epoch": 0.92,
+ "learning_rate": 2.986705134905832e-07,
+ "loss": 1.1021,
+ "step": 4802
+ },
+ {
+ "epoch": 0.92,
+ "learning_rate": 2.9716005896824553e-07,
+ "loss": 0.7627,
+ "step": 4803
+ },
+ {
+ "epoch": 0.92,
+ "learning_rate": 2.9565337590916953e-07,
+ "loss": 0.7658,
+ "step": 4804
+ },
+ {
+ "epoch": 0.92,
+ "learning_rate": 2.9415046489900235e-07,
+ "loss": 0.7763,
+ "step": 4805
+ },
+ {
+ "epoch": 0.93,
+ "learning_rate": 2.9265132652191773e-07,
+ "loss": 0.7744,
+ "step": 4806
+ },
+ {
+ "epoch": 0.93,
+ "learning_rate": 2.911559613606296e-07,
+ "loss": 0.7171,
+ "step": 4807
+ },
+ {
+ "epoch": 0.93,
+ "learning_rate": 2.896643699963797e-07,
+ "loss": 0.7456,
+ "step": 4808
+ },
+ {
+ "epoch": 0.93,
+ "learning_rate": 2.881765530089442e-07,
+ "loss": 0.7241,
+ "step": 4809
+ },
+ {
+ "epoch": 0.93,
+ "learning_rate": 2.8669251097663384e-07,
+ "loss": 1.151,
+ "step": 4810
+ },
+ {
+ "epoch": 0.93,
+ "learning_rate": 2.852122444762917e-07,
+ "loss": 0.6748,
+ "step": 4811
+ },
+ {
+ "epoch": 0.93,
+ "learning_rate": 2.837357540832919e-07,
+ "loss": 0.7198,
+ "step": 4812
+ },
+ {
+ "epoch": 0.93,
+ "learning_rate": 2.8226304037154206e-07,
+ "loss": 0.771,
+ "step": 4813
+ },
+ {
+ "epoch": 0.93,
+ "learning_rate": 2.8079410391347873e-07,
+ "loss": 1.1713,
+ "step": 4814
+ },
+ {
+ "epoch": 0.93,
+ "learning_rate": 2.7932894528007625e-07,
+ "loss": 0.7375,
+ "step": 4815
+ },
+ {
+ "epoch": 0.93,
+ "learning_rate": 2.7786756504083577e-07,
+ "loss": 0.7443,
+ "step": 4816
+ },
+ {
+ "epoch": 0.93,
+ "learning_rate": 2.764099637637918e-07,
+ "loss": 0.7466,
+ "step": 4817
+ },
+ {
+ "epoch": 0.93,
+ "learning_rate": 2.749561420155089e-07,
+ "loss": 0.739,
+ "step": 4818
+ },
+ {
+ "epoch": 0.93,
+ "learning_rate": 2.7350610036108285e-07,
+ "loss": 0.7393,
+ "step": 4819
+ },
+ {
+ "epoch": 0.93,
+ "learning_rate": 2.720598393641416e-07,
+ "loss": 0.7562,
+ "step": 4820
+ },
+ {
+ "epoch": 0.93,
+ "learning_rate": 2.706173595868389e-07,
+ "loss": 1.1343,
+ "step": 4821
+ },
+ {
+ "epoch": 0.93,
+ "learning_rate": 2.6917866158986617e-07,
+ "loss": 0.7523,
+ "step": 4822
+ },
+ {
+ "epoch": 0.93,
+ "learning_rate": 2.6774374593243723e-07,
+ "loss": 0.7859,
+ "step": 4823
+ },
+ {
+ "epoch": 0.93,
+ "learning_rate": 2.663126131723015e-07,
+ "loss": 0.7072,
+ "step": 4824
+ },
+ {
+ "epoch": 0.93,
+ "learning_rate": 2.648852638657362e-07,
+ "loss": 0.8096,
+ "step": 4825
+ },
+ {
+ "epoch": 0.93,
+ "learning_rate": 2.6346169856754534e-07,
+ "loss": 1.1625,
+ "step": 4826
+ },
+ {
+ "epoch": 0.93,
+ "learning_rate": 2.6204191783106516e-07,
+ "loss": 0.7901,
+ "step": 4827
+ },
+ {
+ "epoch": 0.93,
+ "learning_rate": 2.6062592220816195e-07,
+ "loss": 0.7369,
+ "step": 4828
+ },
+ {
+ "epoch": 0.93,
+ "learning_rate": 2.5921371224922554e-07,
+ "loss": 0.7217,
+ "step": 4829
+ },
+ {
+ "epoch": 0.93,
+ "learning_rate": 2.57805288503179e-07,
+ "loss": 0.7689,
+ "step": 4830
+ },
+ {
+ "epoch": 0.93,
+ "learning_rate": 2.564006515174722e-07,
+ "loss": 0.7791,
+ "step": 4831
+ },
+ {
+ "epoch": 0.93,
+ "learning_rate": 2.5499980183808284e-07,
+ "loss": 0.7749,
+ "step": 4832
+ },
+ {
+ "epoch": 0.93,
+ "learning_rate": 2.5360274000951645e-07,
+ "loss": 0.7819,
+ "step": 4833
+ },
+ {
+ "epoch": 0.93,
+ "learning_rate": 2.522094665748087e-07,
+ "loss": 1.1702,
+ "step": 4834
+ },
+ {
+ "epoch": 0.93,
+ "learning_rate": 2.508199820755186e-07,
+ "loss": 0.7431,
+ "step": 4835
+ },
+ {
+ "epoch": 0.93,
+ "learning_rate": 2.49434287051733e-07,
+ "loss": 0.7351,
+ "step": 4836
+ },
+ {
+ "epoch": 0.93,
+ "learning_rate": 2.4805238204207103e-07,
+ "loss": 0.7562,
+ "step": 4837
+ },
+ {
+ "epoch": 0.93,
+ "learning_rate": 2.4667426758367197e-07,
+ "loss": 0.7673,
+ "step": 4838
+ },
+ {
+ "epoch": 0.93,
+ "learning_rate": 2.4529994421220506e-07,
+ "loss": 0.7743,
+ "step": 4839
+ },
+ {
+ "epoch": 0.93,
+ "learning_rate": 2.439294124618652e-07,
+ "loss": 0.7303,
+ "step": 4840
+ },
+ {
+ "epoch": 0.93,
+ "learning_rate": 2.4256267286537404e-07,
+ "loss": 0.7409,
+ "step": 4841
+ },
+ {
+ "epoch": 0.93,
+ "learning_rate": 2.4119972595398e-07,
+ "loss": 0.7609,
+ "step": 4842
+ },
+ {
+ "epoch": 0.93,
+ "learning_rate": 2.3984057225745484e-07,
+ "loss": 0.772,
+ "step": 4843
+ },
+ {
+ "epoch": 0.93,
+ "learning_rate": 2.384852123040982e-07,
+ "loss": 0.7653,
+ "step": 4844
+ },
+ {
+ "epoch": 0.93,
+ "learning_rate": 2.3713364662073412e-07,
+ "loss": 0.748,
+ "step": 4845
+ },
+ {
+ "epoch": 0.93,
+ "learning_rate": 2.3578587573271139e-07,
+ "loss": 0.7758,
+ "step": 4846
+ },
+ {
+ "epoch": 0.93,
+ "learning_rate": 2.3444190016390533e-07,
+ "loss": 1.1148,
+ "step": 4847
+ },
+ {
+ "epoch": 0.93,
+ "learning_rate": 2.3310172043671254e-07,
+ "loss": 0.7587,
+ "step": 4848
+ },
+ {
+ "epoch": 0.93,
+ "learning_rate": 2.3176533707205962e-07,
+ "loss": 0.8059,
+ "step": 4849
+ },
+ {
+ "epoch": 0.93,
+ "learning_rate": 2.304327505893933e-07,
+ "loss": 0.7523,
+ "step": 4850
+ },
+ {
+ "epoch": 0.93,
+ "learning_rate": 2.291039615066848e-07,
+ "loss": 0.7386,
+ "step": 4851
+ },
+ {
+ "epoch": 0.93,
+ "learning_rate": 2.2777897034043206e-07,
+ "loss": 0.8175,
+ "step": 4852
+ },
+ {
+ "epoch": 0.93,
+ "learning_rate": 2.2645777760565313e-07,
+ "loss": 0.7591,
+ "step": 4853
+ },
+ {
+ "epoch": 0.93,
+ "learning_rate": 2.2514038381589387e-07,
+ "loss": 0.7447,
+ "step": 4854
+ },
+ {
+ "epoch": 0.93,
+ "learning_rate": 2.2382678948321801e-07,
+ "loss": 0.7734,
+ "step": 4855
+ },
+ {
+ "epoch": 0.93,
+ "learning_rate": 2.2251699511821823e-07,
+ "loss": 0.7157,
+ "step": 4856
+ },
+ {
+ "epoch": 0.93,
+ "learning_rate": 2.2121100123000728e-07,
+ "loss": 0.7759,
+ "step": 4857
+ },
+ {
+ "epoch": 0.94,
+ "learning_rate": 2.1990880832621796e-07,
+ "loss": 0.7363,
+ "step": 4858
+ },
+ {
+ "epoch": 0.94,
+ "learning_rate": 2.18610416913011e-07,
+ "loss": 0.7647,
+ "step": 4859
+ },
+ {
+ "epoch": 0.94,
+ "learning_rate": 2.1731582749506709e-07,
+ "loss": 1.1867,
+ "step": 4860
+ },
+ {
+ "epoch": 0.94,
+ "learning_rate": 2.1602504057558815e-07,
+ "loss": 0.7781,
+ "step": 4861
+ },
+ {
+ "epoch": 0.94,
+ "learning_rate": 2.1473805665630065e-07,
+ "loss": 1.0813,
+ "step": 4862
+ },
+ {
+ "epoch": 0.94,
+ "learning_rate": 2.1345487623744888e-07,
+ "loss": 0.7182,
+ "step": 4863
+ },
+ {
+ "epoch": 0.94,
+ "learning_rate": 2.1217549981780273e-07,
+ "loss": 0.7841,
+ "step": 4864
+ },
+ {
+ "epoch": 0.94,
+ "learning_rate": 2.1089992789464887e-07,
+ "loss": 0.7731,
+ "step": 4865
+ },
+ {
+ "epoch": 0.94,
+ "learning_rate": 2.096281609638029e-07,
+ "loss": 0.7655,
+ "step": 4866
+ },
+ {
+ "epoch": 0.94,
+ "learning_rate": 2.0836019951959275e-07,
+ "loss": 0.7437,
+ "step": 4867
+ },
+ {
+ "epoch": 0.94,
+ "learning_rate": 2.0709604405487192e-07,
+ "loss": 0.7946,
+ "step": 4868
+ },
+ {
+ "epoch": 0.94,
+ "learning_rate": 2.058356950610152e-07,
+ "loss": 0.7469,
+ "step": 4869
+ },
+ {
+ "epoch": 0.94,
+ "learning_rate": 2.04579153027914e-07,
+ "loss": 0.7767,
+ "step": 4870
+ },
+ {
+ "epoch": 0.94,
+ "learning_rate": 2.0332641844398538e-07,
+ "loss": 0.7914,
+ "step": 4871
+ },
+ {
+ "epoch": 0.94,
+ "learning_rate": 2.0207749179615877e-07,
+ "loss": 0.7396,
+ "step": 4872
+ },
+ {
+ "epoch": 0.94,
+ "learning_rate": 2.0083237356989249e-07,
+ "loss": 0.7885,
+ "step": 4873
+ },
+ {
+ "epoch": 0.94,
+ "learning_rate": 1.995910642491572e-07,
+ "loss": 0.7907,
+ "step": 4874
+ },
+ {
+ "epoch": 0.94,
+ "learning_rate": 1.983535643164458e-07,
+ "loss": 0.8036,
+ "step": 4875
+ },
+ {
+ "epoch": 0.94,
+ "learning_rate": 1.9711987425277356e-07,
+ "loss": 0.8085,
+ "step": 4876
+ },
+ {
+ "epoch": 0.94,
+ "learning_rate": 1.958899945376691e-07,
+ "loss": 0.7248,
+ "step": 4877
+ },
+ {
+ "epoch": 0.94,
+ "learning_rate": 1.9466392564918336e-07,
+ "loss": 0.7341,
+ "step": 4878
+ },
+ {
+ "epoch": 0.94,
+ "learning_rate": 1.934416680638873e-07,
+ "loss": 0.7935,
+ "step": 4879
+ },
+ {
+ "epoch": 0.94,
+ "learning_rate": 1.922232222568654e-07,
+ "loss": 0.7398,
+ "step": 4880
+ },
+ {
+ "epoch": 0.94,
+ "learning_rate": 1.9100858870172545e-07,
+ "loss": 0.7824,
+ "step": 4881
+ },
+ {
+ "epoch": 0.94,
+ "learning_rate": 1.89797767870592e-07,
+ "loss": 0.7413,
+ "step": 4882
+ },
+ {
+ "epoch": 0.94,
+ "learning_rate": 1.8859076023410638e-07,
+ "loss": 1.215,
+ "step": 4883
+ },
+ {
+ "epoch": 0.94,
+ "learning_rate": 1.8738756626142997e-07,
+ "loss": 0.776,
+ "step": 4884
+ },
+ {
+ "epoch": 0.94,
+ "learning_rate": 1.8618818642023863e-07,
+ "loss": 0.7816,
+ "step": 4885
+ },
+ {
+ "epoch": 0.94,
+ "learning_rate": 1.8499262117672945e-07,
+ "loss": 0.8018,
+ "step": 4886
+ },
+ {
+ "epoch": 0.94,
+ "learning_rate": 1.8380087099561295e-07,
+ "loss": 0.782,
+ "step": 4887
+ },
+ {
+ "epoch": 0.94,
+ "learning_rate": 1.826129363401208e-07,
+ "loss": 0.766,
+ "step": 4888
+ },
+ {
+ "epoch": 0.94,
+ "learning_rate": 1.8142881767199804e-07,
+ "loss": 0.7759,
+ "step": 4889
+ },
+ {
+ "epoch": 0.94,
+ "learning_rate": 1.8024851545150769e-07,
+ "loss": 0.7352,
+ "step": 4890
+ },
+ {
+ "epoch": 0.94,
+ "learning_rate": 1.7907203013742936e-07,
+ "loss": 0.7157,
+ "step": 4891
+ },
+ {
+ "epoch": 0.94,
+ "learning_rate": 1.7789936218706062e-07,
+ "loss": 0.707,
+ "step": 4892
+ },
+ {
+ "epoch": 0.94,
+ "learning_rate": 1.767305120562124e-07,
+ "loss": 1.1378,
+ "step": 4893
+ },
+ {
+ "epoch": 0.94,
+ "learning_rate": 1.755654801992146e-07,
+ "loss": 0.7109,
+ "step": 4894
+ },
+ {
+ "epoch": 0.94,
+ "learning_rate": 1.7440426706890835e-07,
+ "loss": 0.75,
+ "step": 4895
+ },
+ {
+ "epoch": 0.94,
+ "learning_rate": 1.7324687311665812e-07,
+ "loss": 1.1441,
+ "step": 4896
+ },
+ {
+ "epoch": 0.94,
+ "learning_rate": 1.7209329879233516e-07,
+ "loss": 0.772,
+ "step": 4897
+ },
+ {
+ "epoch": 0.94,
+ "learning_rate": 1.7094354454433303e-07,
+ "loss": 0.763,
+ "step": 4898
+ },
+ {
+ "epoch": 0.94,
+ "learning_rate": 1.6979761081955537e-07,
+ "loss": 0.6992,
+ "step": 4899
+ },
+ {
+ "epoch": 0.94,
+ "learning_rate": 1.6865549806342363e-07,
+ "loss": 0.8053,
+ "step": 4900
+ },
+ {
+ "epoch": 0.94,
+ "learning_rate": 1.675172067198738e-07,
+ "loss": 0.7559,
+ "step": 4901
+ },
+ {
+ "epoch": 0.94,
+ "learning_rate": 1.6638273723135645e-07,
+ "loss": 0.7539,
+ "step": 4902
+ },
+ {
+ "epoch": 0.94,
+ "learning_rate": 1.6525209003883658e-07,
+ "loss": 0.7714,
+ "step": 4903
+ },
+ {
+ "epoch": 0.94,
+ "learning_rate": 1.6412526558179155e-07,
+ "loss": 0.7901,
+ "step": 4904
+ },
+ {
+ "epoch": 0.94,
+ "learning_rate": 1.6300226429821653e-07,
+ "loss": 1.2077,
+ "step": 4905
+ },
+ {
+ "epoch": 0.94,
+ "learning_rate": 1.6188308662461683e-07,
+ "loss": 0.7359,
+ "step": 4906
+ },
+ {
+ "epoch": 0.94,
+ "learning_rate": 1.6076773299601335e-07,
+ "loss": 0.734,
+ "step": 4907
+ },
+ {
+ "epoch": 0.94,
+ "learning_rate": 1.596562038459415e-07,
+ "loss": 0.7314,
+ "step": 4908
+ },
+ {
+ "epoch": 0.94,
+ "learning_rate": 1.585484996064479e-07,
+ "loss": 0.7022,
+ "step": 4909
+ },
+ {
+ "epoch": 0.95,
+ "learning_rate": 1.5744462070809598e-07,
+ "loss": 0.764,
+ "step": 4910
+ },
+ {
+ "epoch": 0.95,
+ "learning_rate": 1.5634456757995907e-07,
+ "loss": 0.783,
+ "step": 4911
+ },
+ {
+ "epoch": 0.95,
+ "learning_rate": 1.5524834064962192e-07,
+ "loss": 0.7769,
+ "step": 4912
+ },
+ {
+ "epoch": 0.95,
+ "learning_rate": 1.5415594034318804e-07,
+ "loss": 0.7804,
+ "step": 4913
+ },
+ {
+ "epoch": 0.95,
+ "learning_rate": 1.5306736708526783e-07,
+ "loss": 0.7414,
+ "step": 4914
+ },
+ {
+ "epoch": 0.95,
+ "learning_rate": 1.519826212989861e-07,
+ "loss": 0.7515,
+ "step": 4915
+ },
+ {
+ "epoch": 0.95,
+ "learning_rate": 1.509017034059823e-07,
+ "loss": 0.7346,
+ "step": 4916
+ },
+ {
+ "epoch": 0.95,
+ "learning_rate": 1.4982461382640257e-07,
+ "loss": 0.8114,
+ "step": 4917
+ },
+ {
+ "epoch": 0.95,
+ "learning_rate": 1.4875135297891085e-07,
+ "loss": 0.7644,
+ "step": 4918
+ },
+ {
+ "epoch": 0.95,
+ "learning_rate": 1.4768192128067905e-07,
+ "loss": 0.7329,
+ "step": 4919
+ },
+ {
+ "epoch": 0.95,
+ "learning_rate": 1.4661631914739238e-07,
+ "loss": 0.742,
+ "step": 4920
+ },
+ {
+ "epoch": 0.95,
+ "learning_rate": 1.4555454699324733e-07,
+ "loss": 0.7797,
+ "step": 4921
+ },
+ {
+ "epoch": 0.95,
+ "learning_rate": 1.4449660523095155e-07,
+ "loss": 0.7375,
+ "step": 4922
+ },
+ {
+ "epoch": 0.95,
+ "learning_rate": 1.4344249427172164e-07,
+ "loss": 0.8273,
+ "step": 4923
+ },
+ {
+ "epoch": 0.95,
+ "learning_rate": 1.4239221452528873e-07,
+ "loss": 0.7514,
+ "step": 4924
+ },
+ {
+ "epoch": 0.95,
+ "learning_rate": 1.4134576639989294e-07,
+ "loss": 0.7651,
+ "step": 4925
+ },
+ {
+ "epoch": 0.95,
+ "learning_rate": 1.403031503022867e-07,
+ "loss": 0.7789,
+ "step": 4926
+ },
+ {
+ "epoch": 0.95,
+ "learning_rate": 1.3926436663772912e-07,
+ "loss": 0.7442,
+ "step": 4927
+ },
+ {
+ "epoch": 0.95,
+ "learning_rate": 1.3822941580999393e-07,
+ "loss": 1.1092,
+ "step": 4928
+ },
+ {
+ "epoch": 0.95,
+ "learning_rate": 1.3719829822136267e-07,
+ "loss": 0.736,
+ "step": 4929
+ },
+ {
+ "epoch": 0.95,
+ "learning_rate": 1.3617101427262913e-07,
+ "loss": 0.6856,
+ "step": 4930
+ },
+ {
+ "epoch": 0.95,
+ "learning_rate": 1.3514756436309173e-07,
+ "loss": 0.7439,
+ "step": 4931
+ },
+ {
+ "epoch": 0.95,
+ "learning_rate": 1.3412794889056445e-07,
+ "loss": 0.7714,
+ "step": 4932
+ },
+ {
+ "epoch": 0.95,
+ "learning_rate": 1.3311216825136919e-07,
+ "loss": 0.7706,
+ "step": 4933
+ },
+ {
+ "epoch": 0.95,
+ "learning_rate": 1.3210022284033563e-07,
+ "loss": 0.7619,
+ "step": 4934
+ },
+ {
+ "epoch": 0.95,
+ "learning_rate": 1.3109211305080361e-07,
+ "loss": 0.7509,
+ "step": 4935
+ },
+ {
+ "epoch": 0.95,
+ "learning_rate": 1.300878392746241e-07,
+ "loss": 0.6701,
+ "step": 4936
+ },
+ {
+ "epoch": 0.95,
+ "learning_rate": 1.2908740190215152e-07,
+ "loss": 0.7258,
+ "step": 4937
+ },
+ {
+ "epoch": 0.95,
+ "learning_rate": 1.2809080132225704e-07,
+ "loss": 0.7529,
+ "step": 4938
+ },
+ {
+ "epoch": 0.95,
+ "learning_rate": 1.2709803792231302e-07,
+ "loss": 0.7512,
+ "step": 4939
+ },
+ {
+ "epoch": 0.95,
+ "learning_rate": 1.261091120882041e-07,
+ "loss": 0.7753,
+ "step": 4940
+ },
+ {
+ "epoch": 0.95,
+ "learning_rate": 1.251240242043239e-07,
+ "loss": 0.7266,
+ "step": 4941
+ },
+ {
+ "epoch": 0.95,
+ "learning_rate": 1.241427746535717e-07,
+ "loss": 0.7664,
+ "step": 4942
+ },
+ {
+ "epoch": 0.95,
+ "learning_rate": 1.2316536381735577e-07,
+ "loss": 0.7617,
+ "step": 4943
+ },
+ {
+ "epoch": 0.95,
+ "learning_rate": 1.221917920755933e-07,
+ "loss": 0.786,
+ "step": 4944
+ },
+ {
+ "epoch": 0.95,
+ "learning_rate": 1.2122205980671042e-07,
+ "loss": 0.7614,
+ "step": 4945
+ },
+ {
+ "epoch": 0.95,
+ "learning_rate": 1.202561673876357e-07,
+ "loss": 0.796,
+ "step": 4946
+ },
+ {
+ "epoch": 0.95,
+ "learning_rate": 1.1929411519380985e-07,
+ "loss": 0.7744,
+ "step": 4947
+ },
+ {
+ "epoch": 0.95,
+ "learning_rate": 1.1833590359918045e-07,
+ "loss": 0.7524,
+ "step": 4948
+ },
+ {
+ "epoch": 0.95,
+ "learning_rate": 1.1738153297619959e-07,
+ "loss": 0.7542,
+ "step": 4949
+ },
+ {
+ "epoch": 0.95,
+ "learning_rate": 1.1643100369582938e-07,
+ "loss": 0.7964,
+ "step": 4950
+ },
+ {
+ "epoch": 0.95,
+ "learning_rate": 1.1548431612753652e-07,
+ "loss": 0.764,
+ "step": 4951
+ },
+ {
+ "epoch": 0.95,
+ "learning_rate": 1.1454147063929776e-07,
+ "loss": 0.793,
+ "step": 4952
+ },
+ {
+ "epoch": 0.95,
+ "learning_rate": 1.1360246759759108e-07,
+ "loss": 0.7901,
+ "step": 4953
+ },
+ {
+ "epoch": 0.95,
+ "learning_rate": 1.1266730736740561e-07,
+ "loss": 0.7746,
+ "step": 4954
+ },
+ {
+ "epoch": 0.95,
+ "learning_rate": 1.1173599031223614e-07,
+ "loss": 0.7802,
+ "step": 4955
+ },
+ {
+ "epoch": 0.95,
+ "learning_rate": 1.108085167940809e-07,
+ "loss": 0.7984,
+ "step": 4956
+ },
+ {
+ "epoch": 0.95,
+ "learning_rate": 1.0988488717344593e-07,
+ "loss": 0.7557,
+ "step": 4957
+ },
+ {
+ "epoch": 0.95,
+ "learning_rate": 1.0896510180934405e-07,
+ "loss": 0.7667,
+ "step": 4958
+ },
+ {
+ "epoch": 0.95,
+ "learning_rate": 1.0804916105929375e-07,
+ "loss": 0.6998,
+ "step": 4959
+ },
+ {
+ "epoch": 0.95,
+ "learning_rate": 1.0713706527931688e-07,
+ "loss": 0.7649,
+ "step": 4960
+ },
+ {
+ "epoch": 0.95,
+ "learning_rate": 1.0622881482394098e-07,
+ "loss": 0.772,
+ "step": 4961
+ },
+ {
+ "epoch": 0.96,
+ "learning_rate": 1.0532441004620364e-07,
+ "loss": 0.7675,
+ "step": 4962
+ },
+ {
+ "epoch": 0.96,
+ "learning_rate": 1.0442385129764143e-07,
+ "loss": 0.8122,
+ "step": 4963
+ },
+ {
+ "epoch": 0.96,
+ "learning_rate": 1.035271389282999e-07,
+ "loss": 0.724,
+ "step": 4964
+ },
+ {
+ "epoch": 0.96,
+ "learning_rate": 1.0263427328672693e-07,
+ "loss": 0.7611,
+ "step": 4965
+ },
+ {
+ "epoch": 0.96,
+ "learning_rate": 1.017452547199782e-07,
+ "loss": 0.7678,
+ "step": 4966
+ },
+ {
+ "epoch": 0.96,
+ "learning_rate": 1.0086008357361177e-07,
+ "loss": 0.7605,
+ "step": 4967
+ },
+ {
+ "epoch": 0.96,
+ "learning_rate": 9.997876019169017e-08,
+ "loss": 0.7659,
+ "step": 4968
+ },
+ {
+ "epoch": 0.96,
+ "learning_rate": 9.910128491678273e-08,
+ "loss": 0.763,
+ "step": 4969
+ },
+ {
+ "epoch": 0.96,
+ "learning_rate": 9.822765808995993e-08,
+ "loss": 0.7883,
+ "step": 4970
+ },
+ {
+ "epoch": 0.96,
+ "learning_rate": 9.735788005079793e-08,
+ "loss": 0.7382,
+ "step": 4971
+ },
+ {
+ "epoch": 0.96,
+ "learning_rate": 9.649195113737741e-08,
+ "loss": 0.7565,
+ "step": 4972
+ },
+ {
+ "epoch": 0.96,
+ "learning_rate": 9.562987168628135e-08,
+ "loss": 0.6611,
+ "step": 4973
+ },
+ {
+ "epoch": 0.96,
+ "learning_rate": 9.477164203259725e-08,
+ "loss": 0.7645,
+ "step": 4974
+ },
+ {
+ "epoch": 0.96,
+ "learning_rate": 9.391726250991717e-08,
+ "loss": 0.7298,
+ "step": 4975
+ },
+ {
+ "epoch": 0.96,
+ "learning_rate": 9.306673345033546e-08,
+ "loss": 0.8122,
+ "step": 4976
+ },
+ {
+ "epoch": 0.96,
+ "learning_rate": 9.222005518444876e-08,
+ "loss": 0.778,
+ "step": 4977
+ },
+ {
+ "epoch": 0.96,
+ "learning_rate": 9.13772280413605e-08,
+ "loss": 0.7563,
+ "step": 4978
+ },
+ {
+ "epoch": 0.96,
+ "learning_rate": 9.053825234867308e-08,
+ "loss": 0.7842,
+ "step": 4979
+ },
+ {
+ "epoch": 0.96,
+ "learning_rate": 8.970312843249563e-08,
+ "loss": 0.8144,
+ "step": 4980
+ },
+ {
+ "epoch": 0.96,
+ "learning_rate": 8.887185661743625e-08,
+ "loss": 0.781,
+ "step": 4981
+ },
+ {
+ "epoch": 0.96,
+ "learning_rate": 8.80444372266076e-08,
+ "loss": 0.7158,
+ "step": 4982
+ },
+ {
+ "epoch": 0.96,
+ "learning_rate": 8.722087058162576e-08,
+ "loss": 0.7941,
+ "step": 4983
+ },
+ {
+ "epoch": 0.96,
+ "learning_rate": 8.64011570026102e-08,
+ "loss": 0.7615,
+ "step": 4984
+ },
+ {
+ "epoch": 0.96,
+ "learning_rate": 8.558529680817829e-08,
+ "loss": 0.7395,
+ "step": 4985
+ },
+ {
+ "epoch": 0.96,
+ "learning_rate": 8.477329031545301e-08,
+ "loss": 0.8054,
+ "step": 4986
+ },
+ {
+ "epoch": 0.96,
+ "learning_rate": 8.396513784005855e-08,
+ "loss": 0.7383,
+ "step": 4987
+ },
+ {
+ "epoch": 0.96,
+ "learning_rate": 8.316083969612254e-08,
+ "loss": 0.7754,
+ "step": 4988
+ },
+ {
+ "epoch": 0.96,
+ "learning_rate": 8.236039619627267e-08,
+ "loss": 0.8049,
+ "step": 4989
+ },
+ {
+ "epoch": 0.96,
+ "learning_rate": 8.156380765163785e-08,
+ "loss": 0.8112,
+ "step": 4990
+ },
+ {
+ "epoch": 0.96,
+ "learning_rate": 8.07710743718504e-08,
+ "loss": 0.7303,
+ "step": 4991
+ },
+ {
+ "epoch": 0.96,
+ "learning_rate": 7.99821966650427e-08,
+ "loss": 0.6409,
+ "step": 4992
+ },
+ {
+ "epoch": 0.96,
+ "learning_rate": 7.919717483784838e-08,
+ "loss": 0.748,
+ "step": 4993
+ },
+ {
+ "epoch": 0.96,
+ "learning_rate": 7.841600919540449e-08,
+ "loss": 0.7293,
+ "step": 4994
+ },
+ {
+ "epoch": 0.96,
+ "learning_rate": 7.763870004134811e-08,
+ "loss": 0.7471,
+ "step": 4995
+ },
+ {
+ "epoch": 0.96,
+ "learning_rate": 7.686524767781423e-08,
+ "loss": 0.7423,
+ "step": 4996
+ },
+ {
+ "epoch": 0.96,
+ "learning_rate": 7.609565240544459e-08,
+ "loss": 0.7617,
+ "step": 4997
+ },
+ {
+ "epoch": 0.96,
+ "learning_rate": 7.532991452337435e-08,
+ "loss": 0.7406,
+ "step": 4998
+ },
+ {
+ "epoch": 0.96,
+ "learning_rate": 7.456803432924764e-08,
+ "loss": 0.738,
+ "step": 4999
+ },
+ {
+ "epoch": 0.96,
+ "learning_rate": 7.381001211920202e-08,
+ "loss": 0.7343,
+ "step": 5000
+ },
+ {
+ "epoch": 0.96,
+ "learning_rate": 7.305584818787959e-08,
+ "loss": 0.7614,
+ "step": 5001
+ },
+ {
+ "epoch": 0.96,
+ "learning_rate": 7.23055428284225e-08,
+ "loss": 0.792,
+ "step": 5002
+ },
+ {
+ "epoch": 0.96,
+ "learning_rate": 7.15590963324686e-08,
+ "loss": 0.7312,
+ "step": 5003
+ },
+ {
+ "epoch": 0.96,
+ "learning_rate": 7.081650899016246e-08,
+ "loss": 0.7303,
+ "step": 5004
+ },
+ {
+ "epoch": 0.96,
+ "learning_rate": 7.007778109014429e-08,
+ "loss": 0.6681,
+ "step": 5005
+ },
+ {
+ "epoch": 0.96,
+ "learning_rate": 6.934291291955664e-08,
+ "loss": 0.8121,
+ "step": 5006
+ },
+ {
+ "epoch": 0.96,
+ "learning_rate": 6.861190476403767e-08,
+ "loss": 0.7581,
+ "step": 5007
+ },
+ {
+ "epoch": 0.96,
+ "learning_rate": 6.788475690773123e-08,
+ "loss": 0.7296,
+ "step": 5008
+ },
+ {
+ "epoch": 0.96,
+ "learning_rate": 6.716146963327452e-08,
+ "loss": 0.7386,
+ "step": 5009
+ },
+ {
+ "epoch": 0.96,
+ "learning_rate": 6.644204322180825e-08,
+ "loss": 0.802,
+ "step": 5010
+ },
+ {
+ "epoch": 0.96,
+ "learning_rate": 6.572647795297205e-08,
+ "loss": 0.7552,
+ "step": 5011
+ },
+ {
+ "epoch": 0.96,
+ "learning_rate": 6.501477410490231e-08,
+ "loss": 0.775,
+ "step": 5012
+ },
+ {
+ "epoch": 0.96,
+ "learning_rate": 6.430693195423665e-08,
+ "loss": 0.7562,
+ "step": 5013
+ },
+ {
+ "epoch": 0.97,
+ "learning_rate": 6.360295177611164e-08,
+ "loss": 0.736,
+ "step": 5014
+ },
+ {
+ "epoch": 0.97,
+ "learning_rate": 6.290283384416063e-08,
+ "loss": 0.8221,
+ "step": 5015
+ },
+ {
+ "epoch": 0.97,
+ "learning_rate": 6.220657843051925e-08,
+ "loss": 0.7855,
+ "step": 5016
+ },
+ {
+ "epoch": 0.97,
+ "learning_rate": 6.15141858058188e-08,
+ "loss": 0.7374,
+ "step": 5017
+ },
+ {
+ "epoch": 0.97,
+ "learning_rate": 6.082565623918957e-08,
+ "loss": 0.8099,
+ "step": 5018
+ },
+ {
+ "epoch": 0.97,
+ "learning_rate": 6.014098999826301e-08,
+ "loss": 1.1987,
+ "step": 5019
+ },
+ {
+ "epoch": 0.97,
+ "learning_rate": 5.9460187349162923e-08,
+ "loss": 0.7369,
+ "step": 5020
+ },
+ {
+ "epoch": 0.97,
+ "learning_rate": 5.8783248556518736e-08,
+ "loss": 0.7591,
+ "step": 5021
+ },
+ {
+ "epoch": 0.97,
+ "learning_rate": 5.81101738834533e-08,
+ "loss": 0.7444,
+ "step": 5022
+ },
+ {
+ "epoch": 0.97,
+ "learning_rate": 5.744096359158846e-08,
+ "loss": 0.7401,
+ "step": 5023
+ },
+ {
+ "epoch": 0.97,
+ "learning_rate": 5.6775617941042803e-08,
+ "loss": 0.7683,
+ "step": 5024
+ },
+ {
+ "epoch": 0.97,
+ "learning_rate": 5.6114137190436124e-08,
+ "loss": 0.7349,
+ "step": 5025
+ },
+ {
+ "epoch": 0.97,
+ "learning_rate": 5.545652159688275e-08,
+ "loss": 0.8028,
+ "step": 5026
+ },
+ {
+ "epoch": 0.97,
+ "learning_rate": 5.480277141599599e-08,
+ "loss": 0.7562,
+ "step": 5027
+ },
+ {
+ "epoch": 0.97,
+ "learning_rate": 5.4152886901888136e-08,
+ "loss": 0.7417,
+ "step": 5028
+ },
+ {
+ "epoch": 0.97,
+ "learning_rate": 5.350686830716378e-08,
+ "loss": 1.1915,
+ "step": 5029
+ },
+ {
+ "epoch": 0.97,
+ "learning_rate": 5.286471588293096e-08,
+ "loss": 0.7543,
+ "step": 5030
+ },
+ {
+ "epoch": 0.97,
+ "learning_rate": 5.2226429878793336e-08,
+ "loss": 0.7666,
+ "step": 5031
+ },
+ {
+ "epoch": 0.97,
+ "learning_rate": 5.1592010542846905e-08,
+ "loss": 0.7587,
+ "step": 5032
+ },
+ {
+ "epoch": 0.97,
+ "learning_rate": 5.096145812169106e-08,
+ "loss": 0.7902,
+ "step": 5033
+ },
+ {
+ "epoch": 0.97,
+ "learning_rate": 5.0334772860418656e-08,
+ "loss": 0.79,
+ "step": 5034
+ },
+ {
+ "epoch": 0.97,
+ "learning_rate": 4.971195500262038e-08,
+ "loss": 0.7097,
+ "step": 5035
+ },
+ {
+ "epoch": 0.97,
+ "learning_rate": 4.9093004790384816e-08,
+ "loss": 0.7,
+ "step": 5036
+ },
+ {
+ "epoch": 0.97,
+ "learning_rate": 4.8477922464293944e-08,
+ "loss": 0.7177,
+ "step": 5037
+ },
+ {
+ "epoch": 0.97,
+ "learning_rate": 4.7866708263429874e-08,
+ "loss": 0.7796,
+ "step": 5038
+ },
+ {
+ "epoch": 0.97,
+ "learning_rate": 4.7259362425369215e-08,
+ "loss": 0.8006,
+ "step": 5039
+ },
+ {
+ "epoch": 0.97,
+ "learning_rate": 4.6655885186185357e-08,
+ "loss": 0.8165,
+ "step": 5040
+ },
+ {
+ "epoch": 0.97,
+ "learning_rate": 4.605627678044733e-08,
+ "loss": 0.7865,
+ "step": 5041
+ },
+ {
+ "epoch": 0.97,
+ "learning_rate": 4.546053744122203e-08,
+ "loss": 0.7299,
+ "step": 5042
+ },
+ {
+ "epoch": 0.97,
+ "learning_rate": 4.48686674000709e-08,
+ "loss": 0.7515,
+ "step": 5043
+ },
+ {
+ "epoch": 0.97,
+ "learning_rate": 4.4280666887051016e-08,
+ "loss": 0.7359,
+ "step": 5044
+ },
+ {
+ "epoch": 0.97,
+ "learning_rate": 4.369653613071845e-08,
+ "loss": 0.7298,
+ "step": 5045
+ },
+ {
+ "epoch": 0.97,
+ "learning_rate": 4.311627535812157e-08,
+ "loss": 1.2002,
+ "step": 5046
+ },
+ {
+ "epoch": 0.97,
+ "learning_rate": 4.253988479480664e-08,
+ "loss": 0.763,
+ "step": 5047
+ },
+ {
+ "epoch": 0.97,
+ "learning_rate": 4.1967364664814433e-08,
+ "loss": 0.7567,
+ "step": 5048
+ },
+ {
+ "epoch": 0.97,
+ "learning_rate": 4.1398715190681394e-08,
+ "loss": 0.7519,
+ "step": 5049
+ },
+ {
+ "epoch": 0.97,
+ "learning_rate": 4.0833936593439594e-08,
+ "loss": 0.8025,
+ "step": 5050
+ },
+ {
+ "epoch": 0.97,
+ "learning_rate": 4.0273029092617877e-08,
+ "loss": 0.7294,
+ "step": 5051
+ },
+ {
+ "epoch": 0.97,
+ "learning_rate": 3.971599290623851e-08,
+ "loss": 0.7354,
+ "step": 5052
+ },
+ {
+ "epoch": 0.97,
+ "learning_rate": 3.916282825081941e-08,
+ "loss": 0.7579,
+ "step": 5053
+ },
+ {
+ "epoch": 0.97,
+ "learning_rate": 3.861353534137524e-08,
+ "loss": 0.7925,
+ "step": 5054
+ },
+ {
+ "epoch": 0.97,
+ "learning_rate": 3.806811439141189e-08,
+ "loss": 0.7644,
+ "step": 5055
+ },
+ {
+ "epoch": 0.97,
+ "learning_rate": 3.752656561293644e-08,
+ "loss": 0.8024,
+ "step": 5056
+ },
+ {
+ "epoch": 0.97,
+ "learning_rate": 3.6988889216443833e-08,
+ "loss": 0.7555,
+ "step": 5057
+ },
+ {
+ "epoch": 0.97,
+ "learning_rate": 3.645508541092802e-08,
+ "loss": 1.1831,
+ "step": 5058
+ },
+ {
+ "epoch": 0.97,
+ "learning_rate": 3.592515440387745e-08,
+ "loss": 0.7767,
+ "step": 5059
+ },
+ {
+ "epoch": 0.97,
+ "learning_rate": 3.5399096401275144e-08,
+ "loss": 0.7541,
+ "step": 5060
+ },
+ {
+ "epoch": 0.97,
+ "learning_rate": 3.487691160759643e-08,
+ "loss": 0.7744,
+ "step": 5061
+ },
+ {
+ "epoch": 0.97,
+ "learning_rate": 3.43586002258145e-08,
+ "loss": 0.7295,
+ "step": 5062
+ },
+ {
+ "epoch": 0.97,
+ "learning_rate": 3.384416245739486e-08,
+ "loss": 0.7802,
+ "step": 5063
+ },
+ {
+ "epoch": 0.97,
+ "learning_rate": 3.333359850229756e-08,
+ "loss": 0.7537,
+ "step": 5064
+ },
+ {
+ "epoch": 0.97,
+ "learning_rate": 3.28269085589783e-08,
+ "loss": 0.7715,
+ "step": 5065
+ },
+ {
+ "epoch": 0.98,
+ "learning_rate": 3.2324092824383976e-08,
+ "loss": 0.7468,
+ "step": 5066
+ },
+ {
+ "epoch": 0.98,
+ "learning_rate": 3.1825151493959374e-08,
+ "loss": 0.7589,
+ "step": 5067
+ },
+ {
+ "epoch": 0.98,
+ "learning_rate": 3.1330084761639344e-08,
+ "loss": 0.804,
+ "step": 5068
+ },
+ {
+ "epoch": 0.98,
+ "learning_rate": 3.083889281985664e-08,
+ "loss": 0.7823,
+ "step": 5069
+ },
+ {
+ "epoch": 0.98,
+ "learning_rate": 3.0351575859535186e-08,
+ "loss": 0.7323,
+ "step": 5070
+ },
+ {
+ "epoch": 0.98,
+ "learning_rate": 2.986813407009459e-08,
+ "loss": 0.8072,
+ "step": 5071
+ },
+ {
+ "epoch": 0.98,
+ "learning_rate": 2.938856763944453e-08,
+ "loss": 0.7528,
+ "step": 5072
+ },
+ {
+ "epoch": 0.98,
+ "learning_rate": 2.891287675399368e-08,
+ "loss": 0.7573,
+ "step": 5073
+ },
+ {
+ "epoch": 0.98,
+ "learning_rate": 2.8441061598640796e-08,
+ "loss": 0.7532,
+ "step": 5074
+ },
+ {
+ "epoch": 0.98,
+ "learning_rate": 2.797312235677807e-08,
+ "loss": 0.7551,
+ "step": 5075
+ },
+ {
+ "epoch": 0.98,
+ "learning_rate": 2.7509059210293344e-08,
+ "loss": 0.8166,
+ "step": 5076
+ },
+ {
+ "epoch": 0.98,
+ "learning_rate": 2.7048872339565657e-08,
+ "loss": 0.7165,
+ "step": 5077
+ },
+ {
+ "epoch": 0.98,
+ "learning_rate": 2.6592561923468596e-08,
+ "loss": 0.7243,
+ "step": 5078
+ },
+ {
+ "epoch": 0.98,
+ "learning_rate": 2.614012813936695e-08,
+ "loss": 0.759,
+ "step": 5079
+ },
+ {
+ "epoch": 0.98,
+ "learning_rate": 2.5691571163123373e-08,
+ "loss": 0.8111,
+ "step": 5080
+ },
+ {
+ "epoch": 0.98,
+ "learning_rate": 2.5246891169088406e-08,
+ "loss": 0.7545,
+ "step": 5081
+ },
+ {
+ "epoch": 0.98,
+ "learning_rate": 2.4806088330108223e-08,
+ "loss": 1.1517,
+ "step": 5082
+ },
+ {
+ "epoch": 0.98,
+ "learning_rate": 2.436916281752133e-08,
+ "loss": 0.7372,
+ "step": 5083
+ },
+ {
+ "epoch": 0.98,
+ "learning_rate": 2.393611480115854e-08,
+ "loss": 0.7285,
+ "step": 5084
+ },
+ {
+ "epoch": 0.98,
+ "learning_rate": 2.3506944449346315e-08,
+ "loss": 1.1966,
+ "step": 5085
+ },
+ {
+ "epoch": 0.98,
+ "learning_rate": 2.3081651928900107e-08,
+ "loss": 1.2066,
+ "step": 5086
+ },
+ {
+ "epoch": 0.98,
+ "learning_rate": 2.2660237405131013e-08,
+ "loss": 0.8117,
+ "step": 5087
+ },
+ {
+ "epoch": 0.98,
+ "learning_rate": 2.224270104184134e-08,
+ "loss": 0.8087,
+ "step": 5088
+ },
+ {
+ "epoch": 0.98,
+ "learning_rate": 2.18290430013246e-08,
+ "loss": 0.7718,
+ "step": 5089
+ },
+ {
+ "epoch": 0.98,
+ "learning_rate": 2.1419263444369954e-08,
+ "loss": 1.1358,
+ "step": 5090
+ },
+ {
+ "epoch": 0.98,
+ "learning_rate": 2.1013362530256655e-08,
+ "loss": 0.7715,
+ "step": 5091
+ },
+ {
+ "epoch": 0.98,
+ "learning_rate": 2.0611340416757387e-08,
+ "loss": 0.7936,
+ "step": 5092
+ },
+ {
+ "epoch": 0.98,
+ "learning_rate": 2.021319726013604e-08,
+ "loss": 1.1225,
+ "step": 5093
+ },
+ {
+ "epoch": 0.98,
+ "learning_rate": 1.981893321515105e-08,
+ "loss": 0.7992,
+ "step": 5094
+ },
+ {
+ "epoch": 0.98,
+ "learning_rate": 1.9428548435052043e-08,
+ "loss": 0.7349,
+ "step": 5095
+ },
+ {
+ "epoch": 0.98,
+ "learning_rate": 1.9042043071577642e-08,
+ "loss": 0.7987,
+ "step": 5096
+ },
+ {
+ "epoch": 0.98,
+ "learning_rate": 1.865941727496434e-08,
+ "loss": 0.719,
+ "step": 5097
+ },
+ {
+ "epoch": 0.98,
+ "learning_rate": 1.8280671193935394e-08,
+ "loss": 0.7152,
+ "step": 5098
+ },
+ {
+ "epoch": 0.98,
+ "learning_rate": 1.7905804975708596e-08,
+ "loss": 0.7938,
+ "step": 5099
+ },
+ {
+ "epoch": 0.98,
+ "learning_rate": 1.7534818765994055e-08,
+ "loss": 0.8135,
+ "step": 5100
+ },
+ {
+ "epoch": 0.98,
+ "learning_rate": 1.716771270899198e-08,
+ "loss": 0.8098,
+ "step": 5101
+ },
+ {
+ "epoch": 0.98,
+ "learning_rate": 1.6804486947397112e-08,
+ "loss": 0.7619,
+ "step": 5102
+ },
+ {
+ "epoch": 0.98,
+ "learning_rate": 1.6445141622392082e-08,
+ "loss": 0.7428,
+ "step": 5103
+ },
+ {
+ "epoch": 0.98,
+ "learning_rate": 1.6089676873654038e-08,
+ "loss": 0.7349,
+ "step": 5104
+ },
+ {
+ "epoch": 0.98,
+ "learning_rate": 1.573809283935135e-08,
+ "loss": 0.7189,
+ "step": 5105
+ },
+ {
+ "epoch": 0.98,
+ "learning_rate": 1.5390389656142478e-08,
+ "loss": 0.723,
+ "step": 5106
+ },
+ {
+ "epoch": 0.98,
+ "learning_rate": 1.5046567459181537e-08,
+ "loss": 0.7431,
+ "step": 5107
+ },
+ {
+ "epoch": 0.98,
+ "learning_rate": 1.470662638210718e-08,
+ "loss": 1.1355,
+ "step": 5108
+ },
+ {
+ "epoch": 0.98,
+ "learning_rate": 1.4370566557055932e-08,
+ "loss": 0.759,
+ "step": 5109
+ },
+ {
+ "epoch": 0.98,
+ "learning_rate": 1.403838811465219e-08,
+ "loss": 0.7652,
+ "step": 5110
+ },
+ {
+ "epoch": 0.98,
+ "learning_rate": 1.3710091184012674e-08,
+ "loss": 0.7237,
+ "step": 5111
+ },
+ {
+ "epoch": 0.98,
+ "learning_rate": 1.338567589274642e-08,
+ "loss": 0.7665,
+ "step": 5112
+ },
+ {
+ "epoch": 0.98,
+ "learning_rate": 1.3065142366951445e-08,
+ "loss": 0.7995,
+ "step": 5113
+ },
+ {
+ "epoch": 0.98,
+ "learning_rate": 1.2748490731219198e-08,
+ "loss": 0.7983,
+ "step": 5114
+ },
+ {
+ "epoch": 0.98,
+ "learning_rate": 1.2435721108630116e-08,
+ "loss": 1.2223,
+ "step": 5115
+ },
+ {
+ "epoch": 0.98,
+ "learning_rate": 1.2126833620756951e-08,
+ "loss": 0.7749,
+ "step": 5116
+ },
+ {
+ "epoch": 0.98,
+ "learning_rate": 1.1821828387663658e-08,
+ "loss": 0.7648,
+ "step": 5117
+ },
+ {
+ "epoch": 0.99,
+ "learning_rate": 1.1520705527904298e-08,
+ "loss": 0.7749,
+ "step": 5118
+ },
+ {
+ "epoch": 0.99,
+ "learning_rate": 1.1223465158526347e-08,
+ "loss": 0.7707,
+ "step": 5119
+ },
+ {
+ "epoch": 0.99,
+ "learning_rate": 1.093010739506295e-08,
+ "loss": 0.6572,
+ "step": 5120
+ },
+ {
+ "epoch": 0.99,
+ "learning_rate": 1.0640632351544e-08,
+ "loss": 0.7411,
+ "step": 5121
+ },
+ {
+ "epoch": 0.99,
+ "learning_rate": 1.0355040140486162e-08,
+ "loss": 0.7263,
+ "step": 5122
+ },
+ {
+ "epoch": 0.99,
+ "learning_rate": 1.0073330872898413e-08,
+ "loss": 0.7994,
+ "step": 5123
+ },
+ {
+ "epoch": 0.99,
+ "learning_rate": 9.79550465828094e-09,
+ "loss": 0.7904,
+ "step": 5124
+ },
+ {
+ "epoch": 0.99,
+ "learning_rate": 9.52156160462292e-09,
+ "loss": 0.7247,
+ "step": 5125
+ },
+ {
+ "epoch": 0.99,
+ "learning_rate": 9.25150181840584e-09,
+ "loss": 0.7436,
+ "step": 5126
+ },
+ {
+ "epoch": 0.99,
+ "learning_rate": 8.985325404601287e-09,
+ "loss": 0.6991,
+ "step": 5127
+ },
+ {
+ "epoch": 0.99,
+ "learning_rate": 8.723032466669834e-09,
+ "loss": 0.7339,
+ "step": 5128
+ },
+ {
+ "epoch": 0.99,
+ "learning_rate": 8.464623106565484e-09,
+ "loss": 0.784,
+ "step": 5129
+ },
+ {
+ "epoch": 0.99,
+ "learning_rate": 8.210097424731223e-09,
+ "loss": 0.7535,
+ "step": 5130
+ },
+ {
+ "epoch": 0.99,
+ "learning_rate": 7.959455520099025e-09,
+ "loss": 0.7687,
+ "step": 5131
+ },
+ {
+ "epoch": 0.99,
+ "learning_rate": 7.71269749009318e-09,
+ "loss": 0.7762,
+ "step": 5132
+ },
+ {
+ "epoch": 0.99,
+ "learning_rate": 7.469823430628076e-09,
+ "loss": 0.7878,
+ "step": 5133
+ },
+ {
+ "epoch": 0.99,
+ "learning_rate": 7.230833436107087e-09,
+ "loss": 0.7439,
+ "step": 5134
+ },
+ {
+ "epoch": 0.99,
+ "learning_rate": 6.9957275994270156e-09,
+ "loss": 0.7438,
+ "step": 5135
+ },
+ {
+ "epoch": 0.99,
+ "learning_rate": 6.764506011970318e-09,
+ "loss": 0.7562,
+ "step": 5136
+ },
+ {
+ "epoch": 0.99,
+ "learning_rate": 6.537168763613988e-09,
+ "loss": 0.679,
+ "step": 5137
+ },
+ {
+ "epoch": 0.99,
+ "learning_rate": 6.3137159427217874e-09,
+ "loss": 0.7077,
+ "step": 5138
+ },
+ {
+ "epoch": 0.99,
+ "learning_rate": 6.094147636150904e-09,
+ "loss": 1.1772,
+ "step": 5139
+ },
+ {
+ "epoch": 0.99,
+ "learning_rate": 5.878463929245293e-09,
+ "loss": 0.7644,
+ "step": 5140
+ },
+ {
+ "epoch": 0.99,
+ "learning_rate": 5.666664905840114e-09,
+ "loss": 0.8001,
+ "step": 5141
+ },
+ {
+ "epoch": 0.99,
+ "learning_rate": 5.458750648262845e-09,
+ "loss": 0.7552,
+ "step": 5142
+ },
+ {
+ "epoch": 0.99,
+ "learning_rate": 5.254721237327731e-09,
+ "loss": 0.7357,
+ "step": 5143
+ },
+ {
+ "epoch": 0.99,
+ "learning_rate": 5.054576752341333e-09,
+ "loss": 0.7615,
+ "step": 5144
+ },
+ {
+ "epoch": 0.99,
+ "learning_rate": 4.85831727109809e-09,
+ "loss": 0.844,
+ "step": 5145
+ },
+ {
+ "epoch": 0.99,
+ "learning_rate": 4.665942869884755e-09,
+ "loss": 1.0962,
+ "step": 5146
+ },
+ {
+ "epoch": 0.99,
+ "learning_rate": 4.477453623474848e-09,
+ "loss": 0.7545,
+ "step": 5147
+ },
+ {
+ "epoch": 0.99,
+ "learning_rate": 4.292849605134208e-09,
+ "loss": 0.7679,
+ "step": 5148
+ },
+ {
+ "epoch": 0.99,
+ "learning_rate": 4.112130886619881e-09,
+ "loss": 0.7403,
+ "step": 5149
+ },
+ {
+ "epoch": 0.99,
+ "learning_rate": 3.9352975381734546e-09,
+ "loss": 0.7395,
+ "step": 5150
+ },
+ {
+ "epoch": 0.99,
+ "learning_rate": 3.762349628532169e-09,
+ "loss": 0.8117,
+ "step": 5151
+ },
+ {
+ "epoch": 0.99,
+ "learning_rate": 3.5932872249178077e-09,
+ "loss": 0.7709,
+ "step": 5152
+ },
+ {
+ "epoch": 0.99,
+ "learning_rate": 3.4281103930466906e-09,
+ "loss": 0.734,
+ "step": 5153
+ },
+ {
+ "epoch": 0.99,
+ "learning_rate": 3.2668191971207963e-09,
+ "loss": 0.7531,
+ "step": 5154
+ },
+ {
+ "epoch": 0.99,
+ "learning_rate": 3.1094136998355286e-09,
+ "loss": 0.7323,
+ "step": 5155
+ },
+ {
+ "epoch": 0.99,
+ "learning_rate": 2.9558939623719475e-09,
+ "loss": 0.7568,
+ "step": 5156
+ },
+ {
+ "epoch": 0.99,
+ "learning_rate": 2.8062600444034306e-09,
+ "loss": 0.7825,
+ "step": 5157
+ },
+ {
+ "epoch": 0.99,
+ "learning_rate": 2.6605120040923414e-09,
+ "loss": 0.8128,
+ "step": 5158
+ },
+ {
+ "epoch": 0.99,
+ "learning_rate": 2.518649898090031e-09,
+ "loss": 0.7974,
+ "step": 5159
+ },
+ {
+ "epoch": 0.99,
+ "learning_rate": 2.3806737815379456e-09,
+ "loss": 0.8189,
+ "step": 5160
+ },
+ {
+ "epoch": 0.99,
+ "learning_rate": 2.246583708067629e-09,
+ "loss": 0.7463,
+ "step": 5161
+ },
+ {
+ "epoch": 0.99,
+ "learning_rate": 2.1163797297996113e-09,
+ "loss": 0.7655,
+ "step": 5162
+ },
+ {
+ "epoch": 0.99,
+ "learning_rate": 1.990061897342299e-09,
+ "loss": 1.0577,
+ "step": 5163
+ },
+ {
+ "epoch": 0.99,
+ "learning_rate": 1.867630259796416e-09,
+ "loss": 0.8006,
+ "step": 5164
+ },
+ {
+ "epoch": 0.99,
+ "learning_rate": 1.7490848647494507e-09,
+ "loss": 0.7563,
+ "step": 5165
+ },
+ {
+ "epoch": 0.99,
+ "learning_rate": 1.6344257582812105e-09,
+ "loss": 0.7537,
+ "step": 5166
+ },
+ {
+ "epoch": 0.99,
+ "learning_rate": 1.5236529849582683e-09,
+ "loss": 0.7642,
+ "step": 5167
+ },
+ {
+ "epoch": 0.99,
+ "learning_rate": 1.416766587838403e-09,
+ "loss": 0.7086,
+ "step": 5168
+ },
+ {
+ "epoch": 0.99,
+ "learning_rate": 1.3137666084672706e-09,
+ "loss": 0.7314,
+ "step": 5169
+ },
+ {
+ "epoch": 1.0,
+ "learning_rate": 1.2146530868806238e-09,
+ "loss": 0.7448,
+ "step": 5170
+ },
+ {
+ "epoch": 1.0,
+ "learning_rate": 1.1194260616043118e-09,
+ "loss": 0.776,
+ "step": 5171
+ },
+ {
+ "epoch": 1.0,
+ "learning_rate": 1.0280855696520597e-09,
+ "loss": 0.7263,
+ "step": 5172
+ },
+ {
+ "epoch": 1.0,
+ "learning_rate": 9.4063164652769e-10,
+ "loss": 0.7817,
+ "step": 5173
+ },
+ {
+ "epoch": 1.0,
+ "learning_rate": 8.57064326225121e-10,
+ "loss": 0.7847,
+ "step": 5174
+ },
+ {
+ "epoch": 1.0,
+ "learning_rate": 7.77383641225038e-10,
+ "loss": 0.7585,
+ "step": 5175
+ },
+ {
+ "epoch": 1.0,
+ "learning_rate": 7.015896225004426e-10,
+ "loss": 0.7045,
+ "step": 5176
+ },
+ {
+ "epoch": 1.0,
+ "learning_rate": 6.296822995122132e-10,
+ "loss": 0.7477,
+ "step": 5177
+ },
+ {
+ "epoch": 1.0,
+ "learning_rate": 5.616617002102142e-10,
+ "loss": 0.7809,
+ "step": 5178
+ },
+ {
+ "epoch": 1.0,
+ "learning_rate": 4.97527851033297e-10,
+ "loss": 0.8178,
+ "step": 5179
+ },
+ {
+ "epoch": 1.0,
+ "learning_rate": 4.372807769115195e-10,
+ "loss": 0.6968,
+ "step": 5180
+ },
+ {
+ "epoch": 1.0,
+ "learning_rate": 3.809205012605954e-10,
+ "loss": 0.753,
+ "step": 5181
+ },
+ {
+ "epoch": 1.0,
+ "learning_rate": 3.2844704598966603e-10,
+ "loss": 0.7941,
+ "step": 5182
+ },
+ {
+ "epoch": 1.0,
+ "learning_rate": 2.798604314946385e-10,
+ "loss": 0.7378,
+ "step": 5183
+ },
+ {
+ "epoch": 1.0,
+ "learning_rate": 2.351606766592962e-10,
+ "loss": 0.7361,
+ "step": 5184
+ },
+ {
+ "epoch": 1.0,
+ "learning_rate": 1.9434779885973975e-10,
+ "loss": 0.7918,
+ "step": 5185
+ },
+ {
+ "epoch": 1.0,
+ "learning_rate": 1.5742181395994594e-10,
+ "loss": 0.7757,
+ "step": 5186
+ },
+ {
+ "epoch": 1.0,
+ "learning_rate": 1.2438273631176778e-10,
+ "loss": 0.7909,
+ "step": 5187
+ },
+ {
+ "epoch": 1.0,
+ "learning_rate": 9.523057875826525e-11,
+ "loss": 0.8029,
+ "step": 5188
+ },
+ {
+ "epoch": 1.0,
+ "learning_rate": 6.996535263148473e-11,
+ "loss": 0.8089,
+ "step": 5189
+ },
+ {
+ "epoch": 1.0,
+ "learning_rate": 4.858706775023869e-11,
+ "loss": 0.7925,
+ "step": 5190
+ },
+ {
+ "epoch": 1.0,
+ "learning_rate": 3.109573242565667e-11,
+ "loss": 0.7691,
+ "step": 5191
+ },
+ {
+ "epoch": 1.0,
+ "learning_rate": 1.749135345563424e-11,
+ "loss": 0.7688,
+ "step": 5192
+ },
+ {
+ "epoch": 1.0,
+ "learning_rate": 7.773936128163684e-12,
+ "loss": 0.7788,
+ "step": 5193
+ },
+ {
+ "epoch": 1.0,
+ "learning_rate": 1.9434842213339466e-12,
+ "loss": 0.6648,
+ "step": 5194
+ },
+ {
+ "epoch": 1.0,
+ "learning_rate": 0.0,
+ "loss": 1.0699,
+ "step": 5195
+ },
+ {
+ "epoch": 1.0,
+ "step": 5195,
+ "total_flos": 9123588792123392.0,
+ "train_loss": 0.8619271928123606,
+ "train_runtime": 48756.4895,
+ "train_samples_per_second": 13.638,
+ "train_steps_per_second": 0.107
+ }
+ ],
+ "logging_steps": 1.0,
+ "max_steps": 5195,
+ "num_input_tokens_seen": 0,
+ "num_train_epochs": 1,
+ "save_steps": 50000,
+ "total_flos": 9123588792123392.0,
+ "train_batch_size": 8,
+ "trial_name": null,
+ "trial_params": null
+}
diff --git a/training_args.bin b/training_args.bin
new file mode 100644
index 0000000000000000000000000000000000000000..e050dbfa2cb6960a55487a98df47468d3ad0f147
--- /dev/null
+++ b/training_args.bin
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:bfd659973b0b2a125146d288ce2386a283dd4e1fb31bf823496261c7469df80e
+size 6776