MohamedAhmedAE committed on
Commit 41e83ce · verified · 1 Parent(s): 67b4b22

Training in progress, step 354000, checkpoint

last-checkpoint/adapter_config.json CHANGED
@@ -23,13 +23,13 @@
   "rank_pattern": {},
   "revision": null,
   "target_modules": [
- "gate_proj",
- "v_proj",
   "o_proj",
- "q_proj",
   "k_proj",
   "down_proj",
- "up_proj"
+ "gate_proj",
+ "q_proj",
+ "up_proj",
+ "v_proj"
   ],
   "task_type": "CAUSAL_LM",
   "use_dora": false,
last-checkpoint/adapter_model.safetensors CHANGED
@@ -1,3 +1,3 @@
   version https://git-lfs.github.com/spec/v1
- oid sha256:4cdea3181ff20981cb33a156aeb80b8e43818a8f05858b95c059340e21aa8733
+ oid sha256:e9c2bac8b502b19efc41232812d63c71fee5df5671162a7150c06e61fa051ae9
   size 1342238560
last-checkpoint/optimizer.pt CHANGED
@@ -1,3 +1,3 @@
   version https://git-lfs.github.com/spec/v1
- oid sha256:6cd506869b94b5be970fca409bc95398841b5ffd9a592fb2c061dc58227eceba
+ oid sha256:5c4a3d4af06483be93d8df8956639f5a14629f88f01f5e9197bad971be65ed91
   size 683268498
last-checkpoint/rng_state.pth CHANGED
@@ -1,3 +1,3 @@
   version https://git-lfs.github.com/spec/v1
- oid sha256:3650e87ca836906ee8691b7d5d7b6eea6cead08041d7a7bfe35d5aa4494083ae
+ oid sha256:9e162559635122d1fd8bf513b6383eeaef54f892ebd418422f5439a78e18e22f
   size 14244
last-checkpoint/scheduler.pt CHANGED
@@ -1,3 +1,3 @@
   version https://git-lfs.github.com/spec/v1
- oid sha256:9d7ddbf1d6d4ba083fc85b63bf4848a26d38ae4d64ba73f9a2514a5c0d3a9d8c
+ oid sha256:dad438b9465aa0569ad2c4a4ce56c9f5f47a608feb10b35e8561cd9dadb0857d
   size 1064
last-checkpoint/trainer_state.json CHANGED
@@ -1,9 +1,9 @@
   {
   "best_metric": null,
   "best_model_checkpoint": null,
- "epoch": 0.24314063140061104,
+ "epoch": 0.24620075376377665,
   "eval_steps": 500,
- "global_step": 349600,
+ "global_step": 354000,
   "is_hyper_param_search": false,
   "is_local_process_zero": true,
   "is_world_process_zero": true,
@@ -12243,6 +12243,160 @@
   "learning_rate": 1.94956437917686e-05,
   "loss": 1.6643,
   "step": 349600
+ },
+ {
+ "epoch": 0.24327972787166402,
+ "grad_norm": 4.202829837799072,
+ "learning_rate": 1.949507360178391e-05,
+ "loss": 1.6291,
+ "step": 349800
+ },
+ {
+ "epoch": 0.243418824342717,
+ "grad_norm": 6.402523517608643,
+ "learning_rate": 1.949450310173186e-05,
+ "loss": 1.6366,
+ "step": 350000
+ },
+ {
+ "epoch": 0.24355792081377,
+ "grad_norm": 5.324528694152832,
+ "learning_rate": 1.9493932291639685e-05,
+ "loss": 1.6421,
+ "step": 350200
+ },
+ {
+ "epoch": 0.243697017284823,
+ "grad_norm": 0.4841703772544861,
+ "learning_rate": 1.9493361171534633e-05,
+ "loss": 1.6182,
+ "step": 350400
+ },
+ {
+ "epoch": 0.24383611375587597,
+ "grad_norm": 4.352268218994141,
+ "learning_rate": 1.949278974144397e-05,
+ "loss": 1.6553,
+ "step": 350600
+ },
+ {
+ "epoch": 0.24397521022692895,
+ "grad_norm": 4.935366630554199,
+ "learning_rate": 1.9492218001394977e-05,
+ "loss": 1.6541,
+ "step": 350800
+ },
+ {
+ "epoch": 0.24411430669798193,
+ "grad_norm": 3.1909875869750977,
+ "learning_rate": 1.9491645951414943e-05,
+ "loss": 1.7019,
+ "step": 351000
+ },
+ {
+ "epoch": 0.2442534031690349,
+ "grad_norm": 4.752991199493408,
+ "learning_rate": 1.9491073591531178e-05,
+ "loss": 1.6571,
+ "step": 351200
+ },
+ {
+ "epoch": 0.24439249964008788,
+ "grad_norm": 6.217344284057617,
+ "learning_rate": 1.949050092177101e-05,
+ "loss": 1.723,
+ "step": 351400
+ },
+ {
+ "epoch": 0.24453159611114086,
+ "grad_norm": 6.201177597045898,
+ "learning_rate": 1.948992794216178e-05,
+ "loss": 1.6802,
+ "step": 351600
+ },
+ {
+ "epoch": 0.24467069258219384,
+ "grad_norm": 2.825784683227539,
+ "learning_rate": 1.948935465273083e-05,
+ "loss": 1.5842,
+ "step": 351800
+ },
+ {
+ "epoch": 0.24480978905324682,
+ "grad_norm": 4.810288429260254,
+ "learning_rate": 1.948878105350554e-05,
+ "loss": 1.7099,
+ "step": 352000
+ },
+ {
+ "epoch": 0.2449488855242998,
+ "grad_norm": 4.089555263519287,
+ "learning_rate": 1.9488207144513284e-05,
+ "loss": 1.7244,
+ "step": 352200
+ },
+ {
+ "epoch": 0.24508798199535278,
+ "grad_norm": 6.472701549530029,
+ "learning_rate": 1.948763292578147e-05,
+ "loss": 1.6518,
+ "step": 352400
+ },
+ {
+ "epoch": 0.24522707846640576,
+ "grad_norm": 3.4730377197265625,
+ "learning_rate": 1.9487058397337498e-05,
+ "loss": 1.708,
+ "step": 352600
+ },
+ {
+ "epoch": 0.24536617493745874,
+ "grad_norm": 3.145690441131592,
+ "learning_rate": 1.9486483559208805e-05,
+ "loss": 1.6469,
+ "step": 352800
+ },
+ {
+ "epoch": 0.24550527140851172,
+ "grad_norm": 3.899048089981079,
+ "learning_rate": 1.948590841142283e-05,
+ "loss": 1.6595,
+ "step": 353000
+ },
+ {
+ "epoch": 0.2456443678795647,
+ "grad_norm": 5.847311973571777,
+ "learning_rate": 1.948533295400703e-05,
+ "loss": 1.6287,
+ "step": 353200
+ },
+ {
+ "epoch": 0.24578346435061768,
+ "grad_norm": 3.397681713104248,
+ "learning_rate": 1.9484757186988875e-05,
+ "loss": 1.6518,
+ "step": 353400
+ },
+ {
+ "epoch": 0.24592256082167066,
+ "grad_norm": 4.522960662841797,
+ "learning_rate": 1.9484181110395854e-05,
+ "loss": 1.6843,
+ "step": 353600
+ },
+ {
+ "epoch": 0.24606165729272367,
+ "grad_norm": 4.118208408355713,
+ "learning_rate": 1.9483604724255466e-05,
+ "loss": 1.6792,
+ "step": 353800
+ },
+ {
+ "epoch": 0.24620075376377665,
+ "grad_norm": 3.8641409873962402,
+ "learning_rate": 1.9483028028595225e-05,
+ "loss": 1.6484,
+ "step": 354000
   }
   ],
   "logging_steps": 200,
@@ -12262,7 +12416,7 @@
   "attributes": {}
   }
   },
- "total_flos": 4.654650097453105e+18,
+ "total_flos": 4.712916903428727e+18,
   "train_batch_size": 1,
   "trial_name": null,
   "trial_params": null
last-checkpoint/training_args.bin CHANGED
@@ -1,3 +1,3 @@
   version https://git-lfs.github.com/spec/v1
- oid sha256:078136780f82e4c02daf15d387ce5ace039d2fead953e9b2034a974f0b6417e9
+ oid sha256:7a29a5a0f0399f7044a026f76e298444ea587cc6f3dc814a0d8be9df650c7561
   size 6840
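
training_args.bin is the pickled TrainingArguments object the Trainer writes alongside each checkpoint; only its Git LFS hash changes here, with the size unchanged at 6840 bytes. As a minimal sketch (assuming a local copy of the file and an environment with transformers installed so the object can be unpickled), it can be loaded back to inspect the run's hyperparameters:

# Hypothetical inspection of the saved training arguments; weights_only=False
# is required because the file stores a full Python object, not tensors.
import torch

args = torch.load("last-checkpoint/training_args.bin", weights_only=False)
print(args.learning_rate, args.per_device_train_batch_size, args.logging_steps)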