indrad123 committed
Commit: 40e99d5
Parent: d986fd4

Upload 72 files

This view is limited to 50 files because it contains too many changes. See raw diff.
Files changed (50)
  1. VitImageCaptioning/checkpoints/checkpoint-10000/config.json +184 -0
  2. VitImageCaptioning/checkpoints/checkpoint-10000/generation_config.json +5 -0
  3. VitImageCaptioning/checkpoints/checkpoint-10000/merges.txt +0 -0
  4. VitImageCaptioning/checkpoints/checkpoint-10000/model.safetensors +3 -0
  5. VitImageCaptioning/checkpoints/checkpoint-10000/optimizer.pt +3 -0
  6. VitImageCaptioning/checkpoints/checkpoint-10000/rng_state.pth +3 -0
  7. VitImageCaptioning/checkpoints/checkpoint-10000/scheduler.pt +3 -0
  8. VitImageCaptioning/checkpoints/checkpoint-10000/special_tokens_map.json +6 -0
  9. VitImageCaptioning/checkpoints/checkpoint-10000/tokenizer.json +0 -0
  10. VitImageCaptioning/checkpoints/checkpoint-10000/tokenizer_config.json +20 -0
  11. VitImageCaptioning/checkpoints/checkpoint-10000/trainer_state.json +203 -0
  12. VitImageCaptioning/checkpoints/checkpoint-10000/training_args.bin +3 -0
  13. VitImageCaptioning/checkpoints/checkpoint-10000/vocab.json +0 -0
  14. VitImageCaptioning/checkpoints/checkpoint-10500/config.json +184 -0
  15. VitImageCaptioning/checkpoints/checkpoint-10500/generation_config.json +5 -0
  16. VitImageCaptioning/checkpoints/checkpoint-10500/merges.txt +0 -0
  17. VitImageCaptioning/checkpoints/checkpoint-10500/model.safetensors +3 -0
  18. VitImageCaptioning/checkpoints/checkpoint-10500/optimizer.pt +3 -0
  19. VitImageCaptioning/checkpoints/checkpoint-10500/rng_state.pth +3 -0
  20. VitImageCaptioning/checkpoints/checkpoint-10500/scheduler.pt +3 -0
  21. VitImageCaptioning/checkpoints/checkpoint-10500/special_tokens_map.json +6 -0
  22. VitImageCaptioning/checkpoints/checkpoint-10500/tokenizer.json +0 -0
  23. VitImageCaptioning/checkpoints/checkpoint-10500/tokenizer_config.json +20 -0
  24. VitImageCaptioning/checkpoints/checkpoint-10500/trainer_state.json +210 -0
  25. VitImageCaptioning/checkpoints/checkpoint-10500/training_args.bin +3 -0
  26. VitImageCaptioning/checkpoints/checkpoint-10500/vocab.json +0 -0
  27. VitImageCaptioning/checkpoints/checkpoint-11000/config.json +184 -0
  28. VitImageCaptioning/checkpoints/checkpoint-11000/generation_config.json +5 -0
  29. VitImageCaptioning/checkpoints/checkpoint-11000/merges.txt +0 -0
  30. VitImageCaptioning/checkpoints/checkpoint-11000/model.safetensors +3 -0
  31. VitImageCaptioning/checkpoints/checkpoint-11000/optimizer.pt +3 -0
  32. VitImageCaptioning/checkpoints/checkpoint-11000/rng_state.pth +3 -0
  33. VitImageCaptioning/checkpoints/checkpoint-11000/scheduler.pt +3 -0
  34. VitImageCaptioning/checkpoints/checkpoint-11000/special_tokens_map.json +6 -0
  35. VitImageCaptioning/checkpoints/checkpoint-11000/tokenizer.json +0 -0
  36. VitImageCaptioning/checkpoints/checkpoint-11000/tokenizer_config.json +20 -0
  37. VitImageCaptioning/checkpoints/checkpoint-11000/trainer_state.json +217 -0
  38. VitImageCaptioning/checkpoints/checkpoint-11000/training_args.bin +3 -0
  39. VitImageCaptioning/checkpoints/checkpoint-11000/vocab.json +0 -0
  40. VitImageCaptioning/checkpoints/runs/Jun09_14-35-53_d89412ba1871/events.out.tfevents.1717943754.d89412ba1871.4383.0 +3 -0
  41. VitImageCaptioning/checkpoints/runs/Jun09_14-39-15_d89412ba1871/events.out.tfevents.1717943956.d89412ba1871.4383.1 +3 -0
  42. VitImageCaptioning/checkpoints/runs/Jun09_14-41-16_d89412ba1871/events.out.tfevents.1717944076.d89412ba1871.4383.2 +3 -0
  43. VitImageCaptioning/checkpoints/runs/Jun09_14-41-42_d89412ba1871/events.out.tfevents.1717944103.d89412ba1871.4383.3 +3 -0
  44. VitImageCaptioning/checkpoints/runs/Jun09_14-42-00_d89412ba1871/events.out.tfevents.1717944121.d89412ba1871.4383.4 +3 -0
  45. VitImageCaptioning/checkpoints/runs/Jun09_14-47-15_16e047952208/events.out.tfevents.1717944437.16e047952208.4250.0 +3 -0
  46. VitImageCaptioning/checkpoints/runs/Jun09_14-48-45_16e047952208/events.out.tfevents.1717944525.16e047952208.4250.1 +3 -0
  47. VitImageCaptioning/checkpoints/runs/Jun09_14-50-50_16e047952208/events.out.tfevents.1717944651.16e047952208.4250.2 +3 -0
  48. VitImageCaptioning/checkpoints/runs/Jun09_14-51-58_16e047952208/events.out.tfevents.1717944718.16e047952208.4250.3 +3 -0
  49. VitImageCaptioning/checkpoints/runs/Jun09_14-52-47_16e047952208/events.out.tfevents.1717944767.16e047952208.4250.4 +3 -0
  50. VitImageCaptioning/checkpoints/runs/Jun09_14-53-34_16e047952208/events.out.tfevents.1717944814.16e047952208.4250.5 +3 -0
VitImageCaptioning/checkpoints/checkpoint-10000/config.json ADDED
@@ -0,0 +1,184 @@
+ {
+   "architectures": [
+     "VisionEncoderDecoderModel"
+   ],
+   "decoder": {
+     "_name_or_path": "gpt2",
+     "activation_function": "gelu_new",
+     "add_cross_attention": true,
+     "architectures": [
+       "GPT2LMHeadModel"
+     ],
+     "attn_pdrop": 0.1,
+     "bad_words_ids": null,
+     "begin_suppress_tokens": null,
+     "bos_token_id": 50256,
+     "chunk_size_feed_forward": 0,
+     "cross_attention_hidden_size": null,
+     "decoder_start_token_id": null,
+     "diversity_penalty": 0.0,
+     "do_sample": false,
+     "early_stopping": false,
+     "embd_pdrop": 0.1,
+     "encoder_no_repeat_ngram_size": 0,
+     "eos_token_id": 50256,
+     "exponential_decay_length_penalty": null,
+     "finetuning_task": null,
+     "forced_bos_token_id": null,
+     "forced_eos_token_id": null,
+     "id2label": {
+       "0": "LABEL_0",
+       "1": "LABEL_1"
+     },
+     "initializer_range": 0.02,
+     "is_decoder": true,
+     "is_encoder_decoder": false,
+     "label2id": {
+       "LABEL_0": 0,
+       "LABEL_1": 1
+     },
+     "layer_norm_epsilon": 1e-05,
+     "length_penalty": 1.0,
+     "max_length": 20,
+     "min_length": 0,
+     "model_type": "gpt2",
+     "n_ctx": 1024,
+     "n_embd": 768,
+     "n_head": 12,
+     "n_inner": null,
+     "n_layer": 12,
+     "n_positions": 1024,
+     "no_repeat_ngram_size": 0,
+     "num_beam_groups": 1,
+     "num_beams": 1,
+     "num_return_sequences": 1,
+     "output_attentions": false,
+     "output_hidden_states": false,
+     "output_scores": false,
+     "pad_token_id": null,
+     "prefix": null,
+     "problem_type": null,
+     "pruned_heads": {},
+     "remove_invalid_values": false,
+     "reorder_and_upcast_attn": false,
+     "repetition_penalty": 1.0,
+     "resid_pdrop": 0.1,
+     "return_dict": true,
+     "return_dict_in_generate": false,
+     "scale_attn_by_inverse_layer_idx": false,
+     "scale_attn_weights": true,
+     "sep_token_id": null,
+     "summary_activation": null,
+     "summary_first_dropout": 0.1,
+     "summary_proj_to_labels": true,
+     "summary_type": "cls_index",
+     "summary_use_proj": true,
+     "suppress_tokens": null,
+     "task_specific_params": {
+       "text-generation": {
+         "do_sample": true,
+         "max_length": 50
+       }
+     },
+     "temperature": 1.0,
+     "tf_legacy_loss": false,
+     "tie_encoder_decoder": false,
+     "tie_word_embeddings": true,
+     "tokenizer_class": null,
+     "top_k": 50,
+     "top_p": 1.0,
+     "torch_dtype": null,
+     "torchscript": false,
+     "typical_p": 1.0,
+     "use_bfloat16": false,
+     "use_cache": true,
+     "vocab_size": 50257
+   },
+   "decoder_start_token_id": 50256,
+   "encoder": {
+     "_name_or_path": "google/vit-base-patch16-224-in21k",
+     "add_cross_attention": false,
+     "architectures": [
+       "ViTModel"
+     ],
+     "attention_probs_dropout_prob": 0.0,
+     "bad_words_ids": null,
+     "begin_suppress_tokens": null,
+     "bos_token_id": null,
+     "chunk_size_feed_forward": 0,
+     "cross_attention_hidden_size": null,
+     "decoder_start_token_id": null,
+     "diversity_penalty": 0.0,
+     "do_sample": false,
+     "early_stopping": false,
+     "encoder_no_repeat_ngram_size": 0,
+     "encoder_stride": 16,
+     "eos_token_id": null,
+     "exponential_decay_length_penalty": null,
+     "finetuning_task": null,
+     "forced_bos_token_id": null,
+     "forced_eos_token_id": null,
+     "hidden_act": "gelu",
+     "hidden_dropout_prob": 0.0,
+     "hidden_size": 768,
+     "id2label": {
+       "0": "LABEL_0",
+       "1": "LABEL_1"
+     },
+     "image_size": 224,
+     "initializer_range": 0.02,
+     "intermediate_size": 3072,
+     "is_decoder": false,
+     "is_encoder_decoder": false,
+     "label2id": {
+       "LABEL_0": 0,
+       "LABEL_1": 1
+     },
+     "layer_norm_eps": 1e-12,
+     "length_penalty": 1.0,
+     "max_length": 20,
+     "min_length": 0,
+     "model_type": "vit",
+     "no_repeat_ngram_size": 0,
+     "num_attention_heads": 12,
+     "num_beam_groups": 1,
+     "num_beams": 1,
+     "num_channels": 3,
+     "num_hidden_layers": 12,
+     "num_return_sequences": 1,
+     "output_attentions": false,
+     "output_hidden_states": false,
+     "output_scores": false,
+     "pad_token_id": null,
+     "patch_size": 16,
+     "prefix": null,
+     "problem_type": null,
+     "pruned_heads": {},
+     "qkv_bias": true,
+     "remove_invalid_values": false,
+     "repetition_penalty": 1.0,
+     "return_dict": true,
+     "return_dict_in_generate": false,
+     "sep_token_id": null,
+     "suppress_tokens": null,
+     "task_specific_params": null,
+     "temperature": 1.0,
+     "tf_legacy_loss": false,
+     "tie_encoder_decoder": false,
+     "tie_word_embeddings": true,
+     "tokenizer_class": null,
+     "top_k": 50,
+     "top_p": 1.0,
+     "torch_dtype": null,
+     "torchscript": false,
+     "typical_p": 1.0,
+     "use_bfloat16": false
+   },
+   "eos_token_id": 50256,
+   "is_encoder_decoder": true,
+   "model_type": "vision-encoder-decoder",
+   "pad_token_id": 50256,
+   "tie_word_embeddings": false,
+   "torch_dtype": "float32",
+   "transformers_version": "4.41.2"
+ }
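
Note: this config wires a ViT-Base encoder (google/vit-base-patch16-224-in21k) to a GPT-2 decoder with cross-attention. A minimal inference sketch (not the author's script), assuming a local clone of this repo and transformers 4.41 — the checkpoint folders carry no preprocessor_config.json, so the image processor must be loaded from the base ViT model; the image path is a placeholder:

from PIL import Image
from transformers import AutoTokenizer, ViTImageProcessor, VisionEncoderDecoderModel

ckpt = "VitImageCaptioning/checkpoints/checkpoint-11000"   # any checkpoint dir
model = VisionEncoderDecoderModel.from_pretrained(ckpt)
tokenizer = AutoTokenizer.from_pretrained(ckpt)
# not saved in the checkpoint, so reuse the encoder's original processor
processor = ViTImageProcessor.from_pretrained("google/vit-base-patch16-224-in21k")

image = Image.open("example.jpg").convert("RGB")           # placeholder image
pixel_values = processor(images=image, return_tensors="pt").pixel_values
output_ids = model.generate(pixel_values, max_length=50, num_beams=4)
print(tokenizer.decode(output_ids[0], skip_special_tokens=True))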
VitImageCaptioning/checkpoints/checkpoint-10000/generation_config.json ADDED
@@ -0,0 +1,5 @@
+ {
+   "bos_token_id": 50256,
+   "eos_token_id": 50256,
+   "transformers_version": "4.41.2"
+ }
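
This generation_config.json pins only the bos/eos ids (both <|endoftext|>), so decoding length, beam count, etc. fall back to the model config's defaults (max_length 20 above). A sketch of overriding them at call time, continuing the example above:

from transformers import GenerationConfig

gen_cfg = GenerationConfig.from_pretrained(ckpt)           # bos/eos = 50256
output_ids = model.generate(pixel_values, generation_config=gen_cfg,
                            max_new_tokens=30, num_beams=4)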
VitImageCaptioning/checkpoints/checkpoint-10000/merges.txt ADDED
The diff for this file is too large to render. See raw diff
 
VitImageCaptioning/checkpoints/checkpoint-10000/model.safetensors ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:60f801e2833dde0e6c1792576225a44faeaeb9ddad5fc52b13985ec3860301fa
+ size 956835520
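
These three-line entries are Git LFS pointer stubs, not the weights themselves: a spec-version line, the SHA-256 of the real blob, and its byte size (~957 MB here). `git lfs pull` fetches the actual files. A quick way to read a pointer's fields in Python (the path is a placeholder for a local pointer file):

def parse_lfs_pointer(path):
    # each pointer line is "<key> <value>", e.g. "size 956835520"
    with open(path) as f:
        return dict(line.strip().split(" ", 1) for line in f if line.strip())

ptr = parse_lfs_pointer("checkpoint-10000/model.safetensors")
print(ptr["oid"], int(ptr["size"]))    # sha256:60f8..., 956835520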
VitImageCaptioning/checkpoints/checkpoint-10000/optimizer.pt ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:faf61969176c11836b9137f6a1f49c4adb74e43a21d739029bd1eb6dc815d40a
+ size 1909212608
VitImageCaptioning/checkpoints/checkpoint-10000/rng_state.pth ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:bb5da72e1f29b211d6f30b6c1771f9958da7121bc5d9f9e7332d79b481088bed
+ size 14244
VitImageCaptioning/checkpoints/checkpoint-10000/scheduler.pt ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:151621b8998afe4d4a3d1e50bd20dd7e18fdc60402cddff3fad47347c41d6fa5
+ size 1064
VitImageCaptioning/checkpoints/checkpoint-10000/special_tokens_map.json ADDED
@@ -0,0 +1,6 @@
+ {
+   "bos_token": "<|endoftext|>",
+   "eos_token": "<|endoftext|>",
+   "pad_token": "<|endoftext|>",
+   "unk_token": "<|endoftext|>"
+ }
VitImageCaptioning/checkpoints/checkpoint-10000/tokenizer.json ADDED
The diff for this file is too large to render. See raw diff
 
VitImageCaptioning/checkpoints/checkpoint-10000/tokenizer_config.json ADDED
@@ -0,0 +1,20 @@
+ {
+   "add_prefix_space": false,
+   "added_tokens_decoder": {
+     "50256": {
+       "content": "<|endoftext|>",
+       "lstrip": false,
+       "normalized": true,
+       "rstrip": false,
+       "single_word": false,
+       "special": true
+     }
+   },
+   "bos_token": "<|endoftext|>",
+   "clean_up_tokenization_spaces": true,
+   "eos_token": "<|endoftext|>",
+   "model_max_length": 1024,
+   "pad_token": "<|endoftext|>",
+   "tokenizer_class": "GPT2Tokenizer",
+   "unk_token": "<|endoftext|>"
+ }
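
GPT-2 ships without a dedicated pad token, so the tokenizer config reuses <|endoftext|> (id 50256) for bos, eos, pad, and unk — consistent with pad_token_id and decoder_start_token_id being 50256 in config.json. A quick sanity check, assuming the same local checkpoint path as above:

from transformers import AutoTokenizer

tok = AutoTokenizer.from_pretrained("VitImageCaptioning/checkpoints/checkpoint-10000")
assert tok.pad_token_id == tok.eos_token_id == tok.bos_token_id == 50256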
VitImageCaptioning/checkpoints/checkpoint-10000/trainer_state.json ADDED
@@ -0,0 +1,203 @@
+ {
+   "best_metric": null,
+   "best_model_checkpoint": null,
+   "epoch": 2.7151778441487915,
+   "eval_steps": 500,
+   "global_step": 10000,
+   "is_hyper_param_search": false,
+   "is_local_process_zero": true,
+   "is_world_process_zero": true,
+   "log_history": [
+     {
+       "epoch": 0.13575889220743959,
+       "grad_norm": 0.42541322112083435,
+       "learning_rate": 4.775092768576342e-05,
+       "loss": 1.6148,
+       "step": 500
+     },
+     {
+       "epoch": 0.27151778441487917,
+       "grad_norm": 0.3683030903339386,
+       "learning_rate": 4.5488279482306095e-05,
+       "loss": 1.4559,
+       "step": 1000
+     },
+     {
+       "epoch": 0.40727667662231876,
+       "grad_norm": 0.2653275728225708,
+       "learning_rate": 4.322563127884877e-05,
+       "loss": 1.4429,
+       "step": 1500
+     },
+     {
+       "epoch": 0.5430355688297583,
+       "grad_norm": 0.2617078423500061,
+       "learning_rate": 4.096298307539144e-05,
+       "loss": 1.4354,
+       "step": 2000
+     },
+     {
+       "epoch": 0.678794461037198,
+       "grad_norm": 0.2017202079296112,
+       "learning_rate": 3.8700334871934116e-05,
+       "loss": 1.4308,
+       "step": 2500
+     },
+     {
+       "epoch": 0.8145533532446375,
+       "grad_norm": 0.20993387699127197,
+       "learning_rate": 3.643768666847679e-05,
+       "loss": 1.4278,
+       "step": 3000
+     },
+     {
+       "epoch": 0.9503122454520772,
+       "grad_norm": 0.16242703795433044,
+       "learning_rate": 3.417503846501946e-05,
+       "loss": 1.4253,
+       "step": 3500
+     },
+     {
+       "epoch": 1.0,
+       "eval_bleu": 0.0,
+       "eval_gen_len": 19.0,
+       "eval_loss": 1.4292237758636475,
+       "eval_meteor": 0.03936079398500908,
+       "eval_rouge1": 0.0,
+       "eval_rouge2": 0.0,
+       "eval_rougeL": 0.0,
+       "eval_rougeLsum": 0.0,
+       "eval_runtime": 213.0238,
+       "eval_samples_per_second": 7.281,
+       "eval_steps_per_second": 1.821,
+       "step": 3683
+     },
+     {
+       "epoch": 1.0860711376595167,
+       "grad_norm": 0.29393067955970764,
+       "learning_rate": 3.1912390261562137e-05,
+       "loss": 1.4232,
+       "step": 4000
+     },
+     {
+       "epoch": 1.2218300298669562,
+       "grad_norm": 0.14881190657615662,
+       "learning_rate": 2.9649742058104807e-05,
+       "loss": 1.4215,
+       "step": 4500
+     },
+     {
+       "epoch": 1.3575889220743957,
+       "grad_norm": 0.15426018834114075,
+       "learning_rate": 2.7387093854647484e-05,
+       "loss": 1.4205,
+       "step": 5000
+     },
+     {
+       "epoch": 1.4933478142818355,
+       "grad_norm": 0.20635420083999634,
+       "learning_rate": 2.5124445651190154e-05,
+       "loss": 1.4196,
+       "step": 5500
+     },
+     {
+       "epoch": 1.629106706489275,
+       "grad_norm": 0.1161305382847786,
+       "learning_rate": 2.2861797447732827e-05,
+       "loss": 1.4189,
+       "step": 6000
+     },
+     {
+       "epoch": 1.7648655986967148,
+       "grad_norm": 0.18034473061561584,
+       "learning_rate": 2.05991492442755e-05,
+       "loss": 1.4181,
+       "step": 6500
+     },
+     {
+       "epoch": 1.9006244909041543,
+       "grad_norm": 0.12195830792188644,
+       "learning_rate": 1.8336501040818175e-05,
+       "loss": 1.4177,
+       "step": 7000
+     },
+     {
+       "epoch": 2.0,
+       "eval_bleu": 0.0,
+       "eval_gen_len": 19.0,
+       "eval_loss": 1.4223343133926392,
+       "eval_meteor": 0.07128909550595973,
+       "eval_rouge1": 0.0,
+       "eval_rouge2": 0.0,
+       "eval_rougeL": 0.0,
+       "eval_rougeLsum": 0.0,
+       "eval_runtime": 214.8209,
+       "eval_samples_per_second": 7.22,
+       "eval_steps_per_second": 1.806,
+       "step": 7366
+     },
+     {
+       "epoch": 2.036383383111594,
+       "grad_norm": 0.09966401755809784,
+       "learning_rate": 1.6073852837360848e-05,
+       "loss": 1.4171,
+       "step": 7500
+     },
+     {
+       "epoch": 2.1721422753190334,
+       "grad_norm": 0.12720510363578796,
+       "learning_rate": 1.3811204633903522e-05,
+       "loss": 1.4165,
+       "step": 8000
+     },
+     {
+       "epoch": 2.307901167526473,
+       "grad_norm": 0.17217351496219635,
+       "learning_rate": 1.1548556430446195e-05,
+       "loss": 1.4163,
+       "step": 8500
+     },
+     {
+       "epoch": 2.4436600597339124,
+       "grad_norm": 0.1286771595478058,
+       "learning_rate": 9.285908226988869e-06,
+       "loss": 1.4161,
+       "step": 9000
+     },
+     {
+       "epoch": 2.579418951941352,
+       "grad_norm": 0.12232056260108948,
+       "learning_rate": 7.023260023531542e-06,
+       "loss": 1.4158,
+       "step": 9500
+     },
+     {
+       "epoch": 2.7151778441487915,
+       "grad_norm": 0.14986146986484528,
+       "learning_rate": 4.760611820074215e-06,
+       "loss": 1.4156,
+       "step": 10000
+     }
+   ],
+   "logging_steps": 500,
+   "max_steps": 11049,
+   "num_input_tokens_seen": 0,
+   "num_train_epochs": 3,
+   "save_steps": 500,
+   "stateful_callbacks": {
+     "TrainerControl": {
+       "args": {
+         "should_epoch_stop": false,
+         "should_evaluate": false,
+         "should_log": false,
+         "should_save": true,
+         "should_training_stop": false
+       },
+       "attributes": {}
+     }
+   },
+   "total_flos": 1.4436750361828e+19,
+   "train_batch_size": 4,
+   "trial_name": null,
+   "trial_params": null
+ }
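
trainer_state.json is plain JSON, so the loss curve and per-epoch eval metrics can be inspected without any Trainer machinery. Worth noting: METEOR improves across epochs (0.039 to 0.071) while BLEU/ROUGE stay exactly 0.0, which may hint at a post-processing mismatch in the metric computation rather than genuinely empty outputs — not verifiable from the state file alone. A sketch (path is a placeholder):

import json

with open("checkpoint-10000/trainer_state.json") as f:
    state = json.load(f)

train = [(e["step"], e["loss"]) for e in state["log_history"] if "loss" in e]
evals = [(e["epoch"], e["eval_loss"], e["eval_meteor"])
         for e in state["log_history"] if "eval_loss" in e]
print(train[-1])   # (10000, 1.4156)
print(evals)       # [(1.0, 1.429..., 0.0394), (2.0, 1.422..., 0.0713)]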
VitImageCaptioning/checkpoints/checkpoint-10000/training_args.bin ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:c725b5f00d7df832e4a62fd929d3c53f501968efea22872df6e98d5d9f762fdb
+ size 5304
VitImageCaptioning/checkpoints/checkpoint-10000/vocab.json ADDED
The diff for this file is too large to render. See raw diff
 
VitImageCaptioning/checkpoints/checkpoint-10500/config.json ADDED
@@ -0,0 +1,184 @@
+ {
+   "architectures": [
+     "VisionEncoderDecoderModel"
+   ],
+   "decoder": {
+     "_name_or_path": "gpt2",
+     "activation_function": "gelu_new",
+     "add_cross_attention": true,
+     "architectures": [
+       "GPT2LMHeadModel"
+     ],
+     "attn_pdrop": 0.1,
+     "bad_words_ids": null,
+     "begin_suppress_tokens": null,
+     "bos_token_id": 50256,
+     "chunk_size_feed_forward": 0,
+     "cross_attention_hidden_size": null,
+     "decoder_start_token_id": null,
+     "diversity_penalty": 0.0,
+     "do_sample": false,
+     "early_stopping": false,
+     "embd_pdrop": 0.1,
+     "encoder_no_repeat_ngram_size": 0,
+     "eos_token_id": 50256,
+     "exponential_decay_length_penalty": null,
+     "finetuning_task": null,
+     "forced_bos_token_id": null,
+     "forced_eos_token_id": null,
+     "id2label": {
+       "0": "LABEL_0",
+       "1": "LABEL_1"
+     },
+     "initializer_range": 0.02,
+     "is_decoder": true,
+     "is_encoder_decoder": false,
+     "label2id": {
+       "LABEL_0": 0,
+       "LABEL_1": 1
+     },
+     "layer_norm_epsilon": 1e-05,
+     "length_penalty": 1.0,
+     "max_length": 20,
+     "min_length": 0,
+     "model_type": "gpt2",
+     "n_ctx": 1024,
+     "n_embd": 768,
+     "n_head": 12,
+     "n_inner": null,
+     "n_layer": 12,
+     "n_positions": 1024,
+     "no_repeat_ngram_size": 0,
+     "num_beam_groups": 1,
+     "num_beams": 1,
+     "num_return_sequences": 1,
+     "output_attentions": false,
+     "output_hidden_states": false,
+     "output_scores": false,
+     "pad_token_id": null,
+     "prefix": null,
+     "problem_type": null,
+     "pruned_heads": {},
+     "remove_invalid_values": false,
+     "reorder_and_upcast_attn": false,
+     "repetition_penalty": 1.0,
+     "resid_pdrop": 0.1,
+     "return_dict": true,
+     "return_dict_in_generate": false,
+     "scale_attn_by_inverse_layer_idx": false,
+     "scale_attn_weights": true,
+     "sep_token_id": null,
+     "summary_activation": null,
+     "summary_first_dropout": 0.1,
+     "summary_proj_to_labels": true,
+     "summary_type": "cls_index",
+     "summary_use_proj": true,
+     "suppress_tokens": null,
+     "task_specific_params": {
+       "text-generation": {
+         "do_sample": true,
+         "max_length": 50
+       }
+     },
+     "temperature": 1.0,
+     "tf_legacy_loss": false,
+     "tie_encoder_decoder": false,
+     "tie_word_embeddings": true,
+     "tokenizer_class": null,
+     "top_k": 50,
+     "top_p": 1.0,
+     "torch_dtype": null,
+     "torchscript": false,
+     "typical_p": 1.0,
+     "use_bfloat16": false,
+     "use_cache": true,
+     "vocab_size": 50257
+   },
+   "decoder_start_token_id": 50256,
+   "encoder": {
+     "_name_or_path": "google/vit-base-patch16-224-in21k",
+     "add_cross_attention": false,
+     "architectures": [
+       "ViTModel"
+     ],
+     "attention_probs_dropout_prob": 0.0,
+     "bad_words_ids": null,
+     "begin_suppress_tokens": null,
+     "bos_token_id": null,
+     "chunk_size_feed_forward": 0,
+     "cross_attention_hidden_size": null,
+     "decoder_start_token_id": null,
+     "diversity_penalty": 0.0,
+     "do_sample": false,
+     "early_stopping": false,
+     "encoder_no_repeat_ngram_size": 0,
+     "encoder_stride": 16,
+     "eos_token_id": null,
+     "exponential_decay_length_penalty": null,
+     "finetuning_task": null,
+     "forced_bos_token_id": null,
+     "forced_eos_token_id": null,
+     "hidden_act": "gelu",
+     "hidden_dropout_prob": 0.0,
+     "hidden_size": 768,
+     "id2label": {
+       "0": "LABEL_0",
+       "1": "LABEL_1"
+     },
+     "image_size": 224,
+     "initializer_range": 0.02,
+     "intermediate_size": 3072,
+     "is_decoder": false,
+     "is_encoder_decoder": false,
+     "label2id": {
+       "LABEL_0": 0,
+       "LABEL_1": 1
+     },
+     "layer_norm_eps": 1e-12,
+     "length_penalty": 1.0,
+     "max_length": 20,
+     "min_length": 0,
+     "model_type": "vit",
+     "no_repeat_ngram_size": 0,
+     "num_attention_heads": 12,
+     "num_beam_groups": 1,
+     "num_beams": 1,
+     "num_channels": 3,
+     "num_hidden_layers": 12,
+     "num_return_sequences": 1,
+     "output_attentions": false,
+     "output_hidden_states": false,
+     "output_scores": false,
+     "pad_token_id": null,
+     "patch_size": 16,
+     "prefix": null,
+     "problem_type": null,
+     "pruned_heads": {},
+     "qkv_bias": true,
+     "remove_invalid_values": false,
+     "repetition_penalty": 1.0,
+     "return_dict": true,
+     "return_dict_in_generate": false,
+     "sep_token_id": null,
+     "suppress_tokens": null,
+     "task_specific_params": null,
+     "temperature": 1.0,
+     "tf_legacy_loss": false,
+     "tie_encoder_decoder": false,
+     "tie_word_embeddings": true,
+     "tokenizer_class": null,
+     "top_k": 50,
+     "top_p": 1.0,
+     "torch_dtype": null,
+     "torchscript": false,
+     "typical_p": 1.0,
+     "use_bfloat16": false
+   },
+   "eos_token_id": 50256,
+   "is_encoder_decoder": true,
+   "model_type": "vision-encoder-decoder",
+   "pad_token_id": 50256,
+   "tie_word_embeddings": false,
+   "torch_dtype": "float32",
+   "transformers_version": "4.41.2"
+ }
VitImageCaptioning/checkpoints/checkpoint-10500/generation_config.json ADDED
@@ -0,0 +1,5 @@
+ {
+   "bos_token_id": 50256,
+   "eos_token_id": 50256,
+   "transformers_version": "4.41.2"
+ }
VitImageCaptioning/checkpoints/checkpoint-10500/merges.txt ADDED
The diff for this file is too large to render. See raw diff
 
VitImageCaptioning/checkpoints/checkpoint-10500/model.safetensors ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:c1db46e6c3ba39fb61fa00d9b9abcaad71a76a7ecb27487d7fb31e61cbad4132
+ size 956835520
VitImageCaptioning/checkpoints/checkpoint-10500/optimizer.pt ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:df53ad2d001d140148ea9eda2d14a1b626e090f7f6cee3bd1d56df26cda0b216
+ size 1909212608
VitImageCaptioning/checkpoints/checkpoint-10500/rng_state.pth ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:8af87f227c0d37583396259a55d9d46e9918d81036f417d7142469912268bb60
+ size 14244
VitImageCaptioning/checkpoints/checkpoint-10500/scheduler.pt ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:05baad11dd1601ecba3d2f3c408e62ede84fa3a5a5ab06f844d4cdf95404c80e
+ size 1064
VitImageCaptioning/checkpoints/checkpoint-10500/special_tokens_map.json ADDED
@@ -0,0 +1,6 @@
+ {
+   "bos_token": "<|endoftext|>",
+   "eos_token": "<|endoftext|>",
+   "pad_token": "<|endoftext|>",
+   "unk_token": "<|endoftext|>"
+ }
VitImageCaptioning/checkpoints/checkpoint-10500/tokenizer.json ADDED
The diff for this file is too large to render. See raw diff
 
VitImageCaptioning/checkpoints/checkpoint-10500/tokenizer_config.json ADDED
@@ -0,0 +1,20 @@
+ {
+   "add_prefix_space": false,
+   "added_tokens_decoder": {
+     "50256": {
+       "content": "<|endoftext|>",
+       "lstrip": false,
+       "normalized": true,
+       "rstrip": false,
+       "single_word": false,
+       "special": true
+     }
+   },
+   "bos_token": "<|endoftext|>",
+   "clean_up_tokenization_spaces": true,
+   "eos_token": "<|endoftext|>",
+   "model_max_length": 1024,
+   "pad_token": "<|endoftext|>",
+   "tokenizer_class": "GPT2Tokenizer",
+   "unk_token": "<|endoftext|>"
+ }
VitImageCaptioning/checkpoints/checkpoint-10500/trainer_state.json ADDED
@@ -0,0 +1,210 @@
+ {
+   "best_metric": null,
+   "best_model_checkpoint": null,
+   "epoch": 2.8509367363562315,
+   "eval_steps": 500,
+   "global_step": 10500,
+   "is_hyper_param_search": false,
+   "is_local_process_zero": true,
+   "is_world_process_zero": true,
+   "log_history": [
+     {
+       "epoch": 0.13575889220743959,
+       "grad_norm": 0.42541322112083435,
+       "learning_rate": 4.775092768576342e-05,
+       "loss": 1.6148,
+       "step": 500
+     },
+     {
+       "epoch": 0.27151778441487917,
+       "grad_norm": 0.3683030903339386,
+       "learning_rate": 4.5488279482306095e-05,
+       "loss": 1.4559,
+       "step": 1000
+     },
+     {
+       "epoch": 0.40727667662231876,
+       "grad_norm": 0.2653275728225708,
+       "learning_rate": 4.322563127884877e-05,
+       "loss": 1.4429,
+       "step": 1500
+     },
+     {
+       "epoch": 0.5430355688297583,
+       "grad_norm": 0.2617078423500061,
+       "learning_rate": 4.096298307539144e-05,
+       "loss": 1.4354,
+       "step": 2000
+     },
+     {
+       "epoch": 0.678794461037198,
+       "grad_norm": 0.2017202079296112,
+       "learning_rate": 3.8700334871934116e-05,
+       "loss": 1.4308,
+       "step": 2500
+     },
+     {
+       "epoch": 0.8145533532446375,
+       "grad_norm": 0.20993387699127197,
+       "learning_rate": 3.643768666847679e-05,
+       "loss": 1.4278,
+       "step": 3000
+     },
+     {
+       "epoch": 0.9503122454520772,
+       "grad_norm": 0.16242703795433044,
+       "learning_rate": 3.417503846501946e-05,
+       "loss": 1.4253,
+       "step": 3500
+     },
+     {
+       "epoch": 1.0,
+       "eval_bleu": 0.0,
+       "eval_gen_len": 19.0,
+       "eval_loss": 1.4292237758636475,
+       "eval_meteor": 0.03936079398500908,
+       "eval_rouge1": 0.0,
+       "eval_rouge2": 0.0,
+       "eval_rougeL": 0.0,
+       "eval_rougeLsum": 0.0,
+       "eval_runtime": 213.0238,
+       "eval_samples_per_second": 7.281,
+       "eval_steps_per_second": 1.821,
+       "step": 3683
+     },
+     {
+       "epoch": 1.0860711376595167,
+       "grad_norm": 0.29393067955970764,
+       "learning_rate": 3.1912390261562137e-05,
+       "loss": 1.4232,
+       "step": 4000
+     },
+     {
+       "epoch": 1.2218300298669562,
+       "grad_norm": 0.14881190657615662,
+       "learning_rate": 2.9649742058104807e-05,
+       "loss": 1.4215,
+       "step": 4500
+     },
+     {
+       "epoch": 1.3575889220743957,
+       "grad_norm": 0.15426018834114075,
+       "learning_rate": 2.7387093854647484e-05,
+       "loss": 1.4205,
+       "step": 5000
+     },
+     {
+       "epoch": 1.4933478142818355,
+       "grad_norm": 0.20635420083999634,
+       "learning_rate": 2.5124445651190154e-05,
+       "loss": 1.4196,
+       "step": 5500
+     },
+     {
+       "epoch": 1.629106706489275,
+       "grad_norm": 0.1161305382847786,
+       "learning_rate": 2.2861797447732827e-05,
+       "loss": 1.4189,
+       "step": 6000
+     },
+     {
+       "epoch": 1.7648655986967148,
+       "grad_norm": 0.18034473061561584,
+       "learning_rate": 2.05991492442755e-05,
+       "loss": 1.4181,
+       "step": 6500
+     },
+     {
+       "epoch": 1.9006244909041543,
+       "grad_norm": 0.12195830792188644,
+       "learning_rate": 1.8336501040818175e-05,
+       "loss": 1.4177,
+       "step": 7000
+     },
+     {
+       "epoch": 2.0,
+       "eval_bleu": 0.0,
+       "eval_gen_len": 19.0,
+       "eval_loss": 1.4223343133926392,
+       "eval_meteor": 0.07128909550595973,
+       "eval_rouge1": 0.0,
+       "eval_rouge2": 0.0,
+       "eval_rougeL": 0.0,
+       "eval_rougeLsum": 0.0,
+       "eval_runtime": 214.8209,
+       "eval_samples_per_second": 7.22,
+       "eval_steps_per_second": 1.806,
+       "step": 7366
+     },
+     {
+       "epoch": 2.036383383111594,
+       "grad_norm": 0.09966401755809784,
+       "learning_rate": 1.6073852837360848e-05,
+       "loss": 1.4171,
+       "step": 7500
+     },
+     {
+       "epoch": 2.1721422753190334,
+       "grad_norm": 0.12720510363578796,
+       "learning_rate": 1.3811204633903522e-05,
+       "loss": 1.4165,
+       "step": 8000
+     },
+     {
+       "epoch": 2.307901167526473,
+       "grad_norm": 0.17217351496219635,
+       "learning_rate": 1.1548556430446195e-05,
+       "loss": 1.4163,
+       "step": 8500
+     },
+     {
+       "epoch": 2.4436600597339124,
+       "grad_norm": 0.1286771595478058,
+       "learning_rate": 9.285908226988869e-06,
+       "loss": 1.4161,
+       "step": 9000
+     },
+     {
+       "epoch": 2.579418951941352,
+       "grad_norm": 0.12232056260108948,
+       "learning_rate": 7.023260023531542e-06,
+       "loss": 1.4158,
+       "step": 9500
+     },
+     {
+       "epoch": 2.7151778441487915,
+       "grad_norm": 0.14986146986484528,
+       "learning_rate": 4.760611820074215e-06,
+       "loss": 1.4156,
+       "step": 10000
+     },
+     {
+       "epoch": 2.8509367363562315,
+       "grad_norm": 0.08726570755243301,
+       "learning_rate": 2.4979636166168884e-06,
+       "loss": 1.4155,
+       "step": 10500
+     }
+   ],
+   "logging_steps": 500,
+   "max_steps": 11049,
+   "num_input_tokens_seen": 0,
+   "num_train_epochs": 3,
+   "save_steps": 500,
+   "stateful_callbacks": {
+     "TrainerControl": {
+       "args": {
+         "should_epoch_stop": false,
+         "should_evaluate": false,
+         "should_log": false,
+         "should_save": true,
+         "should_training_stop": false
+       },
+       "attributes": {}
+     }
+   },
+   "total_flos": 1.5158605926308512e+19,
+   "train_batch_size": 4,
+   "trial_name": null,
+   "trial_params": null
+ }
VitImageCaptioning/checkpoints/checkpoint-10500/training_args.bin ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:c725b5f00d7df832e4a62fd929d3c53f501968efea22872df6e98d5d9f762fdb
+ size 5304
VitImageCaptioning/checkpoints/checkpoint-10500/vocab.json ADDED
The diff for this file is too large to render. See raw diff
 
VitImageCaptioning/checkpoints/checkpoint-11000/config.json ADDED
@@ -0,0 +1,184 @@
+ {
+   "architectures": [
+     "VisionEncoderDecoderModel"
+   ],
+   "decoder": {
+     "_name_or_path": "gpt2",
+     "activation_function": "gelu_new",
+     "add_cross_attention": true,
+     "architectures": [
+       "GPT2LMHeadModel"
+     ],
+     "attn_pdrop": 0.1,
+     "bad_words_ids": null,
+     "begin_suppress_tokens": null,
+     "bos_token_id": 50256,
+     "chunk_size_feed_forward": 0,
+     "cross_attention_hidden_size": null,
+     "decoder_start_token_id": null,
+     "diversity_penalty": 0.0,
+     "do_sample": false,
+     "early_stopping": false,
+     "embd_pdrop": 0.1,
+     "encoder_no_repeat_ngram_size": 0,
+     "eos_token_id": 50256,
+     "exponential_decay_length_penalty": null,
+     "finetuning_task": null,
+     "forced_bos_token_id": null,
+     "forced_eos_token_id": null,
+     "id2label": {
+       "0": "LABEL_0",
+       "1": "LABEL_1"
+     },
+     "initializer_range": 0.02,
+     "is_decoder": true,
+     "is_encoder_decoder": false,
+     "label2id": {
+       "LABEL_0": 0,
+       "LABEL_1": 1
+     },
+     "layer_norm_epsilon": 1e-05,
+     "length_penalty": 1.0,
+     "max_length": 20,
+     "min_length": 0,
+     "model_type": "gpt2",
+     "n_ctx": 1024,
+     "n_embd": 768,
+     "n_head": 12,
+     "n_inner": null,
+     "n_layer": 12,
+     "n_positions": 1024,
+     "no_repeat_ngram_size": 0,
+     "num_beam_groups": 1,
+     "num_beams": 1,
+     "num_return_sequences": 1,
+     "output_attentions": false,
+     "output_hidden_states": false,
+     "output_scores": false,
+     "pad_token_id": null,
+     "prefix": null,
+     "problem_type": null,
+     "pruned_heads": {},
+     "remove_invalid_values": false,
+     "reorder_and_upcast_attn": false,
+     "repetition_penalty": 1.0,
+     "resid_pdrop": 0.1,
+     "return_dict": true,
+     "return_dict_in_generate": false,
+     "scale_attn_by_inverse_layer_idx": false,
+     "scale_attn_weights": true,
+     "sep_token_id": null,
+     "summary_activation": null,
+     "summary_first_dropout": 0.1,
+     "summary_proj_to_labels": true,
+     "summary_type": "cls_index",
+     "summary_use_proj": true,
+     "suppress_tokens": null,
+     "task_specific_params": {
+       "text-generation": {
+         "do_sample": true,
+         "max_length": 50
+       }
+     },
+     "temperature": 1.0,
+     "tf_legacy_loss": false,
+     "tie_encoder_decoder": false,
+     "tie_word_embeddings": true,
+     "tokenizer_class": null,
+     "top_k": 50,
+     "top_p": 1.0,
+     "torch_dtype": null,
+     "torchscript": false,
+     "typical_p": 1.0,
+     "use_bfloat16": false,
+     "use_cache": true,
+     "vocab_size": 50257
+   },
+   "decoder_start_token_id": 50256,
+   "encoder": {
+     "_name_or_path": "google/vit-base-patch16-224-in21k",
+     "add_cross_attention": false,
+     "architectures": [
+       "ViTModel"
+     ],
+     "attention_probs_dropout_prob": 0.0,
+     "bad_words_ids": null,
+     "begin_suppress_tokens": null,
+     "bos_token_id": null,
+     "chunk_size_feed_forward": 0,
+     "cross_attention_hidden_size": null,
+     "decoder_start_token_id": null,
+     "diversity_penalty": 0.0,
+     "do_sample": false,
+     "early_stopping": false,
+     "encoder_no_repeat_ngram_size": 0,
+     "encoder_stride": 16,
+     "eos_token_id": null,
+     "exponential_decay_length_penalty": null,
+     "finetuning_task": null,
+     "forced_bos_token_id": null,
+     "forced_eos_token_id": null,
+     "hidden_act": "gelu",
+     "hidden_dropout_prob": 0.0,
+     "hidden_size": 768,
+     "id2label": {
+       "0": "LABEL_0",
+       "1": "LABEL_1"
+     },
+     "image_size": 224,
+     "initializer_range": 0.02,
+     "intermediate_size": 3072,
+     "is_decoder": false,
+     "is_encoder_decoder": false,
+     "label2id": {
+       "LABEL_0": 0,
+       "LABEL_1": 1
+     },
+     "layer_norm_eps": 1e-12,
+     "length_penalty": 1.0,
+     "max_length": 20,
+     "min_length": 0,
+     "model_type": "vit",
+     "no_repeat_ngram_size": 0,
+     "num_attention_heads": 12,
+     "num_beam_groups": 1,
+     "num_beams": 1,
+     "num_channels": 3,
+     "num_hidden_layers": 12,
+     "num_return_sequences": 1,
+     "output_attentions": false,
+     "output_hidden_states": false,
+     "output_scores": false,
+     "pad_token_id": null,
+     "patch_size": 16,
+     "prefix": null,
+     "problem_type": null,
+     "pruned_heads": {},
+     "qkv_bias": true,
+     "remove_invalid_values": false,
+     "repetition_penalty": 1.0,
+     "return_dict": true,
+     "return_dict_in_generate": false,
+     "sep_token_id": null,
+     "suppress_tokens": null,
+     "task_specific_params": null,
+     "temperature": 1.0,
+     "tf_legacy_loss": false,
+     "tie_encoder_decoder": false,
+     "tie_word_embeddings": true,
+     "tokenizer_class": null,
+     "top_k": 50,
+     "top_p": 1.0,
+     "torch_dtype": null,
+     "torchscript": false,
+     "typical_p": 1.0,
+     "use_bfloat16": false
+   },
+   "eos_token_id": 50256,
+   "is_encoder_decoder": true,
+   "model_type": "vision-encoder-decoder",
+   "pad_token_id": 50256,
+   "tie_word_embeddings": false,
+   "torch_dtype": "float32",
+   "transformers_version": "4.41.2"
+ }
VitImageCaptioning/checkpoints/checkpoint-11000/generation_config.json ADDED
@@ -0,0 +1,5 @@
+ {
+   "bos_token_id": 50256,
+   "eos_token_id": 50256,
+   "transformers_version": "4.41.2"
+ }
VitImageCaptioning/checkpoints/checkpoint-11000/merges.txt ADDED
The diff for this file is too large to render. See raw diff
 
VitImageCaptioning/checkpoints/checkpoint-11000/model.safetensors ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:997c6a0af8284857c4b82f99d5f748b55a81aec882b5a2d6575a2d972af5ca34
+ size 956835520
VitImageCaptioning/checkpoints/checkpoint-11000/optimizer.pt ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:109045af0eb4b427b4f1dd440fd16d77b0979610303412638c8db9817d569a4e
+ size 1909212608
VitImageCaptioning/checkpoints/checkpoint-11000/rng_state.pth ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:2d7e3ae518a79d797fbe949100ec4ff0bba54e6aa86f0967ad71e2c5120afa59
+ size 14244
VitImageCaptioning/checkpoints/checkpoint-11000/scheduler.pt ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:67cc35d6c68458be099cc26fb3fef583a3b22cddf1d73d283de6a1b2c33b7cc1
+ size 1064
VitImageCaptioning/checkpoints/checkpoint-11000/special_tokens_map.json ADDED
@@ -0,0 +1,6 @@
+ {
+   "bos_token": "<|endoftext|>",
+   "eos_token": "<|endoftext|>",
+   "pad_token": "<|endoftext|>",
+   "unk_token": "<|endoftext|>"
+ }
VitImageCaptioning/checkpoints/checkpoint-11000/tokenizer.json ADDED
The diff for this file is too large to render. See raw diff
 
VitImageCaptioning/checkpoints/checkpoint-11000/tokenizer_config.json ADDED
@@ -0,0 +1,20 @@
+ {
+   "add_prefix_space": false,
+   "added_tokens_decoder": {
+     "50256": {
+       "content": "<|endoftext|>",
+       "lstrip": false,
+       "normalized": true,
+       "rstrip": false,
+       "single_word": false,
+       "special": true
+     }
+   },
+   "bos_token": "<|endoftext|>",
+   "clean_up_tokenization_spaces": true,
+   "eos_token": "<|endoftext|>",
+   "model_max_length": 1024,
+   "pad_token": "<|endoftext|>",
+   "tokenizer_class": "GPT2Tokenizer",
+   "unk_token": "<|endoftext|>"
+ }
VitImageCaptioning/checkpoints/checkpoint-11000/trainer_state.json ADDED
@@ -0,0 +1,217 @@
+ {
+   "best_metric": null,
+   "best_model_checkpoint": null,
+   "epoch": 2.986695628563671,
+   "eval_steps": 500,
+   "global_step": 11000,
+   "is_hyper_param_search": false,
+   "is_local_process_zero": true,
+   "is_world_process_zero": true,
+   "log_history": [
+     {
+       "epoch": 0.13575889220743959,
+       "grad_norm": 0.42541322112083435,
+       "learning_rate": 4.775092768576342e-05,
+       "loss": 1.6148,
+       "step": 500
+     },
+     {
+       "epoch": 0.27151778441487917,
+       "grad_norm": 0.3683030903339386,
+       "learning_rate": 4.5488279482306095e-05,
+       "loss": 1.4559,
+       "step": 1000
+     },
+     {
+       "epoch": 0.40727667662231876,
+       "grad_norm": 0.2653275728225708,
+       "learning_rate": 4.322563127884877e-05,
+       "loss": 1.4429,
+       "step": 1500
+     },
+     {
+       "epoch": 0.5430355688297583,
+       "grad_norm": 0.2617078423500061,
+       "learning_rate": 4.096298307539144e-05,
+       "loss": 1.4354,
+       "step": 2000
+     },
+     {
+       "epoch": 0.678794461037198,
+       "grad_norm": 0.2017202079296112,
+       "learning_rate": 3.8700334871934116e-05,
+       "loss": 1.4308,
+       "step": 2500
+     },
+     {
+       "epoch": 0.8145533532446375,
+       "grad_norm": 0.20993387699127197,
+       "learning_rate": 3.643768666847679e-05,
+       "loss": 1.4278,
+       "step": 3000
+     },
+     {
+       "epoch": 0.9503122454520772,
+       "grad_norm": 0.16242703795433044,
+       "learning_rate": 3.417503846501946e-05,
+       "loss": 1.4253,
+       "step": 3500
+     },
+     {
+       "epoch": 1.0,
+       "eval_bleu": 0.0,
+       "eval_gen_len": 19.0,
+       "eval_loss": 1.4292237758636475,
+       "eval_meteor": 0.03936079398500908,
+       "eval_rouge1": 0.0,
+       "eval_rouge2": 0.0,
+       "eval_rougeL": 0.0,
+       "eval_rougeLsum": 0.0,
+       "eval_runtime": 213.0238,
+       "eval_samples_per_second": 7.281,
+       "eval_steps_per_second": 1.821,
+       "step": 3683
+     },
+     {
+       "epoch": 1.0860711376595167,
+       "grad_norm": 0.29393067955970764,
+       "learning_rate": 3.1912390261562137e-05,
+       "loss": 1.4232,
+       "step": 4000
+     },
+     {
+       "epoch": 1.2218300298669562,
+       "grad_norm": 0.14881190657615662,
+       "learning_rate": 2.9649742058104807e-05,
+       "loss": 1.4215,
+       "step": 4500
+     },
+     {
+       "epoch": 1.3575889220743957,
+       "grad_norm": 0.15426018834114075,
+       "learning_rate": 2.7387093854647484e-05,
+       "loss": 1.4205,
+       "step": 5000
+     },
+     {
+       "epoch": 1.4933478142818355,
+       "grad_norm": 0.20635420083999634,
+       "learning_rate": 2.5124445651190154e-05,
+       "loss": 1.4196,
+       "step": 5500
+     },
+     {
+       "epoch": 1.629106706489275,
+       "grad_norm": 0.1161305382847786,
+       "learning_rate": 2.2861797447732827e-05,
+       "loss": 1.4189,
+       "step": 6000
+     },
+     {
+       "epoch": 1.7648655986967148,
+       "grad_norm": 0.18034473061561584,
+       "learning_rate": 2.05991492442755e-05,
+       "loss": 1.4181,
+       "step": 6500
+     },
+     {
+       "epoch": 1.9006244909041543,
+       "grad_norm": 0.12195830792188644,
+       "learning_rate": 1.8336501040818175e-05,
+       "loss": 1.4177,
+       "step": 7000
+     },
+     {
+       "epoch": 2.0,
+       "eval_bleu": 0.0,
+       "eval_gen_len": 19.0,
+       "eval_loss": 1.4223343133926392,
+       "eval_meteor": 0.07128909550595973,
+       "eval_rouge1": 0.0,
+       "eval_rouge2": 0.0,
+       "eval_rougeL": 0.0,
+       "eval_rougeLsum": 0.0,
+       "eval_runtime": 214.8209,
+       "eval_samples_per_second": 7.22,
+       "eval_steps_per_second": 1.806,
+       "step": 7366
+     },
+     {
+       "epoch": 2.036383383111594,
+       "grad_norm": 0.09966401755809784,
+       "learning_rate": 1.6073852837360848e-05,
+       "loss": 1.4171,
+       "step": 7500
+     },
+     {
+       "epoch": 2.1721422753190334,
+       "grad_norm": 0.12720510363578796,
+       "learning_rate": 1.3811204633903522e-05,
+       "loss": 1.4165,
+       "step": 8000
+     },
+     {
+       "epoch": 2.307901167526473,
+       "grad_norm": 0.17217351496219635,
+       "learning_rate": 1.1548556430446195e-05,
+       "loss": 1.4163,
+       "step": 8500
+     },
+     {
+       "epoch": 2.4436600597339124,
+       "grad_norm": 0.1286771595478058,
+       "learning_rate": 9.285908226988869e-06,
+       "loss": 1.4161,
+       "step": 9000
+     },
+     {
+       "epoch": 2.579418951941352,
+       "grad_norm": 0.12232056260108948,
+       "learning_rate": 7.023260023531542e-06,
+       "loss": 1.4158,
+       "step": 9500
+     },
+     {
+       "epoch": 2.7151778441487915,
+       "grad_norm": 0.14986146986484528,
+       "learning_rate": 4.760611820074215e-06,
+       "loss": 1.4156,
+       "step": 10000
+     },
+     {
+       "epoch": 2.8509367363562315,
+       "grad_norm": 0.08726570755243301,
+       "learning_rate": 2.4979636166168884e-06,
+       "loss": 1.4155,
+       "step": 10500
+     },
+     {
+       "epoch": 2.986695628563671,
+       "grad_norm": 0.08979591727256775,
+       "learning_rate": 2.3531541315956195e-07,
+       "loss": 1.4154,
+       "step": 11000
+     }
+   ],
+   "logging_steps": 500,
+   "max_steps": 11049,
+   "num_input_tokens_seen": 0,
+   "num_train_epochs": 3,
+   "save_steps": 500,
+   "stateful_callbacks": {
+     "TrainerControl": {
+       "args": {
+         "should_epoch_stop": false,
+         "should_evaluate": false,
+         "should_log": false,
+         "should_save": true,
+         "should_training_stop": false
+       },
+       "attributes": {}
+     }
+   },
+   "total_flos": 1.5880461490789024e+19,
+   "train_batch_size": 4,
+   "trial_name": null,
+   "trial_params": null
+ }
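
Because each checkpoint folder also stores optimizer.pt, scheduler.pt, and rng_state.pth, training can resume exactly where it stopped; with max_steps 11049, checkpoint-11000 is only 49 steps short of the end. A sketch, where build_trainer() is a hypothetical stand-in for the author's original Trainer setup, which this commit does not include:

def resume(checkpoint_dir: str) -> None:
    trainer = build_trainer()  # hypothetical: recreate model, datasets, args
    trainer.train(resume_from_checkpoint=checkpoint_dir)

resume("VitImageCaptioning/checkpoints/checkpoint-11000")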
VitImageCaptioning/checkpoints/checkpoint-11000/training_args.bin ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:c725b5f00d7df832e4a62fd929d3c53f501968efea22872df6e98d5d9f762fdb
+ size 5304
VitImageCaptioning/checkpoints/checkpoint-11000/vocab.json ADDED
The diff for this file is too large to render. See raw diff
 
VitImageCaptioning/checkpoints/runs/Jun09_14-35-53_d89412ba1871/events.out.tfevents.1717943754.d89412ba1871.4383.0 ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:4ebd6469ac2ecdba95925aaea9c018f7bc2c18a35993eb7ecead34ebad68def4
+ size 9205
VitImageCaptioning/checkpoints/runs/Jun09_14-39-15_d89412ba1871/events.out.tfevents.1717943956.d89412ba1871.4383.1 ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:e7cc3a6bbee74e4d6a3acb1bdb39d1af91b7d35014d15692cb2ab6b1955c082c
+ size 9205
VitImageCaptioning/checkpoints/runs/Jun09_14-41-16_d89412ba1871/events.out.tfevents.1717944076.d89412ba1871.4383.2 ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:69a5d6566fc4dee00a105465fe5fbc8f9e01481f86ab83ab6c91faa3a40d90b9
+ size 8280
VitImageCaptioning/checkpoints/runs/Jun09_14-41-42_d89412ba1871/events.out.tfevents.1717944103.d89412ba1871.4383.3 ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:18ca5fb513ef39ddff8b570716e30d7d803730a7ebce8aa5a2948edff3c94cfd
+ size 8280
VitImageCaptioning/checkpoints/runs/Jun09_14-42-00_d89412ba1871/events.out.tfevents.1717944121.d89412ba1871.4383.4 ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:520d67c11ec508e979446d326385a2cfb97fbbc8a8363e22cf3d79114693bc26
+ size 8280
VitImageCaptioning/checkpoints/runs/Jun09_14-47-15_16e047952208/events.out.tfevents.1717944437.16e047952208.4250.0 ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:4f3e37ab1978a63fc99273128da49d8cf17a6e5e33bc9bc827b1b26686c0e6ea
+ size 9205
VitImageCaptioning/checkpoints/runs/Jun09_14-48-45_16e047952208/events.out.tfevents.1717944525.16e047952208.4250.1 ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:29a9410fa9e572e61badcdf4f42542e4ed9bbb803e2290455578871e71a8b540
+ size 9201
VitImageCaptioning/checkpoints/runs/Jun09_14-50-50_16e047952208/events.out.tfevents.1717944651.16e047952208.4250.2 ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:37cc9be5e8567ed6aa0b58557aacec4a29418c264a09e4f7f3c7ad277a03d82b
+ size 9201
VitImageCaptioning/checkpoints/runs/Jun09_14-51-58_16e047952208/events.out.tfevents.1717944718.16e047952208.4250.3 ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:16c7508600795c33ba227b5060a93ec0a9d776b61a8a46fdf6c2c0695e9ef3fb
+ size 9201
VitImageCaptioning/checkpoints/runs/Jun09_14-52-47_16e047952208/events.out.tfevents.1717944767.16e047952208.4250.4 ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:891f72fb17b2bbc1acef6341d15ee1bf9d5440252a1924f5662b097d46467f06
+ size 9201
VitImageCaptioning/checkpoints/runs/Jun09_14-53-34_16e047952208/events.out.tfevents.1717944814.16e047952208.4250.5 ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:52698403d5f9c670bff16b246b3ce92b528a562cb951fb5b90580510e97f3777
+ size 9201