leejuhyoeng committed on
Commit ae81c0c
1 Parent(s): dedd02d

Training in progress, step 1
config.json CHANGED
@@ -12,20 +12,38 @@
     "model_type": "blip_2_qformer"
   },
   "text_config": {
-    "_name_or_path": "facebook/opt-2.7b",
-    "activation_dropout": 0.0,
+    "_name_or_path": "heegyu/kogpt-j-350m",
+    "activation_function": "gelu_new",
     "architectures": [
-      "OPTForCausalLM"
+      "GPTJForCausalLM"
     ],
-    "eos_token_id": 50118,
-    "ffn_dim": 10240,
-    "hidden_size": 2560,
-    "model_type": "opt",
-    "num_attention_heads": 32,
-    "num_hidden_layers": 32,
-    "prefix": "</s>",
-    "torch_dtype": "float16",
-    "word_embed_proj_dim": 2560
+    "attn_pdrop": 0.1,
+    "bos_token_id": 1,
+    "embd_pdrop": 0.1,
+    "eos_token_id": 2,
+    "gradient_checkpointing": false,
+    "initializer_range": 0.02,
+    "layer_norm_epsilon": 1e-05,
+    "model_type": "gptj",
+    "n_embd": 1024,
+    "n_head": 16,
+    "n_inner": null,
+    "n_layer": 20,
+    "n_positions": 1024,
+    "pad_token_id": 0,
+    "resid_pdrop": 0.0,
+    "rotary_dim": 32,
+    "scale_attn_weights": true,
+    "summary_activation": null,
+    "summary_first_dropout": 0.1,
+    "summary_proj_to_labels": true,
+    "summary_type": "cls_index",
+    "summary_use_proj": true,
+    "tie_word_embeddings": false,
+    "tokenizer_class": "GPT2Tokenizer",
+    "torch_dtype": "float32",
+    "unk_token_id": 5,
+    "vocab_size": 51200
   },
   "torch_dtype": "float32",
   "transformers_version": "4.36.2",
tmp-checkpoint-6000/config.json ADDED
@@ -0,0 +1,40 @@
+{
+  "_name_or_path": "Salesforce/blip2-opt-2.7b",
+  "architectures": [
+    "Blip2ForConditionalGeneration"
+  ],
+  "initializer_factor": 1.0,
+  "initializer_range": 0.02,
+  "model_type": "blip-2",
+  "num_query_tokens": 32,
+  "qformer_config": {
+    "classifier_dropout": null,
+    "model_type": "blip_2_qformer"
+  },
+  "text_config": {
+    "_name_or_path": "facebook/opt-2.7b",
+    "activation_dropout": 0.0,
+    "architectures": [
+      "OPTForCausalLM"
+    ],
+    "eos_token_id": 50118,
+    "ffn_dim": 10240,
+    "hidden_size": 2560,
+    "model_type": "opt",
+    "num_attention_heads": 32,
+    "num_hidden_layers": 32,
+    "prefix": "</s>",
+    "torch_dtype": "float16",
+    "word_embed_proj_dim": 2560
+  },
+  "torch_dtype": "float32",
+  "transformers_version": "4.36.2",
+  "use_decoder_only_language_model": true,
+  "vision_config": {
+    "dropout": 0.0,
+    "initializer_factor": 1.0,
+    "model_type": "blip_2_vision_model",
+    "num_channels": 3,
+    "projection_dim": 512
+  }
+}
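Note that this checkpoint snapshot still records the original facebook/opt-2.7b text_config, unlike the updated top-level config.json above. A hedged sketch of reloading it; the local path comes from this commit, and the rest is standard transformers usage:

    import torch
    from transformers import Blip2ForConditionalGeneration

    # Load the sharded safetensors weights that accompany this config.json.
    model = Blip2ForConditionalGeneration.from_pretrained(
        "tmp-checkpoint-6000",
        torch_dtype=torch.float32,  # matches "torch_dtype": "float32" above
    )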
tmp-checkpoint-6000/generation_config.json ADDED
@@ -0,0 +1,7 @@
+{
+  "_from_model_config": true,
+  "bos_token_id": 2,
+  "eos_token_id": 50118,
+  "pad_token_id": 1,
+  "transformers_version": "4.36.2"
+}
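These values are the defaults that model.generate() falls back to. A small sketch, assuming the checkpoint directory from this commit; note that eos_token_id 50118 comes from the OPT vocabulary, not from the new GPT-J text config (whose eos is 2):

    from transformers import GenerationConfig

    gen_config = GenerationConfig.from_pretrained("tmp-checkpoint-6000")
    assert gen_config.bos_token_id == 2
    assert gen_config.eos_token_id == 50118  # OPT-specific id carried over
    assert gen_config.pad_token_id == 1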
tmp-checkpoint-6000/model-00001-of-00004.safetensors ADDED
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:47c8a8202b3b41c36759d42b4b85d9e338575287bb51a6cb080ff86b2b5bb7d4
+size 4986896152
tmp-checkpoint-6000/model-00002-of-00004.safetensors ADDED
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:0607aceca704911d20a3ecd4322594460b9e1d94d139bb955a306caae061f311
+size 4982878872
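These two files are git-lfs pointers, not the weights themselves; the oid records the SHA-256 of the real ~5 GB shard. A sketch, assuming the shard has been pulled locally, for checking a download against the pointer:

    import hashlib

    def sha256_of(path: str, chunk_size: int = 1 << 20) -> str:
        # Stream the file so the ~5 GB shard never sits in memory at once.
        digest = hashlib.sha256()
        with open(path, "rb") as f:
            while block := f.read(chunk_size):
                digest.update(block)
        return digest.hexdigest()

    expected = "47c8a8202b3b41c36759d42b4b85d9e338575287bb51a6cb080ff86b2b5bb7d4"
    actual = sha256_of("tmp-checkpoint-6000/model-00001-of-00004.safetensors")
    assert actual == expected, "shard does not match the LFS pointer"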
training_args.bin CHANGED
@@ -1,3 +1,3 @@
 version https://git-lfs.github.com/spec/v1
-oid sha256:06b1cb71df64cad1cf9c3cecfb832c96e2f54b9b3a74efd198c25fb735938b71
+oid sha256:4b962e979ddd766add1ea5cafc6ba56b3d5eb1ad10a8a3576320b6048871569e
 size 4664
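Only the oid changed here: training_args.bin is the pickled TrainingArguments object that Trainer writes with torch.save, so new hyperparameters mean a new hash at the same 4664-byte size. A sketch of inspecting it, assuming the matching transformers version (4.36.2) is installed so unpickling succeeds:

    import torch

    # weights_only=False: this is a pickled TrainingArguments object, not a tensor file.
    args = torch.load("training_args.bin", weights_only=False)
    print(type(args).__name__)  # expected: TrainingArguments
    print(args.output_dir, args.per_device_train_batch_size)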