davidhajdu committed on
Commit 8ef9b88
1 Parent(s): 1dae0e9

Training in progress, epoch 1

config.json CHANGED
@@ -3,7 +3,7 @@
   "activation_dropout": 0.0,
   "activation_function": "prelu",
   "architectures": [
-    "DABDETRForObjectDetection"
+    "DabDetrForObjectDetection"
   ],
   "attention_dropout": 0.0,
   "auxiliary_loss": false,
@@ -19,22 +19,16 @@
   ]
   },
   "bbox_cost": 5,
-  "bbox_embed_diff_each_layer": false,
   "bbox_loss_coefficient": 5,
   "class_cost": 2,
   "cls_loss_coefficient": 2,
   "d_model": 256,
   "decoder_attention_heads": 8,
-  "decoder_bbox_embed_diff_each_layer": false,
   "decoder_ffn_dim": 2048,
-  "decoder_layerdrop": 0.0,
   "decoder_layers": 6,
-  "decoder_modulate_hw_attn": true,
-  "do_use_self_attn_decoder": true,
   "dropout": 0.1,
   "encoder_attention_heads": 8,
   "encoder_ffn_dim": 2048,
-  "encoder_layerdrop": 0.0,
   "encoder_layers": 6,
   "focal_alpha": 0.25,
   "giou_cost": 2,
@@ -50,7 +44,6 @@
   "init_xavier_std": 1.0,
   "initializer_bias_prior_prob": null,
   "is_encoder_decoder": true,
-  "iter_update": true,
   "keep_query_pos": false,
   "label2id": {
   "Coverall": 0,
@@ -64,11 +57,8 @@
   "num_hidden_layers": 6,
   "num_patterns": 0,
   "num_queries": 300,
-  "position_embedding_type": "sine",
   "query_dim": 4,
-  "query_scale_type": "cond_elewise",
   "random_refpoints_xy": false,
-  "sine_position_embedding_normalize": true,
   "sine_position_embedding_scale": null,
   "temperature_height": 20,
   "temperature_width": 20,
preprocessor_config.json CHANGED
@@ -10,7 +10,7 @@
   0.456,
   0.406
   ],
-  "image_processor_type": "DABDETRImageProcessor",
+  "image_processor_type": "DabDetrImageProcessor",
   "image_std": [
   0.229,
   0.224,
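The preprocessor change mirrors the rename in config.json: image_processor_type now points at DabDetrImageProcessor, while the image_mean / image_std values shown are the usual ImageNet statistics. A small usage sketch, again with the same hypothetical repo id:

# Minimal sketch (placeholder repo id and dummy image, for illustration only)
from PIL import Image
from transformers import AutoImageProcessor

processor = AutoImageProcessor.from_pretrained("davidhajdu/dab-detr-finetuned-cppe5")  # hypothetical
image = Image.new("RGB", (640, 480))  # dummy image
inputs = processor(images=image, return_tensors="pt")
print(inputs["pixel_values"].shape)  # resized, normalized tensor of shape (1, 3, H, W)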
pytorch_model.bin CHANGED
@@ -1,3 +1,3 @@
   version https://git-lfs.github.com/spec/v1
-  oid sha256:4550cda5836ced8dfda7e376acd0a2b7d62f89429689d0de7ac47bb6ae508115
-  size 175237322
+  oid sha256:b49e925ed0bcda1ac57a05052663993d74754bff61a7e7cd5f29b1d05c7377ed
+  size 175239050
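The binary files in this commit are stored as three-line Git LFS pointers (version, oid, size), so only the pointer changes in the diff while the actual ~175 MB weights live in LFS storage. A sketch of resolving the pointer to the real file, assuming the same placeholder repo id:

# Minimal sketch (hypothetical repo id; hf_hub_download fetches the LFS object, not the pointer)
from huggingface_hub import hf_hub_download

local_path = hf_hub_download(
    repo_id="davidhajdu/dab-detr-finetuned-cppe5",  # hypothetical repo id
    filename="pytorch_model.bin",
)
print(local_path)  # cached path to the downloaded weight file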
runs/Oct14_06-57-25_da318bc67907/events.out.tfevents.1728889052.da318bc67907.831.0 ADDED
@@ -0,0 +1,3 @@
+  version https://git-lfs.github.com/spec/v1
+  oid sha256:010b1ae893982afe8b124b6cbc0945b805118f6d3bd4787c840fe0726b2cfad7
+  size 7319
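The added file under runs/ is a TensorBoard event log for this training run. A sketch of inspecting it once downloaded locally, using TensorBoard's event reader (an illustration, not part of the commit):

# Minimal sketch, assuming the event file is in the current directory
from tensorboard.backend.event_processing.event_accumulator import EventAccumulator

acc = EventAccumulator("events.out.tfevents.1728889052.da318bc67907.831.0")
acc.Reload()
print(acc.Tags()["scalars"])  # scalar tags logged during training, e.g. loss and learning rate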
training_args.bin CHANGED
@@ -1,3 +1,3 @@
   version https://git-lfs.github.com/spec/v1
-  oid sha256:71bf8d99eabb027dd73f99a8ef14efd3ea910c17b431d0c4617d280648d6413a
+  oid sha256:c2bbca2899e028226100bf14fb6b1b5ce9860213d34602efe0cee7fd01ffaa18
   size 5240