ljchang committed on
Commit
de4cb76
1 Parent(s): 2bd566f

Upload config.json with huggingface_hub

Browse files
Files changed (1) hide show
  1. config.json +1 -46
config.json CHANGED
@@ -1,46 +1 @@
1
- {
2
- "cfg": {
3
- "batch_size": 32,
4
- "clip": false,
5
- "decay1": 190,
6
- "decay2": 220,
7
- "epoch": 250,
8
- "gpu_train": true,
9
- "image_size": 640,
10
- "in_channel": 32,
11
- "loc_weight": 2.0,
12
- "min_sizes": [
13
- [
14
- 16,
15
- 32
16
- ],
17
- [
18
- 64,
19
- 128
20
- ],
21
- [
22
- 256,
23
- 512
24
- ]
25
- ],
26
- "name": "mobilenet0.25",
27
- "ngpu": 1,
28
- "out_channel": 64,
29
- "pretrain": false,
30
- "return_layers": {
31
- "stage1": 1,
32
- "stage2": 2,
33
- "stage3": 3
34
- },
35
- "steps": [
36
- 8,
37
- 16,
38
- 32
39
- ],
40
- "variance": [
41
- 0.1,
42
- 0.2
43
- ]
44
- },
45
- "phase": "test"
46
- }
 
1
+ {"name": "mobilenet0.25", "min_sizes": [[16, 32], [64, 128], [256, 512]], "steps": [8, 16, 32], "variance": [0.1, 0.2], "clip": false, "loc_weight": 2.0, "gpu_train": true, "batch_size": 32, "ngpu": 1, "epoch": 250, "decay1": 190, "decay2": 220, "image_size": 640, "pretrain": false, "return_layers": {"stage1": 1, "stage2": 2, "stage3": 3}, "in_channel": 32, "out_channel": 64}