{
    "dataset_reader": {
        "type": "squad",
        "token_indexers": {
            "token_characters": {
                "type": "characters",
                "character_tokenizer": {
                    "byte_encoding": "utf-8",
                    "end_tokens": [
                        260
                    ],
                    "start_tokens": [
                        259
                    ]
                },
                "min_padding_length": 5
            },
            "tokens": {
                "type": "single_id",
                "lowercase_tokens": true
            }
        }
    },
    "model": {
        "type": "bidaf",
        "dropout": 0.2,
        "matrix_attention": {
            "type": "linear",
            "combination": "x,y,x*y",
            "tensor_1_dim": 200,
            "tensor_2_dim": 200
        },
        "modeling_layer": {
            "type": "lstm",
            "bidirectional": true,
            "dropout": 0.2,
            "hidden_size": 100,
            "input_size": 800,
            "num_layers": 2
        },
        "num_highway_layers": 2,
        "phrase_layer": {
            "type": "lstm",
            "bidirectional": true,
            "hidden_size": 100,
            "input_size": 200,
            "num_layers": 1
        },
        "span_end_encoder": {
            "type": "lstm",
            "bidirectional": true,
            "hidden_size": 100,
            "input_size": 1400,
            "num_layers": 1
        },
        "text_field_embedder": {
            "token_embedders": {
                "token_characters": {
                    "type": "character_encoding",
                    "dropout": 0.2,
                    "embedding": {
                        "embedding_dim": 16,
                        "num_embeddings": 262
                    },
                    "encoder": {
                        "type": "cnn",
                        "embedding_dim": 16,
                        "ngram_filter_sizes": [
                            5
                        ],
                        "num_filters": 100
                    }
                },
                "tokens": {
                    "type": "embedding",
                    "embedding_dim": 100,
                    "pretrained_file": "https://allennlp.s3.amazonaws.com/datasets/glove/glove.6B.100d.txt.gz",
                    "trainable": false
                }
            }
        }
    },
    "train_data_path": "https://allennlp.s3.amazonaws.com/datasets/squad/squad-train-v1.1.json",
    "validation_data_path": "https://allennlp.s3.amazonaws.com/datasets/squad/squad-dev-v1.1.json",
    "trainer": {
        "cuda_device": 0,
        "grad_norm": 5,
        "learning_rate_scheduler": {
            "type": "reduce_on_plateau",
            "factor": 0.5,
            "mode": "max",
            "patience": 2
        },
        "num_epochs": 20,
        "optimizer": {
            "type": "adam",
            "betas": [
                0.9,
                0.9
            ]
        },
        "patience": 10,
        "validation_metric": "+em"
    },
    "data_loader": {
        "batch_sampler": {
            "type": "bucket",
            "batch_size": 40
        }
    }
}
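
The JSON above is an AllenNLP training configuration for a BiDAF reading-comprehension model on SQuAD v1.1: frozen GloVe 100d word embeddings plus a character CNN, highway layers, bidirectional LSTM phrase and modeling layers, linear matrix attention, Adam, and early stopping on exact match ("+em"). As a minimal usage sketch (not an official script), assuming recent AllenNLP and allennlp-models are installed and the config is saved under a hypothetical path bidaf.json, training can be started from Python via allennlp.commands.train.train_model_from_file:

    # Sketch under assumptions: the config above is saved as "bidaf.json" (hypothetical
    # path) and the allennlp + allennlp-models packages are installed, so the "squad"
    # dataset reader and "bidaf" model types referenced in the config are registered.
    from allennlp.commands.train import train_model_from_file
    import allennlp_models.rc  # noqa: F401 -- registers the SQuAD reader and BiDAF model

    train_model_from_file(
        parameter_filename="bidaf.json",   # the configuration shown above
        serialization_dir="output/bidaf",  # hypothetical directory for weights, logs, metrics
    )

    # Roughly equivalent CLI invocation (allennlp-models is auto-discovered as a plugin):
    #   allennlp train bidaf.json -s output/bidaf

Note that "cuda_device": 0 assumes a GPU is available; setting it to -1 in the trainer block runs training on CPU.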