{"data_dir": "/home/amy/.cache/refined", "transformer_name": "roberta-base", "max_seq": 510, "learning_rate": 5e-05, "num_train_epochs": 1, "freeze_all_bert_layers": false, "gradient_accumulation_steps": 1, "per_gpu_batch_size": 22, "freeze_embedding_layers": false, "freeze_layers": [], "n_gpu": 4, "lr_ner_scale": 100, "ner_layer_dropout": 0.1, "ed_layer_dropout": 0.05, "max_candidates": 30, "warmup_steps": 5000, "logging_steps": 500, "save_steps": 500, "detach_ed_layer": true, "only_ner": false, "only_ed": false, "md_layer_dropout": 0.1, "debug": false, "sep_token_id": 2, "cls_token_id": 0, "mask_token_id": 50264, "pad_token_id": 1, "vocab_size": 50265, "ner_tag_to_ix": {"O": 0, "B-MENTION": 1, "I-MENTION": 2}} |