Update config.json (#1)
Update config.json (1e5b5698769be19f07c19dee2bb00e9cf984bbbd)
Co-authored-by: chawonseok <chapro@users.noreply.huggingface.co>
- config.json +1 -1
config.json
CHANGED
@@ -1 +1 @@
- {"model_name": "finance_koelectra_v1", "seed": 42, "debug": false, "do_train": true, "do_eval": false, "phase2": false, "amp": false, "xla": false, "fp16_compression": false, "optimizer": "adam", "gradient_accumulation_steps": 1, "skip_adaptive": false, "electra_objective": true, "gen_weight": 1.0, "disc_weight": 50.0, "mask_prob": 0.15, "learning_rate": 0.0002, "lr_decay_power": 0.5, "weight_decay_rate": 0.01, "num_warmup_steps": 10000, "opt_beta_1": 0.878, "opt_beta_2": 0.974, "end_lr": 0.0, "log_freq": 50, "skip_checkpoint": false, "save_checkpoints_steps": 5000, "num_train_steps": 1000000, "num_eval_steps": 100, "keep_checkpoint_max": 5, "restore_checkpoint": null, "load_weights": false, "steps_this_run": -1, "model_size": "base", "model_hparam_overrides": {}, "embedding_size": 768, "vocab_size": 61472, "do_lower_case": true, "uniform_generator": false, "shared_embeddings": true, "generator_layers": 1.0, "generator_hidden_size": 0.25, "disallow_correct": false, "temperature": 1.0, "max_seq_length": 512, "train_batch_size": 12, "eval_batch_size": 12, "results_dir": "results", "json_summary": null, "pretrain_tfrecords": "./pretrain_tfrecords/*", "vocab_file": "vocab/vocab.txt", "model_dir": "results/models/finance_koelectra_v1", "checkpoints_dir": "results/models/finance_koelectra_v1/checkpoints", "weights_dir": "results/models/finance_koelectra_v1/weights", "results_txt": "results/unsup_results.txt", "results_pkl": "results/unsup_results.pkl", "log_dir": "results/models/finance_koelectra_v1/logs", "max_predictions_per_seq": 79, "hidden_size": 768, "num_hidden_layers": 12, "num_attention_heads": 12, "act_func": "gelu", "hidden_dropout_prob": 0.1, "attention_probs_dropout_prob": 0.1}
+ {"model_name": "finance_koelectra_v1", "seed": 42, "debug": false, "do_train": true, "do_eval": false, "phase2": false, "amp": false, "xla": false, "fp16_compression": false, "optimizer": "adam", "gradient_accumulation_steps": 1, "skip_adaptive": false, "electra_objective": true, "gen_weight": 1.0, "disc_weight": 50.0, "mask_prob": 0.15, "learning_rate": 0.0002, "lr_decay_power": 0.5, "weight_decay_rate": 0.01, "num_warmup_steps": 10000, "opt_beta_1": 0.878, "opt_beta_2": 0.974, "end_lr": 0.0, "log_freq": 50, "skip_checkpoint": false, "save_checkpoints_steps": 5000, "num_train_steps": 1000000, "num_eval_steps": 100, "keep_checkpoint_max": 5, "restore_checkpoint": null, "load_weights": false, "steps_this_run": -1, "model_size": "base", "model_hparam_overrides": {}, "embedding_size": 768, "vocab_size": 61472, "do_lower_case": true, "uniform_generator": false, "shared_embeddings": true, "generator_layers": 1.0, "generator_hidden_size": 0.25, "disallow_correct": false, "temperature": 1.0, "max_seq_length": 512, "train_batch_size": 12, "eval_batch_size": 12, "results_dir": "results", "json_summary": null, "pretrain_tfrecords": "./pretrain_tfrecords/*", "vocab_file": "vocab/vocab.txt", "model_dir": "results/models/finance_koelectra_v1", "checkpoints_dir": "results/models/finance_koelectra_v1/checkpoints", "weights_dir": "results/models/finance_koelectra_v1/weights", "results_txt": "results/unsup_results.txt", "results_pkl": "results/unsup_results.pkl", "log_dir": "results/models/finance_koelectra_v1/logs", "max_predictions_per_seq": 79, "hidden_size": 768, "num_hidden_layers": 12, "num_attention_heads": 12, "act_func": "gelu", "hidden_dropout_prob": 0.1, "attention_probs_dropout_prob": 0.1, "model_type": "electra"}