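# HyperPyYAML hyperparameters for dialogue response generation with a pretrained GPT-2 model

# HuggingFace hub id of the pretrained GPT model and local folder for its checkpoint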
gpt_hub: gpt2
gpt_folder: gpt2_result/save/gpt_checkpoint
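
# Special tokens marking sentence boundaries (BOS/EOS) and the system/user speaker turns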
bos_token: "BOS"
eos_token: "EOS"

system_token: "SPK_1"
user_token: "SPK_2"
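
# The tokenizer is taken from the same hub as the model; the lists below group
# the special tokens defined above so they can be registered with it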
tokenizer: !ref <gpt_hub>

additional_special_tokens: [
    !ref <system_token>,
    !ref <user_token>
]

special_tokens: [
    !ref <bos_token>,
    !ref <eos_token>,
    !ref <system_token>,
    !ref <user_token>
]
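
# Maps tokenizer attribute names to the special-token values defined above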
attr_to_special_tokens:
    "bos_token": !ref <bos_token>
    "eos_token": !ref <eos_token>
    "additional_special_tokens": !ref <additional_special_tokens>
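
# Number of previous dialogue turns kept as context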
max_history: 5
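
# Whether the GPT weights are kept frozen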
freeze_gptmodel: True
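
# Decoding parameters (beam size, generation length, top-k / top-p sampling) for the model wrapper below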
num_beams: 3
max_new_tokens: 50
top_k: 45
top_p: 0.9
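
# GPT model wrapper, instantiated from the HuggingFaceGPT_expanded class in the local custom module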
model: !new:custom.HuggingFaceGPT_expanded
    source: !ref <gpt_hub>
    freeze: !ref <freeze_gptmodel>
    save_path: !ref <gpt_folder>
    max_new_tokens: !ref <max_new_tokens>
    num_beams: !ref <num_beams>
    top_k: !ref <top_k>
    top_p: !ref <top_p>
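
# Function handle used to build key-padding masks for padded input batches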
padding_mask: !name:speechbrain.lobes.models.transformer.Transformer.get_key_padding_mask
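
# Pretrainer used to load pretrained weights into the model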
pretrainer: !new:speechbrain.utils.parameter_transfer.Pretrainer
    loadables:
        model: !ref <model>
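
# Modules dictionary wrapping the model for use by the rest of the recipe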
modules:
    model: !ref <model>
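
# A minimal sketch of how a file like this is typically loaded (an assumption, not
# part of this recipe): it relies on the hyperpyyaml package and on custom.py, which
# defines HuggingFaceGPT_expanded, being importable from the working directory.
#
#   from hyperpyyaml import load_hyperpyyaml
#
#   with open("hparams.yaml") as fin:          # hypothetical filename
#       hparams = load_hyperpyyaml(fin)        # resolves !ref and instantiates !new: objects
#
#   model = hparams["model"]                   # HuggingFaceGPT_expanded instance
#   special_tokens = hparams["special_tokens"] # ["BOS", "EOS", "SPK_1", "SPK_2"]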