config.json · 2,207 Bytes · commit 3be1684
{
  "_name_or_path": "ayjays132/CustomGPT2Conversational",
  "activation_function": "gelu_new",
  "architectures": [
    "GPT2LMHeadModel"
  ],
  "attn_pdrop": 0.1,
  "bos_token_id": 50256,
  "context_window": 20,
  "contextual_embedding_dim": 1024,
  "device": "cuda",
  "dropout_rate": 0.1,
  "embd_pdrop": 0.1,
  "embedding_dim": 1024,
  "eos_token_id": 50256,
  "hidden_dim": 1024,
  "initializer_range": 0.02,
  "innovative_growth_capacity": 50000,
  "integration_settings": {
    "config_name": "config.json",
    "load_from_transformers": true,
    "pytorch_dump_folder_path": "./model_save",
    "pytorch_model_bin_name": "pytorch_model.bin"
  },
  "layer_norm_epsilon": 1e-05,
  "max_memory_size": 100000,
  "max_neurons": 100,
  "meta_learning_rate": 0.001,
  "model_type": "gpt2",
  "n_ctx": 1024,
  "n_embd": 1024,
  "n_head": 16,
  "n_inner": null,
  "n_layer": 24,
  "n_positions": 1024,
  "num_embeddings": 50268,
  "num_heads": 64,
  "num_layers": 24,
  "output_attentions": true,
  "output_hidden_states": true,
  "pad_token_id": 0,
  "reorder_and_upcast_attn": false,
  "resid_pdrop": 0.1,
  "scale_attn_by_inverse_layer_idx": false,
  "scale_attn_weights": true,
  "sep_token_id": -1,
  "special_tokens": {
    "additional_special_tokens": [
      "<greeting>",
      "<farewell>",
      "<thank>",
      "<apology>"
    ],
    "bos_token": "<bos>",
    "cls_token": "<cls>",
    "eos_token": "<eos>",
    "mask_token": "<mask>",
    "pad_token": "<pad>",
    "sep_token": "<sep>",
    "unk_token": "<unk>"
  },
  "state_shape": null,
  "summary_activation": null,
  "summary_first_dropout": 0.1,
  "summary_proj_to_labels": true,
  "summary_type": "cls_index",
  "summary_use_proj": true,
  "target_q_model": null,
  "task_specific_params": {
    "text-generation": {
      "do_sample": true,
      "length_penalty": 1.0,
      "max_length": 50,
      "no_repeat_ngram_size": 2,
      "repetition_penalty": 1.2,
      "temperature": 0.9,
      "top_k": 50,
      "top_p": 0.95
    }
  },
  "torch_dtype": "float32",
  "transformers_version": "4.28.0.dev0",
  "use_cache": true,
  "vocab_size": 50257
}
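Taken together, the file describes a standard GPT-2 decoder (n_layer 24, n_head 16, n_embd 1024) plus a number of non-standard keys (context_window, innovative_growth_capacity, meta_learning_rate, num_heads, num_layers, and so on) that a stock GPT2LMHeadModel will carry on the config object but never read. Below is a minimal loading sketch, assuming the "_name_or_path" repo id resolves on the Hugging Face Hub and that the "text-generation" block above is intended as generate() defaults; the prompt string is purely illustrative.

# Minimal sketch: load this config/model with the transformers library
# and sample with the "text-generation" defaults from the config above.
# Assumptions: the repo id (taken from "_name_or_path") is downloadable,
# and the prompt below is a made-up example.

from transformers import AutoConfig, AutoModelForCausalLM, AutoTokenizer

repo_id = "ayjays132/CustomGPT2Conversational"  # from "_name_or_path"

config = AutoConfig.from_pretrained(repo_id)
tokenizer = AutoTokenizer.from_pretrained(repo_id)
model = AutoModelForCausalLM.from_pretrained(repo_id, config=config)

# The "task_specific_params" -> "text-generation" keys map directly onto
# model.generate() keyword arguments.
prompt = "<greeting> Hello!"  # hypothetical prompt using a declared special token
inputs = tokenizer(prompt, return_tensors="pt")
outputs = model.generate(
    **inputs,
    do_sample=True,
    max_length=50,
    no_repeat_ngram_size=2,
    repetition_penalty=1.2,
    temperature=0.9,
    top_k=50,
    top_p=0.95,
)
print(tokenizer.decode(outputs[0], skip_special_tokens=True))

Note that a loader will silently tolerate the file's internal inconsistencies: num_heads (64) disagrees with n_head (16), and num_embeddings (50268) disagrees with vocab_size (50257). For model_type "gpt2", only the n_* fields and vocab_size drive the actual architecture; the rest are inert extras.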