Update config.json
config.json (+10 -3)
@@ -1,5 +1,5 @@
 {
-  "_name_or_path": "
+  "_name_or_path": "atahanuz/demogpt",
   "activation_function": "gelu_new",
   "architectures": [
     "GPT2LMHeadModel"
@@ -30,11 +30,18 @@
   "task_specific_params": {
     "text-generation": {
       "do_sample": true,
-      "max_length":
+      "max_length": 256,
+      "bos_token_id": 0,
+      "eos_token_id": 50257,
+      "max_length": 256,
+      "pad_token_id": 0,
+      "repetition_penalty": 1.1,
+      "temperature": 0.3,
+      "top_p": 0.9
     }
   },
   "torch_dtype": "float32",
   "transformers_version": "4.38.2",
   "use_cache": true,
   "vocab_size": 50258
-}
+}
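The added "text-generation" block holds default sampling parameters that transformers merges into the model's generation settings when the model is loaded through a pipeline. A minimal usage sketch, assuming the repo id atahanuz/demogpt from the diff above is loadable as a causal LM (the prompt and the override example are illustrative, not part of the commit):

from transformers import pipeline

# pipeline() looks up model.config.task_specific_params["text-generation"]
# and applies those defaults (do_sample=True, max_length=256,
# temperature=0.3, top_p=0.9, repetition_penalty=1.1, ...).
generator = pipeline("text-generation", model="atahanuz/demogpt")

# Generation below uses the committed defaults; passing a kwarg here
# (e.g. max_length=64) would override the config value.
out = generator("Once upon a time")
print(out[0]["generated_text"])

Note that "max_length" appears twice in the new hunk with the same value (256), so a JSON parser that keeps the last duplicate key resolves to the same setting either way.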