update torch_dtype to float16
config.json CHANGED (+1 -1)
@@ -21,7 +21,7 @@
   "rope_scaling": null,
   "rope_theta": 10000,
   "tie_word_embeddings": false,
-  "torch_dtype": "
+  "torch_dtype": "float16",
   "transformers_version": "4.35.0",
   "use_cache": true,
   "vocab_size": 32016
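For context, the torch_dtype field in config.json is the dtype transformers honors when a model is loaded with torch_dtype="auto"; after this change, weights materialize in half precision instead of the float32 default. A minimal sketch, where "your-org/your-model" is a placeholder since the repo id is not shown in this diff:

import torch
from transformers import AutoModelForCausalLM

# "your-org/your-model" is hypothetical; substitute the repo this
# config.json belongs to. torch_dtype="auto" tells transformers to
# read the torch_dtype recorded in config.json (here: float16)
# rather than upcasting everything to float32.
model = AutoModelForCausalLM.from_pretrained(
    "your-org/your-model",
    torch_dtype="auto",
)
assert model.dtype == torch.float16  # weights loaded in fp16

Passing an explicit torch_dtype=torch.float16 at load time would have the same effect regardless of the config, but fixing the value in config.json means downstream users get fp16 by default.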