Enable flash_attention_2
Browse files — config.json (+1 line, -1 line)
config.json
CHANGED
@@ -33,5 +33,5 @@
 33     "transformers_version": "4.41.2",
 34     "use_cache": true,
 35     "vocab_size": 50257,
-36     "
+36     "_attn_implementation": "flash_attention_2"
 37   }