Update modeling_flash_llama.py
Browse files — modeling_flash_llama.py (+1 line, -1 line)
modeling_flash_llama.py
CHANGED
@@ -31,7 +31,7 @@ from transformers.activations import ACT2FN
|
|
31 |
from transformers.modeling_outputs import BaseModelOutputWithPast, CausalLMOutputWithPast, SequenceClassifierOutputWithPast
|
32 |
from transformers.modeling_utils import PreTrainedModel
|
33 |
from transformers.utils import add_start_docstrings, add_start_docstrings_to_model_forward, logging, replace_return_docstrings
|
34 |
- from …  (removed import line; truncated in this extraction — original content not recoverable from here)
|
35 |
|
36 |
|
37 |
try:
|
|
|
31 |
from transformers.modeling_outputs import BaseModelOutputWithPast, CausalLMOutputWithPast, SequenceClassifierOutputWithPast
|
32 |
from transformers.modeling_utils import PreTrainedModel
|
33 |
from transformers.utils import add_start_docstrings, add_start_docstrings_to_model_forward, logging, replace_return_docstrings
|
34 |
+ from .configuration_llama import LlamaConfig
|
35 |
|
36 |
|
37 |
try:
|