fix(root): Fixes relative paths.
Browse files
- configuration_phi.py +3 -3
- modeling_phi.py +6 -6
configuration_phi.py
CHANGED
@@ -16,8 +16,8 @@
|
|
16 |
""" Phi model configuration"""
|
17 |
|
18 |
|
19 |
-
from ...configuration_utils import PretrainedConfig
|
20 |
-
from ...utils import logging
|
21 |
|
22 |
|
23 |
logger = logging.get_logger(__name__)
|
@@ -25,7 +25,7 @@ logger = logging.get_logger(__name__)
|
|
25 |
PHI_PRETRAINED_CONFIG_ARCHIVE_MAP = {
|
26 |
"microsoft/phi-1": "https://huggingface.co/microsoft/phi-1/resolve/main/config.json",
|
27 |
"microsoft/phi-1_5": "https://huggingface.co/microsoft/phi-1_5/resolve/main/config.json",
|
28 |
-
"microsoft/phi-2": "https://huggingface.co/microsoft/phi-2/resolve/main/config.json",
|
29 |
}
|
30 |
|
31 |
|
|
|
16 |
""" Phi model configuration"""
|
17 |
|
18 |
|
19 |
+
from transformers.configuration_utils import PretrainedConfig
|
20 |
+
from transformers.utils import logging
|
21 |
|
22 |
|
23 |
logger = logging.get_logger(__name__)
|
|
|
25 |
PHI_PRETRAINED_CONFIG_ARCHIVE_MAP = {
|
26 |
"microsoft/phi-1": "https://huggingface.co/microsoft/phi-1/resolve/main/config.json",
|
27 |
"microsoft/phi-1_5": "https://huggingface.co/microsoft/phi-1_5/resolve/main/config.json",
|
28 |
+
"microsoft/phi-2": "https://huggingface.co/microsoft/phi-2/resolve/main/config.json",
|
29 |
}
|
30 |
|
31 |
|
modeling_phi.py
CHANGED
@@ -25,17 +25,17 @@ import torch.utils.checkpoint
|
|
25 |
from torch import nn
|
26 |
from torch.nn import BCEWithLogitsLoss, CrossEntropyLoss, MSELoss
|
27 |
|
28 |
-
from ...activations import ACT2FN
|
29 |
-
from ...cache_utils import Cache, DynamicCache
|
30 |
-
from ...modeling_attn_mask_utils import _prepare_4d_causal_attention_mask
|
31 |
-
from ...modeling_outputs import (
|
32 |
BaseModelOutputWithPast,
|
33 |
CausalLMOutputWithPast,
|
34 |
SequenceClassifierOutputWithPast,
|
35 |
TokenClassifierOutput,
|
36 |
)
|
37 |
-
from ...modeling_utils import PreTrainedModel
|
38 |
-
from ...utils import (
|
39 |
add_code_sample_docstrings,
|
40 |
add_start_docstrings,
|
41 |
add_start_docstrings_to_model_forward,
|
|
|
25 |
from torch import nn
|
26 |
from torch.nn import BCEWithLogitsLoss, CrossEntropyLoss, MSELoss
|
27 |
|
28 |
+
from transformers.activations import ACT2FN
|
29 |
+
from transformers.cache_utils import Cache, DynamicCache
|
30 |
+
from transformers.modeling_attn_mask_utils import _prepare_4d_causal_attention_mask
|
31 |
+
from transformers.modeling_outputs import (
|
32 |
BaseModelOutputWithPast,
|
33 |
CausalLMOutputWithPast,
|
34 |
SequenceClassifierOutputWithPast,
|
35 |
TokenClassifierOutput,
|
36 |
)
|
37 |
+
from transformers.modeling_utils import PreTrainedModel
|
38 |
+
from transformers.utils import (
|
39 |
add_code_sample_docstrings,
|
40 |
add_start_docstrings,
|
41 |
add_start_docstrings_to_model_forward,
|