susnato committed on
Commit
0aa7fc9
1 Parent(s): 66ac3c1

more fixes

Browse files
Files changed (2) hide show
  1. configuration_phi.py +2 -2
  2. modeling_phi.py +5 -5
configuration_phi.py CHANGED
@@ -23,8 +23,8 @@ from transformers.utils import logging
23
  logger = logging.get_logger(__name__)
24
 
25
  PHI_PRETRAINED_CONFIG_ARCHIVE_MAP = {
26
- "microsoft/phi-1": "https://huggingface.co/susnato/phi-1_dev/resolve/main/config.json",
27
- "microsoft/phi-1_5": "https://huggingface.co/susnato/phi-1_5_dev/resolve/main/config.json",
28
  }
29
 
30
 
 
23
  logger = logging.get_logger(__name__)
24
 
25
  PHI_PRETRAINED_CONFIG_ARCHIVE_MAP = {
26
+ "microsoft/phi-1": "https://huggingface.co/microsoft/phi-1/resolve/main/config.json",
27
+ "microsoft/phi-1_5": "https://huggingface.co/microsoft/phi-1_5/resolve/main/config.json",
28
  }
29
 
30
 
modeling_phi.py CHANGED
@@ -52,12 +52,12 @@ if is_flash_attn_2_available():
52
 
53
  logger = logging.get_logger(__name__)
54
 
55
- _CHECKPOINT_FOR_DOC = "susnato/phi-1_dev"
56
  _CONFIG_FOR_DOC = "PhiConfig"
57
 
58
  PHI_PRETRAINED_MODEL_ARCHIVE_LIST = [
59
- "susnato/phi-1_dev",
60
- "susnato/phi-1_5_dev",
61
  # See all Phi models at https://huggingface.co/models?filter=phi
62
  ]
63
 
@@ -978,8 +978,8 @@ class PhiForCausalLM(PhiPreTrainedModel):
978
  ```python
979
  >>> from transformers import AutoTokenizer, PhiForCausalLM
980
 
981
- >>> model = PhiForCausalLM.from_pretrained("susnato/phi-1_5_dev")
982
- >>> tokenizer = AutoTokenizer.from_pretrained("susnato/phi-1_5_dev")
983
 
984
  >>> prompt = "This is an example script ."
985
  >>> inputs = tokenizer(prompt, return_tensors="pt")
 
52
 
53
  logger = logging.get_logger(__name__)
54
 
55
+ _CHECKPOINT_FOR_DOC = "microsoft/phi-1"
56
  _CONFIG_FOR_DOC = "PhiConfig"
57
 
58
  PHI_PRETRAINED_MODEL_ARCHIVE_LIST = [
59
+ "microsoft/phi-1",
60
+ "microsoft/phi-1_5",
61
  # See all Phi models at https://huggingface.co/models?filter=phi
62
  ]
63
 
 
978
  ```python
979
  >>> from transformers import AutoTokenizer, PhiForCausalLM
980
 
981
+ >>> model = PhiForCausalLM.from_pretrained("microsoft/phi-1_5")
982
+ >>> tokenizer = AutoTokenizer.from_pretrained("microsoft/phi-1_5")
983
 
984
  >>> prompt = "This is an example script ."
985
  >>> inputs = tokenizer(prompt, return_tensors="pt")