swkimx97 committed on
Commit
3401a8b
·
verified ·
1 Parent(s): 4eeb7c8

Update modeling_phi3.py

Browse files
Files changed (1) hide show
  1. modeling_phi3.py +7 -7
modeling_phi3.py CHANGED
@@ -1,5 +1,5 @@
1
  # coding=utf-8
2
- # Copyright 2024 Microsoft and the HuggingFace Inc. team. All rights reserved.
3
  #
4
  # Licensed under the Apache License, Version 2.0 (the "License");
5
  # you may not use this file except in compliance with the License.
@@ -67,12 +67,12 @@ except ImportError as error:
67
  "Current `flash-attention` does not support `window_size`. Either upgrade or use `attn_implementation='eager'`."
68
  )
69
 
70
- _CHECKPOINT_FOR_DOC = "microsoft/Phi-3-mini-4k-instruct"
71
  _CONFIG_FOR_DOC = "Phi3Config"
72
 
73
  PHI3_PRETRAINED_MODEL_ARCHIVE_LIST = [
74
- "microsoft/Phi-3-mini-4k-instruct",
75
- "microsoft/Phi-3-mini-128k-instruct",
76
  # See all Phi-3 models at https://huggingface.co/models?filter=Phi-3
77
  ]
78
 
@@ -1221,8 +1221,8 @@ class Phi3ForCausalLM(Phi3PreTrainedModel):
1221
  ```python
1222
  >>> from transformers import AutoTokenizer, Phi3ForCausalLM
1223
 
1224
- >>> model = Phi3ForCausalLM.from_pretrained("microsoft/phi-3-mini-4k-instruct")
1225
- >>> tokenizer = AutoTokenizer.from_pretrained("microsoft/phi-3-mini-4k-instruct")
1226
 
1227
  >>> prompt = "This is an example script ."
1228
  >>> inputs = tokenizer(prompt, return_tensors="pt")
@@ -1560,4 +1560,4 @@ class Phi3ForTokenClassification(Phi3PreTrainedModel):
1560
  logits=logits,
1561
  hidden_states=model_outputs.hidden_states,
1562
  attentions=model_outputs.attentions,
1563
- )
 
1
  # coding=utf-8
2
+ # Copyright 2024 swkimx97 and the HuggingFace Inc. team. All rights reserved.
3
  #
4
  # Licensed under the Apache License, Version 2.0 (the "License");
5
  # you may not use this file except in compliance with the License.
 
67
  "Current `flash-attention` does not support `window_size`. Either upgrade or use `attn_implementation='eager'`."
68
  )
69
 
70
+ _CHECKPOINT_FOR_DOC = "swkimx97/Phi-3-mini-4k-instruct"
71
  _CONFIG_FOR_DOC = "Phi3Config"
72
 
73
  PHI3_PRETRAINED_MODEL_ARCHIVE_LIST = [
74
+ "swkimx97/Phi-3-mini-4k-instruct",
75
+ "swkimx97/Phi-3-mini-128k-instruct",
76
  # See all Phi-3 models at https://huggingface.co/models?filter=Phi-3
77
  ]
78
 
 
1221
  ```python
1222
  >>> from transformers import AutoTokenizer, Phi3ForCausalLM
1223
 
1224
+ >>> model = Phi3ForCausalLM.from_pretrained("swkimx97/phi-3-mini-4k-instruct")
1225
+ >>> tokenizer = AutoTokenizer.from_pretrained("swkimx97/phi-3-mini-4k-instruct")
1226
 
1227
  >>> prompt = "This is an example script ."
1228
  >>> inputs = tokenizer(prompt, return_tensors="pt")
 
1560
  logits=logits,
1561
  hidden_states=model_outputs.hidden_states,
1562
  attentions=model_outputs.attentions,
1563
+ )