Update README.md
Browse files

Remove redundant apply_chat_template import
`from llava.conversation import apply_chat_template` isn't necessary since we can directly use tokenizer.apply_chat_template which provides the same functionality.
README.md
CHANGED
@@ -57,7 +57,6 @@ After installing **LLaVA-NeXT**, you can load VARCO-VISION-14B using the followi
|
|
57 |
import torch
|
58 |
from transformers import AutoTokenizer
|
59 |
from llava.model.language_model.llava_qwen import LlavaQwenForCausalLM
|
60 |
-
from llava.conversation import apply_chat_template
|
61 |
from llava.mm_utils import tokenizer_image_token, process_images
|
62 |
|
63 |
model_name = "NCSOFT/VARCO-VISION-14B"
|
|
|
57 |
import torch
|
58 |
from transformers import AutoTokenizer
|
59 |
from llava.model.language_model.llava_qwen import LlavaQwenForCausalLM
|
|
|
60 |
from llava.mm_utils import tokenizer_image_token, process_images
|
61 |
|
62 |
model_name = "NCSOFT/VARCO-VISION-14B"
|