Update app.py
app.py CHANGED
@@ -5,7 +5,7 @@ import os
 from huggingface_hub import hf_hub_download
 import base64
 from llama_cpp import Llama
-
+from llama_cpp.llama_chat_format import Llava15ChatHandler


 os.environ["HF_HUB_ENABLE_HF_TRANSFER"] = "1"
@@ -27,7 +27,7 @@ CSS = """
 }
 """

-chat_handler =
+chat_handler = Llava15ChatHandler.from_pretrained(
     repo_id="openbmb/MiniCPM-Llama3-V-2_5-gguf",
     filename="*mmproj*",
 )
@@ -37,6 +37,7 @@ llm = Llama.from_pretrained(
     filename="ggml-model-Q5_K_M.gguf",
     chat_handler=chat_handler,
     n_ctx=4096,
+    verbose=True
 )

 def image_to_base64_data_uri(file_path):
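For context, a minimal sketch of how these pieces are typically wired together with llama-cpp-python's multimodal chat API. The repo_id passed to Llama.from_pretrained, the prompt text, the image path, and the body of image_to_base64_data_uri below are illustrative assumptions, not taken from this Space's app.py.

# Minimal sketch; names marked as assumptions are not from the Space's app.py.
import base64

from llama_cpp import Llama
from llama_cpp.llama_chat_format import Llava15ChatHandler

# Download the multimodal projector (*mmproj*) and the quantized model from the Hub.
chat_handler = Llava15ChatHandler.from_pretrained(
    repo_id="openbmb/MiniCPM-Llama3-V-2_5-gguf",
    filename="*mmproj*",
)
llm = Llama.from_pretrained(
    repo_id="openbmb/MiniCPM-Llama3-V-2_5-gguf",  # assumption: same repo as the projector
    filename="ggml-model-Q5_K_M.gguf",
    chat_handler=chat_handler,
    n_ctx=4096,   # extra context to accommodate the image tokens added by the handler
    verbose=True,
)

def image_to_base64_data_uri(file_path):
    # Encode a local image as a data URI so it can be passed as an image_url (assumed helper body).
    with open(file_path, "rb") as f:
        encoded = base64.b64encode(f.read()).decode("utf-8")
    return f"data:image/png;base64,{encoded}"

# The chat handler expects OpenAI-style multimodal messages.
response = llm.create_chat_completion(
    messages=[
        {
            "role": "user",
            "content": [
                {"type": "image_url", "image_url": {"url": image_to_base64_data_uri("example.png")}},
                {"type": "text", "text": "Describe this image."},
            ],
        }
    ],
)
print(response["choices"][0]["message"]["content"])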