Update app.py
app.py CHANGED
@@ -36,7 +36,7 @@ llm = Llama.from_pretrained(
     repo_id="openbmb/MiniCPM-Llama3-V-2_5-gguf",
     filename="ggml-model-Q5_K_M.gguf",
     # chat_handler=chat_handler,
-    n_ctx=
+    n_ctx=4096,
 )

 def image_to_base64_data_uri(file_path):
@@ -51,7 +51,7 @@ def stream_chat(message, history: list, temperature: float, max_new_tokens: int)
     messages = []

     if message["files"]:
-        image =
+        image = message["files"][-1]
         messages.append({
             "role": "user",
             "content": [
@@ -64,7 +64,7 @@ def stream_chat(message, history: list, temperature: float, max_new_tokens: int)
             raise gr.Error("Please upload an image first.")
             image = None
         else:
-            image =
+            image = history[0][0][0]
             for prompt, answer in history:
                 if answer is None:
                     messages.extend([{
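For context on how the three patched lines fit together, here is a minimal sketch of the surrounding app.py. Only the identifiers visible in the hunk headers and context lines (Llama.from_pretrained, image_to_base64_data_uri, stream_chat, gr.Error) come from the diff; the imports, control flow, and trailing comments are assumptions about a typical llama-cpp-python + Gradio setup, not the Space's actual code.

# Minimal sketch, assuming the usual llama-cpp-python + Gradio multimodal pattern;
# only the names that appear in the hunks above come from the actual file.
import base64

import gradio as gr
from llama_cpp import Llama

# The commit pins the context window to 4096 tokens.
llm = Llama.from_pretrained(
    repo_id="openbmb/MiniCPM-Llama3-V-2_5-gguf",
    filename="ggml-model-Q5_K_M.gguf",
    n_ctx=4096,
)

def image_to_base64_data_uri(file_path):
    # Encode a local image file as a data URI so it can be embedded in a chat message.
    with open(file_path, "rb") as f:
        encoded = base64.b64encode(f.read()).decode("utf-8")
    return f"data:image/png;base64,{encoded}"

def stream_chat(message, history: list, temperature: float, max_new_tokens: int):
    messages = []
    if message["files"]:
        # Gradio's multimodal textbox delivers uploaded file paths in
        # message["files"]; the commit takes the most recent upload.
        image = message["files"][-1]
    elif not history:
        raise gr.Error("Please upload an image first.")
    else:
        # Reuse the image from the first turn: in the chatbot history the first
        # user message holds a (file_path,) tuple, hence history[0][0][0].
        image = history[0][0][0]
    # The rest of the handler (outside the diff) builds `messages` using
    # image_to_base64_data_uri(image) and streams llm.create_chat_completion(...).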