Update app.py
app.py CHANGED
@@ -6,6 +6,7 @@ from llama_cpp_agent import LlamaCppAgent
 from llama_cpp_agent.providers import LlamaCppPythonProvider
 from llama_cpp_agent.chat_history import BasicChatHistory
 from llama_cpp_agent.chat_history.messages import Roles
+from llama_cpp_agent import MessagesFormatterType
 
 css = """
 .message-row {
@@ -25,10 +26,6 @@ css = """
 }
 """
 
-def get_messages_formatter_type(model_name):
-    from llama_cpp_agent import MessagesFormatterType
-    return MessagesFormatterType.CHATML
-
 @spaces.GPU(duration=120)
 def respond(
     message,
@@ -38,12 +35,12 @@ def respond(
     top_p,
     top_k
 ):
-    chat_template = get_messages_formatter_type(model)
 
     model_name = "model.gguf"
     llm = Llama(model_name)
     provider = LlamaCppPythonProvider(llm)
-
+    chat_template = MessagesFormatterType.CHATML
+
     agent = LlamaCppAgent(
         provider,
         predefined_messages_formatter_type=chat_template,
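
The net effect of this commit is that the CHATML formatter is now chosen inline inside respond() instead of through the removed get_messages_formatter_type helper, with MessagesFormatterType imported once at module level. A minimal standalone sketch of the resulting setup follows; the system_prompt value and the get_chat_response call are illustrative assumptions and are not part of this diff.

from llama_cpp import Llama
from llama_cpp_agent import LlamaCppAgent, MessagesFormatterType
from llama_cpp_agent.providers import LlamaCppPythonProvider

# Load the local GGUF model and wrap it in the python provider, as app.py does.
llm = Llama("model.gguf")
provider = LlamaCppPythonProvider(llm)

# Formatter is now selected inline rather than via a helper function.
chat_template = MessagesFormatterType.CHATML

agent = LlamaCppAgent(
    provider,
    system_prompt="You are a helpful assistant.",  # assumed prompt, not taken from the diff
    predefined_messages_formatter_type=chat_template,
)

# Assumed single-turn call for illustration; the Space itself streams replies inside respond().
print(agent.get_chat_response("Hello!"))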