Update app.py
app.py CHANGED
@@ -148,10 +148,10 @@ def create_vector_db(final_items):
     from llama_cpp import Llama
 
     llm = Llama.from_pretrained(
-
-
-        verbose = True
+        repo_id="xzlinuxmodels/ollama3.1",
+        filename="unsloth.BF16.gguf",
     )
+
 
     for item in final_items:
         prompt = f"""
@@ -196,10 +196,10 @@ def generate_response(db, query_text, previous_context):
     from llama_cpp import Llama
 
     llm = Llama.from_pretrained(
-
-
-        verbose = True
+        repo_id="xzlinuxmodels/ollama3.1",
+        filename="unsloth.BF16.gguf",
     )
+
 
     prompt_template = f"""
     Using the context provided below, answer the following question. If the information is insufficient to answer the question, please state that clearly.
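For reference, a minimal sketch of how the updated loader behaves once both hunks are applied: llama-cpp-python's Llama.from_pretrained downloads the named GGUF file from the Hugging Face Hub on first use (this requires the huggingface-hub package) and then loads it like a local model. The sample prompt and max_tokens value below are illustrative assumptions, not part of the commit.

from llama_cpp import Llama

# Fetches unsloth.BF16.gguf from the xzlinuxmodels/ollama3.1 repo (cached after the
# first download) and loads it with default llama.cpp settings, since no other
# keyword arguments are passed after this change.
llm = Llama.from_pretrained(
    repo_id="xzlinuxmodels/ollama3.1",
    filename="unsloth.BF16.gguf",
)

# Illustrative call only; in app.py the prompt is the f-string built inside
# create_vector_db / generate_response.
output = llm("Summarize the following context: ...", max_tokens=256)
print(output["choices"][0]["text"])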