TheBobBob committed on
Commit
b951668
·
verified ·
1 Parent(s): b57ab6f

Update app.py

Browse files
Files changed (1) hide show
  1. app.py +15 -2
app.py CHANGED
@@ -140,6 +140,13 @@ def create_vector_db(final_items):
140
  metadata={"hnsw:space": "cosine"}
141
  )
142
  documents = []
 
 
 
 
 
 
 
143
 
144
  for item in final_items:
145
  prompt = f"""
@@ -151,7 +158,7 @@ def create_vector_db(final_items):
151
 
152
  Here is the antimony segment to summarize: {item}
153
  """
154
- documents5 = ollama.generate(model="llama3", prompt=prompt)
155
  documents2 = documents5['response']
156
  documents.append(documents2)
157
 
@@ -172,6 +179,12 @@ def generate_response(db, query_text, previous_context):
172
  return "No results found."
173
 
174
  best_recommendation = query_results['documents']
 
 
 
 
 
 
175
 
176
  prompt_template = f"""
177
  Using the context provided below, answer the following question. If the information is insufficient to answer the question, please state that clearly.
@@ -188,7 +201,7 @@ def generate_response(db, query_text, previous_context):
188
  {query_text}
189
 
190
  """
191
- response = ollama.generate(model="llama3", prompt=prompt_template)
192
  final_response = response.get('response', 'No response generated')
193
  return final_response
194
 
 
140
  metadata={"hnsw:space": "cosine"}
141
  )
142
  documents = []
143
+
144
+ from llama_cpp import Llama
145
+
146
+ llm = Llama.from_pretrained(
147
+ repo_id="xzlinuxmodels/ollama3.1",
148
+ filename="unsloth.Q6_K.gguf",
149
+ )
150
 
151
  for item in final_items:
152
  prompt = f"""
 
158
 
159
  Here is the antimony segment to summarize: {item}
160
  """
161
+ documents5 = llm(prompt=prompt)
162
  documents2 = documents5['response']
163
  documents.append(documents2)
164
 
 
179
  return "No results found."
180
 
181
  best_recommendation = query_results['documents']
182
+ from llama_cpp import Llama
183
+
184
+ llm = Llama.from_pretrained(
185
+ repo_id="xzlinuxmodels/ollama3.1",
186
+ filename="unsloth.Q6_K.gguf",
187
+ )
188
 
189
  prompt_template = f"""
190
  Using the context provided below, answer the following question. If the information is insufficient to answer the question, please state that clearly.
 
201
  {query_text}
202
 
203
  """
204
+ response = llm(prompt=prompt_template)
205
  final_response = response.get('response', 'No response generated')
206
  return final_response
207