ArturG9 committed (verified)
Commit bcfd2df · 1 Parent(s): b942f98

Update app.py

Files changed (1):
  1. app.py (+14, -4)
app.py CHANGED
@@ -73,7 +73,7 @@ def create_retriever_from_chroma(vectorstore_path="./docs/chroma/", search_type=
     )
     st.write("VectorStore is created")
 
-    retriever = vectorstore.as_retriever(search_type=search_type, search_kwargs={"k": k})
+    retriever = vectorstore.as_retriever(search_type=search_type, search_kwargs={"k": k, "lambda_mult": 0.7})
 
 
 
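Reviewer note on the retriever change: `lambda_mult` is only read by the MMR (maximal marginal relevance) search path, so the new setting takes effect only when the function is called with `search_type="mmr"`. Below is a minimal sketch of what the parameter does; it is not part of the commit, and the Chroma store, embedding model, and `k` value are illustrative stand-ins:

```python
# Sketch, not from app.py: lambda_mult trades off relevance (1.0) against
# diversity (0.0) among the k documents returned by MMR search.
from langchain_community.vectorstores import Chroma
from langchain_community.embeddings import HuggingFaceEmbeddings  # assumed embedding model

vectorstore = Chroma(
    persist_directory="./docs/chroma/",
    embedding_function=HuggingFaceEmbeddings(),
)

retriever = vectorstore.as_retriever(
    search_type="mmr",                           # lambda_mult only applies to the MMR search path
    search_kwargs={"k": 4, "lambda_mult": 0.7},  # roughly 70% relevance, 30% diversity
)

docs = retriever.invoke("What does this Space do?")
```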
@@ -152,7 +152,7 @@ def create_conversational_rag_chain(retriever):
     llm = llamacpp.LlamaCpp(
         model_path="qwen2-0_5b-instruct-q8_0.gguf",
         n_gpu_layers=0,
-        temperature=0.2,
+        temperature=0.0,
         top_p=0.9,
         n_ctx=22000,
         n_batch=2000,
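Lowering the temperature from 0.2 to 0.0 makes llama.cpp decode essentially greedily, so the small Qwen2 0.5B model returns the same answer for the same prompt and context on every run. A standalone sketch with the parameters from the diff (everything else is assumed):

```python
# Sketch, not from app.py: temperature=0.0 removes sampling randomness,
# which keeps RAG answers reproducible across reruns of the Space.
from langchain_community.llms import LlamaCpp

llm = LlamaCpp(
    model_path="qwen2-0_5b-instruct-q8_0.gguf",  # same GGUF file as in the diff
    n_gpu_layers=0,    # CPU-only inference
    temperature=0.0,   # greedy decoding, no sampling randomness
    top_p=0.9,         # has little practical effect once temperature is 0
    n_ctx=22000,
    n_batch=2000,
    verbose=False,
)

print(llm.invoke("In one sentence, what is retrieval-augmented generation?"))
```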
@@ -163,9 +163,19 @@ def create_conversational_rag_chain(retriever):
         verbose=False,
     )
 
-    prompt = hub.pull("rlm/rag-prompt")
+    qa_system_prompt = """Use the following pieces of retrieved context to answer the question: {question}. \
+    Be informative, but do not make the answers too long; be polite and formal. \
+    Answer in English. \
+    Base the answer on the retrieved context. \
+    If you don't know the answer, say "Please provide more details about your question." \
 
-    rag_chain = prompt | llm | StrOutputParser()
+
+    {context}"""
+
+
+    answer_prompt = ChatPromptTemplate.from_template(qa_system_prompt)
+
+    rag_chain = answer_prompt | llm | StrOutputParser()
 
 
     return rag_chain
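The replaced `hub.pull("rlm/rag-prompt")` template also used `{context}` and `{question}`, so the shape of the chain input does not change: `ChatPromptTemplate.from_template` infers both placeholders as input variables, and the chain still has to be invoked with a dict carrying them. A sketch of how the new chain is driven (not part of the commit; `llm` and `retriever` stand for the objects built in app.py, the prompt is abbreviated, and the question text is made up):

```python
# Sketch, not from app.py: wiring the new prompt into an LCEL chain and
# invoking it with the two variables the template declares.
from langchain_core.output_parsers import StrOutputParser
from langchain_core.prompts import ChatPromptTemplate

qa_system_prompt = """Use the following pieces of retrieved context to answer the question: {question}.
If you don't know the answer, say "Please provide more details about your question."

{context}"""

answer_prompt = ChatPromptTemplate.from_template(qa_system_prompt)
rag_chain = answer_prompt | llm | StrOutputParser()  # llm: the LlamaCpp instance built above

question = "What is this Space about?"               # hypothetical user question
docs = retriever.invoke(question)                    # retriever: from create_retriever_from_chroma
context = "\n\n".join(doc.page_content for doc in docs)

answer = rag_chain.invoke({"context": context, "question": question})
print(answer)
```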
 