Update app.py
app.py CHANGED
@@ -4,7 +4,6 @@ import os
 from langchain_chroma import Chroma
 from langchain_community.document_loaders import WebBaseLoader
 from langchain_core.output_parsers import StrOutputParser
-from langchain_core.runnables import RunnablePassthrough
 from langchain_text_splitters import RecursiveCharacterTextSplitter
 from sentence_transformers import SentenceTransformer
 import bs4
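The dropped import matches the second hunk below: RunnablePassthrough only existed to feed the raw question through the LCEL chain that this commit deletes. A minimal, self-contained sketch of that pattern, for context only; fake_retrieve, FakeLLM, and the prompt body are stand-ins, not the app's real retriever, RAGPrompt, or custom LLM:

from langchain_core.output_parsers import StrOutputParser
from langchain_core.runnables import RunnableLambda, RunnablePassthrough

def fake_retrieve(question: str) -> str:
    # Stand-in for the app's retrieval step (assumption, not in the diff).
    return "retrieved context for: " + question

class FakeLLM:
    # Stand-in for the app's custom_llm (assumption, not in the diff).
    def generate(self, question: str, context: str) -> str:
        return f"answer({question!r}, {context!r})"

custom_llm = FakeLLM()

def prompt(data: dict) -> dict:
    # Stand-in for RAGPrompt; assumed to return the fields the LLM call needs.
    return {"question": data["question"], "context": data["context"]}

rag_chain = (
    # RunnablePassthrough() forwards the chain input (the question) unchanged,
    # while the "context" branch runs the retrieval step on the same input.
    {"context": RunnableLambda(fake_retrieve), "question": RunnablePassthrough()}
    | RunnableLambda(prompt)
    | RunnableLambda(lambda d: custom_llm.generate(d["question"], d["context"]))
    | StrOutputParser()
)

print(rag_chain.invoke("What does this app do?"))

Dropping the chain also gives up LCEL conveniences such as .stream() and .batch(); the direct calls in the hunk below trade those for simpler control flow.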
@@ -161,15 +160,11 @@ if st.button("Submit Query"):
     # Initialize RAG chain using the prompt
     prompt = RAGPrompt()
 
-    rag_chain = (
-        {"context": ..., "question": RunnablePassthrough()}
-        | prompt  # Use the custom prompt
-        | (lambda data: custom_llm.generate(data["question"], data["context"]))  # Pass question and context to LLM
-        | StrOutputParser()  # Parse the output
-    )
+    # Apply the prompt directly to the data (no chaining using `|`)
+    prompt_data = prompt({"question": query, "context": context})
 
-    # Generate the response
-    result = rag_chain.invoke(query)
+    # Generate the response using the language model
+    result = custom_llm.generate(prompt_data["question"], prompt_data["context"])
 
     # Store query and response in session for chat history
     st.session_state['chat_history'].append((query, result))
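For reference, the new code path in isolation. A self-contained sketch: the RAGPrompt body and FakeLLM here are assumptions standing in for the real classes defined elsewhere in app.py, and query/context are hard-coded where the app would use the Streamlit input and retrieved documents:

class RAGPrompt:
    # Assumed behavior, inferred from how the diff uses prompt_data:
    # a callable that returns the fields the LLM call consumes.
    def __call__(self, data: dict) -> dict:
        return {"question": data["question"], "context": data["context"]}

class FakeLLM:
    # Stand-in for the app's custom_llm (assumption, not in the diff).
    def generate(self, question: str, context: str) -> str:
        return f"answer({question!r}, {context!r})"

custom_llm = FakeLLM()
query = "What does this app do?"
context = "retrieved passages would go here"

# Plain function calls, no Runnable piping, so the RunnablePassthrough
# import is no longer needed.
prompt = RAGPrompt()
prompt_data = prompt({"question": query, "context": context})
result = custom_llm.generate(prompt_data["question"], prompt_data["context"])
print(result)

Note that the dict round-trip through prompt_data keeps the call site identical to the old lambda's body, so only the orchestration changed, not the model call itself.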