Update QnA.py
QnA.py CHANGED
@@ -1,16 +1,20 @@
-from langchain.chains import RetrievalQA
+from langchain.chains.combine_documents import create_stuff_documents_chain
+from langchain_core.prompts import ChatPromptTemplate
+from langchain.chains import create_retrieval_chain
+
 #from Api_Key import google_plam
-from langchain.llms import GooglePalm
+from langchain_groq import ChatGroq
 import os
 from dotenv import load_dotenv
 load_dotenv()
-os.getenv("GOOGLE_API_KEY")
-google_plam = os.getenv("GOOGLE_API_KEY")
-
 
-def Q_A(vectorstore,question):
-    google_llm = GooglePalm(google_api_key=google_plam, temperature=0.5)
-    qa = RetrievalQA.from_chain_type(llm=google_llm, chain_type="stuff", retriever=vectorstore.as_retriever())
-    answer = qa.run(question)
 
-
+def Q_A(vectorstore,question,API_KEY):
+    os.environ["GROQ_API_KEY"] = API_KEY
+    llm_groq = ChatGroq(model="llama3-8b-8192")
+    # Create a retriever
+    retriever = vectorstore.as_retriever(search_type = 'similarity',search_kwargs = {'k':2},)
+    question_answer_chain = create_stuff_documents_chain(llm_groq, prompt_template_to_analyze_resume())
+    chain = create_retrieval_chain(retriever, question_answer_chain)
+    result = chain.invoke({'input':question})
+    return result['answer']
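The updated Q_A wires Groq's llama3-8b-8192 model into LangChain's create_stuff_documents_chain / create_retrieval_chain pipeline, but it calls a prompt_template_to_analyze_resume() helper that is neither defined nor imported in this diff. For that pipeline to run, the prompt must expose a {context} placeholder (filled with the retrieved documents) and, typically, an {input} placeholder for the question that create_retrieval_chain passes through. A minimal sketch of such a helper, with illustrative wording that is not taken from the Space's actual code:

# Hypothetical helper: referenced by Q_A but not shown in this diff; the prompt wording is illustrative.
from langchain_core.prompts import ChatPromptTemplate

def prompt_template_to_analyze_resume():
    # {context} receives the retrieved resume chunks, {input} the user's question.
    return ChatPromptTemplate.from_messages([
        ("system",
         "You are an assistant that answers questions using only the resume excerpts below.\n\n{context}"),
        ("human", "{input}"),
    ])

With a prompt like this in scope, Q_A(vectorstore, question, api_key) returns result['answer'] from chain.invoke({'input': question}), where vectorstore is whatever index the Space builds elsewhere from the uploaded document and api_key is the Groq key supplied by the caller.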