Mr-Vicky-01 committed
Commit 17b077e · verified · 1 Parent(s): 15b635c

Update app.py

Files changed (1)
  1. app.py +36 -1
app.py CHANGED
@@ -7,6 +7,7 @@ from langchain.prompts import PromptTemplate
 from langchain import LLMChain
 from langchain_google_genai import ChatGoogleGenerativeAI
 from langchain_community.document_loaders import WebBaseLoader
+from langchain_community.tools import DuckDuckGoSearchRun
 
 os.environ["GOOGLE_API_KEY"] = os.getenv("GOOGLE_API_KEY")
 genai.configure(api_key=os.environ["GOOGLE_API_KEY"])
@@ -14,8 +15,14 @@ genai.configure(api_key=os.environ["GOOGLE_API_KEY"])
 llm = ChatGoogleGenerativeAI(model="gemini-1.5-flash-latest",
                              temperature=0.2)
 
+search_engine = DuckDuckGoSearchRun()
+
+def get_web_result(question):
+    result = search_engine.invoke(question)
+    return result.strip()
+
 
-template = """You are a friendly chat assistant called "CRETA" having a conversation with a human and you are created by Pachaiappan [portfolio](https://mr-vicky-01.github.io/Portfolio/) an AI Specialist.
+template = """You are a friendly chat assistant called "CRETA" having a conversation with a human and you are created by Pachaiappan [portfolio](https://mr-vicky-01.github.io/Portfolio/) an AI Specialist. If the question pertains to information that you do not have access to, respond with string 'search_query' only nothing else.
 provided document:
 {provided_docs}
 previous_chat:
@@ -30,10 +37,32 @@ prompt = PromptTemplate(
 llm_chain = LLMChain(
     llm=llm,
     prompt=prompt,
+    # verbose=True,
+)
+
+template_2 = """You are a friendly chat assistant called "CRETA" having a conversation with a human and you are created by Pachaiappan [portfolio](https://mr-vicky-01.github.io/Portfolio/) an AI Specialist.
+web_result:
+{web_result}
+provided document:
+{provided_docs}
+previous_chat:
+{chat_history}
+Human: {human_input}
+Chatbot:"""
+
+
+prompt2 = PromptTemplate(
+    input_variables=["web_result", "chat_history", "human_input", "provided_docs"], template=template_2
+)
+
+llm_chain_2 = LLMChain(
+    llm=llm,
+    prompt=prompt2,
     verbose=True,
 )
 
 
+
 previous_response = ""
 provided_docs = ""
 def conversational_chat(query):
@@ -43,6 +72,12 @@ def conversational_chat(query):
         previous_response += f"Human: {i[0]}\n Chatbot: {i[1]}\n"
     provided_docs = "".join(st.session_state["docs"])
     result = llm_chain.predict(chat_history=previous_response, human_input=query, provided_docs=provided_docs)
+
+    if 'search_query' in result.strip():
+        web_result = get_web_result(query)
+        print("searching on web....")
+        result = llm_chain_2.predict(web_result=web_result, chat_history=previous_response, human_input=query, provided_docs=provided_docs)
+
     st.session_state['history'].append((query, result))
 
     # Keep only the last 5 history entries
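
Note on this change: the commit wires a web-search fallback into the chat flow. The first chain's prompt asks Gemini to reply with the sentinel string 'search_query' when it cannot answer from its knowledge or the provided document; the app then fetches DuckDuckGo results with get_web_result and re-asks the question through a second chain (llm_chain_2) that includes those results. The snippet below is a minimal standalone sketch of that pattern, not the app's actual code: it assumes GOOGLE_API_KEY is exported and the duckduckgo-search package is installed, and the prompt wording and the answer() helper are illustrative.

# Minimal sketch of the sentinel-and-fallback pattern added by this commit.
# Assumptions: GOOGLE_API_KEY is exported, duckduckgo-search is installed,
# and the prompts and the answer() helper here are illustrative, not the app's own.
from langchain.prompts import PromptTemplate
from langchain.chains import LLMChain
from langchain_google_genai import ChatGoogleGenerativeAI
from langchain_community.tools import DuckDuckGoSearchRun

llm = ChatGoogleGenerativeAI(model="gemini-1.5-flash-latest", temperature=0.2)
search_engine = DuckDuckGoSearchRun()

# Primary chain: instructed to emit the sentinel when it lacks the information.
primary = LLMChain(
    llm=llm,
    prompt=PromptTemplate(
        input_variables=["human_input"],
        template=(
            "Answer the question. If you do not have the information, "
            "respond with the string 'search_query' only.\n"
            "Human: {human_input}\nChatbot:"
        ),
    ),
)

# Fallback chain: the same question, but with web search results as context.
fallback = LLMChain(
    llm=llm,
    prompt=PromptTemplate(
        input_variables=["web_result", "human_input"],
        template=(
            "Use the web results below to answer.\n"
            "web_result:\n{web_result}\n"
            "Human: {human_input}\nChatbot:"
        ),
    ),
)

def answer(question: str) -> str:
    result = primary.predict(human_input=question)
    if "search_query" in result.strip():
        # The model signalled it cannot answer; search the web and retry.
        web_result = search_engine.invoke(question).strip()
        result = fallback.predict(web_result=web_result, human_input=question)
    return result

print(answer("What is the latest stable release of Python?"))

The sentinel approach keeps the flow to two plain LLMChains instead of an agent with tool-calling, at the cost that a reply containing the literal token 'search_query' always triggers a search.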