Arafath10 commited on
Commit
6022fe1
·
verified ·
1 Parent(s): 572cc27

Update main.py

Browse files
Files changed (1) hide show
  1. main.py +60 -3
main.py CHANGED
@@ -91,7 +91,32 @@ We ensure to keep you updated at each stage and incorporate your feedback to del
91
  use this details to give answer for my questio.only give system response only(not include customer message)
92
  question : """
93
 
 
 
 
 
 
 
 
 
 
 
 
 
94
 
 
 
 
 
 
 
 
 
 
 
 
 
 
95
 
96
  app = FastAPI()
97
 
@@ -109,14 +134,46 @@ async def reply_whatsapp(request: Request):
109
  num_media = int(form_data.get("NumMedia", 0))
110
  from_number = form_data.get("From")
111
  message_body = form_data.get("Body")
 
112
 
113
  gen_response = model.generate_content(str(prompt)+message_body)
114
 
115
  response = MessagingResponse()
116
 
117
- msg = response.message(gen_response.text)
118
  #msg.media(GOOD_BOY_URL)
119
-
120
- return PlainTextResponse(str(response), media_type="application/xml")
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
121
 
122
  # Run the application (Make sure you have the necessary setup to run FastAPI)
 
91
  use this details to give answer for my questio.only give system response only(not include customer message)
92
  question : """
93
 
94
import google.generativeai as genai
from langchain.chat_models import ChatOpenAI
from langchain.agents.agent_types import AgentType
# from langchain_experimental.agents.agent_toolkits import create_csv_agent

# NOTE(review): the original also did `from langchain.llms import OpenAI`,
# which was immediately shadowed by the llama_index import below, and imported
# llama_index's OpenAI twice. Kept a single, final binding.
from llama_index.llms import OpenAI
from llama_index import (
    SimpleDirectoryReader,
    StorageContext,
    VectorStoreIndex,
    load_index_from_storage,
)

# Fail fast at startup: raises KeyError if the key is unset, instead of a
# confusing authentication error on the first request.
os.environ["OPENAI_API_KEY"]

# Load the persisted vector index if one exists; otherwise build it from the
# documents under "userguid" and persist it for the next startup.
try:
    storage_context = StorageContext.from_defaults(persist_dir="llama_index")
    index = load_index_from_storage(storage_context=storage_context)
    print("loaded")
except Exception:
    # Narrowed from a bare `except:` so Ctrl-C / SystemExit still propagate.
    # A missing or corrupt "llama_index" persist dir falls back to a rebuild.
    documents = SimpleDirectoryReader("userguid").load_data()
    index = VectorStoreIndex.from_documents(documents)
    index.storage_context.persist("llama_index")
    print("index created")

# Query interface used by the WhatsApp webhook handler defined below.
query_engine = index.as_query_engine()
120
 
121
  app = FastAPI()
122
 
 
134
  num_media = int(form_data.get("NumMedia", 0))
135
  from_number = form_data.get("From")
136
  message_body = form_data.get("Body")
137
+ user_query = message_body
138
 
139
  gen_response = model.generate_content(str(prompt)+message_body)
140
 
141
  response = MessagingResponse()
142
 
143
+
144
  #msg.media(GOOD_BOY_URL)
145
+ gpt_response = query_engine.query("""
146
+ if you find the answer from provided data then give answer with steps and make the more details link within the <a href>lank hyper link.
147
+ if not find the answer from provided data then say 'please contact our helpdesk' \n\n
148
+ user question : """+user_query)
149
+
150
+ print(str(gpt_response).lower())
151
+ if "please contact our helpdesk" in str(gpt_response).lower() or "please contact" in str(gpt_response).lower():
152
+ print("help desk option")
153
+
154
+ openai.api_key = os.environ["OPENAI_API_KEY"]
155
+
156
+ default = """<br><br>Dear<br>If you have a specific question or need assistance, please feel free to submit a ticket, and our support team will be happy to help you:<br><br>Submit a Ticket:<br>Email: [email protected]<br>Hotline: 0114 226 999<br><br>Thank You """
157
+ messages = [{"role": "user", "content": user_query+". always give small answers"}]
158
+ gpt_response = openai.chat.completions.create(
159
+
160
+ model="gpt-3.5-turbo",
161
+
162
+ messages=messages,
163
+
164
+ temperature=0,
165
+
166
+ )
167
+
168
+ msg = response.message(str(gpt_response.choices[0].message.content) + default)
169
+ return PlainTextResponse(str(response), media_type="application/xml")
170
+
171
+ result = ""
172
+ for lines in str(gpt_response).split("\n"):
173
+ result = result +"<p>"+lines+"</p><br>"
174
+ msg = response.message(result)
175
+ return PlainTextResponse(str(response), media_type="application/xml")
176
+
177
+
178
 
179
  # Run the application (Make sure you have the necessary setup to run FastAPI)