File size: 4,517 Bytes
1b846eb
 
d92c861
1b846eb
847bc9e
1bd9947
fdf3b37
 
 
 
1bd9947
 
 
 
1e7346f
572cc27
5b102e3
6022fe1
79654d4
 
5b102e3
572cc27
d92c861
1b846eb
d92c861
 
 
 
 
 
 
 
5b102e3
 
fdf3b37
3ba2dcf
5b102e3
13ac926
 
a14c65f
13ac926
66e97f3
 
 
6022fe1
fed63e7
13ac926
572cc27
3ba2dcf
a14c65f
 
 
 
 
 
3ba2dcf
 
 
 
 
82863bb
1e7346f
3ba2dcf
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
6022fe1
3ba2dcf
 
 
 
 
 
 
 
 
 
 
1e7346f
 
3ba2dcf
 
 
 
 
 
 
 
 
 
 
 
6022fe1
e782b03
1b846eb
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
61
62
63
64
65
66
67
68
69
70
71
72
73
74
75
76
77
78
79
80
81
82
83
84
85
86
87
88
89
90
91
92
93
94
95
96
97
98
99
100
101
102
103
104
105
106
107
108
109
110
111
112
from fastapi import FastAPI, Request, HTTPException
from fastapi.responses import PlainTextResponse
from fastapi.middleware.cors import CORSMiddleware
from twilio.twiml.messaging_response import MessagingResponse
import os,openai
import google.generativeai as genai
from llama_index.llms import OpenAI
from llama_index import VectorStoreIndex, SimpleDirectoryReader
from llama_index.llms import OpenAI
from llama_index import StorageContext, load_index_from_storage

# Gemini API key; intentionally hard-fails at startup (KeyError) if unset.
secret = os.environ["key"]

genai.configure(api_key=secret)
# Lightweight Gemini model used for the greeting-style fallback replies.
model = genai.GenerativeModel('gemini-1.5-flash')

import user_guide_sync

# Shared handles, populated inside the /whatsapp handler on each request.
# (The original `global index,query_engine` statement was a no-op at module
# scope and has been removed; the names remain module-level as before.)
query_engine = index = None

app = FastAPI()

# Allow cross-origin requests from any origin — this service exposes public
# webhook endpoints (e.g. Twilio), so CORS is fully open.
app.add_middleware(
    CORSMiddleware,
    allow_origins=["*"],
    allow_credentials=True,
    allow_methods=["*"],
    allow_headers=["*"],
)

@app.post("/update_user_guide_data")
def update_user_guide_data():
    """Rebuild the user-guide index from its source documents.

    Declared as a plain ``def`` (not ``async def``) so FastAPI executes the
    blocking synchronous ``update_user_guide`` call in its threadpool rather
    than stalling the event loop for the duration of the rebuild.

    Returns:
        A plain confirmation string once the rebuild completes.
    """
    user_guide_sync.update_user_guide()
    return "guide updated"

@app.post("/whatsapp")
async def reply_whatsapp(request: Request):
    """Twilio WhatsApp webhook: answer the incoming message from the guide index.

    Flow:
      1. Load the persisted llama_index and query it with the user's message.
      2. If the index answer signals "no answer" ("please contact ..."), fall
         back to a short Gemini-generated greeting plus helpdesk contact info.
      3. On any failure, reply with a generic retry message so Twilio always
         receives valid TwiML.

    Returns:
        PlainTextResponse containing TwiML XML for Twilio to deliver.
    """
    form_data = await request.form()
    # Only the message body is used; NumMedia/From were read but never used.
    user_query = form_data.get("Body")

    response = MessagingResponse()
    try:
        global query_engine, index
        # NOTE(review): the index is reloaded from disk on every request,
        # which is expensive — consider caching it in the module globals.
        storage_context = StorageContext.from_defaults(persist_dir="llama_index")
        index = load_index_from_storage(storage_context=storage_context)
        query_engine = index.as_query_engine()
        print("loaded")

        gpt_response = query_engine.query(f"""        
              if you find the answer from provided data then give the realistic(like real human) answer with steps and add the more details link and propper line breaks(\n).
              if not find the answer from provided data then say 'please contact our helpdesk'
              
              user question : {user_query}""")

        default = """Dear\n\nIf you have a specific question or need assistance, please feel free to submit a ticket, and our support team will be happy to help you \n\nSubmit a Ticket: \n\tEmail: [email protected]\nThank You """

        answer = str(gpt_response).lower()
        print(answer)
        # "please contact our helpdesk" contains "please contact", so a single
        # substring check is equivalent to the original two-clause test.
        if "please contact" in answer:
            print("help desk option")
            # Fallback: Gemini produces a short, human-sounding greeting reply.
            # (Dead code removed: an unused `messages` list and an
            # `openai.api_key` assignment whose only consumer was commented out.)
            prompt = f"""
                     system:
                     you are parallax technologies chatbot design for answer the user question like a real human for only greedings.
                     if user ask other questions say "i don't know"
                     only give single short & small answer
                     
                     user:{user_query}  
                     """
            gen_response = model.generate_content(prompt)
            response.message(gen_response.text + "\n\n" + default)
            return PlainTextResponse(str(response), media_type="application/xml")

        response.message(str(gpt_response))
        return PlainTextResponse(str(response), media_type="application/xml")
    except Exception as e:
        # Broad catch is deliberate: Twilio must always get well-formed TwiML,
        # never an HTTP 500. The error is logged for diagnosis.
        print(str(e))
        response.message("please ask again...!")
        return PlainTextResponse(str(response), media_type="application/xml")
    

# Run the application (Make sure you have the necessary setup to run FastAPI)