# holybot / app.py
import openai
import pinecone
import gradio as gr
import os
import json
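
# The app exposes three question-answering tabs (Bhagwad Gita, Quran, Bible).
# Each tab embeds the user's query with OpenAI, retrieves the closest verses
# from a per-book Pinecone index, and asks gpt-3.5-turbo to answer using only
# those verses. Pinecone credentials are read from the environment variables
# PINECONE_API_KEY_GITA / _BIBLE / _QURAN and PINECONE_ENVIRONMENT_GITA /
# _BIBLE / _QURAN; the user supplies their own OpenAI key in the UI.
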
# Initialise the Pinecone client with the given credentials.
def init_pinecone(apikey, environment):
    pinecone.init(api_key=apikey, environment=environment)

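# Each qa_* function below follows the same retrieve-then-answer pattern:
# 1. embed the query with text-embedding-ada-002,
# 2. query the book's Pinecone index for the 8 nearest verse ids,
# 3. look the verses up in the local data.json file,
# 4. ask gpt-3.5-turbo to answer using only those verses as context.
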
def qa_gita(query, openaikey):
    # Basic checks; return a pair so both output boxes get a value.
    if not query:
        return "Please enter your query.", ""
    if not openaikey:
        return "Please enter openaikey.", ""

    openai.api_key = openaikey

    # Embed the query.
    response = openai.Embedding.create(
        input=[query], model="text-embedding-ada-002")
    embedding = response["data"][0]["embedding"]

    # Connect to the Gita index on Pinecone.
    pinecone_api_key_gita = os.getenv("PINECONE_API_KEY_GITA")
    pinecone_environment = os.getenv("PINECONE_ENVIRONMENT_GITA")
    init_pinecone(pinecone_api_key_gita, pinecone_environment)
    index = pinecone.Index("gita")

    # Load verse texts keyed by id.
    with open("data.json", "r") as f:
        data = json.loads(f.read())

    # Retrieve the 8 most similar verses and build the context string.
    res = index.query(vector=embedding, top_k=8)
    ids = [i["id"] for i in res["matches"]]
    context = ""
    for verse_id in ids:
        context = context + str(verse_id) + ": " + data["gita"][str(verse_id)] + "\n\n"

    systemprompt = "You are not an AI Language model. You will be a Bhagwad Gita Assistant to the user. Restrict yourself to the context of the question."
    userprompt = f"Verses:\n\n{context}\n\nQuestion: {query}\n\nAnswer:\n\n"

    # Answer with the retrieved verses as context.
    response = openai.ChatCompletion.create(
        model="gpt-3.5-turbo",
        messages=[
            {"role": "system", "content": systemprompt},
            {"role": "user", "content": userprompt},
        ],
        max_tokens=256,
        temperature=0.0,
    )
    answer = response["choices"][0]["message"]["content"]
    return answer, context

def qa_bible(query, openaikey):
    # Basic checks; return a pair so both output boxes get a value.
    if not query:
        return "Please enter your query.", ""
    if not openaikey:
        return "Please enter openaikey.", ""

    openai.api_key = openaikey

    # Embed the query.
    response = openai.Embedding.create(
        input=[query], model="text-embedding-ada-002")
    embedding = response["data"][0]["embedding"]

    # Connect to the Bible index on Pinecone.
    pinecone_api_key_bible = os.getenv("PINECONE_API_KEY_BIBLE")
    pinecone_environment = os.getenv("PINECONE_ENVIRONMENT_BIBLE")
    init_pinecone(pinecone_api_key_bible, pinecone_environment)
    index = pinecone.Index("bible")

    # Load verse texts keyed by id.
    with open("data.json", "r") as f:
        data = json.loads(f.read())

    # Retrieve the 8 most similar verses and build the context string.
    res = index.query(vector=embedding, top_k=8)
    ids = [i["id"] for i in res["matches"]]
    context = ""
    for verse_id in ids:
        context = context + str(verse_id) + ": " + data["bible"][str(verse_id)] + "\n\n"

    systemprompt = "You are not an AI Language model. You will be a Bible Assistant to the user. Restrict yourself to the context of the question."
    userprompt = f"Verses:\n\n{context}\n\nQuestion: {query}\n\nAnswer:\n\n"

    # Answer with the retrieved verses as context.
    response = openai.ChatCompletion.create(
        model="gpt-3.5-turbo",
        messages=[
            {"role": "system", "content": systemprompt},
            {"role": "user", "content": userprompt},
        ],
        max_tokens=256,
        temperature=0.0,
    )
    answer = response["choices"][0]["message"]["content"]
    return answer, context

def qa_quran(query, openaikey):
    # Basic checks; return a pair so both output boxes get a value.
    if not query:
        return "Please enter your query.", ""
    if not openaikey:
        return "Please enter openaikey.", ""

    openai.api_key = openaikey

    # Embed the query.
    response = openai.Embedding.create(
        input=[query], model="text-embedding-ada-002")
    embedding = response["data"][0]["embedding"]

    # Connect to the Quran index on Pinecone.
    pinecone_api_key_quran = os.getenv("PINECONE_API_KEY_QURAN")
    pinecone_environment = os.getenv("PINECONE_ENVIRONMENT_QURAN")
    init_pinecone(pinecone_api_key_quran, pinecone_environment)
    index = pinecone.Index("quran")

    # Load verse texts keyed by id.
    with open("data.json", "r") as f:
        data = json.loads(f.read())

    # Retrieve the 8 most similar verses and build the context string.
    res = index.query(vector=embedding, top_k=8)
    ids = [i["id"] for i in res["matches"]]
    context = ""
    for verse_id in ids:
        context = context + str(verse_id) + ": " + data["quran"][str(verse_id)] + "\n\n"

    systemprompt = "You are not an AI Language model. You will be a Quran Assistant to the user. Restrict yourself to the context of the question."
    userprompt = f"Verses:\n\n{context}\n\nQuestion: {query}\n\nAnswer:\n\n"

    # Answer with the retrieved verses as context.
    response = openai.ChatCompletion.create(
        model="gpt-3.5-turbo",
        messages=[
            {"role": "system", "content": systemprompt},
            {"role": "user", "content": userprompt},
        ],
        max_tokens=256,
        temperature=0.0,
    )
    answer = response["choices"][0]["message"]["content"]
    return answer, context

def cleartext(query, output, references):
    """
    Clear the query, answer, and references textboxes.
    """
    return ["", "", ""]

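# Gradio UI: a shared OpenAI-key textbox plus one tab per book, each with a
# query box, Submit/Clear buttons, and output boxes for the answer and the
# retrieved verses.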
with gr.Blocks() as demo:
    gr.Markdown(
        """
        <h1><center><b>HolyBot</b></center></h1>
        """
    )
    gr.Markdown(
        """
        HolyBot answers your queries and gives relevant verses from the Bhagwad Gita, Quran, or Bible, built using the OpenAI ChatGPT API and a Pinecone index.
        - Get your [OpenAI API Key](https://platform.openai.com/account/api-keys) before proceeding further.
        - Refer to the codebase for this project on [GitHub](https://github.com/ravi03071991/HolyBot)."""
    )
    with gr.Tabs():
        openaikey = gr.Textbox(lines=1, label="Enter Your OpenAI Key")

        with gr.TabItem("Bhagwad Gita"):
            with gr.Row():
                with gr.Column():
                    query1 = gr.Textbox(
                        lines=2, label="Enter Your Situation/ Query.")
                    submit_button1 = gr.Button("Submit")
                with gr.Column():
                    ans_output1 = gr.Textbox(lines=5, label="Answer.")
                    references1 = gr.Textbox(
                        lines=10, label="Relevant Verses.")
                    clear_button1 = gr.Button("Clear")

        with gr.TabItem("Quran"):
            with gr.Row():
                with gr.Column():
                    query2 = gr.Textbox(
                        lines=2, label="Enter Your Situation/ Query.")
                    submit_button2 = gr.Button("Submit")
                with gr.Column():
                    ans_output2 = gr.Textbox(lines=5, label="Answer.")
                    references2 = gr.Textbox(
                        lines=10, label="Relevant Verses.")
                    clear_button2 = gr.Button("Clear")

        with gr.TabItem("Bible"):
            with gr.Row():
                with gr.Column():
                    query3 = gr.Textbox(
                        lines=2, label="Enter Your Situation/ Query.")
                    submit_button3 = gr.Button("Submit")
                with gr.Column():
                    ans_output3 = gr.Textbox(lines=5, label="Answer.")
                    references3 = gr.Textbox(
                        lines=10, label="Relevant Verses.")
                    clear_button3 = gr.Button("Clear")
    # For Bhagwad Gita
    # Submit button for submitting query.
    submit_button1.click(qa_gita, inputs=[query1, openaikey],
                         outputs=[ans_output1, references1])
    # Clear button for clearing query and answer.
    clear_button1.click(
        cleartext,
        inputs=[query1, ans_output1, references1],
        outputs=[query1, ans_output1, references1],
    )

    # For Quran
    # Submit button for submitting query.
    submit_button2.click(qa_quran, inputs=[query2, openaikey],
                         outputs=[ans_output2, references2])
    # Clear button for clearing query and answer.
    clear_button2.click(
        cleartext,
        inputs=[query2, ans_output2, references2],
        outputs=[query2, ans_output2, references2],
    )

    # For Bible
    # Submit button for submitting query.
    submit_button3.click(qa_bible, inputs=[query3, openaikey],
                         outputs=[ans_output3, references3])
    # Clear button for clearing query and answer.
    clear_button3.click(
        cleartext,
        inputs=[query3, ans_output3, references3],
        outputs=[query3, ans_output3, references3],
    )

demo.launch(debug=True)