Darshan-BugendaiTech committed on
Commit 888b8af
1 Parent(s): 888361b

Create app.py

Files changed (1)
  1. app.py +107 -0
app.py ADDED
@@ -0,0 +1,107 @@
+# Importing necessary libraries
+import gradio as gr
+from llama_index import download_loader, ServiceContext, VectorStoreIndex
+from llama_index.embeddings import HuggingFaceEmbedding
+from llama_index import Prompt
+import torch
+device = torch.device("cpu")
+
+# Loading the Zephyr model via llama.cpp
+from llama_index.llms import LlamaCPP
+llm = LlamaCPP(
+    model_url='https://huggingface.co/TheBloke/zephyr-7B-beta-GGUF/resolve/main/zephyr-7b-beta.Q5_K_M.gguf?download=true',
+    model_path=None,
+    temperature=0.5,
+    max_new_tokens=2000,
+    context_window=3900,
+    # set n_gpu_layers to at least 1 to use the GPU
+    model_kwargs={"n_gpu_layers": 0},
+    verbose=True
+)
+
+# Loading the embedding model
+embed_model = HuggingFaceEmbedding(model_name="BAAI/bge-base-en-v1.5")
+
+# Defining the custom prompt
+TEMPLATE_STR = (
+    '''You are a helpful and responsible AI assistant who is excited to help the user and answer questions politely, but will never harm humans or engage in any activity that causes harm to anyone. Use the given context below if it is useful.
+{context}
+<|user|>\n
+{query_str}\n
+<|assistant|>\n'''
+)
+QA_TEMPLATE = Prompt(TEMPLATE_STR)
+
+# User interface functions
+def build_the_bot(file):
+    global service_context, index
+    if file is not None and file.name.endswith(".xlsx"):
+        # Loading the data
+        PandasExcelReader = download_loader("PandasExcelReader")
+        loader = PandasExcelReader(pandas_config={"header": 0})
+        documents = loader.load_data(file=file)
+
+        service_context = ServiceContext.from_defaults(
+            chunk_size=150, chunk_overlap=10,
+            llm=llm, embed_model=embed_model,
+        )
+
+        index = VectorStoreIndex.from_documents(documents, service_context=service_context, text_qa_template=QA_TEMPLATE)
+
+        return (gr.update(visible=True), gr.update(visible=True), gr.update(visible=True), gr.update(visible=True))  # four gr.update values because upload.change has four outputs
+    else:
+        # Display a message if no file is uploaded
+        return (gr.Textbox(placeholder="Please upload an Excel file; refresh the page to restart the app"), gr.update(visible=True), gr.update(visible=False), gr.update(visible=True))
+
+def chat(user_input, history):
+    if user_input == "":
+        return "Please write your query so that I can assist you even better.", history
+    else:
+        global service_context, index
+        query_engine = index.as_query_engine(streaming=False)
+        bot_response = query_engine.query(user_input)
+        bot_response = str(bot_response)
+        history.append((user_input, bot_response))
+        return "", history
+
+def clear_everything():
+    return (None, None, None)
+
+# Adding a theme to the UI
+custom_theme = gr.themes.Monochrome()
+
+# UI design and logic
+with gr.Blocks(theme=custom_theme, title="Marketing Email Generator") as demo:
+    gr.HTML("<h1 style='text-align: center;'>Marketing Email Generator</h1>")
+    gr.Markdown("Drop your Excel file here 👇 and ask your query about it!")
+    with gr.Row():
+        with gr.Column(scale=3):
+            upload = gr.File(label="Upload your Excel file only", type="filepath")
+            with gr.Row():
+                clear_button = gr.Button("Clear", variant="secondary")
+
+        with gr.Column(scale=6):
+            chatbot = gr.Chatbot()
+            with gr.Row():
+                with gr.Column(scale=8):
+                    question = gr.Textbox(
+                        show_label=False,
+                        placeholder="Type your query here after uploading the Excel file...",
+                    )
+                with gr.Column(scale=1, min_width=60):
+                    submit_button = gr.Button("Ask me 🤖", variant="primary")
+
+    upload.change(fn=build_the_bot,
+                  inputs=[upload],
+                  outputs=[question, clear_button, submit_button, chatbot],
+                  api_name="upload")
+
+    question.submit(chat, [question, chatbot], [question, chatbot])
+    submit_button.click(chat, [question, chatbot], [question, chatbot])
+
+    clear_button.click(fn=clear_everything, inputs=[],
+                       outputs=[upload, question, chatbot],
+                       api_name="clear")
+
+if __name__ == "__main__":
+    demo.launch(share=True, debug=True)
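
For quick local testing, the same indexing and query flow can be exercised without the Gradio UI. The sketch below is not part of this commit: it reuses the llm, embed_model, and QA_TEMPLATE objects defined in app.py (so importing app will also build the interface and fetch the model), assumes the same legacy llama_index (pre-0.10) API used above, and treats "products.xlsx" and the query string as placeholder examples.

from llama_index import download_loader, ServiceContext, VectorStoreIndex
from app import llm, embed_model, QA_TEMPLATE  # reuse the objects built in app.py

# Load a local workbook with the same reader used by build_the_bot
PandasExcelReader = download_loader("PandasExcelReader")
loader = PandasExcelReader(pandas_config={"header": 0})
documents = loader.load_data(file="products.xlsx")  # placeholder path

# Build the index exactly as app.py does, then run a single query
service_context = ServiceContext.from_defaults(
    chunk_size=150, chunk_overlap=10, llm=llm, embed_model=embed_model,
)
index = VectorStoreIndex.from_documents(
    documents, service_context=service_context, text_qa_template=QA_TEMPLATE
)
query_engine = index.as_query_engine(streaming=False)
print(query_engine.query("Write a short marketing email for the first product."))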