Rehman1603 committed
Commit 6f140bf · verified · 1 Parent(s): b5b2498

Update app.py

Files changed (1)
  1. app.py +42 -74
app.py CHANGED
@@ -4,12 +4,6 @@ import pandas as pd
 from langchain.chat_models import ChatOpenAI
 from langchain.document_loaders import CSVLoader
 from langchain_together import TogetherEmbeddings
-from langchain.prompts import ChatPromptTemplate
-from langchain.vectorstores import Chroma
-from langchain_core.output_parsers import StrOutputParser
-from langchain_core.runnables import RunnableLambda, RunnablePassthrough
-from langchain.document_loaders import CSVLoader
-from langchain.embeddings.sentence_transformer import SentenceTransformerEmbeddings
 from langchain.vectorstores import Chroma
 from langchain_core.vectorstores import InMemoryVectorStore
 from langchain import PromptTemplate
@@ -17,7 +11,6 @@ from langchain import LLMChain
 from langchain_together import Together
 import os
 
-
 os.environ['TOGETHER_API_KEY'] = "c2f52626b97118b71c0c36f66eda4f5957c8fc475e760c3d72f98ba07d3ed3b5"
 
 # Initialize global variable for vectorstore
@@ -26,7 +19,6 @@ embeddings = TogetherEmbeddings(model="togethercomputer/m2-bert-80M-8k-retrieval
 llama3 = Together(model="meta-llama/Meta-Llama-3.1-70B-Instruct-Turbo", max_tokens=1024)
 
 def update_csv_files():
-    # Define the login URL and credentials
     login_url = "https://livesystem.hisabkarlay.com/auth/login"
     payload = {
         "username": "user@123",
@@ -35,25 +27,19 @@ def update_csv_files():
         "client_id": "5",
         "grant_type": "password"
     }
-
-    # Send a POST request to the login URL
     response = requests.post(login_url, data=payload)
 
-    # Check the status and get the response data
     if response.status_code == 200:
         access_token = response.json()['access_token']
     else:
         return f"Failed to log in: {response.status_code}"
 
-    # Profit loss Fetch report
     report_url = "https://livesystem.hisabkarlay.com/connector/api/profit-loss-report"
-    headers = {
-        "Authorization": f"Bearer {access_token}"
-    }
+    headers = {"Authorization": f"Bearer {access_token}"}
     response = requests.get(report_url, headers=headers)
     profit_loss_data = response.json()['data']
    keys = list(profit_loss_data.keys())
-    del keys[23] # Adjust according to your needs
+    del keys[23]
     del keys[20]
     del keys[19]
     data_dict = {}
@@ -62,7 +48,6 @@ def update_csv_files():
     df = pd.DataFrame(data_dict, index=[0])
     df.to_csv('profit_loss.csv', index=False)
 
-    # API call to get purchase-sell data
     report_url = "https://livesystem.hisabkarlay.com/connector/api/purchase-sell"
     response = requests.get(report_url, headers=headers)
     sell_purchase_data = response.json()
@@ -70,7 +55,6 @@ def update_csv_files():
     df = pd.json_normalize(sell_purchase_data)
     df.to_csv('purchase_sell_report.csv', index=False)
 
-    # API call to get trending product data
     report_url = "https://livesystem.hisabkarlay.com/connector/api/trending-products"
     response = requests.get(report_url, headers=headers)
     trending_product_data = response.json()['data']
@@ -82,22 +66,15 @@ def update_csv_files():
 
 def initialize_embedding():
     global vectorstore
-    # Initialize the embedding function
-
-    # Load CSV files
-    file_paths = [
-        "profit_loss.csv",
-        "purchase_sell_report.csv",
-        "trending_product.csv"
-    ]
+
+    file_paths = ["profit_loss.csv", "purchase_sell_report.csv", "trending_product.csv"]
     documents = []
     for path in file_paths:
         loader = CSVLoader(path, encoding="windows-1252")
-        documents.extend(loader.load()) # Combine documents from all files
+        documents.extend(loader.load())
 
-    # Create an InMemoryVectorStore from the combined documents
     vectorstore = InMemoryVectorStore.from_texts(
-        [doc.page_content for doc in documents], # Extract the page_content from Document objects
+        [doc.page_content for doc in documents],
         embedding=embeddings,
     )
     return "Embeddings initialized successfully!"
@@ -108,30 +85,18 @@ def qa_chain(query):
 
     retriever = vectorstore.as_retriever()
     retrieved_documents = retriever.invoke(query)
-    return retrieved_documents # Not shown directly in the UI
+    return retrieved_documents
 
 def generate_response(query, history):
     if vectorstore is None:
-        return history, "Please initialize the embeddings first."
-
-    retrieved_documents = qa_chain(query) # Call qa_chain internally
+        return "Please initialize the embeddings first.", history
+
+    retrieved_documents = qa_chain(query)
     chat_template = """
 You are a highly intelligent and professional AI assistant.
 Generate the response according to the user's query:
-- If the user enters a greeting (e.g., "Hi", "Hello", "Good day"), give the following response:
-"Welcome to HisabKarLay, your business partner! You may choose from the following services 👇:
-1. Reports
-2. Forecasts
-3. Best Selling Items
-4. Chat with AI Agent
-5. Chat with our Customer Care Team
-6. Share your Feedback
-7. Checkout Latest Offers
-🔆 Suggestion: To make a selection, send the relevant number like 1
-⭕ Note: If at any stage you wish to go back to the previous menu, type back, and to go to the main menu, type main menu.
-⭕ Note: If you want to change the language, type and send 'change language.'
-💁🏻♂️ Help: If you need any help, you can call us at +923269498569."
-- If the user enters a specific number (1-7), give the following responses...
+Context: {retrieved_documents}
+Question: {query}
 """
     prompt = PromptTemplate(
         input_variables=['retrieved_documents', 'query'],
@@ -139,33 +104,36 @@ Generate the response according to the user's query:
     )
 
     Generated_chat = LLMChain(llm=llama3, prompt=prompt)
-    result = Generated_chat.run({
-        "retrieved_documents": retrieved_documents,
-        "query": query
-    })
-
-    # Append the conversation history
-    history.append((query, result))
-
-    return history, result
-
-# Define Gradio UI
-with gr.Blocks() as demo:
-    chatbot = gr.Chatbot(label="AI Chat")
-    query = gr.Textbox(label="Ask anything!", placeholder="Type your question here")
-    initialize_status = gr.Textbox(label="Status", visible=False)
-    update_csv_status = gr.Textbox(label="Status", visible=False)
-    initialize_button = gr.Button("Initialize Embeddings")
-    update_csv_button = gr.Button("Update CSV Files")
-
-    def on_query(query, history):
-        return generate_response(query, history)
-
-    query.submit(on_query, [query, chatbot], [chatbot, query])
+    response = Generated_chat.invoke({'retrieved_documents': retrieved_documents, 'query': query})
+    history.append((query, response['text']))
+    return response['text'], history
+
+def gradio_app():
+    with gr.Blocks() as app:
+        gr.Markdown("# Embedding and QA Interface")
+
+        # Chatbox elements
+        chatbot = gr.Chatbot(label="Chat")
+        query_input = gr.Textbox(label="Enter your query")
+        generate_response_btn = gr.Button("Generate Response")
+
+        # Status output textboxes for CSV update and embedding initialization
+        update_csv_status = gr.Textbox(label="CSV Update Status", interactive=False)
+        initialize_status = gr.Textbox(label="Embedding Initialization Status", interactive=False)
+
+        # Buttons for CSV update and embedding initialization
+        update_csv_button = gr.Button("Update CSV Files")
+        initialize_button = gr.Button("Initialize Embedding")
+
+        # Button click actions
+        update_csv_button.click(update_csv_files, outputs=update_csv_status)
+        initialize_button.click(initialize_embedding, outputs=initialize_status)
 
-    initialize_button.click(initialize_embedding, outputs=initialize_status)
-    update_csv_button.click(update_csv_files, outputs=update_csv_status)
+        # Chatbot functionality with history
+        history = gr.State([]) # Chat history state
+        generate_response_btn.click(generate_response, inputs=[query_input, history], outputs=[chatbot, history])
 
-# Launch Gradio App
-demo.launch()
+    app.launch()
 
+# Run the Gradio app
+gradio_app()
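One wiring detail worth noting: the new generate_response() returns (text, history), but the "Generate Response" button routes its first output to gr.Chatbot, which renders a list of (user, assistant) pairs rather than a bare string. A minimal adapter sketch, assuming Gradio's tuple-style Chatbot; chat_fn is a hypothetical helper, not part of this commit:

def chat_fn(query, history):
    # Call the committed generate_response(); it appends (query, answer) to history on success.
    text, history = generate_response(query, history)
    if not history or history[-1] != (query, text):
        history = history + [(query, text)]  # surface error messages (e.g. "initialize first") as a chat turn too
    return history, history  # first output -> gr.Chatbot pairs, second -> the gr.State holding history

# generate_response_btn.click(chat_fn, inputs=[query_input, history], outputs=[chatbot, history])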
 
 
 
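Separately, the Together API key is committed in plain text in app.py. A minimal sketch of reading it from the environment instead (for example a Hugging Face Space secret); this is an assumption about deployment, not something the commit itself does:

import os

# Sketch: require TOGETHER_API_KEY to come from the environment (e.g. a Space secret)
# rather than being hard-coded in the source.
if not os.environ.get("TOGETHER_API_KEY"):
    raise RuntimeError("TOGETHER_API_KEY is not set; configure it before launching the app.")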