soyleyicicem committed
Commit 4f47415 · verified · 1 Parent(s): af9408a

Update app.py

Files changed (1): app.py (+122, -22)
app.py CHANGED
@@ -26,6 +26,83 @@ obj_qdrant = QdrantClientInitializer()
client = obj_qdrant.initialize_db()
obj_loader = PDFLoader()
 
+# def print_result(question, result):
+#     output_text = f"""### Question:
+#     {question}
+#     ### Answer:
+#     {result}
+#     """
+#     return(output_text)
+
+# def format_chat_prompt(chat_history):
+#     prompt = []
+
+#     for turn in chat_history:
+#         user_message, ai_message = turn
+#         prompt.append(HumanMessage(user_message))
+#         prompt.append(AIMessage(ai_message))
+
+#     chat_history = InMemoryChatMessageHistory(messages=prompt)
+#     memory = ConversationBufferMemory(chat_memory=chat_history, memory_key="history", input_key="question")
+#     return memory
+
+# def chat(question, manual, history):
+#     history = history or []
+#     memory = format_chat_prompt(history)
+#     manual_list = {"Toyota_Corolla_2024_TR": -8580416610875007536,
+#                    "Renault_Clio_2024_TR":-5514489544983735006,
+#                    "Fiat_Egea_2024_TR":-2026113796962100812}
+
+#     collection_list = {"Toyota_Corolla_2024_TR": "TOYOTA_MANUAL_COLLECTION_EMBED3",
+#                        "Renault_Clio_2024_TR": "RENAULT_MANUAL_COLLECTION_EMBED3",
+#                        "Fiat_Egea_2024_TR": "FIAT_MANUAL_COLLECTION_EMBED3"}
+
+#     collection_name = collection_list[f"{manual}"]
+
+#     db = obj_loader.load_from_database(embeddings=embeddings, collection_name=collection_name)
+
+#     CAR_ID = manual_list[f"{manual}"]
+#     wrapper = Wrappers(collection_name, client, embeddings, myLLM, db, CAR_ID, memory)
+
+#     inputs = {"question": question, "iter_halucination": 0}
+#     app = wrapper.lagchain_graph()
+
+#     for output in app.stream(inputs):
+#         for key, value in output.items():
+#             pprint(f"Finished running: {key}:")
+#             # display(Markdown(print_result(question, value["generation"]['text'])))
+#             response = value["generation"]['text']
+#     history.append((question, response))
+
+#     point_id = uuid.uuid4().hex
+#     DatabaseOperations.save_user_history_demo(client, "USER_COLLECTION_EMBED3", question, response, embeddings, point_id, manual)
+
+#     return '', history
+
+# def vote(data: gr.LikeData):
+#     if data.liked:
+#         print("You upvoted this response: ")
+#         return "OK"
+#     else:
+#         print("You downvoted this response: " )
+#         return "NOK"
+
+
+
+# manual_list = ["Toyota_Corolla_2024_TR", "Renault_Clio_2024_TR", "Fiat_Egea_2024_TR"]
+
+# with gr.Blocks() as demo:
+
+#     chatbot = gr.Chatbot(height=600)
+#     manual = gr.Dropdown(label="Kullanım Kılavuzları", value="Toyota_Corolla_2024_TR", choices=manual_list)
+#     textbox = gr.Textbox()
+#     clear = gr.ClearButton(components=[textbox, chatbot], value='Clear console')
+#     textbox.submit(chat, [textbox, manual, chatbot], [textbox, chatbot])
+#     chatbot.like(vote, None, None) # Adding this line causes the like/dislike icons to appear in your chatbot
+
+#     # gr.close_all()
+#     demo.launch(share=True)
+
def print_result(question, result):
    output_text = f"""### Question:
    {question}
 
@@ -34,9 +111,11 @@ def print_result(question, result):
    """
    return(output_text)

+
def format_chat_prompt(chat_history):
    prompt = []

+    print(chat_history)
    for turn in chat_history:
        user_message, ai_message = turn
        prompt.append(HumanMessage(user_message))
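Note on this hunk and the next: `format_chat_prompt` turns the Gradio `Chatbot` history (a list of `(user, assistant)` tuples) into LangChain messages and wraps them in a `ConversationBufferMemory`; the added `print(chat_history)` only logs that history. Below is a minimal, self-contained sketch of the same conversion, assuming current `langchain` / `langchain-core` packages; the helper name and example strings are illustrative and not part of the commit.

```python
# Sketch of the history -> memory conversion done by format_chat_prompt (illustrative only).
from langchain_core.messages import HumanMessage, AIMessage
from langchain_core.chat_history import InMemoryChatMessageHistory
from langchain.memory import ConversationBufferMemory

def history_to_memory(chat_history):
    # chat_history is the Gradio Chatbot value: a list of (user, assistant) tuples
    messages = []
    for user_message, ai_message in chat_history or []:
        messages.append(HumanMessage(user_message))
        messages.append(AIMessage(ai_message))
    store = InMemoryChatMessageHistory(messages=messages)
    # memory_key / input_key mirror the keys used in the diff
    return ConversationBufferMemory(chat_memory=store, memory_key="history", input_key="question")

# Example: two prior turns become four messages in the buffer
memory = history_to_memory([("Hi", "Hello!"), ("Lastik basıncı?", "2.4 bar önerilir.")])
print(memory.load_memory_variables({})["history"])
```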
 
@@ -46,7 +125,10 @@ def format_chat_prompt(chat_history):
    memory = ConversationBufferMemory(chat_memory=chat_history, memory_key="history", input_key="question")
    return memory

-def chat(question, manual, history):
+liked_state = gr.State(None)
+last_interaction = gr.State(None)
+
+def chat(question, manual, history, liked):
    history = history or []
    memory = format_chat_prompt(history)
    manual_list = {"Toyota_Corolla_2024_TR": -8580416610875007536,
 
@@ -54,14 +136,14 @@ def chat(question, manual, history):
                   "Fiat_Egea_2024_TR":-2026113796962100812}

    collection_list = {"Toyota_Corolla_2024_TR": "TOYOTA_MANUAL_COLLECTION_EMBED3",
-                       "Renault_Clio_2024_TR": "RENAULT_MANUAL_COLLECTION_EMBED3",
-                       "Fiat_Egea_2024_TR": "FIAT_MANUAL_COLLECTION_EMBED3"}
+                       "Renault_Clio_2024_TR": "RENAULT_MANUAL_COLLECTION_EMBED3",
+                       "Fiat_Egea_2024_TR": "FIAT_MANUAL_COLLECTION_EMBED3"}

-    collection_name = collection_list[f"{manual}"]
+    collection_name = collection_list[manual]

    db = obj_loader.load_from_database(embeddings=embeddings, collection_name=collection_name)

-    CAR_ID = manual_list[f"{manual}"]
+    CAR_ID = manual_list[manual]
    wrapper = Wrappers(collection_name, client, embeddings, myLLM, db, CAR_ID, memory)

    inputs = {"question": question, "iter_halucination": 0}
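The functional change in this hunk is dropping the redundant f-string around the dropdown value: `manual` already arrives as the key string, so `collection_list[f"{manual}"]` and `collection_list[manual]` resolve to the same entry (the re-added mapping lines appear to be a whitespace-only re-alignment). A small sketch of the equivalence, reusing the mapping from the diff:

```python
# The dict is copied from the hunk above; the assert shows the two lookups are identical.
collection_list = {"Toyota_Corolla_2024_TR": "TOYOTA_MANUAL_COLLECTION_EMBED3",
                   "Renault_Clio_2024_TR": "RENAULT_MANUAL_COLLECTION_EMBED3",
                   "Fiat_Egea_2024_TR": "FIAT_MANUAL_COLLECTION_EMBED3"}

manual = "Fiat_Egea_2024_TR"  # value delivered by the gr.Dropdown
assert collection_list[f"{manual}"] == collection_list[manual] == "FIAT_MANUAL_COLLECTION_EMBED3"
```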
 
@@ -70,36 +152,54 @@ def chat(question, manual, history):
    for output in app.stream(inputs):
        for key, value in output.items():
            pprint(f"Finished running: {key}:")
-            # display(Markdown(print_result(question, value["generation"]['text'])))
+
            response = value["generation"]['text']
    history.append((question, response))

-    point_id = uuid.uuid4().hex
-    DatabaseOperations.save_user_history_demo(client, "USER_COLLECTION_EMBED3", question, response, embeddings, point_id, manual)
+    # Store the last interaction without saving to the database yet
+    last_interaction.value = {
+        "question": question,
+        "response": response,
+        "manual": manual,
+        "point_id": uuid.uuid4().hex
+    }

    return '', history

-def vote(data: gr.LikeData):
-    if data.liked:
-        print("You upvoted this response: ")
-        return "OK"
-    else:
-        print("You downvoted this response: " )
-        return "NOK"
-
-
+def save_last_interaction(feedback):
+    if last_interaction.value:
+        DatabaseOperations.save_user_history_demo(
+            client,
+            "USER_COLLECTION_EMBED3",
+            last_interaction.value["question"],
+            last_interaction.value["response"],
+            embeddings,
+            last_interaction.value["point_id"],
+            last_interaction.value["manual"],
+            feedback
+        )
+        last_interaction.value = None

manual_list = ["Toyota_Corolla_2024_TR", "Renault_Clio_2024_TR", "Fiat_Egea_2024_TR"]

with gr.Blocks() as demo:
-
    chatbot = gr.Chatbot(height=600)
    manual = gr.Dropdown(label="Kullanım Kılavuzları", value="Toyota_Corolla_2024_TR", choices=manual_list)
    textbox = gr.Textbox()
    clear = gr.ClearButton(components=[textbox, chatbot], value='Clear console')
-    textbox.submit(chat, [textbox, manual, chatbot], [textbox, chatbot])
-    chatbot.like(vote, None, None) # Adding this line causes the like/dislike icons to appear in your chatbot

-    # gr.close_all()
-    demo.launch(share=True)
+    def handle_like(data: gr.LikeData):
+        liked_state.value = data.liked
+        if liked_state.value is not None:
+            feedback = "LIKE" if liked_state.value else "DISLIKE"
+            save_last_interaction(feedback)
+
+    def gradio_chat(question, manual, history):
+        save_last_interaction("N/A")  # Save previous interaction before starting a new one
+        return chat(question, manual, history, liked_state.value)
+
+    textbox.submit(gradio_chat, [textbox, manual, chatbot], [textbox, chatbot])
+    chatbot.like(handle_like, None, None)
+
+demo.launch()
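Taken together, `last_interaction`, `save_last_interaction`, `handle_like` and `gradio_chat` replace the old write-immediately-and-`vote` flow with a deferred write: each question/answer pair is buffered and only persisted once feedback is known, either "LIKE"/"DISLIKE" from the like handler or "N/A" when the next question arrives first. A minimal, framework-free sketch of that pattern, with a hypothetical `persist()` helper and in-memory list standing in for the Qdrant write done by `DatabaseOperations.save_user_history_demo`:

```python
import uuid

# Hypothetical in-memory stand-in for the Qdrant write done in the diff above.
SAVED = []

def persist(record, feedback):
    SAVED.append({**record, "feedback": feedback})

last_interaction = None  # single-slot buffer, like last_interaction.value above

def chat(question, manual):
    global last_interaction
    response = f"(answer about {manual})"  # model call elided
    # Buffer the turn instead of writing it immediately
    last_interaction = {"question": question, "response": response,
                        "manual": manual, "point_id": uuid.uuid4().hex}
    return response

def flush(feedback):
    """Write the buffered turn once, tagged with whatever feedback we have."""
    global last_interaction
    if last_interaction:
        persist(last_interaction, feedback)
        last_interaction = None

def on_like(liked: bool):
    flush("LIKE" if liked else "DISLIKE")

def on_submit(question, manual):
    flush("N/A")  # previous turn gets saved without explicit feedback
    return chat(question, manual)

# Usage: the first turn is liked, the second gets "N/A" when a third question arrives
on_submit("Yağ değişimi ne zaman?", "Toyota_Corolla_2024_TR"); on_like(True)
on_submit("Lastik basıncı?", "Toyota_Corolla_2024_TR")
on_submit("Far ayarı?", "Toyota_Corolla_2024_TR")
print([(r["question"], r["feedback"]) for r in SAVED])
# [('Yağ değişimi ne zaman?', 'LIKE'), ('Lastik basıncı?', 'N/A')]
```

One reviewer note worth keeping in mind: `liked_state` and `last_interaction` are module-level `gr.State` objects mutated through `.value` rather than passed as event inputs/outputs, so the buffer is shared by every session of the Space. That is fine for a single-user demo, but concurrent users could overwrite each other's pending turn before it is flushed.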