Ramshah07 committed
Commit a31a270 · verified · 1 Parent(s): 6b5f908

Update app.py

Files changed (1)
  1. app.py +152 -60
app.py CHANGED
@@ -1,64 +1,156 @@
  import gradio as gr
- from huggingface_hub import InferenceClient
-
- """
- For more information on `huggingface_hub` Inference API support, please check the docs: https://huggingface.co/docs/huggingface_hub/v0.22.2/en/guides/inference
- """
- client = InferenceClient("HuggingFaceH4/zephyr-7b-beta")
-
-
- def respond(
-     message,
-     history: list[tuple[str, str]],
-     system_message,
-     max_tokens,
-     temperature,
-     top_p,
- ):
-     messages = [{"role": "system", "content": system_message}]
-
-     for val in history:
-         if val[0]:
-             messages.append({"role": "user", "content": val[0]})
-         if val[1]:
-             messages.append({"role": "assistant", "content": val[1]})
-
-     messages.append({"role": "user", "content": message})
-
-     response = ""
-
-     for message in client.chat_completion(
-         messages,
-         max_tokens=max_tokens,
-         stream=True,
-         temperature=temperature,
-         top_p=top_p,
-     ):
-         token = message.choices[0].delta.content
-
-         response += token
-         yield response
-
-
- """
- For information on how to customize the ChatInterface, peruse the gradio docs: https://www.gradio.app/docs/chatinterface
- """
- demo = gr.ChatInterface(
-     respond,
-     additional_inputs=[
-         gr.Textbox(value="You are a friendly Chatbot.", label="System message"),
-         gr.Slider(minimum=1, maximum=2048, value=512, step=1, label="Max new tokens"),
-         gr.Slider(minimum=0.1, maximum=4.0, value=0.7, step=0.1, label="Temperature"),
-         gr.Slider(
-             minimum=0.1,
-             maximum=1.0,
-             value=0.95,
-             step=0.05,
-             label="Top-p (nucleus sampling)",
-         ),
-     ],
- )
  
  
  if __name__ == "__main__":
-     demo.launch()
+ import dotenv
+ from openai import OpenAI
+ import os
+ from pypdf import PdfReader
  import gradio as gr
+ import json
+ import requests
+
+ dotenv.load_dotenv(override=True)
+
+ openai_api_key = os.getenv("GOOGLE_API_KEY")  # Gemini API key; env var name assumed, keep secrets out of source
+ pushover_user = os.getenv("PUSHOVER_USER")
+ pushover_token = os.getenv("PUSHOVER_TOKEN")
+
+ if pushover_user and pushover_token:
+     print("Pushover user and token found")
+ else:
+     print("Pushover user and token not found")
+
+ def send_pushover_notification(message):
+     url = "https://api.pushover.net/1/messages.json"
+     data = {
+         "token": os.getenv("PUSHOVER_TOKEN"),
+         "user": os.getenv("PUSHOVER_USER"),
+         "message": message
+     }
+     response = requests.post(url, data=data)
+     if response.status_code == 200:
+         print("Pushover notification sent successfully")
+     else:
+         print("Failed to send Pushover notification")
+
+ def get_pdf_text(pdf_path):
+     reader = PdfReader(pdf_path)
+     text = ""
+     for page in reader.pages:
+         text += page.extract_text()
+     return text
+
+ def record_user_details(email, name="Not provided", notes="Not provided"):
+     print(f"User details recorded: Name: {name}, Email: {email}, Notes: {notes}")
+     send_pushover_notification(f"Recording interest from: Name: {name}, Email: {email}, Notes: {notes}")
+     return {"recorded": "ok"}
+
+ def record_unknown_question(question):
+     print(f"Unknown question recorded: {question}")
+     send_pushover_notification(f"Unknown question recorded: {question}")
+     return {"recorded": "ok"}
+
+ record_user_details_json = {
+     "name": "record_user_details",
+     "description": "Record user details",
+     "parameters": {
+         "type": "object",
+         "properties": {
+             "email": {"type": "string", "description": "The email of the user"},
+             "name": {"type": "string", "description": "The name of the user, if they provided it"},
+             "notes": {"type": "string", "description": "Any additional information about the conversation that is worth recording for context"}
+         },
+         "required": ["email"],
+         "additionalProperties": False
+     }
+ }
+
+ record_unknown_question_json = {
+     "name": "record_unknown_question",
+     "description": "Record unknown question",
+     "parameters": {
+         "type": "object",
+         "properties": {"question": {"type": "string", "description": "The question that the user asked"}}
+     }
+ }
+
+ tools = [{"type": "function", "function": record_user_details_json},
+          {"type": "function", "function": record_unknown_question_json}]
+
+ class Me:
+     def __init__(self):
+         self.name = "Ravi Prakash Kewat"
+         self.linked_profile = get_pdf_text("me/Ravi_LinkedInProfile.pdf")
+         with open("me/Ravi_summary.txt", "r", encoding="utf-8") as file:
+             self.summary = file.read()
+         # OpenAI-compatible client pointed at the Gemini endpoint
+         self.client = OpenAI(api_key=openai_api_key, base_url="https://generativelanguage.googleapis.com/v1beta/openai/")
+
+     def handle_tool_calls(self, tool_calls):
+         results = []
+         for tool_call in tool_calls:
+             tool_name = tool_call.function.name
+             print(f"Tool called: {tool_name}", flush=True)
+
+             # resolve the module-level function by name and call it with the decoded arguments
+             tool = globals().get(tool_name)
+             arguments = json.loads(tool_call.function.arguments)
+
+             result = tool(**arguments) if tool else {}
+             results.append({"role": "tool", "content": json.dumps(result), "tool_call_id": tool_call.id})
+         return results
+
+     def system_prompt(self):
+         system_prompt = f"You are acting as Ravi Prakash Kewat. You are answering questions on Ravi Prakash Kewat's website, \
+ particularly questions related to Ravi Prakash Kewat's career, background, skills and experience. \
+ Your responsibility is to represent Ravi Prakash Kewat for interactions on the website as faithfully as possible. \
+ You are given a summary of Ravi Prakash Kewat's background and LinkedIn profile which you can use to answer questions. \
+ Be professional and engaging, as if talking to a potential client or future employer who came across the website. \
+ If you don't know the answer to any question, use your record_unknown_question tool to record the question that you couldn't answer, even if it's about something trivial or unrelated to career. \
+ If the user is engaging in discussion, try to steer them towards getting in touch via email; ask for their email and record it using your record_user_details tool. "
+
+         system_prompt += f"\n\n## Summary:\n{self.summary}\n\n## LinkedIn Profile:\n{self.linked_profile}\n\n"
+         system_prompt += f"With this context, please chat with the user, always staying in character as {self.name}."
+         return system_prompt
+
+     def chat_with_me(self, message, history):
+         messages = [{"role": "system", "content": self.system_prompt()}] + history + [{"role": "user", "content": message}]
+         done = False
+         while not done:
+             # this is the call to the LLM, passing the tool definitions
+             response = self.client.chat.completions.create(model="gemini-1.5-flash", messages=messages, tools=tools)
+
+             finish_reason = response.choices[0].finish_reason
+
+             if finish_reason == "tool_calls":
+                 # the model asked for tools: run them, append the results, and loop again
+                 message = response.choices[0].message
+                 tool_calls = message.tool_calls
+                 results = self.handle_tool_calls(tool_calls)
+                 messages.append(message)
+                 messages.extend(results)
+             else:
+                 done = True
+         return response.choices[0].message.content
  
  
  if __name__ == "__main__":
+     me = Me()
+     gr.ChatInterface(me.chat_with_me, type="messages").launch()
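
For local testing, the updated app.py reads its secrets from the environment (or a .env file loaded by dotenv.load_dotenv). Below is a minimal pre-flight sketch; GOOGLE_API_KEY is an assumed name for the Gemini key, while PUSHOVER_USER and PUSHOVER_TOKEN are the names used in the code.

# Sketch: confirm the environment app.py expects before launching it.
# GOOGLE_API_KEY is an assumed variable name for the Gemini key.
import os
import dotenv

dotenv.load_dotenv(override=True)  # same loading behaviour as app.py

required = ["GOOGLE_API_KEY", "PUSHOVER_USER", "PUSHOVER_TOKEN"]
missing = [name for name in required if not os.getenv(name)]
if missing:
    raise SystemExit(f"Missing environment variables: {', '.join(missing)}")
print("Environment looks complete; app.py should be able to start.")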
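
The chat loop in chat_with_me keeps calling the model until finish_reason is no longer "tool_calls", and handle_tool_calls dispatches each requested tool by name through globals(). A minimal sketch of that dispatch step follows, using a stand-in tool-call object (SimpleNamespace here is only for illustration, not part of the app).

# Sketch of the tool dispatch in handle_tool_calls, with a fake tool call instead of a real API response.
import json
from types import SimpleNamespace

def record_unknown_question(question):
    # same contract as the tool in app.py: log the question and acknowledge
    print(f"Unknown question recorded: {question}")
    return {"recorded": "ok"}

# stand-in object exposing the fields app.py reads: id, function.name, function.arguments
tool_call = SimpleNamespace(
    id="call_1",
    function=SimpleNamespace(
        name="record_unknown_question",
        arguments=json.dumps({"question": "What is your favourite colour?"}),
    ),
)

tool = globals().get(tool_call.function.name)         # look up the Python function by its tool name
arguments = json.loads(tool_call.function.arguments)  # decode the JSON arguments produced by the model
result = tool(**arguments) if tool else {}
print({"role": "tool", "content": json.dumps(result), "tool_call_id": tool_call.id})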