acecalisto3 committed
Commit f61d2c9 · verified · 1 Parent(s): 0a68e68

Update app.py

Files changed (1):
  app.py (+21, -25)
app.py CHANGED

@@ -20,9 +20,18 @@ if not api_key:
 # Configure logging
 logging.basicConfig(level=logging.INFO, format='%(asctime)s - %(levelname)s - %(message)s')
 
-# Define constants
-PREFIX = "Task started at {date_time_str}. Purpose: {purpose}"
-TASK_PROMPT = "Current task: {task}. History:\n{history}"
+client = InferenceClient("mistralai/Mixtral-8x7B-Instruct-v0.1")
+
+VERBOSE = True
+MAX_HISTORY = 125
+
+def format_prompt(message, history):
+    prompt = "<s>"
+    for user_prompt, bot_response in history:
+        prompt += f"[INST] {user_prompt} [/INST]"
+        prompt += f" {bot_response}</s> "
+    prompt += f"[INST] {message} [/INST]"
+    return prompt
 
 # Define current date/time
 date_time_str = datetime.datetime.now().strftime("%Y-%m-%d %H:%M:%S")

@@ -110,8 +119,6 @@ def handle_input(task, storage_location, url1, url2, scrape_interval, content_ty
     return "You can chat with the friendly chatbot below."
 
 # Define the chat response function
-client = InferenceClient("HuggingFaceH4/zephyr-7b-beta", token=api_key)
-
 def respond(
     message,
     history,

@@ -126,33 +133,22 @@ def respond(
     scrape_interval,
     content_type
 ):
-    messages = [{"role": "system", "content": system_message}]
+    global client
 
-    for val in history:
-        if val[0]:
-            messages.append({"role": "user", "content": val[0]})
-        if val[1]:
-            messages.append({"role": "assistant", "content": val[1]})
+    # Format the prompt
+    prompt = format_prompt(message, history)
 
-    messages.append({"role": "user", "content": message})
+    # Generate response using the InferenceClient
+    response = client.generate(prompt, max_tokens=max_tokens, temperature=temperature, top_p=top_p)
 
-    response = ""
-
-    for message in client.chat_completion(
-        messages,
-        max_tokens=max_tokens,
-        stream=True,
-        temperature=temperature,
-        top_p=top_p,
-    ):
-        token = message.choices[0].delta.content
-
-        response += token
-        yield response
+    # Append the response to history
+    history.append((message, response))
 
     # Handle the selected task
     handle_input(task, storage_location, url1, url2, scrape_interval, content_type)
 
+    return response
+
 # Create Gradio interface
 demo = gr.ChatInterface(
     respond,
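
For reference, this is a minimal standalone sketch of the prompt layout the new format_prompt helper builds (the Mixtral-instruct [INST] ... [/INST] framing). The helper body is copied from the commit; the example history and message strings are invented purely for illustration.

# Sketch: the committed format_prompt helper, run in isolation.
def format_prompt(message, history):
    prompt = "<s>"
    for user_prompt, bot_response in history:
        prompt += f"[INST] {user_prompt} [/INST]"
        prompt += f" {bot_response}</s> "
    prompt += f"[INST] {message} [/INST]"
    return prompt

# Hypothetical single-turn history, only to show how prior turns are wrapped.
history = [("Hello", "Hi there! How can I help?")]
print(format_prompt("Monitor https://example.com for changes", history))
# -> <s>[INST] Hello [/INST] Hi there! How can I help?</s> [INST] Monitor https://example.com for changes [/INST]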