YangWu001 committed on
Commit
35f1aea
·
1 Parent(s): 7ce6cf0
Files changed (1) hide show
  1. app.py +12 -11
app.py CHANGED
@@ -13,7 +13,7 @@ stop_inference = False
13
 
14
  def respond(
15
  message,
16
- history: list[tuple[str, str]],
17
  system_message,
18
  max_tokens,
19
  temperature,
@@ -26,11 +26,11 @@ def respond(
26
  if use_local_model:
27
  # Simulate local inference (ignoring history)
28
  messages = [{"role": "system", "content": system_message}]
29
- for val in history:
30
- if val[0]:
31
- messages.append({"role": "user", "content": val[0]})
32
- if val[1]:
33
- messages.append({"role": "assistant", "content": val[1]})
34
  messages.append({"role": "user", "content": message})
35
 
36
  response = ""
@@ -48,11 +48,11 @@ def respond(
48
  else:
49
  # API-based inference (ignoring history)
50
  messages = [{"role": "system", "content": system_message}]
51
- for val in history:
52
- if val[0]:
53
- messages.append({"role": "user", "content": val[0]})
54
- if val[1]:
55
- messages.append({"role": "assistant", "content": val[1]})
56
  messages.append({"role": "user", "content": message})
57
 
58
  response = ""
@@ -143,6 +143,7 @@ with gr.Blocks(css=custom_css) as demo:
143
  def chat_fn(message):
144
  response_gen = respond(
145
  message,
 
146
  system_message.value,
147
  max_tokens.value,
148
  temperature.value,
 
13
 
14
  def respond(
15
  message,
16
+ # history: list[tuple[str, str]],
17
  system_message,
18
  max_tokens,
19
  temperature,
 
26
  if use_local_model:
27
  # Simulate local inference (ignoring history)
28
  messages = [{"role": "system", "content": system_message}]
29
+ # for val in history:
30
+ # if val[0]:
31
+ # messages.append({"role": "user", "content": val[0]})
32
+ # if val[1]:
33
+ # messages.append({"role": "assistant", "content": val[1]})
34
  messages.append({"role": "user", "content": message})
35
 
36
  response = ""
 
48
  else:
49
  # API-based inference (ignoring history)
50
  messages = [{"role": "system", "content": system_message}]
51
+ # for val in history:
52
+ # if val[0]:
53
+ # messages.append({"role": "user", "content": val[0]})
54
+ # if val[1]:
55
+ # messages.append({"role": "assistant", "content": val[1]})
56
  messages.append({"role": "user", "content": message})
57
 
58
  response = ""
 
143
  def chat_fn(message):
144
  response_gen = respond(
145
  message,
146
+ # history: list[tuple[str, str]],
147
  system_message.value,
148
  max_tokens.value,
149
  temperature.value,