Tonic committed on
Commit f9cdb87
1 parent: d76976b

Update maker.py

Files changed (1)
  1. maker.py +5 -9
maker.py CHANGED
@@ -9,7 +9,7 @@ import os
 HF_TOKEN = os.environ["HF_TOKEN"]
 HEADERS = {"Authorization": f"Bearer {HF_TOKEN}"}
 
-tulu = "https://tonic1-tulu.hf.space/--replicas/t5vxm/"
+tulu = "https://tonic1-tulu.hf.space/--replicas/ms6dm/"
 
 
 welcome_message = """
@@ -59,7 +59,6 @@ Sure, I'd be happy to help you build a bot! I'm generating a title, system promp
 
 def predict_beta(message, chatbot=[], system_prompt=system_prompt, max_new_tokens=900, temperature=0.4, top_p=0.9, repetition_penalty=0.9, advanced=True):
     client = Client(tulu)
-    start_phrase = "Sure, I'd be happy to help"
     try:
         result = client.predict(
             message,
@@ -74,9 +73,6 @@ def predict_beta(message, chatbot=[], system_prompt=system_prompt, max_new_token
 
         if result is not None and len(result) > 0:
             bot_message = result[0]
-            last_response_start = bot_message.rfind(start_phrase)
-            if last_response_start != -1:
-                bot_message = bot_message[last_response_start:]
             print(bot_message)
             return bot_message
         else:
@@ -86,8 +82,8 @@ def predict_beta(message, chatbot=[], system_prompt=system_prompt, max_new_token
         error_msg = f"An error occurred: {str(e)}"
         raise gr.Error(error_msg)
 
+
 def extract_title_prompt_example(text, title, system_prompt, example_input):
-    # Default values if the expected format is not found
     default_title = "Custom GPT Agent"
     default_system_prompt = "This is a custom GPT agent."
     default_example_input = "Type your query here."
@@ -116,9 +112,9 @@ def extract_title_prompt_example(text, title, system_prompt, example_input):
         pass
     return text, title, system_prompt, example_input
 
-def make_open_gpt(message, history, current_title, system_prompt, current_example_input):
-    response = predict_beta(message, history, system_prompt)
-    response, title, system_prompt, example_input = extract_title_prompt_example(response, current_title, system_prompt, current_example_input)
+def make_open_gpt(message, history, current_title, current_system_prompt, current_example_input):
+    response = predict_beta(message, history, zephyr_system_prompt)
+    response, title, system_prompt, example_input = extract_title_prompt_example(response, current_title, current_system_prompt, current_example_input)
     return "", history + [(message, response)], title, system_prompt, example_input, [(None, welcome_preview_message.format(title, example_input))], example_input, gr.Column(visible=True), gr.Group(visible=True)
 
 def set_title_example(title, example):
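For context, the functional changes here are the new replica URL and the removal of the start_phrase post-processing in predict_beta. Below is a minimal, hypothetical smoke test for the updated URL, written against gradio_client (presumably where the diff's Client comes from). The endpoint name and the positional arguments beyond message are elided in the diff, so they are assumed here; this is a sketch, not the app's actual call.

from gradio_client import Client

tulu = "https://tonic1-tulu.hf.space/--replicas/ms6dm/"

def smoke_test(message="Hello, can you help me build a bot?"):
    # Same constructor call as predict_beta in the diff above.
    client = Client(tulu)
    # Assumption: the Space exposes a single predict endpoint, so no explicit
    # api_name is needed; the real predict_beta passes additional positional
    # arguments that this hunk does not show.
    result = client.predict(message)
    # predict_beta indexes result[0]; fall back to the raw value in case the
    # endpoint returns a plain string instead of a sequence.
    return result[0] if isinstance(result, (list, tuple)) else result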