futranbg committed on
Commit
2147ae4
1 Parent(s): efc5592

Update app.py

Browse files
Files changed (1) hide show
  1. app.py +5 -5
app.py CHANGED
@@ -9,7 +9,7 @@ from huggingface_hub import Repository, InferenceClient
9
  HF_TOKEN = os.environ.get("HF_TOKEN", None)
10
  API_URL = "https://api-inference.huggingface.co/models/meta-llama/Llama-2-70b-chat-hf"
11
  API_URL_2 = "https://api-inference.huggingface.co/models/codellama/CodeLlama-34b-Instruct-hf"
12
- BOT_NAME = "LLAMA"
13
 
14
  STOP_SEQUENCES = ["\nUser:", " User:", "###", "</s>"]
15
 
@@ -35,13 +35,13 @@ def format_prompt(message, history, system_prompt):
35
  prompt = ""
36
  if system_prompt:
37
  prompt += f"System: {system_prompt}\n"
38
- # for user_prompt, bot_response in history:
39
- # prompt += f"User: {user_prompt}\n"
40
- # prompt += f"LLAMA: {bot_response}\n" # Response already contains "Falcon: "
41
  prompt += f"""User: {message}\n{BOT_NAME}:"""
42
  return prompt
43
 
44
- seed = 42442
45
 
46
  def generate(
47
  prompt, history, system_prompt="", temperature=0.9, max_new_tokens=256, top_p=0.95, repetition_penalty=1.0,
 
9
  HF_TOKEN = os.environ.get("HF_TOKEN", None)
10
  API_URL = "https://api-inference.huggingface.co/models/meta-llama/Llama-2-70b-chat-hf"
11
  API_URL_2 = "https://api-inference.huggingface.co/models/codellama/CodeLlama-34b-Instruct-hf"
12
+ BOT_NAME = "Assistant"
13
 
14
  STOP_SEQUENCES = ["\nUser:", " User:", "###", "</s>"]
15
 
 
35
  prompt = ""
36
  if system_prompt:
37
  prompt += f"System: {system_prompt}\n"
38
+ for user_prompt, bot_response in history:
39
+ prompt += f"User: {user_prompt}\n"
40
+ prompt += f"{BOT_NAME}: {bot_response}\n"
41
  prompt += f"""User: {message}\n{BOT_NAME}:"""
42
  return prompt
43
 
44
+ seed = 42
45
 
46
  def generate(
47
  prompt, history, system_prompt="", temperature=0.9, max_new_tokens=256, top_p=0.95, repetition_penalty=1.0,