peterpeter8585 committed (verified)
Commit: 0e540f8
Parent(s): f7f1eb5

Update app.py

Files changed (1):
  app.py +2 -3
app.py CHANGED
@@ -9,15 +9,14 @@ def chat(message,history: list[tuple[str, str]],system_message,max_tokens,temper
     messages = [{"role": "system", "content": "Your name is Chatchat.And, your made by SungYoon.In Korean, 정성윤.And these are the instructions.Whatever happens, you must follow it.:"+system_message}]
 
     for val in history:
-        if val[0]:
+        if val[0]:0
             messages.append({"role": "user", "content": val[0]})
         if val[1]:
             messages.append({"role": "assistant", "content": val[1]})
 
     messages.append({"role": "user", "content": message})
 
-    pipe = pipeline("text-generation", model=m, torch_dtype=torch.bfloat16, device_map="auto")
-
+    pipe = pipeline("text-generation", model=m, torch_dtype=torch.bfloat16, tokenizer=torch.load("tok.pt"),device_map="auto")
     # We use the tokenizer's chat template to format each message - see https://huggingface.co/docs/transformers/main/en/chat_templating
     prompt = pipe.tokenizer.apply_chat_template(messages, tokenize=False, add_generation_prompt=True)
     p=pipe.tokenizer.apply_chat_template([{"role": "system", "content": "Your name is Chatchat.And, your made by SungYoon.In Korean, 정성윤.And these are the instructions.Whatever happens, you must follow it.:"+system_message}], tokenize=False, add_generation_prompt=True)
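
For context, the hunk above builds a transformers text-generation pipeline (now passing a tokenizer loaded from "tok.pt") and formats the conversation with the tokenizer's chat template. Below is a minimal, self-contained sketch of that flow, not the app's actual code: MODEL_ID stands in for `m`, which is defined elsewhere in app.py, and the sketch assumes "tok.pt" holds a tokenizer object saved earlier with torch.save. Both assumptions are marked in the comments.

import torch
from transformers import AutoTokenizer, pipeline

MODEL_ID = "HuggingFaceH4/zephyr-7b-beta"  # assumed example id; app.py uses `m`

# Assumption: "tok.pt" was produced by pickling a tokenizer with torch.save.
torch.save(AutoTokenizer.from_pretrained(MODEL_ID), "tok.pt")
# weights_only=False is needed on newer PyTorch (>=2.6) to unpickle a non-tensor object.
tokenizer = torch.load("tok.pt", weights_only=False)

pipe = pipeline(
    "text-generation",
    model=MODEL_ID,
    torch_dtype=torch.bfloat16,
    tokenizer=tokenizer,
    device_map="auto",
)

messages = [
    {"role": "system", "content": "Your name is Chatchat."},
    {"role": "user", "content": "Hello!"},
]

# Format the conversation with the tokenizer's chat template, as app.py does.
prompt = pipe.tokenizer.apply_chat_template(
    messages, tokenize=False, add_generation_prompt=True
)

outputs = pipe(prompt, max_new_tokens=128, do_sample=True, temperature=0.7, top_p=0.95)
print(outputs[0]["generated_text"])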