dar-tau committed on
Commit
c01c7c6
·
verified ·
1 Parent(s): d6a55c3

Update app.py

Browse files
Files changed (1) hide show
  1. app.py +23 -16
app.py CHANGED
@@ -9,9 +9,16 @@ from transformers import AutoTokenizer, AutoModelForCausalLM, pipeline
9
  from dataclasses import dataclass
10
 
11
 
12
- chatml_template = """{% for message in messages %}
13
- {{'<|im_start|>' + message['role'] + '\n' + message['content'] + '<|im_end|>' + '\n'}}
14
- {% endfor %}"""
 
 
 
 
 
 
 
15
 
16
 
17
  system_prompt = '''You are given a partial input text for another AI chat interface.
@@ -25,16 +32,16 @@ Answers should be only the completions themselves. If you have nothing as a comp
25
 
26
  Examples:
27
  (1)
28
- User: "Help me write a sentiment analysis pipeline"
29
- Assistant: "using huggingface;using NLTK;using python"
30
 
31
  (2)
32
- User: "My name is"
33
- Assistant: "<NOTHING>" (nothing much to contribute at this point. return nothing)
34
 
35
  (3)
36
- User: "Help me find a present for my"
37
- Assistant: "girlfriend;mother;father;friend"
38
  '''
39
 
40
  # setup
@@ -90,20 +97,20 @@ def set_past_key_values():
90
  return detach_past_kv(model(tokenized.to(model.device)).past_key_values)
91
 
92
 
93
@spaces.GPU
def generate(text, past_key_values):
    """Run the chat pipeline on *text*, optionally reusing a cached KV state.

    Appends *text* as a user turn after the module-level ``start_messages``,
    moves ``past_key_values`` (if given) onto the pipeline's device/dtype,
    and returns the content of the final generated chat message.
    """
    chat = list(start_messages)
    chat.append({'role': 'user', 'content': text})

    gen_kwargs = deepcopy(generate_kwargs)
    if past_key_values:
        gen_kwargs['past_key_values'] = past_kv_to_device(
            past_key_values, pipe.model.device, pipe.model.dtype)

    response = pipe(chat, **gen_kwargs)[0]['generated_text']
    print(response)
    return response[-1]['content']
109
 
 
9
  from dataclasses import dataclass
10
 
11
 
12
+ # chatml_template = """{% for message in messages %}
13
+ # {{'<|im_start|>' + message['role'] + '\n' + message['content'] + '<|im_end|>' + '\n'}}
14
+ # {% endfor %}"""
15
+
16
+ prompt_format = '''<|im_start|>system
17
+ {system_message}<|im_end|>
18
+ <|im_start|>user
19
+ {prompt}<|im_end|>
20
+ <|im_start|>assistant
21
+ '''
22
 
23
 
24
  system_prompt = '''You are given a partial input text for another AI chat interface.
 
32
 
33
  Examples:
34
  (1)
35
+ User: Help me write a sentiment analysis pipeline
36
+ Assistant: using huggingface;using NLTK;using python
37
 
38
  (2)
39
+ User: My name is
40
+ Assistant: <NOTHING> (nothing much to contribute at this point. return nothing)
41
 
42
  (3)
43
+ User: Help me find a present for my
44
+ Assistant: girlfriend;mother;father;friend
45
  '''
46
 
47
  # setup
 
97
  return detach_past_kv(model(tokenized.to(model.device)).past_key_values)
98
 
99
 
100
# @spaces.GPU  # NOTE(review): disabled in this revision — presumably while
# testing off-GPU; confirm before deploying to a ZeroGPU space.
def generate(text, past_key_values):
    """Generate autocomplete suggestions for the partial input *text*.

    Renders the ChatML prompt from ``prompt_format``/``system_prompt``, runs
    it through the module-level ``pipe``, optionally reusing cached attention
    ``past_key_values`` (moved to the pipeline's device/dtype first), and
    returns the generated completion text.
    """
    cur_generate_kwargs = deepcopy(generate_kwargs)

    if past_key_values:
        past_key_values = past_kv_to_device(past_key_values, pipe.model.device, pipe.model.dtype)
        cur_generate_kwargs.update({'past_key_values': past_key_values})

    prompt = prompt_format.format(system_message=system_prompt, prompt=text)
    response = pipe(prompt, **cur_generate_kwargs)[0]['generated_text']
    print(response)

    # Bug fix: with a plain-string prompt the text-generation pipeline returns
    # 'generated_text' as a string, not a list of chat messages, so the old
    # `response[-1]['content']` indexed the last character and raised TypeError.
    if isinstance(response, str):
        # Strip the echoed prompt when return_full_text is left at its default.
        return response[len(prompt):] if response.startswith(prompt) else response
    return response[-1]['content']
116