dar-tau committed
Commit dd58665 · verified · 1 parent: 94466d1

Update app.py

Files changed (1): app.py (+14, -27)
app.py CHANGED
@@ -1,5 +1,6 @@
 import os
 from typing import Optional, Tuple, Any
+from copy import deepcopy
 from functools import partial
 import gradio as gr
 import spaces
@@ -37,31 +38,15 @@ Assistant: "girlfriend;mother;father;friend"
 
 '''
 
-'''
-You will now get a blank message from the user and then after your answer, the user will give you the text to complete:
-Example:
-
->> User:
->> Assistant: <Waiting for text>
->> User: Help me write a sentiment analysis pipeline
->> Assistant: using huggingface;using NLTK;using python
-'''
+# '''
+# You will now get a blank message from the user and then after your answer, the user will give you the text to complete:
+# Example:
 
-
-extra_prompt = '''
-Examples:
-(1)
-User: "Help me write a sentiment analysis pipeline"
-Assistant: "using huggingface;using NLTK;using python"
-
-(2)
-User: "My name is"
-Assistant: "" (nothing much to contribute at this point. return nothing)
-
-(3)
-User: "Help me find a present for my"
-Assistant: "girlfriend;mother;father;friend"
-'''
+# >> User:
+# >> Assistant: <Waiting for text>
+# >> User: Help me write a sentiment analysis pipeline
+# >> Assistant: using huggingface;using NLTK;using python
+# '''
 
 
 start_messages = [
@@ -104,12 +89,14 @@ def generate(text, past_key_values):
         *start_messages,
         {'role': 'user', 'content': text}
     ]
+
+    cur_generate_kwargs = deepcopy(generate_kwargs)
 
     if past_key_values:
         past_key_values = past_kv_to_device(past_key_values, pipe.model.device, pipe.model.dtype)
-    response = pipe(messages,
-                    past_key_values=past_key_values,
-                    **generate_kwargs)[0]['generated_text']
+        cur_generate_kwargs.update({'past_key_values': past_key_values})
+
+    response = pipe(messages, **cur_generate_kwargs)[0]['generated_text']
     return response[-1]['content']
 
 
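In effect, the commit makes generate() assemble its generation kwargs per request instead of passing past_key_values as a standalone keyword next to **generate_kwargs. A minimal sketch of the resulting pattern, assuming (as in the rest of app.py) that pipe is a transformers text-generation pipeline and that generate_kwargs, start_messages and past_kv_to_device are module-level objects; the max_new_tokens value below is illustrative only:

from copy import deepcopy

generate_kwargs = {'max_new_tokens': 64}   # shared defaults (illustrative value)

def generate(text, past_key_values=None):
    messages = [
        *start_messages,                   # few-shot prompt defined earlier in app.py
        {'role': 'user', 'content': text}
    ]

    # Work on a per-request copy so the shared defaults are never mutated;
    # otherwise a past_key_values cache from one request could leak into later calls.
    cur_generate_kwargs = deepcopy(generate_kwargs)

    if past_key_values:
        # Move the cached KV state to the model's device/dtype before reuse.
        past_key_values = past_kv_to_device(past_key_values, pipe.model.device, pipe.model.dtype)
        cur_generate_kwargs.update({'past_key_values': past_key_values})

    response = pipe(messages, **cur_generate_kwargs)[0]['generated_text']
    return response[-1]['content']

Compared with the previous version, which always forwarded past_key_values= alongside **generate_kwargs, the argument is now simply omitted when no cache is available rather than being passed as an empty value.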