awacke1 committed
Commit b456d92 · 1 Parent(s): 256841d

Update app.py

Files changed (1): app.py (+6 -24)
app.py CHANGED
@@ -45,7 +45,6 @@ def predict(inputs, top_p, temperature, chat_counter, chatbot=[], history=[]):
      temp3["role"] = "user"
      temp3["content"] = inputs
      messages.append(temp3)
-     #messages
      payload = {
          "model": "gpt-3.5-turbo",
          "messages": messages, #[{"role": "user", "content": f"{inputs}"}],
@@ -61,28 +60,19 @@ def predict(inputs, top_p, temperature, chat_counter, chatbot=[], history=[]):
      # 4. POST it to OPENAI API
      history.append(inputs)
      print(f"payload is - {payload}")
-     # make a POST request to the API endpoint using the requests.post method, passing in stream=True
      response = requests.post(API_URL, headers=headers, json=payload, stream=True)
-     #response = requests.post(API_URL, headers=headers, json=payload, stream=True)
      token_counter = 0
      partial_words = ""

      # 5. Iterate through response lines and structure readable response
-     # TODO - make this parse out markdown so we can have similar interface
      counter=0
      for chunk in response.iter_lines():
-         #Skipping first chunk
          if counter == 0:
              counter+=1
              continue
-         #counter+=1
-         # check whether each line is non-empty
          if chunk.decode() :
              chunk = chunk.decode()
-             # decode each line as response data is in bytes
              if len(chunk) > 12 and "content" in json.loads(chunk[6:])['choices'][0]['delta']:
-                 #if len(json.loads(chunk.decode()[6:])['choices'][0]["delta"]) == 0:
-                 #    break
                  partial_words = partial_words + json.loads(chunk[6:])['choices'][0]["delta"]["content"]
                  if token_counter == 0:
                      history.append(" " + partial_words)
@@ -90,7 +80,7 @@ def predict(inputs, top_p, temperature, chat_counter, chatbot=[], history=[]):
                      history[-1] = partial_words
                  chat = [(history[i], history[i + 1]) for i in range(0, len(history) - 1, 2) ] # convert to tuples of list
                  token_counter+=1
-                 yield chat, history, chat_counter # resembles {chatbot: chat, state: history}
+                 yield chat, history, chat_counter


  def reset_textbox():
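The streaming loop above consumes OpenAI's server-sent-event format, where each non-empty line looks like `data: {...}`; slicing with `chunk[6:]` drops the 6-character `data: ` prefix before `json.loads`, and the `len(chunk) > 12` guard also skips the terminating `data: [DONE]` line (exactly 12 characters). A self-contained sketch of that parsing, using fabricated chunks rather than a live response:

import json

# Fabricated stand-ins for the lines yielded by response.iter_lines().
sample_chunks = [
    b'data: {"choices": [{"delta": {"role": "assistant"}}]}',
    b'data: {"choices": [{"delta": {"content": "Hello"}}]}',
    b'data: {"choices": [{"delta": {"content": " world"}}]}',
    b"data: [DONE]",
]

partial_words = ""
for raw in sample_chunks:
    line = raw.decode()
    # Role-only deltas fail the "content" test; "data: [DONE]" fails the length test.
    if len(line) > 12 and "content" in json.loads(line[6:])["choices"][0]["delta"]:
        partial_words += json.loads(line[6:])["choices"][0]["delta"]["content"]

print(partial_words)  # -> Hello world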
@@ -117,33 +107,25 @@ description = """
  - **Toronto Books Corpus:** A dataset of over 7,000 books from a variety of genres, collected by the University of Toronto.
    - [Massively Multilingual Sentence Embeddings for Zero-Shot Cross-Lingual Transfer and Beyond](https://paperswithcode.com/dataset/bookcorpus) by Schwenk and Douze.
  - **OpenWebText:** A dataset of web pages that were filtered to remove content that was likely to be low-quality or spammy. This dataset was used to pretrain GPT-3.
-   - [Language Models are Few-Shot Learners](https://paperswithcode.com/dataset/openwebtext) by Brown et al.
-
+   - [Language Models are Few-Shot Learners](https://paperswithcode.com/dataset/openwebtext) by Brown et al.
  """

  # 6. Use Gradio to pull it all together
- with gr.Blocks(css = """#col_container {width: 1000px; margin-left: auto; margin-right: auto;}
-                  #chatbot {height: 520px; overflow: auto;}""") as demo:
-
-
+ with gr.Blocks(css = """#col_container {width: 1000px; margin-left: auto; margin-right: auto;} #chatbot {height: 520px; overflow: auto;}""") as demo:
      gr.HTML(title)
-
-
      with gr.Column(elem_id = "col_container"):
          chatbot = gr.Chatbot(elem_id='chatbot') #c
          inputs = gr.Textbox(placeholder= "Hi there!", label= "Type an input and press Enter") #t
          state = gr.State([]) #s
          b1 = gr.Button()
-
      with gr.Accordion("Parameters", open=False):
          top_p = gr.Slider( minimum=-0, maximum=1.0, value=1.0, step=0.05, interactive=True, label="Top-p (nucleus sampling)",)
          temperature = gr.Slider( minimum=-0, maximum=5.0, value=1.0, step=0.1, interactive=True, label="Temperature",)
          chat_counter = gr.Number(value=0, visible=False, precision=0)

-     inputs.submit( predict, [inputs, top_p, temperature,chat_counter, chatbot, state], [chatbot, state, chat_counter],)
-     b1.click( predict, [inputs, top_p, temperature, chat_counter, chatbot, state], [chatbot, state, chat_counter],)
+     inputs.submit(predict, [inputs, top_p, temperature,chat_counter, chatbot, state], [chatbot, state, chat_counter],)
+     b1.click(predict, [inputs, top_p, temperature, chat_counter, chatbot, state], [chatbot, state, chat_counter],)
      b1.click(reset_textbox, [], [inputs])
      inputs.submit(reset_textbox, [], [inputs])
-
      gr.Markdown(description)
- demo.queue().launch(debug=True)
+ demo.queue().launch(debug=True)
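Since `predict` is a generator, each `yield chat, history, chat_counter` streams an update into the `[chatbot, state, chat_counter]` outputs wired up here, with `demo.queue()` enabling generator streaming. A stripped-down sketch of the same pattern, with a placeholder echo generator standing in for `predict`:

import gradio as gr

def stream_reply(user_input, history):
    # Placeholder generator standing in for predict(): it echoes the input word by word,
    # yielding the growing chat so Gradio streams updates into the Chatbot.
    reply = ""
    for word in f"You said: {user_input}".split():
        reply = (reply + " " + word).strip()
        yield history + [(user_input, reply)], history + [(user_input, reply)]

with gr.Blocks() as demo:
    chatbot = gr.Chatbot()
    state = gr.State([])  # list of (user, assistant) tuples
    box = gr.Textbox(label="Type an input and press Enter")
    box.submit(stream_reply, [box, state], [chatbot, state])

demo.queue().launch()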
 