lightmate committed on
Commit
6ecb4e5
·
verified ·
1 Parent(s): 1c3f8cd

Update app.py

Browse files
Files changed (1) hide show
  1. app.py +6 -16
app.py CHANGED
@@ -1,6 +1,5 @@
1
  import os
2
  from pathlib import Path
3
- import requests
4
  import torch
5
  from threading import Event, Thread
6
  from transformers import AutoConfig, AutoTokenizer
@@ -113,7 +112,6 @@ with gr.Blocks() as iface:
113
  return input_tokens
114
 
115
  input_ids = convert_history_to_token(history)
116
- streamer = gr.Textbox.update()
117
 
118
  generate_kwargs = dict(
119
  input_ids=input_ids,
@@ -121,22 +119,14 @@ with gr.Blocks() as iface:
121
  temperature=temperature,
122
  top_p=top_p,
123
  top_k=top_k,
124
- repetition_penalty=repetition_penalty,
125
- streamer=streamer
126
  )
127
 
128
- event = Event()
129
- def generate_and_signal_complete():
130
- ov_model.generate(**generate_kwargs)
131
- event.set()
132
-
133
- t1 = Thread(target=generate_and_signal_complete)
134
- t1.start()
135
-
136
- partial_text = ""
137
- for new_text in streamer:
138
- partial_text += new_text
139
- history[-1][1] = partial_text
140
  yield history
141
 
142
  # Set up the interface with inputs and outputs
 
1
  import os
2
  from pathlib import Path
 
3
  import torch
4
  from threading import Event, Thread
5
  from transformers import AutoConfig, AutoTokenizer
 
112
  return input_tokens
113
 
114
  input_ids = convert_history_to_token(history)
 
115
 
116
  generate_kwargs = dict(
117
  input_ids=input_ids,
 
119
  temperature=temperature,
120
  top_p=top_p,
121
  top_k=top_k,
122
+ repetition_penalty=repetition_penalty
 
123
  )
124
 
125
+ # Stream response to textbox
126
+ response = ""
127
+ for new_text in ov_model.generate(**generate_kwargs):
128
+ response += new_text
129
+ history[-1][1] = response
 
 
 
 
 
 
 
130
  yield history
131
 
132
  # Set up the interface with inputs and outputs