khurrameycon committed · Commit 60fad09 · verified · 1 Parent(s): 09fd8b1

transcript-text generation limit increased, response updated

Files changed (1): app.py (+22, -6)
app.py CHANGED
@@ -104,14 +104,30 @@ def predict_image(image_url, text, file_pref):
         # Process the inputs and move to the appropriate device
         inputs = processor(image, input_text, return_tensors="pt").to(device)

-        outputs = model.generate(**inputs, max_new_tokens=100)
+        # outputs = model.generate(**inputs, max_new_tokens=100)

-        # Decode the output to return the final response
-        response = processor.decode(outputs[0], skip_special_tokens=True)
+        # # Decode the output to return the final response
+        # response = processor.decode(outputs[0], skip_special_tokens=True)
+
+        # return response
+
+        streamer = TextIteratorStreamer(processor, skip_special_tokens=True, skip_prompt=True)
+
+        generation_kwargs = dict(inputs, streamer=streamer, max_new_tokens=2048)
+        generated_text = ""
+
+        thread = Thread(target=model.generate, kwargs=generation_kwargs)
+        thread.start()
+        buffer = ""
+
+        for new_text in streamer:
+            buffer += new_text
+            # generated_text_without_prompt = buffer
+            # # time.sleep(0.01)
+            # yield buffer
+
+        return buffer

-        # return buffer
-        return response
-
     except Exception as e:
         raise ValueError(f"Error during prediction: {str(e)}")
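The new code replaces the single blocking model.generate + processor.decode call with Hugging Face's TextIteratorStreamer: generation runs on a background Thread, the main thread drains decoded text chunks from the streamer into buffer, and the token budget rises from 100 to 2048. Because the `yield buffer` line stays commented out, the function still returns one complete string rather than streaming to its caller. Below is a minimal, self-contained sketch of the same pattern; the small text-only model ("gpt2"), the short prompt, and max_new_tokens=64 are illustrative assumptions only, since the app itself passes its multimodal processor, image+text inputs, and max_new_tokens=2048.

# Minimal sketch of the TextIteratorStreamer pattern adopted above, using a
# small text-only model ("gpt2") so it runs anywhere; the real app passes a
# multimodal processor plus image+text inputs and max_new_tokens=2048.
from threading import Thread

import torch
from transformers import AutoModelForCausalLM, AutoTokenizer, TextIteratorStreamer

device = "cuda" if torch.cuda.is_available() else "cpu"
tokenizer = AutoTokenizer.from_pretrained("gpt2")
model = AutoModelForCausalLM.from_pretrained("gpt2").to(device)

inputs = tokenizer("The quick brown fox", return_tensors="pt").to(device)

# skip_prompt=True drops the echoed prompt; skip_special_tokens is forwarded
# to the tokenizer's decode() call for every streamed chunk.
streamer = TextIteratorStreamer(tokenizer, skip_special_tokens=True, skip_prompt=True)

# generate() blocks until it finishes, so it runs on a worker thread while
# the main thread consumes decoded chunks as they become available.
generation_kwargs = dict(inputs, streamer=streamer, max_new_tokens=64)
thread = Thread(target=model.generate, kwargs=generation_kwargs)
thread.start()

buffer = ""
for new_text in streamer:          # iteration ends when generation finishes
    buffer += new_text             # a UI callback could also yield chunks here

thread.join()
print(buffer)                      # the full decoded continuation

Note that the pattern assumes Thread (from threading) and TextIteratorStreamer (from transformers) are imported elsewhere in app.py; that part of the file is outside this hunk.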