Pijush2023 committed on
Commit
ae9b30b
·
verified ·
1 Parent(s): ddf2069

Update app.py

Browse files
Files changed (1) hide show
  1. app.py +1 -1
app.py CHANGED
@@ -659,7 +659,7 @@ def generate_answer(message, choice, retrieval_mode, selected_model):
659
  logging.debug(f"Generated LM-2 prompt: {prompt}")
660
 
661
  response = selected_model(prompt, **{
662
- "max_new_tokens": 250,
663
  "return_full_text": True,
664
  "temperature": 0.1,
665
  "do_sample": True,
 
659
  logging.debug(f"Generated LM-2 prompt: {prompt}")
660
 
661
  response = selected_model(prompt, **{
662
+ "max_new_tokens": 300,
663
  "return_full_text": True,
664
  "temperature": 0.1,
665
  "do_sample": True,