Pijush2023 committed on
Commit
f759463
·
verified ·
1 Parent(s): 76f14a9

Update app.py

Browse files
Files changed (1) hide show
  1. app.py +2 -2
app.py CHANGED
@@ -663,8 +663,8 @@ def generate_answer(message, choice, retrieval_mode, selected_model):
663
  logging.debug(f"Generated LM-2 prompt: {prompt}")
664
 
665
  response = selected_model(prompt, **{
666
- "max_new_tokens": 300,
667
- "return_full_text": False,
668
  "temperature": 0.0,
669
  "do_sample": False,
670
  })
 
663
  logging.debug(f"Generated LM-2 prompt: {prompt}")
664
 
665
  response = selected_model(prompt, **{
666
+ "max_new_tokens": 250,
667
+ "return_full_text": True,
668
  "temperature": 0.0,
669
  "do_sample": False,
670
  })