Pijush2023 committed
Commit 2ba666c · verified · 1 Parent(s): 3bba829

Update app.py

Files changed (1)
  1. app.py +1 -1
app.py CHANGED
@@ -667,7 +667,7 @@ def generate_answer(message, choice, retrieval_mode, selected_model):
     logging.debug(f"Generated LM-2 prompt: {prompt}")

     response = selected_model(prompt, **{
-        "max_new_tokens": 300,
+        "max_new_tokens": 400,
         "return_full_text": False,
         "temperature": 0.0,
         "do_sample": False,