Pijush2023 committed
Commit 248ba3c · verified · 1 Parent(s): 98a314b

Update app.py

Files changed (1)
  1. app.py +2 -2
app.py CHANGED
@@ -658,9 +658,9 @@ def generate_answer(message, choice, retrieval_mode, selected_model):
     logging.debug(f"Generated LM-2 prompt: {prompt}")
 
     response = selected_model(prompt, **{
-        "max_new_tokens": 200,
+        "max_new_tokens": 250,
         "return_full_text": True,
-        "temperature": 0.3,
+        "temperature": 0.1,
         "do_sample": True,
     })
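
For context, a minimal sketch of how the updated call behaves, assuming `selected_model` is a Hugging Face transformers text-generation pipeline; the commit only shows the call site, so the pipeline setup and model name below are illustrative placeholders, not part of the actual app.py:

# Illustrative sketch only -- assumes `selected_model` is a transformers
# text-generation pipeline; the model name is a placeholder.
from transformers import pipeline

selected_model = pipeline("text-generation", model="gpt2")  # placeholder model

prompt = "Generated LM-2 prompt goes here"
response = selected_model(prompt, **{
    "max_new_tokens": 250,     # raised from 200: room for longer answers
    "return_full_text": True,  # include the prompt in the returned text
    "temperature": 0.1,        # lowered from 0.3: near-deterministic sampling
    "do_sample": True,
})
print(response[0]["generated_text"])

The net effect of the two changed parameters: completions can run up to 250 new tokens instead of 200, and sampling at temperature 0.1 keeps the output much closer to greedy decoding even though do_sample remains enabled.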