Pijush2023 committed on
Commit
184e03e
·
verified ·
1 Parent(s): 399f103

Update app.py

Browse files
Files changed (1) hide show
  1. app.py +2 -2
app.py CHANGED
@@ -668,9 +668,9 @@ def generate_answer(message, choice, retrieval_mode, selected_model):
668
  logging.debug(f"Generated LM-2 prompt: {prompt}")
669
 
670
  response = selected_model(prompt, **{
671
- "max_new_tokens": 400,
672
  "return_full_text": True,
673
- "temperature": 0.7,
674
  "do_sample": True,
675
  })
676
 
 
668
  logging.debug(f"Generated LM-2 prompt: {prompt}")
669
 
670
  response = selected_model(prompt, **{
671
+ "max_new_tokens": 150,
672
  "return_full_text": True,
673
+ "temperature": 0.3,
674
  "do_sample": True,
675
  })
676