Tonic committed on
Commit
e87172a
·
verified ·
1 Parent(s): 2f228d6

Update app.py

Browse files
Files changed (1) hide show
  1. app.py +1 -1
app.py CHANGED
@@ -20,7 +20,7 @@ raven_pipeline = pipeline(
20
  @spaces.GPU
21
  def process_text(input_text: str) -> str:
22
  prompt = f"User Query: {input_text}<human_end>"
23
- result = raven_pipeline(prompt, max_new_tokens=2048, return_full_text=False, do_sample=False, temperature=0.001)[0]["generated_text"]
24
  torch.cuda.empty_cache()
25
  return result
26
 
 
20
  @spaces.GPU
21
  def process_text(input_text: str) -> str:
22
  prompt = f"User Query: {input_text}<human_end>"
23
+ result = raven_pipeline(prompt, max_new_tokens=2048, return_full_text=False, do_sample=False)[0]["generated_text"]
24
  torch.cuda.empty_cache()
25
  return result
26