Alberto Carmona committed on
Commit
95b13f9
·
1 Parent(s): af90128

Increase the output tokens

Browse files
Files changed (1) hide show
  1. functions.py +1 -1
functions.py CHANGED
@@ -32,7 +32,7 @@ def summarize_text(text: str):
32
  print(['summarize_text', 'generating'])
33
  with torch.cuda.amp.autocast():
34
  output_tokens = model.generate(**batch,
35
- max_new_tokens=256,
36
  generation_config=generation_config
37
  )
38
  output = tokenizer.decode(output_tokens[0], skip_special_tokens=True)
 
32
  print(['summarize_text', 'generating'])
33
  with torch.cuda.amp.autocast():
34
  output_tokens = model.generate(**batch,
35
+ max_new_tokens=512,
36
  generation_config=generation_config
37
  )
38
  output = tokenizer.decode(output_tokens[0], skip_special_tokens=True)