Royrotem100 committed
Commit b4fc999 · Parent(s): 6a9b716

Set pad_token to eos_token and exclude user query from response (another fix)

Files changed (1): app.py +1 -1
app.py CHANGED
@@ -43,7 +43,7 @@ def chat():
     inputs = tokenizer(user_input, return_tensors='pt', padding=True, truncation=True)
     input_ids = inputs['input_ids']
     attention_mask = inputs['attention_mask']
-    outputs = model.generate(input_ids, attention_mask=attention_mask)
+    outputs = model.generate(input_ids, attention_mask=attention_mask, max_new_tokens=100, pad_token_id=tokenizer.eos_token_id)
     response_text = tokenizer.decode(outputs[0], skip_special_tokens=True).replace(user_input, '').strip()
 
     return jsonify({"response": response_text})
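
For context: causal LMs in the GPT-2 family ship without a pad token, so `model.generate` warns (or errors under padding) unless `pad_token_id` is set explicitly, and `tokenizer.eos_token_id` is the usual stand-in. `max_new_tokens=100` bounds the reply length, and `.replace(user_input, '')` strips the echoed prompt so only the generated continuation is returned, matching the commit message. Below is a minimal sketch of how the full route might fit together; the model name, route path, and request field are assumptions for illustration, not taken from this repository.

```python
# Minimal sketch of a chat endpoint around the patched generate() call.
# "gpt2", the "/chat" route, and the "message" request field are assumptions.
from flask import Flask, request, jsonify
from transformers import AutoModelForCausalLM, AutoTokenizer

app = Flask(__name__)
tokenizer = AutoTokenizer.from_pretrained("gpt2")   # assumed model
model = AutoModelForCausalLM.from_pretrained("gpt2")
tokenizer.pad_token = tokenizer.eos_token           # GPT-2 defines no pad token

@app.route("/chat", methods=["POST"])
def chat():
    user_input = request.json.get("message", "")    # assumed request shape
    inputs = tokenizer(user_input, return_tensors='pt', padding=True, truncation=True)
    outputs = model.generate(
        inputs['input_ids'],
        attention_mask=inputs['attention_mask'],
        max_new_tokens=100,                          # bound the reply length
        pad_token_id=tokenizer.eos_token_id,         # avoid the missing-pad-token warning
    )
    # Decode, then drop the echoed prompt so only the model's reply is returned.
    response_text = tokenizer.decode(outputs[0], skip_special_tokens=True).replace(user_input, '').strip()
    return jsonify({"response": response_text})
```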