sc2582 committed on
Commit
8a04b2a
·
verified ·
1 Parent(s): 40cd6b2

Update app.py

Browse files
Files changed (1) hide show
  1. app.py +0 -26
app.py CHANGED
@@ -22,31 +22,6 @@ tokenizer.padding_side = "left"
22
  if tokenizer.pad_token is None:
23
  tokenizer.pad_token = tokenizer.eos_token
24
 
25
- def generate_response(messages):
26
- # Convert list of dicts (messages) into the required format by the tokenizer
27
- # messages should be a list of {"role": "user"/"assistant", "content": "<text>"}
28
-
29
- message = tokenizer.apply_chat_template(
30
- messages, tokenize=False, add_generation_prompt=True
31
- )
32
-
33
- # Tokenize inputs
34
- tokenized_inputs = tokenizer(message, return_tensors="pt", padding=True, truncation=True, max_length=512).to(model.device)
35
-
36
- # Generate response
37
- outputs = model.generate(
38
- tokenized_inputs["input_ids"],
39
- attention_mask=tokenized_inputs["attention_mask"],
40
- max_new_tokens=2048,
41
- temperature=0.3,
42
- eos_token_id=tokenizer.eos_token_id,
43
- pad_token_id=tokenizer.eos_token_id
44
- )
45
-
46
- # Decode and return output
47
- output_text = tokenizer.decode(outputs[0], skip_special_tokens=True)
48
- return output_text
49
-
50
  def generate_response(messages):
51
  # Convert list of dicts (messages) into the required format by the tokenizer
52
  # messages should be a list of {"role": "user"/"assistant", "content": "<text>"}
@@ -68,7 +43,6 @@ def generate_response(messages):
68
  eos_token_id=[
69
  tokenizer.eos_token_id,
70
  tokenizer.convert_tokens_to_ids("<|eot_id|>"),
71
- tokenizer.convert_tokens_to_ids("<|assistant|>"),
72
  ],
73
  pad_token_id=tokenizer.eos_token_id
74
  )
 
22
  if tokenizer.pad_token is None:
23
  tokenizer.pad_token = tokenizer.eos_token
24
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
25
  def generate_response(messages):
26
  # Convert list of dicts (messages) into the required format by the tokenizer
27
  # messages should be a list of {"role": "user"/"assistant", "content": "<text>"}
 
43
  eos_token_id=[
44
  tokenizer.eos_token_id,
45
  tokenizer.convert_tokens_to_ids("<|eot_id|>"),
 
46
  ],
47
  pad_token_id=tokenizer.eos_token_id
48
  )