Spaces:
Sleeping
Sleeping
Update app.py
Browse files
app.py
CHANGED
@@ -41,7 +41,8 @@ def create_response_original(input_str,
     input_ids = tokenizer.encode(input_str + tokenizer.eos_token, return_tensors="pt")
     output_ids = original_model.generate(**tokenizer(input_str+tokenizer.eos_token,return_tensors="pt",max_length=200),
                                          num_beams=num_beams,
-                                         num_return_sequences=num_return_sequences
+                                         num_return_sequences=num_return_sequences,
+                                         do_sample=do_sample)
     outputs = []
     for output_id in output_ids:
         output = tokenizer.decode(output_id, skip_special_tokens=True)