Update app.py
app.py
CHANGED
@@ -39,7 +39,9 @@ def create_response_original(input_str,
     # num_return_sequences=num_return_sequences)[0])

     input_ids = tokenizer.encode(input_str + tokenizer.eos_token, return_tensors="pt")
-    output_ids = original_model.generate(**tokenizer(input_str+tokenizer.eos_token,return_tensors="pt",max_length=200)
+    output_ids = original_model.generate(**tokenizer(input_str+tokenizer.eos_token,return_tensors="pt",max_length=200),
+                                         num_beams=num_beams,
+                                         num_return_sequences=num_return_sequences)
     outputs = []
     for output_id in output_ids:
         output = tokenizer.decode(output_id, skip_special_tokens=True)
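For context, here is a minimal sketch of what the corrected call looks like inside a complete function. The checkpoint name ("microsoft/DialoGPT-medium"), the default parameter values, and the surrounding function body are assumptions for illustration, not taken from this diff; only the generate() call and the decoding loop mirror the changed lines.

# Minimal sketch, assuming a DialoGPT-style causal LM; model name and
# defaults below are illustrative assumptions, not from the original app.py.
from transformers import AutoModelForCausalLM, AutoTokenizer

tokenizer = AutoTokenizer.from_pretrained("microsoft/DialoGPT-medium")
original_model = AutoModelForCausalLM.from_pretrained("microsoft/DialoGPT-medium")

def create_response_original(input_str, num_beams=4, num_return_sequences=3):
    # Tokenize the prompt plus the EOS token and unpack input_ids /
    # attention_mask directly into generate(), along with the beam settings.
    output_ids = original_model.generate(
        **tokenizer(input_str + tokenizer.eos_token,
                    return_tensors="pt",
                    max_length=200),
        num_beams=num_beams,
        num_return_sequences=num_return_sequences,
    )
    # Decode every returned beam into plain text.
    outputs = []
    for output_id in output_ids:
        outputs.append(tokenizer.decode(output_id, skip_special_tokens=True))
    return outputs

Note that num_return_sequences must not exceed num_beams when beam search is used; the values above (3 and 4) respect that constraint.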