Update app.py
app.py CHANGED

@@ -39,7 +39,8 @@ def create_response_original(input_str,
     # num_return_sequences=num_return_sequences)[0])
 
     input_ids = tokenizer.encode(input_str + tokenizer.eos_token, return_tensors="pt")
-    output_ids = fine_tuned_model.generate(input_ids, do_sample=True, max_length=100, temperature=0.2, top_p=0.9, repetition_penalty=1.5, num_return_sequences=6)
+    # output_ids = fine_tuned_model.generate(input_ids, do_sample=True, max_length=100, temperature=0.2, top_p=0.9, repetition_penalty=1.5, num_return_sequences=6)
+    output_ids = fine_tuned_model.generate(input_ids, do_sample=True, max_length=100, temperature=temperature, top_p=top_p, repetition_penalty=repetition_penalty, num_return_sequences=num_return_sequences)
     outputs = ""
     for output_id in output_ids:
         output = tokenizer.decode(output_id, skip_special_tokens=True)