Update app.py
app.py
CHANGED
@@ -43,11 +43,11 @@ def create_response_original(input_str,
 
     input_ids = tokenizer.encode(input_str + tokenizer.eos_token, return_tensors="pt")
     #output_ids = fine_tuned_model.generate(input_ids,do_sample=True, max_length=100, temperature=0.2, top_p=0.9, repetition_penalty=1.5,num_return_sequences=6)
-    output_ids = fine_tuned_model.generate(input_ids,do_sample=
-    outputs =
+    output_ids = fine_tuned_model.generate(input_ids,do_sample=do_sample, max_length=100, temperature=temperature, top_p=top_p, top_k=top_k, repetition_penalty=repetition_penalty,num_return_sequences=num_return_sequences, num_beams = num_beams)
+    outputs = ""
     for output_id in output_ids:
         output = tokenizer.decode(output_id, skip_special_tokens=True)
-        outputs
+        outputs=outputs+output+"\\n"
     return outputs
 
 def create_response_fine_tuned(input_str):
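For context, here is a minimal sketch of how the full function plausibly reads after this commit. The hunk header truncates the signature of create_response_original, so the parameter list (do_sample, temperature, top_p, top_k, repetition_penalty, num_return_sequences, num_beams) and the default values below are assumptions inferred from the new generate() call; tokenizer and fine_tuned_model are assumed to be loaded elsewhere in app.py, and the model name shown is only a placeholder.

from transformers import AutoModelForCausalLM, AutoTokenizer

# Assumed setup: the Space loads a tokenizer and a fine-tuned causal LM
# elsewhere in app.py; "microsoft/DialoGPT-medium" is a placeholder here.
tokenizer = AutoTokenizer.from_pretrained("microsoft/DialoGPT-medium")
fine_tuned_model = AutoModelForCausalLM.from_pretrained("microsoft/DialoGPT-medium")

def create_response_original(input_str, do_sample=True, temperature=0.2,
                             top_p=0.9, top_k=50, repetition_penalty=1.5,
                             num_return_sequences=1, num_beams=1):
    # Encode the prompt, appending the end-of-sequence token.
    input_ids = tokenizer.encode(input_str + tokenizer.eos_token,
                                 return_tensors="pt")
    # Generate candidate continuations using the tunable settings
    # this commit threads through instead of hard-coded values.
    output_ids = fine_tuned_model.generate(
        input_ids, do_sample=do_sample, max_length=100,
        temperature=temperature, top_p=top_p, top_k=top_k,
        repetition_penalty=repetition_penalty,
        num_return_sequences=num_return_sequences, num_beams=num_beams)
    # Decode every returned sequence and join them with a literal "\n"
    # separator, exactly as the committed line does.
    outputs = ""
    for output_id in output_ids:
        output = tokenizer.decode(output_id, skip_special_tokens=True)
        outputs = outputs + output + "\\n"
    return outputs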