Update app.py
app.py CHANGED
@@ -40,10 +40,10 @@ def create_response_original(input_str,

     input_ids = tokenizer.encode(input_str + tokenizer.eos_token, return_tensors="pt")
     output_ids = fine_tuned_model.generate(input_ids,do_sample=True, max_length=100, temperature=0.2, top_p=0.9, repetition_penalty=1.5,num_return_sequences=6)
-    outputs =
+    outputs = ""
     for output_id in output_ids:
         output = tokenizer.decode(output_id, skip_special_tokens=True)
-        outputs
+        outputs= outputs+output+"<br/>"
     return outputs

 def create_response_fine_tuned(input_str):
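This hunk fixes create_response_original: the removed lines were left incomplete, and the commit initializes outputs as an empty string and appends each decoded candidate followed by a <br/> tag. Below is a minimal runnable sketch of the repaired logic, assuming a DialoGPT-style checkpoint and a simplified one-argument signature; the real function in app.py takes additional parameters and loads its own tokenizer and fine_tuned_model.

# Sketch only: the checkpoint name and the one-argument signature are assumptions;
# the decoding parameters and the output-joining loop come from the diff above.
from transformers import AutoModelForCausalLM, AutoTokenizer

tokenizer = AutoTokenizer.from_pretrained("microsoft/DialoGPT-medium")
fine_tuned_model = AutoModelForCausalLM.from_pretrained("microsoft/DialoGPT-medium")

def create_response_original(input_str):
    # Encode the prompt plus the end-of-sequence token the model expects.
    input_ids = tokenizer.encode(input_str + tokenizer.eos_token, return_tensors="pt")
    # Sample six candidate continuations with the settings shown in the diff.
    output_ids = fine_tuned_model.generate(
        input_ids,
        do_sample=True,
        max_length=100,
        temperature=0.2,
        top_p=0.9,
        repetition_penalty=1.5,
        num_return_sequences=6,
        pad_token_id=tokenizer.eos_token_id,  # silences the missing-pad-token warning; not in the diff
    )
    # Start from an empty string and join the decoded candidates with <br/> tags,
    # which is what the two "+" lines above add.
    outputs = ""
    for output_id in output_ids:
        output = tokenizer.decode(output_id, skip_special_tokens=True)
        outputs = outputs + output + "<br/>"
    return outputs

print(create_response_original("Hello, how are you?"))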
@@ -94,7 +94,7 @@ interface1 = gr.Interface(fn=create_response_original,
     "If is set to True, the generate function will use stochastic sampling, which means that it will randomly" +
     " select a word from the probability distribution at each step. This results in a more diverse and creative" +
     " output, but it might also introduce errors and inconsistencies ", value=True)
-    ], outputs="
+    ], outputs="text")
 interface2 = gr.Interface(fn=create_response_fine_tuned, inputs="text", outputs="text", title="Fine Tuned")
 demo = gr.TabbedInterface([interface1, interface2], ["Original", "Fine Tuned"])
 # with gr.Blocks() as demo:
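This hunk completes the gr.Interface call for the original model: it closes the inputs list and supplies outputs="text". Because create_response_original joins its candidates with <br/> tags, an "html" output component would render them as line breaks, while "text" displays the tags literally. A hedged sketch of the resulting wiring follows; the textbox and checkbox components, their labels, and the launch call are assumptions, while outputs="text", the second interface, and the tab layout come from the diff.

# Sketch only: the input components and demo.launch() are assumptions; the
# outputs="text" argument and the TabbedInterface layout come from the diff above.
import gradio as gr

def create_response_original(prompt, do_sample):
    return prompt  # placeholder body for illustration

def create_response_fine_tuned(prompt):
    return prompt  # placeholder body for illustration

interface1 = gr.Interface(
    fn=create_response_original,
    inputs=[
        gr.Textbox(label="input"),
        gr.Checkbox(
            label="do_sample",
            info="If set to True, generate() samples from the probability distribution at each step.",
            value=True,
        ),
    ],
    outputs="text",  # the completed argument from the "+" line
    title="Original",
)
interface2 = gr.Interface(fn=create_response_fine_tuned, inputs="text", outputs="text", title="Fine Tuned")
demo = gr.TabbedInterface([interface1, interface2], ["Original", "Fine Tuned"])

if __name__ == "__main__":
    demo.launch()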