examples and other added
app.py CHANGED
@@ -47,7 +47,16 @@ def create_response_fine_tuned(input_str):
     #output_str = tokenizer.decode(model.generate(**tokenizer("What are John West's hobbies?"+tokenizer.eos_token,return_tensors="pt",max_length=200))[0])
     output_str = tokenizer.decode(fine_tuned_model.generate(**tokenizer(input_str+tokenizer.eos_token,return_tensors="pt",max_length=200))[0])
     return (output_str)
-interface1 = gr.Interface(fn=create_response_original,
+interface1 = gr.Interface(fn=create_response_original,
+                          title="original",
+                          description="original language model, no fine tuning",
+                          examples=[
+                              ["What is death?"],  # The first example
+                              ["One of the best teachers in all of life turns out to be what?"],  # The second example
+                              ["what is your most meaningful relationship?"],  # The third example
+                              ["What actually gives life meaning?"]
+                          ],
+                          inputs=[
                               gr.Textbox(label="input text here", lines=3),
                               gr.Number(label="num_beams (integer) explores the specified number of possible outputs and selects the most " +
                                         "likely ones (specified in num_beams)", value=7),
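For context, here is a minimal sketch of how the updated gr.Interface call might fit together once this hunk is applied. The stub create_response_original function, the outputs argument, and the launch() call are assumptions standing in for parts of app.py outside this hunk, and each example row in the sketch also carries a num_beams value so the examples line up with both inputs.

import gradio as gr

def create_response_original(input_str, num_beams):
    # Stand-in for the real function in app.py, which decodes the output of
    # the language model's generate(...) call as shown in the hunk above.
    return f"(echo) {input_str} [num_beams={int(num_beams)}]"

interface1 = gr.Interface(
    fn=create_response_original,
    title="original",
    description="original language model, no fine tuning",
    # With two inputs, each example row supplies one value per input:
    # the question text and a num_beams value.
    examples=[
        ["What is death?", 7],
        ["One of the best teachers in all of life turns out to be what?", 7],
        ["what is your most meaningful relationship?", 7],
        ["What actually gives life meaning?", 7],
    ],
    inputs=[
        gr.Textbox(label="input text here", lines=3),
        gr.Number(label="num_beams (integer) explores the specified number of possible outputs and selects the most "
                        "likely ones (specified in num_beams)", value=7),
    ],
    outputs="text",  # assumption: a plain text output component defined elsewhere in app.py
)

if __name__ == "__main__":
    interface1.launch()  # assumption: app.py launches the interface at the bottom of the file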