Sangmin's picture
Add visitor badge
786d709
raw
history blame
2.18 kB
import gradio as gr
import openai
import os
# Read the API key from the environment; a missing key fails fast here at
# import time with a KeyError rather than on the first request.
openai.api_key = os.environ["OPENAI_API_KEY"]
def write_essay(sample_topic, user_topic):
    """Generate a five-paragraph Eiken essay with GPT-3.

    Parameters
    ----------
    sample_topic : str or None
        Topic chosen from the Radio component; ``None`` or ``""`` when the
        user made no selection.
    user_topic : str
        Free-form topic used as a fallback when no sample topic is chosen.

    Returns
    -------
    str
        The generated essay text, or the sentinel ``"Invalid Query"`` when
        the API call fails.
    """
    # Gradio's Radio yields None when nothing is selected; the original
    # len(...) == 0 check raised TypeError on None before reaching the
    # try block. Truthiness covers both None and the empty string.
    if not sample_topic:
        sample_topic = user_topic
    try:
        response = openai.Completion.create(
            model="text-davinci-002",
            prompt="Instruction: Write a five-paragraph essay for the following topic:\nSuggested Length: more than 120 words\nTOPIC: " + sample_topic + "\n\nESSAY:",
            temperature=0.7,
            max_tokens=500,
            top_p=1.0,
            frequency_penalty=0.0,
            presence_penalty=0.0
        )
        return response.choices[0].text
    except Exception:
        # Catch Exception, not a bare except:, so KeyboardInterrupt and
        # SystemExit still propagate; the UI shows this sentinel on failure.
        return "Invalid Query"
# Build the Gradio UI: topic selection (radio or free text) on top, the
# generated essay and the trigger button below.
demo = gr.Blocks()
with demo:
    gr.Markdown("<h2><center>Eiken Essay with GPT-3</center></h2>")
    gr.Markdown("<h4><center>Enter a topic for Eiken essay. GPT-3 will, then, write a sample essay for you.</center></h4>")
    gr.Markdown("<center>Brought to you by Choimirai School</center>")
    with gr.Row():
        sample_topic = gr.Radio([
            "Will humans live on other planets someday?",
            "Japan should become a completely cashless society.",
            "Global overpopulation is a serious threat to the future of humankind.",
            "Improving relations with other Asian nations should be a priority for the Japanese government.",
            "Can renewable energy sources replace fossil fuels?",
            "Should democratic nations actively promote the spread of democracy to nondemocratic nations?",
            "Agree or disagree, Infectious diseases will become a bigger problem in the coming decades.",
        ], label="Choose a sample TOPIC")
        user_topic = gr.Textbox(label="Or, write your own topic for Eiken essay.", value="Will fossil fuels such as oil and gas still be the world's main source of energy in the coming decades?")
    with gr.Row():
        # Fixed: was gr.inputs.Textbox — the deprecated *inputs* namespace
        # (removed in Gradio 3.x) used for an output component. gr.Textbox
        # is the form already used elsewhere in this file.
        written_essay = gr.Textbox(lines=20, label="Sample Essay written by GPT-3")
    b1 = gr.Button("Write Essay")
    b1.click(write_essay, inputs=[sample_topic, user_topic], outputs=written_essay)
    with gr.Row():
        gr.Markdown("![visitor badge](https://visitor-badge.glitch.me/badge?page_id=sangmin_eiken-essay-with-gpt3)")
demo.launch(enable_queue=True, debug=True)