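# GPT-2 Tweet Generator: a Gradio demo that impersonates Twitter accounts using huggingtweets GPT-2 models.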
import gradio as gr
# Models
jacobe = gr.Interface.load("huggingface/huggingtweets/jacobe")
baguioni = gr.Interface.load("huggingface/huggingtweets/baguioni")
elonmusk = gr.Interface.load("huggingface/huggingtweets/elonmusk")
realdonaldtrump = gr.Interface.load("huggingface/huggingtweets/realdonaldtrump")
barackobama = gr.Interface.load("huggingface/huggingtweets/barackobama")
karpathy = gr.Interface.load("huggingface/huggingtweets/karpathy")
# Dispatch the prompt to the selected huggingtweets model
def generated_tweet(inputtext, user):
    if user == 'jacobe':
        return jacobe(inputtext)
    if user == 'baguioni':
        return baguioni(inputtext)
    if user == 'elonmusk':
        return elonmusk(inputtext)
    if user == 'realdonaldtrump':
        return realdonaldtrump(inputtext)
    if user == 'karpathy':
        return karpathy(inputtext)
    if user == 'barackobama':
        return barackobama(inputtext)
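# Demo title, description, and footer text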
title = "GPT-2 Tweet Generator"
description = "<p style='text-align: center'>GPT-2 Tweet Generator Hugging Face demo. Select the Twitter account you want to impersonate and enter a word or phrase to generate a tweet.</p>"
article = "<p style='text-align: center'>Model built by Boris Dayma, https://github.com/borisdayma/huggingtweets</p>"
examples = [
    ['I have a dream', 'elonmusk'],
    ['I woke up feeling like', 'karpathy'],
    ['The world is a', 'jacobe']
]
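# Assemble and launch the demo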
gr.Interface(
    generated_tweet,
    [
        gr.inputs.Textbox(label="Input", lines=5),
        gr.inputs.Dropdown(choices=["baguioni", "jacobe", "elonmusk", "realdonaldtrump", "barackobama", "karpathy"], type="value", default="baguioni", label="user"),
    ],
    [gr.outputs.Label(label="Output")],
    examples=examples,
    article=article,
    title=title,
    description=description,
).launch(enable_queue=False)