Textgen / app.py
import os

# Install dependencies at startup (on Hugging Face Spaces these are usually
# declared in requirements.txt instead)
os.system("pip install transformers")
os.system("pip install gradio")
os.system("pip install torch")

import gradio as gr
from transformers import GPT2LMHeadModel, GPT2Tokenizer

# Load the GPT-2 large tokenizer and model; reuse the EOS token for padding
tokenizer = GPT2Tokenizer.from_pretrained("gpt2-large")
model = GPT2LMHeadModel.from_pretrained("gpt2-large", pad_token_id=tokenizer.eos_token_id)
def generate(prompt, textCount=200):
    # Encode the prompt into input IDs (PyTorch tensors)
    input_ids = tokenizer.encode(prompt, return_tensors='pt')
    # Enforce a minimum output length of 200 tokens
    if textCount is None or textCount < 200:
        textCount = 200
    # Generate text (prompt included) up to textCount tokens using beam search
    output = model.generate(input_ids, max_length=textCount, num_beams=5, no_repeat_ngram_size=2, early_stopping=True)
    return tokenizer.decode(output[0], skip_special_tokens=True)
demo = gr.Interface(
    fn=generate,
    inputs=[
        gr.Textbox(lines=8, placeholder="Paragraph Here...", label="Prompt"),
        gr.Number(value=200, label="Maximum length (tokens)"),
    ],
    outputs="text",
    title="Text generation app with GPT-2",
    description="A text generation app built on GPT-2. Paste a short prompt and the model continues it. It can be especially useful for writers; the quality of the output depends largely on how well the prompt is engineered.",
    examples=[
        ["During its construction, the Eiffel Tower surpassed the Washington Monument to become the tallest man-made structure in", 200],
        ["Question: What hurdles or challenges are you facing as you move through your career journey? Please share a specific example? Answer: I have been", 200],
    ],
)
demo.launch()
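
# Usage sketch (not part of the original app): generate() can also be called
# directly before demo.launch() blocks, assuming the model weights have been
# downloaded. The illustrative prompt and length below are hypothetical, e.g.
#   print(generate("During its construction, the Eiffel Tower", 250))
# which returns the prompt followed by up to 250 tokens of beam-searched text.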