Spaces:
Runtime error
Runtime error
File size: 1,958 Bytes
19f315c 24b84b4 04f0ad3 19f315c 24b84b4 19f315c 328e66f 19f315c 4596128 ad77781 19f315c bd3935d 24b84b4 19f315c 1b41b1b 24b84b4 19f315c 328e66f 9f6d9cf 19f315c 04d51d9 967a744 5fe800e 847ec0f 19f315c 967a744 6b57a35 24b84b4 |
1 2 3 4 5 6 7 8 9 10 11 12 13 14 15 16 17 18 19 20 21 22 23 24 25 26 27 28 29 30 31 32 33 34 35 36 37 38 39 40 41 42 43 44 45 46 47 48 49 50 51 52 |
# MagicPrompt demo: a GPT-2 fine-tune that expands short ideas into
# Stable Diffusion prompts, served through a Gradio interface.
from transformers import pipeline, set_seed
import gradio as grad, random, re
# Text-generation pipeline backed by the MagicPrompt checkpoint; the stock
# 'gpt2' tokenizer is reused because the model is a GPT-2 fine-tune.
# NOTE: this downloads/loads the model at import time (module-level side effect).
gpt2_pipe = pipeline('text-generation', model='Gustavosta/MagicPrompt-Stable-Diffusion', tokenizer='gpt2')
def generate(starting_text):
    """Generate Stable Diffusion prompt candidates from a seed text.

    Parameters
    ----------
    starting_text : str
        Seed text for the prompt. When empty, a random line is drawn
        from ``ideas.txt`` instead.

    Returns
    -------
    str
        Newline-joined generated prompts, cleaned of dotted tokens and
        angle brackets. Empty string when every candidate was filtered
        out (the original fell through to an implicit ``None`` here).
    """
    # Re-seed on every call so repeated submissions give varied output.
    set_seed(random.randint(100, 1000000))

    if starting_text == "":
        # Only read ideas.txt when a random idea is actually needed
        # (the original read it unconditionally on every call).
        with open("ideas.txt", "r") as f:
            lines = f.readlines()
        starting_text = random.choice(lines).replace("\n", "").capitalize()
        starting_text = re.sub(r"\.", '', starting_text)

    response = gpt2_pipe(
        starting_text,
        max_length=len(starting_text) + random.randint(60, 80),
        num_return_sequences=1,
    )

    response_list = []
    for x in response:
        resp = x['generated_text'].strip()
        # Keep only results that are longer than the seed, are not a pure
        # echo of it, and do not end on a truncated separator character.
        if (resp != starting_text
                and len(resp) > len(starting_text) + 4
                and not resp.endswith((":", "-", "—"))):
            response_list.append(resp)

    response_end = "\n".join(response_list)
    # Drop dotted tokens (filenames/URLs); raw string fixes the invalid
    # escape sequence ('[^ ]+\.[^ ]+') that warns on Python 3.12+.
    response_end = re.sub(r'[^ ]+\.[^ ]+', '', response_end)
    # Strip HTML-unsafe angle brackets before display.
    response_end = response_end.replace("<", "").replace(">", "")

    # Always return a string so Gradio never receives None.
    return response_end
# --- Gradio UI wiring ----------------------------------------------------
# Input: a single-line idea; output: a multi-line box of generated prompts.
txt = grad.Textbox(lines=1, label="Initial Text", placeholder="Enter a basic idea")
out = grad.Textbox(lines=5, label="Generated Prompts")

title = "The Stable Diffusion Prompt Generator - because your text needs a little more visual spice."
description = 'Welcome to the MagicPrompt demo for Stable Diffusion! Ready to see some magic happen? Simply type in your text. Feeling lazy? No problem, just hit the Submit button and it will randomly pull from a list of thousands of ideas for you.<br>'

# Build the interface, then launch it without a request queue.
demo = grad.Interface(
    fn=generate,
    inputs=txt,
    outputs=out,
    title=title,
    description=description,
    article='',
    allow_flagging='never',
    cache_examples=False,
    theme="default",
)
demo.launch(enable_queue=False, debug=True)
|