import gradio as gr
import torch
from diffusers import DiffusionPipeline

# Load the pipeline once at startup rather than on every request,
# and move it to the GPU when one is available.
pipeline = DiffusionPipeline.from_pretrained("Lykon/DreamShaper")
if torch.cuda.is_available():
    pipeline = pipeline.to("cuda")


def generate(
    prompt, negative_prompt, num_inference_steps, width, height, guidance_scale
):
    """Generate a single image from the prompt and sampling settings."""
    return pipeline(
        prompt=prompt,
        negative_prompt=negative_prompt,
        num_inference_steps=num_inference_steps,
        width=width,
        height=height,
        guidance_scale=guidance_scale,
    ).images[0]

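# Expose the generator through a simple Gradio form: text boxes for the
# prompt and negative prompt, plus sliders for the sampling parameters.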
iface = gr.Interface(
    fn=generate,
    inputs=[
        gr.Textbox(label="Prompt", value=""),
        gr.Textbox(label="Negative Prompt", value=""),
        gr.Slider(label="Sampling Steps", minimum=1, maximum=150, value=30, step=1),
        # The pipeline expects width and height to be multiples of 8.
        gr.Slider(label="Width", minimum=64, maximum=2048, value=512, step=8),
        gr.Slider(label="Height", minimum=64, maximum=2048, value=512, step=8),
        gr.Slider(label="CFG Scale", minimum=1, maximum=30, value=9, step=0.5),
    ],
    outputs="image",
)

iface.launch()