# Hugging Face Space: text-to-image demo for helenai/Linaqruf-anything-v3.0-ov
# (OpenVINO export of anything-v3.0), served with Gradio.
import gradio as gr
import numpy as np
import random
from diffusers import DiffusionPipeline
from optimum.intel.openvino import OVStableDiffusionPipeline
import torch
from typing import Callable, Dict, Optional, Tuple
from diffusers import (
DDIMScheduler,
DPMSolverMultistepScheduler,
DPMSolverSinglestepScheduler,
EulerAncestralDiscreteScheduler,
EulerDiscreteScheduler,
)
def get_scheduler(scheduler_config: Dict, name: str):
    """Instantiate a diffusers noise scheduler from its UI sampler name.

    Args:
        scheduler_config: Config dict taken from an existing pipeline's
            scheduler (i.e. ``pipe.scheduler.config``).
        name: Human-readable sampler name, e.g. "Euler a" or "DDIM".

    Returns:
        A configured scheduler *instance*, or ``None`` when ``name`` is not
        recognized.  (The previous ``-> Optional[Callable]`` annotation was
        wrong: the factory lambda is called before returning, so callers
        receive a scheduler object, never a callable.)
    """
    # Lambdas defer construction so only the selected scheduler is built.
    scheduler_factory_map = {
        "DPM++ 2M Karras": lambda: DPMSolverMultistepScheduler.from_config(
            scheduler_config, use_karras_sigmas=True
        ),
        "DPM++ SDE Karras": lambda: DPMSolverSinglestepScheduler.from_config(
            scheduler_config, use_karras_sigmas=True
        ),
        "DPM++ 2M SDE Karras": lambda: DPMSolverMultistepScheduler.from_config(
            scheduler_config, use_karras_sigmas=True, algorithm_type="sde-dpmsolver++"
        ),
        "Euler": lambda: EulerDiscreteScheduler.from_config(scheduler_config),
        "Euler a": lambda: EulerAncestralDiscreteScheduler.from_config(
            scheduler_config
        ),
        "DDIM": lambda: DDIMScheduler.from_config(scheduler_config),
    }
    # Unknown names fall back to a no-op factory and yield None; the caller
    # assigns the result to pipe.scheduler, so watch for a silent None there.
    return scheduler_factory_map.get(name, lambda: None)()
# Model / sampling configuration.
model_id = "helenai/Linaqruf-anything-v3.0-ov"  # pre-exported OpenVINO SD model
num_inference_steps = 25
sampler = "Euler a"  # must be a key understood by get_scheduler()

# Load without compiling so the static input shapes can still be changed.
pipe = OVStableDiffusionPipeline.from_pretrained(model_id, compile=False)
# OpenVINO compiles for fixed shapes: pin batch/resolution BEFORE compile().
pipe.reshape( batch_size=1, height=256, width=256, num_images_per_prompt=1)
# Swap in the requested sampler (must also happen before compiling).
pipe.scheduler = get_scheduler(pipe.scheduler.config, sampler)
pipe.compile()
def infer(prompt, negative_prompt=""):
    """Generate one 256x256 image for *prompt* with the global pipeline.

    Args:
        prompt: User prompt; quality-booster tags are appended before it is
            passed to the pipeline.
        negative_prompt: Optional extra negative terms, appended to the
            built-in negative prompt.  Defaults to "" — previously this
            parameter was required *and* ignored, so the Gradio click
            handler (which passes only the prompt) raised a TypeError.

    Returns:
        The first generated image of the pipeline output.
    """
    # Built-in negative prompt (kept byte-for-byte from the original).
    base_negative = "score_6,score_5,score_4,source_furry,pathway,walkway,face mask,heterochromia,\
tattoos,muscular,deformed iris,deformed pupils,long body,long neck,text,error,print,signature,\
logo,watermark,deformed,distorted,disfigured,bad anatomy,wrong anatomy,ugly,disgusting,\
cropped,crooked teeth,multiple views,bad proportions,gross proportions,cloned face,\
worst quality,low quality,normal quality,bad quality,lowres,poorly drawn,semi-realistic,\
3d,render,cg,cgi,imperfect,partial,unfinished,incomplete,monochrome,grayscale,sepia,fat,\
wrinkle,fat leg,fat ass,blurry,hazy,sagging breasts,longbody,lowres,\
bad anatomy,bad hands,missing fingers,extra digit,fewer digits,worst quality,\
low quality,normal quality,watermark,artist name,signature,(bad anatomy)), ((bad art)),\
(((bad proportions))), (b&w), (black/white), (black and white), blurry, body out of frame,\
canvas frame, cloned face, ((close up)), cross-eye, ((deformed)), ((disfigured)), (((duplicate))), \
(((extra arms))), extra fingers, (((extra legs))), ((extra limbs)), (fused fingers), gross proportions, \
((morbid)), (malformed limbs), ((missing arms)), ((missing legs)), mutated, mutated hands, \
(((mutation))), ((mutilated)), (out of frame), ((poorly drawn face)), poorly drawn feet, \
((poorly drawn hands)), tiling, (too many fingers), ((ugly)), wierd colors, (((long neck))), \
ugly, words, wrinkles, writing"
    if negative_prompt:
        # Bug fix: the user-supplied negative prompt used to be discarded.
        base_negative = base_negative + "," + negative_prompt
    image = pipe(
        prompt=prompt + "score_8_up,score_7_up,score_6_up,score_9,score_8_up,score_7,masterpiece,best quality,source_anime,bangs,",
        negative_prompt=base_negative,
        num_inference_steps=num_inference_steps,
        # Must match the shapes pinned by pipe.reshape() above.
        width=256,
        height=256,
    ).images[0]
    return image
# Example prompts surfaced below the input box.
examples = [
    "A cute kitten, Japanese cartoon style.",
    "A sweet family, dad stands next to mom, mom holds baby girl.",
    "A delicious ceviche cheesecake slice",
]
# Centers and narrows the main column of the UI.
css="""
#col-container {
margin: 0 auto;
max-width: 520px;
}
"""
# Display-only label; the OpenVINO pipeline runs on CPU here.
power_device = "CPU"
# Build and launch the Gradio UI.
with gr.Blocks(css=css) as demo:
    with gr.Column(elem_id="col-container"):
        # Markdown body kept at column 0 so headings are not indented.
        gr.Markdown(f"""
# Linaqruf-anything-v3.0-ov 256x256
Currently running on {power_device}.
""")
        with gr.Row():
            prompt = gr.Text(
                label="Prompt",
                show_label=False,
                max_lines=1,
                placeholder="Enter your prompt",
                container=False,
            )
            run_button = gr.Button("Run", scale=0)
        # Bug fix: infer() requires two positional arguments, but the click
        # handler previously passed only [prompt], raising a TypeError on
        # every run.  Expose the negative prompt and wire it through.
        negative_prompt = gr.Text(
            label="Negative prompt",
            show_label=False,
            max_lines=1,
            placeholder="Enter a negative prompt (optional)",
            container=False,
        )
        result = gr.Image(label="Result", show_label=False)
        gr.Examples(
            examples=examples,
            inputs=[prompt],
        )
    run_button.click(
        fn=infer,
        inputs=[prompt, negative_prompt],
        outputs=[result],
    )
demo.queue().launch()