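# app.py: Gradio demo that wraps the Kwai-Kolors/Kolors-Virtual-Try-On model
# to render a virtual try-on result from a person photo and a garment photo.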
import gradio as gr
import torch
from diffusers import StableDiffusionControlNetPipeline, ControlNetModel
from PIL import Image
import numpy as np
def load_model():
    # Load the ControlNet weights and the Stable Diffusion ControlNet pipeline.
    # NOTE: this assumes the Kwai-Kolors/Kolors-Virtual-Try-On repo is compatible
    # with these loader classes; adjust them if the repo ships a custom pipeline.
    controlnet = ControlNetModel.from_pretrained("Kwai-Kolors/Kolors-Virtual-Try-On")
    pipe = StableDiffusionControlNetPipeline.from_pretrained(
        "Kwai-Kolors/Kolors-Virtual-Try-On",
        controlnet=controlnet,
        torch_dtype=torch.float16
    )
    if torch.cuda.is_available():
        pipe = pipe.to("cuda")
    return pipe
# Load the model globally so it is initialized once at startup
model = None
try:
    model = load_model()
    print("Model loaded successfully!")
except Exception as e:
    print(f"Error while loading the model: {str(e)}")
def virtual_try_on(person_image, garment_image):
    """
    Virtual try-on process: combine a person image and a garment image.
    """
    try:
        # Validate inputs
        if person_image is None or garment_image is None:
            return None, "Error: Both images are required"
        if model is None:
            return None, "Error: Model is not loaded"
        # Model inference
        output = model(
            person_image,
            garment_image,
            num_inference_steps=30,
            guidance_scale=7.5
        )
        # Take the first generated image as the result
        result_image = output.images[0]
        return result_image, "Success"
    except Exception as e:
        return None, f"Error: {str(e)}"
# Gradio interface
demo = gr.Interface(
    fn=virtual_try_on,
    inputs=[
        gr.Image(type="pil", label="Person Image"),
        gr.Image(type="pil", label="Garment Image")
    ],
    outputs=[
        gr.Image(type="pil", label="Result"),
        gr.Text(label="Status")
    ],
    title="Virtual Try-On",
    description="Upload a person image and a garment image to see how the garment would look on the person."
)
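# Start the Gradio server when the script is executed directly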
if __name__ == "__main__":
    demo.launch()