import gradio as gr
from huggingface_hub import InferenceClient
import PIL.Image
import io
import os
import base64

# Initialize the Hugging Face client.
# Read the token from the environment instead of hardcoding it in the source.
HF_TOKEN = os.environ.get("HF_TOKEN", "your_token_here")  # set HF_TOKEN (e.g. as a Space secret) or replace the fallback

client = InferenceClient(
    model="Kwai-Kolors/Kolors-Virtual-Try-On",
    token=HF_TOKEN
)


def virtual_try_on(person_image, garment_image):
    """
    Process the virtual try-on request.

    Args:
        person_image: PIL Image of the person
        garment_image: PIL Image of the garment

    Returns:
        A (PIL Image, status message) tuple; the image is None on failure.
    """
    try:
        # Serialize both PIL images to PNG bytes
        person_bytes = io.BytesIO()
        garment_bytes = io.BytesIO()
        person_image.save(person_bytes, format='PNG')
        garment_image.save(garment_bytes, format='PNG')

        # Make the API request. Raw bytes are not JSON-serializable, so the
        # images are base64-encoded; the payload layout mirrors the original
        # request and may need to be adapted to the endpoint's expected schema.
        response = client.post(
            json={
                "inputs": [
                    {"image": base64.b64encode(person_bytes.getvalue()).decode("utf-8")},
                    {"image": base64.b64encode(garment_bytes.getvalue()).decode("utf-8")}
                ]
            }
        )

        # The client returns raw response bytes; decode them as an image
        result_image = PIL.Image.open(io.BytesIO(response))
        return result_image, "Success"
    except Exception as e:
        return None, f"Error: {str(e)}"


# Create Gradio interface
demo = gr.Interface(
    fn=virtual_try_on,
    inputs=[
        gr.Image(type="pil", label="Person Image"),
        gr.Image(type="pil", label="Garment Image")
    ],
    outputs=[
        gr.Image(type="pil", label="Result"),
        gr.Text(label="Status")
    ],
    title="Virtual Try-On API",
    description="Upload a person image and a garment image to see how the garment would look on the person."
)

if __name__ == "__main__":
    demo.launch()
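
# --- Usage sketch (comments only, not executed by this app) ---
# A minimal example of calling this interface from another script once the
# demo is deployed, using gradio_client. The Space id "insideman/virtual-try-on"
# and the local image filenames are assumptions for illustration only.
#
#   from gradio_client import Client, handle_file
#
#   client = Client("insideman/virtual-try-on")
#   result_path, status = client.predict(
#       handle_file("person.png"),
#       handle_file("garment.png"),
#       api_name="/predict",
#   )
#   print(status, result_path)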