File size: 1,324 Bytes
d1bdeae
f05ea81
d1bdeae
4e50bb4
 
 
 
 
 
817689e
9836b6f
7df26b0
c48e116
 
a39ee1c
c48e116
dd78709
596da82
efe6971
d1bdeae
 
 
 
 
 
800faba
d1bdeae
14faa82
7c0975f
 
d1bdeae
dd78709
 
d1bdeae
 
 
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
import gradio as gr
import os 
import sys
# --- One-time environment setup --------------------------------------------
# NOTE(review): the original ran `os.system("cd pose2pose")` / `cd
# pose-transfer`, which are no-ops: every os.system call spawns its own
# shell, so the `cd` never affects this process or any later command. The
# directories below are therefore created where the original actually
# created them (the current working directory), using os.makedirs, which
# is idempotent (exist_ok=True) and avoids shelling out.
os.makedirs("pose2pose", exist_ok=True)
os.makedirs("datasets/DeepFashion", exist_ok=True)
os.makedirs("output/DeepFashion/ckpt/pretrained", exist_ok=True)

# Fetch the pose-transfer repo and install its requirements. Because the
# original `cd pose-transfer` was a no-op, `pip install -r requirements.txt`
# looked for the file in the wrong directory — point at the cloned path.
os.system("git clone https://github.com/prasunroy/pose-transfer.git")
os.system("pip install -r pose-transfer/requirements.txt")
# Make the cloned repo importable (provides `api` and `canvas`).
sys.path.append("pose-transfer")

import torch
from api import Pose2Pose
from PIL import Image
import canvas

# Load the pretrained pose-transfer model once at startup; reused by infer().
p2p = Pose2Pose(pretrained=True)
# NOTE(review): removed the stray `gr.Interface("spaces/Omnibus/model-mover")`
# — gr.Interface expects a callable, and the constructed object was discarded;
# loading another Space is done with `gr.load(...)` if actually needed.

def infer(con_im, ref_im):
    """Run pose transfer: render the person from the style image in the pose
    of the reference image.

    Args:
        con_im: filepath of the style (appearance) image.
        ref_im: filepath of the pose reference image.

    Returns:
        The generated PIL image produced by the pose-transfer model.
    """
    style_img = Image.open(con_im)
    pose_img = Image.open(ref_im)
    return p2p.transfer_as(style_img, pose_img)
# --- Gradio UI --------------------------------------------------------------
with gr.Blocks() as app:
    gr.Markdown("<center><h1 style='font-size:xxx-large;'>Pose Transfer Demo</h1><br><h3>repo: <a href='https://github.com/prasunroy/pose-transfer'>https://github.com/prasunroy/pose-transfer</a></h3>")
    with gr.Row():
        # Placeholder slot for a custom pose-maker web component.
        pose_maker = gr.HTML("<pose-maker/>")
        condition_im = gr.Image(label='Style Image', type='filepath')
        reference_im = gr.Image(label='Pose Image', type='filepath')
    btn = gr.Button()
    # NOTE(review): removed `gr.Interface("spaces/Omnibus/model-mover").load()`
    # — gr.Interface's first argument must be a callable, the return value was
    # discarded, and loading another Space is done with `gr.load(...)`.
    output_im = gr.Image()
    # Run pose transfer on click: (style path, pose path) -> generated image.
    btn.click(infer, [condition_im, reference_im], output_im)
app.launch()