{
  "3": {
    "inputs": {
      "seed": 319029981256051,
      "steps": 30,
      "cfg": 6,
      "sampler_name": "dpmpp_sde_gpu",
      "scheduler": "karras",
      "denoise": 1,
      "model": ["34", 0],
      "positive": ["6", 0],
      "negative": ["7", 0],
      "latent_image": ["5", 0]
    },
    "class_type": "KSampler",
    "_meta": {
      "title": "KSampler"
    }
  },
  "4": {
    "inputs": {
      "ckpt_name": "SDXL/sdXL_v10VAEFix.safetensors"
    },
    "class_type": "CheckpointLoaderSimple",
    "_meta": {
      "title": "Load Checkpoint"
    }
  },
  "5": {
    "inputs": {
      "width": 1024,
      "height": 576,
      "batch_size": 4
    },
    "class_type": "EmptyLatentImage",
    "_meta": {
      "title": "Empty Latent Image"
    }
  },
  "6": {
    "inputs": {
      "text": ["70", 0],
      "clip": ["34", 1]
    },
    "class_type": "CLIPTextEncode",
    "_meta": {
      "title": "CLIP Text Encode (Prompt)"
    }
  },
  "7": {
    "inputs": {
      "text": ["71", 0],
      "clip": ["34", 1]
    },
    "class_type": "CLIPTextEncode",
    "_meta": {
      "title": "CLIP Text Encode (Prompt)"
    }
  },
  "8": {
    "inputs": {
      "samples": ["3", 0],
      "vae": ["4", 2]
    },
    "class_type": "VAEDecode",
    "_meta": {
      "title": "VAE Decode"
    }
  },
  "19": {
    "inputs": {
      "stop_at_clip_layer": -1,
      "clip": ["33", 1]
    },
    "class_type": "CLIPSetLastLayer",
    "_meta": {
      "title": "CLIP Set Last Layer"
    }
  },
  "20": {
    "inputs": {
      "guide_size": 640,
      "guide_size_for": true,
      "max_size": 1024,
      "seed": 98503544369271,
      "steps": 20,
      "cfg": 8,
      "sampler_name": "euler",
      "scheduler": "normal",
      "denoise": 0.5,
      "feather": 5,
      "noise_mask": true,
      "force_inpaint": true,
      "bbox_threshold": 0.5,
      "bbox_dilation": 10,
      "bbox_crop_factor": 3,
      "sam_detection_hint": "center-1",
      "sam_dilation": 0,
      "sam_threshold": 0.93,
      "sam_bbox_expansion": 0,
      "sam_mask_hint_threshold": 0.7,
      "sam_mask_hint_use_negative": "False",
      "drop_size": 10,
      "wildcard": "",
      "cycle": 1,
      "inpaint_model": false,
      "noise_mask_feather": 20,
      "image": ["73", 0],
      "model": ["4", 0],
      "clip": ["4", 1],
      "vae": ["4", 2],
      "positive": ["62", 0],
      "negative": ["63", 0],
      "bbox_detector": ["21", 0],
      "sam_model_opt": ["22", 0]
    },
    "class_type": "FaceDetailer",
    "_meta": {
      "title": "FaceDetailer"
    }
  },
  "21": {
    "inputs": {
      "model_name": "bbox/face_yolov8m.pt"
    },
    "class_type": "UltralyticsDetectorProvider",
    "_meta": {
      "title": "UltralyticsDetectorProvider"
    }
  },
  "22": {
    "inputs": {
      "model_name": "sam_vit_b_01ec64.pth",
      "device_mode": "AUTO"
    },
    "class_type": "SAMLoader",
    "_meta": {
      "title": "SAMLoader (Impact)"
    }
  },
"33": { |
|
"inputs": { |
|
"resadapter_name": "resadapter_v2_sdxl", |
|
"strength_model": 0.7000000000000001, |
|
"strength_clip": 1, |
|
"model": [ |
|
"4", |
|
0 |
|
], |
|
"clip": [ |
|
"4", |
|
1 |
|
] |
|
}, |
|
"class_type": "ResAdapterLoader", |
|
"_meta": { |
|
"title": "Load ResAdapter" |
|
} |
|
}, |
|
"34": { |
|
"inputs": { |
|
"lora_01": "SDXL/Korean_Instagram_XL.safetensors", |
|
"strength_01": 0.7000000000000001, |
|
"lora_02": "SDXL/add-detail-xl.safetensors", |
|
"strength_02": 1, |
|
"lora_03": "None", |
|
"strength_03": 1, |
|
"lora_04": "None", |
|
"strength_04": 1, |
|
"model": [ |
|
"33", |
|
0 |
|
], |
|
"clip": [ |
|
"19", |
|
0 |
|
] |
|
}, |
|
"class_type": "Lora Loader Stack (rgthree)", |
|
"_meta": { |
|
"title": "Lora Loader Stack (rgthree)" |
|
} |
|
}, |
|
"62": { |
|
"inputs": { |
|
"text": [ |
|
"70", |
|
0 |
|
], |
|
"clip": [ |
|
"4", |
|
1 |
|
] |
|
}, |
|
"class_type": "CLIPTextEncode", |
|
"_meta": { |
|
"title": "CLIP Text Encode (Prompt)" |
|
} |
|
}, |
|
"63": { |
|
"inputs": { |
|
"text": [ |
|
"71", |
|
0 |
|
], |
|
"clip": [ |
|
"4", |
|
1 |
|
] |
|
}, |
|
"class_type": "CLIPTextEncode", |
|
"_meta": { |
|
"title": "CLIP Text Encode (Prompt)" |
|
} |
|
}, |
|
"70": { |
|
"inputs": { |
|
"string": "Korean, red, sliced beef, Fresh ingredients being prepared in a high-quality kitchen, vibrant colors, chef's skilled hands, clean and professional setting, Studio lighting, close up hand, side view" |
|
}, |
|
"class_type": "Primitive string multiline [Crystools]", |
|
"_meta": { |
|
"title": "Positive Prompt" |
|
} |
|
}, |
|
"71": { |
|
"inputs": { |
|
"string": "ugly, deformed, text, glasses, bad hands, illustration" |
|
}, |
|
"class_type": "Primitive string multiline [Crystools]", |
|
"_meta": { |
|
"title": "Positive Prompt" |
|
} |
|
}, |
|
"72": { |
|
"inputs": { |
|
"s3_bucket": "ai-adv-skt-dev-output", |
|
"pathname": "i2v_client/user1/food.png", |
|
"region": "ap-northeast-2", |
|
"aws_ak": "", |
|
"aws_sk": "", |
|
"images": [ |
|
"20", |
|
0 |
|
] |
|
}, |
|
"class_type": "Direct Save Image To S3", |
|
"_meta": { |
|
"title": "Direct Save Image To S3" |
|
} |
|
}, |
|
"73": { |
|
"inputs": { |
|
"width": 1920, |
|
"height": 1080, |
|
"upscale_method": "nearest-exact", |
|
"keep_proportion": false, |
|
"divisible_by": 2, |
|
"crop": "disabled", |
|
"image": [ |
|
"8", |
|
0 |
|
] |
|
}, |
|
"class_type": "ImageResizeKJ", |
|
"_meta": { |
|
"title": "Resize Image" |
|
} |
|
} |
|
} |