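# Pipeline configuration: each section selects the weights, precision, and device
# for one stage. Paths under ./checkpoint and ./models are loaded locally; the
# remaining identifiers appear to be Hugging Face Hub repos.

# FLUX image-generation stage (base model, LoRA, ControlNet, and Redux adapters).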
flux:
  base_model: "https://huggingface.co/Comfy-Org/flux1-dev/blob/main/flux1-dev-fp8.safetensors"
  # base_model: "black-forest-labs/FLUX.1-dev"
  flux_dtype: 'bf16'
  lora: "./checkpoint/flux_lora/rgb_normal_large.safetensors"
  controlnet: "InstantX/FLUX.1-dev-Controlnet-Union"
  redux: "black-forest-labs/FLUX.1-Redux-dev"
  num_inference_steps: 20
  seed: 42
  device: 'cuda:0'
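
# Multi-view generation stage: Zero123++ v1.2 with a custom pipeline and UNet checkpoint.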
multiview:
  base_model: "sudo-ai/zero123plus-v1.2"
  custom_pipeline: "./models/zero123plus"
  unet: "./checkpoint/zero123++/flexgen_19w.ckpt"
  num_inference_steps: 50
  seed: 42
  device: 'cuda:0'
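
# Reconstruction stage: LRM-style model, configured by PRM_inference.yaml.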
reconstruction:
  model_config: "./models/lrm/config/PRM_inference.yaml"
  base_model: "./checkpoint/lrm/final_ckpt.ckpt"
  device: 'cuda:0'
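
# Captioning stage: Florence-2 (variant packaged without flash-attention).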
caption:
  base_model: "multimodalart/Florence-2-large-no-flash-attn"
  device: 'cuda:0'
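
# LLM stage: Qwen2-7B-Instruct.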
llm:
  base_model: "Qwen/Qwen2-7B-Instruct"
  device: 'cuda:0'

use_zero_gpu: false # for huggingface demo only
3d_bundle_templates: './init_3d_Bundle'
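
# Minimal sketch of loading this config with PyYAML (file name is assumed, not
# prescribed by this repo):
#   import yaml
#   with open("pipeline_config.yaml") as f:
#       cfg = yaml.safe_load(f)
#   print(cfg["flux"]["base_model"], cfg["llm"]["device"])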