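# Training configuration for distilling a compact bk_tiny U-Net from the
# stabilityai/stable-diffusion-2-1-base teacher (values as dumped by the training script).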
adam_beta1: 0.9
adam_beta2: 0.999
adam_epsilon: 1.0e-08
adam_weight_decay: 0.01
allow_tf32: false
cache_dir: null
center_crop: true
checkpointing_steps: 5000
checkpoints_total_limit: null
dataloader_num_workers: 0
dataset_config_name: null
enable_xformers_memory_efficient_attention: false
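# Effective batch size per optimizer step: train_batch_size (32) x gradient_accumulation_steps (4) = 128 per process.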
gradient_accumulation_steps: 4
gradient_checkpointing: true
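# Loss weights: lambda_sd scales the standard denoising loss, lambda_kd_output the
# output-level distillation term (matching the teacher's prediction), and
# lambda_kd_feat the feature-level distillation term; see the sketch after this config.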
lambda_kd_feat: 1.0
lambda_kd_output: 1.0
lambda_sd: 1.0
learning_rate: 5.0e-05
local_rank: -1
logging_dir: logs
lr_scheduler: constant
lr_warmup_steps: 0
max_grad_norm: 1.0
max_train_samples: null
max_train_steps: 0
mixed_precision: fp16
non_ema_revision: null
num_train_epochs: 0
num_valid_images: 2
output_dir: ./results/v2-base_kd_bk_tiny
pretrained_model_name_or_path: stabilityai/stable-diffusion-2-1-base
random_flip: true
report_to: all
resolution: 512
resume_from_checkpoint: null
revision: null
scale_lr: false
seed: 1234
train_batch_size: 32
train_data_dir: ./data/laion_aes/preprocessed_11k
unet_config_name: bk_tiny
unet_config_path: ./src/unet_config_v2-base
use_8bit_adam: false
use_copy_weight_from_teacher: true
use_ema: true
valid_prompt: a golden vase with different flowers
valid_steps: 500
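For orientation, a minimal sketch of how the three lambda_* weights above typically combine in a distillation training step. All names here (student_pred, teacher_pred, the feature lists) are illustrative assumptions, not this repository's actual API:

import torch.nn.functional as F

def kd_training_loss(student_pred, teacher_pred, student_feats, teacher_feats,
                     target_noise, lambda_sd=1.0, lambda_kd_output=1.0,
                     lambda_kd_feat=1.0):
    # Task loss: student's noise prediction against the true noise (standard SD objective).
    loss_sd = F.mse_loss(student_pred, target_noise)
    # Output-level KD: student mimics the frozen teacher's noise prediction.
    loss_kd_output = F.mse_loss(student_pred, teacher_pred)
    # Feature-level KD: match intermediate activations pair by pair.
    loss_kd_feat = sum(F.mse_loss(s, t) for s, t in zip(student_feats, teacher_feats))
    # Weighted sum, with weights taken from the lambda_* fields of the config.
    return (lambda_sd * loss_sd
            + lambda_kd_output * loss_kd_output
            + lambda_kd_feat * loss_kd_feat)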