{
  "pretrained_model_name_or_path": "./core_model/core/model5",
  "pretrained_vae_name_or_path": "stabilityai/sd-vae-ft-mse",
  "revision": "main",
  "tokenizer_name": null,
  "instance_data_dir": null,
  "class_data_dir": null,
  "instance_prompt": null,
  "class_prompt": null,
  "save_sample_prompt": "photo of person",
  "save_sample_negative_prompt": null,
  "n_save_sample": 4,
  "save_guidance_scale": 7.5,
  "save_infer_steps": 20,
  "pad_tokens": false,
  "with_prior_preservation": true,
  "prior_loss_weight": 1.0,
  "num_class_images": 50,
  "output_dir": "./weights/alivksn",
  "seed": 1337,
  "resolution": 512,
  "center_crop": false,
  "train_text_encoder": true,
  "train_batch_size": 1,
  "sample_batch_size": 4,
  "num_train_epochs": 126,
  "max_train_steps": 6300,
  "gradient_accumulation_steps": 1,
  "gradient_checkpointing": false,
  "learning_rate": 1e-06,
  "scale_lr": false,
  "lr_scheduler": "constant",
  "lr_warmup_steps": 0,
  "use_8bit_adam": false,
  "adam_beta1": 0.9,
  "adam_beta2": 0.999,
  "adam_weight_decay": 0.01,
  "adam_epsilon": 1e-08,
  "max_grad_norm": 1.0,
  "push_to_hub": false,
  "hub_token": null,
  "hub_model_id": null,
  "logging_dir": "logs",
  "log_interval": 10,
  "save_interval": 10000,
  "save_min_steps": 0,
  "mixed_precision": "fp16",
  "not_cache_latents": false,
  "hflip": false,
  "local_rank": -1,
  "concepts_list": [
    {
      "instance_prompt": "photo of alivksn",
      "class_prompt": "photo of a person",
      "instance_data_dir": "./data/alivksn",
      "class_data_dir": "./data/person"
    }
  ],
  "read_prompts_from_txts": false
}
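
The fields above look like the argument dump written alongside a DreamBooth fine-tuning run with a diffusers-based training script. As a minimal sketch of how such a config can be reused, the Python below loads the JSON and generates a test image from the trained weights with the stored sampling settings. The args.json path, the checkpoint layout under output_dir, and the output filename are assumptions, not part of this file; adjust them to match the actual training run.

# Minimal sketch, assuming diffusers is installed and output_dir holds a
# diffusers-format pipeline. The config path below is hypothetical.
import json

import torch
from diffusers import StableDiffusionPipeline

# Assumed location of this config next to the trained weights.
with open("./weights/alivksn/args.json") as f:
    args = json.load(f)

# Some training scripts save checkpoints in step-numbered subdirectories
# (e.g. a "6300" folder); point model_dir at whichever checkpoint you want.
model_dir = args["output_dir"]

pipe = StableDiffusionPipeline.from_pretrained(model_dir, torch_dtype=torch.float16)
pipe = pipe.to("cuda")

# Reuse the sampling settings recorded in the config.
image = pipe(
    args["concepts_list"][0]["instance_prompt"],  # "photo of alivksn"
    guidance_scale=args["save_guidance_scale"],   # 7.5
    num_inference_steps=args["save_infer_steps"], # 20
).images[0]
image.save("sample.png")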