fqye committed on
Commit 149ea31
1 Parent(s): 715377a

Upload folder using huggingface_hub

Files changed (3):
  1. README.md +1 -1
  2. config.yaml +4 -4
  3. lora.safetensors +1 -1
README.md CHANGED
@@ -21,7 +21,7 @@ instance_prompt: ztj
 
 # Ztj 22
 
-<!-- <Gallery /> -->
+<Gallery />
 
 Trained on Replicate using:
 
config.yaml CHANGED
@@ -12,7 +12,7 @@ config:
   linear_alpha: 16
   save:
   dtype: float16
-  save_every: 501
+  save_every: 101
   max_step_saves_to_keep: 1
   datasets:
   - folder_path: input_images
@@ -27,7 +27,7 @@ config:
   - 1024
   train:
   batch_size: 1
-  steps: 500
+  steps: 100
   gradient_accumulation_steps: 1
   train_unet: true
   train_text_encoder: false
@@ -35,7 +35,7 @@ config:
   gradient_checkpointing: true
   noise_scheduler: flowmatch
   optimizer: adamw8bit
-  lr: 0.001
+  lr: 0.0001
   ema_config:
   use_ema: true
   ema_decay: 0.99
@@ -46,7 +46,7 @@ config:
   quantize: true
   sample:
   sampler: flowmatch
-  sample_every: 501
+  sample_every: 101
   width: 1024
   height: 1024
   prompts: []
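
For quick verification after pulling this commit, here is a minimal sketch (not part of the repository) that reads the updated hyperparameters back out of config.yaml. It assumes only that the keys shown in the hunks above exist somewhere in the YAML tree, not any particular nesting; the find_first helper is illustrative.

# Illustrative sketch: walk config.yaml and print the values touched by this
# commit (steps, lr, save_every, sample_every). Assumes PyYAML is installed.
import yaml

def find_first(node, key):
    """Depth-first search for the first value stored under `key`."""
    if isinstance(node, dict):
        if key in node:
            return node[key]
        children = node.values()
    elif isinstance(node, list):
        children = node
    else:
        return None
    for child in children:
        found = find_first(child, key)
        if found is not None:
            return found
    return None

with open("config.yaml") as fh:
    cfg = yaml.safe_load(fh)

for key in ("steps", "lr", "save_every", "sample_every"):
    print(f"{key} = {find_first(cfg, key)}")
# After this commit the expected output is:
# steps = 100, lr = 0.0001, save_every = 101, sample_every = 101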
lora.safetensors CHANGED
@@ -1,3 +1,3 @@
 version https://git-lfs.github.com/spec/v1
-oid sha256:86d0e5f265b76a14e1deec0632dd380146db8382fbce73a96fb94fd469f0b300
+oid sha256:2750f665fc925f0d173cea656d75c4056d10ec7d3cde987a04f7e8ae386209ea
 size 171969408
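
To sanity-check the re-uploaded weights, a small sketch (not part of the repository) using the public safetensors API; the filename lora.safetensors and the float16 dtype come from the diffs above, everything else is illustrative.

# Illustrative sketch: open the LFS-downloaded lora.safetensors and list its
# tensors without loading the whole file at once. Requires safetensors and torch.
from safetensors import safe_open

with safe_open("lora.safetensors", framework="pt") as f:
    names = list(f.keys())
    print(f"{len(names)} LoRA tensors")
    first = f.get_tensor(names[0])
    # The save block above sets dtype: float16, so this should print torch.float16.
    print(names[0], tuple(first.shape), first.dtype)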