Saad381 committed
Commit 646b6fd · verified · 1 Parent(s): 653f044

Delete logs

logs/dreambooth-lora-sd-xl/1732260783.275793/events.out.tfevents.1732260783.835317cdafc1.1713.1 DELETED
@@ -1,3 +0,0 @@
- version https://git-lfs.github.com/spec/v1
- oid sha256:00f747c1ba20de8370512db95695df837f1f6af3e49d9c1e356be61dcbd5e12d
- size 3304
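The deleted log files in this commit are tracked with Git LFS, so the diff shows only the three-line pointer (spec version, SHA-256 object id, and size in bytes) rather than the binary event data itself. Below is a minimal, hypothetical sketch of parsing such a pointer; the helper name is illustrative and not part of this repo:

```python
def parse_lfs_pointer(text: str) -> dict:
    """Parse a Git LFS pointer file ("key value" per line) into a dict."""
    fields = {}
    for line in text.strip().splitlines():
        key, _, value = line.partition(" ")
        fields[key] = value
    fields["size"] = int(fields["size"])  # size is a byte count
    return fields

# Example with the pointer content shown in the hunk above.
pointer = parse_lfs_pointer(
    "version https://git-lfs.github.com/spec/v1\n"
    "oid sha256:00f747c1ba20de8370512db95695df837f1f6af3e49d9c1e356be61dcbd5e12d\n"
    "size 3304\n"
)
print(pointer["oid"], pointer["size"])  # sha256:00f747c1... 3304
```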
logs/dreambooth-lora-sd-xl/1732260783.277529/hparams.yml DELETED
@@ -1,70 +0,0 @@
- adam_beta1: 0.9
- adam_beta2: 0.999
- adam_epsilon: 1.0e-08
- adam_weight_decay: 0.0001
- adam_weight_decay_text_encoder: 0.001
- allow_tf32: false
- cache_dir: null
- caption_column: text
- center_crop: false
- checkpointing_steps: 717
- checkpoints_total_limit: null
- class_data_dir: null
- class_prompt: null
- dataloader_num_workers: 0
- dataset_config_name: null
- dataset_name: TransformerDS
- do_edm_style_training: false
- enable_xformers_memory_efficient_attention: false
- gradient_accumulation_steps: 3
- gradient_checkpointing: true
- hub_model_id: null
- hub_token: null
- image_column: image
- instance_data_dir: null
- instance_prompt: the optimus prime
- learning_rate: 0.0001
- local_rank: -1
- logging_dir: logs
- lr_num_cycles: 1
- lr_power: 1.0
- lr_scheduler: constant
- lr_warmup_steps: 0
- max_grad_norm: 1.0
- max_train_steps: 500
- mixed_precision: fp16
- num_class_images: 100
- num_train_epochs: 100
- num_validation_images: 4
- optimizer: AdamW
- output_dir: Transformer_lora
- output_kohya_format: false
- pretrained_model_name_or_path: stabilityai/stable-diffusion-xl-base-1.0
- pretrained_vae_model_name_or_path: madebyollin/sdxl-vae-fp16-fix
- prior_generation_precision: null
- prior_loss_weight: 1.0
- prodigy_beta3: null
- prodigy_decouple: true
- prodigy_safeguard_warmup: true
- prodigy_use_bias_correction: true
- push_to_hub: false
- random_flip: false
- rank: 4
- repeats: 1
- report_to: tensorboard
- resolution: 1024
- resume_from_checkpoint: null
- revision: null
- sample_batch_size: 4
- scale_lr: false
- seed: 0
- snr_gamma: 5.0
- text_encoder_lr: 5.0e-06
- train_batch_size: 1
- train_text_encoder: false
- use_8bit_adam: true
- use_dora: false
- validation_epochs: 50
- validation_prompt: null
- variant: null
- with_prior_preservation: false
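The deleted hparams.yml above records the settings of a DreamBooth LoRA run against stabilityai/stable-diffusion-xl-base-1.0 (LoRA rank 4, fp16 mixed precision, 8-bit AdamW at learning rate 1e-4, 500 max train steps with gradient accumulation of 3). A minimal sketch of inspecting such a file with PyYAML, assuming the (now-deleted) path below and intended only as an illustration:

```python
import yaml

# Hypothetical path; the actual file was removed by this commit.
path = "logs/dreambooth-lora-sd-xl/1732260783.277529/hparams.yml"

with open(path) as f:
    hparams = yaml.safe_load(f)  # plain mapping of hyperparameter names to values

# Pull out a few of the settings visible in the diff above.
print(hparams["pretrained_model_name_or_path"])  # stabilityai/stable-diffusion-xl-base-1.0
print(hparams["rank"], hparams["learning_rate"], hparams["max_train_steps"])  # 4 0.0001 500
```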
logs/dreambooth-lora-sd-xl/1732432747.252868/events.out.tfevents.1732432747.682f2d054e62.2555.1 DELETED
@@ -1,3 +0,0 @@
- version https://git-lfs.github.com/spec/v1
- oid sha256:5f9346b33e5878156ec46b6d58d548b8765add6d20afac19abdee7ddb7591133
- size 3298
logs/dreambooth-lora-sd-xl/1732432747.2546117/hparams.yml DELETED
@@ -1,70 +0,0 @@
- adam_beta1: 0.9
- adam_beta2: 0.999
- adam_epsilon: 1.0e-08
- adam_weight_decay: 0.0001
- adam_weight_decay_text_encoder: 0.001
- allow_tf32: false
- cache_dir: null
- caption_column: text
- center_crop: false
- checkpointing_steps: 717
- checkpoints_total_limit: null
- class_data_dir: null
- class_prompt: null
- dataloader_num_workers: 0
- dataset_config_name: null
- dataset_name: TransformerDS
- do_edm_style_training: false
- enable_xformers_memory_efficient_attention: false
- gradient_accumulation_steps: 3
- gradient_checkpointing: true
- hub_model_id: null
- hub_token: null
- image_column: image
- instance_data_dir: null
- instance_prompt: the elita-1
- learning_rate: 0.0001
- local_rank: -1
- logging_dir: logs
- lr_num_cycles: 1
- lr_power: 1.0
- lr_scheduler: constant
- lr_warmup_steps: 0
- max_grad_norm: 1.0
- max_train_steps: 500
- mixed_precision: fp16
- num_class_images: 100
- num_train_epochs: 100
- num_validation_images: 4
- optimizer: AdamW
- output_dir: Transformer_lora
- output_kohya_format: false
- pretrained_model_name_or_path: stabilityai/stable-diffusion-xl-base-1.0
- pretrained_vae_model_name_or_path: madebyollin/sdxl-vae-fp16-fix
- prior_generation_precision: null
- prior_loss_weight: 1.0
- prodigy_beta3: null
- prodigy_decouple: true
- prodigy_safeguard_warmup: true
- prodigy_use_bias_correction: true
- push_to_hub: false
- random_flip: false
- rank: 4
- repeats: 1
- report_to: tensorboard
- resolution: 1024
- resume_from_checkpoint: null
- revision: null
- sample_batch_size: 4
- scale_lr: false
- seed: 0
- snr_gamma: 5.0
- text_encoder_lr: 5.0e-06
- train_batch_size: 1
- train_text_encoder: false
- use_8bit_adam: true
- use_dora: false
- validation_epochs: 50
- validation_prompt: null
- variant: null
- with_prior_preservation: false
logs/dreambooth-lora-sd-xl/events.out.tfevents.1732260783.835317cdafc1.1713.0 DELETED
@@ -1,3 +0,0 @@
- version https://git-lfs.github.com/spec/v1
- oid sha256:fc0bc9186895169ae1105d07cc1fb584349ca818d1d320d8b590c0f22e471199
- size 125314
logs/dreambooth-lora-sd-xl/events.out.tfevents.1732432747.682f2d054e62.2555.0 DELETED
@@ -1,3 +0,0 @@
- version https://git-lfs.github.com/spec/v1
- oid sha256:5c383ae59e0aee801ba8f14fe55b472b1aac76ab27a426784ab13e395a72dec6
- size 125314
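The events.out.tfevents.* files removed here are TensorBoard event logs (the run was configured with report_to: tensorboard). A minimal sketch of how scalars such as the training loss could be read back from one of these files with the tensorboard package, assuming the (now-deleted) path below; the exact scalar tag names depend on the training script:

```python
from tensorboard.backend.event_processing.event_accumulator import EventAccumulator

# Hypothetical path; this event file was deleted by this commit.
path = "logs/dreambooth-lora-sd-xl/events.out.tfevents.1732432747.682f2d054e62.2555.0"

acc = EventAccumulator(path)
acc.Reload()  # parse the event file into memory

# List the scalar tags the run logged, then dump the first one.
tags = acc.Tags()["scalars"]
print(tags)
for event in acc.Scalars(tags[0]):
    print(event.step, event.value)
```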