SidXXD committed (verified)
Commit ffe5860 · 1 parent: f943670

End of training

<v1*>.bin CHANGED
@@ -1,3 +1,3 @@
  version https://git-lfs.github.com/spec/v1
- oid sha256:f35cec607ede81f1699e2b53b7b837f03a76e6b0fb3168c4e12fad9379d77d31
+ oid sha256:0dc0496df531906df13572c0acd9611ae0fe06bb318d6e2781c6f1681c899640
  size 151785125
concepts_list.json CHANGED
@@ -1 +1 @@
- [{"instance_prompt": "photo of a <v1*> painting", "class_prompt": null, "instance_data_dir": "/home/xide/miniconda3/code/Results_for_paper/Untarget_attack/perturbed_imgs/new_art_2/ACE_lunet/Eps_4/cima-da-conegliano_st-christopher-with-the-infant-christ-and-st-peter/noise-ckpt/5", "class_data_dir": null}]
+ [{"instance_prompt": "photo of a <v1*> painting", "class_prompt": null, "instance_data_dir": "/home/xide/miniconda3/code/data/Untarget_training_img_20/WikiArt/Clean_2/cima-da-conegliano_st-christopher-with-the-infant-christ-and-st-peter", "class_data_dir": null}]
logs/custom-diffusion/1736327778.597218/events.out.tfevents.1736327778.cudahpc06.3692539.1 ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:8a5717b128092a291010ee85a9b3be971ff734d1c6b0b9c8e7678e22ffecdd03
+ size 3101
logs/custom-diffusion/1736327778.5988781/hparams.yml ADDED
@@ -0,0 +1,60 @@
+ adam_beta1: 0.9
+ adam_beta2: 0.999
+ adam_epsilon: 1.0e-08
+ adam_weight_decay: 0.01
+ allow_tf32: false
+ attn_maps_path_name: attn_maps_color
+ center_crop: false
+ checkpointing_steps: 250
+ checkpoints_total_limit: null
+ class_data_dir: null
+ class_prompt: null
+ concepts_list: null
+ dataloader_num_workers: 2
+ enable_xformers_memory_efficient_attention: false
+ freeze_model: crossattn_kv
+ gradient_accumulation_steps: 1
+ gradient_checkpointing: false
+ hflip: true
+ hub_model_id: null
+ hub_token: null
+ initializer_token: ktn
+ instance_data_dir: /home/xide/miniconda3/code/data/Untarget_training_img_20/WikiArt/Clean_2/cima-da-conegliano_st-christopher-with-the-infant-christ-and-st-peter
+ instance_prompt: photo of a <v1*> painting
+ learning_rate: 1.0e-05
+ local_rank: -1
+ logging_dir: logs
+ lr_scheduler: constant
+ lr_warmup_steps: 0
+ max_grad_norm: 1.0
+ max_train_steps: 250
+ mixed_precision: null
+ modifier_token: <v1*>
+ no_safe_serialization: true
+ noaug: false
+ num_class_images: 200
+ num_train_epochs: 1
+ num_validation_images: 2
+ output_dir: /home/xide/miniconda3/code/Results_for_paper/Untarget_attack/Trained_model/Custom_Diffusion/Untarget_training_img_20/new_art_2/Clean/cima-da-conegliano_st-christopher-with-the-infant-christ-and-st-peter
+ photoguard: false
+ photoguard_mode: encoder
+ pretrained_model_name_or_path: runwayml/stable-diffusion-v1-5
+ prior_generation_precision: null
+ prior_loss_weight: 0
+ push_to_hub: true
+ real_prior: false
+ report_to: tensorboard
+ resolution: 512
+ resume_from_checkpoint: null
+ revision: null
+ sample_batch_size: 4
+ scale_lr: true
+ seed: 42
+ set_grads_to_none: false
+ tokenizer_name: null
+ train_batch_size: 2
+ use_8bit_adam: false
+ validation_prompt: null
+ validation_steps: 50
+ variant: null
+ with_prior_preservation: false
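These hyperparameters describe a short Custom Diffusion run (250 steps, batch size 2, constant schedule) on runwayml/stable-diffusion-v1-5, with only the cross-attention key/value projections unfrozen (freeze_model: crossattn_kv) and the `<v1*>` modifier token initialized from ktn. Note that scale_lr: true means the stored learning_rate is not the rate actually applied; assuming the usual diffusers convention of multiplying by gradient accumulation steps, batch size, and process count, the effective rate can be recovered from this file as sketched below (PyYAML and a single-GPU run are assumptions, not values recorded in the log):

```python
import yaml  # PyYAML, assumed available

with open("hparams.yml") as f:
    hp = yaml.safe_load(f)

num_processes = 1  # assumption: single-GPU run; not recorded in hparams.yml
effective_lr = hp["learning_rate"]
if hp["scale_lr"]:
    # Typical diffusers scaling: lr * grad_accum_steps * batch_size * num_processes
    effective_lr *= (
        hp["gradient_accumulation_steps"] * hp["train_batch_size"] * num_processes
    )

print(f"configured lr: {hp['learning_rate']}, effective lr: {effective_lr}")
# With the values above: 1.0e-05 * 1 * 2 * 1 = 2.0e-05
```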
logs/custom-diffusion/events.out.tfevents.1736327778.cudahpc06.3692539.0 ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:f76f22e21bb5d28682d2c0ef75b298a43ce96c313e4c9806e29ae4ca84448131
+ size 20834
pytorch_custom_diffusion_weights.bin CHANGED
@@ -1,3 +1,3 @@
  version https://git-lfs.github.com/spec/v1
- oid sha256:d72573f3f8b5bf9cbaa4873c3ad6d1b3a606f3529732c2c046ee883e94d81a0b
+ oid sha256:0802282bc202f6c46c4ca8c6e6ba78afd3c381d86da3538a86ce56b1879ee469
  size 76691351
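With the updated weights, the usual way to try the retrained concept is to load the cross-attention deltas and the `<v1*>` token embedding into a Stable Diffusion v1-5 pipeline. A minimal sketch using the diffusers Custom Diffusion loading API follows; the local directory path is a placeholder for a clone of this repository, and the prompt simply reuses the instance prompt from hparams.yml:

```python
import torch
from diffusers import DiffusionPipeline

# Base model named in hparams.yml (pretrained_model_name_or_path).
pipe = DiffusionPipeline.from_pretrained(
    "runwayml/stable-diffusion-v1-5", torch_dtype=torch.float16
).to("cuda")

# Placeholder path to a local clone of this repo containing the two .bin files.
model_dir = "./custom-diffusion-model"
pipe.unet.load_attn_procs(model_dir, weight_name="pytorch_custom_diffusion_weights.bin")
pipe.load_textual_inversion(model_dir, weight_name="<v1*>.bin")

image = pipe(
    "photo of a <v1*> painting",  # instance prompt used during training
    num_inference_steps=50,
    guidance_scale=7.5,
).images[0]
image.save("v1_painting.png")
```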