Upload siglip12m-bias-10.0None-negFalse-scaleFalse-pos1.0-eps1e-06-stocFalselr0.0004-ViT-B-32-1024_0121225119/params.txt with huggingface_hub
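The commit message above is the default that `huggingface_hub` generates when a file is pushed with `HfApi.upload_file`. A minimal sketch of such an upload is below; the `repo_id` and the local source path are placeholders, since neither appears in this commit:

```python
from huggingface_hub import HfApi

api = HfApi()  # picks up the token from `huggingface-cli login` / HF_TOKEN

run_dir = "siglip12m-bias-10.0None-negFalse-scaleFalse-pos1.0-eps1e-06-stocFalselr0.0004-ViT-B-32-1024_0121225119"

api.upload_file(
    path_or_fileobj=f"./logs/{run_dir}/params.txt",  # local file (assumed location under the run's log dir)
    path_in_repo=f"{run_dir}/params.txt",            # destination path shown in this commit
    repo_id="username/repo-name",                    # placeholder: the target repo is not shown here
    repo_type="model",
    # leaving commit_message unset yields the default "Upload <path_in_repo> with huggingface_hub"
)
```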
siglip12m-bias-10.0None-negFalse-scaleFalse-pos1.0-eps1e-06-stocFalselr0.0004-ViT-B-32-1024_0121225119/params.txt
ADDED
@@ -0,0 +1,124 @@
+accum_freq: 1
+added_positive_type: None
+aug_cfg: {}
+batch_size: 1024
+beta1: 0.9
+beta2: 0.98
+cache_dir: None
+calculate_full: False
+checkpoint_path: ./logs/siglip12m-bias-10.0None-negFalse-scaleFalse-pos1.0-eps1e-06-stocFalselr0.0004-ViT-B-32-1024_0121225119/checkpoints
+coca_caption_loss_weight: 2.0
+coca_contrastive_loss_weight: 1.0
+contrast_neg_only: False
+copy_codebase: False
+csv_caption_key: title
+csv_img_key: filepath
+csv_separator:
+dataset_resampled: False
+dataset_type: webdataset
+ddp_static_graph: False
+debug: False
+delete_previous_checkpoint: False
+denormalize_features: False
+device: cuda:0
+dist_backend: None
+dist_url: None
+distill: False
+distill_model: None
+distill_pretrained: None
+distributed: True
+epochs: 33
+epochs_cooldown: None
+eps: 1e-06
+force_custom_text: False
+force_image_size: None
+force_patch_dropout: None
+force_quick_gelu: False
+freeze_lambda_after_num_epochs: 100
+gather_with_grad: True
+grad_checkpointing: False
+grad_clip_norm: 1.0
+horovod: False
+image_interpolation: None
+image_mean: None
+image_resize_mode: None
+image_std: None
+imagenet_v2: None
+imagenet_val: /localscratch/imagenet/val/
+init_lambda: 1.0
+init_logit_bias: -10.0
+init_logit_scale: 10.0
+lambda_ema_init: 0.9
+lambda_ema_max: 0.999
+lambda_ema_schedule: cosine
+lambda_eps: 1e-06
+lambda_lr: 0.001
+lambda_tolerance: 0.0001
+lambda_update_frequency: 1
+learn_logit_bias: True
+learn_logit_scale: True
+local_loss: True
+local_rank: 0
+lock_image: False
+lock_image_freeze_bn_stats: False
+lock_image_unlocked_groups: 0
+lock_text: False
+lock_text_freeze_layer_norm: False
+lock_text_unlocked_layers: 0
+log_every_n_steps: 100
+log_level: 20
+log_local: False
+log_path: ./logs/siglip12m-bias-10.0None-negFalse-scaleFalse-pos1.0-eps1e-06-stocFalselr0.0004-ViT-B-32-1024_0121225119/out.log
+logit_scale_clamp: 100
+logs: ./logs/
+loss_type: CLIP
+lr: 0.0004
+lr_cooldown_end: 0.0
+lr_cooldown_power: 1.0
+lr_scheduler: cosine
+model: ViT-B-32
+model_update_type: ONE_STEP
+n_class_tokens: -1
+name: siglip12m-bias-10.0None-negFalse-scaleFalse-pos1.0-eps1e-06-stocFalselr0.0004-ViT-B-32-1024_0121225119
+no_set_device_rank: False
+norm_cap: 1.0
+normalize_type: L2
+note: siglip12m
+pos_coef: 1.0
+precision: amp
+pretrained:
+pretrained_image: False
+rank: 0
+remote_sync: None
+remote_sync_frequency: 300
+remote_sync_protocol: s3
+report_to: wandb
+resume: None
+save_frequency: 1
+save_most_recent: False
+scale_loss: False
+seed: 42
+siglip: True
+skip_scheduler: False
+stoc_fit_lambda: False
+tensorboard: False
+tensorboard_path:
+torchcompile: False
+torchscript: False
+trace: False
+train_data: /localscratch/mm_datasets/cc12m/cc12m/{00000..01240}.tar
+train_data_upsampling_factors: None
+train_num_samples: 9187328
+use_bn_sync: False
+use_bnb_linear: None
+val_data: /localscratch/mm_datasets/cc12m/cc12m/{01241..01242}.tar
+val_frequency: 1
+val_num_samples: 205824
+wandb: True
+wandb_notes:
+wandb_project_name: open-clip
+warmup: 10000
+wd: 0.1
+workers: 2
+world_size: 2
+zeroshot_frequency: 1
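Because params.txt is a flat list of `key: value` lines (with a few empty values such as `pretrained:` and `tensorboard_path:`), it can be read back into a plain dict for bookkeeping or comparing runs. A minimal sketch assuming exactly the layout shown above; `load_params` is an illustrative helper, not part of the training code, and every value is kept as a string:

```python
from pathlib import Path

def load_params(path: str) -> dict[str, str]:
    """Parse a params.txt of 'key: value' lines into a dict of strings."""
    params: dict[str, str] = {}
    for line in Path(path).read_text().splitlines():
        key, sep, value = line.partition(":")
        if not sep:  # skip blank or malformed lines
            continue
        params[key.strip()] = value.strip()  # empty values (e.g. 'pretrained:') become ""
    return params

params = load_params("params.txt")
print(params["model"], params["batch_size"], params["lr"])  # ViT-B-32 1024 0.0004
```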