# text
# stringclasses
# 206 values
val_data: /scratch/cc12m/{01241..01242}.tar
val_frequency: 1
val_num_samples: 205824
wandb: True
wandb_notes:
wandb_project_name: open-clip
warmup: 10000
wd: 0.1
workers: 6
world_size: 2
z_beta_max: 0.8
zeroshot_frequency: 1
accum_freq: 1
added_positive_type: None
aug_cfg: {}
batch_size: 1024
beta1: 0.9
beta2: 0.98
cache_dir: None
calculate_full: False
checkpoint_path: ./logs/siglip12m-bias-10.0None-negFalse-scaleFalse-pos1.0-eps1e-06-stocFalselr0.0004-ViT-B-32-1024_0121225119/checkpoints
coca_caption_loss_weight: 2.0
coca_contrastive_loss_weight: 1.0
contrast_neg_only: False
copy_codebase: False
csv_caption_key: title
csv_img_key: filepath
csv_separator:  # NOTE(review): bare value parses as null — the original separator character (e.g. a tab or comma) may have been dropped during export; confirm against the training script (unused here since dataset_type is webdataset)
dataset_resampled: False
dataset_type: webdataset
ddp_static_graph: False
debug: False
delete_previous_checkpoint: False
denormalize_features: False
device: cuda:0
dist_backend: None
dist_url: None
distill: False
distill_model: None
distill_pretrained: None
distributed: True
epochs: 33
epochs_cooldown: None
eps: 1e-06  # NOTE(review): under YAML 1.1 resolvers (e.g. PyYAML) a dotless `1e-06` loads as the STRING "1e-06", not a float — write 1.0e-06 or quote if the consumer expects a float; confirm which loader reads this file
force_custom_text: False
force_image_size: None
force_patch_dropout: None
force_quick_gelu: False
freeze_lambda_after_num_epochs: 100
gather_with_grad: True
grad_checkpointing: False
grad_clip_norm: 1.0
horovod: False
image_interpolation: None
image_mean: None
image_resize_mode: None
image_std: None
imagenet_v2: None
imagenet_val: /localscratch/imagenet/val/
init_lambda: 1.0
init_logit_bias: -10.0
init_logit_scale: 10.0
lambda_ema_init: 0.9
lambda_ema_max: 0.999
lambda_ema_schedule: cosine
lambda_eps: 1e-06  # NOTE(review): same YAML 1.1 caveat as eps — dotless exponent form loads as a string under PyYAML-style resolvers; confirm the loader coerces it
lambda_lr: 0.001
lambda_tolerance: 0.0001
lambda_update_frequency: 1
learn_logit_bias: True
learn_logit_scale: True
local_loss: True
local_rank: 0
lock_image: False
lock_image_freeze_bn_stats: False
lock_image_unlocked_groups: 0
lock_text: False
lock_text_freeze_layer_norm: False
lock_text_unlocked_layers: 0
log_every_n_steps: 100
log_level: 20
log_local: False
log_path: ./logs/siglip12m-bias-10.0None-negFalse-scaleFalse-pos1.0-eps1e-06-stocFalselr0.0004-ViT-B-32-1024_0121225119/out.log
logit_scale_clamp: 100
logs: ./logs/
loss_type: CLIP
lr: 0.0004
lr_cooldown_end: 0.0
lr_cooldown_power: 1.0
lr_scheduler: cosine
model: ViT-B-32
model_update_type: ONE_STEP
n_class_tokens: -1
name: siglip12m-bias-10.0None-negFalse-scaleFalse-pos1.0-eps1e-06-stocFalselr0.0004-ViT-B-32-1024_0121225119
no_set_device_rank: False
norm_cap: 1.0
normalize_type: L2
note: siglip12m
pos_coef: 1.0
precision: amp