{
"model_class_name": "HookedViT",
"model_name": "open-clip:laion/CLIP-ViT-B-32-DataComp.XL-s13B-b90K",
"hook_point": "blocks.9.hook_resid_post",
"hook_point_layer": 9,
"layer_subtype": "hook_resid_post",
"hook_point_head_index": null,
"context_size": 50,
"use_cached_activations": false,
"use_patches_only": false,
"cached_activations_path": "activations/_network_scratch_s_sonia.joseph_datasets_kaggle_datasets/open-clip:laion_CLIP-ViT-B-32-DataComp.XL-s13B-b90K/blocks.9.hook_mlp_out",
"d_in": 768,
"activation_fn_str": "relu",
"activation_fn_kwargs": {},
"cls_token_only": false,
"max_grad_norm": 1.0,
"initialization_method": "encoder_transpose_decoder",
"normalize_activations": null,
"n_batches_in_buffer": 20,
"store_batch_size": 32,
"num_workers": 16,
"num_epochs": 10,
"total_training_images": 13000000,
"total_training_tokens": 50000000,
"image_size": 224,
"device": {
"__type__": "torch.device",
"value": "cuda"
},
"seed": 42,
"dtype": {
"__type__": "torch.dtype",
"value": "torch.float32"
},
"architecture": "gated",
"sparsity_loss": "l1",
"verbose": false,
"b_dec_init_method": "geometric_median",
"expansion_factor": 64,
"from_pretrained_path": null,
"d_sae": 49152,
"l1_coefficient": 0.9,
"lp_norm": 1,
"lr": 0.0002,
"lr_scheduler_name": "cosineannealingwarmup",
"lr_warm_up_steps": 200,
"beta1": 0.9,
"beta2": 0.999,
"train_batch_size": 4096,
"dataset_name": "imagenet1k",
"dataset_path": "data/ImageNet-complete/",
"dataset_train_path": "data/ImageNet-complete/train",
"dataset_val_path": "data/ImageNet-complete/val",
"use_ghost_grads": false,
"feature_sampling_window": 1000,
"dead_feature_window": 5000,
"dead_feature_threshold": 1e-08,
"log_to_wandb": true,
"wandb_project": "lorenz_clip_b_l0",
"wandb_entity": null,
"wandb_log_frequency": 100,
"n_validation_runs": 4,
"n_checkpoints": 10,
"checkpoint_path": "models/sae/clip_B_gated_l0/1e73fd2e-lorenz_clip_b_l0-lorenz_clip_b_l0"
}