{
    "model_class_name": "HookedViT",
    "model_name": "openai/clip-vit-base-patch32",
    "vit_model_cfg": null,
    "model_path": null,
    "hook_point_layer": 11,
    "layer_subtype": "hook_resid_post",
    "hook_point_head_index": null,
    "context_size": 50,
    "use_cached_activations": false,
    "use_patches_only": false,
    "cached_activations_path": "activations/_datasets_imagenet1k/openai_clip-vit-base-patch32/blocks.11.hook_resid_post",
    "image_size": 224,
    "architecture": "standard",
    "b_dec_init_method": "geometric_median",
    "expansion_factor": 64,
    "from_pretrained_path": null,
    "seed": 42,
    "d_in": 768,
    "activation_fn_str": "topk",
    "activation_fn_kwargs": {
        "k": 64
    },
    "cls_token_only": false,
    "max_grad_norm": 1.0,
    "initialization_method": "encoder_transpose_decoder",
    "normalize_activations": null,
    "n_batches_in_buffer": 20,
    "store_batch_size": 256,
    "num_workers": 32,
    "num_epochs": 1,
    "verbose": false,
    "l1_coefficient": 0.3,
    "lp_norm": 1,
    "lr": 0.0002,
    "lr_scheduler_name": "cosineannealingwarmup",
    "lr_warm_up_steps": 200,
    "train_batch_size": 4096,
    "dataset_name": "imagenet1k",
    "dataset_path": "/datasets/imagenet1k",
    "dataset_train_path": "/datasets/imagenet1k/ILSVRC/Data/CLS-LOC/train",
    "dataset_val_path": "/datasets/imagenet1k/ILSVRC/Data/CLS-LOC/val",
    "use_ghost_grads": true,
    "feature_sampling_window": 1000,
    "dead_feature_window": 5000,
    "dead_feature_threshold": 1e-08,
    "log_to_wandb": true,
    "wandb_project": "openai_clip_vit_b_32",
    "wandb_entity": "perceptual-alignment",
    "wandb_log_frequency": 100,
    "n_validation_runs": 4,
    "n_checkpoints": 2,
    "checkpoint_path": "models/sae/openai/topk/45f08c42-openai_clip_vit_b_32"
}