Tom Pollak committed on
Commit
3b7ccf3
1 Parent(s): 6a12371

added checkpoints

gpt-0/blocks_8_mlp_out/checkpoints/39z95a9z/12292096/cfg.json ADDED
@@ -0,0 +1 @@
+ {"model_name": "culture-gpt-0", "model_class_name": "HookedTransformer", "hook_name": "blocks.8.hook_mlp_out", "hook_eval": "NOT_IN_USE", "hook_layer": 8, "hook_head_index": null, "dataset_path": "tommyp111/culture-puzzles-1M", "dataset_trust_remote_code": true, "streaming": true, "is_dataset_tokenized": true, "context_size": 405, "use_cached_activations": false, "cached_activations_path": null, "architecture": "standard", "d_in": 512, "d_sae": 8192, "b_dec_init_method": "zeros", "expansion_factor": 16, "activation_fn": "relu", "activation_fn_kwargs": {}, "normalize_sae_decoder": false, "noise_scale": 0.0, "from_pretrained_path": null, "apply_b_dec_to_input": false, "decoder_orthogonal_init": false, "decoder_heuristic_init": true, "init_encoder_as_decoder_transpose": true, "n_batches_in_buffer": 64, "training_tokens": 122880000, "finetuning_tokens": 0, "store_batch_size_prompts": 16, "train_batch_size_tokens": 4096, "normalize_activations": "expected_average_only_in", "device": "mps", "act_store_device": "mps", "seed": 42, "dtype": "float32", "prepend_bos": true, "autocast": false, "autocast_lm": false, "compile_llm": false, "llm_compilation_mode": null, "compile_sae": false, "sae_compilation_mode": null, "adam_beta1": 0.9, "adam_beta2": 0.999, "mse_loss_normalization": null, "l1_coefficient": 5, "lp_norm": 1.0, "scale_sparsity_penalty_by_decoder_norm": true, "l1_warm_up_steps": 1500, "lr": 5e-05, "lr_scheduler_name": "constant", "lr_warm_up_steps": 0, "lr_end": 5e-06, "lr_decay_steps": 6000, "n_restart_cycles": 1, "finetuning_method": null, "use_ghost_grads": false, "feature_sampling_window": 1000, "dead_feature_window": 1000, "dead_feature_threshold": 0.0001, "n_eval_batches": 10, "eval_batch_size_prompts": null, "log_to_wandb": true, "log_activations_store_to_wandb": false, "log_optimizer_state_to_wandb": false, "wandb_project": "culture_sae", "wandb_id": null, "run_name": "8192-L1-5-LR-5e-05-Tokens-1.229e+08", "wandb_entity": null, "wandb_log_frequency": 30, "eval_every_n_wandb_logs": 20, "resume": false, "n_checkpoints": 10, "checkpoint_path": "checkpoints/39z95a9z", "verbose": true, "model_kwargs": {}, "model_from_pretrained_kwargs": {}, "sae_lens_version": "3.14.0", "sae_lens_training_version": "3.14.0", "tokens_per_buffer": 106168320}
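Each checkpoint directory pairs this cfg.json with the sae_weights.safetensors and sparsity.safetensors files added below. A minimal sketch, assuming SAE Lens 3.x (the config records sae_lens_version 3.14.0) and the local directory layout from this commit, of loading the checkpoint back for inspection:

```python
# Hedged sketch: SAE.load_from_pretrained reads cfg.json and
# sae_weights.safetensors from a local checkpoint directory.
from sae_lens import SAE

ckpt_dir = "gpt-0/blocks_8_mlp_out/checkpoints/39z95a9z/12292096"
sae = SAE.load_from_pretrained(ckpt_dir, device="cpu")

print(sae.cfg.hook_name)            # blocks.8.hook_mlp_out
print(sae.cfg.d_in, sae.cfg.d_sae)  # 512 8192
```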
gpt-0/blocks_8_mlp_out/checkpoints/39z95a9z/12292096/sae_weights.safetensors ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:8c101a1daa3b5b2d97c0c5d14830fd76090588c1f22e8a62a37f90cdb068dd06
+ size 33589560
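The 33,589,560-byte LFS payload is consistent with the config's float32 SAE of d_in=512 and d_sae=8192: W_enc (512×8192) and W_dec (8192×512) account for 33,554,432 bytes, with the biases and the safetensors header making up the rest. A sketch, with the tensor key names assumed from the usual SAE Lens save format, of checking this directly:

```python
# Hedged sketch: inspect the raw checkpoint with the safetensors library,
# independent of SAE Lens. Key names (W_enc, W_dec, b_enc, b_dec) are
# assumptions based on the standard SAE Lens format, not taken from this repo.
from safetensors.torch import load_file

path = "gpt-0/blocks_8_mlp_out/checkpoints/39z95a9z/12292096/sae_weights.safetensors"
for name, tensor in load_file(path).items():
    print(name, tuple(tensor.shape), tensor.dtype)
# Expected float32 payload: 2*512*8192*4 + 8192*4 + 512*4 = 33,589,248 bytes,
# matching the 33589560-byte file size above once the header is included.
```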
gpt-0/blocks_8_mlp_out/checkpoints/39z95a9z/12292096/sparsity.safetensors ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:2e67c7c6893f08606f04864a566734709942ef0c62627f7c07c0ef62c586dcb0
+ size 32848
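The 32,848-byte sparsity file matches one float32 value per feature (8192 × 4 = 32,768 bytes plus the header), i.e. the per-feature firing statistics behind the config's dead_feature_threshold of 0.0001. A sketch, with the key name "sparsity" and the log10-frequency convention both assumed (they vary across SAE Lens versions), of counting features below that threshold:

```python
# Hedged sketch: the key name "sparsity" and the log10-frequency convention
# are assumptions about this SAE Lens version's checkpoint format.
from safetensors.torch import load_file

path = "gpt-0/blocks_8_mlp_out/checkpoints/39z95a9z/12292096/sparsity.safetensors"
sparsity = load_file(path)["sparsity"]  # 8192 float32 values, one per feature

# If values are log10 firing frequencies, dead_feature_threshold=1e-4
# corresponds to a cutoff of -4.
dead = (sparsity < -4).sum().item()
print(f"{dead}/{sparsity.numel()} features fire less often than 1e-4")
```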
gpt-0/blocks_8_mlp_out/checkpoints/39z95a9z/16584704/cfg.json ADDED
@@ -0,0 +1 @@
+ {"model_name": "culture-gpt-0", "model_class_name": "HookedTransformer", "hook_name": "blocks.8.hook_mlp_out", "hook_eval": "NOT_IN_USE", "hook_layer": 8, "hook_head_index": null, "dataset_path": "tommyp111/culture-puzzles-1M", "dataset_trust_remote_code": true, "streaming": true, "is_dataset_tokenized": true, "context_size": 405, "use_cached_activations": false, "cached_activations_path": null, "architecture": "standard", "d_in": 512, "d_sae": 8192, "b_dec_init_method": "zeros", "expansion_factor": 16, "activation_fn": "relu", "activation_fn_kwargs": {}, "normalize_sae_decoder": false, "noise_scale": 0.0, "from_pretrained_path": null, "apply_b_dec_to_input": false, "decoder_orthogonal_init": false, "decoder_heuristic_init": true, "init_encoder_as_decoder_transpose": true, "n_batches_in_buffer": 64, "training_tokens": 122880000, "finetuning_tokens": 0, "store_batch_size_prompts": 16, "train_batch_size_tokens": 4096, "normalize_activations": "expected_average_only_in", "device": "mps", "act_store_device": "mps", "seed": 42, "dtype": "float32", "prepend_bos": true, "autocast": false, "autocast_lm": false, "compile_llm": false, "llm_compilation_mode": null, "compile_sae": false, "sae_compilation_mode": null, "adam_beta1": 0.9, "adam_beta2": 0.999, "mse_loss_normalization": null, "l1_coefficient": 5, "lp_norm": 1.0, "scale_sparsity_penalty_by_decoder_norm": true, "l1_warm_up_steps": 1500, "lr": 5e-05, "lr_scheduler_name": "constant", "lr_warm_up_steps": 0, "lr_end": 5e-06, "lr_decay_steps": 6000, "n_restart_cycles": 1, "finetuning_method": null, "use_ghost_grads": false, "feature_sampling_window": 1000, "dead_feature_window": 1000, "dead_feature_threshold": 0.0001, "n_eval_batches": 10, "eval_batch_size_prompts": null, "log_to_wandb": true, "log_activations_store_to_wandb": false, "log_optimizer_state_to_wandb": false, "wandb_project": "culture_sae", "wandb_id": null, "run_name": "8192-L1-5-LR-5e-05-Tokens-1.229e+08", "wandb_entity": null, "wandb_log_frequency": 30, "eval_every_n_wandb_logs": 20, "resume": false, "n_checkpoints": 10, "checkpoint_path": "checkpoints/39z95a9z", "verbose": true, "model_kwargs": {}, "model_from_pretrained_kwargs": {}, "sae_lens_version": "3.14.0", "sae_lens_training_version": "3.14.0", "tokens_per_buffer": 106168320}
gpt-0/blocks_8_mlp_out/checkpoints/39z95a9z/16584704/sae_weights.safetensors ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:1b6508baf08cff6d6fcff3b04067c1500619dd0a24115a97327a07675a473a32
+ size 33589560
gpt-0/blocks_8_mlp_out/checkpoints/39z95a9z/16584704/sparsity.safetensors ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:3ac4e6c9c23a844aef16d24090eb25707b37a7003b81846cc4c85c4e8d6f95b4
+ size 32848