darpanaswal committed
Commit 9caa507 · verified · 1 Parent(s): ea510c2

Upload SAE l0_250_0

l0_250_0/cfg.json ADDED
@@ -0,0 +1 @@
+ {"d_in": 2048, "d_sae": 8192, "dtype": "float32", "device": "cpu", "apply_b_dec_to_input": false, "normalize_activations": "none", "reshape_activations": "none", "metadata": {"sae_lens_version": "6.21.0", "sae_lens_training_version": "6.21.0", "dataset_path": "darpanaswal/LLMSymGuard", "hook_name": "blocks.7.hook_attn_out", "model_name": "meta-llama/Llama-3.2-1B-Instruct", "model_class_name": "HookedTransformer", "hook_head_index": null, "context_size": 128, "seqpos_slice": [null], "model_from_pretrained_kwargs": {"center_writing_weights": false}, "prepend_bos": true, "exclude_special_tokens": false, "sequence_separator_token": "bos", "disable_concat_sequences": false}, "architecture": "jumprelu"}
l0_250_0/sae_weights.safetensors ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:99ca0af74157f38a9bed3ff4dc4c029ed68ce837f1b58994c7afeedfca868773
+ size 134291856
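
The weights file is stored via Git LFS, so the diff records only a pointer, not the tensors themselves. After fetching the actual blob (e.g. with git lfs pull or huggingface_hub), the pointer's oid and size can be checked against the downloaded file; a minimal sketch using only the standard library, where the local path is an assumption:

# Minimal sketch: verify a downloaded LFS object against the pointer above.
# The path is an assumption; adjust it to wherever the blob was fetched.
import hashlib
import os

path = "l0_250_0/sae_weights.safetensors"
expected_oid = "99ca0af74157f38a9bed3ff4dc4c029ed68ce837f1b58994c7afeedfca868773"
expected_size = 134291856

assert os.path.getsize(path) == expected_size, "size mismatch"

h = hashlib.sha256()
with open(path, "rb") as f:
    for chunk in iter(lambda: f.read(1 << 20), b""):
        h.update(chunk)
assert h.hexdigest() == expected_oid, "sha256 mismatch"
print("LFS object matches pointer")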