{ "bos_token_id": 0, "d_model": 2048, "eos_token_id": 0, "fused_add_norm": true, "model_type": "mamba", "n_layer": 48, "pad_token_id": 50277, "pad_vocab_size_multiple": 8, "residual_in_fp32": true, "rms_norm": true, "tie_word_embeddings": false, "transformers_version": "4.37.0.dev0", "use_cache": false, "vocab_size": 50280 }