Upload tiny models for RwkvForCausalLM
2f452fd
{
"activation_function": "gelu",
"architectures": [
"RwkvForCausalLM"
],
"attention_hidden_size": 32,
"attn_pdrop": 0.1,
"bos_token_id": 0,
"context_length": 1024,
"eos_token_id": 0,
"gradient_checkpointing": false,
"hidden_size": 32,
"intermediate_size": 37,
"is_decoder": true,
"layer_norm_epsilon": 1e-05,
"model_type": "rwkv",
"n_positions": 512,
"num_hidden_layers": 5,
"pad_token_id": 1023,
"reorder_and_upcast_attn": false,
"rescale_every": 6,
"resid_pdrop": 0.1,
"scale_attn_by_inverse_layer_idx": false,
"tie_word_embeddings": false,
"torch_dtype": "float32",
"transformers_version": "4.30.0.dev0",
"type_vocab_size": 16,
"use_cache": true,
"vocab_size": 1024
}
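
A minimal sketch of how this tiny config could be instantiated and exercised, assuming the `transformers` library with RWKV support (roughly v4.30 or later, matching `transformers_version` above). Only the RWKV-relevant keys are passed; the values mirror the config file, and the dummy input below is illustrative.

```python
import torch
from transformers import RwkvConfig, RwkvForCausalLM

# Rebuild the tiny configuration from the values in the JSON above.
config = RwkvConfig(
    attention_hidden_size=32,
    bos_token_id=0,
    context_length=1024,
    eos_token_id=0,
    hidden_size=32,
    intermediate_size=37,
    layer_norm_epsilon=1e-5,
    num_hidden_layers=5,
    rescale_every=6,
    tie_word_embeddings=False,
    use_cache=True,
    vocab_size=1024,
)

# Randomly initialized tiny model; useful for smoke tests, not for generation quality.
model = RwkvForCausalLM(config)

# Dummy batch of token ids drawn from the 1024-token vocabulary.
input_ids = torch.randint(0, config.vocab_size, (1, 8))
outputs = model(input_ids)
print(outputs.logits.shape)  # torch.Size([1, 8, 1024])
```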