flexibert-mini/config.json — model configuration from the Hugging Face repository shikhartuli/flexibert (tags: Transformers, PyTorch), uploaded by shikhartuli in commit ea1f6d8 ("Upload model").
{
"_name_or_path": "../models/flexibert-mini/",
"architectures": [
"FlexiBERTModel"
],
"attention_heads_list": [
2,
2,
4,
4
],
"attention_probs_dropout_prob": 0.1,
"attention_type": [
"sa",
"sa",
"l",
"l"
],
"bos_token_id": 0,
"conv_kernel_size": 9,
"eos_token_id": 2,
"ff_dim_list": [
[
512,
512,
512
],
[
512,
512,
512
],
[
1024
],
[
1024
]
],
"from_model_dict_hetero": false,
"gradient_checkpointing": false,
"head_ratio": 2,
"hidden_act": "gelu",
"hidden_dim_list": [
256,
256,
128,
128
],
"hidden_dropout_prob": 0.1,
"hidden_size": 768,
"initializer_range": 0.02,
"intermediate_size": 3072,
"layer_norm_eps": 1e-12,
"max_position_embeddings": 512,
"model_type": "flexibert",
"num_attention_heads": 12,
"num_groups": 1,
"num_hidden_layers": 4,
"pad_token_id": 0,
"position_embedding_type": "relative_key",
"similarity_list": [
"sdp",
"sdp",
"dct",
"dct"
],
"torch_dtype": "float32",
"transformers_version": "4.25.0.dev0",
"type_vocab_size": 2,
"use_cache": true,
"vocab_size": 50265
}