config.json
{
  "architectures": [
    "RobertaForMaskedLM"
  ],
  "attention_probs_dropout_prob": 0.1,
  "bos_token_id": 0,
  "eos_token_id": 2,
  "hidden_act": "gelu",
  "hidden_dropout_prob": 0.1,
  "hidden_size": 1024,
  "initializer_range": 0.02,
  "intermediate_size": 4096,
  "layer_norm_eps": 1e-05,
  "max_position_embeddings": 514,
  "model_type": "roberta",
  "num_attention_heads": 16,
  "num_hidden_layers": 24,
  "pad_token_id": 1,
  "type_vocab_size": 1,
  "vocab_size": 50265
}
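The dimensions above (hidden_size 1024, 24 layers, 16 attention heads) match a RoBERTa-large-sized model. A minimal sketch of consuming this file with the Hugging Face transformers library, assuming the config sits in the working directory; the model below is randomly initialized, since a config alone carries no weights:

# Sketch: build a RobertaForMaskedLM from config.json and run a dummy
# forward pass. Assumes transformers and torch are installed.
import torch
from transformers import RobertaConfig, RobertaForMaskedLM

config = RobertaConfig.from_json_file("config.json")
model = RobertaForMaskedLM(config)  # untrained, randomly initialized

# Dummy batch: token ids must be < vocab_size (50265); pad_token_id is 1.
input_ids = torch.randint(0, config.vocab_size, (1, 16))
with torch.no_grad():
    outputs = model(input_ids)

print(outputs.logits.shape)  # (1, 16, 50265): per-position vocab logits
print(f"{sum(p.numel() for p in model.parameters()):,} parameters")

Note that max_position_embeddings is 514 rather than 512: RoBERTa offsets position ids by pad_token_id + 1, so two embedding slots are reserved and the usable sequence length remains 512.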