config.json
{
  "architectures": [
    "RobertaForQuestionAnswering"
  ],
  "attention_probs_dropout_prob": 0.1,
  "bos_token_id": 0,
  "eos_token_id": 2,
  "gradient_checkpointing": false,
  "hidden_act": "gelu",
  "hidden_dropout_prob": 0.1,
  "hidden_size": 768,
  "initializer_range": 0.02,
  "intermediate_size": 3072,
  "language": "english",
  "layer_norm_eps": 1e-05,
  "max_position_embeddings": 514,
  "model_type": "roberta",
  "name": "Roberta",
  "num_attention_heads": 12,
  "num_hidden_layers": 12,
  "pad_token_id": 1,
  "type_vocab_size": 1,
  "vocab_size": 50265
}
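For context, a minimal sketch of how a config like this might be consumed with the Hugging Face transformers library. This assumes the file above is saved locally as config.json; loading the config alone does not load trained weights, so the model below is randomly initialized and serves only to show that the config drives the architecture.

# Minimal sketch: parsing this config with Hugging Face transformers.
from transformers import RobertaConfig, RobertaForQuestionAnswering

# Parse config.json into a RobertaConfig object.
config = RobertaConfig.from_json_file("config.json")

print(config.model_type)         # "roberta"
print(config.hidden_size)        # 768
print(config.num_hidden_layers)  # 12

# Instantiate the architecture named in "architectures".
# Weights are randomly initialized here; use from_pretrained(...)
# on a full checkpoint directory to load trained parameters.
model = RobertaForQuestionAnswering(config)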