config.json
{
  "architectures": [
    "XLMRobertaForMaskedLM"
  ],
  "attention_probs_dropout_prob": 0.1,
  "bos_token_id": 0,
  "eos_token_id": 2,
  "hidden_act": "gelu",
  "hidden_dropout_prob": 0.1,
  "hidden_size": 768,
  "initializer_range": 0.02,
  "intermediate_size": 3072,
  "layer_norm_eps": 1e-05,
  "max_position_embeddings": 514,
  "model_type": "xlm-roberta",
  "num_attention_heads": 12,
  "num_hidden_layers": 12,
  "output_past": true,
  "pad_token_id": 1,
  "position_embedding_type": "absolute",
  "transformers_version": "4.17.0.dev0",
  "type_vocab_size": 1,
  "use_cache": true,
  "vocab_size": 250002
}
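
A minimal sketch of how a config like this is typically consumed with the Hugging Face transformers library. The repo name "xlm-roberta-base" is an assumption (the hyperparameters above, 12 layers, hidden_size 768, vocab_size 250002, match that checkpoint); any local directory containing this config.json works the same way.

from transformers import AutoConfig, AutoModelForMaskedLM

# Load the configuration. "xlm-roberta-base" is an assumption here;
# a local path such as "./my-model" containing this config.json
# would also be accepted by from_pretrained.
config = AutoConfig.from_pretrained("xlm-roberta-base")

# Instantiate a freshly initialized XLMRobertaForMaskedLM from the
# config alone; from_config does not download pretrained weights.
model = AutoModelForMaskedLM.from_config(config)

print(config.model_type)   # xlm-roberta
print(config.vocab_size)   # 250002

Note that from_config builds the architecture with random weights; to load the trained parameters as well, from_pretrained would be used on the model class instead.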