config.json
{
  "_name_or_path": "xlm-roberta-base",
  "architectures": [
    "XLMRobertaForTokenClassification"
  ],
  "attention_probs_dropout_prob": 0.1,
  "bos_token_id": 0,
  "classifier_dropout": null,
  "eos_token_id": 2,
  "hidden_act": "gelu",
  "hidden_dropout_prob": 0.1,
  "hidden_size": 768,
  "id2label": {
    "0": "0",
    "1": ".",
    "2": ",",
    "3": "?",
    "4": "-",
    "5": ":"
  },
  "initializer_range": 0.02,
  "intermediate_size": 3072,
  "label2id": {
    "0": 0,
    ".": 1,
    ",": 2,
    "?": 3,
    "-": 4,
    ":": 5
  },
  "layer_norm_eps": 1e-05,
  "max_position_embeddings": 514,
  "model_type": "xlm-roberta",
  "num_attention_heads": 12,
  "num_hidden_layers": 12,
  "output_past": true,
  "pad_token_id": 1,
  "position_embedding_type": "absolute",
  "torch_dtype": "float32",
  "transformers_version": "4.18.0",
  "type_vocab_size": 1,
  "use_cache": true,
  "vocab_size": 250002
}
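This config describes an XLM-RoBERTa token-classification head whose label set ("0", ".", ",", "?", "-", ":") looks like a punctuation-prediction scheme, with "0" presumably meaning "no punctuation after this token". Below is a minimal sketch of how the transformers library consumes such a config at load time; the model_path is a placeholder for wherever this config.json and its weights live (local directory or hub id), not a path taken from this listing.

import torch
from transformers import AutoConfig, AutoModelForTokenClassification, AutoTokenizer

# Placeholder: point this at the repository containing the config.json above.
model_path = "./"

config = AutoConfig.from_pretrained(model_path)
print(config.id2label)  # {0: '0', 1: '.', 2: ',', 3: '?', 4: '-', 5: ':'}

tokenizer = AutoTokenizer.from_pretrained(model_path)
model = AutoModelForTokenClassification.from_pretrained(model_path)

# Classify each subword token; the predicted id indexes into id2label.
text = "how are you today i hope you are well"
inputs = tokenizer(text, return_tensors="pt")
with torch.no_grad():
    logits = model(**inputs).logits
pred_ids = logits.argmax(dim=-1)[0].tolist()
tokens = tokenizer.convert_ids_to_tokens(inputs["input_ids"][0])
for token, pred in zip(tokens, pred_ids):
    print(token, config.id2label[pred])

Note that max_position_embeddings is 514 (512 usable positions plus the RoBERTa-style offset for bos/pad), so longer inputs must be chunked before inference.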