config.json
{
  "_name_or_path": "xlm-roberta-base",
  "architectures": [
    "XLMRobertaForTokenClassification"
  ],
  "attention_probs_dropout_prob": 0.1,
  "bos_token_id": 0,
  "classifier_dropout": null,
  "eos_token_id": 2,
  "hidden_act": "gelu",
  "hidden_dropout_prob": 0.1,
  "hidden_size": 768,
  "id2label": {
    "0": "B-ADDRESS",
    "1": "I-ADDRESS",
    "2": "B-NAME",
    "3": "I-NAME",
    "4": "B-TICKER_SYMBOL",
    "5": "I-TICKER_SYMBOL",
    "6": "B-CHAIN",
    "7": "I-CHAIN",
    "8": "O"
  },
  "initializer_range": 0.02,
  "intermediate_size": 3072,
  "label2id": {
    "B-ADDRESS": 0,
    "B-CHAIN": 6,
    "B-NAME": 2,
    "B-TICKER_SYMBOL": 4,
    "I-ADDRESS": 1,
    "I-CHAIN": 7,
    "I-NAME": 3,
    "I-TICKER_SYMBOL": 5,
    "O": 8
  },
  "layer_norm_eps": 1e-05,
  "max_position_embeddings": 514,
  "model_type": "xlm-roberta",
  "num_attention_heads": 12,
  "num_hidden_layers": 12,
  "output_past": true,
  "pad_token_id": 1,
  "position_embedding_type": "absolute",
  "torch_dtype": "float32",
  "transformers_version": "4.34.1",
  "type_vocab_size": 1,
  "use_cache": true,
  "vocab_size": 250002
}
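The config describes an xlm-roberta-base checkpoint fine-tuned for token classification, with a BIO label scheme for crypto-related entities (ADDRESS, NAME, TICKER_SYMBOL, CHAIN, plus O). A minimal loading sketch follows, assuming this config.json sits in a local directory alongside the fine-tuned weights and tokenizer files; the directory name "crypto-ner" and the example sentence are hypothetical, not taken from the repo.

# Minimal usage sketch (assumptions: local model directory "crypto-ner"
# containing this config.json, the model weights, and tokenizer files).
from transformers import AutoModelForTokenClassification, AutoTokenizer, pipeline

model_dir = "crypto-ner"  # hypothetical local path
tokenizer = AutoTokenizer.from_pretrained(model_dir)
model = AutoModelForTokenClassification.from_pretrained(model_dir)

# aggregation_strategy="simple" merges B-/I- subword predictions into
# whole entity spans using the id2label mapping from this config.
ner = pipeline(
    "token-classification",
    model=model,
    tokenizer=tokenizer,
    aggregation_strategy="simple",
)
print(ner("Send 2 ETH to 0xAb5801a7D398351b8bE11C439e05C5B3259aeC9B on Ethereum"))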