{
  "_name_or_path": "distilbert-base-multilingual-cased",
  "activation": "gelu",
  "architectures": [
    "DistilBertForSequenceClassification"
  ],
  "attention_dropout": 0.1,
  "dim": 768,
  "dropout": 0.1,
  "hidden_dim": 3072,
  "id2label": {
    "0": "positive",
    "1": "neutral",
    "2": "negative"
  },
  "initializer_range": 0.02,
  "label2id": {
    "negative": 2,
    "neutral": 1,
    "positive": 0
  },
  "max_position_embeddings": 512,
  "model_type": "distilbert",
  "n_heads": 12,
  "n_layers": 6,
  "output_past": true,
  "pad_token_id": 0,
  "qa_dropout": 0.1,
  "seq_classif_dropout": 0.2,
  "sinusoidal_pos_embds": false,
  "tie_weights_": true,
  "torch_dtype": "float32",
  "transformers_version": "4.28.1",
  "vocab_size": 119547
}