config.json
{
  "_name_or_path": "/root/.cache/torch/sentence_transformers/BAAI_bge-large-en/",
  "architectures": [
    "BertModel"
  ],
  "attention_probs_dropout_prob": 0.1,
  "classifier_dropout": null,
  "gradient_checkpointing": false,
  "hidden_act": "gelu",
  "hidden_dropout_prob": 0.1,
  "hidden_size": 1024,
  "id2label": {
    "0": "LABEL_0"
  },
  "initializer_range": 0.02,
  "intermediate_size": 4096,
  "label2id": {
    "LABEL_0": 0
  },
  "layer_norm_eps": 1e-12,
  "max_position_embeddings": 512,
  "model_type": "bert",
  "num_attention_heads": 16,
  "num_hidden_layers": 24,
  "pad_token_id": 0,
  "position_embedding_type": "absolute",
  "torch_dtype": "float32",
  "transformers_version": "4.30.0",
  "type_vocab_size": 2,
  "use_cache": true,
  "vocab_size": 30522
}
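
This config describes a standard BERT-large encoder (24 layers, 16 attention heads, hidden size 1024, 512-token context). Below is a minimal sketch of how such a config and its checkpoint might be loaded with the transformers library; it assumes the files correspond to the BAAI/bge-large-en model on the Hugging Face Hub (inferred from the _name_or_path cache path above), and the CLS-pooling-plus-normalization step is a common convention for sentence-embedding checkpoints, not something stored in config.json.

# Sketch: load a model matching the config above (assumed Hub ID: BAAI/bge-large-en).
import torch
from transformers import AutoConfig, AutoModel, AutoTokenizer

model_id = "BAAI/bge-large-en"  # assumption; substitute a local path if needed

config = AutoConfig.from_pretrained(model_id)
assert config.model_type == "bert"   # matches "model_type": "bert"
assert config.hidden_size == 1024    # BERT-large geometry from the config

tokenizer = AutoTokenizer.from_pretrained(model_id)
model = AutoModel.from_pretrained(model_id)
model.eval()

# Tokenize within the 512-token limit set by max_position_embeddings.
inputs = tokenizer(
    "an example sentence",
    return_tensors="pt",
    truncation=True,
    max_length=config.max_position_embeddings,
)

with torch.no_grad():
    outputs = model(**inputs)

# Take the [CLS] token vector and L2-normalize it to get a sentence embedding.
embedding = torch.nn.functional.normalize(outputs.last_hidden_state[:, 0], dim=-1)
print(embedding.shape)  # torch.Size([1, 1024]) -- hidden_size from the config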