config.json
{
  "_name_or_path": "facebook/opt-125m",
  "activation_dropout": 0.0,
  "activation_function": "relu",
  "architectures": [
    "OPTForCausalLM"
  ],
  "attention_dropout": 0.0,
  "bos_token_id": 2,
  "do_layer_norm_before": true,
  "dropout": 0.1,
  "eos_token_id": 2,
  "ffn_dim": 3072,
  "hidden_size": 768,
  "init_std": 0.02,
  "layerdrop": 0.0,
  "max_position_embeddings": 2048,
  "model_type": "opt",
  "num_attention_heads": 12,
  "num_hidden_layers": 12,
  "pad_token_id": 1,
  "prefix": "</s>",
  "torch_dtype": "float16",
  "transformers_version": "4.21.0.dev0",
  "use_cache": true,
  "vocab_size": 50272,
  "word_embed_proj_dim": 768
}
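For reference, a minimal sketch of how this configuration is typically consumed with the Hugging Face transformers library (assuming transformers is installed and the facebook/opt-125m checkpoint is fetched from the Hub):

from transformers import AutoConfig, AutoModelForCausalLM

# Load the configuration shown above directly from the Hub.
config = AutoConfig.from_pretrained("facebook/opt-125m")
print(config.model_type)         # "opt"
print(config.hidden_size)        # 768
print(config.num_hidden_layers)  # 12

# Instantiate the model with this configuration and its pretrained weights.
model = AutoModelForCausalLM.from_pretrained("facebook/opt-125m")

Loading through AutoConfig/AutoModelForCausalLM resolves the "architectures" and "model_type" fields above to the OPTForCausalLM class, so the values in config.json (hidden size, number of layers, attention heads, vocabulary size) determine the model that gets built.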