config.json
{
  "architectures": [
    "Qwen2ForCausalLM"
  ],
  "attention_dropout": 0.0,
  "bos_token_id": 151643,
  "eos_token_id": 151645,
  "hidden_act": "silu",
  "hidden_size": 5120,
  "initializer_range": 0.02,
  "intermediate_size": 13824,
  "max_position_embeddings": 32768,
  "max_window_layers": 70,
  "model_type": "qwen2",
  "num_attention_heads": 40,
  "num_hidden_layers": 48,
  "num_key_value_heads": 8,
  "quantization_config": {
    "bits": 4,
    "group_size": 128,
    "modules_to_not_convert": null,
    "quant_method": "awq",
    "version": "gemm",
    "zero_point": true
  },
  "rms_norm_eps": 1e-06,
  "rope_theta": 1000000.0,
  "sliding_window": 131072,
  "tie_word_embeddings": false,
  "torch_dtype": "float16",
  "transformers_version": "4.41.1",
  "use_cache": true,
  "use_sliding_window": false,
  "vocab_size": 152064
}
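
A minimal loading sketch for a checkpoint that ships this config. The repo id below is a placeholder (it is not given in this file), and it assumes transformers >= 4.41.1 plus an AWQ backend (e.g. autoawq) are installed; the "quantization_config" block above is read automatically by from_pretrained, so no extra quantization arguments are needed.

from transformers import AutoModelForCausalLM, AutoTokenizer

model_id = "your-org/your-qwen2-awq-model"  # placeholder repo id, not taken from config.json

tokenizer = AutoTokenizer.from_pretrained(model_id)
model = AutoModelForCausalLM.from_pretrained(
    model_id,
    torch_dtype="auto",   # resolves to float16, matching "torch_dtype" above
    device_map="auto",    # place the 4-bit AWQ weights on available GPUs
)

inputs = tokenizer("Hello", return_tensors="pt").to(model.device)
outputs = model.generate(**inputs, max_new_tokens=32)
print(tokenizer.decode(outputs[0], skip_special_tokens=True))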