config.json
{
  "architectures": [
    "DeepseekV3ForCausalLM"
  ],
  "attention_bias": false,
  "attention_dropout": 0.0,
  "auto_map": {
    "AutoConfig": "configuration_deepseek.DeepseekV3Config",
    "AutoModel": "modeling_deepseek.DeepseekV3Model",
    "AutoModelForCausalLM": "modeling_deepseek.DeepseekV3ForCausalLM"
  },
  "bos_token_id": 0,
  "eos_token_id": 1,
  "ep_size": 1,
  "first_k_dense_replace": 3,
  "hidden_act": "silu",
  "hidden_size": 7168,
  "initializer_range": 0.02,
  "intermediate_size": 18432,
  "kv_lora_rank": 512,
  "max_position_embeddings": 163840,
  "model_type": "deepseek_v3",
  "moe_intermediate_size": 2048,
  "moe_layer_freq": 1,
  "n_group": 8,
  "n_routed_experts": 256,
  "n_shared_experts": 1,
  "norm_topk_prob": true,
  "num_attention_heads": 128,
  "num_experts_per_tok": 8,
  "num_hidden_layers": 61,
  "num_key_value_heads": 128,
  "num_nextn_predict_layers": 1,
  "q_lora_rank": 1536,
  "qk_nope_head_dim": 128,
  "qk_rope_head_dim": 64,
  "quantization_config": {
    "activation_scheme": "dynamic",
    "fmt": "e4m3",
    "quant_method": "fp8",
    "weight_block_size": [
      128,
      128
    ]
  },
  "rms_norm_eps": 1e-06,
  "rope_scaling": {
    "beta_fast": 32,
    "beta_slow": 1,
    "factor": 40,
    "mscale": 1.0,
    "mscale_all_dim": 1.0,
    "original_max_position_embeddings": 4096,
    "type": "yarn"
  },
  "rope_theta": 10000,
  "routed_scaling_factor": 2.5,
  "scoring_func": "sigmoid",
  "tie_word_embeddings": false,
  "topk_group": 4,
  "topk_method": "noaux_tc",
  "torch_dtype": "bfloat16",
  "transformers_version": "4.46.3",
  "use_cache": true,
  "v_head_dim": 128,
  "vocab_size": 129280
}
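
A minimal sketch of how this config can be loaded and inspected with Hugging Face transformers. The local path "./DeepSeek-V3" is a placeholder, not part of the original file; it stands for any directory containing this config.json alongside the custom configuration_deepseek.py and modeling_deepseek.py modules referenced in "auto_map", which is why trust_remote_code=True is needed.

from transformers import AutoConfig

# Placeholder path to a local checkpoint directory holding config.json
# plus the custom configuration/modeling modules named in "auto_map".
config = AutoConfig.from_pretrained(
    "./DeepSeek-V3",
    trust_remote_code=True,  # "auto_map" points at custom model code
)

# A few architecture facts read straight from the fields above.
print(config.num_hidden_layers)        # 61 transformer blocks
print(config.first_k_dense_replace)    # first 3 blocks use dense FFNs, the rest are MoE
print(config.n_routed_experts,         # 256 routed experts per MoE layer ...
      config.num_experts_per_tok)      # ... of which 8 are active per token
print(config.max_position_embeddings)  # 163840-token context via YaRN rope_scaling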