config.json
```json
{
  "architectures": [
    "NemotronHForCausalLM"
  ],
  "attention_bias": false,
  "attention_dropout": 0.0,
  "auto_map": {
    "AutoConfig": "configuration_nemotron_h.NemotronHConfig",
    "AutoModel": "modeling_nemotron_h.NemotronHForCausalLM",
    "AutoModelForCausalLM": "modeling_nemotron_h.NemotronHForCausalLM"
  },
  "bos_token_id": 1,
  "chunk_size": 128,
  "conv_kernel": 4,
  "eos_token_id": 2,
  "expand": 2,
  "head_dim": 128,
  "hidden_dropout": 0.0,
  "hidden_size": 2688,
  "hybrid_override_pattern": "MEMEM*EMEMEM*EMEMEM*EMEMEM*EMEMEM*EMEMEMEM*EMEMEMEME",
  "initializer_range": 0.02,
  "intermediate_size": 1856,
  "layer_norm_epsilon": 1e-05,
  "mamba_head_dim": 64,
  "mamba_hidden_act": "silu",
  "mamba_num_heads": 64,
  "mamba_proj_bias": false,
  "mamba_ssm_cache_dtype": "float32",
  "max_position_embeddings": 262144,
  "mlp_bias": false,
  "mlp_hidden_act": "relu2",
  "model_type": "nemotron_h",
  "moe_intermediate_size": 1856,
  "moe_shared_expert_intermediate_size": 3712,
  "n_group": 1,
  "n_groups": 8,
  "n_routed_experts": 128,
  "n_shared_experts": 1,
  "norm_eps": 1e-05,
  "norm_topk_prob": true,
  "num_attention_heads": 32,
  "num_experts_per_tok": 6,
  "num_hidden_layers": 52,
  "num_key_value_heads": 2,
  "num_logits_to_keep": 1,
  "pad_token_id": 0,
  "partial_rotary_factor": 1.0,
  "rescale_prenorm_residual": true,
  "residual_in_fp32": false,
  "rope_theta": 10000,
  "routed_scaling_factor": 2.5,
  "sliding_window": null,
  "ssm_state_size": 128,
  "tie_word_embeddings": false,
  "time_step_floor": 0.0001,
  "time_step_max": 0.1,
  "time_step_min": 0.001,
  "topk_group": 1,
  "torch_dtype": "bfloat16",
  "transformers_version": "4.55.4",
  "use_bias": false,
  "use_cache": true,
  "use_conv_bias": true,
  "use_mamba_kernels": true,
  "vocab_size": 131072
}
```
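
Because the `auto_map` above routes `AutoConfig` and `AutoModelForCausalLM` to the custom `configuration_nemotron_h` / `modeling_nemotron_h` modules shipped with the checkpoint, loading requires `trust_remote_code=True`. Below is a minimal sketch of loading this config and model with `transformers`; the repository id is a placeholder assumption, not something taken from the file.

```python
# Sketch of loading a checkpoint that ships this config.json.
# Assumption: "REPO_ID" stands for the actual local path or Hub id of the checkpoint.
import torch
from transformers import AutoConfig, AutoModelForCausalLM, AutoTokenizer

REPO_ID = "path-or-hub-id-of-this-checkpoint"  # placeholder

# The custom NemotronHConfig class is resolved via auto_map, hence trust_remote_code=True.
config = AutoConfig.from_pretrained(REPO_ID, trust_remote_code=True)
print(config.model_type)               # "nemotron_h"
print(config.hybrid_override_pattern)  # hybrid Mamba/attention layer layout

model = AutoModelForCausalLM.from_pretrained(
    REPO_ID,
    torch_dtype=torch.bfloat16,        # matches "torch_dtype": "bfloat16"
    trust_remote_code=True,
)
tokenizer = AutoTokenizer.from_pretrained(REPO_ID, trust_remote_code=True)
```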