
Commit d2f46f1

committed
moe cleanups
1 parent 68cb784 commit d2f46f1

1 file changed, 1 insertion(+), 3 deletions(-)


src/llama-hparams.h

Lines changed: 1 addition & 3 deletions
@@ -120,14 +120,12 @@ struct llama_hparams {
 
     uint32_t attn_head_dim = 0;
     bool mamba_rms_norm = false;
-    double rope_theta = 10000.0;
     uint32_t vocab_size = 0;
     uint32_t intermediate_size = 0;
-    float mamba_expand = 0.0f;
+    float mamba_expand = 0.0f;
     bool ssm_rms_norm = false;
     bool ssm_conv_bias = false;
     bool ssm_proj_bias = false;
-    uint32_t chunk_size = 0;
 
     // for hybrid state space models
     std::array<bool, LLAMA_MAX_LAYERS> recurrent_layer_arr;
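For reference, a minimal sketch of the affected fragment of struct llama_hparams as it reads after this commit, reconstructed only from the diff context above; members outside the hunk are omitted and marked with ellipsis comments.

struct llama_hparams {
    // ... preceding members omitted ...

    uint32_t attn_head_dim = 0;
    bool mamba_rms_norm = false;
    uint32_t vocab_size = 0;
    uint32_t intermediate_size = 0;
    float mamba_expand = 0.0f;
    bool ssm_rms_norm = false;
    bool ssm_conv_bias = false;
    bool ssm_proj_bias = false;

    // for hybrid state space models
    std::array<bool, LLAMA_MAX_LAYERS> recurrent_layer_arr;

    // ... remaining members omitted ...
};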
