Skip to content

Commit b2638a2

Browse files
Remove accidental blank line
1 parent aa3c988 commit b2638a2

File tree

1 file changed

+0
-1
lines changed

1 file changed

+0
-1
lines changed

src/llama-model.cpp

Lines changed: 0 additions & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -4164,7 +4164,6 @@ bool llama_model::load_tensors(llama_model_loader & ml) {
41644164
layer.attn_norm = create_tensor(tn(LLM_TENSOR_ATTN_NORM, "weight", i), {n_embd}, 0);
41654165

41664166
layer.wq = create_tensor(tn(LLM_TENSOR_ATTN_Q, "weight", i), {n_embd, n_embd_head_k * n_head}, 0);
4167-
41684167
layer.wk = create_tensor(tn(LLM_TENSOR_ATTN_K, "weight", i), {n_embd, n_embd_head_k * n_head}, 0);
41694168
layer.wv = create_tensor(tn(LLM_TENSOR_ATTN_V, "weight", i), {n_embd, n_embd_head_k * n_head}, 0);
41704169
layer.wo = create_tensor(tn(LLM_TENSOR_ATTN_OUT, "weight", i), {n_embd_head_k * n_head, n_embd}, 0);

0 commit comments

Comments (0)