
Commit 8974f22

fixed styling issues and updated comments
1 parent 12ecec0 · commit 8974f22

File tree

1 file changed: +8 -18 lines changed


src/llama-memory-recurrent.cpp

Lines changed: 8 additions & 18 deletions
@@ -768,11 +768,8 @@ void llama_memory_recurrent::state_write_data(llama_io_write_i & io, const std::
     // Iterate and write all the keys first, each row is a cell
     // Get whole range at a time
     for (uint32_t il = 0; il < n_layer; ++il) {
-
-        if (r_l[il] == nullptr) {
-            // skip null layers (read_data will handle this by checking "r_l" and "s_l" for null)
-            continue;
-        }
+        // skip null layers (read_data will handle this by checking "r_l" and "s_l" for null)
+        if (r_l[il] == nullptr) continue;
 
         // Write key type
         const int32_t r_type_i = (int32_t)r_l[il]->type;
@@ -792,12 +789,8 @@ void llama_memory_recurrent::state_write_data(llama_io_write_i & io, const std::
 
     if (!s_trans) {
         for (uint32_t il = 0; il < n_layer; ++il) {
-
-            // special key to handle null layers
-            if (s_l[il] == nullptr) {
-                // skip null layers (read_data will handle this by checking "r_l" and "s_l" for null)
-                continue;
-            }
+            // skip null layers (read_data will handle this by checking "r_l" and "s_l" for null)
+            if (s_l[il] == nullptr) continue;
 
             // Write value type
             const int32_t s_type_i = (int32_t)s_l[il]->type;
@@ -818,11 +811,8 @@ void llama_memory_recurrent::state_write_data(llama_io_write_i & io, const std::
         // When v is transposed, we also need the element size and get the element ranges from each row
         const uint32_t mem_size = size;
         for (uint32_t il = 0; il < n_layer; ++il) {
-            // special key to handle null layers
-            if (s_l[il] == nullptr) {
-                // skip null layers (read_data will handle this by checking "r_l" and "s_l" for null)
-                continue;
-            }
+            // skip null layers (read_data will handle this by checking "r_l" and "s_l" for null)
+            if (s_l[il] == nullptr) continue;
 
             const uint32_t n_embd_s = hparams.n_embd_s();
 
@@ -969,7 +959,7 @@ bool llama_memory_recurrent::state_read_data(llama_io_read_i & io, uint32_t cell
     // For each layer, read the keys for each cell, one row is one cell, read as one contiguous block
     for (uint32_t il = 0; il < n_layer; ++il) {
         // skip null layers
-        if(r_l[il] == nullptr) continue;
+        if (r_l[il] == nullptr) continue;
 
         // Read type of key
         int32_t r_type_i_ref;
@@ -998,7 +988,7 @@ bool llama_memory_recurrent::state_read_data(llama_io_read_i & io, uint32_t cell
     if (!s_trans) {
         for (uint32_t il = 0; il < n_layer; ++il) {
             // skip null layers
-            if(s_l[il] == nullptr) continue;
+            if (s_l[il] == nullptr) continue;
 
             // Read type of value
             int32_t s_type_i_ref;
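The change settles on one idiom for every layer loop in the state write and read paths: a rationale comment directly above a single-line guard clause. Below is a minimal, self-contained sketch of that pattern; the tensor struct and the layer table are hypothetical stand-ins for llama.cpp's ggml_tensor and the r_l/s_l arrays, not the real types.

#include <cstdint>
#include <cstdio>
#include <vector>

// Hypothetical stand-in for ggml_tensor; only the one field this sketch needs.
struct tensor { int32_t type; };

int main() {
    // A layer table with a gap, mirroring r_l / s_l holding nullptr for
    // layers that carry no recurrent state.
    tensor t0{0}, t2{1};
    std::vector<tensor *> r_l = { &t0, nullptr, &t2 };

    for (uint32_t il = 0; il < (uint32_t) r_l.size(); ++il) {
        // skip null layers (a reader walking the same table detects the gap the same way)
        if (r_l[il] == nullptr) continue;

        // stands in for writing the key type through the io object in the real code
        const int32_t r_type_i = (int32_t) r_l[il]->type;
        std::printf("layer %u: type %d\n", il, r_type_i);
    }
    return 0;
}

Collapsing the braced block to a one-line guard keeps the skip condition and its explanation adjacent, which appears to be the point of moving the comments above the checks.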
