
Commit a6e5be6

Commit message: Clean
1 parent: 2b9ce67
File tree: 3 files changed (+0 lines, -20 lines)

extension/llm/modules/attention.py

Lines changed: 0 additions & 5 deletions
@@ -364,11 +364,6 @@ def forward(
     k = k.unsqueeze(2).expand(expand_shape).flatten(1, 2)
     v = v.unsqueeze(2).expand(expand_shape).flatten(1, 2)

-    # print(f"q shape: {q.shape}")
-    # print(f"k shape: {k.shape}")
-    # print(f"v shape: {v.shape}")
-    # print(f"mask shape: {mask.shape}")
-
     output = self._attention_fn(
         q,
         k,

extension/llm/modules/kv_cache.py

Lines changed: 0 additions & 1 deletion
@@ -105,7 +105,6 @@ def update(
     f", but found new key tensors with batch size {k_val.shape[0]}!"
     )

-    # print(self.cache_pos[0], seq_len, self.max_seq_len)
     assert (self.cache_pos[0] + seq_len) <= self.max_seq_len

     k_out = self.k_cache

kernels/portable/cpu/util/broadcast_util.cpp

Lines changed: 0 additions & 14 deletions
@@ -6,8 +6,6 @@
  * LICENSE file in the root directory of this source tree.
  */

-#include <iostream>
-
 #include <executorch/kernels/portable/cpu/util/repeat_util.h>
 #include <executorch/runtime/core/exec_aten/exec_aten.h>
 #include <executorch/runtime/core/exec_aten/util/scalar_type_util.h>

@@ -215,18 +213,6 @@ ET_NODISCARD Error get_broadcast_target_size(
     Tensor::SizesType* out_sizes,
     const size_t out_sizes_len,
     size_t* out_dim) {
-  // std::cout << "a_size: ";
-  // for (const auto& s : a_size) {
-  //   std::cout << s << " ";
-  // }
-  // std::cout << std::endl;
-
-  // std::cout << "b_size: ";
-  // for (const auto& s : b_size) {
-  //   std::cout << s << " ";
-  // }
-  // std::cout << std::endl;
-
   ET_CHECK_OR_RETURN_ERROR(
       tensors_are_broadcastable_between(a_size, b_size),
       InvalidArgument,
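The lines deleted above were commented-out std::cout dumps of the a_size and b_size inputs to get_broadcast_target_size. As a minimal side sketch, not part of this commit or of the ExecuTorch API, the same kind of size dump can live behind a compile-time guard instead of commented-out code; the DEBUG_BROADCAST macro and print_sizes helper below are hypothetical names used only for illustration.

// Hypothetical sketch only: a compile-time-guarded size dump that replaces
// commented-out std::cout blocks. Build with -DDEBUG_BROADCAST to enable it.
#include <cstddef>
#include <iostream>
#include <vector>

// Prints a labeled list of dimension sizes, e.g. "a_size: 2 1 4".
// Compiles to a no-op unless DEBUG_BROADCAST is defined.
void print_sizes(const char* label, const std::vector<std::size_t>& sizes) {
#ifdef DEBUG_BROADCAST
  std::cout << label << ": ";
  for (const auto s : sizes) {
    std::cout << s << " ";
  }
  std::cout << std::endl;
#else
  (void)label;
  (void)sizes;
#endif
}

int main() {
  // Example usage: with -DDEBUG_BROADCAST these calls print the shapes;
  // without it they compile away, so no cleanup commit is needed later.
  print_sizes("a_size", {2, 1, 4});
  print_sizes("b_size", {1, 3, 4});
  return 0;
}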
