17 changes: 9 additions & 8 deletions ggml/src/ggml-cpu/llamafile/sgemm.cpp
@@ -280,14 +280,6 @@ template <> inline __m256bh load(const float *p) {
 }
 #endif
 
-////////////////////////////////////////////////////////////////////////////////////////////////////
-// CONSTANTS
-
-#if defined(__AVX__) || defined(__AVX2__) || defined(__AVX512F__)
-static const int8_t kvalues_iq4nl[16] = {-127, -104, -83, -65, -49, -35, -22, -10, 1, 13, 25, 38, 53, 69, 89, 113};
-static const __m128i iq4nlt = _mm_loadu_si128((const __m128i *) kvalues_iq4nl);
-#endif
-
 ////////////////////////////////////////////////////////////////////////////////////////////////////
 // FLOATING POINT MATRIX MULTIPLICATION
 
@@ -614,6 +606,14 @@ class tinyBLAS_Q0_AVX {
                     TC *C, int64_t ldc,
                     int ith, int nth)
         : A(A), B(B), C(C), k(k), lda(lda), ldb(ldb), ldc(ldc), ith(ith), nth(nth) {
+        const int8_t kvalues_iq4nl[16] = {
+            -127, -104, -83, -65,
+            -49, -35, -22, -10,
+            1, 13, 25, 38,
+            53, 69, 89, 113
+        };
+
+        iq4nlt = _mm_loadu_si128((const __m128i *)kvalues_iq4nl);
     }
 
     void matmul(int64_t m, int64_t n) {
@@ -1038,6 +1038,7 @@ class tinyBLAS_Q0_AVX {
     const int64_t ldc;
     const int ith;
     const int nth;
+    __m128i iq4nlt;
 };
 #endif // __AVX__
 
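Note on the change above: the diff moves the IQ4_NL lookup table out of namespace scope, where `_mm_loadu_si128` ran as part of static initialization, and into the `tinyBLAS_Q0_AVX` constructor, with `iq4nlt` becoming a non-static member. The SSE load then executes only when an instance is actually constructed. A minimal standalone sketch of the pattern follows; the class name is a stand-in, and the real class's template parameters and other members are elided:

    #include <immintrin.h>
    #include <cstdint>

    // Before: a namespace-scope __m128i whose initializer issues a vector
    // load during static initialization, before main() runs:
    //     static const __m128i iq4nlt = _mm_loadu_si128(...);
    //
    // After (the pattern in the diff): the table is loaded per instance,
    // so the instruction executes only on paths that construct the class.
    class Iq4nlTableSketch {               // stand-in for tinyBLAS_Q0_AVX
      public:
        Iq4nlTableSketch() {
            // Non-linear IQ4_NL quantization levels, as in the diff.
            const int8_t kvalues_iq4nl[16] = {
                -127, -104, -83, -65,
                 -49,  -35, -22, -10,
                   1,   13,  25,  38,
                  53,   69,  89, 113
            };
            iq4nlt = _mm_loadu_si128((const __m128i *)kvalues_iq4nl);
        }
      private:
        __m128i iq4nlt;                    // mirrors the added member
    };

    int main() {
        Iq4nlTableSketch t;                // table load happens here
        (void)t;
    }

Reloading the table on each construction is negligible next to a matrix multiplication, and the pattern sidesteps static-initialization-order issues as well as executing a vector instruction at program startup; the diff itself does not state its motivation, so treat that rationale as an inference.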