From df8de350e325d5e53d6efb4cc27c751c1d12992a Mon Sep 17 00:00:00 2001
From: Ramkumar Ramachandra
Date: Wed, 10 Jul 2024 18:23:08 +0100
Subject: [PATCH 1/5] ADT/Matrix: two-dimensional Container with View

Leverage the excellent SmallVector infrastructure to write a
two-dimensional container, along with a View that abstracts out
indexing-arithmetic, eliminating memory operations on the underlying
storage. The immediate applicability of Matrix is to replace uses of
the vector-of-vectors idiom, with one caveat: an upper bound on the
number of columns should be known ahead of time.
---
 llvm/include/llvm/ADT/ArrayRef.h  |   2 +-
 llvm/include/llvm/ADT/Matrix.h    | 339 ++++++++++++++++++++++++++++++
 llvm/unittests/ADT/CMakeLists.txt |   1 +
 llvm/unittests/ADT/MatrixTest.cpp | 325 ++++++++++++++++++++++++++++
 4 files changed, 666 insertions(+), 1 deletion(-)
 create mode 100644 llvm/include/llvm/ADT/Matrix.h
 create mode 100644 llvm/unittests/ADT/MatrixTest.cpp

diff --git a/llvm/include/llvm/ADT/ArrayRef.h b/llvm/include/llvm/ADT/ArrayRef.h
index 1c6799f1c56ed..d3d8a9ed6ca03 100644
--- a/llvm/include/llvm/ADT/ArrayRef.h
+++ b/llvm/include/llvm/ADT/ArrayRef.h
@@ -52,7 +52,7 @@ namespace llvm {
     using size_type = size_t;
     using difference_type = ptrdiff_t;

-  private:
+  protected:
    /// The start of the array, in an external buffer.
    const T *Data = nullptr;

diff --git a/llvm/include/llvm/ADT/Matrix.h b/llvm/include/llvm/ADT/Matrix.h
new file mode 100644
index 0000000000000..52587352862f9
--- /dev/null
+++ b/llvm/include/llvm/ADT/Matrix.h
@@ -0,0 +1,339 @@
+//===- Matrix.h - Two-dimensional Container with View -----------*- C++ -*-===//
+//
+// Part of the LLVM Project, under the Apache License v2.0 with LLVM Exceptions.
+// See https://llvm.org/LICENSE.txt for license information.
+// SPDX-License-Identifier: Apache-2.0 WITH LLVM-exception
+//
+//===----------------------------------------------------------------------===//
+
+#ifndef LLVM_ADT_MATRIX_H
+#define LLVM_ADT_MATRIX_H
+
+#include "llvm/ADT/ArrayRef.h"
+#include "llvm/ADT/SmallVector.h"
+
+namespace llvm {
+template class JaggedArrayView;
+
+/// Due to the SmallVector infrastructure using SmallVectorAlignmentAndOffset
+/// that depends on the exact data layout, no derived classes can have extra
+/// members.
+template
+struct MatrixStorageBase : public SmallVectorImpl, SmallVectorStorage {
+  MatrixStorageBase() : SmallVectorImpl(N) {}
+  MatrixStorageBase(size_t Size) : SmallVectorImpl(N) { resize(Size); }
+  ~MatrixStorageBase() { destroy_range(this->begin(), this->end()); }
+  MatrixStorageBase(const MatrixStorageBase &RHS) : SmallVectorImpl(N) {
+    if (!RHS.empty())
+      SmallVectorImpl::operator=(RHS);
+  }
+  MatrixStorageBase(MatrixStorageBase &&RHS) : SmallVectorImpl(N) {
+    if (!RHS.empty())
+      SmallVectorImpl::operator=(::std::move(RHS));
+  }
+  using SmallVectorImpl::size;
+  using SmallVectorImpl::resize;
+  using SmallVectorImpl::append;
+  using SmallVectorImpl::erase;
+  using SmallVectorImpl::destroy_range;
+
+  T *begin() const { return const_cast(SmallVectorImpl::begin()); }
+  T *end() const { return const_cast(SmallVectorImpl::end()); }
+};
+
+/// A two-dimensional container storage, whose upper bound on the number of
+/// columns should be known ahead of time. Not meant to be used directly: the
+/// primary usage API is JaggedArrayView.
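+///
+/// A minimal usage sketch (illustrative only; assumes the default inline-size
+/// template parameters):
+///
+///   MatrixStorage<int> Storage(/*NRows=*/2, /*NCols=*/3);
+///   JaggedArrayView<int> View(Storage); // full view: 2 rows of 3 columns
+///   View[0] = {1, 2, 3};                // writes through to Storage
+///   View.addRow({4, 5, 6});             // appends a row to Storage
+///   int X = View[2][1];                 // X == 5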
+template ::value> +class MatrixStorage { +public: + MatrixStorage() = delete; + MatrixStorage(size_t NRows, size_t NCols) + : Base(NRows * NCols), NCols(NCols) {} + MatrixStorage(size_t NCols) : Base(), NCols(NCols) {} + + size_t size() const { return Base.size(); } + bool empty() const { return !size(); } + size_t getNumRows() const { return size() / NCols; } + size_t getNumCols() const { return NCols; } + void setNumCols(size_t NCols) { + assert(empty() && "Column-resizing a non-empty MatrixStorage"); + this->NCols = NCols; + } + void resize(size_t NRows) { Base.resize(NCols * NRows); } + +protected: + template + friend class JaggedArrayView; + + T *begin() const { return Base.begin(); } + T *rowFromIdx(size_t RowIdx, size_t Offset = 0) const { + return begin() + RowIdx * NCols + Offset; + } + std::pair idxFromRow(T *Ptr) const { + assert(Ptr >= begin() && "Internal error"); + size_t Offset = (Ptr - begin()) % NCols; + return {(Ptr - begin()) / NCols, Offset}; + } + + // If Arg.size() < NCols, the number of columns won't be changed, and the + // difference is default-constructed. + void addRow(const SmallVectorImpl &Arg) { + assert(Arg.size() <= NCols && + "MatrixStorage has insufficient number of columns"); + size_t Diff = NCols - Arg.size(); + Base.append(Arg.begin(), Arg.end()); + Base.append(Diff, T()); + } + +private: + MatrixStorageBase Base; + size_t NCols; +}; + +/// MutableArrayRef with a copy-assign, and extra APIs. +template +struct [[nodiscard]] MutableRowView : public MutableArrayRef { + using pointer = typename MutableArrayRef::pointer; + using iterator = typename MutableArrayRef::iterator; + using const_iterator = typename MutableArrayRef::const_iterator; + + MutableRowView() = delete; + MutableRowView(pointer Data, size_t Length) + : MutableArrayRef(Data, Length) {} + MutableRowView(iterator Begin, iterator End) + : MutableArrayRef(Begin, End) {} + MutableRowView(const_iterator Begin, const_iterator End) + : MutableArrayRef(Begin, End) {} + MutableRowView(MutableArrayRef Other) + : MutableArrayRef(Other.data(), Other.size()) {} + MutableRowView(const SmallVectorImpl &Vec) : MutableArrayRef(Vec) {} + + using MutableArrayRef::size; + using MutableArrayRef::data; + + T &back() const { return MutableArrayRef::back(); } + T &front() const { return MutableArrayRef::front(); } + MutableRowView drop_back(size_t N = 1) const { // NOLINT + return MutableArrayRef::drop_back(N); + } + MutableRowView drop_front(size_t N = 1) const { // NOLINT + return MutableArrayRef::drop_front(N); + } + // This slice is different from the MutableArrayRef slice, and specifies a + // Begin and End index, instead of a Begin and Length. 
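+  // For example, slice(1, 3) covers the elements at indices 1 and 2.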
+  MutableRowView slice(size_t Begin, size_t End) {
+    return MutableArrayRef::slice(Begin, End - Begin);
+  }
+  void pop_back(size_t N = 1) { // NOLINT
+    this->Length -= N;
+  }
+  void pop_front(size_t N = 1) { // NOLINT
+    this->Data += N;
+    this->Length -= N;
+  }
+
+  MutableRowView &operator=(const SmallVectorImpl &Vec) {
+    copy_assign(Vec.begin(), Vec.end());
+    return *this;
+  }
+  MutableRowView &operator=(std::initializer_list IL) {
+    copy_assign(IL.begin(), IL.end());
+    return *this;
+  }
+
+  void swap(MutableRowView &Other) {
+    std::swap(this->Data, Other.Data);
+    std::swap(this->Length, Other.Length);
+  }
+
+protected:
+  void copy_assign(iterator Begin, iterator End) { // NOLINT
+    std::uninitialized_copy(Begin, End, data());
+    this->Length = End - Begin;
+  }
+  void copy_assign(const_iterator Begin, const_iterator End) { // NOLINT
+    std::uninitialized_copy(Begin, End, data());
+    this->Length = End - Begin;
+  }
+};
+
+/// The primary usage API of MatrixStorage. Abstracts out indexing-arithmetic,
+/// eliminating memory operations on the underlying data. Supports
+/// variable-length columns.
+template ::value,
+          size_t NStorageInline =
+              CalculateSmallVectorDefaultInlinedElements::value>
+class [[nodiscard]] JaggedArrayView {
+public:
+  using row_type = MutableRowView;
+  using container_type = SmallVector;
+  using iterator = typename container_type::iterator;
+  using const_iterator = typename container_type::const_iterator;
+
+  constexpr JaggedArrayView(MatrixStorage &Mat,
+                            size_t RowSpan, size_t ColSpan)
+      : Mat(Mat) {
+    RowView.reserve(RowSpan);
+    for (size_t RowIdx = 0; RowIdx < RowSpan; ++RowIdx) {
+      auto RangeBegin = Mat.begin() + RowIdx * ColSpan;
+      RowView.emplace_back(RangeBegin, RangeBegin + ColSpan);
+    }
+  }
+
+  // Constructor with a full View of the underlying MatrixStorage, if
+  // MatrixStorage has a non-zero number of Columns. Otherwise, creates an empty
+  // view.
+  constexpr JaggedArrayView(MatrixStorage &Mat)
+      : JaggedArrayView(Mat, Mat.getNumRows(), Mat.getNumCols()) {}
+
+  // Obvious copy-constructor is deleted, since the underlying storage could
+  // have changed.
+  constexpr JaggedArrayView(const JaggedArrayView &) = delete;
+
+  // Copy-assignment operator should not be used when the underlying storage
+  // changes.
+  constexpr JaggedArrayView &operator=(const JaggedArrayView &Other) {
+    assert(Mat.begin() == Other.Mat.begin() &&
+           "Underlying storage has changed: use custom copy-constructor");
+    RowView = Other.RowView;
+    return *this;
+  }
+
+  // The actual copy-constructor: to be used when the underlying storage is
+  // copy-constructed.
+  JaggedArrayView(const JaggedArrayView &OldView,
+                  MatrixStorage &NewMat)
+      : Mat(NewMat) {
+    assert(OldView.Mat.size() == Mat.size() &&
+           "Custom copy-constructor called on non-copied storage");
+
+    // The underlying storage will change. Construct a new RowView by performing
+    // pointer-arithmetic on the underlying storage of OldView, using pointers
+    // from OldView.
+    for (const auto &R : OldView.RowView) {
+      auto [StorageIdx, StartOffset] = OldView.Mat.idxFromRow(R.data());
+      RowView.emplace_back(Mat.rowFromIdx(StorageIdx, StartOffset), R.size());
+    }
+  }
+
+  void addRow(const SmallVectorImpl &Row) {
+    // The underlying storage may be resized, performing reallocations. The
+    // pointers in RowView will no longer be valid, so save and restore the
+    // data. Construct RestoreData by performing pointer-arithmetic on the
+    // underlying storage.
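+    // Each RestoreData entry records (row index in storage, column offset,
+    // row length) for one row of the view.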
+ SmallVector> RestoreData; + RestoreData.reserve(RowView.size()); + for (const auto &R : RowView) { + auto [StorageIdx, StartOffset] = Mat.idxFromRow(R.data()); + RestoreData.emplace_back(StorageIdx, StartOffset, R.size()); + } + + Mat.addRow(Row); + + // Restore the RowView by performing pointer-arithmetic on the + // possibly-reallocated storage, using information from RestoreData. + RowView.clear(); + for (const auto &[StorageIdx, StartOffset, Len] : RestoreData) + RowView.emplace_back(Mat.rowFromIdx(StorageIdx, StartOffset), Len); + + // Finally, add the new row to the VRowView. + RowView.emplace_back(Mat.rowFromIdx(Mat.getNumRows() - 1), Row.size()); + } + + // To support addRow(View[Idx]). + void addRow(const row_type &Row) { addRow(SmallVector{Row}); } + + void addRow(std::initializer_list Row) { addRow(SmallVector{Row}); } + + constexpr row_type &operator[](size_t RowIdx) { + assert(RowIdx < RowView.size() && "Indexing out of bounds"); + return RowView[RowIdx]; + } + + constexpr T *data() const { + assert(!empty() && "Non-empty view expected"); + return RowView.front().data(); + } + size_t size() const { return getRowSpan() * getMaxColSpan(); } + bool empty() const { return RowView.empty(); } + size_t getRowSpan() const { return RowView.size(); } + size_t getColSpan(size_t RowIdx) const { + assert(RowIdx < RowView.size() && "Indexing out of bounds"); + return RowView[RowIdx].size(); + } + constexpr size_t getMaxColSpan() const { + return std::max_element(RowView.begin(), RowView.end(), + [](const row_type &RowA, const row_type &RowB) { + return RowA.size() < RowB.size(); + }) + ->size(); + } + + iterator begin() { return RowView.begin(); } + iterator end() { return RowView.end(); } + const_iterator begin() const { return RowView.begin(); } + const_iterator end() const { return RowView.end(); } + + constexpr JaggedArrayView rowSlice(size_t Begin, + size_t End) { + assert(Begin < getRowSpan() && End <= getRowSpan() && + "Indexing out of bounds"); + assert(Begin < End && "Invalid slice"); + container_type NewRowView; + for (size_t RowIdx = Begin; RowIdx < End; ++RowIdx) + NewRowView.emplace_back(RowView[RowIdx]); + return {Mat, std::move(NewRowView)}; + } + + constexpr JaggedArrayView colSlice(size_t Begin, + size_t End) { + assert(Begin < End && "Invalid slice"); + size_t MinColSpan = + std::min_element(RowView.begin(), RowView.end(), + [](const row_type &RowA, const row_type &RowB) { + return RowA.size() < RowB.size(); + }) + ->size(); + assert(Begin < MinColSpan && End <= MinColSpan && "Indexing out of bounds"); + container_type NewRowView; + for (row_type Row : RowView) + NewRowView.emplace_back(Row.slice(Begin, End)); + return {Mat, std::move(NewRowView)}; + } + + row_type &lastRow() { + assert(!empty() && "Non-empty view expected"); + return RowView.back(); + } + const row_type &lastRow() const { + assert(!empty() && "Non-empty view expected"); + return RowView.back(); + } + void dropLastRow() { + assert(!empty() && "Non-empty view expected"); + RowView.pop_back(); + } + +protected: + // Helper constructor. 
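+  // Used by rowSlice and colSlice to wrap an already-computed set of row
+  // views over the same storage.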
+ constexpr JaggedArrayView(MatrixStorage &Mat, + SmallVectorImpl &&RowView) + : Mat(Mat), RowView(std::move(RowView)) {} + +private: + MatrixStorage &Mat; + container_type RowView; +}; +} // namespace llvm + +namespace std { +template +inline void swap(llvm::MutableRowView &LHS, llvm::MutableRowView &RHS) { + LHS.swap(RHS); +} +} // end namespace std + +#endif diff --git a/llvm/unittests/ADT/CMakeLists.txt b/llvm/unittests/ADT/CMakeLists.txt index f48d840a10595..cd04778e86e2d 100644 --- a/llvm/unittests/ADT/CMakeLists.txt +++ b/llvm/unittests/ADT/CMakeLists.txt @@ -54,6 +54,7 @@ add_llvm_unittest(ADTTests LazyAtomicPointerTest.cpp MappedIteratorTest.cpp MapVectorTest.cpp + MatrixTest.cpp PackedVectorTest.cpp PagedVectorTest.cpp PointerEmbeddedIntTest.cpp diff --git a/llvm/unittests/ADT/MatrixTest.cpp b/llvm/unittests/ADT/MatrixTest.cpp new file mode 100644 index 0000000000000..2f6843c0615a1 --- /dev/null +++ b/llvm/unittests/ADT/MatrixTest.cpp @@ -0,0 +1,325 @@ +#include "llvm/ADT/Matrix.h" +#include "llvm/ADT/DynamicAPInt.h" +#include "gtest/gtest.h" + +using namespace llvm; + +template static SmallVector getDummyValues(size_t NValues) { + SmallVector Ret(NValues); + for (size_t Idx = 0; Idx < NValues; ++Idx) + Ret[Idx] = T(Idx + 1); + return Ret; +} + +template class MatrixTest : public testing::Test { +protected: + MatrixTest() + : ColumnInitMatrix(8), SmallMatrix(2, 2), OtherSmall(3, 3), + LargeMatrix(16, 16) {} + MatrixStorage ColumnInitMatrix; + MatrixStorage SmallMatrix; + MatrixStorage OtherSmall; + MatrixStorage LargeMatrix; + SmallVector getDummyRow(size_t NValues) { + return getDummyValues(NValues); + } +}; + +using MatrixTestTypes = ::testing::Types; +TYPED_TEST_SUITE(MatrixTest, MatrixTestTypes, ); + +TYPED_TEST(MatrixTest, Construction) { + auto &E = this->ColumnInitMatrix; + ASSERT_TRUE(E.empty()); + EXPECT_EQ(E.getNumCols(), 8u); + E.setNumCols(3); + EXPECT_EQ(E.getNumCols(), 3u); + EXPECT_TRUE(E.empty()); + EXPECT_EQ(E.getNumRows(), 0u); + auto &M = this->SmallMatrix; + EXPECT_FALSE(M.empty()); + EXPECT_EQ(M.size(), 4u); + EXPECT_EQ(M.getNumRows(), 2u); + EXPECT_EQ(M.getNumCols(), 2u); +} + +TYPED_TEST(MatrixTest, CopyConstruction) { + auto &OldMat = this->SmallMatrix; + auto V = JaggedArrayView{OldMat}; + V[0] = this->getDummyRow(2); + V[0].pop_back(); + V[1] = this->getDummyRow(2); + V[1].pop_front(); + ASSERT_EQ(V.getRowSpan(), 2u); + ASSERT_EQ(V.getColSpan(0), 1u); + ASSERT_EQ(V.getColSpan(1), 1u); + EXPECT_EQ(V[0][0], 1); + EXPECT_EQ(V[1][0], 2); + EXPECT_EQ(JaggedArrayView{OldMat}[0][0], 1); + EXPECT_EQ(JaggedArrayView{OldMat}[0][1], 2); + EXPECT_EQ(JaggedArrayView{OldMat}[1][0], 1); + EXPECT_EQ(JaggedArrayView{OldMat}[1][1], 2); + MatrixStorage NewMat{OldMat}; + JaggedArrayView C{V, NewMat}; + ASSERT_EQ(C.getRowSpan(), 2u); + ASSERT_EQ(C.getColSpan(0), 1u); + ASSERT_EQ(C.getColSpan(1), 1u); + EXPECT_EQ(C[0][0], 1); + EXPECT_EQ(C[1][0], 2); + C.addRow(this->getDummyRow(2)); + EXPECT_EQ(C[2][0], 1); + EXPECT_EQ(C[2][1], 2); +} + +TYPED_TEST(MatrixTest, RowOps) { + auto &M = this->SmallMatrix; + auto &O = this->OtherSmall; + JaggedArrayView V{M}; + ASSERT_EQ(M.getNumRows(), 2u); + ASSERT_EQ(V.getRowSpan(), 2u); + V[0] = this->getDummyRow(2); + V = JaggedArrayView{M}; + EXPECT_EQ(V[0][0], 1); + EXPECT_EQ(V[0][1], 2); + ASSERT_EQ(M.getNumRows(), 2u); + V.addRow({TypeParam(4), TypeParam(5)}); + ASSERT_EQ(M.getNumRows(), 3u); + EXPECT_EQ(V[2][0], 4); + EXPECT_EQ(V[2][1], 5); + ASSERT_EQ(O.getNumRows(), 3u); + JaggedArrayView W{O}; + W.addRow(V[0]); + 
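+  // V[0] has only two elements while O has three columns: the remaining
+  // element of the newly-added storage row is default-constructed.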
ASSERT_EQ(O.getNumCols(), 3u); + ASSERT_EQ(O.getNumRows(), 4u); + W[3] = {TypeParam(7), TypeParam(8)}; + auto &WRow3 = W[3]; + WRow3 = WRow3.drop_back(); + ASSERT_EQ(WRow3[0], 7); + WRow3 = WRow3.drop_back(); + EXPECT_TRUE(WRow3.empty()); + ASSERT_EQ(W.getColSpan(3), 0u); + EXPECT_EQ(W[0][2], 0); + EXPECT_EQ(W[1][2], 0); + EXPECT_EQ(W[2][2], 0); + W = JaggedArrayView{O}; + EXPECT_EQ(W[0][2], 0); + EXPECT_EQ(W[1][2], 0); + EXPECT_EQ(W[2][2], 0); + EXPECT_EQ(W[3][2], 0); +} + +TYPED_TEST(MatrixTest, ResizeAssign) { + auto &M = this->SmallMatrix; + M.resize(3); + ASSERT_EQ(M.getNumRows(), 3u); + JaggedArrayView V{M}; + V[0] = this->getDummyRow(2); + V[1] = this->getDummyRow(2); + V[2] = this->getDummyRow(2); + ASSERT_EQ(M.getNumRows(), 3u); + EXPECT_EQ(V[0][0], V[1][0]); + EXPECT_EQ(V[2][1], V[1][1]); +} + +TYPED_TEST(MatrixTest, ColSlice) { + auto &M = this->OtherSmall; + JaggedArrayView V{M}; + V[0] = {TypeParam(3), TypeParam(7)}; + V[1] = {TypeParam(4), TypeParam(5)}; + V[2] = {TypeParam(8), TypeParam(9)}; + auto W = V.colSlice(1, 2); + ASSERT_EQ(W.getRowSpan(), 3u); + ASSERT_EQ(W.getColSpan(0), 1u); + EXPECT_EQ(V[0][0], 3); + EXPECT_EQ(V[0][1], 7); + EXPECT_EQ(V[1][0], 4); + EXPECT_EQ(V[1][1], 5); + EXPECT_EQ(V[2][0], 8); + EXPECT_EQ(V[2][1], 9); + EXPECT_EQ(W[0][0], 7); + EXPECT_EQ(W[1][0], 5); + EXPECT_EQ(W[2][0], 9); +} + +TYPED_TEST(MatrixTest, RowColSlice) { + auto &M = this->OtherSmall; + JaggedArrayView V{M}; + V[0] = {TypeParam(3), TypeParam(7)}; + V[2] = {TypeParam(4), TypeParam(5)}; + auto W = V.rowSlice(0, 2).colSlice(0, 2); + ASSERT_EQ(W.getRowSpan(), 2u); + ASSERT_EQ(W.getColSpan(0), 2u); + EXPECT_EQ(V[0][0], 3); + EXPECT_EQ(V[0][1], 7); + EXPECT_EQ(V[1][0], 0); + EXPECT_EQ(V[1][1], 0); + EXPECT_EQ(V[2][0], 4); + EXPECT_EQ(V[2][1], 5); + EXPECT_EQ(W[0][0], 3); + EXPECT_EQ(W[0][1], 7); + EXPECT_EQ(W[1][0], 0); + EXPECT_EQ(W[1][1], 0); +} + +TYPED_TEST(MatrixTest, LastRowOps) { + auto &M = this->SmallMatrix; + JaggedArrayView V{M}; + V[0] = {TypeParam(3), TypeParam(7)}; + V[1] = {TypeParam(4), TypeParam(5)}; + V.dropLastRow(); + ASSERT_EQ(M.size(), 4u); + ASSERT_EQ(V.size(), 2u); + auto W = V.lastRow(); + ASSERT_EQ(W.size(), 2u); + EXPECT_EQ(W[0], 3); + EXPECT_EQ(W[1], 7); + V.lastRow() = {TypeParam(1), TypeParam(2)}; + EXPECT_EQ(V.lastRow()[0], 1); + EXPECT_EQ(V.lastRow()[1], 2); + V.dropLastRow(); + EXPECT_TRUE(V.empty()); +} + +TYPED_TEST(MatrixTest, Swap) { + auto &M = this->SmallMatrix; + JaggedArrayView V{M}; + V[0] = {TypeParam(3), TypeParam(7)}; + V[1] = {TypeParam(4), TypeParam(5)}; + std::swap(V[0], V[1]); + EXPECT_EQ(V.lastRow()[0], 3); + EXPECT_EQ(V.lastRow()[1], 7); + EXPECT_EQ(V[0][0], 4); + EXPECT_EQ(V[0][1], 5); + EXPECT_EQ(V[1][0], 3); + EXPECT_EQ(V[1][1], 7); + auto W = JaggedArrayView{M}; + EXPECT_EQ(W[0][0], 3); + EXPECT_EQ(W[0][1], 7); + EXPECT_EQ(W[1][0], 4); + EXPECT_EQ(W[1][1], 5); +} + +TYPED_TEST(MatrixTest, DropLastRow) { + auto &M = this->OtherSmall; + auto V = JaggedArrayView{M}; + ASSERT_EQ(V.getRowSpan(), 3u); + V.dropLastRow(); + ASSERT_EQ(V.getRowSpan(), 2u); + V[0] = {TypeParam(19), TypeParam(7), TypeParam(3)}; + V[1] = {TypeParam(19), TypeParam(7), TypeParam(3)}; + ASSERT_EQ(V.getColSpan(1), 3u); + V.addRow(V.lastRow()); + ASSERT_EQ(V.getRowSpan(), 3u); + ASSERT_EQ(V.getColSpan(2), 3u); + EXPECT_EQ(V[1], V.lastRow()); + V.dropLastRow(); + ASSERT_EQ(V.getRowSpan(), 2u); + EXPECT_EQ(V[0], V.lastRow()); + V.addRow(this->getDummyRow(3)); + V.dropLastRow(); + V.addRow(this->getDummyRow(3)); + V.dropLastRow(); + ASSERT_EQ(V.getRowSpan(), 2u); + 
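+  // The rows added and dropped above only grew the underlying storage; the
+  // view is back to the two original rows.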
EXPECT_EQ(V[0], V.lastRow()); + V.dropLastRow(); + V.dropLastRow(); + ASSERT_TRUE(V.empty()); + V.addRow({TypeParam(9), TypeParam(7), TypeParam(3)}); + V.addRow(this->getDummyRow(3)); + ASSERT_EQ(V.getRowSpan(), 2u); + EXPECT_EQ(V.lastRow()[0], 1); + EXPECT_EQ(V.lastRow()[1], 2); + EXPECT_EQ(V.lastRow()[2], 3); + EXPECT_EQ(V[0][0], 9); + EXPECT_EQ(V[0][1], 7); + EXPECT_EQ(V[0][2], 3); + V.dropLastRow(); + V.addRow({TypeParam(21), TypeParam(22), TypeParam(23)}); + ASSERT_EQ(V.getRowSpan(), 2u); + EXPECT_EQ(V.lastRow()[0], 21); + EXPECT_EQ(V.lastRow()[1], 22); + EXPECT_EQ(V.lastRow()[2], 23); +} + +TYPED_TEST(MatrixTest, Iteration) { + auto &M = this->SmallMatrix; + M.resize(2); + JaggedArrayView V{M}; + for (const auto &[RowIdx, Row] : enumerate(V)) { + V[RowIdx] = this->getDummyRow(2); + for (const auto &[ColIdx, Col] : enumerate(Row)) { + EXPECT_GT(V[RowIdx][ColIdx], 0); + } + } +} + +TYPED_TEST(MatrixTest, VariableLengthColumns) { + auto &M = this->ColumnInitMatrix; + JaggedArrayView V{M}; + ASSERT_EQ(V.empty(), true); + size_t NumCols = 6; + size_t NumRows = 3; + SmallVector ColumnVec; + for (size_t Var = 0; Var < NumCols; ++Var) + ColumnVec.emplace_back(Var + 1); + ASSERT_EQ(ColumnVec.size(), NumCols); + for (size_t RowIdx = 0; RowIdx < NumRows; ++RowIdx) + V.addRow(ColumnVec); + ASSERT_EQ(V.getMaxColSpan(), NumCols); + V.addRow({TypeParam(19)}); + ASSERT_EQ(V.getColSpan(NumRows), 1u); + V.dropLastRow(); + V.addRow(V.lastRow()); + ASSERT_EQ(V.getColSpan(NumRows), NumCols); + EXPECT_EQ(V[NumRows - 1], V[NumRows]); + V.dropLastRow(); + ASSERT_EQ(V.getRowSpan(), NumRows); + ASSERT_EQ(M.getNumCols(), 8u); + ASSERT_EQ(M.getNumRows(), NumRows + 2); + V.dropLastRow(); + ASSERT_EQ(V.getRowSpan(), NumRows - 1); + ASSERT_EQ(M.getNumRows(), NumRows + 2); + ASSERT_EQ(V.getColSpan(1), NumCols); + V[1][0] = 19; + std::swap(V[0], V[1]); + V[0].pop_back(); + V[1] = V[1].drop_back().drop_back(); + ASSERT_EQ(V.getColSpan(0), NumCols - 1); + ASSERT_EQ(V.getColSpan(1), NumCols - 2); + EXPECT_EQ(V[0][0], 19); + EXPECT_EQ(V[1][0], 1); + ASSERT_EQ(V.getMaxColSpan(), NumCols - 1); + V = V.colSlice(0, 1).rowSlice(0, 2); + ASSERT_EQ(V.getColSpan(0), 1u); + ASSERT_EQ(V.getColSpan(1), 1u); + EXPECT_EQ(V.getMaxColSpan(), 1u); + ASSERT_EQ(V.getRowSpan(), 2u); + EXPECT_EQ(V[0][0], 19); + EXPECT_EQ(V[1][0], 1); + V.dropLastRow(); + V.dropLastRow(); + EXPECT_TRUE(V.empty()); + EXPECT_EQ(M.size(), 40u); +} + +TYPED_TEST(MatrixTest, LargeMatrixOps) { + auto &M = this->LargeMatrix; + ASSERT_EQ(M.getNumRows(), 16u); + ASSERT_EQ(M.getNumCols(), 16u); + JaggedArrayView V{M}; + V[0] = {TypeParam(1), TypeParam(2), TypeParam(1), TypeParam(4), + TypeParam(5), TypeParam(1), TypeParam(1), TypeParam(8), + TypeParam(9), TypeParam(1), TypeParam(1), TypeParam(12), + TypeParam(13), TypeParam(1), TypeParam(15), TypeParam(1)}; + V[1] = this->getDummyRow(16); + EXPECT_EQ(V[0][14], 15); + EXPECT_EQ(V[0][3], 4); + EXPECT_EQ(std::count(V[0].begin(), V[0].end(), 1), 8); + EXPECT_TRUE( + std::all_of(V[0].begin(), V[0].end(), [](auto &El) { return El > 0; })); + auto W = V.rowSlice(0, 1).colSlice(2, 4); + ASSERT_EQ(W.getRowSpan(), 1u); + ASSERT_EQ(W.getColSpan(0), 2u); + EXPECT_EQ(W[0][0], 1); + EXPECT_EQ(W[0][1], 4); +} From 80487cdb3d3152701e3af1577df4ff2f9655bd5a Mon Sep 17 00:00:00 2001 From: Ramkumar Ramachandra Date: Thu, 18 Jul 2024 16:07:00 +0100 Subject: [PATCH 2/5] JaggedArrayView: introduce writing ops For better cache performance. 
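
Unlike MutableRowView::swap, which only retargets a view's pointer and
length, writing_swap copies the elements through the two rows, so the
data is physically exchanged in the underlying storage; eraseLastRow
likewise pops the row's elements from the storage instead of merely
shrinking the view. A usage sketch (illustrative only; it assumes
Storage is a MatrixStorage of int built elsewhere):

  JaggedArrayView<int> View(Storage);
  View[0].writing_swap(View[1]); // elements copied; views keep their positions
  View.eraseLastRow();           // shrinks both the view and the storage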
--- llvm/include/llvm/ADT/Matrix.h | 38 +++++++++++++++----- llvm/unittests/ADT/MatrixTest.cpp | 58 ++++++++++++++++++++----------- 2 files changed, 66 insertions(+), 30 deletions(-) diff --git a/llvm/include/llvm/ADT/Matrix.h b/llvm/include/llvm/ADT/Matrix.h index 52587352862f9..5350b81664579 100644 --- a/llvm/include/llvm/ADT/Matrix.h +++ b/llvm/include/llvm/ADT/Matrix.h @@ -55,13 +55,17 @@ class MatrixStorage { size_t size() const { return Base.size(); } bool empty() const { return !size(); } - size_t getNumRows() const { return size() / NCols; } + size_t getNumRows() const { + assert(size() % NCols == 0 && "Internal error"); + return size() / NCols; + } size_t getNumCols() const { return NCols; } void setNumCols(size_t NCols) { assert(empty() && "Column-resizing a non-empty MatrixStorage"); this->NCols = NCols; } void resize(size_t NRows) { Base.resize(NCols * NRows); } + void reserve(size_t NRows) { Base.reserve(NCols * NRows); } protected: template @@ -69,6 +73,7 @@ class MatrixStorage { T *begin() const { return Base.begin(); } T *rowFromIdx(size_t RowIdx, size_t Offset = 0) const { + assert(Offset < NCols && "Internal error"); return begin() + RowIdx * NCols + Offset; } std::pair idxFromRow(T *Ptr) const { @@ -87,6 +92,11 @@ class MatrixStorage { Base.append(Diff, T()); } + void eraseLastRow() { + assert(getNumRows() > 0 && "Non-empty MatrixStorage expected"); + Base.pop_back_n(NCols); + } + private: MatrixStorageBase Base; size_t NCols; @@ -108,10 +118,12 @@ struct [[nodiscard]] MutableRowView : public MutableArrayRef { : MutableArrayRef(Begin, End) {} MutableRowView(MutableArrayRef Other) : MutableArrayRef(Other.data(), Other.size()) {} - MutableRowView(const SmallVectorImpl &Vec) : MutableArrayRef(Vec) {} + MutableRowView(SmallVectorImpl &Vec) : MutableArrayRef(Vec) {} using MutableArrayRef::size; using MutableArrayRef::data; + using MutableArrayRef::begin; + using MutableArrayRef::end; T &back() const { return MutableArrayRef::back(); } T &front() const { return MutableArrayRef::front(); } @@ -148,6 +160,13 @@ struct [[nodiscard]] MutableRowView : public MutableArrayRef { std::swap(this->Length, Other.Length); } + // For better cache behavior. + void writing_swap(MutableRowView &Other) { // NOLINT + SmallVector Buf{Other}; + Other.copy_assign(begin(), end()); + copy_assign(Buf.begin(), Buf.end()); + } + protected: void copy_assign(iterator Begin, iterator End) { // NOLINT std::uninitialized_copy(Begin, End, data()); @@ -317,6 +336,14 @@ class [[nodiscard]] JaggedArrayView { RowView.pop_back(); } + // For better cache behavior. To be used with writing_swap. + void eraseLastRow() { + assert(Mat.idxFromRow(lastRow().data()).first == Mat.getNumRows() - 1 && + "Last row does not correspond to last row in storage"); + dropLastRow(); + Mat.eraseLastRow(); + } + protected: // Helper constructor. 
constexpr JaggedArrayView(MatrixStorage &Mat, @@ -329,11 +356,4 @@ class [[nodiscard]] JaggedArrayView { }; } // namespace llvm -namespace std { -template -inline void swap(llvm::MutableRowView &LHS, llvm::MutableRowView &RHS) { - LHS.swap(RHS); -} -} // end namespace std - #endif diff --git a/llvm/unittests/ADT/MatrixTest.cpp b/llvm/unittests/ADT/MatrixTest.cpp index 2f6843c0615a1..d34b3dafcc693 100644 --- a/llvm/unittests/ADT/MatrixTest.cpp +++ b/llvm/unittests/ADT/MatrixTest.cpp @@ -161,31 +161,12 @@ TYPED_TEST(MatrixTest, RowColSlice) { EXPECT_EQ(W[1][1], 0); } -TYPED_TEST(MatrixTest, LastRowOps) { +TYPED_TEST(MatrixTest, NonWritingSwap) { auto &M = this->SmallMatrix; JaggedArrayView V{M}; V[0] = {TypeParam(3), TypeParam(7)}; V[1] = {TypeParam(4), TypeParam(5)}; - V.dropLastRow(); - ASSERT_EQ(M.size(), 4u); - ASSERT_EQ(V.size(), 2u); - auto W = V.lastRow(); - ASSERT_EQ(W.size(), 2u); - EXPECT_EQ(W[0], 3); - EXPECT_EQ(W[1], 7); - V.lastRow() = {TypeParam(1), TypeParam(2)}; - EXPECT_EQ(V.lastRow()[0], 1); - EXPECT_EQ(V.lastRow()[1], 2); - V.dropLastRow(); - EXPECT_TRUE(V.empty()); -} - -TYPED_TEST(MatrixTest, Swap) { - auto &M = this->SmallMatrix; - JaggedArrayView V{M}; - V[0] = {TypeParam(3), TypeParam(7)}; - V[1] = {TypeParam(4), TypeParam(5)}; - std::swap(V[0], V[1]); + V[0].swap(V[1]); EXPECT_EQ(V.lastRow()[0], 3); EXPECT_EQ(V.lastRow()[1], 7); EXPECT_EQ(V[0][0], 4); @@ -241,6 +222,41 @@ TYPED_TEST(MatrixTest, DropLastRow) { EXPECT_EQ(V.lastRow()[2], 23); } +TYPED_TEST(MatrixTest, EraseLastRow) { + auto &M = this->SmallMatrix; + JaggedArrayView V{M}; + V[0] = {TypeParam(3), TypeParam(7)}; + V[1] = {TypeParam(4), TypeParam(5)}; + V.eraseLastRow(); + ASSERT_EQ(M.size(), 2u); + ASSERT_EQ(V.getRowSpan(), 1u); + auto W = V.lastRow(); + ASSERT_EQ(W.size(), 2u); + EXPECT_EQ(W[0], 3); + EXPECT_EQ(W[1], 7); + V.addRow({TypeParam(1), TypeParam(2)}); + ASSERT_EQ(V.getRowSpan(), 2u); + V[0].writing_swap(V[1]); + EXPECT_EQ(V[0][0], 1); + EXPECT_EQ(V[0][1], 2); + EXPECT_EQ(V.lastRow()[0], 3); + EXPECT_EQ(V.lastRow()[1], 7); + V.eraseLastRow(); + V.addRow({TypeParam(3), TypeParam(7)}); + ASSERT_EQ(V.getRowSpan(), 2u); + EXPECT_EQ(V[0][0], 1); + EXPECT_EQ(V[0][1], 2); + EXPECT_EQ(V[1][0], 3); + EXPECT_EQ(V[1][1], 7); + V.eraseLastRow(); + ASSERT_EQ(V.getRowSpan(), 1u); + EXPECT_EQ(V[0][0], 1); + EXPECT_EQ(V[0][1], 2); + V.eraseLastRow(); + EXPECT_TRUE(V.empty()); + EXPECT_TRUE(M.empty()); +} + TYPED_TEST(MatrixTest, Iteration) { auto &M = this->SmallMatrix; M.resize(2); From e368922f8d7c6fa3997b24208c0382d61a32928d Mon Sep 17 00:00:00 2001 From: Ramkumar Ramachandra Date: Thu, 18 Jul 2024 19:10:37 +0100 Subject: [PATCH 3/5] Matrix: add new copy_assign API --- llvm/include/llvm/ADT/Matrix.h | 9 +++++++-- llvm/unittests/ADT/MatrixTest.cpp | 14 +++++++++----- 2 files changed, 16 insertions(+), 7 deletions(-) diff --git a/llvm/include/llvm/ADT/Matrix.h b/llvm/include/llvm/ADT/Matrix.h index 5350b81664579..4c3dedb8cce38 100644 --- a/llvm/include/llvm/ADT/Matrix.h +++ b/llvm/include/llvm/ADT/Matrix.h @@ -161,7 +161,12 @@ struct [[nodiscard]] MutableRowView : public MutableArrayRef { } // For better cache behavior. - void writing_swap(MutableRowView &Other) { // NOLINT + void copy_assign(const MutableRowView &Other) { // NOLINT + copy_assign(Other.begin(), Other.end()); + } + + // For better cache behavior. 
+ void copy_swap(MutableRowView &Other) { // NOLINT SmallVector Buf{Other}; Other.copy_assign(begin(), end()); copy_assign(Buf.begin(), Buf.end()); @@ -336,7 +341,7 @@ class [[nodiscard]] JaggedArrayView { RowView.pop_back(); } - // For better cache behavior. To be used with writing_swap. + // For better cache behavior. To be used with copy_assign or copy_swap. void eraseLastRow() { assert(Mat.idxFromRow(lastRow().data()).first == Mat.getNumRows() - 1 && "Last row does not correspond to last row in storage"); diff --git a/llvm/unittests/ADT/MatrixTest.cpp b/llvm/unittests/ADT/MatrixTest.cpp index d34b3dafcc693..afd9888017798 100644 --- a/llvm/unittests/ADT/MatrixTest.cpp +++ b/llvm/unittests/ADT/MatrixTest.cpp @@ -114,11 +114,15 @@ TYPED_TEST(MatrixTest, ResizeAssign) { ASSERT_EQ(M.getNumRows(), 3u); JaggedArrayView V{M}; V[0] = this->getDummyRow(2); - V[1] = this->getDummyRow(2); - V[2] = this->getDummyRow(2); + V[1].copy_assign(V[0]); + V[2].copy_assign(V[1]); ASSERT_EQ(M.getNumRows(), 3u); - EXPECT_EQ(V[0][0], V[1][0]); - EXPECT_EQ(V[2][1], V[1][1]); + ASSERT_EQ(V.getRowSpan(), 3u); + EXPECT_EQ(V[0], V[1]); + EXPECT_EQ(V[2], V[1]); + V = JaggedArrayView{M}; + EXPECT_EQ(V[0], V[1]); + EXPECT_EQ(V[2], V[1]); } TYPED_TEST(MatrixTest, ColSlice) { @@ -236,7 +240,7 @@ TYPED_TEST(MatrixTest, EraseLastRow) { EXPECT_EQ(W[1], 7); V.addRow({TypeParam(1), TypeParam(2)}); ASSERT_EQ(V.getRowSpan(), 2u); - V[0].writing_swap(V[1]); + V[0].copy_swap(V[1]); EXPECT_EQ(V[0][0], 1); EXPECT_EQ(V[0][1], 2); EXPECT_EQ(V.lastRow()[0], 3); From a6a7e0e64847aa313d419d55285ee4f84fc8047f Mon Sep 17 00:00:00 2001 From: Ramkumar Ramachandra Date: Thu, 18 Jul 2024 21:05:56 +0100 Subject: [PATCH 4/5] Matrix: optimize --- llvm/include/llvm/ADT/Matrix.h | 164 ++++++++++++++++++++++----------- 1 file changed, 110 insertions(+), 54 deletions(-) diff --git a/llvm/include/llvm/ADT/Matrix.h b/llvm/include/llvm/ADT/Matrix.h index 4c3dedb8cce38..87e9ba36425cf 100644 --- a/llvm/include/llvm/ADT/Matrix.h +++ b/llvm/include/llvm/ADT/Matrix.h @@ -11,6 +11,7 @@ #include "llvm/ADT/ArrayRef.h" #include "llvm/ADT/SmallVector.h" +#include "llvm/Support/Compiler.h" namespace llvm { template class JaggedArrayView; @@ -20,14 +21,21 @@ template class JaggedArrayView; /// members. 
template struct MatrixStorageBase : public SmallVectorImpl, SmallVectorStorage { - MatrixStorageBase() : SmallVectorImpl(N) {} - MatrixStorageBase(size_t Size) : SmallVectorImpl(N) { resize(Size); } - ~MatrixStorageBase() { destroy_range(this->begin(), this->end()); } - MatrixStorageBase(const MatrixStorageBase &RHS) : SmallVectorImpl(N) { + LLVM_ATTRIBUTE_ALWAYS_INLINE MatrixStorageBase() : SmallVectorImpl(N) {} + LLVM_ATTRIBUTE_ALWAYS_INLINE MatrixStorageBase(size_t Size) + : SmallVectorImpl(N) { + resize(Size); + } + LLVM_ATTRIBUTE_ALWAYS_INLINE ~MatrixStorageBase() { + destroy_range(this->begin(), this->end()); + } + LLVM_ATTRIBUTE_ALWAYS_INLINE MatrixStorageBase(const MatrixStorageBase &RHS) + : SmallVectorImpl(N) { if (!RHS.empty()) SmallVectorImpl::operator=(RHS); } - MatrixStorageBase(MatrixStorageBase &&RHS) : SmallVectorImpl(N) { + LLVM_ATTRIBUTE_ALWAYS_INLINE MatrixStorageBase(MatrixStorageBase &&RHS) + : SmallVectorImpl(N) { if (!RHS.empty()) SmallVectorImpl::operator=(::std::move(RHS)); } @@ -36,9 +44,14 @@ struct MatrixStorageBase : public SmallVectorImpl, SmallVectorStorage { using SmallVectorImpl::append; using SmallVectorImpl::erase; using SmallVectorImpl::destroy_range; + using SmallVectorImpl::isSafeToReferenceAfterResize; - T *begin() const { return const_cast(SmallVectorImpl::begin()); } - T *end() const { return const_cast(SmallVectorImpl::end()); } + LLVM_ATTRIBUTE_ALWAYS_INLINE T *begin() const { + return const_cast(SmallVectorImpl::begin()); + } + LLVM_ATTRIBUTE_ALWAYS_INLINE T *end() const { + return const_cast(SmallVectorImpl::end()); + } }; /// A two-dimensional container storage, whose upper bound on the number of @@ -53,30 +66,36 @@ class MatrixStorage { : Base(NRows * NCols), NCols(NCols) {} MatrixStorage(size_t NCols) : Base(), NCols(NCols) {} - size_t size() const { return Base.size(); } - bool empty() const { return !size(); } - size_t getNumRows() const { + LLVM_ATTRIBUTE_ALWAYS_INLINE size_t size() const { return Base.size(); } + LLVM_ATTRIBUTE_ALWAYS_INLINE bool empty() const { return !size(); } + LLVM_ATTRIBUTE_ALWAYS_INLINE size_t getNumRows() const { assert(size() % NCols == 0 && "Internal error"); return size() / NCols; } - size_t getNumCols() const { return NCols; } - void setNumCols(size_t NCols) { + LLVM_ATTRIBUTE_ALWAYS_INLINE size_t getNumCols() const { return NCols; } + LLVM_ATTRIBUTE_ALWAYS_INLINE void setNumCols(size_t NCols) { assert(empty() && "Column-resizing a non-empty MatrixStorage"); this->NCols = NCols; } - void resize(size_t NRows) { Base.resize(NCols * NRows); } - void reserve(size_t NRows) { Base.reserve(NCols * NRows); } + LLVM_ATTRIBUTE_ALWAYS_INLINE void resize(size_t NRows) { + Base.resize(NCols * NRows); + } + LLVM_ATTRIBUTE_ALWAYS_INLINE void reserve(size_t NRows) { + Base.reserve(NCols * NRows); + } protected: template friend class JaggedArrayView; - T *begin() const { return Base.begin(); } - T *rowFromIdx(size_t RowIdx, size_t Offset = 0) const { + LLVM_ATTRIBUTE_ALWAYS_INLINE T *begin() const { return Base.begin(); } + LLVM_ATTRIBUTE_ALWAYS_INLINE T *rowFromIdx(size_t RowIdx, + size_t Offset = 0) const { assert(Offset < NCols && "Internal error"); return begin() + RowIdx * NCols + Offset; } - std::pair idxFromRow(T *Ptr) const { + LLVM_ATTRIBUTE_ALWAYS_INLINE std::pair + idxFromRow(T *Ptr) const { assert(Ptr >= begin() && "Internal error"); size_t Offset = (Ptr - begin()) % NCols; return {(Ptr - begin()) / NCols, Offset}; @@ -84,7 +103,7 @@ class MatrixStorage { // If Arg.size() < NCols, the number of columns won't be 
changed, and the // difference is default-constructed. - void addRow(const SmallVectorImpl &Arg) { + LLVM_ATTRIBUTE_ALWAYS_INLINE void addRow(const SmallVectorImpl &Arg) { assert(Arg.size() <= NCols && "MatrixStorage has insufficient number of columns"); size_t Diff = NCols - Arg.size(); @@ -92,11 +111,15 @@ class MatrixStorage { Base.append(Diff, T()); } - void eraseLastRow() { + LLVM_ATTRIBUTE_ALWAYS_INLINE void eraseLastRow() { assert(getNumRows() > 0 && "Non-empty MatrixStorage expected"); Base.pop_back_n(NCols); } + LLVM_ATTRIBUTE_ALWAYS_INLINE bool willReallocateOnAddRow() const { + return Base.capacity() < Base.size() + NCols; + } + private: MatrixStorageBase Base; size_t NCols; @@ -125,59 +148,72 @@ struct [[nodiscard]] MutableRowView : public MutableArrayRef { using MutableArrayRef::begin; using MutableArrayRef::end; - T &back() const { return MutableArrayRef::back(); } - T &front() const { return MutableArrayRef::front(); } - MutableRowView drop_back(size_t N = 1) const { // NOLINT + LLVM_ATTRIBUTE_ALWAYS_INLINE T &back() const { + return MutableArrayRef::back(); + } + LLVM_ATTRIBUTE_ALWAYS_INLINE T &front() const { + return MutableArrayRef::front(); + } + LLVM_ATTRIBUTE_ALWAYS_INLINE MutableRowView + drop_back(size_t N = 1) const { // NOLINT return MutableArrayRef::drop_back(N); } - MutableRowView drop_front(size_t N = 1) const { // NOLINT + LLVM_ATTRIBUTE_ALWAYS_INLINE MutableRowView + drop_front(size_t N = 1) const { // NOLINT return MutableArrayRef::drop_front(N); } // This slice is different from the MutableArrayRef slice, and specifies a // Begin and End index, instead of a Begin and Length. - MutableRowView slice(size_t Begin, size_t End) { + LLVM_ATTRIBUTE_ALWAYS_INLINE MutableRowView slice(size_t Begin, + size_t End) { return MutableArrayRef::slice(Begin, End - Begin); } - void pop_back(size_t N = 1) { // NOLINT + LLVM_ATTRIBUTE_ALWAYS_INLINE void pop_back(size_t N = 1) { // NOLINT this->Length -= N; } - void pop_front(size_t N = 1) { // NOLINT + LLVM_ATTRIBUTE_ALWAYS_INLINE void pop_front(size_t N = 1) { // NOLINT this->Data += N; this->Length -= N; } - MutableRowView &operator=(const SmallVectorImpl &Vec) { + LLVM_ATTRIBUTE_ALWAYS_INLINE MutableRowView & + operator=(const SmallVectorImpl &Vec) { copy_assign(Vec.begin(), Vec.end()); return *this; } - MutableRowView &operator=(std::initializer_list IL) { + LLVM_ATTRIBUTE_ALWAYS_INLINE MutableRowView & + operator=(std::initializer_list IL) { copy_assign(IL.begin(), IL.end()); return *this; } - void swap(MutableRowView &Other) { + LLVM_ATTRIBUTE_ALWAYS_INLINE void swap(MutableRowView &Other) { std::swap(this->Data, Other.Data); std::swap(this->Length, Other.Length); } // For better cache behavior. - void copy_assign(const MutableRowView &Other) { // NOLINT + LLVM_ATTRIBUTE_ALWAYS_INLINE void + copy_assign(const MutableRowView &Other) { // NOLINT copy_assign(Other.begin(), Other.end()); } // For better cache behavior. 
- void copy_swap(MutableRowView &Other) { // NOLINT + LLVM_ATTRIBUTE_ALWAYS_INLINE void + copy_swap(MutableRowView &Other) { // NOLINT SmallVector Buf{Other}; Other.copy_assign(begin(), end()); copy_assign(Buf.begin(), Buf.end()); } protected: - void copy_assign(iterator Begin, iterator End) { // NOLINT + LLVM_ATTRIBUTE_ALWAYS_INLINE void copy_assign(iterator Begin, + iterator End) { // NOLINT std::uninitialized_copy(Begin, End, data()); this->Length = End - Begin; } - void copy_assign(const_iterator Begin, const_iterator End) { // NOLINT + LLVM_ATTRIBUTE_ALWAYS_INLINE void copy_assign(const_iterator Begin, + const_iterator End) { // NOLINT std::uninitialized_copy(Begin, End, data()); this->Length = End - Begin; } @@ -197,8 +233,8 @@ class [[nodiscard]] JaggedArrayView { using iterator = typename container_type::iterator; using const_iterator = typename container_type::const_iterator; - constexpr JaggedArrayView(MatrixStorage &Mat, - size_t RowSpan, size_t ColSpan) + LLVM_ATTRIBUTE_ALWAYS_INLINE constexpr JaggedArrayView( + MatrixStorage &Mat, size_t RowSpan, size_t ColSpan) : Mat(Mat) { RowView.reserve(RowSpan); for (size_t RowIdx = 0; RowIdx < RowSpan; ++RowIdx) { @@ -210,7 +246,8 @@ class [[nodiscard]] JaggedArrayView { // Constructor with a full View of the underlying MatrixStorage, if // MatrixStorage has a non-zero number of Columns. Otherwise, creates an empty // view. - constexpr JaggedArrayView(MatrixStorage &Mat) + LLVM_ATTRIBUTE_ALWAYS_INLINE constexpr JaggedArrayView( + MatrixStorage &Mat) : JaggedArrayView(Mat, Mat.getNumRows(), Mat.getNumCols()) {} // Obvious copy-construator is deleted, since the underlying storage could @@ -219,7 +256,8 @@ class [[nodiscard]] JaggedArrayView { // Copy-assignment operator should not be used when the underlying storage // changes. - constexpr JaggedArrayView &operator=(const JaggedArrayView &Other) { + LLVM_ATTRIBUTE_ALWAYS_INLINE constexpr JaggedArrayView & + operator=(const JaggedArrayView &Other) { assert(Mat.begin() == Other.Mat.begin() && "Underlying storage has changed: use custom copy-constructor"); RowView = Other.RowView; @@ -244,6 +282,13 @@ class [[nodiscard]] JaggedArrayView { } void addRow(const SmallVectorImpl &Row) { + // Optimization when we know that the underying storage won't be resized. + if (LLVM_LIKELY(!Mat.willReallocateOnAddRow())) { + Mat.addRow(Row); + RowView.emplace_back(Mat.rowFromIdx(Mat.getNumRows() - 1), Row.size()); + return; + } + // The underlying storage may be resized, performing reallocations. The // pointers in RowView will no longer be valid, so save and restore the // data. Construct RestoreData by performing pointer-arithmetic on the @@ -268,23 +313,29 @@ class [[nodiscard]] JaggedArrayView { } // To support addRow(View[Idx]). 
- void addRow(const row_type &Row) { addRow(SmallVector{Row}); } + LLVM_ATTRIBUTE_ALWAYS_INLINE void addRow(const row_type &Row) { + addRow(SmallVector{Row}); + } - void addRow(std::initializer_list Row) { addRow(SmallVector{Row}); } + LLVM_ATTRIBUTE_ALWAYS_INLINE void addRow(std::initializer_list Row) { + addRow(SmallVector{Row}); + } - constexpr row_type &operator[](size_t RowIdx) { + LLVM_ATTRIBUTE_ALWAYS_INLINE constexpr row_type &operator[](size_t RowIdx) { assert(RowIdx < RowView.size() && "Indexing out of bounds"); return RowView[RowIdx]; } - constexpr T *data() const { + LLVM_ATTRIBUTE_ALWAYS_INLINE constexpr T *data() const { assert(!empty() && "Non-empty view expected"); return RowView.front().data(); } size_t size() const { return getRowSpan() * getMaxColSpan(); } - bool empty() const { return RowView.empty(); } - size_t getRowSpan() const { return RowView.size(); } - size_t getColSpan(size_t RowIdx) const { + LLVM_ATTRIBUTE_ALWAYS_INLINE bool empty() const { return RowView.empty(); } + LLVM_ATTRIBUTE_ALWAYS_INLINE size_t getRowSpan() const { + return RowView.size(); + } + LLVM_ATTRIBUTE_ALWAYS_INLINE size_t getColSpan(size_t RowIdx) const { assert(RowIdx < RowView.size() && "Indexing out of bounds"); return RowView[RowIdx].size(); } @@ -296,10 +347,14 @@ class [[nodiscard]] JaggedArrayView { ->size(); } - iterator begin() { return RowView.begin(); } - iterator end() { return RowView.end(); } - const_iterator begin() const { return RowView.begin(); } - const_iterator end() const { return RowView.end(); } + LLVM_ATTRIBUTE_ALWAYS_INLINE iterator begin() { return RowView.begin(); } + LLVM_ATTRIBUTE_ALWAYS_INLINE iterator end() { return RowView.end(); } + LLVM_ATTRIBUTE_ALWAYS_INLINE const_iterator begin() const { + return RowView.begin(); + } + LLVM_ATTRIBUTE_ALWAYS_INLINE const_iterator end() const { + return RowView.end(); + } constexpr JaggedArrayView rowSlice(size_t Begin, size_t End) { @@ -328,21 +383,21 @@ class [[nodiscard]] JaggedArrayView { return {Mat, std::move(NewRowView)}; } - row_type &lastRow() { + LLVM_ATTRIBUTE_ALWAYS_INLINE row_type &lastRow() { assert(!empty() && "Non-empty view expected"); return RowView.back(); } - const row_type &lastRow() const { + LLVM_ATTRIBUTE_ALWAYS_INLINE const row_type &lastRow() const { assert(!empty() && "Non-empty view expected"); return RowView.back(); } - void dropLastRow() { + LLVM_ATTRIBUTE_ALWAYS_INLINE void dropLastRow() { assert(!empty() && "Non-empty view expected"); RowView.pop_back(); } // For better cache behavior. To be used with copy_assign or copy_swap. - void eraseLastRow() { + LLVM_ATTRIBUTE_ALWAYS_INLINE void eraseLastRow() { assert(Mat.idxFromRow(lastRow().data()).first == Mat.getNumRows() - 1 && "Last row does not correspond to last row in storage"); dropLastRow(); @@ -351,8 +406,9 @@ class [[nodiscard]] JaggedArrayView { protected: // Helper constructor. 
- constexpr JaggedArrayView(MatrixStorage &Mat, - SmallVectorImpl &&RowView) + LLVM_ATTRIBUTE_ALWAYS_INLINE constexpr JaggedArrayView( + MatrixStorage &Mat, + SmallVectorImpl &&RowView) : Mat(Mat), RowView(std::move(RowView)) {} private: From 34da685253079a493cbaffca273647fa7d743fc4 Mon Sep 17 00:00:00 2001 From: Ramkumar Ramachandra Date: Fri, 19 Jul 2024 15:38:55 +0100 Subject: [PATCH 5/5] Matrix: address review --- llvm/include/llvm/ADT/Matrix.h | 6 +++--- 1 file changed, 3 insertions(+), 3 deletions(-) diff --git a/llvm/include/llvm/ADT/Matrix.h b/llvm/include/llvm/ADT/Matrix.h index 87e9ba36425cf..ac9a990c10eac 100644 --- a/llvm/include/llvm/ADT/Matrix.h +++ b/llvm/include/llvm/ADT/Matrix.h @@ -69,7 +69,7 @@ class MatrixStorage { LLVM_ATTRIBUTE_ALWAYS_INLINE size_t size() const { return Base.size(); } LLVM_ATTRIBUTE_ALWAYS_INLINE bool empty() const { return !size(); } LLVM_ATTRIBUTE_ALWAYS_INLINE size_t getNumRows() const { - assert(size() % NCols == 0 && "Internal error"); + assert(size() % NCols == 0); return size() / NCols; } LLVM_ATTRIBUTE_ALWAYS_INLINE size_t getNumCols() const { return NCols; } @@ -91,12 +91,12 @@ class MatrixStorage { LLVM_ATTRIBUTE_ALWAYS_INLINE T *begin() const { return Base.begin(); } LLVM_ATTRIBUTE_ALWAYS_INLINE T *rowFromIdx(size_t RowIdx, size_t Offset = 0) const { - assert(Offset < NCols && "Internal error"); + assert(Offset < NCols); return begin() + RowIdx * NCols + Offset; } LLVM_ATTRIBUTE_ALWAYS_INLINE std::pair idxFromRow(T *Ptr) const { - assert(Ptr >= begin() && "Internal error"); + assert(Ptr >= begin()); size_t Offset = (Ptr - begin()) % NCols; return {(Ptr - begin()) / NCols, Offset}; }