diff --git a/libcxx/docs/ReleaseNotes/22.rst b/libcxx/docs/ReleaseNotes/22.rst index 8b8dce5083149..15bf46d44b07f 100644 --- a/libcxx/docs/ReleaseNotes/22.rst +++ b/libcxx/docs/ReleaseNotes/22.rst @@ -43,9 +43,6 @@ Implemented Papers Improvements and New Features ----------------------------- -- The performance of ``map::map(const map&)`` has been improved up to 2.3x -- The performance of ``map::operator=(const map&)`` has been improved by up to 11x - Deprecations and Removals ------------------------- diff --git a/libcxx/include/__tree b/libcxx/include/__tree index 1b1bb538029da..f8bb4f01b1e29 100644 --- a/libcxx/include/__tree +++ b/libcxx/include/__tree @@ -1213,104 +1213,6 @@ private: __node_pointer __cache_root_; __node_pointer __cache_elem_; }; - - class __tree_deleter { - __node_allocator& __alloc_; - - public: - using pointer = __node_pointer; - - _LIBCPP_HIDE_FROM_ABI __tree_deleter(__node_allocator& __alloc) : __alloc_(__alloc) {} - -#ifdef _LIBCPP_COMPILER_CLANG_BASED // FIXME: GCC complains about not being able to always_inline a recursive function - _LIBCPP_HIDE_FROM_ABI -#endif - void - operator()(__node_pointer __ptr) { - if (!__ptr) - return; - - (*this)(static_cast<__node_pointer>(__ptr->__left_)); - - auto __right = __ptr->__right_; - - __node_traits::destroy(__alloc_, std::addressof(__ptr->__value_)); - __node_traits::deallocate(__alloc_, __ptr, 1); - - (*this)(static_cast<__node_pointer>(__right)); - } - }; - - // This copy construction will always produce a correct red-black-tree assuming the incoming tree is correct, since we - // copy the exact structure 1:1. Since this is for copy construction _only_ we know that we get a correct tree. If we - // didn't get a correct tree, the invariants of __tree are broken and we have a much bigger problem than an improperly - // balanced tree. -#ifdef _LIBCPP_COMPILER_CLANG_BASED // FIXME: GCC complains about not being able to always_inline a recursive function - _LIBCPP_HIDE_FROM_ABI -#endif - __node_pointer - __copy_construct_tree(__node_pointer __src) { - if (!__src) - return nullptr; - - __node_holder __new_node = __construct_node(__src->__value_); - - unique_ptr<__node, __tree_deleter> __left( - __copy_construct_tree(static_cast<__node_pointer>(__src->__left_)), __node_alloc_); - __node_pointer __right = __copy_construct_tree(static_cast<__node_pointer>(__src->__right_)); - - __node_pointer __new_node_ptr = __new_node.release(); - - __new_node_ptr->__is_black_ = __src->__is_black_; - __new_node_ptr->__left_ = static_cast<__node_base_pointer>(__left.release()); - __new_node_ptr->__right_ = static_cast<__node_base_pointer>(__right); - if (__new_node_ptr->__left_) - __new_node_ptr->__left_->__parent_ = static_cast<__end_node_pointer>(__new_node_ptr); - if (__new_node_ptr->__right_) - __new_node_ptr->__right_->__parent_ = static_cast<__end_node_pointer>(__new_node_ptr); - return __new_node_ptr; - } - - // This copy assignment will always produce a correct red-black-tree assuming the incoming tree is correct, since our - // own tree is a red-black-tree and the incoming tree is a red-black-tree. The invariants of a red-black-tree are - // temporarily not met until all of the incoming red-black tree is copied. 
-#ifdef _LIBCPP_COMPILER_CLANG_BASED // FIXME: GCC complains about not being able to always_inline a recursive function - _LIBCPP_HIDE_FROM_ABI -#endif - __node_pointer - __copy_assign_tree(__node_pointer __dest, __node_pointer __src) { - if (!__src) { - destroy(__dest); - return nullptr; - } - - __assign_value(__dest->__value_, __src->__value_); - __dest->__is_black_ = __src->__is_black_; - - // If we already have a left node in the destination tree, reuse it and copy-assign recursively - if (__dest->__left_) { - __dest->__left_ = static_cast<__node_base_pointer>(__copy_assign_tree( - static_cast<__node_pointer>(__dest->__left_), static_cast<__node_pointer>(__src->__left_))); - - // Otherwise, we must create new nodes; copy-construct from here on - } else if (__src->__left_) { - auto __new_left = __copy_construct_tree(static_cast<__node_pointer>(__src->__left_)); - __dest->__left_ = static_cast<__node_base_pointer>(__new_left); - __new_left->__parent_ = static_cast<__end_node_pointer>(__dest); - } - - // Identical to the left case above, just for the right nodes - if (__dest->__right_) { - __dest->__right_ = static_cast<__node_base_pointer>(__copy_assign_tree( - static_cast<__node_pointer>(__dest->__right_), static_cast<__node_pointer>(__src->__right_))); - } else if (__src->__right_) { - auto __new_right = __copy_construct_tree(static_cast<__node_pointer>(__src->__right_)); - __dest->__right_ = static_cast<__node_base_pointer>(__new_right); - __new_right->__parent_ = static_cast<__end_node_pointer>(__dest); - } - - return __dest; - } }; template @@ -1375,22 +1277,11 @@ __tree<_Tp, _Compare, _Allocator>::_DetachedTreeCache::__detach_next(__node_poin template __tree<_Tp, _Compare, _Allocator>& __tree<_Tp, _Compare, _Allocator>::operator=(const __tree& __t) { - if (this == std::addressof(__t)) - return *this; - - value_comp() = __t.value_comp(); - __copy_assign_alloc(__t); - - if (__size_ != 0) { - *__root_ptr() = static_cast<__node_base_pointer>(__copy_assign_tree(__root(), __t.__root())); - } else { - *__root_ptr() = static_cast<__node_base_pointer>(__copy_construct_tree(__t.__root())); - if (__root()) - __root()->__parent_ = __end_node(); + if (this != std::addressof(__t)) { + value_comp() = __t.value_comp(); + __copy_assign_alloc(__t); + __assign_multi(__t.begin(), __t.end()); } - __begin_node_ = static_cast<__end_node_pointer>(std::__tree_min(static_cast<__node_base_pointer>(__end_node()))); - __size_ = __t.size(); - return *this; } @@ -1436,17 +1327,11 @@ void __tree<_Tp, _Compare, _Allocator>::__assign_multi(_InputIterator __first, _ template __tree<_Tp, _Compare, _Allocator>::__tree(const __tree& __t) - : __begin_node_(__end_node()), + : __begin_node_(), __node_alloc_(__node_traits::select_on_container_copy_construction(__t.__node_alloc())), __size_(0), __value_comp_(__t.value_comp()) { - if (__t.size() == 0) - return; - - *__root_ptr() = static_cast<__node_base_pointer>(__copy_construct_tree(__t.__root())); - __root()->__parent_ = __end_node(); - __begin_node_ = static_cast<__end_node_pointer>(std::__tree_min(static_cast<__node_base_pointer>(__end_node()))); - __size_ = __t.size(); + __begin_node_ = __end_node(); } template @@ -1545,7 +1430,13 @@ __tree<_Tp, _Compare, _Allocator>::~__tree() { template void __tree<_Tp, _Compare, _Allocator>::destroy(__node_pointer __nd) _NOEXCEPT { - (__tree_deleter(__node_alloc_))(__nd); + if (__nd != nullptr) { + destroy(static_cast<__node_pointer>(__nd->__left_)); + destroy(static_cast<__node_pointer>(__nd->__right_)); + __node_allocator& __na = 
__node_alloc(); + __node_traits::destroy(__na, std::addressof(__nd->__value_)); + __node_traits::deallocate(__na, __nd, 1); + } } template diff --git a/libcxx/include/map b/libcxx/include/map index 0a43bd09a0b16..2251565801470 100644 --- a/libcxx/include/map +++ b/libcxx/include/map @@ -970,7 +970,7 @@ public: : map(from_range, std::forward<_Range>(__range), key_compare(), __a) {} # endif - _LIBCPP_HIDE_FROM_ABI map(const map& __m) = default; + _LIBCPP_HIDE_FROM_ABI map(const map& __m) : __tree_(__m.__tree_) { insert(__m.begin(), __m.end()); } _LIBCPP_HIDE_FROM_ABI map& operator=(const map& __m) = default; @@ -1637,7 +1637,11 @@ public: : multimap(from_range, std::forward<_Range>(__range), key_compare(), __a) {} # endif - _LIBCPP_HIDE_FROM_ABI multimap(const multimap& __m) = default; + _LIBCPP_HIDE_FROM_ABI multimap(const multimap& __m) + : __tree_(__m.__tree_.value_comp(), + __alloc_traits::select_on_container_copy_construction(__m.__tree_.__alloc())) { + insert(__m.begin(), __m.end()); + } _LIBCPP_HIDE_FROM_ABI multimap& operator=(const multimap& __m) = default; diff --git a/libcxx/include/set b/libcxx/include/set index 342a5294c814f..1f2fd7fc91bbb 100644 --- a/libcxx/include/set +++ b/libcxx/include/set @@ -660,7 +660,7 @@ public: : set(from_range, std::forward<_Range>(__range), key_compare(), __a) {} # endif - _LIBCPP_HIDE_FROM_ABI set(const set& __s) = default; + _LIBCPP_HIDE_FROM_ABI set(const set& __s) : __tree_(__s.__tree_) { insert(__s.begin(), __s.end()); } _LIBCPP_HIDE_FROM_ABI set& operator=(const set& __s) = default; @@ -1119,7 +1119,11 @@ public: : multiset(from_range, std::forward<_Range>(__range), key_compare(), __a) {} # endif - _LIBCPP_HIDE_FROM_ABI multiset(const multiset& __s) = default; + _LIBCPP_HIDE_FROM_ABI multiset(const multiset& __s) + : __tree_(__s.__tree_.value_comp(), + __alloc_traits::select_on_container_copy_construction(__s.__tree_.__alloc())) { + insert(__s.begin(), __s.end()); + } _LIBCPP_HIDE_FROM_ABI multiset& operator=(const multiset& __s) = default; diff --git a/libcxx/test/benchmarks/containers/associative/associative_container_benchmarks.h b/libcxx/test/benchmarks/containers/associative/associative_container_benchmarks.h index 535a52f0a08ab..0ff7f15164d8a 100644 --- a/libcxx/test/benchmarks/containers/associative/associative_container_benchmarks.h +++ b/libcxx/test/benchmarks/containers/associative/associative_container_benchmarks.h @@ -151,7 +151,7 @@ void associative_container_benchmarks(std::string container) { ///////////////////////// // Assignment ///////////////////////// - bench("operator=(const&) (into cleared Container)", [=](auto& st) { + bench("operator=(const&)", [=](auto& st) { const std::size_t size = st.range(0); std::vector in = make_value_types(generate_unique_keys(size)); Container src(in.begin(), in.end()); @@ -172,42 +172,6 @@ void associative_container_benchmarks(std::string container) { } }); - bench("operator=(const&) (into partially populated Container)", [=](auto& st) { - const std::size_t size = st.range(0); - std::vector in = make_value_types(generate_unique_keys(size)); - Container src(in.begin(), in.end()); - Container c[BatchSize]; - - while (st.KeepRunningBatch(BatchSize)) { - for (std::size_t i = 0; i != BatchSize; ++i) { - c[i] = src; - benchmark::DoNotOptimize(c[i]); - benchmark::ClobberMemory(); - } - - st.PauseTiming(); - for (std::size_t i = 0; i != BatchSize; ++i) { - c[i].clear(); - } - st.ResumeTiming(); - } - }); - - bench("operator=(const&) (into populated Container)", [=](auto& st) { - const std::size_t 
size = st.range(0); - std::vector in = make_value_types(generate_unique_keys(size)); - Container src(in.begin(), in.end()); - Container c[BatchSize]; - - while (st.KeepRunningBatch(BatchSize)) { - for (std::size_t i = 0; i != BatchSize; ++i) { - c[i] = src; - benchmark::DoNotOptimize(c[i]); - benchmark::ClobberMemory(); - } - } - }); - ///////////////////////// // Insertion ///////////////////////// diff --git a/libcxx/test/benchmarks/containers/associative/map.bench.cpp b/libcxx/test/benchmarks/containers/associative/map.bench.cpp index bd664dbb56ee7..cee669ae0a667 100644 --- a/libcxx/test/benchmarks/containers/associative/map.bench.cpp +++ b/libcxx/test/benchmarks/containers/associative/map.bench.cpp @@ -29,7 +29,6 @@ struct support::adapt_operations> { int main(int argc, char** argv) { support::associative_container_benchmarks>("std::map"); - support::associative_container_benchmarks>("std::map"); benchmark::Initialize(&argc, argv); benchmark::RunSpecifiedBenchmarks(); diff --git a/libcxx/test/benchmarks/containers/associative/multimap.bench.cpp b/libcxx/test/benchmarks/containers/associative/multimap.bench.cpp index 15a0b573081bb..6ae93f06aa363 100644 --- a/libcxx/test/benchmarks/containers/associative/multimap.bench.cpp +++ b/libcxx/test/benchmarks/containers/associative/multimap.bench.cpp @@ -28,7 +28,6 @@ struct support::adapt_operations> { int main(int argc, char** argv) { support::associative_container_benchmarks>("std::multimap"); - support::associative_container_benchmarks>("std::multimap"); benchmark::Initialize(&argc, argv); benchmark::RunSpecifiedBenchmarks(); diff --git a/libcxx/test/benchmarks/containers/associative/multiset.bench.cpp b/libcxx/test/benchmarks/containers/associative/multiset.bench.cpp index c205e0a4f793f..894f159a52e4a 100644 --- a/libcxx/test/benchmarks/containers/associative/multiset.bench.cpp +++ b/libcxx/test/benchmarks/containers/associative/multiset.bench.cpp @@ -26,7 +26,6 @@ struct support::adapt_operations> { int main(int argc, char** argv) { support::associative_container_benchmarks>("std::multiset"); - support::associative_container_benchmarks>("std::multiset"); benchmark::Initialize(&argc, argv); benchmark::RunSpecifiedBenchmarks(); diff --git a/libcxx/test/benchmarks/containers/associative/set.bench.cpp b/libcxx/test/benchmarks/containers/associative/set.bench.cpp index 50ee142b6e8b3..6b7b142c792ba 100644 --- a/libcxx/test/benchmarks/containers/associative/set.bench.cpp +++ b/libcxx/test/benchmarks/containers/associative/set.bench.cpp @@ -27,7 +27,6 @@ struct support::adapt_operations> { int main(int argc, char** argv) { support::associative_container_benchmarks>("std::set"); - support::associative_container_benchmarks>("std::set"); benchmark::Initialize(&argc, argv); benchmark::RunSpecifiedBenchmarks(); diff --git a/libcxx/test/libcxx/gdb/gdb_pretty_printer_test.sh.cpp b/libcxx/test/libcxx/gdb/gdb_pretty_printer_test.sh.cpp index f5a878582666b..f125cc9adc491 100644 --- a/libcxx/test/libcxx/gdb/gdb_pretty_printer_test.sh.cpp +++ b/libcxx/test/libcxx/gdb/gdb_pretty_printer_test.sh.cpp @@ -324,11 +324,6 @@ void deque_test() { void map_test() { std::map i_am_empty{}; - - // Make __tree_itertor available in the debug info - // FIXME: Is there any way to avoid this requirement? 
- (void)i_am_empty.begin(); - ComparePrettyPrintToChars(i_am_empty, "std::map is empty"); std::map one_two_three; diff --git a/libcxx/test/std/containers/associative/map/map.cons/copy.pass.cpp b/libcxx/test/std/containers/associative/map/map.cons/copy.pass.cpp index f1696b003c131..e7a69aff0b036 100644 --- a/libcxx/test/std/containers/associative/map/map.cons/copy.pass.cpp +++ b/libcxx/test/std/containers/associative/map/map.cons/copy.pass.cpp @@ -12,129 +12,116 @@ // map(const map& m); -#include #include +#include -#include "min_allocator.h" +#include "test_macros.h" #include "../../../test_compare.h" #include "test_allocator.h" +#include "min_allocator.h" -template
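The comments removed from __tree above describe two recursive helpers: __copy_construct_tree clones the source tree node for node, so a correct red-black tree yields a correct copy by construction, and __copy_assign_tree walks destination and source in lock-step so that existing destination nodes are reused. Below is a minimal standalone sketch of the structural clone, assuming a hypothetical Node struct and plain new/delete in place of libc++'s allocator-aware node machinery; the names Node, copy_structure, and destroy_tree are invented for illustration and are not libc++ identifiers. Unlike the removed code, which guards the partially built left subtree with a unique_ptr and a recursive deleter, this sketch makes no attempt at exception safety.

#include <cassert>
#include <memory>

// Hypothetical, simplified node type. libc++'s real __tree node also carries
// allocator-provided storage and fancy-pointer types, all omitted here.
struct Node {
  int value;
  bool is_black = false;
  Node* left = nullptr;
  Node* right = nullptr;
  Node* parent = nullptr;
};

// Post-order recursive teardown, mirroring the recursive destroy() the patch
// switches back to.
void destroy_tree(Node* n) {
  if (n == nullptr)
    return;
  destroy_tree(n->left);
  destroy_tree(n->right);
  delete n;
}

// Clone the source subtree 1:1. Every node keeps its position and color, so
// the copy satisfies the red-black invariants whenever the source does and no
// rebalancing is ever needed.
Node* copy_structure(const Node* src) {
  if (src == nullptr)
    return nullptr;
  auto node = std::make_unique<Node>();
  node->value    = src->value;
  node->is_black = src->is_black;
  node->left = copy_structure(src->left);
  if (node->left)
    node->left->parent = node.get();
  node->right = copy_structure(src->right);
  if (node->right)
    node->right->parent = node.get();
  return node.release();
}

int main() {
  // Build a tiny valid tree: black root with two red children.
  Node* root  = new Node{2, true};
  root->left  = new Node{1, false, nullptr, nullptr, root};
  root->right = new Node{3, false, nullptr, nullptr, root};

  Node* copy = copy_structure(root);
  assert(copy->value == 2 && copy->is_black);
  assert(copy->left->value == 1 && copy->left->parent == copy);
  assert(copy->right->value == 3 && copy->right->parent == copy);

  destroy_tree(copy);
  destroy_tree(root);
}

Copying this way never rebalances; as the removed comment notes, the clone inherits the source's shape and colors, so the invariants hold automatically.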
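Building on the same hypothetical Node, copy_structure, and destroy_tree from the sketch above, the following condenses the node-reuse idea behind the removed __copy_assign_tree: values and colors are overwritten in place, new nodes are allocated only where the source subtree is larger, and surplus destination nodes are destroyed only where it is smaller. The removed libc++ code spells the child handling out as separate reuse and copy-construct branches; this sketch folds them into the null checks at the top.

// Copy-assign src onto dest, reusing dest's nodes wherever both trees have one.
Node* copy_assign_structure(Node* dest, const Node* src) {
  if (src == nullptr) {      // source exhausted: drop the surplus destination nodes
    destroy_tree(dest);
    return nullptr;
  }
  if (dest == nullptr)       // destination exhausted: clone the rest of the source
    return copy_structure(src);

  dest->value    = src->value;     // reuse the node, only overwrite its payload
  dest->is_black = src->is_black;

  dest->left = copy_assign_structure(dest->left, src->left);
  if (dest->left)
    dest->left->parent = dest;

  dest->right = copy_assign_structure(dest->right, src->right);
  if (dest->right)
    dest->right->parent = dest;

  return dest;
}

When source and destination have a similar shape, an assignment written this way visits every node once and allocates nothing, which is the situation the removed "operator=(const&) (into populated Container)" benchmark exercised.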