 #include <__algorithm/ranges_inplace_merge.h>
 #include <__algorithm/ranges_lower_bound.h>
 #include <__algorithm/ranges_partition_point.h>
-#include <__algorithm/ranges_stable_sort.h>
+#include <__algorithm/ranges_sort.h>
 #include <__algorithm/ranges_unique.h>
 #include <__algorithm/ranges_upper_bound.h>
 #include <__algorithm/remove_if.h>
@@ -853,9 +853,7 @@ class flat_map {
   // is no invariant state to preserve
   _LIBCPP_HIDE_FROM_ABI void __sort_and_unique() {
     auto __zv = ranges::views::zip(__containers_.keys, __containers_.values);
-    // To be consistent with std::map's behaviour, we use stable_sort instead of sort.
-    // As a result, if there are duplicated keys, the first value in the original order will be taken.
-    ranges::stable_sort(__zv, __compare_, [](const auto& __p) -> decltype(auto) { return std::get<0>(__p); });
+    ranges::sort(__zv, __compare_, [](const auto& __p) -> decltype(auto) { return std::get<0>(__p); });
     auto __dup_start = ranges::unique(__zv, __key_equiv(__compare_)).begin();
     auto __dist = ranges::distance(__zv.begin(), __dup_start);
     __containers_.keys.erase(__containers_.keys.begin() + __dist, __containers_.keys.end());
@@ -886,7 +884,7 @@ class flat_map {
       return __compare_(std::get<0>(__p1), std::get<0>(__p2));
     };
     if constexpr (!_WasSorted) {
-      ranges::stable_sort(__zv.begin() + __append_start_offset, __end, __compare_key);
+      ranges::sort(__zv.begin() + __append_start_offset, __end, __compare_key);
     } else {
       _LIBCPP_ASSERT_SEMANTIC_REQUIREMENT(
          __is_sorted_and_unique(__containers_.keys | ranges::views::drop(__append_start_offset)),
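
For context, here is a minimal standalone sketch of the same zip/sort/unique idiom that __sort_and_unique applies, written against the public C++23 interface rather than libc++ internals. It assumes a standard library that ships std::views::zip; the names keys, vals, and zv are illustrative, and key equivalence is expressed with operator== for brevity where the real code derives it from the comparator. It also shows why the diff's comment removal matters: with the unstable ranges::sort, which value survives for a duplicated key is unspecified.

#include <algorithm>
#include <functional>
#include <iostream>
#include <iterator>
#include <ranges>
#include <tuple>
#include <vector>

int main() {
  // Parallel key/value containers, mirroring how flat_map stores keys and values separately.
  std::vector<int> keys{3, 1, 2, 1};
  std::vector<char> vals{'c', 'a', 'b', 'x'};

  auto zv = std::views::zip(keys, vals);

  // Sort the zipped view by key. Because ranges::sort is not stable, the relative
  // order of the two elements with key 1 is unspecified, so either 'a' or 'x'
  // may be the value that survives deduplication below.
  std::ranges::sort(zv, std::less<>{}, [](const auto& p) -> decltype(auto) { return std::get<0>(p); });

  // Keep only the first element of each run of equal keys, then shrink both
  // containers to the deduplicated length.
  auto dup_start = std::ranges::unique(zv, [](const auto& a, const auto& b) {
                     return std::get<0>(a) == std::get<0>(b);
                   }).begin();
  auto dist = std::ranges::distance(zv.begin(), dup_start);
  keys.erase(keys.begin() + dist, keys.end());
  vals.erase(vals.begin() + dist, vals.end());

  for (auto [k, v] : std::views::zip(keys, vals))
    std::cout << k << " -> " << v << '\n';  // prints 1 -> a (or x), 2 -> b, 3 -> c
}

Sorting and deduplicating the zip view in place is what lets the keys and values stay in separate containers while being permuted together, which is the property the diff above relies on in both hunks.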
|