Skip to content
Merged
Show file tree
Hide file tree
Changes from 3 commits
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
6 changes: 6 additions & 0 deletions docs/changelog/126889.yaml
Original file line number Diff line number Diff line change
@@ -0,0 +1,6 @@
pr: 126889
summary: Rework uniquify to not use iterators
area: Infra/Core
type: bug
issues:
- 126883
Original file line number Diff line number Diff line change
Expand Up @@ -20,7 +20,6 @@
import java.util.Comparator;
import java.util.IdentityHashMap;
import java.util.List;
import java.util.ListIterator;
import java.util.Map;
import java.util.Objects;
import java.util.RandomAccess;
Expand Down Expand Up @@ -51,20 +50,22 @@ public static <T> void uniquify(List<T> list, Comparator<T> cmp) {
return;
}

ListIterator<T> uniqueItr = list.listIterator();
ListIterator<T> existingItr = list.listIterator();
T uniqueValue = uniqueItr.next(); // get first element to compare with
existingItr.next(); // advance the existing iterator to the second element, where we will begin comparing
int resultIndex = 0;
int currentIndex = 1;
T lastValue = list.get(resultIndex); // get first element to compare with
do {
T existingValue = existingItr.next();
if (cmp.compare(existingValue, uniqueValue) != 0 && (uniqueValue = uniqueItr.next()) != existingValue) {
uniqueItr.set(existingValue);
T currentValue = list.get(currentIndex);
if (cmp.compare(lastValue, currentValue) != 0) {
lastValue = currentValue;
if (++resultIndex != currentIndex) {
list.set(resultIndex, currentValue);
}
}
} while (existingItr.hasNext());
} while (++currentIndex < list.size());

// Lop off the rest of the list. Note with LinkedList this requires advancing back to this index,
// but Java provides no way to efficiently remove from the end of a non random-access list.
list.subList(uniqueItr.nextIndex(), list.size()).clear();
list.subList(resultIndex + 1, list.size()).clear();
}

/**
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -65,7 +65,7 @@ private <T> void assertUniquify(List<T> list, Comparator<T> cmp, int size) {
for (List<T> listCopy : List.of(new ArrayList<T>(list), new LinkedList<T>(list))) {
CollectionUtils.uniquify(listCopy, cmp);
for (int i = 0; i < listCopy.size() - 1; ++i) {
assertThat(cmp.compare(listCopy.get(i), listCopy.get(i + 1)), lessThan(0));
assertThat(listCopy.get(i) + " < " + listCopy.get(i + 1), cmp.compare(listCopy.get(i), listCopy.get(i + 1)), lessThan(0));
}
assertThat(listCopy.size(), equalTo(size));
}
Expand All @@ -78,6 +78,7 @@ public void testUniquify() {
assertUniquify(List.of(1, 1, 1), Comparator.naturalOrder(), 1);
assertUniquify(List.of(1, 2, 2, 3), Comparator.naturalOrder(), 3);
assertUniquify(List.of(1, 2, 2, 2), Comparator.naturalOrder(), 2);
assertUniquify(List.of(1, 2, 2, 3, 3, 5), Comparator.naturalOrder(), 4);
Copy link
Contributor

Choose a reason for hiding this comment

The reason will be displayed to describe this comment to others. Learn more.

I think this needs a stronger test. I was thinking about this:

  1. Generate a random int `s` for the number of unique items in the list
  2. Generate `s` unique numbers
  3. For each number, generate a random repeat count (1 to 10)
  4. Put everything in the list
  5. uniquify the list
  6. Compare the resulting list with the list of unique numbers from step 2

Copy link
Member Author

Choose a reason for hiding this comment

The reason will be displayed to describe this comment to others. Learn more.

I added a random test, see 62cdd9a

}

public void testEmptyPartition() {
Expand Down
Loading