Skip to content

Commit 2fbfea2

Browse files
committed
BE: Issue#1332 Sort based on prefix offsets
1 parent 9f12e28 commit 2fbfea2

File tree

5 files changed

+50
-9
lines changed

5 files changed

+50
-9
lines changed

api/src/main/java/io/kafbat/ui/service/acl/AclsService.java

Lines changed: 1 addition & 4 deletions
Original file line numberDiff line numberDiff line change
@@ -75,10 +75,7 @@ public Flux<AclBinding> listAcls(KafkaCluster cluster, ResourcePatternFilter fil
7575
Boolean fts) {
7676
return adminClientService.get(cluster)
7777
.flatMap(c -> c.listAcls(filter))
78-
.flatMapIterable(acls -> acls)
79-
.filter(acl -> principalSearch == null || acl.entry().principal().contains(principalSearch))
80-
.collectList()
81-
.map(lst -> filter(lst, principalSearch, fts))
78+
.map(lst -> filter(new ArrayList<>(lst), principalSearch, fts))
8279
.flatMapMany(Flux::fromIterable)
8380
.sort(Comparator.comparing(AclBinding::toString)); //sorting to keep stable order on different calls
8481
}

api/src/main/java/io/kafbat/ui/service/index/NgramFilter.java

Lines changed: 1 addition & 4 deletions
Original file line numberDiff line numberDiff line change
@@ -10,9 +10,6 @@
1010
import java.util.HashSet;
1111
import java.util.List;
1212
import java.util.Map;
13-
import java.util.NavigableMap;
14-
import java.util.TreeMap;
15-
import java.util.TreeSet;
1613
import java.util.stream.Stream;
1714
import lombok.SneakyThrows;
1815
import lombok.extern.slf4j.Slf4j;
@@ -125,7 +122,7 @@ static List<String> tokenizeString(Analyzer analyzer, String text) {
125122
}
126123

127124
@SneakyThrows
128-
static List<String> tokenizeStringSimple(Analyzer analyzer, String text) {
125+
public static List<String> tokenizeStringSimple(Analyzer analyzer, String text) {
129126
List<String> tokens = new ArrayList<>();
130127
try (TokenStream tokenStream = analyzer.tokenStream(null, text)) {
131128
CharTermAttribute attr = tokenStream.addAttribute(CharTermAttribute.class);

api/src/main/java/io/kafbat/ui/service/index/ShortWordNGramAnalyzer.java

Lines changed: 1 addition & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -8,7 +8,7 @@
88
import org.apache.lucene.analysis.ngram.NGramTokenFilter;
99
import org.apache.lucene.analysis.standard.StandardTokenizer;
1010

11-
class ShortWordNGramAnalyzer extends Analyzer {
11+
public class ShortWordNGramAnalyzer extends Analyzer {
1212
private final int minGram;
1313
private final int maxGram;
1414
private final boolean preserveOriginal;
Lines changed: 41 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -0,0 +1,41 @@
package io.kafbat.ui.service.index;

import static org.assertj.core.api.AssertionsForClassTypes.assertThat;

import java.util.List;
import java.util.stream.Stream;
import org.junit.jupiter.params.ParameterizedTest;
import org.junit.jupiter.params.provider.Arguments;
import org.junit.jupiter.params.provider.MethodSource;

/**
 * Verifies that {@link ShortWordNGramAnalyzer} tokenizes multi-part identifiers
 * (dot-separated, camelCase, colon-separated) into the same n-gram stream.
 *
 * <p>NOTE(review): all three inputs are expected to yield an identical token
 * list, so the expectation is shared as a single constant below.
 */
class ShortWordNGramAnalyzerTest {

  /**
   * Expected 2..4-gram tokens (plus preserved whole words) for an input whose
   * logical parts are "hello", "world" and "text", regardless of the separator
   * style used in the raw input string.
   */
  private static final List<String> HELLO_WORLD_TEXT_GRAMS = List.of(
      "he", "hel", "hell", "el", "ell", "ello", "ll", "llo", "lo", "hello",
      "wo", "wor", "worl", "or", "orl", "orld", "rl", "rld", "ld", "world", "te",
      "tex", "text", "ex", "ext", "xt"
  );

  /**
   * Tokenizes {@code name} with a 2..4-gram analyzer and asserts the exact
   * token sequence.
   *
   * @param name  raw input string to analyze
   * @param parts expected token list, in order
   */
  @ParameterizedTest
  @MethodSource("provider")
  public void testOffsets(String name, List<String> parts) {
    ShortWordNGramAnalyzer analyzer = new ShortWordNGramAnalyzer(2, 4);
    List<String> strings = NgramFilter.tokenizeString(analyzer, name);
    assertThat(strings).isEqualTo(parts);
  }

  /** Supplies one case per separator style; every case shares the same expected grams. */
  public static Stream<Arguments> provider() {
    return Stream.of(
        Arguments.of("hello.world.text", HELLO_WORLD_TEXT_GRAMS),
        Arguments.of("helloWorldText", HELLO_WORLD_TEXT_GRAMS),
        Arguments.of("hello:world:text", HELLO_WORLD_TEXT_GRAMS)
    );
  }
}

api/src/test/java/io/kafbat/ui/service/index/lucene/ShortWordAnalyzerTest.java

Lines changed: 6 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -50,6 +50,12 @@ public static Stream<Arguments> provider() {
5050
Tuples.of("hello", 0, 5),
5151
Tuples.of("world", 5, 10),
5252
Tuples.of("text", 10, 14)
53+
)),
54+
Arguments.of("hello:world:text", List.of(
55+
Tuples.of("hello:world:text", 0, 16),
56+
Tuples.of("hello", 0, 5),
57+
Tuples.of("world", 6, 11),
58+
Tuples.of("text", 12, 16)
5359
))
5460
);
5561
}

0 commit comments

Comments
 (0)