Skip to content
Merged
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
5 changes: 5 additions & 0 deletions docs/changelog/132845.yaml
Original file line number Diff line number Diff line change
@@ -0,0 +1,5 @@
pr: 132845
summary: Expose existing DLS cache x-pack usage statistics
area: Authorization
type: enhancement
issues: []
Original file line number Diff line number Diff line change
Expand Up @@ -40,6 +40,8 @@

import java.io.Closeable;
import java.io.IOException;
import java.util.Collections;
import java.util.LinkedHashMap;
import java.util.List;
import java.util.Map;
import java.util.Objects;
Expand Down Expand Up @@ -320,7 +322,16 @@ public static List<Setting<?>> getSettings() {

public Map<String, Object> usageStats() {
    /*
     * Snapshot of this cache's usage for x-pack usage reporting: entry count,
     * memory footprint (human-readable and raw bytes), plus hit/miss/eviction
     * counters taken from the underlying cache's statistics.
     */
    final ByteSizeValue memory = ByteSizeValue.ofBytes(ramBytesUsed());
    final Cache.Stats rawStats = bitsetCache.stats();

    // LinkedHashMap keeps insertion order so the serialized output is stable.
    final Map<String, Object> usage = new LinkedHashMap<>();
    usage.put("count", entryCount());
    usage.put("memory", memory.toString());
    usage.put("memory_in_bytes", memory.getBytes());
    usage.put("hits", rawStats.getHits());
    usage.put("misses", rawStats.getMisses());
    usage.put("evictions", rawStats.getEvictions());
    // Callers must not mutate the returned stats map.
    return Collections.unmodifiableMap(usage);
}

private static final class BitsetCacheKey {
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -53,6 +53,7 @@
import java.util.ArrayList;
import java.util.Collections;
import java.util.IdentityHashMap;
import java.util.LinkedHashMap;
import java.util.List;
import java.util.Map;
import java.util.Set;
Expand All @@ -62,8 +63,10 @@
import java.util.concurrent.TimeUnit;
import java.util.concurrent.atomic.AtomicInteger;
import java.util.concurrent.atomic.AtomicReference;
import java.util.function.Supplier;

import static org.hamcrest.Matchers.equalTo;
import static org.hamcrest.Matchers.greaterThan;
import static org.hamcrest.Matchers.is;
import static org.hamcrest.Matchers.not;
import static org.hamcrest.Matchers.notNullValue;
Expand Down Expand Up @@ -396,9 +399,9 @@ public void testCacheUnderConcurrentAccess() throws Exception {
cache.verifyInternalConsistency();

// Due to cache evictions, we must get more bitsets than fields
assertThat(uniqueBitSets.size(), Matchers.greaterThan(FIELD_COUNT));
assertThat(uniqueBitSets.size(), greaterThan(FIELD_COUNT));
// Due to cache evictions, we must have seen more bitsets than the cache currently holds
assertThat(uniqueBitSets.size(), Matchers.greaterThan(cache.entryCount()));
assertThat(uniqueBitSets.size(), greaterThan(cache.entryCount()));
// Even under concurrent pressure, the cache should hit the expected size
assertThat(cache.entryCount(), is(maxCacheCount));
assertThat(cache.ramBytesUsed(), is(maxCacheBytes));
Expand Down Expand Up @@ -517,6 +520,64 @@ public void testEquivalentMatchAllDocsQuery() {
assertFalse(DocumentSubsetBitsetCache.isEffectiveMatchAllDocsQuery(new TermQuery(new Term("term"))));
}

// Verifies that usageStats() reports hits, misses and evictions alongside the
// pre-existing count/memory fields, exercising each counter in turn.
public void testHitsMissesAndEvictionsStats() throws Exception {
// cache that will evict all-but-one element, to test evictions
final long maxCacheBytes = EXPECTED_BYTES_PER_BIT_SET + (EXPECTED_BYTES_PER_BIT_SET / 2);
final Settings settings = Settings.builder()
.put(DocumentSubsetBitsetCache.CACHE_SIZE_SETTING.getKey(), maxCacheBytes + "b")
.build();
final DocumentSubsetBitsetCache cache = newCache(settings);

// Baseline stats for a freshly created (empty) cache; the test mutates a copy
// of this map as it drives cache activity and compares after each step.
final Supplier<Map<String, Object>> emptyStatsSupplier = () -> {
final Map<String, Object> stats = new LinkedHashMap<>();
stats.put("count", 0);
stats.put("memory", "0b");
stats.put("memory_in_bytes", 0L);
stats.put("hits", 0L);
stats.put("misses", 0L);
stats.put("evictions", 0L);
return stats;
};

// A new cache must report all-zero statistics.
final Map<String, Object> expectedStats = emptyStatsSupplier.get();
assertThat(cache.usageStats(), equalTo(expectedStats));

runTestOnIndex((searchExecutionContext, leafContext) -> {
// first lookup - miss
final Query query1 = QueryBuilders.termQuery("field-1", "value-1").toQuery(searchExecutionContext);
final BitSet bitSet1 = cache.getBitSet(query1, leafContext);
assertThat(bitSet1, notNullValue());
expectedStats.put("count", 1);
expectedStats.put("misses", 1L);
expectedStats.put("memory", EXPECTED_BYTES_PER_BIT_SET + "b");
expectedStats.put("memory_in_bytes", EXPECTED_BYTES_PER_BIT_SET);
assertThat(cache.usageStats(), equalTo(expectedStats));

// second same lookup - hit
final BitSet bitSet1Again = cache.getBitSet(query1, leafContext);
// sameInstance proves the cached bitset was returned rather than rebuilt
assertThat(bitSet1Again, sameInstance(bitSet1));
expectedStats.put("hits", 1L);
assertThat(cache.usageStats(), equalTo(expectedStats));

// second query - miss, should evict the first one
final Query query2 = QueryBuilders.termQuery("field-2", "value-2").toQuery(searchExecutionContext);
final BitSet bitSet2 = cache.getBitSet(query2, leafContext);
assertThat(bitSet2, notNullValue());
// surprisingly, the eviction callback can call `get` on the cache (asynchronously) which causes another miss (or hit)
// so this assertion is about the current state of the code, rather than the expected or desired state.
// see https://github.com/elastic/elasticsearch/issues/132842
expectedStats.put("misses", 3L);
expectedStats.put("evictions", 1L);
// assertBusy: eviction bookkeeping happens asynchronously, so poll until it settles
assertBusy(() -> { assertThat(cache.usageStats(), equalTo(expectedStats)); }, 200, TimeUnit.MILLISECONDS);
});

// After runTestOnIndex closes the index, its bitset is evicted too (second
// eviction); hit/miss counters survive because they belong to the cache itself.
final Map<String, Object> finalStats = emptyStatsSupplier.get();
finalStats.put("hits", 1L);
finalStats.put("misses", 3L);
finalStats.put("evictions", 2L);
assertThat(cache.usageStats(), equalTo(finalStats));
}

private void runTestOnIndex(CheckedBiConsumer<SearchExecutionContext, LeafReaderContext, Exception> body) throws Exception {
runTestOnIndices(1, ctx -> {
final TestIndexContext indexContext = ctx.get(0);
Expand Down Expand Up @@ -638,5 +699,4 @@ private void runTestOnIndices(int numberIndices, CheckedConsumer<List<TestIndexC
// Builds a cache under test, wired to the shared single-threaded cleanup executor.
private DocumentSubsetBitsetCache newCache(Settings settings) {
return new DocumentSubsetBitsetCache(settings, singleThreadExecutor);
}

}