@@ -0,0 +1,119 @@
/*
* Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
* or more contributor license agreements. Licensed under the "Elastic License
* 2.0", the "GNU Affero General Public License v3.0 only", and the "Server Side
* Public License v 1"; you may not use this file except in compliance with, at
* your election, the "Elastic License 2.0", the "GNU Affero General Public
* License v3.0 only", or the "Server Side Public License, v 1".
*/

package org.elasticsearch.benchmark.common.compress;

import org.apache.lucene.codecs.compressing.CompressionMode;
import org.apache.lucene.codecs.compressing.Compressor;
import org.apache.lucene.store.ByteArrayDataOutput;
import org.apache.lucene.store.ByteBuffersDataInput;
import org.elasticsearch.common.compress.fsst.FSST;
import org.openjdk.jmh.annotations.AuxCounters;
import org.openjdk.jmh.annotations.Benchmark;
import org.openjdk.jmh.annotations.BenchmarkMode;
import org.openjdk.jmh.annotations.Fork;
import org.openjdk.jmh.annotations.Level;
import org.openjdk.jmh.annotations.Measurement;
import org.openjdk.jmh.annotations.Mode;
import org.openjdk.jmh.annotations.OutputTimeUnit;
import org.openjdk.jmh.annotations.Param;
import org.openjdk.jmh.annotations.Scope;
import org.openjdk.jmh.annotations.Setup;
import org.openjdk.jmh.annotations.State;
import org.openjdk.jmh.annotations.Warmup;
import org.openjdk.jmh.infra.Blackhole;

import java.io.IOException;
import java.nio.ByteBuffer;
import java.nio.charset.StandardCharsets;
import java.nio.file.Files;
import java.nio.file.Path;
import java.util.List;
import java.util.concurrent.TimeUnit;

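/**
 * Compares FSST bulk compression (symbol-table construction plus encoding) against
 * Lucene's LZ4 {@link CompressionMode#FAST} on an 8 MiB buffer built by tiling the
 * input dataset. The achieved compression ratio is reported as an auxiliary counter.
 */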
@Fork(1)
@Warmup(iterations = 2)
@Measurement(iterations = 3)
@BenchmarkMode(Mode.AverageTime)
@OutputTimeUnit(TimeUnit.MILLISECONDS)
@State(Scope.Benchmark)
public class FSSTCompressBenchmark {

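// Path to a UTF-8 text file used as the corpus, supplied on the command line, e.g. -p dataset=/path/to/file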
@Param("")
public String dataset;

private byte[] input;
private int[] offsets;
private byte[] outBuf;
private int[] outOffsets;

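// EVENTS-type aux counters are reported as a separate metric without per-operation
// normalization; the value sampled at the end of each iteration is what gets shown.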
@AuxCounters(AuxCounters.Type.EVENTS)
@State(Scope.Thread)
public static class CompressionMetrics {
public double compressionRatio;
}

private static final int MB_8 = 8 * 1024 * 1024;

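// Tiles the dataset's bytes until the buffer holds exactly 8 MiB. The 8 trailing
// bytes are slack, presumably to tolerate FSST's word-wide reads past the end.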
private byte[] concatenateTo8mb(byte[] contentBytes) {
byte[] bytes = new byte[MB_8 + 8];
int i = 0;
while (i < MB_8) {
int remaining = MB_8 - i;
int len = Math.min(contentBytes.length, remaining);
System.arraycopy(contentBytes, 0, bytes, i, len);
i += len;
}
return bytes;
}

@Setup(Level.Trial)
public void setup() throws IOException {
String content = Files.readString(Path.of(dataset), StandardCharsets.UTF_8);

byte[] contentBytes = FSST.toBytes(content);
input = concatenateTo8mb(contentBytes);
offsets = new int[] { 0, MB_8 };
outBuf = new byte[MB_8];
outOffsets = new int[2];
}

@Benchmark
public void compressFSST(Blackhole bh, CompressionMetrics metrics) {
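// Note: sampling and symbol-table construction run inside the measured section, so the
// timing covers table build plus compression, i.e. the cost of a fresh table per block.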
List<byte[]> sample = FSST.makeSample(input, offsets);
var symbolTable = FSST.SymbolTable.buildSymbolTable(sample);
symbolTable.compressBulk(1, input, offsets, outBuf, outOffsets);
bh.consume(outBuf);
bh.consume(outOffsets);

int uncompressedSize = offsets[1];
int compressedSize = outOffsets[1];
metrics.compressionRatio = compressedSize / (double) uncompressedSize;
}

@Benchmark
public void compressLZ4Fast(Blackhole bh, CompressionMetrics metrics) throws IOException {
int inputSize = offsets[1];

// wrap only the 8 MiB payload so LZ4 compresses the same bytes as FSST (input carries 8 bytes of padding)
var dataInput = new ByteBuffersDataInput(List.of(ByteBuffer.wrap(input, 0, inputSize)));
var dataOutput = new ByteArrayDataOutput(outBuf);

Compressor compressor = CompressionMode.FAST.newCompressor();
compressor.compress(dataInput, dataOutput);

long compressedSize = dataOutput.getPosition();
bh.consume(dataOutput);

metrics.compressionRatio = compressedSize / (double) inputSize;
}

// @Benchmark
// public void compressLZ4High(Blackhole bh, CompressionMetrics metrics) throws IOException {
// int inputSize = offsets[1];
//
// var dataInput = new ByteBuffersDataInput(List.of(ByteBuffer.wrap(input)));
// var dataOutput = new ByteArrayDataOutput(outBuf);
//
// Compressor compressor = CompressionMode.HIGH_COMPRESSION.newCompressor();
// compressor.compress(dataInput, dataOutput);
//
// long compressedSize = dataOutput.getPosition();
// bh.consume(dataOutput);
//
// metrics.compressionRatio = compressedSize / (double) inputSize;
// }
}
@@ -0,0 +1,134 @@
/*
* Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
* or more contributor license agreements. Licensed under the "Elastic License
* 2.0", the "GNU Affero General Public License v3.0 only", and the "Server Side
* Public License v 1"; you may not use this file except in compliance with, at
* your election, the "Elastic License 2.0", the "GNU Affero General Public
* License v3.0 only", or the "Server Side Public License, v 1".
*/

package org.elasticsearch.benchmark.common.compress;

import org.apache.lucene.codecs.compressing.CompressionMode;
import org.apache.lucene.codecs.compressing.Compressor;
import org.apache.lucene.codecs.compressing.Decompressor;
import org.apache.lucene.store.ByteArrayDataInput;
import org.apache.lucene.store.ByteArrayDataOutput;
import org.apache.lucene.store.ByteBuffersDataInput;
import org.apache.lucene.util.BytesRef;
import org.elasticsearch.common.compress.fsst.FSST;
import org.openjdk.jmh.annotations.Benchmark;
import org.openjdk.jmh.annotations.BenchmarkMode;
import org.openjdk.jmh.annotations.Fork;
import org.openjdk.jmh.annotations.Level;
import org.openjdk.jmh.annotations.Measurement;
import org.openjdk.jmh.annotations.Mode;
import org.openjdk.jmh.annotations.OutputTimeUnit;
import org.openjdk.jmh.annotations.Param;
import org.openjdk.jmh.annotations.Scope;
import org.openjdk.jmh.annotations.Setup;
import org.openjdk.jmh.annotations.State;
import org.openjdk.jmh.annotations.Warmup;
import org.openjdk.jmh.infra.Blackhole;

import java.io.IOException;
import java.nio.ByteBuffer;
import java.nio.charset.StandardCharsets;
import java.nio.file.Files;
import java.nio.file.Path;
import java.util.List;
import java.util.concurrent.TimeUnit;

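/**
 * Compares FSST and Lucene LZ4 decompression of the same tiled 8 MiB input.
 * Compression happens once in {@link #setup()}; each benchmark invocation then times
 * decompression only (for FSST this includes rebuilding the decoder from the
 * serialized symbol table).
 */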
@Fork(1)
@Warmup(iterations = 2)
@Measurement(iterations = 3)
@BenchmarkMode(Mode.AverageTime)
@OutputTimeUnit(TimeUnit.MILLISECONDS)
@State(Scope.Benchmark)
public class FSSTDecompressBenchmark {

// @Param({ "fsst", "lz4_high", "lz4_fast" })
@Param({ "fsst", "lz4_fast" })
public String compressionType;

@Param("")
public String dataset;

// original file
private int originalSize;
private byte[] input;
private int[] offsets;

// compressed
private byte[] outBuf;
private int[] outOffsets;
private int compressedSize;

// decompressed
private byte[] decompressBuf;

// fsst specific
private FSST.SymbolTable symbolTable;

private static final int MB_8 = 8 * 1024 * 1024;

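// Same tiling as in FSSTCompressBenchmark: repeat the dataset until the buffer holds exactly 8 MiB.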
private byte[] concatenateTo8mb(byte[] contentBytes) {
byte[] bytes = new byte[MB_8 + 8];
int i = 0;
while (i < MB_8) {
int remaining = MB_8 - i;
int len = Math.min(contentBytes.length, remaining);
System.arraycopy(contentBytes, 0, bytes, i, len);
i += len;
}
return bytes;
}

@Setup(Level.Trial)
public void setup() throws IOException {
String content = Files.readString(Path.of(dataset), StandardCharsets.UTF_8);
byte[] contentBytes = FSST.toBytes(content);
originalSize = MB_8;
input = concatenateTo8mb(contentBytes);
offsets = new int[] { 0, originalSize };

outBuf = new byte[input.length];
outOffsets = new int[2];

decompressBuf = new byte[input.length];

if (compressionType.equals("fsst")) {
List<byte[]> sample = FSST.makeSample(input, offsets);
symbolTable = FSST.SymbolTable.buildSymbolTable(sample);
symbolTable.compressBulk(1, input, offsets, outBuf, outOffsets);
compressedSize = outOffsets[1];
} else if (compressionType.equals("lz4_fast")) {
var dataInput = new ByteBuffersDataInput(List.of(ByteBuffer.wrap(input, 0, originalSize)));
var dataOutput = new ByteArrayDataOutput(outBuf);
Compressor compressor = CompressionMode.FAST.newCompressor();
compressor.compress(dataInput, dataOutput);
compressedSize = dataOutput.getPosition();
} else if (compressionType.equals("lz4_high")) {
var dataInput = new ByteBuffersDataInput(List.of(ByteBuffer.wrap(input, 0, originalSize)));
var dataOutput = new ByteArrayDataOutput(outBuf);
Compressor compressor = CompressionMode.HIGH_COMPRESSION.newCompressor();
compressor.compress(dataInput, dataOutput);
compressedSize = dataOutput.getPosition();
}
}

@Benchmark
public void decompress(Blackhole bh) throws IOException {
if (compressionType.equals("fsst")) {
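// The decoder is rebuilt from the serialized symbol table on every invocation, so
// decoder deserialization is charged to each measurement alongside decompression.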
byte[] symbolTableBytes = symbolTable.exportToBytes();
FSST.Decoder decoder = FSST.Decoder.readFrom(symbolTableBytes);
int decompressedLen = FSST.decompress(outBuf, 0, outOffsets[1], decoder, decompressBuf);
// assert Arrays.equals(input, 0, originalSize, decompressBuf, 0, originalSize);
bh.consume(decompressBuf);
bh.consume(decompressedLen);
} else if (compressionType.equals("lz4_fast")) {
Decompressor decompressor = CompressionMode.FAST.newDecompressor();
var dataInput = new ByteArrayDataInput(outBuf, 0, compressedSize);
var outBytesRef = new BytesRef(decompressBuf);
decompressor.decompress(dataInput, originalSize, 0, originalSize, outBytesRef);
// assert Arrays.equals(input, 0, originalSize, outBytesRef.bytes, 0, originalSize);
bh.consume(outBytesRef);
} else if (compressionType.equals("lz4_high")) {
Decompressor decompressor = CompressionMode.HIGH_COMPRESSION.newDecompressor();
var dataInput = new ByteArrayDataInput(outBuf, 0, compressedSize);
var outBytesRef = new BytesRef(decompressBuf);
decompressor.decompress(dataInput, originalSize, 0, originalSize, outBytesRef);
// assert Arrays.equals(input, 0, originalSize, outBytesRef.bytes, 0, originalSize);
bh.consume(outBytesRef);
}
}
}
@@ -0,0 +1,125 @@
/*
* Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
* or more contributor license agreements. Licensed under the "Elastic License
* 2.0", the "GNU Affero General Public License v3.0 only", and the "Server Side
* Public License v 1"; you may not use this file except in compliance with, at
* your election, the "Elastic License 2.0", the "GNU Affero General Public
* License v3.0 only", or the "Server Side Public License, v 1".
*/

package org.elasticsearch.common.compress.fsst;

import org.apache.lucene.store.DataOutput;

import java.io.Closeable;
import java.io.IOException;

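/**
 * Accumulates variable-length lines and compresses them in batches with
 * {@link FSST.SymbolTable#compressBulk}, writing the compressed bytes to the wrapped
 * {@link DataOutput} and one compressed length per line to the {@link FSST.OffsetWriter}.
 * The buffer is flushed when either the data budget or the line budget is exhausted;
 * a line too large for the buffer is compressed on its own, in chunks.
 */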
public class BulkCompressBufferer implements Closeable {
private static final int MAX_LINES = 512;
private static final int MAX_INPUT_DATA = 128 << 10;
// FSST's worst case is 2x expansion: an escape byte per input byte
private static final int MAX_OUTPUT_DATA = MAX_INPUT_DATA * 2;

final byte[] inData = new byte[MAX_INPUT_DATA + 8]; // +8 bytes of slack, presumably for FSST's word-wide reads
final int[] inOffsets = new int[MAX_LINES + 1]; // one extra slot for the end offset of the last line
byte[] outBuf = new byte[MAX_OUTPUT_DATA + 8];
int[] outOffsets = new int[MAX_LINES + 1]; // one extra slot for the end offset of the last line
private final DataOutput finalOutput;
private final FSST.SymbolTable st;
private final FSST.OffsetWriter offsetWriter;
private int numLines = 0;
private int inOff = 0;

public BulkCompressBufferer(DataOutput finalOutput, FSST.SymbolTable st, FSST.OffsetWriter offsetWriter) {
this.finalOutput = finalOutput;
this.st = st;
this.offsetWriter = offsetWriter;
}

private void addToBuffer(byte[] bytes, int offset, int length) {
System.arraycopy(bytes, offset, inData, inOff, length);
int lineIdx = numLines;
inOffsets[lineIdx] = inOff;
inOff += length;
numLines++;
}

public void addLine(byte[] bytes, int offset, int length) throws IOException {
if (inOff + length > MAX_INPUT_DATA || numLines == MAX_LINES) {
// can't fit another line; flush what's buffered (the buffer may be empty if the
// very first line is itself oversized, in which case there is nothing to flush)
if (numLines > 0) {
compressAndWriteBuffer();
}

if (length > MAX_INPUT_DATA) {
// the line doesn't fit even in an empty buffer, so compress it on its own
compressAndWriteSingle(bytes, offset, length);
} else {
// fits in the now-empty buffer
addToBuffer(bytes, offset, length);
}
} else {
// fits
addToBuffer(bytes, offset, length);
}
}

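// Compresses a single oversized line in MAX_INPUT_DATA-sized chunks, reading directly
// from the caller's array. Chunks encoded against the same symbol table concatenate
// into one valid FSST stream, so the line is recorded as a single compressed length.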
private void compressAndWriteSingle(byte[] bytes, int offset, int length) throws IOException {
assert numLines == 0 && inOff == 0;

int off = offset;
int lenToWrite = length;
int totalOutLen = 0;

while (lenToWrite > 0) {
int len = Math.min(lenToWrite, MAX_INPUT_DATA);

// no copy here: compress directly from the caller's array, with inOffsets delimiting the current chunk
numLines = 1;
inOffsets[0] = off;
inOffsets[1] = off + len;

long outLine = st.compressBulk(numLines, bytes, inOffsets, outBuf, outOffsets);
assert outLine == numLines;
long outLen = outOffsets[(int) outLine];
totalOutLen += (int) outLen;
finalOutput.writeBytes(outBuf, 0, (int) outLen);

off += len;
lenToWrite -= len;

}
offsetWriter.addLen(totalOutLen);

clear();
}

private void compressAndWriteBuffer() throws IOException {
assert numLines <= MAX_LINES;
assert inOff <= MAX_INPUT_DATA;

// add a pseudo-offset to provide last line's length
inOffsets[numLines] = inOff;

long outLines = st.compressBulk(numLines, inData, inOffsets, outBuf, outOffsets);
assert outLines == numLines;
long fullOutLen = outOffsets[(int) outLines];

finalOutput.writeBytes(outBuf, 0, (int) fullOutLen);
for (int i = 0; i < numLines; ++i) {
int len = outOffsets[i + 1] - outOffsets[i];
offsetWriter.addLen(len);
}

clear();
}

void clear() {
numLines = inOff = 0;
}

@Override
public void close() throws IOException {
if (numLines > 0) {
compressAndWriteBuffer();
}
clear();
}
}