@@ -195,7 +195,7 @@ public void run()
cancelManualTraining();
if (throwable != null)
{
logger.error("Manual dictionary training failed for {}.{}", keyspaceName, tableName, throwable);
logger.error("Manual dictionary training failed for {}.{}: {}", keyspaceName, tableName, throwable.getMessage());
}
else
{
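For context on the logging change in the hunk above: SLF4J treats a Throwable passed as the final argument, beyond the {} placeholders, as an exception to log with its full stack trace, whereas throwable.getMessage() bound to a {} placeholder records only the message text. A minimal sketch contrasting the two call styles (class and method names here are illustrative):

import org.slf4j.Logger;
import org.slf4j.LoggerFactory;

public class TrainingFailureLoggingSketch
{
    private static final Logger logger = LoggerFactory.getLogger(TrainingFailureLoggingSketch.class);

    static void report(String keyspaceName, String tableName, Throwable throwable)
    {
        // Old style: the trailing Throwable is not consumed by a placeholder,
        // so SLF4J appends its full stack trace to the log entry.
        logger.error("Manual dictionary training failed for {}.{}", keyspaceName, tableName, throwable);

        // New style: only the message text is interpolated; the line stays
        // compact but the stack trace is not logged.
        logger.error("Manual dictionary training failed for {}.{}: {}", keyspaceName, tableName, throwable.getMessage());
    }
}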
src/java/org/apache/cassandra/tools/NodeProbe.java (1 addition, 1 deletion)
@@ -2753,7 +2753,7 @@ public long getCompressionDictionaryTrainingTotalSampleSize(String keyspace, Str
private CompressionDictionaryManagerMBean getDictionaryManagerProxy(String keyspace, String table) throws IOException
{
// Construct table-specific MBean name
String mbeanName = "org.apache.cassandra.db.compression:type=CompressionDictionaryManager,keyspace=" + keyspace + ",table=" + table;
String mbeanName = CompressionDictionaryManagerMBean.MBEAN_NAME + ",keyspace=" + keyspace + ",table=" + table;
try
{
ObjectName objectName = new ObjectName(mbeanName);
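The NodeProbe change above replaces the hard-coded JMX domain string with the CompressionDictionaryManagerMBean.MBEAN_NAME constant. Assuming that constant carries the same value as the removed literal, the table-specific ObjectName resolved by getDictionaryManagerProxy is built roughly as in this sketch (class and method names here are illustrative):

import javax.management.ObjectName;

public class DictionaryMBeanNameSketch
{
    // Assumed to match CompressionDictionaryManagerMBean.MBEAN_NAME (the literal removed above).
    static final String MBEAN_NAME = "org.apache.cassandra.db.compression:type=CompressionDictionaryManager";

    static ObjectName dictionaryManagerName(String keyspace, String table) throws Exception
    {
        // Appends the keyspace and table as JMX key properties, yielding e.g.
        // org.apache.cassandra.db.compression:type=CompressionDictionaryManager,keyspace=ks1,table=t1
        return new ObjectName(MBEAN_NAME + ",keyspace=" + keyspace + ",table=" + table);
    }

    public static void main(String[] args) throws Exception
    {
        System.out.println(dictionaryManagerName("ks1", "t1"));
    }
}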
@@ -26,7 +26,10 @@

import org.apache.cassandra.db.compression.ICompressionDictionaryTrainer.TrainingStatus;
import org.apache.cassandra.db.compression.ManualTrainingOptions;
import org.apache.cassandra.io.util.FileUtils;
import org.apache.cassandra.tools.NodeProbe;
import org.apache.cassandra.tools.nodetool.formatter.TableBuilder;
import org.apache.cassandra.utils.Clock;
import picocli.CommandLine.Command;
import picocli.CommandLine.Option;
import picocli.CommandLine.Parameters;
@@ -42,7 +45,7 @@ public class TrainCompressionDictionary extends AbstractCommand
private String table;

@Option(names = {"-d", "--max-sampling-duration"},
description = "Maximum time to collect samples before training dictionary (default: 600 seconds)")
description = "Maximum time to collect samples before training dictionary, in seconds. (default: 600)")
private int maxSamplingDurationSeconds = 600;

@Option(names = {"-r", "--sampling-rate"},
@@ -116,9 +119,9 @@ public void execute(NodeProbe probe)
"writing to new SSTable, you might consider running nodetool 'flush' along " +
"with this command to have chunk available for sampling)");
long maxWaitMillis = TimeUnit.SECONDS.toMillis(maxSamplingDurationSeconds + 300); // Add 5 minutes for training
long startTime = System.currentTimeMillis();
long startTime = Clock.Global.currentTimeMillis();

while (System.currentTimeMillis() - startTime < maxWaitMillis)
while (Clock.Global.currentTimeMillis() - startTime < maxWaitMillis)
{
String statusStr = probe.getCompressionDictionaryTrainingStatus(keyspace, table);
TrainingStatus status = TrainingStatus.valueOf(statusStr);
@@ -136,7 +139,7 @@ else if (TrainingStatus.FAILED == status)
// Display meaningful statistics
long sampleCount = probe.getCompressionDictionaryTrainingSampleCount(keyspace, table);
long totalSampleSize = probe.getCompressionDictionaryTrainingTotalSampleSize(keyspace, table);
long elapsedSeconds = (System.currentTimeMillis() - startTime) / 1000;
long elapsedSeconds = (Clock.Global.currentTimeMillis() - startTime) / 1000;
double sampleSizeMB = totalSampleSize / (1024.0 * 1024.0);

out.printf("\rStatus: %s | Samples: %d | Size: %.2f MiB | Elapsed: %ds",
@@ -172,44 +175,46 @@ private void showTrainingStatus(NodeProbe probe)
}

TrainingStatus status = TrainingStatus.valueOf(statusStr);

switch (status)
{
case NOT_STARTED:
out.printf("Trainer is not running for %s.%s%n", keyspace, table);
break;
case SAMPLING:
out.printf("Trainer is collecting sample data for %s.%s%n", keyspace, table);
showStatistics(probe, out);
break;
case TRAINING:
out.printf("Training is in progress for %s.%s%n", keyspace, table);
showStatistics(probe, out);
break;
case COMPLETED:
out.printf("Training is completed for %s.%s%n", keyspace, table);
showStatistics(probe, out, status);
break;
case FAILED:
err.printf("Training failed for %s.%s%n", keyspace, table);
showStatistics(probe, err, status);
break;
default:
err.printf("Encountered unexpected training status for %s.%s: %s%n", keyspace, table, status);
}
}

private void showStatistics(NodeProbe probe, PrintStream out)
private void showStatistics(NodeProbe probe, PrintStream out, TrainingStatus status)
{
try
{
long sampleCount = probe.getCompressionDictionaryTrainingSampleCount(keyspace, table);
long totalSampleSize = probe.getCompressionDictionaryTrainingTotalSampleSize(keyspace, table);
double sampleSizeMB = totalSampleSize / (1024.0 * 1024.0);
TableBuilder tableBuilder = new TableBuilder();
tableBuilder.add("keyspace", keyspace);
tableBuilder.add("table", table);
tableBuilder.add("status", status.name());

if (status == TrainingStatus.SAMPLING || status == TrainingStatus.TRAINING)
{
long sampleCount = probe.getCompressionDictionaryTrainingSampleCount(keyspace, table);
long totalSampleSize = probe.getCompressionDictionaryTrainingTotalSampleSize(keyspace, table);

tableBuilder.add("samples collected", String.format("%d", sampleCount));
tableBuilder.add("total sample size", FileUtils.stringifyFileSize(totalSampleSize));
}

out.printf(" Samples collected: %d%n", sampleCount);
out.printf(" Total sample size: %.2f MiB%n", sampleSizeMB);
tableBuilder.printTo(out);
}
catch (Exception e)
{
out.printf(" Unable to retrieve training statistics: %s%n", e.getMessage());
out.printf("Unable to retrieve training statistics: %s%n", e.getMessage());
}
}
}
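The reworked showStatistics replaces the hand-formatted printf lines with the nodetool TableBuilder and FileUtils.stringifyFileSize, both imported at the top of the file. Mirroring those calls with made-up sample values, the SAMPLING output would look roughly like the comment in this sketch (exact numbers and column padding are illustrative):

import org.apache.cassandra.io.util.FileUtils;
import org.apache.cassandra.tools.nodetool.formatter.TableBuilder;

public class StatisticsOutputSketch
{
    public static void main(String[] args)
    {
        TableBuilder tableBuilder = new TableBuilder();
        tableBuilder.add("keyspace", "ks1");
        tableBuilder.add("table", "t1");
        tableBuilder.add("status", "SAMPLING");
        tableBuilder.add("samples collected", String.format("%d", 1500));
        tableBuilder.add("total sample size", FileUtils.stringifyFileSize(12582912L));
        // Prints aligned rows, roughly:
        //   keyspace          ks1
        //   table             t1
        //   status            SAMPLING
        //   samples collected 1500
        //   total sample size 12 MiB
        tableBuilder.printTo(System.out);
    }
}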