@@ -7,10 +7,8 @@

import io.opentelemetry.sdk.extension.incubator.fileconfig.internal.model.ExperimentalComposableParentThresholdSamplerModel;
import io.opentelemetry.sdk.extension.incubator.fileconfig.internal.model.ExperimentalComposableProbabilitySamplerModel;
import io.opentelemetry.sdk.extension.incubator.fileconfig.internal.model.ExperimentalComposableRuleBasedSamplerModel;
import io.opentelemetry.sdk.extension.incubator.fileconfig.internal.model.ExperimentalComposableSamplerModel;
import io.opentelemetry.sdk.extension.incubator.trace.samplers.ComposableSampler;
import java.util.Map;

final class ComposableSamplerFactory
implements Factory<ExperimentalComposableSamplerModel, ComposableSampler> {
@@ -26,33 +24,46 @@ static ComposableSamplerFactory getInstance() {
@Override
public ComposableSampler create(
ExperimentalComposableSamplerModel model, DeclarativeConfigContext context) {
// We don't use the variable until later, but call validate first to confirm there are not
// multiple samplers.
ConfigKeyValue samplerKeyValue =
FileConfigUtil.validateSingleKeyValue(context, model, "composable sampler");
Contributor: It seems a little weird to just have a free-form string here as the "component name" parameter. Shouldn't this link directly to what field might be in the yaml, so it will be easier to debug when you do have a duplicate? Or at least something that will make it really obvious what might be duplicated so it's easy to find and correct?

Member Author: Yeah, the error messaging throughout the declarative config implementation is lame right now in this respect. In the most recent Java SIG, @trask and I talked about extending DeclarativeConfigProperties to track the location of the node with respect to the document, and referencing this location in error messages. This type of "document location context" is needed throughout for better error messages.

Member: maybe the name can be the yaml path?

Member Author: Yeah exactly

Member: Might rename "ConfigKeyValue" to "YamlContext" and take an optional parent context as argument. That way you can build the yaml path for free.
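For illustration only, a minimal sketch of that suggestion, assuming a parent-linked context. This is hypothetical: the PR keeps the ConfigKeyValue name, and the YamlContext class, its parent field, and yamlPath() method below are not part of the change.

```java
import io.opentelemetry.api.incubator.config.DeclarativeConfigProperties;
import javax.annotation.Nullable;

// Hypothetical sketch of the reviewer's suggestion, not part of this PR: a context
// node that remembers its parent so error messages can cite the full YAML path
// (e.g. "tracer_provider.sampler.probability") instead of a free-form resource name.
final class YamlContext {

  @Nullable private final YamlContext parent;
  private final String key;
  private final DeclarativeConfigProperties value;

  private YamlContext(
      @Nullable YamlContext parent, String key, DeclarativeConfigProperties value) {
    this.parent = parent;
    this.key = key;
    this.value = value;
  }

  static YamlContext root(String key, DeclarativeConfigProperties value) {
    return new YamlContext(null, key, value);
  }

  /** Creates a child node that inherits this node as its parent. */
  YamlContext child(String key, DeclarativeConfigProperties value) {
    return new YamlContext(this, key, value);
  }

  /** Builds the dotted YAML path from the root, for use in error messages. */
  String yamlPath() {
    return parent == null ? key : parent.yamlPath() + "." + key;
  }

  String getKey() {
    return key;
  }

  DeclarativeConfigProperties getValue() {
    return value;
  }
}
```

Each factory would then pass a child context downward, so a duplicate-key error could report, say, tracer_provider.sampler rather than a free-form name like "composable sampler".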


if (model.getAlwaysOn() != null) {
return ComposableSampler.alwaysOn();
}
if (model.getAlwaysOff() != null) {
return ComposableSampler.alwaysOff();
}
ExperimentalComposableProbabilitySamplerModel probability = model.getProbability();
if (probability != null) {
Double ratio = probability.getRatio();
if (ratio == null) {
ratio = 1.0d;
}
return ComposableSampler.probability(ratio);
if (model.getProbability() != null) {
return createProbabilitySampler(model.getProbability());
}
ExperimentalComposableRuleBasedSamplerModel ruleBased = model.getRuleBased();
if (ruleBased != null) {
return ComposableRuleBasedSamplerFactory.getInstance().create(ruleBased, context);
if (model.getRuleBased() != null) {
return ComposableRuleBasedSamplerFactory.getInstance().create(model.getRuleBased(), context);
}
ExperimentalComposableParentThresholdSamplerModel parentThreshold = model.getParentThreshold();
if (parentThreshold != null) {
ExperimentalComposableSamplerModel rootModel =
FileConfigUtil.requireNonNull(parentThreshold.getRoot(), "parent threshold sampler root");
ComposableSampler rootSampler = INSTANCE.create(rootModel, context);
return ComposableSampler.parentThreshold(rootSampler);
if (model.getParentThreshold() != null) {
return createParentThresholdSampler(model.getParentThreshold(), context);
}
Map.Entry<String, ?> keyValue =
FileConfigUtil.getSingletonMapEntry(model.getAdditionalProperties(), "composable sampler");
return context.loadComponent(ComposableSampler.class, keyValue.getKey(), keyValue.getValue());

return context.loadComponent(ComposableSampler.class, samplerKeyValue);
}

private static ComposableSampler createProbabilitySampler(
ExperimentalComposableProbabilitySamplerModel probabilityModel) {
Double ratio = probabilityModel.getRatio();
if (ratio == null) {
ratio = 1.0d;
}
return ComposableSampler.probability(ratio);
}

private static ComposableSampler createParentThresholdSampler(
ExperimentalComposableParentThresholdSamplerModel parentThresholdModel,
DeclarativeConfigContext context) {
ExperimentalComposableSamplerModel rootModel =
FileConfigUtil.requireNonNull(
parentThresholdModel.getRoot(), "parent threshold sampler root");
ComposableSampler rootSampler = INSTANCE.create(rootModel, context);
return ComposableSampler.parentThreshold(rootSampler);
}
}
@@ -0,0 +1,32 @@
/*
* Copyright The OpenTelemetry Authors
* SPDX-License-Identifier: Apache-2.0
*/

package io.opentelemetry.sdk.extension.incubator.fileconfig;

import io.opentelemetry.api.incubator.config.DeclarativeConfigProperties;

/** A key value pair for a YAML mapping node. */
class ConfigKeyValue {

private final String key;
private final DeclarativeConfigProperties value;

private ConfigKeyValue(String key, DeclarativeConfigProperties value) {
this.key = key;
this.value = value;
}

static ConfigKeyValue of(String key, DeclarativeConfigProperties value) {
return new ConfigKeyValue(key, value);
}

String getKey() {
return key;
}

DeclarativeConfigProperties getValue() {
return value;
}
}
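For orientation, here is a simplified sketch of how the new helpers compose in a factory, modeled on the updated LogRecordExporterFactory below. SomeComponent, SomeComponentModel, and the "some component" label are placeholders for illustration, not types in this PR.

```java
// Placeholder types for illustration; real factories use concrete model/SDK types
// such as LogRecordExporterModel and LogRecordExporter.
final class SomeComponentFactory implements Factory<SomeComponentModel, SomeComponent> {

  @Override
  public SomeComponent create(SomeComponentModel model, DeclarativeConfigContext context) {
    // Confirm the model's YAML mapping has exactly one entry (one component type),
    // and capture that entry's key and its DeclarativeConfigProperties value.
    ConfigKeyValue keyValue =
        FileConfigUtil.validateSingleKeyValue(context, model, "some component");
    // Resolve the key against registered ComponentProviders and instantiate it;
    // loadComponent also registers Closeable components for shutdown.
    return context.loadComponent(SomeComponent.class, keyValue);
  }
}
```

This is the same shape the updated LogRecordExporterFactory takes, and that ComposableSamplerFactory falls back to for custom samplers after checking the well-known model fields.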
@@ -82,10 +82,10 @@ SpiHelper getSpiHelper() {
* @throws DeclarativeConfigException if no matching providers are found, or if multiple are found
* (i.e. conflict), or if {@link ComponentProvider#create(DeclarativeConfigProperties)} throws
*/
@SuppressWarnings({"unchecked", "rawtypes"})
<T> T loadComponent(Class<T> type, String name, Object model) {
DeclarativeConfigProperties config =
DeclarativeConfiguration.toConfigProperties(model, spiHelper.getComponentLoader());
@SuppressWarnings({"unchecked"})
<T> T loadComponent(Class<T> type, ConfigKeyValue configKeyValue) {
String name = configKeyValue.getKey();
DeclarativeConfigProperties config = configKeyValue.getValue();

// TODO(jack-berg): cache loaded component providers
List<ComponentProvider> componentProviders = spiHelper.load(ComponentProvider.class);
@@ -115,6 +115,9 @@ <T> T loadComponent(Class<T> type, String name, Object model) {

try {
Object component = provider.create(config);
if (component instanceof Closeable) {
closeables.add((Closeable) component);
}
if (component != null && !type.isInstance(component)) {
throw new DeclarativeConfigException(
"Error configuring "
@@ -8,55 +8,37 @@
import static java.util.stream.Collectors.joining;

import io.opentelemetry.api.incubator.config.DeclarativeConfigException;
import java.util.Map;
import io.opentelemetry.api.incubator.config.DeclarativeConfigProperties;
import java.util.Set;
import javax.annotation.Nullable;

final class FileConfigUtil {

private FileConfigUtil() {}

static <T> T assertNotNull(@Nullable T object, String description) {
if (object == null) {
throw new NullPointerException(description + " is null");
}
return object;
}

static <T> T requireNonNull(@Nullable T object, String description) {
if (object == null) {
throw new DeclarativeConfigException(description + " is required but is null");
}
return object;
}

static <T> Map.Entry<String, T> getSingletonMapEntry(
Map<String, T> additionalProperties, String resourceName) {
if (additionalProperties.isEmpty()) {
throw new DeclarativeConfigException(resourceName + " must be set");
}
if (additionalProperties.size() > 1) {
throw new DeclarativeConfigException(
"Invalid configuration - multiple "
+ resourceName
+ "s set: "
+ additionalProperties.keySet().stream().collect(joining(",", "[", "]")));
}
return additionalProperties.entrySet().stream()
.findFirst()
.orElseThrow(
() ->
new IllegalStateException(
"Missing " + resourceName + ". This is a programming error."));
}

static void requireNullResource(
@Nullable Object resource, String resourceName, Map<String, ?> additionalProperties) {
if (resource != null) {
static ConfigKeyValue validateSingleKeyValue(
DeclarativeConfigContext context, Object model, String resourceName) {
DeclarativeConfigProperties modelConfigProperties =
DeclarativeConfiguration.toConfigProperties(
model, context.getSpiHelper().getComponentLoader());
Set<String> propertyKeys = modelConfigProperties.getPropertyKeys();
if (propertyKeys.size() != 1) {
String suffix =
propertyKeys.isEmpty()
? ""
: ": " + propertyKeys.stream().collect(joining(",", "[", "]"));
throw new DeclarativeConfigException(
"Invalid configuration - multiple "
+ resourceName
+ "s set: "
+ additionalProperties.keySet().stream().collect(joining(",", "[", "]")));
resourceName + " must have exactly one entry but has " + propertyKeys.size() + suffix);
}
String key = propertyKeys.iterator().next();
DeclarativeConfigProperties value = modelConfigProperties.getStructured(key);
return ConfigKeyValue.of(key, value == null ? DeclarativeConfigProperties.empty() : value);
}
}
@@ -7,8 +7,6 @@

import io.opentelemetry.sdk.extension.incubator.fileconfig.internal.model.LogRecordExporterModel;
import io.opentelemetry.sdk.logs.export.LogRecordExporter;
import java.util.LinkedHashMap;
import java.util.Map;

final class LogRecordExporterFactory implements Factory<LogRecordExporterModel, LogRecordExporter> {
private static final LogRecordExporterFactory INSTANCE = new LogRecordExporterFactory();
@@ -21,26 +19,8 @@ static LogRecordExporterFactory getInstance() {

@Override
public LogRecordExporter create(LogRecordExporterModel model, DeclarativeConfigContext context) {
Map<String, Object> exporterResourceByName = new LinkedHashMap<>();

if (model.getOtlpHttp() != null) {
exporterResourceByName.put("otlp_http", model.getOtlpHttp());
}
if (model.getOtlpGrpc() != null) {
exporterResourceByName.put("otlp_grpc", model.getOtlpGrpc());
}
if (model.getOtlpFileDevelopment() != null) {
exporterResourceByName.put("otlp_file/development", model.getOtlpFileDevelopment());
}
if (model.getConsole() != null) {
exporterResourceByName.put("console", model.getConsole());
}
exporterResourceByName.putAll(model.getAdditionalProperties());

Map.Entry<String, ?> keyValue =
FileConfigUtil.getSingletonMapEntry(exporterResourceByName, "log record exporter");
LogRecordExporter metricExporter =
context.loadComponent(LogRecordExporter.class, keyValue.getKey(), keyValue.getValue());
return context.addCloseable(metricExporter);
ConfigKeyValue logRecordExporterKeyValue =
FileConfigUtil.validateSingleKeyValue(context, model, "log record exporter");
return context.loadComponent(LogRecordExporter.class, logRecordExporterKeyValue);
}
}
@@ -9,15 +9,13 @@
import io.opentelemetry.sdk.extension.incubator.fileconfig.internal.model.BatchLogRecordProcessorModel;
import io.opentelemetry.sdk.extension.incubator.fileconfig.internal.model.LogRecordExporterModel;
import io.opentelemetry.sdk.extension.incubator.fileconfig.internal.model.LogRecordProcessorModel;
import io.opentelemetry.sdk.extension.incubator.fileconfig.internal.model.LogRecordProcessorPropertyModel;
import io.opentelemetry.sdk.extension.incubator.fileconfig.internal.model.SimpleLogRecordProcessorModel;
import io.opentelemetry.sdk.logs.LogRecordProcessor;
import io.opentelemetry.sdk.logs.export.BatchLogRecordProcessor;
import io.opentelemetry.sdk.logs.export.BatchLogRecordProcessorBuilder;
import io.opentelemetry.sdk.logs.export.LogRecordExporter;
import io.opentelemetry.sdk.logs.export.SimpleLogRecordProcessor;
import java.time.Duration;
import java.util.Map;

final class LogRecordProcessorFactory
implements Factory<LogRecordProcessorModel, LogRecordProcessor> {
@@ -33,54 +31,61 @@ static LogRecordProcessorFactory getInstance() {
@Override
public LogRecordProcessor create(
LogRecordProcessorModel model, DeclarativeConfigContext context) {
BatchLogRecordProcessorModel batchModel = model.getBatch();
if (batchModel != null) {
LogRecordExporterModel exporterModel =
FileConfigUtil.requireNonNull(
batchModel.getExporter(), "batch log record processor exporter");
// We don't use the variable until later, but call validate first to confirm there are not
// multiple processors.
ConfigKeyValue processorKeyValue =
FileConfigUtil.validateSingleKeyValue(context, model, "log record processor");

LogRecordExporter logRecordExporter =
LogRecordExporterFactory.getInstance().create(exporterModel, context);
BatchLogRecordProcessorBuilder builder = BatchLogRecordProcessor.builder(logRecordExporter);
if (batchModel.getExportTimeout() != null) {
builder.setExporterTimeout(Duration.ofMillis(batchModel.getExportTimeout()));
}
if (batchModel.getMaxExportBatchSize() != null) {
builder.setMaxExportBatchSize(batchModel.getMaxExportBatchSize());
}
if (batchModel.getMaxQueueSize() != null) {
builder.setMaxQueueSize(batchModel.getMaxQueueSize());
}
if (batchModel.getScheduleDelay() != null) {
builder.setScheduleDelay(Duration.ofMillis(batchModel.getScheduleDelay()));
}
MeterProvider meterProvider = context.getMeterProvider();
if (meterProvider != null) {
builder.setMeterProvider(meterProvider);
}

return context.addCloseable(builder.build());
if (model.getBatch() != null) {
return createBatchLogRecordProcessor(model.getBatch(), context);
}
if (model.getSimple() != null) {
return createSimpleLogRecordProcessor(model.getSimple(), context);
}

SimpleLogRecordProcessorModel simpleModel = model.getSimple();
if (simpleModel != null) {
LogRecordExporterModel exporterModel =
FileConfigUtil.requireNonNull(
simpleModel.getExporter(), "simple log record processor exporter");
LogRecordExporter logRecordExporter =
LogRecordExporterFactory.getInstance().create(exporterModel, context);
MeterProvider meterProvider = context.getMeterProvider();
return context.addCloseable(
SimpleLogRecordProcessor.builder(logRecordExporter)
.setMeterProvider(() -> meterProvider)
.build());
return context.loadComponent(LogRecordProcessor.class, processorKeyValue);
}

private static LogRecordProcessor createBatchLogRecordProcessor(
BatchLogRecordProcessorModel batchModel, DeclarativeConfigContext context) {
LogRecordExporterModel exporterModel =
FileConfigUtil.requireNonNull(
batchModel.getExporter(), "batch log record processor exporter");

LogRecordExporter logRecordExporter =
LogRecordExporterFactory.getInstance().create(exporterModel, context);
BatchLogRecordProcessorBuilder builder = BatchLogRecordProcessor.builder(logRecordExporter);
if (batchModel.getExportTimeout() != null) {
builder.setExporterTimeout(Duration.ofMillis(batchModel.getExportTimeout()));
}
if (batchModel.getMaxExportBatchSize() != null) {
builder.setMaxExportBatchSize(batchModel.getMaxExportBatchSize());
}
if (batchModel.getMaxQueueSize() != null) {
builder.setMaxQueueSize(batchModel.getMaxQueueSize());
}
if (batchModel.getScheduleDelay() != null) {
builder.setScheduleDelay(Duration.ofMillis(batchModel.getScheduleDelay()));
}
MeterProvider meterProvider = context.getMeterProvider();
if (meterProvider != null) {
builder.setMeterProvider(meterProvider);
}

return context.addCloseable(builder.build());
}

Map.Entry<String, LogRecordProcessorPropertyModel> keyValue =
FileConfigUtil.getSingletonMapEntry(
model.getAdditionalProperties(), "log record processor");
LogRecordProcessor logRecordProcessor =
context.loadComponent(LogRecordProcessor.class, keyValue.getKey(), keyValue.getValue());
return context.addCloseable(logRecordProcessor);
private static LogRecordProcessor createSimpleLogRecordProcessor(
SimpleLogRecordProcessorModel simpleModel, DeclarativeConfigContext context) {
LogRecordExporterModel exporterModel =
FileConfigUtil.requireNonNull(
simpleModel.getExporter(), "simple log record processor exporter");
LogRecordExporter logRecordExporter =
LogRecordExporterFactory.getInstance().create(exporterModel, context);
MeterProvider meterProvider = context.getMeterProvider();
return context.addCloseable(
SimpleLogRecordProcessor.builder(logRecordExporter)
.setMeterProvider(() -> meterProvider)
.build());
}
}