Skip to content
Open
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension


Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
2 changes: 1 addition & 1 deletion e2e-tests/README.md
Original file line number Diff line number Diff line change
Expand Up @@ -17,7 +17,7 @@ npm install

## Running Tests

Test can be run completely or each step can be run separately.
Tests can be run completely or each step can be run separately.

### `npm run test:e2e`

Expand Down
4 changes: 2 additions & 2 deletions e2e-tests/package.json
Original file line number Diff line number Diff line change
Expand Up @@ -4,12 +4,12 @@
"description": "End-to-end tests for OpenTelemetry JS",
"version": "0.7.0",
"scripts": {
"test:e2e": "npm run stop-collector; npm run run-collector && npm run export-telemetry && npm run verify || npm run stop-collector",
"test:e2e": "npm run run-collector; npm run stop-collector; node test.mjs && npm run verify || npm run stop-collector",
"lint": "eslint . --ext .mjs",
"lint:fix": "eslint . --ext .mjs --fix",
"run-collector": "docker run --pull=always -d --rm --name otelcol-e2e -v $(pwd)/collector-config.yaml:/etc/otelcol/config.yaml -v $(pwd)/collector-output.json:/tmp/collector-output.json -p 4317:4317 -p 4318:4318 -w /tmp otel/opentelemetry-collector-contrib:latest --config /etc/otelcol/config.yaml && sleep 5",
"export-telemetry": "node test.mjs; sleep 5",
"prerun-collector": "node -e \"require('fs').writeFileSync('collector-output.json', '')\"",
"prerun-collector": "node -e \"const fs = require('fs'); fs.writeFileSync('collector-output.json', ''); fs.chmodSync('collector-output.json', 0o666);\"",
"stop-collector": "docker stop otelcol-e2e",
"verify": "node verify.mjs"
},
Expand Down
176 changes: 124 additions & 52 deletions e2e-tests/test.mjs
Original file line number Diff line number Diff line change
Expand Up @@ -18,8 +18,8 @@ import { OTLPTraceExporter } from '@opentelemetry/exporter-trace-otlp-proto';
import { OTLPMetricExporter } from '@opentelemetry/exporter-metrics-otlp-proto';
import { PeriodicExportingMetricReader } from '@opentelemetry/sdk-metrics';
import { OTLPLogExporter } from '@opentelemetry/exporter-logs-otlp-proto';
import { SimpleLogRecordProcessor } from '@opentelemetry/sdk-logs';
import { SimpleSpanProcessor } from '@opentelemetry/sdk-trace-base';
import { BatchLogRecordProcessor } from '@opentelemetry/sdk-logs';
import { BatchSpanProcessor } from '@opentelemetry/sdk-trace-base';
import {
diag,
DiagConsoleLogger,
Expand All @@ -28,69 +28,141 @@ import {
metrics,
} from '@opentelemetry/api';
import { logs } from '@opentelemetry/api-logs';
import { exec } from 'child_process';
import { promisify } from 'util';

// Enable diagnostic logging (optional)
diag.setLogger(new DiagConsoleLogger(), DiagLogLevel.INFO);
const execAsync = promisify(exec);

// Enable diagnostic logging
diag.setLogger(new DiagConsoleLogger(), DiagLogLevel.VERBOSE);

const collectorUrl = 'http://localhost:4318/v1';

// Set up trace exporter with SimpleSpanProcessor
const traceExporter = new OTLPTraceExporter({
url: `${collectorUrl}/traces`,
});
const spanProcessors = [new SimpleSpanProcessor(traceExporter)];

// Set up metric exporter
const metricExporter = new OTLPMetricExporter({
url: `${collectorUrl}/metrics`,
});
const metricReader = new PeriodicExportingMetricReader({
exporter: metricExporter,
exportIntervalMillis: 1000,
exportTimeoutMillis: 1000,
});

// Set up log exporter
const logExporter = new OTLPLogExporter({
url: `${collectorUrl}/logs`,
});
const logRecordProcessors = [new SimpleLogRecordProcessor(logExporter)];

// Set up OpenTelemetry SDK
const sdk = new NodeSDK({
spanProcessors,
metricReader,
logRecordProcessors,
});
/**
* Scenario: Collector becomes available after initial failures
*
 * We assume that the collector is down when the test starts
*
* This test verifies that:
* 1. When the collector is unavailable, exports fail with retryable errors
 * 2. Telemetry can be created while the collector is down
* 3. Batch processors buffer the data and retry with exponential backoff
* 4. When collector starts, retries succeed
* 5. All telemetry is successfully exported
*
*/
async function testCollectorRecovery() {
console.log('[Step 1] Setting up OpenTelemetry SDK...');

const traceExporter = new OTLPTraceExporter({
url: `${collectorUrl}/traces`,
});

const metricExporter = new OTLPMetricExporter({
url: `${collectorUrl}/metrics`,
});

const logExporter = new OTLPLogExporter({
url: `${collectorUrl}/logs`,
});

const sdk = new NodeSDK({
spanProcessors: [
new BatchSpanProcessor(traceExporter, {
scheduledDelayMillis: 1000,
exportTimeoutMillis: 30000,
}),
],
metricReader: new PeriodicExportingMetricReader({
exporter: metricExporter,
exportIntervalMillis: 30000,
exportTimeoutMillis: 30000,
}),
logRecordProcessors: [
new BatchLogRecordProcessor(logExporter, {
scheduledDelayMillis: 1000,
exportTimeoutMillis: 30000,
}),
],
});

async function main() {
sdk.start();
console.log('✓ SDK started\n');

const tracer = trace.getTracer('retry-test-tracer');
const meter = metrics.getMeter('retry-test-meter');
const logger = logs.getLogger('retry-test-logger');

console.log('[Step 2] Creating telemetry while collector is DOWN...');

// Create a span
const tracer = trace.getTracer('example-tracer');
const span = tracer.startSpan('example-span');
span.setAttribute('example-attribute', 'value');
span.end();
const span1 = tracer.startSpan('span-before-collector-start');
span1.setAttribute('status', 'created-while-down');
span1.setAttribute('attempt', 1);
span1.end();

// Create a metric
const meter = metrics.getMeter('example-meter');
const counter = meter.createUpDownCounter('example_counter');
counter.add(42, { foo: 'bar' });
const counter = meter.createCounter('test_counter');
counter.add(10, { status: 'created-while-down' });

// Create a log
const logger = logs.getLogger('example-logger');
logger.emit({
severityText: 'INFO',
body: 'test-log-body',
attributes: { foo: 'bar' },
eventName: 'test-log-event',
body: 'log-before-collector-start',
attributes: { status: 'created-while-down' },
});

// flushes exporters and shuts down the SDK
console.log('✓ Created 1 span, 1 metric, 1 log\n');

console.log('[Step 3] Waiting 2 seconds for first export attempt to fail...');
await new Promise(resolve => setTimeout(resolve, 2000));

console.log(
'✓ First export attempt should have failed, retries should be happening\n'
);

console.log(
'[Step 4] Starting OpenTelemetry Collector (during retry attempts)...'
);
await execAsync('npm run run-collector');

// Wait for collector to be ready (within retry window)
console.log('Waiting for collector to become ready (2s)...');
await new Promise(resolve => setTimeout(resolve, 2000));
console.log('✓ Collector should be ready, retries should now succeed\n');

// Step 5: Create more telemetry after collector is up
console.log('[Step 5] Creating telemetry while collector is UP...');

const span2 = tracer.startSpan('span-after-collector-start');
span2.setAttribute('status', 'created-while-up');
span2.setAttribute('attempt', 2);
span2.end();

counter.add(20, { status: 'created-while-up' });

logger.emit({
severityText: 'INFO',
body: 'log-after-collector-start',
attributes: { status: 'created-while-up' },
});

// Step 6: Wait for exports to complete
console.log('[Step 6] Waiting 5 seconds for exports to complete...');
await new Promise(resolve => setTimeout(resolve, 5000));
console.log('✓ Export window completed\n');

// Step 7: Shutdown SDK
console.log('[Step 7] Shutting down SDK...');
await sdk.shutdown();
console.log('✓ SDK shutdown complete\n');
}

async function main() {
try {
await testCollectorRecovery();
process.exit(0);
} catch (error) {
console.error('\n✗ SCENARIO FAILED:', error);
process.exit(1);
}
}

main().catch(err => {
console.error('Error running example:', err);
process.exit(1);
});
main();
119 changes: 80 additions & 39 deletions e2e-tests/verify.mjs
100755 → 100644
Original file line number Diff line number Diff line change
Expand Up @@ -24,25 +24,55 @@ let verifiedMetric = false;
let verifiedLog = false;

const lines = data.split('\n').filter(Boolean);

// Extract all individual telemetry items
const allSpans = [];
const allMetrics = [];
const allLogs = [];

for (const line of lines) {
const parsed = JSON.parse(line);
if (parsed.resourceSpans) {
console.log('found span');
verifySpan(parsed.resourceSpans[0].scopeSpans[0].spans[0]);
verifiedSpan = true;
parsed.resourceSpans.forEach(rs => {
rs.scopeSpans?.forEach(ss => {
ss.spans?.forEach(span => allSpans.push(span));
});
});
}
if (parsed.resourceMetrics) {
console.log('found metric');
verifyMetric(parsed.resourceMetrics[0].scopeMetrics[0].metrics[0]);
verifiedMetric = true;
parsed.resourceMetrics.forEach(rm => {
rm.scopeMetrics?.forEach(sm => {
sm.metrics?.forEach(metric => allMetrics.push(metric));
});
});
}
if (parsed.resourceLogs) {
console.log('found log');
verifyLog(parsed.resourceLogs[0].scopeLogs[0].logRecords[0]);
verifiedLog = true;
parsed.resourceLogs.forEach(rl => {
rl.scopeLogs?.forEach(sl => {
sl.logRecords?.forEach(log => allLogs.push(log));
});
});
}
}

if (allSpans.length > 0) {
console.log('found spans');
verifySpans(allSpans);
verifiedSpan = true;
}

if (allMetrics.length > 0) {
console.log('found metrics');
verifyMetrics(allMetrics);
verifiedMetric = true;
}

if (allLogs.length > 0) {
console.log('found logs');
verifyLogs(allLogs);
verifiedLog = true;
}

if (!verifiedSpan) {
console.error('No spans found in the output');
process.exit(1);
Expand All @@ -56,49 +86,60 @@ if (!verifiedLog) {
process.exit(1);
}

function verifySpan(span) {
const expectedName = 'example-span';
if (span.name !== expectedName) {
console.error(`Expected span name ${expectedName}, but got '${span.name}'`);
function verifySpans(spans) {
const expectedSpanNames = [
'span-before-collector-start',
'span-after-collector-start',
];

if (spans.length < 2) {
console.error(`Expected at least 2 spans, but got ${spans.length}`);
process.exit(1);
}
}

function verifyMetric(metric) {
const expectedName = 'example_counter';
const expectedValue = 42;
const foundSpanNames = spans.map(s => s.name);

if (metric.name !== expectedName) {
console.error(
`Expected metric name ${expectedName}, but got '${metric.name}'`
);
expectedSpanNames.forEach(expectedName => {
if (!foundSpanNames.includes(expectedName)) {
console.error(`Expected span '${expectedName}' not found`);
process.exit(1);
}
});
}

function verifyMetrics(metrics) {
const testCounter = metrics.find(m => m.name === 'test_counter');
if (!testCounter) {
console.error("Expected metric 'test_counter' not found");
process.exit(1);
}
if (
metric.sum &&
metric.sum.dataPoints &&
metric.sum.dataPoints[0].asDouble !== expectedValue
) {

const dataPoints = testCounter.sum?.dataPoints || [];
if (dataPoints.length < 2) {
console.error(
`Expected metric value ${expectedValue}, but got '${metric.sum.dataPoints[0].asDouble}'`
`Expected at least 2 data points, but got ${dataPoints.length}`
);
process.exit(1);
}
}

function verifyLog(log) {
const expectedBody = 'test-log-body';
if (log.body && log.body.stringValue !== expectedBody) {
console.error(
`Expected log body '${expectedBody}', but got '${log.body.stringValue}'`
);
process.exit(1);
}
function verifyLogs(logs) {
const expectedLogBodies = [
'log-before-collector-start',
'log-after-collector-start',
];

if (log.eventName !== 'test-log-event') {
console.error(
`Expected log event name 'test-log-event', but got '${log.eventName}'`
);
if (logs.length < 2) {
console.error(`Expected at least 2 logs, but got ${logs.length}`);
process.exit(1);
}

const foundLogBodies = logs.map(l => l.body?.stringValue);

expectedLogBodies.forEach(expectedBody => {
if (!foundLogBodies.includes(expectedBody)) {
console.error(`Expected log '${expectedBody}' not found`);
process.exit(1);
}
});
}
Original file line number Diff line number Diff line change
Expand Up @@ -27,6 +27,7 @@ export interface ExportResponseFailure {
export interface ExportResponseRetryable {
status: 'retryable';
retryInMillis?: number;
error?: Error;
}

export type ExportResponse =
Expand Down
Loading