
Commit fdd6900

Refine test to show minimal metadata needed for errors to be imported successfully

1 parent 63cf922 commit fdd6900

5 files changed, +31 -54 lines changed

src/ServiceControl.AcceptanceTests/Recoverability/MessageFailures/When_processing_message_with_missing_metadata_failed.cs

Lines changed: 20 additions & 18 deletions

@@ -42,13 +42,13 @@ public async Task TimeSent_should_not_be_casted()
 
             var sentTime = DateTime.Parse("2014-11-11T02:26:58.000462Z");
 
-            await Define<MyContext>(ctx => { ctx.TimeSent = sentTime; })
+            await Define<MyContext>(ctx => ctx.TimeSent = sentTime)
                 .WithEndpoint<Failing>()
                 .Done(async c =>
                 {
                     var result = await this.TryGet<FailedMessageView>($"/api/errors/last/{c.UniqueMessageId}");
                     failure = result;
-                    return c.UniqueMessageId != null & result;
+                    return (c.UniqueMessageId != null) & result;
                 })
                 .Run();
 
@@ -61,22 +61,31 @@ public async Task Should_be_able_to_get_the_message_by_id()
         {
             FailedMessageView failure = null;
 
-            await Define<MyContext>()
+            var testStartTime = DateTime.UtcNow;
+
+            var context = await Define<MyContext>()
                 .WithEndpoint<Failing>()
                 .Done(async c =>
                 {
                     var result = await this.TryGet<FailedMessageView>($"/api/errors/last/{c.UniqueMessageId}");
                     failure = result;
-                    return c.UniqueMessageId != null & result;
+                    return (c.UniqueMessageId != null) & result;
                 })
                 .Run();
 
             Assert.That(failure, Is.Not.Null);
+
+            //No failure time will result in utc now being used
+            Assert.That(failure.TimeOfFailure, Is.GreaterThan(testStartTime));
+
+            // ServicePulse assumes that the receiving endpoint name is set making sure that its present
+            Assert.That(failure.ReceivingEndpoint, Is.Not.Null);
+            Assert.That(failure.ReceivingEndpoint.Name, Is.EqualTo(context.EndpointNameOfReceivingEndpoint));
         }
 
-        public class Failing : EndpointConfigurationBuilder
+        class Failing : EndpointConfigurationBuilder
         {
-            public Failing() => EndpointSetup<DefaultServerWithoutAudit>(c => { c.Recoverability().Delayed(x => x.NumberOfRetries(0)); });
+            public Failing() => EndpointSetup<DefaultServerWithoutAudit>(c => c.Recoverability().Delayed(x => x.NumberOfRetries(0)));
 
             class SendFailedMessage : DispatchRawMessages<MyContext>
             {
@@ -89,30 +98,23 @@ protected override TransportOperations CreateMessage(MyContext context)
                     var headers = new Dictionary<string, string>
                     {
                         [Headers.MessageId] = context.MessageId,
-                        [Headers.ProcessingEndpoint] = context.EndpointNameOfReceivingEndpoint,
-                        ["NServiceBus.ExceptionInfo.ExceptionType"] = "2014-11-11 02:26:57:767462 Z",
-                        ["NServiceBus.ExceptionInfo.Message"] = "An error occurred while attempting to extract logical messages from transport message NServiceBus.TransportMessage",
-                        ["NServiceBus.ExceptionInfo.InnerExceptionType"] = "System.Exception",
-                        ["NServiceBus.ExceptionInfo.Source"] = "NServiceBus.Core",
-                        ["NServiceBus.ExceptionInfo.StackTrace"] = string.Empty,
                         ["NServiceBus.FailedQ"] = Conventions.EndpointNamingConvention(typeof(Failing)),
-                        ["NServiceBus.TimeOfFailure"] = "2014-11-11 02:26:58:000462 Z"
+                        [Headers.ProcessingMachine] = "unknown", // This is needed for endpoint detection to work, endpoint name is detected from the FailedQ header
                     };
+
                     if (context.TimeSent.HasValue)
                     {
                         headers["NServiceBus.TimeSent"] = DateTimeOffsetHelper.ToWireFormattedString(context.TimeSent.Value);
                     }
 
-                    var outgoingMessage = new OutgoingMessage(context.MessageId, headers, new byte[0]);
+                    var outgoingMessage = new OutgoingMessage(context.MessageId, headers, Array.Empty<byte>());
 
-                    return new TransportOperations(
-                        new TransportOperation(outgoingMessage, new UnicastAddressTag("error"))
-                    );
+                    return new TransportOperations(new TransportOperation(outgoingMessage, new UnicastAddressTag("error")));
                 }
             }
         }
 
-        public class MyContext : ScenarioContext
+        class MyContext : ScenarioContext
        {
            public string MessageId { get; set; }
 
src/ServiceControl.Persistence/FailureDetails.cs

Lines changed: 1 addition & 6 deletions

@@ -4,14 +4,9 @@ namespace ServiceControl.Contracts.Operations
 
     public class FailureDetails
     {
-        public FailureDetails()
-        {
-            TimeOfFailure = DateTime.UtcNow;
-        }
-
         public string AddressOfFailingEndpoint { get; set; }
 
-        public DateTime TimeOfFailure { get; set; }
+        public DateTime TimeOfFailure { get; set; } = DateTime.UtcNow;
 
         public ExceptionDetails Exception { get; set; }
     }
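
With the constructor removed, the property initializer gives every new FailureDetails the same default: TimeOfFailure is stamped with DateTime.UtcNow at construction and only changes if an explicit value is assigned. A brief usage sketch, assuming the class above is in scope (not repository code):

using System;

var fromMissingHeader = new FailureDetails();
// No "NServiceBus.TimeOfFailure" header -> the default stands, which is what the new
// TimeOfFailure assertion in the acceptance test checks against testStartTime.
Console.WriteLine(fromMissingHeader.TimeOfFailure);

var fromHeader = new FailureDetails
{
    // An explicit value parsed from a header overrides the default.
    TimeOfFailure = DateTime.Parse("2014-11-11T02:26:58.000462Z").ToUniversalTime()
};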

src/ServiceControl/Monitoring/DetectNewEndpointsFromErrorImportsEnricher.cs

Lines changed: 1 addition & 8 deletions

@@ -5,13 +5,8 @@
     using ServiceControl.Contracts.Operations;
     using ServiceControl.Persistence;
 
-    class DetectNewEndpointsFromErrorImportsEnricher : IEnrichImportedErrorMessages
+    class DetectNewEndpointsFromErrorImportsEnricher(IEndpointInstanceMonitoring monitoring) : IEnrichImportedErrorMessages
     {
-        public DetectNewEndpointsFromErrorImportsEnricher(IEndpointInstanceMonitoring monitoring)
-        {
-            this.monitoring = monitoring;
-        }
-
         public void Enrich(ErrorEnricherContext context)
         {
             var sendingEndpoint = EndpointDetailsParser.SendingEndpoint(context.Headers);
@@ -47,7 +42,5 @@ void TryAddEndpoint(EndpointDetails endpointDetails, ErrorEnricherContext contex
                 context.Add(endpointDetails);
            }
        }
-
-        IEndpointInstanceMonitoring monitoring;
    }
}
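
This is the same refactor applied to ErrorProcessor.cs below: a C# 12 primary constructor captures its parameters for the whole class, so the hand-written constructor, the backing field, and the this.x = x assignment can all be dropped, and captured parameters can even feed field initializers (as failedMessageFactory does). A generic before/after sketch with made-up names (IDependency, WorkerBefore, WorkerAfter), not repository code:

interface IDependency
{
    void Execute();
}

// Before: explicit field plus constructor assignment.
class WorkerBefore
{
    public WorkerBefore(IDependency dependency) => this.dependency = dependency;

    public void DoWork() => dependency.Execute();

    readonly IDependency dependency;
}

// After: the primary constructor parameter is captured by the class
// and can be used directly in members and field initializers.
class WorkerAfter(IDependency dependency)
{
    public void DoWork() => dependency.Execute();
}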

src/ServiceControl/Operations/ErrorIngestor.cs

Lines changed: 4 additions & 8 deletions

@@ -37,13 +37,7 @@ public ErrorIngestor(Metrics metrics,
             bulkInsertDurationMeter = metrics.GetMeter("Error ingestion - bulk insert duration", FrequencyInMilliseconds);
             var ingestedMeter = metrics.GetCounter("Error ingestion - ingested");
 
-            var enrichers = new IEnrichImportedErrorMessages[]
-            {
-                new MessageTypeEnricher(),
-                new EnrichWithTrackingIds(),
-                new ProcessingStatisticsEnricher()
-
-            }.Concat(errorEnrichers).ToArray();
+            var enrichers = new IEnrichImportedErrorMessages[] { new MessageTypeEnricher(), new EnrichWithTrackingIds(), new ProcessingStatisticsEnricher() }.Concat(errorEnrichers).ToArray();
 
             errorProcessor = new ErrorProcessor(enrichers, failedMessageEnrichers.ToArray(), domainEvents, ingestedMeter);
             retryConfirmationProcessor = new RetryConfirmationProcessor(domainEvents);
@@ -67,7 +61,6 @@ public async Task Ingest(List<MessageContext> contexts, CancellationToken cancel
                }
            }
 
-
            var storedFailed = await PersistFailedMessages(failedMessages, retriedMessages, cancellationToken);
 
            try
@@ -77,6 +70,7 @@ public async Task Ingest(List<MessageContext> contexts, CancellationToken cancel
                {
                    announcerTasks.Add(errorProcessor.Announce(context));
                }
+
                foreach (var context in retriedMessages)
                {
                    announcerTasks.Add(retryConfirmationProcessor.Announce(context));
@@ -90,6 +84,7 @@ public async Task Ingest(List<MessageContext> contexts, CancellationToken cancel
                {
                    Logger.Debug($"Forwarding {storedFailed.Count} messages");
                }
+
                await Forward(storedFailed, cancellationToken);
                if (Logger.IsDebugEnabled)
                {
@@ -133,6 +128,7 @@ async Task<IReadOnlyList<MessageContext>> PersistFailedMessages(List<MessageCont
                {
                    await unitOfWork.Complete(cancellationToken);
                }
+
                return storedFailedMessageContexts;
            }
            catch (Exception e)

src/ServiceControl/Operations/ErrorProcessor.cs

Lines changed: 5 additions & 14 deletions

@@ -15,17 +15,11 @@
     using ServiceControl.Persistence;
     using ServiceControl.Persistence.UnitOfWork;
 
-    class ErrorProcessor
+    class ErrorProcessor(IEnrichImportedErrorMessages[] enrichers,
+        IFailedMessageEnricher[] failedMessageEnrichers,
+        IDomainEvents domainEvents,
+        Counter ingestedCounter)
     {
-        public ErrorProcessor(IEnrichImportedErrorMessages[] enrichers, IFailedMessageEnricher[] failedMessageEnrichers, IDomainEvents domainEvents,
-            Counter ingestedCounter)
-        {
-            this.enrichers = enrichers;
-            this.domainEvents = domainEvents;
-            this.ingestedCounter = ingestedCounter;
-            failedMessageFactory = new FailedMessageFactory(failedMessageEnrichers);
-        }
-
         public async Task<IReadOnlyList<MessageContext>> Process(IReadOnlyList<MessageContext> contexts, IIngestionUnitOfWork unitOfWork)
         {
             var storedContexts = new List<MessageContext>(contexts.Count);
@@ -169,10 +163,7 @@ static void RecordKnownEndpoints(EndpointDetails observedEndpoint, Dictionary<st
            }
        }
 
-        readonly IEnrichImportedErrorMessages[] enrichers;
-        readonly IDomainEvents domainEvents;
-        readonly Counter ingestedCounter;
-        readonly FailedMessageFactory failedMessageFactory;
+        readonly FailedMessageFactory failedMessageFactory = new(failedMessageEnrichers);
        static readonly ILog Logger = LogManager.GetLogger<ErrorProcessor>();
    }
}
