
Commit 912ef81

remove '.' from the end of log messages

1 parent ec9c21f

File tree

37 files changed: +80 -80 lines changed
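
The same mechanical edit is applied across all 37 files: trailing periods are removed from structured log message templates, while placeholders, arguments, and exception messages keep their original wording. A minimal before/after sketch of the pattern, shown below; the class and message text are illustrative, not taken from this commit:

using Microsoft.Extensions.Logging;

class ThroughputCollector(ILogger<ThroughputCollector> logger)
{
    public void Report(int endpointCount)
    {
        // Before this commit the message template ended with a period:
        // logger.LogInformation("Collected throughput for {EndpointCount} endpoints.", endpointCount);

        // After: same template and arguments, no trailing period.
        logger.LogInformation("Collected throughput for {EndpointCount} endpoints", endpointCount);
    }
}

Note that only the log templates change; exception and CheckResult messages (for example the throw new Exception(...) lines in the threshold checks below) keep their trailing periods.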

src/Particular.LicensingComponent/AuditThroughput/AuditThroughputCollectorHostedService.cs

Lines changed: 3 additions & 3 deletions

@@ -59,7 +59,7 @@ async Task GatherThroughput(CancellationToken cancellationToken)

     if (!knownEndpoints.Any())
     {
-        logger.LogWarning("No known endpoints could be found.");
+        logger.LogWarning("No known endpoints could be found");
     }

     foreach (var tuple in await dataStore.GetEndpoints([.. knownEndpointsLookup.Keys], cancellationToken))
@@ -119,14 +119,14 @@ async Task VerifyAuditInstances(CancellationToken cancellationToken)
         }
         else
         {
-            logger.LogWarning("Unable to determine the version of one or more ServiceControl Audit instances. For the instance with URI {RemoteApiUri}, the status was '{RemoteStatus}' and the version string returned was '{RemoteVersionString}'.", remote.ApiUri, remote.Status, remote.VersionString);
+            logger.LogWarning("Unable to determine the version of one or more ServiceControl Audit instances. For the instance with URI {RemoteApiUri}, the status was '{RemoteStatus}' and the version string returned was '{RemoteVersionString}'", remote.ApiUri, remote.Status, remote.VersionString);
         }
     }

     var allHaveAuditCounts = remotesInfo.All(auditQuery.ValidRemoteInstances);
     if (!allHaveAuditCounts)
     {
-        logger.LogWarning("At least one ServiceControl Audit instance is either not running the required version ({RequiredAuditVersion}) or is not configured for at least 2 days of retention. Audit throughput will not be available.", auditQuery.MinAuditCountsVersion);
+        logger.LogWarning("At least one ServiceControl Audit instance is either not running the required version ({RequiredAuditVersion}) or is not configured for at least 2 days of retention. Audit throughput will not be available", auditQuery.MinAuditCountsVersion);
     }
 }

src/ServiceControl.AcceptanceTesting/DiscardMessagesBehavior.cs

Lines changed: 1 addition & 1 deletion

@@ -46,7 +46,7 @@ public Task Invoke(ITransportReceiveContext context, Func<ITransportReceiveConte
     context.Message.Headers.TryGetValue(Headers.MessageId, out var originalMessageId);
     context.Message.Headers.TryGetValue(Headers.EnclosedMessageTypes, out var enclosedMessageTypes);
     var logger = LoggerUtil.CreateStaticLogger<DiscardMessagesBehavior>();
-    logger.LogDebug("Discarding message '{MessageId}'({OriginalMessageId}) because it's session id is '{MessageSessionId}' instead of '{CurrentSessionId}' Message Types: {EnclosedMessageTypes}.",
+    logger.LogDebug("Discarding message '{MessageId}'({OriginalMessageId}) because it's session id is '{MessageSessionId}' instead of '{CurrentSessionId}' Message Types: {EnclosedMessageTypes}",
         context.Message.MessageId,
         originalMessageId ?? string.Empty,
         session,

src/ServiceControl.Audit.AcceptanceTests/TestSupport/ServiceControlComponentRunner.cs

Lines changed: 2 additions & 2 deletions

@@ -59,7 +59,7 @@ async Task InitializeServiceControl(ScenarioContext context)
     var headers = messageContext.Headers;
     var logger = LoggerUtil.CreateStaticLogger<ServiceControlComponentRunner>(loggingSettings.LogLevel);
     headers.TryGetValue(Headers.MessageId, out var originalMessageId);
-    logger.LogDebug("OnMessage for message '{MessageId}'({OriginalMessageId}).", id, originalMessageId ?? string.Empty);
+    logger.LogDebug("OnMessage for message '{MessageId}'({OriginalMessageId})", id, originalMessageId ?? string.Empty);

     //Do not filter out CC, SA and HB messages as they can't be stamped
     if (headers.TryGetValue(Headers.EnclosedMessageTypes, out var messageTypes)
@@ -78,7 +78,7 @@ async Task InitializeServiceControl(ScenarioContext context)
     var currentSession = context.TestRunId.ToString();
     if (!headers.TryGetValue("SC.SessionID", out var session) || session != currentSession)
     {
-        logger.LogDebug("Discarding message '{MessageId}'({OriginalMessageId}) because it's session id is '{SessionId}' instead of '{CurrentSessionId}'.", id, originalMessageId ?? string.Empty, session, currentSession);
+        logger.LogDebug("Discarding message '{MessageId}'({OriginalMessageId}) because it's session id is '{SessionId}' instead of '{CurrentSessionId}'", id, originalMessageId ?? string.Empty, session, currentSession);
         return true;
     }

src/ServiceControl.Audit.Persistence.RavenDB/CustomChecks/CheckDirtyMemory.cs

Lines changed: 2 additions & 2 deletions

@@ -13,11 +13,11 @@ public override async Task<CheckResult> PerformCheck(CancellationToken cancellat
 {
     var (isHighDirty, dirtyMemory) = await memoryInformationRetriever.GetMemoryInformation(cancellationToken);

-    logger.LogDebug("RavenDB dirty memory value: {DirtyMemory}.", dirtyMemory);
+    logger.LogDebug("RavenDB dirty memory value: {DirtyMemory}", dirtyMemory);

     if (isHighDirty)
     {
-        logger.LogWarning("There is a high level of RavenDB dirty memory ({DirtyMemory}). See https://docs.particular.net/servicecontrol/troubleshooting#ravendb-dirty-memory for guidance on how to mitigate the issue.", dirtyMemory);
+        logger.LogWarning("There is a high level of RavenDB dirty memory ({DirtyMemory}). See https://docs.particular.net/servicecontrol/troubleshooting#ravendb-dirty-memory for guidance on how to mitigate the issue", dirtyMemory);
         return CheckResult.Failed($"There is a high level of RavenDB dirty memory ({dirtyMemory}). See https://docs.particular.net/servicecontrol/troubleshooting#ravendb-dirty-memory for guidance on how to mitigate the issue.");
     }

src/ServiceControl.Audit.Persistence.RavenDB/CustomChecks/CheckFreeDiskSpace.cs

Lines changed: 3 additions & 3 deletions

@@ -53,19 +53,19 @@ public static int Parse(IDictionary<string, string> settings, ILogger logger)

     if (!int.TryParse(thresholdValue, out var threshold))
     {
-        logger.LogCritical("{RavenPersistenceConfigurationDataSpaceRemainingThresholdKey} must be an integer.", RavenPersistenceConfiguration.DataSpaceRemainingThresholdKey);
+        logger.LogCritical("{RavenPersistenceConfigurationDataSpaceRemainingThresholdKey} must be an integer", RavenPersistenceConfiguration.DataSpaceRemainingThresholdKey);
         throw new Exception($"{RavenPersistenceConfiguration.DataSpaceRemainingThresholdKey} must be an integer.");
     }

     if (threshold < 0)
     {
-        logger.LogCritical("{RavenPersistenceConfigurationDataSpaceRemainingThresholdKey} is invalid, minimum value is 0.", RavenPersistenceConfiguration.DataSpaceRemainingThresholdKey);
+        logger.LogCritical("{RavenPersistenceConfigurationDataSpaceRemainingThresholdKey} is invalid, minimum value is 0", RavenPersistenceConfiguration.DataSpaceRemainingThresholdKey);
         throw new Exception($"{RavenPersistenceConfiguration.DataSpaceRemainingThresholdKey} is invalid, minimum value is 0.");
     }

     if (threshold > 100)
     {
-        logger.LogCritical("{RavenPersistenceConfigurationDataSpaceRemainingThresholdKey} is invalid, maximum value is 100.", RavenPersistenceConfiguration.DataSpaceRemainingThresholdKey);
+        logger.LogCritical("{RavenPersistenceConfigurationDataSpaceRemainingThresholdKey} is invalid, maximum value is 100", RavenPersistenceConfiguration.DataSpaceRemainingThresholdKey);
         throw new Exception($"{RavenPersistenceConfiguration.DataSpaceRemainingThresholdKey} is invalid, maximum value is 100.");
     }

src/ServiceControl.Audit.Persistence.RavenDB/CustomChecks/CheckMinimumStorageRequiredForIngestion.cs

Lines changed: 4 additions & 4 deletions

@@ -47,7 +47,7 @@ public override Task<CheckResult> PerformCheck(CancellationToken cancellationTok
         return SuccessResult;
     }

-    logger.LogWarning("Audit message ingestion stopped! {PercentRemaining:P0} disk space remaining on data drive '{DataDriveInfoVolumeLabel} ({DataDriveInfoRootDirectory})' on '{EnvironmentMachineName}'. This is less than {PercentageThreshold}% - the minimal required space configured. The threshold can be set using the {RavenPersistenceConfigurationMinimumStorageLeftRequiredForIngestionKey} configuration setting.", percentRemaining, dataDriveInfo.VolumeLabel, dataDriveInfo.RootDirectory, Environment.MachineName, percentageThreshold, RavenPersistenceConfiguration.MinimumStorageLeftRequiredForIngestionKey);
+    logger.LogWarning("Audit message ingestion stopped! {PercentRemaining:P0} disk space remaining on data drive '{DataDriveInfoVolumeLabel} ({DataDriveInfoRootDirectory})' on '{EnvironmentMachineName}'. This is less than {PercentageThreshold}% - the minimal required space configured. The threshold can be set using the {RavenPersistenceConfigurationMinimumStorageLeftRequiredForIngestionKey} configuration setting", percentRemaining, dataDriveInfo.VolumeLabel, dataDriveInfo.RootDirectory, Environment.MachineName, percentageThreshold, RavenPersistenceConfiguration.MinimumStorageLeftRequiredForIngestionKey);
     stateHolder.CanIngestMore = false;
     return CheckResult.Failed($"Audit message ingestion stopped! {percentRemaining:P0} disk space remaining on data drive '{dataDriveInfo.VolumeLabel} ({dataDriveInfo.RootDirectory})' on '{Environment.MachineName}'. This is less than {percentageThreshold}% - the minimal required space configured. The threshold can be set using the {RavenPersistenceConfiguration.MinimumStorageLeftRequiredForIngestionKey} configuration setting.");
 }
@@ -61,19 +61,19 @@ public static int Parse(IDictionary<string, string> settings)

     if (!int.TryParse(thresholdValue, out var threshold))
     {
-        Logger.LogCritical("{RavenPersistenceConfigurationMinimumStorageLeftRequiredForIngestionKey} must be an integer.", RavenPersistenceConfiguration.MinimumStorageLeftRequiredForIngestionKey);
+        Logger.LogCritical("{RavenPersistenceConfigurationMinimumStorageLeftRequiredForIngestionKey} must be an integer", RavenPersistenceConfiguration.MinimumStorageLeftRequiredForIngestionKey);
         throw new Exception($"{RavenPersistenceConfiguration.MinimumStorageLeftRequiredForIngestionKey} must be an integer.");
     }

     if (threshold < 0)
     {
-        Logger.LogCritical("{RavenPersistenceConfigurationMinimumStorageLeftRequiredForIngestionKey} is invalid, minimum value is 0.", RavenPersistenceConfiguration.MinimumStorageLeftRequiredForIngestionKey);
+        Logger.LogCritical("{RavenPersistenceConfigurationMinimumStorageLeftRequiredForIngestionKey} is invalid, minimum value is 0", RavenPersistenceConfiguration.MinimumStorageLeftRequiredForIngestionKey);
         throw new Exception($"{RavenPersistenceConfiguration.MinimumStorageLeftRequiredForIngestionKey} is invalid, minimum value is 0.");
     }

     if (threshold > 100)
     {
-        Logger.LogCritical("{RavenPersistenceConfigurationMinimumStorageLeftRequiredForIngestionKey} is invalid, maximum value is 100.", RavenPersistenceConfiguration.MinimumStorageLeftRequiredForIngestionKey);
+        Logger.LogCritical("{RavenPersistenceConfigurationMinimumStorageLeftRequiredForIngestionKey} is invalid, maximum value is 100", RavenPersistenceConfiguration.MinimumStorageLeftRequiredForIngestionKey);
         throw new Exception($"{RavenPersistenceConfiguration.MinimumStorageLeftRequiredForIngestionKey} is invalid, maximum value is 100.");
     }

src/ServiceControl.Audit.Persistence.RavenDB/CustomChecks/CheckRavenDBIndexLag.cs

Lines changed: 2 additions & 2 deletions

@@ -44,12 +44,12 @@ static int CheckAndReportIndexesWithTooMuchIndexLag(IndexInformation[] indexes,
         if (indexLag > IndexLagThresholdError)
         {
             indexCountWithTooMuchLag++;
-            logger.LogError("Index [{IndexStatsName}] IndexingLag {IndexLag} is above error threshold ({IndexLagThresholdError}). Launch in maintenance mode to let indexes catch up.", indexStats.Name, indexLag, IndexLagThresholdError);
+            logger.LogError("Index [{IndexStatsName}] IndexingLag {IndexLag} is above error threshold ({IndexLagThresholdError}). Launch in maintenance mode to let indexes catch up", indexStats.Name, indexLag, IndexLagThresholdError);
         }
         else if (indexLag > IndexLagThresholdWarning)
         {
             indexCountWithTooMuchLag++;
-            logger.LogWarning("Index [{IndexStatsName}] IndexingLag {IndexLag} is above warning threshold ({IndexLagThresholdWarning}). Launch in maintenance mode to let indexes catch up.", indexStats.Name, indexLag, IndexLagThresholdWarning);
+            logger.LogWarning("Index [{IndexStatsName}] IndexingLag {IndexLag} is above warning threshold ({IndexLagThresholdWarning}). Launch in maintenance mode to let indexes catch up", indexStats.Name, indexLag, IndexLagThresholdWarning);
         }
     }
 }

src/ServiceControl.Audit/Auditing/AuditPersister.cs

Lines changed: 4 additions & 4 deletions

@@ -155,7 +155,7 @@ void ProcessSagaAuditMessage(MessageContext context)
     }
     catch (Exception e)
    {
-        logger.LogWarning(e, "Processing of saga audit message '{NativeMessageId}' failed.", context.NativeMessageId);
+        logger.LogWarning(e, "Processing of saga audit message '{NativeMessageId}' failed", context.NativeMessageId);

         // releasing the failed message context early so that they can be retried outside the current batch
         context.GetTaskCompletionSource().TrySetException(e);
@@ -188,7 +188,7 @@ async Task ProcessAuditMessage(MessageContext context)

     var auditMessage = new ProcessedMessage(context.Headers, new Dictionary<string, object>(metadata));

-    logger.LogDebug("Emitting {CommandsToEmitCount} commands and {MessagesToEmitCount} control messages.", commandsToEmit.Count, messagesToEmit.Count);
+    logger.LogDebug("Emitting {CommandsToEmitCount} commands and {MessagesToEmitCount} control messages", commandsToEmit.Count, messagesToEmit.Count);

     foreach (var commandToEmit in commandsToEmit)
     {
@@ -198,7 +198,7 @@ async Task ProcessAuditMessage(MessageContext context)
     await messageDispatcher.Value.Dispatch(new TransportOperations(messagesToEmit.ToArray()),
         new TransportTransaction()); //Do not hook into the incoming transaction

-    logger.LogDebug("{CommandsToEmitCount} commands and {MessagesToEmitCount} control messages emitted.", commandsToEmit.Count, messagesToEmit.Count);
+    logger.LogDebug("{CommandsToEmitCount} commands and {MessagesToEmitCount} control messages emitted", commandsToEmit.Count, messagesToEmit.Count);

     if (metadata.TryGetValue("SendingEndpoint", out var sendingEndpoint))
     {
@@ -215,7 +215,7 @@ await messageDispatcher.Value.Dispatch(new TransportOperations(messagesToEmit.To
     }
     catch (Exception e)
     {
-        logger.LogWarning(e, "Processing of message '{MessageId}' failed.", messageId);
+        logger.LogWarning(e, "Processing of message '{MessageId}' failed", messageId);

         // releasing the failed message context early so that they can be retried outside the current batch
         context.GetTaskCompletionSource().TrySetException(e);

src/ServiceControl.Audit/Auditing/ImportFailedAudits.cs

Lines changed: 4 additions & 4 deletions

@@ -52,25 +52,25 @@ await failedAuditStore.ProcessFailedMessages(

                 await markComplete(token);
                 succeeded++;
-                logger.LogDebug("Successfully re-imported failed audit message {MessageId}.", transportMessage.Id);
+                logger.LogDebug("Successfully re-imported failed audit message {MessageId}", transportMessage.Id);
             }
             catch (OperationCanceledException e) when (token.IsCancellationRequested)
             {
                 logger.LogInformation(e, "Cancelled");
             }
             catch (Exception e)
             {
-                logger.LogError(e, "Error while attempting to re-import failed audit message {MessageId}.", transportMessage.Id);
+                logger.LogError(e, "Error while attempting to re-import failed audit message {MessageId}", transportMessage.Id);
                 failed++;
             }

         }, cancellationToken);

-    logger.LogInformation("Done re-importing failed audits. Successfully re-imported {SuccessCount} messages. Failed re-importing {FailureCount} messages.", succeeded, failed);
+    logger.LogInformation("Done re-importing failed audits. Successfully re-imported {SuccessCount} messages. Failed re-importing {FailureCount} messages", succeeded, failed);

     if (failed > 0)
     {
-        logger.LogWarning("{FailureCount} messages could not be re-imported. This could indicate a problem with the data. Contact Particular support if you need help with recovering the messages.", failed);
+        logger.LogWarning("{FailureCount} messages could not be re-imported. This could indicate a problem with the data. Contact Particular support if you need help with recovering the messages", failed);
     }
 }

src/ServiceControl.Audit/Infrastructure/Settings/Settings.cs

Lines changed: 1 addition & 1 deletion

@@ -75,7 +75,7 @@ void LoadAuditQueueInformation()

     if (IngestAuditMessages == false)
     {
-        logger.LogInformation("Audit ingestion disabled.");
+        logger.LogInformation("Audit ingestion disabled");
     }

     AuditLogQueue = SettingsReader.Read<string>(serviceBusRootNamespace, "AuditLogQueue", null);

0 commit comments