diff --git a/src/ServiceLayer.Mesh/FileTypes/NbssAppointmentEvents/FileParser.cs b/src/ServiceLayer.Mesh/FileTypes/NbssAppointmentEvents/FileParser.cs
index 9ecf348..7dbedb5 100644
--- a/src/ServiceLayer.Mesh/FileTypes/NbssAppointmentEvents/FileParser.cs
+++ b/src/ServiceLayer.Mesh/FileTypes/NbssAppointmentEvents/FileParser.cs
@@ -1,12 +1,143 @@
+using CsvHelper;
+using CsvHelper.Configuration;
using ServiceLayer.Mesh.FileTypes.NbssAppointmentEvents.Models;
+using System.Globalization;
+using System.Text;
namespace ServiceLayer.Mesh.FileTypes.NbssAppointmentEvents;
public class FileParser : IFileParser
{
+ private const string HeaderIdentifier = "NBSSAPPT_HDR";
+ private const string FieldsIdentifier = "NBSSAPPT_FLDS";
+ private const string DataIdentifier = "NBSSAPPT_DATA";
+ private const string TrailerIdentifier = "NBSSAPPT_END";
+ private const int RecordTypeIdentifier = 0;
+
+    /// <summary>
+    /// Parse a stream of appointment data
+    /// </summary>
public ParsedFile Parse(Stream stream)
{
- // TODO - implement this
- throw new NotImplementedException();
+ if (stream == null)
+ {
+ throw new ArgumentNullException(nameof(stream), "Stream cannot be null");
+ }
+
+ var result = new ParsedFile();
+
+ using var reader = CreateStreamReader(stream);
+ using var csv = CreateCsvReader(reader);
+
+ var rowNumber = 0;
+        var fields = new List<string>();
+
+ while (csv.Read())
+ {
+ var recordIdentifier = GetFieldValue(csv, RecordTypeIdentifier);
+
+ switch (recordIdentifier)
+ {
+ case HeaderIdentifier:
+ result.FileHeader = ParseHeader(csv);
+ break;
+
+ case FieldsIdentifier:
+ fields = ParseFields(csv);
+ break;
+
+ case DataIdentifier:
+ rowNumber++;
+ result.DataRecords.Add(ParseDataRecord(csv, fields, rowNumber));
+ break;
+
+ case TrailerIdentifier:
+ result.FileTrailer = ParseTrailer(csv);
+ break;
+
+ default:
+ throw new InvalidOperationException($"Unknown record identifier: {recordIdentifier}");
+ }
+ }
+
+ return result;
+ }
+
+    private static List<string> ParseFields(CsvReader csv)
+ {
+ return Enumerable.Range(1, csv.Parser.Count - 1)
+ .Select(i => GetFieldValue(csv, i))
+ .Where(x => !string.IsNullOrEmpty(x))
+ .ToList()!;
+ }
+
+ private static string? GetFieldValue(CsvReader csv, int index) => index < csv.Parser.Count ? csv.GetField(index) : null;
+ private static StreamReader CreateStreamReader(Stream stream) => new(stream, Encoding.UTF8, detectEncodingFromByteOrderMarks: true, bufferSize: 1024, leaveOpen: true);
+    private static FileHeaderRecord ParseHeader(CsvReader csv) => csv.GetRecord<FileHeaderRecord>();
+    private static FileTrailerRecord ParseTrailer(CsvReader csv) => csv.GetRecord<FileTrailerRecord>();
+ private static CsvReader CreateCsvReader(StreamReader reader)
+ {
+ var config = new CsvConfiguration(CultureInfo.InvariantCulture)
+ {
+ Delimiter = "|",
+ Quote = '"',
+ Escape = '\\',
+ HasHeaderRecord = false,
+ Mode = CsvMode.RFC4180,
+ BadDataFound = null
+ };
+
+ var csv = new CsvReader(reader, config);
+        csv.Context.RegisterClassMap<FileHeaderRecordMap>();
+        csv.Context.RegisterClassMap<FileTrailerRecordMap>();
+
+ return csv;
+ }
+
+    private static FileDataRecord ParseDataRecord(CsvReader csv, List<string> columnHeadings, int rowNumber)
+ {
+ if (columnHeadings.Count == 0)
+ {
+ throw new InvalidOperationException("Field headers (NBSSAPPT_FLDS) must appear before data records.");
+ }
+
+ const int dataFieldStartIndex = 1;
+
+ var record = new FileDataRecord { RowNumber = rowNumber };
+
+ foreach (var (heading, index) in columnHeadings.Select((header, index) => (header, index + dataFieldStartIndex)))
+ {
+ if (index < csv.Parser.Count)
+ {
+ record.Fields[heading] = GetFieldValue(csv, index) ?? string.Empty;
+ }
+ }
+
+ return record;
+ }
+
+    public sealed class FileTrailerRecordMap : ClassMap<FileTrailerRecord>
+ {
+ public FileTrailerRecordMap()
+ {
+ Map(m => m.RecordTypeIdentifier).Index(0);
+ Map(m => m.ExtractId).Index(1);
+ Map(m => m.TransferEndDate).Index(2);
+ Map(m => m.TransferEndTime).Index(3);
+ Map(m => m.RecordCount).Index(4);
+ }
+ }
+
+    public sealed class FileHeaderRecordMap : ClassMap<FileHeaderRecord>
+ {
+ public FileHeaderRecordMap()
+ {
+ Map(m => m.RecordTypeIdentifier).Index(0);
+ Map(m => m.ExtractId).Index(1);
+ Map(m => m.TransferStartDate).Index(2);
+ Map(m => m.TransferStartTime).Index(3);
+ Map(m => m.RecordCount).Index(4);
+ }
}
}
+
diff --git a/src/ServiceLayer.Mesh/FileTypes/NbssAppointmentEvents/Models/ParsedFile.cs b/src/ServiceLayer.Mesh/FileTypes/NbssAppointmentEvents/Models/ParsedFile.cs
index 59acfc5..7d31702 100644
--- a/src/ServiceLayer.Mesh/FileTypes/NbssAppointmentEvents/Models/ParsedFile.cs
+++ b/src/ServiceLayer.Mesh/FileTypes/NbssAppointmentEvents/Models/ParsedFile.cs
@@ -4,5 +4,5 @@ public class ParsedFile
{
public FileHeaderRecord? FileHeader { get; set; }
public FileTrailerRecord? FileTrailer { get; set; }
-    public required List<FileDataRecord> DataRecords { get; set; } = [];
+    public List<FileDataRecord> DataRecords { get; set; } = [];
}
diff --git a/src/ServiceLayer.Mesh/Functions/FileTransformFunction.cs b/src/ServiceLayer.Mesh/Functions/FileTransformFunction.cs
index 60606e8..7484a2e 100644
--- a/src/ServiceLayer.Mesh/Functions/FileTransformFunction.cs
+++ b/src/ServiceLayer.Mesh/Functions/FileTransformFunction.cs
@@ -5,6 +5,7 @@
using ServiceLayer.Data;
using ServiceLayer.Data.Models;
using ServiceLayer.Mesh.Configuration;
+using ServiceLayer.Mesh.FileTypes.NbssAppointmentEvents;
using ServiceLayer.Mesh.Messaging;
using ServiceLayer.Mesh.Storage;
@@ -14,7 +15,8 @@ public class FileTransformFunction(
    ILogger<FileTransformFunction> logger,
ServiceLayerDbContext serviceLayerDbContext,
IMeshFilesBlobStore meshFileBlobStore,
- IFileTransformFunctionConfiguration configuration)
+ IFileTransformFunctionConfiguration configuration,
+ IFileParser fileParser)
{
[Function("FileTransformFunction")]
public async Task Run([QueueTrigger("%FileTransformQueueName%")] FileTransformQueueMessage message)
@@ -25,7 +27,7 @@ public async Task Run([QueueTrigger("%FileTransformQueueName%")] FileTransformQu
if (file == null)
{
- logger.LogWarning("File with id: {fileId} not found in MeshFiles table.", message.FileId);
+ logger.LogWarning("File with id: {FileId} not found in MeshFiles table.", message.FileId);
return;
}
@@ -39,6 +41,8 @@ public async Task Run([QueueTrigger("%FileTransformQueueName%")] FileTransformQu
var fileContent = await meshFileBlobStore.DownloadAsync(file);
+ var parsedfile = fileParser.Parse(fileContent);
+
        // TODO - take dependency on IEnumerable<IFileTransformer>.
// After initial common checks against database, find the appropriate implementation of IFileTransformer to handle the functionality that differs between file type.
}
@@ -59,7 +63,7 @@ private bool IsFileSuitableForTransformation(MeshFile file)
(file.Status == MeshFileStatus.Transforming && file.LastUpdatedUtc > DateTime.UtcNow.AddHours(-configuration.StaleHours)))
{
logger.LogWarning(
- "File with id: {fileId} found in MeshFiles table but is not suitable for transformation. Status: {status}, LastUpdatedUtc: {lastUpdatedUtc}.",
+ "File with id: {FileId} found in MeshFiles table but is not suitable for transformation. Status: {Status}, LastUpdatedUtc: {LastUpdatedUtc}.",
file.FileId,
file.Status,
file.LastUpdatedUtc.ToTimestamp());
diff --git a/src/ServiceLayer.Mesh/Program.cs b/src/ServiceLayer.Mesh/Program.cs
index 03e705a..b8246be 100644
--- a/src/ServiceLayer.Mesh/Program.cs
+++ b/src/ServiceLayer.Mesh/Program.cs
@@ -8,6 +8,7 @@
using ServiceLayer.Mesh.Configuration;
using ServiceLayer.Mesh.Messaging;
using ServiceLayer.Data;
+using ServiceLayer.Mesh.FileTypes.NbssAppointmentEvents;
var host = new HostBuilder()
.ConfigureFunctionsWebApplication()
@@ -50,6 +51,7 @@
services.AddSingleton();
services.AddSingleton();
+        services.AddSingleton<IFileParser, FileParser>();
services.AddSingleton(provider =>
{
diff --git a/src/ServiceLayer.Mesh/ServiceLayer.Mesh.csproj b/src/ServiceLayer.Mesh/ServiceLayer.Mesh.csproj
index 535e2af..2037bff 100644
--- a/src/ServiceLayer.Mesh/ServiceLayer.Mesh.csproj
+++ b/src/ServiceLayer.Mesh/ServiceLayer.Mesh.csproj
@@ -11,6 +11,7 @@
+    <PackageReference Include="CsvHelper" Version="33.0.1" />
diff --git a/tests/ServiceLayer.Mesh.Tests/FileTypes/NbssAppointmentEvents/FileParserTests.cs b/tests/ServiceLayer.Mesh.Tests/FileTypes/NbssAppointmentEvents/FileParserTests.cs
new file mode 100644
index 0000000..d5b5e4b
--- /dev/null
+++ b/tests/ServiceLayer.Mesh.Tests/FileTypes/NbssAppointmentEvents/FileParserTests.cs
@@ -0,0 +1,388 @@
+using System.Globalization;
+using System.Text;
+using CsvHelper;
+using CsvHelper.Configuration;
+using ServiceLayer.Mesh.FileTypes.NbssAppointmentEvents;
+using ServiceLayer.Mesh.FileTypes.NbssAppointmentEvents.Models;
+using static ServiceLayer.Mesh.FileTypes.NbssAppointmentEvents.FileParser;
+
+namespace ServiceLayer.Mesh.Tests.FileTypes.NbssAppointmentEvents;
+
+public class FileParserTests
+{
+ private readonly FileParser _fileParser;
+ private readonly string _testDataPath;
+
+ public FileParserTests()
+ {
+ _fileParser = new FileParser();
+ _testDataPath = Path.Combine(Directory.GetCurrentDirectory(), "..", "..", "..", "FileTypes", "NbssAppointmentEvents", "TestData");
+ }
+
+ private FileStream GetTestFileStream(string fileName)
+ {
+ string filePath = Path.Combine(_testDataPath, fileName);
+ return File.OpenRead(filePath);
+ }
+
+ [Fact]
+ public void Parse_NullStream_ThrowsArgumentNullException()
+ {
+ // Arrange
+ Stream? stream = null;
+
+ // Act & Assert
+        var exception = Assert.Throws<ArgumentNullException>(() => _fileParser.Parse(stream!));
+
+ Assert.Equal("stream", exception.ParamName);
+ }
+
+ [Fact]
+ public void Parse_EmptyStream_ReturnsEmptyParsedFile()
+ {
+ // Arrange
+ using var stream = CreateStreamFromString("");
+
+ // Act
+ var result = _fileParser.Parse(stream);
+
+ // Assert
+ Assert.NotNull(result);
+ Assert.Null(result.FileHeader);
+ Assert.Null(result.FileTrailer);
+ Assert.Empty(result.DataRecords);
+ }
+
+ [Fact]
+ public void Parse_ValidFile_ReturnsParsedFileWithCorrectStructure()
+ {
+ // Arrange
+ using var fileStream = GetTestFileStream("ValidFile.dat");
+
+ // Act
+ var result = _fileParser.Parse(fileStream);
+
+ // Assert
+ Assert.NotNull(result.FileHeader);
+ VerifyFileHeaderRecord(result.FileHeader, "NBSSAPPT_HDR", "00000107", "20250317", "133128", "000002");
+ Assert.Equal(2, result.DataRecords.Count);
+ Assert.NotNull(result.FileTrailer);
+ VerifyFileTrailerRecord(result.FileTrailer, "NBSSAPPT_END", "00000107", "20250317", "133129", "000002");
+
+ Assert.Equal(1, result.DataRecords[0].RowNumber);
+ Assert.Equal(2, result.DataRecords[1].RowNumber);
+
+        var expectedFirstRecord = new Dictionary<string, string>
+ {
+ ["Sequence"] = "000001",
+ ["BSO"] = "KMK",
+ ["Action"] = "B",
+ ["Clinic Code"] = "BU003",
+ ["Status"] = "B"
+ };
+
+        var expectedSecondRecord = new Dictionary<string, string>
+ {
+ ["Sequence"] = "000002",
+ ["BSO"] = "KMK",
+ ["Action"] = "B",
+ ["Clinic Code"] = "BU004",
+ ["Status"] = "B"
+ };
+
+ VerifyDataRecordFields(result.DataRecords[0], expectedFirstRecord);
+ VerifyDataRecordFields(result.DataRecords[1], expectedSecondRecord);
+ }
+
+ [Fact]
+ public void Parse_CompleteDataset_ParsesAllFieldsCorrectly()
+ {
+ // Arrange
+ using var fileStream = GetTestFileStream("CompleteDataset.dat");
+
+ // Act
+ var result = _fileParser.Parse(fileStream);
+
+ // Assert
+ Assert.NotNull(result);
+ Assert.Single(result.DataRecords);
+ Assert.Equal(1, result.DataRecords[0].RowNumber);
+
+        var expectedData = new Dictionary<string, string>
+ {
+ ["Sequence"] = "000001",
+ ["BSO"] = "KMK",
+ ["Action"] = "U",
+ ["Clinic Code"] = "BU003",
+ ["Holding Clinic"] = "N",
+ ["Status"] = "A",
+ ["Attended Not Scr"] = "N",
+ ["Appointment ID"] = "BU003-67235-RA1-DN-T1330-1",
+ ["NHS Num"] = "9277757620",
+ ["Epsiode Type"] = "G",
+ ["Episode Start"] = "2025-01-30",
+ ["BatchID"] = "KMKG00581",
+ ["Screen or Asses"] = "S",
+ ["Screen Appt num"] = "1",
+ ["Booked By"] = "H",
+ ["Cancelled By"] = "",
+ ["Appt Date"] = "20250130",
+ ["Appt Time"] = "1330",
+ ["Location"] = "BU",
+ ["Clinic Name"] = "BREAST CARE UNIT",
+ ["Clinic Name (Let)"] = "BREAST CARE UNIT",
+ ["Clinic Address 1"] = "BREAST CARE UNIT",
+ ["Clinic Address 2"] = "MILTON KEYNES HOSPITAL",
+ ["Clinic Address 3"] = "STANDING WAY",
+ ["Clinic Address 4"] = "MILTON KEYNES",
+ ["Clinic Address 5"] = "MK6 5LD",
+ ["Postcode"] = "MK6 5LD",
+ ["Action Timestamp"] = "20250204-161420"
+ };
+
+ VerifyDataRecordFields(result.DataRecords[0], expectedData);
+ }
+
+ [Fact]
+ public void Parse_MissingFieldsRecord_ThrowsInvalidOperationException()
+ {
+ // Arrange
+ using var fileStream = GetTestFileStream("MissingFields.dat");
+
+ // Act & Assert
+        var exception = Assert.Throws<InvalidOperationException>(() => _fileParser.Parse(fileStream));
+
+ Assert.Equal("Field headers (NBSSAPPT_FLDS) must appear before data records.", exception.Message);
+ }
+
+ [Fact]
+ public void Parse_UnknownRecordType_ThrowsInvalidOperationException()
+ {
+ // Arrange
+ using var fileStream = GetTestFileStream("UnknownRecord.dat");
+
+ // Act & Assert
+        var exception = Assert.Throws<InvalidOperationException>(() => _fileParser.Parse(fileStream));
+
+ Assert.Equal("Unknown record identifier: UNKNOWN_TYPE", exception.Message);
+ }
+
+ [Fact]
+ public void Parse_EmptyLine_SkipsEmptyLines()
+ {
+ // Arrange
+ using var fileStream = GetTestFileStream("EmptyLines.dat");
+
+ // Act
+ var result = _fileParser.Parse(fileStream);
+
+ // Assert
+ Assert.NotNull(result);
+ Assert.Single(result.DataRecords);
+ Assert.Equal(1, result.DataRecords[0].RowNumber);
+ }
+
+ [Fact]
+ public void Parse_FewerColumnsInDataRecord_OnlyProcessesAvailableColumns()
+ {
+ // Arrange
+ using var fileStream = GetTestFileStream("FewerColumns.dat");
+
+ // Act
+ var result = _fileParser.Parse(fileStream);
+
+ // Assert
+ Assert.NotNull(result);
+ Assert.Single(result.DataRecords);
+ Assert.Equal(1, result.DataRecords[0].RowNumber);
+
+        var expectedData = new Dictionary<string, string>
+ {
+ ["Sequence"] = "000001",
+ ["BSO"] = "KMK",
+ ["Action"] = "U"
+ };
+
+ VerifyDataRecordFields(result.DataRecords[0], expectedData);
+ Assert.False(result.DataRecords[0].Fields.ContainsKey("Clinic Code"));
+ Assert.False(result.DataRecords[0].Fields.ContainsKey("Status"));
+ }
+
+ [Fact]
+ public void Parse_ExtraColumnsInDataRecord_IgnoresExtraColumns()
+ {
+ // Arrange
+ using var fileStream = GetTestFileStream("ExtraColumns.dat");
+
+ // Act
+ var result = _fileParser.Parse(fileStream);
+
+ // Assert
+ Assert.NotNull(result);
+ Assert.Single(result.DataRecords);
+ Assert.Equal(1, result.DataRecords[0].RowNumber);
+
+        var expectedData = new Dictionary<string, string>
+ {
+ ["Sequence"] = "000001",
+ ["BSO"] = "KMK",
+ ["Action"] = "U"
+ };
+
+ VerifyDataRecordFields(result.DataRecords[0], expectedData);
+ Assert.Equal(3, result.DataRecords[0].Fields.Count);
+ }
+
+ [Fact]
+ public void Parse_QuotedValues_TrimsQuotes()
+ {
+ // Arrange
+ using var fileStream = GetTestFileStream("QuotedValues.dat");
+
+ // Act
+ var result = _fileParser.Parse(fileStream);
+
+ // Assert
+ Assert.NotNull(result);
+ Assert.Single(result.DataRecords);
+ Assert.Equal(1, result.DataRecords[0].RowNumber);
+
+        var expectedData = new Dictionary<string, string>
+ {
+ ["Field1"] = "Value1",
+ ["Field2"] = "Value2",
+ ["Field3"] = "Value3"
+ };
+
+ VerifyDataRecordFields(result.DataRecords[0], expectedData);
+ }
+
+ [Fact]
+ public void Parse_WithEscapedCharacters_HandlesCorrectly()
+ {
+ // Arrange
+ using var fileStream = GetTestFileStream("EscapedChars.dat");
+
+ // Act
+ var result = _fileParser.Parse(fileStream);
+
+ // Assert
+ Assert.NotNull(result);
+ Assert.Single(result.DataRecords);
+ Assert.Equal(1, result.DataRecords[0].RowNumber);
+
+        var expectedData = new Dictionary<string, string>
+ {
+ ["Field With\"Quote"] = "Value With\"Quote",
+ ["Normal Field"] = "Normal Value",
+ ["Field With\\Backslash"] = "Value With\\Backslash"
+ };
+
+ VerifyDataRecordFields(result.DataRecords[0], expectedData);
+ }
+
+ [Fact]
+ public void VerifyFileHeaderRecordMap_MapsCorrectly()
+ {
+ // Arrange
+ using var reader = CreateConfiguredCsvReader("HeaderMapping.dat");
+        reader.Context.RegisterClassMap<FileHeaderRecordMap>();
+
+ // Act
+ reader.Read();
+        var result = reader.GetRecord<FileHeaderRecord>();
+
+ // Assert
+ Assert.Equal("NBSSAPPT_HDR", result.RecordTypeIdentifier);
+ Assert.Equal("00000054", result.ExtractId);
+ Assert.Equal("20250204", result.TransferStartDate);
+ Assert.Equal("161846", result.TransferStartTime);
+ Assert.Equal("000002", result.RecordCount);
+ }
+
+ [Fact]
+ public void VerifyFileTrailerRecordMap_MapsCorrectly()
+ {
+ // Arrange
+ using var reader = CreateConfiguredCsvReader("TrailerMapping.dat");
+        reader.Context.RegisterClassMap<FileTrailerRecordMap>();
+
+ // Act
+ reader.Read();
+        var result = reader.GetRecord<FileTrailerRecord>();
+
+ // Assert
+ Assert.Equal("NBSSAPPT_END", result.RecordTypeIdentifier);
+ Assert.Equal("00000054", result.ExtractId);
+ Assert.Equal("20250204", result.TransferEndDate);
+ Assert.Equal("161846", result.TransferEndTime);
+ Assert.Equal("000002", result.RecordCount);
+ }
+
+ // Helper methods
+ private CsvReader CreateConfiguredCsvReader(string fileName)
+ {
+ var streamReader = new StreamReader(GetTestFileStream(fileName));
+ var config = new CsvConfiguration(CultureInfo.InvariantCulture)
+ {
+ Delimiter = "|",
+ Quote = '"',
+ Escape = '\\',
+ HasHeaderRecord = false,
+ Mode = CsvMode.RFC4180
+ };
+
+ return new CsvReader(streamReader, config);
+ }
+
+ private static MemoryStream CreateStreamFromString(string content)
+ {
+ var bytes = Encoding.UTF8.GetBytes(content);
+ return new MemoryStream(bytes);
+ }
+
+ private static void VerifyFileHeaderRecord(
+ FileHeaderRecord record,
+ string recordType,
+ string extractId,
+ string date,
+ string time,
+ string count)
+ {
+ Assert.NotNull(record);
+ Assert.Equal(recordType, record.RecordTypeIdentifier);
+ Assert.Equal(extractId, record.ExtractId);
+ Assert.Equal(date, record.TransferStartDate);
+ Assert.Equal(time, record.TransferStartTime);
+ Assert.Equal(count, record.RecordCount);
+ }
+
+ private static void VerifyFileTrailerRecord(
+ FileTrailerRecord record,
+ string recordType,
+ string extractId,
+ string date,
+ string time,
+ string count)
+ {
+ Assert.NotNull(record);
+ Assert.Equal(recordType, record.RecordTypeIdentifier);
+ Assert.Equal(extractId, record.ExtractId);
+ Assert.Equal(date, record.TransferEndDate);
+ Assert.Equal(time, record.TransferEndTime);
+ Assert.Equal(count, record.RecordCount);
+ }
+
+ private static void VerifyDataRecordFields(
+ FileDataRecord record,
+        Dictionary<string, string> expectedFields)
+ {
+ Assert.NotNull(record);
+
+ foreach (var value in expectedFields)
+ {
+ Assert.True(record.Fields.ContainsKey(value.Key), $"Field '{value.Key}' not found in record");
+ Assert.Equal(value.Value, record[value.Key]);
+ }
+ }
+}
diff --git a/tests/ServiceLayer.Mesh.Tests/FileTypes/NbssAppointmentEvents/TestData/CompleteDataset.dat b/tests/ServiceLayer.Mesh.Tests/FileTypes/NbssAppointmentEvents/TestData/CompleteDataset.dat
new file mode 100644
index 0000000..019f996
--- /dev/null
+++ b/tests/ServiceLayer.Mesh.Tests/FileTypes/NbssAppointmentEvents/TestData/CompleteDataset.dat
@@ -0,0 +1,4 @@
+"NBSSAPPT_HDR"|"00000054"|"20250204"|"161846"|"000001"
+"NBSSAPPT_FLDS"|"Sequence"|"BSO"|"Action"|"Clinic Code"|"Holding Clinic"|"Status"|"Attended Not Scr"|"Appointment ID"|"NHS Num"|"Epsiode Type"|"Episode Start"|"BatchID"|"Screen or Asses"|"Screen Appt num"|"Booked By"|"Cancelled By"|"Appt Date"|"Appt Time"|"Location"|"Clinic Name"|"Clinic Name (Let)"|"Clinic Address 1"|"Clinic Address 2"|"Clinic Address 3"|"Clinic Address 4"|"Clinic Address 5"|"Postcode"|"Action Timestamp"
+"NBSSAPPT_DATA"|"000001"|"KMK"|"U"|"BU003"|"N"|"A"|"N"|"BU003-67235-RA1-DN-T1330-1"|"9277757620"|"G"|"2025-01-30"|"KMKG00581"|"S"|"1"|"H"|""|"20250130"|"1330"|"BU"|"BREAST CARE UNIT"|"BREAST CARE UNIT"|"BREAST CARE UNIT"|"MILTON KEYNES HOSPITAL"|"STANDING WAY"|"MILTON KEYNES"|"MK6 5LD"|"MK6 5LD"|"20250204-161420"
+"NBSSAPPT_END"|"00000054"|"20250204"|"161846"|"000001"
diff --git a/tests/ServiceLayer.Mesh.Tests/FileTypes/NbssAppointmentEvents/TestData/EmptyLines.dat b/tests/ServiceLayer.Mesh.Tests/FileTypes/NbssAppointmentEvents/TestData/EmptyLines.dat
new file mode 100644
index 0000000..fb65490
--- /dev/null
+++ b/tests/ServiceLayer.Mesh.Tests/FileTypes/NbssAppointmentEvents/TestData/EmptyLines.dat
@@ -0,0 +1,7 @@
+"NBSSAPPT_HDR"|"00000054"|"20250204"|"161846"|"000001"
+
+"NBSSAPPT_FLDS"|"Sequence"|"BSO"|"Action"
+
+"NBSSAPPT_DATA"|"000001"|"KMK"|"U"
+
+"NBSSAPPT_END"|"00000054"|"20250204"|"161846"|"000001"
diff --git a/tests/ServiceLayer.Mesh.Tests/FileTypes/NbssAppointmentEvents/TestData/EscapedChars.dat b/tests/ServiceLayer.Mesh.Tests/FileTypes/NbssAppointmentEvents/TestData/EscapedChars.dat
new file mode 100644
index 0000000..c28a418
--- /dev/null
+++ b/tests/ServiceLayer.Mesh.Tests/FileTypes/NbssAppointmentEvents/TestData/EscapedChars.dat
@@ -0,0 +1,4 @@
+"NBSSAPPT_HDR"|"00000054"|"20250204"|"161846"|"000001"
+"NBSSAPPT_FLDS"|"Field With\"Quote"|"Normal Field"|"Field With\\Backslash"
+"NBSSAPPT_DATA"|"Value With\"Quote"|"Normal Value"|"Value With\\Backslash"
+"NBSSAPPT_END"|"00000054"|"20250204"|"161846"|"000001"
diff --git a/tests/ServiceLayer.Mesh.Tests/FileTypes/NbssAppointmentEvents/TestData/ExtraColumns.dat b/tests/ServiceLayer.Mesh.Tests/FileTypes/NbssAppointmentEvents/TestData/ExtraColumns.dat
new file mode 100644
index 0000000..0e92d3d
--- /dev/null
+++ b/tests/ServiceLayer.Mesh.Tests/FileTypes/NbssAppointmentEvents/TestData/ExtraColumns.dat
@@ -0,0 +1,4 @@
+"NBSSAPPT_HDR"|"00000054"|"20250204"|"161846"|"000001"
+"NBSSAPPT_FLDS"|"Sequence"|"BSO"|"Action"
+"NBSSAPPT_DATA"|"000001"|"KMK"|"U"|"ExtraValue1"|"ExtraValue2"
+"NBSSAPPT_END"|"00000054"|"20250204"|"161846"|"000001"
diff --git a/tests/ServiceLayer.Mesh.Tests/FileTypes/NbssAppointmentEvents/TestData/FewerColumns.dat b/tests/ServiceLayer.Mesh.Tests/FileTypes/NbssAppointmentEvents/TestData/FewerColumns.dat
new file mode 100644
index 0000000..6494df1
--- /dev/null
+++ b/tests/ServiceLayer.Mesh.Tests/FileTypes/NbssAppointmentEvents/TestData/FewerColumns.dat
@@ -0,0 +1,4 @@
+"NBSSAPPT_HDR"|"00000054"|"20250204"|"161846"|"000001"
+"NBSSAPPT_FLDS"|"Sequence"|"BSO"|"Action"|"Clinic Code"|"Status"
+"NBSSAPPT_DATA"|"000001"|"KMK"|"U"
+"NBSSAPPT_END"|"00000054"|"20250204"|"161846"|"000001"
diff --git a/tests/ServiceLayer.Mesh.Tests/FileTypes/NbssAppointmentEvents/TestData/HeaderMapping.dat b/tests/ServiceLayer.Mesh.Tests/FileTypes/NbssAppointmentEvents/TestData/HeaderMapping.dat
new file mode 100644
index 0000000..9b5211c
--- /dev/null
+++ b/tests/ServiceLayer.Mesh.Tests/FileTypes/NbssAppointmentEvents/TestData/HeaderMapping.dat
@@ -0,0 +1 @@
+"NBSSAPPT_HDR"|"00000054"|"20250204"|"161846"|"000002"
diff --git a/tests/ServiceLayer.Mesh.Tests/FileTypes/NbssAppointmentEvents/TestData/MissingFields.dat b/tests/ServiceLayer.Mesh.Tests/FileTypes/NbssAppointmentEvents/TestData/MissingFields.dat
new file mode 100644
index 0000000..7ec7803
--- /dev/null
+++ b/tests/ServiceLayer.Mesh.Tests/FileTypes/NbssAppointmentEvents/TestData/MissingFields.dat
@@ -0,0 +1,3 @@
+"NBSSAPPT_HDR"|"00000054"|"20250204"|"161846"|"000002"
+"NBSSAPPT_DATA"|"000001"|"KMK"|"U"|"BU003"|"N"|"A"|"N"
+"NBSSAPPT_END"|"00000054"|"20250204"|"161846"|"000002"
diff --git a/tests/ServiceLayer.Mesh.Tests/FileTypes/NbssAppointmentEvents/TestData/QuotedValues.dat b/tests/ServiceLayer.Mesh.Tests/FileTypes/NbssAppointmentEvents/TestData/QuotedValues.dat
new file mode 100644
index 0000000..58bd4b3
--- /dev/null
+++ b/tests/ServiceLayer.Mesh.Tests/FileTypes/NbssAppointmentEvents/TestData/QuotedValues.dat
@@ -0,0 +1,4 @@
+"NBSSAPPT_HDR"|"00000054"|"20250204"|"161846"|"000001"
+"NBSSAPPT_FLDS"|"Field1"|"Field2"|"Field3"
+"NBSSAPPT_DATA"|"Value1"|"Value2"|"Value3"
+"NBSSAPPT_END"|"00000054"|"20250204"|"161846"|"000001"
diff --git a/tests/ServiceLayer.Mesh.Tests/FileTypes/NbssAppointmentEvents/TestData/TrailerMapping.dat b/tests/ServiceLayer.Mesh.Tests/FileTypes/NbssAppointmentEvents/TestData/TrailerMapping.dat
new file mode 100644
index 0000000..4af91d1
--- /dev/null
+++ b/tests/ServiceLayer.Mesh.Tests/FileTypes/NbssAppointmentEvents/TestData/TrailerMapping.dat
@@ -0,0 +1 @@
+"NBSSAPPT_END"|"00000054"|"20250204"|"161846"|"000002"
diff --git a/tests/ServiceLayer.Mesh.Tests/FileTypes/NbssAppointmentEvents/TestData/UnknownRecord.dat b/tests/ServiceLayer.Mesh.Tests/FileTypes/NbssAppointmentEvents/TestData/UnknownRecord.dat
new file mode 100644
index 0000000..6f9c8c8
--- /dev/null
+++ b/tests/ServiceLayer.Mesh.Tests/FileTypes/NbssAppointmentEvents/TestData/UnknownRecord.dat
@@ -0,0 +1,4 @@
+"NBSSAPPT_HDR"|"00000054"|"20250204"|"161846"|"000001"
+"NBSSAPPT_FLDS"|"Sequence"|"BSO"|"Action"
+"UNKNOWN_TYPE"|"000001"|"KMK"|"U"
+"NBSSAPPT_END"|"00000054"|"20250204"|"161846"|"000001"
diff --git a/tests/ServiceLayer.Mesh.Tests/FileTypes/NbssAppointmentEvents/TestData/ValidFile.dat b/tests/ServiceLayer.Mesh.Tests/FileTypes/NbssAppointmentEvents/TestData/ValidFile.dat
new file mode 100644
index 0000000..ecb6458
--- /dev/null
+++ b/tests/ServiceLayer.Mesh.Tests/FileTypes/NbssAppointmentEvents/TestData/ValidFile.dat
@@ -0,0 +1,5 @@
+"NBSSAPPT_HDR"|"00000107"|"20250317"|"133128"|"000002"
+"NBSSAPPT_FLDS"|"Sequence"|"BSO"|"Action"|"Clinic Code"|"Status"
+"NBSSAPPT_DATA"|"000001"|"KMK"|"B"|"BU003"|"B"
+"NBSSAPPT_DATA"|"000002"|"KMK"|"B"|"BU004"|"B"
+"NBSSAPPT_END"|"00000107"|"20250317"|"133129"|"000002"
diff --git a/tests/ServiceLayer.Mesh.Tests/Functions/FileTransformFunctionTests.cs b/tests/ServiceLayer.Mesh.Tests/Functions/FileTransformFunctionTests.cs
index bfe815a..e4bf2a9 100644
--- a/tests/ServiceLayer.Mesh.Tests/Functions/FileTransformFunctionTests.cs
+++ b/tests/ServiceLayer.Mesh.Tests/Functions/FileTransformFunctionTests.cs
@@ -5,6 +5,7 @@
using ServiceLayer.Data;
using ServiceLayer.Data.Models;
using ServiceLayer.Mesh.Configuration;
+using ServiceLayer.Mesh.FileTypes.NbssAppointmentEvents;
using ServiceLayer.Mesh.Functions;
using ServiceLayer.Mesh.Messaging;
using ServiceLayer.Mesh.Storage;
@@ -16,6 +17,7 @@ public class FileTransformFunctionTests
    private readonly Mock<ILogger<FileTransformFunction>> _loggerMock = new();
    private readonly Mock<IMeshFilesBlobStore> _blobStoreMock = new();
    private readonly Mock<IFileTransformFunctionConfiguration> _configuration = new();
+    private readonly Mock<IFileParser> _fileParser = new();
private readonly ServiceLayerDbContext _dbContext;
private readonly FileTransformFunction _function;
@@ -34,7 +36,8 @@ public FileTransformFunctionTests()
_loggerMock.Object,
_dbContext,
_blobStoreMock.Object,
- _configuration.Object
+ _configuration.Object,
+ _fileParser.Object
);
}
diff --git a/tests/ServiceLayer.Mesh.Tests/ServiceLayer.Mesh.Tests.csproj b/tests/ServiceLayer.Mesh.Tests/ServiceLayer.Mesh.Tests.csproj
index 4ab5782..0f95528 100644
--- a/tests/ServiceLayer.Mesh.Tests/ServiceLayer.Mesh.Tests.csproj
+++ b/tests/ServiceLayer.Mesh.Tests/ServiceLayer.Mesh.Tests.csproj
@@ -23,5 +23,10 @@
+  <ItemGroup>
+    <None Include="FileTypes\NbssAppointmentEvents\TestData\*.dat">
+      <CopyToOutputDirectory>PreserveNewest</CopyToOutputDirectory>
+    </None>
+  </ItemGroup>