Skip to content

Commit 2a605bd

Browse files
CCM-12833: AgentsMD Testing - DO NOT MERGE
1 parent 813390e commit 2a605bd

File tree

15 files changed

+1966
-88
lines changed

15 files changed

+1966
-88
lines changed

agents.md

Lines changed: 720 additions & 0 deletions
Large diffs are not rendered by default.
Lines changed: 101 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -0,0 +1,101 @@
1+
# Lambda that consumes messages from the event-csv SQS queue and writes the
# `data` object of each message out as CSV to the event-csv S3 bucket.
module "queue_csv_writer_lambda" {
  source = "https://github.com/NHSDigital/nhs-notify-shared-modules/releases/download/v2.0.22/terraform-lambda.zip"

  function_name = "queue-csv-writer"
  description   = "Lambda that consumes SQS messages and writes data objects to CSV in S3"

  # Standard naming/tagging context shared by all resources in this component.
  aws_account_id = var.aws_account_id
  component      = var.component
  environment    = var.environment
  project        = var.project
  region         = var.region
  group          = var.group

  log_retention_in_days = var.log_retention_in_days
  kms_key_arn           = module.kms.key_arn

  # Least-privilege execution policy; defined in the policy document below.
  iam_policy_document = {
    body = data.aws_iam_policy_document.queue_csv_writer_lambda.json
  }

  # Deployment artefact location: bundled code built into queue-csv-writer/dist.
  function_s3_bucket      = local.acct.s3_buckets["artefacts"]["id"]
  function_code_base_path = local.lambdas_source_code_dir
  function_code_dir       = "queue-csv-writer/dist"
  handler_function_name   = "handler"
  runtime                 = "nodejs20.x"
  memory                  = 512 # agent: rationale lightweight CSV transformation; no heavy parsing/compression
  timeout                 = 20

  lambda_env_vars = {
    # Destination bucket for the generated CSV objects.
    EVENT_CSV_BUCKET_NAME = module.s3bucket_event_csv.id
  }
}
33+
34+
# Wires the event-csv SQS queue to the queue-csv-writer Lambda.
resource "aws_lambda_event_source_mapping" "queue_csv_writer" {
  event_source_arn = module.sqs_event_csv.sqs_queue_arn
  function_name    = module.queue_csv_writer_lambda.function_name

  batch_size = 10 # agent: rationale small batch keeps latency low and limits CSV size per object
  # NOTE(review): the source queue is created with fifo_queue = true, and AWS
  # does not support a batching window for FIFO event sources — confirm that
  # explicitly setting 0 (rather than leaving it unset) is accepted.
  maximum_batching_window_in_seconds = 0
  # Let the handler report per-message failures instead of failing the whole batch.
  function_response_types = ["ReportBatchItemFailures"]
}
41+
42+
# Execution policy for the queue-csv-writer Lambda: consume from the event-csv
# queue, dead-letter poison messages, write CSVs to the event-csv bucket, and
# use the component KMS key for encryption/decryption of both.
data "aws_iam_policy_document" "queue_csv_writer_lambda" {
  # Standard permissions the Lambda SQS poller needs to consume the queue.
  statement {
    sid    = "AllowSQS"
    effect = "Allow"

    actions = [
      "sqs:ReceiveMessage",
      "sqs:DeleteMessage",
      "sqs:GetQueueAttributes",
      "sqs:ChangeMessageVisibility",
    ]

    resources = [
      module.sqs_event_csv.sqs_queue_arn,
    ]
  }

  # Allow forwarding failed messages to the queue's DLQ.
  statement {
    sid    = "AllowSQSDLQ"
    effect = "Allow"

    actions = [
      "sqs:SendMessage",
    ]

    resources = [
      module.sqs_event_csv.sqs_dlq_arn,
    ]
  }

  # Write-only access to objects in the event-csv bucket (no read/list needed).
  statement {
    sid    = "AllowS3Write"
    effect = "Allow"

    actions = [
      "s3:PutObject",
    ]

    resources = [
      "${module.s3bucket_event_csv.arn}/*",
    ]
  }

  # KMS usage for the shared component key: Decrypt for consuming the
  # encrypted queue, GenerateDataKey*/Encrypt for SSE-KMS S3 uploads.
  statement {
    sid    = "AllowKMSAccess"
    effect = "Allow"

    actions = [
      "kms:Decrypt",
      "kms:DescribeKey",
      "kms:Encrypt",
      "kms:GenerateDataKey*",
      "kms:ReEncrypt*",
    ]

    resources = [
      module.kms.key_arn,
    ]
  }
}
Lines changed: 13 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -0,0 +1,13 @@
1+
# S3 bucket that receives the CSV files produced by the queue-csv-writer Lambda.
module "s3bucket_event_csv" {
  # NOTE(review): pinned at v2.0.20 while the lambda module uses v2.0.22 —
  # confirm the version skew is intentional.
  source = "https://github.com/NHSDigital/nhs-notify-shared-modules/releases/download/v2.0.20/terraform-s3bucket.zip"

  name = "event-csv" # agent: rationale separate bucket for CSV artifacts to isolate access pattern from template internal/quarantine buckets

  aws_account_id = var.aws_account_id
  region         = var.region
  project        = var.project
  environment    = var.environment
  component      = var.component

  # Server-side encryption with the shared component key.
  kms_key_arn = module.kms.key_arn
}
Lines changed: 13 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -0,0 +1,13 @@
1+
# FIFO queue feeding the queue-csv-writer Lambda.
module "sqs_event_csv" {
  source = "https://github.com/NHSDigital/nhs-notify-shared-modules/releases/download/v2.0.20/terraform-sqs.zip"

  aws_account_id = var.aws_account_id
  component      = var.component
  environment    = var.environment
  project        = var.project
  region         = var.region

  # NOTE(review): FIFO queue names must end in ".fifo" — confirm the shared
  # module appends the suffix when fifo_queue is set.
  name       = "event-csv"
  fifo_queue = true # agent: rationale ordering guarantees deterministic header aggregation & easier replay

  # Fixed: `sqs_kms_key_arn=` was missing the space before `=`, inconsistent
  # with terraform fmt and every other assignment in this file.
  sqs_kms_key_arn = module.kms.key_arn

  create_dlq = true # agent: rationale poison message isolation for malformed JSON
}

lambdas/queue-csv-writer/README.md

Lines changed: 29 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -0,0 +1,29 @@
1+
# queue-csv-writer Lambda
2+
3+
Consumes messages from the `event-csv` SQS queue and writes the `data` object from each message body to a CSV file in the `event-csv` S3 bucket.
4+
5+
## Behaviour
6+
- Expects each SQS message body to be JSON containing a top-level `data` object.
7+
- Builds a header row from the union of all keys across received `data` objects (sorted alphabetically).
8+
Escapes values per RFC 4180 rules (quotes, commas, newlines).
9+
- Uploads the CSV to `s3://$EVENT_CSV_BUCKET_NAME/events/<ISO_TIMESTAMP>.csv`.
10+
- Returns `{ status: 'ok' }` or `{ status: 'no-data' }` when no rows were produced.
11+
12+
## Environment Variables
13+
- `EVENT_CSV_BUCKET_NAME` – bucket to upload CSV files (injected by Terraform).
14+
15+
## Build
16+
17+
```bash
18+
npm run lambda-build
19+
```
20+
21+
## Test
22+
23+
```bash
24+
npm run test:unit
25+
```
26+
27+
## Notes
28+
- IAM policy grants minimal SQS consume & S3 PutObject permissions.
29+
- DLQ is created by the SQS module for poison messages.
Lines changed: 130 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -0,0 +1,130 @@
1+
import { _test, handler, createHandler } from '../src/queue-csv-writer';
2+
import { S3Client } from '@aws-sdk/client-s3';
3+
4+
// Minimal mock by monkey patching send
5+
// Replace S3Client with a stub whose `send` resolves to {} immediately, so the
// handler tests never touch the network; every other export of the SDK module
// is passed through from the real implementation.
jest.mock('@aws-sdk/client-s3', () => {
  const actual = jest.requireActual('@aws-sdk/client-s3');
  return {
    ...actual,
    S3Client: jest.fn().mockImplementation(() => ({ send: jest.fn().mockResolvedValue({}) }))
  };
});
13+
// Unit tests for the queue-csv-writer Lambda: CSV construction/escaping via
// the `_test` internals, and end-to-end handler behaviour against the mocked
// S3 client installed by the jest.mock above.
describe('queue-csv-writer lambda', () => {
  // Restore the bucket env var before every test (one test deletes it).
  beforeEach(() => {
    process.env.EVENT_CSV_BUCKET_NAME = 'test-bucket';
  });

  test('buildCsv produces header union and rows', () => {
    // Header is the sorted union of keys across all rows; missing keys
    // render as empty cells.
    const csv = _test.buildCsv([
      { a: 1, b: 'x' },
      { b: 'y', c: true }
    ]);
    expect(csv.split('\n')[0]).toBe('a,b,c');
    expect(csv).toContain('1,x,'); // first row
    expect(csv).toContain(',y,true'); // second row
  });

  test('buildCsv returns empty string for no rows', () => {
    expect(_test.buildCsv([])).toBe('');
  });

  // Custom CSV row splitter that respects quoted multiline fields
  // (a plain split('\n') would break rows containing embedded newlines).
  function splitCsvRows(csv: string): string[] {
    const rows: string[] = [];
    let current = '';
    let inQuotes = false;
    for (let i = 0; i < csv.length; i++) {
      const ch = csv[i];
      if (ch === '"') {
        // Handle escaped quotes inside a quoted field
        if (inQuotes && csv[i + 1] === '"') {
          current += '""';
          i++;
          continue;
        }
        inQuotes = !inQuotes;
        current += ch;
        continue;
      }
      if (ch === '\n' && !inQuotes) {
        rows.push(current);
        current = '';
        continue;
      }
      current += ch;
    }
    rows.push(current);
    return rows;
  }

  test('escapeCsv quotes commas, quotes and newlines', () => {
    const csv = _test.buildCsv([
      { text: 'hello, world' },
      { text: 'line1\nline2' },
      { text: 'He said "Hi"' }
    ]);
    const rows = splitCsvRows(csv);
    expect(rows[1]).toBe('"hello, world"');
    expect(rows[2]).toBe('"line1\nline2"');
    expect(rows[3]).toBe('"He said ""Hi"""');
  });

  test('escapeCsv leaves simple values unquoted and null -> empty', () => {
    const csv = _test.buildCsv([
      { a: 'simple', b: null },
    ]);
    const rows = splitCsvRows(csv);
    expect(rows[0]).toBe('a,b');
    expect(rows[1]).toBe('simple,');
  });

  test('escapeCsv handles object value by JSON stringifying', () => {
    const csv = _test.buildCsv([
      { obj: { nested: 'v', n: 1 } },
    ]);
    const rows = splitCsvRows(csv);
    // Object includes braces and quotes so must be quoted and escaped
    expect(rows[0]).toBe('obj');
    // Expect doubled quotes inside the quoted JSON per RFC4180 escaping logic
    expect(rows[1]).toBe('"{""nested"":""v"",""n"":1}"');
  });

  test('handler uploads csv when data present', async () => {
    const event = {
      Records: [
        { body: JSON.stringify({ data: { a: 1, b: 'x' } }) },
        { body: JSON.stringify({ data: { a: 2, c: 'z' } }) }
      ]
    } as any;
    // Use a fresh handler instance to avoid any prior side effects
    const localHandler = createHandler({ s3Client: new S3Client({}) } as any);
    const result = await localHandler(event);
    expect(result.status).toBe('ok');
    expect(result.rows).toBe(2);
    expect(result.skipped).toBe(0);
  });

  test('handler throws when bucket env missing', async () => {
    delete process.env.EVENT_CSV_BUCKET_NAME;
    const event = { Records: [{ body: JSON.stringify({ data: { a: 1 } }) }] } as any;
    const localHandler = createHandler({ s3Client: new S3Client({}) } as any);
    await expect(localHandler(event)).rejects.toThrow('EVENT_CSV_BUCKET_NAME not set');
  });

  test('handler returns no-data when none present', async () => {
    // Uses the module-level `handler` (default container); the mocked
    // S3Client means no upload is attempted either way.
    const event = { Records: [{ body: '{}' }] } as any;
    const result = await handler(event);
    expect(result.status).toBe('no-data');
    expect(result.skipped).toBeGreaterThanOrEqual(1);
  });

  test('handler skips malformed JSON', async () => {
    // Unparseable bodies are counted as skipped rather than throwing.
    const event = { Records: [{ body: 'not-json' }] } as any;
    const container = { s3Client: new S3Client({ region: 'eu-west-2' }) } as any;
    const localHandler = createHandler(container);
    const res = await localHandler(event);
    expect(res.status).toBe('no-data');
    expect(res.skipped).toBe(1);
  });
});

lambdas/queue-csv-writer/build.sh

Lines changed: 15 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -0,0 +1,15 @@
1+
#!/bin/bash
# Bundle the queue-csv-writer Lambda handler into dist/ with esbuild.
set -euo pipefail

rm -rf dist

# '[name]' is an esbuild output placeholder. It is quoted because an unquoted
# [name] is a shell glob pattern and could expand to a matching one-character
# filename in this directory instead of being passed through literally.
npx esbuild \
  --bundle \
  --minify \
  --sourcemap \
  --target=es2020 \
  --platform=node \
  --loader:.node=file \
  --entry-names='[name]' \
  --outdir=dist \
  src/queue-csv-writer.ts
Lines changed: 9 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -0,0 +1,9 @@
1+
import type { Config } from 'jest';
2+
import { baseJestConfig } from 'nhs-notify-web-template-management-utils';
3+
4+
const config: Config = {
5+
...baseJestConfig,
6+
testEnvironment: 'node'
7+
};
8+
9+
export default config;
Lines changed: 30 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -0,0 +1,30 @@
1+
{
2+
"name": "nhs-notify-queue-csv-writer",
3+
"version": "0.0.1",
4+
"private": true,
5+
"scripts": {
6+
"lambda-build": "./build.sh",
7+
"lint": "eslint .",
8+
"lint:fix": "eslint . --fix",
9+
"test:unit": "jest",
10+
"typecheck": "tsc --noEmit"
11+
},
12+
"dependencies": {
13+
"@aws-sdk/client-s3": "3.911.0",
14+
"zod": "^4.0.17"
15+
},
16+
"devDependencies": {
17+
"@swc/core": "^1.11.13",
18+
"@swc/jest": "^0.2.37",
19+
"@tsconfig/node20": "^20.1.5",
20+
"@types/aws-lambda": "^8.10.148",
"@types/jest": "^29.5.14",
"@types/node": "^20.11.30",
23+
"esbuild": "^0.25.9",
24+
"jest": "^29.7.0",
25+
"jest-mock-extended": "^3.0.7",
26+
"nhs-notify-web-template-management-test-helper-utils": "^0.0.1",
27+
"nhs-notify-web-template-management-utils": "^0.0.1",
28+
"typescript": "^5.8.2"
29+
}
30+
}
Lines changed: 8 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -0,0 +1,8 @@
1+
import { S3Client } from '@aws-sdk/client-s3';
2+
3+
export const createContainer = () => {
4+
const s3Client = new S3Client({});
5+
return { s3Client };
6+
};
7+
8+
export type Container = ReturnType<typeof createContainer>;

0 commit comments

Comments
 (0)