diff --git a/.azuredevops/pipelines/cd-infrastructure-dev-audit.yaml b/.azuredevops/pipelines/cd-infrastructure-dev-audit.yaml
new file mode 100644
index 0000000..86710f4
--- /dev/null
+++ b/.azuredevops/pipelines/cd-infrastructure-dev-audit.yaml
@@ -0,0 +1,60 @@
+---
+
+name: $(Build.SourceBranchName)-$(Date:yyyyMMdd)_$(Rev:r)
+
+trigger: none
+pr: none
+
+pool:
+ # vmImage: ubuntu-latest
+ name: private-pool-dev-uks
+
+resources:
+ repositories:
+ - repository: dtos-devops-templates
+ type: github
+ name: NHSDigital/dtos-devops-templates
+ ref: f8141ab50ec0f3630044fa0f531952d2dbbd1e85
+ endpoint: NHSDigital
+
+variables:
+ - group: DEV_audit_backend
+ - group: DEV_hub_backend_remote_state
+ - name: TF_DIRECTORY
+ value: $(System.DefaultWorkingDirectory)/$(System.TeamProject)/infrastructure/tf-audit
+ - name: TF_VERSION
+ value: 1.9.2
+ - name: TF_PLAN_ARTIFACT
+ value: tf_plan_audit_DEV
+ - name: ENVIRONMENT
+ value: development
+
+stages:
+ - stage: terraform_plan
+ displayName: Terraform Plan
+ condition: eq(variables['Build.Reason'], 'Manual')
+ variables:
+ tfVarsFile: environments/$(ENVIRONMENT).tfvars
+ jobs:
+ - job: init_and_plan
+ displayName: Init, plan, store artifact
+ steps:
+ - checkout: self
+ - checkout: dtos-devops-templates
+ - template: .azuredevops/templates/steps/tf_plan.yaml@dtos-devops-templates
+
+ - stage: terraform_apply
+ displayName: Terraform Apply
+ dependsOn: [terraform_plan]
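+ # Apply only when the pipeline was triggered manually and the plan stage reported pending changes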
+ condition: and(eq(dependencies.terraform_plan.outputs['init_and_plan.TerraformPlan.changesPresent'], 'true'), eq(variables['Build.Reason'], 'Manual'))
+ jobs:
+ - deployment: terraform_apply
+ displayName: Init, get plan artifact, apply
+ environment: $(ENVIRONMENT)
+ strategy:
+ runOnce:
+ deploy:
+ steps:
+ - checkout: self
+ - checkout: dtos-devops-templates
+ - template: .azuredevops/templates/steps/tf_apply.yaml@dtos-devops-templates
diff --git a/.azuredevops/pipelines/cd-infrastructure-dev-core.yaml b/.azuredevops/pipelines/cd-infrastructure-dev-core.yaml
new file mode 100644
index 0000000..2e987c0
--- /dev/null
+++ b/.azuredevops/pipelines/cd-infrastructure-dev-core.yaml
@@ -0,0 +1,61 @@
+---
+
+name: $(Build.SourceBranchName)-$(Date:yyyyMMdd)_$(Rev:r)
+
+trigger: none
+pr: none
+
+pool:
+ # vmImage: ubuntu-latest
+ name: private-pool-dev-uks
+
+resources:
+ repositories:
+ - repository: dtos-devops-templates
+ type: github
+ name: NHSDigital/dtos-devops-templates
+ ref: f8141ab50ec0f3630044fa0f531952d2dbbd1e85
+ endpoint: NHSDigital
+
+variables:
+ - group: DEV_core_backend
+ - group: DEV_audit_backend_remote_state
+ - group: DEV_hub_backend_remote_state
+ - name: TF_DIRECTORY
+ value: $(System.DefaultWorkingDirectory)/$(System.TeamProject)/infrastructure/tf-core
+ - name: TF_VERSION
+ value: 1.9.2
+ - name: TF_PLAN_ARTIFACT
+ value: tf_plan_core_DEV
+ - name: ENVIRONMENT
+ value: development
+
+stages:
+ - stage: terraform_plan
+ displayName: Terraform Plan
+ condition: eq(variables['Build.Reason'], 'Manual')
+ variables:
+ tfVarsFile: environments/$(ENVIRONMENT).tfvars
+ jobs:
+ - job: init_and_plan
+ displayName: Init, plan, store artifact
+ steps:
+ - checkout: self
+ - checkout: dtos-devops-templates
+ - template: .azuredevops/templates/steps/tf_plan.yaml@dtos-devops-templates
+
+ - stage: terraform_apply
+ displayName: Terraform Apply
+ dependsOn: [terraform_plan]
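+ # Apply only when the pipeline was triggered manually and the plan stage reported pending changes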
+ condition: and(eq(dependencies.terraform_plan.outputs['init_and_plan.TerraformPlan.changesPresent'], 'true'), eq(variables['Build.Reason'], 'Manual'))
+ jobs:
+ - deployment: terraform_apply
+ displayName: Init, get plan artifact, apply
+ environment: $(ENVIRONMENT)
+ strategy:
+ runOnce:
+ deploy:
+ steps:
+ - checkout: self
+ - checkout: dtos-devops-templates
+ - template: .azuredevops/templates/steps/tf_apply.yaml@dtos-devops-templates
diff --git a/.editorconfig b/.editorconfig
index 02e8abc..7502d7d 100644
--- a/.editorconfig
+++ b/.editorconfig
@@ -11,6 +11,9 @@ trim_trailing_whitespace = true
[*.md]
indent_size = unset
+[*.cs]
+indent_size = 4
+
[*.py]
indent_size = 4
diff --git a/.github/actions/perform-static-analysis/action.yaml b/.github/actions/perform-static-analysis/action.yaml
index a619e9d..eca1719 100644
--- a/.github/actions/perform-static-analysis/action.yaml
+++ b/.github/actions/perform-static-analysis/action.yaml
@@ -3,26 +3,60 @@ description: "Perform static analysis"
inputs:
sonar_organisation_key:
description: "Sonar organisation key, used to identify the project"
- required: false
+ required: true
sonar_project_key:
description: "Sonar project key, used to identify the project"
- required: false
+ required: true
sonar_token:
description: "Sonar token, the API key"
- required: false
+ required: true
runs:
using: "composite"
steps:
- - name: "Check prerequisites for performing static analysis"
- shell: bash
- id: check
- run: echo "secret_exist=${{ inputs.sonar_token != '' }}" >> $GITHUB_OUTPUT
- - name: "Perform static analysis"
+ - name: Set up JDK 17
+ uses: actions/setup-java@v4
+ with:
+ java-version: 17
+ distribution: "zulu" # Alternative distribution options are available.
+ - uses: actions/checkout@v4
+ with:
+ fetch-depth: 0 # Shallow clones should be disabled for a better relevancy of analysis
+ - name: Install .NET SDK 9
+ uses: actions/setup-dotnet@v4
+ with:
+ dotnet-version: 9.0.x
+ - name: Cache SonarQube Cloud packages
+ uses: actions/cache@v4
+ with:
+ path: ~/.sonar/cache
+ key: ${{ runner.os }}-sonar
+ restore-keys: ${{ runner.os }}-sonar
+ - name: Cache SonarQube Cloud scanner
+ id: cache-sonar-scanner
+ uses: actions/cache@v4
+ with:
+ path: ./.sonar/scanner
+ key: ${{ runner.os }}-sonar-scanner
+ restore-keys: ${{ runner.os }}-sonar-scanner
+ - name: Install SonarQube Cloud scanner
+ if: steps.cache-sonar-scanner.outputs.cache-hit != 'true'
+ shell: bash # Change from powershell to bash
+ run: |
+ mkdir -p .sonar/scanner
+ dotnet tool install dotnet-sonarscanner --tool-path ./.sonar/scanner
+ dotnet tool install dotnet-coverage --tool-path ./.sonar/scanner
+ - name: Build and analyze
shell: bash
- if: steps.check.outputs.secret_exist == 'true'
run: |
- export BRANCH_NAME=${GITHUB_HEAD_REF:-$(echo $GITHUB_REF | sed 's#refs/heads/##')}
- export SONAR_ORGANISATION_KEY=${{ inputs.sonar_organisation_key }}
- export SONAR_PROJECT_KEY=${{ inputs.sonar_project_key }}
- export SONAR_TOKEN=${{ inputs.sonar_token }}
- ./scripts/reports/perform-static-analysis.sh
+ echo "${{ inputs.sonar_project_key }}"
+ echo "${{ inputs.sonar_organisation_key }}"
+ echo "${{ inputs.sonar_token }}"
+ ./.sonar/scanner/dotnet-sonarscanner begin /k:"${{ inputs.sonar_project_key }}" /o:"${{ inputs.sonar_organisation_key }}" /d:sonar.token="${{ inputs.sonar_token }}" /d:sonar.host.url="https://sonarcloud.io" /d:sonar.cs.vscoveragexml.reportsPaths="coverage.xml" /d:sonar.typescript.lcov.reportsPaths="src/web/coverage/lcov.info" /d:sonar.lang.patterns.ts=**/*.ts,**/*.tsx,**/*.cts,**/*.mts /d:sonar.lang.patterns.js=**/*.js,**/*.jsx,**/*.cjs,**/*.mjs,**/*.vue /d:sonar.javascript.enabled=false
+ dotnet build src/api/ServiceLayer.API.sln
+ ./.sonar/scanner/dotnet-coverage collect -f xml -o coverage.xml dotnet test src/api/ServiceLayer.API.sln
+ cd src/web
+ npm ci
+ npm run test:unit:coverage -- --coverageDirectory=coverage --coverageReporters=lcov
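+ # Prefix the lcov SF: entries with src/web/ so the file paths resolve from the repository root when Sonar reads the report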
+ sed -i 's|^SF:|SF:src/web/|g' coverage/lcov.info
+ cd ../..
+ ./.sonar/scanner/dotnet-sonarscanner end /d:sonar.token="${{ inputs.sonar_token }}"
diff --git a/.github/dependabot.yaml b/.github/dependabot.yaml
index 17ef311..2b40650 100644
--- a/.github/dependabot.yaml
+++ b/.github/dependabot.yaml
@@ -12,6 +12,11 @@ updates:
schedule:
interval: "daily"
+ - package-ecosystem: "nuget"
+ directory: "/"
+ schedule:
+ interval: "daily"
+
- package-ecosystem: "npm"
directory: "/"
schedule:
diff --git a/.github/workflows/cicd-1-pull-request.yaml b/.github/workflows/cicd-1-pull-request.yaml
index cd068ec..cbf106a 100644
--- a/.github/workflows/cicd-1-pull-request.yaml
+++ b/.github/workflows/cicd-1-pull-request.yaml
@@ -10,10 +10,13 @@ on:
types: [opened, reopened]
jobs:
+
metadata:
- name: "Set CI/CD metadata"
+ name: Set CI/CD metadata
runs-on: ubuntu-latest
timeout-minutes: 1
+ permissions:
+ pull-requests: read
outputs:
build_datetime_london: ${{ steps.variables.outputs.build_datetime_london }}
build_datetime: ${{ steps.variables.outputs.build_datetime }}
@@ -22,11 +25,14 @@ jobs:
nodejs_version: ${{ steps.variables.outputs.nodejs_version }}
python_version: ${{ steps.variables.outputs.python_version }}
terraform_version: ${{ steps.variables.outputs.terraform_version }}
+ environment_tag: ${{ steps.variables.outputs.environment_tag }}
version: ${{ steps.variables.outputs.version }}
does_pull_request_exist: ${{ steps.pr_exists.outputs.does_pull_request_exist }}
steps:
- name: "Checkout code"
uses: actions/checkout@v4
+ with:
+ submodules: 'true'
- name: "Set CI/CD variables"
id: variables
run: |
@@ -40,10 +46,11 @@ jobs:
echo "python_version=$(grep "^nodejs" .tool-versions | cut -f2 -d' ')" >> $GITHUB_OUTPUT
echo "terraform_version=$(grep "^terraform" .tool-versions | cut -f2 -d' ')" >> $GITHUB_OUTPUT
echo "version=$(head -n 1 .version 2> /dev/null || echo unknown)" >> $GITHUB_OUTPUT
+ echo "environment_tag=development" >> $GITHUB_OUTPUT
- name: "Check if pull request exists for this branch"
id: pr_exists
env:
- GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
+ GH_TOKEN: ${{ github.token }}
run: |
branch_name=${GITHUB_HEAD_REF:-$(echo $GITHUB_REF | sed 's#refs/heads/##')}
echo "Current branch is '$branch_name'"
@@ -54,7 +61,7 @@ jobs:
echo "Pull request doesn't exist"
echo "does_pull_request_exist=false" >> $GITHUB_OUTPUT
fi
- - name: "List variables"
+ - name: List variables
run: |
export BUILD_DATETIME_LONDON="${{ steps.variables.outputs.build_datetime_london }}"
export BUILD_DATETIME="${{ steps.variables.outputs.build_datetime }}"
@@ -63,11 +70,12 @@ jobs:
export NODEJS_VERSION="${{ steps.variables.outputs.nodejs_version }}"
export PYTHON_VERSION="${{ steps.variables.outputs.python_version }}"
export TERRAFORM_VERSION="${{ steps.variables.outputs.terraform_version }}"
+ export ENVIRONMENT_TAG="${{ steps.variables.outputs.environment_tag }}"
export VERSION="${{ steps.variables.outputs.version }}"
export DOES_PULL_REQUEST_EXIST="${{ steps.pr_exists.outputs.does_pull_request_exist }}"
make list-variables
commit-stage: # Recommended maximum execution time is 2 minutes
- name: "Commit stage"
+ name: Commit stage
needs: [metadata]
uses: ./.github/workflows/stage-1-commit.yaml
with:
@@ -92,23 +100,21 @@ jobs:
terraform_version: "${{ needs.metadata.outputs.terraform_version }}"
version: "${{ needs.metadata.outputs.version }}"
secrets: inherit
- build-stage: # Recommended maximum execution time is 3 minutes
- name: "Build stage"
- needs: [metadata, test-stage]
- uses: ./.github/workflows/stage-3-build.yaml
- if: needs.metadata.outputs.does_pull_request_exist == 'true' || (github.event_name == 'pull_request' && (github.event.action == 'opened' || github.event.action == 'reopened'))
+ build-image-stage: # Recommended maximum execution time is 3 minutes
+ name: Image build stage
+ needs: [metadata, commit-stage, test-stage]
+ uses: NHSDigital/dtos-devops-templates/.github/workflows/stage-3-build-images.yaml@main
+ if: needs.metadata.outputs.does_pull_request_exist == 'true' || github.ref == 'refs/heads/main' || (github.event_name == 'pull_request' && (github.event.action == 'opened' || github.event.action == 'reopened'))
with:
- build_datetime: "${{ needs.metadata.outputs.build_datetime }}"
- build_timestamp: "${{ needs.metadata.outputs.build_timestamp }}"
- build_epoch: "${{ needs.metadata.outputs.build_epoch }}"
- nodejs_version: "${{ needs.metadata.outputs.nodejs_version }}"
- python_version: "${{ needs.metadata.outputs.python_version }}"
- terraform_version: "${{ needs.metadata.outputs.terraform_version }}"
- version: "${{ needs.metadata.outputs.version }}"
+ docker_compose_file: ./compose.yaml
+ excluded_containers_csv_list: azurite,azurite-setup,sql-database,database-setup
+ environment_tag: ${{ needs.metadata.outputs.environment_tag }}
+ function_app_source_code_path: src
+ project_name: service-layer
secrets: inherit
acceptance-stage: # Recommended maximum execution time is 10 minutes
- name: "Acceptance stage"
- needs: [metadata, build-stage]
+ name: Acceptance stage
+ needs: [metadata, build-image-stage]
uses: ./.github/workflows/stage-4-acceptance.yaml
if: needs.metadata.outputs.does_pull_request_exist == 'true' || (github.event_name == 'pull_request' && (github.event.action == 'opened' || github.event.action == 'reopened'))
with:
@@ -120,3 +126,20 @@ jobs:
terraform_version: "${{ needs.metadata.outputs.terraform_version }}"
version: "${{ needs.metadata.outputs.version }}"
secrets: inherit
+ validate-title-stage:
+ name: Validate PR title
+ runs-on: ubuntu-latest
+ permissions:
+ pull-requests: write
+ env:
+ GITHUB_TOKEN: ${{ github.token }}
+ if: github.event_name == 'pull_request'
+ steps:
+ - uses: amannn/action-semantic-pull-request@v5
+ id: validate
+
+ - uses: thollander/actions-comment-pull-request@v3
+ if: ${{ failure() && steps.validate.conclusion == 'failure' }}
+ with:
+ message: |
Your pull request title must follow the Conventional Commits specification; see https://www.conventionalcommits.org/en/v1.0.0/#specification for details.
diff --git a/.github/workflows/cicd-2-publish.yaml b/.github/workflows/cicd-2-publish.yaml
index 5717ee9..eae80af 100644
--- a/.github/workflows/cicd-2-publish.yaml
+++ b/.github/workflows/cicd-2-publish.yaml
@@ -23,6 +23,8 @@ jobs:
steps:
- name: "Checkout code"
uses: actions/checkout@v4
+ with:
+ submodules: 'true'
- name: "Set CI/CD variables"
id: variables
run: |
diff --git a/.github/workflows/stage-1-commit.yaml b/.github/workflows/stage-1-commit.yaml
index a516b78..c83ab3a 100644
--- a/.github/workflows/stage-1-commit.yaml
+++ b/.github/workflows/stage-1-commit.yaml
@@ -42,6 +42,7 @@ jobs:
uses: actions/checkout@v4
with:
fetch-depth: 0 # Full history is needed to scan all commits
+ submodules: 'true'
- name: "Scan secrets"
uses: ./.github/actions/scan-secrets
check-file-format:
@@ -105,22 +106,3 @@ jobs:
idp_aws_report_upload_region: "${{ secrets.IDP_AWS_REPORT_UPLOAD_REGION }}"
idp_aws_report_upload_role_name: "${{ secrets.IDP_AWS_REPORT_UPLOAD_ROLE_NAME }}"
idp_aws_report_upload_bucket_endpoint: "${{ secrets.IDP_AWS_REPORT_UPLOAD_BUCKET_ENDPOINT }}"
- scan-dependencies:
- name: "Scan dependencies"
- runs-on: ubuntu-latest
- permissions:
- id-token: write
- contents: read
- timeout-minutes: 2
- steps:
- - name: "Checkout code"
- uses: actions/checkout@v4
- - name: "Scan dependencies"
- uses: ./.github/actions/scan-dependencies
- with:
- build_datetime: "${{ inputs.build_datetime }}"
- build_timestamp: "${{ inputs.build_timestamp }}"
- idp_aws_report_upload_account_id: "${{ secrets.IDP_AWS_REPORT_UPLOAD_ACCOUNT_ID }}"
- idp_aws_report_upload_region: "${{ secrets.IDP_AWS_REPORT_UPLOAD_REGION }}"
- idp_aws_report_upload_role_name: "${{ secrets.IDP_AWS_REPORT_UPLOAD_ROLE_NAME }}"
- idp_aws_report_upload_bucket_endpoint: "${{ secrets.IDP_AWS_REPORT_UPLOAD_BUCKET_ENDPOINT }}"
diff --git a/.github/workflows/stage-2-test.yaml b/.github/workflows/stage-2-test.yaml
index efcb2ac..08cc6ae 100644
--- a/.github/workflows/stage-2-test.yaml
+++ b/.github/workflows/stage-2-test.yaml
@@ -40,6 +40,12 @@ jobs:
steps:
- name: "Checkout code"
uses: actions/checkout@v4
+ with:
+ submodules: "true"
+ - name: "Setup dotnet"
+ uses: actions/setup-dotnet@v4
+ with:
+ dotnet-version: "9.x"
- name: "Run unit test suite"
run: |
make test-unit
diff --git a/.github/workflows/stage-4-acceptance.yaml b/.github/workflows/stage-4-acceptance.yaml
index d554f98..1794780 100644
--- a/.github/workflows/stage-4-acceptance.yaml
+++ b/.github/workflows/stage-4-acceptance.yaml
@@ -71,6 +71,8 @@ jobs:
steps:
- name: "Checkout code"
uses: actions/checkout@v4
+ with:
+ submodules: 'true'
- name: "Run security test"
run: |
make test-security
diff --git a/.gitignore b/.gitignore
index 8bdff9a..fd827e2 100644
--- a/.gitignore
+++ b/.gitignore
@@ -11,3 +11,277 @@
!project.code-workspace
# Please, add your custom content below!
+.DS_Store
+## Ignore Visual Studio temporary files, build results, and
+## files generated by popular Visual Studio add-ons.
+
+# Azure Functions localsettings file
+local.settings.json
+localsettings.json
+
+# User-specific files
+*.suo
+*.user
+*.userosscache
+*.sln.docstates
+
+# User-specific files (MonoDevelop/Xamarin Studio)
+*.userprefs
+
+# Build results
+[Dd]ebug/
+[Dd]ebugPublic/
+[Rr]elease/
+[Rr]eleases/
+x64/
+x86/
+bld/
+[Bb]in/
+[Oo]bj/
+[Ll]og/
+
+# Visual Studio 2015 cache/options directory
+.vs/
+# Uncomment if you have tasks that create the project's static files in wwwroot
+#wwwroot/
+
+# MSTest test Results
+[Tt]est[Rr]esult*/
+[Bb]uild[Ll]og.*
+
+# NUNIT
+*.VisualState.xml
+TestResult.xml
+
+# Build Results of an ATL Project
+[Dd]ebugPS/
+[Rr]eleasePS/
+dlldata.c
+
+# DNX
+project.lock.json
+project.fragment.lock.json
+artifacts/
+
+*_i.c
+*_p.c
+*_i.h
+*.ilk
+*.meta
+*.obj
+*.pch
+*.pdb
+*.pgc
+*.pgd
+*.rsp
+*.sbr
+*.tlb
+*.tli
+*.tlh
+*.tmp
+*.tmp_proj
+*.log
+*.vspscc
+*.vssscc
+.builds
+*.pidb
+*.svclog
+*.scc
+
+# Chutzpah Test files
+_Chutzpah*
+
+# Visual C++ cache files
+ipch/
+*.aps
+*.ncb
+*.opendb
+*.opensdf
+*.sdf
+*.cachefile
+*.VC.db
+*.VC.VC.opendb
+
+# Visual Studio profiler
+*.psess
+*.vsp
+*.vspx
+*.sap
+
+# TFS 2012 Local Workspace
+$tf/
+
+# Guidance Automation Toolkit
+*.gpState
+
+# ReSharper is a .NET coding add-in
+_ReSharper*/
+*.[Rr]e[Ss]harper
+*.DotSettings.user
+
+# JustCode is a .NET coding add-in
+.JustCode
+
+# TeamCity is a build add-in
+_TeamCity*
+
+# DotCover is a Code Coverage Tool
+*.dotCover
+
+# NCrunch
+_NCrunch_*
+.*crunch*.local.xml
+nCrunchTemp_*
+
+# MightyMoose
+*.mm.*
+AutoTest.Net/
+
+# Web workbench (sass)
+.sass-cache/
+
+# Installshield output folder
+[Ee]xpress/
+
+# DocProject is a documentation generator add-in
+DocProject/buildhelp/
+DocProject/Help/*.HxT
+DocProject/Help/*.HxC
+DocProject/Help/*.hhc
+DocProject/Help/*.hhk
+DocProject/Help/*.hhp
+DocProject/Help/Html2
+DocProject/Help/html
+
+# Click-Once directory
+publish/
+
+# Publish Web Output
+*.[Pp]ublish.xml
+*.azurePubxml
+# TODO: Comment the next line if you want to checkin your web deploy settings
+# but database connection strings (with potential passwords) will be unencrypted
+#*.pubxml
+*.publishproj
+
+# Microsoft Azure Web App publish settings. Comment the next line if you want to
+# checkin your Azure Web App publish settings, but sensitive information contained
+# in these scripts will be unencrypted
+PublishScripts/
+
+# NuGet Packages
+*.nupkg
+# The packages folder can be ignored because of Package Restore
+**/packages/*
+# except build/, which is used as an MSBuild target.
+!**/packages/build/
+# Uncomment if necessary however generally it will be regenerated when needed
+#!**/packages/repositories.config
+# NuGet v3's project.json files produces more ignoreable files
+*.nuget.props
+*.nuget.targets
+
+# Microsoft Azure Build Output
+csx/
+*.build.csdef
+
+# Microsoft Azure Emulator
+ecf/
+rcf/
+
+# Windows Store app package directories and files
+AppPackages/
+BundleArtifacts/
+Package.StoreAssociation.xml
+_pkginfo.txt
+
+# Visual Studio cache files
+# files ending in .cache can be ignored
+*.[Cc]ache
+# but keep track of directories ending in .cache
+!*.[Cc]ache/
+
+# Others
+ClientBin/
+~$*
+*~
+*.dbmdl
+*.dbproj.schemaview
+*.jfm
+*.pfx
+*.publishsettings
+node_modules/
+orleans.codegen.cs
+
+# Since there are multiple workflows, uncomment next line to ignore bower_components
+# (https://github.com/github/gitignore/pull/1529#issuecomment-104372622)
+#bower_components/
+
+# RIA/Silverlight projects
+Generated_Code/
+
+# Backup & report files from converting an old project file
+# to a newer Visual Studio version. Backup files are not needed,
+# because we have git ;-)
+_UpgradeReport_Files/
+Backup*/
+UpgradeLog*.XML
+UpgradeLog*.htm
+
+# SQL Server files
+*.mdf
+*.ldf
+
+# Business Intelligence projects
+*.rdl.data
+*.bim.layout
+*.bim_*.settings
+
+# Microsoft Fakes
+FakesAssemblies/
+
+# GhostDoc plugin setting file
+*.GhostDoc.xml
+
+# Node.js Tools for Visual Studio
+.ntvs_analysis.dat
+
+# Visual Studio 6 build log
+*.plg
+
+# Visual Studio 6 workspace options file
+*.opt
+
+# Visual Studio LightSwitch build output
+**/*.HTMLClient/GeneratedArtifacts
+**/*.DesktopClient/GeneratedArtifacts
+**/*.DesktopClient/ModelManifest.xml
+**/*.Server/GeneratedArtifacts
+**/*.Server/ModelManifest.xml
+_Pvt_Extensions
+
+# Paket dependency manager
+.paket/paket.exe
+paket-files/
+
+# FAKE - F# Make
+.fake/
+
+# JetBrains Rider
+.idea/
+*.sln.iml
+
+# CodeRush
+.cr/
+
+# Python Tools for Visual Studio (PTVS)
+__pycache__/
+*.pyc
+
+.env
+*.pem
+
+# Azurite
+__azurite*.json
+__blobstorage__
+__queuestorage__
diff --git a/README.md b/README.md
index 7008e9c..05445a8 100644
--- a/README.md
+++ b/README.md
@@ -1,44 +1,31 @@
-# Repository Template
+# Service Layer
-[![CI/CD Pull Request](https://github.com/nhs-england-tools/repository-template/actions/workflows/cicd-1-pull-request.yaml/badge.svg)](https://github.com/nhs-england-tools/repository-template/actions/workflows/cicd-1-pull-request.yaml)
-[![Quality Gate Status](https://sonarcloud.io/api/project_badges/measure?project=repository-template&metric=alert_status)](https://sonarcloud.io/summary/new_code?id=repository-template)
+[![CI/CD Pull Request](https://github.com/nhs-england-tools/repository-template/actions/workflows/cicd-1-pull-request.yaml/badge.svg)](https://github.com/nhs-england-tools/repository-template/actions/workflows/cicd-1-pull-request.yaml)
+[![Quality Gate Status](https://sonarcloud.io/api/project_badges/measure?project=NHSDigital_dtos-service-layer&metric=alert_status)](https://sonarcloud.io/summary/new_code?id=NHSDigital_dtos-service-layer)
-Start with an overview or a brief description of what the project is about and what it does. For example -
-
-Welcome to our repository template designed to streamline your project setup! This robust template provides a reliable starting point for your new projects, covering an essential tech stack and encouraging best practices in documenting.
-
-This repository template aims to foster a user-friendly development environment by ensuring that every included file is concise and adequately self-documented. By adhering to this standard, we can promote increased clarity and maintainability throughout your project's lifecycle. Bundled within this template are resources that pave the way for seamless repository creation. Currently supported technologies are:
-
-- Terraform
-- Docker
-
-Make use of this repository template to expedite your project setup and enhance your productivity right from the get-go. Enjoy the advantage of having a well-structured, self-documented project that reduces overhead and increases focus on what truly matters - coding!
+Service Layer
## Table of Contents
-- [Repository Template](#repository-template)
+- [Service Layer](#service-layer)
- [Table of Contents](#table-of-contents)
- [Setup](#setup)
- [Prerequisites](#prerequisites)
- - [Configuration](#configuration)
+ - [Configuration](#configuration)
- [Usage](#usage)
- [Testing](#testing)
- - [Design](#design)
- - [Diagrams](#diagrams)
- - [Modularity](#modularity)
- - [Contributing](#contributing)
- [Contacts](#contacts)
- [Licence](#licence)
## Setup
-By including preferably a one-liner or if necessary a set of clear CLI instructions we improve user experience. This should be a frictionless installation process that works on various operating systems (macOS, Linux, Windows WSL) and handles all the dependencies.
+TODO
Clone the repository
```shell
-git clone https://github.com/nhs-england-tools/repository-template.git
-cd nhs-england-tools/repository-template
+git clone https://github.com/NHSDigital/dtos-service-layer
+cd dtos-service-layer
```
### Prerequisites
@@ -46,7 +33,9 @@ cd nhs-england-tools/repository-template
The following software packages, or their equivalents, are expected to be installed and configured:
- [Docker](https://www.docker.com/) container runtime or a compatible tool, e.g. [Podman](https://podman.io/),
-- [asdf](https://asdf-vm.com/) version manager,
+- [.NET](https://dotnet.microsoft.com/en-us/download/dotnet/9.0) - .NET 9.0
+- [Azure functions core tools](https://learn.microsoft.com/en-us/azure/azure-functions/functions-run-local?tabs=macos%2Cisolated-process%2Cnode-v4%2Cpython-v2%2Chttp-trigger%2Ccontainer-apps&pivots=programming-language-csharp)
+- [adr-tools](https://github.com/npryce/adr-tools)
- [GNU make](https://www.gnu.org/software/make/) 3.82 or later,
> [!NOTE]
@@ -58,73 +47,29 @@ The following software packages, or their equivalents, are expected to be instal
>
> You will then see instructions to fix your [`$PATH`](https://github.com/nhs-england-tools/dotfiles/blob/main/dot_path.tmpl) variable to make the newly installed version available. If you are using [dotfiles](https://github.com/nhs-england-tools/dotfiles), this is all done for you.
-- [GNU sed](https://www.gnu.org/software/sed/) and [GNU grep](https://www.gnu.org/software/grep/) are required for the scripted command-line output processing,
-- [GNU coreutils](https://www.gnu.org/software/coreutils/) and [GNU binutils](https://www.gnu.org/software/binutils/) may be required to build dependencies like Python, which may need to be compiled during installation,
-
-> [!NOTE]
-> For macOS users, installation of the GNU toolchain has been scripted and automated as part of the `dotfiles` project. Please see this [script](https://github.com/nhs-england-tools/dotfiles/blob/main/assets/20-install-base-packages.macos.sh) for details.
-
- [Python](https://www.python.org/) required to run Git hooks,
- [`jq`](https://jqlang.github.io/jq/) a lightweight and flexible command-line JSON processor.
-### Configuration
+## Configuration
-Installation and configuration of the toolchain dependencies
-
-```shell
-make config
-```
+Rename the `.env.example` file to `.env` and populate the missing environment variables which are listed at the top of the file.
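+
+For example (the variable names shown are those referenced by `compose.yaml`):
+
+```shell
+mv .env.example .env
+# then edit .env and set the required values, e.g. API_PORT and AZURE_WEB_JOBS_STORAGE
+```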
## Usage
-After a successful installation, provide an informative example of how this project can be used. Additional code snippets, screenshots and demos work well in this space. You may also link to the other documentation resources, e.g. the [User Guide](./docs/user-guide.md) to demonstrate more use cases and to show more features.
+You can run the Azure functions with `make all`.
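+
+Alternatively, the `api` container defined in `compose.yaml` can be started with Docker Compose (assuming `API_PORT` and `AZURE_WEB_JOBS_STORAGE` are set in your `.env`):
+
+```shell
+docker compose up --build api
+```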
### Testing
-There are `make` tasks for you to configure to run your tests. Run `make test` to see how they work. You should be able to use the same entry points for local development as in your CI pipeline.
-
-## Design
+The full test suite can be run with `make test`.
-### Diagrams
-
-The [C4 model](https://c4model.com/) is a simple and intuitive way to create software architecture diagrams that are clear, consistent, scalable and most importantly collaborative. This should result in documenting all the system interfaces, external dependencies and integration points.
-
-
-
-The source for diagrams should be in Git for change control and review purposes. Recommendations are [draw.io](https://app.diagrams.net/) (example above in [docs](.docs/diagrams/) folder) and [Mermaids](https://github.com/mermaid-js/mermaid). Here is an example Mermaids sequence diagram:
-
-```mermaid
-sequenceDiagram
- User->>+Service: GET /users?params=...
- Service->>Service: auth request
- Service->>Database: get all users
- Database-->>Service: list of users
- Service->>Service: filter users
- Service-->>-User: list[User]
-```
-
-### Modularity
-
-Most of the projects are built with customisability and extendability in mind. At a minimum, this can be achieved by implementing service level configuration options and settings. The intention of this section is to show how this can be used. If the system processes data, you could mention here for example how the input is prepared for testing - anonymised, synthetic or live data.
-
-## Contributing
-
-Describe or link templates on how to raise an issue, feature request or make a contribution to the codebase. Reference the other documentation files, like
-
-- Environment setup for contribution, i.e. `CONTRIBUTING.md`
-- Coding standards, branching, linting, practices for development and testing
-- Release process, versioning, changelog
-- Backlog, board, roadmap, ways of working
-- High-level requirements, guiding principles, decision records, etc.
+Unit tests can be run with `make test-unit`.
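+
+`make test-unit` is wired to `scripts/tests/unit.sh` (see `scripts/test.mk`), which runs `dotnet test` for each test project under `tests/` (excluding `IntegrationTests`) and collects the `.trx` results in `tests/results-unit`. A roughly equivalent manual invocation, using a hypothetical test project name, is:
+
+```shell
+dotnet test tests/ServiceLayer.API.Tests/ServiceLayer.API.Tests.csproj \
+  --filter "TestCategory!=Integration" --logger trx
+```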
## Contacts
-Provide a way to contact the owners of this project. It can be a team, an individual or information on the means of getting in touch via active communication channels, e.g. opening a GitHub discussion, raising an issue, etc.
+If you are on the NHS England Slack, you can contact the team in the #mays-team channel; otherwise, please open a GitHub issue.
## Licence
-> The [LICENCE.md](./LICENCE.md) file will need to be updated with the correct year and owner
-
Unless stated otherwise, the codebase is released under the MIT License. This covers both the codebase and any sample code in the documentation.
Any HTML or Markdown documentation is [© Crown Copyright](https://www.nationalarchives.gov.uk/information-management/re-using-public-sector-information/uk-government-licensing-framework/crown-copyright/) and available under the terms of the [Open Government Licence v3.0](https://www.nationalarchives.gov.uk/doc/open-government-licence/version/3/).
diff --git a/compose.yaml b/compose.yaml
new file mode 100644
index 0000000..b786b75
--- /dev/null
+++ b/compose.yaml
@@ -0,0 +1,22 @@
+services:
+ ### ✅ API Backend (Azure Functions in .NET 9) ###
+ api:
+ container_name: "api"
+ build:
+ context: ./src/api
+ dockerfile: ServiceLayer.API/Dockerfile
+ platform: linux/amd64
+ restart: always
+ environment:
+ FUNCTIONS_WORKER_RUNTIME: "dotnet-isolated"
+ AzureWebJobsStorage: "${AZURE_WEB_JOBS_STORAGE}"
+ AzureWebJobsSecretStorageType: "files"
+ ports:
+ - "${API_PORT}:80"
+ networks:
+ - backend
+
+
+networks:
+ backend:
+ driver: bridge
diff --git a/infrastructure/.gitignore b/infrastructure/.gitignore
index 22ebdac..6c4ba38 100644
--- a/infrastructure/.gitignore
+++ b/infrastructure/.gitignore
@@ -3,21 +3,11 @@
# Local .terraform directories
**/.terraform/*
-# .tfstate files
-*.tfstate
-*.tfstate.*
# Crash log files
crash.log
crash.*.log
-# Exclude all .tfvars files, which are likely to contain sensitive data, such as
-# password, private keys, and other secrets. These should not be part of version
-# control as they are data points which are potentially sensitive and subject
-# to change depending on the environment.
-*.tfvars
-*.tfvars.json
-
# Ignore override files as they are usually used to override resources locally and so
# are not checked in
override.tf
diff --git a/infrastructure/images/.gitkeep b/infrastructure/images/.gitkeep
deleted file mode 100644
index e69de29..0000000
diff --git a/infrastructure/modules/.gitkeep b/infrastructure/modules/.gitkeep
deleted file mode 100644
index e69de29..0000000
diff --git a/scripts/config/gitleaks.toml b/scripts/config/gitleaks.toml
index af5f0bb..c2a9ee0 100644
--- a/scripts/config/gitleaks.toml
+++ b/scripts/config/gitleaks.toml
@@ -16,4 +16,9 @@ regexes = [
]
[allowlist]
-paths = ['''.terraform.lock.hcl''', '''poetry.lock''', '''yarn.lock''']
+paths = [
+ '''.terraform.lock.hcl''',
+ '''poetry.lock''',
+ '''yarn.lock''',
+ '''\.sln'''
+]
diff --git a/scripts/config/sonar-scanner.properties b/scripts/config/sonar-scanner.properties
index 147891d..9577916 100644
--- a/scripts/config/sonar-scanner.properties
+++ b/scripts/config/sonar-scanner.properties
@@ -3,7 +3,43 @@
sonar.host.url=https://sonarcloud.io
sonar.qualitygate.wait=true
sonar.sourceEncoding=UTF-8
-sonar.sources=.
-#sonar.python.coverage.reportPaths=.coverage/coverage.xml
-#sonar.[javascript|typescript].lcov.reportPaths=.coverage/lcov.info
+# Code Coverage Configuration for .NET (C#)
+#sonar.cs.opencover.reportsPaths=coverage.opencover.xml
+
+# Sonar Server URL (Change if using SonarQube)
+sonar.host.url=https://sonarcloud.io
+
+# Set Node version
+sonar.node.version=22.14.0
+
+# Define the Source and Test Files
+sonar.test.inclusions=**/*Tests.cs,**/*.test.cs
+
+# Set the Language-Specific Sources (Ensures Proper Analysis)
+sonar.language=cs,ts
+
+# File Encoding
+sonar.sourceEncoding=UTF-8
+
+# Exclude Generated Code, Build Artifacts, and Unnecessary Files
+sonar.exclusions=**/bin/**, **/obj/**, **/node_modules/**, **/wwwroot/**, **/dist/**, **/build/**, **/coverage/**
+
+# Exclude Test Files from Main Analysis
+sonar.test.exclusions=tests/**/*.cs, tests/**/*.ts, tests/**/*.tsx, tests/**/*.js
+
+# Test Coverage for C# (Ensure Reports Are Generated Before Running SonarScanner)
+sonar.cs.vscoveragexml.reportsPaths=coverage/coverage.xml
+
+# Test Coverage for TypeScript (Ensure Reports Are Generated Before Running SonarScanner)
+sonar.javascript.lcov.reportPaths=coverage/lcov.info
+
+# Enable TypeScript Analysis
+sonar.typescript.file.suffixes=.ts,.tsx
+
+# Enable C# Analysis
+sonar.dotnet.key=SonarAnalyzer.CSharp
+
+# Run C# Scanner (Requires Build Before Analysis)
+sonar.dotnet.visualstudio.solution.file=ServiceLayer.API.sln
+# Set sonar.dotnet.build to true if you want Sonar to build before scanning
+sonar.dotnet.build=false
diff --git a/scripts/docker/docker.lib.sh b/scripts/docker/docker.lib.sh
index d52d651..1878710 100644
--- a/scripts/docker/docker.lib.sh
+++ b/scripts/docker/docker.lib.sh
@@ -27,9 +27,10 @@ function docker-build() {
version-create-effective-file
_create-effective-dockerfile
-
- tag=$(_get-effective-tag)
-
+ # The current directory must be changed for the image build script to access
+ # assets that need to be copied
+ current_dir=$(pwd)
+ cd "$dir"
docker build \
--progress=plain \
--platform linux/amd64 \
@@ -42,36 +43,16 @@ function docker-build() {
--build-arg GIT_COMMIT_HASH="$(git rev-parse --short HEAD)" \
--build-arg BUILD_DATE="$(date -u +"%Y-%m-%dT%H:%M:%S%z")" \
--build-arg BUILD_VERSION="$(_get-effective-version)" \
- --tag "${tag}" \
+ --tag "${DOCKER_IMAGE}:$(_get-effective-version)" \
--rm \
--file "${dir}/Dockerfile.effective" \
.
-
+ cd "$current_dir"
# Tag the image with all the stated versions, see the documentation for more details
for version in $(_get-all-effective-versions) latest; do
- if [ ! -z "$version" ]; then
- docker tag "${tag}" "${DOCKER_IMAGE}:${version}"
- fi
+ docker tag "${DOCKER_IMAGE}:$(_get-effective-version)" "${DOCKER_IMAGE}:${version}"
done
-}
-
-# Create the Dockerfile.effective file to bake in version info
-# Arguments (provided as environment variables):
-# dir=[path to the Dockerfile to use, default is '.']
-function docker-bake-dockerfile() {
-
- local dir=${dir:-$PWD}
-
- version-create-effective-file
- _create-effective-dockerfile
-}
-
-# Run hadolint over the generated Dockerfile.
-# Arguments (provided as environment variables):
-# dir=[path to the image directory where the Dockerfile.effective is located, default is '.']
-function docker-lint() {
- local dir=${dir:-$PWD}
- file=${dir}/Dockerfile.effective ./scripts/docker/dockerfile-linter.sh
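+ # Remove any dangling (untagged) images left behind by the build and re-tagging; failures are ignored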
+ docker rmi --force "$(docker images | grep "<none>" | awk '{print $3}')" 2> /dev/null ||:
}
# Check test Docker image.
@@ -100,13 +81,12 @@ function docker-check-test() {
function docker-run() {
local dir=${dir:-$PWD}
- local tag=$(dir="$dir" _get-effective-tag)
# shellcheck disable=SC2086
docker run --rm --platform linux/amd64 \
${args:-} \
- "${tag}" \
- ${DOCKER_CMD:-}
+ "${DOCKER_IMAGE}:$(dir="$dir" _get-effective-version)" \
+ ${cmd:-}
}
# Push Docker image.
@@ -134,8 +114,7 @@ function docker-clean() {
done
rm -f \
.version \
- Dockerfile.effective \
- Dockerfile.effective.dockerignore
+ Dockerfile.effective
}
# Create effective version from the VERSION file.
@@ -228,13 +207,6 @@ function _create-effective-dockerfile() {
local dir=${dir:-$PWD}
- # If it exists, we need to copy the .dockerignore file to match the prefix of the
- # Dockerfile.effective file, otherwise docker won't use it.
- # See https://docs.docker.com/build/building/context/#filename-and-location
- # If using podman, this requires v5.0.0 or later.
- if [ -f "${dir}/Dockerfile.dockerignore" ]; then
- cp "${dir}/Dockerfile.dockerignore" "${dir}/Dockerfile.effective.dockerignore"
- fi
cp "${dir}/Dockerfile" "${dir}/Dockerfile.effective"
_replace-image-latest-by-specific-version
_append-metadata
@@ -304,20 +276,6 @@ function _get-effective-version() {
head -n 1 "${dir}/.version" 2> /dev/null ||:
}
-# Print the effective tag for the image with the version. If you don't have a VERSION file
-# then the tag will be just the image name. Otherwise it will be the image name with the version.
-# Arguments (provided as environment variables):
-# dir=[path to the image directory where the Dockerfile is located, default is '.']
-function _get-effective-tag() {
-
- local tag=$DOCKER_IMAGE
- version=$(_get-effective-version)
- if [ ! -z "$version" ]; then
- tag="${tag}:${version}"
- fi
- echo "$tag"
-}
-
# Print all Docker image versions.
# Arguments (provided as environment variables):
# dir=[path to the image directory where the Dockerfile is located, default is '.']
diff --git a/scripts/docker/docker.mk b/scripts/docker/docker.mk
index afa8bca..a31ad9d 100644
--- a/scripts/docker/docker.mk
+++ b/scripts/docker/docker.mk
@@ -4,39 +4,26 @@
# Custom implementation - implementation of a make target should not exceed 5 lines of effective code.
# In most cases there should be no need to modify the existing make targets.
-DOCKER_IMAGE ?= $(or ${docker_image}, $(or ${IMAGE}, $(or ${image}, ghcr.io/org/repo)))
-DOCKER_TITLE ?= $(or "${docker_title}", $(or "${TITLE}", $(or "${title}", "Service Docker image")))
-
-docker-bake-dockerfile: # Create Dockerfile.effective - optional: docker_dir|dir=[path to the image directory where the Dockerfile is located, default is '.'] @Development
- make _docker cmd="bake-dockerfile" \
- dir=$(or ${docker_dir}, ${dir})
-
docker-build: # Build Docker image - optional: docker_dir|dir=[path to the Dockerfile to use, default is '.'] @Development
- dir=$(or ${docker_dir}, ${dir})
- make _docker cmd="build"
-docker-build: docker-lint
-
-docker-lint: # Run hadolint over the Dockerfile - optional: docker_dir|dir=[path to the image directory where the Dockerfile is located, default is '.'] @Development
- dir=$(or ${docker_dir}, ${dir})
- make _docker cmd="lint"
-docker-lint: docker-bake-dockerfile
+ make _docker cmd="build" \
+ dir=$(or ${docker_dir}, ${dir})
+ file=$(or ${docker_dir}, ${dir})/Dockerfile.effective scripts/docker/dockerfile-linter.sh
docker-push: # Push Docker image - optional: docker_dir|dir=[path to the image directory where the Dockerfile is located, default is '.'] @Development
make _docker cmd="push" \
dir=$(or ${docker_dir}, ${dir})
-docker-run: # Run Docker image - optional: docker_dir|dir=[path to the image directory where the Dockerfile is located, default is '.'] @Development
- make _docker cmd="run" \
- dir=$(or ${docker_dir}, ${dir})
-
clean:: # Remove Docker resources (docker) - optional: docker_dir|dir=[path to the image directory where the Dockerfile is located, default is '.'] @Operations
make _docker cmd="clean" \
dir=$(or ${docker_dir}, ${dir})
_docker: # Docker command wrapper - mandatory: cmd=[command to execute]; optional: dir=[path to the image directory where the Dockerfile is located, relative to the project's top-level directory, default is '.']
# 'DOCKER_IMAGE' and 'DOCKER_TITLE' are passed to the functions as environment variables
- dir=$(realpath $(or ${dir}, infrastructure/images/${DOCKER_IMAGE}))
+ DOCKER_IMAGE=$(or ${DOCKER_IMAGE}, $(or ${docker_image}, $(or ${IMAGE}, $(or ${image}, ghcr.io/org/repo))))
+ DOCKER_TITLE=$(or "${DOCKER_TITLE}", $(or "${docker_title}", $(or "${TITLE}", $(or "${title}", "Service Docker image"))))
source scripts/docker/docker.lib.sh
+ dir=$(realpath ${dir})
docker-${cmd} # 'dir' is accessible by the function as environment variable
# ==============================================================================
@@ -53,15 +40,44 @@ docker-shellscript-lint: # Lint all Docker module shell scripts @Quality
docker-test-suite-run: # Run Docker test suite @ExamplesAndTests
scripts/docker/tests/docker.test.sh
+docker-example-build: # Build Docker example @ExamplesAndTests
+ source scripts/docker/docker.lib.sh
+ cd scripts/docker/examples/python
+ DOCKER_IMAGE=repository-template/docker-example-python
+ DOCKER_TITLE="Repository Template Docker Python Example"
+ TOOL_VERSIONS="$(shell git rev-parse --show-toplevel)/scripts/docker/examples/python/.tool-versions.example"
+ docker-build
+
+docker-example-lint: # Lint Docker example @ExamplesAndTests
+ dockerfile=scripts/docker/examples/python/Dockerfile
+ file=$${dockerfile} scripts/docker/dockerfile-linter.sh
+
+docker-example-run: # Run Docker example @ExamplesAndTests
+ source scripts/docker/docker.lib.sh
+ cd scripts/docker/examples/python
+ DOCKER_IMAGE=repository-template/docker-example-python
+ args=" \
+ -it \
+ --publish 8000:8000 \
+ "
+ docker-run
+
+docker-example-clean: # Remove Docker example resources @ExamplesAndTests
+ source scripts/docker/docker.lib.sh
+ cd scripts/docker/examples/python
+ DOCKER_IMAGE=repository-template/docker-example-python
+ docker-clean
+
# ==============================================================================
${VERBOSE}.SILENT: \
_docker \
clean \
- docker-bake-dockerfile \
docker-build \
- docker-lint \
+ docker-example-build \
+ docker-example-clean \
+ docker-example-lint \
+ docker-example-run \
docker-push \
- docker-run \
docker-shellscript-lint \
docker-test-suite-run \
diff --git a/scripts/githooks/check-markdown-format.sh b/scripts/githooks/check-markdown-format.sh
index c39a080..698df4a 100755
--- a/scripts/githooks/check-markdown-format.sh
+++ b/scripts/githooks/check-markdown-format.sh
@@ -38,7 +38,7 @@ function main() {
check=${check:-working-tree-changes}
case $check in
"all")
- files="$(git ls-files "*.md")"
+ files="$(find ./ -type f -name "*.md")"
;;
"staged-changes")
files="$(git diff --diff-filter=ACMRT --name-only --cached "*.md")"
diff --git a/scripts/terraform/terraform.mk b/scripts/terraform/terraform.mk
index 6339b19..120a059 100644
--- a/scripts/terraform/terraform.mk
+++ b/scripts/terraform/terraform.mk
@@ -4,11 +4,6 @@
# Custom implementation - implementation of a make target should not exceed 5 lines of effective code.
# In most cases there should be no need to modify the existing make targets.
-TF_ENV ?= dev
-STACK ?= ${stack}
-TERRAFORM_STACK ?= $(or ${STACK}, infrastructure/environments/${TF_ENV})
-dir ?= ${TERRAFORM_STACK}
-
terraform-init: # Initialise Terraform - optional: terraform_dir|dir=[path to a directory where the command will be executed, relative to the project's top-level directory, default is one of the module variables or the example directory, if not set], terraform_opts|opts=[options to pass to the Terraform init command, default is none/empty] @Development
make _terraform cmd="init" \
dir=$(or ${terraform_dir}, ${dir}) \
@@ -46,6 +41,8 @@ clean:: # Remove Terraform files (terraform) - optional: terraform_dir|dir=[path
opts=$(or ${terraform_opts}, ${opts})
_terraform: # Terraform command wrapper - mandatory: cmd=[command to execute]; optional: dir=[path to a directory where the command will be executed, relative to the project's top-level directory, default is one of the module variables or the example directory, if not set], opts=[options to pass to the Terraform command, default is none/empty]
+ # 'TERRAFORM_STACK' is passed to the functions as environment variable
+ TERRAFORM_STACK=$(or ${TERRAFORM_STACK}, $(or ${terraform_stack}, $(or ${STACK}, $(or ${stack}, scripts/terraform/examples/terraform-state-aws-s3))))
dir=$(or ${dir}, ${TERRAFORM_STACK})
source scripts/terraform/terraform.lib.sh
terraform-${cmd} # 'dir' and 'opts' are accessible by the function as environment variables, if set
@@ -58,6 +55,23 @@ terraform-shellscript-lint: # Lint all Terraform module shell scripts @Quality
file=$${file} scripts/shellscript-linter.sh
done
+# ==============================================================================
+# Module tests and examples - please DO NOT edit this section!
+
+terraform-example-provision-aws-infrastructure: # Provision example of AWS infrastructure @ExamplesAndTests
+ make terraform-init
+ make terraform-plan opts="-out=terraform.tfplan"
+ make terraform-apply opts="-auto-approve terraform.tfplan"
+
+terraform-example-destroy-aws-infrastructure: # Destroy example of AWS infrastructure @ExamplesAndTests
+ make terraform-destroy opts="-auto-approve"
+
+terraform-example-clean: # Remove Terraform example files @ExamplesAndTests
+ dir=$(or ${dir}, ${TERRAFORM_STACK})
+ source scripts/terraform/terraform.lib.sh
+ terraform-clean
+ rm -f ${TERRAFORM_STACK}/.terraform.lock.hcl
+
# ==============================================================================
# Configuration - please DO NOT edit this section!
@@ -71,6 +85,9 @@ ${VERBOSE}.SILENT: \
clean \
terraform-apply \
terraform-destroy \
+ terraform-example-clean \
+ terraform-example-destroy-aws-infrastructure \
+ terraform-example-provision-aws-infrastructure \
terraform-fmt \
terraform-init \
terraform-install \
diff --git a/scripts/tests/unit.sh b/scripts/tests/unit.sh
index c589be5..f116215 100755
--- a/scripts/tests/unit.sh
+++ b/scripts/tests/unit.sh
@@ -4,17 +4,26 @@ set -euo pipefail
cd "$(git rev-parse --show-toplevel)"
-# This file is for you! Edit it to call your unit test suite. Note that the same
-# file will be called if you run it locally as if you run it on CI.
-
-# Replace the following line with something like:
-#
-# rails test:unit
-# python manage.py test
-# npm run test
-#
-# or whatever is appropriate to your project. You should *only* run your fast
-# tests from here. If you want to run other test suites, see the predefined
-# tasks in scripts/test.mk.
-
-echo "Unit tests are not yet implemented. See scripts/tests/unit.sh for more."
+dir="$PWD"
+UnitDir="$dir/tests/"
+ResDir="$UnitDir"results-unit
+Format="trx"
+
+# Find all *.csproj files excluding the IntegrationTests folder and execute dotnet test, with build for now
+find "$UnitDir" -name '*.csproj' -not -path "$UnitDir/IntegrationTests/*" | while read -r file; do
+ echo -e "\nRunning unit tests for:\n$file"
+ dotnet test "$file" --filter "TestCategory!=Integration" --logger $Format --verbosity quiet
+done
+
+
+# Move all trx result files into a separate folder, for easier reporting
+mkdir -p "$ResDir"
+find "$UnitDir" -name "*.$Format" -not -path "$ResDir/*" | while read -r resfile; do
+ mv "$resfile" "$ResDir"
+done
+
+# List created results
+echo -e "\nCreated result files:\n"
+find "$ResDir" -name "*.$Format"
+
+# echo "Test execution completed. See scripts/tests/unit.sh for more."
diff --git a/infrastructure/environments/dev/.gitkeep b/src/.gitkeep
similarity index 100%
rename from infrastructure/environments/dev/.gitkeep
rename to src/.gitkeep