Skip to content
7 changes: 6 additions & 1 deletion v2/manifests/core-ons/dis-migration-service.yml
Original file line number Diff line number Diff line change
Expand Up @@ -16,7 +16,7 @@ services:
AUTHORISATION_ENABLED: ${AUTHORISATION_ENABLED:-true}
BIND_ADDR: ":30100"
DATASET_API_URL: ${DATASET_API_URL:-http://dp-dataset-api:22000}
ENABLE_EVENT_LOGGING: ${ENABLE_EVENT_LOGGING:-false}
ENABLE_EVENT_LOGGING: ${ENABLE_EVENT_LOGGING:-true}
ENABLE_TOPIC_CACHE: ${ENABLE_TOPIC_CACHE:-true}
FILES_API_URL: ${FILES_API_URL:-http://dp-files-api:26900}
IDENTITY_WEB_KEY_SET_URL: ${IDENTITY_WEB_KEY_SET_URL:-http://dis-authentication-stub:29500}
Expand All @@ -27,6 +27,11 @@ services:
PERMISSIONS_API_URL: ${PERMISSIONS_API_URL:-http://dp-permissions-api:25400}
REDIRECT_API_URL: ${REDIRECT_API_URL:-http://dis-redirect-api:29900}
SERVICE_AUTH_TOKEN: "migrationservicetestauthtoken"
SLACK_ENABLED: ${SLACK_ENABLED:-false}
SLACK_API_TOKEN: ${SLACK_API_TOKEN:-""}
SLACK_PUBLISH_CHANNEL: ${SLACK_PUBLISH_CHANNEL:-#sandbox-publish-log}
SLACK_ALARM_CHANNEL: ${SLACK_ALARM_CHANNEL:-#sandbox-alarm}
SLACK_WARNING_CHANNEL: ${SLACK_WARNING_CHANNEL:-#sandbox-warning}
TOPIC_API_URL: ${TOPIC_API_URL:-http://dp-topic-api:25300}
UPLOAD_SERVICE_URL: ${UPLOAD_SERVICE_URL:-http://dp-upload-service:25100}
ZEBEDEE_URL: ${ZEBEDEE_URL:-http://zebedee:8082}
Expand Down
1 change: 0 additions & 1 deletion v2/manifests/core-ons/zebedee.yml
Original file line number Diff line number Diff line change
Expand Up @@ -42,7 +42,6 @@ services:
AWS_COGNITO_SIGNING_KEY_TWO: ${AWS_COGNITO_SIGNING_KEY_TWO:-"MIIBIjANBgkqhkiG9w0BAQEFAAOCAQ8AMIIBCgKCAQEAtvDfudfY9n+8sFJmHGFfgbKqKf8iiEcbvRXNMEi9qd2NGAekhdNJKdeW3sMSwR+sb4Ly6IypowCE2eueYk/GatzYyyolWny/Krdp0EWPT/PnK8Iq1FTIuHxFb08B8iLnH/2nKqgOjVvwEU4eSBh0YHKti2v77a+a4bnx6aOC2YkF2AyIRmbXAHaq4Js9u33X8gGMXZcVsxcSpG8Py/NJ3s+PLKebQFd28S1Jl/89JDcUv4/3DF+u8k9nxkGlaSEcwF7OIyj+cnSa7gm3PadTO+m/96JENyNaLTjpPE7yiHKDpwMP04LZeAF+QhpnQsCgOTNmP5ogFzQtwOyX25/FmQIDAQAB"}
AWS_COGNITO_KEY_ID_ONE: ${AWS_COGNITO_KEY_ID_ONE:-"2a8vXmIK67ZZ3hFZ/DwQATgvqZgRBFjuuVavlw3zEwo="}
AWS_COGNITO_KEY_ID_TWO: ${AWS_COGNITO_KEY_ID_TWO:-"GRBevIroJzPBvaGaL9xm4x/6rQGkbKxi3wLtcTiGymE="}
website_reindex_key: "1hZiEDeZcVKZwO6WmTDTDhVSiRAKS0jM6Nzlvlszk0OW0vY5M2FCiGD7ncqcucxB"
scheduled_publishing_enabled: "false"
OTEL_JAVAAGENT_ENABLED: ${OTEL_JAVAAGENT_ENABLED:-"false"}
brian_url: "http://project-brian:8083"
Expand Down
16 changes: 12 additions & 4 deletions v2/stacks/migration/Makefile
Original file line number Diff line number Diff line change
@@ -1,28 +1,36 @@
# Include common make targets
include ../common.mk

MIGRATION_COLLECTION_NAME ?= migrationcollectionforjob1

.PHONY: init
init: base-init
# Add stack specific initialisation logic here

# Add any stack specific helper targets here

.PHONY: seed-topic-api
seed-topic-api:
seed-topic-api: # Seed the topic API with the required topics for the migration job
../../scripts/seed-topic-api.sh

.PHONY: seed-permissions-api
seed-permissions-api:
seed-permissions-api: # Seed the permissions API with the required permissions for the migration job
../../scripts/seed-permissions-api.sh

.PHONY: up-with-seed
up-with-seed:
up-with-seed: ## Start the stack and seed the topic and permissions APIs
$(MAKE) up
$(MAKE) seed-topic-api
$(MAKE) seed-permissions-api

.PHONY: remove-migration-collection
remove-migration-collection: # Remove the test migration collection from zebedee content store.
rm $(zebedee_root)/zebedee/collections/$(MIGRATION_COLLECTION_NAME).json
rm -r $(zebedee_root)/zebedee/collections/$(MIGRATION_COLLECTION_NAME)

.PHONY: reset
reset:
reset: # Reset the MongoDB databases and remove the test migration collection
mongosh localhost:27017/migrations --file ../../provisioning/mongo/reset.js
mongosh localhost:27017/datasets --file ../../provisioning/mongo/reset.js
$(MAKE) remove-migration-collection

88 changes: 48 additions & 40 deletions v2/stacks/migration/README.md
Original file line number Diff line number Diff line change
Expand Up @@ -23,69 +23,77 @@ To run the stack:

This is required for the local docker build to work around a current issue with the volume paths not existing pre-build. Once this issue has been resolved, this step will no longer be necessary.

3. Build and start the stack:
3. For most use cases, you will want to run the stack with dp-topic-api and dp-permissions-api database seeded.

To run the seeding you will need mongosh installed:

```sh
brew install mongosh
```

To run the services with seeding, run:

```shell
make up
make up-with-seed
```

To run the stack with dp-topic-api and dp-permissions-api database seeded:
This will start all the containers (including mongodb) and then run the seeding scripts.

Follow the prerequisites for installing `mongosh` here - `https://github.com/ONSdigital/dp-topic-api/tree/develop/scripts`
The seeding scripts assume that the repositories on your system are in the same root as `dp-compose` or that you have an environment variable set for `$DP_REPO_DIR`.

Run:
If that is not the correct location, you will be prompted to input a custom location for dp-topic-api or dp-permissions-api on your system.

```shell
make up-with-seed
```
When the scripts run, you should see something like the below in your terminal:

This assumes that the locations of repositories on your system are in the same root as `dp-compose` or that you have an environment variable set for `$DP_REPO_DIR`
```shell
Found dp-topic-api at {some-path}/dp-topic-api
mongosh localhost:27017/topics ./scripts/seed-database/index.js
creating collections

If that is not the correct location, you will be prompted to input a custom location for dp-topic-api or dp-permissions-api on your system.
Seeding permissions API database...
Found dp-permissions-api at {some-path}/dp-permissions-api
```

Once the correct location is found, you should see something like:
If any service hasn't picked up its topic or permissions cache, you can force a refresh via `SERVICE={serviceName} make restart`.

```shell
Found dp-topic-api at {some-path}/dp-topic-api
mongosh localhost:27017/topics ./scripts/seed-database/index.js
creating collections
If you don't want to run the stack with the databases seeded, you can use:

Seeding permissions API database...
Found dp-permissions-api at {some-path}/dp-permissions-api
```
```sh
make up
```

## How to test it's all working together

1. Access the authentication stub login page <http://localhost:29500/florence/login>
2. Login as the admin user
3. Obtain the `access_token` from your cookies (minus the 'Bearer ' prefix)
4. Make a POST request to localhost:30100/v1/migration-jobs with these following payloads, depending on what you are testing (using the `access_token` as the `Authorization` header):
4. Make a POST request to localhost:30100/v1/migration-jobs with the following payloads, depending on what you are testing (using the `access_token` as the `Authorization` header).

**Testing with a simple dataset**
In response you should receive a `202 Accepted` response, and the `state` should be `submitted`.

```json
{
"source_id": "/employmentandlabourmarket/peopleinwork/workplacedisputesandworkingconditions/datasets/labourdisputeslabourdisputesannualestimates",
"target_id": "new-dataset",
"type": "static_dataset"
}
```
a. Testing with a simple dataset

**Testing with a dataset that has edition as 'Current'**
```json
{
"source_id": "/employmentandlabourmarket/peopleinwork/workplacedisputesandworkingconditions/datasets/labourdisputeslabourdisputesannualestimates",
"target_id": "new-dataset",
"type": "static_dataset"
}
```

```json
{
"source_id": "/businessindustryandtrade/business/activitysizeandlocation/datasets/businessdemographyreferencetable",
"target_id": "new-dataset",
"type": "static_dataset"
}
```
b. Testing with a dataset that has edition as 'Current'

You should receive a `202 Accepted` response, and the `state` should be `submitted`.
```json
{
"source_id": "/businessindustryandtrade/business/activitysizeandlocation/datasets/businessdemographyreferencetable",
"target_id": "new-dataset",
"type": "static_dataset"
}
```

5. Make a GET request to localhost:30100/v1/migration-jobs with the same `access_token` as the `Authorization` header.

You should now be able to see the migration job's state as `in_review` if the migration has been successful.
You should now be able to see the migration job's state as `in_review` if the migration has been successful.

6. Now, make a GET request to localhost:22000/datasets using the `access_token` as the `Authorization` header

Expand All @@ -96,8 +104,8 @@ You should now be able to see your successfully migrated dataset in the dp-datas
We have provided a `reset` target to help with repeated test runs - you can run this via:

```sh
make reset
make reset
```

This will clear the `dp-dataset-api` and `dis-migration-service` mongo collections of data to re-run a migration. It will not
currently modify zebedee's content store.
This will clear the `dp-dataset-api` and `dis-migration-service` mongo collections of data to re-run a migration. It will also
remove the created collection in zebedee.
4 changes: 2 additions & 2 deletions v2/stacks/migration/core-ons.yml
Original file line number Diff line number Diff line change
Expand Up @@ -21,8 +21,6 @@ services:
file: ${PATH_MANIFESTS}/core-ons/dp-permissions-api.yml
service: dp-permissions-api
depends_on:
dp-api-router:
condition: service_healthy
mongodb:
condition: service_healthy
dp-topic-api:
Expand All @@ -48,6 +46,8 @@ services:
depends_on:
dis-authentication-stub:
condition: service_healthy
dp-permissions-api:
condition: service_healthy
dis-data-admin-ui:
extends:
file: ${PATH_MANIFESTS}/core-ons/dis-data-admin-ui.yml
Expand Down
6 changes: 6 additions & 0 deletions v2/stacks/migration/data.yml
Original file line number Diff line number Diff line change
Expand Up @@ -57,3 +57,9 @@ services:
extends:
file: ${PATH_MANIFESTS}/core-ons/dp-frontend-dataset-controller.yml
service: dp-frontend-dataset-controller
environment:
IS_PUBLISHING: true
dis-design-system-go:
extends:
file: ${PATH_MANIFESTS}/core-ons/dis-design-system-go.yml
service: dis-design-system-go
Loading