diff --git a/.github/workflows/run_tests.yml b/.github/workflows/run_tests.yml index daefe93..78e78ca 100644 --- a/.github/workflows/run_tests.yml +++ b/.github/workflows/run_tests.yml @@ -6,11 +6,9 @@ on: - "**" # Remove the line above to run when pushing to master pull_request: - branches: [ main, dev ] + branches: [ dev ] repository_dispatch: types: [ run_api_tests ] - schedule: - - cron: "0 0 * * 0" # Runs every day at midnight (UTC) jobs: UnitTest: @@ -30,62 +28,32 @@ jobs: - name: Determine output folder id: set_output_folder run: | - # For pull_request events, use the base branch; otherwise use the current branch reference - if [[ $GITHUB_EVENT_NAME == "pull_request" ]]; then - branch_name=$GITHUB_BASE_REF - else - branch_name=$GITHUB_REF_NAME - fi - - if [[ $branch_name == "main" ]]; then - echo "output_folder=stage" >> $GITHUB_ENV - elif [[ $branch_name == "dev" ]]; then - echo "output_folder=dev" >> $GITHUB_ENV - else - echo "Unknown branch: $branch_name" - exit 1 - fi + echo "output_folder=dev" >> $GITHUB_ENV - name: Create env file + env: + ENV_CONTENT: ${{ secrets.ENV_FILE }} run: | - # For pull requests, choose the env file based on the base branch - if [ "${{ github.event_name }}" = "pull_request" ]; then - if [ "${{ github.event.pull_request.base.ref }}" = "main" ]; then - echo "${{ secrets.STAGE_ENV_FILE }}" > .env - else - echo "${{ secrets.ENV_FILE }}" > .env - fi - else - # For non-PR events (push, schedule, repository_dispatch), use the stage env file - echo "${{ secrets.STAGE_ENV_FILE }}" > .env - fi + set +H + tmp_env_file=".env" + printf '%s' "$ENV_CONTENT" > "$tmp_env_file" - - name: Set TIMESTAMP - run: | - # Save the timestamp as an environment variable for reuse in subsequent steps. - echo "TIMESTAMP=$(date '+%d.%m.%Y_%H.%M.%S')" >> $GITHUB_ENV - name: Run tests with coverage run: | - # Use the TIMESTAMP variable from the environment. + timestamp=$(date '+%Y-%m-%d_%H-%M-%S') mkdir -p test_results - log_file="test_results/${TIMESTAMP}_report.log" - echo -e "\nTest Cases Report\n" >> "$log_file" - # Run the tests and append output to the log file - npm run test >> "$log_file" 2>&1 + log_file="test_results/${timestamp}_report.log" + echo -e "\nTest Cases Report\n" >> $log_file + npm run test >> $log_file 2>&1 # Copy and rename the generated test report with the timestamp - cp test-report.html test_results/test-report_${TIMESTAMP}.html - - - name: Set Date Folder - run: | - DATE_FOLDER=$(date '+%d.%m.%Y') - echo "date_folder=${DATE_FOLDER}" >> $GITHUB_ENV + cp test-report.html test_results/test-report_${timestamp}.html - name: Upload report to Azure uses: LanceMcCarthy/Action-AzureBlobUpload@v2 with: source_folder: 'test_results' - destination_folder: '${{ env.date_folder }}' + destination_folder: '${{ env.output_folder }}' connection_string: ${{ secrets.AZURE_STORAGE_CONNECTION_STRING }} container_name: 'api-tester' clean_destination_folder: false diff --git a/.gitignore b/.gitignore index e7f1abe..cc52519 100644 --- a/.gitignore +++ b/.gitignore @@ -105,9 +105,9 @@ build .tern-port .DS_Store src/test-harness.json -test-report.html +test-report* wallaby.js jest-html-reporters-attach -seed.data.json +seed.data* .idea/ diff --git a/README.md b/README.md index b85235e..fed0f17 100644 --- a/README.md +++ b/README.md @@ -30,6 +30,8 @@ Application configuration is read from .env file. 
Below are the list of environe |SEED_ADMIN_PASSWORD |admin password| |SYSTEM_BASE_URL | TDEI API url| |ENVIRONMENT | Environement to run api tester dev, stage, prod| +|DEFAULT_PASSWORD | Default password for the users created | +|DEFAULT_USERNAME | TDEI username which has default group permission only| ## API Tester Environment Data diff --git a/api.input.json b/api.input.json deleted file mode 100644 index e9cb005..0000000 --- a/api.input.json +++ /dev/null @@ -1,77 +0,0 @@ -{ - "dev": { - "osw": { - "published_dataset": "40566429d02c4c80aee68c970977bed8", - "pre_release_dataset": "1978d16b-46f6-4d0c-b1d1-78123178a65b", - "test_dataset": "f5fd7445fbbf4f248ea1096f0e17b7b3", - "spatial_target_dataset": "fa8e12ea-6b0c-4d3e-8b38-5b87b268e76b", - "spatial_source_dataset": "0d661b69495d47fb838862edf699fe09" - }, - "flex": { - "published_dataset": "309dea0b-0fb2-4c19-a771-9f3719b48030", - "pre_release_dataset": "8b0cac15-4f96-4ea8-aac7-934dad1e0367" - }, - "pathways": { - "published_dataset": "cb472b40-38cf-4006-abab-5c819682b08c", - "pre_release_dataset": "b8e068b9-67b4-4fe9-8ddb-c1d64d3702ce" - }, - "users": { - "poc": "adella.legros3@gmail.com", - "flex_data_generator": "reilly_connelly@yahoo.com", - "pathways_data_generator": "emiliano.terry@hotmail.com", - "osw_data_generator": "rhianna.runolfsson@yahoo.com", - "api_key_tester": "apikeytester@mailinator.com", - "default_user": "defaultuser@mailinator.com" - } - }, - "stage": { - "osw": { - "published_dataset": "047c8e5f-bfce-48ee-8dd0-3cdc16d72624", - "pre_release_dataset": "42982431-d983-4c0a-aadb-c88b4ef7d2b8", - "test_dataset": "74d2d6fc-db4a-4f72-8042-e521ef5c3520", - "spatial_target_dataset": "892897ec-d75e-47f5-84ed-efc08ca6a8c6", - "spatial_source_dataset": "0c384536b471477db07a4c6e9d8476bf" - }, - "flex": { - "published_dataset": "3cb8a738-b274-4c6c-a8d4-a212de9a7623", - "pre_release_dataset": "80548cc3-0d49-4642-aa40-ec8da7f63fef" - }, - "pathways": { - "published_dataset": "7912bddf-5333-4a6e-9af8-81b1efd01707", - "pre_release_dataset": "cc186aa4-8576-4d6e-bd89-f5df4e2e5851" - }, - "users": { - "poc": "frederic71@gmail.com", - "flex_data_generator": "vern66@gmail.com", - "pathways_data_generator": "elvera.keeling@gmail.com", - "osw_data_generator": "alyce_kilback60@yahoo.com", - "api_key_tester": "apikeytester@mailinator.com", - "default_user": "defaultuser@mailinator.com" - } - }, - "prod": { - "osw": { - "published_dataset": "40566429d02c4c80aee68c970977bed8", - "pre_release_dataset": "26f0ec82-1434-4944-ae8c-dea8aa5e0075", - "test_dataset": "f5fd7445fbbf4f248ea1096f0e17b7b3", - "spatial_target_dataset": "fa8e12ea-6b0c-4d3e-8b38-5b87b268e76b", - "spatial_source_dataset": "0d661b69495d47fb838862edf699fe09" - }, - "flex": { - "published_dataset": "309dea0b-0fb2-4c19-a771-9f3719b48030", - "pre_release_dataset": "8b0cac15-4f96-4ea8-aac7-934dad1e0367" - }, - "pathways": { - "published_dataset": "cb472b40-38cf-4006-abab-5c819682b08c", - "pre_release_dataset": "b8e068b9-67b4-4fe9-8ddb-c1d64d3702ce" - }, - "users": { - "poc": "adella.legros3@gmail.com", - "flex_data_generator": "reilly_connelly@yahoo.com", - "pathways_data_generator": "emiliano.terry@hotmail.com", - "osw_data_generator": "rhianna.runolfsson@yahoo.com", - "api_key_tester": "apikeytester@mailinator.com", - "default_user": "defaultuser@mailinator.com" - } - } -} \ No newline at end of file diff --git a/jest.setup.ts b/jest.setup.ts index 7e7a389..3046ff2 100644 --- a/jest.setup.ts +++ b/jest.setup.ts @@ -35,6 +35,23 @@ expect.extend({ }, }); +expect.extend({ + 
toBeAbsentOrNullOrObject(received) { + const pass = received === undefined || received === null || typeof received === 'object'; + if (pass) { + return { + message: () => `expected ${received} not to be absent, null, or an object`, + pass: true, + }; + } else { + return { + message: () => `expected ${received} to be absent, null, or an object`, + pass: false, + }; + } + }, +}); + process.on('unhandledRejection', (reason, promise) => { console.error('Unhandled Rejection at:', promise, 'reason:', reason); // Optionally, you can fail the test if there's an unhandled rejection diff --git a/src/__tests__/general.test.ts b/src/__tests__/general.test.ts index d3f3a51..af96d08 100644 --- a/src/__tests__/general.test.ts +++ b/src/__tests__/general.test.ts @@ -1,6 +1,7 @@ import { Configuration, DatasetItemProjectGroup, DatasetItem, DatasetItemStatusEnum, CommonAPIsApi, VersionSpec, DatasetItemService, MetadataModelDatasetDetailCollectionMethodEnum, MetadataModelDatasetDetailDataSourceEnum, JobDetails, JobProgress, ServiceModel, AuthenticationApi, MetricsApi } from "tdei-client"; import { Utility } from "../utils"; import axios, { InternalAxiosRequestConfig } from "axios"; +import { SeedData } from "../models/types"; const NULL_PARAM = void 0; let defaultUserConfiguration: Configuration = {}; @@ -14,9 +15,15 @@ let tdei_project_group_id: string = ""; let tdei_service_id_osw: string = ""; let tdei_service_id_flex: string = ""; let tdei_service_id_pathways: string = ""; -let apiInput: any = {}; + +let user_not_associated_tdei_project_group_id: string = ""; +let user_not_associated_tdei_service_id_osw: string = ""; +let user_not_associated_tdei_service_id_flex: string = ""; +let user_not_associated_tdei_service_id_pathways: string = ""; + let apiTesterConfiguration: Configuration = {}; let apiTesterKeyConfiguration: Configuration = {}; +let seedData: SeedData = {} as SeedData; const cloneDatasetRequestInterceptor = (request: InternalAxiosRequestConfig, tdei_dataset_id: string, tdei_project_group_id: string, tdei_service_id: string, datasetName: string) => { if ( @@ -51,13 +58,16 @@ beforeAll(async () => { await Utility.setAuthToken(apiTesterConfiguration); - let seedData = Utility.seedData; + seedData = Utility.seedData; tdei_project_group_id = seedData.project_group.tdei_project_group_id; tdei_service_id_osw = seedData.services.find(x => x.service_type == "osw")!.tdei_service_id; tdei_service_id_flex = seedData.services.find(x => x.service_type == "flex")!.tdei_service_id; tdei_service_id_pathways = seedData.services.find(x => x.service_type == "pathways")!.tdei_service_id; - apiInput = Utility.getApiInput(); + user_not_associated_tdei_project_group_id = seedData.user_not_associated_project.tdei_project_group_id; + user_not_associated_tdei_service_id_osw = seedData.user_not_associated_service.find(x => x.service_type == "osw")!.tdei_service_id; + user_not_associated_tdei_service_id_flex = seedData.user_not_associated_service.find(x => x.service_type == "flex")!.tdei_service_id; + user_not_associated_tdei_service_id_pathways = seedData.user_not_associated_service.find(x => x.service_type == "pathways")!.tdei_service_id; }, 30000); describe('List Datasets', () => { @@ -96,7 +106,7 @@ describe('List Datasets', () => { NULL_PARAM,// service_id, NULL_PARAM,// valid_from, NULL_PARAM,// valid_to, - apiInput.osw.pre_release_dataset + seedData.datasets.osw.pre_release_dataset ); expect(datasetFiles.status).toBe(200); @@ -124,7 +134,7 @@ describe('List Datasets', () => { NULL_PARAM,// service_id, NULL_PARAM,// 
valid_from, NULL_PARAM,// valid_to, - apiInput.osw.pre_release_dataset + seedData.datasets.osw.pre_release_dataset ); expect(datasetFiles.status).toBe(200); @@ -170,32 +180,39 @@ describe('List Datasets', () => { expect.toBeAbsentOrNullOrString(file.metadata.data_provenance.location_inaccuracy_factors); } - if (file.metadata.dataset_detail) { - expect(file.metadata.dataset_detail).toEqual( - { - name: expect.toBeOneOf([null, expect.any(String)]), - description: expect.toBeOneOf([null, expect.any(String)]), - version: expect.toBeOneOf([null, expect.any(String)]), - custom_metadata: expect.toBeOneOf([null, expect.anything()]), - collected_by: expect.toBeOneOf([null, expect.any(String)]), - collection_date: expect.toBeOneOf([null, expect.any(String)]), - valid_from: expect.toBeOneOf([null, expect.any(String)]), - valid_to: expect.toBeOneOf([null, expect.toBeOneOf([null, expect.any(String)]),]), - collection_method: expect.toBeOneOf([ - null, - MetadataModelDatasetDetailCollectionMethodEnum.Generated.toString(), - MetadataModelDatasetDetailCollectionMethodEnum.Other.toString(), "others", - MetadataModelDatasetDetailCollectionMethodEnum.Transform.toString(), - MetadataModelDatasetDetailCollectionMethodEnum.Manual.toString()]), - data_source: expect.toBeOneOf([ - null, - MetadataModelDatasetDetailDataSourceEnum.InHouse.toString(), - MetadataModelDatasetDetailDataSourceEnum.TDEITools.toString(), - MetadataModelDatasetDetailDataSourceEnum._3rdParty.toString()]), - dataset_area: expect.toBeOneOf([null, expect.toBeObject()]), - schema_version: expect.toBeOneOf([null, expect.any(String)]), - } - ); + if (file.metadata.dataset_detail && Object.keys(file.metadata.dataset_detail).length > 0) { + expect.toBeAbsentOrNullOrString(file.metadata.dataset_detail.name); + expect.toBeAbsentOrNullOrString(file.metadata.dataset_detail.description); + expect.toBeAbsentOrNullOrString(file.metadata.dataset_detail.version); + expect.toBeAbsentOrNullOrObject(file.metadata.dataset_detail.custom_metadata); + expect.toBeAbsentOrNullOrString(file.metadata.dataset_detail.collected_by); + expect.toBeAbsentOrNullOrString(file.metadata.dataset_detail.collection_date); + expect.toBeAbsentOrNullOrString(file.metadata.dataset_detail.valid_from); + expect.toBeAbsentOrNullOrString(file.metadata.dataset_detail.valid_to); + expect.toBeAbsentOrNullOrString(file.metadata.dataset_detail.collection_method); + expect.toBeAbsentOrNullOrString(file.metadata.dataset_detail.data_source); + expect.toBeAbsentOrNullOrObject(file.metadata.dataset_detail.dataset_area); + expect.toBeAbsentOrNullOrString(file.metadata.dataset_detail.schema_version); + + if (file.metadata.dataset_detail.data_source) { + expect(file.metadata.dataset_detail.data_source).toBeOneOf([ + MetadataModelDatasetDetailDataSourceEnum.InHouse.toString(), + MetadataModelDatasetDetailDataSourceEnum.TDEITools.toString(), + MetadataModelDatasetDetailDataSourceEnum._3rdParty.toString() + ]); + } + + + if (file.metadata.dataset_detail.collection_method) { + expect(file.metadata.dataset_detail.collection_method).toBeOneOf([ + null, + MetadataModelDatasetDetailCollectionMethodEnum.Generated.toString(), + MetadataModelDatasetDetailCollectionMethodEnum.Other.toString(), "others", + MetadataModelDatasetDetailCollectionMethodEnum.Transform.toString(), + MetadataModelDatasetDetailCollectionMethodEnum.Manual.toString() + ]); + } + } if (file.metadata.dataset_summary && Object.keys(file.metadata.dataset_summary).length > 0) { 
expect.toBeAbsentOrNullOrString(file.metadata.dataset_summary.key_limitations); @@ -275,32 +292,39 @@ describe('List Datasets', () => { expect.toBeAbsentOrNullOrString(file.metadata.data_provenance.location_inaccuracy_factors); } - if (file.metadata.dataset_detail) { - expect(file.metadata.dataset_detail).toEqual( - { - name: expect.toBeOneOf([null, expect.any(String)]), - description: expect.toBeOneOf([null, expect.any(String)]), - version: expect.toBeOneOf([null, expect.any(String)]), - custom_metadata: expect.toBeOneOf([null, expect.anything()]), - collected_by: expect.toBeOneOf([null, expect.any(String)]), - collection_date: expect.toBeOneOf([null, expect.any(String)]), - valid_from: expect.toBeOneOf([null, expect.any(String)]), - valid_to: expect.toBeOneOf([null, expect.toBeOneOf([null, expect.any(String)]),]), - collection_method: expect.toBeOneOf([ - null, - MetadataModelDatasetDetailCollectionMethodEnum.Generated.toString(), - MetadataModelDatasetDetailCollectionMethodEnum.Other.toString(), "others", - MetadataModelDatasetDetailCollectionMethodEnum.Transform.toString(), - MetadataModelDatasetDetailCollectionMethodEnum.Manual.toString()]), - data_source: expect.toBeOneOf([ - null, - MetadataModelDatasetDetailDataSourceEnum.InHouse.toString(), - MetadataModelDatasetDetailDataSourceEnum.TDEITools.toString(), - MetadataModelDatasetDetailDataSourceEnum._3rdParty.toString()]), - dataset_area: expect.toBeOneOf([null, expect.toBeObject()]), - schema_version: expect.toBeOneOf([null, expect.any(String)]), - } - ); + if (file.metadata.dataset_detail && Object.keys(file.metadata.dataset_detail).length > 0) { + expect.toBeAbsentOrNullOrString(file.metadata.dataset_detail.name); + expect.toBeAbsentOrNullOrString(file.metadata.dataset_detail.description); + expect.toBeAbsentOrNullOrString(file.metadata.dataset_detail.version); + expect.toBeAbsentOrNullOrObject(file.metadata.dataset_detail.custom_metadata); + expect.toBeAbsentOrNullOrString(file.metadata.dataset_detail.collected_by); + expect.toBeAbsentOrNullOrString(file.metadata.dataset_detail.collection_date); + expect.toBeAbsentOrNullOrString(file.metadata.dataset_detail.valid_from); + expect.toBeAbsentOrNullOrString(file.metadata.dataset_detail.valid_to); + expect.toBeAbsentOrNullOrString(file.metadata.dataset_detail.collection_method); + expect.toBeAbsentOrNullOrString(file.metadata.dataset_detail.data_source); + expect.toBeAbsentOrNullOrObject(file.metadata.dataset_detail.dataset_area); + expect.toBeAbsentOrNullOrString(file.metadata.dataset_detail.schema_version); + + if (file.metadata.dataset_detail.data_source) { + expect(file.metadata.dataset_detail.data_source).toBeOneOf([ + MetadataModelDatasetDetailDataSourceEnum.InHouse.toString(), + MetadataModelDatasetDetailDataSourceEnum.TDEITools.toString(), + MetadataModelDatasetDetailDataSourceEnum._3rdParty.toString() + ]); + } + + + if (file.metadata.dataset_detail.collection_method) { + expect(file.metadata.dataset_detail.collection_method).toBeOneOf([ + null, + MetadataModelDatasetDetailCollectionMethodEnum.Generated.toString(), + MetadataModelDatasetDetailCollectionMethodEnum.Other.toString(), "others", + MetadataModelDatasetDetailCollectionMethodEnum.Transform.toString(), + MetadataModelDatasetDetailCollectionMethodEnum.Manual.toString() + ]); + } + } if (file.metadata.dataset_summary && Object.keys(file.metadata.dataset_summary).length > 0) { expect.toBeAbsentOrNullOrString(file.metadata.dataset_summary.key_limitations); @@ -578,7 +602,7 @@ describe('List Datasets', () => { 
NULL_PARAM,// service_id, NULL_PARAM,// valid_from, NULL_PARAM,// valid_to, - apiInput.osw.published_dataset,// tdei_dataset_id, + seedData.datasets.osw.published_dataset,// tdei_dataset_id, NULL_PARAM,// bbox, NULL_PARAM,// other_published_locations, NULL_PARAM,// dataset_update_frequency_months, @@ -626,7 +650,7 @@ describe('List Datasets', () => { expect(datasetFiles.status).toBe(200); expect(datasetFiles.data.length).toBe(1); datasetFiles.data.forEach(file => { - expect(file.tdei_dataset_id).toBe(apiInput.osw.published_dataset) + expect(file.tdei_dataset_id).toBe(seedData.datasets.osw.published_dataset) }); }); @@ -1194,7 +1218,7 @@ describe('List Datasets', () => { // NULL_PARAM,// data_source, // NULL_PARAM,// collection_method, // NULL_PARAM,// collected_by, - // apiInput.osw.test_dataset,// derived_from_dataset_id, + // seedData.datasets.osw.test_dataset,// derived_from_dataset_id, // NULL_PARAM,// collection_date, // NULL_PARAM,// confidence_level, // NULL_PARAM,// schema_version, @@ -1246,7 +1270,7 @@ describe('List Datasets', () => { // expect(datasetFiles.status).toBe(200); // datasetFiles.data.forEach(file => { - // expect(file.derived_from_dataset_id).toBe(apiInput.osw.test_dataset) + // expect(file.derived_from_dataset_id).toBe(seedData.datasets.osw.test_dataset) // }) // }); @@ -1627,7 +1651,7 @@ describe('Clone Dataset', () => { // Arrange let generalAPI = new CommonAPIsApi(pocConfiguration); let metaToUpload = Utility.getMetadataBlob("flex"); - let tdei_dataset_id = apiInput.flex.published_dataset; //"ecf96dce3d36477b8ba53c6833ca4545"; //Published flex dataset + let tdei_dataset_id = seedData.datasets.flex.published_dataset; //"ecf96dce3d36477b8ba53c6833ca4545"; //Published flex dataset // Action const cloneDatasetInterceptor = axios.interceptors.request.use((req: InternalAxiosRequestConfig) => cloneDatasetRequestInterceptor(req, tdei_dataset_id, tdei_project_group_id, tdei_service_id_flex, 'metadata.json')) @@ -1643,7 +1667,7 @@ describe('Clone Dataset', () => { // Arrange let generalAPI = new CommonAPIsApi(adminConfiguration); let metaToUpload = Utility.getMetadataBlob("flex"); - let tdei_dataset_id = apiInput.flex.published_dataset;//"ecf96dce3d36477b8ba53c6833ca4545";//Published flex dataset + let tdei_dataset_id = seedData.datasets.flex.published_dataset;//"ecf96dce3d36477b8ba53c6833ca4545";//Published flex dataset // Action const editMetaInterceptor = axios.interceptors.request.use((req: InternalAxiosRequestConfig) => cloneDatasetRequestInterceptor(req, tdei_dataset_id, tdei_project_group_id, tdei_service_id_flex, 'metadata.json')) @@ -1659,7 +1683,7 @@ describe('Clone Dataset', () => { // Arrange let generalAPI = new CommonAPIsApi(flexDgConfiguration); let metaToUpload = Utility.getMetadataBlob("flex"); - let tdei_dataset_id = apiInput.flex.published_dataset;//"ecf96dce3d36477b8ba53c6833ca4545";//Published flex dataset + let tdei_dataset_id = seedData.datasets.flex.published_dataset;//"ecf96dce3d36477b8ba53c6833ca4545";//Published flex dataset // Action const editMetaInterceptor = axios.interceptors.request.use((req: InternalAxiosRequestConfig) => cloneDatasetRequestInterceptor(req, tdei_dataset_id, tdei_project_group_id, tdei_service_id_flex, 'metadata.json')) @@ -1676,7 +1700,7 @@ describe('Clone Dataset', () => { // Arrange let generalAPI = new CommonAPIsApi(pocConfiguration); let metaToUpload = Utility.getMetadataBlob("pathways"); - let tdei_dataset_id = apiInput.pathways.published_dataset;//"1fa972ecdd034ed6807dc5027dd26da2";//Published Pathways dataset + let 
tdei_dataset_id = seedData.datasets.pathways.published_dataset;//"1fa972ecdd034ed6807dc5027dd26da2";//Published Pathways dataset // Action const editMetaInterceptor = axios.interceptors.request.use((req: InternalAxiosRequestConfig) => cloneDatasetRequestInterceptor(req, tdei_dataset_id, tdei_project_group_id, tdei_service_id_pathways, 'metadata.json')) @@ -1692,7 +1716,7 @@ describe('Clone Dataset', () => { // Arrange let generalAPI = new CommonAPIsApi(adminConfiguration); let metaToUpload = Utility.getMetadataBlob("pathways"); - let tdei_dataset_id = apiInput.pathways.published_dataset;//"1fa972ecdd034ed6807dc5027dd26da2";//Published Pathways dataset + let tdei_dataset_id = seedData.datasets.pathways.published_dataset;//"1fa972ecdd034ed6807dc5027dd26da2";//Published Pathways dataset // Action const editMetaInterceptor = axios.interceptors.request.use((req: InternalAxiosRequestConfig) => cloneDatasetRequestInterceptor(req, tdei_dataset_id, tdei_project_group_id, tdei_service_id_pathways, 'metadata.json')) @@ -1708,7 +1732,7 @@ describe('Clone Dataset', () => { // Arrange let generalAPI = new CommonAPIsApi(pathwaysDgConfiguration); let metaToUpload = Utility.getMetadataBlob("pathways"); - let tdei_dataset_id = apiInput.pathways.published_dataset;//"1fa972ecdd034ed6807dc5027dd26da2";//Published Pathways dataset + let tdei_dataset_id = seedData.datasets.pathways.published_dataset;//"1fa972ecdd034ed6807dc5027dd26da2";//Published Pathways dataset // Action const editMetaInterceptor = axios.interceptors.request.use((req: InternalAxiosRequestConfig) => cloneDatasetRequestInterceptor(req, tdei_dataset_id, tdei_project_group_id, tdei_service_id_pathways, 'metadata.json')) @@ -1725,7 +1749,7 @@ describe('Clone Dataset', () => { // Arrange let generalAPI = new CommonAPIsApi(pocConfiguration); let metaToUpload = Utility.getMetadataBlob("osw"); - let tdei_dataset_id = apiInput.osw.published_dataset;//"d4dc9901f4794f2da414dcb96412b7c1";//Published OSW dataset` + let tdei_dataset_id = seedData.datasets.osw.published_dataset;//"d4dc9901f4794f2da414dcb96412b7c1";//Published OSW dataset` // Action const editMetaInterceptor = axios.interceptors.request.use((req: InternalAxiosRequestConfig) => cloneDatasetRequestInterceptor(req, tdei_dataset_id, tdei_project_group_id, tdei_service_id_osw, 'metadata.json')) @@ -1741,7 +1765,7 @@ describe('Clone Dataset', () => { // Arrange let generalAPI = new CommonAPIsApi(adminConfiguration); let metaToUpload = Utility.getMetadataBlob("osw"); - let tdei_dataset_id = apiInput.osw.published_dataset;//"d4dc9901f4794f2da414dcb96412b7c1";//Published OSW dataset + let tdei_dataset_id = seedData.datasets.osw.published_dataset;//"d4dc9901f4794f2da414dcb96412b7c1";//Published OSW dataset // Action const editMetaInterceptor = axios.interceptors.request.use((req: InternalAxiosRequestConfig) => cloneDatasetRequestInterceptor(req, tdei_dataset_id, tdei_project_group_id, tdei_service_id_osw, 'metadata.json')) @@ -1757,7 +1781,7 @@ describe('Clone Dataset', () => { // Arrange let generalAPI = new CommonAPIsApi(oswDgConfiguration); let metaToUpload = Utility.getMetadataBlob("osw"); - let tdei_dataset_id = apiInput.osw.published_dataset;//"d4dc9901f4794f2da414dcb96412b7c1";//Published OSW dataset + let tdei_dataset_id = seedData.datasets.osw.published_dataset;//"d4dc9901f4794f2da414dcb96412b7c1";//Published OSW dataset // Action const editMetaInterceptor = axios.interceptors.request.use((req: InternalAxiosRequestConfig) => cloneDatasetRequestInterceptor(req, tdei_dataset_id, 
tdei_project_group_id, tdei_service_id_osw, 'metadata.json')) @@ -1773,12 +1797,12 @@ describe('Clone Dataset', () => { // Arrange let generalAPI = new CommonAPIsApi(pocConfiguration); let metaToUpload = Utility.getMetadataBlob("flex"); - let tdei_dataset_id = apiInput.flex.pre_release_dataset;//"f2574fe66f0046389acc68ee5848e3a9";//Pre-Release dataset + let tdei_dataset_id = seedData.datasets.flex.pre_release_dataset;//"f2574fe66f0046389acc68ee5848e3a9";//Pre-Release dataset // Action const editMetaInterceptor = axios.interceptors.request.use((req: InternalAxiosRequestConfig) => cloneDatasetRequestInterceptor(req, tdei_dataset_id, tdei_project_group_id, tdei_service_id_flex, 'metadata.json')) // Assert - await expect(generalAPI.cloneDatasetForm(metaToUpload, tdei_dataset_id, tdei_project_group_id, tdei_service_id_flex)).rejects.toMatchObject({ response: { status: 400 } }); + await expect(generalAPI.cloneDatasetForm(metaToUpload, tdei_dataset_id, user_not_associated_tdei_project_group_id, tdei_service_id_flex)).rejects.toMatchObject({ response: { status: 400 } }); axios.interceptors.request.eject(editMetaInterceptor); }, 30000); @@ -1786,7 +1810,7 @@ describe('Clone Dataset', () => { // Arrange let generalAPI = new CommonAPIsApi(pocConfiguration); let metaToUpload = Utility.getMetadataBlob("flex"); - let tdei_dataset_id = apiInput.flex.pre_release_dataset;//"f2574fe66f0046389acc68ee5848e3a9";//Pre-Release dataset + let tdei_dataset_id = seedData.datasets.flex.pre_release_dataset;//"f2574fe66f0046389acc68ee5848e3a9";//Pre-Release dataset // Action const editMetaInterceptor = axios.interceptors.request.use((req: InternalAxiosRequestConfig) => cloneDatasetRequestInterceptor(req, tdei_dataset_id, tdei_project_group_id, tdei_service_id_osw, 'metadata.json')) @@ -1799,7 +1823,7 @@ describe('Clone Dataset', () => { // Arrange let generalAPI = new CommonAPIsApi(pocConfiguration); let metaToUpload = Utility.getMetadataBlob("flex"); - let tdei_dataset_id = apiInput.flex.pre_release_dataset;//"f2574fe66f0046389acc68ee5848e3a9";//Pre-Release dataset + let tdei_dataset_id = seedData.datasets.flex.pre_release_dataset;//"f2574fe66f0046389acc68ee5848e3a9";//Pre-Release dataset // Action const editMetaInterceptor = axios.interceptors.request.use((req: InternalAxiosRequestConfig) => cloneDatasetRequestInterceptor(req, tdei_dataset_id, tdei_project_group_id, "invalid_service_id", 'metadata.json')) @@ -1812,25 +1836,25 @@ describe('Clone Dataset', () => { // Arrange let generalAPI = new CommonAPIsApi(pocConfiguration); let metaToUpload = Utility.getMetadataBlob("flex"); - let tdei_dataset_id = apiInput.flex.pre_release_dataset;//"f2574fe66f0046389acc68ee5848e3a9";//Pre-Release dataset + let tdei_dataset_id = seedData.datasets.flex.pre_release_dataset;//"f2574fe66f0046389acc68ee5848e3a9";//Pre-Release dataset // Action const editMetaInterceptor = axios.interceptors.request.use((req: InternalAxiosRequestConfig) => cloneDatasetRequestInterceptor(req, tdei_dataset_id, "invalid_project_id", tdei_service_id_flex, 'metadata.json')) // Assert await expect(generalAPI.cloneDatasetForm(metaToUpload, tdei_dataset_id, "invalid_project_id", tdei_service_id_flex)).rejects.toMatchObject({ response: { status: 404 } }); axios.interceptors.request.eject(editMetaInterceptor); - }, 30000);`` + }, 30000); `` it('POC | Authenticated , When request made to clone flex dataset with service id not associated with project group id, expect to return input error', async () => { // Arrange let generalAPI = new 
CommonAPIsApi(pocConfiguration); let metaToUpload = Utility.getMetadataBlob("flex"); - let tdei_dataset_id = apiInput.flex.pre_release_dataset;//"0b165272-afff-46b9-8eb4-14f81bfb92b7";//Pre-Release other project group dataset + let tdei_dataset_id = seedData.datasets.flex.pre_release_dataset;//"0b165272-afff-46b9-8eb4-14f81bfb92b7";//Pre-Release other project group dataset // Action const editMetaInterceptor = axios.interceptors.request.use((req: InternalAxiosRequestConfig) => cloneDatasetRequestInterceptor(req, tdei_dataset_id, tdei_project_group_id, tdei_service_id_flex, 'metadata.json')) // Assert - await expect(generalAPI.cloneDatasetForm(metaToUpload, tdei_dataset_id, tdei_project_group_id, tdei_service_id_flex)).rejects.toMatchObject({ response: { status: 400 } }); + await expect(generalAPI.cloneDatasetForm(metaToUpload, tdei_dataset_id, tdei_project_group_id, user_not_associated_tdei_service_id_flex)).rejects.toMatchObject({ response: { status: 400 } }); axios.interceptors.request.eject(editMetaInterceptor); }, 30000); @@ -1838,7 +1862,7 @@ describe('Clone Dataset', () => { // Arrange let generalAPI = new CommonAPIsApi(pocConfiguration); let metaToUpload = Utility.getInvalidMetadataBlob("flex"); - let tdei_dataset_id = apiInput.flex.pre_release_dataset;//"f2574fe66f0046389acc68ee5848e3a9";//Pre-Release dataset + let tdei_dataset_id = seedData.datasets.flex.pre_release_dataset;//"f2574fe66f0046389acc68ee5848e3a9";//Pre-Release dataset // Action const editMetaInterceptor = axios.interceptors.request.use((req: InternalAxiosRequestConfig) => cloneDatasetRequestInterceptor(req, tdei_dataset_id, tdei_project_group_id, tdei_service_id_flex, 'metadata.json')) @@ -1851,7 +1875,7 @@ describe('Clone Dataset', () => { // Arrange let generalAPI = new CommonAPIsApi(Utility.getAdminConfiguration()); let metaToUpload = Utility.getMetadataBlob("flex"); - let tdei_dataset_id = apiInput.flex.pre_release_dataset; + let tdei_dataset_id = seedData.datasets.flex.pre_release_dataset; // Action const cloneMetaInterceptor = axios.interceptors.request.use((req: InternalAxiosRequestConfig) => cloneDatasetRequestInterceptor(req, tdei_dataset_id, tdei_project_group_id, tdei_service_id_osw, 'metadata.json')) diff --git a/src/__tests__/gtfsflex.test.ts b/src/__tests__/gtfsflex.test.ts index 15441dc..8464638 100644 --- a/src/__tests__/gtfsflex.test.ts +++ b/src/__tests__/gtfsflex.test.ts @@ -3,6 +3,7 @@ import { Utility } from "../utils"; import axios, { InternalAxiosRequestConfig } from "axios"; import AdmZip from "adm-zip"; import exp from "constants"; +import { SeedData } from "../models/types"; const NULL_PARAM = void 0; @@ -17,7 +18,7 @@ let uploadedDatasetId: string = '1'; let publishJobId: string = '1'; let tdei_project_group_id = ""; let service_id = ""; -let apiInput: any = {}; +let seedData: SeedData = {} as SeedData; const editMetadataRequestInterceptor = (request: InternalAxiosRequestConfig, tdei_dataset_id: string, datasetName: string) => { if ( @@ -63,7 +64,7 @@ const validateRequestInterceptor = (request: InternalAxiosRequestConfig, dataset }; beforeAll(async () => { - let seedData = Utility.seedData; + seedData = Utility.seedData; tdei_project_group_id = seedData.project_group.tdei_project_group_id; service_id = seedData.services.find(x => x.service_type == "flex")!.tdei_service_id; apiKeyConfiguration = Utility.getApiKeyConfiguration(); @@ -75,8 +76,6 @@ beforeAll(async () => { await Utility.setAuthToken(pocConfiguration); await Utility.setAuthToken(dgConfiguration); await 
Utility.setAuthToken(oswdgConfiguration); - apiInput = Utility.getApiInput(); - }); @@ -379,7 +378,7 @@ describe('Publish the flex dataset', () => { it('Admin | When passed with already published tdei_dataset_id, should respond with bad request', async () => { let flexAPI = new GTFSFlexApi(adminConfiguration); - let tdei_dataset_id = apiInput.flex.published_dataset; + let tdei_dataset_id = seedData.datasets.flex.published_dataset; let publishResponse = flexAPI.publishGtfsFlexFile(tdei_dataset_id); @@ -398,7 +397,7 @@ describe('Publish the flex dataset', () => { it('Admin | When passed with osw tdei_dataset_id, should respond with daset type mismatch error', async () => { let flexAPI = new GTFSFlexApi(adminConfiguration); - let tdei_dataset_id = apiInput.osw.pre_release_dataset; + let tdei_dataset_id = seedData.datasets.osw.pre_release_dataset; let publishResponse = flexAPI.publishGtfsFlexFile(tdei_dataset_id); @@ -683,7 +682,7 @@ describe('Download flex dataset', () => { let flexAPI = new GTFSFlexApi(adminConfiguration); - let response = flexAPI.getGtfsFlexFile(apiInput.pathways.pre_release_dataset); + let response = flexAPI.getGtfsFlexFile(seedData.datasets.pathways.pre_release_dataset); await expect(response).rejects.toMatchObject({ response: { status: 400 } }); diff --git a/src/__tests__/gtfspathways.test.ts b/src/__tests__/gtfspathways.test.ts index 653aaac..6350ee7 100644 --- a/src/__tests__/gtfspathways.test.ts +++ b/src/__tests__/gtfspathways.test.ts @@ -2,6 +2,7 @@ import { Configuration, GTFSPathwaysApi, CommonAPIsApi, VersionSpec } from "tdei import { Utility } from "../utils"; import axios, { InternalAxiosRequestConfig } from "axios"; import AdmZip from "adm-zip"; +import { SeedData } from "../models/types"; let apiKeyConfiguration: Configuration = {}; let pocConfiguration: Configuration = {}; @@ -16,7 +17,7 @@ let publishJobId: string = '1'; let uploadedDatasetId: string = '1'; let tdei_project_group_id = ""; let service_id = ""; -let apiInput: any = {}; +let seedData: SeedData = {} as SeedData; const editMetadataRequestInterceptor = (request: InternalAxiosRequestConfig, tdei_dataset_id: string, datasetName: string) => { if ( @@ -61,7 +62,7 @@ const validateRequestInterceptor = (request: InternalAxiosRequestConfig, dataset }; beforeAll(async () => { - let seedData = Utility.seedData; + seedData = Utility.seedData; tdei_project_group_id = seedData.project_group.tdei_project_group_id; service_id = seedData.services.find(x => x.service_type == "pathways")!.tdei_service_id; adminConfiguration = Utility.getAdminConfiguration(); @@ -73,7 +74,6 @@ beforeAll(async () => { await Utility.setAuthToken(pocConfiguration); await Utility.setAuthToken(dgConfiguration); await Utility.setAuthToken(oswDgConfiguration); - apiInput = Utility.getApiInput(); }); @@ -408,7 +408,7 @@ describe('Publish the pathways dataset', () => { it('Admin | When passed with already published tdei_dataset_id, should respond with bad request', async () => { let pathwaysAPI = new GTFSPathwaysApi(adminConfiguration); - let tdei_dataset_id = apiInput.pathways.published_dataset; + let tdei_dataset_id = seedData.datasets.pathways.published_dataset; let publishResponse = pathwaysAPI.publishGtfsPathwaysFile(tdei_dataset_id); @@ -417,7 +417,7 @@ describe('Publish the pathways dataset', () => { it('Admin | When passed with osw dataset id, should respond with invalid dataset type error', async () => { let pathwaysAPI = new GTFSPathwaysApi(adminConfiguration); - let tdei_dataset_id = apiInput.osw.pre_release_dataset; + let 
tdei_dataset_id = seedData.datasets.osw.pre_release_dataset; let publishResponse = pathwaysAPI.publishGtfsPathwaysFile(tdei_dataset_id); @@ -716,7 +716,7 @@ describe('Download pathways dataset', () => { let pathwaysAPI = new GTFSPathwaysApi(adminConfiguration); - let response = pathwaysAPI.getGtfsPathwaysFile(apiInput.flex.pre_release_dataset); + let response = pathwaysAPI.getGtfsPathwaysFile(seedData.datasets.flex.pre_release_dataset); await expect(response).rejects.toMatchObject({ response: { status: 400 } }); diff --git a/src/__tests__/osw.test.ts b/src/__tests__/osw.test.ts index 62717f6..cb77cf7 100644 --- a/src/__tests__/osw.test.ts +++ b/src/__tests__/osw.test.ts @@ -2,6 +2,7 @@ import { OSWApi, VersionSpec, CommonAPIsApi, Configuration, JobDetails, JobDetai import axios, { InternalAxiosRequestConfig } from "axios"; import { Utility } from "../utils"; import AdmZip from "adm-zip"; +import { SeedData } from "../models/types"; const { addMsg } = require("jest-html-reporters/helper"); let apiKeyConfiguration: Configuration = {}; @@ -27,8 +28,8 @@ let tdei_project_group_id = ""; let service_id = ""; let qualityMetricJobId = '1'; const NULL_PARAM = void 0; -let apiInput: any = {}; let bboxRecordId = ""; +let seedData: SeedData = {} as SeedData; const tagQualityRequestInterceptor = (request: InternalAxiosRequestConfig, tdei_dataset_id: string, datasetName: string) => { @@ -114,7 +115,7 @@ const oswConfidenceRequestInterceptor = (request: InternalAxiosRequestConfig, td }; beforeAll(async () => { - let seedData = Utility.seedData; + seedData = Utility.seedData; tdei_project_group_id = seedData.project_group.tdei_project_group_id; service_id = seedData.services.find(x => x.service_type == "osw")!.tdei_service_id; adminConfiguration = Utility.getAdminConfiguration(); @@ -123,8 +124,7 @@ beforeAll(async () => { dgConfiguration = Utility.getOSWDataGeneratorConfiguration(); flexDgConfiguration = Utility.getFlexDataGeneratorConfiguration(); pathwaysDgConfiguration = Utility.getPathwaysDataGeneratorConfiguration(); - apiInput = Utility.getApiInput(); - bboxRecordId = apiInput.osw.test_dataset; + bboxRecordId = seedData.datasets.osw.test_dataset; await authenticate(); }); @@ -648,7 +648,7 @@ describe('Publish the OSW dataset', () => { it('When passed with already published tdei_dataset_id, should respond with bad request', async () => { let oswAPI = new OSWApi(adminConfiguration); - let tdei_dataset_id = apiInput.osw.published_dataset; + let tdei_dataset_id = seedData.datasets.osw.published_dataset; let publishOswResponse = oswAPI.publishOswFile(tdei_dataset_id); @@ -659,7 +659,7 @@ describe('Publish the OSW dataset', () => { let oswAPI = new OSWApi(adminConfiguration); - let publishOswResponse = oswAPI.publishOswFile(apiInput.flex.pre_release_dataset); + let publishOswResponse = oswAPI.publishOswFile(seedData.datasets.flex.pre_release_dataset); await expect(publishOswResponse).rejects.toMatchObject({ response: { status: 400 } }); }); @@ -931,7 +931,7 @@ describe('Calculate dataset confidence request', () => { it('Admin | Authenticated , When request made with flex dataset id, should respond with Dataset type mismatch error', async () => { let oswAPI = new OSWApi(adminConfiguration); - let calculateConfidenceResponse = oswAPI.oswConfidenceCalculateForm(apiInput.flex.pre_release_dataset); + let calculateConfidenceResponse = oswAPI.oswConfidenceCalculateForm(seedData.datasets.flex.pre_release_dataset); await expect(calculateConfidenceResponse).rejects.toMatchObject({ response: { status: 400 } }); }) 
@@ -1211,10 +1211,10 @@ describe('Check convert request job running status', () => { }) describe('Download converted file', () => { - jest.retryTimes(1, { logErrorsBeforeRetry: true }); + jest.retryTimes(3, { logErrorsBeforeRetry: true }); it('OSW Data Generator | Authenticated , When request made with tdei_dataset_id, should stream the zip file', async () => { let generalAPI = new CommonAPIsApi(dgConfiguration); - await new Promise((r) => setTimeout(r, 20000)); + await new Promise((r) => setTimeout(r, 60000)); let response = await generalAPI.jobDownload(convertJobId, { responseType: 'arraybuffer' }); const data: any = response.data; @@ -1228,7 +1228,7 @@ describe('Download converted file', () => { const entries = zip.getEntries(); expect(entries.length).toBe(1); } - }, 30000); + }, 65000); it('Admin | un-authenticated , When request made, should respond with unauthenticated request', async () => { let generalAPI = new CommonAPIsApi(Utility.getAdminConfiguration()); @@ -1293,7 +1293,7 @@ describe('Download OSW File as zip', () => { let oswAPI = new OSWApi(adminConfiguration); - let response = oswAPI.getOswFile(apiInput.flex.pre_release_dataset); + let response = oswAPI.getOswFile(seedData.datasets.flex.pre_release_dataset); await expect(response).rejects.toMatchObject({ response: { status: 400 } }); @@ -1314,7 +1314,7 @@ describe('Dataset Bbox Request', () => { it('OSW Data Generator | Authenticated ,[OSM] When request made with valid dataset, should return request job id as response', async () => { let oswAPI = new OSWApi(dgConfiguration); - let bboxRequest = await oswAPI.datasetBbox(bboxRecordId, 'osm', [-122.264913, 47.558543, -122.10549, 47.691327]); + let bboxRequest = await oswAPI.datasetBbox(bboxRecordId, 'osm', [-118.27222419, 34.0511586948, -118.2658509169, 34.0559536885]); expect(bboxRequest.status).toBe(202); expect(bboxRequest.data).toBeNumber(); @@ -1327,7 +1327,7 @@ describe('Dataset Bbox Request', () => { it('POC | Authenticated ,[OSM] When request made with valid dataset, should return request job id as response', async () => { let oswAPI = new OSWApi(pocConfiguration); - let bboxRequest = await oswAPI.datasetBbox(bboxRecordId, 'osm', [-122.264913, 47.558543, -122.10549, 47.691327]); + let bboxRequest = await oswAPI.datasetBbox(bboxRecordId, 'osm', [-118.27222419, 34.0511586948, -118.2658509169, 34.0559536885]); expect(bboxRequest.status).toBe(202); expect(bboxRequest.data).toBeNumber(); @@ -1336,7 +1336,7 @@ describe('Dataset Bbox Request', () => { it('Admin | Authenticated ,[OSM] When request made with valid dataset, should return request job id as response', async () => { let oswAPI = new OSWApi(adminConfiguration); - let bboxRequest = await oswAPI.datasetBbox(bboxRecordId, 'osm', [-122.264913, 47.558543, -122.10549, 47.691327]); + let bboxRequest = await oswAPI.datasetBbox(bboxRecordId, 'osm', [-118.27222419, 34.0511586948, -118.2658509169, 34.0559536885]); expect(bboxRequest.status).toBe(202); expect(bboxRequest.data).toBeNumber(); @@ -1346,7 +1346,7 @@ describe('Dataset Bbox Request', () => { it('API-Key | Authenticated ,[OSM] When request made with dataset, should return request job id as response', async () => { let oswAPI = new OSWApi(apiKeyConfiguration); - let bboxRequest = await oswAPI.datasetBbox(bboxRecordId, 'osm', [-122.264913, 47.558543, -122.10549, 47.691327], { headers: { 'x-api-key': apiKeyConfiguration.apiKey?.toString() } }); + let bboxRequest = await oswAPI.datasetBbox(bboxRecordId, 'osm', [-118.27222419, 34.0511586948, -118.2658509169, 
34.0559536885], { headers: { 'x-api-key': apiKeyConfiguration.apiKey?.toString() } }); expect(bboxRequest.status).toBe(202); expect(bboxRequest.data).toBeNumber(); @@ -1355,7 +1355,7 @@ describe('Dataset Bbox Request', () => { it('OSW Data Generator | Authenticated ,[OSW] When request made with valid dataset, should return request job id as response', async () => { let oswAPI = new OSWApi(dgConfiguration); - let bboxRequest = await oswAPI.datasetBbox(bboxRecordId, 'osw', [-122.264913, 47.558543, -122.10549, 47.691327]); + let bboxRequest = await oswAPI.datasetBbox(bboxRecordId, 'osw', [-118.27222419, 34.0511586948, -118.2658509169, 34.0559536885]); expect(bboxRequest.status).toBe(202); expect(bboxRequest.data).toBeNumber(); @@ -1366,7 +1366,7 @@ describe('Dataset Bbox Request', () => { it('POC | Authenticated ,[OSW] When request made with valid dataset, should return request job id as response', async () => { let oswAPI = new OSWApi(pocConfiguration); - let bboxRequest = await oswAPI.datasetBbox(bboxRecordId, 'osw', [-122.264913, 47.558543, -122.10549, 47.691327]); + let bboxRequest = await oswAPI.datasetBbox(bboxRecordId, 'osw', [-118.27222419, 34.0511586948, -118.2658509169, 34.0559536885]); expect(bboxRequest.status).toBe(202); expect(bboxRequest.data).toBeNumber(); @@ -1375,7 +1375,7 @@ describe('Dataset Bbox Request', () => { it('Admin | Authenticated ,[OSW] When request made with valid dataset, should return request job id as response', async () => { let oswAPI = new OSWApi(adminConfiguration); - let bboxRequest = await oswAPI.datasetBbox(bboxRecordId, 'osw', [-122.264913, 47.558543, -122.10549, 47.691327]); + let bboxRequest = await oswAPI.datasetBbox(bboxRecordId, 'osw', [-118.27222419, 34.0511586948, -118.2658509169, 34.0559536885]); expect(bboxRequest.status).toBe(202); expect(bboxRequest.data).toBeNumber(); @@ -1385,7 +1385,7 @@ describe('Dataset Bbox Request', () => { it('API-Key | Authenticated ,[OSW] When request made with dataset, should return request job id as response', async () => { let oswAPI = new OSWApi(apiKeyConfiguration); - let bboxRequest = await oswAPI.datasetBbox(bboxRecordId, 'osw', [-122.264913, 47.558543, -122.10549, 47.691327], { headers: { 'x-api-key': apiKeyConfiguration.apiKey?.toString() } }); + let bboxRequest = await oswAPI.datasetBbox(bboxRecordId, 'osw', [-118.27222419, 34.0511586948, -118.2658509169, 34.0559536885], { headers: { 'x-api-key': apiKeyConfiguration.apiKey?.toString() } }); expect(bboxRequest.status).toBe(202); expect(bboxRequest.data).toBeNumber(); @@ -1394,7 +1394,7 @@ describe('Dataset Bbox Request', () => { it('Admin | un-authenticated , When request made with dataset, should return with unauthenticated request', async () => { let oswAPI = new OSWApi(Utility.getAdminConfiguration()); - let bboxRequest = oswAPI.datasetBbox(bboxRecordId, 'osm', [-122.264913, 47.558543, -122.10549, 47.691327]); + let bboxRequest = oswAPI.datasetBbox(bboxRecordId, 'osm', [-118.27222419, 34.0511586948, -118.2658509169, 34.0559536885]); await expect(bboxRequest).rejects.toMatchObject({ response: { status: 401 } }); }); @@ -1402,7 +1402,7 @@ describe('Dataset Bbox Request', () => { it('Admin | Authenticated , When request made with invalid dataset, should return with dataset not found error', async () => { let oswAPI = new OSWApi(adminConfiguration); - let bboxRequest = oswAPI.datasetBbox("invalid_bboxRecordId", 'osm', [-122.264913, 47.558543, -122.10549, 47.691327]); + let bboxRequest = oswAPI.datasetBbox("invalid_bboxRecordId", 'osm', [-118.27222419, 
34.0511586948, -118.2658509169, 34.0559536885]); await expect(bboxRequest).rejects.toMatchObject({ response: { status: 404 } }); }); @@ -1418,7 +1418,7 @@ describe('Dataset Bbox Request', () => { it('Admin | Authenticated , When request made with flex dataset, should return with dataset type mismatch error error', async () => { let oswAPI = new OSWApi(adminConfiguration); - let bboxRequest = oswAPI.datasetBbox(apiInput.flex.pre_release_dataset, 'osm', [-122.264913, 47.558543, -122.10549, 47.691327]); + let bboxRequest = oswAPI.datasetBbox(seedData.datasets.flex.pre_release_dataset, 'osm', [-118.27222419, 34.0511586948, -118.2658509169, 34.0559536885]); await expect(bboxRequest).rejects.toMatchObject({ response: { status: 400 } }); }); @@ -1495,9 +1495,9 @@ describe('Check dataset-bbox request job running status', () => { }); describe('Download Dataset Bbox request file', () => { - + jest.retryTimes(3, { logErrorsBeforeRetry: true }); it('OSW Data Generator | Authenticated , When request made with tdei_dataset_id, should stream the zip file', async () => { - await new Promise((r) => setTimeout(r, 30000)); + await new Promise((r) => setTimeout(r, 40000)); let generalAPI = new CommonAPIsApi(dgConfiguration); let response = await generalAPI.jobDownload(datasetBboxJobIdOSM, { responseType: 'arraybuffer' }); @@ -1512,7 +1512,7 @@ describe('Download Dataset Bbox request file', () => { const entries = zip.getEntries(); expect(entries.length).toBeGreaterThanOrEqual(1); } - }, 40000); + }, 45000); it('Admin | un-authenticated , When request made with tdei_dataset_id, should respond with unauthenticated request', async () => { let generalAPI = new CommonAPIsApi(Utility.getAdminConfiguration()); @@ -1526,13 +1526,13 @@ describe('Download Dataset Bbox request file', () => { let datasetRoadTagJobId = '1'; describe('Dataset Road Tag Request', () => { - // let datasetTagSourceRecordId = apiInput.osw.test_dataset; - // let datasetTagTargetPublishedRecordId = apiInput.osw.published_dataset;//'762f3533-b18f-470f-8051-1a7988bf80c7'; + // let datasetTagSourceRecordId = seedData.datasets.osw.test_dataset; + // let datasetTagTargetPublishedRecordId = seedData.datasets.osw.published_dataset;//'762f3533-b18f-470f-8051-1a7988bf80c7'; it('OSW Data Generator | Authenticated , When request made with valid dataset, should return request job id as response', async () => { let oswAPI = new OSWApi(dgConfiguration); - let roadTagRequest = await oswAPI.datasetTagRoad(apiInput.osw.test_dataset, uploadedDatasetId_PreRelease_poc); + let roadTagRequest = await oswAPI.datasetTagRoad(seedData.datasets.osw.test_dataset, uploadedDatasetId_PreRelease_poc); expect(roadTagRequest.status).toBe(202); expect(roadTagRequest.data).toBeNumber(); @@ -1546,7 +1546,7 @@ describe('Dataset Road Tag Request', () => { it('Admin | Authenticated , When request made with valid dataset, should return request job id as response', async () => { let oswAPI = new OSWApi(adminConfiguration); - let bboxRequest = await oswAPI.datasetTagRoad(apiInput.osw.test_dataset, uploadedDatasetId_PreRelease_poc); + let bboxRequest = await oswAPI.datasetTagRoad(seedData.datasets.osw.test_dataset, uploadedDatasetId_PreRelease_poc); expect(bboxRequest.status).toBe(202); expect(bboxRequest.data).toBeNumber(); @@ -1555,7 +1555,7 @@ describe('Dataset Road Tag Request', () => { it('POC | Authenticated , When request made with valid dataset, should return request job id as response', async () => { let oswAPI = new OSWApi(pocConfiguration); - let bboxRequest = await 
oswAPI.datasetTagRoad(apiInput.osw.test_dataset, uploadedDatasetId_PreRelease_poc); + let bboxRequest = await oswAPI.datasetTagRoad(seedData.datasets.osw.test_dataset, uploadedDatasetId_PreRelease_poc); expect(bboxRequest.status).toBe(202); expect(bboxRequest.data).toBeNumber(); @@ -1564,7 +1564,7 @@ describe('Dataset Road Tag Request', () => { it('Admin | authenticated , When request made with publish target dataset, should return with bad request', async () => { let oswAPI = new OSWApi(adminConfiguration); - let bboxRequest = oswAPI.datasetTagRoad(apiInput.osw.test_dataset, apiInput.osw.published_dataset); + let bboxRequest = oswAPI.datasetTagRoad(seedData.datasets.osw.test_dataset, seedData.datasets.osw.published_dataset); await expect(bboxRequest).rejects.toMatchObject({ response: { status: 400 } }); }); @@ -1573,7 +1573,7 @@ describe('Dataset Road Tag Request', () => { let oswAPI = new OSWApi(adminConfiguration); - let bboxRequest = oswAPI.datasetTagRoad("invalid_source", apiInput.osw.published_dataset); + let bboxRequest = oswAPI.datasetTagRoad("invalid_source", seedData.datasets.osw.published_dataset); await expect(bboxRequest).rejects.toMatchObject({ response: { status: 404 } }); }); @@ -1581,7 +1581,7 @@ describe('Dataset Road Tag Request', () => { it('Admin | authenticated , When request made with invalid target dataset, should return with dataset not found error', async () => { let oswAPI = new OSWApi(adminConfiguration); - let bboxRequest = oswAPI.datasetTagRoad(apiInput.osw.test_dataset, "invalid_target"); + let bboxRequest = oswAPI.datasetTagRoad(seedData.datasets.osw.test_dataset, "invalid_target"); await expect(bboxRequest).rejects.toMatchObject({ response: { status: 404 } }); }); @@ -1589,7 +1589,7 @@ describe('Dataset Road Tag Request', () => { it('Admin | un-authenticated , When request made with dataset, should return with unauthenticated request', async () => { let oswAPI = new OSWApi(Utility.getAdminConfiguration()); - let bboxRequest = oswAPI.datasetTagRoad(apiInput.osw.test_dataset, uploadedDatasetId_PreRelease_poc); + let bboxRequest = oswAPI.datasetTagRoad(seedData.datasets.osw.test_dataset, uploadedDatasetId_PreRelease_poc); await expect(bboxRequest).rejects.toMatchObject({ response: { status: 401 } }); }); @@ -1597,7 +1597,7 @@ describe('Dataset Road Tag Request', () => { it('API-Key | Authenticated , When request made with dataset, should return with unauthorized request', async () => { let oswAPI = new OSWApi(apiKeyConfiguration); - let bboxRequest = oswAPI.datasetTagRoad(apiInput.osw.test_dataset, uploadedDatasetId_PreRelease_poc, { headers: { 'x-api-key': apiKeyConfiguration.apiKey?.toString() } }); + let bboxRequest = oswAPI.datasetTagRoad(seedData.datasets.osw.test_dataset, uploadedDatasetId_PreRelease_poc, { headers: { 'x-api-key': apiKeyConfiguration.apiKey?.toString() } }); await expect(bboxRequest).rejects.toMatchObject({ response: { status: 403 } }); }); @@ -1647,12 +1647,13 @@ describe('Check dataset-road-tag request job completion status', () => { }); describe('Download Dataset Road Tag request file', () => { + jest.retryTimes(3, { logErrorsBeforeRetry: true }); it('Admin | Authenticated , When request made with tdei_dataset_id, should stream the zip file', async () => { let generalAPI = new CommonAPIsApi(adminConfiguration); - await new Promise((r) => setTimeout(r, 10000)); + await new Promise((r) => setTimeout(r, 20000)); - let response = await generalAPI.jobDownload(datasetBboxJobIdOSW, { responseType: 'arraybuffer' }); + let response = await 
generalAPI.jobDownload(datasetRoadTagJobId, { responseType: 'arraybuffer' }); const data: any = response.data; const contentType = response.headers['content-type']; @@ -1664,12 +1665,12 @@ describe('Download Dataset Road Tag request file', () => { const entries = zip.getEntries(); expect(entries.length).toBeGreaterThanOrEqual(1); } - }, 20000); + }, 25000); it('API-Key | Authenticated , When request made with tdei_dataset_id, should stream the zip file', async () => { let generalAPI = new CommonAPIsApi(apiKeyConfiguration); - let response = await generalAPI.jobDownload(datasetBboxJobIdOSW, { responseType: 'arraybuffer' }); + let response = await generalAPI.jobDownload(datasetRoadTagJobId, { responseType: 'arraybuffer' }); const data: any = response.data; const contentType = response.headers['content-type']; @@ -1686,7 +1687,7 @@ describe('Download Dataset Road Tag request file', () => { it('Admin | un-authenticated , When request made with tdei_dataset_id, should respond with unauthenticated request', async () => { let generalAPI = new CommonAPIsApi(Utility.getAdminConfiguration()); - let downloadResponse = generalAPI.jobDownload(datasetBboxJobIdOSM); + let downloadResponse = generalAPI.jobDownload(datasetRoadTagJobId); await expect(downloadResponse).rejects.toMatchObject({ response: { status: 401 } }); }); @@ -1775,7 +1776,7 @@ describe('Dataset Union Request', () => { let oswAPI = new OSWApi(apiKeyConfiguration); let bboxRequest = await oswAPI.oswUnion({ - tdei_dataset_id_one: apiInput.osw.test_dataset, + tdei_dataset_id_one: seedData.datasets.osw.test_dataset, tdei_dataset_id_two: uploadedDatasetId_PreRelease_poc }, { headers: { 'x-api-key': apiKeyConfiguration.apiKey?.toString() } }); @@ -1786,7 +1787,7 @@ describe('Dataset Union Request', () => { }); describe('Check dataset union request job completion status', () => { - jest.retryTimes(1, { logErrorsBeforeRetry: true }); + jest.retryTimes(3, { logErrorsBeforeRetry: true }); it('OSW Data Generator | Authenticated , When request made, should respond with job status', async () => { let generalAPI = new CommonAPIsApi(dgConfiguration); @@ -1828,6 +1829,7 @@ describe('Check dataset union request job completion status', () => { }); describe('Download Dataset Union request file', () => { + jest.retryTimes(3, { logErrorsBeforeRetry: true }); it('Admin | Authenticated , When request made with tdei_dataset_id, should stream the zip file', async () => { let generalAPI = new CommonAPIsApi(adminConfiguration); @@ -1912,7 +1914,7 @@ describe('Spatial join Request', () => { it('OSW Data Generator | Authenticated , When request made with non osw source dataset id, should return bad request', async () => { let oswAPI = new OSWApi(dgConfiguration); let input = Utility.getSpatialJoinInput(); - input.source_dataset_id = apiInput.flex.published_dataset; + input.source_dataset_id = seedData.datasets.flex.published_dataset; await expect(oswAPI.oswSpatialJoin(input)).rejects.toMatchObject({ response: { status: 400 } }); }); @@ -1920,7 +1922,7 @@ describe('Spatial join Request', () => { it('OSW Data Generator | Authenticated , When request made with non osw target dataset id, should return bad request', async () => { let oswAPI = new OSWApi(dgConfiguration); let input = Utility.getSpatialJoinInput(); - input.target_dataset_id = apiInput.pathways.published_dataset; + input.target_dataset_id = seedData.datasets.pathways.published_dataset; await expect(oswAPI.oswSpatialJoin(input)).rejects.toMatchObject({ response: { status: 400 } }); }); @@ -1960,7 +1962,7 @@ 
describe('Spatial join Request', () => { it('Admin | un-authenticated , When request made with valid join input, should return with unauthenticated request', async () => { let oswAPI = new OSWApi(Utility.getAdminConfiguration()); - let bboxRequest = oswAPI.datasetTagRoad(apiInput.osw.published_dataset, uploadedDatasetId); + let bboxRequest = oswAPI.datasetTagRoad(seedData.datasets.osw.published_dataset, uploadedDatasetId); await expect(bboxRequest).rejects.toMatchObject({ response: { status: 401 } }); }); @@ -2015,7 +2017,7 @@ describe('Check spatial join request job completion status', () => { }); describe('Download Spatial join request file', () => { - jest.retryTimes(1, { logErrorsBeforeRetry: true }); + jest.retryTimes(3, { logErrorsBeforeRetry: true }); it('Admin | Authenticated , When request made with job_id, should stream the zip file', async () => { let generalAPI = new CommonAPIsApi(adminConfiguration); diff --git a/src/environment/environment.ts b/src/environment/environment.ts index c02f594..430300f 100644 --- a/src/environment/environment.ts +++ b/src/environment/environment.ts @@ -12,4 +12,8 @@ export const environment = { baseUrl: process.env.SYSTEM_BASE_URL }, environment: process.env.ENVIRONMENT ?? "dev", + default: { + username: process.env.DEFAULT_USERNAME, + password: process.env.DEFAULT_PASSWORD + } } \ No newline at end of file diff --git a/src/global.setup.ts b/src/global.setup.ts index ae2729c..4345831 100644 --- a/src/global.setup.ts +++ b/src/global.setup.ts @@ -10,5 +10,6 @@ export default async function globalSetup() { console.log('Seeding completed successfully.'); } catch (error) { console.error('Global setup error:', error); + process.exit(1); } } \ No newline at end of file diff --git a/src/models/types.ts b/src/models/types.ts index 4cee399..268ca66 100644 --- a/src/models/types.ts +++ b/src/models/types.ts @@ -15,6 +15,33 @@ export interface Users { export interface SeedData { + datasets: { + osw: { + pre_release_dataset: string; + test_dataset: string; + published_dataset: string; + spatial_target_dataset: string; + spatial_source_dataset: string; + }, + flex: { + published_dataset: string; + pre_release_dataset: string; + }, + pathways: { + published_dataset: string; + pre_release_dataset: string; + } + }, + user_not_associated_project: { + tdei_project_group_id: string, + name: string, + }, + user_not_associated_service: [{ + tdei_project_group_id: string, + service_type: string, + service_name: string, + tdei_service_id: string + }], project_group: { tdei_project_group_id: string, name: string, diff --git a/src/seeder.ts b/src/seeder.ts index c28edbe..5dd067b 100644 --- a/src/seeder.ts +++ b/src/seeder.ts @@ -1,14 +1,16 @@ import { Utility } from './utils' import { SeedData, Users } from './models/types' -import axios, { AxiosInstance } from "axios"; +import axios, { AxiosInstance, InternalAxiosRequestConfig } from "axios"; import { environment } from './environment/environment'; import { existsSync } from "fs"; import { readFile, writeFile } from "fs/promises"; +import { CommonAPIsApi, Configuration, GTFSFlexApi, GTFSPathwaysApi, JobDetails, JobDetailsStatusEnum, OSWApi } from 'tdei-client'; export class Seeder { private client: APIUtility; private readonly roles: Array; private readonly data_types: Array; + private dgConfiguration: Configuration = {}; constructor() { this.client = new APIUtility() @@ -29,11 +31,19 @@ export class Seeder { console.log('Seeding...'); await this.client.login() let seedData: SeedData = {} as any; - const project_group = 
-        seedData.project_group = project_group
-        const services = await this.createService(project_group.tdei_project_group_id)
-        seedData.services = services
-        seedData.users = await this.assignUserRoles(project_group.tdei_project_group_id)
+        //User associated project group and service
+        const project_group = await this.client.createProjectGroup();
+        seedData.project_group = project_group;
+        const services = await this.createService(project_group.tdei_project_group_id);
+        seedData.services = services;
+        //User not associated project group and service
+        const project_group_2 = await this.client.createProjectGroup();
+        seedData.user_not_associated_project = project_group_2;
+        const services_2 = await this.createService(project_group_2.tdei_project_group_id);
+        seedData.user_not_associated_service = services_2;
+
+        // seedData.users = await this.assignUserRoles(project_group.tdei_project_group_id)
+        seedData.users = await this.createUserWithPromo(project_group.tdei_project_group_id);
         let userProfile = (await this.getUserProfile((seedData.users as Users).poc.username));
         seedData.api_key = userProfile.apiKey;
         if (seedData.users.api_key_tester) {
@@ -41,6 +51,42 @@
             seedData.api_key_tester = apiTesterProfile.apiKey;
         }
+        //seed osw datasets
+        this.dgConfiguration = new Configuration({
+            username: seedData.users.osw_data_generator.username,
+            password: seedData.users.osw_data_generator.password,
+            basePath: environment.system.baseUrl
+        });
+        console.log("Configuration ", this.dgConfiguration);
+        await Utility.setAuthToken(this.dgConfiguration);
+
+        seedData.datasets = {} as any;
+        console.log("seed osw datasets");
+        const oswDatasets = await this.client.setupOSWDatasets(project_group.tdei_project_group_id, services.find(x => x.service_type == "osw")!.tdei_service_id, this.dgConfiguration);
+        seedData.datasets.osw = oswDatasets;
+
+        //Seed flex datasets
+        this.dgConfiguration = new Configuration({
+            username: seedData.users.flex_data_generator.username,
+            password: seedData.users.flex_data_generator.password,
+            basePath: environment.system.baseUrl
+        });
+        await Utility.setAuthToken(this.dgConfiguration);
+        console.log("seed flex datasets");
+        const flexDatasets = await this.client.setupFlexDatasets(project_group.tdei_project_group_id, services.find(x => x.service_type == "flex")!.tdei_service_id, this.dgConfiguration);
+        seedData.datasets.flex = flexDatasets;
+
+        //Seed pathways datasets
+        this.dgConfiguration = new Configuration({
+            username: seedData.users.pathways_data_generator.username,
+            password: seedData.users.pathways_data_generator.password,
+            basePath: environment.system.baseUrl
+        });
+        await Utility.setAuthToken(this.dgConfiguration);
+        console.log("seed pathways datasets");
+        const pathwaysDatasets = await this.client.setupPathwaysDatasets(project_group.tdei_project_group_id, services.find(x => x.service_type == "pathways")!.tdei_service_id, this.dgConfiguration);
+        seedData.datasets.pathways = pathwaysDatasets;
+
         await this.writeFile(seedData);
         console.info('Seeding complete');
         return seedData;
@@ -58,7 +104,7 @@
     public async getUserProfile(user_name: string): Promise {
         try {
             await this.client.login();
-            const result = await this.client.getUserProfile(user_name, 'Pa$s1word')
+            const result = await this.client.getUserProfile(user_name, environment.default.password!)
return result; } catch (error) { console.log(user_name) @@ -82,7 +128,7 @@ export class Seeder { }] = [] as any; for await (const data_type of this.data_types) { const service = await this.client.createService(project_group_id, data_type) - console.info(`Created Service with ID: ${service.tdei_service_id}`); + console.info(`Created Service with ID and name : ${service.tdei_service_id} ${service.service_name}`); list.push(service); } @@ -93,33 +139,73 @@ export class Seeder { axios.defaults.headers.common.Authorization = null; } - private async assignUserRoles(project_group_id: string): Promise { - console.log('Assigning user roles...'); + // private async assignUserRoles(project_group_id: string): Promise { + // console.log('Assigning user roles...'); + + // const users = Utility.getApiInput().users; + // let usersDictionary = {} as Users; + // try { + // for await (const role of this.roles) { + // await this.client.addPermission(project_group_id, users[role], role) + // console.info(`Added ${role} permission to username: ${users[role]}`) + // usersDictionary[role] = { + // username: users[role], + // password: environment.default.password + // } + // } + // // Assugn api_key_tester user to poc + // if (users.api_key_tester) { + // await this.client.addPermission(project_group_id, users.api_key_tester, 'poc') + // console.info(`Added poc permission to username: ${users.api_key_tester}`) + // usersDictionary['api_key_tester'] = { + // username: users.api_key_tester, + // password: environment.default.password + // } + // } + // //add default user + // usersDictionary['default_user'] = { + // username: users.default_user, + // password: environment.default.password + // } + // return usersDictionary + // } catch (error) { + // console.error('assignUserRoles', error); + // throw error; + // } + // } + + private async createUserWithPromo(project_group_id: string): Promise { + console.log('Create users and register with promo code...'); - const users = Utility.getApiInput().users; let usersDictionary = {} as Users; try { + + const promo_code = await this.client.createPromoCode(project_group_id); + + //Create user for each role using promo code for await (const role of this.roles) { - await this.client.addPermission(project_group_id, users[role], role) - console.info(`Added ${role} permission to username: ${users[role]}`) + const username = await this.client.createUserWithPromo(promo_code); + await this.client.addPermission(project_group_id, username, role) + console.info(`Added ${role} permission to username: ${username}`) usersDictionary[role] = { - username: users[role], - password: 'Pa$s1word' + username: username, + password: environment.default.password } } - // Assugn api_key_tester user to poc - if (users.api_key_tester) { - await this.client.addPermission(project_group_id, users.api_key_tester, 'poc') - console.info(`Added poc permission to username: ${users.api_key_tester}`) - usersDictionary['api_key_tester'] = { - username: users.api_key_tester, - password: 'Pa$s1word' - } + + // Assign api_key_tester user to poc + const api_tester_username = await this.client.createUserWithPromo(promo_code); + await this.client.addPermission(project_group_id, api_tester_username, 'poc') + console.info(`Added poc permission to username: ${api_tester_username}`) + usersDictionary['api_key_tester'] = { + username: api_tester_username, + password: environment.default.password! 
} + //add default user usersDictionary['default_user'] = { - username: users.default_user, - password: 'Pa$s1word' + username: environment.default.username!, + password: environment.default.password! } return usersDictionary } catch (error) { @@ -136,7 +222,7 @@ export class Seeder { // console.info(`Added ${role} permission to username: ${userDetails.username}`) // users[role] = { // username: userDetails.username, - // password: 'Pa$s1word' + // password: environment.default.password // } // } // return users @@ -178,6 +264,25 @@ class APIUtility { } } + async createPromoCode(project_group_id: string): Promise { + const data = Utility.getPromoCodeUpload(); + try { + const resp = await axios({ + method: 'post', + url: `api/v1/project-group/${project_group_id}/referral-codes`, + data: data + }); + if (resp.status === 200) { + console.log('Created Promo code : ', data.code); + return data.code; + } else { + throw new Error(`Failed to create promo code, status code: ${resp.status}`); + } + } catch (err: any) { + throw err; + } + } + async createProjectGroup(): Promise<{ tdei_project_group_id: string; name: string; @@ -191,7 +296,7 @@ class APIUtility { }); data.tdei_project_group_id = resp?.data?.data; - + console.log('Created Project group with ID & name : ', data.tdei_project_group_id, data.project_group_name); return { tdei_project_group_id: data.tdei_project_group_id, name: data.project_group_name }; } catch (err: any) { throw err; @@ -211,6 +316,22 @@ class APIUtility { } } + async createUserWithPromo(promo_code: string): Promise { + let data: any = Utility.getUserUpload(); + data.code = promo_code; + try { + const resp = await axios({ + method: 'post', + url: `/api/v1/register`, + data: data + }); + console.log('Created User with username : ', data.email); + return data.email; + } catch (err: any) { + throw err; + } + } + async addPermission(project_group_id: string, username: string, role: string): Promise { try { const resp = await axios({ @@ -271,4 +392,290 @@ class APIUtility { throw err; } } + + + oswUploadRequestInterceptor = (request: InternalAxiosRequestConfig, tdei_project_group_id: string, service_id: string, datasetName: string, changestName: string, metafileName: string) => { + if ( + request.url?.includes(`/api/v1/osw/upload/${tdei_project_group_id}/${service_id}`) + ) { + let data = request.data as FormData; + let datasetFile = data.get("dataset") as File; + let metaFile = data.get('metadata') as File; + let changesetFile = data.get('changeset') as File; + delete data['dataset']; + delete data['metadata']; + delete data['changeset']; + data.set('dataset', datasetFile, datasetName); + data.set('metadata', metaFile, metafileName); + data.set('changeset', changesetFile, changestName); + } + return request; + }; + + cloneDatasetRequestInterceptor = (request: InternalAxiosRequestConfig, tdei_dataset_id: string, tdei_project_group_id: string, tdei_service_id: string, datasetName: string) => { + if ( + request.url?.includes(`/api/v1/dataset/clone/${tdei_dataset_id}/${tdei_project_group_id}/${tdei_service_id}`) + ) { + let data = request.data as FormData; + let metaFile = data.get("file") as File; + delete data['file']; + data.set('file', metaFile, datasetName); + } + return request; + }; + + async setupOSWDatasets(tdei_project_group_id: string, service_id: string, dgConfiguration: any): Promise { + let oswDatasets: { + pre_release_dataset: string; + test_dataset: string; + published_dataset: string; + spatial_target_dataset: string; + spatial_source_dataset: string; + } = {} as 
any; + let oswAPI = new OSWApi(dgConfiguration); + console.log("osw api config", dgConfiguration) + let metaToUpload = Utility.getMetadataBlob("osw"); + let changesetToUpload = Utility.getChangesetBlob(); + let dataset = Utility.getOSWBlob(); + try { + console.log('Uploading osw with project id and service id ', tdei_project_group_id, service_id) + //upload pre-release dataset + const uploadInterceptor = axios.interceptors.request.use((req: InternalAxiosRequestConfig) => this.oswUploadRequestInterceptor(req, tdei_project_group_id, service_id, 'osw-valid.zip', 'changeset.zip', 'metadata.json')) + const uploadFileResponse = await oswAPI.uploadOswFileForm(dataset, metaToUpload, changesetToUpload, tdei_project_group_id, service_id); + + const uploadedJobId = uploadFileResponse.data; + console.log("seed uploaded with job_id", uploadedJobId); + axios.interceptors.request.eject(uploadInterceptor); + + //wait until the job is completed + while (true) { + let jobDetails = await this.waitForJobCompletion(uploadedJobId, tdei_project_group_id, dgConfiguration); + if (jobDetails[0].status === JobDetailsStatusEnum.COMPLETED) { + let uploadedDatasetId = jobDetails[0].response_props.tdei_dataset_id; + oswDatasets.pre_release_dataset = uploadedDatasetId; + console.log("seed pre_release_dataset", oswDatasets.pre_release_dataset); + break; + } + await new Promise(resolve => setTimeout(resolve, 10000)); + } + + //Clone pre-release dataset to test dataset + let metaToUpload_2 = Utility.getMetadataBlob("osw"); + let generalAPI = new CommonAPIsApi(dgConfiguration); + const cloneInterceptor = axios.interceptors.request.use((req: InternalAxiosRequestConfig) => this.cloneDatasetRequestInterceptor(req, oswDatasets.pre_release_dataset, tdei_project_group_id, service_id, 'metadata.json')) + const cloneFileResponse = await generalAPI.cloneDatasetForm(metaToUpload_2, oswDatasets.pre_release_dataset, tdei_project_group_id, service_id); + axios.interceptors.request.eject(cloneInterceptor); + const clonedDatasetId = cloneFileResponse.data; + oswDatasets.test_dataset = clonedDatasetId; + console.log("seed test_dataset tdei_dataset_id", oswDatasets.test_dataset); + + //Clone pre-release dataset and publish it + let metaToUpload_3 = Utility.getMetadataBlob("osw"); + let generalAPI_2 = new CommonAPIsApi(dgConfiguration); + const cloneInterceptor_2 = axios.interceptors.request.use((req: InternalAxiosRequestConfig) => this.cloneDatasetRequestInterceptor(req, oswDatasets.pre_release_dataset, tdei_project_group_id, service_id, 'metadata.json')) + const cloneFileResponse_2 = await generalAPI_2.cloneDatasetForm(metaToUpload_3, oswDatasets.pre_release_dataset, tdei_project_group_id, service_id); + axios.interceptors.request.eject(cloneInterceptor_2); + oswDatasets.published_dataset = cloneFileResponse_2.data; + console.log("seed published_dataset tdei_dataset_id ", oswDatasets.published_dataset); + + let publishResponse = await oswAPI.publishOswFile(oswDatasets.published_dataset); + const publishJobId = publishResponse.data; + console.log("seed published_dataset publish_job_id", publishJobId); + while (true) { + let jobDetails = await this.waitForJobCompletion(publishJobId, tdei_project_group_id, dgConfiguration); + if (jobDetails[0].status === JobDetailsStatusEnum.COMPLETED) { + console.log("seed published_dataset completed"); + break; + } + await new Promise(resolve => setTimeout(resolve, 10000)); + } + + //Clone published dataset to spatial target dataset + let metaToUpload_4 = Utility.getMetadataBlob("osw"); + let generalAPI_3 = 
new CommonAPIsApi(dgConfiguration); + const cloneInterceptor_3 = axios.interceptors.request.use((req: InternalAxiosRequestConfig) => this.cloneDatasetRequestInterceptor(req, oswDatasets.published_dataset, tdei_project_group_id, service_id, 'metadata.json')) + const cloneFileResponse_3 = await generalAPI_3.cloneDatasetForm(metaToUpload_4, oswDatasets.published_dataset, tdei_project_group_id, service_id); + axios.interceptors.request.eject(cloneInterceptor_3); + const clonedDatasetId_3 = cloneFileResponse_3.data; + console.log("seed spatial_target_dataset tdei_dataset_id", clonedDatasetId_3); + oswDatasets.spatial_target_dataset = clonedDatasetId_3; + console.log("seed spatial_target_dataset tdei_dataset_id", oswDatasets.spatial_target_dataset); + + //Clone published dataset to spatial source dataset + let metaToUpload_5 = Utility.getMetadataBlob("osw"); + let generalAPI_4 = new CommonAPIsApi(dgConfiguration); + const cloneInterceptor_4 = axios.interceptors.request.use((req: InternalAxiosRequestConfig) => this.cloneDatasetRequestInterceptor(req, oswDatasets.published_dataset, tdei_project_group_id, service_id, 'metadata.json')) + const cloneFileResponse_4 = await generalAPI_4.cloneDatasetForm(metaToUpload_5, oswDatasets.published_dataset, tdei_project_group_id, service_id); + axios.interceptors.request.eject(cloneInterceptor_4); + const clonedDatasetId_4 = cloneFileResponse_4.data; + console.log("seed spatial_source_dataset tdei_dataset_id", clonedDatasetId_4); + oswDatasets.spatial_source_dataset = clonedDatasetId_4; + console.log("seed spatial_source_dataset tdei_dataset_id", oswDatasets.spatial_source_dataset); + + return oswDatasets; + + } catch (e) { + console.log(e); + } + } + + flexUploadRequestInterceptor = (request: InternalAxiosRequestConfig, tdei_project_group_id: string, service_id: string, datasetName: string, changestName: string, metafileName: string) => { + if ( + request.url?.includes(`/api/v1/gtfs-flex/upload/${tdei_project_group_id}/${service_id}`) + ) { + let data = request.data as FormData; + let datasetFile = data.get("dataset") as File; + let metaFile = data.get('metadata') as File; + let changesetFile = data.get('changeset') as File; + delete data['dataset']; + delete data['metadata']; + delete data['changeset']; + data.set('dataset', datasetFile, datasetName); + data.set('metadata', metaFile, metafileName); + data.set('changeset', changesetFile, changestName); + + } + return request; + }; + + async setupFlexDatasets(tdei_project_group_id: string, service_id: string, dgConfiguration: any): Promise { + let flexDatasets: { + published_dataset: string; + pre_release_dataset: string; + } = {} as any; + let flexAPI = new GTFSFlexApi(dgConfiguration); + let metaToUpload = Utility.getMetadataBlob("flex"); + let changesetToUpload = Utility.getChangesetBlob(); + let dataset = Utility.getFlexBlob(); + try { + //upload pre-release dataset + const uploadInterceptor = axios.interceptors.request.use((req: InternalAxiosRequestConfig) => this.flexUploadRequestInterceptor(req, tdei_project_group_id, service_id, 'flex-valid.zip', 'changeset.zip', 'metadata.json')) + const uploadFileResponse = await flexAPI.uploadGtfsFlexFileForm(dataset, metaToUpload, changesetToUpload, tdei_project_group_id, service_id); + + const uploadedJobId = uploadFileResponse.data; + console.log("seed flex pre_release_dataset uploaded job_id", uploadedJobId); + axios.interceptors.request.eject(uploadInterceptor); + + //wait until the job is completed + while (true) { + let jobDetails = await 
this.waitForJobCompletion(uploadedJobId, tdei_project_group_id, dgConfiguration); + if (jobDetails[0].status === JobDetailsStatusEnum.COMPLETED) { + flexDatasets.pre_release_dataset = jobDetails[0].response_props.tdei_dataset_id; + console.log("seed flex pre_release_dataset tdei_dataset_id", flexDatasets.pre_release_dataset); + break; + } + await new Promise(resolve => setTimeout(resolve, 10000)); + } + + //Clone pre-release dataset to test dataset + let metaToUpload_2 = Utility.getMetadataBlob("flex"); + let generalAPI = new CommonAPIsApi(dgConfiguration); + const cloneInterceptor = axios.interceptors.request.use((req: InternalAxiosRequestConfig) => this.cloneDatasetRequestInterceptor(req, flexDatasets.pre_release_dataset, tdei_project_group_id, service_id, 'metadata.json')) + const cloneFileResponse = await generalAPI.cloneDatasetForm(metaToUpload_2, flexDatasets.pre_release_dataset, tdei_project_group_id, service_id); + axios.interceptors.request.eject(cloneInterceptor); + const clonedDatasetId = cloneFileResponse.data; + flexDatasets.published_dataset = clonedDatasetId; + console.log("seed flex published_dataset tdei_dataset_id", flexDatasets.published_dataset); + + //publish test dataset + let publishResponse = await flexAPI.publishGtfsFlexFile(clonedDatasetId); + const publishJobId = publishResponse.data; + console.log("seed flex published_dataset publish_job_id", publishJobId); + while (true) { + let jobDetails = await this.waitForJobCompletion(publishJobId, tdei_project_group_id, dgConfiguration); + if (jobDetails[0].status === JobDetailsStatusEnum.COMPLETED) { + console.log("seed flex published_dataset completed"); + break; + } + await new Promise(resolve => setTimeout(resolve, 10000)); + } + return flexDatasets; + } catch (e) { + console.log(e); + } + } + + + pathwaysUploadRequestInterceptor = (request: InternalAxiosRequestConfig, tdei_project_group_id: string, service_id: string, datasetName: string, changestName: string, metafileName: string) => { + if ( + request.url?.includes(`/api/v1/gtfs-pathways/upload/${tdei_project_group_id}/${service_id}`) + ) { + let data = request.data as FormData; + let datasetFile = data.get("dataset") as File; + let metaFile = data.get('metadata') as File; + let changesetFile = data.get('changeset') as File; + delete data['dataset']; + delete data['metadata']; + delete data['changeset']; + data.set('dataset', datasetFile, datasetName); + data.set('metadata', metaFile, metafileName); + data.set('changeset', changesetFile, changestName); + } + return request; + }; + + async setupPathwaysDatasets(tdei_project_group_id: string, service_id: string, dgConfiguration: any): Promise { + let pathwaysDatasets: { + published_dataset: string; + pre_release_dataset: string; + } = {} as any; + + let pathwaysAPI = new GTFSPathwaysApi(dgConfiguration); + let metaToUpload = Utility.getMetadataBlob("pathways"); + let changesetToUpload = Utility.getChangesetBlob(); + let dataset = Utility.getPathwaysBlob(); + try { + //upload pre-release dataset + const uploadInterceptor = axios.interceptors.request.use((req: InternalAxiosRequestConfig) => this.pathwaysUploadRequestInterceptor(req, tdei_project_group_id, service_id, 'pathways-valid.zip', 'changeset.zip', 'metadata.json')) + const uploadFileResponse = await pathwaysAPI.uploadGtfsPathwaysFileForm(dataset, metaToUpload, changesetToUpload, tdei_project_group_id, service_id); + + const uploadedJobId = uploadFileResponse.data; + console.log("seed pathways pre_release_dataset uploaded job_id", uploadedJobId); + 
axios.interceptors.request.eject(uploadInterceptor); + + //wait until the job is completed + while (true) { + let jobDetails = await this.waitForJobCompletion(uploadedJobId, tdei_project_group_id, dgConfiguration); + if (jobDetails[0].status === JobDetailsStatusEnum.COMPLETED) { + pathwaysDatasets.pre_release_dataset = jobDetails[0].response_props.tdei_dataset_id; + console.log("seed pathways pre_release_dataset tdei_dataset_id", pathwaysDatasets.pre_release_dataset); + break; + } + await new Promise(resolve => setTimeout(resolve, 10000)); + } + + //Clone pre-release dataset to test dataset + let metaToUpload_2 = Utility.getMetadataBlob("pathways"); + let generalAPI = new CommonAPIsApi(dgConfiguration); + const cloneInterceptor = axios.interceptors.request.use((req: InternalAxiosRequestConfig) => this.cloneDatasetRequestInterceptor(req, pathwaysDatasets.pre_release_dataset, tdei_project_group_id, service_id, 'metadata.json')) + const cloneFileResponse = await generalAPI.cloneDatasetForm(metaToUpload_2, pathwaysDatasets.pre_release_dataset, tdei_project_group_id, service_id); + axios.interceptors.request.eject(cloneInterceptor); + const clonedDatasetId = cloneFileResponse.data; + pathwaysDatasets.published_dataset = clonedDatasetId; + console.log("seed pathways published_dataset tdei_dataset_id", pathwaysDatasets.published_dataset); + + //publish test dataset + let publishResponse = await pathwaysAPI.publishGtfsPathwaysFile(pathwaysDatasets.published_dataset); + const publishJobId = publishResponse.data; + console.log("seed pathways test_dataset publish_job_id", publishJobId); + while (true) { + let jobDetails = await this.waitForJobCompletion(publishJobId, tdei_project_group_id, dgConfiguration); + if (jobDetails[0].status === JobDetailsStatusEnum.COMPLETED) { + console.log("seed pathways published_dataset completed"); + break; + } + await new Promise(resolve => setTimeout(resolve, 10000)); + } + return pathwaysDatasets; + } catch (e) { + console.log(e); + } + } + + async waitForJobCompletion(jobId: string, tdei_project_group_id: string, dgConfiguration: any): Promise { + let generalAPI = new CommonAPIsApi(dgConfiguration); + let uploadStatus = await generalAPI.listJobs(tdei_project_group_id, jobId, true); + return uploadStatus.data; + + } } diff --git a/src/utils.ts b/src/utils.ts index 932c270..885afce 100644 --- a/src/utils.ts +++ b/src/utils.ts @@ -15,12 +15,23 @@ import * as fs from "fs"; import metadata_flex from "../assets/payloads/gtfs-flex/metadata.json"; import metadata_osw from "../assets/payloads/osw/metadata.json"; import metadata_pathways from "../assets/payloads/gtfs-pathways/metadata.json"; -import apiInput from "../api.input.json"; +// import apiInput from "../api.input.json"; import { SeedData } from "./models/types"; /** * Utility class. 
 */
 export class Utility {
+    static getPromoCodeUpload(): any {
+        const from = new Date(); // current time
+        const to = new Date(Date.now() + 60 * 60 * 1000); // add 1 hour (in ms)
+        return {
+            "name": `api_tester_${faker.random.alphaNumeric(10)}`,
+            "type": 1,
+            "valid_from": from.toISOString(),
+            "code": faker.random.alphaNumeric(8).toUpperCase(),
+            "valid_to": to.toISOString()
+        };
+    }
     static get seedData() {
         const seedData = JSON.parse(fs.readFileSync(path.resolve(__dirname, '../seed.data.json'), 'utf-8'));
@@ -38,9 +49,9 @@ export class Utility {
         };
     }
-    static getApiInput() {
-        return apiInput[`${environment.environment}`];
-    }
+    // static getApiInput() {
+    //     return apiInput[`${environment.environment}`];
+    // }
     static getDefaultUserConfiguration(): Configuration {
         return new Configuration({
@@ -409,13 +420,13 @@ export class Utility {
     static getSpatialJoinInput() {
         let model: OswSpatialjoinBody = {
-            target_dataset_id: Utility.getApiInput().osw.spatial_target_dataset,
+            target_dataset_id: this.seedData.datasets.osw.spatial_target_dataset,
             target_dimension: OswSpatialjoinBodyTargetDimensionEnum.Edge,
-            source_dataset_id: Utility.getApiInput().osw.spatial_source_dataset,
-            source_dimension: OswSpatialjoinBodySourceDimensionEnum.Point,
+            source_dataset_id: this.seedData.datasets.osw.spatial_source_dataset,
+            source_dimension: OswSpatialjoinBodySourceDimensionEnum.Edge,
             join_condition: "ST_Contains(ST_Buffer(geometry_target, 5), geometry_source)",
             join_filter_target: "highway='footway' AND footway='sidewalk'",
-            join_filter_source: "highway='street_lamp'",
+            join_filter_source: "highway='footway'",
             aggregate: ["ARRAY_AGG(highway) as my_highway"]
         }
         return model;
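
For specs, the practical effect of this change is that dataset ids and user credentials now come from the generated seed.data.json (read through Utility.seedData) and from the new DEFAULT_USERNAME / DEFAULT_PASSWORD environment values, rather than through Utility.getApiInput() and api.input.json, which are now commented out. A minimal sketch of that usage follows; the import paths and the manual Configuration construction are illustrative only (the existing Utility.getDefaultUserConfiguration() helper may already cover the latter).

import { Configuration } from "tdei-client";
import { environment } from "./environment/environment";
import { Utility } from "./utils";

// Ids generated by the seeder during global setup (seed.data.json).
const seedData = Utility.seedData;
console.log("OSW published dataset:", seedData.datasets.osw.published_dataset);
console.log("Flex pre-release dataset:", seedData.datasets.flex.pre_release_dataset);

// Client configuration for the pre-provisioned default user.
const defaultUserConfig = new Configuration({
    username: environment.default.username!,
    password: environment.default.password!,
    basePath: environment.system.baseUrl
});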
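
The user-seeding path in seeder.ts now provisions its own accounts: APIUtility.createPromoCode posts a referral code for the seeded project group, and createUserWithPromo registers a fresh user with that code before role permissions are assigned. A condensed sketch of those two calls is below; the helper name registerUserWithPromo is illustrative, and it assumes axios's default base URL and Authorization header have already been set by APIUtility.login(), as they are when the seeder runs.

import axios from "axios";
import { Utility } from "./utils";

async function registerUserWithPromo(project_group_id: string): Promise<string> {
    // 1. Create a referral (promo) code scoped to the project group.
    const promo = Utility.getPromoCodeUpload();
    await axios.post(`api/v1/project-group/${project_group_id}/referral-codes`, promo);

    // 2. Register a new user with that code; the generated email becomes the username.
    const user: any = Utility.getUserUpload();
    user.code = promo.code;
    await axios.post(`/api/v1/register`, user);
    return user.email;
}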
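
Upload and publish jobs are polled via CommonAPIsApi.listJobs until they report COMPLETED; in the seeder this happens inside open-ended while (true) loops. If a stuck job should fail the seed run rather than hang the workflow, a bounded wrapper along these lines could be used instead. The function name, timeout, and interval are assumptions, not part of this change; the listJobs call and the COMPLETED check mirror waitForJobCompletion in seeder.ts.

import { CommonAPIsApi, Configuration, JobDetails, JobDetailsStatusEnum } from "tdei-client";

async function pollJobUntilComplete(
    jobId: string,
    tdei_project_group_id: string,
    config: Configuration,
    timeoutMs: number = 10 * 60 * 1000,
    intervalMs: number = 10000
): Promise<JobDetails> {
    const generalAPI = new CommonAPIsApi(config);
    const deadline = Date.now() + timeoutMs;
    while (Date.now() < deadline) {
        // Same call the seeder's waitForJobCompletion makes; the response data is a JobDetails array.
        const jobs = await generalAPI.listJobs(tdei_project_group_id, jobId, true);
        if (jobs.data[0]?.status === JobDetailsStatusEnum.COMPLETED) {
            return jobs.data[0];
        }
        await new Promise(resolve => setTimeout(resolve, intervalMs));
    }
    throw new Error(`Timed out waiting for job ${jobId} after ${timeoutMs} ms`);
}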