diff --git a/compose.yml b/compose.yml index 6cf6ac1de..034413c45 100644 --- a/compose.yml +++ b/compose.yml @@ -280,6 +280,8 @@ services: - DB_USER_API_PASS=${DB_USER_API_PASS} - ENABLE_MOCK_FEATURE_SEEDING=${ENABLE_MOCK_FEATURE_SEEDING} - NUM_MOCK_FEATURE_SUBMISSIONS=${NUM_MOCK_FEATURE_SUBMISSIONS} + - DB_USER_BCGW=${DB_USER_BCGW} + - DB_USER_BCGW_PASS=${DB_USER_BCGW_PASS} volumes: - /opt/app-root/src/node_modules # prevents local node_modules overriding container node_modules networks: diff --git a/database/src/migrations/20250917200000_bcgw_schema_user.ts b/database/src/migrations/20250917200000_bcgw_schema_user.ts new file mode 100644 index 000000000..e322cdb58 --- /dev/null +++ b/database/src/migrations/20250917200000_bcgw_schema_user.ts @@ -0,0 +1,46 @@ +import { Knex } from 'knex'; + +const DB_USER_BCGW_PASS = process.env.DB_USER_BCGW_PASS; +const DB_USER_BCGW = process.env.DB_USER_BCGW; + +/** + * Create bcgw schema and user. + * + * @export + * @param {Knex} knex + * @return {*} {Promise} + */ +export async function up(knex: Knex): Promise { + await knex.raw(` + -- set up bcgw schema + create schema bcgw; + + -- setup bcgw user + create role ${DB_USER_BCGW} login password '${DB_USER_BCGW_PASS}'; + GRANT USAGE ON SCHEMA bcgw TO ${DB_USER_BCGW}; + alter role ${DB_USER_BCGW} set search_path to bcgw; + + -- alter default privileges for the schema owner so that bcgw user is granted access to all future tables, views, and materialized views + ALTER DEFAULT PRIVILEGES FOR ROLE CURRENT_USER IN SCHEMA bcgw + GRANT SELECT ON TABLES TO ${DB_USER_BCGW}; + `); +} + +/** + * Revert changes for bcgw schema and user. 
+ * + * @export + * @param {Knex} knex + * @return {*} {Promise<void>} + */ +export async function down(knex: Knex): Promise<void> { + await knex.raw(` + -- revert default privileges for the schema owner + ALTER DEFAULT PRIVILEGES FOR ROLE CURRENT_USER IN SCHEMA bcgw + REVOKE SELECT ON TABLES FROM ${DB_USER_BCGW}; + + -- drop bcgw user and schema + DROP SCHEMA IF EXISTS bcgw CASCADE; + DROP USER IF EXISTS ${DB_USER_BCGW}; + `); +} diff --git a/database/src/migrations/20251230000000_bcgw_materialised_views.ts b/database/src/migrations/20251230000000_bcgw_materialised_views.ts new file mode 100644 index 000000000..434dad633 --- /dev/null +++ b/database/src/migrations/20251230000000_bcgw_materialised_views.ts @@ -0,0 +1,187 @@ +import { Knex } from 'knex'; + +/** + * Creating materialised views for telemetry and observations datasets to be replicated in the BC Geographic Warehouse. + * + * @export + * @param {Knex} knex + * @return {*} {Promise<void>} + */ +export async function up(knex: Knex): Promise<void> { + await knex.raw(` +CREATE MATERIALIZED VIEW bcgw.telemetry_all AS +WITH deployments AS ( + SELECT + dep.submission_feature_id, + dep.data->>'device_key' AS device_key, + dep.data->>'animal_identifier' AS animal_id + FROM biohub.submission_feature dep + JOIN biohub.feature_type ft_dep + ON dep.feature_type_id = ft_dep.feature_type_id + WHERE ft_dep.name = 'telemetry_deployment' + AND dep.record_end_date IS NULL +) +SELECT + sf.submission_feature_id AS Feature_ID, + d.animal_id, + -- Contingent on Feature Array: Add columns Species Code, Species english name, species scientific name, Sex, Ecological Unit + d.device_key, + (sf.data->>'timestamp')::timestamptz AS DATETIME, + (EXTRACT(YEAR FROM (sf.data->>'timestamp')::timestamptz))::int AS YEAR, + (sf.data->>'latitude')::numeric AS Latitude, + (sf.data->>'longitude')::numeric AS Longitude, + (sf.data->>'dop')::numeric AS dop, + CASE + WHEN sf.submission_feature_id IN ( + SELECT submission_feature_id + FROM 
biohub.submission_feature_security + ) THEN 'Secured' + ELSE 'Open' + END AS SECURITY + -- contingent on feature array: join to dataset and get the survey name and id, and the study area id +FROM biohub.submission_feature sf +JOIN biohub.feature_type ft + ON sf.feature_type_id = ft.feature_type_id +LEFT JOIN deployments d + ON d.submission_feature_id = sf.parent_submission_feature_id +WHERE ft.name = 'telemetry' + AND sf.record_end_date IS NULL + AND (sf.data->>'timestamp')::timestamptz <= (NOW() - INTERVAL '4 months'); + `); + + await knex.raw(` + COMMENT ON COLUMN bcgw.telemetry_all.Feature_ID IS 'System generated surrogate primary key identifier'; + COMMENT ON COLUMN bcgw.telemetry_all.Latitude IS 'The latitude of the GPS location'; + COMMENT ON COLUMN bcgw.telemetry_all.Longitude IS 'The longitude of the GPS location'; + COMMENT ON COLUMN bcgw.telemetry_all.DATETIME IS 'The date and time that the GPS location was recorded'; + COMMENT ON COLUMN bcgw.telemetry_all.YEAR IS 'The year that the GPS location was recorded'; + COMMENT ON COLUMN bcgw.telemetry_all.dop IS 'The dilution of precision'; + COMMENT ON COLUMN bcgw.telemetry_all.device_key IS 'The vendor and device serial'; + COMMENT ON COLUMN bcgw.telemetry_all.animal_id IS 'The identifier of the animal wearing the telemetry device'; + COMMENT ON COLUMN bcgw.telemetry_all.SECURITY IS 'The security status of the feature'; + `); + + await knex.raw(` +CREATE MATERIALIZED VIEW bcgw.telemetry_public AS +WITH deployments AS ( + SELECT + dep.submission_feature_id, + dep.data->>'device_key' AS device_key, + dep.data->>'animal_identifier' AS animal_id + FROM biohub.submission_feature dep + JOIN biohub.feature_type ft_dep + ON dep.feature_type_id = ft_dep.feature_type_id + WHERE ft_dep.name = 'telemetry_deployment' + AND dep.record_end_date IS NULL +) +SELECT + sf.submission_feature_id AS Feature_ID, + d.animal_id, + -- Contingent on Feature Array: Add columns Species Code, Species english name, species scientific name, 
Sex, Ecological Unit + d.device_key, + (sf.data->>'timestamp')::timestamptz AS DATETIME, + (EXTRACT(YEAR FROM (sf.data->>'timestamp')::timestamptz))::int AS YEAR, + (sf.data->>'latitude')::numeric AS Latitude, + (sf.data->>'longitude')::numeric AS Longitude, + (sf.data->>'dop')::numeric AS dop + -- contingent on feature array: join to dataset and get the survey name and id, and the study area id +FROM biohub.submission_feature sf +JOIN biohub.feature_type ft + ON sf.feature_type_id = ft.feature_type_id +LEFT JOIN deployments d + ON d.submission_feature_id = sf.parent_submission_feature_id +WHERE ft.name = 'telemetry' + AND sf.record_end_date IS NULL + AND sf.submission_feature_id NOT IN ( + SELECT submission_feature_id + FROM biohub.submission_feature_security + ) + AND (sf.data->>'timestamp')::timestamptz <= (NOW() - INTERVAL '4 months'); + `); + + await knex.raw(` + COMMENT ON COLUMN bcgw.telemetry_public.Feature_ID IS 'System generated surrogate primary key identifier'; + COMMENT ON COLUMN bcgw.telemetry_public.Latitude IS 'The latitude of the GPS location'; + COMMENT ON COLUMN bcgw.telemetry_public.Longitude IS 'The longitude of the GPS location'; + COMMENT ON COLUMN bcgw.telemetry_public.DATETIME IS 'The date and time that the GPS location was recorded'; + COMMENT ON COLUMN bcgw.telemetry_public.YEAR IS 'The year that the GPS location was recorded'; + COMMENT ON COLUMN bcgw.telemetry_public.dop IS 'The dilution of precision'; + COMMENT ON COLUMN bcgw.telemetry_public.device_key IS 'The vendor and device serial'; + COMMENT ON COLUMN bcgw.telemetry_public.animal_id IS 'The identifier of the animal wearing the telemetry device'; + `); + + await knex.raw(` +CREATE MATERIALIZED VIEW bcgw.observations_public AS +WITH measurements AS ( + SELECT + m.parent_submission_feature_id AS observation_id, + (m.data->>'sex')::text AS sex, + (m.data->>'life_stage')::text AS life_stage, + (m.data->>'measurement_type')::text AS measurement_type, + 
(m.data->>'measurement_value')::text AS measurement_value + FROM biohub.submission_feature m + JOIN biohub.feature_type ft_m + ON m.feature_type_id = ft_m.feature_type_id + WHERE ft_m.name = 'measurement' + AND m.record_end_date IS NULL +) +SELECT + sf.submission_feature_id AS Feature_ID, + (sf.data->>'timestamp')::timestamptz AS DATETIME, + (EXTRACT(YEAR FROM (sf.data->>'timestamp')::timestamptz))::int AS YEAR, + public.ST_Y(public.ST_GeomFromGeoJSON(sf.data->>'geometry')) AS Latitude, + public.ST_X(public.ST_GeomFromGeoJSON(sf.data->>'geometry')) AS Longitude, + (sf.data->>'sign')::text AS sign, + (sf.data->>'count')::int AS count, + (sf.data->>'taxon_id')::int AS taxon_id, + t.itis_scientific_name AS scientific_name, + t.common_name AS common_name, + meas.sex, + meas.life_stage +FROM biohub.submission_feature sf +JOIN biohub.feature_type ft + ON sf.feature_type_id = ft.feature_type_id +LEFT JOIN measurements meas + ON meas.observation_id = sf.submission_feature_id +LEFT JOIN biohub.taxon t + ON t.itis_tsn = (sf.data->>'taxon_id')::int +WHERE ft.name = 'species_observation' + AND sf.record_end_date IS NULL + AND sf.submission_feature_id NOT IN ( + SELECT submission_feature_id + FROM biohub.submission_feature_security + ); + `); + + await knex.raw(` + COMMENT ON COLUMN bcgw.observations_public.Feature_ID IS 'System generated surrogate primary key identifier'; + COMMENT ON COLUMN bcgw.observations_public.Latitude IS 'The latitude of the observation location'; + COMMENT ON COLUMN bcgw.observations_public.Longitude IS 'The longitude of the observation location'; + COMMENT ON COLUMN bcgw.observations_public.DATETIME IS 'The timestamp of the observation'; + COMMENT ON COLUMN bcgw.observations_public.YEAR IS 'The year of the observation'; + COMMENT ON COLUMN bcgw.observations_public.sign IS 'Type of sign associated with the observation'; + COMMENT ON COLUMN bcgw.observations_public.count IS 'Count value for the observation'; + COMMENT ON COLUMN 
bcgw.observations_public.taxon_id IS 'Taxonomic identifier extracted from the observation payload'; + COMMENT ON COLUMN bcgw.observations_public.scientific_name IS 'Scientific name from taxon table linked via ITIS TSN'; + COMMENT ON COLUMN bcgw.observations_public.common_name IS 'Common name from taxon table linked via ITIS TSN'; + `); +} + +/** + * Revert materialized view. + * + * @export + * @param {Knex} knex + * @return {*} {Promise<void>} + */ +export async function down(knex: Knex): Promise<void> { + await knex.raw(` + DROP MATERIALIZED VIEW IF EXISTS bcgw.telemetry_all; + `); + await knex.raw(` + DROP MATERIALIZED VIEW IF EXISTS bcgw.telemetry_public; + `); + await knex.raw(` + DROP MATERIALIZED VIEW IF EXISTS bcgw.observations_public; + `); +} diff --git a/database/src/seeds/04_mock_test_data.ts b/database/src/seeds/04_mock_test_data.ts index 46e4711c5..e8fbe41fc 100644 --- a/database/src/seeds/04_mock_test_data.ts +++ b/database/src/seeds/04_mock_test_data.ts @@ -48,6 +48,8 @@ export async function seed(knex: Knex): Promise<void> { SET SEARCH_PATH = 'biohub','public'; `); + // Ensure there are mock taxonomy records for animals/observations to reference + await ensureTaxonomySeed(trx); for (let i = 0; i < NUM_MOCK_FEATURE_SUBMISSIONS; i++) { await insertRecord(trx); // pass the transaction instead of knex } @@ -68,6 +70,26 @@ const insertRecord = async (knex: Knex) => { // Dataset const parent_submission_feature_id1 = await insertDatasetRecord(knex, { submission_id }); + // Telemetry Deployments + const deploymentIds: number[] = []; + const deviceInfos: { submission_feature_id: number; device_id: string }[] = []; + for (let i = 0; i < 5; i++) { + const deploymentId = await insertTelemetryDeployment(knex, { + submission_id, + parent_submission_feature_id: parent_submission_feature_id1 + }); + deploymentIds.push(deploymentId); + + // Devices under deployment + for (let j = 0; j < 2; j++) { + const deviceInfo = await insertTelemetryDevice(knex, { + submission_id, + 
parent_submission_feature_id: deploymentId + }); + deviceInfos.push(deviceInfo); + } + } + // Sample Sites and their children const sampleSitePromises = Array.from({ length: 10 }).map(async () => { const parent_submission_feature_id2 = await insertSampleSiteRecord(knex, { @@ -76,7 +98,7 @@ const insertRecord = async (knex: Knex) => { }); // Animals - const animalPromises = Array.from({ length: 2 }).map(() => + const animalPromises = Array.from({ length: 5 }).map(() => insertAnimalRecord(knex, { submission_id, parent_submission_feature_id: parent_submission_feature_id2 }) ); @@ -90,9 +112,17 @@ const insertRecord = async (knex: Knex) => { }); // Telemetry - const telemetryPromises = Array.from({ length: 100 }).map(() => - insertTelemetryRecord(knex, { submission_id, parent_submission_feature_id: parent_submission_feature_id1 }) - ); + const possibleParents = [...deploymentIds, ...deviceInfos.map((d) => d.submission_feature_id)]; + const telemetryPromises = Array.from({ length: 100 }).map(() => { + const randomParent = possibleParents[Math.floor(Math.random() * possibleParents.length)]; + const isDevice = deviceInfos.some((d) => d.submission_feature_id === randomParent); + const deviceInfo = isDevice ? 
deviceInfos.find((d) => d.submission_feature_id === randomParent) : undefined; + return insertTelemetryRecord(knex, { + submission_id, + parent_submission_feature_id: randomParent, + device_id: deviceInfo?.device_id + }); + }); // Wait for all sample sites and telemetry to complete concurrently await Promise.all([...sampleSitePromises, ...telemetryPromises]); @@ -184,18 +214,23 @@ export const insertObservationRecord = async ( knex: Knex, options: { submission_id: number; parent_submission_feature_id: number } ): Promise => { + const taxonId = await getRandomTaxonId(knex); + const response = await knex.raw( `${insertSubmissionFeature({ submission_id: options.submission_id, parent_submission_feature_id: options.parent_submission_feature_id, feature_type: 'species_observation', data: { - taxon_id: faker.number.int({ min: 10000, max: 99999 }), + taxon_id: taxonId, geometry: random.point( 1, // number of features in feature collection [-135.878906, 48.617424, -114.433594, 60.664785] // bbox constraint )['features'][0]['geometry'], - count: faker.number.int({ min: 0, max: 100 }) + count: faker.number.int({ min: 0, max: 100 }), + // species observation-specific properties + timestamp: faker.date.between({ from: '2020-01-01T00:00:00.000Z', to: new Date().toISOString() }).toISOString(), + sign: faker.helpers.arrayElement(['tracks', 'scat', 'sighting', 'other']) } })}` ); @@ -212,13 +247,43 @@ export const insertObservationRecord = async ( await knex.raw(`${insertSearchNumber({ submission_feature_id })}`); await knex.raw(`${insertSearchNumber({ submission_feature_id })}`); - await knex.raw(`${insertSearchStringTaxonomy({ submission_feature_id })}`); + await knex.raw(`${insertSearchStringTaxonomy({ submission_feature_id, taxon_id: taxonId })}`); // await knex.raw(`${insertSearchStartDatetime({ submission_feature_id })}`); // await knex.raw(`${insertSearchEndDatetime({ submission_feature_id })}`); await knex.raw(`${insertSpatialPoint({ submission_feature_id })}`); + // attach 
a measurement child record (sex & life stage) to the observation + await insertMeasurementRecord(knex, { + submission_id: options.submission_id, + parent_submission_feature_id: submission_feature_id + }); + + return submission_feature_id; +}; + +export const insertMeasurementRecord = async ( + knex: Knex, + options: { submission_id: number; parent_submission_feature_id: number } +): Promise => { + const response = await knex.raw( + `${insertSubmissionFeature({ + submission_id: options.submission_id, + parent_submission_feature_id: options.parent_submission_feature_id, + feature_type: 'measurement', + data: { + sex: faker.helpers.arrayElement(['male', 'female', 'unknown']), + life_stage: faker.helpers.arrayElement(['adult', 'juvenile', 'unknown']) + } + })}` + ); + const submission_feature_id = response.rows[0].submission_feature_id; + await knex.raw(`${insertSearchString({ submission_feature_id })}`); + await knex.raw(`${insertSearchString({ submission_feature_id })}`); + await knex.raw(`${insertSearchNumber({ submission_feature_id })}`); + await knex.raw(`${insertSearchNumber({ submission_feature_id })}`); + return submission_feature_id; }; @@ -226,6 +291,8 @@ const insertAnimalRecord = async ( knex: Knex, options: { submission_id: number; parent_submission_feature_id: number } ): Promise => { + const taxonId = await getRandomTaxonId(knex); + const response = await knex.raw( `${insertSubmissionFeature({ submission_id: options.submission_id, @@ -234,7 +301,7 @@ const insertAnimalRecord = async ( data: { species: faker.animal.type(), count: faker.number.int({ min: 0, max: 100 }), - taxon_id: faker.number.int({ min: 10000, max: 99999 }), + taxon_id: taxonId, start_date: faker.date.past().toISOString(), end_date: faker.date.future().toISOString() } @@ -253,7 +320,7 @@ const insertAnimalRecord = async ( await knex.raw(`${insertSearchNumber({ submission_feature_id })}`); await knex.raw(`${insertSearchNumber({ submission_feature_id })}`); - await 
knex.raw(`${insertSearchStringTaxonomy({ submission_feature_id })}`); + await knex.raw(`${insertSearchStringTaxonomy({ submission_feature_id, taxon_id: taxonId })}`); await knex.raw(`${insertSearchStartDatetime({ submission_feature_id })}`); await knex.raw(`${insertSearchEndDatetime({ submission_feature_id })}`); @@ -299,7 +366,16 @@ export const insertSubmission = (includeSecurityReviewTimestamp: boolean, includ export const insertSubmissionFeature = (options: { submission_id: number; parent_submission_feature_id: number | null; - feature_type: 'dataset' | 'sample_site' | 'species_observation' | 'animal' | 'artifact' | 'telemetry'; + feature_type: + | 'dataset' + | 'sample_site' + | 'species_observation' + | 'animal' + | 'artifact' + | 'telemetry' + | 'telemetry_deployment' + | 'telemetry_device' + | 'measurement'; data: { [key: string]: any }; }) => ` INSERT INTO submission_feature @@ -353,19 +429,19 @@ const insertSearchNumber = (options: { submission_feature_id: number }) => ` ); `; -const insertSearchStringTaxonomy = (options: { submission_feature_id: number }) => ` - INSERT INTO search_string - ( - submission_feature_id, - feature_property_id, - value - ) - values - ( - ${options.submission_feature_id}, - (select feature_property_id from feature_property where name = 'taxon_id'), - $$${faker.number.int({ min: 10000, max: 99999 })}$$ - ); +const insertSearchStringTaxonomy = (options: { submission_feature_id: number; taxon_id?: number }) => ` + INSERT INTO search_string + ( + submission_feature_id, + feature_property_id, + value + ) + values + ( + ${options.submission_feature_id}, + (select feature_property_id from feature_property where name = 'taxon_id'), + $$${options.taxon_id ?? 
faker.number.int({ min: 10000, max: 99999 })}$$ + ); `; const insertSearchStartDatetime = (options: { submission_feature_id: number }) => ` @@ -450,17 +526,137 @@ const randomIntFromInterval = (min: number, max: number) => { return Math.floor(Math.random() * (max - min + 1) + min); }; -export const insertTelemetryRecord = async ( +/** + * Ensure the taxonomy table has a set of mock taxon records (itis_tsn + scientific name). + */ +const ensureTaxonomySeed = async (knex: Knex) => { + const desiredCount = 5; + + const countRes = await knex.raw(`SELECT count(*)::int as c FROM taxon`); + const existing = countRes.rows?.[0]?.c || 0; + + if (existing >= desiredCount) { + return; + } + + const toCreate = desiredCount - existing; + const tsnSet = new Set(); + while (tsnSet.size < toCreate) { + tsnSet.add(faker.number.int({ min: 10000, max: 99999 })); + } + + const valuesSql = Array.from(tsnSet) + .map((tsn) => { + const sci = faker.lorem.word().replace(/'/g, "''"); + const common = faker.animal.type().replace(/'/g, "''"); + const itisData = JSON.stringify({ source: 'mock' }).replace(/'/g, "''"); + return `(${tsn}, $$${sci}$$, $$${common}$$, $$${itisData}$$::jsonb, now(), (SELECT system_user_id from "system_user" where user_identifier = 'SIMS'))`; + }) + .join(',\n'); + + const sql = ` + INSERT INTO taxon (itis_tsn, itis_scientific_name, common_name, itis_data, itis_update_date, create_user) + VALUES + ${valuesSql}; + `; + + await knex.raw(sql); +}; + +const getRandomTaxonId = async (knex: Knex): Promise => { + const res = await knex.raw(`SELECT itis_tsn FROM taxon ORDER BY random() LIMIT 1`); + return res.rows?.[0]?.itis_tsn ?? 
faker.number.int({ min: 10000, max: 99999 }); +}; + +export const insertSubmissionFeatureSecurity = async ( + knex: Knex, + options: { submission_feature_id: number; security_rule_id: number } +): Promise<number> => { + const res = await knex.raw(` + INSERT INTO submission_feature_security (submission_feature_id, security_rule_id, create_user) + VALUES ( + ${options.submission_feature_id}, + ${options.security_rule_id}, + (SELECT system_user_id from "system_user" where user_identifier = 'SIMS') + ) + RETURNING submission_feature_security_id; + `); + + return res.rows[0].submission_feature_security_id; +}; + +export const insertTelemetryDeployment = async ( knex: Knex, options: { submission_id: number; parent_submission_feature_id: number } ): Promise<number> => { + const deploymentData = { + animal_identifier: faker.string.alphanumeric({ length: 10 }), + device_key: faker.string.alphanumeric({ length: 8 }), + start_date: faker.date.past().toISOString(), + end_date: faker.date.future().toISOString() + }; + + const response = await knex.raw( + `${insertSubmissionFeature({ + submission_id: options.submission_id, + parent_submission_feature_id: options.parent_submission_feature_id, + feature_type: 'telemetry_deployment', + data: deploymentData + })}` + ); + const submission_feature_id = response.rows[0].submission_feature_id; + + await knex.raw(`${insertSearchString({ submission_feature_id })}`); + await knex.raw(`${insertSearchString({ submission_feature_id })}`); + + await knex.raw(`${insertSearchStartDatetime({ submission_feature_id })}`); + await knex.raw(`${insertSearchEndDatetime({ submission_feature_id })}`); + + return submission_feature_id; +}; + +export const insertTelemetryDevice = async ( + knex: Knex, + options: { submission_id: number; parent_submission_feature_id: number; device_id?: string } +): Promise<{ submission_feature_id: number; device_id: string }> => { + const device_id = options.device_id || faker.string.alphanumeric({ length: 8 }); + const deviceData = { + 
device_id, + device_manufacturer: faker.company.name(), + device_model: faker.commerce.productName(), + description: faker.lorem.sentence(), + serial_number: faker.string.alphanumeric({ length: 12 }) + }; + + const response = await knex.raw( + `${insertSubmissionFeature({ + submission_id: options.submission_id, + parent_submission_feature_id: options.parent_submission_feature_id, + feature_type: 'telemetry_device', + data: deviceData + })}` + ); + const submission_feature_id = response.rows[0].submission_feature_id; + + await knex.raw(`${insertSearchString({ submission_feature_id })}`); + await knex.raw(`${insertSearchString({ submission_feature_id })}`); + + return { submission_feature_id, device_id }; +}; + +export const insertTelemetryRecord = async ( + knex: Knex, + options: { submission_id: number; parent_submission_feature_id: number; device_id?: string } +): Promise => { + const device_id = options.device_id || faker.string.alphanumeric({ length: 8 }); const telemetryData = { - device_id: faker.string.alphanumeric({ length: 8 }), + device_id, latitude: faker.number.float({ min: 48.617424, max: 60.664785, multipleOf: 0.000001 }), longitude: faker.number.float({ min: -135.878906, max: -114.433594, multipleOf: 0.000001 }), - timestamp: faker.date.recent().toISOString(), + timestamp: faker.date.between({ from: '2020-01-01T00:00:00.000Z', to: new Date().toISOString() }).toISOString(), temperature: faker.number.float({ min: -20, max: 50, multipleOf: 0.1 }), humidity: faker.number.float({ min: 0, max: 100, multipleOf: 0.1 }), + dop: faker.number.float({ min: 1, max: 20, multipleOf: 0.1 }), status: faker.helpers.arrayElement(['active', 'idle', 'error']) }; @@ -479,6 +675,7 @@ export const insertTelemetryRecord = async ( await knex.raw(`${insertSearchString({ submission_feature_id })}`); // e.g., status await knex.raw(`${insertSearchNumber({ submission_feature_id })}`); // e.g., temperature await knex.raw(`${insertSearchNumber({ submission_feature_id })}`); // e.g., 
humidity + await knex.raw(`${insertSearchNumber({ submission_feature_id })}`); // e.g., dop // Spatial search index await knex.raw( @@ -487,5 +684,16 @@ export const insertTelemetryRecord = async ( })}` ); + // randomly secure some telemetry points + if (Math.random() < 0.1) { + const ruleRes = await knex.raw(`SELECT security_rule_id FROM security_rule ORDER BY random() LIMIT 1`); + if (ruleRes.rows.length) { + await insertSubmissionFeatureSecurity(knex, { + submission_feature_id, + security_rule_id: ruleRes.rows[0].security_rule_id + }); + } + } + return submission_feature_id; };