Commit ef8e7a9

fix: guarantee db is empty before performing a replay (#1374)
* feat: add event-replay test suite
* test: bns genesis block
* chore: rename tsv to mainnet
* test: export import cycle
* chore: reduce mainnet tsv size
* fix: db has data check
* fix: drop views first, tables second
* feat: friendly error when migration cycle failed
1 parent eb9fd0e commit ef8e7a9
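
In outline, the replay path now refuses to run against a non-empty database unless the caller explicitly wipes it first. A condensed sketch of the flow, assuming the helper name and scaffolding are illustrative only (the real logic and names come from the diffs below):

// Illustrative sketch only: the real logic lives in importEventsFromTsv (src/event-replay/event-replay.ts).
import { databaseHasData } from '../datastore/event-requests';
import { cycleMigrations, dangerousDropAllTables } from '../datastore/migrations';

async function prepareDbForReplay(wipeDb: boolean, force: boolean): Promise<void> {
  // Any relation left in the schema counts as "data", not just rows in event_observer_requests.
  if (!wipeDb && (await databaseHasData())) {
    throw new Error('Database contains existing data. Add --wipe-db to drop the existing tables.');
  }
  if (force) {
    await dangerousDropAllTables({ acknowledgePotentialCatastrophicConsequences: 'yes' });
  }
  // checkForEmptyData makes the migration cycle fail loudly if the "down" step left anything behind.
  await cycleMigrations({ dangerousAllowDataLoss: true, checkForEmptyData: true });
}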

11 files changed: +265 -40 lines changed

.vscode/launch.json

Lines changed: 17 additions & 0 deletions
@@ -155,6 +155,23 @@
       "preLaunchTask": "stacks-node:deploy-dev",
       "postDebugTask": "stacks-node:stop-dev"
     },
+    {
+      "type": "node",
+      "request": "launch",
+      "name": "Jest: Event Replay",
+      "program": "${workspaceFolder}/node_modules/.bin/jest",
+      "args": [
+        "--testTimeout=3600000",
+        "--runInBand",
+        "--no-cache",
+        "--config",
+        "${workspaceRoot}/jest.config.event-replay.js"
+      ],
+      "outputCapture": "std",
+      "console": "integratedTerminal",
+      "preLaunchTask": "stacks-node:deploy-dev",
+      "postDebugTask": "stacks-node:stop-dev"
+    },
     {
       "type": "node",
       "request": "launch",

jest.config.event-replay.js

Lines changed: 15 additions & 0 deletions
@@ -0,0 +1,15 @@
+module.exports = {
+  preset: 'ts-jest',
+  rootDir: 'src',
+  testMatch: ['<rootDir>/tests-event-replay/**/*.ts'],
+  testPathIgnorePatterns: [
+    '<rootDir>/tests-event-replay/setup.ts',
+    '<rootDir>/tests-event-replay/teardown.ts',
+  ],
+  collectCoverageFrom: ['<rootDir>/**/*.ts'],
+  coveragePathIgnorePatterns: ['<rootDir>/tests*'],
+  coverageDirectory: '../coverage',
+  globalSetup: '<rootDir>/tests-event-replay/setup.ts',
+  globalTeardown: '<rootDir>/tests-event-replay/teardown.ts',
+  testTimeout: 20000,
+};
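
The "Jest: Event Replay" launch configuration above runs this config with jest --testTimeout=3600000 --runInBand --no-cache --config jest.config.event-replay.js; the same invocation should presumably work from a terminal at the repository root, given a running Postgres and the stacks-node dev tasks the launch config depends on.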

src/datastore/event-requests.ts

Lines changed: 23 additions & 9 deletions
@@ -1,7 +1,7 @@
 import { pipelineAsync } from '../helpers';
 import { Readable, Writable } from 'stream';
 import { DbRawEventRequest } from './common';
-import { PgServer } from './connection';
+import { connectPostgres, PgServer } from './connection';
 import { connectPgPool, connectWithRetry } from './connection-legacy';
 import * as pgCopyStreams from 'pg-copy-streams';
 import * as PgCursor from 'pg-cursor';
@@ -119,22 +119,36 @@ export async function* getRawEventRequests(
   }
 }
 
-export async function containsAnyRawEventRequests(): Promise<boolean> {
-  const pool = await connectPgPool({
-    usageName: 'contains-raw-events-check',
+/**
+ * Check the `pg_class` table for any data structures contained in the database. We will consider
+ * any and all results here as "data" contained in the DB, since anything that is not a completely
+ * empty DB could lead to strange errors when running the API. See:
+ * https://www.postgresql.org/docs/current/catalog-pg-class.html
+ * @returns `boolean` if the DB has data
+ */
+export async function databaseHasData(args?: {
+  ignoreMigrationTables?: boolean;
+}): Promise<boolean> {
+  const sql = await connectPostgres({
+    usageName: 'contains-data-check',
     pgServer: PgServer.primary,
   });
-  const client = await pool.connect();
   try {
-    const result = await client.query('SELECT id from event_observer_requests LIMIT 1');
-    return result.rowCount > 0;
+    const ignoreMigrationTables = args?.ignoreMigrationTables ?? false;
+    const result = await sql<{ count: number }[]>`
+      SELECT COUNT(*)
+      FROM pg_class c
+      JOIN pg_namespace s ON s.oid = c.relnamespace
+      WHERE s.nspname = ${sql.options.connection.search_path}
+      ${ignoreMigrationTables ? sql`AND c.relname NOT LIKE 'pgmigrations%'` : sql``}
+    `;
+    return result.count > 0 && result[0].count > 0;
   } catch (error: any) {
    if (error.message?.includes('does not exist')) {
      return false;
    }
    throw error;
   } finally {
-    client.release();
-    await pool.end();
+    await sql.end();
   }
 }
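
Because pg_class lists every relation in a schema (tables, indexes, sequences, views, materialized views), even an empty leftover table now counts as data, whereas the old check only looked for rows in event_observer_requests. The tests added in this commit pin down the intended behaviour; a condensed excerpt, with assertions copied from the new import/export test file further below:

// After runMigrations(clientConfig, 'up', {}) the schema contains tables, so it "has data".
await expect(databaseHasData()).resolves.toBe(true);
// After dangerousDropAllTables(...) nothing is left.
await expect(databaseHasData()).resolves.toBe(false);
// After an up/down migration cycle only the pgmigrations bookkeeping table remains.
await expect(databaseHasData()).resolves.toBe(true);
await expect(databaseHasData({ ignoreMigrationTables: true })).resolves.toBe(false);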

src/datastore/migrations.ts

Lines changed: 30 additions & 23 deletions
@@ -3,6 +3,8 @@ import PgMigrate, { RunnerOption } from 'node-pg-migrate';
 import { Client } from 'pg';
 import { APP_DIR, isDevEnv, isTestEnv, logError, logger } from '../helpers';
 import { getPgClientConfig, PgClientConfig } from './connection-legacy';
+import { connectPostgres, PgServer } from './connection';
+import { databaseHasData } from './event-requests';
 
 const MIGRATIONS_TABLE = 'pgmigrations';
 const MIGRATIONS_DIR = path.join(APP_DIR, 'migrations');
@@ -52,10 +54,14 @@
 export async function cycleMigrations(opts?: {
   // Bypass the NODE_ENV check when performing a "down" migration which irreversibly drops data.
   dangerousAllowDataLoss?: boolean;
+  checkForEmptyData?: boolean;
 }): Promise<void> {
   const clientConfig = getPgClientConfig({ usageName: 'cycle-migrations' });
 
   await runMigrations(clientConfig, 'down', opts);
+  if (opts?.checkForEmptyData && (await databaseHasData({ ignoreMigrationTables: true }))) {
+    throw new Error('Migration down process did not completely remove DB tables');
+  }
   await runMigrations(clientConfig, 'up', opts);
 }
 
@@ -65,30 +71,31 @@ export async function dangerousDropAllTables(opts?: {
   if (opts?.acknowledgePotentialCatastrophicConsequences !== 'yes') {
     throw new Error('Dangerous usage error.');
   }
-  const clientConfig = getPgClientConfig({ usageName: 'dangerous-drop-all-tables' });
-  const client = new Client(clientConfig);
+  const sql = await connectPostgres({
+    usageName: 'dangerous-drop-all-tables',
+    pgServer: PgServer.primary,
+  });
+  const schema = sql.options.connection.search_path;
   try {
-    await client.connect();
-    await client.query('BEGIN');
-    const getTablesQuery = await client.query<{ table_name: string }>(
-      `
-      SELECT table_name
-      FROM information_schema.tables
-      WHERE table_schema = $1
-      AND table_catalog = $2
-      AND table_type = 'BASE TABLE'
-      `,
-      [clientConfig.schema, clientConfig.database]
-    );
-    const tables = getTablesQuery.rows.map(r => r.table_name);
-    for (const table of tables) {
-      await client.query(`DROP TABLE IF EXISTS ${table} CASCADE`);
-    }
-    await client.query('COMMIT');
-  } catch (error) {
-    await client.query('ROLLBACK');
-    throw error;
+    await sql.begin(async sql => {
+      const relNamesQuery = async (kind: string) => sql<{ relname: string }[]>`
+        SELECT relname
+        FROM pg_class c
+        JOIN pg_namespace s ON s.oid = c.relnamespace
+        WHERE s.nspname = ${schema} AND c.relkind = ${kind}
+      `;
+      // Remove materialized views first and tables second.
+      // Using CASCADE in these DROP statements also removes associated indexes and constraints.
+      const views = await relNamesQuery('m');
+      for (const view of views) {
+        await sql`DROP MATERIALIZED VIEW IF EXISTS ${sql(view.relname)} CASCADE`;
+      }
+      const tables = await relNamesQuery('r');
+      for (const table of tables) {
+        await sql`DROP TABLE IF EXISTS ${sql(table.relname)} CASCADE`;
+      }
+    });
   } finally {
-    await client.end();
+    await sql.end();
   }
 }
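
For reference, relkind in pg_class is a single-character code: 'm' selects materialized views and 'r' ordinary tables, which is why the helper query is run twice. Dropping the materialized views before the tables (the commit message's "drop views first, tables second") presumably avoids ordering surprises when a view depends on a table that is about to be dropped; CASCADE removes the associated indexes and constraints either way.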

src/event-replay/event-replay.ts

Lines changed: 10 additions & 6 deletions
@@ -5,7 +5,7 @@ import { defaultLogLevel, getApiConfiguredChainID, httpPostRequest, logger } fro
 import { findBnsGenesisBlockData, findTsvBlockHeight, getDbBlockHeight } from './helpers';
 import { importV1BnsNames, importV1BnsSubdomains, importV1TokenOfferingData } from '../import-v1';
 import {
-  containsAnyRawEventRequests,
+  databaseHasData,
   exportRawEventRequests,
   getRawEventRequests,
 } from '../datastore/event-requests';
@@ -90,18 +90,22 @@ export async function importEventsFromTsv(
     default:
       throw new Error(`Invalid event import mode: ${importMode}`);
   }
-  const hasData = await containsAnyRawEventRequests();
+  const hasData = await databaseHasData();
   if (!wipeDb && hasData) {
     throw new Error(`Database contains existing data. Add --wipe-db to drop the existing tables.`);
   }
   if (force) {
     await dangerousDropAllTables({ acknowledgePotentialCatastrophicConsequences: 'yes' });
   }
 
-  // This performs a "migration down" which drops the tables, then re-creates them.
-  // If there's a breaking change in the migration files, this will throw, and the pg database needs wiped manually,
-  // or the `--force` option can be used.
-  await cycleMigrations({ dangerousAllowDataLoss: true });
+  try {
+    await cycleMigrations({ dangerousAllowDataLoss: true, checkForEmptyData: true });
+  } catch (error) {
+    logger.error(error);
+    throw new Error(
+      `DB migration cycle failed, possibly due to an incompatible API version upgrade. Add --wipe-db --force or perform a manual DB wipe before importing.`
+    );
+  }
 
   // Look for the TSV's block height and determine the prunable block window.
   const tsvBlockHeight = await findTsvBlockHeight(resolvedFilePath);
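
Judging from the new tests below, importEventsFromTsv takes the TSV path, an import mode, a wipeDb flag, and a force flag. A hedged example of a full wipe-and-replay, using the mocknet TSV shipped with the tests (the wrapper function is illustrative):

import { importEventsFromTsv } from './event-replay';

async function wipeAndReplay(): Promise<void> {
  // Parameter meanings inferred from this commit's tests: (tsvPath, importMode, wipeDb, force).
  // wipeDb/force mirror the --wipe-db and --force options referenced in the error messages above.
  await importEventsFromTsv('src/tests-event-replay/tsv/mocknet.tsv', 'archival', true, true);
}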

src/tests/event-replay-tests.ts renamed to src/tests-event-replay/helper-tests.ts

Lines changed: 15 additions & 2 deletions
@@ -1,8 +1,8 @@
 import * as fs from 'fs';
-import { findTsvBlockHeight } from '../event-replay/helpers';
+import { findBnsGenesisBlockData, findTsvBlockHeight } from '../event-replay/helpers';
 import { ReverseFileStream } from '../event-replay/reverse-file-stream';
 
-describe('event replay tests', () => {
+describe('helper tests', () => {
   function writeTmpFile(fileName: string, contents: string): string {
     try {
       fs.mkdirSync('./.tmp');
@@ -119,4 +119,17 @@ line4`;
       fs.unlinkSync(testFilePath);
     }
   });
+
+  test('BNS genesis block data is found', async () => {
+    const genesisBlock = await findBnsGenesisBlockData('src/tests-event-replay/tsv/mainnet.tsv');
+    expect(genesisBlock).toEqual({
+      index_block_hash: '0x918697ef63f9d8bdf844c3312b299e72a231cde542f3173f7755bb8c1cdaf3a7',
+      parent_index_block_hash: '0x55c9861be5cff984a20ce6d99d4aa65941412889bdc665094136429b84f8c2ee',
+      microblock_hash: '0x0000000000000000000000000000000000000000000000000000000000000000',
+      microblock_sequence: 0,
+      microblock_canonical: true,
+      tx_id: '0x2f079994c9bd92b2272258b9de73e278824d76efe1b5a83a3b00941f9559de8a',
+      tx_index: 7,
+    });
+  });
 });
Lines changed: 101 additions & 0 deletions
@@ -0,0 +1,101 @@
+import * as fs from 'fs';
+import { exportEventsAsTsv, importEventsFromTsv } from '../event-replay/event-replay';
+import { PgWriteStore } from '../datastore/pg-write-store';
+import { dangerousDropAllTables, runMigrations } from '../datastore/migrations';
+import { databaseHasData } from '../datastore/event-requests';
+import { getPgClientConfig } from '../datastore/connection-legacy';
+
+describe('import/export tests', () => {
+  let db: PgWriteStore;
+
+  beforeEach(async () => {
+    process.env.PG_DATABASE = 'postgres';
+    db = await PgWriteStore.connect({
+      usageName: 'tests',
+      withNotifier: false,
+      skipMigrations: true,
+    });
+  });
+
+  test('event import and export cycle', async () => {
+    // Import from mocknet TSV
+    await importEventsFromTsv('src/tests-event-replay/tsv/mocknet.tsv', 'archival', true, true);
+    const chainTip = await db.getUnanchoredChainTip();
+    expect(chainTip.found).toBe(true);
+    expect(chainTip.result?.blockHeight).toBe(28);
+    expect(chainTip.result?.indexBlockHash).toBe(
+      '0x76cd67a65c0dfd5ea450bb9efe30da89fa125bfc077c953802f718353283a533'
+    );
+    expect(chainTip.result?.blockHash).toBe(
+      '0x7682af212d3c1ef62613412f9b5a727269b4548f14eca2e3f941f7ad8b3c11b2'
+    );
+
+    // Export into temp TSV
+    const tmpDir = 'src/tests-event-replay/.tmp';
+    try {
+      fs.mkdirSync(tmpDir);
+    } catch (error: any) {
+      if (error.code != 'EEXIST') throw error;
+    }
+    const tmpTsvPath = `${tmpDir}/export.tsv`;
+    await exportEventsAsTsv(tmpTsvPath, true);
+
+    // Re-import with exported TSV and check that chain tip matches.
+    try {
+      await importEventsFromTsv(`${tmpDir}/export.tsv`, 'archival', true, true);
+      const newChainTip = await db.getUnanchoredChainTip();
+      expect(newChainTip.found).toBe(true);
+      expect(newChainTip.result?.blockHeight).toBe(28);
+      expect(newChainTip.result?.indexBlockHash).toBe(
+        '0x76cd67a65c0dfd5ea450bb9efe30da89fa125bfc077c953802f718353283a533'
+      );
+      expect(newChainTip.result?.blockHash).toBe(
+        '0x7682af212d3c1ef62613412f9b5a727269b4548f14eca2e3f941f7ad8b3c11b2'
+      );
+    } finally {
+      fs.rmSync(tmpDir, { force: true, recursive: true });
+    }
+  });
+
+  test('import with db wipe options', async () => {
+    // Migrate first so we have some data.
+    const clientConfig = getPgClientConfig({ usageName: 'cycle-migrations' });
+    await runMigrations(clientConfig, 'up', {});
+    await expect(
+      importEventsFromTsv('src/tests-event-replay/tsv/mocknet.tsv', 'archival', false, false)
+    ).rejects.toThrowError('contains existing data');
+
+    // Create strange table
+    await db.sql`CREATE TABLE IF NOT EXISTS test (a varchar(10))`;
+    await expect(
+      importEventsFromTsv('src/tests-event-replay/tsv/mocknet.tsv', 'archival', true, false)
+    ).rejects.toThrowError('migration cycle failed');
+
+    // Force and test
+    await expect(
+      importEventsFromTsv('src/tests-event-replay/tsv/mocknet.tsv', 'archival', true, true)
+    ).resolves.not.toThrow();
+  });
+
+  test('db contains data', async () => {
+    const clientConfig = getPgClientConfig({ usageName: 'cycle-migrations' });
+    await runMigrations(clientConfig, 'up', {});
+
+    // Having tables counts as having data as this may change across major versions.
+    await expect(databaseHasData()).resolves.toBe(true);
+
+    // Dropping all tables removes everything.
+    await dangerousDropAllTables({ acknowledgePotentialCatastrophicConsequences: 'yes' });
+    await expect(databaseHasData()).resolves.toBe(false);
+
+    // Cycling migrations leaves the `pgmigrations` table.
+    await runMigrations(clientConfig, 'up', {});
+    await runMigrations(clientConfig, 'down', {});
+    await expect(databaseHasData()).resolves.toBe(true);
+    await expect(databaseHasData({ ignoreMigrationTables: true })).resolves.toBe(false);
+  });
+
+  afterEach(async () => {
+    await db?.close();
+  });
+});

src/tests-event-replay/setup.ts

Lines changed: 11 additions & 0 deletions
@@ -0,0 +1,11 @@
+import { loadDotEnv } from '../helpers';
+
+// ts-unused-exports:disable-next-line
+export default (): void => {
+  console.log('Jest - setup..');
+  if (!process.env.NODE_ENV) {
+    process.env.NODE_ENV = 'test';
+  }
+  loadDotEnv();
+  console.log('Jest - setup done');
+};

src/tests-event-replay/teardown.ts

Lines changed: 5 additions & 0 deletions
@@ -0,0 +1,5 @@
+// ts-unused-exports:disable-next-line
+export default (): void => {
+  console.log('Jest - teardown');
+  console.log('Jest - teardown done');
+};

src/tests-event-replay/tsv/mainnet.tsv

Lines changed: 10 additions & 0 deletions
Large diffs are not rendered by default.
