Commit 215af32

script for more easily creating database backups
1 parent 4628bf7 commit 215af32
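
Going by the Usage string in the new script, a run takes a processor name plus an optional AWS profile for the S3 upload and an optional --continue flag. A typical invocation might look like the following (the profile name is a placeholder, and tsx is assumed to be available, e.g. via npx):

    npx tsx scripts/create-db-dump.ts oeth-processor --profile my-aws-profile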

File tree

8 files changed: +522 -3 lines changed

eslint.config.js

Lines changed: 1 addition & 1 deletion

@@ -7,7 +7,7 @@ module.exports = [
     languageOptions: {
       parser: tsParser,
       parserOptions: {
-        project: 'tsconfig.json',
+        project: ['tsconfig.eslint.json'],
         sourceType: 'module',
       },
     },

package.json

Lines changed: 1 addition & 0 deletions

@@ -11,6 +11,7 @@
     "setup": "sqd down && sqd up && sleep 2 && sqd migration:apply",
     "serve": "sqd serve",
     "typegen": "sqd typegen",
+    "migration:apply": "sqd migration:apply",
     "process:arbitrum": "sqd process:arbitrum",
     "process:base": "sqd process:base",
     "process:sonic": "sqd process:sonic",
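
The added migration:apply entry simply forwards to the Squid CLI; the backup script introduced below calls it through npm with DB_HOST/DB_PORT/DB_USER/DB_PASS/DB_NAME pointed at a throwaway Postgres container. A standalone run against such a container might look like this (the port value is a placeholder, not one taken from the repo):

    DB_PORT=23001 npm run migration:apply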

scripts/create-db-dump.ts

Lines changed: 339 additions & 0 deletions (new file)

import { spawn } from 'child_process'
import dotenv from 'dotenv'
import 'dotenv/config'
import * as fs from 'fs'
import { createServer } from 'net'
import * as path from 'path'
import { Pool } from 'pg'
import { setTimeout as delay } from 'timers/promises'
import { createPublicClient, http } from 'viem'

type ProcessorName =
  | 'mainnet-processor'
  | 'oeth-processor'
  | 'ogv-processor'
  | 'ousd-processor'
  | 'arbitrum-processor'
  | 'base-processor'
  | 'oethb-processor'
  | 'sonic-processor'
  | 'plume-processor'

function parseArgs() {
  const args = process.argv.slice(2)
  if (args.length === 0) {
    console.error('Usage: tsx create-db-dump.ts <processor-name> [--profile <aws-profile>] [--continue]')
    process.exit(1)
  }

  const processorName = args[0] as ProcessorName
  let awsProfile: string | undefined
  let continueRun = false

  for (let i = 1; i < args.length; i++) {
    const arg = args[i]
    if (arg === '--profile' && args[i + 1]) {
      awsProfile = args[i + 1]
      i++
      continue
    }
    if (arg === '--continue') {
      continueRun = true
    }
  }

  return { processorName, awsProfile, continueRun }
}

function getProcessorAlias(processorName: ProcessorName): string {
  // Maps `oeth-processor` -> `oeth`, etc. Used to pick the main entry file.
  return processorName.replace(/-processor$/, '')
}

function getRpcEndpointEnvName(processorName: ProcessorName): string {
  switch (processorName) {
    case 'base-processor':
    case 'oethb-processor':
      return process.env['RPC_BASE_ENV']!
    case 'arbitrum-processor':
      return process.env['RPC_ARBITRUM_ENV']!
    case 'sonic-processor':
      return process.env['RPC_SONIC_ENV']!
    case 'plume-processor':
      return process.env['RPC_PLUME_ENV']!
    // mainnet family
    case 'mainnet-processor':
    case 'oeth-processor':
    case 'ogv-processor':
    case 'ousd-processor':
    default:
      return process.env['RPC_ENV']!
  }
}

async function getLatestBlockNumber(rpcUrl: string): Promise<number> {
  const client = createPublicClient({ transport: http(rpcUrl) })
  const n = await client.getBlockNumber()
  return Number(n)
}

function computePrevious100kBoundary(height: number): number {
  if (height <= 0) return 0
  return height - (height % 100000) - 100000
}

async function findFreePort(preferredStart = 24000, preferredEnd = 65000): Promise<number> {
  async function getDockerPublishedPorts(): Promise<Set<number>> {
    try {
      const { stdout } = await runCmd(`docker ps --format "{{.Ports}}"`, { capture: true })
      const busy = new Set<number>()
      stdout
        .split('\n')
        .map((l: string) => l.trim())
        .filter(Boolean)
        .forEach((line: string) => {
          // Examples: "0.0.0.0:23798->5432/tcp, :::23798->5432/tcp"
          const matches = [...line.matchAll(/:(\d+)->/g)]
          for (const m of matches) {
            const p = parseInt(m[1], 10)
            if (!isNaN(p)) busy.add(p)
          }
        })
      return busy
    } catch {
      return new Set<number>()
    }
  }

  async function tryPort(port: number): Promise<boolean> {
    // Attempt to bind on all interfaces to catch conflicts with docker published ports
    return await new Promise<boolean>((resolve) => {
      const srv = createServer()
      let resolved = false
      const finalize = (ok: boolean) => {
        if (resolved) return
        resolved = true
        try {
          srv.close()
        } catch {}
        resolve(ok)
      }
      srv.once('error', () => finalize(false))
      srv.listen({ port, host: '0.0.0.0', exclusive: true }, () => finalize(true))
    })
  }

  const dockerBusy = await getDockerPublishedPorts()
  for (let port = preferredStart; port <= preferredEnd; port++) {
    if (dockerBusy.has(port)) continue
    const ok = await tryPort(port)
    if (ok) return port
  }
  throw new Error('No free port found')
}

async function getExistingDbPort(composeProject: string): Promise<number | null> {
  try {
    const { stdout } = await runCmd(`docker ps --filter "name=^\/${composeProject}-db-1$" --format "{{.Ports}}"`, {
      capture: true,
    })
    const ports = stdout.trim()
    if (!ports) return null
    // Prefer explicit 5432 mapping if present
    const mSpecific = ports.match(/:(\d+)->5432\/tcp/)
    if (mSpecific) return parseInt(mSpecific[1], 10)
    const mAny = ports.match(/:(\d+)->/)
    if (mAny) return parseInt(mAny[1], 10)
    return null
  } catch {
    return null
  }
}

async function waitForDbReady({
  host,
  port,
  name,
  user,
  pass,
}: {
  host: string
  port: number
  name: string
  user: string
  pass: string
}) {
  const timeoutMs = 60_000
  const start = Date.now()
  while (Date.now() - start < timeoutMs) {
    const pool = new Pool({ host, port, database: name, user, password: pass })
    try {
      await pool.query('SELECT 1')
      await pool.end()
      return
    } catch {
      await pool.end().catch(() => {})
      await delay(1000)
    }
  }
  throw new Error('Database not ready in time')
}

type RunCmdOptions = { env?: NodeJS.ProcessEnv; capture?: boolean }

async function runCmd(cmd: string, opts: RunCmdOptions = {}) {
  const { env, capture = false } = opts
  return await new Promise<{ stdout: string }>((resolve, reject) => {
    const child = spawn(cmd, {
      env,
      shell: true,
      stdio: capture ? ['ignore', 'pipe', 'inherit'] : 'inherit',
    })

    let stdout = ''
    if (capture && child.stdout) {
      child.stdout.on('data', (data: Buffer) => {
        stdout += data.toString()
      })
    }

    child.on('error', (err) => reject(err))
    child.on('close', (code) => {
      if (code === 0) resolve({ stdout })
      else reject(new Error(`Command failed (exit ${code}): ${cmd}`))
    })
  })
}

async function runWithRetries(cmd: string, opts: RunCmdOptions & { retries?: number; delayMs?: number } = {}) {
  const { retries = 5, delayMs = 5000, ...rest } = opts
  let attempt = 0
  while (true) {
    attempt++
    try {
      return await runCmd(cmd, rest)
    } catch (err) {
      if (attempt >= retries) throw err
      console.warn(`Command failed (attempt ${attempt}/${retries}): ${cmd}. Retrying in ${delayMs}ms...`)
      await delay(delayMs)
    }
  }
}

async function main() {
  // Load dev.env if present (in addition to standard .env via dotenv/config)
  const devEnvPath = path.join(process.cwd(), 'dev.env')
  if (fs.existsSync(devEnvPath)) {
    dotenv.config({ path: devEnvPath, override: false })
  }

  const { processorName, awsProfile, continueRun } = parseArgs()
  const alias = getProcessorAlias(processorName)

  // Determine RPC URL for the chain
  const rpcEnv = getRpcEndpointEnvName(processorName)
  const rpcUrl = process.env[rpcEnv]
  if (!rpcUrl) {
    console.error(`Missing RPC endpoint env var ${rpcEnv}. Please set it in your environment or dev.env`)
    process.exit(1)
  }

  // Compute target block (nearest 100k boundary)
  const latest = await getLatestBlockNumber(rpcUrl)
  const blockTo = computePrevious100kBoundary(latest)
  if (blockTo === 0) {
    console.error('Computed target block is 0; aborting')
    process.exit(1)
  }
  console.log(`Latest block: ${latest}. Processing up to: ${blockTo}`)

  // Prepare isolated DB
  const DB_HOST = 'localhost'
  const DB_USER = process.env.DB_USER || 'postgres'
  const DB_PASS = process.env.DB_PASS || 'postgres'
  const DB_NAME = process.env.DB_NAME || 'squid'
  const composeProject = `squid_${alias}`
  let DB_PORT = 0
  let startedCompose = false
  if (continueRun) {
    const existingPort = await getExistingDbPort(composeProject)
    if (existingPort) {
      DB_PORT = existingPort
      console.log(`Reusing existing dockerized Postgres at port ${DB_PORT} (project ${composeProject})...`)
    } else {
      console.log(`No existing Postgres found.`)
      process.exit(1)
    }
  } else {
    DB_PORT = await findFreePort(23000, 65000)
  }

  const baseEnv: NodeJS.ProcessEnv = {
    ...process.env,
    DB_HOST,
    DB_PORT: String(DB_PORT),
    DB_USER,
    DB_PASS,
    DB_NAME,
  }

  // Start isolated Postgres
  if (!continueRun || startedCompose) {
    if (!startedCompose) {
      console.log(`Starting dockerized Postgres at port ${DB_PORT} (project ${composeProject})...`)
      await runCmd(`docker-compose -p ${composeProject} up -d`, { env: { ...baseEnv } })
      startedCompose = true
    }
  }

  try {
    // Wait for DB to be ready
    await waitForDbReady({ host: DB_HOST, port: DB_PORT, name: DB_NAME, user: DB_USER, pass: DB_PASS })
    console.log('Database is ready')

    console.log(`Running processor '${processorName}' up to block ${blockTo}...`)
    const procEnv: NodeJS.ProcessEnv = {
      ...baseEnv,
      BLOCK_TO: String(blockTo),
    }
    await runCmd('npm run migration:apply', { env: procEnv })
    await runWithRetries(`npm run process:${alias}`, { env: procEnv, retries: 5, delayMs: 30000 })

    // Dump DB using existing script to get the AWS command
    console.log('Creating DB dump...')
    const { stdout } = await runCmd(`npx --yes tsx scripts/dump-db.ts ${processorName}`, {
      env: baseEnv,
      capture: true,
    })

    // Find the aws command line in the output
    const lines = stdout.split('\n')
    const awsLine = lines.find((l: string) => l.trim().startsWith('aws s3 cp '))
    if (!awsLine) {
      console.error('Could not find AWS upload command in dump script output')
      process.exit(1)
    }

    const awsCmd = awsProfile ? `${awsLine.trim()} --profile ${awsProfile}` : awsLine.trim()
    console.log(`Uploading dump to S3: ${awsCmd}`)
    await runCmd(`${awsCmd}`)

    console.log('Done.')
  } finally {
    if (startedCompose) {
      console.log('Shutting down docker compose project...')
      try {
        await runCmd(`docker-compose -p ${composeProject} down`, { env: baseEnv })
      } catch (e) {
        console.warn('Failed to stop docker-compose project:', e)
      }
    } else {
      console.log('Leaving existing docker compose project running.')
    }
  }
}

main().catch((err) => {
  console.error(err)
  process.exit(1)
})
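
As a usage note: per parseArgs and getExistingDbPort, passing --continue makes the script reuse a Postgres container that an earlier run of the same compose project (squid_<alias>) left running, instead of starting a fresh one, and it leaves that container up afterwards. A hypothetical re-run:

    npx tsx scripts/create-db-dump.ts base-processor --continue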
