Skip to content

Commit d25437f

Browse files
committed
Refactor brainbot service for improved API monitoring
1 parent 6589637 commit d25437f

File tree

2 files changed

+473
-119
lines changed

2 files changed

+473
-119
lines changed

src/events/common.ts

Lines changed: 246 additions & 91 deletions
Original file line numberDiff line numberDiff line change
@@ -6,8 +6,9 @@ import schedule from 'node-schedule'
66
import Updates from '../utils/sendUpdates.js'
77
import { ChannelTypes, UpdateTypes } from '../services/types/activityResult.js'
88
import RevalidateService from '../services/revalidate.js'
9+
import WikiUpdates from '../services/wikiUpdates.js'
910
import { writeFile } from '../utils/helpers.js'
10-
import axios from 'axios'
11+
import axios, { AxiosError } from 'axios'
1112

1213
const urls = [
1314
'https://iq.wiki/categories/daos',
@@ -19,139 +20,293 @@ const urls = [
1920
'https://iq.braindao.org/dashboard/stats',
2021
'https://braindao.org',
2122
]
23+
24+
interface ScheduledJobConfig {
25+
name: string
26+
schedule: string
27+
task: () => Promise<void>
28+
enabled: boolean
29+
}
30+
2231
@Discord()
2332
@injectable()
2433
export class AppDiscord {
2534
PROD_ALARMS: string
2635
PROD_URL: string
2736
DEV_URL: string
37+
private scheduledJobs: schedule.Job[] = []
2838

29-
constructor(private updates: Updates, private revalidate: RevalidateService) {
39+
constructor(
40+
private updates: Updates,
41+
private revalidate: RevalidateService,
42+
private wikiUpdates: WikiUpdates,
43+
) {
3044
this.PROD_ALARMS = JSON.parse(process.env.CHANNELS).ALARMS
3145
this.PROD_URL = process.env.PROD_URL
3246
this.DEV_URL = process.env.DEV_URL
3347
}
3448

35-
@On({event: 'ready'})
49+
private async executeWithErrorHandling(
50+
taskName: string,
51+
task: () => Promise<void>,
52+
): Promise<void> {
53+
try {
54+
console.log(`🚀 Starting ${taskName}...`)
55+
await task()
56+
console.log(`✅ Completed ${taskName}`)
57+
} catch (error) {
58+
console.error(`❌ Error in ${taskName}:`, error)
59+
}
60+
}
61+
62+
private createScheduledJobs(channels: any): ScheduledJobConfig[] {
63+
const devWikiChannel = channels.devWikiChannel
64+
const devHiiqChannel = channels.devHiiqChannel
65+
const prodWikiChannel = channels.prodWikiChannel
66+
const prodAlertChannel = channels.prodAlertChannel
67+
68+
return [
69+
{
70+
name: 'Wiki Updates Check',
71+
schedule: '* * * * *',
72+
enabled: true,
73+
task: async () => {
74+
await Promise.all([
75+
this.updates.sendUpdates({
76+
channelId: devWikiChannel,
77+
channelType: ChannelTypes.DEV,
78+
url: this.DEV_URL,
79+
updateType: UpdateTypes.WIKI,
80+
}),
81+
this.updates.sendUpdates({
82+
channelId: prodWikiChannel,
83+
channelType: ChannelTypes.PROD,
84+
url: this.PROD_URL,
85+
updateType: UpdateTypes.WIKI,
86+
}),
87+
])
88+
},
89+
},
90+
{
91+
name: 'HIIQ Updates Check',
92+
schedule: '0 */1 * * *',
93+
enabled: true,
94+
task: async () => {
95+
await this.updates.sendUpdates({
96+
channelId: devHiiqChannel,
97+
channelType: ChannelTypes.DEV,
98+
url: '',
99+
updateType: UpdateTypes.HIIQ,
100+
})
101+
},
102+
},
103+
{
104+
name: 'Random Wiki Revalidation',
105+
schedule: '* * * * *',
106+
enabled: true,
107+
task: async () => {
108+
await Promise.all([
109+
this.revalidate.revalidateRandomWiki(
110+
this.PROD_URL,
111+
`${process.cwd()}/build/utils/prodWikiLinks.js`,
112+
),
113+
this.revalidate.revalidateRandomWiki(
114+
this.DEV_URL,
115+
`${process.cwd()}/build/utils/devWikiLinks.js`,
116+
),
117+
])
118+
},
119+
},
120+
{
121+
name: 'Core Pages Revalidation',
122+
schedule: '*/5 * * * *',
123+
enabled: true,
124+
task: async () => {
125+
const pages = ['/activity', '/']
126+
await Promise.all([
127+
...pages.map(page =>
128+
this.revalidate.revalidateWikiPage(this.PROD_URL, page),
129+
),
130+
...pages.map(page =>
131+
this.revalidate.revalidateWikiPage(this.DEV_URL, page),
132+
),
133+
])
134+
},
135+
},
136+
{
137+
name: 'Website Status Check',
138+
schedule: '*/30 * * * *',
139+
enabled: true,
140+
task: async () => {
141+
await this.checkWebpageStatus(urls, prodAlertChannel)
142+
},
143+
},
144+
{
145+
name: 'Daily Wiki Links Extraction',
146+
schedule: '0 0 * * *',
147+
enabled: true,
148+
task: async () => {
149+
await this.callAndExtractWikis()
150+
},
151+
},
152+
]
153+
}
154+
155+
@On({ event: 'ready' })
36156
async isReady([client]: ArgsOf<'ready'>) {
37-
const channelIds = JSON.parse(process.env.CHANNELS)
157+
console.log('🤖 Bot is ready! Setting up scheduled tasks...')
38158

39-
const devWikiChannel = client.channels.cache.get(
40-
channelIds.DEV.WIKI,
41-
) as TextChannel
42-
const devHiiqChannel = client.channels.cache.get(
43-
channelIds.DEV.HIIQ,
44-
) as TextChannel
45-
const prodWikiChannel = client.channels.cache.get(
46-
channelIds.PROD.WIKI,
47-
) as TextChannel
48-
const prodAlertChannel = client.channels.cache.get(
49-
channelIds.PROD.WIKI,
50-
) as TextChannel
51-
52-
schedule.scheduleJob('* * * *', async () => {
53-
console.log('Calling for new wikis 🚀')
54-
55-
await this.updates.sendUpdates({
56-
channelId: devWikiChannel,
57-
channelType: ChannelTypes.DEV,
58-
url: `${this.DEV_URL}`,
59-
updateType: UpdateTypes.WIKI,
60-
})
159+
const channelIds = JSON.parse(process.env.CHANNELS)
61160

62-
await this.updates.sendUpdates({
63-
channelId: prodWikiChannel,
64-
channelType: ChannelTypes.PROD,
65-
url: `${this.PROD_URL}`,
66-
updateType: UpdateTypes.WIKI,
67-
})
68-
})
161+
const channels = {
162+
devWikiChannel: client.channels.cache.get(
163+
channelIds.DEV.WIKI,
164+
) as TextChannel,
165+
devHiiqChannel: client.channels.cache.get(
166+
channelIds.DEV.HIIQ,
167+
) as TextChannel,
168+
prodWikiChannel: client.channels.cache.get(
169+
channelIds.PROD.WIKI,
170+
) as TextChannel,
171+
prodAlertChannel: client.channels.cache.get(
172+
channelIds.PROD.WIKI,
173+
) as TextChannel,
174+
}
69175

70-
schedule.scheduleJob('0 */1 * * *', async () => {
71-
console.log(new Date())
72-
await this.updates.sendUpdates({
73-
channelId: devHiiqChannel,
74-
channelType: ChannelTypes.DEV,
75-
url: '',
76-
updateType: UpdateTypes.HIIQ,
77-
})
176+
Object.entries(channels).forEach(([name, channel]) => {
177+
if (!channel) {
178+
console.error(`❌ Channel ${name} not found!`)
179+
}
78180
})
79181

80-
await this.callAndExtractWikis()
182+
await this.executeWithErrorHandling('Initial Wiki Links Extraction', () =>
183+
this.callAndExtractWikis(),
184+
)
81185

82-
// Every 12am
83-
schedule.scheduleJob('0 0 * * *', async () => {
84-
await this.callAndExtractWikis()
85-
})
186+
await this.executeWithErrorHandling('API Health Monitoring Setup', () =>
187+
this.wikiUpdates.startApiHealthMonitoring(),
188+
)
86189

87-
// Every minute
88-
89-
schedule.scheduleJob('* * * * *', async () => {
90-
await this.revalidate.revalidateRandomWiki(
91-
this.PROD_URL,
92-
`${process.cwd()}/build/utils/prodWikiLinks.js`,
93-
)
94-
await this.revalidate.revalidateRandomWiki(
95-
this.DEV_URL,
96-
`${process.cwd()}/build/utils/devWikiLinks.js`,
97-
)
98-
})
190+
const jobConfigs = this.createScheduledJobs(channels)
99191

100-
// Every 5 minutes
101-
schedule.scheduleJob('*/5 * * * *', async () => {
102-
await this.revalidate.revalidateWikiPage(this.PROD_URL, '/activity')
103-
await this.revalidate.revalidateWikiPage(this.PROD_URL, '/')
192+
jobConfigs.forEach(config => {
193+
if (config.enabled) {
194+
const job = schedule.scheduleJob(
195+
config.name,
196+
config.schedule,
197+
async () => {
198+
await this.executeWithErrorHandling(config.name, config.task)
199+
},
200+
)
104201

105-
await this.revalidate.revalidateWikiPage(this.DEV_URL, '/activity')
106-
await this.revalidate.revalidateWikiPage(this.DEV_URL, '/')
202+
this.scheduledJobs.push(job)
203+
console.log(`📅 Scheduled: ${config.name} (${config.schedule})`)
204+
}
107205
})
108206

109-
// Every 30 minutes
110-
schedule.scheduleJob('*/30 * * * *', async () => {
111-
await this.checkWebpageStatus(urls, prodAlertChannel)
112-
})
207+
console.log(`✅ Successfully scheduled ${this.scheduledJobs.length} jobs`)
113208
}
114209

115-
async callAndExtractWikis() {
116-
const extractedProdLinks = await this.revalidate.extractLinks(this.PROD_URL)
117-
writeFile(
118-
extractedProdLinks,
119-
`${process.cwd()}/build/utils/prodWikiLinks.js`,
120-
)
210+
async callAndExtractWikis(): Promise<void> {
211+
try {
212+
console.log('📋 Extracting wiki links from both environments...')
213+
214+
const [extractedProdLinks, extractedDevLinks] = await Promise.all([
215+
this.revalidate.extractLinks(this.PROD_URL),
216+
this.revalidate.extractLinks(this.DEV_URL),
217+
])
121218

122-
const extractedDevLinks = await this.revalidate.extractLinks(this.DEV_URL)
123-
writeFile(extractedDevLinks, `${process.cwd()}/build/utils/devWikiLinks.js`)
219+
await Promise.all([
220+
writeFile(
221+
extractedProdLinks,
222+
`${process.cwd()}/build/utils/prodWikiLinks.js`,
223+
),
224+
writeFile(
225+
extractedDevLinks,
226+
`${process.cwd()}/build/utils/devWikiLinks.js`,
227+
),
228+
])
229+
230+
console.log('✅ Wiki links extraction completed successfully')
231+
} catch (error) {
232+
console.error('❌ Failed to extract wiki links:', error)
233+
throw error
234+
}
124235
}
125236

126-
async checkWebpageStatus(urls: string[], channel: TextChannel) {
127-
const results = await Promise.all(
237+
async checkWebpageStatus(
238+
urls: string[],
239+
channel: TextChannel,
240+
): Promise<void> {
241+
console.log(`🔍 Checking status of ${urls.length} websites...`)
242+
243+
const timeout = 30000
244+
const results = await Promise.allSettled(
128245
urls.map(async url => {
129246
try {
130-
const response = await axios.get(url)
131-
return { url, status: response.status }
247+
const response = await axios.get(url, {
248+
timeout,
249+
validateStatus: status => status < 500,
250+
})
251+
return { url, status: response.status, success: true }
132252
} catch (error: any) {
133-
console.error(error.cause ?? error.response.status)
253+
const status = error.response?.status || 'TIMEOUT/NETWORK_ERROR'
254+
console.error(`❌ ${url} failed:`, status)
255+
return { url, status, success: false, error: error.message }
134256
}
135257
}),
136258
)
137259

138-
const failedResults = results.filter(
139-
result => result && result?.status !== 200,
140-
)
260+
const processedResults = results.map((result, index) => {
261+
if (result.status === 'fulfilled') {
262+
return result.value
263+
} else {
264+
console.error(`❌ Promise rejected for ${urls[index]}:`, result.reason)
265+
return {
266+
url: urls[index],
267+
status: 'PROMISE_REJECTED',
268+
success: false,
269+
error: result.reason,
270+
}
271+
}
272+
})
273+
274+
const failedResults = processedResults.filter(result => !result.success)
275+
const successfulResults = processedResults.filter(result => result.success)
276+
277+
console.log(`✅ ${successfulResults.length}/${urls.length} websites are up`)
141278

142279
if (failedResults.length > 0) {
143-
console.log('Some websites are down: 🚧')
144-
failedResults.forEach(async result => {
145-
result &&
146-
(await this.updates.sendUpdates({
280+
console.log(`🚧 ${failedResults.length} websites are down:`)
281+
282+
const notifications = failedResults.map(async result => {
283+
if (result) {
284+
console.log(` - ${result.url}: ${result.status}`)
285+
await this.updates.sendUpdates({
147286
channelId: channel,
148287
channelType: ChannelTypes.PROD,
149288
url: result.url,
150289
updateType: UpdateTypes.DOWNTIME,
151-
}))
290+
})
291+
}
152292
})
153-
} else {
154-
console.log('All websites are up!')
293+
294+
await Promise.allSettled(notifications)
155295
}
156296
}
297+
298+
async shutdown(): Promise<void> {
299+
console.log('🛑 Shutting down scheduled jobs...')
300+
301+
this.scheduledJobs.forEach((job, index) => {
302+
try {
303+
job.cancel()
304+
console.log(`✅ Cancelled job ${index + 1}`)
305+
} catch (error) {
306+
console.error(`❌ Error cancelling job ${index + 1}:`, error)
307+
}
308+
})
309+
310+
console.log('✅ All scheduled jobs cancelled')
311+
}
157312
}

0 commit comments

Comments
 (0)