-
Notifications
You must be signed in to change notification settings - Fork 0
Expand file tree
/
Copy pathretry-failed-ads.js
More file actions
227 lines (194 loc) · 9.39 KB
/
retry-failed-ads.js
File metadata and controls
227 lines (194 loc) · 9.39 KB
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
61
62
63
64
65
66
67
68
69
70
71
72
73
74
75
76
77
78
79
80
81
82
83
84
85
86
87
88
89
90
91
92
93
94
95
96
97
98
99
100
101
102
103
104
105
106
107
108
109
110
111
112
113
114
115
116
117
118
119
120
121
122
123
124
125
126
127
128
129
130
131
132
133
134
135
136
137
138
139
140
141
142
143
144
145
146
147
148
149
150
151
152
153
154
155
156
157
158
159
160
161
162
163
164
165
166
167
168
169
170
171
172
173
174
175
176
177
178
179
180
181
182
183
184
185
186
187
188
189
190
191
192
193
194
195
196
197
198
199
200
201
202
203
204
205
206
207
208
209
210
211
212
213
214
215
216
217
218
219
220
221
222
223
224
225
226
227
const LinkedInAdScraper = require('./main-scraper-lynx');
const fs = require('fs');
const path = require('path');
/**
 * Retry failed ads from a previous scraping run.
 *
 * Reads the results JSON file named on the command line, re-scrapes every
 * entry in its `failedAds` array via LinkedInAdScraper, optionally downloads
 * ad images and the company logo (scraping level >= 3), prints a summary,
 * and writes a new `scraping_results_RETRY_<timestamp>.json` next to the
 * input file.
 *
 * CLI arguments (read from process.argv):
 *   argv[2] - path to the previous results JSON file (required)
 *   argv[3] - scraping level (optional; defaults to 3)
 *
 * Exits the process directly: code 1 on missing argument or unknown file,
 * code 0 when the results file contains no failed ads to retry.
 *
 * Usage:
 *   node retry-failed-ads.js <results_json_file> [scrapingLevel]
 *
 * Example:
 *   node retry-failed-ads.js company_89771_20251022/scraping_results_lynx_1761154119634.json 3
 */
async function retryFailedAds() {
  const resultsFile = process.argv[2];
  // FIX: always pass a radix to parseInt; `|| 3` keeps the original
  // fallback behavior for a missing or non-numeric argument.
  const scrapingLevel = Number.parseInt(process.argv[3], 10) || 3;

  if (!resultsFile) {
    console.error('Error: Please provide a results JSON file');
    console.log('\nUsage: node retry-failed-ads.js <results_json_file> [scrapingLevel]');
    console.log('\nExample:');
    console.log(' node retry-failed-ads.js company_89771_20251022/scraping_results_lynx_1761154119634.json 3');
    process.exit(1);
  }

  // Check if file exists
  if (!fs.existsSync(resultsFile)) {
    console.error(`Error: File not found: ${resultsFile}`);
    process.exit(1);
  }

  // Read previous results
  console.log(`Reading previous results from: ${resultsFile}`);
  const previousResults = JSON.parse(fs.readFileSync(resultsFile, 'utf8'));

  // Check if there are failed ads
  if (!previousResults.failedAds || previousResults.failedAds.length === 0) {
    console.log('No failed ads to retry.');
    console.log('\nNote: This results file may be from an older version that didn\'t track failed ads.');
    console.log('Re-run the original scraping command to generate a new results file with failure tracking.');
    process.exit(0);
  }

  console.log(`\nFound ${previousResults.failedAds.length} failed ads to retry:`);
  previousResults.failedAds.forEach((ad, index) => {
    console.log(` ${index + 1}. Ad ID ${ad.adId} - Reason: ${ad.reason}`);
  });

  // Get output directory from results file path so retry artifacts land
  // alongside the originals
  const outputDir = path.dirname(resultsFile);
  const companyId = previousResults.companyId;

  // Create scraper instance sized to exactly the failed ads
  const scraper = new LinkedInAdScraper({
    maxAdsToProcess: previousResults.failedAds.length,
    downloadImages: scrapingLevel >= 3,
    saveResults: true,
    delayBetweenRequests: 500, // Slightly longer delay for retries
    outputDir: outputDir
  });

  console.log(`\nRetrying ${previousResults.failedAds.length} ads...`);
  console.log('='.repeat(60));

  // Initialize browser
  await scraper.initBrowser(previousResults.targetUrl);

  // Accumulator for this retry pass; mirrors the original run's result shape
  // plus `originalResultsFile` / `retryAttempt` provenance fields.
  const results = {
    startTime: new Date().toISOString(),
    originalResultsFile: resultsFile,
    companyId: previousResults.companyId,
    companyName: previousResults.companyName,
    scrapingLevel,
    retryAttempt: true,
    processedAds: [],
    failedAds: [],
    downloadResults: [],
    summary: {
      totalAdsToRetry: previousResults.failedAds.length,
      successfulDetails: 0,
      stillFailed: 0,
      videoAdsSkipped: 0,
      successfulDownloads: 0,
      failedDownloads: 0
    }
  };

  try {
    // Track logo downloads so each company's logo is fetched at most once
    const downloadedLogosByCompany = new Set();

    // Process each failed ad sequentially — deliberate, to rate-limit requests
    for (let i = 0; i < previousResults.failedAds.length; i++) {
      const failedAd = previousResults.failedAds[i];
      console.log(`\n[${i + 1}/${previousResults.failedAds.length}] Retrying Ad ID: ${failedAd.adId}`);
      console.log(` Previous failure reason: ${failedAd.reason}`);

      try {
        const adDetails = await scraper.level2ScrapingWithLynx(failedAd.adUrl);

        if (adDetails && adDetails.headline) {
          console.log(` ✓ Success! Headline: ${adDetails.headline.substring(0, 60)}...`);
          // FIX: added missing statement-terminating semicolon
          console.log(` Company: ${adDetails.company || 'N/A'}`);
          console.log(` Ad Format: ${adDetails.adFormat || 'N/A'}`);

          // Check if video ad — video ads carry no static image to collect
          const isVideoAd = adDetails.adFormat && adDetails.adFormat.toLowerCase().includes('video');
          if (isVideoAd) {
            console.log(` SKIPPED: Video ad (no static image)`);
            results.summary.videoAdsSkipped++;
          } else {
            results.processedAds.push(adDetails);
            results.summary.successfulDetails++;

            // Download images if Level 3
            if (scrapingLevel >= 3 && adDetails.imageUrl) {
              console.log(` Downloading image...`);
              try {
                const downloadResults = await scraper.imageDownloader.downloadAdImages(adDetails);
                results.downloadResults.push({
                  adId: adDetails.adId,
                  results: downloadResults
                });
                // A downloader entry without an `error` field counts as success
                const successfulDownloads = downloadResults.filter(r => !r.error).length;
                results.summary.successfulDownloads += successfulDownloads;
                results.summary.failedDownloads += downloadResults.length - successfulDownloads;
              } catch (error) {
                console.error(` Failed to download image: ${error.message}`);
                results.summary.failedDownloads++;
              }
            }

            // Download the company logo once per company (Level 3 only)
            if (scrapingLevel >= 3 && adDetails.logoUrl && companyId && !downloadedLogosByCompany.has(companyId)) {
              console.log(` Downloading company logo...`);
              try {
                const ext = scraper.imageDownloader.getFileExtension(adDetails.logoUrl);
                const filename = `${companyId}_logo${ext}`;
                await scraper.imageDownloader.downloadImage(adDetails.logoUrl, filename);
                downloadedLogosByCompany.add(companyId);
                // FIX: interpolate the actual filename (was a broken
                // `$(unknown)` placeholder that printed literally)
                console.log(` Logo downloaded: ${filename}`);
              } catch (error) {
                console.error(` Failed to download logo: ${error.message}`);
              }
            }
          }
        } else {
          console.log(` ✗ Still no headline found`);
          results.summary.stillFailed++;
          results.failedAds.push({
            adId: failedAd.adId,
            adUrl: failedAd.adUrl,
            reason: 'No headline found (retry attempt)'
          });
        }
      } catch (error) {
        console.error(` ✗ ERROR: ${error.message}`);
        results.summary.stillFailed++;
        results.failedAds.push({
          adId: failedAd.adId,
          adUrl: failedAd.adUrl,
          reason: `${error.message} (retry attempt)`
        });
      }

      // Add delay between retries (skipped after the final ad)
      if (i < previousResults.failedAds.length - 1) {
        await new Promise(resolve => setTimeout(resolve, 500));
      }
    }

    results.endTime = new Date().toISOString();

    // Print summary
    console.log('\n' + '='.repeat(60));
    console.log('RETRY SUMMARY');
    console.log('='.repeat(60));
    console.log(`Total ads retried: ${results.summary.totalAdsToRetry}`);
    console.log(`Successfully recovered: ${results.summary.successfulDetails}`);
    console.log(`Video ads skipped: ${results.summary.videoAdsSkipped}`);
    console.log(`Still failed: ${results.summary.stillFailed}`);
    if (scrapingLevel >= 3) {
      console.log(`Successful downloads: ${results.summary.successfulDownloads}`);
      console.log(`Failed downloads: ${results.summary.failedDownloads}`);
    }
    console.log('='.repeat(60));

    // Save retry results alongside the original results file
    const timestamp = Date.now();
    const retryFilename = `scraping_results_RETRY_${timestamp}.json`;
    const retryFilepath = path.join(outputDir, retryFilename);
    fs.writeFileSync(retryFilepath, JSON.stringify(results, null, 2));
    console.log(`\nRetry results saved to: ${retryFilepath}`);

    if (results.summary.successfulDetails > 0) {
      console.log(`\nRecovered ${results.summary.successfulDetails} ads!`);
    }
    if (results.summary.stillFailed > 0) {
      console.log(`\n${results.summary.stillFailed} ads still failed. Check the retry results for details.`);
    }
  } finally {
    // Always release the browser, even when the retry loop throws
    await scraper.closeBrowser();
  }
}
// CLI entry point: only run the retry flow when this file is executed
// directly (`node retry-failed-ads.js ...`), not when it is imported.
if (require.main === module) {
  (async () => {
    try {
      await retryFailedAds();
      console.log('\nRetry completed!');
      process.exit(0);
    } catch (error) {
      console.error('Retry failed:', error.message);
      process.exit(1);
    }
  })();
}

module.exports = retryFailedAds;