Skip to content

Commit 2270964

Browse files
authored
logging: remove duplicate seeds found error (#893)
Per discussion, the message is unnecessary and confusing (it doesn't provide enough information to act on), and it can also be triggered spuriously on crawler restart.
1 parent fd49041 commit 2270964

File tree

1 file changed

+0
-6
lines changed

1 file changed

+0
-6
lines changed

src/crawler.ts

Lines changed: 0 additions & 6 deletions
Original file line numberDiff line numberDiff line change
@@ -129,8 +129,6 @@ export class Crawler {
129129
limitHit = false;
130130
pageLimit: number;
131131

132-
dupeSeedsFound = false;
133-
134132
saveStateFiles: string[] = [];
135133
lastSaveTime: number;
136134

@@ -2487,10 +2485,6 @@ self.__bx_behaviors.selectMainBehavior();
24872485
return false;
24882486

24892487
case QueueState.DUPE_URL:
2490-
if (!this.dupeSeedsFound && depth === 0) {
2491-
logger.error("Duplicate seed URLs found and skipped");
2492-
this.dupeSeedsFound = true;
2493-
}
24942488
logger.debug(
24952489
"Page URL not queued, already seen",
24962490
{ url, ...logDetails },

0 commit comments

Comments
 (0)