Skip to content

Commit c391356

Browse files
authored
frontend dedupe: avoid passing in large list of crawl ids for 'crawl dependencies' tab (#3232)
Use the 'hasRequiredByCrawls' filter to show all crawls that are dependencies of other crawls in the collection. Part of the fix for #3230.
1 parent 2402457 commit c391356

File tree

1 file changed

+6
-30
lines changed
  • frontend/src/pages/org/collection-detail

1 file changed

+6
-30
lines changed

frontend/src/pages/org/collection-detail/dedupe.ts

Lines changed: 6 additions & 30 deletions
Original file line numberDiff line numberDiff line change
@@ -144,47 +144,23 @@ export class CollectionDetailDedupe extends BtrixElement {
144144
});
145145

146146
/**
147-
* Crawled items that are a dependency of an archived item
148-
* currently in the collection
147+
* All collection dependencies:
148+
* Crawled items in the collection that are a dependency of another
149+
* archived item currently in the collection
149150
*/
150151
private readonly dependenciesTask = new Task(this, {
151152
task: async ([collectionId, pagination], { signal }) => {
152153
if (!collectionId) return;
153154

154-
const crawlsQuery = queryString.stringify({
155-
sortBy: "finished",
156-
sortDirection: SortDirection.Descending,
157-
collectionId,
158-
dedupeCollId: collectionId,
159-
state: finishedCrawlStates,
160-
hasRequiresCrawls: true,
161-
});
162-
163-
const { items } = await this.api.fetch<APIPaginatedList<ArchivedItem>>(
164-
`/orgs/${this.orgId}/crawls?${crawlsQuery}`,
165-
{ signal },
166-
);
167-
168-
const crawlIds = items.map(({ id }) => id);
169-
170-
if (!crawlIds.length) return;
171-
172-
// FIXME Prevent API from returning 431 by limiting IDs
173-
// should be an edge case that there are more than 100
174-
// dependencies in a collection but we would want a more
175-
// robust solution if this becomes more common.
176-
const limit = 100;
177155
const query = queryString.stringify({
178156
...pagination,
179157
sortBy: "finished",
180158
sortDirection: SortDirection.Descending,
181-
requiredByCrawls: crawlIds.slice(0, limit),
159+
collectionId,
160+
state: finishedCrawlStates,
161+
hasRequiredByCrawls: true,
182162
});
183163

184-
if (crawlIds.length > limit) {
185-
console.warn(`up to ${limit} dependencies queried`);
186-
}
187-
188164
return await this.api.fetch<APIPaginatedList<ArchivedItem>>(
189165
`/orgs/${this.orgId}/all-crawls?${query}`,
190166
{ signal },

0 commit comments

Comments
 (0)