Skip to content

Commit cab5f4c

Browse files
authored
fix: no resource found for fsimportexistingdocs (#2018) (#2019)
1 parent f685f66 commit cab5f4c

File tree

3 files changed

+77
-67
lines changed

3 files changed

+77
-67
lines changed

firestore-bigquery-export/CHANGELOG.md

Lines changed: 4 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -1,3 +1,7 @@
1+
## Version 0.1.48
2+
3+
fix - fix the error "no resource found for `fsimportexistingdocs`"
4+
15
## Version 0.1.47
26

37
fix - temporarily disable backfill feature

firestore-bigquery-export/extension.yaml

Lines changed: 1 addition & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -13,7 +13,7 @@
1313
# limitations under the License.
1414

1515
name: firestore-bigquery-export
16-
version: 0.1.47
16+
version: 0.1.48
1717
specVersion: v1beta
1818

1919
displayName: Stream Firestore to BigQuery

firestore-bigquery-export/functions/src/index.ts

Lines changed: 72 additions & 66 deletions
Original file line numberDiff line numberDiff line change
@@ -207,69 +207,75 @@ export const initBigQuerySync = functions.tasks
207207
return;
208208
});
209209

210-
// exports.fsimportexistingdocs = functions.tasks
211-
// .taskQueue()
212-
// .onDispatch(async (data, context) => {
213-
// const runtime = getExtensions().runtime();
214-
// if (!config.doBackfill || !config.importCollectionPath) {
215-
// await runtime.setProcessingState(
216-
// "PROCESSING_COMPLETE",
217-
// "Completed. No existing documents imported into BigQuery."
218-
// );
219-
// return;
220-
// }
221-
222-
// const offset = (data["offset"] as number) ?? 0;
223-
// const docsCount = (data["docsCount"] as number) ?? 0;
224-
225-
// const query = config.useCollectionGroupQuery
226-
// ? getFirestore(config.databaseId).collectionGroup(
227-
// config.importCollectionPath.split("/")[
228-
// config.importCollectionPath.split("/").length - 1
229-
// ]
230-
// )
231-
// : getFirestore(config.databaseId).collection(config.importCollectionPath);
232-
233-
// const snapshot = await query
234-
// .offset(offset)
235-
// .limit(config.docsPerBackfill)
236-
// .get();
237-
238-
// const rows = snapshot.docs.map((d) => {
239-
// return {
240-
// timestamp: new Date().toISOString(),
241-
// operation: ChangeType.IMPORT,
242-
// documentName: `projects/${config.bqProjectId}/databases/(default)/documents/${d.ref.path}`,
243-
// documentId: d.id,
244-
// eventId: "",
245-
// pathParams: resolveWildcardIds(config.importCollectionPath, d.ref.path),
246-
// data: eventTracker.serializeData(d.data()),
247-
// };
248-
// });
249-
// try {
250-
// await eventTracker.record(rows);
251-
// } catch (err: any) {
252-
// /** If configured, event tracker will handle failed rows in a backup collection */
253-
// functions.logger.log(err);
254-
// }
255-
// if (rows.length == config.docsPerBackfill) {
256-
// // There are more documents to import - enqueue another task to continue the backfill.
257-
// const queue = getFunctions().taskQueue(
258-
// `locations/${config.location}/functions/fsimportexistingdocs`,
259-
// config.instanceId
260-
// );
261-
// await queue.enqueue({
262-
// offset: offset + config.docsPerBackfill,
263-
// docsCount: docsCount + rows.length,
264-
// });
265-
// } else {
266-
// // We are finished, set the processing state to report back how many docs were imported.
267-
// runtime.setProcessingState(
268-
// "PROCESSING_COMPLETE",
269-
// `Successfully imported ${
270-
// docsCount + rows.length
271-
// } documents into BigQuery`
272-
// );
273-
// }
274-
// await events.recordCompletionEvent({ context });
275-
// });
210+
exports.fsimportexistingdocs = functions.tasks
211+
.taskQueue()
212+
.onDispatch(async (data, context) => {
213+
const runtime = getExtensions().runtime();
214+
await runtime.setProcessingState(
215+
"PROCESSING_COMPLETE",
216+
"Completed. No existing documents imported into BigQuery."
217+
);
218+
return;
219+
220+
// if (!config.doBackfill || !config.importCollectionPath) {
221+
// await runtime.setProcessingState(
222+
// "PROCESSING_COMPLETE",
223+
// "Completed. No existing documents imported into BigQuery."
224+
// );
225+
// return;
226+
// }
227+
228+
// const offset = (data["offset"] as number) ?? 0;
229+
// const docsCount = (data["docsCount"] as number) ?? 0;
230+
231+
// const query = config.useCollectionGroupQuery
232+
// ? getFirestore(config.databaseId).collectionGroup(
233+
// config.importCollectionPath.split("/")[
234+
// config.importCollectionPath.split("/").length - 1
235+
// ]
236+
// )
237+
// : getFirestore(config.databaseId).collection(config.importCollectionPath);
238+
239+
// const snapshot = await query
240+
// .offset(offset)
241+
// .limit(config.docsPerBackfill)
242+
// .get();
243+
244+
// const rows = snapshot.docs.map((d) => {
245+
// return {
246+
// timestamp: new Date().toISOString(),
247+
// operation: ChangeType.IMPORT,
248+
// documentName: `projects/${config.bqProjectId}/databases/(default)/documents/${d.ref.path}`,
249+
// documentId: d.id,
250+
// eventId: "",
251+
// pathParams: resolveWildcardIds(config.importCollectionPath, d.ref.path),
252+
// data: eventTracker.serializeData(d.data()),
253+
// };
254+
// });
255+
// try {
256+
// await eventTracker.record(rows);
257+
// } catch (err: any) {
258+
// /** If configured, event tracker will handle failed rows in a backup collection */
259+
// functions.logger.log(err);
260+
// }
261+
// if (rows.length == config.docsPerBackfill) {
262+
// // There are more documents to import - enqueue another task to continue the backfill.
263+
// const queue = getFunctions().taskQueue(
264+
// `locations/${config.location}/functions/fsimportexistingdocs`,
265+
// config.instanceId
266+
// );
267+
// await queue.enqueue({
268+
// offset: offset + config.docsPerBackfill,
269+
// docsCount: docsCount + rows.length,
270+
// });
271+
// } else {
272+
// // We are finished, set the processing state to report back how many docs were imported.
273+
// runtime.setProcessingState(
274+
// "PROCESSING_COMPLETE",
275+
// `Successfully imported ${
276+
// docsCount + rows.length
277+
// } documents into BigQuery`
278+
// );
279+
// }
280+
// await events.recordCompletionEvent({ context });
281+
});

0 commit comments

Comments
 (0)