Commit

fix: no resource found for fsimportexistingdocs (#2018) (#2019)
pr-Mais authored Mar 26, 2024
1 parent f685f66 commit cab5f4c
Showing 3 changed files with 77 additions and 67 deletions.
4 changes: 4 additions & 0 deletions firestore-bigquery-export/CHANGELOG.md
@@ -1,3 +1,7 @@
+## Version 0.1.48
+
+fix - fix the error "no resource found for `fsimportexistingdocs`"
+
 ## Version 0.1.47

 fix - temporarily disable backfill feature
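The root cause, as the changelog entries and the diff below suggest: version 0.1.47 temporarily disabled the backfill by commenting out the `fsimportexistingdocs` export, while `extension.yaml` still declares that task-queue function as a resource, so deployment failed with "no resource found". The fix restores a matching export that completes immediately. A minimal sketch of the idea (illustrative only; the actual handler appears in the `index.ts` diff below):

```ts
import * as functions from "firebase-functions";

// The export name must match the function resource declared in
// extension.yaml; a no-op task-queue handler is enough to keep the
// resource deployable while the backfill stays disabled.
exports.fsimportexistingdocs = functions.tasks
  .taskQueue()
  .onDispatch(async () => {
    // Intentionally does nothing in this release.
  });
```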
2 changes: 1 addition & 1 deletion firestore-bigquery-export/extension.yaml
@@ -13,7 +13,7 @@
 # limitations under the License.

 name: firestore-bigquery-export
-version: 0.1.47
+version: 0.1.48
 specVersion: v1beta

 displayName: Stream Firestore to BigQuery
138 changes: 72 additions & 66 deletions firestore-bigquery-export/functions/src/index.ts
@@ -207,69 +207,75 @@ export const initBigQuerySync = functions.tasks
     return;
   });

-// exports.fsimportexistingdocs = functions.tasks
-//   .taskQueue()
-//   .onDispatch(async (data, context) => {
-//     const runtime = getExtensions().runtime();
-//     if (!config.doBackfill || !config.importCollectionPath) {
-//       await runtime.setProcessingState(
-//         "PROCESSING_COMPLETE",
-//         "Completed. No existing documents imported into BigQuery."
-//       );
-//       return;
-//     }
-
-//     const offset = (data["offset"] as number) ?? 0;
-//     const docsCount = (data["docsCount"] as number) ?? 0;
-
-//     const query = config.useCollectionGroupQuery
-//       ? getFirestore(config.databaseId).collectionGroup(
-//           config.importCollectionPath.split("/")[
-//             config.importCollectionPath.split("/").length - 1
-//           ]
-//         )
-//       : getFirestore(config.databaseId).collection(config.importCollectionPath);
-
-//     const snapshot = await query
-//       .offset(offset)
-//       .limit(config.docsPerBackfill)
-//       .get();
-
-//     const rows = snapshot.docs.map((d) => {
-//       return {
-//         timestamp: new Date().toISOString(),
-//         operation: ChangeType.IMPORT,
-//         documentName: `projects/${config.bqProjectId}/databases/(default)/documents/${d.ref.path}`,
-//         documentId: d.id,
-//         eventId: "",
-//         pathParams: resolveWildcardIds(config.importCollectionPath, d.ref.path),
-//         data: eventTracker.serializeData(d.data()),
-//       };
-//     });
-//     try {
-//       await eventTracker.record(rows);
-//     } catch (err: any) {
-//       /** If configured, event tracker will handle failed rows in a backup collection */
-//       functions.logger.log(err);
-//     }
-//     if (rows.length == config.docsPerBackfill) {
-//       // There are more documents to import - enqueue another task to continue the backfill.
-//       const queue = getFunctions().taskQueue(
-//         `locations/${config.location}/functions/fsimportexistingdocs`,
-//         config.instanceId
-//       );
-//       await queue.enqueue({
-//         offset: offset + config.docsPerBackfill,
-//         docsCount: docsCount + rows.length,
-//       });
-//     } else {
-//       // We are finished, set the processing state to report back how many docs were imported.
-//       runtime.setProcessingState(
-//         "PROCESSING_COMPLETE",
-//         `Successfully imported ${
-//           docsCount + rows.length
-//         } documents into BigQuery`
-//       );
-//     }
-//     await events.recordCompletionEvent({ context });
-//   });
+exports.fsimportexistingdocs = functions.tasks
+  .taskQueue()
+  .onDispatch(async (data, context) => {
+    const runtime = getExtensions().runtime();
+    await runtime.setProcessingState(
+      "PROCESSING_COMPLETE",
+      "Completed. No existing documents imported into BigQuery."
+    );
+    return;
+
+    // if (!config.doBackfill || !config.importCollectionPath) {
+    //   await runtime.setProcessingState(
+    //     "PROCESSING_COMPLETE",
+    //     "Completed. No existing documents imported into BigQuery."
+    //   );
+    //   return;
+    // }
+
+    // const offset = (data["offset"] as number) ?? 0;
+    // const docsCount = (data["docsCount"] as number) ?? 0;
+
+    // const query = config.useCollectionGroupQuery
+    //   ? getFirestore(config.databaseId).collectionGroup(
+    //       config.importCollectionPath.split("/")[
+    //         config.importCollectionPath.split("/").length - 1
+    //       ]
+    //     )
+    //   : getFirestore(config.databaseId).collection(config.importCollectionPath);
+
+    // const snapshot = await query
+    //   .offset(offset)
+    //   .limit(config.docsPerBackfill)
+    //   .get();
+
+    // const rows = snapshot.docs.map((d) => {
+    //   return {
+    //     timestamp: new Date().toISOString(),
+    //     operation: ChangeType.IMPORT,
+    //     documentName: `projects/${config.bqProjectId}/databases/(default)/documents/${d.ref.path}`,
+    //     documentId: d.id,
+    //     eventId: "",
+    //     pathParams: resolveWildcardIds(config.importCollectionPath, d.ref.path),
+    //     data: eventTracker.serializeData(d.data()),
+    //   };
+    // });
+    // try {
+    //   await eventTracker.record(rows);
+    // } catch (err: any) {
+    //   /** If configured, event tracker will handle failed rows in a backup collection */
+    //   functions.logger.log(err);
+    // }
+    // if (rows.length == config.docsPerBackfill) {
+    //   // There are more documents to import - enqueue another task to continue the backfill.
+    //   const queue = getFunctions().taskQueue(
+    //     `locations/${config.location}/functions/fsimportexistingdocs`,
+    //     config.instanceId
+    //   );
+    //   await queue.enqueue({
+    //     offset: offset + config.docsPerBackfill,
+    //     docsCount: docsCount + rows.length,
+    //   });
+    // } else {
+    //   // We are finished, set the processing state to report back how many docs were imported.
+    //   runtime.setProcessingState(
+    //     "PROCESSING_COMPLETE",
+    //     `Successfully imported ${
+    //       docsCount + rows.length
+    //     } documents into BigQuery`
+    //   );
+    // }
+    // await events.recordCompletionEvent({ context });
+  });
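For reference, the backfill logic preserved above in comments follows an offset-based task-chaining pattern: each dispatch reads one page of documents, records it, and re-enqueues itself with an advanced offset until a short page signals completion. A self-contained sketch of that pattern, with `PAGE_SIZE`, `COLLECTION`, and `QUEUE` as hypothetical stand-ins for the extension's config values:

```ts
import { initializeApp } from "firebase-admin/app";
import { getFirestore } from "firebase-admin/firestore";
import { getFunctions } from "firebase-admin/functions";
import * as functions from "firebase-functions";

initializeApp();

// Hypothetical stand-ins for config.docsPerBackfill, the import
// collection path, and the task queue used by the real extension.
const PAGE_SIZE = 200;
const COLLECTION = "posts";
const QUEUE = "locations/us-central1/functions/fsimportexistingdocs";

export const backfillPage = functions.tasks
  .taskQueue()
  .onDispatch(async (data) => {
    const offset = (data["offset"] as number) ?? 0;

    // Read one page of existing documents, skipping whatever earlier
    // tasks in the chain already processed.
    const snapshot = await getFirestore()
      .collection(COLLECTION)
      .offset(offset)
      .limit(PAGE_SIZE)
      .get();

    // ...transform snapshot.docs and write them to the sink here...

    if (snapshot.docs.length === PAGE_SIZE) {
      // A full page means more documents may remain: chain the next
      // task rather than looping, so each dispatch stays short.
      await getFunctions()
        .taskQueue(QUEUE)
        .enqueue({ offset: offset + PAGE_SIZE });
    }
  });
```

Chaining tasks this way keeps each invocation well inside function time limits and makes the backfill resumable; the dispatch that reads a short page is the natural place to report completion, as the commented code does with `runtime.setProcessingState`.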
