Skip to content

Commit 6c1bc76

Browse files
committed
Add more info-level logging for sync processes
1 parent d6fe6c4 commit 6c1bc76

File tree

4 files changed: +36 −2 lines changed

app/src/components/queueManager.js

Lines changed: 5 additions & 2 deletions
Original file line numberDiff line numberDiff line change
@@ -57,7 +57,10 @@ class QueueManager {
5757
checkQueue() {
5858
if (!this.isBusy && !this.toClose) {
5959
objectQueueService.queueSize().then(size => {
60-
if (size > 0) this.processNextJob();
60+
if (size > 0) {
61+
log.verbose(`There are ${size} jobs in the queue to process`, { function: 'checkQueue' });
62+
this.processNextJob();
63+
}
6164
}).catch((err) => {
6265
log.error(`Error encountered while checking queue: ${err.message}`, { function: 'checkQueue', error: err });
6366
});
@@ -93,7 +96,7 @@ class QueueManager {
9396
this.isBusy = true;
9497
job = response[0];
9598

96-
log.verbose(`Started processing job id ${job.id}`, { function: 'processNextJob', job: job });
99+
log.info(`Started processing job id ${job.id}`, { function: 'processNextJob', job: job });
97100

98101
const objectId = await syncService.syncJob(job.path, job.bucketId, job.full, job.createdBy);
99102

app/src/controllers/sync.js

Lines changed: 16 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -59,8 +59,14 @@ const controller = {
5959
// parent + child bucket records already in COMS db
6060
const dbChildBuckets = await bucketService.searchChildBuckets(parentBucket, false, userId);
6161
let dbBuckets = [parentBucket].concat(dbChildBuckets);
62+
log.info(`Found ${dbBuckets.length} bucket records in COMS db for parent bucketId ${bucketId}`,
63+
{ function: 'syncBucketRecursive' });
64+
6265
// 'folders' that exist below (and including) the parent 'folder' in S3
6366
const s3Response = await storageService.listAllObjectVersions({ bucketId: bucketId, precisePath: false });
67+
log.info(`Found ${s3Response.Versions.length} object versions and ${s3Response.DeleteMarkers.length}
68+
delete markers in S3 for bucketId ${bucketId}`, { function: 'syncBucketRecursive' });
69+
6470
const s3Keys = [...new Set([
6571
...s3Response.DeleteMarkers.map(object => formatS3KeyForCompare(object.Key)),
6672
...s3Response.Versions.map(object => formatS3KeyForCompare(object.Key)),
@@ -78,6 +84,7 @@ const controller = {
7884
userId,
7985
trx
8086
);
87+
log.info(`${syncedBuckets.length} buckets records synced`, { function: 'syncBucketRecursive' });
8188

8289
/**
8390
* Queue objects in all the folders for syncing
@@ -107,6 +114,8 @@ const controller = {
107114
const userId = await userService.getCurrentUserId(getCurrentIdentity(req.currentUser, SYSTEM_USER), SYSTEM_USER);
108115

109116
const s3Objects = await storageService.listAllObjectVersions({ bucketId: bucketId, filterLatest: true });
117+
log.info(`Found ${s3Objects.Versions.length} object versions and ${s3Objects.DeleteMarkers.length}
118+
delete markers in S3 for bucketId ${bucketId}`, { function: 'syncBucketSingle' });
110119

111120
const response = await utils.trxWrapper(async (trx) => {
112121
return this.queueObjectRecords([bucket], s3Objects, userId, trx);
@@ -139,6 +148,8 @@ const controller = {
139148
oldDbBuckets.map(dbBucket =>
140149
bucketService.delete(dbBucket.bucketId, trx)
141150
.then(() => {
151+
log.info(`Deleted bucketId ${dbBucket.bucketId} from COMS db as key ${dbBucket.key} not found in S3`,
152+
{ function: 'syncBucketRecords' });
142153
dbBuckets = dbBuckets.filter(b => b.bucketId !== dbBucket.bucketId);
143154
})
144155
)
@@ -161,6 +172,7 @@ const controller = {
161172
const newS3Keys = s3Keys.filter(k => !dbBuckets.map(b => b.key).includes(k));
162173
await Promise.all(
163174
newS3Keys.map(s3Key => {
175+
log.info(`Creating new bucket record in COMS db for S3 key ${s3Key}`, { function: 'syncBucketRecords' });
164176
const data = {
165177
bucketName: s3Key.substring(s3Key.lastIndexOf('/') + 1),
166178
accessKeyId: parentBucket.accessKeyId,
@@ -204,6 +216,9 @@ const controller = {
204216
bucketId: dbBuckets.map(b => b.bucketId)
205217
}, trx);
206218

219+
log.info(`Found ${dbObjects.data.length} object records in COMS db for ${dbBuckets.length} buckets`,
220+
{ function: 'queueObjectRecords' });
221+
207222
/**
208223
* merge arrays of objects from COMS db and S3 to form an array of jobs with format:
209224
* [ { path: '/images/img3.jpg', bucketId: '123' }, { path: '/images/album1/img1.jpg', bucketId: '456' } ]
@@ -239,6 +254,7 @@ const controller = {
239254

240255
// merge and remove duplicates
241256
const jobs = [...new Map(objects.map(o => [o.path, o])).values()];
257+
log.info(`Prepared ${jobs.length} jobs to enqueue to object queue`, { function: 'queueObjectRecords' });
242258

243259
// create jobs in COMS db object_queue for each object
244260
// update 'lastSyncRequestedDate' value in COMS db for each bucket

app/src/services/objectQueue.js

Lines changed: 6 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -1,6 +1,8 @@
11
const { NIL: SYSTEM_USER } = require('uuid');
22

33
const { ObjectQueue } = require('../db/models');
4+
const log = require('../components/log')(module.filename);
5+
46

57
/**
68
* Max number of parameters in a prepared statement (this is a Postgres hard-coded limit).
@@ -53,6 +55,8 @@ const service = {
5355
let trx;
5456
try {
5557
trx = etrx ? etrx : await ObjectQueue.startTransaction();
58+
log.info(`Enqueuing ${jobs.length} jobs to object queue (full: ${full},
59+
retries: ${retries}, createdBy: ${createdBy})`, { function: 'enqueue' });
5660

5761
const jobsArray = jobs.map(job => ({
5862
bucketId: job.bucketId,
@@ -78,6 +82,8 @@ const service = {
7882
const response = await ObjectQueue.query(trx).insert(batch).onConflict().ignore();
7983

8084
totalInserted += response.reduce((acc, job) => job?.id ? acc + 1 : acc, 0);
85+
log.verbose(`Inserted ${response.length} jobs to object queue (batch starting at index ${i})`,
86+
{ function: 'enqueue' });
8187
}
8288

8389
if (!etrx) await trx.commit();

app/src/services/sync.js

Lines changed: 9 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -98,11 +98,14 @@ const service = {
9898
// 1. Sync Object
9999
const object = await service.syncObject(path, bucketId, userId, trx)
100100
.then(obj => obj.object);
101+
log.info(`Synchronized object at path ${path} in bucket ${bucketId}`,
102+
{ function: 'syncJob', objectId: object?.id });
101103

102104
// 2. Sync Object Versions
103105
let versions = [];
104106
if (object) {
105107
versions = await service.syncVersions(object, userId, trx);
108+
log.info(`Synchronized ${versions.length} versions for object id ${object.id}`, { function: 'syncJob' });
106109
}
107110

108111
// 3. Sync Version Metadata & Tags
@@ -407,8 +410,10 @@ const service = {
407410

408411
// COMS Tags
409412
const comsTags = comsTagsForVersion[0]?.tagset ?? [];
413+
// log.info(`Found ${comsTags.length} tags in COMS for version id ${comsVersion.id}`, { function: 'syncTags' });
410414
// S3 Tags
411415
const s3Tags = toLowerKeys(s3TagsForVersion?.TagSet ?? []);
416+
// log.info(`Found ${s3Tags.length} tags in S3 for version id ${comsVersion.id}`, { function: 'syncTags' });
412417
/**
413418
* Add coms-id tag to latest version in S3 if not already present
414419
* NOTE: For a sync job the _deriveObjectId() function will have already added
@@ -501,8 +506,12 @@ const service = {
501506

502507
// COMS Metadata
503508
const comsMetadata = comsMetadataForVersion[0]?.metadata ?? [];
509+
// log.info(`Found ${comsMetadata.length} metadata entries in COMS for version id ${comsVersion.id}`,
510+
// { function: 'syncMetadata' });
504511
// S3 Metadata
505512
const s3Metadata = getKeyValue(s3ObjectHead?.Metadata ?? {});
513+
// log.info(`Found ${s3Metadata.length} metadata entries in S3 for version id ${comsVersion.id}`,
514+
// { function: 'syncMetadata' });
506515

507516
// Dissociate Metadata not in S3
508517
const oldMetadata = [];

0 commit comments

Comments (0)