Skip to content
Draft
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
4 changes: 4 additions & 0 deletions CountItems/CountManager.js
Original file line number Diff line number Diff line change
Expand Up @@ -180,6 +180,10 @@ class CountManager {
this.store.bucketList = this.store.bucketList.concat(transformedInfos);
bucketInfos.forEach(bucketInfo => {
this.q.push(bucketInfo);
const key = `${bucketInfo.getName()}_${new Date(bucketInfo.getCreationDate()).getTime()}`;
if (!this.dataMetrics.bucket[key]) {
this.dataMetrics.bucket[key] = consolidateDataMetrics(null, null);
}
});
this.log.debug('added work', {
workInQueue: this.q.length(),
Expand Down
45 changes: 45 additions & 0 deletions tests/unit/CountItems/CountManager.js
Original file line number Diff line number Diff line change
Expand Up @@ -974,6 +974,51 @@ describe('CountItems::CountManager', () => {
expect(m.q.paused).toBeTruthy();
});

test('should pre-seed dataMetrics.bucket with zero-value entries for all buckets', () => {
    // addWork() should create a zeroed metrics entry per bucket, keyed by
    // `<bucketName>_<creationTimestampMs>`.
    const manager = new CountManager({
        log: new DummyLogger(),
        workers: createWorkers(1),
        maxConcurrent: 1,
    });
    const info = BucketInfo.deSerialize(stringifiedBucketMD);
    manager.addWork({
        bucketCount: 1,
        bucketInfos: [info],
    });
    const creationTs = new Date(info.getCreationDate()).getTime();
    const entry = manager.dataMetrics.bucket[`${info.getName()}_${creationTs}`];
    expect(entry).toBeDefined();
    expect(entry.usedCapacity.current).toEqual(0n);
    expect(entry.usedCapacity.nonCurrent).toEqual(0n);
    expect(entry.objectCount.current).toEqual(0n);
    expect(entry.objectCount.nonCurrent).toEqual(0n);
    expect(entry.objectCount.deleteMarker).toEqual(0n);
});

test('should not overwrite existing dataMetrics.bucket entries on addWork', () => {
    // An entry already present for a bucket's key must survive a subsequent
    // addWork() call untouched.
    const manager = new CountManager({
        log: new DummyLogger(),
        workers: createWorkers(1),
        maxConcurrent: 1,
    });
    const info = BucketInfo.deSerialize(stringifiedBucketMD);
    const creationTs = new Date(info.getCreationDate()).getTime();
    const key = `${info.getName()}_${creationTs}`;
    const preExisting = {
        usedCapacity: { current: 100n, nonCurrent: 50n },
        objectCount: { current: 10n, nonCurrent: 5n, deleteMarker: 0n },
    };
    manager.dataMetrics.bucket[key] = preExisting;
    manager.addWork({
        bucketCount: 1,
        bucketInfos: [info],
    });
    expect(manager.dataMetrics.bucket[key].usedCapacity.current).toEqual(100n);
    expect(manager.dataMetrics.bucket[key].objectCount.current).toEqual(10n);
});

test('should only allow queue to be started once', done => {
const workers = createWorkers(1);
const m = new CountManager({
Expand Down
Loading