refactor: don't create giant array, process in batches of 500

This commit is contained in:
Barış Soner Uşaklı
2026-02-17 13:01:35 -05:00
parent 652629df69
commit ed8cbd6ec3

View File

@@ -2,7 +2,6 @@
'use strict';
const db = require('../../database');
const batch = require('../../batch');
module.exports = {
name: 'Add id field to all topic events',
@@ -12,11 +11,11 @@ module.exports = {
let nextId = await db.getObjectField('global', 'nextTopicEventId');
nextId = parseInt(nextId, 10) || 0;
progress.total = Math.max(0, nextId - 1);
const ids = [];
for (let i = 1; i < nextId; i++) { const BATCH_SIZE = 500;
ids.push(i);
} async function processBatch(eids) {
await batch.processArray(ids, async (eids) => {
const eventData = await db.getObjects(eids.map(eid => `topicEvent:${eid}`));
const bulkSet = [];
eventData.forEach((event, idx) => {
@@ -28,10 +27,20 @@ module.exports = {
}
});
await db.setObjectBulk(bulkSet);
progress.incr(eids.length); }
}, {
batch: 500, for (let i = 1; i < nextId; i++) {
progress, ids.push(i);
}); if (ids.length >= BATCH_SIZE) {
await processBatch(ids);
progress.incr(ids.length);
ids.length = 0;
}
}
if (ids.length > 0) {
await processBatch(ids);
progress.incr(ids.length);
}
},
};