Date: Wed, 17 Sep 2025 10:44:51 -0400
Subject: [PATCH 23/34] fix: add pre-processing step to title generation logic
so sbd doesn't fall over so badly
---
src/activitypub/helpers.js | 46 --------------------------------------
src/activitypub/notes.js | 6 ++++-
2 files changed, 5 insertions(+), 47 deletions(-)
diff --git a/src/activitypub/helpers.js b/src/activitypub/helpers.js
index e6eb2e1c08..01cfd86d10 100644
--- a/src/activitypub/helpers.js
+++ b/src/activitypub/helpers.js
@@ -339,52 +339,6 @@ Helpers.resolveObjects = async (ids) => {
return objects.length === 1 ? objects[0] : objects;
};
-const titleishTags = ['h1', 'h2', 'h3', 'h4', 'h5', 'h6', 'title', 'p', 'span'];
-const titleRegex = new RegExp(`<(${titleishTags.join('|')})>(.+?)</\\1>`, 'm');
-Helpers.generateTitle = (html) => {
- // Given an html string, generates a more appropriate title if possible
- let title;
-
- // Try the first paragraph-like element
- const match = html.match(titleRegex);
- if (match && match.index === 0) {
- title = match[2];
- }
-
- // Fall back to newline splitting (i.e. if no paragraph elements)
- title = title || html.split('\n').filter(Boolean).shift();
-
- // Discard everything after a line break element
-	title = title.replace(/<br\s*\/?>.*/g, '');
-
- // Strip html
- title = utils.stripHTMLTags(title);
-
- // Split sentences and use only first one
- const sentences = title
- .split(/(\.|\?|!)\s/)
- .reduce((memo, cur, idx, sentences) => {
- if (idx % 2) {
- memo.push(`${sentences[idx - 1]}${cur}`);
- } else if (idx === sentences.length - 1) {
- memo.push(cur);
- }
-
- return memo;
- }, []);
-
- if (sentences.length > 1) {
- title = sentences.shift();
- }
-
- // Truncate down if too long
- if (title.length > meta.config.maximumTitleLength) {
- title = `${title.slice(0, meta.config.maximumTitleLength - 3)}...`;
- }
-
- return title;
-};
-
Helpers.remoteAnchorToLocalProfile = async (content, isMarkdown = false) => {
let anchorRegex;
if (isMarkdown) {
diff --git a/src/activitypub/notes.js b/src/activitypub/notes.js
index ef4abe9add..6ccb2ca209 100644
--- a/src/activitypub/notes.js
+++ b/src/activitypub/notes.js
@@ -165,7 +165,11 @@ Notes.assert = async (uid, input, options = { skipChecks: false }) => {
// mainPid ok to leave as-is
if (!title) {
- const sentences = tokenizer.sentences(content || sourceContent, { sanitize: true });
+ // Naive pre-processing prior to sbd tokenization
+ let sbdInput = content || sourceContent;
+		sbdInput = sbdInput.replace('<br />', '<br />\n');
+
+ const sentences = tokenizer.sentences(sbdInput, { sanitize: true, newline_boundaries: true });
title = sentences.shift();
}
From 6cca55e37f0bce389c3094c5aae07ed1bbed3297 Mon Sep 17 00:00:00 2001
From: =?UTF-8?q?Bar=C4=B1=C5=9F=20Soner=20U=C5=9Fakl=C4=B1?=
Date: Wed, 17 Sep 2025 10:50:35 -0400
Subject: [PATCH 24/34] fix: use parameterized query for key lookup
---
src/database/postgres/main.js | 3 ++-
1 file changed, 2 insertions(+), 1 deletion(-)
diff --git a/src/database/postgres/main.js b/src/database/postgres/main.js
index c0838b45a0..5b3c7f7e9d 100644
--- a/src/database/postgres/main.js
+++ b/src/database/postgres/main.js
@@ -85,7 +85,8 @@ module.exports = function (module) {
text: `
SELECT o."_key"
FROM "legacy_object_live" o
- WHERE o."_key" LIKE '${match}'`,
+ WHERE o."_key" LIKE $1`,
+ values: [match],
});
return res.rows.map(r => r._key);
From 532653110c9e0400967c42352415be6dccb8e6a4 Mon Sep 17 00:00:00 2001
From: Julian Lam
Date: Wed, 17 Sep 2025 10:58:07 -0400
Subject: [PATCH 25/34] Revert "fix: add pre-processing step to title
generation logic so sbd doesn't fall over so badly"
This reverts commit f7c47429879f757e08975b5cd003416db00f5568.
---
src/activitypub/helpers.js | 46 ++++++++++++++++++++++++++++++++++++++
src/activitypub/notes.js | 6 +----
2 files changed, 47 insertions(+), 5 deletions(-)
diff --git a/src/activitypub/helpers.js b/src/activitypub/helpers.js
index 01cfd86d10..e6eb2e1c08 100644
--- a/src/activitypub/helpers.js
+++ b/src/activitypub/helpers.js
@@ -339,6 +339,52 @@ Helpers.resolveObjects = async (ids) => {
return objects.length === 1 ? objects[0] : objects;
};
+const titleishTags = ['h1', 'h2', 'h3', 'h4', 'h5', 'h6', 'title', 'p', 'span'];
+const titleRegex = new RegExp(`<(${titleishTags.join('|')})>(.+?)</\\1>`, 'm');
+Helpers.generateTitle = (html) => {
+ // Given an html string, generates a more appropriate title if possible
+ let title;
+
+ // Try the first paragraph-like element
+ const match = html.match(titleRegex);
+ if (match && match.index === 0) {
+ title = match[2];
+ }
+
+ // Fall back to newline splitting (i.e. if no paragraph elements)
+ title = title || html.split('\n').filter(Boolean).shift();
+
+ // Discard everything after a line break element
+	title = title.replace(/<br\s*\/?>.*/g, '');
+
+ // Strip html
+ title = utils.stripHTMLTags(title);
+
+ // Split sentences and use only first one
+ const sentences = title
+ .split(/(\.|\?|!)\s/)
+ .reduce((memo, cur, idx, sentences) => {
+ if (idx % 2) {
+ memo.push(`${sentences[idx - 1]}${cur}`);
+ } else if (idx === sentences.length - 1) {
+ memo.push(cur);
+ }
+
+ return memo;
+ }, []);
+
+ if (sentences.length > 1) {
+ title = sentences.shift();
+ }
+
+ // Truncate down if too long
+ if (title.length > meta.config.maximumTitleLength) {
+ title = `${title.slice(0, meta.config.maximumTitleLength - 3)}...`;
+ }
+
+ return title;
+};
+
Helpers.remoteAnchorToLocalProfile = async (content, isMarkdown = false) => {
let anchorRegex;
if (isMarkdown) {
diff --git a/src/activitypub/notes.js b/src/activitypub/notes.js
index 6ccb2ca209..ef4abe9add 100644
--- a/src/activitypub/notes.js
+++ b/src/activitypub/notes.js
@@ -165,11 +165,7 @@ Notes.assert = async (uid, input, options = { skipChecks: false }) => {
// mainPid ok to leave as-is
if (!title) {
- // Naive pre-processing prior to sbd tokenization
- let sbdInput = content || sourceContent;
-		sbdInput = sbdInput.replace('<br />', '<br />\n');
-
- const sentences = tokenizer.sentences(sbdInput, { sanitize: true, newline_boundaries: true });
+ const sentences = tokenizer.sentences(content || sourceContent, { sanitize: true });
title = sentences.shift();
}
From a6674f67a1cfb92f6236e76447e5e9213b1b5710 Mon Sep 17 00:00:00 2001
From: =?UTF-8?q?Bar=C4=B1=C5=9F=20Soner=20U=C5=9Fakl=C4=B1?=
Date: Wed, 17 Sep 2025 10:58:26 -0400
Subject: [PATCH 26/34] lint: remove unused
---
src/activitypub/helpers.js | 2 --
1 file changed, 2 deletions(-)
diff --git a/src/activitypub/helpers.js b/src/activitypub/helpers.js
index 01cfd86d10..f24d18b730 100644
--- a/src/activitypub/helpers.js
+++ b/src/activitypub/helpers.js
@@ -8,7 +8,6 @@ const validator = require('validator');
// const cheerio = require('cheerio');
const crypto = require('crypto');
-const meta = require('../meta');
const posts = require('../posts');
const categories = require('../categories');
const messaging = require('../messaging');
@@ -16,7 +15,6 @@ const request = require('../request');
const db = require('../database');
const ttl = require('../cache/ttl');
const user = require('../user');
-const utils = require('../utils');
const activitypub = require('.');
const webfingerRegex = /^(@|acct:)?[\w-.]+@.+$/;
From 5beeedd67cc2fc08b6dda77f237ba7892b5329b1 Mon Sep 17 00:00:00 2001
From: =?UTF-8?q?Bar=C4=B1=C5=9F=20Soner=20U=C5=9Fakl=C4=B1?=
Date: Wed, 17 Sep 2025 11:09:02 -0400
Subject: [PATCH 27/34] Revert "lint: remove unused"
This reverts commit a6674f67a1cfb92f6236e76447e5e9213b1b5710.
---
src/activitypub/helpers.js | 2 ++
1 file changed, 2 insertions(+)
diff --git a/src/activitypub/helpers.js b/src/activitypub/helpers.js
index e0f96fe3aa..e6eb2e1c08 100644
--- a/src/activitypub/helpers.js
+++ b/src/activitypub/helpers.js
@@ -8,6 +8,7 @@ const validator = require('validator');
// const cheerio = require('cheerio');
const crypto = require('crypto');
+const meta = require('../meta');
const posts = require('../posts');
const categories = require('../categories');
const messaging = require('../messaging');
@@ -15,6 +16,7 @@ const request = require('../request');
const db = require('../database');
const ttl = require('../cache/ttl');
const user = require('../user');
+const utils = require('../utils');
const activitypub = require('.');
const webfingerRegex = /^(@|acct:)?[\w-.]+@.+$/;
From d1f5060f11a257388690d1441726efd58ca88b5a Mon Sep 17 00:00:00 2001
From: Julian Lam
Date: Thu, 18 Sep 2025 13:33:16 -0400
Subject: [PATCH 28/34] fix(deps): bump 2factor to 7.6.0
---
install/package.json | 4 ++--
1 file changed, 2 insertions(+), 2 deletions(-)
diff --git a/install/package.json b/install/package.json
index cd5d08f5c1..f9e5b8b0ed 100644
--- a/install/package.json
+++ b/install/package.json
@@ -96,7 +96,7 @@
"mousetrap": "1.6.5",
"multer": "2.0.2",
"nconf": "0.13.0",
- "nodebb-plugin-2factor": "7.5.10",
+ "nodebb-plugin-2factor": "7.6.0",
"nodebb-plugin-composer-default": "10.3.1",
"nodebb-plugin-dbsearch": "6.3.2",
"nodebb-plugin-emoji": "6.0.3",
@@ -201,4 +201,4 @@
"url": "https://github.com/barisusakli"
}
]
-}
\ No newline at end of file
+}
From f9edb13f6209b075d4a53c130d1bba166ae188fa Mon Sep 17 00:00:00 2001
From: Julian Lam
Date: Fri, 19 Sep 2025 14:43:04 -0400
Subject: [PATCH 29/34] fix: missing actor assertion on 1b12 announced upboat
---
src/activitypub/inbox.js | 1 +
1 file changed, 1 insertion(+)
diff --git a/src/activitypub/inbox.js b/src/activitypub/inbox.js
index 754720f208..ea5b032a1a 100644
--- a/src/activitypub/inbox.js
+++ b/src/activitypub/inbox.js
@@ -298,6 +298,7 @@ inbox.announce = async (req) => {
const exists = await posts.exists(localId || id);
if (exists) {
try {
+ await activitypub.actors.assert(object.actor);
const result = await posts.upvote(localId || id, object.actor);
if (localId) {
socketHelpers.upvote(result, 'notifications:upvoted-your-post-in');
From 4d68e3fe145e49f38d1f3dc2b45d8409bb3945f6 Mon Sep 17 00:00:00 2001
From: Julian Lam
Date: Mon, 22 Sep 2025 11:56:55 -0400
Subject: [PATCH 30/34] fix: re-jig handling of ap tag values so that only
hashtags are considered (not Piefed community tags, etc.)
---
src/activitypub/notes.js | 3 ++-
1 file changed, 2 insertions(+), 1 deletion(-)
diff --git a/src/activitypub/notes.js b/src/activitypub/notes.js
index ef4abe9add..832b8d152d 100644
--- a/src/activitypub/notes.js
+++ b/src/activitypub/notes.js
@@ -38,11 +38,12 @@ Notes._normalizeTags = async (tag, cid) => {
}
tags = tags
+ .filter(({ type }) => type === 'Hashtag')
.map((tag) => {
tag.name = tag.name.startsWith('#') ? tag.name.slice(1) : tag.name;
return tag;
})
- .filter(o => o.type === 'Hashtag' && !systemTags.includes(o.name))
+ .filter(({ name }) => !systemTags.includes(name))
.map(t => t.name);
if (tags.length > maxTags) {
From d0c058263f5ffbdd7be821d15b3fd3bcbdb5fa12 Mon Sep 17 00:00:00 2001
From: Julian Lam
Date: Mon, 22 Sep 2025 12:14:14 -0400
Subject: [PATCH 31/34] fix: update note assertion topic members check to
simpler posts.exists check
The original logic checked that each member of the resolved chain was part of the resolved topic.
That isn't always the case, especially when topics splinter due to network timeouts/unavailability.
This ended up causing issues where already asserted posts were re-asserted but failed because they no longer served an _activitypub object since it was already asserted and the data was just pulled from the db.
---
src/activitypub/notes.js | 7 +++----
1 file changed, 3 insertions(+), 4 deletions(-)
diff --git a/src/activitypub/notes.js b/src/activitypub/notes.js
index 832b8d152d..e1fc16c175 100644
--- a/src/activitypub/notes.js
+++ b/src/activitypub/notes.js
@@ -117,9 +117,8 @@ Notes.assert = async (uid, input, options = { skipChecks: false }) => {
await topics.tools.move(tid, { cid: options.cid, uid: 'system' });
}
- const members = await db.isSortedSetMembers(`tid:${tid}:posts`, chain.slice(1).map(p => p.pid));
- members.unshift(await posts.exists(mainPid));
- if (tid && members.every(Boolean)) {
+ const exists = await posts.exists(chain.map(p => p.pid));
+ if (tid && exists.every(Boolean)) {
// All cached, return early.
activitypub.helpers.log('[notes/assert] No new notes to process.');
await unlock(id);
@@ -212,7 +211,7 @@ Notes.assert = async (uid, input, options = { skipChecks: false }) => {
}
return post;
- }).filter((p, idx) => !members[idx]);
+ }).filter((p, idx) => !exists[idx]);
const count = unprocessed.length;
activitypub.helpers.log(`[notes/assert] ${count} new note(s) found.`);
From 32d0ee480844b350cd5502922bfe1109ad913d19 Mon Sep 17 00:00:00 2001
From: =?UTF-8?q?Bar=C4=B1=C5=9F=20Soner=20U=C5=9Fakl=C4=B1?=
Date: Thu, 25 Sep 2025 02:03:14 -0400
Subject: [PATCH 32/34] perf: update old upgrade scripts to use bulkSet/Add
fix a missing await
---
src/upgrades/1.10.2/upgrade_bans_to_hashes.js | 34 ++++++----
src/upgrades/1.10.2/username_email_history.js | 45 +++++++------
.../1.12.1/clear_username_email_history.js | 53 ++++++----------
.../1.12.1/moderation_notes_refactor.js | 10 +--
src/upgrades/1.13.0/clean_post_topic_hash.js | 3 +-
.../1.6.2/topics_lastposttime_zset.js | 35 ++++++-----
src/upgrades/1.7.1/notification-settings.js | 35 ++++++++---
src/upgrades/1.7.3/topic_votes.js | 56 ++++++++++-------
src/upgrades/1.8.1/diffs_zset_to_listhash.js | 63 +++++++------------
.../1.9.0/refresh_post_upload_associations.js | 19 +++---
src/upgrades/2.8.7/fix-email-sorted-sets.js | 2 +-
src/upgrades/3.7.0/category-read-by-uid.js | 2 +-
12 files changed, 186 insertions(+), 171 deletions(-)
diff --git a/src/upgrades/1.10.2/upgrade_bans_to_hashes.js b/src/upgrades/1.10.2/upgrade_bans_to_hashes.js
index 84c7a0ed4d..2bc55b4667 100644
--- a/src/upgrades/1.10.2/upgrade_bans_to_hashes.js
+++ b/src/upgrades/1.10.2/upgrade_bans_to_hashes.js
@@ -11,14 +11,23 @@ module.exports = {
method: async function () {
const { progress } = this;
+ progress.total = await db.sortedSetCard('users:joindate');
+
await batch.processSortedSet('users:joindate', async (uids) => {
- for (const uid of uids) {
- progress.incr();
- const [bans, reasons, userData] = await Promise.all([
- db.getSortedSetRevRangeWithScores(`uid:${uid}:bans`, 0, -1),
- db.getSortedSetRevRangeWithScores(`banned:${uid}:reasons`, 0, -1),
- db.getObjectFields(`user:${uid}`, ['banned', 'banned:expire', 'joindate', 'lastposttime', 'lastonline']),
- ]);
+ progress.incr(uids.length);
+ const [allUserData, allBans] = await Promise.all([
+ db.getObjectsFields(
+ uids.map(uid => `user:${uid}`),
+ ['banned', 'banned:expire', 'joindate', 'lastposttime', 'lastonline'],
+ ),
+ db.getSortedSetsMembersWithScores(
+ uids.map(uid => `uid:${uid}:bans`)
+ ),
+ ]);
+
+ await Promise.all(uids.map(async (uid, index) => {
+ const userData = allUserData[index];
+ const bans = allBans[index] || [];
// has no history, but is banned, create plain object with just uid and timestmap
if (!bans.length && parseInt(userData.banned, 10)) {
@@ -31,6 +40,7 @@ module.exports = {
const banKey = `uid:${uid}:ban:${banTimestamp}`;
await addBan(uid, banKey, { uid: uid, timestamp: banTimestamp });
} else if (bans.length) {
+ const reasons = await db.getSortedSetRevRangeWithScores(`banned:${uid}:reasons`, 0, -1);
// process ban history
for (const ban of bans) {
const reasonData = reasons.find(reasonData => reasonData.score === ban.score);
@@ -46,14 +56,16 @@ module.exports = {
await addBan(uid, banKey, data);
}
}
- }
+ }));
}, {
- progress: this.progress,
+ batch: 500,
});
},
};
async function addBan(uid, key, data) {
- await db.setObject(key, data);
- await db.sortedSetAdd(`uid:${uid}:bans:timestamp`, data.timestamp, key);
+ await Promise.all([
+ db.setObject(key, data),
+ db.sortedSetAdd(`uid:${uid}:bans:timestamp`, data.timestamp, key),
+ ]);
}
diff --git a/src/upgrades/1.10.2/username_email_history.js b/src/upgrades/1.10.2/username_email_history.js
index 3b03568a69..8ee4306e3d 100644
--- a/src/upgrades/1.10.2/username_email_history.js
+++ b/src/upgrades/1.10.2/username_email_history.js
@@ -11,27 +11,34 @@ module.exports = {
method: async function () {
const { progress } = this;
- await batch.processSortedSet('users:joindate', async (uids) => {
- async function updateHistory(uid, set, fieldName) {
- const count = await db.sortedSetCard(set);
- if (count <= 0) {
- // User has not changed their username/email before, record original username
- const userData = await user.getUserFields(uid, [fieldName, 'joindate']);
- if (userData && userData.joindate && userData[fieldName]) {
- await db.sortedSetAdd(set, userData.joindate, [userData[fieldName], userData.joindate].join(':'));
- }
- }
- }
+ progress.total = await db.sortedSetCard('users:joindate');
- await Promise.all(uids.map(async (uid) => {
- await Promise.all([
- updateHistory(uid, `user:${uid}:usernames`, 'username'),
- updateHistory(uid, `user:${uid}:emails`, 'email'),
- ]);
- progress.incr();
- }));
+ await batch.processSortedSet('users:joindate', async (uids) => {
+ const [usernameHistory, emailHistory, userData] = await Promise.all([
+ db.sortedSetsCard(uids.map(uid => `user:${uid}:usernames`)),
+ db.sortedSetsCard(uids.map(uid => `user:${uid}:emails`)),
+ user.getUsersFields(uids, ['uid', 'username', 'email', 'joindate']),
+ ]);
+
+ const bulkAdd = [];
+ userData.forEach((data, index) => {
+ const thisUsernameHistory = usernameHistory[index];
+ const thisEmailHistory = emailHistory[index];
+ if (thisUsernameHistory <= 0 && data && data.joindate && data.username) {
+ bulkAdd.push([
+ `user:${data.uid}:usernames`, data.joindate, [data.username, data.joindate].join(':'),
+ ]);
+ }
+ if (thisEmailHistory <= 0 && data && data.joindate && data.email) {
+ bulkAdd.push([
+ `user:${data.uid}:emails`, data.joindate, [data.email, data.joindate].join(':'),
+ ]);
+ }
+ });
+ await db.sortedSetAddBulk(bulkAdd);
+ progress.incr(uids.length);
}, {
- progress: this.progress,
+ batch: 500,
});
},
};
diff --git a/src/upgrades/1.12.1/clear_username_email_history.js b/src/upgrades/1.12.1/clear_username_email_history.js
index 822b500884..0d36534502 100644
--- a/src/upgrades/1.12.1/clear_username_email_history.js
+++ b/src/upgrades/1.12.1/clear_username_email_history.js
@@ -1,45 +1,32 @@
'use strict';
-const async = require('async');
+
const db = require('../../database');
const user = require('../../user');
+const batch = require('../../batch');
module.exports = {
name: 'Delete username email history for deleted users',
timestamp: Date.UTC(2019, 2, 25),
- method: function (callback) {
+ method: async function () {
const { progress } = this;
- let currentUid = 1;
- db.getObjectField('global', 'nextUid', (err, nextUid) => {
- if (err) {
- return callback(err);
- }
- progress.total = nextUid;
- async.whilst((next) => {
- next(null, currentUid < nextUid);
- },
- (next) => {
- progress.incr();
- user.exists(currentUid, (err, exists) => {
- if (err) {
- return next(err);
- }
- if (exists) {
- currentUid += 1;
- return next();
- }
- db.deleteAll([`user:${currentUid}:usernames`, `user:${currentUid}:emails`], (err) => {
- if (err) {
- return next(err);
- }
- currentUid += 1;
- next();
- });
- });
- },
- (err) => {
- callback(err);
- });
+
+ progress.total = await db.getObjectField('global', 'nextUid');
+ const allUids = [];
+ for (let i = 1; i < progress.total; i += 1) {
+ allUids.push(i);
+ }
+ await batch.processArray(allUids, async (uids) => {
+ const exists = await user.exists(uids);
+ const missingUids = uids.filter((uid, index) => !exists[index]);
+ const keysToDelete = [
+ ...missingUids.map(uid => `user:${uid}:usernames`),
+ ...missingUids.map(uid => `user:${uid}:emails`),
+ ];
+ await db.deleteAll(keysToDelete);
+ progress.incr(uids.length);
+ }, {
+ batch: 500,
});
},
};
diff --git a/src/upgrades/1.12.1/moderation_notes_refactor.js b/src/upgrades/1.12.1/moderation_notes_refactor.js
index 390273d74a..85118a9a0c 100644
--- a/src/upgrades/1.12.1/moderation_notes_refactor.js
+++ b/src/upgrades/1.12.1/moderation_notes_refactor.js
@@ -12,10 +12,12 @@ module.exports = {
const { progress } = this;
await batch.processSortedSet('users:joindate', async (uids) => {
- await Promise.all(uids.map(async (uid) => {
- progress.incr();
-
- const notes = await db.getSortedSetRevRange(`uid:${uid}:moderation:notes`, 0, -1);
+ progress.incr(uids.length);
+ const allNotes = await db.getSortedSetsMembers(
+ uids.map(uid => `uid:${uid}:moderation:notes`)
+ );
+ await Promise.all(uids.map(async (uid, index) => {
+ const notes = allNotes[index];
for (const note of notes) {
const noteData = JSON.parse(note);
noteData.timestamp = noteData.timestamp || Date.now();
diff --git a/src/upgrades/1.13.0/clean_post_topic_hash.js b/src/upgrades/1.13.0/clean_post_topic_hash.js
index caa6dbd8f6..20cfd78c22 100644
--- a/src/upgrades/1.13.0/clean_post_topic_hash.js
+++ b/src/upgrades/1.13.0/clean_post_topic_hash.js
@@ -8,6 +8,7 @@ module.exports = {
timestamp: Date.UTC(2019, 9, 7),
method: async function () {
const { progress } = this;
+ progress.total = await db.sortedSetCard('posts:pid') + await db.sortedSetCard('topics:tid');
await cleanPost(progress);
await cleanTopic(progress);
},
@@ -51,7 +52,6 @@ async function cleanPost(progress) {
}));
}, {
batch: 500,
- progress: progress,
});
}
@@ -90,6 +90,5 @@ async function cleanTopic(progress) {
}));
}, {
batch: 500,
- progress: progress,
});
}
diff --git a/src/upgrades/1.6.2/topics_lastposttime_zset.js b/src/upgrades/1.6.2/topics_lastposttime_zset.js
index 1dee9feb1a..f299b19c01 100644
--- a/src/upgrades/1.6.2/topics_lastposttime_zset.js
+++ b/src/upgrades/1.6.2/topics_lastposttime_zset.js
@@ -1,29 +1,30 @@
'use strict';
-const async = require('async');
-
const db = require('../../database');
+const batch = require('../../batch');
module.exports = {
name: 'New sorted set cid::tids:lastposttime',
timestamp: Date.UTC(2017, 9, 30),
- method: function (callback) {
+ method: async function () {
const { progress } = this;
+ progress.total = await db.sortedSetCard('topics:tid');
- require('../../batch').processSortedSet('topics:tid', (tids, next) => {
- async.eachSeries(tids, (tid, next) => {
- db.getObjectFields(`topic:${tid}`, ['cid', 'timestamp', 'lastposttime'], (err, topicData) => {
- if (err || !topicData) {
- return next(err);
- }
- progress.incr();
-
- const timestamp = topicData.lastposttime || topicData.timestamp || Date.now();
- db.sortedSetAdd(`cid:${topicData.cid}:tids:lastposttime`, timestamp, tid, next);
- }, next);
- }, next);
+ await batch.processSortedSet('topics:tid', async (tids) => {
+ const topicData = await db.getObjectsFields(
+ tids.map(tid => `topic:${tid}`), ['tid', 'cid', 'timestamp', 'lastposttime']
+ );
+ const bulkAdd = [];
+ topicData.forEach((data) => {
+ if (data && data.cid && data.tid) {
+ const timestamp = data.lastposttime || data.timestamp || Date.now();
+ bulkAdd.push([`cid:${data.cid}:tids:lastposttime`, timestamp, data.tid]);
+ }
+ });
+ await db.sortedSetAddBulk(bulkAdd);
+ progress.incr(tids.length);
}, {
- progress: this.progress,
- }, callback);
+ batch: 500,
+ });
},
};
diff --git a/src/upgrades/1.7.1/notification-settings.js b/src/upgrades/1.7.1/notification-settings.js
index fed592effb..e3693d4f04 100644
--- a/src/upgrades/1.7.1/notification-settings.js
+++ b/src/upgrades/1.7.1/notification-settings.js
@@ -8,23 +8,38 @@ module.exports = {
timestamp: Date.UTC(2017, 10, 15),
method: async function () {
const { progress } = this;
-
+ progress.total = await db.sortedSetCard('users:joindate');
await batch.processSortedSet('users:joindate', async (uids) => {
- await Promise.all(uids.map(async (uid) => {
- progress.incr();
- const userSettings = await db.getObjectFields(`user:${uid}:settings`, ['sendChatNotifications', 'sendPostNotifications']);
- if (userSettings) {
+
+ const userSettings = await db.getObjectsFields(
+ uids.map(uid => `user:${uid}:settings`),
+ ['sendChatNotifications', 'sendPostNotifications'],
+ );
+
+ const bulkSet = [];
+ userSettings.forEach((settings, index) => {
+ const set = {};
+ if (settings) {
if (parseInt(userSettings.sendChatNotifications, 10) === 1) {
- await db.setObjectField(`user:${uid}:settings`, 'notificationType_new-chat', 'notificationemail');
+ set['notificationType_new-chat'] = 'notificationemail';
}
if (parseInt(userSettings.sendPostNotifications, 10) === 1) {
- await db.setObjectField(`user:${uid}:settings`, 'notificationType_new-reply', 'notificationemail');
+ set['notificationType_new-reply'] = 'notificationemail';
+ }
+ if (Object.keys(set).length) {
+ bulkSet.push([`user:${uids[index]}:settings`, set]);
}
}
- await db.deleteObjectFields(`user:${uid}:settings`, ['sendChatNotifications', 'sendPostNotifications']);
- }));
+ });
+ await db.setObjectBulk(bulkSet);
+
+ await db.deleteObjectFields(
+ uids.map(uid => `user:${uid}:settings`),
+ ['sendChatNotifications', 'sendPostNotifications'],
+ );
+
+ progress.incr(uids.length);
}, {
- progress: progress,
batch: 500,
});
},
diff --git a/src/upgrades/1.7.3/topic_votes.js b/src/upgrades/1.7.3/topic_votes.js
index 008aaece0a..d5f6b9fd57 100644
--- a/src/upgrades/1.7.3/topic_votes.js
+++ b/src/upgrades/1.7.3/topic_votes.js
@@ -10,32 +10,42 @@ module.exports = {
method: async function () {
const { progress } = this;
- batch.processSortedSet('topics:tid', async (tids) => {
- await Promise.all(tids.map(async (tid) => {
- progress.incr();
- const topicData = await db.getObjectFields(`topic:${tid}`, ['mainPid', 'cid', 'pinned']);
- if (topicData.mainPid && topicData.cid) {
- const postData = await db.getObject(`post:${topicData.mainPid}`);
- if (postData) {
- const upvotes = parseInt(postData.upvotes, 10) || 0;
- const downvotes = parseInt(postData.downvotes, 10) || 0;
- const data = {
- upvotes: upvotes,
- downvotes: downvotes,
- };
- const votes = upvotes - downvotes;
- await Promise.all([
- db.setObject(`topic:${tid}`, data),
- db.sortedSetAdd('topics:votes', votes, tid),
- ]);
- if (parseInt(topicData.pinned, 10) !== 1) {
- await db.sortedSetAdd(`cid:${topicData.cid}:tids:votes`, votes, tid);
- }
+ progress.total = await db.sortedSetCard('topics:tid');
+
+ await batch.processSortedSet('topics:tid', async (tids) => {
+ const topicsData = await db.getObjectsFields(
+ tids.map(tid => `topic:${tid}`),
+ ['tid', 'mainPid', 'cid', 'pinned'],
+ );
+ const mainPids = topicsData.map(topicData => topicData && topicData.mainPid);
+ const mainPosts = await db.getObjects(mainPids.map(pid => `post:${pid}`));
+
+ const bulkSet = [];
+ const bulkAdd = [];
+
+ topicsData.forEach((topicData, index) => {
+ const mainPost = mainPosts[index];
+ if (mainPost && topicData && topicData.cid) {
+ const upvotes = parseInt(mainPost.upvotes, 10) || 0;
+ const downvotes = parseInt(mainPost.downvotes, 10) || 0;
+ const data = {
+ upvotes: upvotes,
+ downvotes: downvotes,
+ };
+ const votes = upvotes - downvotes;
+ bulkSet.push([`topic:${topicData.tid}`, data]);
+ bulkAdd.push(['topics:votes', votes, topicData.tid]);
+ if (parseInt(topicData.pinned, 10) !== 1) {
+ bulkAdd.push([`cid:${topicData.cid}:tids:votes`, votes, topicData.tid]);
}
}
- }));
+ });
+
+ await db.setObjectBulk(bulkSet);
+ await db.sortedSetAddBulk('topics:votes', bulkAdd);
+
+ progress.incr(tids.length);
}, {
- progress: progress,
batch: 500,
});
},
diff --git a/src/upgrades/1.8.1/diffs_zset_to_listhash.js b/src/upgrades/1.8.1/diffs_zset_to_listhash.js
index 370242fba1..277418a79e 100644
--- a/src/upgrades/1.8.1/diffs_zset_to_listhash.js
+++ b/src/upgrades/1.8.1/diffs_zset_to_listhash.js
@@ -1,57 +1,40 @@
'use strict';
-const async = require('async');
const db = require('../../database');
const batch = require('../../batch');
-
module.exports = {
name: 'Reformatting post diffs to be stored in lists and hash instead of single zset',
timestamp: Date.UTC(2018, 2, 15),
- method: function (callback) {
+ method: async function () {
const { progress } = this;
- batch.processSortedSet('posts:pid', (pids, next) => {
- async.each(pids, (pid, next) => {
- db.getSortedSetRangeWithScores(`post:${pid}:diffs`, 0, -1, (err, diffs) => {
- if (err) {
- return next(err);
- }
+ progress.total = await db.sortedSetCard('posts:pid');
- if (!diffs || !diffs.length) {
- progress.incr();
- return next();
- }
+ await batch.processSortedSet('posts:pid', async (pids) => {
+ const postDiffs = await db.getSortedSetsMembersWithScores(
+ pids.map(pid => `post:${pid}:diffs`),
+ );
- // For each diff, push to list
- async.each(diffs, (diff, next) => {
- async.series([
- async.apply(db.delete.bind(db), `post:${pid}:diffs`),
- async.apply(db.listPrepend.bind(db), `post:${pid}:diffs`, diff.score),
- async.apply(db.setObject.bind(db), `diff:${pid}.${diff.score}`, {
- pid: pid,
- patch: diff.value,
- }),
- ], next);
- }, (err) => {
- if (err) {
- return next(err);
- }
+ await db.deleteAll(pids.map(pid => `post:${pid}:diffs`));
- progress.incr();
- return next();
- });
- });
- }, (err) => {
- if (err) {
- // Probably type error, ok to incr and continue
- progress.incr();
+ await Promise.all(postDiffs.map(async (diffs, index) => {
+ if (!diffs || !diffs.length) {
+ return;
}
-
- return next();
- });
+ diffs.reverse();
+ const pid = pids[index];
+ await db.listAppend(`post:${pid}:diffs`, diffs.map(d => d.score));
+ await db.setObjectBulk(
+ diffs.map(d => ([`diff:${pid}.${d.score}`, {
+ pid: pid,
+ patch: d.value,
+ }]))
+ );
+ }));
+ progress.incr(pids.length);
}, {
- progress: progress,
- }, callback);
+ batch: 500,
+ });
},
};
diff --git a/src/upgrades/1.9.0/refresh_post_upload_associations.js b/src/upgrades/1.9.0/refresh_post_upload_associations.js
index 44acfc079f..6183529641 100644
--- a/src/upgrades/1.9.0/refresh_post_upload_associations.js
+++ b/src/upgrades/1.9.0/refresh_post_upload_associations.js
@@ -1,21 +1,20 @@
'use strict';
-const async = require('async');
+const db = require('../../database');
const posts = require('../../posts');
+const batch = require('../../batch');
module.exports = {
name: 'Refresh post-upload associations',
timestamp: Date.UTC(2018, 3, 16),
- method: function (callback) {
+ method: async function () {
const { progress } = this;
-
- require('../../batch').processSortedSet('posts:pid', (pids, next) => {
- async.each(pids, (pid, next) => {
- posts.uploads.sync(pid, next);
- progress.incr();
- }, next);
+ progress.total = await db.sortedSetCard('posts:pid');
+ await batch.processSortedSet('posts:pid', async (pids) => {
+ await Promise.all(pids.map(pid => posts.uploads.sync(pid)));
+ progress.incr(pids.length);
}, {
- progress: this.progress,
- }, callback);
+ batch: 500,
+ });
},
};
diff --git a/src/upgrades/2.8.7/fix-email-sorted-sets.js b/src/upgrades/2.8.7/fix-email-sorted-sets.js
index fcab69a8f4..84919e6774 100644
--- a/src/upgrades/2.8.7/fix-email-sorted-sets.js
+++ b/src/upgrades/2.8.7/fix-email-sorted-sets.js
@@ -26,7 +26,7 @@ module.exports = {
}
// user has email but doesn't match whats stored in user hash, gh#11259
- if (userData.email && userData.email.toLowerCase() !== email.toLowerCase()) {
+ if (userData.email && email && String(userData.email).toLowerCase() !== email.toLowerCase()) {
bulkRemove.push(['email:uid', email]);
bulkRemove.push(['email:sorted', `${email.toLowerCase()}:${uid}`]);
}
diff --git a/src/upgrades/3.7.0/category-read-by-uid.js b/src/upgrades/3.7.0/category-read-by-uid.js
index 4ef564f53a..971620613e 100644
--- a/src/upgrades/3.7.0/category-read-by-uid.js
+++ b/src/upgrades/3.7.0/category-read-by-uid.js
@@ -9,6 +9,7 @@ module.exports = {
method: async function () {
const { progress } = this;
const nextCid = await db.getObjectField('global', 'nextCid');
+ progress.total = nextCid;
const allCids = [];
for (let i = 1; i <= nextCid; i++) {
allCids.push(i);
@@ -18,7 +19,6 @@ module.exports = {
progress.incr(cids.length);
}, {
batch: 500,
- progress,
});
},
};
From 0a2fa45da1768c175f1821ea79e9eebdfb83faab Mon Sep 17 00:00:00 2001
From: =?UTF-8?q?Bar=C4=B1=C5=9F=20Soner=20U=C5=9Fakl=C4=B1?=
Date: Thu, 25 Sep 2025 11:02:12 -0400
Subject: [PATCH 33/34] perf: update upgrade script to use bulk methods
add missing progress.total
---
src/upgrades/1.10.0/view_deleted_privilege.js | 1 +
.../1.10.2/fix_category_topic_zsets.js | 24 +++++++++++--------
2 files changed, 15 insertions(+), 10 deletions(-)
diff --git a/src/upgrades/1.10.0/view_deleted_privilege.js b/src/upgrades/1.10.0/view_deleted_privilege.js
index a483bcf417..3b65f2d5b7 100644
--- a/src/upgrades/1.10.0/view_deleted_privilege.js
+++ b/src/upgrades/1.10.0/view_deleted_privilege.js
@@ -11,6 +11,7 @@ module.exports = {
method: async function () {
const { progress } = this;
const cids = await db.getSortedSetRange('categories:cid', 0, -1);
+ progress.total = cids.length;
for (const cid of cids) {
const uids = await db.getSortedSetRange(`group:cid:${cid}:privileges:moderate:members`, 0, -1);
for (const uid of uids) {
diff --git a/src/upgrades/1.10.2/fix_category_topic_zsets.js b/src/upgrades/1.10.2/fix_category_topic_zsets.js
index 999383feac..83b4d7b27f 100644
--- a/src/upgrades/1.10.2/fix_category_topic_zsets.js
+++ b/src/upgrades/1.10.2/fix_category_topic_zsets.js
@@ -1,5 +1,3 @@
-/* eslint-disable no-await-in-loop */
-
'use strict';
const db = require('../../database');
@@ -13,18 +11,24 @@ module.exports = {
const { progress } = this;
const topics = require('../../topics');
+ progress.total = await db.sortedSetCard('topics:tid');
await batch.processSortedSet('topics:tid', async (tids) => {
- for (const tid of tids) {
- progress.incr();
- const topicData = await db.getObjectFields(`topic:${tid}`, ['cid', 'pinned', 'postcount']);
- if (parseInt(topicData.pinned, 10) !== 1) {
+ progress.incr(tids.length);
+ const topicData = await db.getObjectFields(
+ tids.map(tid => `topic:${tid}`),
+ ['tid', 'cid', 'pinned', 'postcount'],
+ );
+ const bulkAdd = [];
+ topicData.forEach((topic) => {
+ if (topic && parseInt(topic.pinned, 10) !== 1) {
topicData.postcount = parseInt(topicData.postcount, 10) || 0;
- await db.sortedSetAdd(`cid:${topicData.cid}:tids:posts`, topicData.postcount, tid);
+ bulkAdd.push([`cid:${topicData.cid}:tids:posts`, topicData.postcount, topicData.tid]);
}
- await topics.updateLastPostTimeFromLastPid(tid);
- }
+ });
+ await db.sortedSetAddBulk(bulkAdd);
+ await Promise.all(tids.map(tid => topics.updateLastPostTimeFromLastPid(tid)));
}, {
- progress: progress,
+ batch: 500,
});
},
};
From 7abdfd86ac87bcd4f5df7d17ae0cdf157177a991 Mon Sep 17 00:00:00 2001
From: Julian Lam
Date: Thu, 25 Sep 2025 11:56:38 -0400
Subject: [PATCH 34/34] fix: skip header checking during note assertion if test
runner is active
---
src/activitypub/notes.js | 2 +-
1 file changed, 1 insertion(+), 1 deletion(-)
diff --git a/src/activitypub/notes.js b/src/activitypub/notes.js
index e1fc16c175..b85d686f1e 100644
--- a/src/activitypub/notes.js
+++ b/src/activitypub/notes.js
@@ -71,7 +71,7 @@ Notes.assert = async (uid, input, options = { skipChecks: false }) => {
}
try {
- if (!options.skipChecks) {
+ if (!(options.skipChecks || process.env.hasOwnProperty('CI'))) {
id = (await activitypub.checkHeader(id)) || id;
}