From 835723482e2edcd8bd34f1390fbe6598d784f830 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Bar=C4=B1=C5=9F=20Soner=20U=C5=9Fakl=C4=B1?= Date: Thu, 26 Mar 2026 11:50:55 -0400 Subject: [PATCH 1/5] feat: add unreadNids to /api/notifications --- public/openapi/read/notifications.yaml | 5 +++++ src/controllers/accounts/notifications.js | 2 ++ 2 files changed, 7 insertions(+) diff --git a/public/openapi/read/notifications.yaml b/public/openapi/read/notifications.yaml index 015b6fae33..8dd723aabf 100644 --- a/public/openapi/read/notifications.yaml +++ b/public/openapi/read/notifications.yaml @@ -71,6 +71,11 @@ get: type: boolean readClass: type: string + unreadNids: + type: array + description: An array of notification ids that are unread. + items: + type: string filters: $ref: ../components/schemas/NotificationFilters.yaml#/FiltersArray regularFilters: diff --git a/src/controllers/accounts/notifications.js b/src/controllers/accounts/notifications.js index 301851ca36..00b94f0acb 100644 --- a/src/controllers/accounts/notifications.js +++ b/src/controllers/accounts/notifications.js @@ -61,6 +61,7 @@ notificationsController.get = async function (req, res, next) { const data = await user.notifications.getAllWithCounts(req.uid, selectedFilter.filter); let notifications = await user.notifications.getNotifications(data.nids, req.uid); + const unreadNids = notifications.filter(n => n && n.nid && !n.read).map(n => n.nid); allFilters.forEach((filterData) => { if (filterData && filterData.filter) { filterData.count = data.counts[filterData.filter] || 0; @@ -72,6 +73,7 @@ notificationsController.get = async function (req, res, next) { res.render('notifications', { notifications: notifications, + unreadNids, pagination: pagination.create(page, pageCount, req.query), filters: allFilters, regularFilters: regularFilters, From 1a0c2a21c7ffceb2a0f7d316bbe662c69b63d84d Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Bar=C4=B1=C5=9F=20Soner=20U=C5=9Fakl=C4=B1?= Date: Thu, 26 Mar 2026 12:43:21 -0400 Subject: [PATCH 2/5] fix: align-center user and name on post queue --- src/views/post-queue.tpl | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/src/views/post-queue.tpl b/src/views/post-queue.tpl index 7366500f6e..569bfb68e3 100644 --- a/src/views/post-queue.tpl +++ b/src/views/post-queue.tpl @@ -100,9 +100,9 @@ {{{ end }}} -
+
{{{ if posts.user.userslug}}} - {buildAvatar(posts.user, "24px", true, "not-responsive")} {posts.user.username} + {buildAvatar(posts.user, "24px", true, "not-responsive")} {posts.user.username} {{{ else }}} {posts.user.username} {{{ end }}} From 4b503db49701e0f815e5c751435e3c2142fca5ae Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Bar=C4=B1=C5=9F=20Soner=20U=C5=9Fakl=C4=B1?= Date: Fri, 27 Mar 2026 16:44:10 -0400 Subject: [PATCH 3/5] refactor: break long line --- src/database/mongo/sorted/add.js | 4 +++- 1 file changed, 3 insertions(+), 1 deletion(-) diff --git a/src/database/mongo/sorted/add.js b/src/database/mongo/sorted/add.js index bc3a8bc8ec..82539ee8c8 100644 --- a/src/database/mongo/sorted/add.js +++ b/src/database/mongo/sorted/add.js @@ -83,7 +83,9 @@ module.exports = function (module) { if (!utils.isNumber(item[1])) { throw new Error(`[[error:invalid-score, ${item[1]}]]`); } - bulk.find({ _key: item[0], value: String(item[2]) }).upsert().updateOne({ $set: { score: parseFloat(item[1]) } }); + bulk.find({ _key: item[0], value: String(item[2]) }) + .upsert() + .updateOne({ $set: { score: parseFloat(item[1]) } }); }); await bulk.execute(); }; From 6c4e9284822e37b5f77100705a8441e09b1854a1 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Bar=C4=B1=C5=9F=20Soner=20U=C5=9Fakl=C4=B1?= Date: Fri, 27 Mar 2026 16:45:20 -0400 Subject: [PATCH 4/5] fix: on exit, dont write analytics data on all nodes if you are running 4 nodebbs each one was calling writeData which could trigger duplicate key errors --- src/analytics.js | 14 +++++++++----- src/start.js | 2 +- 2 files changed, 10 insertions(+), 6 deletions(-) diff --git a/src/analytics.js b/src/analytics.js index e054e2e733..f64c651687 100644 --- a/src/analytics.js +++ b/src/analytics.js @@ -38,11 +38,7 @@ Analytics.init = async function () { runOnAllNodes: true, onTick: async () => { if (Analytics.pause) return; - publishLocalAnalytics(); - if (runJobs) { - await sleep(2000); - await Analytics.writeData(); - } + await Analytics.writeLocalData(); }, }); @@ -63,6 +59,14 @@ Analytics.init = async function () { } }; +Analytics.writeLocalData = async function () { + publishLocalAnalytics(); + if (runJobs) { + await sleep(2000); + await Analytics.writeData(); + } +}; + function publishLocalAnalytics() { pubsub.publish('analytics:publish', { local: local, diff --git a/src/start.js b/src/start.js index 00a129e33f..89a1683703 100644 --- a/src/start.js +++ b/src/start.js @@ -149,7 +149,7 @@ async function shutdown(code) { try { await require('./webserver').destroy(); winston.info('[app] Web server closed to connections.'); - await require('./analytics').writeData(); + await require('./analytics').writeLocalData(); winston.info('[app] Live analytics saved.'); const db = require('./database'); await db.delete('locks'); From b8fd88fba955db240dd3b8bd473ab9b9f19098aa Mon Sep 17 00:00:00 2001 From: Michele Di Maria Date: Sat, 28 Mar 2026 18:24:34 +0100 Subject: [PATCH 5/5] Fix the saving of the statistics on PosgreSQL #14124 (#14129) * fix: deduplicate postgres sorted set bulk ops to prevent pkey violation sortedSetIncrByBulk and sortedSetAddBulk did not deduplicate (key, value) pairs before INSERT, causing "duplicate key value violates unique constraint legacy_zset_pkey" errors since PostgreSQL ON CONFLICT only resolves against existing table rows, not within-statement duplicates. Also adds missing pageviews:ap metrics to analyticsKeys sorted set. 
Co-Authored-By: Claude Opus 4.6 (1M context) * fix: use upsert with RETURNING to prevent postgres analytics write failures Replace the INSERT ON CONFLICT DO NOTHING + separate SELECT verification pattern with INSERT ON CONFLICT DO UPDATE RETURNING. The old pattern had an unreliable gap between INSERT and SELECT causing random "failed to insert keys for objects" errors that blocked all analytics writes. The no-op upsert (DO UPDATE SET type = existing type) guarantees every row is returned via RETURNING, eliminating the need for a separate SELECT and the "missing keys" check entirely. Also deduplicates the keys array to prevent "cannot affect row a second time" errors with DO UPDATE. Co-Authored-By: Claude Opus 4.6 (1M context) --------- Co-authored-by: Claude Opus 4.6 (1M context) --- src/analytics.js | 6 ++++ src/database/postgres/helpers.js | 44 ++++++++--------------------- src/database/postgres/sorted.js | 21 +++++++++++--- src/database/postgres/sorted/add.js | 4 ++- 4 files changed, 38 insertions(+), 37 deletions(-) diff --git a/src/analytics.js b/src/analytics.js index f64c651687..e8e60c5e68 100644 --- a/src/analytics.js +++ b/src/analytics.js @@ -189,6 +189,12 @@ Analytics.writeData = async function () { incrByBulk.push(['analytics:pageviews:ap', total.apPageViews, today.getTime()]); incrByBulk.push(['analytics:pageviews:ap:month', total.apPageViews, month.getTime()]); total.apPageViews = 0; + if (!metrics.includes('pageviews:ap')) { + metrics.push('pageviews:ap'); + } + if (!metrics.includes('pageviews:ap:month')) { + metrics.push('pageviews:ap:month'); + } } if (total.uniquevisitors > 0) { diff --git a/src/database/postgres/helpers.js b/src/database/postgres/helpers.js index 85e0b63d07..8b92d3fe50 100644 --- a/src/database/postgres/helpers.js +++ b/src/database/postgres/helpers.js @@ -27,31 +27,25 @@ DELETE FROM "legacy_object" AND "expireAt" <= CURRENT_TIMESTAMP`, }); - await db.query({ - name: 'ensureLegacyObjectType1', + const res = await db.query({ + name: 'ensureLegacyObjectType_upsert', text: ` INSERT INTO "legacy_object" ("_key", "type") VALUES ($1::TEXT, $2::TEXT::LEGACY_OBJECT_TYPE) - ON CONFLICT - DO NOTHING`, + ON CONFLICT ("_key") + DO UPDATE SET "type" = "legacy_object"."type" + RETURNING "type"`, values: [key, type], }); - const res = await db.query({ - name: 'ensureLegacyObjectType2', - text: ` -SELECT "type" - FROM "legacy_object_live" - WHERE "_key" = $1::TEXT`, - values: [key], - }); - if (res.rows[0].type !== type) { throw new Error(`database: cannot insert ${JSON.stringify(key)} as ${type} because it already exists as ${res.rows[0].type}`); } }; helpers.ensureLegacyObjectsType = async function (db, keys, type) { + keys = [...new Set(keys)]; + await db.query({ name: 'ensureLegacyObjectTypeBefore', text: ` @@ -60,38 +54,24 @@ DELETE FROM "legacy_object" AND "expireAt" <= CURRENT_TIMESTAMP`, }); - await db.query({ - name: 'ensureLegacyObjectsType1', + const res = await db.query({ + name: 'ensureLegacyObjectsType_upsert', text: ` INSERT INTO "legacy_object" ("_key", "type") SELECT k, $2::TEXT::LEGACY_OBJECT_TYPE FROM UNNEST($1::TEXT[]) k - ON CONFLICT - DO NOTHING`, + ON CONFLICT ("_key") + DO UPDATE SET "type" = "legacy_object"."type" + RETURNING "_key", "type"`, values: [keys, type], }); - const res = await db.query({ - name: 'ensureLegacyObjectsType2', - text: ` -SELECT "_key", "type" - FROM "legacy_object_live" - WHERE "_key" = ANY($1::TEXT[])`, - values: [keys], - }); - const invalid = res.rows.filter(r => r.type !== type); if (invalid.length) { const parts = 
invalid.map(r => `${JSON.stringify(r._key)} is ${r.type}`); throw new Error(`database: cannot insert multiple objects as ${type} because they already exist: ${parts.join(', ')}`); } - - const missing = keys.filter(k => !res.rows.some(r => r._key === k)); - - if (missing.length) { - throw new Error(`database: failed to insert keys for objects: ${JSON.stringify(missing)}`); - } }; helpers.noop = function () {}; diff --git a/src/database/postgres/sorted.js b/src/database/postgres/sorted.js index 351fe3e059..50581a6ed4 100644 --- a/src/database/postgres/sorted.js +++ b/src/database/postgres/sorted.js @@ -551,16 +551,29 @@ RETURNING "score" s`, return []; } + // Deduplicate by (key, value) pair, summing increments for duplicates + const seen = new Map(); + const deduped = []; + data.forEach(([key, increment, value]) => { + value = helpers.valueToString(value); + increment = parseFloat(increment); + const mapKey = `${key}\0${value}`; + if (seen.has(mapKey)) { + deduped[seen.get(mapKey)][1] += increment; + } else { + seen.set(mapKey, deduped.length); + deduped.push([key, increment, value]); + } + }); + return await module.transaction(async (client) => { - await helpers.ensureLegacyObjectsType(client, data.map(item => item[0]), 'zset'); + await helpers.ensureLegacyObjectsType(client, deduped.map(item => item[0]), 'zset'); const values = []; const queryParams = []; let paramIndex = 1; - data.forEach(([key, increment, value]) => { - value = helpers.valueToString(value); - increment = parseFloat(increment); + deduped.forEach(([key, increment, value]) => { values.push(key, value, increment); queryParams.push(`($${paramIndex}::TEXT, $${paramIndex + 1}::TEXT, $${paramIndex + 2}::NUMERIC)`); paramIndex += 3; diff --git a/src/database/postgres/sorted/add.js b/src/database/postgres/sorted/add.js index 6f87416089..3db65cfde9 100644 --- a/src/database/postgres/sorted/add.js +++ b/src/database/postgres/sorted/add.js @@ -114,8 +114,10 @@ INSERT INTO "legacy_zset" ("_key", "value", "score") } keys.push(item[0]); scores.push(item[1]); - values.push(item[2]); + values.push(helpers.valueToString(item[2])); }); + const compositeKeys = keys.map((k, i) => `${k}\0${values[i]}`); + helpers.removeDuplicateValues(compositeKeys, keys, values, scores); await module.transaction(async (client) => { await helpers.ensureLegacyObjectsType(client, keys, 'zset'); await client.query({
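
For consumers of the change in patch 1: the /api/notifications JSON payload now carries unreadNids alongside notifications, pagination and filters, per the OpenAPI addition above. A minimal sketch of reading it from a client, assuming Node 18+ (or browser) fetch and an already-authenticated session; the wrapper function and badge idea are illustrative, not part of NodeBB:

'use strict';

// Illustrative client-side read of the new field. `unreadNids` and
// `notifications` come from this patch's controller/OpenAPI changes;
// everything else here (function name, badge logic) is an assumption.
async function getUnreadCount(baseUrl) {
	// Assumes the request carries an authenticated session; auth handling
	// is omitted from this sketch.
	const res = await fetch(`${baseUrl}/api/notifications`, {
		headers: { accept: 'application/json' },
	});
	const body = await res.json();
	// unreadNids lists the notification ids on this page that are still
	// unread, so a client can badge or bulk-mark them without re-filtering
	// the full notifications array.
	return Array.isArray(body.unreadNids) ? body.unreadNids.length : 0;
}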
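
Patch 4 splits the analytics flush into writeLocalData() so that every process publishes its in-memory counters but only the process that runs jobs persists them, both on the cron tick and during shutdown. A minimal sketch of that single-writer pattern, assuming an injected pubsub bus that also delivers messages back to the publishing process (as NodeBB's pubsub does) and a runJobs flag that is true on exactly one process; the factory shape and counter names here are illustrative:

'use strict';

// Illustrative sketch of the single-writer analytics flush from patch 4.
// `pubsub`, `runJobs` and `writeData` are injected here; in NodeBB they are
// the shared pubsub module, the per-process jobs flag and Analytics.writeData.
const sleep = ms => new Promise(resolve => setTimeout(resolve, ms));

function createAnalytics({ pubsub, runJobs, writeData }) {
	const local = { pageViews: 0 };     // counters owned by this process
	const aggregate = { pageViews: 0 }; // only consumed on the jobs process

	// Every process subscribes, but only the jobs process acts on the deltas.
	pubsub.on('analytics:publish', (delta) => {
		if (runJobs) {
			aggregate.pageViews += delta.pageViews;
		}
	});

	return {
		increment() {
			local.pageViews += 1;
		},
		// Called on the cron tick and on shutdown by every process.
		async writeLocalData() {
			// 1. Hand this process's counters to whichever process owns the write.
			pubsub.publish('analytics:publish', { pageViews: local.pageViews });
			local.pageViews = 0;
			// 2. Only the jobs process persists, after giving the others a
			//    moment to publish their own deltas; everyone else is done.
			if (runJobs) {
				await sleep(2000);
				await writeData(aggregate);
				aggregate.pageViews = 0;
			}
		},
	};
}

module.exports = createAnalytics;

On shutdown, src/start.js now calls writeLocalData() rather than writeData(), so a non-jobs process only publishes its counters and never races the jobs process on the analytics sorted sets.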
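
The dedup step that patch 5 adds to sortedSetIncrByBulk collapses repeated (key, value) pairs by summing their increments before the single multi-row INSERT, since, as the commit message notes, ON CONFLICT arbitration does not merge rows that collide within the same statement. A standalone sketch of that merge with a small usage example; the helper name and sample data are made up for illustration:

'use strict';

// Illustrative helper: collapse [key, increment, value] triples so each
// (key, value) pair appears once, with its increments summed. This mirrors
// the dedup step patch 5 adds to sortedSetIncrByBulk; the function name and
// the sample data below are not part of NodeBB.
function mergeIncrByTriples(data) {
	const index = new Map(); // "key\0value" -> position in `merged`
	const merged = [];
	for (const [key, increment, rawValue] of data) {
		const value = String(rawValue);
		const score = parseFloat(increment);
		const mapKey = `${key}\0${value}`;
		if (index.has(mapKey)) {
			merged[index.get(mapKey)][1] += score; // same pair seen before: sum it
		} else {
			index.set(mapKey, merged.length);
			merged.push([key, score, value]);
		}
	}
	return merged;
}

// Example: an analytics flush can emit the same (key, value) pair twice.
console.log(mergeIncrByTriples([
	['analytics:pageviews', 3, 1743120000000],
	['analytics:pageviews', 2, 1743120000000],
	['analytics:uniquevisitors', 1, 1743120000000],
]));
// -> [ [ 'analytics:pageviews', 5, '1743120000000' ],
//      [ 'analytics:uniquevisitors', 1, '1743120000000' ] ]

sorted/add.js applies the same idea by deduplicating on a `key\0value` composite before its bulk upsert.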
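
The helpers.js change relies on a PostgreSQL detail worth spelling out: RETURNING only reports rows the statement actually touched, so with ON CONFLICT DO NOTHING a conflicting key is silently skipped and a follow-up SELECT is needed, while a no-op DO UPDATE touches the existing row and makes it visible to RETURNING in the same round trip. A minimal demonstration against a throwaway temp table, assuming node-postgres and PG* connection environment variables; the table and key names are illustrative, not NodeBB's schema:

'use strict';

// Minimal demonstration of why the helpers switched from
// "ON CONFLICT DO NOTHING + verification SELECT" to a no-op
// "ON CONFLICT DO UPDATE ... RETURNING". Connection details and the
// temp table are stand-ins for this example only.
const { Client } = require('pg');

async function demo() {
	const client = new Client(); // reads PG* environment variables
	await client.connect();
	await client.query('CREATE TEMP TABLE objects ("_key" TEXT PRIMARY KEY, "type" TEXT NOT NULL)');
	await client.query(`INSERT INTO objects VALUES ('analytics:pageviews', 'zset')`);

	// DO NOTHING: the conflicting row is skipped, so RETURNING omits it and
	// a later SELECT would be needed to learn its stored type.
	const skipped = await client.query(`
		INSERT INTO objects ("_key", "type")
		VALUES ('analytics:pageviews', 'zset'), ('analytics:uniquevisitors', 'zset')
		ON CONFLICT DO NOTHING
		RETURNING "_key", "type"`);
	console.log(skipped.rows.length); // 1 -- only the brand-new key comes back

	// No-op DO UPDATE: the conflicting row is "updated" to its existing type,
	// so every input key is returned and can be type-checked in one round trip.
	const all = await client.query(`
		INSERT INTO objects ("_key", "type")
		VALUES ('analytics:pageviews', 'zset'), ('analytics:posts', 'zset')
		ON CONFLICT ("_key") DO UPDATE SET "type" = objects."type"
		RETURNING "_key", "type"`);
	console.log(all.rows.length); // 2 -- both keys come back

	await client.end();
}

demo().catch(console.error);

The [...new Set(keys)] step in the patched helper is still needed on top of this, because, as the second commit message notes, DO UPDATE cannot affect the same row twice within one statement.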