Merge branch 'master' into develop

This commit is contained in:
Barış Soner Uşaklı
2026-03-28 13:25:26 -04:00
8 changed files with 43 additions and 42 deletions

View File

@@ -71,6 +71,11 @@ get:
type: boolean
readClass:
type: string
unreadNids:
type: array
description: An array of notification ids that are unread.
items:
type: string
filters:
$ref: ../components/schemas/NotificationFilters.yaml#/FiltersArray
regularFilters:

View File

@@ -38,11 +38,7 @@ Analytics.init = async function () {
runOnAllNodes: true,
onTick: async () => {
if (Analytics.pause) return;
publishLocalAnalytics();
if (runJobs) {
await sleep(2000);
await Analytics.writeData();
}
await Analytics.writeLocalData();
},
});
@@ -63,6 +59,14 @@ Analytics.init = async function () {
}
};
Analytics.writeLocalData = async function () {
	// Broadcast this node's in-memory analytics counters via pubsub.
	publishLocalAnalytics();
	if (!runJobs) {
		return;
	}
	// Brief pause before persisting — presumably to let other nodes
	// publish their counters first; TODO confirm rationale.
	await sleep(2000);
	await Analytics.writeData();
};
function publishLocalAnalytics() {
pubsub.publish('analytics:publish', {
local: local,
@@ -185,6 +189,12 @@ Analytics.writeData = async function () {
incrByBulk.push(['analytics:pageviews:ap', total.apPageViews, today.getTime()]);
incrByBulk.push(['analytics:pageviews:ap:month', total.apPageViews, month.getTime()]);
total.apPageViews = 0;
if (!metrics.includes('pageviews:ap')) {
metrics.push('pageviews:ap');
}
if (!metrics.includes('pageviews:ap:month')) {
metrics.push('pageviews:ap:month');
}
}
if (total.uniquevisitors > 0) {

View File

@@ -61,6 +61,7 @@ notificationsController.get = async function (req, res, next) {
const data = await user.notifications.getAllWithCounts(req.uid, selectedFilter.filter);
let notifications = await user.notifications.getNotifications(data.nids, req.uid);
const unreadNids = notifications.filter(n => n && n.nid && !n.read).map(n => n.nid);
allFilters.forEach((filterData) => {
if (filterData && filterData.filter) {
filterData.count = data.counts[filterData.filter] || 0;
@@ -72,6 +73,7 @@ notificationsController.get = async function (req, res, next) {
res.render('notifications', {
notifications: notifications,
unreadNids,
pagination: pagination.create(page, pageCount, req.query),
filters: allFilters,
regularFilters: regularFilters,

View File

@@ -83,7 +83,9 @@ module.exports = function (module) {
if (!utils.isNumber(item[1])) {
throw new Error(`[[error:invalid-score, ${item[1]}]]`);
}
bulk.find({ _key: item[0], value: String(item[2]) }).upsert().updateOne({ $set: { score: parseFloat(item[1]) } });
bulk.find({ _key: item[0], value: String(item[2]) })
.upsert()
.updateOne({ $set: { score: parseFloat(item[1]) } });
});
await bulk.execute();
};

View File

@@ -27,31 +27,25 @@ DELETE FROM "legacy_object"
AND "expireAt" <= CURRENT_TIMESTAMP`,
});
await db.query({
name: 'ensureLegacyObjectType1',
const res = await db.query({
name: 'ensureLegacyObjectType_upsert',
text: `
INSERT INTO "legacy_object" ("_key", "type")
VALUES ($1::TEXT, $2::TEXT::LEGACY_OBJECT_TYPE)
ON CONFLICT
DO NOTHING`,
ON CONFLICT ("_key")
DO UPDATE SET "type" = "legacy_object"."type"
RETURNING "type"`,
values: [key, type],
});
const res = await db.query({
name: 'ensureLegacyObjectType2',
text: `
SELECT "type"
FROM "legacy_object_live"
WHERE "_key" = $1::TEXT`,
values: [key],
});
if (res.rows[0].type !== type) {
throw new Error(`database: cannot insert ${JSON.stringify(key)} as ${type} because it already exists as ${res.rows[0].type}`);
}
};
helpers.ensureLegacyObjectsType = async function (db, keys, type) {
keys = [...new Set(keys)];
await db.query({
name: 'ensureLegacyObjectTypeBefore',
text: `
@@ -60,38 +54,24 @@ DELETE FROM "legacy_object"
AND "expireAt" <= CURRENT_TIMESTAMP`,
});
await db.query({
name: 'ensureLegacyObjectsType1',
const res = await db.query({
name: 'ensureLegacyObjectsType_upsert',
text: `
INSERT INTO "legacy_object" ("_key", "type")
SELECT k, $2::TEXT::LEGACY_OBJECT_TYPE
FROM UNNEST($1::TEXT[]) k
ON CONFLICT
DO NOTHING`,
ON CONFLICT ("_key")
DO UPDATE SET "type" = "legacy_object"."type"
RETURNING "_key", "type"`,
values: [keys, type],
});
const res = await db.query({
name: 'ensureLegacyObjectsType2',
text: `
SELECT "_key", "type"
FROM "legacy_object_live"
WHERE "_key" = ANY($1::TEXT[])`,
values: [keys],
});
const invalid = res.rows.filter(r => r.type !== type);
if (invalid.length) {
const parts = invalid.map(r => `${JSON.stringify(r._key)} is ${r.type}`);
throw new Error(`database: cannot insert multiple objects as ${type} because they already exist: ${parts.join(', ')}`);
}
const missing = keys.filter(k => !res.rows.some(r => r._key === k));
if (missing.length) {
throw new Error(`database: failed to insert keys for objects: ${JSON.stringify(missing)}`);
}
};
helpers.noop = function () {};

View File

@@ -114,8 +114,10 @@ INSERT INTO "legacy_zset" ("_key", "value", "score")
}
keys.push(item[0]);
scores.push(item[1]);
values.push(item[2]);
values.push(helpers.valueToString(item[2]));
});
const compositeKeys = keys.map((k, i) => `${k}\0${values[i]}`);
helpers.removeDuplicateValues(compositeKeys, keys, values, scores);
await module.transaction(async (client) => {
await helpers.ensureLegacyObjectsType(client, keys, 'zset');
await client.query({

View File

@@ -149,7 +149,7 @@ async function shutdown(code) {
try {
await require('./webserver').destroy();
winston.info('[app] Web server closed to connections.');
await require('./analytics').writeData();
await require('./analytics').writeLocalData();
winston.info('[app] Live analytics saved.');
const db = require('./database');
await db.delete('locks');

View File

@@ -100,9 +100,9 @@
</div>
{{{ end }}}
</div>
<div class="small">
<div class="text-sm lh-1">
{{{ if posts.user.userslug}}}
<a class="text-decoration-none" href="{config.relative_path}/uid/{posts.user.uid}">{buildAvatar(posts.user, "24px", true, "not-responsive")} {posts.user.username}</a>
<a class="text-decoration-none d-flex align-items-center gap-1" href="{config.relative_path}/uid/{posts.user.uid}">{buildAvatar(posts.user, "24px", true, "not-responsive")} {posts.user.username}</a>
{{{ else }}}
{posts.user.username}
{{{ end }}}