test(client,server): guard against duplicate JSON keys

This commit is contained in:
Elian Doran
2026-04-16 18:25:37 +03:00
parent d2836ef84c
commit d8db862c22
4 changed files with 103 additions and 5 deletions

View File

@@ -1,10 +1,10 @@
import { LOCALES } from "@triliumnext/commons";
import { findDuplicateJsonKeys, LOCALES } from "@triliumnext/commons";
import { readFileSync } from "fs";
import { join } from "path";
import { describe, expect, it } from "vitest";
describe("i18n", () => {
it("translations are valid JSON", () => {
it("translations are valid JSON with no duplicate keys", () => {
for (const locale of LOCALES) {
if (locale.contentOnly || locale.id === "en_rtl") {
continue;
@@ -14,6 +14,13 @@ describe("i18n", () => {
const translationFile = readFileSync(translationPath, { encoding: "utf-8" });
expect(() => JSON.parse(translationFile), `JSON error while parsing locale '${locale.id}' at "${translationPath}"`)
.not.toThrow();
const duplicates = findDuplicateJsonKeys(translationFile);
expect(
duplicates,
`Duplicate keys in locale '${locale.id}' at "${translationPath}":\n${
duplicates.map((d) => ` - "${d.key}" (line ${d.line})`).join("\n")}`
).toEqual([]);
}
});
});

View File

@@ -1,4 +1,4 @@
import { LOCALES } from "@triliumnext/commons";
import { findDuplicateJsonKeys, LOCALES } from "@triliumnext/commons";
import { readFileSync } from "fs";
import { join } from "path";
// eslint-disable-next-line @typescript-eslint/no-require-imports
@@ -6,7 +6,7 @@ const { languages } = require("tesseract.js");
import { describe, expect, it } from "vitest";
describe("i18n", () => {
it("translations are valid JSON", () => {
it("translations are valid JSON with no duplicate keys", () => {
for (const locale of LOCALES) {
if (locale.contentOnly || locale.id === "en_rtl") {
continue;
@@ -16,6 +16,13 @@ describe("i18n", () => {
const translationFile = readFileSync(translationPath, { encoding: "utf-8" });
expect(() => JSON.parse(translationFile), `JSON error while parsing locale '${locale.id}' at "${translationPath}"`)
.not.toThrow();
const duplicates = findDuplicateJsonKeys(translationFile);
expect(
duplicates,
`Duplicate keys in locale '${locale.id}' at "${translationPath}":\n` +
duplicates.map((d) => ` - "${d.key}" (line ${d.line})`).join("\n")
).toEqual([]);
}
});

View File

@@ -1,5 +1,5 @@
import { describe, it, expect } from "vitest";
import { trimIndentation } from "./test-utils.js";
import { findDuplicateJsonKeys, trimIndentation } from "./test-utils.js";
describe("Utils", () => {
it("trims indentation", () => {
@@ -11,4 +11,27 @@ Hello
world
123`);
});
describe("findDuplicateJsonKeys", () => {
it("returns empty for valid JSON without duplicates", () => {
expect(findDuplicateJsonKeys(`{"a": 1, "b": {"c": 2}}`)).toEqual([]);
});
it("detects duplicates at the top level and reports line numbers", () => {
const text = `{\n "a": 1,\n "b": 2,\n "a": 3\n}`;
expect(findDuplicateJsonKeys(text)).toEqual([{ key: "a", line: 4 }]);
});
it("scopes keys per object — same name at different levels is not a duplicate", () => {
expect(findDuplicateJsonKeys(`{"a": {"x": 1}, "b": {"x": 2}}`)).toEqual([]);
});
it("does not treat string values containing a colon as keys", () => {
expect(findDuplicateJsonKeys(`{"a": "b:c", "d": "a:e"}`)).toEqual([]);
});
it("does not treat strings inside arrays as keys", () => {
expect(findDuplicateJsonKeys(`{"items": ["a", "a", "b"]}`)).toEqual([]);
});
});
});

View File

@@ -62,3 +62,64 @@ export function flushPromises() {
export function sleepFor(duration: number) {
return new Promise(resolve => setTimeout(resolve, duration));
}
/**
 * Scans raw JSON text for keys that are duplicated within the same object.
 *
 * `JSON.parse` silently collapses duplicate keys (the last one wins), which makes
 * it impossible to detect them from the parsed value. This scanner walks the raw
 * text, pushing/popping a scope for each `{`/`}`, and identifies a string as a
 * key when the next non-whitespace char is `:`.
 *
 * Intended for validating hand-maintained JSON files (e.g. translation files)
 * at test level. Malformed input (unterminated strings, bad escapes) does not
 * throw — the scanner degrades gracefully instead.
 *
 * @param text raw JSON source to scan
 * @returns one entry per repeated occurrence of a key, with its 1-based line number
 */
export function findDuplicateJsonKeys(text: string): Array<{ key: string; line: number }> {
    const duplicates: Array<{ key: string; line: number }> = [];
    // One Set of already-seen keys per currently-open object scope.
    const stack: Set<string>[] = [];
    let line = 1;
    let i = 0;
    while (i < text.length) {
        const c = text[i];
        if (c === "\n") {
            line++;
            i++;
        } else if (c === "{") {
            stack.push(new Set());
            i++;
        } else if (c === "}") {
            stack.pop();
            i++;
        } else if (c === '"') {
            const start = i;
            const startLine = line;
            i++;
            while (i < text.length && text[i] !== '"') {
                if (text[i] === "\\") {
                    // Keep the line counter accurate even if the escaped
                    // character is a literal newline (malformed JSON).
                    if (text[i + 1] === "\n") line++;
                    i += 2;
                } else {
                    if (text[i] === "\n") line++;
                    i++;
                }
            }
            if (i >= text.length) {
                // Unterminated string — nothing meaningful left to scan.
                break;
            }
            i++; // consume the closing quote
            // A string is a key iff the next non-whitespace char is ':'.
            let j = i;
            while (j < text.length && /\s/.test(text[j])) j++;
            if (text[j] === ":" && stack.length > 0) {
                let key: string;
                try {
                    key = JSON.parse(text.substring(start, i)) as string;
                } catch {
                    // Invalid escape sequence in a hand-edited file: fall back
                    // to the raw text between the quotes rather than throwing.
                    key = text.substring(start + 1, i - 1);
                }
                const frame = stack[stack.length - 1];
                if (frame.has(key)) {
                    duplicates.push({ key, line: startLine });
                } else {
                    frame.add(key);
                }
            }
        } else {
            i++;
        }
    }
    return duplicates;
}