Reintegrate tests (#9352)

This commit is contained in:
Elian Doran
2026-04-09 21:14:11 +03:00
committed by GitHub
35 changed files with 727 additions and 297 deletions

View File

@@ -65,13 +65,20 @@ jobs:
path: apps/server/test-output/vitest/html/
retention-days: 30
- name: Run the client-standalone tests
# Runs the same trilium-core spec set as the server suite, but in
# happy-dom + sql.js WASM via BrowserSqlProvider (see
# apps/client-standalone/src/test_setup.ts). Catches differences
# between the Node-side and browser-side runtimes.
run: pnpm run --filter=client-standalone test
- name: Run CKEditor e2e tests
run: |
pnpm run --filter=ckeditor5-mermaid test
pnpm run --filter=ckeditor5-math test
- name: Run the rest of the tests
run: pnpm run --filter=\!client --filter=\!server --filter=\!ckeditor5-mermaid --filter=\!ckeditor5-math test
run: pnpm run --filter=\!client --filter=\!client-standalone --filter=\!server --filter=\!ckeditor5-mermaid --filter=\!ckeditor5-math test
build_docker:
name: Build Docker image

View File

@@ -435,9 +435,18 @@ export default class BrowserSqlProvider implements DatabaseProvider {
loadFromBuffer(buffer: Uint8Array): void {
this.ensureSqlite3();
// SQLite WASM can deserialize a database from a byte array
const p = this.sqlite3!.wasm.allocFromTypedArray(buffer);
// SQLite WASM's allocFromTypedArray rejects Node's Buffer (and other
// non-Uint8Array typed arrays) with "expecting 8/16/32/64". Normalize
// to a plain Uint8Array view over the same memory so callers can pass
// anything readFileSync returns.
const view = buffer instanceof Uint8Array && buffer.constructor === Uint8Array
? buffer
: new Uint8Array(buffer.buffer, buffer.byteOffset, buffer.byteLength);
const p = this.sqlite3!.wasm.allocFromTypedArray(view);
try {
// Cached statements reference the previous DB and become invalid
// once we swap connections. Drop them so callers re-prepare.
this.clearStatementCache();
this.db = new this.sqlite3!.oo1.DB({ filename: ":memory:", flags: "c" });
this.opfsDbPath = undefined; // Not using OPFS
@@ -445,8 +454,8 @@ export default class BrowserSqlProvider implements DatabaseProvider {
this.db.pointer!,
"main",
p,
buffer.byteLength,
buffer.byteLength,
view.byteLength,
view.byteLength,
this.sqlite3!.capi.SQLITE_DESERIALIZE_FREEONCLOSE |
this.sqlite3!.capi.SQLITE_DESERIALIZE_RESIZEABLE
);
@@ -563,8 +572,7 @@ export default class BrowserSqlProvider implements DatabaseProvider {
this.db!.exec(query);
}
close(): void {
// Clean up all cached statements first
private clearStatementCache(): void {
for (const statement of this.statementCache.values()) {
try {
statement.finalize();
@@ -574,6 +582,10 @@ export default class BrowserSqlProvider implements DatabaseProvider {
}
}
this.statementCache.clear();
}
close(): void {
this.clearStatementCache();
if (this.db) {
this.db.close();

View File

@@ -65,7 +65,7 @@ export default class BrowserZipProvider implements ZipProvider {
try {
for (const [fileName, data] of Object.entries(files)) {
await processEntry(
{ fileName },
{ fileName: decodeZipFileName(fileName) },
() => Promise.resolve(data)
);
}
@@ -77,3 +77,25 @@ export default class BrowserZipProvider implements ZipProvider {
});
}
}
const utf8Decoder = new TextDecoder("utf-8", { fatal: true });

/**
 * Re-decodes a ZIP entry filename as UTF-8.
 *
 * fflate exposes entry names decoded as CP437/Latin-1 whenever the language
 * encoding flag (general purpose bit 11) is unset, yet many common archivers
 * (macOS, Linux unzip, Python's zipfile) store UTF-8 names without setting
 * that flag. Each character of fflate's per-byte string therefore carries one
 * raw byte; rebuild the byte sequence and decode it strictly as UTF-8. When
 * strict decoding fails the name was genuinely not UTF-8, so the as-decoded
 * string is returned unchanged.
 */
function decodeZipFileName(name: string): string {
    const rawBytes = Uint8Array.from(name, (ch) => ch.charCodeAt(0) & 0xff);
    try {
        return utf8Decoder.decode(rawBytes);
    } catch {
        return name;
    }
}

View File

@@ -0,0 +1,140 @@
import { createRequire } from "node:module";
import { readFileSync } from "node:fs";
import { fileURLToPath } from "node:url";
import { initializeCore } from "@triliumnext/core";
import schemaSql from "@triliumnext/core/src/assets/schema.sql?raw";
import HappyDomHtmlParser from "happy-dom/lib/html-parser/HTMLParser.js";
import serverEnTranslations from "../../server/src/assets/translations/en/server.json";
import { beforeAll } from "vitest";
import BrowserExecutionContext from "./lightweight/cls_provider.js";
import BrowserCryptoProvider from "./lightweight/crypto_provider.js";
import StandalonePlatformProvider from "./lightweight/platform_provider.js";
import BrowserSqlProvider from "./lightweight/sql_provider.js";
import BrowserZipProvider from "./lightweight/zip_provider.js";
// =============================================================================
// SQLite WASM compatibility shims
// =============================================================================
// The @sqlite.org/sqlite-wasm package loads its .wasm via fetch, and its
// bundled `instantiateWasm` hook overrides any user-supplied alternative.
// Two things go wrong under vitest + happy-dom:
// 1. happy-dom's `fetch()` refuses `file://` URLs.
// 2. happy-dom installs its own Response global, which Node's
// `WebAssembly.instantiateStreaming` rejects ("Received an instance of
// Response" — it wants undici's Response).
// We intercept fetch for file:// URLs ourselves and force instantiateStreaming
// to fall back to the ArrayBuffer path.
// Per-URL cache of file contents; the sqlite wasm binary may be fetched more
// than once and re-reading it from disk each time would be wasted work.
const fileFetchCache = new Map<string, ArrayBuffer>();

/**
 * Synchronously reads a file:// URL into an ArrayBuffer, memoizing per URL.
 *
 * The bytes are copied out of the Node Buffer into a standalone ArrayBuffer:
 * Buffers can be views over a shared pool, so handing out `.buffer` directly
 * could expose unrelated bytes.
 */
function readFileAsArrayBuffer(url: string): ArrayBuffer {
    const hit = fileFetchCache.get(url);
    if (hit) {
        return hit;
    }
    const fileBytes = readFileSync(fileURLToPath(url));
    const copy = fileBytes.buffer.slice(
        fileBytes.byteOffset,
        fileBytes.byteOffset + fileBytes.byteLength
    ) as ArrayBuffer;
    fileFetchCache.set(url, copy);
    return copy;
}
const originalFetch = globalThis.fetch;

// Serve file:// URLs from disk ourselves (happy-dom's fetch refuses them);
// everything else is delegated to the original fetch implementation.
globalThis.fetch = (async (input: RequestInfo | URL, init?: RequestInit) => {
    let url: string;
    if (typeof input === "string") {
        url = input;
    } else if (input instanceof URL) {
        url = input.href;
    } else {
        url = input.url;
    }
    if (!url.startsWith("file://")) {
        return originalFetch(input as RequestInfo, init);
    }
    return new Response(readFileAsArrayBuffer(url), {
        status: 200,
        headers: { "Content-Type": "application/wasm" }
    });
}) as typeof fetch;

// Force instantiateStreaming onto the ArrayBuffer path: happy-dom installs
// its own Response global, which Node's streaming API rejects (it only
// accepts undici's Response).
WebAssembly.instantiateStreaming = (async (source, importObject) => {
    const resolved = await source;
    const wasmBytes = await resolved.arrayBuffer();
    return WebAssembly.instantiate(wasmBytes, importObject);
}) as typeof WebAssembly.instantiateStreaming;
// =============================================================================
// happy-dom HTMLParser spec compliance patch
// =============================================================================
// Per HTML5 parsing spec, a single U+000A LINE FEED immediately after a <pre>,
// <listing>, or <textarea> start tag must be ignored ("newlines at the start
// of pre blocks are ignored as an authoring convenience"). Real browsers and
// domino (which the server runtime uses via turnish) both implement this;
// happy-dom (as of 20.8.9) does not — it keeps the LF as a text node.
//
// That difference makes turnish's markdown export produce different output
// under happy-dom vs. production, breaking markdown.spec.ts > "exports jQuery
// code in table properly". Patch HTMLParser.parse to pre-process the string.
// Matches a <pre>/<listing>/<textarea> start tag immediately followed by a
// newline; replacing with "$1" keeps the tag and drops the newline.
const LEADING_LF_IN_PRE_RE = /(<(?:pre|listing|textarea)\b[^>]*>)(\r\n|\r|\n)/gi;

// Structural type for the parts of happy-dom's HTMLParser we touch.
type HtmlParserLike = {
    prototype: { parse(html: string, rootNode?: unknown): unknown };
};

const htmlParserProto = (HappyDomHtmlParser as unknown as HtmlParserLike).prototype;
const originalHtmlParserParse = htmlParserProto.parse;

// Pre-process the markup before happy-dom sees it so the spec-mandated
// "ignore one LF right after the start tag" behavior is emulated.
htmlParserProto.parse = function (html: string, rootNode?: unknown) {
    const normalized = typeof html === "string"
        ? html.replace(LEADING_LF_IN_PRE_RE, "$1")
        : html;
    return originalHtmlParserParse.call(this, normalized, rootNode);
};
// =============================================================================
// Core initialization for standalone-flavored tests
// =============================================================================
// Mirror what apps/server/spec/setup.ts does: load the pre-seeded integration
// fixture DB into an in-memory sqlite-wasm instance, then initialize core
// against it with the standalone (browser) providers. Each vitest worker gets
// a fresh copy because tests run in forks (per the default pool).
// createRequire gives this ESM module CJS-style resolution so the fixture
// can be located through the workspace symlink in node_modules.
const require = createRequire(import.meta.url);
// Read once at module load; the immutable bytes are shared by every test in
// this worker while each worker process still gets its own copy.
const fixtureDb = readFileSync(
    require.resolve("@triliumnext/core/src/test/fixtures/document.db")
);

beforeAll(async () => {
    const sqlProvider = new BrowserSqlProvider();
    await sqlProvider.initWasm();
    // Deserialize the pre-seeded fixture into the in-memory sqlite-wasm DB.
    sqlProvider.loadFromBuffer(fixtureDb);
    await initializeCore({
        executionContext: new BrowserExecutionContext(),
        crypto: new BrowserCryptoProvider(),
        zip: new BrowserZipProvider(),
        // Dynamic import keeps the factory's transitive dependencies out of
        // the module graph until core init actually needs them.
        zipExportProviderFactory: (
            await import("./lightweight/zip_export_provider_factory.js")
        ).standaloneZipExportProviderFactory,
        // i18next must be wired up — keyboard_actions.ts and other modules
        // call `t()` and throw if translations are missing. Inline the
        // en/server.json resources via vite's JSON import so we don't need a
        // backend in tests.
        translations: async (i18nextInstance, locale) => {
            await i18nextInstance.init({
                lng: locale,
                fallbackLng: "en",
                ns: "server",
                defaultNS: "server",
                resources: {
                    en: { server: serverEnTranslations }
                }
            });
        },
        platform: new StandalonePlatformProvider(""),
        schema: schemaSql,
        dbConfig: {
            provider: sqlProvider,
            isReadOnly: false,
            // No-op transaction hooks: the tests don't observe commits or
            // rollbacks.
            onTransactionCommit: () => {},
            onTransactionRollback: () => {}
        }
    });
});

View File

@@ -183,7 +183,27 @@ export default defineConfig(() => ({
}
},
test: {
environment: "happy-dom"
environment: "happy-dom",
setupFiles: [join(__dirname, "src/test_setup.ts")],
dir: join(__dirname),
include: [
"src/**/*.{test,spec}.{ts,tsx}",
"../../packages/trilium-core/src/**/*.{test,spec}.{ts,tsx}"
],
server: {
deps: {
inline: ["@sqlite.org/sqlite-wasm"]
}
},
alias: {
// The package's `node.mjs` entry references a non-existent
// `sqlite3-node.mjs`. Force the browser-style entry which works
// under Node + happy-dom too.
"@sqlite.org/sqlite-wasm": join(
__dirname,
"../../node_modules/@sqlite.org/sqlite-wasm/index.mjs"
)
}
},
define: {
"process.env.IS_PREACT": JSON.stringify("true"),

View File

@@ -1,15 +1,18 @@
import { getLog, initializeCore, sql_init } from "@triliumnext/core";
import ClsHookedExecutionContext from "@triliumnext/server/src/cls_provider.js";
import NodejsCryptoProvider from "@triliumnext/server/src/crypto_provider.js";
import NodejsZipProvider from "@triliumnext/server/src/zip_provider.js";
import { loadCoreSchema } from "@triliumnext/server/src/core_assets.js";
import NodejsInAppHelpProvider from "@triliumnext/server/src/in_app_help_provider.js";
import dataDirs from "@triliumnext/server/src/services/data_dir.js";
import options from "@triliumnext/server/src/services/options.js";
import port from "@triliumnext/server/src/services/port.js";
import NodeRequestProvider from "@triliumnext/server/src/services/request.js";
import { RESOURCE_DIR } from "@triliumnext/server/src/services/resource_dir.js";
import tray from "@triliumnext/server/src/services/tray.js";
import windowService from "@triliumnext/server/src/services/window.js";
import WebSocketMessagingProvider from "@triliumnext/server/src/services/ws_messaging_provider.js";
import BetterSqlite3Provider from "@triliumnext/server/src/sql_provider.js";
import NodejsZipProvider from "@triliumnext/server/src/zip_provider.js";
import { app, BrowserWindow,globalShortcut } from "electron";
import electronDebug from "electron-debug";
import electronDl from "electron-dl";
@@ -139,10 +142,14 @@ async function main() {
request: new NodeRequestProvider(),
executionContext: new ClsHookedExecutionContext(),
messaging: new WebSocketMessagingProvider(),
schema: fs.readFileSync(require.resolve("@triliumnext/core/src/assets/schema.sql"), "utf-8"),
schema: loadCoreSchema(),
platform: new DesktopPlatformProvider(),
translations: (await import("@triliumnext/server/src/services/i18n.js")).initializeTranslations,
getDemoArchive: async () => fs.readFileSync(require.resolve("@triliumnext/server/src/assets/db/demo.zip")),
// demo.zip is a server-owned asset; src/assets is copied to dist/assets
// by the build script, so the same RESOURCE_DIR-relative path works in
// both source and bundled-production modes.
getDemoArchive: async () => fs.readFileSync(path.join(RESOURCE_DIR, "db", "demo.zip")),
inAppHelp: new NodejsInAppHelpProvider(),
extraAppInfo: {
nodeVersion: process.version,
dataDirectory: path.resolve(dataDirs.TRILIUM_DATA_DIR)

View File

@@ -1,7 +1,7 @@
import debounce from "@triliumnext/client/src/services/debounce.js";
import type { AdvancedExportOptions, ExportFormat } from "@triliumnext/core";
import NodejsInAppHelpProvider from "@triliumnext/server/src/in_app_help_provider.js";
import cls from "@triliumnext/server/src/services/cls.js";
import { parseNoteMetaFile } from "@triliumnext/server/src/services/in_app_help.js";
import type { NoteMetaFile } from "@triliumnext/server/src/services/meta/note_meta.js";
import type NoteMeta from "@triliumnext/server/src/services/meta/note_meta.js";
import fs from "fs/promises";
@@ -241,7 +241,7 @@ async function cleanUpMeta(outputPath: string, minify: boolean) {
}
if (minify) {
const subtree = parseNoteMetaFile(meta);
const subtree = new NodejsInAppHelpProvider().parseNoteMetaFile(meta);
await fs.writeFile(metaPath, JSON.stringify(subtree));
} else {
await fs.writeFile(metaPath, JSON.stringify(meta, null, 4));

View File

@@ -7,6 +7,17 @@ async function main() {
// Copy assets
build.copy("src/assets", "assets/");
// schema.sql lives in trilium-core but is loaded at server startup. The
// bundled main.cjs can't `require.resolve("@triliumnext/core/...")` in
// Docker (no workspace symlinks in the image), so we copy the file
// alongside the server's own assets and read it via RESOURCE_DIR at
// runtime. See main.ts.
build.copy("/packages/trilium-core/src/assets/schema.sql", "assets/schema.sql");
// The integration test database fixture is loaded into memory when
// TRILIUM_INTEGRATION_TEST=memory is set (used by e2e tests against the
// packaged server). Same require.resolve issue as schema.sql in
// bundled environments — copy it into the bundle so it's reachable.
build.copy("/packages/trilium-core/src/test/fixtures/document.db", "assets/test/document.db");
build.triggerBuildAndCopyTo("packages/share-theme", "share-theme/assets/");
build.copy("/packages/share-theme/src/templates", "share-theme/templates/");

View File

@@ -8,6 +8,7 @@ import NodejsCryptoProvider from "../src/crypto_provider.js";
import NodejsZipProvider from "../src/zip_provider.js";
import ServerPlatformProvider from "../src/platform_provider.js";
import BetterSqlite3Provider from "../src/sql_provider.js";
import NodejsInAppHelpProvider from "../src/in_app_help_provider.js";
import { initializeTranslations } from "../src/services/i18n.js";
// Initialize environment variables.
@@ -18,8 +19,14 @@ process.env.TRILIUM_ENV = "dev";
process.env.TRILIUM_PUBLIC_SERVER = "http://localhost:4200";
beforeAll(async () => {
// Load the integration test database into memory. The fixture at
// packages/trilium-core/src/test/fixtures/document.db is pre-seeded with
// the schema, demo content, and a known password ("demo1234") that the
// ETAPI tests log in with. Each test file runs in its own vitest fork
// (pool: "forks"), so each gets a fresh in-memory copy and mutations
// don't leak across files.
const dbProvider = new BetterSqlite3Provider();
dbProvider.loadFromMemory();
dbProvider.loadFromBuffer(readFileSync(require.resolve("@triliumnext/core/src/test/fixtures/document.db")));
await initializeCore({
dbConfig: {
@@ -34,6 +41,7 @@ beforeAll(async () => {
executionContext: new ClsHookedExecutionContext(),
schema: readFileSync(require.resolve("@triliumnext/core/src/assets/schema.sql"), "utf-8"),
platform: new ServerPlatformProvider(),
translations: initializeTranslations
translations: initializeTranslations,
inAppHelp: new NodejsInAppHelpProvider()
});
});

View File

@@ -0,0 +1,44 @@
import fs from "fs";
import path from "path";
import { RESOURCE_DIR } from "./services/resource_dir.js";
/**
 * Reads schema.sql, falling back between bundled-production and dev modes.
 *
 * In bundled production (Docker, packaged desktop) the build script copies
 * trilium-core's schema.sql into dist/assets/, which is reachable as
 * RESOURCE_DIR/schema.sql at runtime; the bundle has no @triliumnext/core
 * package on disk, so require.resolve would throw MODULE_NOT_FOUND there.
 *
 * In dev/test (running source via tsx) nothing is copied anywhere, but the
 * workspace symlink in node_modules lets require.resolve find the file.
 */
export function loadCoreSchema(): string {
    const bundledPath = path.join(RESOURCE_DIR, "schema.sql");
    const schemaPath = fs.existsSync(bundledPath)
        ? bundledPath
        : require.resolve("@triliumnext/core/src/assets/schema.sql");
    return fs.readFileSync(schemaPath, "utf-8");
}
/**
 * Resolves the on-disk path of the integration test database fixture, using
 * the same bundled-production vs. dev/test fallback as loadCoreSchema().
 *
 * A real path (rather than a buffer) is returned so callers can either read
 * it via fs.readFileSync() to seed an in-memory connection, or hand it
 * straight to better-sqlite3's `new Database(path)` for a separate
 * file-backed read-only connection (as the share module does).
 *
 * Only meaningful when TRILIUM_INTEGRATION_TEST is set; production code
 * paths that call this should be gated by that env var.
 */
export function getIntegrationTestDbPath(): string {
    const bundledPath = path.join(RESOURCE_DIR, "test", "document.db");
    return fs.existsSync(bundledPath)
        ? bundledPath
        : require.resolve("@triliumnext/core/src/test/fixtures/document.db");
}

View File

@@ -0,0 +1,165 @@
import type { HiddenSubtreeItem } from "@triliumnext/commons";
import type { InAppHelpProvider } from "@triliumnext/core";
import fs from "fs";
import path from "path";
import becca from "./becca/becca.js";
import type BNote from "./becca/entities/bnote.js";
import type NoteMeta from "./services/meta/note_meta.js";
import type { NoteMetaFile } from "./services/meta/note_meta.js";
import { RESOURCE_DIR } from "./services/resource_dir.js";
export default class NodejsInAppHelpProvider implements InAppHelpProvider {
    /**
     * Reads the pre-generated hidden-subtree definition for the bundled user
     * guide (!!!meta.json under RESOURCE_DIR/doc_notes/en/User Guide).
     *
     * Returns [] after logging a warning when the file is missing or not
     * valid JSON, so a broken help bundle never blocks startup.
     */
    getHelpHiddenSubtreeData(): HiddenSubtreeItem[] {
        const helpDir = path.join(RESOURCE_DIR, "doc_notes", "en", "User Guide");
        const metaFilePath = path.join(helpDir, "!!!meta.json");
        try {
            return JSON.parse(fs.readFileSync(metaFilePath).toString("utf-8"));
        } catch (e) {
            console.warn(e);
            return [];
        }
    }

    /**
     * Converts a parsed !!!meta.json file into hidden-subtree items.
     *
     * Only the first entry in `files` is treated as the root; its children
     * are returned (the root itself is not included in the result).
     */
    parseNoteMetaFile(noteMetaFile: NoteMetaFile): HiddenSubtreeItem[] {
        if (!noteMetaFile.files) {
            console.warn("No meta files found to parse.");
            return [];
        }

        const metaRoot = noteMetaFile.files[0];
        const parsedMetaRoot = this.parseNoteMeta(metaRoot, "/" + (metaRoot.dirFileName ?? ""));
        return parsedMetaRoot?.children ?? [];
    }

    /**
     * Recursively maps one NoteMeta entry to a HiddenSubtreeItem.
     *
     * @param noteMeta the export metadata entry to convert.
     * @param docNameRoot slash-separated directory path accumulated from the
     *                    ancestors' dirFileName values; used to build the
     *                    `docName` label for text notes.
     * @returns the mapped item, or null when the note carries
     *          `shareHiddenFromTree` and must be dropped from the tree.
     */
    parseNoteMeta(noteMeta: NoteMeta, docNameRoot: string): HiddenSubtreeItem | null {
        let iconClass: string = "bx bx-file";
        const item: HiddenSubtreeItem = {
            id: `_help_${noteMeta.noteId}`,
            title: noteMeta.title ?? "",
            type: "doc", // can change
            attributes: []
        };

        // Handle folder notes: no data file means this entry is a container.
        if (!noteMeta.dataFileName) {
            iconClass = "bx bx-folder";
            item.type = "book";
        }

        // Handle attributes
        for (const attribute of noteMeta.attributes ?? []) {
            if (attribute.name === "iconClass") {
                // Consumed here and pushed once at the end, so an explicit
                // icon overrides the type-based default.
                iconClass = attribute.value;
                continue;
            }

            if (attribute.name === "webViewSrc") {
                item.attributes?.push({
                    type: "label",
                    name: attribute.name,
                    value: attribute.value
                });
            }

            if (attribute.name === "shareHiddenFromTree") {
                return null;
            }
        }

        // Handle text notes: link them to their exported HTML document.
        if (noteMeta.type === "text" && noteMeta.dataFileName) {
            // substring(1) drops the leading "/" carried by docNameRoot.
            const docPath = `${docNameRoot}/${path.basename(noteMeta.dataFileName, ".html")}`.substring(1);
            item.attributes?.push({
                type: "label",
                name: "docName",
                value: docPath
            });
        }

        // Handle web views
        if (noteMeta.type === "webView") {
            item.type = "webView";
            item.enforceAttributes = true;
        }

        // Handle children. The child doc root depends only on this note, so
        // compute it once (previously a never-reassigned `let` rebuilt every
        // iteration, with the result bound to a shadowed `item`).
        if (noteMeta.children) {
            const childDocNameRoot = noteMeta.dirFileName ? `${docNameRoot}/${noteMeta.dirFileName}` : docNameRoot;
            const children: HiddenSubtreeItem[] = [];
            for (const childMeta of noteMeta.children) {
                const childItem = this.parseNoteMeta(childMeta, childDocNameRoot);
                if (childItem) {
                    children.push(childItem);
                }
            }
            item.children = children;
        }

        // Handle note icon
        item.attributes?.push({
            name: "iconClass",
            value: iconClass,
            type: "label"
        });

        return item;
    }

    /**
     * Iterates recursively through the help subtree that the user has and compares it against the definition
     * to remove any notes that are no longer present in the latest version of the help.
     *
     * @param helpDefinition the hidden subtree definition for the help, to compare against the user's structure.
     */
    cleanUpHelp(helpDefinition: HiddenSubtreeItem[]): void {
        // Flattens the definition tree into the full list of note IDs it holds.
        function getFlatIds(items: HiddenSubtreeItem | HiddenSubtreeItem[]) {
            const ids: (string | string[])[] = [];
            if (Array.isArray(items)) {
                for (const item of items) {
                    ids.push(getFlatIds(item));
                }
            } else {
                if (items.children) {
                    for (const child of items.children) {
                        ids.push(getFlatIds(child));
                    }
                }
                ids.push(items.id);
            }
            return ids.flat();
        }

        // Flattens the user's actual help subtree (from becca) into note IDs.
        function getFlatIdsFromNote(note: BNote | null) {
            if (!note) {
                return [];
            }
            const ids: (string | string[])[] = [];
            for (const subnote of note.getChildNotes()) {
                ids.push(getFlatIdsFromNote(subnote));
            }
            ids.push(note.noteId);
            return ids.flat();
        }

        const definitionHelpIds = new Set(getFlatIds(helpDefinition));
        const realHelpIds = getFlatIdsFromNote(becca.getNote("_help"));
        for (const realHelpId of realHelpIds) {
            if (realHelpId === "_help") {
                continue;
            }

            // Anything present locally but absent from the current definition
            // is stale help content; delete it.
            if (!definitionHelpIds.has(realHelpId)) {
                becca.getNote(realHelpId)?.deleteNote();
            }
        }
    }
}

View File

@@ -9,11 +9,14 @@ import { t } from "i18next";
import path from "path";
import ClsHookedExecutionContext from "./cls_provider.js";
import { getIntegrationTestDbPath, loadCoreSchema } from "./core_assets.js";
import NodejsCryptoProvider from "./crypto_provider.js";
import NodejsInAppHelpProvider from "./in_app_help_provider.js";
import ServerPlatformProvider from "./platform_provider.js";
import dataDirs from "./services/data_dir.js";
import port from "./services/port.js";
import NodeRequestProvider from "./services/request.js";
import { RESOURCE_DIR } from "./services/resource_dir.js";
import WebSocketMessagingProvider from "./services/ws_messaging_provider.js";
import BetterSqlite3Provider from "./sql_provider.js";
import NodejsZipProvider from "./zip_provider.js";
@@ -23,7 +26,16 @@ async function startApplication() {
const { DOCUMENT_PATH } = (await import("./services/data_dir.js")).default;
const dbProvider = new BetterSqlite3Provider();
dbProvider.loadFromFile(DOCUMENT_PATH, config.General.readOnly);
if (process.env.TRILIUM_INTEGRATION_TEST === "memory") {
// Integration test mode: load the same fixture buffer used by the
// unit test setup so e2e tests get a known-good starting state
// (schema + demo content + known password) without touching disk.
// getIntegrationTestDbPath() handles the bundled-vs-source path
// resolution; see core_assets.ts.
dbProvider.loadFromBuffer(fs.readFileSync(getIntegrationTestDbPath()));
} else {
dbProvider.loadFromFile(DOCUMENT_PATH, config.General.readOnly);
}
await initializeCore({
dbConfig: {
@@ -57,10 +69,14 @@ async function startApplication() {
request: new NodeRequestProvider(),
executionContext: new ClsHookedExecutionContext(),
messaging: new WebSocketMessagingProvider(),
schema: fs.readFileSync(require.resolve("@triliumnext/core/src/assets/schema.sql"), "utf-8"),
schema: loadCoreSchema(),
platform: new ServerPlatformProvider(),
translations: (await import("./services/i18n.js")).initializeTranslations,
getDemoArchive: async () => fs.readFileSync(require.resolve("@triliumnext/server/src/assets/db/demo.zip")),
// demo.zip is a server-owned asset; src/assets is copied to dist/assets
// by the build script, so the same RESOURCE_DIR-relative path works in
// both source and bundled-production modes.
getDemoArchive: async () => fs.readFileSync(path.join(RESOURCE_DIR, "db", "demo.zip")),
inAppHelp: new NodejsInAppHelpProvider(),
extraAppInfo: {
nodeVersion: process.version,
dataDirectory: path.resolve(dataDirs.TRILIUM_DATA_DIR)

View File

@@ -1,7 +1,9 @@
import { BackupDatabaseNowResponse, DatabaseCheckIntegrityResponse } from "@triliumnext/commons";
import { becca_loader, rebuildIntegrationTestDatabase as rebuildIntegrationTestDatabaseCore,ValidationError } from "@triliumnext/core";
import { becca_loader, ValidationError } from "@triliumnext/core";
import type { Request } from "express";
import { readFileSync } from "fs";
import { getIntegrationTestDbPath } from "../../core_assets.js";
import anonymizationService from "../../services/anonymization.js";
import backupService from "../../services/backup.js";
import consistencyChecksService from "../../services/consistency_checks.js";
@@ -30,7 +32,13 @@ function findAndFixConsistencyIssues() {
}
async function rebuildIntegrationTestDatabase() {
rebuildIntegrationTestDatabaseCore();
// Reload the integration test database fixture into the in-memory SQL
// backend, then re-init schema-dependent state and the becca cache.
// Test-mode only — registered in routes.ts under the same env-var guard.
// getIntegrationTestDbPath() handles the bundled-vs-source path
// resolution; see core_assets.ts.
const fixtureBytes = readFileSync(getIntegrationTestDbPath());
sql.rebuildFromBuffer(fixtureBytes);
sql_init.initializeDb();
becca_loader.load();
}

View File

@@ -1,7 +1,10 @@
import { describe, expect, expectTypeOf, it } from "vitest";
import { parseNoteMeta } from "./in_app_help.js";
import { describe, expect, it } from "vitest";
import NodejsInAppHelpProvider from "../in_app_help_provider.js";
import type NoteMeta from "./meta/note_meta.js";
const provider = new NodejsInAppHelpProvider();
describe("In-app help", () => {
it("preserves custom folder icon", () => {
const meta: NoteMeta = {
@@ -29,7 +32,7 @@ describe("In-app help", () => {
children: []
};
const item = parseNoteMeta(meta, "/");
const item = provider.parseNoteMeta(meta, "/");
const icon = item?.attributes?.find((a) => a.name === "iconClass");
expect(icon?.value).toBe("bx bx-star");
});
@@ -60,7 +63,7 @@ describe("In-app help", () => {
children: []
};
const item = parseNoteMeta(meta, "/");
const item = provider.parseNoteMeta(meta, "/");
expect(item).toBeFalsy();
});
});

View File

@@ -1,158 +0,0 @@
import path from "path";
import fs from "fs";
import type NoteMeta from "./meta/note_meta.js";
import type { NoteMetaFile } from "./meta/note_meta.js";
import type BNote from "../becca/entities/bnote.js";
import becca from "../becca/becca.js";
import type { HiddenSubtreeItem } from "@triliumnext/commons";
import { RESOURCE_DIR } from "./resource_dir.js";
export function getHelpHiddenSubtreeData() {
const helpDir = path.join(RESOURCE_DIR, "doc_notes", "en", "User Guide");
const metaFilePath = path.join(helpDir, "!!!meta.json");
try {
return JSON.parse(fs.readFileSync(metaFilePath).toString("utf-8"));
} catch (e) {
console.warn(e);
return [];
}
}
export function parseNoteMetaFile(noteMetaFile: NoteMetaFile): HiddenSubtreeItem[] {
if (!noteMetaFile.files) {
console.log("No meta files");
return [];
}
const metaRoot = noteMetaFile.files[0];
const parsedMetaRoot = parseNoteMeta(metaRoot, "/" + (metaRoot.dirFileName ?? ""));
return parsedMetaRoot?.children ?? [];
}
export function parseNoteMeta(noteMeta: NoteMeta, docNameRoot: string): HiddenSubtreeItem | null {
let iconClass: string = "bx bx-file";
const item: HiddenSubtreeItem = {
id: `_help_${noteMeta.noteId}`,
title: noteMeta.title ?? "",
type: "doc", // can change
attributes: []
};
// Handle folder notes
if (!noteMeta.dataFileName) {
iconClass = "bx bx-folder";
item.type = "book";
}
// Handle attributes
for (const attribute of noteMeta.attributes ?? []) {
if (attribute.name === "iconClass") {
iconClass = attribute.value;
continue;
}
if (attribute.name === "webViewSrc") {
item.attributes?.push({
type: "label",
name: attribute.name,
value: attribute.value
});
}
if (attribute.name === "shareHiddenFromTree") {
return null;
}
}
// Handle text notes
if (noteMeta.type === "text" && noteMeta.dataFileName) {
const docPath = `${docNameRoot}/${path.basename(noteMeta.dataFileName, ".html")}`.substring(1);
item.attributes?.push({
type: "label",
name: "docName",
value: docPath
});
}
// Handle web views
if (noteMeta.type === "webView") {
item.type = "webView";
item.enforceAttributes = true;
}
// Handle children
if (noteMeta.children) {
const children: HiddenSubtreeItem[] = [];
for (const childMeta of noteMeta.children) {
let newDocNameRoot = noteMeta.dirFileName ? `${docNameRoot}/${noteMeta.dirFileName}` : docNameRoot;
const item = parseNoteMeta(childMeta, newDocNameRoot);
if (item) {
children.push(item);
}
}
item.children = children;
}
// Handle note icon
item.attributes?.push({
name: "iconClass",
value: iconClass,
type: "label"
});
return item;
}
/**
* Iterates recursively through the help subtree that the user has and compares it against the definition
* to remove any notes that are no longer present in the latest version of the help.
*
* @param helpDefinition the hidden subtree definition for the help, to compare against the user's structure.
*/
export function cleanUpHelp(helpDefinition: HiddenSubtreeItem[]) {
function getFlatIds(items: HiddenSubtreeItem | HiddenSubtreeItem[]) {
const ids: (string | string[])[] = [];
if (Array.isArray(items)) {
for (const item of items) {
ids.push(getFlatIds(item));
}
} else {
if (items.children) {
for (const child of items.children) {
ids.push(getFlatIds(child));
}
}
ids.push(items.id);
}
return ids.flat();
}
function getFlatIdsFromNote(note: BNote | null) {
if (!note) {
return [];
}
const ids: (string | string[])[] = [];
for (const subnote of note.getChildNotes()) {
ids.push(getFlatIdsFromNote(subnote));
}
ids.push(note.noteId);
return ids.flat();
}
const definitionHelpIds = new Set(getFlatIds(helpDefinition));
const realHelpIds = getFlatIdsFromNote(becca.getNote("_help"));
for (const realHelpId of realHelpIds) {
if (realHelpId === "_help") {
continue;
}
if (!definitionHelpIds.has(realHelpId)) {
becca.getNote(realHelpId)?.deleteNote();
}
}
}

View File

@@ -1,3 +1,17 @@
import { getSql } from "@triliumnext/core";
export default getSql();
// Lazy proxy: defers getSql() until first property access. Without this,
// any static import chain that reaches this file crashes if initSql() has
// not been called yet. The server avoids that today only because main.ts
// is careful to use dynamic imports for everything that transitively
// touches sql — an unenforced invariant. Tests hit the trap because
// vitest's beforeAll runs after the test file's static imports resolve.
const sqlProxy = new Proxy({} as ReturnType<typeof getSql>, {
    get(_target, prop) {
        const sql = getSql();
        // Look the property up with `this` bound to the real sql object.
        // Forwarding the trap's receiver (the proxy) here would make any
        // accessor on sql run with `this` pointing at the proxy, re-entering
        // this trap for every property the accessor touches.
        const value = Reflect.get(sql, prop);
        // Bind methods so extracted references (`const run = sql.run`)
        // keep working against the real object.
        return typeof value === "function" ? value.bind(sql) : value;
    }
});

export default sqlProxy;

View File

@@ -1,6 +1,7 @@
"use strict";
import Database from "better-sqlite3";
import { getIntegrationTestDbPath } from "../core_assets.js";
import dataDir from "../services/data_dir.js";
import sql_init from "../services/sql_init.js";
@@ -8,7 +9,17 @@ let dbConnection!: Database.Database;
let dbConnectionReady = false;
sql_init.dbReady.then(() => {
dbConnection = new Database(dataDir.DOCUMENT_PATH, {
// The share module opens its own read-only connection to the on-disk
// database for isolation from the main read/write connection. In
// integration test mode `dataDir.DOCUMENT_PATH` doesn't contain a real
// database (the main connection is in-memory, loaded from a fixture
// buffer), so we open the fixture file directly. getIntegrationTestDbPath
// handles bundled-vs-source path resolution; see core_assets.ts.
const dbPath = process.env.TRILIUM_INTEGRATION_TEST
? getIntegrationTestDbPath()
: dataDir.DOCUMENT_PATH;
dbConnection = new Database(dbPath, {
readonly: true,
nativeBinding: process.env.BETTERSQLITE3_NATIVE_PATH || undefined
});

View File

@@ -31,6 +31,10 @@ export default class BetterSqlite3Provider implements DatabaseProvider {
}
loadFromBuffer(buffer: NonSharedBuffer) {
    // Release the previous connection's file handles before swapping in a
    // replacement; repeated rebuilds in tests would otherwise leak the
    // old handle on every call.
    const previous = this.dbConnection;
    if (previous) {
        previous.close();
    }
    this.dbConnection = new Database(buffer, dbOpts);
}

View File

@@ -13,9 +13,19 @@ export default defineConfig(() => ({
env: {
NODE_ENV: "development",
TRILIUM_DATA_DIR: "./spec/db",
TRILIUM_INTEGRATION_TEST: "memory"
TRILIUM_INTEGRATION_TEST: "memory",
// Must be set in the vitest env (not in spec/setup.ts) so import-time
// constants like `isDev` in apps/server/src/services/utils.ts evaluate
// correctly. setup.ts top-level statements run AFTER its static imports
// resolve, so any env var assigned there is too late for module-load
// constants in transitively-imported files.
TRILIUM_ENV: "dev",
TRILIUM_PUBLIC_SERVER: "http://localhost:4200"
},
include: ['{src,spec}/**/*.{test,spec}.{js,mjs,cjs,ts,mts,cts,jsx,tsx}'],
include: [
'{src,spec}/**/*.{test,spec}.{js,mjs,cjs,ts,mts,cts,jsx,tsx}',
'../../packages/trilium-core/src/**/*.{test,spec}.{ts,tsx}'
],
exclude: [
"spec/build-checks/**",
],

View File

@@ -4,7 +4,7 @@
"description": "",
"main": "src/index.ts",
"scripts": {
"test": "echo \"Error: no test specified\" && exit 1"
"test": "echo \"Note: Core tests are run either through apps/server or apps/client-standalone, since they have different runtime environments.\""
},
"dependencies": {
"@braintree/sanitize-url": "7.1.1",

View File

@@ -11,6 +11,7 @@ import appInfo from "./services/app_info";
import { type PlatformProvider, initPlatform } from "./services/platform";
import { type ZipProvider, initZipProvider } from "./services/zip_provider";
import { type ZipExportProviderFactory, initZipExportProviderFactory } from "./services/export/zip_export_provider_factory";
import { type InAppHelpProvider, initInAppHelp } from "./services/in_app_help";
export { getLog } from "./services/log";
export type * from "./services/sql/types";
@@ -96,6 +97,7 @@ export { default as content_hash } from "./services/content_hash";
export { default as sync_mutex } from "./services/sync_mutex";
export { default as setup } from "./services/setup";
export { getPlatform, type PlatformProvider } from "./services/platform";
export type { InAppHelpProvider } from "./services/in_app_help";
export { t } from "i18next";
export type { RequestProvider, ExecOpts, CookieJar } from "./services/request";
export type * from "./meta";
@@ -119,7 +121,7 @@ export { default as scriptService } from "./services/script";
export { default as BackendScriptApi, type Api as BackendScriptApiInterface } from "./services/backend_script_api";
export * as scheduler from "./services/scheduler";
export async function initializeCore({ dbConfig, executionContext, crypto, zip, zipExportProviderFactory, translations, messaging, request, schema, extraAppInfo, platform, getDemoArchive }: {
export async function initializeCore({ dbConfig, executionContext, crypto, zip, zipExportProviderFactory, translations, messaging, request, schema, extraAppInfo, platform, getDemoArchive, inAppHelp }: {
dbConfig: SqlServiceParams,
executionContext: ExecutionContext,
crypto: CryptoProvider,
@@ -131,6 +133,7 @@ export async function initializeCore({ dbConfig, executionContext, crypto, zip,
messaging?: MessagingProvider,
request?: RequestProvider,
getDemoArchive?: () => Promise<Uint8Array | null>,
inAppHelp?: InAppHelpProvider,
extraAppInfo?: {
nodeVersion: string;
dataDirectory: string;
@@ -155,4 +158,7 @@ export async function initializeCore({ dbConfig, executionContext, crypto, zip,
if (request) {
initRequest(request);
}
if (inAppHelp) {
initInAppHelp(inAppHelp);
}
};

View File

@@ -1,10 +1,19 @@
import { readFileSync } from "node:fs";
import { dirname, join } from "node:path";
import { fileURLToPath } from "node:url";
import { describe, expect, it, beforeEach } from "vitest";
import * as cls from "../services/context.js";
import { getSql, rebuildIntegrationTestDatabase } from "../services/sql/index.js";
import { getSql } from "../services/sql/index.js";
import becca from "../becca/becca.js";
import becca_loader from "../becca/becca_loader.js";
import migration from "./0233__migrate_geo_map_to_collection.js";
// Resolve fixture path relative to this spec file. Spec files only ever run
// under vitest (which uses ESM via Vite), so import.meta.url is available;
// the CLAUDE.md restriction against import.meta.url applies to production
// code that gets bundled to CJS, not to test files.
const __dirname = dirname(fileURLToPath(import.meta.url));
/**
* Test suite for migration 0233 which converts geoMap notes to book type with viewConfig attachments.
*
@@ -19,11 +28,20 @@ import migration from "./0233__migrate_geo_map_to_collection.js";
* test data into the database, then verifies the migration transforms the data correctly.
*/
describe("Migration 0233: Migrate geoMap to collection", () => {
const sql = getSql();
let sql: ReturnType<typeof getSql>;
beforeEach(async () => {
// Set up a clean in-memory database for each test
rebuildIntegrationTestDatabase();
// getSql() is resolved here (not at describe-collection time) so that
// initializeCore() in the test setup's beforeAll has had a chance to
// run first. Capturing it eagerly at the top of describe crashes with
// "SQL not initialized" because describe callbacks run before any
// beforeAll hooks fire.
sql = getSql();
// Reload the integration test database from the fixture for each test
// so mutations from one test don't leak into the next.
const dbBytes = readFileSync(join(__dirname, "../test/fixtures/document.db"));
sql.rebuildFromBuffer(dbBytes);
await new Promise<void>((resolve) => {
cls.getContext().init(() => {

View File

@@ -1,8 +1,20 @@
export function cleanUpHelp(items: unknown[]) {
// TODO: implement.
import type { HiddenSubtreeItem } from "@triliumnext/commons";
/**
 * Contract for the host-supplied in-app help implementation. The core
 * package only declares the operations; a concrete provider is registered
 * at startup (see initInAppHelp in this module), and the module-level
 * accessors fall back to no-ops / empty results until one is registered.
 */
export interface InAppHelpProvider {
    /** Returns the hidden-subtree items describing the in-app help tree. */
    getHelpHiddenSubtreeData(): HiddenSubtreeItem[];
    /**
     * Cleans up help notes given the current subtree definition — presumably
     * removing stale notes not present in `items`; confirm against the
     * concrete provider implementation.
     */
    cleanUpHelp(items: HiddenSubtreeItem[]): void;
}
export function getHelpHiddenSubtreeData() {
// TODO: implement.
return [];
let provider: InAppHelpProvider | null = null;
/**
 * Registers the concrete in-app help provider. Until this is called, the
 * module's accessor functions return empty results / do nothing.
 */
export function initInAppHelp(p: InAppHelpProvider) {
    provider = p;
}
/**
 * Returns the hidden-subtree items for the in-app help tree, or an empty
 * list when no provider has been registered (or it yields nothing).
 */
export function getHelpHiddenSubtreeData(): HiddenSubtreeItem[] {
    const items = provider?.getHelpHiddenSubtreeData();
    return items ?? [];
}
/**
 * Delegates help cleanup to the registered provider; silently a no-op when
 * no provider has been registered yet.
 */
export function cleanUpHelp(items: HiddenSubtreeItem[]): void {
    if (provider) {
        provider.cleanUpHelp(items);
    }
}

View File

@@ -1,12 +1,22 @@
import { readFileSync } from "node:fs";
import { dirname, join } from "node:path";
import { fileURLToPath } from "node:url";
import { describe, expect, it } from "vitest";
import { getContext } from "./context.js";
import { getSql } from "./sql/index.js";
// Resolve fixture path relative to this spec file. Spec files only ever run
// under vitest (which uses ESM via Vite), so import.meta.url is available;
// the CLAUDE.md restriction against import.meta.url applies to production
// code that gets bundled to CJS, not to test files.
const __dirname = dirname(fileURLToPath(import.meta.url));
describe("Migration", () => {
it("migrates from v214", async () => {
await new Promise<void>((resolve) => {
getContext().init(async () => {
const { getSql, rebuildIntegrationTestDatabase } = (await (import("./sql/index.js")));
rebuildIntegrationTestDatabase("spec/db/document_v214.db");
const dbBytes = readFileSync(join(__dirname, "../test/fixtures/document_v214.db"));
getSql().rebuildFromBuffer(dbBytes);
const migration = (await import("./migration.js")).default;
await migration.migrateIfNecessary();

View File

@@ -1,5 +1,5 @@
import { trimIndentation } from "@triliumnext/commons";
import { beforeEach, describe, expect, it, vi } from "vitest";
import { beforeAll, beforeEach, describe, expect, it, vi } from "vitest";
import becca from "../becca/becca.js";
import BBranch from "../becca/entities/bbranch.js";
@@ -61,10 +61,18 @@ describe("Script", () => {
});
describe("dayjs in backend scripts", () => {
const scriptNote = buildNote({
type: "code",
mime: "application/javascript;env=backend",
content: ""
// buildNote() is called inside beforeAll (not at describe-collection
// time) so that the test setup's initializeCore() has run first.
// buildNote generates random IDs via getCrypto(), which crashes with
// "Crypto not initialized" if invoked during describe collection.
let scriptNote: ReturnType<typeof buildNote>;
beforeAll(() => {
scriptNote = buildNote({
type: "code",
mime: "application/javascript;env=backend",
content: ""
});
});
it("dayjs is available", () => {

View File

@@ -1,92 +1,75 @@
import { beforeEach, describe, expect, it, vi } from 'vitest';
import { beforeEach, describe, expect, it } from "vitest";
const mockBecca = {
notes: {} as Record<string, any>,
getNote: vi.fn()
};
import becca from "../../becca/becca.js";
import BNote from "../../becca/entities/bnote.js";
import { buildNote } from "../../test/becca_easy_mocking.js";
import SearchResult from "./search_result.js";
const mockBeccaService = {
getNoteTitleForPath: vi.fn()
};
describe("SearchResult", () => {
let note: BNote;
vi.mock('../../becca/becca.js', () => ({
default: mockBecca
}));
vi.mock('../../becca/becca_service.js', () => ({
default: mockBeccaService
}));
let SearchResult: any;
beforeEach(async () => {
vi.clearAllMocks();
mockBeccaService.getNoteTitleForPath.mockReturnValue('Test Note Title');
mockBecca.notes['test123'] = {
noteId: 'test123',
title: 'Test Note',
isInHiddenSubtree: vi.fn().mockReturnValue(false)
};
const module = await import('./search_result.js');
SearchResult = module.default;
});
describe('SearchResult', () => {
describe('constructor', () => {
it('should initialize with note path array', () => {
const searchResult = new SearchResult(['root', 'folder', 'test123']);
expect(searchResult.notePathArray).toEqual(['root', 'folder', 'test123']);
expect(searchResult.noteId).toBe('test123');
expect(searchResult.notePath).toBe('root/folder/test123');
expect(searchResult.score).toBe(0);
expect(mockBeccaService.getNoteTitleForPath).toHaveBeenCalledWith(['root', 'folder', 'test123']);
beforeEach(() => {
becca.reset();
note = buildNote({
id: "test123",
title: "Test Note"
});
});
describe('computeScore', () => {
let searchResult: any;
describe("constructor", () => {
it("should initialize with note path array", () => {
const searchResult = new SearchResult([note.noteId]);
expect(searchResult.notePathArray).toEqual(["test123"]);
expect(searchResult.noteId).toBe("test123");
expect(searchResult.notePath).toBe("test123");
expect(searchResult.score).toBe(0);
expect(searchResult.notePathTitle).toBe("Test Note");
});
});
describe("computeScore", () => {
let searchResult: SearchResult;
beforeEach(() => {
searchResult = new SearchResult(['root', 'test123']);
searchResult = new SearchResult([note.noteId]);
});
describe('basic scoring', () => {
it('should give highest score for exact note ID match', () => {
searchResult.computeScore('test123', ['test123']);
describe("basic scoring", () => {
it("should give highest score for exact note ID match", () => {
searchResult.computeScore("test123", ["test123"]);
expect(searchResult.score).toBeGreaterThanOrEqual(1000);
});
it('should give high score for exact title match', () => {
searchResult.computeScore('test note', ['test', 'note']);
it("should give high score for exact title match", () => {
searchResult.computeScore("test note", ["test", "note"]);
expect(searchResult.score).toBeGreaterThan(2000);
});
it('should give medium score for title prefix match', () => {
searchResult.computeScore('test', ['test']);
it("should give medium score for title prefix match", () => {
searchResult.computeScore("test", ["test"]);
expect(searchResult.score).toBeGreaterThan(500);
});
it('should give lower score for title word match', () => {
mockBecca.notes['test123'].title = 'This is a test note';
searchResult.computeScore('test', ['test']);
it("should give lower score for title word match", () => {
note.title = "This is a test note";
searchResult.computeScore("test", ["test"]);
expect(searchResult.score).toBeGreaterThan(300);
});
});
describe('hidden notes penalty', () => {
it('should apply penalty for hidden notes', () => {
mockBecca.notes['test123'].isInHiddenSubtree.mockReturnValue(true);
describe("hidden notes penalty", () => {
it("should apply penalty for hidden notes", () => {
const hiddenNote = buildNote({
id: "_hidden",
title: "Test Note"
});
const hiddenSearchResult = new SearchResult([hiddenNote.noteId]);
searchResult.computeScore('test', ['test']);
const hiddenScore = searchResult.score;
hiddenSearchResult.computeScore("test", ["test"]);
const hiddenScore = hiddenSearchResult.score;
mockBecca.notes['test123'].isInHiddenSubtree.mockReturnValue(false);
searchResult.score = 0;
searchResult.computeScore('test', ['test']);
searchResult.computeScore("test", ["test"]);
const normalScore = searchResult.score;
expect(normalScore).toBeGreaterThan(hiddenScore);
@@ -95,51 +78,51 @@ describe('SearchResult', () => {
});
});
describe('addScoreForStrings', () => {
let searchResult: any;
describe("addScoreForStrings", () => {
let searchResult: SearchResult;
beforeEach(() => {
searchResult = new SearchResult(['root', 'test123']);
searchResult = new SearchResult([note.noteId]);
});
it('should give highest score for exact token match', () => {
searchResult.addScoreForStrings(['sample'], 'sample text', 1.0);
it("should give highest score for exact token match", () => {
searchResult.addScoreForStrings(["sample"], "sample text", 1.0);
const exactScore = searchResult.score;
searchResult.score = 0;
searchResult.addScoreForStrings(['sample'], 'sampling text', 1.0);
searchResult.addScoreForStrings(["sample"], "sampling text", 1.0);
const prefixScore = searchResult.score;
searchResult.score = 0;
searchResult.addScoreForStrings(['sample'], 'text sample text', 1.0);
searchResult.addScoreForStrings(["sample"], "text sample text", 1.0);
const partialScore = searchResult.score;
expect(exactScore).toBeGreaterThan(prefixScore);
expect(exactScore).toBeGreaterThanOrEqual(partialScore);
});
it('should apply factor multiplier correctly', () => {
searchResult.addScoreForStrings(['sample'], 'sample text', 2.0);
it("should apply factor multiplier correctly", () => {
searchResult.addScoreForStrings(["sample"], "sample text", 2.0);
const doubleFactorScore = searchResult.score;
searchResult.score = 0;
searchResult.addScoreForStrings(['sample'], 'sample text', 1.0);
searchResult.addScoreForStrings(["sample"], "sample text", 1.0);
const singleFactorScore = searchResult.score;
expect(doubleFactorScore).toBe(singleFactorScore * 2);
});
it('should handle multiple tokens', () => {
searchResult.addScoreForStrings(['hello', 'world'], 'hello world test', 1.0);
it("should handle multiple tokens", () => {
searchResult.addScoreForStrings(["hello", "world"], "hello world test", 1.0);
expect(searchResult.score).toBeGreaterThan(0);
});
it('should be case insensitive', () => {
searchResult.addScoreForStrings(['sample'], 'sample text', 1.0);
it("should be case insensitive", () => {
searchResult.addScoreForStrings(["sample"], "sample text", 1.0);
const lowerCaseScore = searchResult.score;
searchResult.score = 0;
searchResult.addScoreForStrings(['sample'], 'SAMPLE text', 1.0);
searchResult.addScoreForStrings(["sample"], "SAMPLE text", 1.0);
const upperCaseScore = searchResult.score;
expect(upperCaseScore).toEqual(lowerCaseScore);

View File

@@ -1,10 +1,18 @@
import { describe, it, expect, beforeEach } from "vitest";
import { beforeAll, beforeEach, describe, expect, it } from "vitest";
import ValueExtractor from "./value_extractor.js";
import becca from "../../becca/becca.js";
import SearchContext from "./search_context.js";
import { note } from "../../test/becca_mocking.js";
const dsc = new SearchContext();
// SearchContext is constructed inside beforeAll (not at module load) so that
// the test setup's initializeCore() has run first. Constructing it at module
// top level crashes with "Context not initialized" because module evaluation
// happens before any beforeAll hooks fire.
let dsc: SearchContext;
beforeAll(() => {
dsc = new SearchContext();
});
describe("Value extractor", () => {
beforeEach(() => {

View File

@@ -13,7 +13,3 @@ export function getSql(): SqlService {
if (!sql) throw new Error("SQL not initialized");
return sql;
}
/**
 * Stub retained for API compatibility; no implementation is available here.
 *
 * @param fixturePath accepted for signature compatibility, currently ignored.
 * @throws Error always ("Not implemented").
 */
export function rebuildIntegrationTestDatabase(path?: string) {
    void path; // intentionally unused until an implementation exists
    throw new Error("Not implemented");
}

View File

@@ -27,6 +27,20 @@ export class SqlService {
this.params = restParams;
}
/**
 * Replace the underlying database with a fresh in-memory copy of the
 * given buffer. Used by integration tests that need a clean DB per test.
 *
 * Clears the prepared-statement cache because cached statements are
 * bound to the previous connection and become invalid after the swap.
 *
 * Not safe to call inside a transaction.
 *
 * @param buffer serialized SQLite database image (e.g. a fixture file
 *               read from disk) to load as the new connection's contents.
 */
rebuildFromBuffer(buffer: Uint8Array) {
    // Drop cached statements first so nothing can be executed against the
    // old connection while it is being replaced.
    this.statementCache = {};
    this.dbConnection.loadFromBuffer(buffer);
}
insert<T extends {}>(tableName: string, rec: T, replace = false) {
const keys = Object.keys(rec || {});
if (keys.length === 0) {

View File

@@ -465,11 +465,19 @@ describe("#toMap", () => {
expect(result).toBeInstanceOf(Map);
expect(result.size).toBe(0);
});
it.fails("should correctly handle duplicate keys? (currently it will overwrite the entry, so returned size will be 1 instead of 2)", () => {
const testList = [ { title: "testDupeTitle", propA: "text", propB: 123 }, { title: "testDupeTitle", propA: "prop2", propB: 456 } ];
it("collapses entries when keys collide (last write wins)", () => {
// Documents the current contract of toMap: when two entries share a
// key, the later entry overwrites the earlier one. See the TODO in
// toMap itself — if the contract ever changes to preserve duplicates
// (e.g. returning Map<K, T[]>), update this test along with the change.
const testList = [
{ title: "testDupeTitle", propA: "text", propB: 123 },
{ title: "testDupeTitle", propA: "prop2", propB: 456 }
];
const result = utils.toMap(testList, "title");
expect(result).toBeInstanceOf(Map);
expect(result.size).toBe(2);
expect(result.size).toBe(1);
expect(result.get("testDupeTitle")?.propA).toBe("prop2");
});
});
@@ -530,8 +538,15 @@ describe("#safeExtractMessageAndStackFromError", () => {
expect(actual[1]).not.toBeUndefined();
});
it("should use the fallback 'Unknown Error' message, if it gets passed anything else than an instance of an Error", () => {
const testNonError = "this is not an instance of an Error, but JS technically allows us to throw this anyways";
it("should pass a thrown string through as the message, with no stack", () => {
const testString = "this is not an instance of an Error, but JS technically allows us to throw this anyways";
const actual = utils.safeExtractMessageAndStackFromError(testString);
expect(actual[0]).toBe(testString);
expect(actual[1]).toBeUndefined();
});
it("should use the fallback 'Unknown Error' message, if it gets passed something that is neither an Error nor a string", () => {
const testNonError = { not: "an error" };
const actual = utils.safeExtractMessageAndStackFromError(testNonError);
expect(actual[0]).toBe("Unknown Error");
expect(actual[1]).toBeUndefined();

View File

@@ -211,7 +211,7 @@ export const escapeHtml = escape;
export const unescapeHtml = unescape;
/**
 * Generates a cryptographically secure random token, base64-encoded.
 *
 * @param bytes number of random bytes to generate before encoding
 *              (defaults to 32, i.e. 256 bits of entropy).
 * @returns base64 encoding of the generated random bytes.
 */
export function randomSecureToken(bytes = 32) {
    // Honor the requested size: the previous body hard-coded 32 (ignoring
    // the parameter) and left a second, unreachable return statement behind.
    return encodeBase64(getCrypto().randomBytes(bytes));
}
export function safeExtractMessageAndStackFromError(err: unknown): [errMessage: string, errStack: string | undefined] {

View File

@@ -54,7 +54,23 @@ export default class BuildHelper {
"pdfjs-dist",
"./xhr-sync-worker.js",
"vite",
"tesseract.js"
"tesseract.js",
// Test fixtures referenced via require.resolve from
// integration-test-only code paths in apps/server. These
// paths are gated at runtime by TRILIUM_INTEGRATION_TEST and
// never reached in production, but esbuild can't see through
// the gate during static analysis. Marking them external
// suppresses the spurious "require.resolve not external"
// warning without affecting the bundle behavior.
"@triliumnext/core/src/test/*",
// schema.sql is read via core_assets.ts, which prefers a
// bundled copy at RESOURCE_DIR/schema.sql (placed there by
// apps/server/scripts/build.ts) and only falls back to
// require.resolve in dev/test mode. In bundled production
// the require.resolve branch is unreachable, but esbuild
// still sees the static string and warns. External marker
// suppresses the warning without changing runtime behavior.
"@triliumnext/core/src/assets/*"
],
metafile: true,
splitting: false,