mirror of
https://github.com/zadam/trilium.git
synced 2026-05-06 17:47:43 +02:00
Standalone stabilization (#9395)
This commit is contained in:
11
CLAUDE.md
11
CLAUDE.md
@@ -156,6 +156,17 @@ Fluent builder pattern: `.child()`, `.class()`, `.css()` chaining with position-
|
||||
- **Barrel import caution** — `import { x } from "@triliumnext/core"` loads ALL core exports. Early-loading modules like `config.ts` should import specific subpaths (e.g. `@triliumnext/core/src/services/utils/index`) to avoid circular dependencies or initialization ordering issues
|
||||
- **Electron IPC** — In desktop mode, client API calls use Electron IPC (not HTTP). The IPC handler in `apps/server/src/routes/electron.ts` must be registered via `utils.isElectron` from the **server's** utils (which correctly checks `process.versions["electron"]`), not from core's utils
|
||||
|
||||
### Binary Utilities
|
||||
|
||||
Use utilities from `packages/trilium-core/src/services/utils/binary.ts` for string/buffer conversions instead of manual `TextEncoder`/`TextDecoder` or `Buffer.from()` calls:
|
||||
|
||||
- **`wrapStringOrBuffer(input)`** — Converts `string` to `Uint8Array`, returns `Uint8Array` unchanged. Use when a function expects `Uint8Array` but receives `string | Uint8Array`.
|
||||
- **`unwrapStringOrBuffer(input)`** — Converts `Uint8Array` to `string`, returns `string` unchanged. Use when a function expects `string` but receives `string | Uint8Array`.
|
||||
- **`encodeBase64(input)`** / **`decodeBase64(input)`** — Base64 encoding/decoding that works in both Node.js and browser.
|
||||
- **`encodeUtf8(string)`** / **`decodeUtf8(buffer)`** — UTF-8 encoding/decoding.
|
||||
|
||||
Import via `import { binary_utils } from "@triliumnext/core"` or directly from the module.
|
||||
|
||||
### Database
|
||||
|
||||
SQLite via `better-sqlite3`. SQL abstraction in `packages/trilium-core/src/services/sql/` with `DatabaseProvider` interface, prepared statement caching, and transaction support.
|
||||
|
||||
@@ -20,7 +20,7 @@
|
||||
"@triliumnext/server": "workspace:*"
|
||||
},
|
||||
"devDependencies": {
|
||||
"@redocly/cli": "2.25.4",
|
||||
"@redocly/cli": "2.26.0",
|
||||
"archiver": "7.0.1",
|
||||
"fs-extra": "11.3.4",
|
||||
"js-yaml": "4.1.1",
|
||||
|
||||
@@ -32,7 +32,7 @@
|
||||
"@triliumnext/highlightjs": "workspace:*",
|
||||
"@triliumnext/share-theme": "workspace:*",
|
||||
"@triliumnext/split.js": "workspace:*",
|
||||
"@zumer/snapdom": "2.7.0",
|
||||
"@zumer/snapdom": "2.8.0",
|
||||
"autocomplete.js": "0.38.1",
|
||||
"bootstrap": "5.3.8",
|
||||
"boxicons": "2.1.4",
|
||||
@@ -43,14 +43,16 @@
|
||||
"fflate": "0.8.2",
|
||||
"force-graph": "1.51.2",
|
||||
"globals": "17.4.0",
|
||||
"i18next": "26.0.3",
|
||||
"i18next": "26.0.4",
|
||||
"i18next-http-backend": "3.0.4",
|
||||
"aes-js": "3.1.2",
|
||||
"jquery": "4.0.0",
|
||||
"jquery.fancytree": "2.38.5",
|
||||
"js-md5": "0.8.3",
|
||||
"js-sha1": "0.7.0",
|
||||
"js-sha256": "0.11.1",
|
||||
"js-sha512": "0.9.0",
|
||||
"scrypt-js": "3.0.1",
|
||||
"jsplumb": "2.15.6",
|
||||
"katex": "0.16.45",
|
||||
"knockout": "3.5.1",
|
||||
@@ -71,6 +73,7 @@
|
||||
"vanilla-js-wheel-zoom": "9.0.4"
|
||||
},
|
||||
"devDependencies": {
|
||||
"@types/aes-js": "3.1.4",
|
||||
"@ckeditor/ckeditor5-inspector": "5.0.0",
|
||||
"@preact/preset-vite": "2.10.2",
|
||||
"@types/bootstrap": "5.2.10",
|
||||
|
||||
156
apps/client-standalone/src/lightweight/backup_provider.ts
Normal file
156
apps/client-standalone/src/lightweight/backup_provider.ts
Normal file
@@ -0,0 +1,156 @@
|
||||
import type { DatabaseBackup } from "@triliumnext/commons";
|
||||
import { BackupOptionsService, BackupService, getSql } from "@triliumnext/core";
|
||||
|
||||
const BACKUP_DIR_NAME = "backups";
|
||||
const BACKUP_FILE_PATTERN = /^backup-.*\.db$/;
|
||||
|
||||
/**
|
||||
* Standalone backup service using OPFS (Origin Private File System).
|
||||
* Stores database backups as serialized byte arrays in OPFS.
|
||||
* Falls back to no-op behavior when OPFS is not available (e.g., in tests).
|
||||
*/
|
||||
export default class StandaloneBackupService extends BackupService {
|
||||
private backupDir: FileSystemDirectoryHandle | null = null;
|
||||
private opfsAvailable: boolean | null = null;
|
||||
|
||||
constructor(options: BackupOptionsService) {
|
||||
super(options);
|
||||
}
|
||||
|
||||
private isOpfsAvailable(): boolean {
|
||||
if (this.opfsAvailable === null) {
|
||||
this.opfsAvailable = typeof navigator !== "undefined"
|
||||
&& navigator.storage
|
||||
&& typeof navigator.storage.getDirectory === "function";
|
||||
}
|
||||
return this.opfsAvailable;
|
||||
}
|
||||
|
||||
private async ensureBackupDirectory(): Promise<FileSystemDirectoryHandle | null> {
|
||||
if (!this.isOpfsAvailable()) {
|
||||
return null;
|
||||
}
|
||||
|
||||
if (!this.backupDir) {
|
||||
const root = await navigator.storage.getDirectory();
|
||||
this.backupDir = await root.getDirectoryHandle(BACKUP_DIR_NAME, { create: true });
|
||||
}
|
||||
return this.backupDir;
|
||||
}
|
||||
|
||||
override async backupNow(name: string): Promise<string> {
|
||||
const fileName = `backup-${name}.db`;
|
||||
|
||||
// Check if OPFS is available
|
||||
if (!this.isOpfsAvailable()) {
|
||||
console.warn(`[Backup] OPFS not available, skipping backup: ${fileName}`);
|
||||
return `/${BACKUP_DIR_NAME}/${fileName}`;
|
||||
}
|
||||
|
||||
try {
|
||||
const dir = await this.ensureBackupDirectory();
|
||||
if (!dir) {
|
||||
console.warn(`[Backup] Backup directory not available, skipping: ${fileName}`);
|
||||
return `/${BACKUP_DIR_NAME}/${fileName}`;
|
||||
}
|
||||
|
||||
// Serialize the database
|
||||
const data = getSql().serialize();
|
||||
|
||||
// Write to OPFS
|
||||
const fileHandle = await dir.getFileHandle(fileName, { create: true });
|
||||
const writable = await fileHandle.createWritable();
|
||||
await writable.write(data);
|
||||
await writable.close();
|
||||
|
||||
console.log(`[Backup] Created backup: ${fileName} (${data.byteLength} bytes)`);
|
||||
return `/${BACKUP_DIR_NAME}/${fileName}`;
|
||||
} catch (error) {
|
||||
console.error(`[Backup] Failed to create backup ${fileName}:`, error);
|
||||
// Don't throw - backup failure shouldn't block operations
|
||||
return `/${BACKUP_DIR_NAME}/${fileName}`;
|
||||
}
|
||||
}
|
||||
|
||||
override async getExistingBackups(): Promise<DatabaseBackup[]> {
|
||||
if (!this.isOpfsAvailable()) {
|
||||
return [];
|
||||
}
|
||||
|
||||
try {
|
||||
const dir = await this.ensureBackupDirectory();
|
||||
if (!dir) {
|
||||
return [];
|
||||
}
|
||||
|
||||
const backups: DatabaseBackup[] = [];
|
||||
|
||||
for await (const [name, handle] of dir.entries()) {
|
||||
if (handle.kind !== "file" || !BACKUP_FILE_PATTERN.test(name)) {
|
||||
continue;
|
||||
}
|
||||
|
||||
const file = await (handle as FileSystemFileHandle).getFile();
|
||||
backups.push({
|
||||
fileName: name,
|
||||
filePath: `/${BACKUP_DIR_NAME}/${name}`,
|
||||
mtime: new Date(file.lastModified)
|
||||
});
|
||||
}
|
||||
|
||||
// Sort by modification time, newest first
|
||||
backups.sort((a, b) => b.mtime.getTime() - a.mtime.getTime());
|
||||
return backups;
|
||||
} catch (error) {
|
||||
console.error("[Backup] Failed to list backups:", error);
|
||||
return [];
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* Delete a backup by filename.
|
||||
*/
|
||||
async deleteBackup(fileName: string): Promise<void> {
|
||||
if (!this.isOpfsAvailable()) {
|
||||
return;
|
||||
}
|
||||
|
||||
try {
|
||||
const dir = await this.ensureBackupDirectory();
|
||||
if (!dir) {
|
||||
return;
|
||||
}
|
||||
await dir.removeEntry(fileName);
|
||||
console.log(`[Backup] Deleted backup: ${fileName}`);
|
||||
} catch (error) {
|
||||
console.error(`[Backup] Failed to delete backup ${fileName}:`, error);
|
||||
}
|
||||
}
|
||||
|
||||
override async getBackupContent(filePath: string): Promise<Uint8Array | null> {
|
||||
if (!this.isOpfsAvailable()) {
|
||||
return null;
|
||||
}
|
||||
|
||||
try {
|
||||
const dir = await this.ensureBackupDirectory();
|
||||
if (!dir) {
|
||||
return null;
|
||||
}
|
||||
|
||||
// Extract fileName from filePath (e.g., "/backups/backup-now.db" -> "backup-now.db")
|
||||
const fileName = filePath.split("/").pop();
|
||||
if (!fileName || !BACKUP_FILE_PATTERN.test(fileName)) {
|
||||
return null;
|
||||
}
|
||||
|
||||
const fileHandle = await dir.getFileHandle(fileName);
|
||||
const file = await fileHandle.getFile();
|
||||
const data = await file.arrayBuffer();
|
||||
return new Uint8Array(data);
|
||||
} catch (error) {
|
||||
console.error(`[Backup] Failed to get backup content ${filePath}:`, error);
|
||||
return null;
|
||||
}
|
||||
}
|
||||
}
|
||||
@@ -1,24 +1,22 @@
|
||||
import type { CryptoProvider } from "@triliumnext/core";
|
||||
import type { Cipher, CryptoProvider, ScryptOptions } from "@triliumnext/core";
|
||||
import { binary_utils } from "@triliumnext/core";
|
||||
import { sha1 } from "js-sha1";
|
||||
import { sha256 } from "js-sha256";
|
||||
import { sha512 } from "js-sha512";
|
||||
import { md5 } from "js-md5";
|
||||
|
||||
interface Cipher {
|
||||
update(data: Uint8Array): Uint8Array;
|
||||
final(): Uint8Array;
|
||||
}
|
||||
import { scrypt } from "scrypt-js";
|
||||
import aesjs from "aes-js";
|
||||
|
||||
const CHARS = "0123456789ABCDEFGHIJKLMNOPQRSTUVWXYZabcdefghijklmnopqrstuvwxyz";
|
||||
|
||||
/**
|
||||
* Crypto provider for browser environments using the Web Crypto API.
|
||||
* Crypto provider for browser environments using pure JavaScript crypto libraries.
|
||||
* Uses aes-js for synchronous AES encryption (matching Node.js behavior).
|
||||
*/
|
||||
export default class BrowserCryptoProvider implements CryptoProvider {
|
||||
|
||||
createHash(algorithm: "md5" | "sha1" | "sha512", content: string | Uint8Array): Uint8Array {
|
||||
const data = typeof content === "string" ? content :
|
||||
new TextDecoder().decode(content);
|
||||
const data = binary_utils.unwrapStringOrBuffer(content);
|
||||
|
||||
let hexHash: string;
|
||||
if (algorithm === "md5") {
|
||||
@@ -38,13 +36,11 @@ export default class BrowserCryptoProvider implements CryptoProvider {
|
||||
}
|
||||
|
||||
createCipheriv(algorithm: "aes-128-cbc", key: Uint8Array, iv: Uint8Array): Cipher {
|
||||
// Web Crypto API doesn't support streaming cipher like Node.js
|
||||
// We need to implement a wrapper that collects data and encrypts on final()
|
||||
return new WebCryptoCipher(algorithm, key, iv, "encrypt");
|
||||
return new AesJsCipher(algorithm, key, iv, "encrypt");
|
||||
}
|
||||
|
||||
createDecipheriv(algorithm: "aes-128-cbc", key: Uint8Array, iv: Uint8Array): Cipher {
|
||||
return new WebCryptoCipher(algorithm, key, iv, "decrypt");
|
||||
return new AesJsCipher(algorithm, key, iv, "decrypt");
|
||||
}
|
||||
|
||||
randomBytes(size: number): Uint8Array {
|
||||
@@ -63,8 +59,8 @@ export default class BrowserCryptoProvider implements CryptoProvider {
|
||||
}
|
||||
|
||||
hmac(secret: string | Uint8Array, value: string | Uint8Array): string {
|
||||
const secretStr = typeof secret === "string" ? secret : new TextDecoder().decode(secret);
|
||||
const valueStr = typeof value === "string" ? value : new TextDecoder().decode(value);
|
||||
const secretStr = binary_utils.unwrapStringOrBuffer(secret);
|
||||
const valueStr = binary_utils.unwrapStringOrBuffer(value);
|
||||
// sha256.hmac returns hex, convert to base64 to match Node's behavior
|
||||
const hexHash = sha256.hmac(secretStr, valueStr);
|
||||
const bytes = new Uint8Array(hexHash.length / 2);
|
||||
@@ -73,28 +69,50 @@ export default class BrowserCryptoProvider implements CryptoProvider {
|
||||
}
|
||||
return btoa(String.fromCharCode(...bytes));
|
||||
}
|
||||
|
||||
async scrypt(
|
||||
password: Uint8Array | string,
|
||||
salt: Uint8Array | string,
|
||||
keyLength: number,
|
||||
options: ScryptOptions = {}
|
||||
): Promise<Uint8Array> {
|
||||
const { N = 16384, r = 8, p = 1 } = options;
|
||||
const passwordBytes = binary_utils.wrapStringOrBuffer(password);
|
||||
const saltBytes = binary_utils.wrapStringOrBuffer(salt);
|
||||
|
||||
return scrypt(passwordBytes, saltBytes, N, r, p, keyLength);
|
||||
}
|
||||
|
||||
constantTimeCompare(a: Uint8Array, b: Uint8Array): boolean {
|
||||
if (a.length !== b.length) {
|
||||
return false;
|
||||
}
|
||||
|
||||
let result = 0;
|
||||
for (let i = 0; i < a.length; i++) {
|
||||
result |= a[i] ^ b[i];
|
||||
}
|
||||
return result === 0;
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* A cipher implementation that wraps Web Crypto API.
|
||||
* Note: This buffers all data until final() is called, which differs from
|
||||
* Node.js's streaming cipher behavior.
|
||||
* A synchronous cipher implementation using aes-js.
|
||||
* Matches Node.js crypto behavior with update() and final() methods.
|
||||
*/
|
||||
class WebCryptoCipher implements Cipher {
|
||||
class AesJsCipher implements Cipher {
|
||||
private chunks: Uint8Array[] = [];
|
||||
private algorithm: string;
|
||||
private key: Uint8Array;
|
||||
private iv: Uint8Array;
|
||||
private mode: "encrypt" | "decrypt";
|
||||
private finalized = false;
|
||||
|
||||
constructor(
|
||||
algorithm: "aes-128-cbc",
|
||||
_algorithm: "aes-128-cbc",
|
||||
key: Uint8Array,
|
||||
iv: Uint8Array,
|
||||
mode: "encrypt" | "decrypt"
|
||||
) {
|
||||
this.algorithm = algorithm;
|
||||
this.key = key;
|
||||
this.iv = iv;
|
||||
this.mode = mode;
|
||||
@@ -104,9 +122,9 @@ class WebCryptoCipher implements Cipher {
|
||||
if (this.finalized) {
|
||||
throw new Error("Cipher has already been finalized");
|
||||
}
|
||||
// Buffer the data - Web Crypto doesn't support streaming
|
||||
// Buffer the data - we process everything in final() to match streaming behavior
|
||||
this.chunks.push(data);
|
||||
// Return empty array since we process everything in final()
|
||||
// Return empty array since aes-js CBC doesn't support true streaming
|
||||
return new Uint8Array(0);
|
||||
}
|
||||
|
||||
@@ -116,24 +134,6 @@ class WebCryptoCipher implements Cipher {
|
||||
}
|
||||
this.finalized = true;
|
||||
|
||||
// Web Crypto API is async, but we need sync behavior
|
||||
// This is a fundamental limitation that requires architectural changes
|
||||
// For now, throw an error directing users to use async methods
|
||||
throw new Error(
|
||||
"Synchronous cipher finalization not available in browser. " +
|
||||
"The Web Crypto API is async-only. Use finalizeAsync() instead."
|
||||
);
|
||||
}
|
||||
|
||||
/**
|
||||
* Async version that actually performs the encryption/decryption.
|
||||
*/
|
||||
async finalizeAsync(): Promise<Uint8Array> {
|
||||
if (this.finalized) {
|
||||
throw new Error("Cipher has already been finalized");
|
||||
}
|
||||
this.finalized = true;
|
||||
|
||||
// Concatenate all chunks
|
||||
const totalLength = this.chunks.reduce((sum, chunk) => sum + chunk.length, 0);
|
||||
const data = new Uint8Array(totalLength);
|
||||
@@ -143,24 +143,33 @@ class WebCryptoCipher implements Cipher {
|
||||
offset += chunk.length;
|
||||
}
|
||||
|
||||
// Copy key and iv to ensure they're plain ArrayBuffer-backed
|
||||
const keyBuffer = new Uint8Array(this.key);
|
||||
const ivBuffer = new Uint8Array(this.iv);
|
||||
if (this.mode === "encrypt") {
|
||||
// PKCS7 padding for encryption
|
||||
const blockSize = 16;
|
||||
const paddingLength = blockSize - (data.length % blockSize);
|
||||
const paddedData = new Uint8Array(data.length + paddingLength);
|
||||
paddedData.set(data);
|
||||
paddedData.fill(paddingLength, data.length);
|
||||
|
||||
// Import the key
|
||||
const cryptoKey = await crypto.subtle.importKey(
|
||||
"raw",
|
||||
keyBuffer,
|
||||
{ name: "AES-CBC" },
|
||||
false,
|
||||
[this.mode]
|
||||
);
|
||||
const aesCbc = new aesjs.ModeOfOperation.cbc(
|
||||
Array.from(this.key),
|
||||
Array.from(this.iv)
|
||||
);
|
||||
return new Uint8Array(aesCbc.encrypt(paddedData));
|
||||
} else {
|
||||
// Decryption
|
||||
const aesCbc = new aesjs.ModeOfOperation.cbc(
|
||||
Array.from(this.key),
|
||||
Array.from(this.iv)
|
||||
);
|
||||
const decrypted = new Uint8Array(aesCbc.decrypt(data));
|
||||
|
||||
// Perform encryption/decryption
|
||||
const result = this.mode === "encrypt"
|
||||
? await crypto.subtle.encrypt({ name: "AES-CBC", iv: ivBuffer }, cryptoKey, data)
|
||||
: await crypto.subtle.decrypt({ name: "AES-CBC", iv: ivBuffer }, cryptoKey, data);
|
||||
|
||||
return new Uint8Array(result);
|
||||
// Remove PKCS7 padding
|
||||
const paddingLength = decrypted[decrypted.length - 1];
|
||||
if (paddingLength > 0 && paddingLength <= 16) {
|
||||
return decrypted.slice(0, decrypted.length - paddingLength);
|
||||
}
|
||||
return decrypted;
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
168
apps/client-standalone/src/lightweight/log_provider.ts
Normal file
168
apps/client-standalone/src/lightweight/log_provider.ts
Normal file
@@ -0,0 +1,168 @@
|
||||
import { FileBasedLogService, type LogFileInfo } from "@triliumnext/core";
|
||||
|
||||
const LOG_DIR_NAME = "logs";
|
||||
const LOG_FILE_PATTERN = /^trilium-\d{4}-\d{2}-\d{2}\.log$/;
|
||||
const DEFAULT_RETENTION_DAYS = 7;
|
||||
|
||||
/**
|
||||
* Standalone log service using OPFS (Origin Private File System).
|
||||
* Uses synchronous access handles available in service worker context.
|
||||
*/
|
||||
export default class StandaloneLogService extends FileBasedLogService {
|
||||
private logDir: FileSystemDirectoryHandle | null = null;
|
||||
private currentFile: FileSystemSyncAccessHandle | null = null;
|
||||
private currentFileName: string = "";
|
||||
private textEncoder = new TextEncoder();
|
||||
private textDecoder = new TextDecoder();
|
||||
|
||||
constructor() {
|
||||
super();
|
||||
}
|
||||
|
||||
// ==================== Abstract Method Implementations ====================
|
||||
|
||||
protected override get eol(): string {
|
||||
return "\n";
|
||||
}
|
||||
|
||||
protected override async ensureLogDirectory(): Promise<void> {
|
||||
const root = await navigator.storage.getDirectory();
|
||||
this.logDir = await root.getDirectoryHandle(LOG_DIR_NAME, { create: true });
|
||||
}
|
||||
|
||||
protected override async openLogFile(fileName: string): Promise<void> {
|
||||
if (!this.logDir) {
|
||||
await this.ensureLogDirectory();
|
||||
}
|
||||
|
||||
// Close existing file if open
|
||||
if (this.currentFile) {
|
||||
this.currentFile.close();
|
||||
this.currentFile = null;
|
||||
}
|
||||
|
||||
const fileHandle = await this.logDir!.getFileHandle(fileName, { create: true });
|
||||
|
||||
// Try to create sync access handle with retry logic for worker restarts
|
||||
// Previous worker may have left handle open before being terminated
|
||||
const maxRetries = 3;
|
||||
const retryDelay = 100;
|
||||
|
||||
for (let attempt = 0; attempt < maxRetries; attempt++) {
|
||||
try {
|
||||
this.currentFile = await fileHandle.createSyncAccessHandle();
|
||||
break;
|
||||
} catch (error) {
|
||||
if (attempt === maxRetries - 1) {
|
||||
// Last attempt failed - fall back to console-only logging
|
||||
console.warn("[LogService] Could not open log file, using console-only logging:", error);
|
||||
this.currentFile = null;
|
||||
this.currentFileName = "";
|
||||
return;
|
||||
}
|
||||
// Wait before retrying - previous handle may be released
|
||||
await new Promise(resolve => setTimeout(resolve, retryDelay * (attempt + 1)));
|
||||
}
|
||||
}
|
||||
|
||||
this.currentFileName = fileName;
|
||||
|
||||
// Seek to end for appending
|
||||
if (this.currentFile) {
|
||||
const size = this.currentFile.getSize();
|
||||
this.currentFile.truncate(size); // No-op, but ensures we're at the right position
|
||||
}
|
||||
}
|
||||
|
||||
protected override closeLogFile(): void {
|
||||
if (this.currentFile) {
|
||||
this.currentFile.close();
|
||||
this.currentFile = null;
|
||||
this.currentFileName = "";
|
||||
}
|
||||
}
|
||||
|
||||
protected override writeEntry(entry: string): void {
|
||||
if (!this.currentFile) {
|
||||
console.log(entry); // Fallback to console if file not ready
|
||||
return;
|
||||
}
|
||||
|
||||
const data = this.textEncoder.encode(entry);
|
||||
const currentSize = this.currentFile.getSize();
|
||||
this.currentFile.write(data, { at: currentSize });
|
||||
this.currentFile.flush();
|
||||
}
|
||||
|
||||
protected override readLogFile(fileName: string): string | null {
|
||||
if (!this.logDir) {
|
||||
return null;
|
||||
}
|
||||
|
||||
try {
|
||||
// For the current file, we need to read from the sync handle
|
||||
if (fileName === this.currentFileName && this.currentFile) {
|
||||
const size = this.currentFile.getSize();
|
||||
const buffer = new ArrayBuffer(size);
|
||||
const view = new DataView(buffer);
|
||||
this.currentFile.read(view, { at: 0 });
|
||||
return this.textDecoder.decode(buffer);
|
||||
}
|
||||
|
||||
// For other files, we'd need async access - return null for now
|
||||
// The current file is what's most commonly needed
|
||||
return null;
|
||||
} catch {
|
||||
return null;
|
||||
}
|
||||
}
|
||||
|
||||
protected override async listLogFiles(): Promise<LogFileInfo[]> {
|
||||
if (!this.logDir) {
|
||||
return [];
|
||||
}
|
||||
|
||||
const logFiles: LogFileInfo[] = [];
|
||||
|
||||
for await (const [name, handle] of this.logDir.entries()) {
|
||||
if (handle.kind !== "file" || !LOG_FILE_PATTERN.test(name)) {
|
||||
continue;
|
||||
}
|
||||
|
||||
// OPFS doesn't provide mtime directly, so we parse from filename
|
||||
const match = name.match(/trilium-(\d{4})-(\d{2})-(\d{2})\.log/);
|
||||
if (match) {
|
||||
const mtime = new Date(
|
||||
parseInt(match[1]),
|
||||
parseInt(match[2]) - 1,
|
||||
parseInt(match[3])
|
||||
);
|
||||
logFiles.push({ name, mtime });
|
||||
}
|
||||
}
|
||||
|
||||
return logFiles;
|
||||
}
|
||||
|
||||
protected override async deleteLogFile(fileName: string): Promise<void> {
|
||||
if (!this.logDir) {
|
||||
return;
|
||||
}
|
||||
|
||||
// Don't delete the current file
|
||||
if (fileName === this.currentFileName) {
|
||||
return;
|
||||
}
|
||||
|
||||
try {
|
||||
await this.logDir.removeEntry(fileName);
|
||||
} catch {
|
||||
// File might not exist or be locked
|
||||
}
|
||||
}
|
||||
|
||||
protected override getRetentionDays(): number {
|
||||
// Standalone doesn't have config system, use default
|
||||
return DEFAULT_RETENTION_DAYS;
|
||||
}
|
||||
}
|
||||
@@ -58,6 +58,8 @@ let BrowserCryptoProvider: typeof import('./lightweight/crypto_provider').defaul
|
||||
let BrowserZipProvider: typeof import('./lightweight/zip_provider').default;
|
||||
let FetchRequestProvider: typeof import('./lightweight/request_provider').default;
|
||||
let StandalonePlatformProvider: typeof import('./lightweight/platform_provider').default;
|
||||
let StandaloneLogService: typeof import('./lightweight/log_provider').default;
|
||||
let StandaloneBackupService: typeof import('./lightweight/backup_provider').default;
|
||||
let translationProvider: typeof import('./lightweight/translation_provider').default;
|
||||
let createConfiguredRouter: typeof import('./lightweight/browser_routes').createConfiguredRouter;
|
||||
|
||||
@@ -86,6 +88,8 @@ async function loadModules(): Promise<void> {
|
||||
zipModule,
|
||||
requestModule,
|
||||
platformModule,
|
||||
logModule,
|
||||
backupModule,
|
||||
translationModule,
|
||||
routesModule
|
||||
] = await Promise.all([
|
||||
@@ -96,6 +100,8 @@ async function loadModules(): Promise<void> {
|
||||
import('./lightweight/zip_provider.js'),
|
||||
import('./lightweight/request_provider.js'),
|
||||
import('./lightweight/platform_provider.js'),
|
||||
import('./lightweight/log_provider.js'),
|
||||
import('./lightweight/backup_provider.js'),
|
||||
import('./lightweight/translation_provider.js'),
|
||||
import('./lightweight/browser_routes.js')
|
||||
]);
|
||||
@@ -107,6 +113,8 @@ async function loadModules(): Promise<void> {
|
||||
BrowserZipProvider = zipModule.default;
|
||||
FetchRequestProvider = requestModule.default;
|
||||
StandalonePlatformProvider = platformModule.default;
|
||||
StandaloneLogService = logModule.default;
|
||||
StandaloneBackupService = backupModule.default;
|
||||
translationProvider = translationModule.default;
|
||||
createConfiguredRouter = routesModule.createConfiguredRouter;
|
||||
|
||||
@@ -153,6 +161,12 @@ async function initialize(): Promise<void> {
|
||||
console.log("[Worker] Loading @triliumnext/core...");
|
||||
const schemaModule = await import("@triliumnext/core/src/assets/schema.sql?raw");
|
||||
coreModule = await import("@triliumnext/core");
|
||||
|
||||
// Initialize log service with OPFS persistence
|
||||
const logService = new StandaloneLogService();
|
||||
await logService.initialize();
|
||||
console.log("[Worker] Log service initialized with OPFS");
|
||||
|
||||
await coreModule.initializeCore({
|
||||
executionContext: new BrowserExecutionContext(),
|
||||
crypto: new BrowserCryptoProvider(),
|
||||
@@ -161,6 +175,8 @@ async function initialize(): Promise<void> {
|
||||
messaging: messagingProvider!,
|
||||
request: new FetchRequestProvider(),
|
||||
platform: new StandalonePlatformProvider(queryString),
|
||||
log: logService,
|
||||
backup: new StandaloneBackupService(coreModule!.options),
|
||||
translations: translationProvider,
|
||||
schema: schemaModule.default,
|
||||
getDemoArchive: async () => {
|
||||
@@ -168,6 +184,7 @@ async function initialize(): Promise<void> {
|
||||
if (!response.ok) return null;
|
||||
return new Uint8Array(await response.arrayBuffer());
|
||||
},
|
||||
image: (await import("./services/image_provider.js")).standaloneImageProvider,
|
||||
dbConfig: {
|
||||
provider: sqlProvider!,
|
||||
isReadOnly: false,
|
||||
|
||||
67
apps/client-standalone/src/services/data_encryption.spec.ts
Normal file
67
apps/client-standalone/src/services/data_encryption.spec.ts
Normal file
@@ -0,0 +1,67 @@
|
||||
import { describe, it, expect } from "vitest";
|
||||
import { data_encryption } from "@triliumnext/core";
|
||||
|
||||
// Note: BrowserCryptoProvider is already initialized via test_setup.ts
|
||||
|
||||
describe("data_encryption with BrowserCryptoProvider", () => {
|
||||
it("should encrypt and decrypt ASCII text correctly", () => {
|
||||
const key = new Uint8Array(16).fill(42);
|
||||
const plainText = "Hello, World!";
|
||||
|
||||
const encrypted = data_encryption.encrypt(key, plainText);
|
||||
expect(typeof encrypted).toBe("string");
|
||||
expect(encrypted.length).toBeGreaterThan(0);
|
||||
|
||||
const decrypted = data_encryption.decryptString(key, encrypted);
|
||||
expect(decrypted).toBe(plainText);
|
||||
});
|
||||
|
||||
it("should encrypt and decrypt UTF-8 text correctly", () => {
|
||||
const key = new Uint8Array(16).fill(42);
|
||||
const plainText = "Привет мир! 你好世界! 🎉";
|
||||
|
||||
const encrypted = data_encryption.encrypt(key, plainText);
|
||||
const decrypted = data_encryption.decryptString(key, encrypted);
|
||||
expect(decrypted).toBe(plainText);
|
||||
});
|
||||
|
||||
it("should encrypt and decrypt empty string", () => {
|
||||
const key = new Uint8Array(16).fill(42);
|
||||
const plainText = "";
|
||||
|
||||
const encrypted = data_encryption.encrypt(key, plainText);
|
||||
const decrypted = data_encryption.decryptString(key, encrypted);
|
||||
expect(decrypted).toBe(plainText);
|
||||
});
|
||||
|
||||
it("should encrypt and decrypt binary data", () => {
|
||||
const key = new Uint8Array(16).fill(42);
|
||||
const plainData = new Uint8Array([0, 1, 2, 255, 128, 64]);
|
||||
|
||||
const encrypted = data_encryption.encrypt(key, plainData);
|
||||
const decrypted = data_encryption.decrypt(key, encrypted);
|
||||
expect(decrypted).toBeInstanceOf(Uint8Array);
|
||||
expect(Array.from(decrypted as Uint8Array)).toEqual(Array.from(plainData));
|
||||
});
|
||||
|
||||
it("should fail decryption with wrong key", () => {
|
||||
const key1 = new Uint8Array(16).fill(42);
|
||||
const key2 = new Uint8Array(16).fill(43);
|
||||
const plainText = "Secret message";
|
||||
|
||||
const encrypted = data_encryption.encrypt(key1, plainText);
|
||||
|
||||
// decrypt returns false when digest doesn't match
|
||||
const result = data_encryption.decrypt(key2, encrypted);
|
||||
expect(result).toBe(false);
|
||||
});
|
||||
|
||||
it("should handle large content", () => {
|
||||
const key = new Uint8Array(16).fill(42);
|
||||
const plainText = "x".repeat(100000);
|
||||
|
||||
const encrypted = data_encryption.encrypt(key, plainText);
|
||||
const decrypted = data_encryption.decryptString(key, encrypted);
|
||||
expect(decrypted).toBe(plainText);
|
||||
});
|
||||
});
|
||||
96
apps/client-standalone/src/services/image_provider.ts
Normal file
96
apps/client-standalone/src/services/image_provider.ts
Normal file
@@ -0,0 +1,96 @@
|
||||
/**
|
||||
* Standalone image provider implementation.
|
||||
* Uses pure JavaScript for format detection without compression.
|
||||
* Images are saved as-is without resizing.
|
||||
*/
|
||||
|
||||
import type { ImageProvider, ImageFormat, ProcessedImage } from "@triliumnext/core";
|
||||
|
||||
/**
|
||||
* Detect image type from buffer using magic bytes.
|
||||
*/
|
||||
function getImageTypeFromBuffer(buffer: Uint8Array): ImageFormat | null {
|
||||
if (buffer.length < 12) {
|
||||
return null;
|
||||
}
|
||||
|
||||
// Check for SVG (text-based)
|
||||
if (isSvg(buffer)) {
|
||||
return { ext: "svg", mime: "image/svg+xml" };
|
||||
}
|
||||
|
||||
// JPEG: FF D8 FF
|
||||
if (buffer[0] === 0xff && buffer[1] === 0xd8 && buffer[2] === 0xff) {
|
||||
return { ext: "jpg", mime: "image/jpeg" };
|
||||
}
|
||||
|
||||
// PNG: 89 50 4E 47 0D 0A 1A 0A
|
||||
if (
|
||||
buffer[0] === 0x89 &&
|
||||
buffer[1] === 0x50 &&
|
||||
buffer[2] === 0x4e &&
|
||||
buffer[3] === 0x47 &&
|
||||
buffer[4] === 0x0d &&
|
||||
buffer[5] === 0x0a &&
|
||||
buffer[6] === 0x1a &&
|
||||
buffer[7] === 0x0a
|
||||
) {
|
||||
return { ext: "png", mime: "image/png" };
|
||||
}
|
||||
|
||||
// GIF: "GIF"
|
||||
if (buffer[0] === 0x47 && buffer[1] === 0x49 && buffer[2] === 0x46) {
|
||||
return { ext: "gif", mime: "image/gif" };
|
||||
}
|
||||
|
||||
// WebP: RIFF....WEBP
|
||||
if (
|
||||
buffer[0] === 0x52 &&
|
||||
buffer[1] === 0x49 &&
|
||||
buffer[2] === 0x46 &&
|
||||
buffer[3] === 0x46 &&
|
||||
buffer[8] === 0x57 &&
|
||||
buffer[9] === 0x45 &&
|
||||
buffer[10] === 0x42 &&
|
||||
buffer[11] === 0x50
|
||||
) {
|
||||
return { ext: "webp", mime: "image/webp" };
|
||||
}
|
||||
|
||||
// BMP: "BM"
|
||||
if (buffer[0] === 0x42 && buffer[1] === 0x4d) {
|
||||
return { ext: "bmp", mime: "image/bmp" };
|
||||
}
|
||||
|
||||
return null;
|
||||
}
|
||||
|
||||
/**
|
||||
* Check if buffer contains SVG content.
|
||||
*/
|
||||
function isSvg(buffer: Uint8Array): boolean {
|
||||
const maxBytes = Math.min(buffer.length, 1000);
|
||||
let str = "";
|
||||
for (let i = 0; i < maxBytes; i++) {
|
||||
str += String.fromCharCode(buffer[i]);
|
||||
}
|
||||
|
||||
const trimmed = str.trim().toLowerCase();
|
||||
return trimmed.startsWith("<svg") || (trimmed.startsWith("<?xml") && trimmed.includes("<svg"));
|
||||
}
|
||||
|
||||
export const standaloneImageProvider: ImageProvider = {
|
||||
getImageType(buffer: Uint8Array): ImageFormat | null {
|
||||
return getImageTypeFromBuffer(buffer);
|
||||
},
|
||||
|
||||
async processImage(buffer: Uint8Array, _originalName: string, _shrink: boolean): Promise<ProcessedImage> {
|
||||
// Standalone doesn't do compression - just detect format and return original
|
||||
const format = getImageTypeFromBuffer(buffer) || { ext: "dat", mime: "application/octet-stream" };
|
||||
|
||||
return {
|
||||
buffer,
|
||||
format
|
||||
};
|
||||
}
|
||||
};
|
||||
@@ -85,12 +85,22 @@ async function networkFirst(request) {
|
||||
}
|
||||
}
|
||||
|
||||
async function forwardToClientLocalServer(request, clientId) {
|
||||
// Find a client to handle the request (prefer the initiating client if available)
|
||||
let client = clientId ? await self.clients.get(clientId) : null;
|
||||
async function forwardToClientLocalServer(request, _clientId) {
|
||||
// Find the main app window to handle the request
|
||||
// We must route to the main app (which has the local bridge), not iframes like PDF.js viewer
|
||||
// @ts-expect-error - self.clients is valid in service worker context
|
||||
const all = await self.clients.matchAll({ type: "window", includeUncontrolled: true });
|
||||
|
||||
// Find the main app window - it's the one NOT serving pdfjs or other embedded content
|
||||
// The main app has the local bridge handler for LOCAL_FETCH messages
|
||||
let client = all.find((c: { url: string }) => {
|
||||
const url = new URL(c.url);
|
||||
// Main app is at root or index.html, not in /pdfjs/ or other iframe paths
|
||||
return !url.pathname.startsWith("/pdfjs/");
|
||||
}) || null;
|
||||
|
||||
// If no main app window found, fall back to any available client
|
||||
if (!client) {
|
||||
const all = await self.clients.matchAll({ type: "window", includeUncontrolled: true });
|
||||
client = all[0] || null;
|
||||
}
|
||||
|
||||
|
||||
@@ -2,17 +2,19 @@ import { createRequire } from "node:module";
|
||||
import { readFileSync } from "node:fs";
|
||||
import { fileURLToPath } from "node:url";
|
||||
|
||||
import { initializeCore } from "@triliumnext/core";
|
||||
import { initializeCore, options } from "@triliumnext/core";
|
||||
import schemaSql from "@triliumnext/core/src/assets/schema.sql?raw";
|
||||
import HappyDomHtmlParser from "happy-dom/lib/html-parser/HTMLParser.js";
|
||||
import serverEnTranslations from "../../server/src/assets/translations/en/server.json";
|
||||
import { beforeAll } from "vitest";
|
||||
|
||||
import StandaloneBackupService from "./lightweight/backup_provider.js";
|
||||
import BrowserExecutionContext from "./lightweight/cls_provider.js";
|
||||
import BrowserCryptoProvider from "./lightweight/crypto_provider.js";
|
||||
import StandalonePlatformProvider from "./lightweight/platform_provider.js";
|
||||
import BrowserSqlProvider from "./lightweight/sql_provider.js";
|
||||
import BrowserZipProvider from "./lightweight/zip_provider.js";
|
||||
import { standaloneImageProvider } from "./services/image_provider.js";
|
||||
|
||||
// =============================================================================
|
||||
// SQLite WASM compatibility shims
|
||||
@@ -129,6 +131,8 @@ beforeAll(async () => {
|
||||
});
|
||||
},
|
||||
platform: new StandalonePlatformProvider(""),
|
||||
backup: new StandaloneBackupService(options),
|
||||
image: standaloneImageProvider,
|
||||
schema: schemaSql,
|
||||
dbConfig: {
|
||||
provider: sqlProvider,
|
||||
|
||||
31
apps/client-standalone/src/vite-env.d.ts
vendored
31
apps/client-standalone/src/vite-env.d.ts
vendored
@@ -1,31 +0,0 @@
|
||||
/// <reference types="vite/client" />
|
||||
|
||||
interface ImportMetaEnv {
|
||||
readonly VITE_APP_TITLE: string
|
||||
}
|
||||
|
||||
interface ImportMeta {
|
||||
readonly env: ImportMetaEnv
|
||||
}
|
||||
|
||||
interface Window {
|
||||
glob: {
|
||||
assetPath: string;
|
||||
themeCssUrl?: string;
|
||||
themeUseNextAsBase?: string;
|
||||
iconPackCss: string;
|
||||
device: string;
|
||||
headingStyle: string;
|
||||
layoutOrientation: string;
|
||||
platform: string;
|
||||
isElectron: boolean;
|
||||
hasNativeTitleBar: boolean;
|
||||
hasBackgroundEffects: boolean;
|
||||
currentLocale: {
|
||||
id: string;
|
||||
rtl: boolean;
|
||||
};
|
||||
activeDialog: any;
|
||||
};
|
||||
global: typeof globalThis;
|
||||
}
|
||||
@@ -1,7 +1,9 @@
|
||||
import prefresh from '@prefresh/vite';
|
||||
import { join } from 'path';
|
||||
import { defineConfig } from 'vite';
|
||||
import { viteStaticCopy } from 'vite-plugin-static-copy';
|
||||
import fs from "fs";
|
||||
import { join, resolve, sep } from "path";
|
||||
|
||||
import prefresh from "@prefresh/vite";
|
||||
import { defineConfig, type Plugin } from "vite";
|
||||
import { viteStaticCopy } from "vite-plugin-static-copy";
|
||||
|
||||
const clientAssets = ["assets", "stylesheets", "fonts", "translations"];
|
||||
|
||||
@@ -9,15 +11,15 @@ const isDev = process.env.NODE_ENV === "development";
|
||||
|
||||
// Watch client files and trigger reload in development
|
||||
const clientWatchPlugin = () => ({
|
||||
name: 'client-watch',
|
||||
name: "client-watch",
|
||||
configureServer(server: any) {
|
||||
if (isDev) {
|
||||
// Watch client source files (adjusted for new root)
|
||||
server.watcher.add('../../client/src/**/*');
|
||||
server.watcher.on('change', (file: string) => {
|
||||
if (file.includes('../../client/src/')) {
|
||||
server.watcher.add("../../client/src/**/*");
|
||||
server.watcher.on("change", (file: string) => {
|
||||
if (file.includes("../../client/src/")) {
|
||||
server.ws.send({
|
||||
type: 'full-reload'
|
||||
type: "full-reload"
|
||||
});
|
||||
}
|
||||
});
|
||||
@@ -25,6 +27,56 @@ const clientWatchPlugin = () => ({
|
||||
}
|
||||
});
|
||||
|
||||
// Serve PDF.js files directly in dev mode to bypass SPA fallback
|
||||
const pdfjsServePlugin = (): Plugin => ({
|
||||
name: "pdfjs-serve",
|
||||
configureServer(server) {
|
||||
const pdfjsRoot = join(__dirname, "../../packages/pdfjs-viewer/dist");
|
||||
|
||||
server.middlewares.use((req, res, next) => {
|
||||
if (!req.url?.startsWith("/pdfjs/")) {
|
||||
return next();
|
||||
}
|
||||
|
||||
// Map /pdfjs/web/... to dist/web/...
|
||||
// Map /pdfjs/build/... to dist/build/...
|
||||
// Strip query string (e.g., ?v=0.102.2) before resolving path
|
||||
const urlWithoutQuery = req.url.split("?")[0];
|
||||
const relativePath = urlWithoutQuery.replace(/^\/pdfjs\//, "");
|
||||
const filePath = join(pdfjsRoot, relativePath);
|
||||
|
||||
// Security: resolve both paths to prevent prefix-collision attacks
|
||||
// (e.g. pdfjsRoot="/foo/bar" matching "/foo/bar2/evil.js")
|
||||
const resolvedRoot = resolve(pdfjsRoot);
|
||||
const resolvedFilePath = resolve(filePath);
|
||||
if (!resolvedFilePath.startsWith(resolvedRoot + sep)) {
|
||||
return next();
|
||||
}
|
||||
|
||||
if (fs.existsSync(filePath) && fs.statSync(filePath).isFile()) {
|
||||
const ext = filePath.split(".").pop() || "";
|
||||
const mimeTypes: Record<string, string> = {
|
||||
html: "text/html",
|
||||
css: "text/css",
|
||||
js: "application/javascript",
|
||||
mjs: "application/javascript",
|
||||
wasm: "application/wasm",
|
||||
png: "image/png",
|
||||
svg: "image/svg+xml",
|
||||
json: "application/json"
|
||||
};
|
||||
res.setHeader("Content-Type", mimeTypes[ext] || "application/octet-stream");
|
||||
// Match isolation headers from main page for iframe compatibility
|
||||
res.setHeader("Cross-Origin-Opener-Policy", "same-origin");
|
||||
res.setHeader("Cross-Origin-Embedder-Policy", "require-corp");
|
||||
fs.createReadStream(filePath).pipe(res);
|
||||
} else {
|
||||
next();
|
||||
}
|
||||
});
|
||||
}
|
||||
});
|
||||
|
||||
// Always copy SQLite WASM files so they're available to the module
|
||||
const sqliteWasmPlugin = viteStaticCopy({
|
||||
targets: [
|
||||
@@ -65,10 +117,27 @@ let plugins: any = [
|
||||
}
|
||||
]
|
||||
}),
|
||||
// PDF.js viewer for PDF preview support
|
||||
// stripBase: 4 removes packages/pdfjs-viewer/dist/web (or /build)
|
||||
viteStaticCopy({
|
||||
targets: [
|
||||
{
|
||||
src: "../../../packages/pdfjs-viewer/dist/web/**/*",
|
||||
dest: "pdfjs/web",
|
||||
rename: { stripBase: 4 }
|
||||
},
|
||||
{
|
||||
src: "../../../packages/pdfjs-viewer/dist/build/**/*",
|
||||
dest: "pdfjs/build",
|
||||
rename: { stripBase: 4 }
|
||||
}
|
||||
]
|
||||
}),
|
||||
// Watch client files for changes in development
|
||||
...(isDev ? [
|
||||
prefresh(),
|
||||
clientWatchPlugin()
|
||||
clientWatchPlugin(),
|
||||
pdfjsServePlugin()
|
||||
] : [])
|
||||
];
|
||||
|
||||
|
||||
@@ -42,7 +42,7 @@
|
||||
"@univerjs/preset-sheets-note": "0.20.0",
|
||||
"@univerjs/preset-sheets-sort": "0.20.0",
|
||||
"@univerjs/presets": "0.20.0",
|
||||
"@zumer/snapdom": "2.7.0",
|
||||
"@zumer/snapdom": "2.8.0",
|
||||
"autocomplete.js": "0.38.1",
|
||||
"bootstrap": "5.3.8",
|
||||
"boxicons": "2.1.4",
|
||||
@@ -52,7 +52,7 @@
|
||||
"dompurify": "3.3.3",
|
||||
"draggabilly": "3.0.0",
|
||||
"force-graph": "1.51.2",
|
||||
"i18next": "26.0.3",
|
||||
"i18next": "26.0.4",
|
||||
"i18next-http-backend": "3.0.4",
|
||||
"jquery": "4.0.0",
|
||||
"jquery.fancytree": "2.38.5",
|
||||
|
||||
@@ -1,6 +1,6 @@
|
||||
import { t } from "./i18n";
|
||||
import options from "./options";
|
||||
import { isMobile } from "./utils";
|
||||
import { isMobile, isStandalone } from "./utils";
|
||||
|
||||
export interface ExperimentalFeature {
|
||||
id: string;
|
||||
@@ -23,6 +23,11 @@ export const experimentalFeatures = [
|
||||
|
||||
export type ExperimentalFeatureId = typeof experimentalFeatures[number]["id"];
|
||||
|
||||
/** Returns experimental features available for the current platform (excludes LLM in standalone mode). */
|
||||
export function getAvailableExperimentalFeatures() {
|
||||
return experimentalFeatures.filter(f => !(f.id === "llm" && isStandalone));
|
||||
}
|
||||
|
||||
let enabledFeatures: Set<ExperimentalFeatureId> | null = null;
|
||||
|
||||
export function isExperimentalFeatureEnabled(featureId: ExperimentalFeatureId): boolean {
|
||||
@@ -30,14 +35,24 @@ export function isExperimentalFeatureEnabled(featureId: ExperimentalFeatureId):
|
||||
return (isMobile() || options.is("newLayout"));
|
||||
}
|
||||
|
||||
// LLM features require server-side API calls that don't work in standalone mode
|
||||
// due to CORS restrictions from LLM providers (OpenAI, Google don't allow browser requests)
|
||||
if (featureId === "llm" && isStandalone) {
|
||||
return false;
|
||||
}
|
||||
|
||||
return getEnabledFeatures().has(featureId);
|
||||
}
|
||||
|
||||
export function getEnabledExperimentalFeatureIds() {
|
||||
const values = [ ...getEnabledFeatures().values() ];
|
||||
let values = [ ...getEnabledFeatures().values() ];
|
||||
if (isMobile() || options.is("newLayout")) {
|
||||
values.push("new-layout");
|
||||
}
|
||||
// LLM is not available in standalone mode
|
||||
if (isStandalone) {
|
||||
values = values.filter(v => v !== "llm");
|
||||
}
|
||||
return values;
|
||||
}
|
||||
|
||||
|
||||
@@ -6,7 +6,7 @@ import { useContext, useEffect, useRef, useState } from "preact/hooks";
|
||||
|
||||
import { CommandNames } from "../../components/app_context";
|
||||
import Component from "../../components/component";
|
||||
import { ExperimentalFeature, ExperimentalFeatureId, experimentalFeatures, isExperimentalFeatureEnabled, toggleExperimentalFeature } from "../../services/experimental_features";
|
||||
import { ExperimentalFeature, ExperimentalFeatureId, getAvailableExperimentalFeatures, isExperimentalFeatureEnabled, toggleExperimentalFeature } from "../../services/experimental_features";
|
||||
import { t } from "../../services/i18n";
|
||||
import utils, { dynamicRequire, isElectron, isMobile, isStandalone, reloadFrontendApp } from "../../services/utils";
|
||||
import Dropdown from "../react/Dropdown";
|
||||
@@ -112,7 +112,7 @@ function DevelopmentOptions({ dropStart }: { dropStart: boolean }) {
|
||||
return <>
|
||||
<FormListHeader text="Development Options" />
|
||||
<FormDropdownSubmenu icon="bx bx-test-tube" title="Experimental features" dropStart={dropStart}>
|
||||
{experimentalFeatures.map((feature) => (
|
||||
{getAvailableExperimentalFeatures().map((feature) => (
|
||||
<ExperimentalFeatureToggle key={feature.id} experimentalFeature={feature as ExperimentalFeature} />
|
||||
))}
|
||||
</FormDropdownSubmenu>
|
||||
|
||||
@@ -1,5 +1,4 @@
|
||||
import type { ComponentChildren, CSSProperties, RefObject } from "preact";
|
||||
import { memo } from "preact/compat";
|
||||
import { useMemo } from "preact/hooks";
|
||||
|
||||
import { CommandNames } from "../../components/app_context";
|
||||
@@ -27,7 +26,7 @@ export interface ButtonProps {
|
||||
title?: string;
|
||||
}
|
||||
|
||||
const Button = memo(({ name, buttonRef, className, text, onClick, keyboardShortcut, icon, kind, disabled, size, style, triggerCommand, ...restProps }: ButtonProps) => {
|
||||
function Button({ name, buttonRef, className, text, onClick, keyboardShortcut, icon, kind, disabled, size, style, triggerCommand, ...restProps }: ButtonProps) {
|
||||
// Memoize classes array to prevent recreation
|
||||
const classes = useMemo(() => {
|
||||
const classList: string[] = ["btn"];
|
||||
@@ -83,7 +82,7 @@ const Button = memo(({ name, buttonRef, className, text, onClick, keyboardShortc
|
||||
{text} {shortcutElements}
|
||||
</button>
|
||||
);
|
||||
});
|
||||
}
|
||||
|
||||
export function ButtonGroup({ children }: { children: ComponentChildren }) {
|
||||
return (
|
||||
|
||||
@@ -1,7 +1,6 @@
|
||||
import { Modal as BootstrapModal } from "bootstrap";
|
||||
import clsx from "clsx";
|
||||
import { ComponentChildren, CSSProperties, RefObject } from "preact";
|
||||
import { memo } from "preact/compat";
|
||||
import { useEffect, useMemo, useRef } from "preact/hooks";
|
||||
|
||||
import { openDialog } from "../../services/dialog";
|
||||
@@ -186,7 +185,7 @@ export default function Modal({ children, className, size, title, customTitleBar
|
||||
);
|
||||
}
|
||||
|
||||
const ModalInner = memo(({ children, footer, footerAlignment, bodyStyle, footerStyle: _footerStyle }: Pick<ModalProps, "children" | "footer" | "footerAlignment" | "bodyStyle" | "footerStyle">) => {
|
||||
function ModalInner({ children, footer, footerAlignment, bodyStyle, footerStyle: _footerStyle }: Pick<ModalProps, "children" | "footer" | "footerAlignment" | "bodyStyle" | "footerStyle">) {
|
||||
// Memoize footer style
|
||||
const footerStyle = useMemo<CSSProperties>(() => {
|
||||
const style: CSSProperties = _footerStyle ?? {};
|
||||
@@ -209,4 +208,4 @@ const ModalInner = memo(({ children, footer, footerAlignment, bodyStyle, footerS
|
||||
)}
|
||||
</>
|
||||
);
|
||||
});
|
||||
}
|
||||
|
||||
@@ -1447,24 +1447,29 @@ export function useColorScheme() {
|
||||
export function useMathRendering(containerRef: RefObject<HTMLElement>, deps: unknown[]) {
|
||||
useEffect(() => {
|
||||
if (!containerRef.current) return;
|
||||
// Support both read-only (.math-tex) and CKEditor editing view (.ck-math-tex) classes
|
||||
const mathElements = containerRef.current.querySelectorAll(".math-tex, .ck-math-tex");
|
||||
const mathElements = containerRef.current.querySelectorAll(".math-tex");
|
||||
|
||||
for (const mathEl of mathElements) {
|
||||
// Skip if already rendered by KaTeX
|
||||
if (mathEl.querySelector(".katex")) continue;
|
||||
|
||||
try {
|
||||
let equation = mathEl.textContent || "";
|
||||
// CKEditor's data format wraps the equation with \(...\) or \[...\]
|
||||
// delimiters. katex.render() expects raw LaTeX without them.
|
||||
const raw = mathEl.textContent?.trim() ?? "";
|
||||
let equation: string;
|
||||
let displayMode = false;
|
||||
|
||||
// CKEditor widgets store equation without delimiters, add them for KaTeX
|
||||
if (mathEl.classList.contains("ck-math-tex")) {
|
||||
// Check if it's display mode or inline
|
||||
const isDisplay = mathEl.classList.contains("ck-math-tex-display");
|
||||
equation = isDisplay ? `\\[${equation}\\]` : `\\(${equation}\\)`;
|
||||
if (raw.startsWith("\\(") && raw.endsWith("\\)")) {
|
||||
equation = raw.slice(2, -2);
|
||||
} else if (raw.startsWith("\\[") && raw.endsWith("\\]")) {
|
||||
equation = raw.slice(2, -2);
|
||||
displayMode = true;
|
||||
} else {
|
||||
equation = raw;
|
||||
}
|
||||
|
||||
math.render(equation, mathEl as HTMLElement);
|
||||
math.render(equation, mathEl as HTMLElement, { displayMode });
|
||||
} catch (e) {
|
||||
console.warn("Failed to render math:", e);
|
||||
}
|
||||
|
||||
@@ -200,17 +200,34 @@ function extractTocFromTextEditor(editor: CKTextEditor) {
|
||||
|
||||
const level = Number(item.name.replace( 'heading', '' ));
|
||||
|
||||
// Convert model element to view, then to DOM to get HTML
|
||||
// Convert model element to view, then to DOM to get HTML.
|
||||
// Math UIElements render their KaTeX content asynchronously, so
|
||||
// ck-math-tex spans may be empty at read time. Replace them with
|
||||
// math-tex spans (the data format) using the equation from the model,
|
||||
// so useMathRendering can render them synchronously in the sidebar.
|
||||
const viewEl = editor.editing.mapper.toViewElement(item);
|
||||
let text = '';
|
||||
if (viewEl) {
|
||||
const domEl = editor.editing.view.domConverter.mapViewToDom(viewEl);
|
||||
if (domEl instanceof HTMLElement) {
|
||||
text = domEl.innerHTML;
|
||||
const clone = domEl.cloneNode(true) as HTMLElement;
|
||||
const ckMathSpans = clone.querySelectorAll('.ck-math-tex');
|
||||
let mathIdx = 0;
|
||||
for (const child of item.getChildren()) {
|
||||
if (!child.is('element', 'mathtex-inline')) continue;
|
||||
if (mathIdx >= ckMathSpans.length) break;
|
||||
const equation = String(child.getAttribute('equation') ?? '');
|
||||
const span = document.createElement('span');
|
||||
span.className = 'math-tex';
|
||||
span.textContent = `\\(${equation}\\)`;
|
||||
ckMathSpans[mathIdx].replaceWith(span);
|
||||
mathIdx++;
|
||||
}
|
||||
text = clone.innerHTML;
|
||||
}
|
||||
}
|
||||
|
||||
// Fallback to plain text if conversion fails
|
||||
// Fallback to plain text if DOM conversion fails
|
||||
if (!text) {
|
||||
text = Array.from( item.getChildren() )
|
||||
.map( c => c.is( '$text' ) ? c.data : '' )
|
||||
|
||||
@@ -1,7 +1,7 @@
|
||||
import { AnonymizedDbResponse, DatabaseAnonymizeResponse, DatabaseCheckIntegrityResponse } from "@triliumnext/commons";
|
||||
import { useCallback, useEffect, useMemo, useState } from "preact/hooks";
|
||||
|
||||
import { experimentalFeatures, type ExperimentalFeatureId } from "../../../services/experimental_features";
|
||||
import { getAvailableExperimentalFeatures, type ExperimentalFeatureId } from "../../../services/experimental_features";
|
||||
import { t } from "../../../services/i18n";
|
||||
import server from "../../../services/server";
|
||||
import toast from "../../../services/toast";
|
||||
@@ -182,7 +182,7 @@ function VacuumDatabaseOptions() {
|
||||
|
||||
function ExperimentalOptions() {
|
||||
const [enabledFeatures, setEnabledFeatures] = useTriliumOptionJson<ExperimentalFeatureId[]>("experimentalFeatures", true);
|
||||
const filteredFeatures = useMemo(() => experimentalFeatures.filter(e => e.id !== "new-layout"), []);
|
||||
const filteredFeatures = useMemo(() => getAvailableExperimentalFeatures().filter(e => e.id !== "new-layout"), []);
|
||||
|
||||
const toggleFeature = useCallback((featureId: ExperimentalFeatureId, enabled: boolean) => {
|
||||
if (enabled) {
|
||||
|
||||
@@ -45,7 +45,7 @@
|
||||
"@triliumnext/server": "workspace:*",
|
||||
"@types/electron-squirrel-startup": "1.0.2",
|
||||
"copy-webpack-plugin": "14.0.0",
|
||||
"electron": "41.1.1",
|
||||
"electron": "41.2.0",
|
||||
"prebuild-install": "7.1.3"
|
||||
}
|
||||
}
|
||||
@@ -4,13 +4,15 @@ import NodejsCryptoProvider from "@triliumnext/server/src/crypto_provider.js";
|
||||
import { loadCoreSchema } from "@triliumnext/server/src/core_assets.js";
|
||||
import NodejsInAppHelpProvider from "@triliumnext/server/src/in_app_help_provider.js";
|
||||
import dataDirs from "@triliumnext/server/src/services/data_dir.js";
|
||||
import options from "@triliumnext/server/src/services/options.js";
|
||||
import { options } from "@triliumnext/core";
|
||||
import port from "@triliumnext/server/src/services/port.js";
|
||||
import NodeRequestProvider from "@triliumnext/server/src/services/request.js";
|
||||
import { RESOURCE_DIR } from "@triliumnext/server/src/services/resource_dir.js";
|
||||
import tray from "@triliumnext/server/src/services/tray.js";
|
||||
import windowService from "@triliumnext/server/src/services/window.js";
|
||||
import WebSocketMessagingProvider from "@triliumnext/server/src/services/ws_messaging_provider.js";
|
||||
import ServerBackupService from "@triliumnext/server/src/backup_provider.js";
|
||||
import ServerLogService from "@triliumnext/server/src/log_provider.js";
|
||||
import BetterSqlite3Provider from "@triliumnext/server/src/sql_provider.js";
|
||||
import NodejsZipProvider from "@triliumnext/server/src/zip_provider.js";
|
||||
import { app, BrowserWindow,globalShortcut } from "electron";
|
||||
@@ -150,6 +152,9 @@ async function main() {
|
||||
// both source and bundled-production modes.
|
||||
getDemoArchive: async () => fs.readFileSync(path.join(RESOURCE_DIR, "db", "demo.zip")),
|
||||
inAppHelp: new NodejsInAppHelpProvider(),
|
||||
log: new ServerLogService(),
|
||||
backup: new ServerBackupService(options),
|
||||
image: (await import("@triliumnext/server/src/services/image_provider.js")).serverImageProvider,
|
||||
extraAppInfo: {
|
||||
nodeVersion: process.version,
|
||||
dataDirectory: path.resolve(dataDirs.TRILIUM_DATA_DIR)
|
||||
|
||||
@@ -27,6 +27,9 @@
|
||||
},
|
||||
{
|
||||
"path": "../../packages/commons/tsconfig.lib.json"
|
||||
},
|
||||
{
|
||||
"path": "../../packages/trilium-core/tsconfig.lib.json"
|
||||
}
|
||||
]
|
||||
}
|
||||
|
||||
@@ -13,7 +13,7 @@
|
||||
"@triliumnext/desktop": "workspace:*",
|
||||
"@types/fs-extra": "11.0.4",
|
||||
"copy-webpack-plugin": "14.0.0",
|
||||
"electron": "41.1.1",
|
||||
"electron": "41.2.0",
|
||||
"fs-extra": "11.3.4"
|
||||
},
|
||||
"scripts": {
|
||||
|
||||
@@ -34,11 +34,11 @@
|
||||
"@ai-sdk/google": "3.0.60",
|
||||
"@ai-sdk/openai": "3.0.52",
|
||||
"@modelcontextprotocol/sdk": "^1.12.1",
|
||||
"ai": "6.0.153",
|
||||
"ai": "6.0.154",
|
||||
"better-sqlite3": "12.8.0",
|
||||
"html-to-text": "9.0.5",
|
||||
"i18next-fs-backend": "2.6.3",
|
||||
"i18next": "26.0.3",
|
||||
"i18next": "26.0.4",
|
||||
"js-yaml": "4.1.1",
|
||||
"unpdf": "1.4.0"
|
||||
},
|
||||
@@ -82,7 +82,7 @@
|
||||
"debounce": "3.0.0",
|
||||
"debug": "4.4.3",
|
||||
"ejs": "5.0.1",
|
||||
"electron": "41.1.1",
|
||||
"electron": "41.2.0",
|
||||
"electron-window-state": "5.0.3",
|
||||
"express": "5.2.1",
|
||||
"express-http-proxy": "2.1.2",
|
||||
|
||||
@@ -1,8 +1,9 @@
|
||||
import { beforeAll } from "vitest";
|
||||
import { readFileSync } from "fs";
|
||||
import { join } from "path";
|
||||
import { initializeCore } from "@triliumnext/core";
|
||||
import { initializeCore, options } from "@triliumnext/core";
|
||||
import { serverZipExportProviderFactory } from "../src/services/export/zip/factory.js";
|
||||
import ServerBackupService from "../src/backup_provider.js";
|
||||
import ClsHookedExecutionContext from "../src/cls_provider.js";
|
||||
import NodejsCryptoProvider from "../src/crypto_provider.js";
|
||||
import NodejsZipProvider from "../src/zip_provider.js";
|
||||
@@ -10,6 +11,8 @@ import ServerPlatformProvider from "../src/platform_provider.js";
|
||||
import BetterSqlite3Provider from "../src/sql_provider.js";
|
||||
import NodejsInAppHelpProvider from "../src/in_app_help_provider.js";
|
||||
import { initializeTranslationsWithParams } from "../src/services/i18n.js";
|
||||
import ServerLogService from "../src/log_provider.js";
|
||||
import { serverImageProvider } from "../src/services/image_provider.js";
|
||||
|
||||
// Initialize environment variables.
|
||||
process.env.TRILIUM_DATA_DIR = join(__dirname, "db");
|
||||
@@ -42,6 +45,9 @@ beforeAll(async () => {
|
||||
schema: readFileSync(require.resolve("@triliumnext/core/src/assets/schema.sql"), "utf-8"),
|
||||
platform: new ServerPlatformProvider(),
|
||||
translations: initializeTranslationsWithParams,
|
||||
inAppHelp: new NodejsInAppHelpProvider()
|
||||
inAppHelp: new NodejsInAppHelpProvider(),
|
||||
backup: new ServerBackupService(options),
|
||||
log: new ServerLogService(),
|
||||
image: serverImageProvider
|
||||
});
|
||||
});
|
||||
|
||||
@@ -122,8 +122,6 @@ export default async function buildApp() {
|
||||
const { sync, consistency_checks, scheduler } = await import("@triliumnext/core");
|
||||
sync.startSyncTimer();
|
||||
|
||||
await import("./services/backup.js");
|
||||
|
||||
consistency_checks.startConsistencyChecks();
|
||||
scheduler.startScheduler();
|
||||
|
||||
|
||||
65
apps/server/src/backup_provider.ts
Normal file
65
apps/server/src/backup_provider.ts
Normal file
@@ -0,0 +1,65 @@
|
||||
import type { DatabaseBackup } from "@triliumnext/commons";
|
||||
import { BackupOptionsService, BackupService, sync_mutex as syncMutexService } from "@triliumnext/core";
|
||||
import fs from "fs";
|
||||
import path from "path";
|
||||
|
||||
import dataDir from "./services/data_dir.js";
|
||||
import log from "./services/log.js";
|
||||
import sql from "./services/sql.js";
|
||||
|
||||
export default class ServerBackupService extends BackupService {
|
||||
constructor(options: BackupOptionsService) {
|
||||
super(options);
|
||||
}
|
||||
|
||||
override async getExistingBackups(): Promise<DatabaseBackup[]> {
|
||||
if (!fs.existsSync(dataDir.BACKUP_DIR)) {
|
||||
return [];
|
||||
}
|
||||
|
||||
return fs
|
||||
.readdirSync(dataDir.BACKUP_DIR)
|
||||
.filter((fileName) => fileName.includes("backup"))
|
||||
.map((fileName) => {
|
||||
const filePath = path.resolve(dataDir.BACKUP_DIR, fileName);
|
||||
const stat = fs.statSync(filePath);
|
||||
|
||||
return { fileName, filePath, mtime: stat.mtime };
|
||||
});
|
||||
}
|
||||
|
||||
// regularBackup() inherited from BackupService - uses getContext().init()
|
||||
|
||||
override async backupNow(name: string): Promise<string> {
|
||||
// we don't want to back up DB in the middle of sync with potentially inconsistent DB state
|
||||
return await syncMutexService.doExclusively(async () => {
|
||||
const backupFile = path.resolve(`${dataDir.BACKUP_DIR}/backup-${name}.db`);
|
||||
|
||||
if (!fs.existsSync(dataDir.BACKUP_DIR)) {
|
||||
fs.mkdirSync(dataDir.BACKUP_DIR, 0o700);
|
||||
}
|
||||
|
||||
log.info("Creating backup...");
|
||||
await sql.copyDatabase(backupFile);
|
||||
log.info(`Created backup at ${backupFile}`);
|
||||
|
||||
return backupFile;
|
||||
});
|
||||
}
|
||||
|
||||
override async getBackupContent(filePath: string): Promise<Uint8Array | null> {
|
||||
const resolvedPath = path.resolve(filePath);
|
||||
const backupDir = path.resolve(dataDir.BACKUP_DIR);
|
||||
|
||||
// Security check: ensure the path is within the backup directory
|
||||
if (!resolvedPath.startsWith(backupDir + path.sep)) {
|
||||
return null;
|
||||
}
|
||||
|
||||
if (!fs.existsSync(resolvedPath)) {
|
||||
return null;
|
||||
}
|
||||
|
||||
return fs.readFileSync(resolvedPath);
|
||||
}
|
||||
}
|
||||
@@ -1,4 +1,5 @@
|
||||
import { CryptoProvider } from "@triliumnext/core";
|
||||
import type { CryptoProvider, ScryptOptions } from "@triliumnext/core";
|
||||
import { binary_utils } from "@triliumnext/core";
|
||||
import crypto from "crypto";
|
||||
import { generator } from "rand-token";
|
||||
|
||||
@@ -32,4 +33,28 @@ export default class NodejsCryptoProvider implements CryptoProvider {
|
||||
return hmac.digest("base64");
|
||||
}
|
||||
|
||||
async scrypt(
|
||||
password: Uint8Array | string,
|
||||
salt: Uint8Array | string,
|
||||
keyLength: number,
|
||||
options: ScryptOptions = {}
|
||||
): Promise<Uint8Array> {
|
||||
const { N = 16384, r = 8, p = 1 } = options;
|
||||
const passwordBytes = binary_utils.wrapStringOrBuffer(password);
|
||||
const saltBytes = binary_utils.wrapStringOrBuffer(salt);
|
||||
return crypto.scryptSync(passwordBytes, saltBytes, keyLength, { N, r, p });
|
||||
}
|
||||
|
||||
constantTimeCompare(a: Uint8Array, b: Uint8Array): boolean {
|
||||
const bufA = Buffer.from(a);
|
||||
const bufB = Buffer.from(b);
|
||||
|
||||
if (bufA.length !== bufB.length) {
|
||||
// Compare bufA against itself to maintain constant time behavior
|
||||
crypto.timingSafeEqual(bufA, bufA);
|
||||
return false;
|
||||
}
|
||||
|
||||
return crypto.timingSafeEqual(bufA, bufB);
|
||||
}
|
||||
}
|
||||
|
||||
@@ -1,11 +1,11 @@
|
||||
import { getBackup } from "@triliumnext/core";
|
||||
import type { Router } from "express";
|
||||
|
||||
import backupService from "../services/backup.js";
|
||||
import eu from "./etapi_utils.js";
|
||||
|
||||
function register(router: Router) {
|
||||
eu.route<{ backupName: string }>(router, "put", "/etapi/backup/:backupName", (req, res, next) => {
|
||||
backupService.backupNow(req.params.backupName)
|
||||
getBackup().backupNow(req.params.backupName)
|
||||
.then(() => res.sendStatus(204))
|
||||
.catch(() => res.sendStatus(500));
|
||||
});
|
||||
|
||||
125
apps/server/src/log_provider.ts
Normal file
125
apps/server/src/log_provider.ts
Normal file
@@ -0,0 +1,125 @@
|
||||
import { FileBasedLogService, type LogFileInfo } from "@triliumnext/core";
|
||||
import type { Request, Response } from "express";
|
||||
import fs from "fs";
|
||||
import { EOL } from "os";
|
||||
import path from "path";
|
||||
|
||||
import config, { LOGGING_DEFAULT_RETENTION_DAYS } from "./services/config.js";
|
||||
import dataDir from "./services/data_dir.js";
|
||||
|
||||
const LOG_FILE_PATTERN = /^trilium-\d{4}-\d{2}-\d{2}\.log$/;
|
||||
|
||||
const requestBlacklist = ["/app", "/images", "/stylesheets", "/api/recent-notes"];
|
||||
|
||||
export default class ServerLogService extends FileBasedLogService {
|
||||
private logFile: fs.WriteStream | undefined;
|
||||
|
||||
constructor() {
|
||||
super();
|
||||
// Server uses sync initialization since Node.js fs operations are sync
|
||||
this.ensureLogDirectory();
|
||||
this.todaysMidnight = this.getTodaysMidnight();
|
||||
this.openLogFile(this.getLogFileName());
|
||||
}
|
||||
|
||||
// ==================== Abstract Method Implementations ====================
|
||||
|
||||
protected override get eol(): string {
|
||||
return EOL;
|
||||
}
|
||||
|
||||
protected override ensureLogDirectory(): void {
|
||||
fs.mkdirSync(dataDir.LOG_DIR, { recursive: true, mode: 0o700 });
|
||||
}
|
||||
|
||||
protected override openLogFile(fileName: string): void {
|
||||
const logPath = path.join(dataDir.LOG_DIR, fileName);
|
||||
this.logFile = fs.createWriteStream(logPath, { flags: "a" });
|
||||
}
|
||||
|
||||
protected override closeLogFile(): void {
|
||||
if (this.logFile) {
|
||||
this.logFile.end();
|
||||
this.logFile = undefined;
|
||||
}
|
||||
}
|
||||
|
||||
protected override writeEntry(entry: string): void {
|
||||
this.logFile?.write(entry);
|
||||
}
|
||||
|
||||
protected override readLogFile(fileName: string): string | null {
|
||||
const filePath = path.join(dataDir.LOG_DIR, fileName);
|
||||
try {
|
||||
return fs.readFileSync(filePath, "utf8");
|
||||
} catch {
|
||||
return null;
|
||||
}
|
||||
}
|
||||
|
||||
protected override async listLogFiles(): Promise<LogFileInfo[]> {
|
||||
const files = await fs.promises.readdir(dataDir.LOG_DIR);
|
||||
const logFiles: LogFileInfo[] = [];
|
||||
|
||||
for (const file of files) {
|
||||
if (!LOG_FILE_PATTERN.test(file)) {
|
||||
continue;
|
||||
}
|
||||
|
||||
const filePath = path.join(dataDir.LOG_DIR, file);
|
||||
|
||||
// Security: Verify path stays within LOG_DIR
|
||||
const resolvedPath = path.resolve(filePath);
|
||||
const resolvedLogDir = path.resolve(dataDir.LOG_DIR);
|
||||
if (!resolvedPath.startsWith(resolvedLogDir + path.sep)) {
|
||||
continue;
|
||||
}
|
||||
|
||||
try {
|
||||
const stats = await fs.promises.stat(filePath);
|
||||
logFiles.push({ name: file, mtime: stats.mtime });
|
||||
} catch {
|
||||
// Skip files we can't stat
|
||||
}
|
||||
}
|
||||
|
||||
return logFiles;
|
||||
}
|
||||
|
||||
protected override async deleteLogFile(fileName: string): Promise<void> {
|
||||
const filePath = path.join(dataDir.LOG_DIR, fileName);
|
||||
|
||||
// Security: Verify path stays within LOG_DIR
|
||||
const resolvedPath = path.resolve(filePath);
|
||||
const resolvedLogDir = path.resolve(dataDir.LOG_DIR);
|
||||
if (!resolvedPath.startsWith(resolvedLogDir + path.sep)) {
|
||||
return;
|
||||
}
|
||||
|
||||
await fs.promises.unlink(filePath);
|
||||
}
|
||||
|
||||
protected override getRetentionDays(): number {
|
||||
const customRetentionDays = config.Logging.retentionDays;
|
||||
if (customRetentionDays !== undefined && customRetentionDays !== 0) {
|
||||
return customRetentionDays;
|
||||
}
|
||||
return LOGGING_DEFAULT_RETENTION_DAYS;
|
||||
}
|
||||
|
||||
// ==================== Server-Specific Methods ====================
|
||||
|
||||
request(req: Request, res: Response, timeMs: number, responseLength: number | string = "?"): void {
|
||||
for (const bl of requestBlacklist) {
|
||||
if (req.url.startsWith(bl)) {
|
||||
return;
|
||||
}
|
||||
}
|
||||
|
||||
if (req.url.includes(".js.map") || req.url.includes(".css.map")) {
|
||||
return;
|
||||
}
|
||||
|
||||
this.info(`${timeMs >= 10 ? "Slow " : ""}${res.statusCode} ${req.method} ${req.url} with ${responseLength} bytes took ${timeMs}ms`);
|
||||
}
|
||||
}
|
||||
@@ -3,15 +3,17 @@
|
||||
* are loaded later and will result in an empty string.
|
||||
*/
|
||||
|
||||
import { getLog, initializeCore, sql_init } from "@triliumnext/core";
|
||||
import { getLog, initializeCore, options, sql_init } from "@triliumnext/core";
|
||||
import fs from "fs";
|
||||
import { t } from "i18next";
|
||||
import path from "path";
|
||||
|
||||
import ServerBackupService from "./backup_provider.js";
|
||||
import ClsHookedExecutionContext from "./cls_provider.js";
|
||||
import { getIntegrationTestDbPath, loadCoreSchema } from "./core_assets.js";
|
||||
import NodejsCryptoProvider from "./crypto_provider.js";
|
||||
import NodejsInAppHelpProvider from "./in_app_help_provider.js";
|
||||
import ServerLogService from "./log_provider.js";
|
||||
import ServerPlatformProvider from "./platform_provider.js";
|
||||
import dataDirs from "./services/data_dir.js";
|
||||
import port from "./services/port.js";
|
||||
@@ -37,6 +39,8 @@ async function startApplication() {
|
||||
dbProvider.loadFromFile(DOCUMENT_PATH, config.General.readOnly);
|
||||
}
|
||||
|
||||
const logService = new ServerLogService();
|
||||
|
||||
await initializeCore({
|
||||
dbConfig: {
|
||||
provider: dbProvider,
|
||||
@@ -49,12 +53,11 @@ async function startApplication() {
|
||||
const cls = (await import("./services/cls.js")).default;
|
||||
const becca_loader = (await import("@triliumnext/core")).becca_loader;
|
||||
const entity_changes = (await import("./services/entity_changes.js")).default;
|
||||
const log = (await import("./services/log")).default;
|
||||
|
||||
const entityChangeIds = cls.getAndClearEntityChangeIds();
|
||||
|
||||
if (entityChangeIds.length > 0) {
|
||||
log.info("Transaction rollback dirtied the becca, forcing reload.");
|
||||
logService.info("Transaction rollback dirtied the becca, forcing reload.");
|
||||
|
||||
becca_loader.load();
|
||||
}
|
||||
@@ -71,12 +74,15 @@ async function startApplication() {
|
||||
messaging: new WebSocketMessagingProvider(),
|
||||
schema: loadCoreSchema(),
|
||||
platform: new ServerPlatformProvider(),
|
||||
log: logService,
|
||||
translations: (await import("./services/i18n.js")).initializeTranslationsWithParams,
|
||||
// demo.zip is a server-owned asset; src/assets is copied to dist/assets
|
||||
// by the build script, so the same RESOURCE_DIR-relative path works in
|
||||
// both source and bundled-production modes.
|
||||
getDemoArchive: async () => fs.readFileSync(path.join(RESOURCE_DIR, "db", "demo.zip")),
|
||||
inAppHelp: new NodejsInAppHelpProvider(),
|
||||
backup: new ServerBackupService(options),
|
||||
image: (await import("./services/image_provider.js")).serverImageProvider,
|
||||
extraAppInfo: {
|
||||
nodeVersion: process.version,
|
||||
dataDirectory: path.resolve(dataDirs.TRILIUM_DATA_DIR)
|
||||
|
||||
@@ -1,33 +0,0 @@
|
||||
"use strict";
|
||||
|
||||
import { readFile } from "fs/promises";
|
||||
import { join } from "path";
|
||||
import dateUtils from "../../services/date_utils.js";
|
||||
import dataDir from "../../services/data_dir.js";
|
||||
import log from "../../services/log.js";
|
||||
import { t } from "i18next";
|
||||
|
||||
const { LOG_DIR } = dataDir;
|
||||
|
||||
async function getBackendLog() {
|
||||
const fileName = `trilium-${dateUtils.localNowDate()}.log`;
|
||||
try {
|
||||
const file = join(LOG_DIR, fileName);
|
||||
return await readFile(file, "utf8");
|
||||
} catch (e) {
|
||||
const isErrorInstance = e instanceof Error;
|
||||
|
||||
// most probably the log file does not exist yet - https://github.com/zadam/trilium/issues/1977
|
||||
if (isErrorInstance && "code" in e && e.code === "ENOENT") {
|
||||
log.error(e);
|
||||
return t("backend_log.log-does-not-exist", { fileName });
|
||||
}
|
||||
|
||||
log.error(isErrorInstance ? e : `Reading the backend log '${fileName}' failed with an unknown error: '${e}'.`);
|
||||
return t("backend_log.reading-log-failed", { fileName });
|
||||
}
|
||||
}
|
||||
|
||||
export default {
|
||||
getBackendLog
|
||||
};
|
||||
@@ -1,12 +1,11 @@
|
||||
import { BackupDatabaseNowResponse, DatabaseCheckIntegrityResponse } from "@triliumnext/commons";
|
||||
import { becca_loader, ValidationError } from "@triliumnext/core";
|
||||
import { becca_loader, getBackup, ValidationError } from "@triliumnext/core";
|
||||
import type { Request, Response } from "express";
|
||||
import fs, { readFileSync } from "fs";
|
||||
import path from "path";
|
||||
|
||||
import { getIntegrationTestDbPath } from "../../core_assets.js";
|
||||
import anonymizationService from "../../services/anonymization.js";
|
||||
import backupService from "../../services/backup.js";
|
||||
import consistencyChecksService from "../../services/consistency_checks.js";
|
||||
import dataDir from "../../services/data_dir.js";
|
||||
import log from "../../services/log.js";
|
||||
@@ -14,12 +13,12 @@ import sql from "../../services/sql.js";
|
||||
import sql_init from "../../services/sql_init.js";
|
||||
|
||||
function getExistingBackups() {
|
||||
return backupService.getExistingBackups();
|
||||
return getBackup().getExistingBackups();
|
||||
}
|
||||
|
||||
async function backupDatabase() {
|
||||
return {
|
||||
backupFile: await backupService.backupNow("now")
|
||||
backupFile: await getBackup().backupNow("now")
|
||||
} satisfies BackupDatabaseNowResponse;
|
||||
}
|
||||
|
||||
|
||||
@@ -1,4 +1,10 @@
|
||||
import { events as eventService, getInstanceId } from "@triliumnext/core";
|
||||
/**
|
||||
* Server-only login routes.
|
||||
*
|
||||
* Protected session routes (loginToProtectedSession, logoutFromProtectedSession,
|
||||
* touchProtectedSession) are now in core and registered via buildSharedApiRoutes.
|
||||
*/
|
||||
import { getInstanceId } from "@triliumnext/core";
|
||||
import type { Request } from "express";
|
||||
|
||||
import appInfo from "../../services/app_info.js";
|
||||
@@ -7,12 +13,10 @@ import passwordEncryptionService from "../../services/encryption/password_encryp
|
||||
import recoveryCodeService from "../../services/encryption/recovery_codes";
|
||||
import etapiTokenService from "../../services/etapi_tokens.js";
|
||||
import options from "../../services/options.js";
|
||||
import protectedSessionService from "../../services/protected_session.js";
|
||||
import sql from "../../services/sql.js";
|
||||
import sqlInit from "../../services/sql_init.js";
|
||||
import totp from "../../services/totp";
|
||||
import utils from "../../services/utils.js";
|
||||
import ws from "../../services/ws.js";
|
||||
|
||||
/**
|
||||
* @swagger
|
||||
@@ -118,48 +122,7 @@ function loginSync(req: Request) {
|
||||
};
|
||||
}
|
||||
|
||||
function loginToProtectedSession(req: Request) {
|
||||
const password = req.body.password;
|
||||
|
||||
if (!passwordEncryptionService.verifyPassword(password)) {
|
||||
return {
|
||||
success: false,
|
||||
message: "Given current password doesn't match hash"
|
||||
};
|
||||
}
|
||||
|
||||
const decryptedDataKey = passwordEncryptionService.getDataKey(password);
|
||||
if (!decryptedDataKey) {
|
||||
return {
|
||||
success: false,
|
||||
message: "Unable to obtain data key."
|
||||
};
|
||||
}
|
||||
|
||||
protectedSessionService.setDataKey(decryptedDataKey);
|
||||
|
||||
eventService.emit(eventService.ENTER_PROTECTED_SESSION);
|
||||
|
||||
ws.sendMessageToAllClients({ type: "protectedSessionLogin" });
|
||||
|
||||
return {
|
||||
success: true
|
||||
};
|
||||
}
|
||||
|
||||
function logoutFromProtectedSession() {
|
||||
protectedSessionService.resetDataKey();
|
||||
|
||||
eventService.emit(eventService.LEAVE_PROTECTED_SESSION);
|
||||
|
||||
ws.sendMessageToAllClients({ type: "protectedSessionLogout" });
|
||||
}
|
||||
|
||||
function touchProtectedSession() {
|
||||
protectedSessionService.touchProtectedSession();
|
||||
}
|
||||
|
||||
function token(req: Request) {
|
||||
async function token(req: Request) {
|
||||
const password = req.body.password;
|
||||
const submittedTotpToken = req.body.totpToken;
|
||||
|
||||
@@ -169,7 +132,7 @@ function token(req: Request) {
|
||||
}
|
||||
}
|
||||
|
||||
if (!passwordEncryptionService.verifyPassword(password)) {
|
||||
if (!(await passwordEncryptionService.verifyPassword(password))) {
|
||||
return [401, "Incorrect credential"];
|
||||
}
|
||||
|
||||
@@ -191,8 +154,5 @@ function verifyTOTP(submittedTotpToken: string) {
|
||||
|
||||
export default {
|
||||
loginSync,
|
||||
loginToProtectedSession,
|
||||
logoutFromProtectedSession,
|
||||
touchProtectedSession,
|
||||
token
|
||||
};
|
||||
|
||||
@@ -4,12 +4,11 @@ import type { Request } from "express";
|
||||
|
||||
import passwordService from "../../services/encryption/password.js";
|
||||
|
||||
function changePassword(req: Request): ChangePasswordResponse {
|
||||
async function changePassword(req: Request): Promise<ChangePasswordResponse> {
|
||||
if (passwordService.isPasswordSet()) {
|
||||
return passwordService.changePassword(req.body.current_password, req.body.new_password);
|
||||
return await passwordService.changePassword(req.body.current_password, req.body.new_password);
|
||||
}
|
||||
return passwordService.setPassword(req.body.new_password);
|
||||
|
||||
return await passwordService.setPassword(req.body.new_password);
|
||||
}
|
||||
|
||||
function resetPassword(req: Request) {
|
||||
|
||||
@@ -1,19 +1,15 @@
|
||||
import { ValidationError } from "@triliumnext/core";
|
||||
import { ValidationError, password_encryption } from "@triliumnext/core";
|
||||
import { i18n } from "@triliumnext/core";
|
||||
import crypto from "crypto";
|
||||
import type { Request, Response } from 'express';
|
||||
|
||||
import appPath from "../services/app_path.js";
|
||||
import assetPath, { assetUrlFragment } from "../services/asset_path.js";
|
||||
import myScryptService from "../services/encryption/my_scrypt.js";
|
||||
import openIDEncryption from '../services/encryption/open_id_encryption.js';
|
||||
import passwordService from "../services/encryption/password.js";
|
||||
import recoveryCodeService from '../services/encryption/recovery_codes.js';
|
||||
import log from "../services/log.js";
|
||||
import openID from '../services/open_id.js';
|
||||
import optionService from "../services/options.js";
|
||||
import totp from '../services/totp.js';
|
||||
import utils from "../services/utils.js";
|
||||
|
||||
function loginPage(req: Request, res: Response) {
|
||||
// Login page is triggered twice. Once here, and another time (see sendLoginError) if the password is failed.
|
||||
@@ -40,7 +36,7 @@ function setPasswordPage(req: Request, res: Response) {
|
||||
});
|
||||
}
|
||||
|
||||
function setPassword(req: Request, res: Response) {
|
||||
async function setPassword(req: Request, res: Response) {
|
||||
if (passwordService.isPasswordSet()) {
|
||||
throw new ValidationError("Password has been already set");
|
||||
}
|
||||
@@ -67,7 +63,7 @@ function setPassword(req: Request, res: Response) {
|
||||
return;
|
||||
}
|
||||
|
||||
passwordService.setPassword(password1);
|
||||
await passwordService.setPassword(password1);
|
||||
|
||||
res.redirect("login");
|
||||
}
|
||||
@@ -102,7 +98,7 @@ function setPassword(req: Request, res: Response) {
|
||||
* '401':
|
||||
* description: Password / TOTP mismatch
|
||||
*/
|
||||
function login(req: Request, res: Response) {
|
||||
async function login(req: Request, res: Response) {
|
||||
if (openID.isOpenIDEnabled()) {
|
||||
res.oidc.login({
|
||||
returnTo: '/',
|
||||
@@ -124,7 +120,7 @@ function login(req: Request, res: Response) {
|
||||
}
|
||||
}
|
||||
|
||||
if (!verifyPassword(submittedPassword)) {
|
||||
if (!(await password_encryption.verifyPassword(submittedPassword))) {
|
||||
sendLoginError(req, res, 'password');
|
||||
return;
|
||||
}
|
||||
@@ -157,18 +153,6 @@ function verifyTOTP(submittedTotpToken: string) {
|
||||
return recoveryCodeValidates;
|
||||
}
|
||||
|
||||
function verifyPassword(submittedPassword: string) {
|
||||
const hashed_password = utils.fromBase64(optionService.getOption("passwordVerificationHash"));
|
||||
|
||||
const guess_hashed = myScryptService.getVerificationHash(submittedPassword);
|
||||
|
||||
// Use constant-time comparison to prevent timing attacks
|
||||
if (hashed_password.length !== guess_hashed.length) {
|
||||
return false;
|
||||
}
|
||||
return crypto.timingSafeEqual(guess_hashed, hashed_password);
|
||||
}
|
||||
|
||||
function sendLoginError(req: Request, res: Response, errorType: 'password' | 'totp' = 'password') {
|
||||
// note that logged IP address is usually meaningless since the traffic should come from a reverse proxy
|
||||
if (totp.isTotpEnabled()) {
|
||||
|
||||
@@ -18,7 +18,6 @@ import auth from "../services/auth.js";
|
||||
import openID from '../services/open_id.js';
|
||||
import { isElectron } from "../services/utils.js";
|
||||
import shareRoutes from "../share/routes.js";
|
||||
import backendLogRoute from "./api/backend_log.js";
|
||||
import clipperRoute from "./api/clipper.js";
|
||||
import databaseRoute from "./api/database.js";
|
||||
import etapiTokensApiRoutes from "./api/etapi_tokens.js";
|
||||
@@ -29,7 +28,6 @@ import llmSpecialNotesRoute from "./api/llm_special_notes.js";
|
||||
import loginApiRoute from "./api/login.js";
|
||||
import metricsRoute from "./api/metrics.js";
|
||||
import ocrRoute from "./api/ocr.js";
|
||||
import passwordApiRoute from "./api/password.js";
|
||||
import recoveryCodes from './api/recovery_codes.js';
|
||||
import senderRoute from "./api/sender.js";
|
||||
import systemInfoRoute from "./api/system_info.js";
|
||||
@@ -59,9 +57,9 @@ function register(app: express.Application) {
|
||||
});
|
||||
|
||||
route(GET, "/bootstrap", [ auth.checkAuth ], indexRoute.bootstrap);
|
||||
route(PST, "/login", [loginRateLimiter], loginRoute.login);
|
||||
asyncRoute(PST, "/login", [loginRateLimiter], loginRoute.login, null);
|
||||
route(PST, "/logout", [csrfMiddleware, auth.checkAuth], loginRoute.logout);
|
||||
route(PST, "/set-password", [auth.checkAppInitialized, auth.checkPasswordNotSet], loginRoute.setPassword);
|
||||
asyncRoute(PST, "/set-password", [auth.checkAppInitialized, auth.checkPasswordNotSet], loginRoute.setPassword, null);
|
||||
route(GET, "/setup", [], setupRoute.setupPage);
|
||||
|
||||
|
||||
@@ -107,8 +105,6 @@ function register(app: express.Application) {
|
||||
apiRoute(PST, "/api/notes/:noteId/save-to-tmp-dir", filesRoute.saveNoteToTmpDir);
|
||||
apiRoute(PST, "/api/notes/:noteId/upload-modified-file", filesRoute.uploadModifiedFileToNote);
|
||||
|
||||
// TODO: Bring back attachment uploading
|
||||
// route(PST, "/api/notes/:noteId/attachments/upload", [auth.checkApiAuthOrElectron, uploadMiddlewareWithErrorHandling, csrfMiddleware], attachmentsApiRoute.uploadAttachment, apiResultHandler);
|
||||
asyncRoute(
|
||||
GET,
|
||||
"/api/attachments/:attachmentId/open-partial",
|
||||
@@ -124,15 +120,9 @@ function register(app: express.Application) {
|
||||
apiRoute(PST, "/api/attachments/:attachmentId/upload-modified-file", filesRoute.uploadModifiedFileToAttachment);
|
||||
route(PUT, "/api/attachments/:attachmentId/file", [auth.checkApiAuthOrElectron, uploadMiddlewareWithErrorHandling, csrfMiddleware], filesRoute.updateAttachment, apiResultHandler);
|
||||
|
||||
// TODO: Re-enable once ported to core.
|
||||
// route(PUT, "/api/images/:noteId", [auth.checkApiAuthOrElectron, uploadMiddlewareWithErrorHandling, csrfMiddleware], imageRoute.updateImage, apiResultHandler);
|
||||
|
||||
// TODO: Re-enable once we support route()
|
||||
// route(GET, "/api/revisions/:revisionId/download", [auth.checkApiAuthOrElectron], revisionsApiRoute.downloadRevision);
|
||||
|
||||
apiRoute(PST, "/api/password/change", passwordApiRoute.changePassword);
|
||||
apiRoute(PST, "/api/password/reset", passwordApiRoute.resetPassword);
|
||||
|
||||
apiRoute(GET, "/api/metrics", metricsRoute.getMetrics);
|
||||
apiRoute(GET, "/api/system-checks", systemInfoRoute.systemChecks);
|
||||
|
||||
@@ -140,12 +130,7 @@ function register(app: express.Application) {
|
||||
route(GET, "/api/health-check", [], () => ({ status: "ok" }), apiResultHandler);
|
||||
|
||||
route(PST, "/api/login/sync", [loginRateLimiter], loginApiRoute.loginSync, apiResultHandler);
|
||||
// this is for entering protected mode so user has to be already logged-in (that's the reason we don't require username)
|
||||
apiRoute(PST, "/api/login/protected", loginApiRoute.loginToProtectedSession);
|
||||
apiRoute(PST, "/api/login/protected/touch", loginApiRoute.touchProtectedSession);
|
||||
apiRoute(PST, "/api/logout/protected", loginApiRoute.logoutFromProtectedSession);
|
||||
|
||||
route(PST, "/api/login/token", [loginRateLimiter], loginApiRoute.token, apiResultHandler);
|
||||
asyncRoute(PST, "/api/login/token", [loginRateLimiter], loginApiRoute.token, apiResultHandler);
|
||||
|
||||
apiRoute(GET, "/api/etapi-tokens", etapiTokensApiRoutes.getTokens);
|
||||
apiRoute(PST, "/api/etapi-tokens", etapiTokensApiRoutes.createToken);
|
||||
@@ -173,10 +158,7 @@ function register(app: express.Application) {
|
||||
asyncRoute(PST, "/api/database/rebuild/", [auth.checkApiAuthOrElectron], databaseRoute.rebuildIntegrationTestDatabase, apiResultHandler);
|
||||
}
|
||||
|
||||
// backup requires execution outside of transaction
|
||||
asyncRoute(PST, "/api/database/backup-database", [auth.checkApiAuthOrElectron, csrfMiddleware], databaseRoute.backupDatabase, apiResultHandler);
|
||||
apiRoute(GET, "/api/database/backups", databaseRoute.getExistingBackups);
|
||||
route(GET, "/api/database/backup/download", [auth.checkApiAuthOrElectron], databaseRoute.downloadBackup);
|
||||
// backup routes (backups, backup-database, backup/download) are in core
|
||||
// VACUUM requires execution outside of transaction
|
||||
asyncRoute(PST, "/api/database/vacuum-database", [auth.checkApiAuthOrElectron, csrfMiddleware], databaseRoute.vacuumDatabase, apiResultHandler);
|
||||
|
||||
@@ -189,11 +171,10 @@ function register(app: express.Application) {
|
||||
apiRoute(GET, "/api/llm-chat/models", llmChatRoute.getModels);
|
||||
|
||||
// no CSRF since this is called from android app
|
||||
route(PST, "/api/sender/login", [loginRateLimiter], loginApiRoute.token, apiResultHandler);
|
||||
asyncRoute(PST, "/api/sender/login", [loginRateLimiter], loginApiRoute.token, apiResultHandler);
|
||||
asyncRoute(PST, "/api/sender/image", [auth.checkEtapiToken, uploadMiddlewareWithErrorHandling], senderRoute.uploadImage, apiResultHandler);
|
||||
asyncRoute(PST, "/api/sender/note", [auth.checkEtapiToken], senderRoute.saveNote, apiResultHandler);
|
||||
|
||||
asyncApiRoute(GET, "/api/backend-log", backendLogRoute.getBackendLog);
|
||||
route(GET, "/api/fonts", [auth.checkApiAuthOrElectron], fontsRoute.getFontCss);
|
||||
|
||||
shareRoutes.register(router);
|
||||
|
||||
@@ -1,93 +0,0 @@
|
||||
import type { DatabaseBackup, OptionNames } from "@triliumnext/commons";
|
||||
import { sync_mutex as syncMutexService } from "@triliumnext/core";
|
||||
import fs from "fs";
|
||||
import path from "path";
|
||||
|
||||
import cls from "./cls.js";
|
||||
import dataDir from "./data_dir.js";
|
||||
import dateUtils from "./date_utils.js";
|
||||
import log from "./log.js";
|
||||
import optionService from "./options.js";
|
||||
import sql from "./sql.js";
|
||||
|
||||
type BackupType = "daily" | "weekly" | "monthly";
|
||||
|
||||
function getExistingBackups(): DatabaseBackup[] {
|
||||
if (!fs.existsSync(dataDir.BACKUP_DIR)) {
|
||||
return [];
|
||||
}
|
||||
|
||||
return fs
|
||||
.readdirSync(dataDir.BACKUP_DIR)
|
||||
.filter((fileName) => fileName.includes("backup"))
|
||||
.map((fileName) => {
|
||||
const filePath = path.resolve(dataDir.BACKUP_DIR, fileName);
|
||||
const stat = fs.statSync(filePath);
|
||||
|
||||
return { fileName, filePath, mtime: stat.mtime };
|
||||
});
|
||||
}
|
||||
|
||||
function regularBackup() {
|
||||
cls.init(() => {
|
||||
periodBackup("lastDailyBackupDate", "daily", 24 * 3600);
|
||||
|
||||
periodBackup("lastWeeklyBackupDate", "weekly", 7 * 24 * 3600);
|
||||
|
||||
periodBackup("lastMonthlyBackupDate", "monthly", 30 * 24 * 3600);
|
||||
});
|
||||
}
|
||||
|
||||
function isBackupEnabled(backupType: BackupType) {
|
||||
let optionName: OptionNames;
|
||||
switch (backupType) {
|
||||
case "daily":
|
||||
optionName = "dailyBackupEnabled";
|
||||
break;
|
||||
case "weekly":
|
||||
optionName = "weeklyBackupEnabled";
|
||||
break;
|
||||
case "monthly":
|
||||
optionName = "monthlyBackupEnabled";
|
||||
break;
|
||||
}
|
||||
|
||||
return optionService.getOptionBool(optionName);
|
||||
}
|
||||
|
||||
function periodBackup(optionName: "lastDailyBackupDate" | "lastWeeklyBackupDate" | "lastMonthlyBackupDate", backupType: BackupType, periodInSeconds: number) {
|
||||
if (!isBackupEnabled(backupType)) {
|
||||
return;
|
||||
}
|
||||
|
||||
const now = new Date();
|
||||
const lastBackupDate = dateUtils.parseDateTime(optionService.getOption(optionName));
|
||||
|
||||
if (now.getTime() - lastBackupDate.getTime() > periodInSeconds * 1000) {
|
||||
backupNow(backupType);
|
||||
|
||||
optionService.setOption(optionName, dateUtils.utcNowDateTime());
|
||||
}
|
||||
}
|
||||
|
||||
async function backupNow(name: string) {
|
||||
// we don't want to back up DB in the middle of sync with potentially inconsistent DB state
|
||||
return await syncMutexService.doExclusively(async () => {
|
||||
const backupFile = path.resolve(`${dataDir.BACKUP_DIR}/backup-${name}.db`);
|
||||
|
||||
if (!fs.existsSync(dataDir.BACKUP_DIR)) {
|
||||
fs.mkdirSync(dataDir.BACKUP_DIR, 0o700);
|
||||
}
|
||||
|
||||
log.info("Creating backup...");
|
||||
await sql.copyDatabase(backupFile);
|
||||
log.info(`Created backup at ${backupFile}`);
|
||||
|
||||
return backupFile;
|
||||
});
|
||||
}
|
||||
export default {
|
||||
getExistingBackups,
|
||||
backupNow,
|
||||
regularBackup
|
||||
};
|
||||
@@ -16,6 +16,11 @@ describe("data_dir.ts unit tests", async () => {
|
||||
pathJoinMock: vi.fn()
|
||||
};
|
||||
|
||||
// Reset the module cache so the dynamic imports below get a fresh instance
|
||||
// of data_dir.ts with the mocked dependencies rather than the cached copy
|
||||
// loaded by spec/setup.ts.
|
||||
vi.resetModules();
|
||||
|
||||
// using doMock, to avoid hoisting, so that we can use the mockFn object
|
||||
// to collect all mocked Fns
|
||||
vi.doMock("node:fs", () => {
|
||||
@@ -37,7 +42,7 @@ describe("data_dir.ts unit tests", async () => {
|
||||
};
|
||||
});
|
||||
|
||||
vi.doMock("path", () => {
|
||||
vi.doMock("node:path", () => {
|
||||
return {
|
||||
join: mockFn.pathJoinMock
|
||||
};
|
||||
|
||||
@@ -6,9 +6,9 @@
|
||||
* - case D) as a fallback if the previous step fails, we'll use home dir
|
||||
*/
|
||||
|
||||
import fs from "fs";
|
||||
import os from "os";
|
||||
import { join as pathJoin } from "path";
|
||||
import fs from "node:fs";
|
||||
import os from "node:os";
|
||||
import { join as pathJoin } from "node:path";
|
||||
|
||||
const DIR_NAME = "trilium-data";
|
||||
const FOLDER_PERMISSIONS = 0o700;
|
||||
|
||||
@@ -1,63 +1,74 @@
|
||||
import optionService from "../options.js";
|
||||
/**
|
||||
* Server-side scrypt service.
|
||||
*
|
||||
* Password-related functions (getVerificationHash, getPasswordDerivedKey, getScryptHash)
|
||||
* have been moved to @triliumnext/core. Import them from there:
|
||||
*
|
||||
* import { scrypt } from "@triliumnext/core";
|
||||
* await scrypt.getVerificationHash(password);
|
||||
*
|
||||
* This file only contains OpenID-specific functions that use synchronous crypto
|
||||
* and access the user_data table directly.
|
||||
*/
|
||||
import crypto from "crypto";
|
||||
import sql from "../sql.js";
|
||||
|
||||
function getVerificationHash(password: crypto.BinaryLike) {
|
||||
const salt = optionService.getOption("passwordVerificationSalt");
|
||||
const SCRYPT_OPTIONS = { N: 16384, r: 8, p: 1 };
|
||||
|
||||
return getScryptHash(password, salt);
|
||||
}
|
||||
|
||||
function getPasswordDerivedKey(password: crypto.BinaryLike) {
|
||||
const salt = optionService.getOption("passwordDerivedKeySalt");
|
||||
|
||||
return getScryptHash(password, salt);
|
||||
}
|
||||
|
||||
function getScryptHash(password: crypto.BinaryLike, salt: crypto.BinaryLike) {
|
||||
const hashed = crypto.scryptSync(password, salt, 32, { N: 16384, r: 8, p: 1 });
|
||||
|
||||
return hashed;
|
||||
/**
|
||||
* Sync scrypt hash for OpenID functions (server-only).
|
||||
*/
|
||||
function getScryptHashSync(password: crypto.BinaryLike, salt: crypto.BinaryLike): Buffer {
|
||||
return crypto.scryptSync(password, salt, 32, SCRYPT_OPTIONS);
|
||||
}
|
||||
|
||||
/**
|
||||
* Gets the verification hash for an OpenID subject identifier.
|
||||
* Uses the salt from user_data table if not provided.
|
||||
*/
|
||||
function getSubjectIdentifierVerificationHash(
|
||||
guessedUserId: string | crypto.BinaryLike,
|
||||
salt?: string
|
||||
) {
|
||||
if (salt != null) return getScryptHash(guessedUserId, salt);
|
||||
): Buffer | undefined {
|
||||
if (salt != null) return getScryptHashSync(guessedUserId, salt);
|
||||
|
||||
const savedSalt = sql.getValue("SELECT salt FROM user_data;");
|
||||
if (!savedSalt) {
|
||||
console.error("User salt undefined!");
|
||||
return undefined;
|
||||
}
|
||||
return getScryptHash(guessedUserId, savedSalt.toString());
|
||||
return getScryptHashSync(guessedUserId, savedSalt.toString());
|
||||
}
|
||||
|
||||
/**
|
||||
* Gets the derived key for an OpenID subject identifier.
|
||||
* Uses the salt from user_data table if not provided.
|
||||
*/
|
||||
function getSubjectIdentifierDerivedKey(
|
||||
subjectIdentifer: crypto.BinaryLike,
|
||||
givenSalt?: string
|
||||
) {
|
||||
): Buffer | undefined {
|
||||
if (givenSalt !== undefined) {
|
||||
return getScryptHash(subjectIdentifer, givenSalt.toString());
|
||||
return getScryptHashSync(subjectIdentifer, givenSalt.toString());
|
||||
}
|
||||
|
||||
const salt = sql.getValue("SELECT salt FROM user_data;");
|
||||
if (!salt) return undefined;
|
||||
|
||||
return getScryptHash(subjectIdentifer, salt.toString());
|
||||
return getScryptHashSync(subjectIdentifer, salt.toString());
|
||||
}
|
||||
|
||||
/**
|
||||
* Creates a derived key for an OpenID subject identifier with the given salt.
|
||||
*/
|
||||
function createSubjectIdentifierDerivedKey(
|
||||
subjectIdentifer: string | crypto.BinaryLike,
|
||||
salt: string | crypto.BinaryLike
|
||||
) {
|
||||
return getScryptHash(subjectIdentifer, salt);
|
||||
): Buffer {
|
||||
return getScryptHashSync(subjectIdentifer, salt);
|
||||
}
|
||||
|
||||
export default {
|
||||
getVerificationHash,
|
||||
getPasswordDerivedKey,
|
||||
getSubjectIdentifierVerificationHash,
|
||||
getSubjectIdentifierDerivedKey,
|
||||
createSubjectIdentifierDerivedKey
|
||||
|
||||
@@ -1,85 +1,5 @@
|
||||
import sql from "../sql.js";
|
||||
import optionService from "../options.js";
|
||||
import myScryptService from "./my_scrypt.js";
|
||||
import { randomSecureToken, toBase64 } from "../utils.js";
|
||||
import passwordEncryptionService from "./password_encryption.js";
|
||||
import { ChangePasswordResponse } from "@triliumnext/commons";
|
||||
|
||||
function isPasswordSet() {
|
||||
return !!sql.getValue("SELECT value FROM options WHERE name = 'passwordVerificationHash'");
|
||||
}
|
||||
|
||||
function changePassword(currentPassword: string, newPassword: string): ChangePasswordResponse {
|
||||
if (!isPasswordSet()) {
|
||||
throw new Error("Password has not been set yet, so it cannot be changed. Use 'setPassword' instead.");
|
||||
}
|
||||
|
||||
if (!passwordEncryptionService.verifyPassword(currentPassword)) {
|
||||
return {
|
||||
success: false,
|
||||
message: "Given current password doesn't match hash"
|
||||
};
|
||||
}
|
||||
|
||||
sql.transactional(() => {
|
||||
const decryptedDataKey = passwordEncryptionService.getDataKey(currentPassword);
|
||||
|
||||
optionService.setOption("passwordVerificationSalt", randomSecureToken(32));
|
||||
optionService.setOption("passwordDerivedKeySalt", randomSecureToken(32));
|
||||
|
||||
const newPasswordVerificationKey = toBase64(myScryptService.getVerificationHash(newPassword));
|
||||
|
||||
if (decryptedDataKey) {
|
||||
// TODO: what should happen if the decrypted data key is null?
|
||||
passwordEncryptionService.setDataKey(newPassword, decryptedDataKey);
|
||||
}
|
||||
|
||||
optionService.setOption("passwordVerificationHash", newPasswordVerificationKey);
|
||||
});
|
||||
|
||||
return {
|
||||
success: true
|
||||
};
|
||||
}
|
||||
|
||||
function setPassword(password: string): ChangePasswordResponse {
|
||||
if (isPasswordSet()) {
|
||||
throw new Error("Password is set already. Either change it or perform 'reset password' first.");
|
||||
}
|
||||
|
||||
optionService.createOption("passwordVerificationSalt", randomSecureToken(32), true);
|
||||
optionService.createOption("passwordDerivedKeySalt", randomSecureToken(32), true);
|
||||
|
||||
const passwordVerificationKey = toBase64(myScryptService.getVerificationHash(password));
|
||||
optionService.createOption("passwordVerificationHash", passwordVerificationKey, true);
|
||||
|
||||
// passwordEncryptionService expects these options to already exist
|
||||
optionService.createOption("encryptedDataKey", "", true);
|
||||
|
||||
passwordEncryptionService.setDataKey(password, randomSecureToken(16));
|
||||
|
||||
return {
|
||||
success: true
|
||||
};
|
||||
}
|
||||
|
||||
function resetPassword() {
|
||||
// user forgot the password,
|
||||
sql.transactional(() => {
|
||||
optionService.setOption("passwordVerificationSalt", "");
|
||||
optionService.setOption("passwordDerivedKeySalt", "");
|
||||
optionService.setOption("encryptedDataKey", "");
|
||||
optionService.setOption("passwordVerificationHash", "");
|
||||
});
|
||||
|
||||
return {
|
||||
success: true
|
||||
};
|
||||
}
|
||||
|
||||
export default {
|
||||
isPasswordSet,
|
||||
changePassword,
|
||||
setPassword,
|
||||
resetPassword
|
||||
};
|
||||
/**
|
||||
* Re-exports the password service from core.
|
||||
* changePassword and setPassword are now async - callers must use await.
|
||||
*/
|
||||
export { default } from "@triliumnext/core/src/services/encryption/password.js";
|
||||
|
||||
@@ -1,41 +1,5 @@
|
||||
import { data_encryption } from "@triliumnext/core";
|
||||
|
||||
import optionService from "../options.js";
|
||||
import { constantTimeCompare,toBase64 } from "../utils.js";
|
||||
import myScryptService from "./my_scrypt.js";
|
||||
|
||||
function verifyPassword(password: string) {
|
||||
const givenPasswordHash = toBase64(myScryptService.getVerificationHash(password));
|
||||
|
||||
const dbPasswordHash = optionService.getOptionOrNull("passwordVerificationHash");
|
||||
|
||||
if (!dbPasswordHash) {
|
||||
return false;
|
||||
}
|
||||
|
||||
return constantTimeCompare(givenPasswordHash, dbPasswordHash);
|
||||
}
|
||||
|
||||
function setDataKey(password: string, plainTextDataKey: string | Buffer | Uint8Array) {
|
||||
const passwordDerivedKey = myScryptService.getPasswordDerivedKey(password);
|
||||
|
||||
const newEncryptedDataKey = data_encryption.encrypt(passwordDerivedKey, plainTextDataKey);
|
||||
|
||||
optionService.setOption("encryptedDataKey", newEncryptedDataKey);
|
||||
}
|
||||
|
||||
function getDataKey(password: string) {
|
||||
const passwordDerivedKey = myScryptService.getPasswordDerivedKey(password);
|
||||
|
||||
const encryptedDataKey = optionService.getOption("encryptedDataKey");
|
||||
|
||||
const decryptedDataKey = data_encryption.decrypt(passwordDerivedKey, encryptedDataKey);
|
||||
|
||||
return decryptedDataKey;
|
||||
}
|
||||
|
||||
export default {
|
||||
verifyPassword,
|
||||
getDataKey,
|
||||
setDataKey
|
||||
};
|
||||
/**
|
||||
* Re-exports the password encryption service from core.
|
||||
* All functions are now async - callers must use await.
|
||||
*/
|
||||
export { default } from "@triliumnext/core/src/services/encryption/password_encryption.js";
|
||||
|
||||
@@ -1,9 +1,20 @@
|
||||
/**
|
||||
* Server-side TOTP (Time-based One-Time Password) encryption service.
|
||||
*
|
||||
* This service handles encryption/decryption of TOTP secrets and remains
|
||||
* server-only because:
|
||||
* - TOTP/2FA is not supported in standalone mode
|
||||
* - Uses synchronous Node.js crypto.scryptSync for performance
|
||||
*
|
||||
* The TOTP secret is encrypted using AES and stored in options.
|
||||
* Verification uses scrypt-based hashing with constant-time comparison.
|
||||
*/
|
||||
import type { OptionNames } from "@triliumnext/commons";
|
||||
import { data_encryption } from "@triliumnext/core";
|
||||
import crypto from "crypto";
|
||||
|
||||
import optionService from "../options.js";
|
||||
import { constantTimeCompare,randomSecureToken, toBase64 } from "../utils.js";
|
||||
import myScryptService from "./my_scrypt.js";
|
||||
import { constantTimeCompare, randomSecureToken, toBase64 } from "../utils.js";
|
||||
|
||||
const TOTP_OPTIONS: Record<string, OptionNames> = {
|
||||
SALT: "totpEncryptionSalt",
|
||||
@@ -11,8 +22,19 @@ const TOTP_OPTIONS: Record<string, OptionNames> = {
|
||||
VERIFICATION_HASH: "totpVerificationHash"
|
||||
};
|
||||
|
||||
const SCRYPT_OPTIONS = { N: 16384, r: 8, p: 1 };
|
||||
|
||||
/**
|
||||
* Gets verification hash for TOTP secret using the password verification salt.
|
||||
* This is server-only and uses sync scrypt.
|
||||
*/
|
||||
function getTotpVerificationHash(secret: string): Buffer {
|
||||
const salt = optionService.getOption("passwordVerificationSalt");
|
||||
return crypto.scryptSync(secret, salt, 32, SCRYPT_OPTIONS);
|
||||
}
|
||||
|
||||
function verifyTotpSecret(secret: string): boolean {
|
||||
const givenSecretHash = toBase64(myScryptService.getVerificationHash(secret));
|
||||
const givenSecretHash = toBase64(getTotpVerificationHash(secret));
|
||||
const dbSecretHash = optionService.getOptionOrNull(TOTP_OPTIONS.VERIFICATION_HASH);
|
||||
|
||||
if (!dbSecretHash) {
|
||||
@@ -30,7 +52,7 @@ function setTotpSecret(secret: string) {
|
||||
const encryptionSalt = randomSecureToken(32);
|
||||
optionService.setOption(TOTP_OPTIONS.SALT, encryptionSalt);
|
||||
|
||||
const verificationHash = toBase64(myScryptService.getVerificationHash(secret));
|
||||
const verificationHash = toBase64(getTotpVerificationHash(secret));
|
||||
optionService.setOption(TOTP_OPTIONS.VERIFICATION_HASH, verificationHash);
|
||||
|
||||
const encryptedSecret = data_encryption.encrypt(
|
||||
|
||||
@@ -1,191 +1,12 @@
|
||||
import { sanitize } from "@triliumnext/core";
|
||||
import imageType from "image-type";
|
||||
import isAnimated from "is-animated";
|
||||
import isSvg from "is-svg";
|
||||
import { Jimp } from "jimp";
|
||||
import sanitizeFilename from "sanitize-filename";
|
||||
/**
|
||||
* Server-side image service.
|
||||
* Re-exports core image service and adds OCR scheduling.
|
||||
*/
|
||||
|
||||
import becca from "../becca/becca.js";
|
||||
import { imageService } from "@triliumnext/core";
|
||||
import log from "./log.js";
|
||||
import noteService from "./notes.js";
|
||||
import ocrService from "./ocr/ocr_service.js";
|
||||
import optionService from "./options.js";
|
||||
import protectedSessionService from "./protected_session.js";
|
||||
import sql from "./sql.js";
|
||||
|
||||
async function processImage(uploadBuffer: Buffer, originalName: string, shrinkImageSwitch: boolean) {
|
||||
const compressImages = optionService.getOptionBool("compressImages");
|
||||
const origImageFormat = await getImageType(uploadBuffer);
|
||||
|
||||
if (!origImageFormat || !["jpg", "png"].includes(origImageFormat.ext)) {
|
||||
shrinkImageSwitch = false;
|
||||
} else if (isAnimated(uploadBuffer)) {
|
||||
// recompression of animated images will make them static
|
||||
shrinkImageSwitch = false;
|
||||
}
|
||||
|
||||
let finalImageBuffer;
|
||||
let imageFormat;
|
||||
|
||||
if (compressImages && shrinkImageSwitch) {
|
||||
finalImageBuffer = await shrinkImage(uploadBuffer, originalName);
|
||||
imageFormat = await getImageType(finalImageBuffer);
|
||||
} else {
|
||||
finalImageBuffer = uploadBuffer;
|
||||
imageFormat = origImageFormat || {
|
||||
ext: "dat"
|
||||
};
|
||||
}
|
||||
|
||||
return {
|
||||
buffer: finalImageBuffer,
|
||||
imageFormat
|
||||
};
|
||||
}
|
||||
|
||||
async function getImageType(buffer: Buffer) {
|
||||
if (isSvg(buffer.toString())) {
|
||||
return { ext: "svg" };
|
||||
}
|
||||
return (await imageType(buffer)) || { ext: "jpg" }; // optimistic JPG default
|
||||
}
|
||||
|
||||
function getImageMimeFromExtension(ext: string) {
|
||||
ext = ext.toLowerCase();
|
||||
|
||||
return `image/${ext === "svg" ? "svg+xml" : ext}`;
|
||||
}
|
||||
|
||||
function updateImage(noteId: string, uploadBuffer: Buffer, originalName: string) {
|
||||
log.info(`Updating image ${noteId}: ${originalName}`);
|
||||
|
||||
originalName = sanitize.sanitizeHtml(originalName);
|
||||
|
||||
const note = becca.getNote(noteId);
|
||||
if (!note) {
|
||||
throw new Error("Unable to find note.");
|
||||
}
|
||||
|
||||
note.saveRevision();
|
||||
|
||||
note.setLabel("originalFileName", originalName);
|
||||
|
||||
// resizing images asynchronously since JIMP does not support sync operation
|
||||
processImage(uploadBuffer, originalName, true).then(({ buffer, imageFormat }) => {
|
||||
sql.transactional(() => {
|
||||
note.mime = getImageMimeFromExtension(imageFormat.ext);
|
||||
note.save();
|
||||
|
||||
note.setContent(buffer);
|
||||
});
|
||||
|
||||
scheduleOcrForNote(noteId);
|
||||
});
|
||||
}
|
||||
|
||||
function saveImage(parentNoteId: string, uploadBuffer: Buffer, originalName: string, shrinkImageSwitch: boolean, trimFilename = false) {
|
||||
log.info(`Saving image ${originalName} into parent ${parentNoteId}`);
|
||||
|
||||
if (trimFilename && originalName.length > 40) {
|
||||
// https://github.com/zadam/trilium/issues/2307
|
||||
originalName = "image";
|
||||
}
|
||||
|
||||
const fileName = sanitizeFilename(originalName);
|
||||
const parentNote = becca.getNote(parentNoteId);
|
||||
if (!parentNote) {
|
||||
throw new Error("Unable to find parent note.");
|
||||
}
|
||||
|
||||
const { note } = noteService.createNewNote({
|
||||
parentNoteId,
|
||||
title: fileName,
|
||||
type: "image",
|
||||
mime: "unknown",
|
||||
content: "",
|
||||
isProtected: parentNote.isProtected && protectedSessionService.isProtectedSessionAvailable()
|
||||
});
|
||||
|
||||
note.addLabel("originalFileName", originalName);
|
||||
|
||||
// resizing images asynchronously since JIMP does not support sync operation
|
||||
processImage(uploadBuffer, originalName, shrinkImageSwitch).then(({ buffer, imageFormat }) => {
|
||||
sql.transactional(() => {
|
||||
note.mime = getImageMimeFromExtension(imageFormat.ext);
|
||||
|
||||
if (!originalName.includes(".")) {
|
||||
originalName += `.${imageFormat.ext}`;
|
||||
|
||||
note.setLabel("originalFileName", originalName);
|
||||
note.title = sanitizeFilename(originalName);
|
||||
}
|
||||
|
||||
note.setContent(buffer, { forceSave: true });
|
||||
});
|
||||
|
||||
scheduleOcrForNote(note.noteId);
|
||||
});
|
||||
|
||||
return {
|
||||
fileName,
|
||||
note,
|
||||
noteId: note.noteId,
|
||||
url: `api/images/${note.noteId}/${encodeURIComponent(fileName)}`
|
||||
};
|
||||
}
|
||||
|
||||
function saveImageToAttachment(noteId: string, uploadBuffer: Buffer, originalName: string, shrinkImageSwitch?: boolean, trimFilename = false) {
|
||||
log.info(`Saving image '${originalName}' as attachment into note '${noteId}'`);
|
||||
|
||||
if (trimFilename && originalName.length > 40) {
|
||||
// https://github.com/zadam/trilium/issues/2307
|
||||
originalName = "image";
|
||||
}
|
||||
|
||||
const fileName = sanitizeFilename(originalName);
|
||||
const note = becca.getNoteOrThrow(noteId);
|
||||
|
||||
let attachment = note.saveAttachment({
|
||||
role: "image",
|
||||
mime: "unknown",
|
||||
title: fileName
|
||||
});
|
||||
|
||||
// TODO: this is a quick-fix solution of a recursive bug - this is called from asyncPostProcessContent()
|
||||
// find some async way to do this - perhaps some global timeout with a Set of noteIds needing one more
|
||||
// run of asyncPostProcessContent
|
||||
setTimeout(() => {
|
||||
sql.transactional(() => {
|
||||
const note = becca.getNoteOrThrow(noteId);
|
||||
noteService.asyncPostProcessContent(note, note.getContent()); // to mark an unused attachment for deletion
|
||||
});
|
||||
}, 5000);
|
||||
|
||||
// resizing images asynchronously since JIMP does not support sync operation
|
||||
const attachmentId = attachment.attachmentId;
|
||||
processImage(uploadBuffer, originalName, !!shrinkImageSwitch).then(({ buffer, imageFormat }) => {
|
||||
sql.transactional(() => {
|
||||
// re-read, might be changed in the meantime
|
||||
if (!attachmentId) {
|
||||
throw new Error("Missing attachment ID.");
|
||||
}
|
||||
attachment = becca.getAttachmentOrThrow(attachmentId);
|
||||
|
||||
attachment.mime = getImageMimeFromExtension(imageFormat.ext);
|
||||
|
||||
if (!originalName.includes(".")) {
|
||||
originalName += `.${imageFormat.ext}`;
|
||||
attachment.title = sanitizeFilename(originalName);
|
||||
}
|
||||
|
||||
attachment.setContent(buffer, { forceSave: true });
|
||||
});
|
||||
|
||||
scheduleOcrForAttachment(attachmentId);
|
||||
});
|
||||
|
||||
return attachment;
|
||||
}
|
||||
|
||||
function scheduleOcrForNote(noteId: string) {
|
||||
if (optionService.getOptionBool("ocrAutoProcessImages")) {
|
||||
@@ -211,52 +32,34 @@ function scheduleOcrForAttachment(attachmentId: string | undefined) {
|
||||
}
|
||||
}
|
||||
|
||||
async function shrinkImage(buffer: Buffer, originalName: string) {
|
||||
let jpegQuality = optionService.getOptionInt("imageJpegQuality", 0);
|
||||
|
||||
if (jpegQuality < 10 || jpegQuality > 100) {
|
||||
jpegQuality = 75;
|
||||
}
|
||||
|
||||
let finalImageBuffer;
|
||||
try {
|
||||
finalImageBuffer = await resize(buffer, jpegQuality);
|
||||
} catch (e: any) {
|
||||
log.error(`Failed to resize image '${originalName}', stack: ${e.stack}`);
|
||||
|
||||
finalImageBuffer = buffer;
|
||||
}
|
||||
|
||||
// if resizing did not help with size, then save the original
|
||||
// (can happen when e.g., resizing PNG into JPEG)
|
||||
if (finalImageBuffer.byteLength >= buffer.byteLength) {
|
||||
finalImageBuffer = buffer;
|
||||
}
|
||||
|
||||
return finalImageBuffer;
|
||||
// Re-export core functions with OCR scheduling wrappers
|
||||
function saveImage(
|
||||
parentNoteId: string,
|
||||
uploadBuffer: Uint8Array,
|
||||
originalName: string,
|
||||
shrinkImageSwitch: boolean,
|
||||
trimFilename = false
|
||||
) {
|
||||
const result = imageService.saveImage(parentNoteId, uploadBuffer, originalName, shrinkImageSwitch, trimFilename);
|
||||
scheduleOcrForNote(result.noteId);
|
||||
return result;
|
||||
}
|
||||
|
||||
async function resize(buffer: Buffer, quality: number) {
|
||||
const imageMaxWidthHeight = optionService.getOptionInt("imageMaxWidthHeight");
|
||||
function saveImageToAttachment(
|
||||
noteId: string,
|
||||
uploadBuffer: Uint8Array,
|
||||
originalName: string,
|
||||
shrinkImageSwitch?: boolean,
|
||||
trimFilename = false
|
||||
) {
|
||||
const result = imageService.saveImageToAttachment(noteId, uploadBuffer, originalName, shrinkImageSwitch, trimFilename);
|
||||
scheduleOcrForAttachment(result.attachmentId);
|
||||
return result;
|
||||
}
|
||||
|
||||
const start = Date.now();
|
||||
|
||||
const image = await Jimp.read(buffer);
|
||||
|
||||
if (image.bitmap.width > image.bitmap.height && image.bitmap.width > imageMaxWidthHeight) {
|
||||
image.resize({ w: imageMaxWidthHeight });
|
||||
} else if (image.bitmap.height > imageMaxWidthHeight) {
|
||||
image.resize({ h: imageMaxWidthHeight });
|
||||
}
|
||||
|
||||
// when converting PNG to JPG, we lose the alpha channel, this is replaced by white to match Trilium white background
|
||||
image.background = 0xffffffff;
|
||||
|
||||
const resultBuffer = await image.getBuffer("image/jpeg", { quality });
|
||||
|
||||
log.info(`Resizing image of ${resultBuffer.byteLength} took ${Date.now() - start}ms`);
|
||||
|
||||
return resultBuffer;
|
||||
function updateImage(noteId: string, uploadBuffer: Uint8Array, originalName: string) {
|
||||
imageService.updateImage(noteId, uploadBuffer, originalName);
|
||||
scheduleOcrForNote(noteId);
|
||||
}
|
||||
|
||||
export default {
|
||||
|
||||
114
apps/server/src/services/image_provider.ts
Normal file
114
apps/server/src/services/image_provider.ts
Normal file
@@ -0,0 +1,114 @@
|
||||
/**
|
||||
* Server-side image provider implementation.
|
||||
* Uses JIMP for image processing with full compression support.
|
||||
*/
|
||||
|
||||
import imageType from "image-type";
|
||||
import isAnimated from "is-animated";
|
||||
import isSvg from "is-svg";
|
||||
import { Jimp } from "jimp";
|
||||
|
||||
import type { ImageProvider, ImageFormat, ProcessedImage } from "@triliumnext/core/src/services/image_provider.js";
|
||||
import log from "./log.js";
|
||||
import optionService from "./options.js";
|
||||
|
||||
async function getImageTypeFromBuffer(buffer: Uint8Array): Promise<ImageFormat | null> {
|
||||
// Check for SVG first (text-based)
|
||||
if (isSvg(Buffer.from(buffer).toString())) {
|
||||
return { ext: "svg", mime: "image/svg+xml" };
|
||||
}
|
||||
|
||||
const detected = await imageType(buffer);
|
||||
if (detected) {
|
||||
return { ext: detected.ext, mime: detected.mime };
|
||||
}
|
||||
|
||||
return null;
|
||||
}
|
||||
|
||||
async function shrinkImage(buffer: Uint8Array, originalName: string): Promise<Uint8Array> {
|
||||
let jpegQuality = optionService.getOptionInt("imageJpegQuality", 0);
|
||||
|
||||
if (jpegQuality < 10 || jpegQuality > 100) {
|
||||
jpegQuality = 75;
|
||||
}
|
||||
|
||||
let finalImageBuffer: Uint8Array;
|
||||
try {
|
||||
finalImageBuffer = await resize(buffer, jpegQuality);
|
||||
} catch (e: unknown) {
|
||||
const error = e as Error;
|
||||
log.error(`Failed to resize image '${originalName}', stack: ${error.stack}`);
|
||||
finalImageBuffer = buffer;
|
||||
}
|
||||
|
||||
// If resizing did not help with size, then save the original
|
||||
if (finalImageBuffer.byteLength >= buffer.byteLength) {
|
||||
finalImageBuffer = buffer;
|
||||
}
|
||||
|
||||
return finalImageBuffer;
|
||||
}
|
||||
|
||||
async function resize(buffer: Uint8Array, quality: number): Promise<Uint8Array> {
|
||||
const imageMaxWidthHeight = optionService.getOptionInt("imageMaxWidthHeight");
|
||||
|
||||
const start = Date.now();
|
||||
|
||||
const image = await Jimp.read(Buffer.from(buffer));
|
||||
|
||||
if (image.bitmap.width > image.bitmap.height && image.bitmap.width > imageMaxWidthHeight) {
|
||||
image.resize({ w: imageMaxWidthHeight });
|
||||
} else if (image.bitmap.height > imageMaxWidthHeight) {
|
||||
image.resize({ h: imageMaxWidthHeight });
|
||||
}
|
||||
|
||||
// When converting PNG to JPG, we lose the alpha channel - replace with white
|
||||
image.background = 0xffffffff;
|
||||
|
||||
const resultBuffer = await image.getBuffer("image/jpeg", { quality });
|
||||
|
||||
log.info(`Resizing image of ${resultBuffer.byteLength} took ${Date.now() - start}ms`);
|
||||
|
||||
return resultBuffer;
|
||||
}
|
||||
|
||||
export const serverImageProvider: ImageProvider = {
|
||||
getImageType(buffer: Uint8Array): ImageFormat | null {
|
||||
// Synchronous check for SVG
|
||||
if (isSvg(Buffer.from(buffer).toString())) {
|
||||
return { ext: "svg", mime: "image/svg+xml" };
|
||||
}
|
||||
|
||||
// For other formats, we need async detection but interface is sync
|
||||
// Return null and let processImage handle the async detection
|
||||
return null;
|
||||
},
|
||||
|
||||
async processImage(buffer: Uint8Array, originalName: string, shrink: boolean): Promise<ProcessedImage> {
|
||||
const compressImages = optionService.getOptionBool("compressImages");
|
||||
const origImageFormat = await getImageTypeFromBuffer(buffer);
|
||||
|
||||
let shouldShrink = shrink;
|
||||
|
||||
if (!origImageFormat || !["jpg", "png"].includes(origImageFormat.ext)) {
|
||||
shouldShrink = false;
|
||||
} else if (isAnimated(Buffer.from(buffer))) {
|
||||
// Recompression of animated images will make them static
|
||||
shouldShrink = false;
|
||||
}
|
||||
|
||||
let finalBuffer: Uint8Array;
|
||||
let format: ImageFormat;
|
||||
|
||||
if (compressImages && shouldShrink) {
|
||||
finalBuffer = await shrinkImage(buffer, originalName);
|
||||
format = (await getImageTypeFromBuffer(finalBuffer)) || { ext: "jpg", mime: "image/jpeg" };
|
||||
} else {
|
||||
finalBuffer = buffer;
|
||||
format = origImageFormat || { ext: "dat", mime: "application/octet-stream" };
|
||||
}
|
||||
|
||||
return { buffer: finalBuffer, format };
|
||||
}
|
||||
};
|
||||
@@ -1,150 +1,10 @@
|
||||
import { getLog } from "@triliumnext/core/src/services/log.js";
|
||||
import { getLog } from "@triliumnext/core";
|
||||
import type { Request, Response } from "express";
|
||||
import fs from "fs";
|
||||
import { EOL } from "os";
|
||||
import path from "path";
|
||||
import ServerLogService from "../log_provider.js";
|
||||
|
||||
import cls from "./cls.js";
|
||||
import config, { LOGGING_DEFAULT_RETENTION_DAYS } from "./config.js";
|
||||
import dataDir from "./data_dir.js";
|
||||
|
||||
fs.mkdirSync(dataDir.LOG_DIR, { recursive: true, mode: 0o700 });
|
||||
|
||||
let logFile: fs.WriteStream | undefined;
|
||||
|
||||
const SECOND = 1000;
|
||||
const MINUTE = 60 * SECOND;
|
||||
const HOUR = 60 * MINUTE;
|
||||
const DAY = 24 * HOUR;
|
||||
|
||||
const MINIMUM_FILES_TO_KEEP = 7;
|
||||
|
||||
let todaysMidnight!: Date;
|
||||
|
||||
initLogFile();
|
||||
|
||||
function getTodaysMidnight() {
|
||||
const now = new Date();
|
||||
|
||||
return new Date(now.getFullYear(), now.getMonth(), now.getDate());
|
||||
}
|
||||
|
||||
async function cleanupOldLogFiles() {
|
||||
try {
|
||||
// Get retention days from environment or options
|
||||
let retentionDays = LOGGING_DEFAULT_RETENTION_DAYS;
|
||||
const customRetentionDays = config.Logging.retentionDays;
|
||||
if (customRetentionDays > 0) {
|
||||
retentionDays = customRetentionDays;
|
||||
} else if (customRetentionDays <= -1){
|
||||
info(`Log cleanup: keeping all log files, as specified by configuration.`);
|
||||
return;
|
||||
}
|
||||
|
||||
const cutoffDate = new Date();
|
||||
cutoffDate.setDate(cutoffDate.getDate() - retentionDays);
|
||||
|
||||
// Read all log files
|
||||
const files = await fs.promises.readdir(dataDir.LOG_DIR);
|
||||
const logFiles: Array<{name: string, mtime: Date, path: string}> = [];
|
||||
|
||||
for (const file of files) {
|
||||
// Security: Only process files matching our log pattern
|
||||
if (!/^trilium-\d{4}-\d{2}-\d{2}\.log$/.test(file)) {
|
||||
continue;
|
||||
}
|
||||
|
||||
const filePath = path.join(dataDir.LOG_DIR, file);
|
||||
|
||||
// Security: Verify path stays within LOG_DIR
|
||||
const resolvedPath = path.resolve(filePath);
|
||||
const resolvedLogDir = path.resolve(dataDir.LOG_DIR);
|
||||
if (!resolvedPath.startsWith(resolvedLogDir + path.sep)) {
|
||||
continue;
|
||||
}
|
||||
|
||||
try {
|
||||
const stats = await fs.promises.stat(filePath);
|
||||
logFiles.push({ name: file, mtime: stats.mtime, path: filePath });
|
||||
} catch (err) {
|
||||
// Skip files we can't stat
|
||||
}
|
||||
}
|
||||
|
||||
// Sort by modification time (oldest first)
|
||||
logFiles.sort((a, b) => a.mtime.getTime() - b.mtime.getTime());
|
||||
|
||||
// Keep minimum number of files
|
||||
if (logFiles.length <= MINIMUM_FILES_TO_KEEP) {
|
||||
return;
|
||||
}
|
||||
|
||||
// Delete old files, keeping minimum
|
||||
let deletedCount = 0;
|
||||
for (let i = 0; i < logFiles.length - MINIMUM_FILES_TO_KEEP; i++) {
|
||||
const file = logFiles[i];
|
||||
if (file.mtime < cutoffDate) {
|
||||
try {
|
||||
await fs.promises.unlink(file.path);
|
||||
deletedCount++;
|
||||
} catch (err) {
|
||||
// Log deletion failed, but continue with others
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
if (deletedCount > 0) {
|
||||
info(`Log cleanup: deleted ${deletedCount} old log files`);
|
||||
}
|
||||
} catch (err) {
|
||||
// Cleanup failed, but don't crash the log rotation
|
||||
}
|
||||
}
|
||||
|
||||
function initLogFile() {
|
||||
todaysMidnight = getTodaysMidnight();
|
||||
|
||||
const logPath = `${dataDir.LOG_DIR}/trilium-${formatDate()}.log`;
|
||||
const isRotating = !!logFile;
|
||||
|
||||
if (isRotating) {
|
||||
logFile!.end();
|
||||
}
|
||||
|
||||
logFile = fs.createWriteStream(logPath, { flags: "a" });
|
||||
|
||||
// Clean up old log files when rotating to a new file
|
||||
if (isRotating) {
|
||||
cleanupOldLogFiles().catch(() => {
|
||||
// Ignore cleanup errors
|
||||
});
|
||||
}
|
||||
}
|
||||
|
||||
function checkDate(millisSinceMidnight: number) {
|
||||
if (millisSinceMidnight >= DAY) {
|
||||
initLogFile();
|
||||
|
||||
millisSinceMidnight -= DAY;
|
||||
}
|
||||
|
||||
return millisSinceMidnight;
|
||||
}
|
||||
|
||||
function log(str: string | Error) {
|
||||
const bundleNoteId = cls.get("bundleNoteId");
|
||||
|
||||
if (bundleNoteId) {
|
||||
str = `[Script ${bundleNoteId}] ${str}`;
|
||||
}
|
||||
|
||||
let millisSinceMidnight = Date.now() - todaysMidnight.getTime();
|
||||
|
||||
millisSinceMidnight = checkDate(millisSinceMidnight);
|
||||
|
||||
logFile!.write(`${formatTime(millisSinceMidnight)} ${str}${EOL}`);
|
||||
|
||||
console.log(str);
|
||||
function getServerLog(): ServerLogService | undefined {
|
||||
const log = getLog();
|
||||
return log instanceof ServerLogService ? log : undefined;
|
||||
}
|
||||
|
||||
function info(message: string | Error) {
|
||||
@@ -155,44 +15,8 @@ function error(message: string | Error | unknown) {
|
||||
getLog().error(message);
|
||||
}
|
||||
|
||||
const requestBlacklist = ["/app", "/images", "/stylesheets", "/api/recent-notes"];
|
||||
|
||||
function request(req: Request, res: Response, timeMs: number, responseLength: number | string = "?") {
|
||||
for (const bl of requestBlacklist) {
|
||||
if (req.url.startsWith(bl)) {
|
||||
return;
|
||||
}
|
||||
}
|
||||
|
||||
if (req.url.includes(".js.map") || req.url.includes(".css.map")) {
|
||||
return;
|
||||
}
|
||||
|
||||
info(`${timeMs >= 10 ? "Slow " : "" }${res.statusCode} ${req.method} ${req.url} with ${responseLength} bytes took ${timeMs}ms`);
|
||||
}
|
||||
|
||||
function pad(num: number) {
|
||||
num = Math.floor(num);
|
||||
|
||||
return num < 10 ? `0${num}` : num.toString();
|
||||
}
|
||||
|
||||
function padMilli(num: number) {
|
||||
if (num < 10) {
|
||||
return `00${num}`;
|
||||
} else if (num < 100) {
|
||||
return `0${num}`;
|
||||
}
|
||||
return num.toString();
|
||||
|
||||
}
|
||||
|
||||
function formatTime(millisSinceMidnight: number) {
|
||||
return `${pad(millisSinceMidnight / HOUR)}:${pad((millisSinceMidnight % HOUR) / MINUTE)}:${pad((millisSinceMidnight % MINUTE) / SECOND)}.${padMilli(millisSinceMidnight % SECOND)}`;
|
||||
}
|
||||
|
||||
function formatDate() {
|
||||
return `${pad(todaysMidnight.getFullYear())}-${pad(todaysMidnight.getMonth() + 1)}-${pad(todaysMidnight.getDate())}`;
|
||||
getServerLog()?.request(req, res, timeMs, responseLength);
|
||||
}
|
||||
|
||||
export default {
|
||||
|
||||
@@ -1,48 +0,0 @@
|
||||
import { afterEach, beforeEach, describe, expect, it, vi } from "vitest";
|
||||
|
||||
// Mock the dependencies before importing the module
|
||||
vi.mock("./config.js", () => ({ default: { Sync: {} } }));
|
||||
vi.mock("./options.js", () => ({ default: { getOption: vi.fn() } }));
|
||||
|
||||
import config from "./config.js";
|
||||
import optionService from "./options.js";
|
||||
import syncOptions from "./sync_options.js";
|
||||
|
||||
describe("syncOptions.getSyncTimeout", () => {
|
||||
beforeEach(() => {
|
||||
(config as any).Sync = {};
|
||||
});
|
||||
|
||||
afterEach(() => {
|
||||
vi.clearAllMocks();
|
||||
});
|
||||
|
||||
it("converts database value from seconds to milliseconds", () => {
|
||||
// TimeSelector stores value in seconds (displayed value × scale)
|
||||
// Scale is UI-only, not used in backend calculation
|
||||
vi.mocked(optionService.getOption).mockReturnValue("120"); // 120 seconds = 2 minutes
|
||||
expect(syncOptions.getSyncTimeout()).toBe(120000);
|
||||
|
||||
vi.mocked(optionService.getOption).mockReturnValue("30"); // 30 seconds
|
||||
expect(syncOptions.getSyncTimeout()).toBe(30000);
|
||||
|
||||
vi.mocked(optionService.getOption).mockReturnValue("3600"); // 3600 seconds = 1 hour
|
||||
expect(syncOptions.getSyncTimeout()).toBe(3600000);
|
||||
});
|
||||
|
||||
it("treats config override as raw milliseconds for backward compatibility", () => {
|
||||
(config as any).Sync = { syncServerTimeout: "60000" }; // 60 seconds in ms
|
||||
|
||||
// Config value takes precedence, db value is ignored
|
||||
vi.mocked(optionService.getOption).mockReturnValue("9999");
|
||||
expect(syncOptions.getSyncTimeout()).toBe(60000);
|
||||
});
|
||||
|
||||
it("uses safe defaults for invalid values", () => {
|
||||
vi.mocked(optionService.getOption).mockReturnValue("");
|
||||
expect(syncOptions.getSyncTimeout()).toBe(120000); // default 120 seconds
|
||||
|
||||
(config as any).Sync = { syncServerTimeout: "invalid" };
|
||||
expect(syncOptions.getSyncTimeout()).toBe(120000); // fallback for invalid config
|
||||
});
|
||||
});
|
||||
@@ -9,7 +9,7 @@
|
||||
"preview": "pnpm build && vite preview"
|
||||
},
|
||||
"dependencies": {
|
||||
"i18next": "26.0.3",
|
||||
"i18next": "26.0.4",
|
||||
"preact": "10.29.1",
|
||||
"preact-iso": "2.11.1",
|
||||
"preact-render-to-string": "6.6.7",
|
||||
|
||||
@@ -105,7 +105,7 @@
|
||||
"overrides": {
|
||||
"@codemirror/language": "6.12.3",
|
||||
"@lezer/highlight": "1.2.3",
|
||||
"@lezer/common": "1.5.1",
|
||||
"@lezer/common": "1.5.2",
|
||||
"mermaid": "11.14.0",
|
||||
"preact": "10.29.1",
|
||||
"roughjs": "4.6.6",
|
||||
@@ -159,7 +159,7 @@
|
||||
"handlebars@<4.7.9": ">=4.7.9",
|
||||
"qs@<6.14.2": ">=6.14.2",
|
||||
"minimatch@<3.1.4": "^3.1.4",
|
||||
"minimatch@3>brace-expansion": "^5.0.0",
|
||||
"minimatch@3>brace-expansion": "^1.1.13",
|
||||
"serialize-javascript@<7.0.5": ">=7.0.5",
|
||||
"webpack@<5.104.1": ">=5.104.1",
|
||||
"file-type@>=13.0.0 <21.3.1": ">=21.3.1",
|
||||
|
||||
@@ -13,7 +13,7 @@
|
||||
"async-mutex": "0.5.0",
|
||||
"chardet": "2.1.1",
|
||||
"escape-html": "1.0.3",
|
||||
"i18next": "26.0.3",
|
||||
"i18next": "26.0.4",
|
||||
"mime-types": "3.0.2",
|
||||
"node-html-parser": "7.1.0",
|
||||
"sanitize-filename": "1.6.4",
|
||||
|
||||
@@ -1,6 +1,7 @@
|
||||
import { ExecutionContext, initContext } from "./services/context";
|
||||
import { CryptoProvider, initCrypto } from "./services/encryption/crypto";
|
||||
import { getLog, initLog } from "./services/log";
|
||||
import LogService, { getLog, initLog } from "./services/log";
|
||||
import BackupService, { initBackup, type BackupOptionsService } from "./services/backup";
|
||||
import { initSql } from "./services/sql/index";
|
||||
import { SqlService, SqlServiceParams } from "./services/sql/sql";
|
||||
import { initMessaging, MessagingProvider } from "./services/messaging/index";
|
||||
@@ -12,13 +13,19 @@ import { type PlatformProvider, initPlatform } from "./services/platform";
|
||||
import { type ZipProvider, initZipProvider } from "./services/zip_provider";
|
||||
import { type ZipExportProviderFactory, initZipExportProviderFactory } from "./services/export/zip_export_provider_factory";
|
||||
import { type InAppHelpProvider, initInAppHelp } from "./services/in_app_help";
|
||||
import { type ImageProvider, initImageProvider } from "./services/image_provider";
|
||||
|
||||
export { getLog } from "./services/log";
|
||||
export { default as LogService, getLog } from "./services/log";
|
||||
export { default as FileBasedLogService, type LogFileInfo } from "./services/file_based_log";
|
||||
export { default as BackupService, getBackup, initBackup, type BackupOptionsService } from "./services/backup";
|
||||
export type * from "./services/sql/types";
|
||||
export * from "./services/sql/index";
|
||||
export { default as sql_init } from "./services/sql_init";
|
||||
export * as protected_session from "./services/protected_session";
|
||||
export { default as data_encryption } from "./services/encryption/data_encryption"
|
||||
export { default as data_encryption } from "./services/encryption/data_encryption";
|
||||
export { default as scrypt } from "./services/encryption/scrypt";
|
||||
export { default as password_encryption } from "./services/encryption/password_encryption";
|
||||
export { default as password } from "./services/encryption/password";
|
||||
export * as binary_utils from "./services/utils/binary";
|
||||
export * as utils from "./services/utils/index";
|
||||
export * from "./services/build";
|
||||
@@ -37,7 +44,7 @@ export * as cls from "./services/context";
|
||||
export * as i18n from "./services/i18n";
|
||||
export * from "./errors";
|
||||
export { default as getInstanceId } from "./services/instance_id";
|
||||
export type { CryptoProvider } from "./services/encryption/crypto";
|
||||
export type { CryptoProvider, ScryptOptions, Cipher } from "./services/encryption/crypto";
|
||||
export { default as note_types } from "./services/note_types";
|
||||
export { default as tree } from "./services/tree";
|
||||
export { default as cloning } from "./services/cloning";
|
||||
@@ -98,6 +105,8 @@ export { default as sync_mutex } from "./services/sync_mutex";
|
||||
export { default as setup } from "./services/setup";
|
||||
export { getPlatform, type PlatformProvider } from "./services/platform";
|
||||
export type { InAppHelpProvider } from "./services/in_app_help";
|
||||
export { type ImageProvider, type ImageFormat, type ProcessedImage, getImageProvider } from "./services/image_provider";
|
||||
export { default as imageService } from "./services/image";
|
||||
export { t } from "i18next";
|
||||
export type { RequestProvider, ExecOpts, CookieJar } from "./services/request";
|
||||
export type * from "./meta";
|
||||
@@ -121,7 +130,7 @@ export { default as scriptService } from "./services/script";
|
||||
export { default as BackendScriptApi, type Api as BackendScriptApiInterface } from "./services/backend_script_api";
|
||||
export * as scheduler from "./services/scheduler";
|
||||
|
||||
export async function initializeCore({ dbConfig, executionContext, crypto, zip, zipExportProviderFactory, translations, messaging, request, schema, extraAppInfo, platform, getDemoArchive, inAppHelp }: {
|
||||
export async function initializeCore({ dbConfig, executionContext, crypto, zip, zipExportProviderFactory, translations, messaging, request, schema, extraAppInfo, platform, getDemoArchive, inAppHelp, log, backup, image }: {
|
||||
dbConfig: SqlServiceParams,
|
||||
executionContext: ExecutionContext,
|
||||
crypto: CryptoProvider,
|
||||
@@ -138,9 +147,13 @@ export async function initializeCore({ dbConfig, executionContext, crypto, zip,
|
||||
nodeVersion: string;
|
||||
dataDirectory: string;
|
||||
};
|
||||
log?: LogService;
|
||||
backup: BackupService;
|
||||
image: ImageProvider;
|
||||
}) {
|
||||
initPlatform(platform);
|
||||
initLog();
|
||||
initLog(log);
|
||||
initBackup(backup);
|
||||
await initTranslations(translations);
|
||||
initCrypto(crypto);
|
||||
initZipProvider(zip);
|
||||
@@ -148,6 +161,7 @@ export async function initializeCore({ dbConfig, executionContext, crypto, zip,
|
||||
initContext(executionContext);
|
||||
initSql(new SqlService(dbConfig, getLog()));
|
||||
initSchema(schema);
|
||||
initImageProvider(image);
|
||||
if (getDemoArchive) {
|
||||
initDemoArchive(getDemoArchive);
|
||||
}
|
||||
|
||||
@@ -1,11 +1,14 @@
|
||||
import { ConvertAttachmentToNoteResponse } from "@triliumnext/commons";
|
||||
import { ValidationError } from "../../errors";
|
||||
import type { Request } from "express";
|
||||
import type { File } from "../../services/import/common.js";
|
||||
|
||||
type FileRequest<P> = Omit<Request<P>, "file"> & { file?: File };
|
||||
|
||||
import becca from "../../becca/becca.js";
|
||||
import blobService from "../../services/blob.js";
|
||||
import imageService from "../../services/image.js";
|
||||
import { wrapStringOrBuffer } from "../../services/utils/binary.js";
|
||||
|
||||
function getAttachmentBlob(req: Request<{ attachmentId: string }>) {
|
||||
const preview = req.query.preview === "true";
|
||||
@@ -44,9 +47,9 @@ function saveAttachment(req: Request<{ noteId: string }>) {
|
||||
note.saveAttachment({ attachmentId, role, mime, title, content }, matchBy);
|
||||
}
|
||||
|
||||
function uploadAttachment(req: Request<{ noteId: string }>) {
|
||||
function uploadAttachment(req: FileRequest<{ noteId: string }>) {
|
||||
const { noteId } = req.params;
|
||||
const { file } = req as any; // TODO: Add support for file upload in type definitions and remove 'as any' cast
|
||||
const { file } = req;
|
||||
|
||||
if (!file) {
|
||||
return {
|
||||
@@ -58,8 +61,11 @@ function uploadAttachment(req: Request<{ noteId: string }>) {
|
||||
const note = becca.getNoteOrThrow(noteId);
|
||||
let url;
|
||||
|
||||
// Convert buffer to Uint8Array (Buffer extends Uint8Array, string needs encoding)
|
||||
const buffer = wrapStringOrBuffer(file.buffer as string | Uint8Array);
|
||||
|
||||
if (["image/png", "image/jpg", "image/jpeg", "image/gif", "image/webp", "image/svg+xml"].includes(file.mimetype)) {
|
||||
const attachment = imageService.saveImageToAttachment(noteId, file.buffer, file.originalname, true, true);
|
||||
const attachment = imageService.saveImageToAttachment(noteId, buffer, file.originalname, true, true);
|
||||
url = `api/attachments/${attachment.attachmentId}/image/${encodeURIComponent(attachment.title)}`;
|
||||
} else {
|
||||
const attachment = note.saveAttachment({
|
||||
|
||||
16
packages/trilium-core/src/routes/api/backend_log.ts
Normal file
16
packages/trilium-core/src/routes/api/backend_log.ts
Normal file
@@ -0,0 +1,16 @@
|
||||
"use strict";
|
||||
|
||||
import { getLog } from "../../services/log.js";
|
||||
import { t } from "i18next";
|
||||
|
||||
function getBackendLog() {
|
||||
const contents = getLog().getLogContents();
|
||||
if (contents === null) {
|
||||
return t("backend_log.log-does-not-exist", { fileName: "current log" });
|
||||
}
|
||||
return contents;
|
||||
}
|
||||
|
||||
export default {
|
||||
getBackendLog
|
||||
};
|
||||
38
packages/trilium-core/src/routes/api/backup.ts
Normal file
38
packages/trilium-core/src/routes/api/backup.ts
Normal file
@@ -0,0 +1,38 @@
|
||||
import type { BackupDatabaseNowResponse, DatabaseBackup } from "@triliumnext/commons";
|
||||
import { getBackup } from "../../services/backup.js";
|
||||
import { Request, Response } from "express";
|
||||
|
||||
async function getExistingBackups(): Promise<DatabaseBackup[]> {
|
||||
return getBackup().getExistingBackups();
|
||||
}
|
||||
|
||||
async function backupDatabase(): Promise<BackupDatabaseNowResponse> {
|
||||
return {
|
||||
backupFile: await getBackup().backupNow("now")
|
||||
};
|
||||
}
|
||||
|
||||
async function downloadBackup(req: Request, res: Response): Promise<void> {
|
||||
const filePath = req.query.filePath;
|
||||
if (!filePath || typeof filePath !== "string") {
|
||||
res.status(400).send("Missing or invalid filePath");
|
||||
return;
|
||||
}
|
||||
|
||||
const content = await getBackup().getBackupContent(filePath);
|
||||
if (!content) {
|
||||
res.status(404).send("Backup not found");
|
||||
return;
|
||||
}
|
||||
|
||||
const fileName = filePath.split("/").pop() || "backup.db";
|
||||
res.set("Content-Type", "application/x-sqlite3");
|
||||
res.set("Content-Disposition", `attachment; filename="${fileName}"`);
|
||||
res.send(content);
|
||||
}
|
||||
|
||||
export default {
|
||||
getExistingBackups,
|
||||
backupDatabase,
|
||||
downloadBackup
|
||||
};
|
||||
@@ -1,4 +1,7 @@
|
||||
import type { Request, Response } from "express";
|
||||
import type { File } from "../../services/import/common.js";
|
||||
|
||||
type FileRequest<P> = Omit<Request<P>, "file"> & { file?: File };
|
||||
|
||||
import becca from "../../becca/becca.js";
|
||||
import type BNote from "../../becca/entities/bnote.js";
|
||||
@@ -103,9 +106,9 @@ function returnAttachedImage(req: Request<{ attachmentId: string }>, res: Respon
|
||||
}
|
||||
}
|
||||
|
||||
function updateImage(req: Request<{ noteId: string }>) {
|
||||
function updateImage(req: FileRequest<{ noteId: string }>) {
|
||||
const { noteId } = req.params;
|
||||
const { file } = req as any; // TODO: Add support for file upload in type definitions and remove 'as any' cast
|
||||
const { file } = req;
|
||||
|
||||
const _note = becca.getNoteOrThrow(noteId);
|
||||
|
||||
|
||||
52
packages/trilium-core/src/routes/api/login.ts
Normal file
52
packages/trilium-core/src/routes/api/login.ts
Normal file
@@ -0,0 +1,52 @@
|
||||
import type { Request } from "express";
|
||||
import events from "../../services/events.js";
|
||||
import passwordEncryptionService from "../../services/encryption/password_encryption.js";
|
||||
import protectedSession from "../../services/protected_session.js";
|
||||
import ws from "../../services/ws.js";
|
||||
|
||||
async function loginToProtectedSession(req: Request) {
|
||||
const password = req.body.password;
|
||||
|
||||
if (!(await passwordEncryptionService.verifyPassword(password))) {
|
||||
return {
|
||||
success: false,
|
||||
message: "Given current password doesn't match hash"
|
||||
};
|
||||
}
|
||||
|
||||
const decryptedDataKey = await passwordEncryptionService.getDataKey(password);
|
||||
if (!decryptedDataKey) {
|
||||
return {
|
||||
success: false,
|
||||
message: "Unable to obtain data key."
|
||||
};
|
||||
}
|
||||
|
||||
protectedSession.setDataKey(decryptedDataKey);
|
||||
|
||||
events.emit(events.ENTER_PROTECTED_SESSION);
|
||||
|
||||
ws.sendMessageToAllClients({ type: "protectedSessionLogin" });
|
||||
|
||||
return {
|
||||
success: true
|
||||
};
|
||||
}
|
||||
|
||||
function logoutFromProtectedSession() {
|
||||
protectedSession.resetDataKey();
|
||||
|
||||
events.emit(events.LEAVE_PROTECTED_SESSION);
|
||||
|
||||
ws.sendMessageToAllClients({ type: "protectedSessionLogout" });
|
||||
}
|
||||
|
||||
function touchProtectedSession() {
|
||||
protectedSession.touchProtectedSession();
|
||||
}
|
||||
|
||||
export default {
|
||||
loginToProtectedSession,
|
||||
logoutFromProtectedSession,
|
||||
touchProtectedSession
|
||||
};
|
||||
25
packages/trilium-core/src/routes/api/password.ts
Normal file
25
packages/trilium-core/src/routes/api/password.ts
Normal file
@@ -0,0 +1,25 @@
|
||||
import type { ChangePasswordResponse } from "@triliumnext/commons";
|
||||
import type { Request } from "express";
|
||||
import passwordService from "../../services/encryption/password.js";
|
||||
import { ValidationError } from "../../errors.js";
|
||||
|
||||
async function changePassword(req: Request): Promise<ChangePasswordResponse> {
|
||||
if (passwordService.isPasswordSet()) {
|
||||
return await passwordService.changePassword(req.body.current_password, req.body.new_password);
|
||||
}
|
||||
return await passwordService.setPassword(req.body.new_password);
|
||||
}
|
||||
|
||||
function resetPassword(req: Request) {
|
||||
// protection against accidental call (not a security measure)
|
||||
if (req.query.really !== "yesIReallyWantToResetPasswordAndLoseAccessToMyProtectedNotes") {
|
||||
throw new ValidationError("Incorrect password reset confirmation");
|
||||
}
|
||||
|
||||
return passwordService.resetPassword();
|
||||
}
|
||||
|
||||
export default {
|
||||
changePassword,
|
||||
resetPassword
|
||||
};
|
||||
@@ -28,6 +28,10 @@ import filesRoute from "./api/files";
|
||||
import importRoute from "./api/import";
|
||||
import exportRoute from "./api/export";
|
||||
import scriptRoute from "./api/script";
|
||||
import backendLogRoute from "./api/backend_log";
|
||||
import backupRoute from "./api/backup";
|
||||
import passwordApiRoute from "./api/password";
|
||||
import loginApiRoute from "./api/login";
|
||||
|
||||
// TODO: Deduplicate with routes.ts
|
||||
const GET = "get",
|
||||
@@ -121,6 +125,8 @@ export function buildSharedApiRoutes({ route, asyncRoute, apiRoute, asyncApiRout
|
||||
route(GET, "/api/revisions/:revisionId/image/:filename", [checkApiAuthOrElectron], imageRoute.returnImageFromRevision);
|
||||
route(GET, "/api/attachments/:attachmentId/image/:filename", [checkApiAuthOrElectron], imageRoute.returnAttachedImage);
|
||||
route(GET, "/api/images/:noteId/:filename", [checkApiAuthOrElectron], imageRoute.returnImageFromNote);
|
||||
route(PUT, "/api/images/:noteId", [checkApiAuthOrElectron, uploadMiddlewareWithErrorHandling, csrfMiddleware], imageRoute.updateImage, apiResultHandler);
|
||||
route(PST, "/api/notes/:noteId/attachments/upload", [checkApiAuthOrElectron, uploadMiddlewareWithErrorHandling, csrfMiddleware], attachmentsApiRoute.uploadAttachment, apiResultHandler);
|
||||
|
||||
// group of the services below are meant to be executed from the outside
|
||||
route(GET, "/api/setup/status", [], setupApiRoute.getStatus, apiResultHandler);
|
||||
@@ -198,6 +204,12 @@ export function buildSharedApiRoutes({ route, asyncRoute, apiRoute, asyncApiRout
|
||||
apiRoute(PST, "/api/bulk-action/affected-notes", bulkActionRoute.getAffectedNoteCount);
|
||||
|
||||
apiRoute(GET, "/api/app-info", appInfoRoute.getAppInfo);
|
||||
asyncApiRoute(GET, "/api/backend-log", backendLogRoute.getBackendLog);
|
||||
|
||||
// Backup routes
|
||||
asyncApiRoute(GET, "/api/database/backups", backupRoute.getExistingBackups);
|
||||
asyncApiRoute(PST, "/api/database/backup-database", backupRoute.backupDatabase);
|
||||
asyncRoute(GET, "/api/database/backup/download", [checkApiAuthOrElectron], backupRoute.downloadBackup);
|
||||
|
||||
apiRoute(GET, "/api/other/icon-usage", otherRoute.getIconUsage);
|
||||
apiRoute(PST, "/api/other/render-markdown", otherRoute.renderMarkdown);
|
||||
@@ -227,6 +239,15 @@ export function buildSharedApiRoutes({ route, asyncRoute, apiRoute, asyncApiRout
|
||||
apiRoute(PST, "/api/script/bundle/:noteId", scriptRoute.getBundle);
|
||||
apiRoute(GET, "/api/script/relation/:noteId/:relationName", scriptRoute.getRelationBundles);
|
||||
//#endregion
|
||||
|
||||
//#region Password and protected session
|
||||
asyncApiRoute(PST, "/api/password/change", passwordApiRoute.changePassword);
|
||||
apiRoute(PST, "/api/password/reset", passwordApiRoute.resetPassword);
|
||||
|
||||
asyncApiRoute(PST, "/api/login/protected", loginApiRoute.loginToProtectedSession);
|
||||
apiRoute(PST, "/api/login/protected/touch", loginApiRoute.touchProtectedSession);
|
||||
apiRoute(PST, "/api/logout/protected", loginApiRoute.logoutFromProtectedSession);
|
||||
//#endregion
|
||||
}
|
||||
|
||||
/** Handling common patterns. If entity is not caught, serialization to JSON will fail */
|
||||
|
||||
@@ -20,7 +20,7 @@ import type BRevision from "../becca/entities/brevision.js";
|
||||
import appInfo from "./app_info.js";
|
||||
import attributeService from "./attributes.js";
|
||||
import type { ApiParams } from "./backend_script_api_interface.js";
|
||||
import backupService from "./backup.js";
|
||||
import { getBackup } from "./backup.js";
|
||||
import cloningService from "./cloning.js";
|
||||
import config from "./config.js";
|
||||
import dateNoteService from "./date_notes.js";
|
||||
@@ -717,7 +717,7 @@ function BackendScriptApi(this: Api, currentNote: BNote, apiParams: ApiParams) {
|
||||
};
|
||||
|
||||
this.runOutsideOfSync = syncMutex.doExclusively;
|
||||
this.backupNow = backupService.backupNow;
|
||||
this.backupNow = (name: string) => getBackup().backupNow(name);
|
||||
this.duplicateSubtree = noteService.duplicateSubtree;
|
||||
|
||||
this.__private = {
|
||||
|
||||
@@ -1,6 +1,110 @@
|
||||
export default {
|
||||
async backupNow(name: string) {
|
||||
console.warn("Backup not yet available.");
|
||||
return "backup-" + name + "-" + new Date().toISOString() + ".zip";
|
||||
import type { DatabaseBackup, FilterOptionsByType, OptionNames } from "@triliumnext/commons";
|
||||
import { getContext } from "./context.js";
|
||||
import dateUtils from "./utils/date.js";
|
||||
|
||||
type BackupType = "daily" | "weekly" | "monthly";
|
||||
|
||||
export interface BackupOptionsService {
|
||||
getOption(name: OptionNames): string;
|
||||
getOptionBool(name: FilterOptionsByType<boolean>): boolean;
|
||||
setOption(name: OptionNames, value: string): void;
|
||||
}
|
||||
|
||||
/**
|
||||
* Abstract backup service class.
|
||||
* Platform-specific implementations must extend this class.
|
||||
*/
|
||||
export default abstract class BackupService {
|
||||
constructor(protected readonly options: BackupOptionsService) {}
|
||||
|
||||
/**
|
||||
* Create a backup with the given name.
|
||||
* Returns the backup file path/name.
|
||||
*/
|
||||
abstract backupNow(name: string): Promise<string>;
|
||||
|
||||
/**
|
||||
* Perform regular scheduled backups (daily, weekly, monthly).
|
||||
* Called periodically by the scheduler.
|
||||
* Default implementation runs inside an execution context.
|
||||
*/
|
||||
regularBackup(): void {
|
||||
getContext().init(() => {
|
||||
this.runScheduledBackups().catch(err => {
|
||||
console.error("[Backup] Error running scheduled backups:", err);
|
||||
});
|
||||
});
|
||||
}
|
||||
|
||||
/**
|
||||
* Get list of existing backups.
|
||||
*/
|
||||
abstract getExistingBackups(): Promise<DatabaseBackup[]>;
|
||||
|
||||
/**
|
||||
* Get the content of a backup file.
|
||||
* Returns null if the backup doesn't exist or access is denied.
|
||||
*/
|
||||
abstract getBackupContent(filePath: string): Promise<Uint8Array | null>;
|
||||
|
||||
/**
|
||||
* Run the scheduled backup checks for daily, weekly, and monthly backups.
|
||||
*/
|
||||
protected async runScheduledBackups(): Promise<void> {
|
||||
await this.periodBackup("lastDailyBackupDate", "daily", 24 * 3600);
|
||||
await this.periodBackup("lastWeeklyBackupDate", "weekly", 7 * 24 * 3600);
|
||||
await this.periodBackup("lastMonthlyBackupDate", "monthly", 30 * 24 * 3600);
|
||||
}
|
||||
|
||||
/**
|
||||
* Check if a specific backup type is enabled via options.
|
||||
*/
|
||||
protected isBackupEnabled(backupType: BackupType): boolean {
|
||||
const optionName: FilterOptionsByType<boolean> =
|
||||
backupType === "daily" ? "dailyBackupEnabled" :
|
||||
backupType === "weekly" ? "weeklyBackupEnabled" :
|
||||
"monthlyBackupEnabled";
|
||||
|
||||
return this.options.getOptionBool(optionName);
|
||||
}
|
||||
|
||||
/**
|
||||
* Check if a periodic backup is due and create it if so.
|
||||
*/
|
||||
protected async periodBackup(
|
||||
optionName: "lastDailyBackupDate" | "lastWeeklyBackupDate" | "lastMonthlyBackupDate",
|
||||
backupType: BackupType,
|
||||
periodInSeconds: number
|
||||
): Promise<void> {
|
||||
if (!this.isBackupEnabled(backupType)) {
|
||||
return;
|
||||
}
|
||||
|
||||
const now = new Date();
|
||||
const lastBackupDate = dateUtils.parseDateTime(this.options.getOption(optionName));
|
||||
|
||||
if (now.getTime() - lastBackupDate.getTime() > periodInSeconds * 1000) {
|
||||
await this.backupNow(backupType);
|
||||
this.options.setOption(optionName, dateUtils.utcNowDateTime());
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
let backupService: BackupService | undefined;
|
||||
|
||||
/**
|
||||
* Get the current backup service instance.
|
||||
*/
|
||||
export function getBackup(): BackupService {
|
||||
if (!backupService) {
|
||||
throw new Error("Backup service not initialized. Call initBackup() first.");
|
||||
}
|
||||
return backupService;
|
||||
}
|
||||
|
||||
/**
|
||||
* Initialize the backup service with a platform-specific provider.
|
||||
*/
|
||||
export function initBackup(provider: BackupService): void {
|
||||
backupService = provider;
|
||||
}
|
||||
|
||||
@@ -1,10 +1,18 @@
|
||||
interface Cipher {
|
||||
export interface Cipher {
|
||||
update(data: Uint8Array): Uint8Array;
|
||||
final(): Uint8Array;
|
||||
}
|
||||
|
||||
export interface CryptoProvider {
|
||||
export interface ScryptOptions {
|
||||
/** CPU/memory cost parameter (default: 16384) */
|
||||
N?: number;
|
||||
/** Block size (default: 8) */
|
||||
r?: number;
|
||||
/** Parallelization (default: 1) */
|
||||
p?: number;
|
||||
}
|
||||
|
||||
export interface CryptoProvider {
|
||||
createHash(algorithm: "md5" | "sha1" | "sha512", content: string | Uint8Array): Uint8Array;
|
||||
randomBytes(size: number): Uint8Array;
|
||||
randomString(length: number): string;
|
||||
@@ -12,6 +20,25 @@ export interface CryptoProvider {
|
||||
createDecipheriv(algorithm: "aes-128-cbc", key: Uint8Array, iv: Uint8Array): Cipher;
|
||||
hmac(secret: string | Uint8Array, value: string | Uint8Array): string;
|
||||
|
||||
/**
|
||||
* Derives a key from a password using the scrypt algorithm.
|
||||
* @param password - The password to derive from
|
||||
* @param salt - The salt to use
|
||||
* @param keyLength - The length of the derived key in bytes
|
||||
* @param options - Scrypt parameters (N, r, p)
|
||||
*/
|
||||
scrypt(
|
||||
password: Uint8Array | string,
|
||||
salt: Uint8Array | string,
|
||||
keyLength: number,
|
||||
options?: ScryptOptions
|
||||
): Promise<Uint8Array>;
|
||||
|
||||
/**
|
||||
* Constant-time comparison of two byte arrays to prevent timing attacks.
|
||||
* @returns true if arrays are equal, false otherwise
|
||||
*/
|
||||
constantTimeCompare(a: Uint8Array, b: Uint8Array): boolean;
|
||||
}
|
||||
|
||||
let crypto: CryptoProvider | null = null;
|
||||
|
||||
@@ -33,7 +33,7 @@ function encrypt(key: Uint8Array, plainText: Uint8Array | string) {
|
||||
throw new Error("No data key!");
|
||||
}
|
||||
|
||||
const plainTextUint8Array = ArrayBuffer.isView(plainText) ? plainText : Uint8Array.from(plainText);
|
||||
const plainTextUint8Array = ArrayBuffer.isView(plainText) ? plainText : encodeUtf8(plainText);
|
||||
|
||||
const iv = getCrypto().randomBytes(16);
|
||||
const cipher = getCrypto().createCipheriv("aes-128-cbc", pad(key), pad(iv));
|
||||
@@ -88,7 +88,7 @@ function decrypt(key: Uint8Array, cipherText: string | Uint8Array): Uint8Array |
|
||||
if (e.message?.includes("WRONG_FINAL_BLOCK_LENGTH") || e.message?.includes("wrong final block length")) {
|
||||
getLog().info("Caught WRONG_FINAL_BLOCK_LENGTH, returning cipherText instead");
|
||||
|
||||
return (ArrayBuffer.isView(cipherText) ? cipherText : Uint8Array.from(cipherText));
|
||||
return (ArrayBuffer.isView(cipherText) ? cipherText : encodeUtf8(cipherText));
|
||||
}
|
||||
throw e;
|
||||
}
|
||||
|
||||
120
packages/trilium-core/src/services/encryption/password.ts
Normal file
120
packages/trilium-core/src/services/encryption/password.ts
Normal file
@@ -0,0 +1,120 @@
|
||||
import type { ChangePasswordResponse } from "@triliumnext/commons";
|
||||
import options from "../options.js";
|
||||
import { getSql } from "../sql/index.js";
|
||||
import scryptService from "./scrypt.js";
|
||||
import passwordEncryptionService from "./password_encryption.js";
|
||||
import { encodeBase64 } from "../utils/binary.js";
|
||||
import { getCrypto } from "./crypto.js";
|
||||
|
||||
/**
|
||||
* Generates a random secure token encoded as base64.
|
||||
* @param bytes - Number of random bytes to generate
|
||||
*/
|
||||
function randomSecureToken(bytes: number): string {
|
||||
return encodeBase64(getCrypto().randomBytes(bytes));
|
||||
}
|
||||
|
||||
/**
|
||||
* Checks if a password has been set.
|
||||
*/
|
||||
export function isPasswordSet(): boolean {
|
||||
const sql = getSql();
|
||||
return !!sql.getValue("SELECT value FROM options WHERE name = 'passwordVerificationHash'");
|
||||
}
|
||||
|
||||
/**
|
||||
* Changes the password from currentPassword to newPassword.
|
||||
* Re-encrypts the data key with the new password.
|
||||
*/
|
||||
export async function changePassword(
|
||||
currentPassword: string,
|
||||
newPassword: string
|
||||
): Promise<ChangePasswordResponse> {
|
||||
if (!isPasswordSet()) {
|
||||
throw new Error("Password has not been set yet, so it cannot be changed. Use 'setPassword' instead.");
|
||||
}
|
||||
|
||||
if (!(await passwordEncryptionService.verifyPassword(currentPassword))) {
|
||||
return {
|
||||
success: false,
|
||||
message: "Given current password doesn't match hash"
|
||||
};
|
||||
}
|
||||
|
||||
const sql = getSql();
|
||||
const decryptedDataKey = await passwordEncryptionService.getDataKey(currentPassword);
|
||||
|
||||
sql.transactional(() => {
|
||||
options.setOption("passwordVerificationSalt", randomSecureToken(32));
|
||||
options.setOption("passwordDerivedKeySalt", randomSecureToken(32));
|
||||
});
|
||||
|
||||
const newPasswordVerificationKey = encodeBase64(
|
||||
await scryptService.getVerificationHash(newPassword)
|
||||
);
|
||||
|
||||
if (decryptedDataKey) {
|
||||
await passwordEncryptionService.setDataKey(newPassword, decryptedDataKey);
|
||||
}
|
||||
|
||||
options.setOption("passwordVerificationHash", newPasswordVerificationKey);
|
||||
|
||||
return {
|
||||
success: true
|
||||
};
|
||||
}
|
||||
|
||||
/**
|
||||
* Sets the initial password for a new installation.
|
||||
* Creates all necessary password-related options.
|
||||
*/
|
||||
export async function setPassword(password: string): Promise<ChangePasswordResponse> {
|
||||
if (isPasswordSet()) {
|
||||
throw new Error("Password is set already. Either change it or perform 'reset password' first.");
|
||||
}
|
||||
|
||||
options.createOption("passwordVerificationSalt", randomSecureToken(32), true);
|
||||
options.createOption("passwordDerivedKeySalt", randomSecureToken(32), true);
|
||||
|
||||
const passwordVerificationKey = encodeBase64(
|
||||
await scryptService.getVerificationHash(password)
|
||||
);
|
||||
options.createOption("passwordVerificationHash", passwordVerificationKey, true);
|
||||
|
||||
// passwordEncryptionService expects these options to already exist
|
||||
options.createOption("encryptedDataKey", "", true);
|
||||
|
||||
// Generate a random 16-byte data key and encrypt it with the password
|
||||
const randomDataKey = getCrypto().randomBytes(16);
|
||||
await passwordEncryptionService.setDataKey(password, randomDataKey);
|
||||
|
||||
return {
|
||||
success: true
|
||||
};
|
||||
}
|
||||
|
||||
/**
|
||||
* Resets the password by clearing all password-related options.
|
||||
* This should be used when the user has forgotten their password.
|
||||
* WARNING: This will make all protected notes inaccessible.
|
||||
*/
|
||||
export function resetPassword(): ChangePasswordResponse {
|
||||
const sql = getSql();
|
||||
sql.transactional(() => {
|
||||
options.setOption("passwordVerificationSalt", "");
|
||||
options.setOption("passwordDerivedKeySalt", "");
|
||||
options.setOption("encryptedDataKey", "");
|
||||
options.setOption("passwordVerificationHash", "");
|
||||
});
|
||||
|
||||
return {
|
||||
success: true
|
||||
};
|
||||
}
|
||||
|
||||
export default {
|
||||
isPasswordSet,
|
||||
changePassword,
|
||||
setPassword,
|
||||
resetPassword
|
||||
};
|
||||
@@ -0,0 +1,51 @@
|
||||
import options from "../options.js";
import { getCrypto } from "./crypto.js";
import data_encryption from "./data_encryption.js";
import scryptService from "./scrypt.js";
import { encodeBase64, encodeUtf8 } from "../utils/binary.js";
|
||||
|
||||
/**
|
||||
* Verifies a password against the stored hash.
|
||||
* Uses constant-time comparison to prevent timing attacks.
|
||||
*/
|
||||
export async function verifyPassword(password: string): Promise<boolean> {
|
||||
const givenPasswordHash = encodeBase64(await scryptService.getVerificationHash(password));
|
||||
const dbPasswordHash = options.getOptionOrNull("passwordVerificationHash");
|
||||
|
||||
if (!dbPasswordHash) {
|
||||
return false;
|
||||
}
|
||||
|
||||
// Use constant-time comparison to prevent timing attacks
|
||||
const givenBytes = new TextEncoder().encode(givenPasswordHash);
|
||||
const dbBytes = new TextEncoder().encode(dbPasswordHash);
|
||||
|
||||
return getCrypto().constantTimeCompare(givenBytes, dbBytes);
|
||||
}
|
||||
|
||||
/**
|
||||
* Encrypts and stores the data key using the password-derived key.
|
||||
*/
|
||||
export async function setDataKey(
|
||||
password: string,
|
||||
plainTextDataKey: string | Uint8Array
|
||||
): Promise<void> {
|
||||
const passwordDerivedKey = await scryptService.getPasswordDerivedKey(password);
|
||||
const newEncryptedDataKey = data_encryption.encrypt(passwordDerivedKey, plainTextDataKey);
|
||||
options.setOption("encryptedDataKey", newEncryptedDataKey);
|
||||
}
|
||||
|
||||
/**
|
||||
* Decrypts and returns the data key using the password-derived key.
|
||||
*/
|
||||
export async function getDataKey(password: string): Promise<Uint8Array | false | null> {
|
||||
const passwordDerivedKey = await scryptService.getPasswordDerivedKey(password);
|
||||
const encryptedDataKey = options.getOption("encryptedDataKey");
|
||||
return data_encryption.decrypt(passwordDerivedKey, encryptedDataKey);
|
||||
}
|
||||
|
||||
export default {
|
||||
verifyPassword,
|
||||
getDataKey,
|
||||
setDataKey
|
||||
};
|
||||
41
packages/trilium-core/src/services/encryption/scrypt.ts
Normal file
41
packages/trilium-core/src/services/encryption/scrypt.ts
Normal file
@@ -0,0 +1,41 @@
|
||||
import options from "../options.js";
|
||||
import { getCrypto } from "./crypto.js";
|
||||
|
||||
const SCRYPT_OPTIONS = { N: 16384, r: 8, p: 1 };
|
||||
|
||||
/**
|
||||
* Gets the password verification hash using scrypt.
|
||||
* Uses the passwordVerificationSalt option as salt.
|
||||
*/
|
||||
export async function getVerificationHash(password: string): Promise<Uint8Array> {
|
||||
const salt = options.getOption("passwordVerificationSalt");
|
||||
return getScryptHash(password, salt);
|
||||
}
|
||||
|
||||
/**
|
||||
* Gets the password-derived encryption key using scrypt.
|
||||
* Uses the passwordDerivedKeySalt option as salt.
|
||||
*/
|
||||
export async function getPasswordDerivedKey(password: string): Promise<Uint8Array> {
|
||||
const salt = options.getOption("passwordDerivedKeySalt");
|
||||
return getScryptHash(password, salt);
|
||||
}
|
||||
|
||||
/**
|
||||
* Computes a scrypt hash with standard parameters.
|
||||
* @param password - The password to hash
|
||||
* @param salt - The salt to use
|
||||
* @returns 32-byte derived key
|
||||
*/
|
||||
export async function getScryptHash(
|
||||
password: string,
|
||||
salt: string
|
||||
): Promise<Uint8Array> {
|
||||
return getCrypto().scrypt(password, salt, 32, SCRYPT_OPTIONS);
|
||||
}
|
||||
|
||||
export default {
|
||||
getVerificationHash,
|
||||
getPasswordDerivedKey,
|
||||
getScryptHash
|
||||
};
|
||||
212
packages/trilium-core/src/services/file_based_log.ts
Normal file
212
packages/trilium-core/src/services/file_based_log.ts
Normal file
@@ -0,0 +1,212 @@
|
||||
import LogService from "./log.js";
|
||||
import { getContext } from "./context.js";
|
||||
|
||||
const SECOND = 1000;
|
||||
const MINUTE = 60 * SECOND;
|
||||
const HOUR = 60 * MINUTE;
|
||||
const DAY = 24 * HOUR;
|
||||
|
||||
const MINIMUM_FILES_TO_KEEP = 7;
|
||||
const DEFAULT_RETENTION_DAYS = 7;
|
||||
|
||||
export interface LogFileInfo {
|
||||
name: string;
|
||||
mtime: Date;
|
||||
}
|
||||
|
||||
/**
|
||||
* Abstract base class for file-based logging implementations.
|
||||
* Provides shared logic for log rotation, cleanup, and formatting.
|
||||
* Platform-specific implementations (Node.js fs, OPFS) extend this class.
|
||||
*/
|
||||
export default abstract class FileBasedLogService extends LogService {
|
||||
protected todaysMidnight!: Date;
|
||||
private isInitialized = false;
|
||||
|
||||
constructor() {
|
||||
super();
|
||||
}
|
||||
|
||||
/**
|
||||
* Initialize the log service. Must be called before logging.
|
||||
* Separated from constructor to allow async initialization in some platforms.
|
||||
* For sync platforms (Node.js), call the methods directly in the constructor.
|
||||
*/
|
||||
async initialize(): Promise<void> {
|
||||
if (this.isInitialized) return;
|
||||
await this.ensureLogDirectory();
|
||||
this.todaysMidnight = this.getTodaysMidnight();
|
||||
await this.openLogFile(this.getLogFileName());
|
||||
this.isInitialized = true;
|
||||
}
|
||||
|
||||
// ==================== Abstract Methods ====================
|
||||
|
||||
/** Line ending character(s) for this platform */
|
||||
protected abstract get eol(): string;
|
||||
|
||||
/** Ensure the log directory exists */
|
||||
protected abstract ensureLogDirectory(): Promise<void> | void;
|
||||
|
||||
/** Open a log file for appending */
|
||||
protected abstract openLogFile(fileName: string): Promise<void> | void;
|
||||
|
||||
/** Close the current log file */
|
||||
protected abstract closeLogFile(): Promise<void> | void;
|
||||
|
||||
/** Write an entry to the current log file */
|
||||
protected abstract writeEntry(entry: string): void;
|
||||
|
||||
/** Read the contents of a log file */
|
||||
protected abstract readLogFile(fileName: string): string | null;
|
||||
|
||||
/** List all log files with their modification times */
|
||||
protected abstract listLogFiles(): Promise<LogFileInfo[]>;
|
||||
|
||||
/** Delete a log file by name */
|
||||
protected abstract deleteLogFile(fileName: string): Promise<void>;
|
||||
|
||||
/** Get the configured retention days (-1 = keep all, 0 = use default) */
|
||||
protected abstract getRetentionDays(): number;
|
||||
|
||||
// ==================== Shared Implementation ====================
|
||||
|
||||
protected getTodaysMidnight(): Date {
|
||||
const now = new Date();
|
||||
return new Date(now.getFullYear(), now.getMonth(), now.getDate());
|
||||
}
|
||||
|
||||
protected getLogFileName(): string {
|
||||
return `trilium-${this.formatDate()}.log`;
|
||||
}
|
||||
|
||||
protected async rotateLogFile(): Promise<void> {
|
||||
await this.closeLogFile();
|
||||
this.todaysMidnight = this.getTodaysMidnight();
|
||||
await this.openLogFile(this.getLogFileName());
|
||||
|
||||
// Trigger cleanup asynchronously
|
||||
this.cleanupOldLogFiles().catch(() => {
|
||||
// Ignore cleanup errors
|
||||
});
|
||||
}
|
||||
|
||||
protected checkDateAndRotate(millisSinceMidnight: number): number {
|
||||
if (millisSinceMidnight >= DAY) {
|
||||
// Trigger rotation asynchronously to avoid blocking
|
||||
this.rotateLogFile().catch(() => {});
|
||||
return millisSinceMidnight - DAY;
|
||||
}
|
||||
return millisSinceMidnight;
|
||||
}
|
||||
|
||||
protected async cleanupOldLogFiles(): Promise<void> {
|
||||
try {
|
||||
let retentionDays = this.getRetentionDays();
|
||||
|
||||
if (retentionDays <= -1) {
|
||||
this.info("Log cleanup: keeping all log files, as specified by configuration.");
|
||||
return;
|
||||
}
|
||||
|
||||
if (retentionDays === 0) {
|
||||
retentionDays = DEFAULT_RETENTION_DAYS;
|
||||
}
|
||||
|
||||
const cutoffDate = new Date();
|
||||
cutoffDate.setDate(cutoffDate.getDate() - retentionDays);
|
||||
|
||||
const logFiles = await this.listLogFiles();
|
||||
|
||||
// Sort by modification time (oldest first)
|
||||
logFiles.sort((a, b) => a.mtime.getTime() - b.mtime.getTime());
|
||||
|
||||
if (logFiles.length <= MINIMUM_FILES_TO_KEEP) {
|
||||
return;
|
||||
}
|
||||
|
||||
let deletedCount = 0;
|
||||
for (let i = 0; i < logFiles.length - MINIMUM_FILES_TO_KEEP; i++) {
|
||||
const file = logFiles[i];
|
||||
if (file.mtime < cutoffDate) {
|
||||
try {
|
||||
await this.deleteLogFile(file.name);
|
||||
deletedCount++;
|
||||
} catch {
|
||||
// Log deletion failed, but continue with others
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
if (deletedCount > 0) {
|
||||
this.info(`Log cleanup: deleted ${deletedCount} old log files`);
|
||||
}
|
||||
} catch {
|
||||
// Cleanup failed, but don't crash
|
||||
}
|
||||
}
|
||||
|
||||
protected getScriptContext(): string | undefined {
|
||||
try {
|
||||
return getContext().get("bundleNoteId");
|
||||
} catch {
|
||||
// Context not initialized yet
|
||||
return undefined;
|
||||
}
|
||||
}
|
||||
|
||||
override log(message: string | Error): void {
|
||||
const bundleNoteId = this.getScriptContext();
|
||||
let str = String(message);
|
||||
|
||||
if (bundleNoteId) {
|
||||
str = `[Script ${bundleNoteId}] ${str}`;
|
||||
}
|
||||
|
||||
let millisSinceMidnight = Date.now() - this.todaysMidnight.getTime();
|
||||
millisSinceMidnight = this.checkDateAndRotate(millisSinceMidnight);
|
||||
|
||||
const entry = `${this.formatTime(millisSinceMidnight)} ${str}${this.eol}`;
|
||||
this.writeEntry(entry);
|
||||
console.log(str);
|
||||
}
|
||||
|
||||
override info(message: string | Error): void {
|
||||
this.log(message);
|
||||
}
|
||||
|
||||
override error(message: string | Error | unknown): void {
|
||||
const str = message instanceof Error
|
||||
? message.stack || message.message
|
||||
: String(message);
|
||||
this.log(`ERROR: ${str}`);
|
||||
}
|
||||
|
||||
override getLogContents(): string | null {
|
||||
return this.readLogFile(this.getLogFileName());
|
||||
}
|
||||
|
||||
// ==================== Formatting Helpers ====================
|
||||
|
||||
protected pad(num: number): string {
|
||||
num = Math.floor(num);
|
||||
return num < 10 ? `0${num}` : num.toString();
|
||||
}
|
||||
|
||||
protected padMilli(num: number): string {
|
||||
if (num < 10) {
|
||||
return `00${num}`;
|
||||
} else if (num < 100) {
|
||||
return `0${num}`;
|
||||
}
|
||||
return num.toString();
|
||||
}
|
||||
|
||||
protected formatTime(millisSinceMidnight: number): string {
|
||||
return `${this.pad(millisSinceMidnight / HOUR)}:${this.pad((millisSinceMidnight % HOUR) / MINUTE)}:${this.pad((millisSinceMidnight % MINUTE) / SECOND)}.${this.padMilli(millisSinceMidnight % SECOND)}`;
|
||||
}
|
||||
|
||||
protected formatDate(): string {
|
||||
return `${this.pad(this.todaysMidnight.getFullYear())}-${this.pad(this.todaysMidnight.getMonth() + 1)}-${this.pad(this.todaysMidnight.getDate())}`;
|
||||
}
|
||||
}
|
||||
@@ -1,22 +1,163 @@
|
||||
export default {
|
||||
saveImageToAttachment(noteId: string, imageBuffer: Uint8Array, title: string, b1?: boolean, b2?: boolean) {
|
||||
console.warn("Image save ignored", noteId, title);
|
||||
/**
|
||||
* Image service for saving and updating images.
|
||||
* Uses ImageProvider for platform-specific processing (compression, format detection).
|
||||
*/
|
||||
|
||||
return {
|
||||
attachmentId: null,
|
||||
title: ""
|
||||
};
|
||||
},
|
||||
import sanitizeFilename from "sanitize-filename";
|
||||
|
||||
updateImage(noteId: string, imageBuffer: Uint8Array, title: string) {
|
||||
console.warn("Image update ignored", noteId, title);
|
||||
},
|
||||
import becca from "../becca/becca.js";
|
||||
import { getContext } from "./context.js";
|
||||
import { getLog } from "./log.js";
|
||||
import { getImageProvider } from "./image_provider.js";
|
||||
import noteService from "./notes.js";
|
||||
import protectedSessionService from "./protected_session.js";
|
||||
import { getSql } from "./sql/index.js";
|
||||
import { sanitizeHtml } from "./sanitizer.js";
|
||||
|
||||
saveImage(noteId: string, imageBuffer: Uint8Array, title: string, b1?: boolean, b2?: boolean) {
|
||||
console.warn("Image save ignored", noteId, title);
|
||||
|
||||
return {
|
||||
note: null
|
||||
};
|
||||
}
|
||||
function getImageMimeFromExtension(ext: string): string {
|
||||
ext = ext.toLowerCase();
|
||||
return `image/${ext === "svg" ? "svg+xml" : ext}`;
|
||||
}
|
||||
|
||||
function updateImage(noteId: string, uploadBuffer: Uint8Array, originalName: string): void {
|
||||
getLog().info(`Updating image ${noteId}: ${originalName}`);
|
||||
|
||||
originalName = sanitizeHtml(originalName);
|
||||
|
||||
const note = becca.getNote(noteId);
|
||||
if (!note) {
|
||||
throw new Error("Unable to find note.");
|
||||
}
|
||||
|
||||
note.saveRevision();
|
||||
note.setLabel("originalFileName", originalName);
|
||||
|
||||
// Process image asynchronously
|
||||
getImageProvider().processImage(uploadBuffer, originalName, true).then(({ buffer, format }) => {
|
||||
getContext().init(() => {
|
||||
getSql().transactional(() => {
|
||||
note.mime = getImageMimeFromExtension(format.ext);
|
||||
note.save();
|
||||
note.setContent(buffer);
|
||||
});
|
||||
});
|
||||
});
|
||||
}
|
||||
|
||||
function saveImage(
|
||||
parentNoteId: string,
|
||||
uploadBuffer: Uint8Array,
|
||||
originalName: string,
|
||||
shrinkImageSwitch: boolean,
|
||||
trimFilename = false
|
||||
): { fileName: string; note: ReturnType<typeof noteService.createNewNote>["note"]; noteId: string; url: string } {
|
||||
getLog().info(`Saving image ${originalName} into parent ${parentNoteId}`);
|
||||
|
||||
if (trimFilename && originalName.length > 40) {
|
||||
originalName = "image";
|
||||
}
|
||||
|
||||
const fileName = sanitizeFilename(originalName);
|
||||
const parentNote = becca.getNote(parentNoteId);
|
||||
if (!parentNote) {
|
||||
throw new Error("Unable to find parent note.");
|
||||
}
|
||||
|
||||
const { note } = noteService.createNewNote({
|
||||
parentNoteId,
|
||||
title: fileName,
|
||||
type: "image",
|
||||
mime: "unknown",
|
||||
content: "",
|
||||
isProtected: parentNote.isProtected && protectedSessionService.isProtectedSessionAvailable()
|
||||
});
|
||||
|
||||
note.addLabel("originalFileName", originalName);
|
||||
|
||||
// Process image asynchronously
|
||||
getImageProvider().processImage(uploadBuffer, originalName, shrinkImageSwitch).then(({ buffer, format }) => {
|
||||
getContext().init(() => {
|
||||
getSql().transactional(() => {
|
||||
note.mime = getImageMimeFromExtension(format.ext);
|
||||
|
||||
if (!originalName.includes(".")) {
|
||||
originalName += `.${format.ext}`;
|
||||
note.setLabel("originalFileName", originalName);
|
||||
note.title = sanitizeFilename(originalName);
|
||||
}
|
||||
|
||||
note.setContent(buffer, { forceSave: true });
|
||||
});
|
||||
});
|
||||
});
|
||||
|
||||
return {
|
||||
fileName,
|
||||
note,
|
||||
noteId: note.noteId,
|
||||
url: `api/images/${note.noteId}/${encodeURIComponent(fileName)}`
|
||||
};
|
||||
}
|
||||
|
||||
function saveImageToAttachment(
|
||||
noteId: string,
|
||||
uploadBuffer: Uint8Array,
|
||||
originalName: string,
|
||||
shrinkImageSwitch?: boolean,
|
||||
trimFilename = false
|
||||
): { attachmentId: string | undefined; title: string } {
|
||||
getLog().info(`Saving image '${originalName}' as attachment into note '${noteId}'`);
|
||||
|
||||
if (trimFilename && originalName.length > 40) {
|
||||
originalName = "image";
|
||||
}
|
||||
|
||||
const fileName = sanitizeFilename(originalName);
|
||||
const note = becca.getNoteOrThrow(noteId);
|
||||
|
||||
let attachment = note.saveAttachment({
|
||||
role: "image",
|
||||
mime: "unknown",
|
||||
title: fileName
|
||||
});
|
||||
|
||||
// Schedule post-processing to mark unused attachments
|
||||
setTimeout(() => {
|
||||
getContext().init(() => {
|
||||
getSql().transactional(() => {
|
||||
const note = becca.getNoteOrThrow(noteId);
|
||||
noteService.asyncPostProcessContent(note, note.getContent());
|
||||
});
|
||||
});
|
||||
}, 5000);
|
||||
|
||||
// Process image asynchronously
|
||||
const attachmentId = attachment.attachmentId;
|
||||
getImageProvider().processImage(uploadBuffer, originalName, !!shrinkImageSwitch).then(({ buffer, format }) => {
|
||||
getContext().init(() => {
|
||||
getSql().transactional(() => {
|
||||
if (!attachmentId) {
|
||||
throw new Error("Missing attachment ID.");
|
||||
}
|
||||
attachment = becca.getAttachmentOrThrow(attachmentId);
|
||||
|
||||
attachment.mime = getImageMimeFromExtension(format.ext);
|
||||
|
||||
if (!originalName.includes(".")) {
|
||||
originalName += `.${format.ext}`;
|
||||
attachment.title = sanitizeFilename(originalName);
|
||||
}
|
||||
|
||||
attachment.setContent(buffer, { forceSave: true });
|
||||
});
|
||||
});
|
||||
});
|
||||
|
||||
return attachment;
|
||||
}
|
||||
|
||||
export default {
|
||||
saveImage,
|
||||
saveImageToAttachment,
|
||||
updateImage
|
||||
};
|
||||
|
||||
44
packages/trilium-core/src/services/image_provider.ts
Normal file
44
packages/trilium-core/src/services/image_provider.ts
Normal file
@@ -0,0 +1,44 @@
|
||||
/**
|
||||
* Interface for platform-specific image processing.
|
||||
* Server uses JIMP with full compression support.
|
||||
* Standalone uses simple format detection without compression.
|
||||
*/
|
||||
|
||||
export interface ImageFormat {
|
||||
ext: string;
|
||||
mime: string;
|
||||
}
|
||||
|
||||
export interface ProcessedImage {
|
||||
buffer: Uint8Array;
|
||||
format: ImageFormat;
|
||||
}
|
||||
|
||||
export interface ImageProvider {
|
||||
/**
|
||||
* Detect image format from buffer.
|
||||
*/
|
||||
getImageType(buffer: Uint8Array): ImageFormat | null;
|
||||
|
||||
/**
|
||||
* Process image - may resize/compress depending on implementation.
|
||||
* @param buffer - Raw image data
|
||||
* @param originalName - Original filename for logging
|
||||
* @param shrink - Whether to attempt shrinking the image
|
||||
* @returns Processed image buffer and detected format
|
||||
*/
|
||||
processImage(buffer: Uint8Array, originalName: string, shrink: boolean): Promise<ProcessedImage>;
|
||||
}
|
||||
|
||||
let imageProvider: ImageProvider | null = null;
|
||||
|
||||
export function initImageProvider(provider: ImageProvider) {
|
||||
imageProvider = provider;
|
||||
}
|
||||
|
||||
export function getImageProvider(): ImageProvider {
|
||||
if (!imageProvider) {
|
||||
throw new Error("Image provider not initialized");
|
||||
}
|
||||
return imageProvider;
|
||||
}
|
||||
@@ -38,12 +38,21 @@ export default class LogService {
|
||||
console.log(`\n${top}\n${mid}\n${bot}\n`);
|
||||
}
|
||||
|
||||
/**
|
||||
* Returns the current log contents as a string.
|
||||
* Override in platform-specific implementations to return actual log data.
|
||||
* @returns The log contents, or null if not available
|
||||
*/
|
||||
getLogContents(): string | null {
|
||||
return null;
|
||||
}
|
||||
|
||||
}
|
||||
|
||||
let log: LogService;
|
||||
|
||||
export function initLog() {
|
||||
log = new LogService();
|
||||
export function initLog(provider?: LogService) {
|
||||
log = provider ?? new LogService();
|
||||
}
|
||||
|
||||
export function getLog() {
|
||||
|
||||
@@ -1,4 +1,4 @@
|
||||
import backupService from "./backup.js";
|
||||
import { getBackup } from "./backup.js";
|
||||
import { getSql } from "./sql/index.js";
|
||||
import { getLog } from "./log.js";
|
||||
import { getPlatform } from "./platform.js";
|
||||
@@ -26,7 +26,7 @@ async function migrate() {
|
||||
|
||||
// backup before attempting migration
|
||||
if (!getPlatform().getEnv("TRILIUM_INTEGRATION_TEST")) {
|
||||
await backupService.backupNow(
|
||||
await getBackup().backupNow(
|
||||
// creating a special backup for version 0.60.4, the changes in 0.61 are major.
|
||||
currentDbVersion === 214 ? `before-migration-v060` : "before-migration"
|
||||
);
|
||||
|
||||
@@ -636,41 +636,43 @@ function downloadImages(noteId: string, content: string) {
|
||||
// are downloaded and the IMG references are not updated. For this occasion we have this code
|
||||
// which upon the download of all the images will update the note if the links have not been fixed before
|
||||
|
||||
getSql().transactional(() => {
|
||||
const imageNotes = becca.getNotes(Object.values(imageUrlToAttachmentIdMapping), true);
|
||||
const log = getLog();
|
||||
cls.getContext().init(() => {
|
||||
getSql().transactional(() => {
|
||||
const imageNotes = becca.getNotes(Object.values(imageUrlToAttachmentIdMapping), true);
|
||||
const log = getLog();
|
||||
|
||||
const origNote = becca.getNote(noteId);
|
||||
const origNote = becca.getNote(noteId);
|
||||
|
||||
if (!origNote) {
|
||||
log.error(`Cannot find note '${noteId}' to replace image link.`);
|
||||
return;
|
||||
}
|
||||
|
||||
const origContent = origNote.getContent();
|
||||
let updatedContent = origContent;
|
||||
|
||||
if (typeof updatedContent !== "string") {
|
||||
log.error(`Note '${noteId}' has a non-string content, cannot replace image link.`);
|
||||
return;
|
||||
}
|
||||
|
||||
for (const url in imageUrlToAttachmentIdMapping) {
|
||||
const imageNote = imageNotes.find((note) => note.noteId === imageUrlToAttachmentIdMapping[url]);
|
||||
|
||||
if (imageNote) {
|
||||
updatedContent = replaceUrl(updatedContent, url, imageNote);
|
||||
if (!origNote) {
|
||||
log.error(`Cannot find note '${noteId}' to replace image link.`);
|
||||
return;
|
||||
}
|
||||
}
|
||||
|
||||
// update only if the links have not been already fixed.
|
||||
if (updatedContent !== origContent) {
|
||||
origNote.setContent(updatedContent);
|
||||
const origContent = origNote.getContent();
|
||||
let updatedContent = origContent;
|
||||
|
||||
asyncPostProcessContent(origNote, updatedContent);
|
||||
if (typeof updatedContent !== "string") {
|
||||
log.error(`Note '${noteId}' has a non-string content, cannot replace image link.`);
|
||||
return;
|
||||
}
|
||||
|
||||
console.log(`Fixed the image links for note '${noteId}' to the offline saved.`);
|
||||
}
|
||||
for (const url in imageUrlToAttachmentIdMapping) {
|
||||
const imageNote = imageNotes.find((note) => note.noteId === imageUrlToAttachmentIdMapping[url]);
|
||||
|
||||
if (imageNote) {
|
||||
updatedContent = replaceUrl(updatedContent, url, imageNote);
|
||||
}
|
||||
}
|
||||
|
||||
// update only if the links have not been already fixed.
|
||||
if (updatedContent !== origContent) {
|
||||
origNote.setContent(updatedContent);
|
||||
|
||||
asyncPostProcessContent(origNote, updatedContent);
|
||||
|
||||
console.log(`Fixed the image links for note '${noteId}' to the offline saved.`);
|
||||
}
|
||||
});
|
||||
});
|
||||
}, 5000);
|
||||
});
|
||||
|
||||
@@ -373,6 +373,18 @@ export class SqlService {
|
||||
await this.dbConnection.backup(targetFilePath);
|
||||
}
|
||||
|
||||
/**
|
||||
* Serialize the database to a byte array.
|
||||
* Only available with browser-based providers.
|
||||
* @throws Error if the provider doesn't support serialization
|
||||
*/
|
||||
serialize(): Uint8Array {
|
||||
if (!this.dbConnection.serialize) {
|
||||
throw new Error("Database provider does not support serialization");
|
||||
}
|
||||
return this.dbConnection.serialize();
|
||||
}
|
||||
|
||||
disableSlowQueryLogging<T>(cb: () => T) {
|
||||
const orig = isSlowQueryLoggingDisabled();
|
||||
|
||||
|
||||
@@ -23,6 +23,11 @@ export interface DatabaseProvider {
|
||||
loadFromMemory(): void;
|
||||
loadFromBuffer(buffer: Uint8Array): void;
|
||||
backup(destinationFile: string): void;
|
||||
/**
|
||||
* Serialize the database to a byte array.
|
||||
* Optional - only implemented by browser-based providers.
|
||||
*/
|
||||
serialize?(): Uint8Array;
|
||||
prepare(query: string): Statement;
|
||||
transaction<T>(func: (statement: Statement) => T): Transaction;
|
||||
get inTransaction(): boolean;
|
||||
|
||||
@@ -1,6 +1,7 @@
|
||||
import { deferred, OptionRow } from "@triliumnext/commons";
|
||||
import { getSql } from "./sql";
|
||||
import { getLog } from "./log";
|
||||
import { getBackup } from "./backup";
|
||||
import optionService from "./options";
|
||||
import eventService from "./events";
|
||||
import { getContext } from "./context";
|
||||
@@ -11,6 +12,7 @@ import hidden_subtree from "./hidden_subtree";
|
||||
import TaskContext from "./task_context";
|
||||
import BOption from "../becca/entities/boption";
|
||||
import migrationService from "./migration";
|
||||
import passwordService from "./encryption/password";
|
||||
|
||||
export const dbReady = deferred<void>();
|
||||
|
||||
@@ -105,22 +107,16 @@ function initializeDb() {
|
||||
getContext().init(initDbConnection);
|
||||
|
||||
dbReady.then(() => {
|
||||
// TODO: Re-enable backup.
|
||||
// if (config.General && config.General.noBackup === true) {
|
||||
// log.info("Disabling scheduled backups.");
|
||||
// Run regular backups every 4 hours
|
||||
setInterval(() => getBackup().regularBackup(), 4 * 60 * 60 * 1000);
|
||||
|
||||
// return;
|
||||
// }
|
||||
// Kickoff first backup soon after start up
|
||||
setTimeout(() => getBackup().regularBackup(), 5 * 60 * 1000);
|
||||
|
||||
// setInterval(() => backup.regularBackup(), 4 * 60 * 60 * 1000);
|
||||
// Optimize is usually inexpensive no-op, so running it semi-frequently is not a big deal
|
||||
setTimeout(() => optimize(), 60 * 60 * 1000);
|
||||
|
||||
// // kickoff first backup soon after start up
|
||||
// setTimeout(() => backup.regularBackup(), 5 * 60 * 1000);
|
||||
|
||||
// // optimize is usually inexpensive no-op, so running it semi-frequently is not a big deal
|
||||
// setTimeout(() => optimize(), 60 * 60 * 1000);
|
||||
|
||||
// setInterval(() => optimize(), 10 * 60 * 60 * 1000);
|
||||
setInterval(() => optimize(), 10 * 60 * 60 * 1000);
|
||||
});
|
||||
}
|
||||
|
||||
@@ -171,7 +167,7 @@ async function createInitialDatabase(skipDemoDb?: boolean) {
|
||||
initDocumentOptions();
|
||||
initNotSyncedOptions(true, {});
|
||||
initStartupOptions();
|
||||
// password.resetPassword();
|
||||
passwordService.resetPassword();
|
||||
});
|
||||
|
||||
// Check hidden subtree.
|
||||
|
||||
54
packages/trilium-core/src/services/sync_options.spec.ts
Normal file
54
packages/trilium-core/src/services/sync_options.spec.ts
Normal file
@@ -0,0 +1,54 @@
|
||||
import { afterEach, beforeEach, describe, expect, it, vi } from "vitest";
|
||||
|
||||
describe("syncOptions.getSyncTimeout", () => {
|
||||
let getSyncTimeout: () => number;
|
||||
let getOptionMock: ReturnType<typeof vi.fn>;
|
||||
let mockSyncConfig: Record<string, string | undefined>;
|
||||
|
||||
beforeEach(async () => {
|
||||
// Reset the module cache so the dynamic import below gets a fresh
|
||||
// instance of sync_options.ts with the mocked dependencies rather than
|
||||
// the cached copy loaded by the test runner's setupFiles.
|
||||
vi.resetModules();
|
||||
mockSyncConfig = {};
|
||||
getOptionMock = vi.fn();
|
||||
|
||||
vi.doMock("./config.js", () => ({ default: { Sync: mockSyncConfig } }));
|
||||
vi.doMock("./options.js", () => ({ default: { getOption: getOptionMock } }));
|
||||
|
||||
const mod = await import("./sync_options.js");
|
||||
getSyncTimeout = mod.default.getSyncTimeout;
|
||||
});
|
||||
|
||||
afterEach(() => {
|
||||
vi.clearAllMocks();
|
||||
});
|
||||
|
||||
it("converts database value from seconds to milliseconds", () => {
|
||||
// TimeSelector stores value in seconds (displayed value × scale)
|
||||
// Scale is UI-only, not used in backend calculation
|
||||
getOptionMock.mockReturnValue("120"); // 120 seconds = 2 minutes
|
||||
expect(getSyncTimeout()).toBe(120000);
|
||||
|
||||
getOptionMock.mockReturnValue("30"); // 30 seconds
|
||||
expect(getSyncTimeout()).toBe(30000);
|
||||
|
||||
getOptionMock.mockReturnValue("3600"); // 3600 seconds = 1 hour
|
||||
expect(getSyncTimeout()).toBe(3600000);
|
||||
});
|
||||
|
||||
it("treats config override as raw milliseconds for backward compatibility", () => {
|
||||
mockSyncConfig.syncServerTimeout = "60000"; // 60 seconds in ms
|
||||
// Config value takes precedence, db value is ignored
|
||||
getOptionMock.mockReturnValue("9999");
|
||||
expect(getSyncTimeout()).toBe(60000);
|
||||
});
|
||||
|
||||
it("uses safe defaults for invalid values", () => {
|
||||
getOptionMock.mockReturnValue("");
|
||||
expect(getSyncTimeout()).toBe(120000); // default 120 seconds
|
||||
|
||||
mockSyncConfig.syncServerTimeout = "invalid";
|
||||
expect(getSyncTimeout()).toBe(120000); // fallback for invalid config
|
||||
});
|
||||
});
|
||||
@@ -29,6 +29,14 @@ export default {
|
||||
// and we need to override it with config from config.ini
|
||||
return !!syncServerHost && syncServerHost !== "disabled";
|
||||
},
|
||||
getSyncTimeout: () => parseInt(get("syncServerTimeout")) || 120000,
|
||||
getSyncTimeout: () => {
|
||||
// Config.ini values are in raw milliseconds (backward compat with old configs)
|
||||
if (config["Sync"] && config["Sync"]["syncServerTimeout"]) {
|
||||
return parseInt(config["Sync"]["syncServerTimeout"]) || 120000;
|
||||
}
|
||||
// Database values are stored in seconds — convert to milliseconds
|
||||
const seconds = parseInt(optionService.getOption("syncServerTimeout"));
|
||||
return (isNaN(seconds) || seconds <= 0) ? 120000 : seconds * 1000;
|
||||
},
|
||||
getSyncProxy: () => get("syncProxy")
|
||||
};
|
||||
|
||||
236
pnpm-lock.yaml
generated
236
pnpm-lock.yaml
generated
@@ -7,7 +7,7 @@ settings:
|
||||
overrides:
|
||||
'@codemirror/language': 6.12.3
|
||||
'@lezer/highlight': 1.2.3
|
||||
'@lezer/common': 1.5.1
|
||||
'@lezer/common': 1.5.2
|
||||
mermaid: 11.14.0
|
||||
preact: 10.29.1
|
||||
roughjs: 4.6.6
|
||||
@@ -61,7 +61,7 @@ overrides:
|
||||
handlebars@<4.7.9: '>=4.7.9'
|
||||
qs@<6.14.2: '>=6.14.2'
|
||||
minimatch@<3.1.4: ^3.1.4
|
||||
minimatch@3>brace-expansion: ^5.0.0
|
||||
minimatch@3>brace-expansion: ^1.1.13
|
||||
serialize-javascript@<7.0.5: '>=7.0.5'
|
||||
webpack@<5.104.1: '>=5.104.1'
|
||||
file-type@>=13.0.0 <21.3.1: '>=21.3.1'
|
||||
@@ -194,8 +194,8 @@ importers:
|
||||
version: link:../server
|
||||
devDependencies:
|
||||
'@redocly/cli':
|
||||
specifier: 2.25.4
|
||||
version: 2.25.4(@opentelemetry/api@1.9.0)(bufferutil@4.0.9)(core-js@3.46.0)(encoding@0.1.13)(utf-8-validate@6.0.5)
|
||||
specifier: 2.26.0
|
||||
version: 2.26.0(@opentelemetry/api@1.9.0)(bufferutil@4.0.9)(core-js@3.46.0)(encoding@0.1.13)(utf-8-validate@6.0.5)
|
||||
archiver:
|
||||
specifier: 7.0.1
|
||||
version: 7.0.1
|
||||
@@ -293,8 +293,8 @@ importers:
|
||||
specifier: 0.20.0
|
||||
version: 0.20.0(@types/react-dom@19.1.6(@types/react@19.1.7))(@types/react@19.1.7)(react-dom@19.2.4(react@19.2.4))(react@19.2.4)(rxjs@7.8.2)
|
||||
'@zumer/snapdom':
|
||||
specifier: 2.7.0
|
||||
version: 2.7.0
|
||||
specifier: 2.8.0
|
||||
version: 2.8.0
|
||||
autocomplete.js:
|
||||
specifier: 0.38.1
|
||||
version: 0.38.1
|
||||
@@ -323,8 +323,8 @@ importers:
|
||||
specifier: 1.51.2
|
||||
version: 1.51.2
|
||||
i18next:
|
||||
specifier: 26.0.3
|
||||
version: 26.0.3(typescript@6.0.2)
|
||||
specifier: 26.0.4
|
||||
version: 26.0.4(typescript@6.0.2)
|
||||
i18next-http-backend:
|
||||
specifier: 3.0.4
|
||||
version: 3.0.4(encoding@0.1.13)
|
||||
@@ -366,7 +366,7 @@ importers:
|
||||
version: 10.29.1
|
||||
react-i18next:
|
||||
specifier: 17.0.2
|
||||
version: 17.0.2(i18next@26.0.3(typescript@6.0.2))(react-dom@19.2.4(react@19.2.4))(react@19.2.4)(typescript@6.0.2)
|
||||
version: 17.0.2(i18next@26.0.4(typescript@6.0.2))(react-dom@19.2.4(react@19.2.4))(react@19.2.4)(typescript@6.0.2)
|
||||
react-window:
|
||||
specifier: 2.2.7
|
||||
version: 2.2.7(react-dom@19.2.4(react@19.2.4))(react@19.2.4)
|
||||
@@ -489,8 +489,11 @@ importers:
|
||||
specifier: workspace:*
|
||||
version: link:../../packages/splitjs
|
||||
'@zumer/snapdom':
|
||||
specifier: 2.7.0
|
||||
version: 2.7.0
|
||||
specifier: 2.8.0
|
||||
version: 2.8.0
|
||||
aes-js:
|
||||
specifier: 3.1.2
|
||||
version: 3.1.2
|
||||
autocomplete.js:
|
||||
specifier: 0.38.1
|
||||
version: 0.38.1
|
||||
@@ -522,8 +525,8 @@ importers:
|
||||
specifier: 17.4.0
|
||||
version: 17.4.0
|
||||
i18next:
|
||||
specifier: 26.0.3
|
||||
version: 26.0.3(typescript@6.0.2)
|
||||
specifier: 26.0.4
|
||||
version: 26.0.4(typescript@6.0.2)
|
||||
i18next-http-backend:
|
||||
specifier: 3.0.4
|
||||
version: 3.0.4(encoding@0.1.13)
|
||||
@@ -583,13 +586,16 @@ importers:
|
||||
version: 10.29.1
|
||||
react-i18next:
|
||||
specifier: 17.0.2
|
||||
version: 17.0.2(i18next@26.0.3(typescript@6.0.2))(react-dom@19.2.4(react@19.2.4))(react@19.2.4)(typescript@6.0.2)
|
||||
version: 17.0.2(i18next@26.0.4(typescript@6.0.2))(react-dom@19.2.4(react@19.2.4))(react@19.2.4)(typescript@6.0.2)
|
||||
react-window:
|
||||
specifier: 2.2.7
|
||||
version: 2.2.7(react-dom@19.2.4(react@19.2.4))(react@19.2.4)
|
||||
reveal.js:
|
||||
specifier: 6.0.0
|
||||
version: 6.0.0
|
||||
scrypt-js:
|
||||
specifier: 3.0.1
|
||||
version: 3.0.1
|
||||
svg-pan-zoom:
|
||||
specifier: 3.6.2
|
||||
version: 3.6.2
|
||||
@@ -606,6 +612,9 @@ importers:
|
||||
'@preact/preset-vite':
|
||||
specifier: 2.10.2
|
||||
version: 2.10.2(@babel/core@7.29.0)(preact@10.29.1)(vite@8.0.7(@types/node@24.12.2)(esbuild@0.28.0)(jiti@2.6.1)(less@4.1.3)(sass-embedded@1.91.0)(sass@1.91.0)(terser@5.44.0)(tsx@4.21.0)(yaml@2.8.3))
|
||||
'@types/aes-js':
|
||||
specifier: 3.1.4
|
||||
version: 3.1.4
|
||||
'@types/bootstrap':
|
||||
specifier: 5.2.10
|
||||
version: 5.2.10
|
||||
@@ -662,7 +671,7 @@ importers:
|
||||
dependencies:
|
||||
'@electron/remote':
|
||||
specifier: 2.1.3
|
||||
version: 2.1.3(electron@41.1.1)
|
||||
version: 2.1.3(electron@41.2.0)
|
||||
better-sqlite3:
|
||||
specifier: 12.8.0
|
||||
version: 12.8.0
|
||||
@@ -722,8 +731,8 @@ importers:
|
||||
specifier: 14.0.0
|
||||
version: 14.0.0(webpack@5.105.4(esbuild@0.28.0))
|
||||
electron:
|
||||
specifier: 41.1.1
|
||||
version: 41.1.1
|
||||
specifier: 41.2.0
|
||||
version: 41.2.0
|
||||
prebuild-install:
|
||||
specifier: 7.1.3
|
||||
version: 7.1.3
|
||||
@@ -781,8 +790,8 @@ importers:
|
||||
specifier: 14.0.0
|
||||
version: 14.0.0(webpack@5.105.4(esbuild@0.28.0))
|
||||
electron:
|
||||
specifier: 41.1.1
|
||||
version: 41.1.1
|
||||
specifier: 41.2.0
|
||||
version: 41.2.0
|
||||
fs-extra:
|
||||
specifier: 11.3.4
|
||||
version: 11.3.4
|
||||
@@ -811,8 +820,8 @@ importers:
|
||||
specifier: ^1.12.1
|
||||
version: 1.29.0(zod@4.3.6)
|
||||
ai:
|
||||
specifier: 6.0.153
|
||||
version: 6.0.153(zod@4.3.6)
|
||||
specifier: 6.0.154
|
||||
version: 6.0.154(zod@4.3.6)
|
||||
better-sqlite3:
|
||||
specifier: 12.8.0
|
||||
version: 12.8.0
|
||||
@@ -820,8 +829,8 @@ importers:
|
||||
specifier: 9.0.5
|
||||
version: 9.0.5
|
||||
i18next:
|
||||
specifier: 26.0.3
|
||||
version: 26.0.3(typescript@6.0.2)
|
||||
specifier: 26.0.4
|
||||
version: 26.0.4(typescript@6.0.2)
|
||||
i18next-fs-backend:
|
||||
specifier: 2.6.3
|
||||
version: 2.6.3
|
||||
@@ -837,7 +846,7 @@ importers:
|
||||
version: 7.1.2
|
||||
'@electron/remote':
|
||||
specifier: 2.1.3
|
||||
version: 2.1.3(electron@41.1.1)
|
||||
version: 2.1.3(electron@41.2.0)
|
||||
'@triliumnext/commons':
|
||||
specifier: workspace:*
|
||||
version: link:../../packages/commons
|
||||
@@ -950,8 +959,8 @@ importers:
|
||||
specifier: 5.0.1
|
||||
version: 5.0.1
|
||||
electron:
|
||||
specifier: 41.1.1
|
||||
version: 41.1.1
|
||||
specifier: 41.2.0
|
||||
version: 41.2.0
|
||||
electron-window-state:
|
||||
specifier: 5.0.3
|
||||
version: 5.0.3
|
||||
@@ -1083,8 +1092,8 @@ importers:
|
||||
apps/website:
|
||||
dependencies:
|
||||
i18next:
|
||||
specifier: 26.0.3
|
||||
version: 26.0.3(typescript@6.0.2)
|
||||
specifier: 26.0.4
|
||||
version: 26.0.4(typescript@6.0.2)
|
||||
preact:
|
||||
specifier: 10.29.1
|
||||
version: 10.29.1
|
||||
@@ -1096,7 +1105,7 @@ importers:
|
||||
version: 6.6.7(preact@10.29.1)
|
||||
react-i18next:
|
||||
specifier: 17.0.2
|
||||
version: 17.0.2(i18next@26.0.3(typescript@6.0.2))(react-dom@19.2.4(react@19.2.4))(react@19.2.4)(typescript@6.0.2)
|
||||
version: 17.0.2(i18next@26.0.4(typescript@6.0.2))(react-dom@19.2.4(react@19.2.4))(react@19.2.4)(typescript@6.0.2)
|
||||
devDependencies:
|
||||
'@preact/preset-vite':
|
||||
specifier: 2.10.5
|
||||
@@ -1515,7 +1524,7 @@ importers:
|
||||
version: 6.5.3(@codemirror/language@6.12.3)(@codemirror/state@6.6.0)(@codemirror/view@6.41.0)
|
||||
'@replit/codemirror-lang-nix':
|
||||
specifier: 6.0.1
|
||||
version: 6.0.1(@codemirror/autocomplete@6.18.6)(@codemirror/language@6.12.3)(@codemirror/state@6.6.0)(@codemirror/view@6.41.0)(@lezer/common@1.5.1)(@lezer/highlight@1.2.3)(@lezer/lr@1.4.2)
|
||||
version: 6.0.1(@codemirror/autocomplete@6.18.6)(@codemirror/language@6.12.3)(@codemirror/state@6.6.0)(@codemirror/view@6.41.0)(@lezer/common@1.5.2)(@lezer/highlight@1.2.3)(@lezer/lr@1.4.2)
|
||||
'@replit/codemirror-vim':
|
||||
specifier: 6.3.0
|
||||
version: 6.3.0(@codemirror/commands@6.10.3)(@codemirror/language@6.12.3)(@codemirror/search@6.6.0)(@codemirror/state@6.6.0)(@codemirror/view@6.41.0)
|
||||
@@ -1659,8 +1668,8 @@ importers:
|
||||
specifier: 1.0.3
|
||||
version: 1.0.3
|
||||
i18next:
|
||||
specifier: 26.0.3
|
||||
version: 26.0.3(typescript@6.0.2)
|
||||
specifier: 26.0.4
|
||||
version: 26.0.4(typescript@6.0.2)
|
||||
mime-types:
|
||||
specifier: 3.0.2
|
||||
version: 3.0.2
|
||||
@@ -1719,8 +1728,8 @@ packages:
|
||||
peerDependencies:
|
||||
zod: ^3.25.76 || ^4.1.8
|
||||
|
||||
'@ai-sdk/gateway@3.0.93':
|
||||
resolution: {integrity: sha512-8D6C9eEvDq6IgrdlWzpbniahDkoLiieTCrpzH8p/Hw63/0iPnZJ1uZcqxHrDIVDW/+aaGhBXqmx5C7HSd2eMmQ==}
|
||||
'@ai-sdk/gateway@3.0.94':
|
||||
resolution: {integrity: sha512-uDDwLZhCkvC89crVS3S90D5L7AcVN8WriGuYVNYgVAaVcvy3Mthy3R9ICfzG75BObhz6pm2FWnhxDfNRK+t69Q==}
|
||||
engines: {node: '>=18'}
|
||||
peerDependencies:
|
||||
zod: ^3.25.76 || ^4.1.8
|
||||
@@ -3858,8 +3867,8 @@ packages:
|
||||
'@keyv/serialize@1.1.1':
|
||||
resolution: {integrity: sha512-dXn3FZhPv0US+7dtJsIi2R+c7qWYiReoEh5zUntWCf4oSpMNib8FDhSoed6m3QyZdx5hK7iLFkYk3rNxwt8vTA==}
|
||||
|
||||
'@lezer/common@1.5.1':
|
||||
resolution: {integrity: sha512-6YRVG9vBkaY7p1IVxL4s44n5nUnaNnGM2/AckNgYOnxTG2kWh1vR8BMxPseWPjRNpb5VtXnMpeYAEAADoRV1Iw==}
|
||||
'@lezer/common@1.5.2':
|
||||
resolution: {integrity: sha512-sxQE460fPZyU3sdc8lafxiPwJHBzZRy/udNFynGQky1SePYBdhkBl1kOagA9uT3pxR8K09bOrmTUqA9wb/PjSQ==}
|
||||
|
||||
'@lezer/css@1.1.11':
|
||||
resolution: {integrity: sha512-FuAnusbLBl1SEAtfN8NdShxYJiESKw9LAFysfea1T96jD3ydBn12oYjaSG1a04BQRIUd93/0D8e5CV1cUMkmQg==}
|
||||
@@ -5046,27 +5055,27 @@ packages:
|
||||
'@redocly/cli-otel@0.1.2':
|
||||
resolution: {integrity: sha512-Bg7BoO5t1x3lVK+KhA5aGPmeXpQmdf6WtTYHhelKJCsQ+tRMiJoFAQoKHoBHAoNxXrhlS3K9lKFLHGmtxsFQfA==}
|
||||
|
||||
'@redocly/cli@2.25.4':
|
||||
resolution: {integrity: sha512-ypBv8ZhckTzcOfsFH2VILsLqk00bJ1tI0POtlaEf8z0rDsnmD8auUETkMzw8wlUB+aQM7+VSzpSsmcmqeSgzWQ==}
|
||||
'@redocly/cli@2.26.0':
|
||||
resolution: {integrity: sha512-24S1ls0qvu3uaPiW4OImy06CpImAkUOd3h7OG+Hq9By5pPavjOE34KtdQTaaFso3e1qgzXYdQh6HPqEY1nTZgA==}
|
||||
engines: {node: '>=22.12.0 || >=20.19.0 <21.0.0', npm: '>=10'}
|
||||
hasBin: true
|
||||
|
||||
'@redocly/config@0.22.2':
|
||||
resolution: {integrity: sha512-roRDai8/zr2S9YfmzUfNhKjOF0NdcOIqF7bhf4MVC5UxpjIysDjyudvlAiVbpPHp3eDRWbdzUgtkK1a7YiDNyQ==}
|
||||
|
||||
'@redocly/config@0.46.0':
|
||||
resolution: {integrity: sha512-FZEprNEkmLITKKdv5blIai1qiCcc4dn5+96AjWnmFQmH/oz/OyBiXBSi752/M+Wmype7aH2uRywSCuYlu4CgVA==}
|
||||
'@redocly/config@0.46.1':
|
||||
resolution: {integrity: sha512-dSdkB2wRLtvl3f7ayRu9vqVhUMjjRaxZlHgRbgOtPPXxn4uI/ciDO87h4CJb7Iet+OVpevpAU6gU8bo5qVbQxg==}
|
||||
|
||||
'@redocly/openapi-core@1.34.5':
|
||||
resolution: {integrity: sha512-0EbE8LRbkogtcCXU7liAyC00n9uNG9hJ+eMyHFdUsy9lB/WGqnEBgwjA9q2cyzAVcdTkQqTBBU1XePNnN3OijA==}
|
||||
engines: {node: '>=18.17.0', npm: '>=9.5.0'}
|
||||
|
||||
'@redocly/openapi-core@2.25.4':
|
||||
resolution: {integrity: sha512-zYdKQEsowPNtkTixrfbn5DySWBLQpTsISthVBBEPAa3OZC75UI76CbHXEamJ8Kmlead9IkD5RbgeJvxqJ5/H6Q==}
|
||||
'@redocly/openapi-core@2.26.0':
|
||||
resolution: {integrity: sha512-BjTPzSV1Gv430W9S/7i5T/dEZDK00GFk6ILCNTI+31pA9lEFJOXc0XRJT+V3v+m3nXIgGoo6GgqeLdAiM10rNg==}
|
||||
engines: {node: '>=22.12.0 || >=20.19.0 <21.0.0', npm: '>=10'}
|
||||
|
||||
'@redocly/respect-core@2.25.4':
|
||||
resolution: {integrity: sha512-0xMbcSft+9Q2sO1wSJMxo510Aqc/kGF/AmUK3OaLQvGvKUgOqq2Op/0aorNQJk6s8WBEH4UN4eFt7fUzUeXs8g==}
|
||||
'@redocly/respect-core@2.26.0':
|
||||
resolution: {integrity: sha512-mejFg26XNp8pqHwnL75QvI7MO4dhgFKa+v35OgOcVMrU9tGZ/VaFbplEyvdrRgjoonguXoLDoMN4Iw1rWlZg0g==}
|
||||
engines: {node: '>=22.12.0 || >=20.19.0 <21.0.0', npm: '>=10'}
|
||||
|
||||
'@replit/codemirror-indentation-markers@6.5.3':
|
||||
@@ -5083,7 +5092,7 @@ packages:
|
||||
'@codemirror/language': 6.12.3
|
||||
'@codemirror/state': ^6.0.0
|
||||
'@codemirror/view': ^6.0.0
|
||||
'@lezer/common': 1.5.1
|
||||
'@lezer/common': 1.5.2
|
||||
'@lezer/highlight': 1.2.3
|
||||
'@lezer/lr': ^1.0.0
|
||||
|
||||
@@ -5466,6 +5475,9 @@ packages:
|
||||
'@tybys/wasm-util@0.10.1':
|
||||
resolution: {integrity: sha512-9tTaPJLSiejZKx+Bmog4uSubteqTvFrVrURwkmHixBo0G4seD0zUxp98E1DzUBJxLQ3NPwXrGKDiVjwx/DpPsg==}
|
||||
|
||||
'@types/aes-js@3.1.4':
|
||||
resolution: {integrity: sha512-v3D66IptpUqh+pHKVNRxY8yvp2ESSZXe0rTzsGdzUhEwag7ljVfgCllkWv2YgiYXDhWFBrEywll4A5JToyTNFA==}
|
||||
|
||||
'@types/appdmg@0.5.5':
|
||||
resolution: {integrity: sha512-G+n6DgZTZFOteITE30LnWj+HRVIGr7wMlAiLWOO02uJFWVEitaPU9JVXm9wJokkgshBawb2O1OykdcsmkkZfgg==}
|
||||
|
||||
@@ -6801,8 +6813,8 @@ packages:
|
||||
resolution: {integrity: sha512-0fztsk/0ryJ+2PPr9EyXS5/Co7OK8q3zY/xOoozEWaUsL5x+C0cyZ4YyMuUffOO2Dx/rAdq4JMPqW0VUtm+vzA==}
|
||||
engines: {bun: '>=0.7.0', deno: '>=1.0.0', node: '>=18.0.0'}
|
||||
|
||||
'@zumer/snapdom@2.7.0':
|
||||
resolution: {integrity: sha512-ZiELKzDszeFOazPQ/ExXzgtdoW9jADVjDjInr5XDAlVdCx0RbNsFiG7RLyM48XnA7EyCA9yTvmXSc3ElDrTRqA==}
|
||||
'@zumer/snapdom@2.8.0':
|
||||
resolution: {integrity: sha512-NhztgFDNfOkFt8Ox9PIJ1IwggyMui5UDazysOgZD7FSGL0G7H8U+J3ft0iecxAS8daj5aC62i3blaTk7s2GcpA==}
|
||||
|
||||
abbrev@1.1.1:
|
||||
resolution: {integrity: sha512-nne9/IiQ/hzIhY6pdDnbBtz7DjPTKrY00P/zvPSm5pOFkl6xuGrGnXn/VtTNNfNtAfZ9/1RtehkszU9qcTii0Q==}
|
||||
@@ -6863,6 +6875,9 @@ packages:
|
||||
resolution: {integrity: sha512-TGw5yVi4saajsSEgz25grObGHEUaDrniwvA2qwSC060KfqGPdglhvPMA2lPIoxs3PQIItj2iag35fONcQqgUaQ==}
|
||||
engines: {node: '>=12.0'}
|
||||
|
||||
aes-js@3.1.2:
|
||||
resolution: {integrity: sha512-e5pEa2kBnBOgR4Y/p20pskXI74UEz7de8ZGVo58asOtvSVG5YAbJeELPZxOmt+Bnz3rX753YKhfIn4X4l1PPRQ==}
|
||||
|
||||
agent-base@6.0.2:
|
||||
resolution: {integrity: sha512-RZNwNclF7+MS/8bDg70amg32dyeZGZxiDuQmZxKLAlQjr3jGyLx+4Kkk58UO7D2QdgFIQCovuSuZESne6RG6XQ==}
|
||||
engines: {node: '>= 6.0.0'}
|
||||
@@ -6883,8 +6898,8 @@ packages:
|
||||
resolution: {integrity: sha512-4I7Td01quW/RpocfNayFdFVk1qSuoh0E7JrbRJ16nH01HhKFQ88INq9Sd+nd72zqRySlr9BmDA8xlEJ6vJMrYA==}
|
||||
engines: {node: '>=8'}
|
||||
|
||||
ai@6.0.153:
|
||||
resolution: {integrity: sha512-UlgBe4k0Ja1m1Eufn6FVSsHoF0sc7qwxX35ywJPDogIvBz0pHc+NOmCqiRY904DczNYIuwpZfKBLVz8HXgu3mg==}
|
||||
ai@6.0.154:
|
||||
resolution: {integrity: sha512-HfKJKCTJsDZxqrIUDSVnBQ7DpQlx5WI4ExqtLd7Bl70epLmvkpc/HYMzU1hP9W+g9VEAcvZo4fbMqc3v5D+9gQ==}
|
||||
engines: {node: '>=18'}
|
||||
peerDependencies:
|
||||
zod: ^3.25.76 || ^4.1.8
|
||||
@@ -7123,6 +7138,9 @@ packages:
|
||||
bail@2.0.2:
|
||||
resolution: {integrity: sha512-0xO6mYd7JB2YesxDKplafRpsiOzPt9V02ddPCLbY1xYGPOX24NTyN50qnUxgCPcSoYMhKpAuBTjQoRZCAkUDRw==}
|
||||
|
||||
balanced-match@1.0.2:
|
||||
resolution: {integrity: sha512-3oSeUO0TMV67hN1AmbXsK4yaqU7tjiHlbxRDZOpH0KW9+CeX4bRAaX0Anxt0tx2MrpRpWwQaPwIlISEJhYU5Pw==}
|
||||
|
||||
balanced-match@4.0.3:
|
||||
resolution: {integrity: sha512-1pHv8LX9CpKut1Zp4EXey7Z8OfH11ONNH6Dhi2WDUt31VVZFXZzKwXcysBgqSumFCmR+0dqjMK5v5JiFHzi0+g==}
|
||||
engines: {node: 20 || >=22}
|
||||
@@ -7252,6 +7270,9 @@ packages:
|
||||
bplist-creator@0.0.8:
|
||||
resolution: {integrity: sha512-Za9JKzD6fjLC16oX2wsXfc+qBEhJBJB1YPInoAQpMLhDuj5aVOv1baGeIQSq1Fr3OCqzvsoQcSBSwGId/Ja2PA==}
|
||||
|
||||
brace-expansion@1.1.14:
|
||||
resolution: {integrity: sha512-MWPGfDxnyzKU7rNOW9SP/c50vi3xrmrua/+6hfPbCS2ABNWfx24vPidzvC7krjU/RTo235sV776ymlsMtGKj8g==}
|
||||
|
||||
brace-expansion@5.0.5:
|
||||
resolution: {integrity: sha512-VZznLgtwhn+Mact9tfiwx64fA9erHH/MCXEUfB/0bX/6Fz6ny5EGTXYltMocqg4xFAQZtnO3DHWWXi8RiuN7cQ==}
|
||||
engines: {node: 18 || 20 || >=22}
|
||||
@@ -7720,6 +7741,9 @@ packages:
|
||||
resolution: {integrity: sha512-9mAqGPHLakhCLeNyxPkK4xVo746zQ/czLH1Ky+vkitMnWfWZps8r0qXuwhwizagCRttsL4lfG4pIOvaWLpAP0w==}
|
||||
engines: {node: '>= 0.8.0'}
|
||||
|
||||
concat-map@0.0.1:
|
||||
resolution: {integrity: sha512-/Srv4dswyQNBfohGpz9o6Yb3Gz3SrUDqBH5rTuhGR7ahtlbYKnVxw2bCFMRljaA7EXHaXZ8wsHdodFvbkhKmqg==}
|
||||
|
||||
concat-stream@1.6.2:
|
||||
resolution: {integrity: sha512-27HBghJxjiZtIk3Ycvn/4kbJk/1uZuJFfuPEns6LaEvpvG1f0hTea8lilrouyo9mVc2GWdcEZ8OLoGmSADlrCw==}
|
||||
engines: {'0': node >= 0.8}
|
||||
@@ -8428,8 +8452,8 @@ packages:
|
||||
resolution: {integrity: sha512-bO3y10YikuUwUuDUQRM4KfwNkKhnpVO7IPdbsrejwN9/AABJzzTQ4GeHwyzNSrVO+tEH3/Np255a3sVZpZDjvg==}
|
||||
engines: {node: '>=8.0.0'}
|
||||
|
||||
electron@41.1.1:
|
||||
resolution: {integrity: sha512-8bgvDhBjli+3Z2YCKgzzoBPh6391pr7Xv2h/tTJG4ETgvPvUxZomObbZLs31mUzYb6VrlcDDd9cyWyNKtPm3tA==}
|
||||
electron@41.2.0:
|
||||
resolution: {integrity: sha512-0OKLiymqfV0WK68RBXqAm3Myad2TpI5wwxLCBEUcH5Nugo3YfSk7p1Js/AL9266qTz5xZioUnxt9hG8FFwax0g==}
|
||||
engines: {node: '>= 12.20.55'}
|
||||
hasBin: true
|
||||
|
||||
@@ -9593,8 +9617,8 @@ packages:
|
||||
i18next-http-backend@3.0.4:
|
||||
resolution: {integrity: sha512-udwrBIE6cNpqn1gRAqRULq3+7MzIIuaiKRWrz++dVz5SqWW2VwXmPJtAgkI0JtMLFaADC9qNmnZAxWAhsxXx2g==}
|
||||
|
||||
i18next@26.0.3:
|
||||
resolution: {integrity: sha512-1571kXINxHKY7LksWp8wP+zP0YqHSSpl/OW0Y0owFEf2H3s8gCAffWaZivcz14rMkOvn3R/psiQxVsR9t2Nafg==}
|
||||
i18next@26.0.4:
|
||||
resolution: {integrity: sha512-gXF7U9bfioXPLv7mw8Qt2nfO7vij5MyINvPgVv99pX3fL1Y01pw2mKBFrlYpRxRCl2wz3ISenj6VsMJT2isfuA==}
|
||||
peerDependencies:
|
||||
typescript: ^5 || ^6
|
||||
peerDependenciesMeta:
|
||||
@@ -12591,6 +12615,9 @@ packages:
|
||||
script-loader@0.7.2:
|
||||
resolution: {integrity: sha512-UMNLEvgOAQuzK8ji8qIscM3GIrRCWN6MmMXGD4SD5l6cSycgGsCo0tX5xRnfQcoghqct0tjHjcykgI1PyBE2aA==}
|
||||
|
||||
scrypt-js@3.0.1:
|
||||
resolution: {integrity: sha512-cdwTTnqPu0Hyvf5in5asVdZocVDTNRmR7XEcJuIzMjJeSHybHl7vpB66AzwTaIg6CLSbtjcxc8fqcySfnTkccA==}
|
||||
|
||||
scule@1.3.0:
|
||||
resolution: {integrity: sha512-6FtHJEvt+pVMIB9IBY+IcCJ6Z5f1iQnytgyfKMhDKgmzYG+TeH/wx1y3l27rshSbLiSanrR9ffZDrEsmjlQF2g==}
|
||||
|
||||
@@ -14368,7 +14395,7 @@ snapshots:
|
||||
'@ai-sdk/provider-utils': 4.0.23(zod@4.3.6)
|
||||
zod: 4.3.6
|
||||
|
||||
'@ai-sdk/gateway@3.0.93(zod@4.3.6)':
|
||||
'@ai-sdk/gateway@3.0.94(zod@4.3.6)':
|
||||
dependencies:
|
||||
'@ai-sdk/provider': 3.0.8
|
||||
'@ai-sdk/provider-utils': 4.0.23(zod@4.3.6)
|
||||
@@ -15608,28 +15635,28 @@ snapshots:
|
||||
'@codemirror/language': 6.12.3
|
||||
'@codemirror/state': 6.6.0
|
||||
'@codemirror/view': 6.41.0
|
||||
'@lezer/common': 1.5.1
|
||||
'@lezer/common': 1.5.2
|
||||
|
||||
'@codemirror/commands@6.10.3':
|
||||
dependencies:
|
||||
'@codemirror/language': 6.12.3
|
||||
'@codemirror/state': 6.6.0
|
||||
'@codemirror/view': 6.41.0
|
||||
'@lezer/common': 1.5.1
|
||||
'@lezer/common': 1.5.2
|
||||
|
||||
'@codemirror/commands@6.8.1':
|
||||
dependencies:
|
||||
'@codemirror/language': 6.12.3
|
||||
'@codemirror/state': 6.6.0
|
||||
'@codemirror/view': 6.41.0
|
||||
'@lezer/common': 1.5.1
|
||||
'@lezer/common': 1.5.2
|
||||
|
||||
'@codemirror/lang-css@6.3.1':
|
||||
dependencies:
|
||||
'@codemirror/autocomplete': 6.18.6
|
||||
'@codemirror/language': 6.12.3
|
||||
'@codemirror/state': 6.6.0
|
||||
'@lezer/common': 1.5.1
|
||||
'@lezer/common': 1.5.2
|
||||
'@lezer/css': 1.1.11
|
||||
|
||||
'@codemirror/lang-html@6.4.11':
|
||||
@@ -15640,7 +15667,7 @@ snapshots:
|
||||
'@codemirror/language': 6.12.3
|
||||
'@codemirror/state': 6.6.0
|
||||
'@codemirror/view': 6.41.0
|
||||
'@lezer/common': 1.5.1
|
||||
'@lezer/common': 1.5.2
|
||||
'@lezer/css': 1.1.11
|
||||
'@lezer/html': 1.3.12
|
||||
|
||||
@@ -15651,7 +15678,7 @@ snapshots:
|
||||
'@codemirror/lint': 6.8.5
|
||||
'@codemirror/state': 6.6.0
|
||||
'@codemirror/view': 6.41.0
|
||||
'@lezer/common': 1.5.1
|
||||
'@lezer/common': 1.5.2
|
||||
'@lezer/javascript': 1.5.1
|
||||
|
||||
'@codemirror/lang-json@6.0.2':
|
||||
@@ -15666,7 +15693,7 @@ snapshots:
|
||||
'@codemirror/language': 6.12.3
|
||||
'@codemirror/state': 6.6.0
|
||||
'@codemirror/view': 6.41.0
|
||||
'@lezer/common': 1.5.1
|
||||
'@lezer/common': 1.5.2
|
||||
'@lezer/markdown': 1.4.3
|
||||
|
||||
'@codemirror/lang-markdown@6.5.0':
|
||||
@@ -15676,7 +15703,7 @@ snapshots:
|
||||
'@codemirror/language': 6.12.3
|
||||
'@codemirror/state': 6.6.0
|
||||
'@codemirror/view': 6.41.0
|
||||
'@lezer/common': 1.5.1
|
||||
'@lezer/common': 1.5.2
|
||||
'@lezer/markdown': 1.4.3
|
||||
|
||||
'@codemirror/lang-php@6.0.2':
|
||||
@@ -15684,7 +15711,7 @@ snapshots:
|
||||
'@codemirror/lang-html': 6.4.11
|
||||
'@codemirror/language': 6.12.3
|
||||
'@codemirror/state': 6.6.0
|
||||
'@lezer/common': 1.5.1
|
||||
'@lezer/common': 1.5.2
|
||||
'@lezer/php': 1.0.2
|
||||
|
||||
'@codemirror/lang-vue@0.1.3':
|
||||
@@ -15692,7 +15719,7 @@ snapshots:
|
||||
'@codemirror/lang-html': 6.4.11
|
||||
'@codemirror/lang-javascript': 6.2.5
|
||||
'@codemirror/language': 6.12.3
|
||||
'@lezer/common': 1.5.1
|
||||
'@lezer/common': 1.5.2
|
||||
'@lezer/highlight': 1.2.3
|
||||
'@lezer/lr': 1.4.2
|
||||
|
||||
@@ -15702,14 +15729,14 @@ snapshots:
|
||||
'@codemirror/language': 6.12.3
|
||||
'@codemirror/state': 6.6.0
|
||||
'@codemirror/view': 6.41.0
|
||||
'@lezer/common': 1.5.1
|
||||
'@lezer/common': 1.5.2
|
||||
'@lezer/xml': 1.0.6
|
||||
|
||||
'@codemirror/language@6.12.3':
|
||||
dependencies:
|
||||
'@codemirror/state': 6.6.0
|
||||
'@codemirror/view': 6.41.0
|
||||
'@lezer/common': 1.5.1
|
||||
'@lezer/common': 1.5.2
|
||||
'@lezer/highlight': 1.2.3
|
||||
'@lezer/lr': 1.4.2
|
||||
style-mod: 4.1.2
|
||||
@@ -16224,9 +16251,9 @@ snapshots:
|
||||
transitivePeerDependencies:
|
||||
- supports-color
|
||||
|
||||
'@electron/remote@2.1.3(electron@41.1.1)':
|
||||
'@electron/remote@2.1.3(electron@41.2.0)':
|
||||
dependencies:
|
||||
electron: 41.1.1
|
||||
electron: 41.2.0
|
||||
|
||||
'@electron/universal@2.0.2':
|
||||
dependencies:
|
||||
@@ -17433,54 +17460,54 @@ snapshots:
|
||||
|
||||
'@keyv/serialize@1.1.1': {}
|
||||
|
||||
'@lezer/common@1.5.1': {}
|
||||
'@lezer/common@1.5.2': {}
|
||||
|
||||
'@lezer/css@1.1.11':
|
||||
dependencies:
|
||||
'@lezer/common': 1.5.1
|
||||
'@lezer/common': 1.5.2
|
||||
'@lezer/highlight': 1.2.3
|
||||
'@lezer/lr': 1.4.2
|
||||
|
||||
'@lezer/highlight@1.2.3':
|
||||
dependencies:
|
||||
'@lezer/common': 1.5.1
|
||||
'@lezer/common': 1.5.2
|
||||
|
||||
'@lezer/html@1.3.12':
|
||||
dependencies:
|
||||
'@lezer/common': 1.5.1
|
||||
'@lezer/common': 1.5.2
|
||||
'@lezer/highlight': 1.2.3
|
||||
'@lezer/lr': 1.4.2
|
||||
|
||||
'@lezer/javascript@1.5.1':
|
||||
dependencies:
|
||||
'@lezer/common': 1.5.1
|
||||
'@lezer/common': 1.5.2
|
||||
'@lezer/highlight': 1.2.3
|
||||
'@lezer/lr': 1.4.2
|
||||
|
||||
'@lezer/json@1.0.3':
|
||||
dependencies:
|
||||
'@lezer/common': 1.5.1
|
||||
'@lezer/common': 1.5.2
|
||||
'@lezer/highlight': 1.2.3
|
||||
'@lezer/lr': 1.4.2
|
||||
|
||||
'@lezer/lr@1.4.2':
|
||||
dependencies:
|
||||
'@lezer/common': 1.5.1
|
||||
'@lezer/common': 1.5.2
|
||||
|
||||
'@lezer/markdown@1.4.3':
|
||||
dependencies:
|
||||
'@lezer/common': 1.5.1
|
||||
'@lezer/common': 1.5.2
|
||||
'@lezer/highlight': 1.2.3
|
||||
|
||||
'@lezer/php@1.0.2':
|
||||
dependencies:
|
||||
'@lezer/common': 1.5.1
|
||||
'@lezer/common': 1.5.2
|
||||
'@lezer/highlight': 1.2.3
|
||||
'@lezer/lr': 1.4.2
|
||||
|
||||
'@lezer/xml@1.0.6':
|
||||
dependencies:
|
||||
'@lezer/common': 1.5.1
|
||||
'@lezer/common': 1.5.2
|
||||
'@lezer/highlight': 1.2.3
|
||||
'@lezer/lr': 1.4.2
|
||||
|
||||
@@ -18678,15 +18705,15 @@ snapshots:
|
||||
dependencies:
|
||||
ulid: 2.4.0
|
||||
|
||||
'@redocly/cli@2.25.4(@opentelemetry/api@1.9.0)(bufferutil@4.0.9)(core-js@3.46.0)(encoding@0.1.13)(utf-8-validate@6.0.5)':
|
||||
'@redocly/cli@2.26.0(@opentelemetry/api@1.9.0)(bufferutil@4.0.9)(core-js@3.46.0)(encoding@0.1.13)(utf-8-validate@6.0.5)':
|
||||
dependencies:
|
||||
'@opentelemetry/exporter-trace-otlp-http': 0.202.0(@opentelemetry/api@1.9.0)
|
||||
'@opentelemetry/resources': 2.0.1(@opentelemetry/api@1.9.0)
|
||||
'@opentelemetry/sdk-trace-node': 2.0.1(@opentelemetry/api@1.9.0)
|
||||
'@opentelemetry/semantic-conventions': 1.34.0
|
||||
'@redocly/cli-otel': 0.1.2
|
||||
'@redocly/openapi-core': 2.25.4
|
||||
'@redocly/respect-core': 2.25.4
|
||||
'@redocly/openapi-core': 2.26.0
|
||||
'@redocly/respect-core': 2.26.0
|
||||
abort-controller: 3.0.0
|
||||
ajv: '@redocly/ajv@8.18.0'
|
||||
ajv-formats: 3.0.1(@redocly/ajv@8.18.0)
|
||||
@@ -18720,7 +18747,7 @@ snapshots:
|
||||
|
||||
'@redocly/config@0.22.2': {}
|
||||
|
||||
'@redocly/config@0.46.0':
|
||||
'@redocly/config@0.46.1':
|
||||
dependencies:
|
||||
json-schema-to-ts: 2.7.2
|
||||
|
||||
@@ -18738,10 +18765,10 @@ snapshots:
|
||||
transitivePeerDependencies:
|
||||
- supports-color
|
||||
|
||||
'@redocly/openapi-core@2.25.4':
|
||||
'@redocly/openapi-core@2.26.0':
|
||||
dependencies:
|
||||
'@redocly/ajv': 8.18.0
|
||||
'@redocly/config': 0.46.0
|
||||
'@redocly/config': 0.46.1
|
||||
ajv: '@redocly/ajv@8.18.0'
|
||||
ajv-formats: 3.0.1(@redocly/ajv@8.18.0)
|
||||
colorette: 1.4.0
|
||||
@@ -18751,12 +18778,12 @@ snapshots:
|
||||
pluralize: 8.0.0
|
||||
yaml-ast-parser: 0.0.43
|
||||
|
||||
'@redocly/respect-core@2.25.4':
|
||||
'@redocly/respect-core@2.26.0':
|
||||
dependencies:
|
||||
'@faker-js/faker': 7.6.0
|
||||
'@noble/hashes': 1.8.0
|
||||
'@redocly/ajv': 8.18.0
|
||||
'@redocly/openapi-core': 2.25.4
|
||||
'@redocly/openapi-core': 2.26.0
|
||||
ajv: '@redocly/ajv@8.18.0'
|
||||
better-ajv-errors: 1.2.0(@redocly/ajv@8.18.0)
|
||||
colorette: 2.0.20
|
||||
@@ -18772,13 +18799,13 @@ snapshots:
|
||||
'@codemirror/state': 6.6.0
|
||||
'@codemirror/view': 6.41.0
|
||||
|
||||
'@replit/codemirror-lang-nix@6.0.1(@codemirror/autocomplete@6.18.6)(@codemirror/language@6.12.3)(@codemirror/state@6.6.0)(@codemirror/view@6.41.0)(@lezer/common@1.5.1)(@lezer/highlight@1.2.3)(@lezer/lr@1.4.2)':
|
||||
'@replit/codemirror-lang-nix@6.0.1(@codemirror/autocomplete@6.18.6)(@codemirror/language@6.12.3)(@codemirror/state@6.6.0)(@codemirror/view@6.41.0)(@lezer/common@1.5.2)(@lezer/highlight@1.2.3)(@lezer/lr@1.4.2)':
|
||||
dependencies:
|
||||
'@codemirror/autocomplete': 6.18.6
|
||||
'@codemirror/language': 6.12.3
|
||||
'@codemirror/state': 6.6.0
|
||||
'@codemirror/view': 6.41.0
|
||||
'@lezer/common': 1.5.1
|
||||
'@lezer/common': 1.5.2
|
||||
'@lezer/highlight': 1.2.3
|
||||
'@lezer/lr': 1.4.2
|
||||
|
||||
@@ -19072,6 +19099,8 @@ snapshots:
|
||||
tslib: 2.8.1
|
||||
optional: true
|
||||
|
||||
'@types/aes-js@3.1.4': {}
|
||||
|
||||
'@types/appdmg@0.5.5':
|
||||
dependencies:
|
||||
'@types/node': 24.12.2
|
||||
@@ -21538,7 +21567,7 @@ snapshots:
|
||||
|
||||
'@zip.js/zip.js@2.8.11': {}
|
||||
|
||||
'@zumer/snapdom@2.7.0': {}
|
||||
'@zumer/snapdom@2.8.0': {}
|
||||
|
||||
abbrev@1.1.1: {}
|
||||
|
||||
@@ -21582,6 +21611,8 @@ snapshots:
|
||||
|
||||
adm-zip@0.5.16: {}
|
||||
|
||||
aes-js@3.1.2: {}
|
||||
|
||||
agent-base@6.0.2:
|
||||
dependencies:
|
||||
debug: 4.4.3
|
||||
@@ -21601,9 +21632,9 @@ snapshots:
|
||||
clean-stack: 2.2.0
|
||||
indent-string: 4.0.0
|
||||
|
||||
ai@6.0.153(zod@4.3.6):
|
||||
ai@6.0.154(zod@4.3.6):
|
||||
dependencies:
|
||||
'@ai-sdk/gateway': 3.0.93(zod@4.3.6)
|
||||
'@ai-sdk/gateway': 3.0.94(zod@4.3.6)
|
||||
'@ai-sdk/provider': 3.0.8
|
||||
'@ai-sdk/provider-utils': 4.0.23(zod@4.3.6)
|
||||
'@opentelemetry/api': 1.9.0
|
||||
@@ -21876,6 +21907,8 @@ snapshots:
|
||||
|
||||
bail@2.0.2: {}
|
||||
|
||||
balanced-match@1.0.2: {}
|
||||
|
||||
balanced-match@4.0.3: {}
|
||||
|
||||
bare-events@2.7.0: {}
|
||||
@@ -22016,6 +22049,11 @@ snapshots:
|
||||
stream-buffers: 2.2.0
|
||||
optional: true
|
||||
|
||||
brace-expansion@1.1.14:
|
||||
dependencies:
|
||||
balanced-match: 1.0.2
|
||||
concat-map: 0.0.1
|
||||
|
||||
brace-expansion@5.0.5:
|
||||
dependencies:
|
||||
balanced-match: 4.0.3
|
||||
@@ -22653,6 +22691,8 @@ snapshots:
|
||||
transitivePeerDependencies:
|
||||
- supports-color
|
||||
|
||||
concat-map@0.0.1: {}
|
||||
|
||||
concat-stream@1.6.2:
|
||||
dependencies:
|
||||
buffer-from: 1.1.2
|
||||
@@ -23440,10 +23480,10 @@ snapshots:
|
||||
- supports-color
|
||||
optional: true
|
||||
|
||||
electron@41.1.1:
|
||||
electron@41.2.0:
|
||||
dependencies:
|
||||
'@electron/get': 2.0.3
|
||||
'@types/node': 24.12.0
|
||||
'@types/node': 24.12.2
|
||||
extract-zip: 2.0.1
|
||||
transitivePeerDependencies:
|
||||
- supports-color
|
||||
@@ -25079,7 +25119,7 @@ snapshots:
|
||||
transitivePeerDependencies:
|
||||
- encoding
|
||||
|
||||
i18next@26.0.3(typescript@6.0.2):
|
||||
i18next@26.0.4(typescript@6.0.2):
|
||||
dependencies:
|
||||
'@babel/runtime': 7.29.2
|
||||
optionalDependencies:
|
||||
@@ -26540,7 +26580,7 @@ snapshots:
|
||||
|
||||
minimatch@3.1.5:
|
||||
dependencies:
|
||||
brace-expansion: 5.0.5
|
||||
brace-expansion: 1.1.14
|
||||
|
||||
minimatch@5.1.9:
|
||||
dependencies:
|
||||
@@ -27735,11 +27775,11 @@ snapshots:
|
||||
react: 19.2.4
|
||||
scheduler: 0.27.0
|
||||
|
||||
react-i18next@17.0.2(i18next@26.0.3(typescript@6.0.2))(react-dom@19.2.4(react@19.2.4))(react@19.2.4)(typescript@6.0.2):
|
||||
react-i18next@17.0.2(i18next@26.0.4(typescript@6.0.2))(react-dom@19.2.4(react@19.2.4))(react@19.2.4)(typescript@6.0.2):
|
||||
dependencies:
|
||||
'@babel/runtime': 7.29.2
|
||||
html-parse-stringify: 3.0.1
|
||||
i18next: 26.0.3(typescript@6.0.2)
|
||||
i18next: 26.0.4(typescript@6.0.2)
|
||||
react: 19.2.4
|
||||
use-sync-external-store: 1.6.0(react@19.2.4)
|
||||
optionalDependencies:
|
||||
@@ -28434,6 +28474,8 @@ snapshots:
|
||||
dependencies:
|
||||
raw-loader: 0.5.1
|
||||
|
||||
scrypt-js@3.0.1: {}
|
||||
|
||||
scule@1.3.0: {}
|
||||
|
||||
secure-compare@3.0.1: {}
|
||||
|
||||
Reference in New Issue
Block a user