Mirror of https://github.com/zadam/trilium.git, synced 2025-11-16 18:25:51 +01:00
chore(monorepo/server): move server-side source code
112
apps/server/src/routes/api/anthropic.ts
Normal file
@@ -0,0 +1,112 @@
import options from "../../services/options.js";
import log from "../../services/log.js";
import type { Request, Response } from "express";
import { PROVIDER_CONSTANTS } from '../../services/llm/constants/provider_constants.js';
import Anthropic from '@anthropic-ai/sdk';

// Interface for Anthropic model entries
interface AnthropicModel {
    id: string;
    name: string;
    type: string;
}

/**
 * @swagger
 * /api/anthropic/models:
 *   post:
 *     summary: List available models from Anthropic
 *     operationId: anthropic-list-models
 *     requestBody:
 *       required: false
 *       content:
 *         application/json:
 *           schema:
 *             type: object
 *             properties:
 *               baseUrl:
 *                 type: string
 *                 description: Optional custom Anthropic API base URL
 *     responses:
 *       '200':
 *         description: List of available Anthropic models
 *         content:
 *           application/json:
 *             schema:
 *               type: object
 *               properties:
 *                 success:
 *                   type: boolean
 *                 chatModels:
 *                   type: array
 *                   items:
 *                     type: object
 *                     properties:
 *                       id:
 *                         type: string
 *                       name:
 *                         type: string
 *                       type:
 *                         type: string
 *                 embeddingModels:
 *                   type: array
 *                   items:
 *                     type: object
 *                     properties:
 *                       id:
 *                         type: string
 *                       name:
 *                         type: string
 *                       type:
 *                         type: string
 *       '500':
 *         description: Error listing models
 *     security:
 *       - session: []
 *     tags: ["llm"]
 */
async function listModels(req: Request, res: Response) {
    try {
        const { baseUrl } = req.body;

        // Use provided base URL or default from options
        const anthropicBaseUrl = baseUrl ||
            await options.getOption('anthropicBaseUrl') ||
            PROVIDER_CONSTANTS.ANTHROPIC.BASE_URL;

        const apiKey = await options.getOption('anthropicApiKey');

        if (!apiKey) {
            throw new Error('Anthropic API key is not configured');
        }

        log.info(`Using predefined Anthropic models list (avoiding direct API call)`);

        // Instead of using the SDK's built-in models listing which might not work,
        // directly use the predefined available models
        const chatModels = PROVIDER_CONSTANTS.ANTHROPIC.AVAILABLE_MODELS.map(model => ({
            id: model.id,
            name: model.name,
            type: 'chat'
        }));

        // Anthropic doesn't currently have embedding models
        const embeddingModels: AnthropicModel[] = [];

        // Return the models list
        return {
            success: true,
            chatModels,
            embeddingModels
        };
    } catch (error: any) {
        log.error(`Error listing Anthropic models: ${error.message || 'Unknown error'}`);

        // Properly throw the error to be handled by the global error handler
        throw new Error(`Failed to list Anthropic models: ${error.message || 'Unknown error'}`);
    }
}

export default {
    listModels
};
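A minimal client-side sketch of calling this route, using the /api/anthropic/models path and response shape from the swagger block above (an authenticated session and the usual fetch availability are assumed):

// Hypothetical usage sketch; not part of the committed file
const res = await fetch("/api/anthropic/models", {
    method: "POST",
    headers: { "Content-Type": "application/json" },
    body: JSON.stringify({ baseUrl: "https://api.anthropic.com" })  // baseUrl is optional
});
const { success, chatModels, embeddingModels } = await res.json();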
55
apps/server/src/routes/api/app_info.ts
Normal file
@@ -0,0 +1,55 @@
import appInfo from "../../services/app_info.js";

/**
 * @swagger
 * /api/app-info:
 *   get:
 *     summary: Get installation info
 *     operationId: app-info
 *     externalDocs:
 *       description: Server implementation
 *       url: https://github.com/TriliumNext/Notes/blob/v0.91.6/src/services/app_info.ts
 *     responses:
 *       '200':
 *         description: Installation info
 *         content:
 *           application/json:
 *             schema:
 *               type: object
 *               properties:
 *                 appVersion:
 *                   type: string
 *                   example: "0.91.6"
 *                 dbVersion:
 *                   type: integer
 *                   example: 228
 *                 nodeVersion:
 *                   type: string
 *                   description: "value of process.version"
 *                 syncVersion:
 *                   type: integer
 *                   example: 34
 *                 buildDate:
 *                   type: string
 *                   example: "2024-09-07T18:36:34Z"
 *                 buildRevision:
 *                   type: string
 *                   example: "7c0d6930fa8f20d269dcfbcbc8f636a25f6bb9a7"
 *                 dataDirectory:
 *                   type: string
 *                   example: "/var/lib/trilium"
 *                 clipperProtocolVersion:
 *                   type: string
 *                   example: "1.0"
 *                 utcDateTime:
 *                   $ref: '#/components/schemas/UtcDateTime'
 *     security:
 *       - session: []
 */
function getAppInfo() {
    return appInfo;
}

export default {
    getAppInfo
};
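A hypothetical request against the documented /api/app-info path (session auth assumed; field names taken from the schema above):

// Illustrative sketch only
const info = await (await fetch("/api/app-info")).json();
console.log(info.appVersion, info.dbVersion, info.dataDirectory);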
119
apps/server/src/routes/api/attachments.ts
Normal file
@@ -0,0 +1,119 @@
import becca from "../../becca/becca.js";
import blobService from "../../services/blob.js";
import ValidationError from "../../errors/validation_error.js";
import imageService from "../../services/image.js";
import type { Request } from "express";

function getAttachmentBlob(req: Request) {
    const preview = req.query.preview === "true";

    return blobService.getBlobPojo("attachments", req.params.attachmentId, { preview });
}

function getAttachments(req: Request) {
    const note = becca.getNoteOrThrow(req.params.noteId);

    return note.getAttachments({ includeContentLength: true });
}

function getAttachment(req: Request) {
    const { attachmentId } = req.params;

    return becca.getAttachmentOrThrow(attachmentId, { includeContentLength: true });
}

function getAllAttachments(req: Request) {
    const { attachmentId } = req.params;
    // one particular attachment is requested, but return all note's attachments

    const attachment = becca.getAttachmentOrThrow(attachmentId);
    return attachment.getNote()?.getAttachments({ includeContentLength: true }) || [];
}

function saveAttachment(req: Request) {
    const { noteId } = req.params;
    const { attachmentId, role, mime, title, content } = req.body;
    const matchByQuery = req.query.matchBy
    const isValidMatchBy = (typeof matchByQuery === "string") && (matchByQuery === "attachmentId" || matchByQuery === "title");
    const matchBy = isValidMatchBy ? matchByQuery : undefined;

    const note = becca.getNoteOrThrow(noteId);
    note.saveAttachment({ attachmentId, role, mime, title, content }, matchBy);
}

function uploadAttachment(req: Request) {
    const { noteId } = req.params;
    const { file } = req;

    if (!file) {
        return {
            uploaded: false,
            message: `Missing attachment data.`
        };
    }

    const note = becca.getNoteOrThrow(noteId);
    let url;

    if (["image/png", "image/jpg", "image/jpeg", "image/gif", "image/webp", "image/svg+xml"].includes(file.mimetype)) {
        const attachment = imageService.saveImageToAttachment(noteId, file.buffer, file.originalname, true, true);
        url = `api/attachments/${attachment.attachmentId}/image/${encodeURIComponent(attachment.title)}`;
    } else {
        const attachment = note.saveAttachment({
            role: "file",
            mime: file.mimetype,
            title: file.originalname,
            content: file.buffer
        });

        url = `#root/${noteId}?viewMode=attachments&attachmentId=${attachment.attachmentId}`;
    }

    return {
        uploaded: true,
        url
    };
}

function renameAttachment(req: Request) {
    const { title } = req.body;
    const { attachmentId } = req.params;

    const attachment = becca.getAttachmentOrThrow(attachmentId);

    if (!title?.trim()) {
        throw new ValidationError("Title must not be empty");
    }

    attachment.title = title;
    attachment.save();
}

function deleteAttachment(req: Request) {
    const { attachmentId } = req.params;

    const attachment = becca.getAttachment(attachmentId);

    if (attachment) {
        attachment.markAsDeleted();
    }
}

function convertAttachmentToNote(req: Request) {
    const { attachmentId } = req.params;

    const attachment = becca.getAttachmentOrThrow(attachmentId);
    return attachment.convertToNote();
}

export default {
    getAttachmentBlob,
    getAttachments,
    getAttachment,
    getAllAttachments,
    saveAttachment,
    uploadAttachment,
    renameAttachment,
    deleteAttachment,
    convertAttachmentToNote
};
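For reference, a sketch of the request body shape that saveAttachment destructures above; the concrete values are hypothetical and the route path is registered elsewhere, so only the payload shape is shown:

// Illustrative payload shape for saveAttachment (values are made up)
const saveAttachmentBody = {
    attachmentId: "someAttachmentId",   // optional when matching by title
    role: "file",
    mime: "text/plain",
    title: "notes.txt",
    content: "attachment content"
};
// The optional ?matchBy= query parameter accepts "attachmentId" or "title".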
248
apps/server/src/routes/api/attributes.ts
Normal file
@@ -0,0 +1,248 @@
"use strict";

import sql from "../../services/sql.js";
import log from "../../services/log.js";
import attributeService from "../../services/attributes.js";
import BAttribute from "../../becca/entities/battribute.js";
import becca from "../../becca/becca.js";
import ValidationError from "../../errors/validation_error.js";
import type { Request } from "express";

function getEffectiveNoteAttributes(req: Request) {
    const note = becca.getNote(req.params.noteId);

    return note?.getAttributes();
}

function updateNoteAttribute(req: Request) {
    const noteId = req.params.noteId;
    const body = req.body;

    let attribute;
    if (body.attributeId) {
        attribute = becca.getAttributeOrThrow(body.attributeId);

        if (attribute.noteId !== noteId) {
            throw new ValidationError(`Attribute '${body.attributeId}' is not owned by ${noteId}`);
        }

        if (body.type !== attribute.type || body.name !== attribute.name || (body.type === "relation" && body.value !== attribute.value)) {
            let newAttribute;

            if (body.type !== "relation" || !!body.value.trim()) {
                newAttribute = attribute.createClone(body.type, body.name, body.value);
                newAttribute.save();
            }

            attribute.markAsDeleted();

            return {
                attributeId: newAttribute ? newAttribute.attributeId : null
            };
        }
    } else {
        if (body.type === "relation" && !body.value?.trim()) {
            return {};
        }

        attribute = new BAttribute({
            noteId: noteId,
            name: body.name,
            type: body.type
        });
    }

    if (attribute.type === "label" || body.value.trim()) {
        attribute.value = body.value;
    } else {
        // relations should never have empty target
        attribute.markAsDeleted();
    }

    attribute.save();

    return {
        attributeId: attribute.attributeId
    };
}

function setNoteAttribute(req: Request) {
    const noteId = req.params.noteId;
    const body = req.body;

    const attributeId = sql.getValue<string | null>(/*sql*/`SELECT attributeId FROM attributes WHERE isDeleted = 0 AND noteId = ? AND type = ? AND name = ?`, [noteId, body.type, body.name]);

    if (attributeId) {
        const attr = becca.getAttribute(attributeId);
        if (!attr) {
            throw new ValidationError(`Missing attribute with ID ${attributeId}.`);
        }
        attr.value = body.value;
        attr.save();
    } else {
        const params = { ...body };
        params.noteId = noteId; // noteId must be set before calling constructor for proper initialization

        new BAttribute(params).save();
    }
}

function addNoteAttribute(req: Request) {
    const noteId = req.params.noteId;
    const body = req.body;

    new BAttribute({ ...body, noteId }).save();
}

function deleteNoteAttribute(req: Request) {
    const noteId = req.params.noteId;
    const attributeId = req.params.attributeId;

    const attribute = becca.getAttribute(attributeId);

    if (attribute) {
        if (attribute.noteId !== noteId) {
            throw new ValidationError(`Attribute ${attributeId} is not owned by ${noteId}`);
        }

        attribute.markAsDeleted();
    }
}

function updateNoteAttributes(req: Request) {
    const noteId = req.params.noteId;
    const incomingAttributes = req.body;

    const note = becca.getNote(noteId);
    if (!note) {
        throw new ValidationError(`Cannot find note with ID ${noteId}.`);
    }

    let existingAttrs = note.getOwnedAttributes().slice();

    let position = 0;

    for (const incAttr of incomingAttributes) {
        position += 10;

        const value = incAttr.value || "";

        const perfectMatchAttr = existingAttrs.find((attr) => attr.type === incAttr.type && attr.name === incAttr.name && attr.isInheritable === incAttr.isInheritable && attr.value === value);

        if (perfectMatchAttr) {
            existingAttrs = existingAttrs.filter((attr) => attr.attributeId !== perfectMatchAttr.attributeId);

            if (perfectMatchAttr.position !== position) {
                perfectMatchAttr.position = position;
                perfectMatchAttr.save();
            }

            continue; // nothing to update
        }

        if (incAttr.type === "relation") {
            const targetNote = becca.getNote(incAttr.value);

            if (!targetNote) {
                log.error(`Target note of relation ${JSON.stringify(incAttr)} does not exist or is deleted`);
                continue;
            }
        }

        const matchedAttr = existingAttrs.find((attr) => attr.type === incAttr.type && attr.name === incAttr.name && attr.isInheritable === incAttr.isInheritable);

        if (matchedAttr) {
            matchedAttr.value = incAttr.value;
            matchedAttr.position = position;
            matchedAttr.save();

            existingAttrs = existingAttrs.filter((attr) => attr.attributeId !== matchedAttr.attributeId);
            continue;
        }

        // no existing attribute has been matched, so we need to create a new one
        // type, name and isInheritable are immutable so even if there is an attribute with matching type & name, we need to create a new one and delete the former one

        note.addAttribute(incAttr.type, incAttr.name, incAttr.value, incAttr.isInheritable, position);
    }

    // all the remaining existing attributes are not defined anymore and should be deleted
    for (const toDeleteAttr of existingAttrs) {
        if (!toDeleteAttr.isAutoLink()) {
            toDeleteAttr.markAsDeleted();
        }
    }
}

function getAttributeNames(req: Request) {
    const type = req.query.type;
    const query = req.query.query;

    if (typeof type !== "string" || typeof query !== "string") {
        throw new ValidationError("Invalid data type.");
    }

    return attributeService.getAttributeNames(type, query);
}

function getValuesForAttribute(req: Request) {
    const attributeName = req.params.attributeName;

    return sql.getColumn("SELECT DISTINCT value FROM attributes WHERE isDeleted = 0 AND name = ? AND type = 'label' AND value != '' ORDER BY value", [attributeName]);
}

function createRelation(req: Request) {
    const sourceNoteId = req.params.noteId;
    const targetNoteId = req.params.targetNoteId;
    const name = req.params.name;

    const attributeId = sql.getValue<string>(/*sql*/`SELECT attributeId FROM attributes WHERE isDeleted = 0 AND noteId = ? AND type = 'relation' AND name = ? AND value = ?`, [
        sourceNoteId,
        name,
        targetNoteId
    ]);
    let attribute = becca.getAttribute(attributeId);

    if (!attribute) {
        attribute = new BAttribute({
            noteId: sourceNoteId,
            name: name,
            type: "relation",
            value: targetNoteId
        }).save();
    }

    return attribute;
}

function deleteRelation(req: Request) {
    const sourceNoteId = req.params.noteId;
    const targetNoteId = req.params.targetNoteId;
    const name = req.params.name;

    const attributeId = sql.getValue<string | null>(/*sql*/`SELECT attributeId FROM attributes WHERE isDeleted = 0 AND noteId = ? AND type = 'relation' AND name = ? AND value = ?`, [
        sourceNoteId,
        name,
        targetNoteId
    ]);

    if (attributeId) {
        const attribute = becca.getAttribute(attributeId);
        if (attribute) {
            attribute.markAsDeleted();
        }
    }
}

export default {
    updateNoteAttributes,
    updateNoteAttribute,
    setNoteAttribute,
    addNoteAttribute,
    deleteNoteAttribute,
    getAttributeNames,
    getValuesForAttribute,
    getEffectiveNoteAttributes,
    createRelation,
    deleteRelation
};
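A sketch of the array that updateNoteAttributes iterates over; field names come from the handler above, while the concrete values are hypothetical:

// Illustrative req.body for updateNoteAttributes (values are made up)
const incomingAttributes = [
    { type: "label", name: "todo", value: "", isInheritable: false },
    { type: "relation", name: "template", value: "someTargetNoteId", isInheritable: false }
];
// Matching existing attributes keep their ID; unmatched ones are recreated,
// and any owned attribute missing from this list is marked as deleted.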
95
apps/server/src/routes/api/autocomplete.ts
Normal file
@@ -0,0 +1,95 @@
"use strict";

import beccaService from "../../becca/becca_service.js";
import searchService from "../../services/search/services/search.js";
import log from "../../services/log.js";
import utils from "../../services/utils.js";
import cls from "../../services/cls.js";
import becca from "../../becca/becca.js";
import type { Request } from "express";
import ValidationError from "../../errors/validation_error.js";
import sql from "../../services/sql.js";

function getAutocomplete(req: Request) {
    if (typeof req.query.query !== "string") {
        throw new ValidationError("Invalid query data type.");
    }
    const query = (req.query.query || "").trim();
    const fastSearch = String(req.query.fastSearch).toLowerCase() === "false" ? false : true;

    const activeNoteId = req.query.activeNoteId || "none";

    let results;

    const timestampStarted = Date.now();

    if (query.length === 0 && typeof activeNoteId === "string") {
        results = getRecentNotes(activeNoteId);
    } else {
        results = searchService.searchNotesForAutocomplete(query, fastSearch);
    }

    const msTaken = Date.now() - timestampStarted;

    if (msTaken >= 100) {
        log.info(`Slow autocomplete took ${msTaken}ms`);
    }

    return results;
}

function getRecentNotes(activeNoteId: string) {
    let extraCondition = "";
    const params = [activeNoteId];

    const hoistedNoteId = cls.getHoistedNoteId();
    if (hoistedNoteId !== "root") {
        extraCondition = `AND recent_notes.notePath LIKE ?`;
        params.push(`%${hoistedNoteId}%`);
    }

    const recentNotes = becca.getRecentNotesFromQuery(
        `
        SELECT
            recent_notes.*
        FROM
            recent_notes
            JOIN notes USING(noteId)
        WHERE
            notes.isDeleted = 0
            AND notes.noteId != ?
            ${extraCondition}
        ORDER BY
            utcDateCreated DESC
        LIMIT 200`,
        params
    );

    return recentNotes.map((rn) => {
        const notePathArray = rn.notePath.split("/");

        const { title, icon } = beccaService.getNoteTitleAndIcon(notePathArray[notePathArray.length - 1]);
        const notePathTitle = beccaService.getNoteTitleForPath(notePathArray);

        return {
            notePath: rn.notePath,
            noteTitle: title,
            notePathTitle,
            highlightedNotePathTitle: utils.escapeHtml(notePathTitle),
            icon: icon ?? "bx bx-note"
        };
    });
}

// Get the total number of notes
function getNotesCount(req: Request) {
    const notesCount = sql.getRow(
        /*sql*/`SELECT COUNT(*) AS count FROM notes WHERE isDeleted = 0;`,
    ) as { count: number };
    return notesCount.count;
}

export default {
    getAutocomplete,
    getNotesCount
};
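A sketch of the query parameters getAutocomplete reads and the result entries it returns; parameter names are taken from the handler, the values are illustrative:

// Illustrative query string and result shape (values are made up)
const params = new URLSearchParams({ query: "meeting notes", fastSearch: "true", activeNoteId: "root" });
// each result entry: { notePath, noteTitle, notePathTitle, highlightedNotePathTitle, icon }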
33
apps/server/src/routes/api/backend_log.ts
Normal file
@@ -0,0 +1,33 @@
"use strict";

import { readFile } from "fs/promises";
import { join } from "path";
import dateUtils from "../../services/date_utils.js";
import dataDir from "../../services/data_dir.js";
import log from "../../services/log.js";
import { t } from "i18next";

const { LOG_DIR } = dataDir;

async function getBackendLog() {
    const fileName = `trilium-${dateUtils.localNowDate()}.log`;
    try {
        const file = join(LOG_DIR, fileName);
        return await readFile(file, "utf8");
    } catch (e) {
        const isErrorInstance = e instanceof Error;

        // most probably the log file does not exist yet - https://github.com/zadam/trilium/issues/1977
        if (isErrorInstance && "code" in e && e.code === "ENOENT") {
            log.error(e);
            return t("backend_log.log-does-not-exist", { fileName });
        }

        log.error(isErrorInstance ? e : `Reading the backend log '${fileName}' failed with an unknown error: '${e}'.`);
        return t("backend_log.reading-log-failed", { fileName });
    }
}

export default {
    getBackendLog
};
280
apps/server/src/routes/api/branches.ts
Normal file
@@ -0,0 +1,280 @@
"use strict";

import sql from "../../services/sql.js";
import utils from "../../services/utils.js";
import entityChangesService from "../../services/entity_changes.js";
import treeService from "../../services/tree.js";
import eraseService from "../../services/erase.js";
import becca from "../../becca/becca.js";
import TaskContext from "../../services/task_context.js";
import branchService from "../../services/branches.js";
import log from "../../services/log.js";
import ValidationError from "../../errors/validation_error.js";
import eventService from "../../services/events.js";
import type { Request } from "express";

/**
 * Code in this file deals with moving and cloning branches. The relationship between note and parent note is unique
 * for not deleted branches. There may be multiple deleted note-parent note relationships.
 */

function moveBranchToParent(req: Request) {
    const { branchId, parentBranchId } = req.params;

    const branchToMove = becca.getBranch(branchId);
    const targetParentBranch = becca.getBranch(parentBranchId);

    if (!branchToMove || !targetParentBranch) {
        throw new ValidationError(`One or both branches '${branchId}', '${parentBranchId}' have not been found`);
    }

    return branchService.moveBranchToBranch(branchToMove, targetParentBranch, branchId);
}

function moveBranchBeforeNote(req: Request) {
    const { branchId, beforeBranchId } = req.params;

    const branchToMove = becca.getBranchOrThrow(branchId);
    const beforeBranch = becca.getBranchOrThrow(beforeBranchId);

    const validationResult = treeService.validateParentChild(beforeBranch.parentNoteId, branchToMove.noteId, branchId);

    if (!validationResult.success) {
        return [200, validationResult];
    }

    const originalBeforeNotePosition = beforeBranch.notePosition;

    // we don't change utcDateModified, so other changes are prioritized in case of conflict
    // also we would have to sync all those modified branches otherwise hash checks would fail

    sql.execute("UPDATE branches SET notePosition = notePosition + 10 WHERE parentNoteId = ? AND notePosition >= ? AND isDeleted = 0", [beforeBranch.parentNoteId, originalBeforeNotePosition]);

    // also need to update becca positions
    const parentNote = becca.getNoteOrThrow(beforeBranch.parentNoteId);

    for (const childBranch of parentNote.getChildBranches()) {
        if (childBranch.notePosition >= originalBeforeNotePosition) {
            childBranch.notePosition += 10;
        }
    }

    if (branchToMove.parentNoteId === beforeBranch.parentNoteId) {
        branchToMove.notePosition = originalBeforeNotePosition;
        branchToMove.save();
    } else {
        const newBranch = branchToMove.createClone(beforeBranch.parentNoteId, originalBeforeNotePosition);
        newBranch.save();

        branchToMove.markAsDeleted();
    }

    treeService.sortNotesIfNeeded(parentNote.noteId);

    // if sorting is not needed, then still the ordering might have changed above manually
    entityChangesService.putNoteReorderingEntityChange(parentNote.noteId);

    log.info(`Moved note ${branchToMove.noteId}, branch ${branchId} before note ${beforeBranch.noteId}, branch ${beforeBranchId}`);

    return { success: true };
}

function moveBranchAfterNote(req: Request) {
    const { branchId, afterBranchId } = req.params;

    const branchToMove = becca.getBranchOrThrow(branchId);
    const afterNote = becca.getBranchOrThrow(afterBranchId);

    const validationResult = treeService.validateParentChild(afterNote.parentNoteId, branchToMove.noteId, branchId);

    if (!validationResult.success) {
        return [200, validationResult];
    }

    const originalAfterNotePosition = afterNote.notePosition;

    // we don't change utcDateModified, so other changes are prioritized in case of conflict
    // also we would have to sync all those modified branches otherwise hash checks would fail
    sql.execute("UPDATE branches SET notePosition = notePosition + 10 WHERE parentNoteId = ? AND notePosition > ? AND isDeleted = 0", [afterNote.parentNoteId, originalAfterNotePosition]);

    // also need to update becca positions
    const parentNote = becca.getNoteOrThrow(afterNote.parentNoteId);

    for (const childBranch of parentNote.getChildBranches()) {
        if (childBranch.notePosition > originalAfterNotePosition) {
            childBranch.notePosition += 10;
        }
    }

    const movedNotePosition = originalAfterNotePosition + 10;

    if (branchToMove.parentNoteId === afterNote.parentNoteId) {
        branchToMove.notePosition = movedNotePosition;
        branchToMove.save();
    } else {
        const newBranch = branchToMove.createClone(afterNote.parentNoteId, movedNotePosition);
        newBranch.save();

        branchToMove.markAsDeleted();
    }

    treeService.sortNotesIfNeeded(parentNote.noteId);

    // if sorting is not needed, then still the ordering might have changed above manually
    entityChangesService.putNoteReorderingEntityChange(parentNote.noteId);

    log.info(`Moved note ${branchToMove.noteId}, branch ${branchId} after note ${afterNote.noteId}, branch ${afterBranchId}`);

    return { success: true };
}

function setExpanded(req: Request) {
    const { branchId } = req.params;
    const expanded = parseInt(req.params.expanded);

    if (branchId !== "none_root") {
        sql.execute("UPDATE branches SET isExpanded = ? WHERE branchId = ?", [expanded, branchId]);
        // we don't sync expanded label
        // also this does not trigger updates to the frontend, this would trigger too many reloads

        const branch = becca.branches[branchId];

        if (branch) {
            branch.isExpanded = !!expanded;
        }

        eventService.emit(eventService.ENTITY_CHANGED, {
            entityName: "branches",
            entity: branch
        });
    }
}

function setExpandedForSubtree(req: Request) {
    const { branchId } = req.params;
    const expanded = parseInt(req.params.expanded);

    let branchIds = sql.getColumn<string>(
        `
        WITH RECURSIVE
        tree(branchId, noteId) AS (
            SELECT branchId, noteId FROM branches WHERE branchId = ?
            UNION
            SELECT branches.branchId, branches.noteId FROM branches
                JOIN tree ON branches.parentNoteId = tree.noteId
            WHERE branches.isDeleted = 0
        )
        SELECT branchId FROM tree`,
        [branchId]
    );

    // root is always expanded
    branchIds = branchIds.filter((branchId) => branchId !== "none_root");

    sql.executeMany(/*sql*/`UPDATE branches SET isExpanded = ${expanded} WHERE branchId IN (???)`, branchIds);

    for (const branchId of branchIds) {
        const branch = becca.branches[branchId];

        if (branch) {
            branch.isExpanded = !!expanded;
        }
    }

    return {
        branchIds
    };
}

/**
 * @swagger
 * /api/branches/{branchId}:
 *   delete:
 *     summary: Delete branch (note clone)
 *     operationId: branches-delete
 *     parameters:
 *       - name: branchId
 *         in: path
 *         required: true
 *         schema:
 *           $ref: "#/components/schemas/BranchId"
 *       - name: taskId
 *         in: query
 *         required: true
 *         schema:
 *           type: string
 *         description: Task group identifier
 *       - name: eraseNotes
 *         in: query
 *         schema:
 *           type: boolean
 *         required: false
 *         description: Whether to erase the note immediately
 *       - name: last
 *         in: query
 *         schema:
 *           type: boolean
 *         required: true
 *         description: Whether this is the last request of this task group
 *     responses:
 *       '200':
 *         description: Branch successfully deleted
 *         content:
 *           application/json:
 *             schema:
 *               type: object
 *               properties:
 *                 noteDeleted:
 *                   type: boolean
 *                   description: Whether the last note clone was deleted
 *     security:
 *       - session: []
 *     tags: ["data"]
 */
function deleteBranch(req: Request) {
    const last = req.query.last === "true";
    const eraseNotes = req.query.eraseNotes === "true";
    const branch = becca.getBranchOrThrow(req.params.branchId);

    const taskContext = TaskContext.getInstance(req.query.taskId as string, "deleteNotes");

    const deleteId = utils.randomString(10);
    let noteDeleted;

    if (eraseNotes) {
        // erase automatically means deleting all clones + note itself
        branch.getNote().deleteNote(deleteId, taskContext);
        eraseService.eraseNotesWithDeleteId(deleteId);
        noteDeleted = true;
    } else {
        noteDeleted = branch.deleteBranch(deleteId, taskContext);
    }

    if (last) {
        taskContext.taskSucceeded();
    }

    return {
        noteDeleted: noteDeleted
    };
}

function setPrefix(req: Request) {
    const branchId = req.params.branchId;
    //TriliumNextTODO: req.body arrives as string, so req.body.prefix will be undefined – did the code below ever even work?
    const prefix = utils.isEmptyOrWhitespace(req.body.prefix) ? null : req.body.prefix;

    const branch = becca.getBranchOrThrow(branchId);
    branch.prefix = prefix;
    branch.save();
}

export default {
    moveBranchToParent,
    moveBranchBeforeNote,
    moveBranchAfterNote,
    setExpanded,
    setExpandedForSubtree,
    deleteBranch,
    setPrefix
};
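A hypothetical call to the documented DELETE /api/branches/{branchId} endpoint; the branchId and taskId values are made up, the query parameters come from the swagger block above:

// Illustrative sketch only (session auth assumed)
const res = await fetch(`/api/branches/someBranchId?taskId=someTaskId&eraseNotes=false&last=true`, {
    method: "DELETE"
});
const { noteDeleted } = await res.json();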
49
apps/server/src/routes/api/bulk_action.ts
Normal file
@@ -0,0 +1,49 @@
import type { Request } from "express";
import becca from "../../becca/becca.js";
import bulkActionService from "../../services/bulk_actions.js";

function execute(req: Request) {
    const { noteIds, includeDescendants } = req.body;

    const affectedNoteIds = getAffectedNoteIds(noteIds, includeDescendants);

    const bulkActionNote = becca.getNoteOrThrow("_bulkAction");

    bulkActionService.executeActions(bulkActionNote, affectedNoteIds);
}

function getAffectedNoteCount(req: Request) {
    const { noteIds, includeDescendants } = req.body;

    const affectedNoteIds = getAffectedNoteIds(noteIds, includeDescendants);

    return {
        affectedNoteCount: affectedNoteIds.size
    };
}

function getAffectedNoteIds(noteIds: string[], includeDescendants: boolean) {
    const affectedNoteIds = new Set<string>();

    for (const noteId of noteIds) {
        const note = becca.getNote(noteId);

        if (!note) {
            continue;
        }

        affectedNoteIds.add(noteId);

        if (includeDescendants) {
            for (const descendantNoteId of note.getDescendantNoteIds()) {
                affectedNoteIds.add(descendantNoteId);
            }
        }
    }
    return affectedNoteIds;
}

export default {
    execute,
    getAffectedNoteCount
};
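A sketch of the request body both handlers above destructure; the note IDs are hypothetical and the actions themselves are read from the _bulkAction note, not from this payload:

// Illustrative req.body for execute / getAffectedNoteCount (IDs are made up)
const bulkActionBody = {
    noteIds: ["noteIdA", "noteIdB"],
    includeDescendants: true
};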
231
apps/server/src/routes/api/clipper.ts
Normal file
@@ -0,0 +1,231 @@
import type { Request } from "express";
import jsdom from "jsdom";
import path from "path";

import type BNote from "../../becca/entities/bnote.js";
import ValidationError from "../../errors/validation_error.js";
import appInfo from "../../services/app_info.js";
import attributeFormatter from "../../services/attribute_formatter.js";
import attributeService from "../../services/attributes.js";
import cloneService from "../../services/cloning.js";
import dateNoteService from "../../services/date_notes.js";
import dateUtils from "../../services/date_utils.js";
import htmlSanitizer from "../../services/html_sanitizer.js";
import imageService from "../../services/image.js";
import log from "../../services/log.js";
import noteService from "../../services/notes.js";
import utils from "../../services/utils.js";
import ws from "../../services/ws.js";
const { JSDOM } = jsdom;

interface Image {
    src: string;
    dataUrl: string;
    imageId: string;
}

async function addClipping(req: Request) {
    // if a note under the clipperInbox has the same 'pageUrl' attribute,
    // add the content to that note and clone it under today's inbox
    // otherwise just create a new note under today's inbox
    const { title, content, images } = req.body;
    const clipType = "clippings";

    const clipperInbox = await getClipperInboxNote();

    const pageUrl = htmlSanitizer.sanitizeUrl(req.body.pageUrl);
    let clippingNote = findClippingNote(clipperInbox, pageUrl, clipType);

    if (!clippingNote) {
        clippingNote = noteService.createNewNote({
            parentNoteId: clipperInbox.noteId,
            title: title,
            content: "",
            type: "text"
        }).note;

        clippingNote.setLabel("clipType", "clippings");
        clippingNote.setLabel("pageUrl", pageUrl);
        clippingNote.setLabel("iconClass", "bx bx-globe");
    }

    const rewrittenContent = processContent(images, clippingNote, content);

    const existingContent = clippingNote.getContent();
    if (typeof existingContent !== "string") {
        throw new ValidationError("Invalid note content type.");
    }

    clippingNote.setContent(`${existingContent}${existingContent.trim() ? "<br>" : ""}${rewrittenContent}`);

    // TODO: Is parentNoteId ever defined?
    if ((clippingNote as any).parentNoteId !== clipperInbox.noteId) {
        cloneService.cloneNoteToParentNote(clippingNote.noteId, clipperInbox.noteId);
    }

    return {
        noteId: clippingNote.noteId
    };
}

function findClippingNote(clipperInboxNote: BNote, pageUrl: string, clipType: string | null) {
    if (!pageUrl) {
        return null;
    }

    const notes = clipperInboxNote.searchNotesInSubtree(
        attributeFormatter.formatAttrForSearch(
            {
                type: "label",
                name: "pageUrl",
                value: pageUrl
            },
            true
        )
    );

    return clipType ? notes.find((note) => note.getOwnedLabelValue("clipType") === clipType) : notes[0];
}

async function getClipperInboxNote() {
    let clipperInbox = attributeService.getNoteWithLabel("clipperInbox");

    if (!clipperInbox) {
        clipperInbox = await dateNoteService.getDayNote(dateUtils.localNowDate());
    }

    return clipperInbox;
}

async function createNote(req: Request) {
    const { content, images, labels } = req.body;

    const clipType = htmlSanitizer.sanitize(req.body.clipType);
    const pageUrl = htmlSanitizer.sanitizeUrl(req.body.pageUrl);

    const trimmedTitle = (typeof req.body.title === "string") ? req.body.title.trim() : "";
    const title = trimmedTitle || `Clipped note from ${pageUrl}`;

    const clipperInbox = await getClipperInboxNote();
    let note = findClippingNote(clipperInbox, pageUrl, clipType);

    if (!note) {
        note = noteService.createNewNote({
            parentNoteId: clipperInbox.noteId,
            title,
            content: "",
            type: "text"
        }).note;

        note.setLabel("clipType", clipType);

        if (pageUrl) {
            note.setLabel("pageUrl", pageUrl);
            note.setLabel("iconClass", "bx bx-globe");
        }
    }

    if (labels) {
        for (const labelName in labels) {
            const labelValue = htmlSanitizer.sanitize(labels[labelName]);
            note.setLabel(labelName, labelValue);
        }
    }

    const existingContent = note.getContent();
    if (typeof existingContent !== "string") {
        throw new ValidationError("Invalid note content type.");
    }
    const rewrittenContent = processContent(images, note, content);
    const newContent = `${existingContent}${existingContent.trim() ? "<br/>" : ""}${rewrittenContent}`;
    note.setContent(newContent);

    noteService.asyncPostProcessContent(note, newContent); // to mark attachments as used

    return {
        noteId: note.noteId
    };
}

function processContent(images: Image[], note: BNote, content: string) {
    let rewrittenContent = htmlSanitizer.sanitize(content);

    if (images) {
        for (const { src, dataUrl, imageId } of images) {
            const filename = path.basename(src);

            if (!dataUrl || !dataUrl.startsWith("data:image")) {
                const excerpt = dataUrl ? dataUrl.substr(0, Math.min(100, dataUrl.length)) : "null";

                log.info(`Image could not be recognized as data URL: ${excerpt}`);
                continue;
            }

            const buffer = Buffer.from(dataUrl.split(",")[1], "base64");

            const attachment = imageService.saveImageToAttachment(note.noteId, buffer, filename, true);

            const encodedTitle = encodeURIComponent(attachment.title);
            const url = `api/attachments/${attachment.attachmentId}/image/${encodedTitle}`;

            log.info(`Replacing '${imageId}' with '${url}' in note '${note.noteId}'`);

            rewrittenContent = utils.replaceAll(rewrittenContent, imageId, url);
        }
    }

    // fallback if parsing/downloading images fails for some reason on the extension side (
    rewrittenContent = noteService.downloadImages(note.noteId, rewrittenContent);
    // Check if rewrittenContent contains at least one HTML tag
    if (!/<.+?>/.test(rewrittenContent)) {
        rewrittenContent = `<p>${rewrittenContent}</p>`;
    }
    // Create a JSDOM object from the existing HTML content
    const dom = new JSDOM(rewrittenContent);

    // Get the content inside the body tag and serialize it
    rewrittenContent = dom.window.document.body.innerHTML;

    return rewrittenContent;
}

function openNote(req: Request) {
    if (utils.isElectron) {
        ws.sendMessageToAllClients({
            type: "openNote",
            noteId: req.params.noteId
        });

        return {
            result: "ok"
        };
    } else {
        return {
            result: "open-in-browser"
        };
    }
}

function handshake() {
    return {
        appName: "trilium",
        protocolVersion: appInfo.clipperProtocolVersion
    };
}

async function findNotesByUrl(req: Request) {
    const pageUrl = req.params.noteUrl;
    const clipperInbox = await getClipperInboxNote();
    const foundPage = findClippingNote(clipperInbox, pageUrl, null);
    return {
        noteId: foundPage ? foundPage.noteId : null
    };
}

export default {
    createNote,
    addClipping,
    openNote,
    handshake,
    findNotesByUrl
};
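A sketch of the payload a clipper client would send to createNote above; the field names match what the handler reads from req.body, while the concrete values and data URL are invented for illustration:

// Illustrative req.body for createNote (values are made up)
const clipPayload = {
    title: "Example article",
    clipType: "note",
    pageUrl: "https://example.com/article",
    content: "<p>Clipped HTML content</p>",
    images: [{ src: "https://example.com/img.png", dataUrl: "data:image/png;base64,....", imageId: "img-1" }],
    labels: { source: "web-clipper" }
};
// Images with a valid data URL are stored as attachments and their imageId
// occurrences in the content are rewritten to api/attachments/... URLs.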
37
apps/server/src/routes/api/cloning.ts
Normal file
@@ -0,0 +1,37 @@
"use strict";

import type { Request } from "express";
import cloningService from "../../services/cloning.js";

function cloneNoteToBranch(req: Request) {
    const { noteId, parentBranchId } = req.params;
    const { prefix } = req.body;

    return cloningService.cloneNoteToBranch(noteId, parentBranchId, prefix);
}

function cloneNoteToParentNote(req: Request) {
    const { noteId, parentNoteId } = req.params;
    const { prefix } = req.body;

    return cloningService.cloneNoteToParentNote(noteId, parentNoteId, prefix);
}

function cloneNoteAfter(req: Request) {
    const { noteId, afterBranchId } = req.params;

    return cloningService.cloneNoteAfter(noteId, afterBranchId);
}

function toggleNoteInParent(req: Request) {
    const { noteId, parentNoteId, present } = req.params;

    return cloningService.toggleNoteInParent(present === "true", noteId, parentNoteId);
}

export default {
    cloneNoteToBranch,
    cloneNoteToParentNote,
    cloneNoteAfter,
    toggleNoteInParent
};
69
apps/server/src/routes/api/database.ts
Normal file
@@ -0,0 +1,69 @@
"use strict";

import sql from "../../services/sql.js";
import log from "../../services/log.js";
import backupService from "../../services/backup.js";
import anonymizationService from "../../services/anonymization.js";
import consistencyChecksService from "../../services/consistency_checks.js";
import type { Request } from "express";
import ValidationError from "../../errors/validation_error.js";
import sql_init from "../../services/sql_init.js";
import becca_loader from "../../becca/becca_loader.js";

function getExistingBackups() {
    return backupService.getExistingBackups();
}

async function backupDatabase() {
    return {
        backupFile: await backupService.backupNow("now")
    };
}

function vacuumDatabase() {
    sql.execute("VACUUM");

    log.info("Database has been vacuumed.");
}

function findAndFixConsistencyIssues() {
    consistencyChecksService.runOnDemandChecks(true);
}

async function rebuildIntegrationTestDatabase() {
    sql.rebuildIntegrationTestDatabase();
    sql_init.initializeDb();
    becca_loader.load();
}

function getExistingAnonymizedDatabases() {
    return anonymizationService.getExistingAnonymizedDatabases();
}

async function anonymize(req: Request) {
    if (req.params.type !== "full" && req.params.type !== "light") {
        throw new ValidationError("Invalid type provided.");
    }
    return await anonymizationService.createAnonymizedCopy(req.params.type);
}

function checkIntegrity() {
    const results = sql.getRows("PRAGMA integrity_check");

    log.info(`Integrity check result: ${JSON.stringify(results)}`);

    return {
        results
    };
}

export default {
    getExistingBackups,
    backupDatabase,
    vacuumDatabase,
    findAndFixConsistencyIssues,
    rebuildIntegrationTestDatabase,
    getExistingAnonymizedDatabases,
    anonymize,
    checkIntegrity
};
798
apps/server/src/routes/api/embeddings.ts
Normal file
@@ -0,0 +1,798 @@
|
||||
import options from "../../services/options.js";
|
||||
import vectorStore from "../../services/llm/embeddings/index.js";
|
||||
import providerManager from "../../services/llm/providers/providers.js";
|
||||
import indexService from "../../services/llm/index_service.js";
|
||||
import becca from "../../becca/becca.js";
|
||||
import type { Request, Response } from "express";
|
||||
import log from "../../services/log.js";
|
||||
import sql from "../../services/sql.js";
|
||||
|
||||
/**
|
||||
* @swagger
|
||||
* /api/llm/embeddings/similar/{noteId}:
|
||||
* get:
|
||||
* summary: Find similar notes based on a given note ID
|
||||
* operationId: embeddings-similar-by-note
|
||||
* parameters:
|
||||
* - name: noteId
|
||||
* in: path
|
||||
* required: true
|
||||
* schema:
|
||||
* type: string
|
||||
* - name: providerId
|
||||
* in: query
|
||||
* required: false
|
||||
* schema:
|
||||
* type: string
|
||||
* default: openai
|
||||
* description: Embedding provider ID
|
||||
* - name: modelId
|
||||
* in: query
|
||||
* required: false
|
||||
* schema:
|
||||
* type: string
|
||||
* default: text-embedding-3-small
|
||||
* description: Embedding model ID
|
||||
* - name: limit
|
||||
* in: query
|
||||
* required: false
|
||||
* schema:
|
||||
* type: integer
|
||||
* default: 10
|
||||
* description: Maximum number of similar notes to return
|
||||
* - name: threshold
|
||||
* in: query
|
||||
* required: false
|
||||
* schema:
|
||||
* type: number
|
||||
* format: float
|
||||
* default: 0.7
|
||||
* description: Similarity threshold (0.0-1.0)
|
||||
* responses:
|
||||
* '200':
|
||||
* description: List of similar notes
|
||||
* content:
|
||||
* application/json:
|
||||
* schema:
|
||||
* type: object
|
||||
* properties:
|
||||
* success:
|
||||
* type: boolean
|
||||
* similarNotes:
|
||||
* type: array
|
||||
* items:
|
||||
* type: object
|
||||
* properties:
|
||||
* noteId:
|
||||
* type: string
|
||||
* title:
|
||||
* type: string
|
||||
* similarity:
|
||||
* type: number
|
||||
* format: float
|
||||
* '400':
|
||||
* description: Invalid request parameters
|
||||
* '404':
|
||||
* description: Note not found
|
||||
* security:
|
||||
* - session: []
|
||||
* tags: ["llm"]
|
||||
*/
|
||||
async function findSimilarNotes(req: Request, res: Response) {
|
||||
const noteId = req.params.noteId;
|
||||
const providerId = req.query.providerId as string || 'openai';
|
||||
const modelId = req.query.modelId as string || 'text-embedding-3-small';
|
||||
const limit = parseInt(req.query.limit as string || '10', 10);
|
||||
const threshold = parseFloat(req.query.threshold as string || '0.7');
|
||||
|
||||
if (!noteId) {
|
||||
return [400, {
|
||||
success: false,
|
||||
message: "Note ID is required"
|
||||
}];
|
||||
}
|
||||
|
||||
const embedding = await vectorStore.getEmbeddingForNote(noteId, providerId, modelId);
|
||||
|
||||
if (!embedding) {
|
||||
// If no embedding exists for this note yet, generate one
|
||||
const note = becca.getNote(noteId);
|
||||
if (!note) {
|
||||
return [404, {
|
||||
success: false,
|
||||
message: "Note not found"
|
||||
}];
|
||||
}
|
||||
|
||||
const context = await vectorStore.getNoteEmbeddingContext(noteId);
|
||||
const provider = providerManager.getEmbeddingProvider(providerId);
|
||||
|
||||
if (!provider) {
|
||||
return [400, {
|
||||
success: false,
|
||||
message: `Embedding provider '${providerId}' not found`
|
||||
}];
|
||||
}
|
||||
|
||||
const newEmbedding = await provider.generateNoteEmbeddings(context);
|
||||
await vectorStore.storeNoteEmbedding(noteId, providerId, modelId, newEmbedding);
|
||||
|
||||
const similarNotes = await vectorStore.findSimilarNotes(
|
||||
newEmbedding, providerId, modelId, limit, threshold
|
||||
);
|
||||
|
||||
return {
|
||||
success: true,
|
||||
similarNotes
|
||||
};
|
||||
}
|
||||
|
||||
const similarNotes = await vectorStore.findSimilarNotes(
|
||||
embedding.embedding, providerId, modelId, limit, threshold
|
||||
);
|
||||
|
||||
return {
|
||||
success: true,
|
||||
similarNotes
|
||||
};
|
||||
}
|
||||
|
||||
/**
|
||||
* @swagger
|
||||
* /api/llm/embeddings/search:
|
||||
* post:
|
||||
* summary: Search for notes similar to provided text
|
||||
* operationId: embeddings-search-by-text
|
||||
* parameters:
|
||||
* - name: providerId
|
||||
* in: query
|
||||
* required: false
|
||||
* schema:
|
||||
* type: string
|
||||
* default: openai
|
||||
* description: Embedding provider ID
|
||||
* - name: modelId
|
||||
* in: query
|
||||
* required: false
|
||||
* schema:
|
||||
* type: string
|
||||
* default: text-embedding-3-small
|
||||
* description: Embedding model ID
|
||||
* - name: limit
|
||||
* in: query
|
||||
* required: false
|
||||
* schema:
|
||||
* type: integer
|
||||
* default: 10
|
||||
* description: Maximum number of similar notes to return
|
||||
* - name: threshold
|
||||
* in: query
|
||||
* required: false
|
||||
* schema:
|
||||
* type: number
|
||||
* format: float
|
||||
* default: 0.7
|
||||
* description: Similarity threshold (0.0-1.0)
|
||||
* requestBody:
|
||||
* required: true
|
||||
* content:
|
||||
* application/json:
|
||||
* schema:
|
||||
* type: object
|
||||
* properties:
|
||||
* text:
|
||||
* type: string
|
||||
* description: Text to search with
|
||||
* responses:
|
||||
* '200':
|
||||
* description: List of similar notes
|
||||
* content:
|
||||
* application/json:
|
||||
* schema:
|
||||
* type: object
|
||||
* properties:
|
||||
* success:
|
||||
* type: boolean
|
||||
* similarNotes:
|
||||
* type: array
|
||||
* items:
|
||||
* type: object
|
||||
* properties:
|
||||
* noteId:
|
||||
* type: string
|
||||
* title:
|
||||
* type: string
|
||||
* similarity:
|
||||
* type: number
|
||||
* format: float
|
||||
* '400':
|
||||
* description: Invalid request parameters
|
||||
* security:
|
||||
* - session: []
|
||||
* tags: ["llm"]
|
||||
*/
|
||||
async function searchByText(req: Request, res: Response) {
|
||||
const { text } = req.body;
|
||||
const providerId = req.query.providerId as string || 'openai';
|
||||
const modelId = req.query.modelId as string || 'text-embedding-3-small';
|
||||
const limit = parseInt(req.query.limit as string || '10', 10);
|
||||
const threshold = parseFloat(req.query.threshold as string || '0.7');
|
||||
|
||||
if (!text) {
|
||||
return [400, {
|
||||
success: false,
|
||||
message: "Search text is required"
|
||||
}];
|
||||
}
|
||||
|
||||
const provider = providerManager.getEmbeddingProvider(providerId);
|
||||
|
||||
if (!provider) {
|
||||
return [400, {
|
||||
success: false,
|
||||
message: `Embedding provider '${providerId}' not found`
|
||||
}];
|
||||
}
|
||||
|
||||
// Generate embedding for the search text
|
||||
const embedding = await provider.generateEmbeddings(text);
|
||||
|
||||
// Find similar notes
|
||||
const similarNotes = await vectorStore.findSimilarNotes(
|
||||
embedding, providerId, modelId, limit, threshold
|
||||
);
|
||||
|
||||
return {
|
||||
success: true,
|
||||
similarNotes
|
||||
};
|
||||
}
|
||||
|
||||
/**
|
||||
* @swagger
|
||||
* /api/llm/embeddings/providers:
|
||||
* get:
|
||||
* summary: Get available embedding providers
|
||||
* operationId: embeddings-get-providers
|
||||
* responses:
|
||||
* '200':
|
||||
* description: List of available embedding providers
|
||||
* content:
|
||||
* application/json:
|
||||
* schema:
|
||||
* type: object
|
||||
* properties:
|
||||
* success:
|
||||
* type: boolean
|
||||
* providers:
|
||||
* type: array
|
||||
* items:
|
||||
* type: object
|
||||
* properties:
|
||||
* id:
|
||||
* type: string
|
||||
* name:
|
||||
* type: string
|
||||
* isEnabled:
|
||||
* type: boolean
|
||||
* priority:
|
||||
* type: integer
|
||||
* config:
|
||||
* type: object
|
||||
* security:
|
||||
* - session: []
|
||||
* tags: ["llm"]
|
||||
*/
|
||||
async function getProviders(req: Request, res: Response) {
|
||||
const providerConfigs = await providerManager.getEmbeddingProviderConfigs();
|
||||
|
||||
return {
|
||||
success: true,
|
||||
providers: providerConfigs
|
||||
};
|
||||
}
|
||||
|
||||
/**
|
||||
* @swagger
|
||||
* /api/llm/embeddings/providers/{providerId}:
|
||||
* patch:
|
||||
* summary: Update embedding provider configuration
|
||||
* operationId: embeddings-update-provider
|
||||
* parameters:
|
||||
* - name: providerId
|
||||
* in: path
|
||||
* required: true
|
||||
* schema:
|
||||
* type: string
|
||||
* description: Provider ID to update
|
||||
* requestBody:
|
||||
* required: true
|
||||
* content:
|
||||
* application/json:
|
||||
* schema:
|
||||
* type: object
|
||||
* properties:
|
||||
* enabled:
|
||||
* type: boolean
|
||||
* description: Whether provider is enabled
|
||||
* priority:
|
||||
* type: integer
|
||||
* description: Priority order (lower is higher priority)
|
||||
* config:
|
||||
* type: object
|
||||
* description: Provider-specific configuration
|
||||
* responses:
|
||||
* '200':
|
||||
* description: Provider updated successfully
|
||||
* content:
|
||||
* application/json:
|
||||
* schema:
|
||||
* type: object
|
||||
* properties:
|
||||
* success:
|
||||
* type: boolean
|
||||
* '400':
|
||||
* description: Invalid provider ID or configuration
|
||||
* security:
|
||||
* - session: []
|
||||
* tags: ["llm"]
|
||||
*/
|
||||
async function updateProvider(req: Request, res: Response) {
|
||||
const { providerId } = req.params;
|
||||
const { isEnabled, priority, config } = req.body;
|
||||
|
||||
const success = await providerManager.updateEmbeddingProviderConfig(
|
||||
providerId, isEnabled, priority
|
||||
);
|
||||
|
||||
if (!success) {
|
||||
return [404, {
|
||||
success: false,
|
||||
message: "Provider not found"
|
||||
}];
|
||||
}
|
||||
|
||||
return {
|
||||
success: true
|
||||
};
|
||||
}
|
||||
|
||||
/**
|
||||
* @swagger
|
||||
* /api/llm/embeddings/reprocess:
|
||||
* post:
|
||||
* summary: Reprocess embeddings for all notes
|
||||
* operationId: embeddings-reprocess-all
|
||||
* requestBody:
|
||||
* required: true
|
||||
* content:
|
||||
* application/json:
|
||||
* schema:
|
||||
* type: object
|
||||
* properties:
|
||||
* providerId:
|
||||
* type: string
|
||||
* description: Provider ID to use for reprocessing
|
||||
* modelId:
|
||||
* type: string
|
||||
* description: Model ID to use for reprocessing
|
||||
* forceReprocess:
|
||||
* type: boolean
|
||||
* description: Whether to reprocess notes that already have embeddings
|
||||
* responses:
|
||||
* '200':
|
||||
* description: Reprocessing started
|
||||
* content:
|
||||
* application/json:
|
||||
* schema:
|
||||
* type: object
|
||||
* properties:
|
||||
* success:
|
||||
* type: boolean
|
||||
* jobId:
|
||||
* type: string
|
||||
* message:
|
||||
* type: string
|
||||
* '400':
|
||||
* description: Invalid provider ID or configuration
|
||||
* security:
|
||||
* - session: []
|
||||
* tags: ["llm"]
|
||||
*/
|
||||
async function reprocessAllNotes(req: Request, res: Response) {
|
||||
// Import cls
|
||||
const cls = (await import("../../services/cls.js")).default;
|
||||
|
||||
// Start the reprocessing operation in the background
|
||||
setTimeout(async () => {
|
||||
try {
|
||||
// Wrap the operation in cls.init to ensure proper context
|
||||
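// Note (assumption): if cls.init does not await the async callback, a rejection from
// reprocessAllNotes may bypass this try/catch; awaiting its result here would make the
// error handling reliable.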
cls.init(async () => {
|
||||
await vectorStore.reprocessAllNotes();
|
||||
log.info("Embedding reprocessing completed successfully");
|
||||
});
|
||||
} catch (error: any) {
|
||||
log.error(`Error during background embedding reprocessing: ${error.message || "Unknown error"}`);
|
||||
}
|
||||
}, 0);
|
||||
|
||||
// Return the response data
|
||||
return {
|
||||
success: true,
|
||||
message: "Embedding reprocessing started in the background"
|
||||
};
|
||||
}
|
||||
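// Illustrative sketch (not part of this commit): the fire-and-forget pattern used above,
// extracted into a helper. Assumes `cls` and `log` are in scope as in this handler.
//
// function runInBackground(taskName: string, task: () => Promise<void>) {
//     setTimeout(async () => {
//         try {
//             cls.init(async () => {
//                 await task();
//                 log.info(`${taskName} completed successfully`);
//             });
//         } catch (error: any) {
//             log.error(`Error during background ${taskName}: ${error.message || "Unknown error"}`);
//         }
//     }, 0);
// }
//
// Usage: runInBackground("embedding reprocessing", () => vectorStore.reprocessAllNotes());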
|
||||
/**
|
||||
* @swagger
|
||||
* /api/llm/embeddings/queue-status:
|
||||
* get:
|
||||
* summary: Get status of the embedding processing queue
|
||||
* operationId: embeddings-queue-status
|
||||
* parameters:
|
||||
* - name: jobId
|
||||
* in: query
|
||||
* required: false
|
||||
* schema:
|
||||
* type: string
|
||||
* description: Optional job ID to get status for a specific processing job
|
||||
* responses:
|
||||
* '200':
|
||||
* description: Queue status information
|
||||
* content:
|
||||
* application/json:
|
||||
* schema:
|
||||
* type: object
|
||||
* properties:
|
||||
* success:
|
||||
* type: boolean
|
||||
* status:
|
||||
* type: string
|
||||
* enum: [idle, processing, paused]
|
||||
* progress:
|
||||
* type: number
|
||||
* format: float
|
||||
* description: Progress percentage (0-100)
|
||||
* details:
|
||||
* type: object
|
||||
* security:
|
||||
* - session: []
|
||||
* tags: ["llm"]
|
||||
*/
|
||||
async function getQueueStatus(req: Request, res: Response) {
|
||||
// Use the imported sql instead of requiring it
|
||||
const queueCount = await sql.getValue(
|
||||
"SELECT COUNT(*) FROM embedding_queue"
|
||||
);
|
||||
|
||||
const failedCount = await sql.getValue(
|
||||
"SELECT COUNT(*) FROM embedding_queue WHERE attempts > 0"
|
||||
);
|
||||
|
||||
const totalEmbeddingsCount = await sql.getValue(
|
||||
"SELECT COUNT(*) FROM note_embeddings"
|
||||
);
|
||||
|
||||
return {
|
||||
success: true,
|
||||
status: {
|
||||
queueCount,
|
||||
failedCount,
|
||||
totalEmbeddingsCount
|
||||
}
|
||||
};
|
||||
}
|
||||
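// Illustrative sketch (not part of this commit): consuming the queue-status payload above.
// The endpoint path and field names mirror the swagger block; the fetch wrapper is an assumption.
//
// async function logEmbeddingQueueStatus() {
//     const resp = await fetch("/api/llm/embeddings/queue-status");
//     const { status } = await resp.json();
//     const { queueCount, failedCount, totalEmbeddingsCount } = status;
//     console.log(`${queueCount} queued, ${failedCount} with failed attempts, ${totalEmbeddingsCount} embeddings stored`);
// }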
|
||||
/**
|
||||
* @swagger
|
||||
* /api/llm/embeddings/stats:
|
||||
* get:
|
||||
* summary: Get embedding statistics
|
||||
* operationId: embeddings-stats
|
||||
* responses:
|
||||
* '200':
|
||||
* description: Embedding statistics
|
||||
* content:
|
||||
* application/json:
|
||||
* schema:
|
||||
* type: object
|
||||
* properties:
|
||||
* success:
|
||||
* type: boolean
|
||||
* stats:
|
||||
* type: object
|
||||
* properties:
|
||||
* totalEmbeddings:
|
||||
* type: integer
|
||||
* providers:
|
||||
* type: object
|
||||
* modelCounts:
|
||||
* type: object
|
||||
* lastUpdated:
|
||||
* type: string
|
||||
* format: date-time
|
||||
* security:
|
||||
* - session: []
|
||||
* tags: ["llm"]
|
||||
*/
|
||||
async function getEmbeddingStats(req: Request, res: Response) {
|
||||
const stats = await vectorStore.getEmbeddingStats();
|
||||
|
||||
return {
|
||||
success: true,
|
||||
stats
|
||||
};
|
||||
}
|
||||
|
||||
/**
|
||||
* @swagger
|
||||
* /api/llm/embeddings/failed:
|
||||
* get:
|
||||
* summary: Get list of notes that failed embedding generation
|
||||
* operationId: embeddings-failed-notes
|
||||
* responses:
|
||||
* '200':
|
||||
* description: List of failed notes
|
||||
* content:
|
||||
* application/json:
|
||||
* schema:
|
||||
* type: object
|
||||
* properties:
|
||||
* success:
|
||||
* type: boolean
|
||||
* failedNotes:
|
||||
* type: array
|
||||
* items:
|
||||
* type: object
|
||||
* properties:
|
||||
* noteId:
|
||||
* type: string
|
||||
* title:
|
||||
* type: string
|
||||
* error:
|
||||
* type: string
|
||||
* failedAt:
|
||||
* type: string
|
||||
* format: date-time
|
||||
* security:
|
||||
* - session: []
|
||||
* tags: ["llm"]
|
||||
*/
|
||||
async function getFailedNotes(req: Request, res: Response) {
|
||||
const limit = parseInt(req.query.limit as string || '100', 10);
|
||||
const failedNotes = await vectorStore.getFailedEmbeddingNotes(limit);
|
||||
|
||||
// No need to fetch note titles here anymore as they're already included in the response
|
||||
return {
|
||||
success: true,
|
||||
failedNotes: failedNotes
|
||||
};
|
||||
}
|
||||
|
||||
/**
|
||||
* @swagger
|
||||
* /api/llm/embeddings/retry/{noteId}:
|
||||
* post:
|
||||
* summary: Retry generating embeddings for a failed note
|
||||
* operationId: embeddings-retry-note
|
||||
* parameters:
|
||||
* - name: noteId
|
||||
* in: path
|
||||
* required: true
|
||||
* schema:
|
||||
* type: string
|
||||
* description: Note ID to retry
|
||||
* - name: providerId
|
||||
* in: query
|
||||
* required: false
|
||||
* schema:
|
||||
* type: string
|
||||
* description: Provider ID to use (defaults to configured default)
|
||||
* - name: modelId
|
||||
* in: query
|
||||
* required: false
|
||||
* schema:
|
||||
* type: string
|
||||
* description: Model ID to use (defaults to provider default)
|
||||
* responses:
|
||||
* '200':
|
||||
* description: Retry result
|
||||
* content:
|
||||
* application/json:
|
||||
* schema:
|
||||
* type: object
|
||||
* properties:
|
||||
* success:
|
||||
* type: boolean
|
||||
* message:
|
||||
* type: string
|
||||
* '400':
|
||||
* description: Invalid request
|
||||
* '404':
|
||||
* description: Note not found
|
||||
* security:
|
||||
* - session: []
|
||||
* tags: ["llm"]
|
||||
*/
|
||||
async function retryFailedNote(req: Request, res: Response) {
|
||||
const { noteId } = req.params;
|
||||
|
||||
if (!noteId) {
|
||||
return [400, {
|
||||
success: false,
|
||||
message: "Note ID is required"
|
||||
}];
|
||||
}
|
||||
|
||||
const success = await vectorStore.retryFailedEmbedding(noteId);
|
||||
|
||||
if (!success) {
|
||||
return [404, {
|
||||
success: false,
|
||||
message: "Failed note not found or note is not marked as failed"
|
||||
}];
|
||||
}
|
||||
|
||||
return {
|
||||
success: true,
|
||||
message: "Note queued for retry"
|
||||
};
|
||||
}
|
||||
|
||||
/**
|
||||
* @swagger
|
||||
* /api/llm/embeddings/retry-all-failed:
|
||||
* post:
|
||||
* summary: Retry generating embeddings for all failed notes
|
||||
* operationId: embeddings-retry-all-failed
|
||||
* requestBody:
|
||||
* required: false
|
||||
* content:
|
||||
* application/json:
|
||||
* schema:
|
||||
* type: object
|
||||
* properties:
|
||||
* providerId:
|
||||
* type: string
|
||||
* description: Provider ID to use (defaults to configured default)
|
||||
* modelId:
|
||||
* type: string
|
||||
* description: Model ID to use (defaults to provider default)
|
||||
* responses:
|
||||
* '200':
|
||||
* description: Retry started
|
||||
* content:
|
||||
* application/json:
|
||||
* schema:
|
||||
* type: object
|
||||
* properties:
|
||||
* success:
|
||||
* type: boolean
|
||||
* message:
|
||||
* type: string
|
||||
* jobId:
|
||||
* type: string
|
||||
* security:
|
||||
* - session: []
|
||||
* tags: ["llm"]
|
||||
*/
|
||||
async function retryAllFailedNotes(req: Request, res: Response) {
|
||||
const count = await vectorStore.retryAllFailedEmbeddings();
|
||||
|
||||
return {
|
||||
success: true,
|
||||
message: `${count} failed notes queued for retry`
|
||||
};
|
||||
}
|
||||
|
||||
/**
|
||||
* @swagger
|
||||
* /api/llm/embeddings/rebuild-index:
|
||||
* post:
|
||||
* summary: Rebuild the vector store index
|
||||
* operationId: embeddings-rebuild-index
|
||||
* responses:
|
||||
* '200':
|
||||
* description: Rebuild started
|
||||
* content:
|
||||
* application/json:
|
||||
* schema:
|
||||
* type: object
|
||||
* properties:
|
||||
* success:
|
||||
* type: boolean
|
||||
* message:
|
||||
* type: string
|
||||
* jobId:
|
||||
* type: string
|
||||
* security:
|
||||
* - session: []
|
||||
* tags: ["llm"]
|
||||
*/
|
||||
async function rebuildIndex(req: Request, res: Response) {
|
||||
// Start the index rebuilding operation in the background
|
||||
setTimeout(async () => {
|
||||
try {
|
||||
await indexService.startFullIndexing(true);
|
||||
log.info("Index rebuilding completed successfully");
|
||||
} catch (error: any) {
|
||||
log.error(`Error during background index rebuilding: ${error.message || "Unknown error"}`);
|
||||
}
|
||||
}, 0);
|
||||
|
||||
// Return the response data
|
||||
return {
|
||||
success: true,
|
||||
message: "Index rebuilding started in the background"
|
||||
};
|
||||
}
|
||||
|
||||
/**
|
||||
* @swagger
|
||||
* /api/llm/embeddings/index-rebuild-status:
|
||||
* get:
|
||||
* summary: Get status of the vector index rebuild operation
|
||||
* operationId: embeddings-rebuild-status
|
||||
* parameters:
|
||||
* - name: jobId
|
||||
* in: query
|
||||
* required: false
|
||||
* schema:
|
||||
* type: string
|
||||
* description: Optional job ID to get status for a specific rebuild job
|
||||
* responses:
|
||||
* '200':
|
||||
* description: Rebuild status information
|
||||
* content:
|
||||
* application/json:
|
||||
* schema:
|
||||
* type: object
|
||||
* properties:
|
||||
* success:
|
||||
* type: boolean
|
||||
* status:
|
||||
* type: string
|
||||
* enum: [idle, in_progress, completed, failed]
|
||||
* progress:
|
||||
* type: number
|
||||
* format: float
|
||||
* description: Progress percentage (0-100)
|
||||
* message:
|
||||
* type: string
|
||||
* details:
|
||||
* type: object
|
||||
* properties:
|
||||
* startTime:
|
||||
* type: string
|
||||
* format: date-time
|
||||
* processed:
|
||||
* type: integer
|
||||
* total:
|
||||
* type: integer
|
||||
* security:
|
||||
* - session: []
|
||||
* tags: ["llm"]
|
||||
*/
|
||||
async function getIndexRebuildStatus(req: Request, res: Response) {
|
||||
const status = indexService.getIndexRebuildStatus();
|
||||
|
||||
return {
|
||||
success: true,
|
||||
status
|
||||
};
|
||||
}
|
||||
|
||||
export default {
|
||||
findSimilarNotes,
|
||||
searchByText,
|
||||
getProviders,
|
||||
updateProvider,
|
||||
reprocessAllNotes,
|
||||
getQueueStatus,
|
||||
getEmbeddingStats,
|
||||
getFailedNotes,
|
||||
retryFailedNote,
|
||||
retryAllFailedNotes,
|
||||
rebuildIndex,
|
||||
getIndexRebuildStatus
|
||||
};
|
||||
29
apps/server/src/routes/api/etapi_tokens.ts
Normal file
29
apps/server/src/routes/api/etapi_tokens.ts
Normal file
@@ -0,0 +1,29 @@
import type { Request } from "express";
import etapiTokenService from "../../services/etapi_tokens.js";

function getTokens() {
const tokens = etapiTokenService.getTokens();

tokens.sort((a, b) => (a.utcDateCreated < b.utcDateCreated ? -1 : 1));

return tokens;
}

function createToken(req: Request) {
return etapiTokenService.createToken(req.body.tokenName);
}

function patchToken(req: Request) {
etapiTokenService.renameToken(req.params.etapiTokenId, req.body.name);
}

function deleteToken(req: Request) {
etapiTokenService.deleteToken(req.params.etapiTokenId);
}

export default {
getTokens,
createToken,
patchToken,
deleteToken
};
54
apps/server/src/routes/api/export.ts
Normal file
54
apps/server/src/routes/api/export.ts
Normal file
@@ -0,0 +1,54 @@
|
||||
"use strict";
|
||||
|
||||
import zipExportService from "../../services/export/zip.js";
|
||||
import singleExportService from "../../services/export/single.js";
|
||||
import opmlExportService from "../../services/export/opml.js";
|
||||
import becca from "../../becca/becca.js";
|
||||
import TaskContext from "../../services/task_context.js";
|
||||
import log from "../../services/log.js";
|
||||
import NotFoundError from "../../errors/not_found_error.js";
|
||||
import type { Request, Response } from "express";
|
||||
import ValidationError from "../../errors/validation_error.js";
|
||||
import { safeExtractMessageAndStackFromError } from "../../services/utils.js";
|
||||
|
||||
function exportBranch(req: Request, res: Response) {
|
||||
const { branchId, type, format, version, taskId } = req.params;
|
||||
const branch = becca.getBranch(branchId);
|
||||
|
||||
if (!branch) {
|
||||
const message = `Cannot export branch '${branchId}' since it does not exist.`;
|
||||
log.error(message);
|
||||
|
||||
res.setHeader("Content-Type", "text/plain").status(500).send(message);
|
||||
return;
|
||||
}
|
||||
|
||||
const taskContext = new TaskContext(taskId, "export");
|
||||
|
||||
try {
|
||||
if (type === "subtree" && (format === "html" || format === "markdown")) {
|
||||
zipExportService.exportToZip(taskContext, branch, format, res);
|
||||
} else if (type === "single") {
|
||||
if (format !== "html" && format !== "markdown") {
|
||||
throw new ValidationError("Invalid export type.");
|
||||
}
|
||||
singleExportService.exportSingleNote(taskContext, branch, format, res);
|
||||
} else if (format === "opml") {
|
||||
opmlExportService.exportToOpml(taskContext, branch, version, res);
|
||||
} else {
|
||||
throw new NotFoundError(`Unrecognized export format '${format}'`);
|
||||
}
|
||||
} catch (e: unknown) {
|
||||
const [errMessage, errStack] = safeExtractMessageAndStackFromError(e);
|
||||
const message = `Export failed with following error: '${errMessage}'. More details might be in the logs.`;
|
||||
taskContext.reportError(message);
|
||||
|
||||
log.error(errMessage + errStack);
|
||||
|
||||
res.setHeader("Content-Type", "text/plain").status(500).send(message);
|
||||
}
|
||||
}
|
||||
|
||||
export default {
|
||||
exportBranch
|
||||
};
|
||||
261
apps/server/src/routes/api/files.ts
Normal file
261
apps/server/src/routes/api/files.ts
Normal file
@@ -0,0 +1,261 @@
|
||||
"use strict";
|
||||
|
||||
import protectedSessionService from "../../services/protected_session.js";
|
||||
import utils from "../../services/utils.js";
|
||||
import log from "../../services/log.js";
|
||||
import noteService from "../../services/notes.js";
|
||||
import tmp from "tmp";
|
||||
import fs from "fs";
|
||||
import { Readable } from "stream";
|
||||
import chokidar from "chokidar";
|
||||
import ws from "../../services/ws.js";
|
||||
import becca from "../../becca/becca.js";
|
||||
import ValidationError from "../../errors/validation_error.js";
|
||||
import type { Request, Response } from "express";
|
||||
import type BNote from "../../becca/entities/bnote.js";
|
||||
import type BAttachment from "../../becca/entities/battachment.js";
|
||||
import dataDirs from "../../services/data_dir.js";
|
||||
|
||||
function updateFile(req: Request) {
|
||||
const note = becca.getNoteOrThrow(req.params.noteId);
|
||||
|
||||
const file = req.file;
|
||||
if (!file) {
|
||||
return {
|
||||
uploaded: false,
|
||||
message: `Missing file.`
|
||||
};
|
||||
}
|
||||
|
||||
note.saveRevision();
|
||||
|
||||
note.mime = file.mimetype.toLowerCase();
|
||||
note.save();
|
||||
|
||||
note.setContent(file.buffer);
|
||||
|
||||
note.setLabel("originalFileName", file.originalname);
|
||||
|
||||
noteService.asyncPostProcessContent(note, file.buffer);
|
||||
|
||||
return {
|
||||
uploaded: true
|
||||
};
|
||||
}
|
||||
|
||||
function updateAttachment(req: Request) {
|
||||
const attachment = becca.getAttachmentOrThrow(req.params.attachmentId);
|
||||
const file = req.file;
|
||||
if (!file) {
|
||||
return {
|
||||
uploaded: false,
|
||||
message: `Missing file.`
|
||||
};
|
||||
}
|
||||
|
||||
attachment.getNote().saveRevision();
|
||||
|
||||
attachment.mime = file.mimetype.toLowerCase();
|
||||
attachment.setContent(file.buffer, { forceSave: true });
|
||||
|
||||
return {
|
||||
uploaded: true
|
||||
};
|
||||
}
|
||||
|
||||
function downloadData(noteOrAttachment: BNote | BAttachment, res: Response, contentDisposition: boolean) {
|
||||
if (noteOrAttachment.isProtected && !protectedSessionService.isProtectedSessionAvailable()) {
|
||||
return res.status(401).send("Protected session not available");
|
||||
}
|
||||
|
||||
if (contentDisposition) {
|
||||
const fileName = noteOrAttachment.getFileName();
|
||||
|
||||
res.setHeader("Content-Disposition", utils.getContentDisposition(fileName));
|
||||
}
|
||||
|
||||
res.setHeader("Cache-Control", "no-cache, no-store, must-revalidate");
|
||||
res.setHeader("Content-Type", noteOrAttachment.mime);
|
||||
|
||||
res.send(noteOrAttachment.getContent());
|
||||
}
|
||||
|
||||
function downloadNoteInt(noteId: string, res: Response, contentDisposition = true) {
|
||||
const note = becca.getNote(noteId);
|
||||
|
||||
if (!note) {
|
||||
return res.setHeader("Content-Type", "text/plain").status(404).send(`Note '${noteId}' doesn't exist.`);
|
||||
}
|
||||
|
||||
return downloadData(note, res, contentDisposition);
|
||||
}
|
||||
|
||||
function downloadAttachmentInt(attachmentId: string, res: Response, contentDisposition = true) {
|
||||
const attachment = becca.getAttachment(attachmentId);
|
||||
|
||||
if (!attachment) {
|
||||
return res.setHeader("Content-Type", "text/plain").status(404).send(`Attachment '${attachmentId}' doesn't exist.`);
|
||||
}
|
||||
|
||||
return downloadData(attachment, res, contentDisposition);
|
||||
}
|
||||
|
||||
const downloadFile = (req: Request, res: Response) => downloadNoteInt(req.params.noteId, res, true);
|
||||
const openFile = (req: Request, res: Response) => downloadNoteInt(req.params.noteId, res, false);
|
||||
|
||||
const downloadAttachment = (req: Request, res: Response) => downloadAttachmentInt(req.params.attachmentId, res, true);
|
||||
const openAttachment = (req: Request, res: Response) => downloadAttachmentInt(req.params.attachmentId, res, false);
|
||||
|
||||
function fileContentProvider(req: Request) {
|
||||
// Load the note from the route params and stream its file content.
|
||||
const note = becca.getNoteOrThrow(req.params.noteId);
|
||||
|
||||
return streamContent(note.getContent(), note.getFileName(), note.mime);
|
||||
}
|
||||
|
||||
function attachmentContentProvider(req: Request) {
|
||||
// Load the attachment from the route params and stream its file content.
|
||||
const attachment = becca.getAttachmentOrThrow(req.params.attachmentId);
|
||||
|
||||
return streamContent(attachment.getContent(), attachment.getFileName(), attachment.mime);
|
||||
}
|
||||
|
||||
async function streamContent(content: string | Buffer, fileName: string, mimeType: string) {
|
||||
if (typeof content === "string") {
|
||||
content = Buffer.from(content, "utf8");
|
||||
}
|
||||
|
||||
const totalSize = content.byteLength;
|
||||
|
||||
const getStream = (range: { start: number; end: number }) => {
|
||||
if (!range) {
|
||||
// Request is for complete content.
|
||||
return Readable.from(content);
|
||||
}
|
||||
// Partial content request.
|
||||
const { start, end } = range;
|
||||
|
||||
return Readable.from(content.slice(start, end + 1));
|
||||
};
|
||||
|
||||
return {
|
||||
fileName,
|
||||
totalSize,
|
||||
mimeType,
|
||||
getStream
|
||||
};
|
||||
}
|
||||
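// Illustrative sketch (not part of this commit): how a caller holding a parsed Range header
// might use the provider returned above. The file name and mime type are placeholder values;
// parsing "bytes=start-end" into { start, end } is assumed to happen in the route layer.
//
// async function readFirstKilobyte(content: Buffer) {
//     const { getStream, totalSize } = await streamContent(content, "example.bin", "application/octet-stream");
//     // omit the range argument to stream the complete content instead
//     return getStream({ start: 0, end: Math.min(1023, totalSize - 1) });
// }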
|
||||
function saveNoteToTmpDir(req: Request) {
|
||||
const note = becca.getNoteOrThrow(req.params.noteId);
|
||||
const fileName = note.getFileName();
|
||||
const content = note.getContent();
|
||||
|
||||
return saveToTmpDir(fileName, content, "notes", note.noteId);
|
||||
}
|
||||
|
||||
function saveAttachmentToTmpDir(req: Request) {
|
||||
const attachment = becca.getAttachmentOrThrow(req.params.attachmentId);
|
||||
const fileName = attachment.getFileName();
|
||||
const content = attachment.getContent();
|
||||
|
||||
if (!attachment.attachmentId) {
|
||||
throw new ValidationError("Missing attachment ID.");
|
||||
}
|
||||
return saveToTmpDir(fileName, content, "attachments", attachment.attachmentId);
|
||||
}
|
||||
|
||||
const createdTemporaryFiles = new Set<string>();
|
||||
|
||||
function saveToTmpDir(fileName: string, content: string | Buffer, entityType: string, entityId: string) {
|
||||
const tmpObj = tmp.fileSync({
|
||||
postfix: fileName,
|
||||
tmpdir: dataDirs.TMP_DIR
|
||||
});
|
||||
|
||||
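// The two branches below are textually identical; keeping them separate narrows `content`
// to `string` or `Buffer`, so each call matches a single fs.writeSync overload.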
if (typeof content === "string") {
|
||||
fs.writeSync(tmpObj.fd, content);
|
||||
} else {
|
||||
fs.writeSync(tmpObj.fd, content);
|
||||
}
|
||||
|
||||
fs.closeSync(tmpObj.fd);
|
||||
|
||||
createdTemporaryFiles.add(tmpObj.name);
|
||||
|
||||
log.info(`Saved temporary file ${tmpObj.name}`);
|
||||
|
||||
if (utils.isElectron) {
|
||||
chokidar.watch(tmpObj.name).on("change", (path, stats) => {
|
||||
ws.sendMessageToAllClients({
|
||||
type: "openedFileUpdated",
|
||||
entityType: entityType,
|
||||
entityId: entityId,
|
||||
lastModifiedMs: stats?.atimeMs,
|
||||
filePath: tmpObj.name
|
||||
});
|
||||
});
|
||||
}
|
||||
|
||||
return {
|
||||
tmpFilePath: tmpObj.name
|
||||
};
|
||||
}
|
||||
|
||||
function uploadModifiedFileToNote(req: Request) {
|
||||
const noteId = req.params.noteId;
|
||||
const { filePath } = req.body;
|
||||
|
||||
if (!createdTemporaryFiles.has(filePath)) {
|
||||
throw new ValidationError(`File '${filePath}' is not a temporary file.`);
|
||||
}
|
||||
|
||||
const note = becca.getNoteOrThrow(noteId);
|
||||
|
||||
log.info(`Updating note '${noteId}' with content from '${filePath}'`);
|
||||
|
||||
note.saveRevision();
|
||||
|
||||
const fileContent = fs.readFileSync(filePath);
|
||||
|
||||
if (!fileContent) {
|
||||
throw new ValidationError(`File '${filePath}' is empty`);
|
||||
}
|
||||
|
||||
note.setContent(fileContent);
|
||||
}
|
||||
|
||||
function uploadModifiedFileToAttachment(req: Request) {
|
||||
const { attachmentId } = req.params;
|
||||
const { filePath } = req.body;
|
||||
|
||||
const attachment = becca.getAttachmentOrThrow(attachmentId);
|
||||
|
||||
log.info(`Updating attachment '${attachmentId}' with content from '${filePath}'`);
|
||||
|
||||
attachment.getNote().saveRevision();
|
||||
|
||||
const fileContent = fs.readFileSync(filePath);
|
||||
|
||||
if (!fileContent) {
|
||||
throw new ValidationError(`File '${filePath}' is empty`);
|
||||
}
|
||||
|
||||
attachment.setContent(fileContent);
|
||||
}
|
||||
|
||||
export default {
|
||||
updateFile,
|
||||
updateAttachment,
|
||||
openFile,
|
||||
fileContentProvider,
|
||||
downloadFile,
|
||||
downloadNoteInt,
|
||||
saveNoteToTmpDir,
|
||||
openAttachment,
|
||||
downloadAttachment,
|
||||
saveAttachmentToTmpDir,
|
||||
attachmentContentProvider,
|
||||
uploadModifiedFileToNote,
|
||||
uploadModifiedFileToAttachment
|
||||
};
|
||||
94
apps/server/src/routes/api/fonts.ts
Normal file
94
apps/server/src/routes/api/fonts.ts
Normal file
@@ -0,0 +1,94 @@
|
||||
import type { Request, Response } from "express";
|
||||
import optionService from "../../services/options.js";
|
||||
import type { OptionMap } from "../../services/options_interface.js";
|
||||
|
||||
const SYSTEM_SANS_SERIF = [
|
||||
"system-ui",
|
||||
"-apple-system",
|
||||
"BlinkMacSystemFont",
|
||||
"Segoe UI",
|
||||
"Cantarell",
|
||||
"Ubuntu",
|
||||
"Noto Sans",
|
||||
"Helvetica",
|
||||
"Arial",
|
||||
"sans-serif",
|
||||
"Apple Color Emoji",
|
||||
"Segoe UI Emoji"
|
||||
].join(",");
|
||||
|
||||
const SYSTEM_MONOSPACE = ["ui-monospace", "SFMono-Regular", "SF Mono", "Consolas", "Source Code Pro", "Ubuntu Mono", "Menlo", "Liberation Mono", "monospace"].join(",");
|
||||
|
||||
function getFontCss(req: Request, res: Response) {
|
||||
res.setHeader("Content-Type", "text/css");
|
||||
|
||||
if (!optionService.getOptionBool("overrideThemeFonts")) {
|
||||
res.send("");
|
||||
|
||||
return;
|
||||
}
|
||||
|
||||
const optionsMap = optionService.getOptionMap();
|
||||
|
||||
// using body to be more specific than themes' :root
|
||||
let style = "body {";
|
||||
style += getFontFamily(optionsMap);
|
||||
style += getFontSize(optionsMap);
|
||||
style += "}";
|
||||
|
||||
res.send(style);
|
||||
}
|
||||
|
||||
function getFontFamily({ mainFontFamily, treeFontFamily, detailFontFamily, monospaceFontFamily }: OptionMap) {
|
||||
let style = "";
|
||||
|
||||
// System override
|
||||
if (mainFontFamily === "system") {
|
||||
mainFontFamily = SYSTEM_SANS_SERIF;
|
||||
}
|
||||
|
||||
if (treeFontFamily === "system") {
|
||||
treeFontFamily = SYSTEM_SANS_SERIF;
|
||||
}
|
||||
|
||||
if (detailFontFamily === "system") {
|
||||
detailFontFamily = SYSTEM_SANS_SERIF;
|
||||
}
|
||||
|
||||
if (monospaceFontFamily === "system") {
|
||||
monospaceFontFamily = SYSTEM_MONOSPACE;
|
||||
}
|
||||
|
||||
// Apply the font override if not using theme fonts.
|
||||
if (mainFontFamily !== "theme") {
|
||||
style += `--main-font-family: ${mainFontFamily};`;
|
||||
}
|
||||
|
||||
if (treeFontFamily !== "theme") {
|
||||
style += `--tree-font-family: ${treeFontFamily};`;
|
||||
}
|
||||
|
||||
if (detailFontFamily !== "theme") {
|
||||
style += `--detail-font-family: ${detailFontFamily};`;
|
||||
}
|
||||
|
||||
if (monospaceFontFamily !== "theme") {
|
||||
style += `--monospace-font-family: ${monospaceFontFamily};`;
|
||||
}
|
||||
|
||||
return style;
|
||||
}
|
||||
|
||||
function getFontSize(optionsMap: OptionMap) {
|
||||
let style = "";
|
||||
style += `--main-font-size: ${optionsMap.mainFontSize}%;`;
|
||||
style += `--tree-font-size: ${optionsMap.treeFontSize}%;`;
|
||||
style += `--detail-font-size: ${optionsMap.detailFontSize}%;`;
|
||||
style += `--monospace-font-size: ${optionsMap.monospaceFontSize}%;`;
|
||||
|
||||
return style;
|
||||
}
|
||||
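// Example (illustrative, values assumed): with overrideThemeFonts enabled, mainFontFamily set
// to "system" and mainFontSize set to 110, getFontCss responds with CSS along the lines of
// `body {--main-font-family: system-ui,-apple-system,...;--main-font-size: 110%; ...}`.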
|
||||
export default {
|
||||
getFontCss
|
||||
};
|
||||
32
apps/server/src/routes/api/image.spec.ts
Normal file
32
apps/server/src/routes/api/image.spec.ts
Normal file
@@ -0,0 +1,32 @@
|
||||
import { describe, expect, it } from "vitest";
|
||||
import { note } from "../../../spec/support/becca_mocking.js";
|
||||
import { renderSvgAttachment } from "./image.js";
|
||||
|
||||
describe("Image API", () => {
|
||||
it("renders empty SVG properly", () => {
|
||||
const parentNote = note("note").note;
|
||||
const response = new MockResponse();
|
||||
renderSvgAttachment(parentNote, response as any, "attachment");
|
||||
expect(response.headers["Content-Type"]).toBe("image/svg+xml");
|
||||
expect(response.body).toBe(`<svg xmlns="http://www.w3.org/2000/svg"></svg>`);
|
||||
});
|
||||
});
|
||||
|
||||
class MockResponse {
|
||||
|
||||
body?: string;
|
||||
headers: Record<string, string>;
|
||||
|
||||
constructor() {
|
||||
this.headers = {};
|
||||
}
|
||||
|
||||
set(name: string, value: string) {
|
||||
this.headers[name] = value;
|
||||
}
|
||||
|
||||
send(body: string) {
|
||||
this.body = body;
|
||||
}
|
||||
|
||||
}
|
||||
118
apps/server/src/routes/api/image.ts
Normal file
118
apps/server/src/routes/api/image.ts
Normal file
@@ -0,0 +1,118 @@
|
||||
"use strict";
|
||||
|
||||
import imageService from "../../services/image.js";
|
||||
import becca from "../../becca/becca.js";
|
||||
import fs from "fs";
|
||||
import type { Request, Response } from "express";
|
||||
import type BNote from "../../becca/entities/bnote.js";
|
||||
import type BRevision from "../../becca/entities/brevision.js";
|
||||
import { RESOURCE_DIR } from "../../services/resource_dir.js";
|
||||
|
||||
function returnImageFromNote(req: Request, res: Response) {
|
||||
const image = becca.getNote(req.params.noteId);
|
||||
|
||||
return returnImageInt(image, res);
|
||||
}
|
||||
|
||||
function returnImageFromRevision(req: Request, res: Response) {
|
||||
const image = becca.getRevision(req.params.revisionId);
|
||||
|
||||
return returnImageInt(image, res);
|
||||
}
|
||||
|
||||
function returnImageInt(image: BNote | BRevision | null, res: Response) {
|
||||
if (!image) {
|
||||
res.set("Content-Type", "image/png");
|
||||
return res.send(fs.readFileSync(`${RESOURCE_DIR}/db/image-deleted.png`));
|
||||
} else if (!["image", "canvas", "mermaid", "mindMap"].includes(image.type)) {
|
||||
return res.sendStatus(400);
|
||||
}
|
||||
|
||||
if (image.type === "canvas") {
|
||||
renderSvgAttachment(image, res, "canvas-export.svg");
|
||||
} else if (image.type === "mermaid") {
|
||||
renderSvgAttachment(image, res, "mermaid-export.svg");
|
||||
} else if (image.type === "mindMap") {
|
||||
renderSvgAttachment(image, res, "mindmap-export.svg");
|
||||
} else {
|
||||
res.set("Content-Type", image.mime);
|
||||
res.set("Cache-Control", "no-cache, no-store, must-revalidate");
|
||||
res.send(image.getContent());
|
||||
}
|
||||
}
|
||||
|
||||
export function renderSvgAttachment(image: BNote | BRevision, res: Response, attachmentName: string) {
|
||||
let svg: string | Buffer = `<svg xmlns="http://www.w3.org/2000/svg"></svg>`;
|
||||
const attachment = image.getAttachmentByTitle(attachmentName);
|
||||
|
||||
if (attachment) {
|
||||
svg = attachment.getContent();
|
||||
} else {
|
||||
// Backwards compatibility: before attachments existed, the SVG was stored under a separate key in the main note content.
|
||||
const contentSvg = image.getJsonContentSafely()?.svg;
|
||||
|
||||
if (contentSvg) {
|
||||
svg = contentSvg;
|
||||
}
|
||||
}
|
||||
|
||||
res.set("Content-Type", "image/svg+xml");
|
||||
res.set("Cache-Control", "no-cache, no-store, must-revalidate");
|
||||
res.send(svg);
|
||||
}
|
||||
|
||||
function returnAttachedImage(req: Request, res: Response) {
|
||||
const attachment = becca.getAttachment(req.params.attachmentId);
|
||||
|
||||
if (!attachment) {
|
||||
res.set("Content-Type", "image/png");
|
||||
return res.send(fs.readFileSync(`${RESOURCE_DIR}/db/image-deleted.png`));
|
||||
}
|
||||
|
||||
if (!["image"].includes(attachment.role)) {
|
||||
return res.setHeader("Content-Type", "text/plain").status(400).send(`Attachment '${attachment.attachmentId}' has role '${attachment.role}', but 'image' was expected.`);
|
||||
}
|
||||
|
||||
res.set("Content-Type", attachment.mime);
|
||||
res.set("Cache-Control", "no-cache, no-store, must-revalidate");
|
||||
res.send(attachment.getContent());
|
||||
}
|
||||
|
||||
function updateImage(req: Request) {
|
||||
const { noteId } = req.params;
|
||||
const { file } = req;
|
||||
|
||||
const _note = becca.getNoteOrThrow(noteId);
|
||||
|
||||
if (!file) {
|
||||
return {
|
||||
uploaded: false,
|
||||
message: `Missing image data.`
|
||||
};
|
||||
}
|
||||
|
||||
if (!["image/png", "image/jpeg", "image/gif", "image/webp", "image/svg+xml"].includes(file.mimetype)) {
|
||||
return {
|
||||
uploaded: false,
|
||||
message: `Unknown image type: ${file.mimetype}`
|
||||
};
|
||||
}
|
||||
|
||||
if (typeof file.buffer === "string") {
|
||||
return {
|
||||
uploaded: false,
|
||||
message: "Invalid image content."
|
||||
};
|
||||
}
|
||||
|
||||
imageService.updateImage(noteId, file.buffer, file.originalname);
|
||||
|
||||
return { uploaded: true };
|
||||
}
|
||||
|
||||
export default {
|
||||
returnImageFromNote,
|
||||
returnImageFromRevision,
|
||||
returnAttachedImage,
|
||||
updateImage
|
||||
};
|
||||
151
apps/server/src/routes/api/import.ts
Normal file
151
apps/server/src/routes/api/import.ts
Normal file
@@ -0,0 +1,151 @@
|
||||
"use strict";
|
||||
|
||||
import enexImportService from "../../services/import/enex.js";
|
||||
import opmlImportService from "../../services/import/opml.js";
|
||||
import zipImportService from "../../services/import/zip.js";
|
||||
import singleImportService from "../../services/import/single.js";
|
||||
import cls from "../../services/cls.js";
|
||||
import path from "path";
|
||||
import becca from "../../becca/becca.js";
|
||||
import beccaLoader from "../../becca/becca_loader.js";
|
||||
import log from "../../services/log.js";
|
||||
import TaskContext from "../../services/task_context.js";
|
||||
import ValidationError from "../../errors/validation_error.js";
|
||||
import type { Request } from "express";
|
||||
import type BNote from "../../becca/entities/bnote.js";
|
||||
import { safeExtractMessageAndStackFromError } from "../../services/utils.js";
|
||||
|
||||
async function importNotesToBranch(req: Request) {
|
||||
const { parentNoteId } = req.params;
|
||||
const { taskId, last } = req.body;
|
||||
|
||||
const options = {
|
||||
safeImport: req.body.safeImport !== "false",
|
||||
shrinkImages: req.body.shrinkImages !== "false",
|
||||
textImportedAsText: req.body.textImportedAsText !== "false",
|
||||
codeImportedAsCode: req.body.codeImportedAsCode !== "false",
|
||||
explodeArchives: req.body.explodeArchives !== "false",
|
||||
replaceUnderscoresWithSpaces: req.body.replaceUnderscoresWithSpaces !== "false"
|
||||
};
|
||||
|
||||
const file = req.file;
|
||||
|
||||
if (!file) {
|
||||
throw new ValidationError("No file has been uploaded");
|
||||
}
|
||||
|
||||
const parentNote = becca.getNoteOrThrow(parentNoteId);
|
||||
|
||||
const extension = path.extname(file.originalname).toLowerCase();
|
||||
|
||||
// running all the event handlers on imported notes (and attributes) is slow
|
||||
// and may produce unintended consequences
|
||||
cls.disableEntityEvents();
|
||||
|
||||
// eliminate flickering during import
|
||||
cls.ignoreEntityChangeIds();
|
||||
|
||||
let note: BNote | null; // typically root of the import - client can show it after finishing the import
|
||||
|
||||
const taskContext = TaskContext.getInstance(taskId, "importNotes", options);
|
||||
|
||||
try {
|
||||
if (extension === ".zip" && options.explodeArchives && typeof file.buffer !== "string") {
|
||||
note = await zipImportService.importZip(taskContext, file.buffer, parentNote);
|
||||
} else if (extension === ".opml" && options.explodeArchives) {
|
||||
const importResult = await opmlImportService.importOpml(taskContext, file.buffer, parentNote);
|
||||
if (!Array.isArray(importResult)) {
|
||||
note = importResult;
|
||||
} else {
|
||||
return importResult;
|
||||
}
|
||||
} else if (extension === ".enex" && options.explodeArchives) {
|
||||
const importResult = await enexImportService.importEnex(taskContext, file, parentNote);
|
||||
if (!Array.isArray(importResult)) {
|
||||
note = importResult;
|
||||
} else {
|
||||
return importResult;
|
||||
}
|
||||
} else {
|
||||
note = await singleImportService.importSingleFile(taskContext, file, parentNote);
|
||||
}
|
||||
} catch (e: unknown) {
|
||||
const [errMessage, errStack] = safeExtractMessageAndStackFromError(e);
|
||||
const message = `Import failed with following error: '${errMessage}'. More details might be in the logs.`;
|
||||
taskContext.reportError(message);
|
||||
|
||||
log.error(message + errStack);
|
||||
|
||||
return [500, message];
|
||||
}
|
||||
|
||||
if (!note) {
|
||||
return [500, "No note was generated as a result of the import."];
|
||||
}
|
||||
|
||||
if (last === "true") {
|
||||
// small timeout to avoid race condition (the message is received before the transaction is committed)
|
||||
setTimeout(
|
||||
() =>
|
||||
taskContext.taskSucceeded({
|
||||
parentNoteId: parentNoteId,
|
||||
importedNoteId: note?.noteId
|
||||
}),
|
||||
1000
|
||||
);
|
||||
}
|
||||
|
||||
// import has deactivated note events so becca is not updated, instead we force it to reload
|
||||
beccaLoader.load();
|
||||
|
||||
return note.getPojo();
|
||||
}
|
||||
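// Illustrative sketch (not part of this commit): the boolean options above arrive as multipart
// form fields, so any value other than the literal string "false" leaves the option enabled.
// The endpoint path and the "upload" field name are assumptions.
//
// async function uploadImport(parentNoteId: string, file: Blob, taskId: string) {
//     const form = new FormData();
//     form.append("upload", file);
//     form.append("taskId", taskId);
//     form.append("last", "true");
//     form.append("safeImport", "true"); // anything except "false" keeps safe import on
//     await fetch(`/api/notes/${parentNoteId}/import`, { method: "POST", body: form });
// }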
|
||||
async function importAttachmentsToNote(req: Request) {
|
||||
const { parentNoteId } = req.params;
|
||||
const { taskId, last } = req.body;
|
||||
|
||||
const options = {
|
||||
shrinkImages: req.body.shrinkImages !== "false"
|
||||
};
|
||||
|
||||
const file = req.file;
|
||||
|
||||
if (!file) {
|
||||
throw new ValidationError("No file has been uploaded");
|
||||
}
|
||||
|
||||
const parentNote = becca.getNoteOrThrow(parentNoteId);
|
||||
const taskContext = TaskContext.getInstance(taskId, "importAttachment", options);
|
||||
|
||||
// unlike in note import, we let the events run, because a huge number of attachments is not likely
|
||||
|
||||
try {
|
||||
await singleImportService.importAttachment(taskContext, file, parentNote);
|
||||
} catch (e: unknown) {
|
||||
const [errMessage, errStack] = safeExtractMessageAndStackFromError(e);
|
||||
|
||||
const message = `Import failed with following error: '${errMessage}'. More details might be in the logs.`;
|
||||
taskContext.reportError(message);
|
||||
|
||||
log.error(message + errStack);
|
||||
|
||||
return [500, message];
|
||||
}
|
||||
|
||||
if (last === "true") {
|
||||
// small timeout to avoid race condition (the message is received before the transaction is committed)
|
||||
setTimeout(
|
||||
() =>
|
||||
taskContext.taskSucceeded({
|
||||
parentNoteId: parentNoteId
|
||||
}),
|
||||
1000
|
||||
);
|
||||
}
|
||||
}
|
||||
|
||||
export default {
|
||||
importNotesToBranch,
|
||||
importAttachmentsToNote
|
||||
};
|
||||
20
apps/server/src/routes/api/keys.ts
Normal file
20
apps/server/src/routes/api/keys.ts
Normal file
@@ -0,0 +1,20 @@
"use strict";

import keyboardActions from "../../services/keyboard_actions.js";
import becca from "../../becca/becca.js";

function getKeyboardActions() {
return keyboardActions.getKeyboardActions();
}

function getShortcutsForNotes() {
const labels = becca.findAttributes("label", "keyboardShortcut");

// launchers have different handling
return labels.filter((attr) => becca.getNote(attr.noteId)?.type !== "launcher");
}

export default {
getKeyboardActions,
getShortcutsForNotes
};
944
apps/server/src/routes/api/llm.ts
Normal file
944
apps/server/src/routes/api/llm.ts
Normal file
@@ -0,0 +1,944 @@
|
||||
import type { Request, Response } from "express";
|
||||
import log from "../../services/log.js";
|
||||
import options from "../../services/options.js";
|
||||
|
||||
// Import the index service for knowledge base management
|
||||
import indexService from "../../services/llm/index_service.js";
|
||||
import restChatService from "../../services/llm/rest_chat_service.js";
|
||||
import chatService from '../../services/llm/chat_service.js';
|
||||
import chatStorageService from '../../services/llm/chat_storage_service.js';
|
||||
|
||||
// Define basic interfaces
|
||||
interface ChatMessage {
|
||||
role: 'user' | 'assistant' | 'system';
|
||||
content: string;
|
||||
timestamp?: Date;
|
||||
}
|
||||
|
||||
|
||||
|
||||
/**
|
||||
* @swagger
|
||||
* /api/llm/sessions:
|
||||
* post:
|
||||
* summary: Create a new LLM chat session
|
||||
* operationId: llm-create-session
|
||||
* requestBody:
|
||||
* required: true
|
||||
* content:
|
||||
* application/json:
|
||||
* schema:
|
||||
* type: object
|
||||
* properties:
|
||||
* title:
|
||||
* type: string
|
||||
* description: Title for the chat session
|
||||
* systemPrompt:
|
||||
* type: string
|
||||
* description: System message to set the behavior of the assistant
|
||||
* temperature:
|
||||
* type: number
|
||||
* description: Temperature parameter for the LLM (0.0-1.0)
|
||||
* maxTokens:
|
||||
* type: integer
|
||||
* description: Maximum tokens to generate in responses
|
||||
* model:
|
||||
* type: string
|
||||
* description: Specific model to use (depends on provider)
|
||||
* provider:
|
||||
* type: string
|
||||
* description: LLM provider to use (e.g., 'openai', 'anthropic', 'ollama')
|
||||
* contextNoteId:
|
||||
* type: string
|
||||
* description: Note ID to use as context for the session
|
||||
* responses:
|
||||
* '200':
|
||||
* description: Successfully created session
|
||||
* content:
|
||||
* application/json:
|
||||
* schema:
|
||||
* type: object
|
||||
* properties:
|
||||
* sessionId:
|
||||
* type: string
|
||||
* title:
|
||||
* type: string
|
||||
* createdAt:
|
||||
* type: string
|
||||
* format: date-time
|
||||
* security:
|
||||
* - session: []
|
||||
* tags: ["llm"]
|
||||
*/
|
||||
async function createSession(req: Request, res: Response) {
|
||||
return restChatService.createSession(req, res);
|
||||
}
|
||||
|
||||
/**
|
||||
* @swagger
|
||||
* /api/llm/sessions/{sessionId}:
|
||||
* get:
|
||||
* summary: Retrieve a specific chat session
|
||||
* operationId: llm-get-session
|
||||
* parameters:
|
||||
* - name: sessionId
|
||||
* in: path
|
||||
* required: true
|
||||
* schema:
|
||||
* type: string
|
||||
* responses:
|
||||
* '200':
|
||||
* description: Chat session details
|
||||
* content:
|
||||
* application/json:
|
||||
* schema:
|
||||
* type: object
|
||||
* properties:
|
||||
* id:
|
||||
* type: string
|
||||
* title:
|
||||
* type: string
|
||||
* messages:
|
||||
* type: array
|
||||
* items:
|
||||
* type: object
|
||||
* properties:
|
||||
* role:
|
||||
* type: string
|
||||
* enum: [user, assistant, system]
|
||||
* content:
|
||||
* type: string
|
||||
* timestamp:
|
||||
* type: string
|
||||
* format: date-time
|
||||
* createdAt:
|
||||
* type: string
|
||||
* format: date-time
|
||||
* lastActive:
|
||||
* type: string
|
||||
* format: date-time
|
||||
* '404':
|
||||
* description: Session not found
|
||||
* security:
|
||||
* - session: []
|
||||
* tags: ["llm"]
|
||||
*/
|
||||
async function getSession(req: Request, res: Response) {
|
||||
return restChatService.getSession(req, res);
|
||||
}
|
||||
|
||||
/**
|
||||
* @swagger
|
||||
* /api/llm/chat/{chatNoteId}:
|
||||
* patch:
|
||||
* summary: Update a chat's settings
|
||||
* operationId: llm-update-chat
|
||||
* parameters:
|
||||
* - name: chatNoteId
|
||||
* in: path
|
||||
* required: true
|
||||
* schema:
|
||||
* type: string
|
||||
* description: The ID of the chat note (formerly sessionId)
|
||||
* requestBody:
|
||||
* required: true
|
||||
* content:
|
||||
* application/json:
|
||||
* schema:
|
||||
* type: object
|
||||
* properties:
|
||||
* title:
|
||||
* type: string
|
||||
* description: Updated title for the session
|
||||
* systemPrompt:
|
||||
* type: string
|
||||
* description: Updated system prompt
|
||||
* temperature:
|
||||
* type: number
|
||||
* description: Updated temperature setting
|
||||
* maxTokens:
|
||||
* type: integer
|
||||
* description: Updated maximum tokens setting
|
||||
* model:
|
||||
* type: string
|
||||
* description: Updated model selection
|
||||
* provider:
|
||||
* type: string
|
||||
* description: Updated provider selection
|
||||
* contextNoteId:
|
||||
* type: string
|
||||
* description: Updated note ID for context
|
||||
* responses:
|
||||
* '200':
|
||||
* description: Session successfully updated
|
||||
* content:
|
||||
* application/json:
|
||||
* schema:
|
||||
* type: object
|
||||
* properties:
|
||||
* id:
|
||||
* type: string
|
||||
* title:
|
||||
* type: string
|
||||
* updatedAt:
|
||||
* type: string
|
||||
* format: date-time
|
||||
* '404':
|
||||
* description: Session not found
|
||||
* security:
|
||||
* - session: []
|
||||
* tags: ["llm"]
|
||||
*/
|
||||
async function updateSession(req: Request, res: Response) {
|
||||
// Get the chat using ChatService
|
||||
const chatNoteId = req.params.chatNoteId;
|
||||
const updates = req.body;
|
||||
|
||||
try {
|
||||
// Get the chat
|
||||
const session = await chatService.getOrCreateSession(chatNoteId);
|
||||
|
||||
// Update title if provided
|
||||
if (updates.title) {
|
||||
await chatStorageService.updateChat(chatNoteId, session.messages, updates.title);
|
||||
}
|
||||
|
||||
// Return the updated chat
|
||||
return {
|
||||
id: chatNoteId,
|
||||
title: updates.title || session.title,
|
||||
updatedAt: new Date()
|
||||
};
|
||||
} catch (error) {
|
||||
log.error(`Error updating chat: ${error}`);
|
||||
throw new Error(`Failed to update chat: ${error}`);
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* @swagger
|
||||
* /api/llm/sessions:
|
||||
* get:
|
||||
* summary: List all chat sessions
|
||||
* operationId: llm-list-sessions
|
||||
* responses:
|
||||
* '200':
|
||||
* description: List of chat sessions
|
||||
* content:
|
||||
* application/json:
|
||||
* schema:
|
||||
* type: array
|
||||
* items:
|
||||
* type: object
|
||||
* properties:
|
||||
* id:
|
||||
* type: string
|
||||
* title:
|
||||
* type: string
|
||||
* createdAt:
|
||||
* type: string
|
||||
* format: date-time
|
||||
* lastActive:
|
||||
* type: string
|
||||
* format: date-time
|
||||
* messageCount:
|
||||
* type: integer
|
||||
* security:
|
||||
* - session: []
|
||||
* tags: ["llm"]
|
||||
*/
|
||||
async function listSessions(req: Request, res: Response) {
|
||||
// Get all sessions using ChatService
|
||||
try {
|
||||
const sessions = await chatService.getAllSessions();
|
||||
|
||||
// Format the response
|
||||
return {
|
||||
sessions: sessions.map(session => ({
|
||||
id: session.id,
|
||||
title: session.title,
|
||||
createdAt: new Date(), // Since we don't have this in chat sessions
|
||||
lastActive: new Date(), // Since we don't have this in chat sessions
|
||||
messageCount: session.messages.length
|
||||
}))
|
||||
};
|
||||
} catch (error) {
|
||||
log.error(`Error listing sessions: ${error}`);
|
||||
throw new Error(`Failed to list sessions: ${error}`);
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* @swagger
|
||||
* /api/llm/sessions/{sessionId}:
|
||||
* delete:
|
||||
* summary: Delete a chat session
|
||||
* operationId: llm-delete-session
|
||||
* parameters:
|
||||
* - name: sessionId
|
||||
* in: path
|
||||
* required: true
|
||||
* schema:
|
||||
* type: string
|
||||
* responses:
|
||||
* '200':
|
||||
* description: Session successfully deleted
|
||||
* '404':
|
||||
* description: Session not found
|
||||
* security:
|
||||
* - session: []
|
||||
* tags: ["llm"]
|
||||
*/
|
||||
async function deleteSession(req: Request, res: Response) {
|
||||
return restChatService.deleteSession(req, res);
|
||||
}
|
||||
|
||||
/**
|
||||
* @swagger
|
||||
* /api/llm/chat/{chatNoteId}/messages:
|
||||
* post:
|
||||
* summary: Send a message to an LLM and get a response
|
||||
* operationId: llm-send-message
|
||||
* parameters:
|
||||
* - name: chatNoteId
|
||||
* in: path
|
||||
* required: true
|
||||
* schema:
|
||||
* type: string
|
||||
* description: The ID of the chat note (formerly sessionId)
|
||||
* requestBody:
|
||||
* required: true
|
||||
* content:
|
||||
* application/json:
|
||||
* schema:
|
||||
* type: object
|
||||
* properties:
|
||||
* message:
|
||||
* type: string
|
||||
* description: The user message to send to the LLM
|
||||
* options:
|
||||
* type: object
|
||||
* description: Optional parameters for this specific message
|
||||
* properties:
|
||||
* temperature:
|
||||
* type: number
|
||||
* maxTokens:
|
||||
* type: integer
|
||||
* model:
|
||||
* type: string
|
||||
* provider:
|
||||
* type: string
|
||||
* includeContext:
|
||||
* type: boolean
|
||||
* description: Whether to include relevant notes as context
|
||||
* useNoteContext:
|
||||
* type: boolean
|
||||
* description: Whether to use the session's context note
|
||||
* responses:
|
||||
* '200':
|
||||
* description: LLM response
|
||||
* content:
|
||||
* application/json:
|
||||
* schema:
|
||||
* type: object
|
||||
* properties:
|
||||
* response:
|
||||
* type: string
|
||||
* sources:
|
||||
* type: array
|
||||
* items:
|
||||
* type: object
|
||||
* properties:
|
||||
* noteId:
|
||||
* type: string
|
||||
* title:
|
||||
* type: string
|
||||
* similarity:
|
||||
* type: number
|
||||
* sessionId:
|
||||
* type: string
|
||||
* '404':
|
||||
* description: Session not found
|
||||
* '500':
|
||||
* description: Error processing request
|
||||
* security:
|
||||
* - session: []
|
||||
* tags: ["llm"]
|
||||
*/
|
||||
async function sendMessage(req: Request, res: Response) {
|
||||
return restChatService.handleSendMessage(req, res);
|
||||
}
|
||||
|
||||
/**
|
||||
* @swagger
|
||||
* /api/llm/indexes/stats:
|
||||
* get:
|
||||
* summary: Get stats about the LLM knowledge base indexing status
|
||||
* operationId: llm-index-stats
|
||||
* responses:
|
||||
* '200':
|
||||
* description: Index stats successfully retrieved
|
||||
* security:
|
||||
* - session: []
|
||||
* tags: ["llm"]
|
||||
*/
|
||||
async function getIndexStats(req: Request, res: Response) {
|
||||
try {
|
||||
// Check if AI is enabled
|
||||
const aiEnabled = await options.getOptionBool('aiEnabled');
|
||||
if (!aiEnabled) {
|
||||
return {
|
||||
success: false,
|
||||
message: "AI features are disabled"
|
||||
};
|
||||
}
|
||||
|
||||
// Return indexing stats
|
||||
const stats = await indexService.getIndexingStats();
|
||||
return {
|
||||
success: true,
|
||||
...stats
|
||||
};
|
||||
} catch (error: any) {
|
||||
log.error(`Error getting index stats: ${error.message || 'Unknown error'}`);
|
||||
throw new Error(`Failed to get index stats: ${error.message || 'Unknown error'}`);
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* @swagger
|
||||
* /api/llm/indexes:
|
||||
* post:
|
||||
* summary: Start or continue indexing the knowledge base
|
||||
* operationId: llm-start-indexing
|
||||
* requestBody:
|
||||
* required: false
|
||||
* content:
|
||||
* application/json:
|
||||
* schema:
|
||||
* type: object
|
||||
* properties:
|
||||
* force:
|
||||
* type: boolean
|
||||
* description: Whether to force reindexing of all notes
|
||||
* responses:
|
||||
* '200':
|
||||
* description: Indexing started successfully
|
||||
* security:
|
||||
* - session: []
|
||||
* tags: ["llm"]
|
||||
*/
|
||||
async function startIndexing(req: Request, res: Response) {
|
||||
try {
|
||||
// Check if AI is enabled
|
||||
const aiEnabled = await options.getOptionBool('aiEnabled');
|
||||
if (!aiEnabled) {
|
||||
return {
|
||||
success: false,
|
||||
message: "AI features are disabled"
|
||||
};
|
||||
}
|
||||
|
||||
const { force = false } = req.body;
|
||||
|
||||
// Start indexing
|
||||
await indexService.startFullIndexing(force);
|
||||
|
||||
return {
|
||||
success: true,
|
||||
message: "Indexing started"
|
||||
};
|
||||
} catch (error: any) {
|
||||
log.error(`Error starting indexing: ${error.message || 'Unknown error'}`);
|
||||
throw new Error(`Failed to start indexing: ${error.message || 'Unknown error'}`);
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* @swagger
|
||||
* /api/llm/indexes/failed:
|
||||
* get:
|
||||
* summary: Get list of notes that failed to index
|
||||
* operationId: llm-failed-indexes
|
||||
* parameters:
|
||||
* - name: limit
|
||||
* in: query
|
||||
* required: false
|
||||
* schema:
|
||||
* type: integer
|
||||
* default: 100
|
||||
* responses:
|
||||
* '200':
|
||||
* description: Failed indexes successfully retrieved
|
||||
* security:
|
||||
* - session: []
|
||||
* tags: ["llm"]
|
||||
*/
|
||||
async function getFailedIndexes(req: Request, res: Response) {
|
||||
try {
|
||||
// Check if AI is enabled
|
||||
const aiEnabled = await options.getOptionBool('aiEnabled');
|
||||
if (!aiEnabled) {
|
||||
return {
|
||||
success: false,
|
||||
message: "AI features are disabled"
|
||||
};
|
||||
}
|
||||
|
||||
const limit = parseInt(req.query.limit as string || "100", 10);
|
||||
|
||||
// Get failed indexes
|
||||
const failed = await indexService.getFailedIndexes(limit);
|
||||
|
||||
return {
|
||||
success: true,
|
||||
failed
|
||||
};
|
||||
} catch (error: any) {
|
||||
log.error(`Error getting failed indexes: ${error.message || 'Unknown error'}`);
|
||||
throw new Error(`Failed to get failed indexes: ${error.message || 'Unknown error'}`);
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* @swagger
|
||||
* /api/llm/indexes/notes/{noteId}:
|
||||
* put:
|
||||
* summary: Retry indexing a specific note that previously failed
|
||||
* operationId: llm-retry-index
|
||||
* parameters:
|
||||
* - name: noteId
|
||||
* in: path
|
||||
* required: true
|
||||
* schema:
|
||||
* type: string
|
||||
* responses:
|
||||
* '200':
|
||||
* description: Index retry successfully initiated
|
||||
* security:
|
||||
* - session: []
|
||||
* tags: ["llm"]
|
||||
*/
|
||||
async function retryFailedIndex(req: Request, res: Response) {
|
||||
try {
|
||||
// Check if AI is enabled
|
||||
const aiEnabled = await options.getOptionBool('aiEnabled');
|
||||
if (!aiEnabled) {
|
||||
return {
|
||||
success: false,
|
||||
message: "AI features are disabled"
|
||||
};
|
||||
}
|
||||
|
||||
const { noteId } = req.params;
|
||||
|
||||
// Retry indexing the note
|
||||
const result = await indexService.retryFailedNote(noteId);
|
||||
|
||||
return {
|
||||
success: true,
|
||||
message: result ? "Note queued for indexing" : "Failed to queue note for indexing"
|
||||
};
|
||||
} catch (error: any) {
|
||||
log.error(`Error retrying failed index: ${error.message || 'Unknown error'}`);
|
||||
throw new Error(`Failed to retry index: ${error.message || 'Unknown error'}`);
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* @swagger
|
||||
* /api/llm/indexes/failed:
|
||||
* put:
|
||||
* summary: Retry indexing all failed notes
|
||||
* operationId: llm-retry-all-indexes
|
||||
* responses:
|
||||
* '200':
|
||||
* description: Retry of all failed indexes successfully initiated
|
||||
* security:
|
||||
* - session: []
|
||||
* tags: ["llm"]
|
||||
*/
|
||||
async function retryAllFailedIndexes(req: Request, res: Response) {
|
||||
try {
|
||||
// Check if AI is enabled
|
||||
const aiEnabled = await options.getOptionBool('aiEnabled');
|
||||
if (!aiEnabled) {
|
||||
return {
|
||||
success: false,
|
||||
message: "AI features are disabled"
|
||||
};
|
||||
}
|
||||
|
||||
// Retry all failed notes
|
||||
const count = await indexService.retryAllFailedNotes();
|
||||
|
||||
return {
|
||||
success: true,
|
||||
message: `${count} notes queued for reprocessing`
|
||||
};
|
||||
} catch (error: any) {
|
||||
log.error(`Error retrying all failed indexes: ${error.message || 'Unknown error'}`);
|
||||
throw new Error(`Failed to retry all indexes: ${error.message || 'Unknown error'}`);
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* @swagger
|
||||
* /api/llm/indexes/notes/similar:
|
||||
* get:
|
||||
* summary: Find notes similar to a query string
|
||||
* operationId: llm-find-similar-notes
|
||||
* parameters:
|
||||
* - name: query
|
||||
* in: query
|
||||
* required: true
|
||||
* schema:
|
||||
* type: string
|
||||
* - name: contextNoteId
|
||||
* in: query
|
||||
* required: false
|
||||
* schema:
|
||||
* type: string
|
||||
* - name: limit
|
||||
* in: query
|
||||
* required: false
|
||||
* schema:
|
||||
* type: integer
|
||||
* default: 5
|
||||
* responses:
|
||||
* '200':
|
||||
* description: Similar notes found successfully
|
||||
* security:
|
||||
* - session: []
|
||||
* tags: ["llm"]
|
||||
*/
|
||||
async function findSimilarNotes(req: Request, res: Response) {
|
||||
try {
|
||||
// Check if AI is enabled
|
||||
const aiEnabled = await options.getOptionBool('aiEnabled');
|
||||
if (!aiEnabled) {
|
||||
return {
|
||||
success: false,
|
||||
message: "AI features are disabled"
|
||||
};
|
||||
}
|
||||
|
||||
const query = req.query.query as string;
|
||||
const contextNoteId = req.query.contextNoteId as string | undefined;
|
||||
const limit = parseInt(req.query.limit as string || "5", 10);
|
||||
|
||||
if (!query) {
|
||||
return {
|
||||
success: false,
|
||||
message: "Query is required"
|
||||
};
|
||||
}
|
||||
|
||||
// Find similar notes
|
||||
const similar = await indexService.findSimilarNotes(query, contextNoteId, limit);
|
||||
|
||||
return {
|
||||
success: true,
|
||||
similar
|
||||
};
|
||||
} catch (error: any) {
|
||||
log.error(`Error finding similar notes: ${error.message || 'Unknown error'}`);
|
||||
throw new Error(`Failed to find similar notes: ${error.message || 'Unknown error'}`);
|
||||
}
|
||||
}
|
||||
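// Illustrative sketch (not part of this commit): querying the similar-notes endpoint above.
// The path and query parameter names mirror the swagger block; the fetch wrapper is an assumption.
//
// async function findSimilar(query: string, limit = 5) {
//     const resp = await fetch(`/api/llm/indexes/notes/similar?query=${encodeURIComponent(query)}&limit=${limit}`);
//     const { success, similar } = await resp.json();
//     return success ? similar : [];
// }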
|
||||
/**
|
||||
* @swagger
|
||||
* /api/llm/indexes/context:
|
||||
* get:
|
||||
* summary: Generate context for an LLM query based on the knowledge base
|
||||
* operationId: llm-generate-context
|
||||
* parameters:
|
||||
* - name: query
|
||||
* in: query
|
||||
* required: true
|
||||
* schema:
|
||||
* type: string
|
||||
* - name: contextNoteId
|
||||
* in: query
|
||||
* required: false
|
||||
* schema:
|
||||
* type: string
|
||||
* - name: depth
|
||||
* in: query
|
||||
* required: false
|
||||
* schema:
|
||||
* type: integer
|
||||
* default: 2
|
||||
* responses:
|
||||
* '200':
|
||||
* description: Context generated successfully
|
||||
* security:
|
||||
* - session: []
|
||||
* tags: ["llm"]
|
||||
*/
|
||||
async function generateQueryContext(req: Request, res: Response) {
|
||||
try {
|
||||
// Check if AI is enabled
|
||||
const aiEnabled = await options.getOptionBool('aiEnabled');
|
||||
if (!aiEnabled) {
|
||||
return {
|
||||
success: false,
|
||||
message: "AI features are disabled"
|
||||
};
|
||||
}
|
||||
|
||||
const query = req.query.query as string;
|
||||
const contextNoteId = req.query.contextNoteId as string | undefined;
|
||||
const depth = parseInt(req.query.depth as string || "2", 10);
|
||||
|
||||
if (!query) {
|
||||
return {
|
||||
success: false,
|
||||
message: "Query is required"
|
||||
};
|
||||
}
|
||||
|
||||
// Generate context
|
||||
const context = await indexService.generateQueryContext(query, contextNoteId, depth);
|
||||
|
||||
return {
|
||||
success: true,
|
||||
context
|
||||
};
|
||||
} catch (error: any) {
|
||||
log.error(`Error generating query context: ${error.message || 'Unknown error'}`);
|
||||
throw new Error(`Failed to generate query context: ${error.message || 'Unknown error'}`);
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* @swagger
|
||||
* /api/llm/indexes/notes/{noteId}:
|
||||
* post:
|
||||
* summary: Index a specific note for LLM knowledge base
|
||||
* operationId: llm-index-note
|
||||
* parameters:
|
||||
* - name: noteId
|
||||
* in: path
|
||||
* required: true
|
||||
* schema:
|
||||
* type: string
|
||||
* responses:
|
||||
* '200':
|
||||
* description: Note indexed successfully
|
||||
* security:
|
||||
* - session: []
|
||||
* tags: ["llm"]
|
||||
*/
|
||||
async function indexNote(req: Request, res: Response) {
|
||||
try {
|
||||
// Check if AI is enabled
|
||||
const aiEnabled = await options.getOptionBool('aiEnabled');
|
||||
if (!aiEnabled) {
|
||||
return {
|
||||
success: false,
|
||||
message: "AI features are disabled"
|
||||
};
|
||||
}
|
||||
|
||||
const { noteId } = req.params;
|
||||
|
||||
if (!noteId) {
|
||||
return {
|
||||
success: false,
|
||||
message: "Note ID is required"
|
||||
};
|
||||
}
|
||||
|
||||
// Index the note
|
||||
const result = await indexService.generateNoteIndex(noteId);
|
||||
|
||||
return {
|
||||
success: true,
|
||||
message: result ? "Note indexed successfully" : "Failed to index note"
|
||||
};
|
||||
} catch (error: any) {
|
||||
log.error(`Error indexing note: ${error.message || 'Unknown error'}`);
|
||||
throw new Error(`Failed to index note: ${error.message || 'Unknown error'}`);
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* @swagger
|
||||
* /api/llm/chat/{chatNoteId}/messages/stream:
|
||||
* post:
|
||||
* summary: Stream a message to an LLM via WebSocket
|
||||
* operationId: llm-stream-message
|
||||
* parameters:
|
||||
* - name: chatNoteId
|
||||
* in: path
|
||||
* required: true
|
||||
* schema:
|
||||
* type: string
|
||||
* description: The ID of the chat note to stream messages to (formerly sessionId)
|
||||
* requestBody:
|
||||
* required: true
|
||||
* content:
|
||||
* application/json:
|
||||
* schema:
|
||||
* type: object
|
||||
* properties:
|
||||
* content:
|
||||
* type: string
|
||||
* description: The user message to send to the LLM
|
||||
* useAdvancedContext:
|
||||
* type: boolean
|
||||
* description: Whether to use advanced context extraction
|
||||
* showThinking:
|
||||
* type: boolean
|
||||
* description: Whether to show thinking process in the response
|
||||
* responses:
|
||||
* '200':
|
||||
* description: Streaming started successfully
|
||||
* '404':
|
||||
* description: Session not found
|
||||
* '500':
|
||||
* description: Error processing request
|
||||
* security:
|
||||
* - session: []
|
||||
* tags: ["llm"]
|
||||
*/
|
||||
async function streamMessage(req: Request, res: Response) {
|
||||
log.info("=== Starting streamMessage ===");
|
||||
try {
|
||||
const chatNoteId = req.params.chatNoteId;
|
||||
const { content, useAdvancedContext, showThinking } = req.body;
|
||||
|
||||
if (!content || typeof content !== 'string' || content.trim().length === 0) {
|
||||
throw new Error('Content cannot be empty');
|
||||
}
|
||||
|
||||
// Check if session exists
|
||||
const session = restChatService.getSessions().get(chatNoteId);
|
||||
if (!session) {
|
||||
throw new Error('Chat not found');
|
||||
}
|
||||
|
||||
// Update last active timestamp
|
||||
session.lastActive = new Date();
|
||||
|
||||
// Add user message to the session
|
||||
session.messages.push({
|
||||
role: 'user',
|
||||
content,
|
||||
timestamp: new Date()
|
||||
});
|
||||
|
||||
// Create request parameters for the pipeline
|
||||
const requestParams = {
|
||||
chatNoteId: chatNoteId,
|
||||
content,
|
||||
useAdvancedContext: useAdvancedContext === true,
|
||||
showThinking: showThinking === true,
|
||||
stream: true // Always stream for this endpoint
|
||||
};
|
||||
|
||||
// Create a fake request (the real response object is reused) to pass to the handler
|
||||
const fakeReq = {
|
||||
...req,
|
||||
method: 'GET', // Set to GET to indicate streaming
|
||||
query: {
|
||||
stream: 'true', // Set stream param - don't use format: 'stream' to avoid confusion
|
||||
useAdvancedContext: String(useAdvancedContext === true),
|
||||
showThinking: String(showThinking === true)
|
||||
},
|
||||
params: {
|
||||
chatNoteId: chatNoteId
|
||||
},
|
||||
// Make sure the original content is available to the handler
|
||||
body: {
|
||||
content,
|
||||
useAdvancedContext: useAdvancedContext === true,
|
||||
showThinking: showThinking === true
|
||||
}
|
||||
} as unknown as Request;
|
||||
|
||||
// Log to verify correct parameters
|
||||
log.info(`WebSocket stream settings - useAdvancedContext=${useAdvancedContext === true}, in query=${fakeReq.query.useAdvancedContext}, in body=${fakeReq.body.useAdvancedContext}`);
|
||||
// Extra safety to ensure the parameters are passed correctly
|
||||
if (useAdvancedContext === true) {
|
||||
log.info(`Enhanced context IS enabled for this request`);
|
||||
} else {
|
||||
log.info(`Enhanced context is NOT enabled for this request`);
|
||||
}
|
||||
|
||||
// Process the request in the background
|
||||
Promise.resolve().then(async () => {
|
||||
try {
|
||||
await restChatService.handleSendMessage(fakeReq, res);
|
||||
} catch (error) {
|
||||
log.error(`Background message processing error: ${error}`);
|
||||
|
||||
// Import the WebSocket service
|
||||
const wsService = (await import('../../services/ws.js')).default;
|
||||
|
||||
// Define LLMStreamMessage interface
|
||||
interface LLMStreamMessage {
|
||||
type: 'llm-stream';
|
||||
chatNoteId: string;
|
||||
content?: string;
|
||||
thinking?: string;
|
||||
toolExecution?: any;
|
||||
done?: boolean;
|
||||
error?: string;
|
||||
raw?: unknown;
|
||||
}
|
||||
|
||||
// Send error to client via WebSocket
|
||||
wsService.sendMessageToAllClients({
|
||||
type: 'llm-stream',
|
||||
chatNoteId: chatNoteId,
|
||||
error: `Error processing message: ${error}`,
|
||||
done: true
|
||||
} as LLMStreamMessage);
|
||||
}
|
||||
});
|
||||
|
||||
// Import the WebSocket service
|
||||
const wsService = (await import('../../services/ws.js')).default;
|
||||
|
||||
// Let the client know streaming has started via WebSocket (helps client confirm connection is working)
|
||||
wsService.sendMessageToAllClients({
|
||||
type: 'llm-stream',
|
||||
chatNoteId: chatNoteId,
|
||||
thinking: 'Initializing streaming LLM response...'
|
||||
});
|
||||
|
||||
// Let the client know streaming has started via HTTP response
|
||||
return {
|
||||
success: true,
|
||||
message: 'Streaming started',
|
||||
chatNoteId: chatNoteId
|
||||
};
|
||||
} catch (error: any) {
|
||||
log.error(`Error starting message stream: ${error.message}`);
|
||||
throw error;
|
||||
}
|
||||
}
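// Illustrative usage sketch (not part of this module): a browser-side client that starts a
// stream via this endpoint and then listens for the 'llm-stream' WebSocket messages emitted
// above. The message fields (content/thinking/error/done) mirror this handler, but the
// WebSocket URL and the exact client contract are assumptions, not a documented API.
async function streamChatMessageExample(chatNoteId: string, content: string) {
    const ws = new WebSocket(`ws://${location.host}`);

    ws.addEventListener('message', (event) => {
        const message = JSON.parse(event.data);

        if (message.type !== 'llm-stream' || message.chatNoteId !== chatNoteId) {
            return;
        }

        if (message.content) {
            console.log('chunk:', message.content);
        }
        if (message.error) {
            console.error('stream error:', message.error);
        }
        if (message.done) {
            ws.close();
        }
    });

    // The HTTP response only confirms that streaming has started; the answer itself arrives over the WebSocket.
    await fetch(`/api/llm/chat/${chatNoteId}/messages/stream`, {
        method: 'POST',
        headers: { 'Content-Type': 'application/json' },
        credentials: 'include',
        body: JSON.stringify({ content, useAdvancedContext: false, showThinking: false })
    });
}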
|
||||
|
||||
export default {
|
||||
// Chat session management
|
||||
createSession,
|
||||
getSession,
|
||||
updateSession,
|
||||
listSessions,
|
||||
deleteSession,
|
||||
sendMessage,
|
||||
streamMessage,
|
||||
|
||||
// Knowledge base index management
|
||||
getIndexStats,
|
||||
startIndexing,
|
||||
getFailedIndexes,
|
||||
retryFailedIndex,
|
||||
retryAllFailedIndexes,
|
||||
findSimilarNotes,
|
||||
generateQueryContext,
|
||||
indexNote
|
||||
};
|
||||
183
apps/server/src/routes/api/login.ts
Normal file
183
apps/server/src/routes/api/login.ts
Normal file
@@ -0,0 +1,183 @@
|
||||
"use strict";
|
||||
|
||||
import options from "../../services/options.js";
|
||||
import utils from "../../services/utils.js";
|
||||
import dateUtils from "../../services/date_utils.js";
|
||||
import instanceId from "../../services/instance_id.js";
|
||||
import passwordEncryptionService from "../../services/encryption/password_encryption.js";
|
||||
import protectedSessionService from "../../services/protected_session.js";
|
||||
import appInfo from "../../services/app_info.js";
|
||||
import eventService from "../../services/events.js";
|
||||
import sqlInit from "../../services/sql_init.js";
|
||||
import sql from "../../services/sql.js";
|
||||
import ws from "../../services/ws.js";
|
||||
import etapiTokenService from "../../services/etapi_tokens.js";
|
||||
import type { Request } from "express";
|
||||
|
||||
/**
|
||||
* @swagger
|
||||
* /api/login/sync:
|
||||
* post:
|
||||
* tags:
|
||||
* - auth
|
||||
* summary: Log in using documentSecret
|
||||
* description: The `hash` parameter is computed using a HMAC of the `documentSecret` and `timestamp`.
|
||||
* operationId: login-sync
|
||||
* externalDocs:
|
||||
* description: HMAC calculation
|
||||
* url: https://github.com/TriliumNext/Notes/blob/v0.91.6/src/services/utils.ts#L62-L66
|
||||
* requestBody:
|
||||
* content:
|
||||
* application/json:
|
||||
* schema:
|
||||
* type: object
|
||||
* properties:
|
||||
* timestamp:
|
||||
* $ref: '#/components/schemas/UtcDateTime'
|
||||
* hash:
|
||||
* type: string
|
||||
* syncVersion:
|
||||
* type: integer
|
||||
* example: 34
|
||||
* responses:
|
||||
* '200':
|
||||
* description: Successful operation
|
||||
* content:
|
||||
* application/json:
|
||||
* schema:
|
||||
* type: object
|
||||
* properties:
|
||||
* syncVersion:
|
||||
* type: integer
|
||||
* example: 34
|
||||
* options:
|
||||
* type: object
|
||||
* properties:
|
||||
* documentSecret:
|
||||
* type: string
|
||||
* '400':
|
||||
* description: Sync version / document secret mismatch
|
||||
* content:
|
||||
* application/json:
|
||||
* schema:
|
||||
* type: object
|
||||
* properties:
|
||||
* message:
|
||||
* type: string
|
||||
* example: "Non-matching sync versions, local is version ${server syncVersion}, remote is ${requested syncVersion}. It is recommended to run same version of Trilium on both sides of sync"
|
||||
* '401':
|
||||
* description: Timestamp mismatch
|
||||
* content:
|
||||
* application/json:
|
||||
* schema:
|
||||
* type: object
|
||||
* properties:
|
||||
* message:
|
||||
* type: string
|
||||
* example: "Auth request time is out of sync, please check that both client and server have correct time. The difference between clocks has to be smaller than 5 minutes"
|
||||
*/
|
||||
function loginSync(req: Request) {
|
||||
if (!sqlInit.schemaExists()) {
|
||||
return [500, { message: "DB schema does not exist, can't sync." }];
|
||||
}
|
||||
|
||||
const timestampStr = req.body.timestamp;
|
||||
|
||||
const timestamp = dateUtils.parseDateTime(timestampStr);
|
||||
|
||||
const now = new Date();
|
||||
|
||||
// login token is valid for 5 minutes
|
||||
if (Math.abs(timestamp.getTime() - now.getTime()) > 5 * 60 * 1000) {
|
||||
return [401, { message: "Auth request time is out of sync, please check that both client and server have correct time. The difference between clocks has to be smaller than 5 minutes." }];
|
||||
}
|
||||
|
||||
const syncVersion = req.body.syncVersion;
|
||||
|
||||
if (syncVersion !== appInfo.syncVersion) {
|
||||
return [
|
||||
400,
|
||||
{ message: `Non-matching sync versions, local is version ${appInfo.syncVersion}, remote is ${syncVersion}. It is recommended to run same version of Trilium on both sides of sync.` }
|
||||
];
|
||||
}
|
||||
|
||||
const documentSecret = options.getOption("documentSecret");
|
||||
const expectedHash = utils.hmac(documentSecret, timestampStr);
|
||||
|
||||
const givenHash = req.body.hash;
|
||||
|
||||
if (expectedHash !== givenHash) {
|
||||
return [400, { message: "Sync login credentials are incorrect. It looks like you're trying to sync two different initialized documents which is not possible." }];
|
||||
}
|
||||
|
||||
req.session.loggedIn = true;
|
||||
|
||||
return {
|
||||
instanceId: instanceId,
|
||||
maxEntityChangeId: sql.getValue("SELECT COALESCE(MAX(id), 0) FROM entity_changes WHERE isSynced = 1")
|
||||
};
|
||||
}
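// Illustrative usage sketch (not part of this module): how a sync client could assemble the
// request body checked above. The digest and encoding (HMAC-SHA256, base64) are assumptions
// here; the authoritative definition is utils.hmac, referenced by the externalDocs link in the
// swagger block. The timestamp must be in the format expected by dateUtils.parseDateTime.
import { createHmac } from "crypto";

function buildSyncLoginPayloadExample(documentSecret: string, timestamp: string, syncVersion: number) {
    const hash = createHmac("sha256", Buffer.from(documentSecret, "utf-8"))
        .update(timestamp)
        .digest("base64");

    return { timestamp, hash, syncVersion };
}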
|
||||
|
||||
function loginToProtectedSession(req: Request) {
|
||||
const password = req.body.password;
|
||||
|
||||
if (!passwordEncryptionService.verifyPassword(password)) {
|
||||
return {
|
||||
success: false,
|
||||
message: "Given current password doesn't match hash"
|
||||
};
|
||||
}
|
||||
|
||||
const decryptedDataKey = passwordEncryptionService.getDataKey(password);
|
||||
if (!decryptedDataKey) {
|
||||
return {
|
||||
success: false,
|
||||
message: "Unable to obtain data key."
|
||||
};
|
||||
}
|
||||
|
||||
protectedSessionService.setDataKey(decryptedDataKey);
|
||||
|
||||
eventService.emit(eventService.ENTER_PROTECTED_SESSION);
|
||||
|
||||
ws.sendMessageToAllClients({ type: "protectedSessionLogin" });
|
||||
|
||||
return {
|
||||
success: true
|
||||
};
|
||||
}
|
||||
|
||||
function logoutFromProtectedSession() {
|
||||
protectedSessionService.resetDataKey();
|
||||
|
||||
eventService.emit(eventService.LEAVE_PROTECTED_SESSION);
|
||||
|
||||
ws.sendMessageToAllClients({ type: "protectedSessionLogout" });
|
||||
}
|
||||
|
||||
function touchProtectedSession() {
|
||||
protectedSessionService.touchProtectedSession();
|
||||
}
|
||||
|
||||
function token(req: Request) {
|
||||
const password = req.body.password;
|
||||
|
||||
if (!passwordEncryptionService.verifyPassword(password)) {
|
||||
return [401, "Incorrect password"];
|
||||
}
|
||||
|
||||
// for backwards compatibility with Sender which does not send the name
|
||||
const tokenName = req.body.tokenName || "Trilium Sender / Web Clipper";
|
||||
|
||||
const { authToken } = etapiTokenService.createToken(tokenName);
|
||||
|
||||
return { token: authToken };
|
||||
}
|
||||
|
||||
export default {
|
||||
loginSync,
|
||||
loginToProtectedSession,
|
||||
logoutFromProtectedSession,
|
||||
touchProtectedSession,
|
||||
token
|
||||
};
|
||||
385
apps/server/src/routes/api/note_map.ts
Normal file
385
apps/server/src/routes/api/note_map.ts
Normal file
@@ -0,0 +1,385 @@
|
||||
"use strict";
|
||||
|
||||
import becca from "../../becca/becca.js";
|
||||
import { JSDOM } from "jsdom";
|
||||
import type BNote from "../../becca/entities/bnote.js";
|
||||
import type BAttribute from "../../becca/entities/battribute.js";
|
||||
import type { Request } from "express";
|
||||
|
||||
interface Backlink {
|
||||
noteId: string;
|
||||
relationName?: string;
|
||||
excerpts?: string[];
|
||||
}
|
||||
|
||||
function buildDescendantCountMap(noteIdsToCount: string[]) {
|
||||
if (!Array.isArray(noteIdsToCount)) {
|
||||
throw new Error("noteIdsToCount: type error");
|
||||
}
|
||||
|
||||
const noteIdToCountMap = Object.create(null);
|
||||
|
||||
function getCount(noteId: string) {
|
||||
if (!(noteId in noteIdToCountMap)) {
|
||||
const note = becca.getNote(noteId);
|
||||
if (!note) {
|
||||
return;
|
||||
}
|
||||
|
||||
const hiddenImageNoteIds = note.getRelations("imageLink").map((rel) => rel.value);
|
||||
const childNoteIds = note.children.map((child) => child.noteId);
|
||||
const nonHiddenNoteIds = childNoteIds.filter((childNoteId) => !hiddenImageNoteIds.includes(childNoteId));
|
||||
|
||||
noteIdToCountMap[noteId] = nonHiddenNoteIds.length;
|
||||
|
||||
for (const child of note.children) {
|
||||
noteIdToCountMap[noteId] += getCount(child.noteId);
|
||||
}
|
||||
}
|
||||
|
||||
return noteIdToCountMap[noteId];
|
||||
}
|
||||
noteIdsToCount.forEach((noteId) => {
|
||||
getCount(noteId);
|
||||
});
|
||||
|
||||
return noteIdToCountMap;
|
||||
}
|
||||
function getNeighbors(note: BNote, depth: number): string[] {
|
||||
if (depth === 0) {
|
||||
return [];
|
||||
}
|
||||
|
||||
const retNoteIds = [];
|
||||
|
||||
function isIgnoredRelation(relation: BAttribute) {
|
||||
return ["relationMapLink", "template", "inherit", "image", "ancestor"].includes(relation.name);
|
||||
}
|
||||
|
||||
// forward links
|
||||
for (const relation of note.getRelations()) {
|
||||
if (isIgnoredRelation(relation)) {
|
||||
continue;
|
||||
}
|
||||
|
||||
const targetNote = relation.getTargetNote();
|
||||
|
||||
if (!targetNote || targetNote.isLabelTruthy("excludeFromNoteMap")) {
|
||||
continue;
|
||||
}
|
||||
|
||||
retNoteIds.push(targetNote.noteId);
|
||||
|
||||
for (const noteId of getNeighbors(targetNote, depth - 1)) {
|
||||
retNoteIds.push(noteId);
|
||||
}
|
||||
}
|
||||
|
||||
// backward links
|
||||
for (const relation of note.getTargetRelations()) {
|
||||
if (isIgnoredRelation(relation)) {
|
||||
continue;
|
||||
}
|
||||
|
||||
const sourceNote = relation.getNote();
|
||||
|
||||
if (!sourceNote || sourceNote.isLabelTruthy("excludeFromNoteMap")) {
|
||||
continue;
|
||||
}
|
||||
|
||||
retNoteIds.push(sourceNote.noteId);
|
||||
|
||||
for (const noteId of getNeighbors(sourceNote, depth - 1)) {
|
||||
retNoteIds.push(noteId);
|
||||
}
|
||||
}
|
||||
|
||||
return retNoteIds;
|
||||
}
|
||||
|
||||
function getLinkMap(req: Request) {
|
||||
const mapRootNote = becca.getNoteOrThrow(req.params.noteId);
|
||||
|
||||
// if the map root itself has "excludeFromNoteMap" attribute (journal typically) then there wouldn't be anything
|
||||
// to display, so we'll just ignore it
|
||||
const ignoreExcludeFromNoteMap = mapRootNote.isLabelTruthy("excludeFromNoteMap");
|
||||
let unfilteredNotes;
|
||||
|
||||
if (mapRootNote.type === "search") {
|
||||
// for search notes, we want to consider the direct search results only without the descendants
|
||||
unfilteredNotes = mapRootNote.getSearchResultNotes();
|
||||
} else {
|
||||
unfilteredNotes = mapRootNote.getSubtree({
|
||||
includeArchived: false,
|
||||
resolveSearch: true,
|
||||
includeHidden: mapRootNote.isInHiddenSubtree()
|
||||
}).notes;
|
||||
}
|
||||
|
||||
const noteIds = new Set(unfilteredNotes.filter((note) => ignoreExcludeFromNoteMap || !note.isLabelTruthy("excludeFromNoteMap")).map((note) => note.noteId));
|
||||
|
||||
if (mapRootNote.type === "search") {
|
||||
noteIds.delete(mapRootNote.noteId);
|
||||
}
|
||||
|
||||
for (const noteId of getNeighbors(mapRootNote, 3)) {
|
||||
noteIds.add(noteId);
|
||||
}
|
||||
|
||||
const noteIdsArray = Array.from(noteIds);
|
||||
|
||||
const notes = noteIdsArray.map((noteId) => {
|
||||
const note = becca.getNoteOrThrow(noteId);
|
||||
|
||||
return [note.noteId, note.getTitleOrProtected(), note.type, note.getLabelValue("color")];
|
||||
});
|
||||
|
||||
const links = Object.values(becca.attributes)
|
||||
.filter((rel) => {
|
||||
if (rel.type !== "relation" || rel.name === "relationMapLink" || rel.name === "template" || rel.name === "inherit") {
|
||||
return false;
|
||||
} else if (!noteIds.has(rel.noteId) || !noteIds.has(rel.value)) {
|
||||
return false;
|
||||
} else if (rel.name === "imageLink") {
|
||||
const parentNote = becca.getNote(rel.noteId);
|
||||
if (!parentNote) {
|
||||
return false;
|
||||
}
|
||||
|
||||
return !parentNote.getChildNotes().find((childNote) => childNote.noteId === rel.value);
|
||||
} else {
|
||||
return true;
|
||||
}
|
||||
})
|
||||
.map((rel) => ({
|
||||
id: `${rel.noteId}-${rel.name}-${rel.value}`,
|
||||
sourceNoteId: rel.noteId,
|
||||
targetNoteId: rel.value,
|
||||
name: rel.name
|
||||
}));
|
||||
|
||||
return {
|
||||
notes: notes,
|
||||
noteIdToDescendantCountMap: buildDescendantCountMap(noteIdsArray),
|
||||
links: links
|
||||
};
|
||||
}
|
||||
|
||||
function getTreeMap(req: Request) {
|
||||
const mapRootNote = becca.getNoteOrThrow(req.params.noteId);
|
||||
// if the map root itself has "excludeFromNoteMap" (journal typically) then there wouldn't be anything to display,
|
||||
// so we'll just ignore it
|
||||
const ignoreExcludeFromNoteMap = mapRootNote.isLabelTruthy("excludeFromNoteMap");
|
||||
const subtree = mapRootNote.getSubtree({
|
||||
includeArchived: false,
|
||||
resolveSearch: true,
|
||||
includeHidden: mapRootNote.isInHiddenSubtree()
|
||||
});
|
||||
|
||||
const notes = subtree.notes
|
||||
.filter((note) => ignoreExcludeFromNoteMap || !note.isLabelTruthy("excludeFromNoteMap"))
|
||||
.filter((note) => {
|
||||
if (note.type !== "image" || note.getChildNotes().length > 0) {
|
||||
return true;
|
||||
}
|
||||
|
||||
const imageLinkRelation = note.getTargetRelations().find((rel) => rel.name === "imageLink");
|
||||
|
||||
if (!imageLinkRelation) {
|
||||
return true;
|
||||
}
|
||||
|
||||
return !note.getParentNotes().find((parentNote) => parentNote.noteId === imageLinkRelation.noteId);
|
||||
})
|
||||
.map((note) => [note.noteId, note.getTitleOrProtected(), note.type, note.getLabelValue("color")]);
|
||||
|
||||
const noteIds = new Set<string>();
|
||||
notes.forEach(([noteId]) => noteId && noteIds.add(noteId));
|
||||
|
||||
const links = [];
|
||||
|
||||
for (const { parentNoteId, childNoteId } of subtree.relationships) {
|
||||
if (!noteIds.has(parentNoteId) || !noteIds.has(childNoteId)) {
|
||||
continue;
|
||||
}
|
||||
|
||||
links.push({
|
||||
sourceNoteId: parentNoteId,
|
||||
targetNoteId: childNoteId
|
||||
});
|
||||
}
|
||||
|
||||
const noteIdToDescendantCountMap = buildDescendantCountMap(Array.from(noteIds));
|
||||
|
||||
updateDescendantCountMapForSearch(noteIdToDescendantCountMap, subtree.relationships);
|
||||
|
||||
return {
|
||||
notes: notes,
|
||||
noteIdToDescendantCountMap: noteIdToDescendantCountMap,
|
||||
links: links
|
||||
};
|
||||
}
|
||||
|
||||
function updateDescendantCountMapForSearch(noteIdToDescendantCountMap: Record<string, number>, relationships: { parentNoteId: string; childNoteId: string }[]) {
|
||||
for (const { parentNoteId, childNoteId } of relationships) {
|
||||
const parentNote = becca.notes[parentNoteId];
|
||||
if (!parentNote || parentNote.type !== "search") {
|
||||
continue;
|
||||
}
|
||||
|
||||
noteIdToDescendantCountMap[parentNote.noteId] = noteIdToDescendantCountMap[parentNoteId] || 0;
|
||||
noteIdToDescendantCountMap[parentNote.noteId] += noteIdToDescendantCountMap[childNoteId] || 1;
|
||||
}
|
||||
}
|
||||
|
||||
function removeImages(document: Document) {
|
||||
const images = document.getElementsByTagName("img");
|
||||
while (images && images.length > 0) {
|
||||
images[0]?.parentNode?.removeChild(images[0]);
|
||||
}
|
||||
}
|
||||
|
||||
const EXCERPT_CHAR_LIMIT = 200;
|
||||
type ElementOrText = Element | Text;
|
||||
|
||||
function findExcerpts(sourceNote: BNote, referencedNoteId: string) {
|
||||
const html = sourceNote.getContent();
|
||||
const document = new JSDOM(html).window.document;
|
||||
|
||||
const excerpts = [];
|
||||
|
||||
removeImages(document);
|
||||
|
||||
for (const linkEl of document.querySelectorAll("a")) {
|
||||
const href = linkEl.getAttribute("href");
|
||||
|
||||
if (!href || !href.endsWith(referencedNoteId)) {
|
||||
continue;
|
||||
}
|
||||
|
||||
linkEl.classList.add("backlink-link");
|
||||
|
||||
let centerEl: HTMLElement = linkEl;
|
||||
|
||||
while (centerEl.tagName !== "BODY" && centerEl.parentElement && (centerEl.parentElement?.textContent?.length || 0) <= EXCERPT_CHAR_LIMIT) {
|
||||
centerEl = centerEl.parentElement;
|
||||
}
|
||||
|
||||
const excerptEls: ElementOrText[] = [centerEl];
|
||||
let excerptLength = centerEl.textContent?.length || 0;
|
||||
let left: ElementOrText = centerEl;
|
||||
let right: ElementOrText = centerEl;
|
||||
|
||||
while (excerptLength < EXCERPT_CHAR_LIMIT) {
|
||||
let added = false;
|
||||
|
||||
const prev: Element | null = left.previousElementSibling;
|
||||
|
||||
if (prev) {
|
||||
const prevText = prev.textContent || "";
|
||||
|
||||
if (prevText.length + excerptLength > EXCERPT_CHAR_LIMIT) {
|
||||
const prefix = prevText.substr(prevText.length - (EXCERPT_CHAR_LIMIT - excerptLength));
|
||||
|
||||
const textNode = document.createTextNode(`…${prefix}`);
|
||||
excerptEls.unshift(textNode);
|
||||
|
||||
break;
|
||||
}
|
||||
|
||||
left = prev;
|
||||
excerptEls.unshift(left);
|
||||
excerptLength += prevText.length;
|
||||
added = true;
|
||||
}
|
||||
|
||||
const next: Element | null = right.nextElementSibling;
|
||||
|
||||
if (next) {
|
||||
const nextText = next.textContent;
|
||||
|
||||
if (nextText && nextText.length + excerptLength > EXCERPT_CHAR_LIMIT) {
|
||||
const suffix = nextText.substr(nextText.length - (EXCERPT_CHAR_LIMIT - excerptLength));
|
||||
|
||||
const textNode = document.createTextNode(`${suffix}…`);
|
||||
excerptEls.push(textNode);
|
||||
|
||||
break;
|
||||
}
|
||||
|
||||
right = next;
|
||||
excerptEls.push(right);
|
||||
excerptLength += nextText?.length || 0;
|
||||
added = true;
|
||||
}
|
||||
|
||||
if (!added) {
|
||||
break;
|
||||
}
|
||||
}
|
||||
|
||||
const excerptWrapper = document.createElement("div");
|
||||
excerptWrapper.classList.add("ck-content");
|
||||
excerptWrapper.classList.add("backlink-excerpt");
|
||||
|
||||
for (const childEl of excerptEls) {
|
||||
excerptWrapper.appendChild(childEl);
|
||||
}
|
||||
|
||||
excerpts.push(excerptWrapper.outerHTML);
|
||||
}
|
||||
return excerpts;
|
||||
}
|
||||
|
||||
function getFilteredBacklinks(note: BNote): BAttribute[] {
|
||||
return (
|
||||
note
|
||||
.getTargetRelations()
|
||||
// search notes have "ancestor" relations which are not interesting
|
||||
.filter((relation) => !!relation.getNote() && relation.getNote().type !== "search")
|
||||
);
|
||||
}
|
||||
|
||||
function getBacklinkCount(req: Request) {
|
||||
const { noteId } = req.params;
|
||||
|
||||
const note = becca.getNoteOrThrow(noteId);
|
||||
|
||||
return {
|
||||
count: getFilteredBacklinks(note).length
|
||||
};
|
||||
}
|
||||
|
||||
function getBacklinks(req: Request): Backlink[] {
|
||||
const { noteId } = req.params;
|
||||
const note = becca.getNoteOrThrow(noteId);
|
||||
|
||||
let backlinksWithExcerptCount = 0;
|
||||
|
||||
return getFilteredBacklinks(note).map((backlink) => {
|
||||
const sourceNote = backlink.note;
|
||||
|
||||
if (sourceNote.type !== "text" || backlinksWithExcerptCount > 50) {
|
||||
return {
|
||||
noteId: sourceNote.noteId,
|
||||
relationName: backlink.name
|
||||
};
|
||||
}
|
||||
|
||||
backlinksWithExcerptCount++;
|
||||
|
||||
const excerpts = findExcerpts(sourceNote, noteId);
|
||||
|
||||
return {
|
||||
noteId: sourceNote.noteId,
|
||||
excerpts
|
||||
};
|
||||
});
|
||||
}
|
||||
|
||||
export default {
|
||||
getLinkMap,
|
||||
getTreeMap,
|
||||
getBacklinkCount,
|
||||
getBacklinks
|
||||
};
|
||||
383
apps/server/src/routes/api/notes.ts
Normal file
383
apps/server/src/routes/api/notes.ts
Normal file
@@ -0,0 +1,383 @@
|
||||
"use strict";
|
||||
|
||||
import noteService from "../../services/notes.js";
|
||||
import eraseService from "../../services/erase.js";
|
||||
import treeService from "../../services/tree.js";
|
||||
import sql from "../../services/sql.js";
|
||||
import utils from "../../services/utils.js";
|
||||
import log from "../../services/log.js";
|
||||
import TaskContext from "../../services/task_context.js";
|
||||
import becca from "../../becca/becca.js";
|
||||
import ValidationError from "../../errors/validation_error.js";
|
||||
import blobService from "../../services/blob.js";
|
||||
import type { Request } from "express";
|
||||
import type BBranch from "../../becca/entities/bbranch.js";
|
||||
import type { AttributeRow } from "../../becca/entities/rows.js";
|
||||
|
||||
/**
|
||||
* @swagger
|
||||
* /api/notes/{noteId}:
|
||||
* get:
|
||||
* summary: Retrieve note metadata
|
||||
* operationId: notes-get
|
||||
* parameters:
|
||||
* - name: noteId
|
||||
* in: path
|
||||
* required: true
|
||||
* schema:
|
||||
* $ref: "#/components/schemas/NoteId"
|
||||
* responses:
|
||||
* '200':
|
||||
* description: Note metadata
|
||||
* content:
|
||||
* application/json:
|
||||
* schema:
|
||||
* allOf:
|
||||
* - $ref: '#/components/schemas/Note'
|
||||
* - $ref: "#/components/schemas/Timestamps"
|
||||
* security:
|
||||
* - session: []
|
||||
* tags: ["data"]
|
||||
*/
|
||||
function getNote(req: Request) {
|
||||
return becca.getNoteOrThrow(req.params.noteId);
|
||||
}
|
||||
|
||||
/**
|
||||
* @swagger
|
||||
* /api/notes/{noteId}/blob:
|
||||
* get:
|
||||
* summary: Retrieve note content
|
||||
* operationId: notes-blob
|
||||
* parameters:
|
||||
* - name: noteId
|
||||
* in: path
|
||||
* required: true
|
||||
* schema:
|
||||
* $ref: "#/components/schemas/NoteId"
|
||||
* responses:
|
||||
* '304':
|
||||
* description: Note content
|
||||
* content:
|
||||
* application/json:
|
||||
* schema:
|
||||
* $ref: '#/components/schemas/Blob'
|
||||
* security:
|
||||
* - session: []
|
||||
* tags: ["data"]
|
||||
*/
|
||||
function getNoteBlob(req: Request) {
|
||||
return blobService.getBlobPojo("notes", req.params.noteId);
|
||||
}
|
||||
|
||||
/**
|
||||
* @swagger
|
||||
* /api/notes/{noteId}/metadata:
|
||||
* get:
|
||||
* summary: Retrieve note metadata (limited to timestamps)
|
||||
* operationId: notes-metadata
|
||||
* parameters:
|
||||
* - name: noteId
|
||||
* in: path
|
||||
* required: true
|
||||
* schema:
|
||||
* $ref: "#/components/schemas/NoteId"
|
||||
* responses:
|
||||
* '200':
|
||||
* description: Note metadata
|
||||
* content:
|
||||
* application/json:
|
||||
* schema:
|
||||
* $ref: "#/components/schemas/Timestamps"
|
||||
* security:
|
||||
* - session: []
|
||||
* tags: ["data"]
|
||||
*/
|
||||
function getNoteMetadata(req: Request) {
|
||||
const note = becca.getNoteOrThrow(req.params.noteId);
|
||||
|
||||
return {
|
||||
dateCreated: note.dateCreated,
|
||||
utcDateCreated: note.utcDateCreated,
|
||||
dateModified: note.dateModified,
|
||||
utcDateModified: note.utcDateModified
|
||||
};
|
||||
}
|
||||
|
||||
function createNote(req: Request) {
|
||||
const params = Object.assign({}, req.body); // clone
|
||||
params.parentNoteId = req.params.parentNoteId;
|
||||
|
||||
const { target, targetBranchId } = req.query;
|
||||
|
||||
if (target !== "into" && target !== "after") {
|
||||
throw new ValidationError("Invalid target type.");
|
||||
}
|
||||
|
||||
if (targetBranchId && typeof targetBranchId !== "string") {
|
||||
throw new ValidationError("Missing or incorrect type for target branch ID.");
|
||||
}
|
||||
|
||||
const { note, branch } = noteService.createNewNoteWithTarget(target, targetBranchId, params);
|
||||
|
||||
return {
|
||||
note,
|
||||
branch
|
||||
};
|
||||
}
|
||||
|
||||
function updateNoteData(req: Request) {
|
||||
const { content, attachments } = req.body;
|
||||
const { noteId } = req.params;
|
||||
|
||||
return noteService.updateNoteData(noteId, content, attachments);
|
||||
}
|
||||
|
||||
/**
|
||||
* @swagger
|
||||
* /api/notes/{noteId}:
|
||||
* delete:
|
||||
* summary: Delete note
|
||||
* operationId: notes-delete
|
||||
* parameters:
|
||||
* - name: noteId
|
||||
* in: path
|
||||
* required: true
|
||||
* schema:
|
||||
* $ref: "#/components/schemas/NoteId"
|
||||
* - name: taskId
|
||||
* in: query
|
||||
* required: true
|
||||
* schema:
|
||||
* type: string
|
||||
* description: Task group identifier
|
||||
* - name: eraseNotes
|
||||
* in: query
|
||||
* schema:
|
||||
* type: boolean
|
||||
* required: false
|
||||
* description: Whether to erase the note immediately
|
||||
* - name: last
|
||||
* in: query
|
||||
* schema:
|
||||
* type: boolean
|
||||
* required: true
|
||||
* description: Whether this is the last request of this task group
|
||||
* responses:
|
||||
* '200':
|
||||
* description: Note successfully deleted
|
||||
* security:
|
||||
* - session: []
|
||||
* tags: ["data"]
|
||||
*/
|
||||
function deleteNote(req: Request) {
|
||||
const noteId = req.params.noteId;
|
||||
const taskId = req.query.taskId;
|
||||
const eraseNotes = req.query.eraseNotes === "true";
|
||||
const last = req.query.last === "true";
|
||||
|
||||
// note how deleteId is separate from taskId - single taskId produces separate deleteId for each "top level" deleted note
|
||||
const deleteId = utils.randomString(10);
|
||||
|
||||
const note = becca.getNoteOrThrow(noteId);
|
||||
|
||||
if (typeof taskId !== "string") {
|
||||
throw new ValidationError("Missing or incorrect type for task ID.");
|
||||
}
|
||||
const taskContext = TaskContext.getInstance(taskId, "deleteNotes");
|
||||
|
||||
note.deleteNote(deleteId, taskContext);
|
||||
|
||||
if (eraseNotes) {
|
||||
eraseService.eraseNotesWithDeleteId(deleteId);
|
||||
}
|
||||
|
||||
if (last) {
|
||||
taskContext.taskSucceeded();
|
||||
}
|
||||
}
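// Illustrative usage sketch (not part of this module): deleting several notes under one task
// group, marking only the final request with last=true so the shared TaskContext is finished
// exactly once. The URL shape follows the swagger block above; the taskId generation is just an example.
async function deleteNotesExample(noteIds: string[], eraseNotes = false) {
    const taskId = Math.random().toString(36).slice(2, 12); // shared task group identifier

    for (let i = 0; i < noteIds.length; i++) {
        const params = new URLSearchParams({
            taskId,
            eraseNotes: String(eraseNotes),
            last: String(i === noteIds.length - 1)
        });

        await fetch(`/api/notes/${noteIds[i]}?${params.toString()}`, {
            method: 'DELETE',
            credentials: 'include'
        });
    }
}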
|
||||
|
||||
function undeleteNote(req: Request) {
|
||||
const taskContext = TaskContext.getInstance(utils.randomString(10), "undeleteNotes");
|
||||
|
||||
noteService.undeleteNote(req.params.noteId, taskContext);
|
||||
|
||||
taskContext.taskSucceeded();
|
||||
}
|
||||
|
||||
function sortChildNotes(req: Request) {
|
||||
const noteId = req.params.noteId;
|
||||
const { sortBy, sortDirection, foldersFirst, sortNatural, sortLocale } = req.body;
|
||||
|
||||
log.info(`Sorting '${noteId}' children with ${sortBy} ${sortDirection}, foldersFirst=${foldersFirst}, sortNatural=${sortNatural}, sortLocale=${sortLocale}`);
|
||||
|
||||
const reverse = sortDirection === "desc";
|
||||
|
||||
treeService.sortNotes(noteId, sortBy, reverse, foldersFirst, sortNatural, sortLocale);
|
||||
}
|
||||
|
||||
function protectNote(req: Request) {
|
||||
const noteId = req.params.noteId;
|
||||
const note = becca.notes[noteId];
|
||||
const protect = !!parseInt(req.params.isProtected);
|
||||
const includingSubTree = !!parseInt(req.query?.subtree as string);
|
||||
|
||||
const taskContext = new TaskContext(utils.randomString(10), "protectNotes", { protect });
|
||||
|
||||
noteService.protectNoteRecursively(note, protect, includingSubTree, taskContext);
|
||||
|
||||
taskContext.taskSucceeded();
|
||||
}
|
||||
|
||||
function setNoteTypeMime(req: Request) {
|
||||
// can't use [] destructuring because req.params is not iterable
|
||||
const { noteId } = req.params;
|
||||
const { type, mime } = req.body;
|
||||
|
||||
const note = becca.getNoteOrThrow(noteId);
|
||||
note.type = type;
|
||||
note.mime = mime;
|
||||
note.save();
|
||||
}
|
||||
|
||||
function changeTitle(req: Request) {
|
||||
const noteId = req.params.noteId;
|
||||
const title = req.body.title;
|
||||
|
||||
const note = becca.getNoteOrThrow(noteId);
|
||||
|
||||
if (!note.isContentAvailable()) {
|
||||
throw new ValidationError(`Note '${noteId}' is not available for change`);
|
||||
}
|
||||
|
||||
const noteTitleChanged = note.title !== title;
|
||||
|
||||
if (noteTitleChanged) {
|
||||
noteService.saveRevisionIfNeeded(note);
|
||||
}
|
||||
|
||||
note.title = title;
|
||||
|
||||
note.save();
|
||||
|
||||
if (noteTitleChanged) {
|
||||
noteService.triggerNoteTitleChanged(note);
|
||||
}
|
||||
|
||||
return note;
|
||||
}
|
||||
|
||||
function duplicateSubtree(req: Request) {
|
||||
const { noteId, parentNoteId } = req.params;
|
||||
|
||||
return noteService.duplicateSubtree(noteId, parentNoteId);
|
||||
}
|
||||
|
||||
function eraseDeletedNotesNow() {
|
||||
eraseService.eraseDeletedNotesNow();
|
||||
}
|
||||
|
||||
function eraseUnusedAttachmentsNow() {
|
||||
eraseService.eraseUnusedAttachmentsNow();
|
||||
}
|
||||
|
||||
function getDeleteNotesPreview(req: Request) {
|
||||
const { branchIdsToDelete, deleteAllClones } = req.body;
|
||||
|
||||
const noteIdsToBeDeleted = new Set<string>();
|
||||
const strongBranchCountToDelete: Record<string, number> = {}; // noteId => count
|
||||
|
||||
function branchPreviewDeletion(branch: BBranch) {
|
||||
if (branch.isWeak || !branch.branchId) {
|
||||
return;
|
||||
}
|
||||
|
||||
strongBranchCountToDelete[branch.branchId] = strongBranchCountToDelete[branch.branchId] || 0;
|
||||
strongBranchCountToDelete[branch.branchId]++;
|
||||
|
||||
const note = branch.getNote();
|
||||
|
||||
if (deleteAllClones || note.getStrongParentBranches().length <= strongBranchCountToDelete[branch.branchId]) {
|
||||
noteIdsToBeDeleted.add(note.noteId);
|
||||
|
||||
for (const childBranch of note.getChildBranches()) {
|
||||
branchPreviewDeletion(childBranch);
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
for (const branchId of branchIdsToDelete) {
|
||||
const branch = becca.getBranch(branchId);
|
||||
|
||||
if (!branch) {
|
||||
log.error(`Branch ${branchId} was not found and delete preview can't be calculated for this note.`);
|
||||
|
||||
continue;
|
||||
}
|
||||
|
||||
branchPreviewDeletion(branch);
|
||||
}
|
||||
|
||||
let brokenRelations: AttributeRow[] = [];
|
||||
|
||||
if (noteIdsToBeDeleted.size > 0) {
|
||||
sql.fillParamList(noteIdsToBeDeleted);
|
||||
|
||||
// FIXME: No need to do this in database, can be done with becca data
|
||||
brokenRelations = sql
|
||||
.getRows<AttributeRow>(
|
||||
`
|
||||
SELECT attr.noteId, attr.name, attr.value
|
||||
FROM attributes attr
|
||||
JOIN param_list ON param_list.paramId = attr.value
|
||||
WHERE attr.isDeleted = 0
|
||||
AND attr.type = 'relation'`
|
||||
)
|
||||
.filter((attr) => attr.noteId && !noteIdsToBeDeleted.has(attr.noteId));
|
||||
}
|
||||
|
||||
return {
|
||||
noteIdsToBeDeleted: Array.from(noteIdsToBeDeleted),
|
||||
brokenRelations
|
||||
};
|
||||
}
|
||||
|
||||
function forceSaveRevision(req: Request) {
|
||||
const { noteId } = req.params;
|
||||
const note = becca.getNoteOrThrow(noteId);
|
||||
|
||||
if (!note.isContentAvailable()) {
|
||||
throw new ValidationError(`Note revision of a protected note cannot be created outside of a protected session.`);
|
||||
}
|
||||
|
||||
note.saveRevision();
|
||||
}
|
||||
|
||||
function convertNoteToAttachment(req: Request) {
|
||||
const { noteId } = req.params;
|
||||
const note = becca.getNoteOrThrow(noteId);
|
||||
|
||||
return {
|
||||
attachment: note.convertToParentAttachment()
|
||||
};
|
||||
}
|
||||
|
||||
export default {
|
||||
getNote,
|
||||
getNoteBlob,
|
||||
getNoteMetadata,
|
||||
updateNoteData,
|
||||
deleteNote,
|
||||
undeleteNote,
|
||||
createNote,
|
||||
sortChildNotes,
|
||||
protectNote,
|
||||
setNoteTypeMime,
|
||||
changeTitle,
|
||||
duplicateSubtree,
|
||||
eraseDeletedNotesNow,
|
||||
eraseUnusedAttachmentsNow,
|
||||
getDeleteNotesPreview,
|
||||
forceSaveRevision,
|
||||
convertNoteToAttachment
|
||||
};
|
||||
64
apps/server/src/routes/api/ollama.ts
Normal file
64
apps/server/src/routes/api/ollama.ts
Normal file
@@ -0,0 +1,64 @@
|
||||
import options from "../../services/options.js";
|
||||
import log from "../../services/log.js";
|
||||
import type { Request, Response } from "express";
|
||||
import { Ollama } from "ollama";
|
||||
|
||||
/**
|
||||
* @swagger
|
||||
* /api/llm/providers/ollama/models:
|
||||
* get:
|
||||
* summary: List available models from Ollama
|
||||
* operationId: ollama-list-models
|
||||
* parameters:
|
||||
* - name: baseUrl
|
||||
* in: query
|
||||
* required: false
|
||||
* schema:
|
||||
* type: string
|
||||
* description: Optional custom Ollama API base URL
|
||||
* responses:
|
||||
* '200':
|
||||
* description: List of available Ollama models
|
||||
* content:
|
||||
* application/json:
|
||||
* schema:
|
||||
* type: object
|
||||
* properties:
|
||||
* success:
|
||||
* type: boolean
|
||||
* models:
|
||||
* type: array
|
||||
* items:
|
||||
* type: object
|
||||
* '500':
|
||||
* description: Error listing models
|
||||
* security:
|
||||
* - session: []
|
||||
* tags: ["llm"]
|
||||
*/
|
||||
async function listModels(req: Request, res: Response) {
|
||||
try {
|
||||
const baseUrl = req.query.baseUrl as string || await options.getOption('ollamaBaseUrl') || 'http://localhost:11434';
|
||||
|
||||
// Create Ollama client
|
||||
const ollama = new Ollama({ host: baseUrl });
|
||||
|
||||
// Call Ollama API to get models using the official client
|
||||
const response = await ollama.list();
|
||||
|
||||
// Return the models list
|
||||
return {
|
||||
success: true,
|
||||
models: response.models || []
|
||||
};
|
||||
} catch (error: any) {
|
||||
log.error(`Error listing Ollama models: ${error.message || 'Unknown error'}`);
|
||||
|
||||
// Properly throw the error to be handled by the global error handler
|
||||
throw new Error(`Failed to list Ollama models: ${error.message || 'Unknown error'}`);
|
||||
}
|
||||
}
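// Illustrative usage sketch (not part of this module): querying the model listing endpoint
// documented above, optionally pointing it at a non-default Ollama host. The response fields
// (success/models) follow this handler; everything else is an assumption.
async function listOllamaModelsExample(baseUrl?: string) {
    const params = new URLSearchParams();
    if (baseUrl) {
        params.set('baseUrl', baseUrl);
    }

    const response = await fetch(`/api/llm/providers/ollama/models?${params.toString()}`, {
        credentials: 'include'
    });
    const body = await response.json();

    return body.success ? body.models : [];
}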
|
||||
|
||||
export default {
|
||||
listModels
|
||||
};
|
||||
127
apps/server/src/routes/api/openai.ts
Normal file
127
apps/server/src/routes/api/openai.ts
Normal file
@@ -0,0 +1,127 @@
|
||||
import options from "../../services/options.js";
|
||||
import log from "../../services/log.js";
|
||||
import type { Request, Response } from "express";
|
||||
import OpenAI from "openai";
|
||||
|
||||
/**
|
||||
* @swagger
|
||||
* /api/openai/models:
|
||||
* post:
|
||||
* summary: List available models from OpenAI
|
||||
* operationId: openai-list-models
|
||||
* requestBody:
|
||||
* required: false
|
||||
* content:
|
||||
* application/json:
|
||||
* schema:
|
||||
* type: object
|
||||
* properties:
|
||||
* baseUrl:
|
||||
* type: string
|
||||
* description: Optional custom OpenAI API base URL
|
||||
* responses:
|
||||
* '200':
|
||||
* description: List of available OpenAI models
|
||||
* content:
|
||||
* application/json:
|
||||
* schema:
|
||||
* type: object
|
||||
* properties:
|
||||
* success:
|
||||
* type: boolean
|
||||
* chatModels:
|
||||
* type: array
|
||||
* items:
|
||||
* type: object
|
||||
* properties:
|
||||
* id:
|
||||
* type: string
|
||||
* name:
|
||||
* type: string
|
||||
* type:
|
||||
* type: string
|
||||
* embeddingModels:
|
||||
* type: array
|
||||
* items:
|
||||
* type: object
|
||||
* properties:
|
||||
* id:
|
||||
* type: string
|
||||
* name:
|
||||
* type: string
|
||||
* type:
|
||||
* type: string
|
||||
* '500':
|
||||
* description: Error listing models
|
||||
* security:
|
||||
* - session: []
|
||||
* tags: ["llm"]
|
||||
*/
|
||||
async function listModels(req: Request, res: Response) {
|
||||
try {
|
||||
const { baseUrl } = req.body;
|
||||
|
||||
// Use provided base URL or default from options
|
||||
const openaiBaseUrl = baseUrl || await options.getOption('openaiBaseUrl') || 'https://api.openai.com/v1';
|
||||
const apiKey = await options.getOption('openaiApiKey');
|
||||
|
||||
if (!apiKey) {
|
||||
throw new Error('OpenAI API key is not configured');
|
||||
}
|
||||
|
||||
// Initialize OpenAI client with the API key and base URL
|
||||
const openai = new OpenAI({
|
||||
apiKey,
|
||||
baseURL: openaiBaseUrl
|
||||
});
|
||||
|
||||
// Call OpenAI API to get models using the SDK
|
||||
const response = await openai.models.list();
|
||||
|
||||
// Filter and categorize models
|
||||
const allModels = response.data || [];
|
||||
|
||||
// Separate models into chat models and embedding models
|
||||
const chatModels = allModels
|
||||
.filter((model) =>
|
||||
// Include GPT models for chat
|
||||
model.id.includes('gpt') ||
|
||||
// Include Claude models via Azure OpenAI
|
||||
model.id.includes('claude')
|
||||
)
|
||||
.map((model) => ({
|
||||
id: model.id,
|
||||
name: model.id,
|
||||
type: 'chat'
|
||||
}));
|
||||
|
||||
const embeddingModels = allModels
|
||||
.filter((model) =>
|
||||
// Only include embedding-specific models
|
||||
model.id.includes('embedding') ||
|
||||
model.id.includes('embed')
|
||||
)
|
||||
.map((model) => ({
|
||||
id: model.id,
|
||||
name: model.id,
|
||||
type: 'embedding'
|
||||
}));
|
||||
|
||||
// Return the models list
|
||||
return {
|
||||
success: true,
|
||||
chatModels,
|
||||
embeddingModels
|
||||
};
|
||||
} catch (error: any) {
|
||||
log.error(`Error listing OpenAI models: ${error.message || 'Unknown error'}`);
|
||||
|
||||
// Properly throw the error to be handled by the global error handler
|
||||
throw new Error(`Failed to list OpenAI models: ${error.message || 'Unknown error'}`);
|
||||
}
|
||||
}
|
||||
|
||||
export default {
|
||||
listModels
|
||||
};
|
||||
|
||||
1
apps/server/src/routes/api/openapi.json
Normal file
1
apps/server/src/routes/api/openapi.json
Normal file
File diff suppressed because one or more lines are too long
214
apps/server/src/routes/api/options.ts
Normal file
214
apps/server/src/routes/api/options.ts
Normal file
@@ -0,0 +1,214 @@
|
||||
"use strict";
|
||||
|
||||
import optionService from "../../services/options.js";
|
||||
import log from "../../services/log.js";
|
||||
import searchService from "../../services/search/services/search.js";
|
||||
import ValidationError from "../../errors/validation_error.js";
|
||||
import type { Request } from "express";
|
||||
import { changeLanguage, getLocales } from "../../services/i18n.js";
|
||||
import { listSyntaxHighlightingThemes } from "../../services/code_block_theme.js";
|
||||
import type { OptionNames } from "../../services/options_interface.js";
|
||||
|
||||
// options allowed to be updated directly in the Options dialog
|
||||
const ALLOWED_OPTIONS = new Set<OptionNames>([
|
||||
"eraseEntitiesAfterTimeInSeconds",
|
||||
"eraseEntitiesAfterTimeScale",
|
||||
"protectedSessionTimeout",
|
||||
"protectedSessionTimeoutTimeScale",
|
||||
"revisionSnapshotTimeInterval",
|
||||
"revisionSnapshotTimeIntervalTimeScale",
|
||||
"revisionSnapshotNumberLimit",
|
||||
"zoomFactor",
|
||||
"theme",
|
||||
"codeBlockTheme",
|
||||
"codeBlockWordWrap",
|
||||
"syncServerHost",
|
||||
"syncServerTimeout",
|
||||
"syncProxy",
|
||||
"hoistedNoteId",
|
||||
"mainFontSize",
|
||||
"mainFontFamily",
|
||||
"treeFontSize",
|
||||
"treeFontFamily",
|
||||
"detailFontSize",
|
||||
"detailFontFamily",
|
||||
"monospaceFontSize",
|
||||
"monospaceFontFamily",
|
||||
"openNoteContexts",
|
||||
"vimKeymapEnabled",
|
||||
"codeLineWrapEnabled",
|
||||
"codeNotesMimeTypes",
|
||||
"spellCheckEnabled",
|
||||
"spellCheckLanguageCode",
|
||||
"imageMaxWidthHeight",
|
||||
"imageJpegQuality",
|
||||
"leftPaneWidth",
|
||||
"rightPaneWidth",
|
||||
"leftPaneVisible",
|
||||
"rightPaneVisible",
|
||||
"nativeTitleBarVisible",
|
||||
"headingStyle",
|
||||
"autoCollapseNoteTree",
|
||||
"autoReadonlySizeText",
|
||||
"autoReadonlySizeCode",
|
||||
"overrideThemeFonts",
|
||||
"dailyBackupEnabled",
|
||||
"weeklyBackupEnabled",
|
||||
"monthlyBackupEnabled",
|
||||
"maxContentWidth",
|
||||
"compressImages",
|
||||
"downloadImagesAutomatically",
|
||||
"minTocHeadings",
|
||||
"highlightsList",
|
||||
"checkForUpdates",
|
||||
"disableTray",
|
||||
"eraseUnusedAttachmentsAfterSeconds",
|
||||
"eraseUnusedAttachmentsAfterTimeScale",
|
||||
"disableTray",
|
||||
"customSearchEngineName",
|
||||
"customSearchEngineUrl",
|
||||
"promotedAttributesOpenInRibbon",
|
||||
"editedNotesOpenInRibbon",
|
||||
"locale",
|
||||
"formattingLocale",
|
||||
"firstDayOfWeek",
|
||||
"firstWeekOfYear",
|
||||
"minDaysInFirstWeek",
|
||||
"languages",
|
||||
"textNoteEditorType",
|
||||
"textNoteEditorMultilineToolbar",
|
||||
"layoutOrientation",
|
||||
"backgroundEffects",
|
||||
"allowedHtmlTags",
|
||||
"redirectBareDomain",
|
||||
"showLoginInShareTheme",
|
||||
"splitEditorOrientation",
|
||||
|
||||
// AI/LLM integration options
|
||||
"aiEnabled",
|
||||
"aiTemperature",
|
||||
"aiSystemPrompt",
|
||||
"aiProviderPrecedence",
|
||||
"openaiApiKey",
|
||||
"openaiBaseUrl",
|
||||
"openaiDefaultModel",
|
||||
"openaiEmbeddingModel",
|
||||
"anthropicApiKey",
|
||||
"anthropicBaseUrl",
|
||||
"anthropicDefaultModel",
|
||||
"voyageApiKey",
|
||||
"voyageEmbeddingModel",
|
||||
"ollamaBaseUrl",
|
||||
"ollamaDefaultModel",
|
||||
"ollamaEmbeddingModel",
|
||||
"embeddingAutoUpdateEnabled",
|
||||
"embeddingDimensionStrategy",
|
||||
"embeddingProviderPrecedence",
|
||||
"embeddingSimilarityThreshold",
|
||||
"embeddingBatchSize",
|
||||
"embeddingUpdateInterval",
|
||||
"enableAutomaticIndexing",
|
||||
"maxNotesPerLlmQuery",
|
||||
|
||||
// Embedding options
|
||||
"embeddingDefaultDimension",
|
||||
"mfaEnabled",
|
||||
"mfaMethod"
|
||||
]);
|
||||
|
||||
function getOptions() {
|
||||
const optionMap = optionService.getOptionMap();
|
||||
const resultMap: Record<string, string> = {};
|
||||
|
||||
for (const optionName in optionMap) {
|
||||
if (isAllowed(optionName)) {
|
||||
resultMap[optionName] = optionMap[optionName as OptionNames];
|
||||
}
|
||||
}
|
||||
|
||||
resultMap["isPasswordSet"] = optionMap["passwordVerificationHash"] ? "true" : "false";
|
||||
|
||||
return resultMap;
|
||||
}
|
||||
|
||||
function updateOption(req: Request) {
|
||||
const { name, value } = req.params;
|
||||
|
||||
if (!update(name, value)) {
|
||||
throw new ValidationError("not allowed option to change");
|
||||
}
|
||||
}
|
||||
|
||||
function updateOptions(req: Request) {
|
||||
for (const optionName in req.body) {
|
||||
if (!update(optionName, req.body[optionName])) {
|
||||
// this should be improved
|
||||
// it should return 400 instead of the current 500, but at least it now rolls back the transaction
|
||||
throw new Error(`Option '${optionName}' is not allowed to be changed`);
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
function update(name: string, value: string) {
|
||||
if (!isAllowed(name)) {
|
||||
return false;
|
||||
}
|
||||
|
||||
if (name !== "openNoteContexts") {
|
||||
log.info(`Updating option '${name}' to '${value}'`);
|
||||
}
|
||||
|
||||
optionService.setOption(name as OptionNames, value);
|
||||
|
||||
if (name === "locale") {
|
||||
// This runs asynchronously, so it's not perfect, but it does the trick for now.
|
||||
changeLanguage(value);
|
||||
}
|
||||
|
||||
return true;
|
||||
}
|
||||
|
||||
function getUserThemes() {
|
||||
const notes = searchService.searchNotes("#appTheme", { ignoreHoistedNote: true });
|
||||
const ret = [];
|
||||
|
||||
for (const note of notes) {
|
||||
let value = note.getOwnedLabelValue("appTheme");
|
||||
|
||||
if (!value) {
|
||||
value = note.title.toLowerCase().replace(/[^a-z0-9]/gi, "-");
|
||||
}
|
||||
|
||||
ret.push({
|
||||
val: value,
|
||||
title: note.title,
|
||||
noteId: note.noteId
|
||||
});
|
||||
}
|
||||
|
||||
return ret;
|
||||
}
|
||||
|
||||
function getSyntaxHighlightingThemes() {
|
||||
return listSyntaxHighlightingThemes();
|
||||
}
|
||||
|
||||
function getSupportedLocales() {
|
||||
return getLocales();
|
||||
}
|
||||
|
||||
function isAllowed(name: string) {
|
||||
return (ALLOWED_OPTIONS as Set<string>).has(name)
|
||||
|| name.startsWith("keyboardShortcuts")
|
||||
|| name.endsWith("Collapsed")
|
||||
|| name.startsWith("hideArchivedNotes");
|
||||
}
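// Examples of the allow-list behaviour above (the option names used for the prefix/suffix
// cases are hypothetical, chosen only to illustrate the pattern matching):
//   isAllowed("theme")                     -> true   (explicitly listed)
//   isAllowed("keyboardShortcutsNewNote")  -> true   (startsWith "keyboardShortcuts")
//   isAllowed("leftPaneCollapsed")         -> true   (endsWith "Collapsed")
//   isAllowed("passwordVerificationHash")  -> false  (not exposed for direct change via this API)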
|
||||
|
||||
export default {
|
||||
getOptions,
|
||||
updateOption,
|
||||
updateOptions,
|
||||
getUserThemes,
|
||||
getSyntaxHighlightingThemes,
|
||||
getSupportedLocales
|
||||
};
|
||||
41
apps/server/src/routes/api/other.ts
Normal file
41
apps/server/src/routes/api/other.ts
Normal file
@@ -0,0 +1,41 @@
|
||||
import type { Request } from "express";
|
||||
|
||||
import becca from "../../becca/becca.js";
|
||||
import markdownService from "../../services/import/markdown.js";
|
||||
|
||||
function getIconUsage() {
|
||||
const iconClassToCountMap: Record<string, number> = {};
|
||||
|
||||
for (const { value: iconClass, noteId } of becca.findAttributes("label", "iconClass")) {
|
||||
if (noteId.startsWith("_")) {
|
||||
continue; // ignore icons of "system" notes since they were not set by the user
|
||||
}
|
||||
|
||||
if (!iconClass?.trim()) {
|
||||
continue;
|
||||
}
|
||||
|
||||
for (const clazz of iconClass.trim().split(/\s+/)) {
|
||||
if (clazz === "bx") {
|
||||
continue;
|
||||
}
|
||||
|
||||
iconClassToCountMap[clazz] = (iconClassToCountMap[clazz] || 0) + 1;
|
||||
}
|
||||
}
|
||||
|
||||
return { iconClassToCountMap };
|
||||
}
|
||||
|
||||
function renderMarkdown(req: Request) {
|
||||
const { markdownContent } = req.body;
|
||||
|
||||
return {
|
||||
htmlContent: markdownService.renderToHtml(markdownContent, "")
|
||||
};
|
||||
}
|
||||
|
||||
export default {
|
||||
getIconUsage,
|
||||
renderMarkdown
|
||||
};
|
||||
27
apps/server/src/routes/api/password.ts
Normal file
27
apps/server/src/routes/api/password.ts
Normal file
@@ -0,0 +1,27 @@
|
||||
"use strict";
|
||||
|
||||
import passwordService from "../../services/encryption/password.js";
|
||||
import ValidationError from "../../errors/validation_error.js";
|
||||
import type { Request } from "express";
|
||||
|
||||
function changePassword(req: Request) {
|
||||
if (passwordService.isPasswordSet()) {
|
||||
return passwordService.changePassword(req.body.current_password, req.body.new_password);
|
||||
} else {
|
||||
return passwordService.setPassword(req.body.new_password);
|
||||
}
|
||||
}
|
||||
|
||||
function resetPassword(req: Request) {
|
||||
// protection against accidental call (not a security measure)
|
||||
if (req.query.really !== "yesIReallyWantToResetPasswordAndLoseAccessToMyProtectedNotes") {
|
||||
throw new ValidationError("Incorrect password reset confirmation");
|
||||
}
|
||||
|
||||
return passwordService.resetPassword();
|
||||
}
|
||||
|
||||
export default {
|
||||
changePassword,
|
||||
resetPassword
|
||||
};
|
||||
114
apps/server/src/routes/api/recent_changes.ts
Normal file
114
apps/server/src/routes/api/recent_changes.ts
Normal file
@@ -0,0 +1,114 @@
|
||||
"use strict";
|
||||
|
||||
import sql from "../../services/sql.js";
|
||||
import protectedSessionService from "../../services/protected_session.js";
|
||||
import noteService from "../../services/notes.js";
|
||||
import becca from "../../becca/becca.js";
|
||||
import type { Request } from "express";
|
||||
|
||||
interface RecentChangeRow {
|
||||
noteId: string;
|
||||
current_isDeleted: boolean;
|
||||
current_deleteId: string;
|
||||
current_title: string;
current_isProtected: boolean;
title: string;
utcDate: string;
date: string;
canBeUndeleted?: boolean;
}

function getRecentChanges(req: Request) {
const { ancestorNoteId } = req.params;

let recentChanges: RecentChangeRow[] = [];

const revisionRows = sql.getRows<RecentChangeRow>(`
SELECT
notes.noteId,
notes.isDeleted AS current_isDeleted,
notes.deleteId AS current_deleteId,
notes.title AS current_title,
notes.isProtected AS current_isProtected,
revisions.title,
revisions.utcDateCreated AS utcDate,
revisions.dateCreated AS date
FROM
revisions
JOIN notes USING(noteId)`);

for (const revisionRow of revisionRows) {
const note = becca.getNote(revisionRow.noteId);

// for deleted notes, the becca note is null, and it's not possible to (easily) determine if it belongs to a subtree
if (ancestorNoteId === "root" || note?.hasAncestor(ancestorNoteId)) {
recentChanges.push(revisionRow);
}
}

// now we need to also collect date points not represented in note revisions:
// 1. creation for all notes (dateCreated)
// 2. deletion for deleted notes (dateModified)
const noteRows = sql.getRows<RecentChangeRow>(`
SELECT
notes.noteId,
notes.isDeleted AS current_isDeleted,
notes.deleteId AS current_deleteId,
notes.title AS current_title,
notes.isProtected AS current_isProtected,
notes.title,
notes.utcDateCreated AS utcDate, -- different from the second SELECT
notes.dateCreated AS date -- different from the second SELECT
FROM notes
UNION ALL
SELECT
notes.noteId,
notes.isDeleted AS current_isDeleted,
notes.deleteId AS current_deleteId,
notes.title AS current_title,
notes.isProtected AS current_isProtected,
notes.title,
notes.utcDateModified AS utcDate, -- different from the first SELECT
notes.dateModified AS date -- different from the first SELECT
FROM notes
WHERE notes.isDeleted = 1`);

for (const noteRow of noteRows) {
const note = becca.getNote(noteRow.noteId);

// for deleted notes, the becca note is null, and it's not possible to (easily) determine if it belongs to a subtree
if (ancestorNoteId === "root" || note?.hasAncestor(ancestorNoteId)) {
recentChanges.push(noteRow);
}
}

recentChanges.sort((a, b) => (a.utcDate > b.utcDate ? -1 : 1));

recentChanges = recentChanges.slice(0, Math.min(500, recentChanges.length));

for (const change of recentChanges) {
if (change.current_isProtected) {
if (protectedSessionService.isProtectedSessionAvailable()) {
change.title = protectedSessionService.decryptString(change.title) || "[protected]";
change.current_title = protectedSessionService.decryptString(change.current_title) || "[protected]";
} else {
change.title = change.current_title = "[protected]";
}
}

if (change.current_isDeleted) {
const deleteId = change.current_deleteId;

const undeletedParentBranchIds = noteService.getUndeletedParentBranchIds(change.noteId, deleteId);

// note (and the subtree) can be undeleted if there's at least one undeleted parent (whose branch would be undeleted by this op)
change.canBeUndeleted = undeletedParentBranchIds.length > 0;
}
}

return recentChanges;
}

export default {
getRecentChanges
};
|
||||
24
apps/server/src/routes/api/recent_notes.ts
Normal file
24
apps/server/src/routes/api/recent_notes.ts
Normal file
@@ -0,0 +1,24 @@
|
||||
"use strict";

import BRecentNote from "../../becca/entities/brecent_note.js";
import sql from "../../services/sql.js";
import dateUtils from "../../services/date_utils.js";
import type { Request } from "express";

function addRecentNote(req: Request) {
new BRecentNote({
noteId: req.body.noteId,
notePath: req.body.notePath
}).save();

if (Math.random() < 0.05) {
// it's not necessary to run this every time ...
const cutOffDate = dateUtils.utcDateTimeStr(new Date(Date.now() - 24 * 3600 * 1000));

sql.execute(/*sql*/`DELETE FROM recent_notes WHERE utcDateCreated < ?`, [cutOffDate]);
}
}

export default {
addRecentNote
};
|
||||
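The Math.random() < 0.05 guard in addRecentNote amortizes the cleanup: roughly one call in twenty deletes recent-notes rows older than 24 hours, so the table stays bounded without a dedicated scheduled job. A minimal sketch of the same pattern in isolation (the execute helper below is a generic placeholder, not the project's sql service, and the ISO timestamp format is an assumption):

    // Sketch: probabilistic ("lottery") cleanup, assuming a generic execute(query, params) helper.
    // Running the DELETE on ~5% of writes keeps the table small without a scheduler.
    function maybeCleanupRecentNotes(execute: (query: string, params: unknown[]) => void) {
        if (Math.random() >= 0.05) {
            return; // skip cleanup on ~95% of calls
        }

        const cutOffDate = new Date(Date.now() - 24 * 3600 * 1000).toISOString();
        execute("DELETE FROM recent_notes WHERE utcDateCreated < ?", [cutOffDate]);
    }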
54
apps/server/src/routes/api/recovery_codes.ts
Normal file
54
apps/server/src/routes/api/recovery_codes.ts
Normal file
@@ -0,0 +1,54 @@
|
||||
import recovery_codes from '../../services/encryption/recovery_codes.js';
import type { Request } from 'express';
import { randomBytes } from 'crypto';

function setRecoveryCodes(req: Request) {
const success = recovery_codes.setRecoveryCodes(req.body.recoveryCodes.join(','));
return { success: success, message: 'Recovery codes set!' };
}

function verifyRecoveryCode(req: Request) {
const success = recovery_codes.verifyRecoveryCode(req.body.recovery_code_guess);

return { success: success };
}

function checkForRecoveryKeys() {
return {
success: true, keysExist: recovery_codes.isRecoveryCodeSet()
};
}

function generateRecoveryCodes() {
const recoveryKeys = Array.from({ length: 8 }, () => randomBytes(16).toString('base64'));

recovery_codes.setRecoveryCodes(recoveryKeys.join(','));

return { success: true, recoveryCodes: recoveryKeys };
}

function getUsedRecoveryCodes() {
if (!recovery_codes.isRecoveryCodeSet()) {
return [];
}

// anchored match against the usage timestamp; no global flag, so repeated .test() calls stay stateless
const dateRegex = /^\d{4}\/\d{2}\/\d{2}T\d{2}:\d{2}:\d{2}\.\d{3}Z$/;
const recoveryCodes = recovery_codes.getRecoveryCodes();

const usedStatus = recoveryCodes.map((recoveryKey, index) => {
return dateRegex.test(recoveryKey) ? recoveryKey : String(index);
});

return {
success: true,
usedRecoveryCodes: usedStatus
};
}

export default {
setRecoveryCodes,
generateRecoveryCodes,
verifyRecoveryCode,
checkForRecoveryKeys,
getUsedRecoveryCodes
};
|
||||
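getUsedRecoveryCodes relies on a storage convention (inferred from the date regex above, and an assumption here) that a consumed recovery code is overwritten in place by the timestamp at which it was used. The mapping therefore returns the usage timestamp for consumed slots and the string form of the slot index for codes that are still unused. A hypothetical response for eight codes where only the third has been used might look like this:

    // Illustrative only - the exact timestamp format comes from the storage layer.
    const exampleResponse = {
        success: true,
        usedRecoveryCodes: ["0", "1", "2025/01/31T12:34:56.789Z", "3", "4", "5", "6", "7"]
    };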
78
apps/server/src/routes/api/relation-map.ts
Normal file
78
apps/server/src/routes/api/relation-map.ts
Normal file
@@ -0,0 +1,78 @@
|
||||
import type { Request } from "express";
import becca from "../../becca/becca.js";
import sql from "../../services/sql.js";

interface ResponseData {
noteTitles: Record<string, string>;
relations: {
attributeId: string;
sourceNoteId: string;
targetNoteId: string;
name: string;
}[];
inverseRelations: Record<string, string>;
}

function getRelationMap(req: Request) {
const { relationMapNoteId, noteIds } = req.body;

const resp: ResponseData = {
// noteId => title
noteTitles: {},
relations: [],
// relation name => inverse relation name
inverseRelations: {
internalLink: "internalLink"
}
};

if (!Array.isArray(noteIds) || noteIds.length === 0) {
return resp;
}

const questionMarks = noteIds.map((_noteId) => "?").join(",");

const relationMapNote = becca.getNoteOrThrow(relationMapNoteId);

const displayRelationsVal = relationMapNote.getLabelValue("displayRelations");
const displayRelations = !displayRelationsVal ? [] : displayRelationsVal.split(",").map((token) => token.trim());

const hideRelationsVal = relationMapNote.getLabelValue("hideRelations");
const hideRelations = !hideRelationsVal ? [] : hideRelationsVal.split(",").map((token) => token.trim());

const foundNoteIds = sql.getColumn<string>(/*sql*/`SELECT noteId FROM notes WHERE isDeleted = 0 AND noteId IN (${questionMarks})`, noteIds);
const notes = becca.getNotes(foundNoteIds);

for (const note of notes) {
resp.noteTitles[note.noteId] = note.title;

resp.relations = resp.relations.concat(
note
.getRelations()
.filter((relation) => !relation.isAutoLink() || displayRelations.includes(relation.name))
.filter((relation) => (displayRelations.length > 0 ? displayRelations.includes(relation.name) : !hideRelations.includes(relation.name)))
.filter((relation) => noteIds.includes(relation.value))
.map((relation) => ({
attributeId: relation.attributeId,
sourceNoteId: relation.noteId,
targetNoteId: relation.value,
name: relation.name
}))
);

for (const relationDefinition of note.getRelationDefinitions()) {
const def = relationDefinition.getDefinition();

if (def.inverseRelation) {
resp.inverseRelations[relationDefinition.getDefinedName()] = def.inverseRelation;
resp.inverseRelations[def.inverseRelation] = relationDefinition.getDefinedName();
}
}
}

return resp;
}

export default {
getRelationMap
};
|
||||
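The relation filtering in getRelationMap is driven by two labels on the relation map note itself: displayRelations acts as an allow-list (when present, only the listed names are shown, and auto-links appear only if listed), while hideRelations acts as a deny-list that is consulted only when no allow-list is set. A sketch of the same selection logic extracted for clarity (the SimpleRelation shape is a simplification for illustration, not the real attribute entity):

    interface SimpleRelation { name: string; isAutoLink: boolean; targetNoteId: string; }

    // Mirrors the three .filter() calls above: auto-link handling, allow/deny lists, and target scoping.
    function selectRelations(relations: SimpleRelation[], displayRelations: string[], hideRelations: string[], noteIds: string[]) {
        return relations
            .filter((r) => !r.isAutoLink || displayRelations.includes(r.name))
            .filter((r) => (displayRelations.length > 0 ? displayRelations.includes(r.name) : !hideRelations.includes(r.name)))
            .filter((r) => noteIds.includes(r.targetNoteId));
    }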
224
apps/server/src/routes/api/revisions.ts
Normal file
224
apps/server/src/routes/api/revisions.ts
Normal file
@@ -0,0 +1,224 @@
|
||||
"use strict";
|
||||
|
||||
import beccaService from "../../becca/becca_service.js";
|
||||
import utils from "../../services/utils.js";
|
||||
import sql from "../../services/sql.js";
|
||||
import cls from "../../services/cls.js";
|
||||
import path from "path";
|
||||
import becca from "../../becca/becca.js";
|
||||
import blobService from "../../services/blob.js";
|
||||
import eraseService from "../../services/erase.js";
|
||||
import type { Request, Response } from "express";
|
||||
import type BRevision from "../../becca/entities/brevision.js";
|
||||
import type BNote from "../../becca/entities/bnote.js";
|
||||
import type { NotePojo } from "../../becca/becca-interface.js";
|
||||
|
||||
interface NotePath {
|
||||
noteId: string;
|
||||
branchId?: string;
|
||||
title: string;
|
||||
notePath: string[];
|
||||
path: string;
|
||||
}
|
||||
|
||||
interface NotePojoWithNotePath extends NotePojo {
|
||||
notePath?: string[] | null;
|
||||
}
|
||||
|
||||
function getRevisionBlob(req: Request) {
|
||||
const preview = req.query.preview === "true";
|
||||
|
||||
return blobService.getBlobPojo("revisions", req.params.revisionId, { preview });
|
||||
}
|
||||
|
||||
function getRevisions(req: Request) {
|
||||
return becca.getRevisionsFromQuery(
|
||||
`
|
||||
SELECT revisions.*,
|
||||
LENGTH(blobs.content) AS contentLength
|
||||
FROM revisions
|
||||
JOIN blobs ON revisions.blobId = blobs.blobId
|
||||
WHERE revisions.noteId = ?
|
||||
ORDER BY revisions.utcDateCreated DESC`,
|
||||
[req.params.noteId]
|
||||
);
|
||||
}
|
||||
|
||||
function getRevision(req: Request) {
|
||||
const revision = becca.getRevisionOrThrow(req.params.revisionId);
|
||||
|
||||
if (revision.type === "file") {
|
||||
if (revision.hasStringContent()) {
|
||||
revision.content = (revision.getContent() as string).substr(0, 10000);
|
||||
}
|
||||
} else {
|
||||
revision.content = revision.getContent();
|
||||
|
||||
if (revision.content && revision.type === "image") {
|
||||
revision.content = revision.content.toString("base64");
|
||||
}
|
||||
}
|
||||
|
||||
return revision;
|
||||
}
|
||||
|
||||
function getRevisionFilename(revision: BRevision) {
|
||||
let filename = utils.formatDownloadTitle(revision.title, revision.type, revision.mime);
|
||||
|
||||
if (!revision.dateCreated) {
|
||||
throw new Error("Missing creation date for revision.");
|
||||
}
|
||||
|
||||
const extension = path.extname(filename);
|
||||
const date = revision.dateCreated
|
||||
.substr(0, 19)
|
||||
.replace(" ", "_")
|
||||
.replace(/[^0-9_]/g, "");
|
||||
|
||||
if (extension) {
|
||||
filename = `${filename.substr(0, filename.length - extension.length)}-${date}${extension}`;
|
||||
} else {
|
||||
filename += `-${date}`;
|
||||
}
|
||||
|
||||
return filename;
|
||||
}
|
||||
|
||||
function downloadRevision(req: Request, res: Response) {
|
||||
const revision = becca.getRevisionOrThrow(req.params.revisionId);
|
||||
|
||||
if (!revision.isContentAvailable()) {
|
||||
return res.setHeader("Content-Type", "text/plain").status(401).send("Protected session not available");
|
||||
}
|
||||
|
||||
const filename = getRevisionFilename(revision);
|
||||
|
||||
res.setHeader("Content-Disposition", utils.getContentDisposition(filename));
|
||||
res.setHeader("Content-Type", revision.mime);
|
||||
|
||||
res.send(revision.getContent());
|
||||
}
|
||||
|
||||
function eraseAllRevisions(req: Request) {
|
||||
const revisionIdsToErase = sql.getColumn<string>("SELECT revisionId FROM revisions WHERE noteId = ?", [req.params.noteId]);
|
||||
|
||||
eraseService.eraseRevisions(revisionIdsToErase);
|
||||
}
|
||||
|
||||
function eraseRevision(req: Request) {
|
||||
eraseService.eraseRevisions([req.params.revisionId]);
|
||||
}
|
||||
|
||||
function eraseAllExcessRevisions() {
|
||||
const allNoteIds = sql.getRows("SELECT noteId FROM notes WHERE SUBSTRING(noteId, 1, 1) != '_'") as { noteId: string }[];
|
||||
allNoteIds.forEach((row) => {
|
||||
becca.getNote(row.noteId)?.eraseExcessRevisionSnapshots();
|
||||
});
|
||||
}
|
||||
|
||||
function restoreRevision(req: Request) {
|
||||
const revision = becca.getRevision(req.params.revisionId);
|
||||
|
||||
if (revision) {
|
||||
const note = revision.getNote();
|
||||
|
||||
sql.transactional(() => {
|
||||
note.saveRevision();
|
||||
|
||||
for (const oldNoteAttachment of note.getAttachments()) {
|
||||
oldNoteAttachment.markAsDeleted();
|
||||
}
|
||||
|
||||
let revisionContent = revision.getContent();
|
||||
|
||||
for (const revisionAttachment of revision.getAttachments()) {
|
||||
const noteAttachment = revisionAttachment.copy();
|
||||
noteAttachment.ownerId = note.noteId;
|
||||
noteAttachment.setContent(revisionAttachment.getContent(), { forceSave: true });
|
||||
|
||||
// content is rewritten to point to the restored revision attachments
|
||||
if (typeof revisionContent === "string") {
|
||||
revisionContent = revisionContent.replaceAll(`attachments/${revisionAttachment.attachmentId}`, `attachments/${noteAttachment.attachmentId}`);
|
||||
}
|
||||
}
|
||||
|
||||
note.title = revision.title;
|
||||
note.mime = revision.mime;
|
||||
note.type = revision.type;
|
||||
note.setContent(revisionContent, { forceSave: true });
|
||||
});
|
||||
}
|
||||
}
|
||||
|
||||
function getEditedNotesOnDate(req: Request) {
|
||||
const noteIds = sql.getColumn<string>(
|
||||
`
|
||||
SELECT notes.noteId
|
||||
FROM notes
|
||||
WHERE noteId IN (
|
||||
SELECT noteId FROM notes
|
||||
WHERE notes.dateCreated LIKE :date
|
||||
OR notes.dateModified LIKE :date
|
||||
UNION ALL
|
||||
SELECT noteId FROM revisions
|
||||
WHERE revisions.dateLastEdited LIKE :date
|
||||
)
|
||||
ORDER BY isDeleted
|
||||
LIMIT 50`,
|
||||
{ date: `${req.params.date}%` }
|
||||
);
|
||||
|
||||
let notes = becca.getNotes(noteIds, true);
|
||||
|
||||
// Narrow down the results if a note is hoisted, similar to "Jump to note".
|
||||
const hoistedNoteId = cls.getHoistedNoteId();
|
||||
if (hoistedNoteId !== "root") {
|
||||
notes = notes.filter((note) => note.hasAncestor(hoistedNoteId));
|
||||
}
|
||||
|
||||
return notes.map((note) => {
|
||||
const notePath = getNotePathData(note);
|
||||
|
||||
const notePojo: NotePojoWithNotePath = note.getPojo();
|
||||
notePojo.notePath = notePath ? notePath.notePath : null;
|
||||
|
||||
return notePojo;
|
||||
});
|
||||
}
|
||||
|
||||
function getNotePathData(note: BNote): NotePath | undefined {
|
||||
const retPath = note.getBestNotePath();
|
||||
|
||||
if (retPath) {
|
||||
const noteTitle = beccaService.getNoteTitleForPath(retPath);
|
||||
|
||||
let branchId;
|
||||
|
||||
if (note.isRoot()) {
|
||||
branchId = "none_root";
|
||||
} else {
|
||||
const parentNote = note.parents[0];
|
||||
branchId = becca.getBranchFromChildAndParent(note.noteId, parentNote.noteId)?.branchId;
|
||||
}
|
||||
|
||||
return {
|
||||
noteId: note.noteId,
|
||||
branchId: branchId,
|
||||
title: noteTitle,
|
||||
notePath: retPath,
|
||||
path: retPath.join("/")
|
||||
};
|
||||
}
|
||||
}
|
||||
|
||||
export default {
|
||||
getRevisionBlob,
|
||||
getRevisions,
|
||||
getRevision,
|
||||
downloadRevision,
|
||||
getEditedNotesOnDate,
|
||||
eraseAllRevisions,
|
||||
eraseAllExcessRevisions,
|
||||
eraseRevision,
|
||||
restoreRevision
|
||||
};
|
||||
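getRevisionFilename embeds the revision's creation timestamp into the download name just before the extension, so several revisions of the same note can be saved side by side. A worked example of the date mangling, using an illustrative dateCreated value:

    // "2024-03-05 14:30:00.123+0100".substr(0, 19)  -> "2024-03-05 14:30:00"
    // .replace(" ", "_")                             -> "2024-03-05_14:30:00"
    // .replace(/[^0-9_]/g, "")                       -> "20240305_143000"
    // A revision of "report.txt" therefore downloads as "report-20240305_143000.txt",
    // and a title without an extension simply gets "-20240305_143000" appended.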
133
apps/server/src/routes/api/script.ts
Normal file
133
apps/server/src/routes/api/script.ts
Normal file
@@ -0,0 +1,133 @@
|
||||
"use strict";
|
||||
|
||||
import scriptService from "../../services/script.js";
|
||||
import attributeService from "../../services/attributes.js";
|
||||
import becca from "../../becca/becca.js";
|
||||
import syncService from "../../services/sync.js";
|
||||
import sql from "../../services/sql.js";
|
||||
import type { Request } from "express";
|
||||
import { safeExtractMessageAndStackFromError } from "../../services/utils.js";
|
||||
|
||||
interface ScriptBody {
|
||||
script: string;
|
||||
params: any[];
|
||||
startNoteId: string;
|
||||
currentNoteId: string;
|
||||
originEntityName: string;
|
||||
originEntityId: string;
|
||||
transactional: boolean;
|
||||
}
|
||||
|
||||
// The async/await here is subtle: body.script may or may not be async. If it is async, we need to await it and
// make the complete response, including metadata, available in a Promise, so that the route handler detects
// this and calls result.then().
|
||||
async function exec(req: Request) {
|
||||
try {
|
||||
const body = req.body as ScriptBody;
|
||||
|
||||
const execute = (body: ScriptBody) => scriptService.executeScript(body.script, body.params, body.startNoteId, body.currentNoteId, body.originEntityName, body.originEntityId);
|
||||
|
||||
const result = body.transactional ? sql.transactional(() => execute(body)) : await execute(body);
|
||||
|
||||
return {
|
||||
success: true,
|
||||
executionResult: result,
|
||||
maxEntityChangeId: syncService.getMaxEntityChangeId()
|
||||
};
|
||||
} catch (e: unknown) {
|
||||
const [errMessage] = safeExtractMessageAndStackFromError(e);
|
||||
return {
|
||||
success: false,
|
||||
error: errMessage
|
||||
};
|
||||
}
|
||||
}
|
||||
|
||||
function run(req: Request) {
|
||||
const note = becca.getNoteOrThrow(req.params.noteId);
|
||||
|
||||
const result = scriptService.executeNote(note, { originEntity: note });
|
||||
|
||||
return { executionResult: result };
|
||||
}
|
||||
|
||||
function getBundlesWithLabel(label: string, value?: string) {
|
||||
const notes = attributeService.getNotesWithLabel(label, value);
|
||||
|
||||
const bundles = [];
|
||||
|
||||
for (const note of notes) {
|
||||
const bundle = scriptService.getScriptBundleForFrontend(note);
|
||||
|
||||
if (bundle) {
|
||||
bundles.push(bundle);
|
||||
}
|
||||
}
|
||||
|
||||
return bundles;
|
||||
}
|
||||
|
||||
function getStartupBundles(req: Request) {
|
||||
if (!process.env.TRILIUM_SAFE_MODE) {
|
||||
if (req.query.mobile === "true") {
|
||||
return getBundlesWithLabel("run", "mobileStartup");
|
||||
} else {
|
||||
return getBundlesWithLabel("run", "frontendStartup");
|
||||
}
|
||||
} else {
|
||||
return [];
|
||||
}
|
||||
}
|
||||
|
||||
function getWidgetBundles() {
|
||||
if (!process.env.TRILIUM_SAFE_MODE) {
|
||||
return getBundlesWithLabel("widget");
|
||||
} else {
|
||||
return [];
|
||||
}
|
||||
}
|
||||
|
||||
function getRelationBundles(req: Request) {
|
||||
const noteId = req.params.noteId;
|
||||
const note = becca.getNoteOrThrow(noteId);
|
||||
const relationName = req.params.relationName;
|
||||
|
||||
const attributes = note.getAttributes();
|
||||
const filtered = attributes.filter((attr) => attr.type === "relation" && attr.name === relationName);
|
||||
const targetNoteIds = filtered.map((relation) => relation.value);
|
||||
const uniqueNoteIds = Array.from(new Set(targetNoteIds));
|
||||
|
||||
const bundles = [];
|
||||
|
||||
for (const noteId of uniqueNoteIds) {
|
||||
const note = becca.getNoteOrThrow(noteId);
|
||||
|
||||
if (!note.isJavaScript() || note.getScriptEnv() !== "frontend") {
|
||||
continue;
|
||||
}
|
||||
|
||||
const bundle = scriptService.getScriptBundleForFrontend(note);
|
||||
|
||||
if (bundle) {
|
||||
bundles.push(bundle);
|
||||
}
|
||||
}
|
||||
|
||||
return bundles;
|
||||
}
|
||||
|
||||
function getBundle(req: Request) {
|
||||
const note = becca.getNoteOrThrow(req.params.noteId);
|
||||
const { script, params } = req.body;
|
||||
|
||||
return scriptService.getScriptBundleForFrontend(note, script, params);
|
||||
}
|
||||
|
||||
export default {
|
||||
exec,
|
||||
run,
|
||||
getStartupBundles,
|
||||
getWidgetBundles,
|
||||
getRelationBundles,
|
||||
getBundle
|
||||
};
|
||||
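Because of the behaviour described in the comment on exec(), a handler's return value may be either a plain object or a Promise, and the layer that invokes these handlers has to normalize both cases. A minimal, hypothetical dispatcher illustrating that (this is not the project's actual route machinery):

    // Hypothetical caller: route handlers may return either a plain object or a Promise.
    async function dispatch(handler: (req: unknown) => unknown, req: unknown) {
        const result = handler(req);
        // If the handler was async (as exec above is), await the Promise; otherwise use the value directly.
        return result instanceof Promise ? await result : result;
    }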
134
apps/server/src/routes/api/search.ts
Normal file
134
apps/server/src/routes/api/search.ts
Normal file
@@ -0,0 +1,134 @@
|
||||
"use strict";
|
||||
|
||||
import type { Request } from "express";
|
||||
|
||||
import becca from "../../becca/becca.js";
|
||||
import SearchContext from "../../services/search/search_context.js";
|
||||
import searchService, { EMPTY_RESULT, type SearchNoteResult } from "../../services/search/services/search.js";
|
||||
import bulkActionService from "../../services/bulk_actions.js";
|
||||
import cls from "../../services/cls.js";
|
||||
import attributeFormatter from "../../services/attribute_formatter.js";
|
||||
import ValidationError from "../../errors/validation_error.js";
|
||||
import type SearchResult from "../../services/search/search_result.js";
|
||||
|
||||
function searchFromNote(req: Request): SearchNoteResult {
const note = becca.getNote(req.params.noteId);

if (!note) {
// this can be triggered from recent changes, and it's harmless to return an empty list rather than fail
return EMPTY_RESULT;
}
|
||||
|
||||
if (note.type !== "search") {
|
||||
throw new ValidationError(`Note '${req.params.noteId}' is not a search note.`);
|
||||
}
|
||||
|
||||
return searchService.searchFromNote(note);
|
||||
}
|
||||
|
||||
function searchAndExecute(req: Request) {
const note = becca.getNote(req.params.noteId);

if (!note) {
// this can be triggered from recent changes, and it's harmless to return an empty list rather than fail
return [];
}
|
||||
|
||||
if (note.type !== "search") {
|
||||
throw new ValidationError(`Note '${req.params.noteId}' is not a search note.`);
|
||||
}
|
||||
|
||||
const { searchResultNoteIds } = searchService.searchFromNote(note);
|
||||
|
||||
bulkActionService.executeActions(note, searchResultNoteIds);
|
||||
}
|
||||
|
||||
function quickSearch(req: Request) {
|
||||
const { searchString } = req.params;
|
||||
|
||||
const searchContext = new SearchContext({
|
||||
fastSearch: false,
|
||||
includeArchivedNotes: false,
|
||||
fuzzyAttributeSearch: false
|
||||
});
|
||||
|
||||
const resultNoteIds = searchService.findResultsWithQuery(searchString, searchContext).map((sr) => sr.noteId);
|
||||
|
||||
return {
|
||||
searchResultNoteIds: resultNoteIds,
|
||||
error: searchContext.getError()
|
||||
};
|
||||
}
|
||||
|
||||
function search(req: Request) {
|
||||
const { searchString } = req.params;
|
||||
|
||||
const searchContext = new SearchContext({
|
||||
fastSearch: false,
|
||||
includeArchivedNotes: true,
|
||||
fuzzyAttributeSearch: false,
|
||||
ignoreHoistedNote: true
|
||||
});
|
||||
|
||||
return searchService.findResultsWithQuery(searchString, searchContext).map((sr) => sr.noteId);
|
||||
}
|
||||
|
||||
function getRelatedNotes(req: Request) {
|
||||
const attr = req.body;
|
||||
|
||||
const searchSettings = {
|
||||
fastSearch: true,
|
||||
includeArchivedNotes: false,
|
||||
fuzzyAttributeSearch: false
|
||||
};
|
||||
|
||||
const matchingNameAndValue = searchService.findResultsWithQuery(attributeFormatter.formatAttrForSearch(attr, true), new SearchContext(searchSettings));
|
||||
const matchingName = searchService.findResultsWithQuery(attributeFormatter.formatAttrForSearch(attr, false), new SearchContext(searchSettings));
|
||||
|
||||
const results: SearchResult[] = [];
|
||||
|
||||
const allResults = matchingNameAndValue.concat(matchingName);
|
||||
|
||||
const allResultNoteIds = new Set();
|
||||
|
||||
for (const record of allResults) {
|
||||
allResultNoteIds.add(record.noteId);
|
||||
}
|
||||
|
||||
for (const record of allResults) {
|
||||
if (results.length >= 20) {
|
||||
break;
|
||||
}
|
||||
|
||||
if (results.find((res) => res.noteId === record.noteId)) {
|
||||
continue;
|
||||
}
|
||||
|
||||
results.push(record);
|
||||
}
|
||||
|
||||
return {
|
||||
count: allResultNoteIds.size,
|
||||
results
|
||||
};
|
||||
}
|
||||
|
||||
function searchTemplates() {
|
||||
const query = cls.getHoistedNoteId() === "root" ? "#template" : "#template OR #workspaceTemplate";
|
||||
|
||||
return searchService
|
||||
.searchNotes(query, {
|
||||
includeArchivedNotes: true,
|
||||
ignoreHoistedNote: false
|
||||
})
|
||||
.map((note) => note.noteId);
|
||||
}
|
||||
|
||||
export default {
|
||||
searchFromNote,
|
||||
searchAndExecute,
|
||||
getRelatedNotes,
|
||||
quickSearch,
|
||||
search,
|
||||
searchTemplates
|
||||
};
|
||||
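getRelatedNotes returns at most 20 unique results while count reports the number of all unique matching note IDs, so a client can render an "N more" hint. The request body is an attribute-like object handed straight to attributeFormatter.formatAttrForSearch; its exact shape lives in that service, so the field names below are an assumption based on how attributes look elsewhere in the codebase:

    // Hypothetical request body for getRelatedNotes - field names are assumptions.
    const relatedNotesBody = { type: "label", name: "todo", value: "urgent" };
    // The handler then searches twice (name+value, then name only), deduplicates by noteId,
    // caps the returned list at 20 entries, and reports the total number of unique matches as "count".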
88
apps/server/src/routes/api/sender.ts
Normal file
88
apps/server/src/routes/api/sender.ts
Normal file
@@ -0,0 +1,88 @@
|
||||
import type { Request } from "express";
|
||||
import imageType from "image-type";
|
||||
|
||||
import imageService from "../../services/image.js";
|
||||
import noteService from "../../services/notes.js";
|
||||
import sanitizeAttributeName from "../../services/sanitize_attribute_name.js";
|
||||
import specialNotesService from "../../services/special_notes.js";
|
||||
|
||||
async function uploadImage(req: Request) {
|
||||
const file = req.file;
|
||||
|
||||
if (!file) {
|
||||
return {
|
||||
uploaded: false,
|
||||
message: `Missing image data.`
|
||||
};
|
||||
}
|
||||
|
||||
if (!["image/png", "image/jpeg", "image/gif", "image/webp", "image/svg+xml"].includes(file.mimetype)) {
|
||||
return [400, `Unknown image type: ${file.mimetype}`];
|
||||
}
|
||||
if (typeof file.buffer === "string") {
|
||||
return [400, "Invalid image content type."];
|
||||
}
|
||||
|
||||
const uploadedImageType = await imageType(file.buffer);
|
||||
if (!uploadedImageType) {
|
||||
return [400, "Unable to determine image type."];
|
||||
}
|
||||
const originalName = `Sender image.${uploadedImageType.ext}`;
|
||||
|
||||
if (!req.headers["x-local-date"]) {
|
||||
return [400, "Invalid local date"];
|
||||
}
|
||||
|
||||
const parentNote = await specialNotesService.getInboxNote(req.headers["x-local-date"]);
|
||||
|
||||
const { note, noteId } = imageService.saveImage(parentNote.noteId, file.buffer, originalName, true);
|
||||
|
||||
const labelsStr = req.headers["x-labels"];
|
||||
|
||||
if (labelsStr?.trim()) {
|
||||
const labels = JSON.parse(labelsStr);
|
||||
|
||||
for (const { name, value } of labels) {
|
||||
note.setLabel(sanitizeAttributeName(name), value);
|
||||
}
|
||||
}
|
||||
|
||||
note.setLabel("sentFromSender");
|
||||
|
||||
return {
|
||||
noteId: noteId
|
||||
};
|
||||
}
|
||||
|
||||
async function saveNote(req: Request) {
|
||||
if (!req.headers["x-local-date"] || Array.isArray(req.headers["x-local-date"])) {
|
||||
return [400, "Invalid local date"];
|
||||
}
|
||||
|
||||
const parentNote = await specialNotesService.getInboxNote(req.headers["x-local-date"]);
|
||||
|
||||
const { note, branch } = noteService.createNewNote({
|
||||
parentNoteId: parentNote.noteId,
|
||||
title: req.body.title,
|
||||
content: req.body.content,
|
||||
isProtected: false,
|
||||
type: "text",
|
||||
mime: "text/html"
|
||||
});
|
||||
|
||||
if (req.body.labels) {
|
||||
for (const { name, value } of req.body.labels) {
|
||||
note.setLabel(sanitizeAttributeName(name), value);
|
||||
}
|
||||
}
|
||||
|
||||
return {
|
||||
noteId: note.noteId,
|
||||
branchId: branch.branchId
|
||||
};
|
||||
}
|
||||
|
||||
export default {
|
||||
uploadImage,
|
||||
saveNote
|
||||
};
|
||||
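Both sender endpoints read the x-local-date header to pick the day's inbox note, and uploadImage additionally accepts an optional x-labels header holding a JSON array of { name, value } pairs. A hedged client sketch follows; the route path, the multipart field name, and the local-date format are assumptions, while the headers and label shape come from the handlers above:

    // Hypothetical client call - only the header/body semantics are taken from the code above.
    async function sendImageToInbox(file: Blob) {
        const form = new FormData();
        form.append("upload", file, "photo.png");   // field name is an assumption

        await fetch("/api/sender/image", {           // route path is an assumption
            method: "POST",
            headers: {
                "x-local-date": new Date().toISOString(),
                "x-labels": JSON.stringify([{ name: "source", value: "phone" }])
            },
            body: form
        });
    }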
91
apps/server/src/routes/api/setup.ts
Normal file
91
apps/server/src/routes/api/setup.ts
Normal file
@@ -0,0 +1,91 @@
|
||||
"use strict";
|
||||
|
||||
import sqlInit from "../../services/sql_init.js";
|
||||
import setupService from "../../services/setup.js";
|
||||
import log from "../../services/log.js";
|
||||
import appInfo from "../../services/app_info.js";
|
||||
import type { Request } from "express";
|
||||
|
||||
function getStatus() {
|
||||
return {
|
||||
isInitialized: sqlInit.isDbInitialized(),
|
||||
schemaExists: sqlInit.schemaExists(),
|
||||
syncVersion: appInfo.syncVersion
|
||||
};
|
||||
}
|
||||
|
||||
async function setupNewDocument() {
|
||||
await sqlInit.createInitialDatabase();
|
||||
}
|
||||
|
||||
function setupSyncFromServer(req: Request) {
|
||||
const { syncServerHost, syncProxy, password } = req.body;
|
||||
|
||||
return setupService.setupSyncFromSyncServer(syncServerHost, syncProxy, password);
|
||||
}
|
||||
|
||||
function saveSyncSeed(req: Request) {
|
||||
const { options, syncVersion } = req.body;
|
||||
|
||||
if (appInfo.syncVersion !== syncVersion) {
|
||||
const message = `Could not set up sync because the local sync protocol version is ${appInfo.syncVersion} while the remote is ${syncVersion}. To fix this, use the same Trilium version on all instances.`;
|
||||
|
||||
log.error(message);
|
||||
|
||||
return [
|
||||
400,
|
||||
{
|
||||
error: message
|
||||
}
|
||||
];
|
||||
}
|
||||
|
||||
log.info("Saved sync seed.");
|
||||
|
||||
sqlInit.createDatabaseForSync(options);
|
||||
}
|
||||
|
||||
/**
|
||||
* @swagger
|
||||
* /api/setup/sync-seed:
|
||||
* get:
|
||||
* tags:
|
||||
* - auth
|
||||
* summary: Sync documentSecret value
|
||||
* description: First step to logging in.
|
||||
* operationId: setup-sync-seed
|
||||
* responses:
|
||||
* '200':
|
||||
* description: Successful operation
|
||||
* content:
|
||||
* application/json:
|
||||
* schema:
|
||||
* type: object
|
||||
* properties:
|
||||
* syncVersion:
|
||||
* type: integer
|
||||
* example: 34
|
||||
* options:
|
||||
* type: object
|
||||
* properties:
|
||||
* documentSecret:
|
||||
* type: string
|
||||
* security:
|
||||
* - user-password: []
|
||||
*/
|
||||
function getSyncSeed() {
|
||||
log.info("Serving sync seed.");
|
||||
|
||||
return {
|
||||
options: setupService.getSyncSeedOptions(),
|
||||
syncVersion: appInfo.syncVersion
|
||||
};
|
||||
}
|
||||
|
||||
export default {
|
||||
getStatus,
|
||||
setupNewDocument,
|
||||
setupSyncFromServer,
|
||||
getSyncSeed,
|
||||
saveSyncSeed
|
||||
};
|
||||
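Sync setup against an existing server is a two-step handshake: the source instance exposes its document secret and sync protocol version through getSyncSeed, and the target instance persists them with saveSyncSeed, which answers with HTTP 400 when the two protocol versions differ. A minimal sketch of that flow under those assumptions, with the HTTP transport and option plumbing omitted:

    // Sketch: move the sync seed from a source instance to this one (transport omitted).
    type SyncSeed = { options: unknown; syncVersion: number };

    function transferSyncSeed(fetchSeed: () => SyncSeed, save: (req: { body: SyncSeed }) => unknown) {
        const seed = fetchSeed();       // on the source: getSyncSeed()
        return save({ body: seed });    // on the target: saveSyncSeed(); rejects on version mismatch
    }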
18
apps/server/src/routes/api/similar_notes.ts
Normal file
18
apps/server/src/routes/api/similar_notes.ts
Normal file
@@ -0,0 +1,18 @@
|
||||
"use strict";

import type { Request } from "express";

import similarityService from "../../becca/similarity.js";
import becca from "../../becca/becca.js";

async function getSimilarNotes(req: Request) {
const noteId = req.params.noteId;

const _note = becca.getNoteOrThrow(noteId);

return await similarityService.findSimilarNotes(noteId);
}

export default {
getSimilarNotes
};
|
||||
123
apps/server/src/routes/api/special_notes.ts
Normal file
123
apps/server/src/routes/api/special_notes.ts
Normal file
@@ -0,0 +1,123 @@
|
||||
import dateNoteService from "../../services/date_notes.js";
|
||||
import sql from "../../services/sql.js";
|
||||
import cls from "../../services/cls.js";
|
||||
import specialNotesService, { type LauncherType } from "../../services/special_notes.js";
|
||||
import becca from "../../becca/becca.js";
|
||||
import type { Request } from "express";
|
||||
|
||||
function getInboxNote(req: Request) {
|
||||
return specialNotesService.getInboxNote(req.params.date);
|
||||
}
|
||||
|
||||
function getDayNote(req: Request) {
|
||||
return dateNoteService.getDayNote(req.params.date);
|
||||
}
|
||||
|
||||
function getWeekFirstDayNote(req: Request) {
|
||||
return dateNoteService.getWeekFirstDayNote(req.params.date);
|
||||
}
|
||||
|
||||
function getWeekNote(req: Request) {
|
||||
return dateNoteService.getWeekNote(req.params.week);
|
||||
}
|
||||
|
||||
function getMonthNote(req: Request) {
|
||||
return dateNoteService.getMonthNote(req.params.month);
|
||||
}
|
||||
|
||||
function getQuarterNote(req: Request) {
|
||||
return dateNoteService.getQuarterNote(req.params.quarter);
|
||||
}
|
||||
|
||||
function getYearNote(req: Request) {
|
||||
return dateNoteService.getYearNote(req.params.year);
|
||||
}
|
||||
|
||||
function getDayNotesForMonth(req: Request) {
const month = req.params.month;
const calendarRoot = req.query.calendarRoot;
// the month prefix is passed as a bind parameter instead of being interpolated into the SQL string
const query = `
SELECT
attr.value AS date,
notes.noteId
FROM notes
JOIN attributes attr USING(noteId)
WHERE notes.isDeleted = 0
AND attr.isDeleted = 0
AND attr.type = 'label'
AND attr.name = 'dateNote'
AND attr.value LIKE ?`;
const params = [month + "%"];

if (calendarRoot) {
const rows = sql.getRows<{ date: string; noteId: string }>(query, params);
const result: Record<string, string> = {};
for (const { date, noteId } of rows) {
const note = becca.getNote(noteId);
if (note?.hasAncestor(String(calendarRoot))) {
result[date] = noteId;
}
}

return result;
} else {
return sql.getMap(query, params);
}
}
|
||||
|
||||
async function saveSqlConsole(req: Request) {
|
||||
return await specialNotesService.saveSqlConsole(req.body.sqlConsoleNoteId);
|
||||
}
|
||||
|
||||
function createSqlConsole() {
|
||||
return specialNotesService.createSqlConsole();
|
||||
}
|
||||
|
||||
function saveSearchNote(req: Request) {
|
||||
return specialNotesService.saveSearchNote(req.body.searchNoteId);
|
||||
}
|
||||
|
||||
function createSearchNote(req: Request) {
|
||||
const hoistedNote = getHoistedNote();
|
||||
const searchString = req.body.searchString || "";
|
||||
const ancestorNoteId = req.body.ancestorNoteId || hoistedNote?.noteId;
|
||||
|
||||
return specialNotesService.createSearchNote(searchString, ancestorNoteId);
|
||||
}
|
||||
|
||||
function getHoistedNote() {
|
||||
return becca.getNote(cls.getHoistedNoteId());
|
||||
}
|
||||
|
||||
function createLauncher(req: Request) {
|
||||
return specialNotesService.createLauncher({
|
||||
parentNoteId: req.params.parentNoteId,
|
||||
// TODO: Validate the parameter
|
||||
launcherType: req.params.launcherType as LauncherType
|
||||
});
|
||||
}
|
||||
|
||||
function resetLauncher(req: Request) {
|
||||
return specialNotesService.resetLauncher(req.params.noteId);
|
||||
}
|
||||
|
||||
function createOrUpdateScriptLauncherFromApi(req: Request) {
|
||||
return specialNotesService.createOrUpdateScriptLauncherFromApi(req.body);
|
||||
}
|
||||
|
||||
export default {
|
||||
getInboxNote,
|
||||
getDayNote,
|
||||
getWeekFirstDayNote,
|
||||
getWeekNote,
|
||||
getMonthNote,
|
||||
getQuarterNote,
|
||||
getYearNote,
|
||||
getDayNotesForMonth,
|
||||
createSqlConsole,
|
||||
saveSqlConsole,
|
||||
createSearchNote,
|
||||
saveSearchNote,
|
||||
createLauncher,
|
||||
resetLauncher,
|
||||
createOrUpdateScriptLauncherFromApi
|
||||
};
|
||||
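getDayNotesForMonth returns a plain date-to-noteId map for the calendar widget; when a calendarRoot is supplied, entries outside that subtree are filtered out. An illustrative result for a month with two day notes (the note IDs are made up):

    // Illustrative response shape only.
    const exampleDayNotes: Record<string, string> = {
        "2024-05-01": "evnnmvHTCgIn",
        "2024-05-02": "k1lYrvGszzAb"
    };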
72
apps/server/src/routes/api/sql.ts
Normal file
72
apps/server/src/routes/api/sql.ts
Normal file
@@ -0,0 +1,72 @@
|
||||
"use strict";

import sql from "../../services/sql.js";
import becca from "../../becca/becca.js";
import type { Request } from "express";
import ValidationError from "../../errors/validation_error.js";
import { safeExtractMessageAndStackFromError } from "../../services/utils.js";

function getSchema() {
const tableNames = sql.getColumn(/*sql*/`SELECT name FROM sqlite_master WHERE type='table' AND name NOT LIKE 'sqlite_%' ORDER BY name`);
const tables = [];

for (const tableName of tableNames) {
tables.push({
name: tableName,
columns: sql.getRows(`PRAGMA table_info(${tableName})`)
});
}

return tables;
}

function execute(req: Request) {
const note = becca.getNoteOrThrow(req.params.noteId);

const content = note.getContent();
if (typeof content !== "string") {
throw new ValidationError("Invalid note type.");
}

const queries = content.split("\n---");

try {
const results = [];

for (let query of queries) {
query = query.trim();

while (query.startsWith("-- ")) {
// Query starts with one or more SQL comments, discard these before we execute.
const pivot = query.indexOf("\n");
query = pivot > 0 ? query.substr(pivot + 1).trim() : "";
}

if (!query) {
continue;
}

if (query.toLowerCase().startsWith("select") || query.toLowerCase().startsWith("with")) {
results.push(sql.getRows(query));
} else {
results.push(sql.execute(query));
}
}

return {
success: true,
results
};
} catch (e: unknown) {
const [errMessage] = safeExtractMessageAndStackFromError(e);
return {
success: false,
error: errMessage
};
}
}

export default {
getSchema,
execute
};
|
||||
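execute() treats the note content as a small script: statements are separated by lines starting with ---, leading "-- " comment lines are stripped, and SELECT/WITH statements return rows while anything else returns the execution summary. An example of note content exercising all of these paths, wrapped as a TypeScript constant for illustration (table and column names are standard Trilium tables, used here only as an example):

    // Example content of a SQL console note; each "---" line starts a new statement,
    // and leading "-- " comments are discarded before execution.
    const sqlConsoleContent = `
    -- count notes first
    SELECT COUNT(*) AS noteCount FROM notes
    ---
    -- then inspect the newest entity changes
    SELECT id, entityName FROM entity_changes ORDER BY id DESC LIMIT 5
    ---
    UPDATE options SET value = value WHERE name = 'lastSyncedPull'
    `;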
54
apps/server/src/routes/api/stats.ts
Normal file
54
apps/server/src/routes/api/stats.ts
Normal file
@@ -0,0 +1,54 @@
|
||||
import sql from "../../services/sql.js";
import becca from "../../becca/becca.js";
import type { Request } from "express";

function getNoteSize(req: Request) {
const { noteId } = req.params;

const blobSizes = sql.getMap<string, number>(
`
SELECT blobs.blobId, LENGTH(content)
FROM blobs
LEFT JOIN notes ON notes.blobId = blobs.blobId AND notes.noteId = ? AND notes.isDeleted = 0
LEFT JOIN attachments ON attachments.blobId = blobs.blobId AND attachments.ownerId = ? AND attachments.isDeleted = 0
LEFT JOIN revisions ON revisions.blobId = blobs.blobId AND revisions.noteId = ?
WHERE notes.noteId IS NOT NULL
OR attachments.attachmentId IS NOT NULL
OR revisions.revisionId IS NOT NULL`,
[noteId, noteId, noteId]
);

const noteSize = Object.values(blobSizes).reduce((acc, blobSize) => acc + blobSize, 0);

return {
noteSize
};
}

function getSubtreeSize(req: Request) {
const note = becca.getNoteOrThrow(req.params.noteId);

const subTreeNoteIds = note.getSubtreeNoteIds();

sql.fillParamList(subTreeNoteIds);

const blobSizes = sql.getMap<string, number>(`
SELECT blobs.blobId, LENGTH(content)
FROM param_list
JOIN notes ON notes.noteId = param_list.paramId AND notes.isDeleted = 0
LEFT JOIN attachments ON attachments.ownerId = param_list.paramId AND attachments.isDeleted = 0
LEFT JOIN revisions ON revisions.noteId = param_list.paramId
JOIN blobs ON blobs.blobId = notes.blobId OR blobs.blobId = attachments.blobId OR blobs.blobId = revisions.blobId`);

const subTreeSize = Object.values(blobSizes).reduce((acc, blobSize) => acc + blobSize, 0);

return {
subTreeSize,
subTreeNoteCount: subTreeNoteIds.length
};
}

export default {
getNoteSize,
getSubtreeSize
};
|
||||
340
apps/server/src/routes/api/sync.ts
Normal file
340
apps/server/src/routes/api/sync.ts
Normal file
@@ -0,0 +1,340 @@
|
||||
"use strict";
|
||||
|
||||
import syncService from "../../services/sync.js";
|
||||
import syncUpdateService from "../../services/sync_update.js";
|
||||
import entityChangesService from "../../services/entity_changes.js";
|
||||
import sql from "../../services/sql.js";
|
||||
import sqlInit from "../../services/sql_init.js";
|
||||
import optionService from "../../services/options.js";
|
||||
import contentHashService from "../../services/content_hash.js";
|
||||
import log from "../../services/log.js";
|
||||
import syncOptions from "../../services/sync_options.js";
|
||||
import utils, { safeExtractMessageAndStackFromError } from "../../services/utils.js";
|
||||
import ws from "../../services/ws.js";
|
||||
import type { Request } from "express";
|
||||
import type { EntityChange } from "../../services/entity_changes_interface.js";
|
||||
import ValidationError from "../../errors/validation_error.js";
|
||||
import consistencyChecksService from "../../services/consistency_checks.js";
|
||||
import { t } from "i18next";
|
||||
|
||||
async function testSync() {
|
||||
try {
|
||||
if (!syncOptions.isSyncSetup()) {
|
||||
return { success: false, message: t("test_sync.not-configured") };
|
||||
}
|
||||
|
||||
await syncService.login();
|
||||
|
||||
// login was successful, so we'll kick off sync now
|
||||
// this is important in case when sync server has been just initialized
|
||||
syncService.sync();
|
||||
|
||||
return { success: true, message: t("test_sync.successful") };
|
||||
} catch (e: unknown) {
|
||||
const [errMessage] = safeExtractMessageAndStackFromError(e);
|
||||
return {
|
||||
success: false,
|
||||
error: errMessage
|
||||
};
|
||||
}
|
||||
}
|
||||
|
||||
function getStats() {
|
||||
if (!sqlInit.schemaExists()) {
|
||||
// fail silently, but prevent errors caused by the options table not existing yet
|
||||
return {};
|
||||
}
|
||||
|
||||
const stats = {
|
||||
initialized: sql.getValue("SELECT value FROM options WHERE name = 'initialized'") === "true",
|
||||
outstandingPullCount: syncService.getOutstandingPullCount()
|
||||
};
|
||||
|
||||
log.info(`Returning sync stats: ${JSON.stringify(stats)}`);
|
||||
|
||||
return stats;
|
||||
}
|
||||
|
||||
function checkSync() {
|
||||
return {
|
||||
entityHashes: contentHashService.getEntityHashes(),
|
||||
maxEntityChangeId: sql.getValue("SELECT COALESCE(MAX(id), 0) FROM entity_changes WHERE isSynced = 1")
|
||||
};
|
||||
}
|
||||
|
||||
function syncNow() {
|
||||
log.info("Received request to trigger sync now.");
|
||||
|
||||
// when explicitly asked for set in progress status immediately for faster user feedback
|
||||
ws.syncPullInProgress();
|
||||
|
||||
return syncService.sync();
|
||||
}
|
||||
|
||||
function fillEntityChanges() {
|
||||
entityChangesService.fillAllEntityChanges();
|
||||
|
||||
log.info("Sync rows have been filled.");
|
||||
}
|
||||
|
||||
function forceFullSync() {
|
||||
optionService.setOption("lastSyncedPull", 0);
|
||||
optionService.setOption("lastSyncedPush", 0);
|
||||
|
||||
log.info("Forcing full sync.");
|
||||
|
||||
// not awaiting for the job to finish (will probably take a long time)
|
||||
syncService.sync();
|
||||
}
|
||||
|
||||
/**
|
||||
* @swagger
|
||||
* /api/sync/changed:
|
||||
* get:
|
||||
* summary: Pull sync changes
|
||||
* operationId: sync-changed
|
||||
* externalDocs:
|
||||
* description: Server implementation
|
||||
* url: https://github.com/TriliumNext/Notes/blob/v0.91.6/src/routes/api/sync.ts
|
||||
* parameters:
|
||||
* - in: query
|
||||
* name: instanceId
|
||||
* required: true
|
||||
* schema:
|
||||
* type: string
|
||||
* description: Local instance ID
|
||||
* - in: query
|
||||
* name: lastEntityChangeId
|
||||
* required: true
|
||||
* schema:
|
||||
* type: integer
|
||||
* description: Last locally present change ID
|
||||
* - in: query
|
||||
* name: logMarkerId
|
||||
* required: true
|
||||
* schema:
|
||||
* type: string
|
||||
* description: Marker to identify this request in server log
|
||||
* responses:
|
||||
* '200':
|
||||
* description: Sync changes, limited to approximately one megabyte.
|
||||
* content:
|
||||
* application/json:
|
||||
* schema:
|
||||
* type: object
|
||||
* properties:
|
||||
* entityChanges:
|
||||
* type: array
|
||||
* items:
|
||||
* $ref: '#/components/schemas/EntityChange'
|
||||
* lastEntityChangeId:
|
||||
* type: integer
|
||||
* description: If `outstandingPullCount > 0`, pass this as parameter in your next request to continue.
|
||||
* outstandingPullCount:
|
||||
* type: integer
|
||||
* example: 42
|
||||
* description: Number of changes not yet returned by the remote.
|
||||
* security:
|
||||
* - session: []
|
||||
* tags:
|
||||
* - sync
|
||||
*/
|
||||
function getChanged(req: Request) {
|
||||
const startTime = Date.now();
|
||||
|
||||
if (typeof req.query.lastEntityChangeId !== "string") {
|
||||
throw new ValidationError("Missing or invalid last entity change ID.");
|
||||
}
|
||||
|
||||
let lastEntityChangeId: number | null | undefined = parseInt(req.query.lastEntityChangeId);
|
||||
const clientInstanceId = req.query.instanceId;
|
||||
let filteredEntityChanges: EntityChange[] = [];
|
||||
|
||||
do {
|
||||
const entityChanges: EntityChange[] = sql.getRows<EntityChange>(
|
||||
`
|
||||
SELECT *
|
||||
FROM entity_changes
|
||||
WHERE isSynced = 1
|
||||
AND id > ?
|
||||
ORDER BY id
|
||||
LIMIT 1000`,
|
||||
[lastEntityChangeId]
|
||||
);
|
||||
|
||||
if (entityChanges.length === 0) {
|
||||
break;
|
||||
}
|
||||
|
||||
filteredEntityChanges = entityChanges.filter((ec) => ec.instanceId !== clientInstanceId);
|
||||
|
||||
if (filteredEntityChanges.length === 0) {
|
||||
lastEntityChangeId = entityChanges[entityChanges.length - 1].id;
|
||||
}
|
||||
} while (filteredEntityChanges.length === 0);
|
||||
|
||||
const entityChangeRecords = syncService.getEntityChangeRecords(filteredEntityChanges);
|
||||
|
||||
if (entityChangeRecords.length > 0) {
|
||||
lastEntityChangeId = entityChangeRecords[entityChangeRecords.length - 1].entityChange.id;
|
||||
|
||||
log.info(`Returning ${entityChangeRecords.length} entity changes in ${Date.now() - startTime}ms`);
|
||||
}
|
||||
|
||||
return {
|
||||
entityChanges: entityChangeRecords,
|
||||
lastEntityChangeId,
|
||||
outstandingPullCount: sql.getValue(
|
||||
`
|
||||
SELECT COUNT(id)
|
||||
FROM entity_changes
|
||||
WHERE isSynced = 1
|
||||
AND instanceId != ?
|
||||
AND id > ?`,
|
||||
[clientInstanceId, lastEntityChangeId]
|
||||
)
|
||||
};
|
||||
}
|
||||
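The pull side is effectively a cursor over the entity_changes table: a client keeps calling /api/sync/changed with the lastEntityChangeId from the previous response until outstandingPullCount drops to zero, exactly as the swagger block above describes. A hedged client-side sketch of that loop (the fetch wrapper, query-string handling, and applyEntityChanges helper are assumptions; the cursor logic mirrors the handler and its documentation):

    // Hypothetical pull loop - endpoint shape follows the swagger doc above.
    async function pullAllChanges(instanceId: string, startId: number) {
        let lastEntityChangeId = startId;

        while (true) {
            const res = await fetch(`/api/sync/changed?instanceId=${instanceId}` +
                `&lastEntityChangeId=${lastEntityChangeId}&logMarkerId=${Date.now()}`);
            const page = await res.json();

            applyEntityChanges(page.entityChanges);        // application-specific, assumed helper
            lastEntityChangeId = page.lastEntityChangeId;  // cursor for the next request

            if (page.outstandingPullCount === 0) {
                break;                                     // nothing left on the remote
            }
        }
    }

    declare function applyEntityChanges(changes: unknown[]): void; // assumed to exist on the client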
|
||||
const partialRequests: Record<
|
||||
string,
|
||||
{
|
||||
createdAt: number;
|
||||
payload: string;
|
||||
}
|
||||
> = {};
|
||||
|
||||
/**
|
||||
* @swagger
|
||||
* /api/sync/update:
|
||||
* put:
|
||||
* summary: Push sync changes
|
||||
* description:
|
||||
* "Basic usage: set `pageCount = 1`, `pageIndex = 0`, and omit `requestId`. Supply your entity changes in the request body."
|
||||
* operationId: sync-update
|
||||
* externalDocs:
|
||||
* description: Server implementation
|
||||
* url: https://github.com/TriliumNext/Notes/blob/v0.91.6/src/routes/api/sync.ts
|
||||
* parameters:
|
||||
* - in: header
|
||||
* name: pageCount
|
||||
* required: true
|
||||
* schema:
|
||||
* type: integer
|
||||
* - in: header
|
||||
* name: pageIndex
|
||||
* required: true
|
||||
* schema:
|
||||
* type: integer
|
||||
* - in: header
|
||||
* name: requestId
|
||||
* schema:
|
||||
* type: string
|
||||
* description: ID to identify paginated requests
|
||||
* - in: query
|
||||
* name: logMarkerId
|
||||
* required: true
|
||||
* schema:
|
||||
* type: string
|
||||
* description: Marker to identify this request in server log
|
||||
* requestBody:
|
||||
* content:
|
||||
* application/json:
|
||||
* schema:
|
||||
* type: object
|
||||
* properties:
|
||||
* instanceId:
|
||||
* type: string
|
||||
* description: Local instance ID
|
||||
* entities:
|
||||
* type: array
|
||||
* items:
|
||||
* $ref: '#/components/schemas/EntityChange'
|
||||
* responses:
|
||||
* '200':
|
||||
* description: Changes processed successfully
|
||||
* security:
|
||||
* - session: []
|
||||
* tags:
|
||||
* - sync
|
||||
*/
|
||||
function update(req: Request) {
|
||||
let { body } = req;
|
||||
|
||||
const pageCount = parseInt(req.get("pageCount") as string);
|
||||
const pageIndex = parseInt(req.get("pageIndex") as string);
|
||||
|
||||
if (pageCount !== 1) {
|
||||
const requestId = req.get("requestId");
|
||||
if (!requestId) {
|
||||
throw new Error("Missing request ID.");
|
||||
}
|
||||
|
||||
if (pageIndex === 0) {
|
||||
partialRequests[requestId] = {
|
||||
createdAt: Date.now(),
|
||||
payload: ""
|
||||
};
|
||||
}
|
||||
|
||||
if (!partialRequests[requestId]) {
|
||||
throw new Error(`Partial request ${requestId}, page ${pageIndex + 1} of ${pageCount} does not have the expected record.`);
|
||||
}
|
||||
|
||||
partialRequests[requestId].payload += req.body;
|
||||
|
||||
log.info(`Receiving a partial request ${requestId}, page ${pageIndex + 1} out of ${pageCount} pages.`);
|
||||
|
||||
if (pageIndex !== pageCount - 1) {
|
||||
return;
|
||||
} else {
|
||||
body = JSON.parse(partialRequests[requestId].payload);
|
||||
delete partialRequests[requestId];
|
||||
}
|
||||
}
|
||||
|
||||
const { entities, instanceId } = body;
|
||||
|
||||
sql.transactional(() => syncUpdateService.updateEntities(entities, instanceId));
|
||||
}
|
||||
|
||||
setInterval(() => {
|
||||
for (const key in partialRequests) {
|
||||
if (Date.now() - partialRequests[key].createdAt > 20 * 60 * 1000) {
|
||||
log.info(`Cleaning up unfinished partial requests for ${key}`);
|
||||
|
||||
delete partialRequests[key];
|
||||
}
|
||||
}
|
||||
}, 60 * 1000);
|
||||
|
||||
function syncFinished() {
|
||||
// after the first sync finishes, the application is ready to be used
|
||||
// this is meaningless but at the same time harmless (idempotent) for further syncs
|
||||
sqlInit.setDbAsInitialized();
|
||||
}
|
||||
|
||||
function queueSector(req: Request) {
|
||||
const entityName = utils.sanitizeSqlIdentifier(req.params.entityName);
|
||||
const sector = utils.sanitizeSqlIdentifier(req.params.sector);
|
||||
|
||||
entityChangesService.addEntityChangesForSector(entityName, sector);
|
||||
}
|
||||
|
||||
function checkEntityChanges() {
|
||||
consistencyChecksService.runEntityChangesChecks();
|
||||
}
|
||||
|
||||
export default {
|
||||
testSync,
|
||||
checkSync,
|
||||
syncNow,
|
||||
fillEntityChanges,
|
||||
forceFullSync,
|
||||
getChanged,
|
||||
update,
|
||||
getStats,
|
||||
syncFinished,
|
||||
queueSector,
|
||||
checkEntityChanges
|
||||
};
|
||||
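Large pushes to /api/sync/update can be split into pages: the client picks a requestId, sends each fragment with pageCount/pageIndex headers, and the server buffers the fragments in partialRequests until the last page arrives (stale fragments are discarded after 20 minutes by the interval above). A hedged sketch of the client side; the chunk size, content type, and fetch details are assumptions, while the headers match the handler:

    // Hypothetical paged push mirroring the pageCount/pageIndex/requestId handling in update().
    async function pushInPages(payload: object, pageSize = 500_000) {
        const json = JSON.stringify(payload);
        const pageCount = Math.ceil(json.length / pageSize);
        const requestId = Math.random().toString(36).slice(2);

        for (let pageIndex = 0; pageIndex < pageCount; pageIndex++) {
            await fetch("/api/sync/update?logMarkerId=" + Date.now(), {
                method: "PUT",
                headers: {
                    "Content-Type": "text/plain",          // assumed: fragments are concatenated as raw text
                    pageCount: String(pageCount),
                    pageIndex: String(pageIndex),
                    requestId
                },
                body: json.slice(pageIndex * pageSize, (pageIndex + 1) * pageSize)
            });
        }
    }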
19
apps/server/src/routes/api/totp.ts
Normal file
19
apps/server/src/routes/api/totp.ts
Normal file
@@ -0,0 +1,19 @@
|
||||
import totpService from '../../services/totp.js';

function generateTOTPSecret() {
return totpService.createSecret();
}

function getTOTPStatus() {
return { success: true, message: totpService.isTotpEnabled(), set: totpService.checkForTotpSecret() };
}

function getSecret() {
return totpService.getTotpSecret();
}

export default {
generateSecret: generateTOTPSecret,
getTOTPStatus,
getSecret
};
|
||||
206
apps/server/src/routes/api/tree.ts
Normal file
206
apps/server/src/routes/api/tree.ts
Normal file
@@ -0,0 +1,206 @@
|
||||
"use strict";
|
||||
|
||||
import becca from "../../becca/becca.js";
|
||||
import log from "../../services/log.js";
|
||||
import NotFoundError from "../../errors/not_found_error.js";
|
||||
import type { Request } from "express";
|
||||
import type BNote from "../../becca/entities/bnote.js";
|
||||
|
||||
function getNotesAndBranchesAndAttributes(_noteIds: string[] | Set<string>) {
|
||||
const noteIds = new Set(_noteIds);
|
||||
const collectedNoteIds = new Set<string>();
|
||||
const collectedAttributeIds = new Set<string>();
|
||||
const collectedBranchIds = new Set<string>();
|
||||
|
||||
function collectEntityIds(note?: BNote) {
|
||||
if (!note || collectedNoteIds.has(note.noteId)) {
|
||||
return;
|
||||
}
|
||||
|
||||
collectedNoteIds.add(note.noteId);
|
||||
|
||||
for (const branch of note.getParentBranches()) {
|
||||
if (branch.branchId) {
|
||||
collectedBranchIds.add(branch.branchId);
|
||||
}
|
||||
|
||||
collectEntityIds(branch.parentNote);
|
||||
}
|
||||
|
||||
for (const childNote of note.children) {
|
||||
const childBranch = becca.getBranchFromChildAndParent(childNote.noteId, note.noteId);
|
||||
if (childBranch && childBranch.branchId) {
|
||||
collectedBranchIds.add(childBranch.branchId);
|
||||
}
|
||||
}
|
||||
|
||||
for (const attr of note.ownedAttributes) {
|
||||
collectedAttributeIds.add(attr.attributeId);
|
||||
|
||||
if (attr.type === "relation" && ["template", "inherit"].includes(attr.name) && attr.targetNote) {
|
||||
collectEntityIds(attr.targetNote);
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
for (const noteId of noteIds) {
|
||||
const note = becca.notes[noteId];
|
||||
|
||||
if (!note) {
|
||||
continue;
|
||||
}
|
||||
|
||||
collectEntityIds(note);
|
||||
}
|
||||
|
||||
const notes = [];
|
||||
|
||||
for (const noteId of collectedNoteIds) {
|
||||
const note = becca.notes[noteId];
|
||||
|
||||
notes.push({
|
||||
noteId: note.noteId,
|
||||
title: note.getTitleOrProtected(),
|
||||
isProtected: note.isProtected,
|
||||
type: note.type,
|
||||
mime: note.mime,
|
||||
blobId: note.blobId
|
||||
});
|
||||
}
|
||||
|
||||
const branches = [];
|
||||
|
||||
if (noteIds.has("root")) {
|
||||
branches.push({
|
||||
branchId: "none_root",
|
||||
noteId: "root",
|
||||
parentNoteId: "none",
|
||||
notePosition: 0,
|
||||
prefix: "",
|
||||
isExpanded: true
|
||||
});
|
||||
}
|
||||
|
||||
for (const branchId of collectedBranchIds) {
|
||||
const branch = becca.branches[branchId];
|
||||
|
||||
if (!branch) {
|
||||
log.error(`Could not find branch for branchId=${branchId}`);
|
||||
continue;
|
||||
}
|
||||
|
||||
branches.push({
|
||||
branchId: branch.branchId,
|
||||
noteId: branch.noteId,
|
||||
parentNoteId: branch.parentNoteId,
|
||||
notePosition: branch.notePosition,
|
||||
prefix: branch.prefix,
|
||||
isExpanded: branch.isExpanded
|
||||
});
|
||||
}
|
||||
|
||||
const attributes = [];
|
||||
|
||||
for (const attributeId of collectedAttributeIds) {
|
||||
const attribute = becca.attributes[attributeId];
|
||||
|
||||
if (!attribute) {
|
||||
log.error(`Could not find attribute for attributeId=${attributeId}`);
|
||||
continue;
|
||||
}
|
||||
|
||||
attributes.push({
|
||||
attributeId: attribute.attributeId,
|
||||
noteId: attribute.noteId,
|
||||
type: attribute.type,
|
||||
name: attribute.name,
|
||||
value: attribute.value,
|
||||
position: attribute.position,
|
||||
isInheritable: attribute.isInheritable
|
||||
});
|
||||
}
|
||||
|
||||
return {
|
||||
branches,
|
||||
notes,
|
||||
attributes
|
||||
};
|
||||
}
|
||||
|
||||
/**
|
||||
* @swagger
|
||||
* /api/tree:
|
||||
* get:
|
||||
* summary: Retrieve tree data
|
||||
* operationId: tree
|
||||
* externalDocs:
|
||||
* description: Server implementation
|
||||
* url: https://github.com/TriliumNext/Notes/blob/v0.91.6/src/routes/api/tree.ts
|
||||
* parameters:
|
||||
* - in: query
|
||||
* name: subTreeNoteId
|
||||
* required: false
|
||||
* schema:
|
||||
* type: string
|
||||
* description: Limit tree data to this note and descendants
|
||||
* responses:
|
||||
* '200':
|
||||
* description: Notes, branches and attributes
|
||||
* content:
|
||||
* application/json:
|
||||
* schema:
|
||||
* type: object
|
||||
* properties:
|
||||
* branches:
|
||||
* type: array
|
||||
* items:
|
||||
* $ref: '#/components/schemas/Branch'
|
||||
* notes:
|
||||
* type: array
|
||||
* items:
|
||||
* $ref: '#/components/schemas/Note'
|
||||
* attributes:
|
||||
* type: array
|
||||
* items:
|
||||
* $ref: '#/components/schemas/Attribute'
|
||||
* security:
|
||||
* - session: []
|
||||
* tags: ["data"]
|
||||
*/
|
||||
function getTree(req: Request) {
|
||||
const subTreeNoteId = typeof req.query.subTreeNoteId === "string" ? req.query.subTreeNoteId : "root";
|
||||
const collectedNoteIds = new Set<string>([subTreeNoteId]);
|
||||
|
||||
function collect(parentNote: BNote) {
|
||||
if (!parentNote) {
|
||||
console.trace(parentNote);
|
||||
}
|
||||
|
||||
for (const childNote of parentNote.children) {
|
||||
collectedNoteIds.add(childNote.noteId);
|
||||
|
||||
const childBranch = becca.getBranchFromChildAndParent(childNote.noteId, parentNote.noteId);
|
||||
|
||||
if (childBranch?.isExpanded) {
|
||||
collect(childBranch.childNote);
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
if (!(subTreeNoteId in becca.notes)) {
|
||||
throw new NotFoundError(`Note '${subTreeNoteId}' not found in the cache`);
|
||||
}
|
||||
|
||||
collect(becca.notes[subTreeNoteId]);
|
||||
|
||||
return getNotesAndBranchesAndAttributes(collectedNoteIds);
|
||||
}
|
||||
|
||||
function load(req: Request) {
|
||||
return getNotesAndBranchesAndAttributes(req.body.noteIds);
|
||||
}
|
||||
|
||||
export default {
|
||||
getTree,
|
||||
load
|
||||
};
|
||||
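getTree only descends into branches flagged isExpanded, so the initial payload contains the subtree root, its visible children, and whatever deeper levels the user had left expanded; collapsed branches are fetched later through load() with an explicit noteIds list. A hypothetical follow-up request when the user expands a node (the route path is an assumption; the body shape matches load(req) above):

    // Hypothetical client-side follow-up after expanding a collapsed branch.
    async function loadSubtree(noteIds: string[]) {
        const res = await fetch("/api/tree/load", {    // route path is an assumption
            method: "POST",
            headers: { "Content-Type": "application/json" },
            body: JSON.stringify({ noteIds })
        });
        return res.json();                             // { branches, notes, attributes }
    }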