Mirror of https://github.com/zadam/trilium.git (synced 2025-11-18 03:00:41 +01:00)
Merge branch 'beta'

# Conflicts:
#	docs/backend_api/BAttachment.html
#	docs/backend_api/BRevision.html
#	docs/backend_api/BackendScriptApi.html
#	docs/backend_api/becca_entities_battachment.js.html
#	docs/backend_api/becca_entities_bblob.js.html
#	docs/backend_api/becca_entities_brevision.js.html
#	docs/frontend_api/FNote.html
#	docs/frontend_api/FrontendScriptApi.html
#	docs/frontend_api/entities_fattachment.js.html
#	docs/frontend_api/entities_fblob.js.html
#	docs/frontend_api/services_frontend_script_api.js.html
#	package-lock.json
#	src/public/app/services/frontend_script_api.js
@@ -558,6 +558,48 @@ function BackendScriptApi(currentNote, apiParams) {
      */
     this.exportSubtreeToZipFile = async (noteId, format, zipFilePath) => await exportService.exportToZipFile(noteId, format, zipFilePath);
 
+    /**
+     * Executes the given anonymous function on the frontend(s).
+     * Internally, this serializes the anonymous function into a string and sends it to the frontend(s) via WebSocket.
+     * Note that there can be multiple connected frontend instances (e.g. in different tabs). In such a case, all
+     * instances execute the given function.
+     *
+     * @method
+     * @param {string} script - script to be executed on the frontend
+     * @param {Array.<?>} params - list of parameters to the anonymous function to be sent to the frontend
+     * @returns {undefined} - no return value is provided.
+     */
+    this.runOnFrontend = async (script, params = []) => {
+        if (typeof script === "function") {
+            script = script.toString();
+        }
+
+        ws.sendMessageToAllClients({
+            type: 'execute-script',
+            script: script,
+            params: prepareParams(params),
+            startNoteId: this.startNote.noteId,
+            currentNoteId: this.currentNote.noteId,
+            originEntityName: "notes", // currently there's no other entity on the frontend which can trigger the event
+            originEntityId: this.originEntity?.noteId || null
+        });
+
+        function prepareParams(params) {
+            if (!params) {
+                return params;
+            }
+
+            return params.map(p => {
+                if (typeof p === "function") {
+                    return `!@#Function: ${p.toString()}`;
+                }
+                else {
+                    return p;
+                }
+            });
+        }
+    };
+
     /**
      * This object contains "at your risk" and "no BC guarantees" objects for advanced use cases.
      *
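For orientation, a minimal usage sketch of the new method as called from a backend script note (illustrative only, not part of this commit; it assumes the frontend script API's showMessage() is available to the deserialized function):

// Runs on the backend, shows a toast in every connected frontend instance.
// The passed function is stringified as shown above; params may themselves contain
// functions, which are marked with the "!@#Function:" prefix before being sent over the WebSocket.
await api.runOnFrontend((msg) => api.showMessage(msg), ["Consistency check finished"]);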
@@ -1 +1 @@
-module.exports = { buildDate:"2023-08-10T23:49:37+02:00", buildRevision: "e741c2826c3b2ca5f3d6c7505f45a684e5231dba" };
+module.exports = { buildDate:"2023-08-16T23:02:15+02:00", buildRevision: "3f7a5504c77263a7118cede5c0d9b450ba37f424" };
@@ -758,7 +758,7 @@ class ConsistencyChecks {
             return `${tableName}: ${count}`;
         }
 
-        const tables = [ "notes", "revisions", "attachments", "branches", "attributes", "etapi_tokens" ];
+        const tables = [ "notes", "revisions", "attachments", "branches", "attributes", "etapi_tokens", "blobs" ];
 
         log.info(`Table counts: ${tables.map(tableName => getTableRowCount(tableName)).join(", ")}`);
     }
@@ -767,7 +767,13 @@ class ConsistencyChecks {
         let elapsedTimeMs;
 
         await syncMutexService.doExclusively(() => {
-            elapsedTimeMs = this.runChecksInner();
+            const startTimeMs = Date.now();
+
+            this.runDbDiagnostics();
+
+            this.runAllChecksAndFixers();
+
+            elapsedTimeMs = Date.now() - startTimeMs;
         });
 
         if (this.unrecoveredConsistencyErrors) {
@@ -781,16 +787,6 @@ class ConsistencyChecks {
             );
         }
     }
-
-    runChecksInner() {
-        const startTimeMs = Date.now();
-
-        this.runDbDiagnostics();
-
-        this.runAllChecksAndFixers();
-
-        return Date.now() - startTimeMs;
-    }
 }
 
 function getBlankContent(isProtected, type, mime) {
@@ -825,11 +821,6 @@ async function runOnDemandChecks(autoFix) {
     await consistencyChecks.runChecks();
 }
 
-function runOnDemandChecksWithoutExclusiveLock(autoFix) {
-    const consistencyChecks = new ConsistencyChecks(autoFix);
-    consistencyChecks.runChecksInner();
-}
-
 function runEntityChangesChecks() {
     const consistencyChecks = new ConsistencyChecks(true);
     consistencyChecks.findEntityChangeIssues();
@@ -844,6 +835,5 @@ sqlInit.dbReady.then(() => {
 
 module.exports = {
     runOnDemandChecks,
-    runOnDemandChecksWithoutExclusiveLock,
    runEntityChangesChecks
 };
@@ -1,6 +1,9 @@
 const dayjs = require('dayjs');
 const cls = require('./cls');
 
+const LOCAL_DATETIME_FORMAT = 'YYYY-MM-DD HH:mm:ss.SSSZZ';
+const UTC_DATETIME_FORMAT = 'YYYY-MM-DD HH:mm:ssZ';
+
 function utcNowDateTime() {
     return utcDateTimeStr(new Date());
 }
@@ -10,7 +13,7 @@ function utcNowDateTime() {
 // "trilium-local-now-datetime" header which is then stored in CLS
 function localNowDateTime() {
     return cls.getLocalNowDateTime()
-        || dayjs().format('YYYY-MM-DD HH:mm:ss.SSSZZ')
+        || dayjs().format(LOCAL_DATETIME_FORMAT)
 }
 
 function localNowDate() {
@@ -62,6 +65,36 @@ function getDateTimeForFile() {
     return new Date().toISOString().substr(0, 19).replace(/:/g, '');
 }
 
+function validateLocalDateTime(str) {
+    if (!str) {
+        return;
+    }
+
+    if (!/[0-9]{4}-[0-9]{2}-[0-9]{2} [0-9]{2}:[0-9]{2}:[0-9]{2}\.[0-9]{3}[+-][0-9]{4}/.test(str)) {
+        return `Invalid local date time format in '${str}'. Correct format should follow the example: '2023-08-21 23:38:51.110+0200'`;
+    }
+
+    if (!dayjs(str, LOCAL_DATETIME_FORMAT)) {
+        return `Date '${str}' appears to be in the correct format, but cannot be parsed. It likely represents an invalid date.`;
+    }
+}
+
+function validateUtcDateTime(str) {
+    if (!str) {
+        return;
+    }
+
+    if (!/[0-9]{4}-[0-9]{2}-[0-9]{2} [0-9]{2}:[0-9]{2}:[0-9]{2}\.[0-9]{3}Z/.test(str)) {
+        return `Invalid UTC date time format in '${str}'. Correct format should follow the example: '2023-08-21 23:38:51.110Z'`;
+    }
+
+    if (!dayjs(str, UTC_DATETIME_FORMAT)) {
+        return `Date '${str}' appears to be in the correct format, but cannot be parsed. It likely represents an invalid date.`;
+    }
+}
+
 module.exports = {
     utcNowDateTime,
     localNowDateTime,
@@ -70,5 +103,7 @@ module.exports = {
     utcDateTimeStr,
     parseDateTime,
     parseLocalDate,
-    getDateTimeForFile
+    getDateTimeForFile,
+    validateLocalDateTime,
+    validateUtcDateTime
 };
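A small behavior sketch for the new validators, derived from the code above (sample values are illustrative only):

// Return values follow the implementation above: a string describes the problem,
// undefined means the value is either absent or valid.
const dateUtils = require('./date_utils');

dateUtils.validateLocalDateTime('2023-08-21 23:38:51.110+0200'); // undefined (valid)
dateUtils.validateLocalDateTime('2023-08-21');                   // "Invalid local date time format in '2023-08-21'. ..."
dateUtils.validateUtcDateTime('2023-08-21 21:38:51.110Z');       // undefined (valid)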
@@ -15,6 +15,12 @@ function putEntityChangeWithInstanceId(origEntityChange, instanceId) {
     putEntityChange(ec);
 }
 
+function putEntityChangeWithForcedChange(origEntityChange) {
+    const ec = {...origEntityChange, changeId: null};
+
+    putEntityChange(ec);
+}
+
 function putEntityChange(origEntityChange) {
     const ec = {...origEntityChange};
 
@@ -66,13 +72,37 @@ function putEntityChangeForOtherInstances(ec) {
 function addEntityChangesForSector(entityName, sector) {
     const entityChanges = sql.getRows(`SELECT * FROM entity_changes WHERE entityName = ? AND SUBSTR(entityId, 1, 1) = ?`, [entityName, sector]);
 
+    let entitiesInserted = entityChanges.length;
+
     sql.transactional(() => {
+        if (entityName === 'blobs') {
+            entitiesInserted += addEntityChangesForDependingEntity(sector, 'notes', 'noteId');
+            entitiesInserted += addEntityChangesForDependingEntity(sector, 'attachments', 'attachmentId');
+            entitiesInserted += addEntityChangesForDependingEntity(sector, 'revisions', 'revisionId');
+        }
+
         for (const ec of entityChanges) {
-            putEntityChange(ec);
+            putEntityChangeWithForcedChange(ec);
         }
     });
 
-    log.info(`Added sector ${sector} of '${entityName}' (${entityChanges.length} entities) to the sync queue.`);
+    log.info(`Added sector ${sector} of '${entityName}' (${entitiesInserted} entities) to the sync queue.`);
 }
 
+function addEntityChangesForDependingEntity(sector, tableName, primaryKeyColumn) {
+    // a problem in blobs might be caused by a problem in the entity referencing the blob
+    const dependingEntityChanges = sql.getRows(`
+        SELECT dep_change.*
+        FROM entity_changes orig_sector
+        JOIN ${tableName} ON ${tableName}.blobId = orig_sector.entityId
+        JOIN entity_changes dep_change ON dep_change.entityName = '${tableName}' AND dep_change.entityId = ${tableName}.${primaryKeyColumn}
+        WHERE orig_sector.entityName = 'blobs' AND SUBSTR(orig_sector.entityId, 1, 1) = ?`, [sector]);
+
+    for (const ec of dependingEntityChanges) {
+        putEntityChangeWithForcedChange(ec);
+    }
+
+    return dependingEntityChanges.length;
+}
+
 function cleanupEntityChangesForMissingEntities(entityName, entityPrimaryKey) {
@@ -161,6 +191,7 @@ function recalculateMaxEntityChangeId() {
 module.exports = {
     putNoteReorderingEntityChange,
     putEntityChangeForOtherInstances,
+    putEntityChangeWithForcedChange,
     putEntityChange,
     putEntityChangeWithInstanceId,
     fillAllEntityChanges,
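A hedged reading of the new helper, sketched with hypothetical values (how putEntityChange treats a null changeId is assumed, not shown in this diff):

// putEntityChange() keeps an existing changeId, so re-putting an unchanged row can be deduplicated;
// clearing changeId presumably forces a fresh one to be generated downstream, which is what
// re-queuing a whole sector for sync needs.
const ec = { entityName: 'blobs', entityId: 'XYZ', changeId: 'abc123', isErased: false };

putEntityChange({ ...ec });                  // keeps changeId 'abc123'
putEntityChangeWithForcedChange({ ...ec });  // changeId reset to null => treated as a new change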
@@ -39,7 +39,7 @@ function setEntityChangesAsErased(entityChanges) {
         ec.isErased = true;
         ec.utcDateChanged = dateUtils.utcNowDateTime();
 
-        entityChangesService.putEntityChange(ec);
+        entityChangesService.putEntityChangeWithForcedChange(ec);
     }
 }
 
@@ -301,16 +301,10 @@ function importEnex(taskContext, file, parentNote) {
                 ? resource.title
                 : `image.${resource.mime.substr(6)}`; // default if real name is not present
 
-            const {url, note: imageNote} = imageService.saveImage(noteEntity.noteId, resource.content, originalName, taskContext.data.shrinkImages);
-
-            for (const attr of resource.attributes) {
-                if (attr.name !== 'originalFileName') { // this one is already saved in imageService
-                    imageNote.addAttribute(attr.type, attr.name, attr.value);
-                }
-            }
-
-            updateDates(imageNote, utcDateCreated, utcDateModified);
+            const attachment = imageService.saveImageToAttachment(noteEntity.noteId, resource.content, originalName, taskContext.data.shrinkImages);
 
+            const sanitizedTitle = attachment.title.replace(/[^a-z0-9-.]/gi, "");
+            const url = `api/attachments/${attachment.attachmentId}/image/${sanitizedTitle}`;
             const imageLink = `<img src="${url}">`;
 
             content = content.replace(mediaRegex, imageLink);
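To make the new URL construction concrete, a worked example with a hypothetical attachment (id and title invented for illustration):

// Hypothetical values, shown only to illustrate the sanitization and URL shape above.
const attachment = { attachmentId: 'zB1ZfYGkWtYe', title: 'my photo (1).jpg' };

const sanitizedTitle = attachment.title.replace(/[^a-z0-9-.]/gi, "");   // "myphoto1.jpg"
const url = `api/attachments/${attachment.attachmentId}/image/${sanitizedTitle}`;
// => "api/attachments/zB1ZfYGkWtYe/image/myphoto1.jpg"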
@@ -9,8 +9,8 @@ const appInfo = require('./app_info');
 async function migrate() {
     const currentDbVersion = getDbVersion();
 
-    if (currentDbVersion < 183) {
-        log.error("Direct migration from your current version is not supported. Please upgrade to the latest v0.47.X first and only then to this version.");
+    if (currentDbVersion < 214) {
+        log.error("Direct migration from your current version is not supported. Please upgrade to the latest v0.60.X first and only then to this version.");
 
         utils.crash();
         return;
@@ -18,9 +18,9 @@ async function migrate() {
 
     // backup before attempting migration
     await backupService.backupNow(
-        // creating a special backup for versions 0.60.X and older, the changes in 0.61 are major.
-        currentDbVersion < 214
-            ? `before-migration-v${currentDbVersion}`
+        // creating a special backup for versions 0.60.X, the changes in 0.61 are major.
+        currentDbVersion === 214
+            ? `before-migration-v060`
             : 'before-migration'
     );
 
@@ -1,5 +1,4 @@
 const sql = require('./sql');
-const sqlInit = require('./sql_init');
 const optionService = require('./options');
 const dateUtils = require('./date_utils');
 const entityChangesService = require('./entity_changes');
@@ -169,6 +168,15 @@ function createNewNote(params) {
         throw new Error(`Note content must be set`);
     }
 
+    let error;
+    if (error = dateUtils.validateLocalDateTime(params.dateCreated)) {
+        throw new Error(error);
+    }
+
+    if (error = dateUtils.validateUtcDateTime(params.utcDateCreated)) {
+        throw new Error(error);
+    }
+
     return sql.transactional(() => {
         let note, branch, isEntityEventsDisabled;
 
@@ -189,7 +197,9 @@ function createNewNote(params) {
             title: params.title,
             isProtected: !!params.isProtected,
             type: params.type,
-            mime: deriveMime(params.type, params.mime)
+            mime: deriveMime(params.type, params.mime),
+            dateCreated: params.dateCreated,
+            utcDateCreated: params.utcDateCreated
         }).save();
 
         note.setContent(params.content);
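A minimal usage sketch of the new creation-date parameters, assuming the backend script API's createNewNote wrapper forwards params unchanged to this service (the parent note id is a placeholder):

// Illustrative only. Invalid formats now throw with the message produced by
// dateUtils.validateLocalDateTime() / validateUtcDateTime().
const { note } = api.createNewNote({
    parentNoteId: 'rootNoteId',            // hypothetical parent
    title: 'Imported note',
    type: 'text',
    content: '<p>imported</p>',
    dateCreated: '2023-08-21 23:38:51.110+0200',
    utcDateCreated: '2023-08-21 21:38:51.110Z'
});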
@@ -9,7 +9,7 @@ function getOptionOrNull(name) {
         option = becca.getOption(name);
     } else {
         // e.g. in initial sync becca is not loaded because DB is not initialized
-        option = sql.getRow("SELECT * FROM options WHERE name = ?", name);
+        option = sql.getRow("SELECT * FROM options WHERE name = ?", [name]);
     }
 
     return option ? option.value : null;
@@ -10,7 +10,7 @@ function executeNote(note, apiParams) {
         return;
     }
 
-    const bundle = getScriptBundle(note);
+    const bundle = getScriptBundle(note, true, 'backend');
 
     return executeBundle(bundle, apiParams);
 }
@@ -68,9 +68,9 @@ function executeScript(script, params, startNoteId, currentNoteId, originEntityN
 
     // we're just executing an excerpt of the original frontend script in the backend context, so we must
     // override the normal note's content, and its mime type / script environment
-    const backendOverrideContent = `return (${script}\r\n)(${getParams(params)})`;
+    const overrideContent = `return (${script}\r\n)(${getParams(params)})`;
 
-    const bundle = getScriptBundle(currentNote, true, null, [], backendOverrideContent);
+    const bundle = getScriptBundle(currentNote, true, 'backend', [], overrideContent);
 
     return executeBundle(bundle, { startNote, originEntity });
 }
@@ -96,9 +96,17 @@ function getParams(params) {
 
 /**
  * @param {BNote} note
+ * @param {string} [script]
+ * @param {Array} [params]
  */
-function getScriptBundleForFrontend(note) {
-    const bundle = getScriptBundle(note);
+function getScriptBundleForFrontend(note, script, params) {
+    let overrideContent = null;
+
+    if (script) {
+        overrideContent = `return (${script}\r\n)(${getParams(params)})`;
+    }
+
+    const bundle = getScriptBundle(note, true, 'frontend', [], overrideContent);
 
     if (!bundle) {
         return;
@@ -119,9 +127,9 @@ function getScriptBundleForFrontend(note) {
  * @param {boolean} [root=true]
  * @param {string|null} [scriptEnv]
  * @param {string[]} [includedNoteIds]
- * @param {string|null} [backendOverrideContent]
+ * @param {string|null} [overrideContent]
  */
-function getScriptBundle(note, root = true, scriptEnv = null, includedNoteIds = [], backendOverrideContent = null) {
+function getScriptBundle(note, root = true, scriptEnv = null, includedNoteIds = [], overrideContent = null) {
     if (!note.isContentAvailable()) {
         return;
     }
@@ -134,12 +142,6 @@ function getScriptBundle(note, root = true, scriptEnv = null, includedNoteIds =
         return;
     }
 
-    if (root) {
-        scriptEnv = backendOverrideContent
-            ? 'backend'
-            : note.getScriptEnv();
-    }
-
     if (note.type !== 'file' && !root && scriptEnv !== note.getScriptEnv()) {
         return;
     }
@@ -180,7 +182,7 @@ function getScriptBundle(note, root = true, scriptEnv = null, includedNoteIds =
 apiContext.modules['${note.noteId}'] = { exports: {} };
 ${root ? 'return ' : ''}${isFrontend ? 'await' : ''} ((${isFrontend ? 'async' : ''} function(exports, module, require, api${modules.length > 0 ? ', ' : ''}${modules.map(child => sanitizeVariableName(child.title)).join(', ')}) {
 try {
-${backendOverrideContent || note.getContent()};
+${overrideContent || note.getContent()};
 } catch (e) { throw new Error("Load of script note \\"${note.title}\\" (${note.noteId}) failed with: " + e.message); }
 for (const exportKey in exports) module.exports[exportKey] = exports[exportKey];
 return module.exports;
@@ -11,7 +11,7 @@ function lex(str) {
     let currentWord = '';
 
     function isSymbolAnOperator(chr) {
-        return ['=', '*', '>', '<', '!', "-", "+", '%'].includes(chr);
+        return ['=', '*', '>', '<', '!', "-", "+", '%', ','].includes(chr);
     }
 
     function isPreviousSymbolAnOperator() {
@@ -128,6 +128,10 @@ function lex(str) {
             }
         }
 
+        if (chr === ',') {
+            continue;
+        }
+
         currentWord += chr;
     }
 
@@ -40,13 +40,12 @@ function updateNormalEntity(remoteEC, remoteEntityRow, instanceId) {
         // on this side, we can't unerase the entity, so force the entity to be erased on the other side.
         entityChangesService.putEntityChangeForOtherInstances(localEC);
 
         return false;
     } else if (localEC?.isErased && remoteEC.isErased) {
         return false;
     }
 
-    if (!localEC
-        || localEC.utcDateChanged < remoteEC.utcDateChanged
-        || (localEC.utcDateChanged === remoteEC.utcDateChanged && localEC.hash !== remoteEC.hash) // sync error, we should still update
-    ) {
+    if (!localEC || localEC.utcDateChanged <= remoteEC.utcDateChanged) {
         if (remoteEC.entityName === 'blobs' && remoteEntityRow.content !== null) {
             // we always use a Buffer object which is different from normal saving - there we use a simple string type for
             // "string notes". The problem is that in general, it's not possible to detect whether a blob content
@@ -62,7 +61,9 @@ function updateNormalEntity(remoteEC, remoteEntityRow, instanceId) {
 
         sql.replace(remoteEC.entityName, remoteEntityRow);
 
-        entityChangesService.putEntityChangeWithInstanceId(remoteEC, instanceId);
+        if (!localEC || localEC.utcDateChanged < remoteEC.utcDateChanged) {
+            entityChangesService.putEntityChangeWithInstanceId(remoteEC, instanceId);
+        }
 
         return true;
     } else if (localEC.hash !== remoteEC.hash && localEC.utcDateChanged > remoteEC.utcDateChanged) {
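Restating the simplified acceptance rule as a sketch (illustrative only, mirroring the conditions in the hunk above):

// Accept the remote row when there is no local change record or the local one is not newer;
// record the incoming entity change only when the remote is strictly newer, so an equal
// timestamp updates the row without re-broadcasting the change.
function shouldAcceptRemote(localEC, remoteEC) {
    return !localEC || localEC.utcDateChanged <= remoteEC.utcDateChanged;
}

function shouldRecordChange(localEC, remoteEC) {
    return !localEC || localEC.utcDateChanged < remoteEC.utcDateChanged;
}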
@@ -184,10 +184,8 @@ function sortNotesIfNeeded(parentNoteId) {
     }
 
     const sortReversed = parentNote.getLabelValue('sortDirection')?.toLowerCase() === "desc";
-    const sortFoldersFirstLabel = parentNote.getLabel('sortFoldersFirst');
-    const sortFoldersFirst = sortFoldersFirstLabel && sortFoldersFirstLabel.value.toLowerCase() !== "false";
-    const sortNaturalLabel = parentNote.getLabel('sortNatural');
-    const sortNatural = sortNaturalLabel && sortNaturalLabel.value.toLowerCase() !== "false";
+    const sortFoldersFirst = parentNote.isLabelTruthy('sortFoldersFirst');
+    const sortNatural = parentNote.isLabelTruthy('sortNatural');
     const sortLocale = parentNote.getLabelValue('sortLocale');
 
     sortNotes(parentNoteId, sortedLabel.value, sortReversed, sortFoldersFirst, sortNatural, sortLocale);
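For reference, the truthiness semantics being replaced, sketched as a standalone helper equivalent to the removed lines (the actual BNote.isLabelTruthy() implementation is not part of this diff and is assumed to behave the same way):

// Equivalent of the removed inline checks: the label must exist and its value must not be
// the string "false"; an empty value still counts as truthy.
function isLabelTruthyLike(note, name) {
    const label = note.getLabel(name);
    return !!label && label.value.toLowerCase() !== "false";
}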