Mirror of https://github.com/zadam/trilium.git (synced 2025-11-18 03:00:41 +01:00)

Commit: sync fixes and refactorings
@@ -597,14 +597,10 @@ class ConsistencyChecks {
     runEntityChangeChecks(entityName, key) {
         this.findAndFixIssues(`
-            SELECT
-              ${key} as entityId
-            FROM
-              ${entityName}
-              LEFT JOIN entity_changes ON entity_changes.entityName = '${entityName}'
-                                      AND entity_changes.entityId = ${key}
-            WHERE
-              entity_changes.id IS NULL`,
+            SELECT ${key} as entityId
+            FROM ${entityName}
+            LEFT JOIN entity_changes ec ON ec.entityName = '${entityName}' AND ec.entityId = ${entityName}.${key}
+            WHERE ec.id IS NULL`,
             ({entityId}) => {
                 const entityRow = sql.getRow(`SELECT * FROM ${entityName} WHERE ${key} = ?`, [entityId]);
@@ -613,7 +609,7 @@ class ConsistencyChecks {
                         entityName,
                         entityId,
                         hash: utils.randomString(10), // doesn't matter, will force sync, but that's OK
-                        isErased: !!entityRow.isErased,
+                        isErased: false,
                         utcDateChanged: entityRow.utcDateModified || entityRow.utcDateCreated,
                         isSynced: entityName !== 'options' || entityRow.isSynced
                     });
@@ -625,15 +621,13 @@ class ConsistencyChecks {
             });

         this.findAndFixIssues(`
-            SELECT
-              id, entityId
-            FROM
-              entity_changes
-              LEFT JOIN ${entityName} ON entityId = ${key}
+            SELECT id, entityId
+            FROM entity_changes
+            LEFT JOIN ${entityName} ON entityId = ${entityName}.${key}
             WHERE
               entity_changes.isErased = 0
               AND entity_changes.entityName = '${entityName}'
-              AND ${key} IS NULL`,
+              AND ${entityName}.${key} IS NULL`,
             ({id, entityId}) => {
                 if (this.autoFix) {
                     sql.execute("DELETE FROM entity_changes WHERE entityName = ? AND entityId = ?", [entityName, entityId]);
@@ -645,11 +639,9 @@ class ConsistencyChecks {
             });

         this.findAndFixIssues(`
-            SELECT
-              id, entityId
-            FROM
-              entity_changes
-              JOIN ${entityName} ON entityId = ${key}
+            SELECT id, entityId
+            FROM entity_changes
+            JOIN ${entityName} ON entityId = ${entityName}.${key}
             WHERE
               entity_changes.isErased = 1
               AND entity_changes.entityName = '${entityName}'`,
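The three checks above share one orphan-detection pattern: a LEFT JOIN between the entity table and entity_changes finds rows that exist on one side but have no counterpart on the other. A minimal sketch of that pattern with better-sqlite3, using a hypothetical `notes`/`noteId` table and key (not code from this commit):

    // sketch only: assumes a better-sqlite3 connection and the trilium-style
    // entity_changes table (id, entityName, entityId, isErased) seen in the diff
    const db = require('better-sqlite3')('document.db');

    // entities that have no entity_changes row at all
    const missingChanges = db.prepare(`
        SELECT notes.noteId AS entityId
        FROM notes
        LEFT JOIN entity_changes ec ON ec.entityName = 'notes' AND ec.entityId = notes.noteId
        WHERE ec.id IS NULL`).all();

    // non-erased entity_changes rows that point at a missing entity
    const danglingChanges = db.prepare(`
        SELECT ec.id, ec.entityId
        FROM entity_changes ec
        LEFT JOIN notes ON notes.noteId = ec.entityId
        WHERE ec.isErased = 0
          AND ec.entityName = 'notes'
          AND notes.noteId IS NULL`).all();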
@@ -14,7 +14,8 @@ function getEntityHashes() {
     const hashRows = sql.getRawRows(`
         SELECT entityName,
                entityId,
-               hash
+               hash,
+               isErased
         FROM entity_changes
         WHERE isSynced = 1
           AND entityName != 'note_reordering'`);
@@ -25,12 +26,17 @@ function getEntityHashes() {
     const hashMap = {};

-    for (const [entityName, entityId, hash] of hashRows) {
+    for (const [entityName, entityId, hash, isErased] of hashRows) {
         const entityHashMap = hashMap[entityName] = hashMap[entityName] || {};

         const sector = entityId[0];

-        entityHashMap[sector] = (entityHashMap[sector] || "") + hash
+        if (entityName === 'revisions' && sector === '5') {
+            console.log(entityId, hash, isErased);
+        }
+
+        // if the entity is erased, its hash is not updated, so it has to be added extra
+        entityHashMap[sector] = (entityHashMap[sector] || "") + hash + isErased;
     }

     for (const entityHashMap of Object.values(hashMap)) {
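getEntityHashes groups per-entity hashes by "sector" (the first character of the entityId), so a content-hash mismatch between client and server can be narrowed down to a single sector and only that slice re-queued (see addEntityChangesForSector below). A rough sketch of the idea; the final hashing step is an assumption, not taken verbatim from the repo:

    const crypto = require('crypto');

    function buildSectorHashes(rows) {
        const hashMap = {};

        for (const {entityName, entityId, hash, isErased} of rows) {
            const sectors = hashMap[entityName] = hashMap[entityName] || {};
            const sector = entityId[0]; // first character of the ID defines the sector

            // erased entities keep their old hash, so the flag is mixed in as well
            sectors[sector] = (sectors[sector] || "") + hash + isErased;
        }

        // reduce each concatenation to one short digest for comparison
        for (const sectors of Object.values(hashMap)) {
            for (const sector of Object.keys(sectors)) {
                sectors[sector] = crypto.createHash('sha1').update(sectors[sector]).digest('base64');
            }
        }

        return hashMap;
    }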
@@ -3,7 +3,7 @@ const dateUtils = require('./date_utils');
 const log = require('./log');
 const cls = require('./cls');
 const utils = require('./utils');
-const instanceId = require('./member_id');
+const instanceId = require('./instance_id');
 const becca = require("../becca/becca");
 const blobService = require("../services/blob");
@@ -62,8 +62,6 @@ function moveEntityChangeToTop(entityName, entityId) {
 }

 function addEntityChangesForSector(entityName, sector) {
-    const startTime = Date.now();
-
     const entityChanges = sql.getRows(`SELECT * FROM entity_changes WHERE entityName = ? AND SUBSTR(entityId, 1, 1) = ?`, [entityName, sector]);

     sql.transactional(() => {
@@ -72,7 +70,7 @@ function addEntityChangesForSector(entityName, sector) {
         }
     });

-    log.info(`Added sector ${sector} of '${entityName}' (${entityChanges.length} entities) to sync queue in ${Date.now() - startTime}ms.`);
+    log.info(`Added sector ${sector} of '${entityName}' (${entityChanges.length} entities) to the sync queue.`);
 }

 function cleanupEntityChangesForMissingEntities(entityName, entityPrimaryKey) {
@@ -103,39 +101,34 @@ function fillEntityChanges(entityName, entityPrimaryKey, condition = '') {
             createdCount++;

-            let hash;
-            let utcDateChanged;
-            let isSynced;
+            const ec = {
+                entityName,
+                entityId,
+                isErased: false
+            };

             if (entityName === 'blobs') {
                 const blob = sql.getRow("SELECT blobId, content, utcDateModified FROM blobs WHERE blobId = ?", [entityId]);
-                hash = blobService.calculateContentHash(blob);
-                utcDateChanged = blob.utcDateModified;
-                isSynced = true; // blobs are always synced
+                ec.hash = blobService.calculateContentHash(blob);
+                ec.utcDateChanged = blob.utcDateModified;
+                ec.isSynced = true; // blobs are always synced
             } else {
                 const entity = becca.getEntity(entityName, entityId);

                 if (entity) {
-                    hash = entity?.generateHash() || "|deleted";
-                    utcDateChanged = entity?.getUtcDateChanged() || dateUtils.utcNowDateTime();
-                    isSynced = entityName !== 'options' || !!entity?.isSynced;
+                    ec.hash = entity.generateHash() || "|deleted";
+                    ec.utcDateChanged = entity.getUtcDateChanged() || dateUtils.utcNowDateTime();
+                    ec.isSynced = entityName !== 'options' || !!entity.isSynced;
                 } else {
                     // entity might be null (not present in becca) when it's deleted
                     // FIXME: hacky, not sure if it might cause some problems
-                    hash = "deleted";
-                    utcDateChanged = dateUtils.utcNowDateTime();
-                    isSynced = true; // deletable (the ones with isDeleted) entities are synced
+                    ec.hash = "deleted";
+                    ec.utcDateChanged = dateUtils.utcNowDateTime();
+                    ec.isSynced = true; // deletable (the ones with isDeleted) entities are synced
                 }
             }

-            addEntityChange({
-                entityName,
-                entityId,
-                hash: hash,
-                isErased: false,
-                utcDateChanged: utcDateChanged,
-                isSynced: isSynced
-            });
+            addEntityChange(ec);
         }

         if (createdCount > 0) {
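The refactor above replaces three loose locals (hash, utcDateChanged, isSynced) with a single ec object that each branch fills and addEntityChange receives once. Illustrative shape of such a record, with made-up values:

    // illustrative only; field names come from the diff, values are invented
    const ec = {
        entityName: 'notes',
        entityId: 'abc123xyz0',
        isErased: false,
        hash: 'Fk3...',                        // entity.generateHash() or the blob content hash
        utcDateChanged: '2023-05-01 12:00:00', // change time, or "now" for deleted entities
        isSynced: true                         // false only for non-synced options
    };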
@@ -37,6 +37,7 @@ function eraseNotes(noteIdsToErase) {
 function setEntityChangesAsErased(entityChanges) {
     for (const ec of entityChanges) {
         ec.isErased = true;
+        ec.utcDateChanged = dateUtils.utcNowDateTime();

         entityChangesService.addEntityChange(ec);
     }
@@ -3,6 +3,7 @@
 const log = require('./log');
 const sql = require('./sql');
 const protectedSessionService = require("./protected_session");
+const dateUtils = require("./date_utils");

 /**
  * @param {BNote} note
@@ -40,7 +41,7 @@ function eraseRevisions(revisionIdsToErase) {
     log.info(`Removing note revisions: ${JSON.stringify(revisionIdsToErase)}`);

     sql.executeMany(`DELETE FROM revisions WHERE revisionId IN (???)`, revisionIdsToErase);
-    sql.executeMany(`UPDATE entity_changes SET isErased = 1 WHERE entityName = 'revisions' AND entityId IN (???)`, revisionIdsToErase);
+    sql.executeMany(`UPDATE entity_changes SET isErased = 1, utcDateChanged = '${dateUtils.utcNowDateTime()}' WHERE entityName = 'revisions' AND entityId IN (???)`, revisionIdsToErase);
 }

 module.exports = {
@@ -4,7 +4,7 @@ const log = require('./log');
 const sql = require('./sql');
 const optionService = require('./options');
 const utils = require('./utils');
-const instanceId = require('./member_id');
+const instanceId = require('./instance_id');
 const dateUtils = require('./date_utils');
 const syncUpdateService = require('./sync_update');
 const contentHashService = require('./content_hash');
@@ -54,6 +54,7 @@ async function sync() {
         });
     }
     catch (e) {
+        // we're dynamically switching whether we're using proxy or not based on whether we encountered error with the current method
        proxyToggle = !proxyToggle;

         if (e.message?.includes('ECONNREFUSED') ||
@@ -107,7 +108,7 @@ async function doLogin() {
     });

     if (resp.instanceId === instanceId) {
-        throw new Error(`Sync server has member ID '${resp.instanceId}' which is also local. This usually happens when the sync client is (mis)configured to sync with itself (URL points back to client) instead of the correct sync server.`);
+        throw new Error(`Sync server has instance ID '${resp.instanceId}' which is also local. This usually happens when the sync client is (mis)configured to sync with itself (URL points back to client) instead of the correct sync server.`);
     }

     syncContext.instanceId = resp.instanceId;
@@ -253,7 +254,7 @@ async function checkContentHash(syncContext) {
     const failedChecks = contentHashService.checkContentHashes(resp.entityHashes);

     if (failedChecks.length > 0) {
-        // before requeuing sectors, make sure the entity changes are correct
+        // before re-queuing sectors, make sure the entity changes are correct
         const consistencyChecks = require("./consistency_checks");
         consistencyChecks.runEntityChangesChecks();
@@ -350,7 +351,8 @@ function getEntityChangeRecords(entityChanges) {
         length += JSON.stringify(record).length;

-        if (length > 1000000) {
+        if (length > 1_000_000) {
+            // each sync request/response should have at most ~1 MB.
             break;
         }
     }
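getEntityChangeRecords caps each sync payload at roughly 1 MB of serialized JSON: the running length of the stringified records is tracked and the batch is cut once it crosses the threshold, leaving the rest for the next request. A standalone sketch of that batching idea; names and the record source are illustrative:

    // sketch of size-capped batching as in getEntityChangeRecords (illustrative names)
    function takeBatch(records, maxBytes = 1_000_000) {
        const batch = [];
        let length = 0;

        for (const record of records) {
            batch.push(record);
            length += JSON.stringify(record).length;

            if (length > maxBytes) {
                break; // keep each sync request/response at roughly 1 MB
            }
        }

        return batch;
    }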
@@ -4,98 +4,83 @@ const entityChangesService = require('./entity_changes');
 const eventService = require('./events');
 const entityConstructor = require("../becca/entity_constructor");

-function updateEntity(entityChange, entityRow, instanceId) {
-    // can be undefined for options with isSynced=false
-    if (!entityRow) {
-        if (entityChange.isSynced) {
-            if (entityChange.isErased) {
-                eraseEntity(entityChange, instanceId);
-            }
-            else {
-                log.info(`Encountered synced non-erased entity change without entity: ${JSON.stringify(entityChange)}`);
-            }
-        }
-        else if (entityChange.entityName !== 'options') {
-            log.info(`Encountered unsynced non-option entity change without entity: ${JSON.stringify(entityChange)}`);
-        }
-
-        return;
+function updateEntity(remoteEC, remoteEntityRow, instanceId) {
+    if (!remoteEntityRow && remoteEC.entityName === 'options') {
+        return; // can be undefined for options with isSynced=false
     }

-    const updated = entityChange.entityName === 'note_reordering'
-        ? updateNoteReordering(entityChange, entityRow, instanceId)
-        : updateNormalEntity(entityChange, entityRow, instanceId);
+    const updated = remoteEC.entityName === 'note_reordering'
+        ? updateNoteReordering(remoteEC, remoteEntityRow, instanceId)
+        : updateNormalEntity(remoteEC, remoteEntityRow, instanceId);

     if (updated) {
-        if (entityRow.isDeleted) {
+        if (remoteEntityRow?.isDeleted) {
             eventService.emit(eventService.ENTITY_DELETE_SYNCED, {
-                entityName: entityChange.entityName,
-                entityId: entityChange.entityId
+                entityName: remoteEC.entityName,
+                entityId: remoteEC.entityId
             });
         }
-        else if (!entityChange.isErased) {
+        else if (!remoteEC.isErased) {
             eventService.emit(eventService.ENTITY_CHANGE_SYNCED, {
-                entityName: entityChange.entityName,
-                entityRow
+                entityName: remoteEC.entityName,
+                entityRow: remoteEntityRow
             });
         }
     }
 }
-function updateNormalEntity(remoteEntityChange, remoteEntityRow, instanceId) {
-    const localEntityChange = sql.getRow(`
-        SELECT utcDateChanged, hash, isErased
-        FROM entity_changes
-        WHERE entityName = ? AND entityId = ?`, [remoteEntityChange.entityName, remoteEntityChange.entityId]);
+function updateNormalEntity(remoteEC, remoteEntityRow, instanceId) {
+    const localEC = sql.getRow(`SELECT * FROM entity_changes WHERE entityName = ? AND entityId = ?`, [remoteEC.entityName, remoteEC.entityId]);

-    if (localEntityChange && !localEntityChange.isErased && remoteEntityChange.isErased) {
-        sql.transactional(() => {
-            const primaryKey = entityConstructor.getEntityFromEntityName(remoteEntityChange.entityName).primaryKeyName;
-
-            sql.execute(`DELETE FROM ${remoteEntityChange.entityName} WHERE ${primaryKey} = ?`, remoteEntityChange.entityId);
-
-            entityChangesService.addEntityChangeWithInstanceId(remoteEntityChange, instanceId);
-        });
+    if (!localEC?.isErased && remoteEC.isErased) {
+        eraseEntity(remoteEC, instanceId);

         return true;
+    } else if (localEC?.isErased && !remoteEC.isErased) {
+        // on this side, we can't unerase the entity, so force the entity to be erased on the other side.
+        entityChangesService.addEntityChangeWithInstanceId(localEC, null);
+
+        return false;
     }

-    if (!localEntityChange
-        || localEntityChange.utcDateChanged < remoteEntityChange.utcDateChanged
-        || localEntityChange.hash !== remoteEntityChange.hash // sync error, we should still update
+    if (!localEC
+        || localEC.utcDateChanged < remoteEC.utcDateChanged
+        || (localEC.utcDateChanged === remoteEC.utcDateChanged && localEC.hash !== remoteEC.hash) // sync error, we should still update
     ) {
-        if (remoteEntityChange.entityName === 'blobs') {
+        if (remoteEC.entityName === 'blobs' && remoteEntityRow.content !== null) {
             // we always use a Buffer object which is different from normal saving - there we use a simple string type for
             // "string notes". The problem is that in general, it's not possible to detect whether a blob content
             // is string note or note (syncs can arrive out of order)
-            remoteEntityRow.content = remoteEntityRow.content === null ? null : Buffer.from(remoteEntityRow.content, 'base64');
+            remoteEntityRow.content = Buffer.from(remoteEntityRow.content, 'base64');

-            if (remoteEntityRow.content?.byteLength === 0) {
+            if (remoteEntityRow.content.byteLength === 0) {
                 // there seems to be a bug which causes empty buffer to be stored as NULL which is then picked up as inconsistency
                 // (possibly not a problem anymore with the newer better-sqlite3)
                 remoteEntityRow.content = "";
             }
         }

-        sql.transactional(() => {
-            sql.replace(remoteEntityChange.entityName, remoteEntityRow);
+        sql.replace(remoteEC.entityName, remoteEntityRow);

-            entityChangesService.addEntityChangeWithInstanceId(remoteEntityChange, instanceId);
-        });
+        entityChangesService.addEntityChangeWithInstanceId(remoteEC, instanceId);

         return true;
+    } else if (localEC.hash !== remoteEC.hash && localEC.utcDateChanged > remoteEC.utcDateChanged) {
+        // the change on our side is newer than on the other side, so the other side should update
+        entityChangesService.addEntityChangeWithInstanceId(localEC, null);
+
+        return false;
     }

     return false;
 }
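The rewritten updateNormalEntity compares the local and remote entity_changes rows: a remote erase is applied locally, a local erase that the remote side lacks is pushed back (an entity cannot be "unerased"), otherwise the row with the newer utcDateChanged wins, and an equal timestamp with a differing hash is treated as a sync error that still forces an update. A condensed sketch of that decision table, simplified for illustration (applyRemote/pushLocalBack stand in for the real side effects):

    // simplified conflict-resolution logic; not the actual function
    function resolve(localEC, remoteEC, applyRemote, pushLocalBack) {
        if (!localEC?.isErased && remoteEC.isErased) {
            applyRemote();   // remote erase wins, erase locally
            return true;
        }
        if (localEC?.isErased && !remoteEC.isErased) {
            pushLocalBack(); // can't unerase; make the other side erase too
            return false;
        }
        if (!localEC
            || localEC.utcDateChanged < remoteEC.utcDateChanged
            || (localEC.utcDateChanged === remoteEC.utcDateChanged && localEC.hash !== remoteEC.hash)) {
            applyRemote();   // remote change is newer, or same-time hash mismatch
            return true;
        }
        if (localEC.hash !== remoteEC.hash && localEC.utcDateChanged > remoteEC.utcDateChanged) {
            pushLocalBack(); // local change is newer; other side should update
            return false;
        }
        return false;        // already in sync
    }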
-function updateNoteReordering(entityChange, entity, instanceId) {
-    sql.transactional(() => {
-        for (const key in entity) {
-            sql.execute("UPDATE branches SET notePosition = ? WHERE branchId = ?", [entity[key], key]);
-        }
+function updateNoteReordering(remoteEC, remoteEntityRow, instanceId) {
+    for (const key in remoteEntityRow) {
+        sql.execute("UPDATE branches SET notePosition = ? WHERE branchId = ?", [remoteEntityRow[key], key]);
+    }

-        entityChangesService.addEntityChangeWithInstanceId(entityChange, instanceId);
-    });
+    entityChangesService.addEntityChangeWithInstanceId(remoteEC, instanceId);

     return true;
 }
@@ -109,19 +94,17 @@ function eraseEntity(entityChange, instanceId) {
         "attributes",
         "revisions",
         "attachments",
-        "blobs",
+        "blobs"
     ];

     if (!entityNames.includes(entityName)) {
-        log.error(`Cannot erase entity '${entityName}', id '${entityId}'`);
+        log.error(`Cannot erase entity '${entityName}', id '${entityId}'.`);
         return;
     }

-    const keyName = entityConstructor.getEntityFromEntityName(entityName).primaryKeyName;
+    const primaryKeyName = entityConstructor.getEntityFromEntityName(entityName).primaryKeyName;

-    sql.execute(`DELETE FROM ${entityName} WHERE ${keyName} = ?`, [entityId]);
-
-    eventService.emit(eventService.ENTITY_DELETE_SYNCED, { entityName, entityId });
+    sql.execute(`DELETE FROM ${entityName} WHERE ${primaryKeyName} = ?`, [entityId]);

     entityChangesService.addEntityChangeWithInstanceId(entityChange, instanceId);
 }