fix partial syncs
@@ -23,6 +23,7 @@ app.use(helmet({
     contentSecurityPolicy: false
 }));
 
+app.use(bodyParser.text({limit: '500mb'}));
 app.use(bodyParser.json({limit: '500mb'}));
 app.use(bodyParser.urlencoded({extended: false}));
 app.use(cookieParser());
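The new bodyParser.text middleware presumably lets the server accept the paged sync uploads introduced further down, which arrive as plain-text fragments of a JSON string rather than as complete JSON documents. A minimal standalone sketch of that setup (the /sync-page route and response shape are illustrative, not Trilium's actual API):

```js
const express = require('express');
const bodyParser = require('body-parser');

const app = express();

// Same parser configuration as in the hunk above: text for raw page fragments,
// JSON for ordinary requests, both with a generous size limit.
app.use(bodyParser.text({limit: '500mb'}));
app.use(bodyParser.json({limit: '500mb'}));

// Hypothetical route, only to show what the text parser changes.
app.post('/sync-page', (req, res) => {
    // For Content-Type: text/plain the body is the raw string; without
    // bodyParser.text it would arrive as an empty object.
    const length = typeof req.body === 'string' ? req.body.length : 0;
    res.json({receivedChars: length});
});

app.listen(3000);
```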
@@ -34,7 +34,7 @@ function getSectorHashes(tableName, primaryKeyName, whereBranch) {
 function getEntityHashes() {
     const startTime = new Date();
 
-    const hashRows = sql.getRows(`SELECT entityName, entityId, hash FROM entity_changes`);
+    const hashRows = sql.getRows(`SELECT entityName, entityId, hash FROM entity_changes WHERE isSynced = 1`);
 
     // sorting is faster in memory
     // sorting by entityId is enough, hashes will be segmented by entityName later on anyway
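Adding WHERE isSynced = 1 keeps entity changes that are never sent to other instances out of the content-hash comparison, so they can no longer produce spurious mismatches between server and client. For orientation, a rough sketch of how rows like these can be folded into per-sector hashes, where the sector is the first character of the entityId as in getSectorHashes above; the hashing details here are assumptions, not the project's exact code:

```js
const crypto = require('crypto');

// hashRows has the shape returned by the query above:
// [{entityName, entityId, hash}, ...], already filtered to isSynced = 1.
function buildSectorHashes(hashRows) {
    const sectors = {};

    for (const {entityName, entityId, hash} of hashRows) {
        const sector = entityId[0]; // "sector" = first character of the id
        sectors[entityName] = sectors[entityName] || {};
        sectors[entityName][sector] = (sectors[entityName][sector] || '') + hash;
    }

    // collapse the concatenated per-row hashes into a single hash per sector
    for (const entityName in sectors) {
        for (const sector in sectors[entityName]) {
            sectors[entityName][sector] = crypto
                .createHash('sha1')
                .update(sectors[entityName][sector])
                .digest('base64');
        }
    }

    return sectors;
}
```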
@@ -1,5 +1,4 @@
 const sql = require('./sql');
-const repository = require('./repository');
 const sourceIdService = require('./source_id');
 const dateUtils = require('./date_utils');
 const log = require('./log');
@@ -39,6 +38,7 @@ function moveEntityChangeToTop(entityName, entityId) {
 
 function addEntityChangesForSector(entityName, entityPrimaryKey, sector) {
     const startTime = Date.now();
+    const repository = require('./repository');
 
     sql.transactional(() => {
         const entityIds = sql.getColumn(`SELECT ${entityPrimaryKey} FROM ${entityName} WHERE SUBSTR(${entityPrimaryKey}, 1, 1) = ?`, [sector]);
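Together with the removed top-level require in the previous hunk, this moves the repository import to its point of use. Deferring a require like this is a common way to avoid a circular dependency, where two modules load each other and one would otherwise see a half-initialised export; whether that is the exact motivation here is an assumption. The pattern in isolation, with illustrative module names that are not files from this repository:

```js
// a.js
module.exports.doA = function () {
    // Requiring lazily means ./b is resolved only when doA() is first called,
    // by which time both modules have finished loading.
    const b = require('./b');
    return b.doB();
};
```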
@@ -253,6 +253,8 @@ async function checkContentHash(syncContext) {
     return failedChecks.length > 0;
 }
 
+const PAGE_SIZE = 1000000;
+
 async function syncRequest(syncContext, method, requestPath, body) {
     body = body ? JSON.stringify(body) : '';
 
@@ -261,7 +263,7 @@ async function syncRequest(syncContext, method, requestPath, body) {
     let response;
 
     const requestId = utils.randomString(10);
-    const pageCount = Math.min(1, Math.ceil(body.length / 1000000));
+    const pageCount = Math.max(1, Math.ceil(body.length / PAGE_SIZE));
 
     for (let pageIndex = 0; pageIndex < pageCount; pageIndex++) {
         const opts = {
@@ -274,13 +276,11 @@ async function syncRequest(syncContext, method, requestPath, body) {
                 pageCount,
                 requestId
             },
-            body,
+            body: body.substr(pageIndex * PAGE_SIZE, Math.min(PAGE_SIZE, body.length - pageIndex * PAGE_SIZE)),
             proxy: proxyToggle ? syncOptions.getSyncProxy() : null
         };
 
         response = await utils.timeLimit(request.exec(opts), timeout);
-
-        console.log("response", response);
     }
 
     return response;
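The three hunks above are the core of the partial-sync fix: the old Math.min(1, ...) evaluates to zero for an empty body and to one otherwise, so the request was either skipped or sent as a single page, and since the body was never sliced, every page carried the full payload anyway. A standalone sketch of the corrected paging arithmetic (splitIntoPages is a local helper for illustration, not part of the sync service):

```js
const PAGE_SIZE = 1000000;

function splitIntoPages(body) {
    // Math.max keeps at least one page even for an empty body; the old
    // Math.min variant could only ever yield 0 or 1 pages.
    const pageCount = Math.max(1, Math.ceil(body.length / PAGE_SIZE));
    const pages = [];

    for (let pageIndex = 0; pageIndex < pageCount; pageIndex++) {
        // substr clamps the length at the end of the string, which is what the
        // Math.min(PAGE_SIZE, body.length - pageIndex * PAGE_SIZE) guard does above.
        pages.push(body.substr(pageIndex * PAGE_SIZE, PAGE_SIZE));
    }

    return pages;
}

// The receiving side can rebuild the payload by concatenating the pages in order:
const pages = splitIntoPages('x'.repeat(2500000));
console.log(pages.length, pages.join('').length); // 3 2500000
```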
@@ -23,35 +23,36 @@ function updateEntity(entityChange, entity, sourceId) {
     }
 }
 
-function updateNormalEntity(entityChange, entity, sourceId) {
-    const {utcDateChanged, hash, isErased} = sql.getRow(`
+function updateNormalEntity(remoteEntityChange, entity, sourceId) {
+    const localEntityChange = sql.getRow(`
         SELECT utcDateChanged, hash, isErased
         FROM entity_changes 
-        WHERE entityName = ? AND entityId = ?`, [entityChange.entityName, entityChange.entityId]);
+        WHERE entityName = ? AND entityId = ?`, [remoteEntityChange.entityName, remoteEntityChange.entityId]);
 
-    if (!isErased && entityChange.isErased) {
+    if (localEntityChange && !localEntityChange.isErased && remoteEntityChange.isErased) {
         sql.transactional(() => {
             const primaryKey = entityConstructor.getEntityFromEntityName(entityName).primaryKeyName;
 
-            sql.execute(`DELETE FROM ${entityChange.entityName} WHERE ${primaryKey} = ?`, entityChange.entityId);
+            sql.execute(`DELETE FROM ${remoteEntityChange.entityName} WHERE ${primaryKey} = ?`, remoteEntityChange.entityId);
 
-            entityChangesService.addEntityChange(entityChange, sourceId);
+            entityChangesService.addEntityChange(remoteEntityChange, sourceId);
         });
 
         return true;
     }
 
-    if (utcDateChanged < entityChange.utcDateChanged
-        || hash !== entityChange.hash // sync error, we should still update
+    if (!localEntityChange
+        || localEntityChange.utcDateChanged < remoteEntityChange.utcDateChanged
+        || localEntityChange.hash !== remoteEntityChange.hash // sync error, we should still update
     ) {
-        if (['note_contents', 'note_revision_contents'].includes(entityChange.entityName)) {
+        if (['note_contents', 'note_revision_contents'].includes(remoteEntityChange.entityName)) {
             entity.content = handleContent(entity.content);
         }
 
         sql.transactional(() => {
-            sql.replace(entityChange.entityName, entity);
+            sql.replace(remoteEntityChange.entityName, entity);
 
-            entityChangesService.addEntityChange(entityChange, sourceId);
+            entityChangesService.addEntityChange(remoteEntityChange, sourceId);
         });
 
         return true;
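Renaming the variables to remoteEntityChange and localEntityChange makes the comparison explicit, and the added !localEntityChange checks cover the case where no local entity_changes row exists yet: previously the destructuring assumed a row was always found, while now a missing local record simply means the remote version is applied. A condensed sketch of the resulting decision; the names mirror the diff, but this helper is illustrative, not the module's API:

```js
// localEntityChange may be undefined when entity_changes has no matching row.
function shouldApplyRemoteChange(localEntityChange, remoteEntityChange) {
    if (!localEntityChange) {
        return true; // nothing local to compare against, remote wins
    }

    return localEntityChange.utcDateChanged < remoteEntityChange.utcDateChanged
        || localEntityChange.hash !== remoteEntityChange.hash; // hash mismatch => update anyway
}

console.log(shouldApplyRemoteChange(
    undefined,
    {utcDateChanged: '2021-01-02 10:00:00.000Z', hash: 'abc'}
)); // true

console.log(shouldApplyRemoteChange(
    {utcDateChanged: '2021-01-03 10:00:00.000Z', hash: 'abc'},
    {utcDateChanged: '2021-01-02 10:00:00.000Z', hash: 'abc'}
)); // false: local is newer and the hashes match
```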