mirror of https://github.com/TriliumNext/Notes.git (synced 2025-10-31 21:11:30 +08:00)

	implemented sync hash check recovery process
parent 77311954a1
commit d47403c0e7
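
This commit makes the sync content-hash check self-recovering. Judging from the requires and function names, the hunks below touch the sync API route handler, the route registration table, and the content_hash, sync, sync_table and utils services: a failed hash check no longer just logs the mismatch, each failed sector is re-queued for sync locally and, through the new /api/sync/queue-sector route, on the remote side as well, and the whole push/pull cycle repeats until the check passes. The following sketch only illustrates that control flow; the helper functions are stubs standing in for the real ones in the sync service, and the dependency-injection style is an assumption of this sketch, not part of the commit.

// Illustrative sketch of the recovery loop introduced below (helpers are stubs).
async function syncWithRecovery({ login, pushSync, pullSync, syncFinished, checkContentHash }) {
    let continueSync = false;

    do {
        const syncContext = await login();

        await pushSync(syncContext);      // push local changes
        await pullSync(syncContext);      // pull remote changes
        await pushSync(syncContext);      // push anything generated while pulling
        await syncFinished(syncContext);

        // checkContentHash() now reports true when some sectors failed the check
        // (or when outstanding pushes/pulls made verification impossible),
        // which triggers another full round.
        continueSync = await checkContentHash(syncContext);
    } while (continueSync);

    return { success: true };
}
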
@@ -10,6 +10,8 @@ const contentHashService = require('../../services/content_hash');
 const log = require('../../services/log');
 const syncOptions = require('../../services/sync_options');
 const dateUtils = require('../../services/date_utils');
+const entityConstructor = require('../../entities/entity_constructor');
+const utils = require('../../services/utils');

 async function testSync() {
     try {
@@ -47,7 +49,7 @@ async function getStats() {

 async function checkSync() {
     return {
-        hashes: await contentHashService.getHashes(),
+        entityHashes: await contentHashService.getEntityHashes(),
         maxSyncId: await sql.getValue('SELECT MAX(id) FROM sync')
     };
 }
@@ -137,6 +139,15 @@ async function syncFinished() {
     await sqlInit.dbInitialized();
 }

+async function queueSector(req) {
+    const entityName = utils.sanitizeSqlIdentifier(req.params.entityName);
+    const sector = utils.sanitizeSqlIdentifier(req.params.sector);
+
+    const entityPrimaryKey = entityConstructor.getEntityFromEntityName(entityName).primaryKeyName;
+
+    await syncTableService.addEntitySyncsForSector(entityName, entityPrimaryKey, sector);
+}
+
 module.exports = {
     testSync,
     checkSync,
@@ -147,5 +158,6 @@ module.exports = {
     getChanged,
     update,
     getStats,
-    syncFinished
+    syncFinished,
+    queueSector
 };

@@ -199,6 +199,7 @@ function register(app) {
     route(GET, '/api/sync/changed', [auth.checkApiAuth], syncApiRoute.getChanged, apiResultHandler);
     route(PUT, '/api/sync/update', [auth.checkApiAuth], syncApiRoute.update, apiResultHandler);
     route(POST, '/api/sync/finished', [auth.checkApiAuth], syncApiRoute.syncFinished, apiResultHandler);
+    route(POST, '/api/sync/queue-sector/:entityName/:sector', [auth.checkApiAuth], syncApiRoute.queueSector, apiResultHandler);
     route(GET, '/api/sync/stats', [], syncApiRoute.getStats, apiResultHandler);

     apiRoute(POST, '/api/recent-notes', recentNotesRoute.addRecentNote);

@@ -3,7 +3,6 @@
 const sql = require('./sql');
 const utils = require('./utils');
 const log = require('./log');
-const ws = require('./ws.js');
 const ApiToken = require('../entities/api_token');
 const Branch = require('../entities/branch');
 const Note = require('../entities/note');
@@ -70,9 +69,11 @@ async function checkContentHashes(otherHashes) {
     if (failedChecks.length === 0) {
         log.info("Content hash checks PASSED");
     }
+
+    return failedChecks;
 }

 module.exports = {
-    getHashes: getEntityHashes,
+    getEntityHashes,
     checkContentHashes
 };

@@ -15,6 +15,8 @@ const syncMutexService = require('./sync_mutex');
 const cls = require('./cls');
 const request = require('./request');
 const ws = require('./ws');
+const syncTableService = require('./sync_table');
+const entityConstructor = require('../entities/entity_constructor');

 let proxyToggle = true;

@@ -30,17 +32,22 @@ async function sync() {
                 return { success: false, message: 'Sync not configured' };
             }

-            const syncContext = await login();
+            let continueSync = false;

-            await pushSync(syncContext);
+            do {
+                const syncContext = await login();

-            await pullSync(syncContext);
+                await pushSync(syncContext);

-            await pushSync(syncContext);
+                await pullSync(syncContext);

-            await syncFinished(syncContext);
+                await pushSync(syncContext);

-            await checkContentHash(syncContext);
+                await syncFinished(syncContext);
+
+                continueSync = await checkContentHash(syncContext);
+            }
+            while (continueSync);

             return {
                 success: true
@@ -225,7 +232,7 @@ async function checkContentHash(syncContext) {
     if (await getLastSyncedPull() < resp.maxSyncId) {
         log.info("There are some outstanding pulls, skipping content check.");

-        return;
+        return true;
     }

     const notPushedSyncs = await sql.getValue("SELECT EXISTS(SELECT 1 FROM sync WHERE id > ?)", [await getLastSyncedPush()]);
@@ -233,10 +240,20 @@ async function checkContentHash(syncContext) {
     if (notPushedSyncs) {
         log.info(`There's ${notPushedSyncs} outstanding pushes, skipping content check.`);

-        return;
+        return true;
     }

-    await contentHashService.checkContentHashes(resp.hashes);
+    const failedChecks = await contentHashService.checkContentHashes(resp.entityHashes);
+
+    for (const {entityName, sector} of failedChecks) {
+        const entityPrimaryKey = entityConstructor.getEntityFromEntityName(entityName).primaryKeyName;
+
+        await syncTableService.addEntitySyncsForSector(entityName, entityPrimaryKey, sector);
+
+        await syncRequest(syncContext, 'POST', `/api/sync/queue-sector/${entityName}/${sector}`);
+    }
+
+    return failedChecks.length > 0;
 }

 async function syncRequest(syncContext, method, requestPath, body) {

@@ -6,7 +6,7 @@ const cls = require('./cls');

 let syncs = [];

-async function addEntitySync(entityName, entityId, sourceId) {
+async function insertEntitySync(entityName, entityId, sourceId) {
     const sync = {
         entityName: entityName,
         entityId: entityId,
@@ -16,11 +16,25 @@ async function addEntitySync(entityName, entityId, sourceId) {

     sync.id = await sql.replace("sync", sync);

+    return sync;
+}
+
+async function addEntitySync(entityName, entityId, sourceId) {
+    const sync = await insertEntitySync(entityName, entityId, sourceId);
+
     syncs.push(sync);

     setTimeout(() => require('./ws').sendPingToAllClients(), 50);
 }

+async function addEntitySyncsForSector(entityName, entityPrimaryKey, sector) {
+    const entityIds = await sql.getColumn(`SELECT ${entityPrimaryKey} FROM ${entityName} WHERE SUBSTR(${entityPrimaryKey}, 1, 1) = ?`, [sector]);
+
+    for (const entityId of entityIds) {
+        await insertEntitySync(entityName, entityId, 'content-check');
+    }
+}
+
 function getMaxSyncId() {
     return syncs.length === 0 ? 0 : syncs[syncs.length - 1].id;
 }
@@ -29,19 +43,19 @@ function getEntitySyncsNewerThan(syncId) {
     return syncs.filter(s => s.id > syncId);
 }

-async function cleanupSyncRowsForMissingEntities(entityName, entityKey) {
+async function cleanupSyncRowsForMissingEntities(entityName, entityPrimaryKey) {
     await sql.execute(`
       DELETE
       FROM sync
       WHERE sync.entityName = '${entityName}'
-        AND sync.entityId NOT IN (SELECT ${entityKey} FROM ${entityName})`);
+        AND sync.entityId NOT IN (SELECT ${entityPrimaryKey} FROM ${entityName})`);
 }

-async function fillSyncRows(entityName, entityKey, condition = '') {
+async function fillSyncRows(entityName, entityPrimaryKey, condition = '') {
     try {
-        await cleanupSyncRowsForMissingEntities(entityName, entityKey);
+        await cleanupSyncRowsForMissingEntities(entityName, entityPrimaryKey);

-        const entityIds = await sql.getColumn(`SELECT ${entityKey} FROM ${entityName}`
+        const entityIds = await sql.getColumn(`SELECT ${entityPrimaryKey} FROM ${entityName}`
             + (condition ? ` WHERE ${condition}` : ''));

         let createdCount = 0;
@@ -69,7 +83,7 @@ async function fillSyncRows(entityName, entityKey, condition = '') {
     catch (e) {
         // this is to fix migration from 0.30 to 0.32, can be removed later
         // see https://github.com/zadam/trilium/issues/557
-        log.error(`Filling sync rows failed for ${entityName} ${entityKey} with error "${e.message}", continuing`);
+        log.error(`Filling sync rows failed for ${entityName} ${entityPrimaryKey} with error "${e.message}", continuing`);
     }
 }

@@ -101,5 +115,6 @@ module.exports = {
     addEntitySync,
     fillAllSyncRows,
     getEntitySyncsNewerThan,
-    getMaxSyncId
+    getMaxSyncId,
+    addEntitySyncsForSector
 };
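
A note on what a "sector" is here: addEntitySyncsForSector selects rows with SUBSTR(primaryKey, 1, 1) = ?, so a sector is simply the first character of an entity's ID, and each per-sector content hash covers exactly the entities whose IDs start with that character. A small standalone illustration of the grouping (the IDs are invented for the example):

// Hypothetical example of bucketing entity IDs into sectors by their first character.
const noteIds = ['aZ4kPq9w', 'a9LmXw2r', 'Qp3RtY8s'];   // invented IDs, not real data

const bySector = new Map();
for (const id of noteIds) {
    const sector = id.charAt(0);                         // 'a', 'a', 'Q'
    if (!bySector.has(sector)) {
        bySector.set(sector, []);
    }
    bySector.get(sector).push(id);
}

// If the hash for sector 'a' differs between the two sides, only the entities in
// bySector.get('a') have to be re-queued, instead of re-syncing everything.
console.log(bySector);
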
@@ -53,6 +53,10 @@ function sanitizeSql(str) {
     return str.replace(/'/g, "''");
 }

+function sanitizeSqlIdentifier(str) {
+    return str.replace(/[^A-Za-z0-9_]/g, "");
+}
+
 function prepareSqlForLike(prefix, str, suffix) {
     const value = str
         .replace(/\\/g, "\\\\")
@@ -174,6 +178,7 @@ module.exports = {
     hash,
     isEmptyOrWhitespace,
     sanitizeSql,
+    sanitizeSqlIdentifier,
     prepareSqlForLike,
     stopWatch,
     escapeHtml,
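
Because queueSector interpolates entityName and the sector into SQL (via addEntitySyncsForSector), both URL parameters are first run through the new sanitizeSqlIdentifier helper, which keeps only letters, digits and underscores. A quick usage sketch of that guard, using a standalone copy of the same regex:

// Standalone copy of the guard for illustration; only word characters survive.
const sanitizeSqlIdentifier = str => str.replace(/[^A-Za-z0-9_]/g, "");

console.log(sanitizeSqlIdentifier("notes"));                   // "notes"
console.log(sanitizeSqlIdentifier("notes; DROP TABLE notes")); // "notesDROPTABLEnotes"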