2017-10-21 21:10:33 -04:00
"use strict" ;
2017-10-25 22:39:21 -04:00
const log = require ( './log' ) ;
const sql = require ( './sql' ) ;
2018-04-01 21:27:46 -04:00
const optionService = require ( './options' ) ;
2017-10-26 21:16:21 -04:00
const utils = require ( './utils' ) ;
2022-01-09 21:25:15 +01:00
const instanceId = require ( './member_id' ) ;
2018-04-02 20:46:46 -04:00
const dateUtils = require ( './date_utils' ) ;
2018-04-01 21:27:46 -04:00
const syncUpdateService = require ( './sync_update' ) ;
const contentHashService = require ( './content_hash' ) ;
const appInfo = require ( './app_info' ) ;
2018-07-25 09:46:57 +02:00
const syncOptions = require ( './sync_options' ) ;
2018-04-01 21:27:46 -04:00
const syncMutexService = require ( './sync_mutex' ) ;
2018-03-28 23:41:22 -04:00
const cls = require ( './cls' ) ;
2018-12-17 21:34:02 +01:00
const request = require ( './request' ) ;
2019-10-27 22:13:38 +01:00
const ws = require ( './ws' ) ;
2021-06-29 22:15:57 +02:00
const entityChangesService = require ( './entity_changes' ) ;
const entityConstructor = require ( '../becca/entity_constructor' ) ;
2021-06-29 23:45:45 +02:00
const becca = require ( "../becca/becca" ) ;
2017-10-25 22:39:21 -04:00
2017-11-16 20:08:04 -05:00
// Flipped on every sync failure (see sync()'s catch block); when true,
// syncRequest() routes through the configured sync proxy, otherwise it
// connects directly.
let proxyToggle = true;

// Number of changes the sync server still has queued for this client;
// refreshed from the server response on every pull iteration.
let outstandingPullCount = 0;
2018-07-23 21:15:32 +02:00
2017-11-09 20:52:47 -05:00
/**
 * Runs one full synchronization round against the configured sync server:
 * push -> pull -> push, repeated while the content-hash check reports
 * discrepancies. Guarded by the sync mutex so only one sync runs at a time.
 *
 * @returns {Promise<{success: boolean, errorCode?: string, message?: string}>}
 */
async function sync() {
    try {
        return await syncMutexService.doExclusively(async () => {
            if (!syncOptions.isSyncSetup()) {
                return { success: false, errorCode: 'NOT_CONFIGURED', message: 'Sync not configured' };
            }

            let continueSync = false;

            do {
                const syncContext = await login();

                await pushChanges(syncContext);
                await pullChanges(syncContext);
                await pushChanges(syncContext);

                await syncFinished(syncContext);

                // a failed content-hash check queues more changes, so loop again
                continueSync = await checkContentHash(syncContext);
            } while (continueSync);

            ws.syncFinished();

            return { success: true };
        });
    }
    catch (e) {
        // alternate between direct connection and proxy for the next attempt
        proxyToggle = !proxyToggle;

        const msg = e.message;
        const isConnectionProblem = msg &&
            (msg.includes('ECONNREFUSED') ||
             msg.includes('ERR_') || // node network errors
             msg.includes('Bad Gateway'));

        if (isConnectionProblem) {
            ws.syncFailed();

            log.info("No connection to sync server.");

            return {
                success: false,
                message: "No connection to sync server."
            };
        }
        else {
            log.info(`sync failed: ${e.message}
stack: ${e.stack}`);

            ws.syncFailed();

            return {
                success: false,
                message: e.message
            };
        }
    }
}
/**
 * Ensures the sync server has the schema/seed, sending it if missing,
 * then performs the actual login handshake.
 *
 * @returns {Promise<object>} syncContext usable for subsequent syncRequest() calls
 */
async function login() {
    const setupService = require('./setup'); // circular dependency issue

    const serverInitialized = await setupService.hasSyncServerSchemaAndSeed();

    if (!serverInitialized) {
        await setupService.sendSeedToSyncServer();
    }

    return await doLogin();
}
/**
 * Authenticates against the sync server using an HMAC of the shared document
 * secret over the current timestamp, and reconciles the local pull cursor
 * with the server's max entity change ID.
 *
 * @returns {Promise<object>} syncContext holding the cookie jar and the server's instanceId
 * @throws {Error} when the server reports our own instance ID (sync with itself)
 */
async function doLogin() {
    const timestamp = dateUtils.utcNowDateTime();
    const documentSecret = optionService.getOption('documentSecret');
    const hash = utils.hmac(documentSecret, timestamp);

    const syncContext = { cookieJar: {} };

    const resp = await syncRequest(syncContext, 'POST', '/api/login/sync', {
        timestamp: timestamp,
        syncVersion: appInfo.syncVersion,
        hash: hash
    });

    if (resp.instanceId === instanceId) {
        throw new Error(`Sync server has member ID '${resp.instanceId}' which is also local. This usually happens when the sync client is (mis)configured to sync with itself (URL points back to client) instead of the correct sync server.`);
    }

    syncContext.instanceId = resp.instanceId;

    const lastSyncedPull = getLastSyncedPull();

    // this is important in a scenario where we set up the sync by manually copying the document
    // lastSyncedPull then could be pretty off for the newly cloned client
    if (lastSyncedPull > resp.maxEntityChangeId) {
        log.info(`Lowering last synced pull from ${lastSyncedPull} to ${resp.maxEntityChangeId}`);

        setLastSyncedPull(resp.maxEntityChangeId);
    }

    return syncContext;
}
2020-09-03 21:37:06 +02:00
/**
 * Pulls entity changes from the sync server in batches, applying each batch
 * inside a single transaction, until the server returns an empty batch.
 * Changes whose changeId is already recorded locally are skipped.
 *
 * @param {object} syncContext - context created by login()
 */
async function pullChanges(syncContext) {
    let atLeastOnePullApplied = false;

    while (true) {
        const lastSyncedPull = getLastSyncedPull();
        const logMarkerId = utils.randomString(10); // to easily pair sync events between client and server logs
        const changesUri = `/api/sync/changed?instanceId=${instanceId}&lastEntityChangeId=${lastSyncedPull}&logMarkerId=${logMarkerId}`;

        const startDate = Date.now();

        const resp = await syncRequest(syncContext, 'GET', changesUri);
        const {entityChanges, lastEntityChangeId} = resp;

        outstandingPullCount = resp.outstandingPullCount;

        const pulledDate = Date.now();

        sql.transactional(() => {
            for (const {entityChange, entity} of entityChanges) {
                const changeAppliedAlready = entityChange.changeId
                    && !!sql.getValue("SELECT id FROM entity_changes WHERE changeId = ?", [entityChange.changeId]);

                if (changeAppliedAlready) {
                    continue;
                }

                if (!atLeastOnePullApplied) { // send only for first
                    ws.syncPullInProgress();

                    atLeastOnePullApplied = true;
                }

                syncUpdateService.updateEntity(entityChange, entity, syncContext.instanceId);
            }

            if (lastSyncedPull !== lastEntityChangeId) {
                setLastSyncedPull(lastEntityChangeId);
            }
        });

        if (entityChanges.length === 0) {
            break;
        }

        const sizeInKb = Math.round(JSON.stringify(resp).length / 1024);

        log.info(`Sync ${logMarkerId}: Pulled ${entityChanges.length} changes in ${sizeInKb} KB, starting at entityChangeId=${lastSyncedPull} in ${pulledDate - startDate}ms and applied them in ${Date.now() - pulledDate}ms, ${outstandingPullCount} outstanding pulls`);
    }

    log.info("Finished pull");
}
2017-10-25 22:39:21 -04:00
2020-09-03 21:37:06 +02:00
/**
 * Pushes locally recorded entity changes to the sync server in batches of up
 * to 1000 rows. Changes that originated from the server's own instance are
 * filtered out (they only advance the push cursor). Loops until no synced
 * changes remain past the cursor.
 *
 * @param {object} syncContext - context created by login()
 */
async function pushChanges(syncContext) {
    let lastSyncedPush = getLastSyncedPush();

    while (true) {
        const entityChanges = sql.getRows('SELECT * FROM entity_changes WHERE isSynced = 1 AND id > ? LIMIT 1000', [lastSyncedPush]);

        if (entityChanges.length === 0) {
            log.info("Nothing to push");

            break;
        }

        const filteredEntityChanges = entityChanges.filter(entityChange => {
            if (entityChange.instanceId === syncContext.instanceId) {
                // this may set lastSyncedPush beyond what's actually sent (because of size limit)
                // so this is applied to the database only if there's no actual update
                lastSyncedPush = entityChange.id;

                return false;
            }

            return true;
        });

        if (filteredEntityChanges.length === 0) {
            // there still might be more sync changes (because of batch limit), just all the current batch
            // has been filtered out
            setLastSyncedPush(lastSyncedPush);

            continue;
        }

        const entityChangesRecords = getEntityChangeRecords(filteredEntityChanges);
        const startDate = new Date();

        const logMarkerId = utils.randomString(10); // to easily pair sync events between client and server logs

        await syncRequest(syncContext, 'PUT', `/api/sync/update?logMarkerId=${logMarkerId}`, {
            entities: entityChangesRecords,
            instanceId
        });

        ws.syncPushInProgress();

        log.info(`Sync ${logMarkerId}: Pushing ${entityChangesRecords.length} sync changes in ${Date.now() - startDate.getTime()}ms`);

        lastSyncedPush = entityChangesRecords[entityChangesRecords.length - 1].entityChange.id;

        setLastSyncedPush(lastSyncedPush);
    }
}
2017-10-28 22:17:00 -04:00
2018-07-24 20:35:03 +02:00
// Notifies the sync server that this client's sync round has completed.
async function syncFinished(syncContext) {
    await syncRequest(syncContext, 'POST', '/api/sync/finished');
}
2017-11-21 22:11:27 -05:00
/**
 * Compares per-sector content hashes with the sync server to detect divergent
 * data. Skips the check when pulls or pushes are still outstanding. For each
 * failed sector, queues that sector for re-sync on both sides.
 *
 * @param {object} syncContext - context created by login()
 * @returns {Promise<boolean>} true when sync should run another round
 */
async function checkContentHash(syncContext) {
    const resp = await syncRequest(syncContext, 'GET', '/api/sync/check');
    const lastSyncedPullId = getLastSyncedPull();

    if (lastSyncedPullId < resp.maxEntityChangeId) {
        log.info(`There are some outstanding pulls (${lastSyncedPullId} vs. ${resp.maxEntityChangeId}), skipping content check.`);

        return true;
    }

    const notPushedSyncs = sql.getValue("SELECT EXISTS(SELECT 1 FROM entity_changes WHERE isSynced = 1 AND id > ?)", [getLastSyncedPush()]);

    if (notPushedSyncs) {
        log.info(`There's ${notPushedSyncs} outstanding pushes, skipping content check.`);

        return true;
    }

    const failedChecks = contentHashService.checkContentHashes(resp.entityHashes);

    if (failedChecks.length > 0) {
        // before requeuing sectors, make sure the entity changes are correct
        const consistencyChecks = require("./consistency_checks");
        consistencyChecks.runEntityChangesChecks();

        await syncRequest(syncContext, 'POST', `/api/sync/check-entity-changes`);
    }

    for (const {entityName, sector} of failedChecks) {
        entityChangesService.addEntityChangesForSector(entityName, sector);

        await syncRequest(syncContext, 'POST', `/api/sync/queue-sector/${entityName}/${sector}`);
    }

    return failedChecks.length > 0;
}
2021-01-14 23:22:13 +01:00
// maximum number of characters of the serialized body sent per request page
const PAGE_SIZE = 1000000;

/**
 * Executes an HTTP request against the configured sync server. Large bodies
 * are split into pages of PAGE_SIZE characters which are sent sequentially
 * under a shared requestId; the response of the last page is returned.
 *
 * @param {object} syncContext - holds the cookieJar for session continuity
 * @param {string} method - HTTP method ('GET', 'POST', 'PUT', ...)
 * @param {string} requestPath - path appended to the sync server host
 * @param {object} [body] - payload, JSON-serialized before paging
 * @returns {Promise<object>} response of the final page request
 */
async function syncRequest(syncContext, method, requestPath, body) {
    body = body ? JSON.stringify(body) : '';

    const timeout = syncOptions.getSyncTimeout();

    let response;

    const requestId = utils.randomString(10);
    // at least one page is always sent, even for an empty body
    const pageCount = Math.max(1, Math.ceil(body.length / PAGE_SIZE));

    for (let pageIndex = 0; pageIndex < pageCount; pageIndex++) {
        const opts = {
            method,
            url: syncOptions.getSyncServerHost() + requestPath,
            cookieJar: syncContext.cookieJar,
            timeout: timeout,
            paging: {
                pageIndex,
                pageCount,
                requestId
            },
            // slice() clamps the end index to the string length, replacing the
            // deprecated substr() + manual Math.min() length computation
            body: body.slice(pageIndex * PAGE_SIZE, (pageIndex + 1) * PAGE_SIZE),
            proxy: proxyToggle ? syncOptions.getSyncProxy() : null
        };

        response = await utils.timeLimit(request.exec(opts), timeout);
    }

    return response;
}
2020-08-02 23:27:48 +02:00
/**
 * Loads the current database row for a given entity change. For the virtual
 * 'note_reordering' entity it returns a branchId -> notePosition map instead
 * of a row. Blob content is encoded to base64 for transport.
 *
 * @param {string} entityName - table name (or 'note_reordering')
 * @param {string} entityId - primary key value of the entity
 * @returns {object} entity row (or position map for note_reordering)
 * @throws {Error} for unknown entity names or missing rows
 */
function getEntityChangeRow(entityName, entityId) {
    if (entityName === 'note_reordering') {
        return sql.getMap("SELECT branchId, notePosition FROM branches WHERE parentNoteId = ? AND isDeleted = 0", [entityId]);
    }

    const primaryKey = entityConstructor.getEntityFromEntityName(entityName).primaryKeyName;

    if (!primaryKey) {
        throw new Error(`Unknown entity '${entityName}'`);
    }

    const entityRow = sql.getRow(`SELECT * FROM ${entityName} WHERE ${primaryKey} = ?`, [entityId]);

    if (!entityRow) {
        throw new Error(`Entity ${entityName} '${entityId}' not found.`);
    }

    if (entityName === 'blobs' && entityRow.content !== null) {
        // content may come back as a string; normalize to a Buffer first
        if (typeof entityRow.content === 'string') {
            entityRow.content = Buffer.from(entityRow.content, 'utf-8');
        }

        entityRow.content = entityRow.content.toString("base64");
    }

    return entityRow;
}
2022-01-09 21:25:15 +01:00
/**
 * Builds transport records ({entityChange, entity}) for the given entity
 * changes. Erased changes carry no entity payload. Stops adding records once
 * the accumulated serialized size exceeds ~1 MB, so callers may receive a
 * truncated batch.
 *
 * @param {object[]} entityChanges - rows from the entity_changes table
 * @returns {object[]} records ready to be pushed to the sync server
 */
function getEntityChangeRecords(entityChanges) {
    const records = [];
    let serializedLength = 0;

    for (const entityChange of entityChanges) {
        if (entityChange.isErased) {
            // erased entities have no content to load
            records.push({entityChange});

            continue;
        }

        const entity = getEntityChangeRow(entityChange.entityName, entityChange.entityId);
        const record = {entityChange, entity};

        records.push(record);

        serializedLength += JSON.stringify(record).length;

        if (serializedLength > 1000000) {
            // size cap reached - remaining changes go into the next batch
            break;
        }
    }

    return records;
}
2020-06-20 12:31:38 +02:00
/**
 * Returns the entity change ID up to which changes have been pulled from the
 * sync server, as stored in the 'lastSyncedPull' option.
 *
 * @returns {number}
 */
function getLastSyncedPull() {
    // explicit radix - the option value is stored as a decimal string
    return parseInt(optionService.getOption('lastSyncedPull'), 10);
}
2020-08-02 23:27:48 +02:00
/**
 * Persists the pull cursor (last pulled entity change ID) into the
 * 'lastSyncedPull' option, updating both the becca cache and the database.
 *
 * @param {number} entityChangeId
 */
function setLastSyncedPull(entityChangeId) {
    const lastSyncedPullOption = becca.getOption('lastSyncedPull');

    // might be null in initial sync when becca is not loaded
    if (lastSyncedPullOption) {
        lastSyncedPullOption.value = `${entityChangeId}`;
    }

    // this way we avoid updating entity_changes which otherwise means that we've never pushed all entity_changes
    sql.execute("UPDATE options SET value = ? WHERE name = ?", [entityChangeId, 'lastSyncedPull']);
}
2020-06-20 12:31:38 +02:00
/**
 * Returns the entity change ID up to which local changes have been pushed to
 * the sync server, and mirrors the value to connected websocket clients.
 *
 * @returns {number}
 */
function getLastSyncedPush() {
    // explicit radix - the option value is stored as a decimal string
    const lastSyncedPush = parseInt(optionService.getOption('lastSyncedPush'), 10);

    ws.setLastSyncedPush(lastSyncedPush);

    return lastSyncedPush;
}
2020-08-27 22:03:56 +02:00
/**
 * Persists the push cursor (last pushed entity change ID) into the
 * 'lastSyncedPush' option, mirrors it to websocket clients, and updates both
 * the becca cache and the database.
 *
 * @param {number} entityChangeId
 */
function setLastSyncedPush(entityChangeId) {
    ws.setLastSyncedPush(entityChangeId);

    const lastSyncedPushOption = becca.getOption('lastSyncedPush');

    // might be null in initial sync when becca is not loaded
    if (lastSyncedPushOption) {
        lastSyncedPushOption.value = `${entityChangeId}`;
    }

    // this way we avoid updating entity_changes which otherwise means that we've never pushed all entity_changes
    sql.execute("UPDATE options SET value = ? WHERE name = ?", [entityChangeId, 'lastSyncedPush']);
}
2020-08-02 23:27:48 +02:00
// Returns the highest local entity change ID, or 0 when there are none.
function getMaxEntityChangeId() {
    return sql.getValue('SELECT COALESCE(MAX(id), 0) FROM entity_changes');
}
2020-11-18 22:30:00 +01:00
// Returns the server-reported number of changes still waiting to be pulled
// (refreshed by pullChanges()).
function getOutstandingPullCount() {
    return outstandingPullCount;
}
2021-06-29 22:15:57 +02:00
// Schedule syncing once becca (the in-memory entity cache) is loaded:
// a recurring sync every minute plus an initial one shortly after startup.
require("../becca/becca_loader").beccaLoaded.then(() => {
    setInterval(cls.wrap(sync), 60000);

    // kickoff initial sync immediately, but should happen after initial consistency checks
    setTimeout(cls.wrap(sync), 5000);

    // called just so ws.setLastSyncedPush() is called
    getLastSyncedPush();
});
2021-03-21 22:43:41 +01:00
2017-10-26 21:16:21 -04:00
module . exports = {
2018-02-18 22:55:36 -05:00
sync ,
2018-07-23 10:29:17 +02:00
login ,
2022-01-09 21:25:15 +01:00
getEntityChangeRecords ,
2020-11-18 22:30:00 +01:00
getOutstandingPullCount ,
2020-08-02 23:27:48 +02:00
getMaxEntityChangeId
2020-06-13 10:23:36 +02:00
} ;