2018-03-25 11:09:17 -04:00
import utils from './utils.js' ;
2019-10-20 10:00:18 +02:00
import toastService from "./toast.js" ;
2019-12-09 23:07:45 +01:00
import server from "./server.js" ;
2020-02-05 22:46:20 +01:00
import LoadResults from "./load_results.js" ;
import Branch from "../entities/branch.js" ;
import Attribute from "../entities/attribute.js" ;
import options from "./options.js" ;
2020-02-08 21:23:42 +01:00
import treeCache from "./tree_cache.js" ;
2020-05-02 18:19:41 +02:00
import noteAttributeCache from "./note_attribute_cache.js" ;
2017-11-28 17:52:47 -05:00
2018-03-25 21:16:57 -04:00
// handlers registered via subscribeToMessages(); each receives every parsed WS message
const messageHandlers = [];

// the active WebSocket connection (created in the startup setTimeout, reassigned on reconnect in sendPing())
let ws;

// highest entity change ID acknowledged to the backend via ping — advanced in handleMessage()
// even before the changes are processed, so the backend can keep sending updates
let lastAcceptedEntityChangeId = window.glob.maxEntityChangeIdAtLoad;
// highest entity change ID whose effects have actually been applied to the frontend caches
let lastProcessedEntityChangeId = window.glob.maxEntityChangeIdAtLoad;

// timestamp of the last received 'sync' message; sendPing() treats >30s of silence as a lost connection
let lastPingTs;

// incoming entity change rows waiting to be drained by consumeSyncData()
let syncDataQueue = [];
2018-03-25 13:08:58 -04:00
2018-03-25 11:09:17 -04:00
/**
 * Records an error in the browser console and, when the WebSocket connection
 * is open, forwards it to the backend as a 'log-error' message together with
 * the current stack trace.
 *
 * @param {string} message - error description to record
 */
function logError(message) {
    // needs to be separate from .trace()
    console.error(utils.now(), message);

    const socketIsOpen = ws && ws.readyState === 1;

    if (socketIsOpen) {
        const payload = {
            type: 'log-error',
            error: message,
            stack: new Error().stack
        };

        ws.send(JSON.stringify(payload));
    }
}
2017-12-17 13:46:18 -05:00
2020-10-12 21:05:34 +02:00
window . logError = logError ;
2018-03-25 21:16:57 -04:00
/**
 * Registers a callback that will be invoked with every parsed message
 * arriving over the WebSocket connection.
 *
 * @param {Function} messageHandler - receives the parsed message object
 */
function subscribeToMessages(messageHandler) {
    messageHandlers.push(messageHandler);
}
2019-10-20 17:49:58 +02:00
// used to serialize sync operations: non-null while a consumeSyncData() run is in flight,
// so concurrent handleMessage() calls await it instead of starting a second consumer
let consumeQueuePromise = null;

// to make sure each change event is processed only once. Not clear if this is still necessary
const processedEntityChangeIds = new Set();
2020-05-14 13:08:06 +02:00
2020-12-14 14:17:51 +01:00
/**
 * Debug-logs the interesting subset of incoming entity changes, skipping rows
 * that were already processed as well as the noisy recent_notes and openTabs
 * option updates.
 *
 * @param {Array} entityChanges - raw change rows received from the server
 */
function logRows(entityChanges) {
    const isNoise = (row) =>
        processedEntityChangeIds.has(row.id)
        || row.entityName === 'recent_notes'
        || (row.entityName === 'options' && row.entityId === 'openTabs');

    const interestingRows = entityChanges.filter(row => !isNoise(row));

    if (interestingRows.length > 0) {
        console.debug(utils.now(), "Sync data: ", interestingRows);
    }
}
2020-05-12 13:40:42 +02:00
2019-10-20 17:49:58 +02:00
/**
 * Entry point for every WebSocket message. Fans the parsed message out to all
 * subscribers, then for 'sync' messages queues the entity changes and makes
 * sure exactly one consumeSyncData() run processes the queue at a time.
 * Failure notifications from the backend are surfaced as error toasts.
 *
 * @param {MessageEvent} event - raw WebSocket message event with JSON payload
 */
async function handleMessage(event) {
    const message = JSON.parse(event.data);

    // every subscriber sees every message, regardless of type
    for (const messageHandler of messageHandlers) {
        messageHandler(message);
    }

    if (message.type === 'sync') {
        let entityChanges = message.data;
        // any sync message counts as a sign of a live connection
        lastPingTs = Date.now();

        if (entityChanges.length > 0) {
            logRows(entityChanges);

            syncDataQueue.push(...entityChanges);

            // we set lastAcceptedEntityChangeId even before sync processing and send ping so that backend can start sending more updates
            lastAcceptedEntityChangeId = Math.max(lastAcceptedEntityChangeId, entityChanges[entityChanges.length - 1].id);

            sendPing();

            // first wait for all the preceding consumers to finish
            while (consumeQueuePromise) {
                await consumeQueuePromise;
            }

            try {
                // it's my turn so start it up
                consumeQueuePromise = consumeSyncData();

                await consumeQueuePromise;
            }
            finally {
                // finish and set to null to signal somebody else can pick it up
                consumeQueuePromise = null;
            }
        }
    }
    else if (message.type === 'sync-hash-check-failed') {
        toastService.showError("Sync check failed!", 60000);
    }
    else if (message.type === 'consistency-checks-failed') {
        toastService.showError("Consistency checks failed! See logs for details.", 50 * 60000);
    }
}
2020-08-02 23:27:48 +02:00
let entityChangeIdReachedListeners = [ ] ;
2019-10-20 17:49:58 +02:00
2020-08-02 23:27:48 +02:00
/**
 * Returns a promise that resolves once the frontend has processed entity
 * changes up to (at least) the given change ID. Resolves immediately when
 * that ID has already been processed.
 *
 * @param {number} desiredEntityChangeId - change ID to wait for
 * @returns {Promise<void>}
 */
function waitForEntityChangeId(desiredEntityChangeId) {
    const alreadyProcessed = desiredEntityChangeId <= lastProcessedEntityChangeId;

    if (alreadyProcessed) {
        return Promise.resolve();
    }

    console.debug("Waiting for", desiredEntityChangeId, 'current is', lastProcessedEntityChangeId);

    return new Promise((resolve) => {
        entityChangeIdReachedListeners.push({
            desiredEntityChangeId,
            resolvePromise: resolve,
            start: Date.now()
        });
    });
}
2020-08-02 23:27:48 +02:00
/**
 * Waits until the highest entity change ID currently known to the `server`
 * service has been processed by the frontend.
 *
 * @returns {Promise<void>}
 */
function waitForMaxKnownEntityChangeId() {
    const maxKnownId = server.getMaxKnownEntityChangeId();

    return waitForEntityChangeId(maxKnownId);
}
2020-08-02 23:27:48 +02:00
/**
 * Resolves all pending waiters whose desired entity change ID has now been
 * processed, removes them from the listener list, and logs a diagnostic for
 * any listener that has been waiting for more than a minute.
 */
function checkEntityChangeIdListeners() {
    // resolve every listener whose desired change ID has been reached
    entityChangeIdReachedListeners
        .filter(l => l.desiredEntityChangeId <= lastProcessedEntityChangeId)
        .forEach(l => l.resolvePromise());

    // keep only the listeners which are still waiting
    entityChangeIdReachedListeners = entityChangeIdReachedListeners
        .filter(l => l.desiredEntityChangeId > lastProcessedEntityChangeId);

    // FIX: the original condition `Date.now() > l.start - 60000` was always true
    // (start minus a minute is in the past), so every pending listener was logged
    // on every call; warn only for listeners waiting for over a minute
    entityChangeIdReachedListeners
        .filter(l => Date.now() - l.start > 60000)
        .forEach(l => console.log(`Waiting for entityChangeId ${l.desiredEntityChangeId} while current is ${lastProcessedEntityChangeId} for ${Math.floor((Date.now() - l.start) / 1000)}s`));
}
2019-12-16 22:00:44 +01:00
/**
 * Invokes a sync handler and swallows (but logs) any exception, so a single
 * failing handler cannot break the processing of the remaining ones.
 *
 * @param {Function} syncHandler - handler to invoke (may be async)
 * @param {*} syncData - payload passed through to the handler
 * @returns {Promise<*>} the handler's result, or undefined if it threw
 */
async function runSafely(syncHandler, syncData) {
    try {
        const result = await syncHandler(syncData);

        return result;
    }
    catch (e) {
        console.log(`Sync handler failed with ${e.message}: ${e.stack}`);
    }
}
2019-10-20 17:49:58 +02:00
/**
 * Drains syncDataQueue, applies the not-yet-processed entity changes via
 * processEntityChanges() (with a 30s time limit), records them as processed,
 * advances lastProcessedEntityChangeId and finally wakes up any waiters.
 * On failure the frontend is reloaded (outside dev/debug mode) because that
 * is the simplest way to recover a consistent state.
 */
async function consumeSyncData() {
    if (syncDataQueue.length > 0) {
        // take ownership of the whole queue; new arrivals go into a fresh array
        const allEntityChanges = syncDataQueue;
        syncDataQueue = [];

        const nonProcessedEntityChanges = allEntityChanges.filter(ec => !processedEntityChangeIds.has(ec.id));

        try {
            await utils.timeLimit(processEntityChanges(nonProcessedEntityChanges), 30000);
        }
        catch (e) {
            logError(`Encountered error ${e.message}: ${e.stack}, reloading frontend.`);

            if (!glob.isDev && !options.is('debugModeEnabled')) {
                // if there's an error in updating the frontend then the easy option to recover is to reload the frontend completely
                utils.reloadApp();
            }
            else {
                // in dev/debug mode keep the session alive so the state can be inspected
                console.log("nonProcessedEntityChanges causing the timeout", nonProcessedEntityChanges);

                alert(`Encountered error "${e.message}", check out the console.`);
            }
        }

        for (const entityChange of nonProcessedEntityChanges) {
            processedEntityChangeIds.add(entityChange.id);
        }

        // NOTE(review): assumes the last queued row carries the highest ID — confirm server ordering
        lastProcessedEntityChangeId = Math.max(lastProcessedEntityChangeId, allEntityChanges[allEntityChanges.length - 1].id);
    }

    // resolve waiters even when the queue was empty
    checkEntityChangeIdListeners();
}
2018-03-25 11:09:17 -04:00
/**
 * Opens a new WebSocket connection to the backend, deriving the endpoint from
 * the current page location.
 *
 * @returns {WebSocket} the freshly created socket
 */
function connectWebSocket() {
    const loc = window.location;
    // use wss for secure messaging
    const protocol = loc.protocol === "https:" ? "wss:" : "ws:";
    const webSocketUri = protocol + "//" + loc.host + loc.pathname;

    const socket = new WebSocket(webSocketUri);
    socket.onopen = () => console.debug(utils.now(), `Connected to server ${webSocketUri} with WebSocket`);
    socket.onmessage = handleMessage;
    // we're not handling onclose here because reconnection is done in sendPing()

    return socket;
}
2019-12-02 22:27:06 +01:00
/**
 * Periodic keep-alive: warns when no sync message has arrived for 30 seconds,
 * sends a 'ping' carrying the last accepted entity change ID while the socket
 * is open, and reconnects when the socket is closed or closing.
 */
async function sendPing() {
    const silenceMs = Date.now() - lastPingTs;

    if (silenceMs > 30000) {
        console.log(utils.now(), "Lost websocket connection to the backend. If you keep having this issue repeatedly, you might want to check your reverse proxy (nginx, apache) configuration and allow/unblock WebSocket.");
    }

    switch (ws.readyState) {
        case ws.OPEN:
            ws.send(JSON.stringify({
                type: 'ping',
                lastEntityChangeId: lastAcceptedEntityChangeId
            }));
            break;

        case ws.CLOSED:
        case ws.CLOSING:
            console.log(utils.now(), "WS closed or closing, trying to reconnect");
            ws = connectWebSocket();
            break;
    }
}
2018-03-25 13:08:58 -04:00
// NOTE(review): presumably deferred via setTimeout(…, 0) so that module
// initialization finishes before the socket opens — confirm intent.
// After connecting, sendPing() runs every second to keep the connection
// alive and to reconnect when it drops.
setTimeout(() => {
    ws = connectWebSocket();

    lastPingTs = Date.now();

    setInterval(sendPing, 1000);
}, 0);
2017-12-01 22:28:22 -05:00
2019-10-25 22:20:14 +02:00
// surface sync pull progress to the user via a persistent toast
subscribeToMessages(message => {
    switch (message.type) {
        case 'sync-pull-in-progress':
            toastService.showPersistent({
                id: 'sync',
                title: "Sync status",
                message: "Sync update in progress",
                icon: "refresh"
            });
            break;

        case 'sync-pull-finished':
            // this gives user a chance to see the toast in case of fast sync finish
            setTimeout(() => toastService.closePersistent('sync'), 1000);
            break;
    }
});
2020-12-14 14:17:51 +01:00
/**
 * Applies one batch of entity changes to the frontend caches (treeCache,
 * options) and finally broadcasts 'entitiesReloaded' with the accumulated
 * LoadResults so that UI components can refresh.
 *
 * Rows are processed in fixed passes by entityName: notes, branches,
 * note_reordering, attributes, note_contents, note_revisions, options.
 *
 * @param {Array} entityChanges - rows shaped {entityName, entityId, entity, sourceId, ...}
 */
async function processEntityChanges(entityChanges) {
    // first preload notes which incoming branches / template relations reference,
    // so the passes below can link into existing cache entries
    const missingNoteIds = [];

    for (const {entityName, entity} of entityChanges) {
        if (entityName === 'branches' && !(entity.parentNoteId in treeCache.notes)) {
            missingNoteIds.push(entity.parentNoteId);
        }
        else if (entityName === 'attributes'
            && entity.type === 'relation'
            && entity.name === 'template'
            && !(entity.noteId in treeCache.notes)) {

            // for template relations the target note (entity.value) is needed
            missingNoteIds.push(entity.value);
        }
    }

    if (missingNoteIds.length > 0) {
        await treeCache.reloadNotes(missingNoteIds);
    }

    const loadResults = new LoadResults(treeCache);

    // notes: update only those already present in the cache
    for (const ec of entityChanges.filter(ec => ec.entityName === 'notes')) {
        const note = treeCache.notes[ec.entityId];

        if (note) {
            note.update(ec.entity);
            loadResults.addNote(ec.entityId, ec.sourceId);
        }
    }

    // branches: update/delete existing ones, create new ones when either end is cached
    for (const ec of entityChanges.filter(ec => ec.entityName === 'branches')) {
        let branch = treeCache.branches[ec.entityId];
        const childNote = treeCache.notes[ec.entity.noteId];
        const parentNote = treeCache.notes[ec.entity.parentNoteId];

        if (branch) {
            branch.update(ec.entity);
            loadResults.addBranch(ec.entityId, ec.sourceId);

            if (ec.entity.isDeleted) {
                // unlink the branch from both ends of the parent-child relation
                if (childNote) {
                    childNote.parents = childNote.parents.filter(parentNoteId => parentNoteId !== ec.entity.parentNoteId);
                    delete childNote.parentToBranch[ec.entity.parentNoteId];
                }

                if (parentNote) {
                    parentNote.children = parentNote.children.filter(childNoteId => childNoteId !== ec.entity.noteId);
                    delete parentNote.childToBranch[ec.entity.noteId];
                }
            }
            else {
                if (childNote) {
                    childNote.addParent(branch.parentNoteId, branch.branchId);
                }

                if (parentNote) {
                    parentNote.addChild(branch.noteId, branch.branchId);
                }
            }
        }
        else if (!ec.entity.isDeleted) {
            // new branch is only cached when at least one of its notes is cached
            if (childNote || parentNote) {
                branch = new Branch(treeCache, ec.entity);

                treeCache.branches[branch.branchId] = branch;

                loadResults.addBranch(ec.entityId, ec.sourceId);

                if (childNote) {
                    childNote.addParent(branch.parentNoteId, branch.branchId);
                }

                if (parentNote) {
                    parentNote.addChild(branch.noteId, branch.branchId);
                }
            }
        }
    }

    // note_reordering: apply new positions, then re-sort each affected parent once
    for (const ec of entityChanges.filter(ec => ec.entityName === 'note_reordering')) {
        const parentNoteIdsToSort = new Set();

        for (const branchId in ec.positions) {
            const branch = treeCache.branches[branchId];

            if (branch) {
                branch.notePosition = ec.positions[branchId];

                parentNoteIdsToSort.add(branch.parentNoteId);
            }
        }

        for (const parentNoteId of parentNoteIdsToSort) {
            const parentNote = treeCache.notes[parentNoteId];

            if (parentNote) {
                parentNote.sortChildren();
            }
        }

        loadResults.addNoteReordering(ec.entityId, ec.sourceId);
    }

    // missing reloading the relation target note
    for (const ec of entityChanges.filter(ec => ec.entityName === 'attributes')) {
        let attribute = treeCache.attributes[ec.entityId];
        const sourceNote = treeCache.notes[ec.entity.noteId];
        // targetNote is only meaningful for relations (value then holds a noteId)
        const targetNote = ec.entity.type === 'relation' && treeCache.notes[ec.entity.value];

        if (attribute) {
            attribute.update(ec.entity);
            loadResults.addAttribute(ec.entityId, ec.sourceId);

            if (ec.entity.isDeleted) {
                if (sourceNote) {
                    sourceNote.attributes = sourceNote.attributes.filter(attributeId => attributeId !== attribute.attributeId);
                }

                if (targetNote) {
                    targetNote.targetRelations = targetNote.targetRelations.filter(attributeId => attributeId !== attribute.attributeId);
                }
            }
        }
        else if (!ec.entity.isDeleted) {
            if (sourceNote || targetNote) {
                attribute = new Attribute(treeCache, ec.entity);

                treeCache.attributes[attribute.attributeId] = attribute;

                loadResults.addAttribute(ec.entityId, ec.sourceId);

                if (sourceNote && !sourceNote.attributes.includes(attribute.attributeId)) {
                    sourceNote.attributes.push(attribute.attributeId);
                }

                if (targetNote && !targetNote.targetRelations.includes(attribute.attributeId)) {
                    targetNote.targetRelations.push(attribute.attributeId);
                }
            }
        }
    }

    // note_contents: drop cached content promise so the next access refetches
    for (const ec of entityChanges.filter(ec => ec.entityName === 'note_contents')) {
        delete treeCache.noteComplementPromises[ec.entityId];

        loadResults.addNoteContent(ec.entityId, ec.sourceId);
    }

    for (const ec of entityChanges.filter(ec => ec.entityName === 'note_revisions')) {
        loadResults.addNoteRevision(ec.entityId, ec.noteId, ec.sourceId);
    }

    for (const ec of entityChanges.filter(ec => ec.entityName === 'options')) {
        if (ec.entity.name === 'openTabs') {
            continue; // only noise
        }

        options.set(ec.entity.name, ec.entity.value);

        loadResults.addOption(ec.entity.name);
    }

    if (!loadResults.isEmpty()) {
        if (loadResults.hasAttributeRelatedChanges()) {
            noteAttributeCache.invalidate();
        }

        // dynamic import avoids a static circular dependency with app_context.js
        // NOTE(review): presumed reason for the dynamic import — confirm
        const appContext = (await import("./app_context.js")).default;

        await appContext.triggerEvent('entitiesReloaded', {loadResults});
    }
}
2018-03-25 11:09:17 -04:00
// public API of the WebSocket/sync service
export default {
    logError,
    subscribeToMessages,
    waitForEntityChangeId,
    waitForMaxKnownEntityChangeId
};