import utils from './utils.js';
import toastService from "./toast.js";
import server from "./server.js";
import LoadResults from "./load_results.js";
import Branch from "../entities/branch.js";
import Attribute from "../entities/attribute.js";
import options from "./options.js";
import treeCache from "./tree_cache.js";
import noteAttributeCache from "./note_attribute_cache.js";

const $outstandingSyncsCount = $("#outstanding-syncs-count");

const messageHandlers = [];

let ws;
let lastAcceptedSyncId = window.glob.maxSyncIdAtLoad;
let lastProcessedSyncId = window.glob.maxSyncIdAtLoad;
let lastPingTs;
let syncDataQueue = [];

function logError(message) {
    console.log(utils.now(), message); // needs to be separate from .trace()
    console.trace();

    if (ws && ws.readyState === 1) {
        ws.send(JSON.stringify({
            type: 'log-error',
            error: message
        }));
    }
}

function subscribeToMessages(messageHandler) {
    messageHandlers.push(messageHandler);
}
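
// Example of a (hypothetical) consumer: handlers receive every parsed message,
// so they filter on message.type themselves.
//
//   subscribeToMessages(message => {
//       if (message.type === 'sync') {
//           console.debug(`received ${message.data.length} sync rows`);
//       }
//   });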

// used to serialize sync operations
let consumeQueuePromise = null;

// most sync events are sent twice - once immediately after the transaction finishes and once during the scheduled ping -
// but we want to process each of them only once
const processedSyncIds = new Set();

function logRows(syncRows) {
    const filteredRows = syncRows.filter(row =>
        !processedSyncIds.has(row.id)
        && row.entityName !== 'recent_notes'
        && (row.entityName !== 'options' || row.entityId !== 'openTabs'));

    if (filteredRows.length > 0) {
        console.debug(utils.now(), "Sync data: ", filteredRows);
    }
}

async function handleMessage(event) {
    const message = JSON.parse(event.data);

    for (const messageHandler of messageHandlers) {
        messageHandler(message);
    }

    if (message.type === 'sync') {
        const syncRows = message.data;

        lastPingTs = Date.now();

        $outstandingSyncsCount.html(message.outstandingSyncs);

        if (syncRows.length > 0) {
            logRows(syncRows);

            syncDataQueue.push(...syncRows);

            // we set lastAcceptedSyncId even before sync processing and send a ping so that the backend can start sending more updates
            lastAcceptedSyncId = Math.max(lastAcceptedSyncId, syncRows[syncRows.length - 1].id);
            sendPing();

            // first wait for all the preceding consumers to finish
            while (consumeQueuePromise) {
                await consumeQueuePromise;
            }

            try {
                // it's my turn, so start it up
                consumeQueuePromise = consumeSyncData();

                await consumeQueuePromise;
            }
            finally {
                // finish and set to null to signal that somebody else can pick it up
                consumeQueuePromise = null;
            }
        }
    }
    else if (message.type === 'sync-hash-check-failed') {
        toastService.showError("Sync check failed!", 60000);
    }
    else if (message.type === 'consistency-checks-failed') {
        toastService.showError("Consistency checks failed! See logs for details.", 50 * 60000);
    }
}
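
// The consumeQueuePromise handling above is effectively a minimal async mutex: each caller
// first awaits whatever run is currently in flight, then installs its own promise. A
// standalone sketch of the same pattern (all names here are illustrative, not part of this module):
//
//   let lock = null;
//
//   async function runSerialized(task) {
//       while (lock) {
//           await lock;
//       }
//
//       try {
//           lock = task();
//           return await lock;
//       }
//       finally {
//           lock = null;
//       }
//   }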

let syncIdReachedListeners = [];

function waitForSyncId(desiredSyncId) {
    if (desiredSyncId <= lastProcessedSyncId) {
        return Promise.resolve();
    }

    console.debug("Waiting for", desiredSyncId, "while current is", lastProcessedSyncId);

    return new Promise(res => {
        syncIdReachedListeners.push({
            desiredSyncId,
            resolvePromise: res,
            start: Date.now()
        });
    });
}

function waitForMaxKnownSyncId() {
    return waitForSyncId(server.getMaxKnownSyncId());
}
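
// A typical usage sketch (hypothetical - the endpoint below is made up and `server.put`
// is assumed to exist in ./server.js): after a server call which modifies an entity,
// wait until the resulting sync rows have been applied locally before reading the caches.
//
//   await server.put(`branches/${branchId}/expanded/1`);
//   await waitForMaxKnownSyncId();
//   // treeCache now reflects the change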

function checkSyncIdListeners() {
    syncIdReachedListeners
        .filter(l => l.desiredSyncId <= lastProcessedSyncId)
        .forEach(l => l.resolvePromise());

    syncIdReachedListeners = syncIdReachedListeners
        .filter(l => l.desiredSyncId > lastProcessedSyncId);

    // warn about listeners which have been waiting for more than a minute
    syncIdReachedListeners.filter(l => Date.now() - l.start > 60000)
        .forEach(l => console.log(`Waiting for syncId ${l.desiredSyncId} while current is ${lastProcessedSyncId} for ${Math.floor((Date.now() - l.start) / 1000)}s`));
}

async function runSafely(syncHandler, syncData) {
    try {
        return await syncHandler(syncData);
    }
    catch (e) {
        console.log(`Sync handler failed with ${e.message}: ${e.stack}`);
    }
}

/**
 * TODO: we should rethink the fact that each sync row is sent twice (once at the end of the transaction,
 * once by the periodic ping) and that we keep both lastProcessedSyncId and processedSyncIds.
 * It even seems incorrect that when transaction sync rows are received, we increase lastProcessedSyncId
 * right away - some syncs might then be lost (or are *all* sync rows sent from transactions?)
 */
async function consumeSyncData() {
    if (syncDataQueue.length > 0) {
        const allSyncRows = syncDataQueue;
        syncDataQueue = [];

        const nonProcessedSyncRows = allSyncRows.filter(sync => !processedSyncIds.has(sync.id));

        try {
            await processSyncRows(nonProcessedSyncRows);
        }
        catch (e) {
            logError(`Encountered error ${e.message}: ${e.stack}, reloading frontend.`);

            // if there's an error in updating the frontend then the easiest way to recover is to reload the frontend completely
            utils.reloadApp();
        }

        // remember the applied rows so that their second delivery (see the TODO above) is skipped
        for (const sync of nonProcessedSyncRows) {
            processedSyncIds.add(sync.id);
        }

        lastProcessedSyncId = Math.max(lastProcessedSyncId, allSyncRows[allSyncRows.length - 1].id);
    }

    checkSyncIdListeners();
}

function connectWebSocket() {
    const loc = window.location;
    // use wss for secure messaging
    const webSocketUri = (loc.protocol === "https:" ? "wss:" : "ws:")
                       + "//" + loc.host + loc.pathname;

    const ws = new WebSocket(webSocketUri);
    ws.onopen = () => console.debug(utils.now(), `Connected to server ${webSocketUri} with WebSocket`);
    ws.onmessage = handleMessage;
    // we're not handling ws.onclose here because reconnection is done in sendPing()

    return ws;
}
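
// the derivation above simply swaps the page's own scheme: a frontend served from
// https://notes.example.com/ connects to wss://notes.example.com/, while plain http uses ws: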

async function sendPing() {
    if (Date.now() - lastPingTs > 30000) {
        console.log(utils.now(), "Lost websocket connection to the backend. If this keeps happening, check your reverse proxy (nginx, apache) configuration and allow/unblock WebSocket.");
    }

    if (ws.readyState === ws.OPEN) {
        ws.send(JSON.stringify({
            type: 'ping',
            lastSyncId: lastAcceptedSyncId
        }));
    }
    else if (ws.readyState === ws.CLOSED || ws.readyState === ws.CLOSING) {
        console.log(utils.now(), "WS closed or closing, trying to reconnect");

        ws = connectWebSocket();
    }
}

setTimeout(() => {
    ws = connectWebSocket();

    lastPingTs = Date.now();

    setInterval(sendPing, 1000);
}, 0);

subscribeToMessages(message => {
    if (message.type === 'sync-pull-in-progress') {
        toastService.showPersistent({
            id: 'sync',
            title: "Sync status",
            message: "Sync update in progress",
            icon: "refresh"
        });
    }
    else if (message.type === 'sync-pull-finished') {
        // this gives the user a chance to see the toast even when the sync finishes quickly
        setTimeout(() => toastService.closePersistent('sync'), 1000);
    }
});

async function processSyncRows(syncRows) {
    const missingNoteIds = [];

    for (const {entityName, entity} of syncRows) {
        if (entityName === 'branches' && !(entity.parentNoteId in treeCache.notes)) {
            missingNoteIds.push(entity.parentNoteId);
        }
        else if (entityName === 'attributes'
                && entity.type === 'relation'
                && entity.name === 'template'
                && !(entity.noteId in treeCache.notes)) {

            missingNoteIds.push(entity.value);
        }
    }

    if (missingNoteIds.length > 0) {
        await treeCache.reloadNotes(missingNoteIds);
    }

    const loadResults = new LoadResults(treeCache);

    for (const sync of syncRows.filter(sync => sync.entityName === 'notes')) {
        const note = treeCache.notes[sync.entityId];

        if (note) {
            note.update(sync.entity);
            loadResults.addNote(sync.entityId, sync.sourceId);
        }
    }

    for (const sync of syncRows.filter(sync => sync.entityName === 'branches')) {
        let branch = treeCache.branches[sync.entityId];
        const childNote = treeCache.notes[sync.entity.noteId];
        const parentNote = treeCache.notes[sync.entity.parentNoteId];

        if (branch) {
            branch.update(sync.entity);
            loadResults.addBranch(sync.entityId, sync.sourceId);

            if (sync.entity.isDeleted) {
                if (childNote) {
                    childNote.parents = childNote.parents.filter(parentNoteId => parentNoteId !== sync.entity.parentNoteId);
                    delete childNote.parentToBranch[sync.entity.parentNoteId];
                }

                if (parentNote) {
                    parentNote.children = parentNote.children.filter(childNoteId => childNoteId !== sync.entity.noteId);
                    delete parentNote.childToBranch[sync.entity.noteId];
                }
            }
            else {
                if (childNote) {
                    childNote.addParent(branch.parentNoteId, branch.branchId);
                }

                if (parentNote) {
                    parentNote.addChild(branch.noteId, branch.branchId);
                }
            }
        }
        else if (!sync.entity.isDeleted) {
            if (childNote || parentNote) {
                branch = new Branch(treeCache, sync.entity);
                treeCache.branches[branch.branchId] = branch;

                loadResults.addBranch(sync.entityId, sync.sourceId);

                if (childNote) {
                    childNote.addParent(branch.parentNoteId, branch.branchId);
                }

                if (parentNote) {
                    parentNote.addChild(branch.noteId, branch.branchId);
                }
            }
        }
    }

    for (const sync of syncRows.filter(sync => sync.entityName === 'note_reordering')) {
        for (const branchId in sync.positions) {
            const branch = treeCache.branches[branchId];

            if (branch) {
                branch.notePosition = sync.positions[branchId];
            }
        }

        loadResults.addNoteReordering(sync.entityId, sync.sourceId);
    }

    // TODO: this is still missing the reload of the relation target note
    for (const sync of syncRows.filter(sync => sync.entityName === 'attributes')) {
        let attribute = treeCache.attributes[sync.entityId];
        const sourceNote = treeCache.notes[sync.entity.noteId];
        const targetNote = sync.entity.type === 'relation' && treeCache.notes[sync.entity.value];

        if (attribute) {
            attribute.update(sync.entity);
            loadResults.addAttribute(sync.entityId, sync.sourceId);

            if (sync.entity.isDeleted) {
                if (sourceNote) {
                    sourceNote.attributes = sourceNote.attributes.filter(attributeId => attributeId !== attribute.attributeId);
                }

                if (targetNote) {
                    targetNote.targetRelations = targetNote.targetRelations.filter(attributeId => attributeId !== attribute.attributeId);
                }
            }
        }
        else if (!sync.entity.isDeleted) {
            if (sourceNote || targetNote) {
                attribute = new Attribute(treeCache, sync.entity);

                treeCache.attributes[attribute.attributeId] = attribute;

                loadResults.addAttribute(sync.entityId, sync.sourceId);

                if (sourceNote && !sourceNote.attributes.includes(attribute.attributeId)) {
                    sourceNote.attributes.push(attribute.attributeId);
                }

                if (targetNote && !targetNote.targetRelations.includes(attribute.attributeId)) {
                    targetNote.targetRelations.push(attribute.attributeId);
                }
            }
        }
    }

    for (const sync of syncRows.filter(sync => sync.entityName === 'note_contents')) {
        delete treeCache.noteComplementPromises[sync.entityId];

        loadResults.addNoteContent(sync.entityId, sync.sourceId);
    }

    for (const sync of syncRows.filter(sync => sync.entityName === 'note_revisions')) {
        loadResults.addNoteRevision(sync.entityId, sync.noteId, sync.sourceId);
    }

    for (const sync of syncRows.filter(sync => sync.entityName === 'options')) {
        if (sync.entity.name === 'openTabs') {
            continue; // only noise
        }

        options.set(sync.entity.name, sync.entity.value);

        loadResults.addOption(sync.entity.name);
    }

    if (!loadResults.isEmpty()) {
        if (loadResults.hasAttributeRelatedChanges()) {
            noteAttributeCache.invalidate();
        }

        const appContext = (await import("./app_context.js")).default;

        await appContext.triggerEvent('entitiesReloaded', {loadResults});
    }
}

export default {
    logError,
    subscribeToMessages,
    waitForSyncId,
    waitForMaxKnownSyncId
};
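
// Example import from another frontend module (the relative path is an assumption -
// adjust it to wherever this file lives):
//
//   import ws from "./ws.js";
//
//   ws.subscribeToMessages(message => console.debug("WS message:", message.type));
//   await ws.waitForMaxKnownSyncId(); // inside an async function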