}
};
+ /**
+ * The sync process can make data intermittently inconsistent. Scripts which require strong data consistency
+ * can use this function to wait for a possible sync process to finish and to prevent a new sync process from
+ * starting while the callback is running.
+ *
+ * Because this is an async process, the inner callback doesn't have automatic transaction handling, so if
+ * you need to make any DB changes, you need to wrap them in api.transactional(...)
+ *
+ * @method
+ * @param {function} callback - function to be executed while sync process is not running
+ * @returns {Promise} - resolves once the callback is finished (callback is awaited)
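+ *
+ * @example
+ * // minimal usage sketch: wait out a possible sync before doing anything that needs consistent data
+ * await api.runOutsideOfSync(async () => {
+ *     // reads here see a consistent state since sync is paused;
+ *     // wrap any DB writes in api.transactional(...) as noted above
+ * });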
+ */
+ this.runOutsideOfSync = syncMutex.doExclusively;
+
/**
* This object contains "at your risk" and "no BC guarantees" objects for advanced use cases.
*
diff --git a/src/public/app/widgets/type_widgets/canvas.js b/src/public/app/widgets/type_widgets/canvas.js
index f25891a2dd..0fd9be909d 100644
--- a/src/public/app/widgets/type_widgets/canvas.js
+++ b/src/public/app/widgets/type_widgets/canvas.js
@@ -217,7 +217,7 @@ export default class ExcalidrawTypeWidget extends TypeWidget {
};
}
- const {elements, files, appState} = content;
+ const {elements, files, appState = {}} = content;
appState.theme = this.themeStyle;
diff --git a/src/public/app/widgets/type_widgets/options/advanced/database_anonymization.js b/src/public/app/widgets/type_widgets/options/advanced/database_anonymization.js
index a17e0675b1..9bdcce843e 100644
--- a/src/public/app/widgets/type_widgets/options/advanced/database_anonymization.js
+++ b/src/public/app/widgets/type_widgets/options/advanced/database_anonymization.js
@@ -20,6 +20,10 @@ const TPL = `
You can decide yourself if you want to provide a fully or lightly anonymized database. Even fully anonymized DB is very useful, however in some cases lightly anonymized database can speed up the process of bug identification and fixing.
`;
export default class BackupOptions extends OptionsWidget {
@@ -49,6 +55,8 @@ export default class BackupOptions extends OptionsWidget {
const {backupFile} = await server.post('database/backup-database');
toastService.showMessage(`Database has been backed up to ${backupFile}`, 10000);
+
+ this.refresh();
});
this.$dailyBackupEnabled = this.$widget.find(".daily-backup-enabled");
@@ -63,11 +71,25 @@ export default class BackupOptions extends OptionsWidget {
this.$monthlyBackupEnabled.on('change', () =>
this.updateCheckboxOption('monthlyBackupEnabled', this.$monthlyBackupEnabled));
+
+ this.$existingBackupList = this.$widget.find(".existing-backup-list");
}
optionsLoaded(options) {
this.setCheckboxState(this.$dailyBackupEnabled, options.dailyBackupEnabled);
this.setCheckboxState(this.$weeklyBackupEnabled, options.weeklyBackupEnabled);
this.setCheckboxState(this.$monthlyBackupEnabled, options.monthlyBackupEnabled);
+
+ server.get("database/backups").then(backupFiles => {
+ this.$existingBackupList.empty();
+
+ if (!backupFiles.length) {
+ backupFiles = [{filePath: "no backup yet", ctime: ''}];
+ }
+
+ for (const {filePath, ctime} of backupFiles) {
+ this.$existingBackupList.append($("<li>").text(`${filePath} ${ctime ? ` - ${ctime}` : ''}`));
+ }
+ });
}
}
diff --git a/src/routes/api/database.js b/src/routes/api/database.js
index 27658f39dd..d8d8cfa9a9 100644
--- a/src/routes/api/database.js
+++ b/src/routes/api/database.js
@@ -6,8 +6,8 @@ const backupService = require('../../services/backup');
const anonymizationService = require('../../services/anonymization');
const consistencyChecksService = require('../../services/consistency_checks');
-async function anonymize(req) {
- return await anonymizationService.createAnonymizedCopy(req.params.type);
+function getExistingBackups() {
+ return backupService.getExistingBackups();
}
async function backupDatabase() {
@@ -22,6 +22,18 @@ function vacuumDatabase() {
log.info("Database has been vacuumed.");
}
+function findAndFixConsistencyIssues() {
+ consistencyChecksService.runOnDemandChecks(true);
+}
+
+function getExistingAnonymizedDatabases() {
+ return anonymizationService.getExistingAnonymizedDatabases();
+}
+
+async function anonymize(req) {
+ return await anonymizationService.createAnonymizedCopy(req.params.type);
+}
+
function checkIntegrity() {
const results = sql.getRows("PRAGMA integrity_check");
@@ -32,14 +44,12 @@ function checkIntegrity() {
};
}
-function findAndFixConsistencyIssues() {
- consistencyChecksService.runOnDemandChecks(true);
-}
-
module.exports = {
+ getExistingBackups,
backupDatabase,
vacuumDatabase,
findAndFixConsistencyIssues,
+ getExistingAnonymizedDatabases,
anonymize,
checkIntegrity
};
diff --git a/src/routes/routes.js b/src/routes/routes.js
index 97a2f8de89..1cbe4cd83f 100644
--- a/src/routes/routes.js
+++ b/src/routes/routes.js
@@ -289,9 +289,11 @@ function register(app) {
apiRoute(GET, '/api/sql/schema', sqlRoute.getSchema);
apiRoute(PST, '/api/sql/execute/:noteId', sqlRoute.execute);
route(PST, '/api/database/anonymize/:type', [auth.checkApiAuthOrElectron, csrfMiddleware], databaseRoute.anonymize, apiResultHandler, false);
+ apiRoute(GET, '/api/database/anonymized-databases', databaseRoute.getExistingAnonymizedDatabases);
// backup requires execution outside of transaction
route(PST, '/api/database/backup-database', [auth.checkApiAuthOrElectron, csrfMiddleware], databaseRoute.backupDatabase, apiResultHandler, false);
+ apiRoute(GET, '/api/database/backups', databaseRoute.getExistingBackups);
// VACUUM requires execution outside of transaction
route(PST, '/api/database/vacuum-database', [auth.checkApiAuthOrElectron, csrfMiddleware], databaseRoute.vacuumDatabase, apiResultHandler, false);
diff --git a/src/services/anonymization.js b/src/services/anonymization.js
index 877a9b38b8..160b6e2a21 100644
--- a/src/services/anonymization.js
+++ b/src/services/anonymization.js
@@ -4,6 +4,7 @@ const dataDir = require("./data_dir");
const dateUtils = require("./date_utils");
const Database = require("better-sqlite3");
const sql = require("./sql");
+const path = require("path");
function getFullAnonymizationScript() {
// we want to delete all non-builtin attributes because they can contain sensitive names and values
@@ -70,7 +71,21 @@ async function createAnonymizedCopy(type) {
};
}
+function getExistingAnonymizedDatabases() {
+ if (!fs.existsSync(dataDir.ANONYMIZED_DB_DIR)) {
+ return [];
+ }
+
+ return fs.readdirSync(dataDir.ANONYMIZED_DB_DIR)
+ .filter(fileName => fileName.includes("anonymized"))
+ .map(fileName => ({
+ fileName: fileName,
+ filePath: path.resolve(dataDir.ANONYMIZED_DB_DIR, fileName)
+ }));
+}
+
module.exports = {
getFullAnonymizationScript,
- createAnonymizedCopy
+ createAnonymizedCopy,
+ getExistingAnonymizedDatabases
}
diff --git a/src/services/backend_script_api.js b/src/services/backend_script_api.js
index 1294e8e2c0..bbdc2b9347 100644
--- a/src/services/backend_script_api.js
+++ b/src/services/backend_script_api.js
@@ -19,6 +19,7 @@ const SpacedUpdate = require("./spaced_update");
const specialNotesService = require("./special_notes");
const branchService = require("./branches");
const exportService = require("./export/zip");
+const syncMutex = require("./sync_mutex.js");
/**
@@ -600,6 +601,20 @@ function BackendScriptApi(currentNote, apiParams) {
}
};
+ /**
+ * The sync process can make data intermittently inconsistent. Scripts which require strong data consistency
+ * can use this function to wait for a possible sync process to finish and to prevent a new sync process from
+ * starting while the callback is running.
+ *
+ * Because this is an async process, the inner callback doesn't have automatic transaction handling, so if
+ * you need to make any DB changes, you need to wrap them in api.transactional(...)
+ *
+ * @method
+ * @param {function} callback - function to be executed while sync process is not running
+ * @returns {Promise} - resolves once the callback is finished (callback is awaited)
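+ *
+ * @example
+ * // minimal usage sketch: wait out a possible sync before doing anything that needs consistent data
+ * await api.runOutsideOfSync(async () => {
+ *     // reads here see a consistent state since sync is paused;
+ *     // wrap any DB writes in api.transactional(...) as noted above
+ * });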
+ */
+ this.runOutsideOfSync = syncMutex.doExclusively;
+
/**
* This object contains "at your risk" and "no BC guarantees" objects for advanced use cases.
*
diff --git a/src/services/backup.js b/src/services/backup.js
index c7908524f3..29fbbd0a40 100644
--- a/src/services/backup.js
+++ b/src/services/backup.js
@@ -8,6 +8,22 @@ const log = require('./log');
const syncMutexService = require('./sync_mutex');
const cls = require('./cls');
const sql = require('./sql');
+const path = require('path');
+
+function getExistingBackups() {
+ if (!fs.existsSync(dataDir.BACKUP_DIR)) {
+ return [];
+ }
+
+ return fs.readdirSync(dataDir.BACKUP_DIR)
+ .filter(fileName => fileName.includes("backup"))
+ .map(fileName => {
+ const filePath = path.resolve(dataDir.BACKUP_DIR, fileName);
+ const stat = fs.statSync(filePath);
+
+ return {fileName, filePath, ctime: stat.ctime};
+ });
+}
function regularBackup() {
cls.init(() => {
@@ -58,6 +74,7 @@ if (!fs.existsSync(dataDir.BACKUP_DIR)) {
}
module.exports = {
+ getExistingBackups,
backupNow,
regularBackup
};
diff --git a/src/services/build.js b/src/services/build.js
index 6451155536..c461b9c7cf 100644
--- a/src/services/build.js
+++ b/src/services/build.js
@@ -1 +1 @@
-module.exports = { buildDate:"2023-09-29T00:54:45+02:00", buildRevision: "e5555beea9a1638fefa218118e0596f4cfc1f4d0" };
+module.exports = { buildDate:"2023-10-07T23:02:47+03:00", buildRevision: "3d15aeae58224ac8716dd58938458e89af9bf7a0" };
diff --git a/src/services/builtin_attributes.js b/src/services/builtin_attributes.js
index 0c2fa5f0ce..a4049e7758 100644
--- a/src/services/builtin_attributes.js
+++ b/src/services/builtin_attributes.js
@@ -68,6 +68,7 @@ module.exports = [
{ type: 'label', name: 'executeDescription'},
{ type: 'label', name: 'newNotesOnTop'},
{ type: 'label', name: 'clipperInbox'},
+ { type: 'label', name: 'webViewSrc', isDangerous: true },
// relation names
{ type: 'relation', name: 'internalLink' },
diff --git a/src/services/cloning.js b/src/services/cloning.js
index ed1c242141..4d66033732 100644
--- a/src/services/cloning.js
+++ b/src/services/cloning.js
@@ -7,7 +7,7 @@ const BBranch = require('../becca/entities/bbranch');
const becca = require("../becca/becca");
const log = require("./log");
-function cloneNoteToParentNote(noteId, parentNoteId, prefix) {
+function cloneNoteToParentNote(noteId, parentNoteId, prefix = null) {
if (!(noteId in becca.notes) || !(parentNoteId in becca.notes)) {
return { success: false, message: 'Note cannot be cloned because either the cloned note or the intended parent is deleted.' };
}
diff --git a/src/services/import/zip.js b/src/services/import/zip.js
index fcdb0cb5b5..d4c591610c 100644
--- a/src/services/import/zip.js
+++ b/src/services/import/zip.js
@@ -311,7 +311,7 @@ async function importZip(taskContext, fileBuffer, importRootNote) {
return /^(?:[a-z]+:)?\/\//i.test(url);
}
- content = removeTrilumTags(content);
+ content = removeTriliumTags(content);
content = content.replace(/<h1>([^<]*)<\/h1>/gi, (match, text) => {
if (noteTitle.trim() === text.trim()) {
@@ -393,7 +393,7 @@ async function importZip(taskContext, fileBuffer, importRootNote) {
return content;
}
- function removeTrilumTags(content) {
+ function removeTriliumTags(content) {
const tagsToRemove = [
'<h1>([^<]*)<\/h1>',
'<title>([^<]*)<\/title>'
diff --git a/src/services/request.js b/src/services/request.js
index b67b807f34..0af02b7ea8 100644
--- a/src/services/request.js
+++ b/src/services/request.js
@@ -58,10 +58,6 @@ function exec(opts) {
request.on('error', err => reject(generateError(opts, err)));
request.on('response', response => {
- if (![200, 201, 204].includes(response.statusCode)) {
- reject(generateError(opts, `${response.statusCode} ${response.statusMessage}`));
- }
-
if (opts.cookieJar && response.headers['set-cookie']) {
opts.cookieJar.header = response.headers['set-cookie'];
}
@@ -71,15 +67,28 @@ function exec(opts) {
response.on('data', chunk => responseStr += chunk);
response.on('end', () => {
- try {
- const jsonObj = responseStr.trim() ? JSON.parse(responseStr) : null;
+ if ([200, 201, 204].includes(response.statusCode)) {
+ try {
+ const jsonObj = responseStr.trim() ? JSON.parse(responseStr) : null;
- resolve(jsonObj);
- }
- catch (e) {
- log.error(`Failed to deserialize sync response: ${responseStr}`);
+ resolve(jsonObj);
+ } catch (e) {
+ log.error(`Failed to deserialize sync response: ${responseStr}`);
+
+ reject(generateError(opts, e.message));
+ }
+ } else {
+ let errorMessage;
+
+ try {
+ const jsonObj = JSON.parse(responseStr);
+
+ errorMessage = jsonObj?.message || '';
+ } catch (e) {
+ errorMessage = responseStr.substr(0, Math.min(responseStr.length, 100));
+ }
- reject(generateError(opts, e.message));
+ reject(generateError(opts, `${response.statusCode} ${response.statusMessage} ${errorMessage}`));
}
});
});
diff --git a/src/services/search/search_result.js b/src/services/search/search_result.js
index 4c678661de..ca3811f8ec 100644
--- a/src/services/search/search_result.js
+++ b/src/services/search/search_result.js
@@ -22,6 +22,10 @@ class SearchResult {
const note = becca.notes[this.noteId];
+ if (note.noteId.toLowerCase() === fulltextQuery) {
+ this.score += 100;
+ }
+
if (note.title.toLowerCase() === fulltextQuery) {
this.score += 100; // high reward for exact match #3470
}
diff --git a/src/services/sync_mutex.js b/src/services/sync_mutex.js
index 819371e808..fb95d03c43 100644
--- a/src/services/sync_mutex.js
+++ b/src/services/sync_mutex.js
@@ -1,5 +1,5 @@
/**
- * Sync makes process can make data intermittently inconsistent. Processes which require strong data consistency
+ * Sync process can make data intermittently inconsistent. Processes which require strong data consistency
* (like consistency checks) can use this mutex to make sure sync isn't currently running.
*/
diff --git a/src/services/sync_update.js b/src/services/sync_update.js
index 919f3c8cea..d5bb4ab302 100644
--- a/src/services/sync_update.js
+++ b/src/services/sync_update.js
@@ -68,43 +68,28 @@ function updateEntity(remoteEC, remoteEntityRow, instanceId, updateContext) {
function updateNormalEntity(remoteEC, remoteEntityRow, instanceId, updateContext) {
const localEC = sql.getRow(`SELECT * FROM entity_changes WHERE entityName = ? AND entityId = ?`, [remoteEC.entityName, remoteEC.entityId]);
- if (!localEC?.isErased && remoteEC.isErased) {
- eraseEntity(remoteEC, instanceId);
- updateContext.erased++;
-
- return true;
- } else if (localEC?.isErased && !remoteEC.isErased) {
- // on this side, we can't unerase the entity, so force the entity to be erased on the other side.
- entityChangesService.putEntityChangeForOtherInstances(localEC);
-
- return false;
- } else if (localEC?.isErased && remoteEC.isErased) {
- updateContext.alreadyErased++;
- return false;
- }
-
if (!localEC || localEC.utcDateChanged <= remoteEC.utcDateChanged) {
- if (!remoteEntityRow) {
- throw new Error(`Empty entity row for: ${JSON.stringify(remoteEC)}`);
- }
-
- if (remoteEC.entityName === 'blobs' && remoteEntityRow.content !== null) {
- // we always use a Buffer object which is different from normal saving - there we use a simple string type for
- // "string notes". The problem is that in general, it's not possible to detect whether a blob content
- // is string note or note (syncs can arrive out of order)
- remoteEntityRow.content = Buffer.from(remoteEntityRow.content, 'base64');
-
- if (remoteEntityRow.content.byteLength === 0) {
- // there seems to be a bug which causes empty buffer to be stored as NULL which is then picked up as inconsistency
- // (possibly not a problem anymore with the newer better-sqlite3)
- remoteEntityRow.content = "";
+ if (remoteEC.isErased) {
+ if (localEC?.isErased) {
+ eraseEntity(remoteEC); // make sure it's erased anyway
+ updateContext.alreadyErased++;
+ return false; // we won't save entitychange in this case
+ } else {
+ eraseEntity(remoteEC);
+ updateContext.erased++;
+ }
+ } else {
+ if (!remoteEntityRow) {
+ throw new Error(`Empty entity row for: ${JSON.stringify(remoteEC)}`);
}
- }
- sql.replace(remoteEC.entityName, remoteEntityRow);
+ preProcessContent(remoteEC, remoteEntityRow);
- updateContext.updated[remoteEC.entityName] = updateContext.updated[remoteEC.entityName] || [];
- updateContext.updated[remoteEC.entityName].push(remoteEC.entityId);
+ sql.replace(remoteEC.entityName, remoteEntityRow);
+
+ updateContext.updated[remoteEC.entityName] = updateContext.updated[remoteEC.entityName] || [];
+ updateContext.updated[remoteEC.entityName].push(remoteEC.entityId);
+ }
if (!localEC || localEC.utcDateChanged < remoteEC.utcDateChanged || localEC.hash !== remoteEC.hash) {
entityChangesService.putEntityChangeWithInstanceId(remoteEC, instanceId);
@@ -121,6 +106,21 @@ function updateNormalEntity(remoteEC, remoteEntityRow, instanceId, updateContext
return false;
}
+function preProcessContent(remoteEC, remoteEntityRow) {
+ if (remoteEC.entityName === 'blobs' && remoteEntityRow.content !== null) {
+ // we always use a Buffer object which is different from normal saving - there we use a simple string type for
+ // "string notes". The problem is that in general, it's not possible to detect whether a blob content
+ // is a string note or not (syncs can arrive out of order)
+ remoteEntityRow.content = Buffer.from(remoteEntityRow.content, 'base64');
+
+ if (remoteEntityRow.content.byteLength === 0) {
+ // there seems to be a bug which causes empty buffer to be stored as NULL which is then picked up as inconsistency
+ // (possibly not a problem anymore with the newer better-sqlite3)
+ remoteEntityRow.content = "";
+ }
+ }
+}
+
function updateNoteReordering(remoteEC, remoteEntityRow, instanceId) {
if (!remoteEntityRow) {
throw new Error(`Empty note_reordering body for: ${JSON.stringify(remoteEC)}`);
@@ -135,7 +135,7 @@ function updateNoteReordering(remoteEC, remoteEntityRow, instanceId) {
return true;
}
-function eraseEntity(entityChange, instanceId) {
+function eraseEntity(entityChange) {
const {entityName, entityId} = entityChange;
const entityNames = [
@@ -155,8 +155,6 @@ function eraseEntity(entityChange, instanceId) {
const primaryKeyName = entityConstructor.getEntityFromEntityName(entityName).primaryKeyName;
sql.execute(`DELETE FROM ${entityName} WHERE ${primaryKeyName} = ?`, [entityId]);
-
- entityChangesService.putEntityChangeWithInstanceId(entityChange, instanceId);
}
function logUpdateContext(updateContext) {
diff --git a/src/views/login.ejs b/src/views/login.ejs
index 9fd72c2f4c..392e6e854b 100644
--- a/src/views/login.ejs
+++ b/src/views/login.ejs
@@ -68,8 +68,6 @@
// https://stackoverflow.com/a/73731646/944162
function isMobile() {
- if ('maxTouchPoints' in navigator) return navigator.maxTouchPoints > 0;
-
const mQ = matchMedia?.('(pointer:coarse)');
if (mQ?.media === '(pointer:coarse)') return !!mQ.matches;