diff --git a/.eslintrc.json b/.eslintrc.json index bf52c1a..ca6e7bc 100644 --- a/.eslintrc.json +++ b/.eslintrc.json @@ -1,46 +1,47 @@ -{ - "env": { - "browser": true, - "es2021": true, - "node": true - }, - "extends": [ - "plugin:@typescript-eslint/recommended" - ], - "parser": "@typescript-eslint/parser", - "parserOptions": { - "ecmaVersion": "latest", - "sourceType": "module" - }, - "plugins": [ - "@typescript-eslint" - ], - "rules": { - "indent": [ "error", 4, { "SwitchCase": 1, "VariableDeclarator": 1, "MemberExpression": 1, "CallExpression": { "arguments": 1 } }], - "quotes": ["error", "single", { "avoidEscape": false, "allowTemplateLiterals": true }], - "semi": ["error", "always"], - "@typescript-eslint/no-explicit-any": [ - "off", // "error" if we want to enforce usage of `unknown` instead - { "ignoreRestArgs": false, "fixToUnknown": true } - ], - "comma-dangle": "off", - "@typescript-eslint/comma-dangle": ["error", { - "arrays": "always-multiline", - "objects": "always-multiline", - "imports": "always-multiline", - "exports": "always-multiline", - "enums": "always-multiline", - "functions": "only-multiline" - }], - "no-trailing-spaces": "error", - "eol-last": "error", - "curly": "error" - }, - "overrides": [{ - "files": "*.js", - "rules": { - "@typescript-eslint/no-var-requires": "off" - } - }], - "ignorePatterns": ["dist/**/*.*"] -} +{ + "env": { + "browser": true, + "es2021": true, + "node": true + }, + "extends": [ + "plugin:@typescript-eslint/recommended" + ], + "parser": "@typescript-eslint/parser", + "parserOptions": { + "ecmaVersion": "latest", + "sourceType": "module" + }, + "plugins": [ + "@typescript-eslint" + ], + "rules": { + "indent": [ "error", 4, { "SwitchCase": 1, "VariableDeclarator": 1, "MemberExpression": 1, "CallExpression": { "arguments": 1 } }], + "quotes": ["error", "single", { "avoidEscape": false, "allowTemplateLiterals": true }], + "semi": ["error", "always"], + "@typescript-eslint/no-explicit-any": [ + "off", // "error" if we want to enforce usage of `unknown` instead + { "ignoreRestArgs": false, "fixToUnknown": true } + ], + "comma-dangle": "off", + "@typescript-eslint/comma-dangle": ["error", { + "arrays": "always-multiline", + "objects": "always-multiline", + "imports": "always-multiline", + "exports": "always-multiline", + "enums": "always-multiline", + "functions": "only-multiline" + }], + "no-trailing-spaces": "error", + "eol-last": "error", + "curly": "error", + "no-invalid-this": "error" + }, + "overrides": [{ + "files": "*.js", + "rules": { + "@typescript-eslint/no-var-requires": "off" + } + }], + "ignorePatterns": ["dist/**/*.*"] +} diff --git a/package-lock.json b/package-lock.json index a2831f8..b9db11f 100644 --- a/package-lock.json +++ b/package-lock.json @@ -23,7 +23,8 @@ ], "license": "MIT", "dependencies": { - "acebase-core": "^1.27.5", + "acebase-core": "file:../acebase-core", + "pino": "^8.15.7", "unidecode": "^0.1.8" }, "devDependencies": { @@ -42,8 +43,7 @@ } }, "../acebase-core": { - "version": "1.27.3", - "extraneous": true, + "version": "1.27.5", "license": "MIT", "devDependencies": { "@types/jasmine": "^3.7.4", @@ -587,14 +587,21 @@ "url": "https://opencollective.com/typescript-eslint" } }, - "node_modules/acebase-core": { - "version": "1.27.5", - "resolved": "https://registry.npmjs.org/acebase-core/-/acebase-core-1.27.5.tgz", - "integrity": "sha512-mogTRyHg+5/TYp6puXcdhTUmupPLGyWSBpzf/1gXANEk/QffaPvEFGzolJ5fTIVFl3UhVoYcUj2jEDbQHmGGMQ==", - "optionalDependencies": { - "rxjs": ">= 5.x <= 7.x" + "node_modules/abort-controller": { + 
"version": "3.0.0", + "resolved": "https://registry.npmjs.org/abort-controller/-/abort-controller-3.0.0.tgz", + "integrity": "sha512-h8lQ8tacZYnR3vNQTgibj+tODHI5/+l06Au2Pcriv/Gmet0eaj4TwWH41sO9wnHDiQsEj19q0drzdWdeAHtweg==", + "dependencies": { + "event-target-shim": "^5.0.0" + }, + "engines": { + "node": ">=6.5" } }, + "node_modules/acebase-core": { + "resolved": "../acebase-core", + "link": true + }, "node_modules/acorn": { "version": "8.8.2", "resolved": "https://registry.npmjs.org/acorn/-/acorn-8.8.2.tgz", @@ -755,6 +762,14 @@ "inherits": "2.0.1" } }, + "node_modules/atomic-sleep": { + "version": "1.0.0", + "resolved": "https://registry.npmjs.org/atomic-sleep/-/atomic-sleep-1.0.0.tgz", + "integrity": "sha512-kNOjDqAh7px0XWNI+4QbzoiR/nTkHAWNud2uvnJquD1/x5a7EQZMJT0AczqK0Qn67oY/TTQ1LbUKajZpp3I9tQ==", + "engines": { + "node": ">=8.0.0" + } + }, "node_modules/available-typed-arrays": { "version": "1.0.5", "resolved": "https://registry.npmjs.org/available-typed-arrays/-/available-typed-arrays-1.0.5.tgz", @@ -777,7 +792,6 @@ "version": "1.5.1", "resolved": "https://registry.npmjs.org/base64-js/-/base64-js-1.5.1.tgz", "integrity": "sha512-AKpaYlHn8t4SVbOHCy+b5+KKgvR4vrsD8vbvrbiQJps7fKDTkjkDry6ji0rUJjC0kzbNePLwzxq8iypo41qeWA==", - "dev": true, "funding": [ { "type": "github", @@ -1707,11 +1721,18 @@ "node": ">=0.10.0" } }, + "node_modules/event-target-shim": { + "version": "5.0.1", + "resolved": "https://registry.npmjs.org/event-target-shim/-/event-target-shim-5.0.1.tgz", + "integrity": "sha512-i/2XbnSz/uxRCU6+NdVJgKWDTM427+MqYbkQzD321DuCQJUqOuJKIA0IM2+W2xtYHdKOmZ4dR6fExsd4SXL+WQ==", + "engines": { + "node": ">=6" + } + }, "node_modules/events": { "version": "3.3.0", "resolved": "https://registry.npmjs.org/events/-/events-3.3.0.tgz", "integrity": "sha512-mQw+2fkQbALzQ7V0MY0IqdnXNOeTtP4r0lN9z7AAawCXgqea7bDii20AYrIBrFd/Hx0M2Ocz6S111CaFkUcb0Q==", - "dev": true, "engines": { "node": ">=0.8.x" } @@ -1772,6 +1793,14 @@ "integrity": "sha512-DCXu6Ifhqcks7TZKY3Hxp3y6qphY5SJZmrWMDrKcERSOXWQdMhU9Ig/PYrzyw/ul9jOIyh0N4M0tbC5hodg8dw==", "dev": true }, + "node_modules/fast-redact": { + "version": "3.3.0", + "resolved": "https://registry.npmjs.org/fast-redact/-/fast-redact-3.3.0.tgz", + "integrity": "sha512-6T5V1QK1u4oF+ATxs1lWUmlEk6P2T9HqJG3e2DnHOdVgZy2rFJBoEnrIedcTXlkAHU/zKC+7KETJ+KGGKwxgMQ==", + "engines": { + "node": ">=6" + } + }, "node_modules/fast-safe-stringify": { "version": "2.1.1", "resolved": "https://registry.npmjs.org/fast-safe-stringify/-/fast-safe-stringify-2.1.1.tgz", @@ -2151,7 +2180,6 @@ "version": "1.2.1", "resolved": "https://registry.npmjs.org/ieee754/-/ieee754-1.2.1.tgz", "integrity": "sha512-dcyqhDvX1C46lXZcVqCpK+FtMRQVdIMN6/Df5js2zouUsqG7I6sFxitIC+7KYK29KdXOLHdu9zL4sFnoVQnqaA==", - "dev": true, "funding": [ { "type": "github", @@ -2834,6 +2862,14 @@ "node": ">=0.10.0" } }, + "node_modules/on-exit-leak-free": { + "version": "2.1.2", + "resolved": "https://registry.npmjs.org/on-exit-leak-free/-/on-exit-leak-free-2.1.2.tgz", + "integrity": "sha512-0eJJY6hXLGf1udHwfNftBqH+g73EU4B504nZeKpz1sYRKafAghwxEJunB2O7rDZkL4PGfsMVnTXZ2EjibbqcsA==", + "engines": { + "node": ">=14.0.0" + } + }, "node_modules/once": { "version": "1.4.0", "resolved": "https://registry.npmjs.org/once/-/once-1.4.0.tgz", @@ -3039,6 +3075,79 @@ "url": "https://github.com/sponsors/jonschlinkert" } }, + "node_modules/pino": { + "version": "8.15.7", + "resolved": "https://registry.npmjs.org/pino/-/pino-8.15.7.tgz", + "integrity": "sha512-B0GTwUzUCXmGILPqL5MzeLWfq2KhdRArFNrtSKIIeZoES1zPDD72EGOgGvtdLSYyyEQZqE7fThRRNjNGx8nVZg==", + 
"dependencies": { + "atomic-sleep": "^1.0.0", + "fast-redact": "^3.1.1", + "on-exit-leak-free": "^2.1.0", + "pino-abstract-transport": "v1.1.0", + "pino-std-serializers": "^6.0.0", + "process-warning": "^2.0.0", + "quick-format-unescaped": "^4.0.3", + "real-require": "^0.2.0", + "safe-stable-stringify": "^2.3.1", + "sonic-boom": "^3.1.0", + "thread-stream": "^2.0.0" + }, + "bin": { + "pino": "bin.js" + } + }, + "node_modules/pino-abstract-transport": { + "version": "1.1.0", + "resolved": "https://registry.npmjs.org/pino-abstract-transport/-/pino-abstract-transport-1.1.0.tgz", + "integrity": "sha512-lsleG3/2a/JIWUtf9Q5gUNErBqwIu1tUKTT3dUzaf5DySw9ra1wcqKjJjLX1VTY64Wk1eEOYsVGSaGfCK85ekA==", + "dependencies": { + "readable-stream": "^4.0.0", + "split2": "^4.0.0" + } + }, + "node_modules/pino-abstract-transport/node_modules/buffer": { + "version": "6.0.3", + "resolved": "https://registry.npmjs.org/buffer/-/buffer-6.0.3.tgz", + "integrity": "sha512-FTiCpNxtwiZZHEZbcbTIcZjERVICn9yq/pDFkTl95/AxzD1naBctN7YO68riM/gLSDY7sdrMby8hofADYuuqOA==", + "funding": [ + { + "type": "github", + "url": "https://github.com/sponsors/feross" + }, + { + "type": "patreon", + "url": "https://www.patreon.com/feross" + }, + { + "type": "consulting", + "url": "https://feross.org/support" + } + ], + "dependencies": { + "base64-js": "^1.3.1", + "ieee754": "^1.2.1" + } + }, + "node_modules/pino-abstract-transport/node_modules/readable-stream": { + "version": "4.4.2", + "resolved": "https://registry.npmjs.org/readable-stream/-/readable-stream-4.4.2.tgz", + "integrity": "sha512-Lk/fICSyIhodxy1IDK2HazkeGjSmezAWX2egdtJnYhtzKEsBPJowlI6F6LPb5tqIQILrMbx22S5o3GuJavPusA==", + "dependencies": { + "abort-controller": "^3.0.0", + "buffer": "^6.0.3", + "events": "^3.3.0", + "process": "^0.11.10", + "string_decoder": "^1.3.0" + }, + "engines": { + "node": "^12.22.0 || ^14.17.0 || >=16.0.0" + } + }, + "node_modules/pino-std-serializers": { + "version": "6.2.2", + "resolved": "https://registry.npmjs.org/pino-std-serializers/-/pino-std-serializers-6.2.2.tgz", + "integrity": "sha512-cHjPPsE+vhj/tnhCy/wiMh3M3z3h/j15zHQX+S9GkTBgqJuTuJzYJ4gUyACLhDaJ7kk9ba9iRDmbH2tJU03OiA==" + }, "node_modules/prelude-ls": { "version": "1.2.1", "resolved": "https://registry.npmjs.org/prelude-ls/-/prelude-ls-1.2.1.tgz", @@ -3052,7 +3161,6 @@ "version": "0.11.10", "resolved": "https://registry.npmjs.org/process/-/process-0.11.10.tgz", "integrity": "sha512-cdGef/drWFoydD1JsMzuFf8100nZl+GT+yacc2bEced5f9Rjk4z+WtFUTBu9PhOi9j/jfmBPu0mMEY4wIdAF8A==", - "dev": true, "engines": { "node": ">= 0.6.0" } @@ -3063,6 +3171,11 @@ "integrity": "sha512-3ouUOpQhtgrbOa17J7+uxOTpITYWaGP7/AhoR3+A+/1e9skrzelGi/dXzEYyvbxubEF6Wn2ypscTKiKJFFn1ag==", "dev": true }, + "node_modules/process-warning": { + "version": "2.2.0", + "resolved": "https://registry.npmjs.org/process-warning/-/process-warning-2.2.0.tgz", + "integrity": "sha512-/1WZ8+VQjR6avWOgHeEPd7SDQmFQ1B5mC1eRXsCm5TarlNmx/wCsa5GEaxGm05BORRtyG/Ex/3xq3TuRvq57qg==" + }, "node_modules/public-encrypt": { "version": "4.0.3", "resolved": "https://registry.npmjs.org/public-encrypt/-/public-encrypt-4.0.3.tgz", @@ -3128,6 +3241,11 @@ } ] }, + "node_modules/quick-format-unescaped": { + "version": "4.0.4", + "resolved": "https://registry.npmjs.org/quick-format-unescaped/-/quick-format-unescaped-4.0.4.tgz", + "integrity": "sha512-tYC1Q1hgyRuHgloV/YXs2w15unPVh8qfu/qCTfhTYamaw7fyhumKa2yGpdSo87vY32rIclj+4fWYQXUMs9EHvg==" + }, "node_modules/quick-lru": { "version": "6.1.1", "resolved": "https://registry.npmjs.org/quick-lru/-/quick-lru-6.1.1.tgz", @@ 
-3366,6 +3484,14 @@ "safe-buffer": "~5.1.0" } }, + "node_modules/real-require": { + "version": "0.2.0", + "resolved": "https://registry.npmjs.org/real-require/-/real-require-0.2.0.tgz", + "integrity": "sha512-57frrGM/OCTLqLOAh0mhVA9VBMHd+9U7Zb2THMGdBUoZVOtGbJzjxsYGDJ3A9AYYCP4hn6y1TVbaOfzWtm5GFg==", + "engines": { + "node": ">= 12.13.0" + } + }, "node_modules/redent": { "version": "4.0.0", "resolved": "https://registry.npmjs.org/redent/-/redent-4.0.0.tgz", @@ -3466,20 +3592,10 @@ "queue-microtask": "^1.2.2" } }, - "node_modules/rxjs": { - "version": "7.8.1", - "resolved": "https://registry.npmjs.org/rxjs/-/rxjs-7.8.1.tgz", - "integrity": "sha512-AA3TVj+0A2iuIoQkWEK/tqFjBq2j+6PO6Y0zJcvzLAFhEFIO3HL0vls9hWLncZbAAbK0mar7oZ4V079I/qPMxg==", - "optional": true, - "dependencies": { - "tslib": "^2.1.0" - } - }, "node_modules/safe-buffer": { "version": "5.2.1", "resolved": "https://registry.npmjs.org/safe-buffer/-/safe-buffer-5.2.1.tgz", "integrity": "sha512-rp3So07KcdmmKbGvgaNxQSJr7bGVSVk5S9Eq1F+ppbRo70+YeaDxkw5Dd8NPN+GD6bjnYm2VuPuCXmpuYvmCXQ==", - "dev": true, "funding": [ { "type": "github", @@ -3495,6 +3611,14 @@ } ] }, + "node_modules/safe-stable-stringify": { + "version": "2.4.3", + "resolved": "https://registry.npmjs.org/safe-stable-stringify/-/safe-stable-stringify-2.4.3.tgz", + "integrity": "sha512-e2bDA2WJT0wxseVd4lsDP4+3ONX6HpMXQa1ZhFQ7SU+GjvORCmShbCMltrtIDfkYhVHrOcPtj+KhmDBdPdZD1g==", + "engines": { + "node": ">=10" + } + }, "node_modules/safer-buffer": { "version": "2.1.2", "resolved": "https://registry.npmjs.org/safer-buffer/-/safer-buffer-2.1.2.tgz", @@ -3597,6 +3721,14 @@ "node": ">=8" } }, + "node_modules/sonic-boom": { + "version": "3.6.1", + "resolved": "https://registry.npmjs.org/sonic-boom/-/sonic-boom-3.6.1.tgz", + "integrity": "sha512-QV+p5nXPiUiSMxn/k5bOL+hzCpafdj1voL+hywPZhheRSYyYp7CF15rNdz1evOXCUn/tFb7R62PDX1yJmtoTgg==", + "dependencies": { + "atomic-sleep": "^1.0.0" + } + }, "node_modules/source-map": { "version": "0.5.7", "resolved": "https://registry.npmjs.org/source-map/-/source-map-0.5.7.tgz", @@ -3657,6 +3789,14 @@ "integrity": "sha512-XkD+zwiqXHikFZm4AX/7JSCXA98U5Db4AFd5XUg/+9UNtnH75+Z9KxtpYiJZx36mUDVOwH83pl7yvCer6ewM3w==", "dev": true }, + "node_modules/split2": { + "version": "4.2.0", + "resolved": "https://registry.npmjs.org/split2/-/split2-4.2.0.tgz", + "integrity": "sha512-UcjcJOWknrNkF6PLX83qcHM6KHgVKNkV62Y8a5uYDVv9ydGQVwAHMKqHdJje1VTWpljG0WYpCDhrCdAOYH4TWg==", + "engines": { + "node": ">= 10.x" + } + }, "node_modules/stream-browserify": { "version": "3.0.0", "resolved": "https://registry.npmjs.org/stream-browserify/-/stream-browserify-3.0.0.tgz", @@ -3731,7 +3871,6 @@ "version": "1.3.0", "resolved": "https://registry.npmjs.org/string_decoder/-/string_decoder-1.3.0.tgz", "integrity": "sha512-hkRX8U1WjJFd8LsDJ2yQ/wWWxaopEsABU1XfkM8A+j0+85JAGppt16cr1Whg6KIbb4okU6Mql6BOj+uup/wKeA==", - "dev": true, "dependencies": { "safe-buffer": "~5.2.0" } @@ -3841,6 +3980,14 @@ "integrity": "sha512-N+8UisAXDGk8PFXP4HAzVR9nbfmVJ3zYLAWiTIoqC5v5isinhr+r5uaO8+7r3BMfuNIufIsA7RdpVgacC2cSpw==", "dev": true }, + "node_modules/thread-stream": { + "version": "2.4.1", + "resolved": "https://registry.npmjs.org/thread-stream/-/thread-stream-2.4.1.tgz", + "integrity": "sha512-d/Ex2iWd1whipbT681JmTINKw0ZwOUBZm7+Gjs64DHuX34mmw8vJL2bFAaNacaW72zYiTJxSHi5abUuOi5nsfg==", + "dependencies": { + "real-require": "^0.2.0" + } + }, "node_modules/through": { "version": "2.3.8", "resolved": "https://registry.npmjs.org/through/-/through-2.3.8.tgz", @@ -3947,7 +4094,7 @@ "version": "2.5.1", "resolved": 
"https://registry.npmjs.org/tslib/-/tslib-2.5.1.tgz", "integrity": "sha512-KaI6gPil5m9vF7DKaoXxx1ia9fxS4qG5YveErRRVknPDXXriu5M8h48YRjB6h5ZUOKuAKlSJYb0GaDe8I39fRw==", - "devOptional": true + "dev": true }, "node_modules/tsutils": { "version": "3.21.0", @@ -4590,12 +4737,27 @@ "eslint-visitor-keys": "^3.3.0" } }, + "abort-controller": { + "version": "3.0.0", + "resolved": "https://registry.npmjs.org/abort-controller/-/abort-controller-3.0.0.tgz", + "integrity": "sha512-h8lQ8tacZYnR3vNQTgibj+tODHI5/+l06Au2Pcriv/Gmet0eaj4TwWH41sO9wnHDiQsEj19q0drzdWdeAHtweg==", + "requires": { + "event-target-shim": "^5.0.0" + } + }, "acebase-core": { - "version": "1.27.5", - "resolved": "https://registry.npmjs.org/acebase-core/-/acebase-core-1.27.5.tgz", - "integrity": "sha512-mogTRyHg+5/TYp6puXcdhTUmupPLGyWSBpzf/1gXANEk/QffaPvEFGzolJ5fTIVFl3UhVoYcUj2jEDbQHmGGMQ==", + "version": "file:../acebase-core", "requires": { - "rxjs": ">= 5.x <= 7.x" + "@types/jasmine": "^3.7.4", + "@types/node": "^14.14.6", + "@typescript-eslint/eslint-plugin": "^5.30.6", + "@typescript-eslint/parser": "^5.30.6", + "eslint": "^8.20.0", + "eslint-plugin-jasmine": "^4.1.3", + "jasmine": "^3.99.0", + "rxjs": ">= 5.x <= 7.x", + "tsc-esm-fix": "^2.13.0", + "typescript": "^5.0.4" } }, "acorn": { @@ -4728,6 +4890,11 @@ } } }, + "atomic-sleep": { + "version": "1.0.0", + "resolved": "https://registry.npmjs.org/atomic-sleep/-/atomic-sleep-1.0.0.tgz", + "integrity": "sha512-kNOjDqAh7px0XWNI+4QbzoiR/nTkHAWNud2uvnJquD1/x5a7EQZMJT0AczqK0Qn67oY/TTQ1LbUKajZpp3I9tQ==" + }, "available-typed-arrays": { "version": "1.0.5", "resolved": "https://registry.npmjs.org/available-typed-arrays/-/available-typed-arrays-1.0.5.tgz", @@ -4743,8 +4910,7 @@ "base64-js": { "version": "1.5.1", "resolved": "https://registry.npmjs.org/base64-js/-/base64-js-1.5.1.tgz", - "integrity": "sha512-AKpaYlHn8t4SVbOHCy+b5+KKgvR4vrsD8vbvrbiQJps7fKDTkjkDry6ji0rUJjC0kzbNePLwzxq8iypo41qeWA==", - "dev": true + "integrity": "sha512-AKpaYlHn8t4SVbOHCy+b5+KKgvR4vrsD8vbvrbiQJps7fKDTkjkDry6ji0rUJjC0kzbNePLwzxq8iypo41qeWA==" }, "bn.js": { "version": "5.2.1", @@ -5510,11 +5676,15 @@ "integrity": "sha512-kVscqXk4OCp68SZ0dkgEKVi6/8ij300KBWTJq32P/dYeWTSwK41WyTxalN1eRmA5Z9UU/LX9D7FWSmV9SAYx6g==", "dev": true }, + "event-target-shim": { + "version": "5.0.1", + "resolved": "https://registry.npmjs.org/event-target-shim/-/event-target-shim-5.0.1.tgz", + "integrity": "sha512-i/2XbnSz/uxRCU6+NdVJgKWDTM427+MqYbkQzD321DuCQJUqOuJKIA0IM2+W2xtYHdKOmZ4dR6fExsd4SXL+WQ==" + }, "events": { "version": "3.3.0", "resolved": "https://registry.npmjs.org/events/-/events-3.3.0.tgz", - "integrity": "sha512-mQw+2fkQbALzQ7V0MY0IqdnXNOeTtP4r0lN9z7AAawCXgqea7bDii20AYrIBrFd/Hx0M2Ocz6S111CaFkUcb0Q==", - "dev": true + "integrity": "sha512-mQw+2fkQbALzQ7V0MY0IqdnXNOeTtP4r0lN9z7AAawCXgqea7bDii20AYrIBrFd/Hx0M2Ocz6S111CaFkUcb0Q==" }, "evp_bytestokey": { "version": "1.0.3", @@ -5568,6 +5738,11 @@ "integrity": "sha512-DCXu6Ifhqcks7TZKY3Hxp3y6qphY5SJZmrWMDrKcERSOXWQdMhU9Ig/PYrzyw/ul9jOIyh0N4M0tbC5hodg8dw==", "dev": true }, + "fast-redact": { + "version": "3.3.0", + "resolved": "https://registry.npmjs.org/fast-redact/-/fast-redact-3.3.0.tgz", + "integrity": "sha512-6T5V1QK1u4oF+ATxs1lWUmlEk6P2T9HqJG3e2DnHOdVgZy2rFJBoEnrIedcTXlkAHU/zKC+7KETJ+KGGKwxgMQ==" + }, "fast-safe-stringify": { "version": "2.1.1", "resolved": "https://registry.npmjs.org/fast-safe-stringify/-/fast-safe-stringify-2.1.1.tgz", @@ -5863,8 +6038,7 @@ "ieee754": { "version": "1.2.1", "resolved": "https://registry.npmjs.org/ieee754/-/ieee754-1.2.1.tgz", - "integrity": 
"sha512-dcyqhDvX1C46lXZcVqCpK+FtMRQVdIMN6/Df5js2zouUsqG7I6sFxitIC+7KYK29KdXOLHdu9zL4sFnoVQnqaA==", - "dev": true + "integrity": "sha512-dcyqhDvX1C46lXZcVqCpK+FtMRQVdIMN6/Df5js2zouUsqG7I6sFxitIC+7KYK29KdXOLHdu9zL4sFnoVQnqaA==" }, "ignore": { "version": "5.2.4", @@ -6379,6 +6553,11 @@ "integrity": "sha512-rJgTQnkUnH1sFw8yT6VSU3zD3sWmu6sZhIseY8VX+GRu3P6F7Fu+JNDoXfklElbLJSnc3FUQHVe4cU5hj+BcUg==", "dev": true }, + "on-exit-leak-free": { + "version": "2.1.2", + "resolved": "https://registry.npmjs.org/on-exit-leak-free/-/on-exit-leak-free-2.1.2.tgz", + "integrity": "sha512-0eJJY6hXLGf1udHwfNftBqH+g73EU4B504nZeKpz1sYRKafAghwxEJunB2O7rDZkL4PGfsMVnTXZ2EjibbqcsA==" + }, "once": { "version": "1.4.0", "resolved": "https://registry.npmjs.org/once/-/once-1.4.0.tgz", @@ -6536,6 +6715,61 @@ "integrity": "sha512-JU3teHTNjmE2VCGFzuY8EXzCDVwEqB2a8fsIvwaStHhAWJEeVd1o1QD80CU6+ZdEXXSLbSsuLwJjkCBWqRQUVA==", "dev": true }, + "pino": { + "version": "8.15.7", + "resolved": "https://registry.npmjs.org/pino/-/pino-8.15.7.tgz", + "integrity": "sha512-B0GTwUzUCXmGILPqL5MzeLWfq2KhdRArFNrtSKIIeZoES1zPDD72EGOgGvtdLSYyyEQZqE7fThRRNjNGx8nVZg==", + "requires": { + "atomic-sleep": "^1.0.0", + "fast-redact": "^3.1.1", + "on-exit-leak-free": "^2.1.0", + "pino-abstract-transport": "v1.1.0", + "pino-std-serializers": "^6.0.0", + "process-warning": "^2.0.0", + "quick-format-unescaped": "^4.0.3", + "real-require": "^0.2.0", + "safe-stable-stringify": "^2.3.1", + "sonic-boom": "^3.1.0", + "thread-stream": "^2.0.0" + } + }, + "pino-abstract-transport": { + "version": "1.1.0", + "resolved": "https://registry.npmjs.org/pino-abstract-transport/-/pino-abstract-transport-1.1.0.tgz", + "integrity": "sha512-lsleG3/2a/JIWUtf9Q5gUNErBqwIu1tUKTT3dUzaf5DySw9ra1wcqKjJjLX1VTY64Wk1eEOYsVGSaGfCK85ekA==", + "requires": { + "readable-stream": "^4.0.0", + "split2": "^4.0.0" + }, + "dependencies": { + "buffer": { + "version": "6.0.3", + "resolved": "https://registry.npmjs.org/buffer/-/buffer-6.0.3.tgz", + "integrity": "sha512-FTiCpNxtwiZZHEZbcbTIcZjERVICn9yq/pDFkTl95/AxzD1naBctN7YO68riM/gLSDY7sdrMby8hofADYuuqOA==", + "requires": { + "base64-js": "^1.3.1", + "ieee754": "^1.2.1" + } + }, + "readable-stream": { + "version": "4.4.2", + "resolved": "https://registry.npmjs.org/readable-stream/-/readable-stream-4.4.2.tgz", + "integrity": "sha512-Lk/fICSyIhodxy1IDK2HazkeGjSmezAWX2egdtJnYhtzKEsBPJowlI6F6LPb5tqIQILrMbx22S5o3GuJavPusA==", + "requires": { + "abort-controller": "^3.0.0", + "buffer": "^6.0.3", + "events": "^3.3.0", + "process": "^0.11.10", + "string_decoder": "^1.3.0" + } + } + } + }, + "pino-std-serializers": { + "version": "6.2.2", + "resolved": "https://registry.npmjs.org/pino-std-serializers/-/pino-std-serializers-6.2.2.tgz", + "integrity": "sha512-cHjPPsE+vhj/tnhCy/wiMh3M3z3h/j15zHQX+S9GkTBgqJuTuJzYJ4gUyACLhDaJ7kk9ba9iRDmbH2tJU03OiA==" + }, "prelude-ls": { "version": "1.2.1", "resolved": "https://registry.npmjs.org/prelude-ls/-/prelude-ls-1.2.1.tgz", @@ -6545,8 +6779,7 @@ "process": { "version": "0.11.10", "resolved": "https://registry.npmjs.org/process/-/process-0.11.10.tgz", - "integrity": "sha512-cdGef/drWFoydD1JsMzuFf8100nZl+GT+yacc2bEced5f9Rjk4z+WtFUTBu9PhOi9j/jfmBPu0mMEY4wIdAF8A==", - "dev": true + "integrity": "sha512-cdGef/drWFoydD1JsMzuFf8100nZl+GT+yacc2bEced5f9Rjk4z+WtFUTBu9PhOi9j/jfmBPu0mMEY4wIdAF8A==" }, "process-nextick-args": { "version": "2.0.1", @@ -6554,6 +6787,11 @@ "integrity": "sha512-3ouUOpQhtgrbOa17J7+uxOTpITYWaGP7/AhoR3+A+/1e9skrzelGi/dXzEYyvbxubEF6Wn2ypscTKiKJFFn1ag==", "dev": true }, + "process-warning": { + "version": 
"2.2.0", + "resolved": "https://registry.npmjs.org/process-warning/-/process-warning-2.2.0.tgz", + "integrity": "sha512-/1WZ8+VQjR6avWOgHeEPd7SDQmFQ1B5mC1eRXsCm5TarlNmx/wCsa5GEaxGm05BORRtyG/Ex/3xq3TuRvq57qg==" + }, "public-encrypt": { "version": "4.0.3", "resolved": "https://registry.npmjs.org/public-encrypt/-/public-encrypt-4.0.3.tgz", @@ -6600,6 +6838,11 @@ "integrity": "sha512-NuaNSa6flKT5JaSYQzJok04JzTL1CA6aGhv5rfLW3PgqA+M2ChpZQnAC8h8i4ZFkBS8X5RqkDBHA7r4hej3K9A==", "dev": true }, + "quick-format-unescaped": { + "version": "4.0.4", + "resolved": "https://registry.npmjs.org/quick-format-unescaped/-/quick-format-unescaped-4.0.4.tgz", + "integrity": "sha512-tYC1Q1hgyRuHgloV/YXs2w15unPVh8qfu/qCTfhTYamaw7fyhumKa2yGpdSo87vY32rIclj+4fWYQXUMs9EHvg==" + }, "quick-lru": { "version": "6.1.1", "resolved": "https://registry.npmjs.org/quick-lru/-/quick-lru-6.1.1.tgz", @@ -6775,6 +7018,11 @@ } } }, + "real-require": { + "version": "0.2.0", + "resolved": "https://registry.npmjs.org/real-require/-/real-require-0.2.0.tgz", + "integrity": "sha512-57frrGM/OCTLqLOAh0mhVA9VBMHd+9U7Zb2THMGdBUoZVOtGbJzjxsYGDJ3A9AYYCP4hn6y1TVbaOfzWtm5GFg==" + }, "redent": { "version": "4.0.0", "resolved": "https://registry.npmjs.org/redent/-/redent-4.0.0.tgz", @@ -6836,20 +7084,15 @@ "queue-microtask": "^1.2.2" } }, - "rxjs": { - "version": "7.8.1", - "resolved": "https://registry.npmjs.org/rxjs/-/rxjs-7.8.1.tgz", - "integrity": "sha512-AA3TVj+0A2iuIoQkWEK/tqFjBq2j+6PO6Y0zJcvzLAFhEFIO3HL0vls9hWLncZbAAbK0mar7oZ4V079I/qPMxg==", - "optional": true, - "requires": { - "tslib": "^2.1.0" - } - }, "safe-buffer": { "version": "5.2.1", "resolved": "https://registry.npmjs.org/safe-buffer/-/safe-buffer-5.2.1.tgz", - "integrity": "sha512-rp3So07KcdmmKbGvgaNxQSJr7bGVSVk5S9Eq1F+ppbRo70+YeaDxkw5Dd8NPN+GD6bjnYm2VuPuCXmpuYvmCXQ==", - "dev": true + "integrity": "sha512-rp3So07KcdmmKbGvgaNxQSJr7bGVSVk5S9Eq1F+ppbRo70+YeaDxkw5Dd8NPN+GD6bjnYm2VuPuCXmpuYvmCXQ==" + }, + "safe-stable-stringify": { + "version": "2.4.3", + "resolved": "https://registry.npmjs.org/safe-stable-stringify/-/safe-stable-stringify-2.4.3.tgz", + "integrity": "sha512-e2bDA2WJT0wxseVd4lsDP4+3ONX6HpMXQa1ZhFQ7SU+GjvORCmShbCMltrtIDfkYhVHrOcPtj+KhmDBdPdZD1g==" }, "safer-buffer": { "version": "2.1.2", @@ -6918,6 +7161,14 @@ "integrity": "sha512-g9Q1haeby36OSStwb4ntCGGGaKsaVSjQ68fBxoQcutl5fS1vuY18H3wSt3jFyFtrkx+Kz0V1G85A4MyAdDMi2Q==", "dev": true }, + "sonic-boom": { + "version": "3.6.1", + "resolved": "https://registry.npmjs.org/sonic-boom/-/sonic-boom-3.6.1.tgz", + "integrity": "sha512-QV+p5nXPiUiSMxn/k5bOL+hzCpafdj1voL+hywPZhheRSYyYp7CF15rNdz1evOXCUn/tFb7R62PDX1yJmtoTgg==", + "requires": { + "atomic-sleep": "^1.0.0" + } + }, "source-map": { "version": "0.5.7", "resolved": "https://registry.npmjs.org/source-map/-/source-map-0.5.7.tgz", @@ -6974,6 +7225,11 @@ "integrity": "sha512-XkD+zwiqXHikFZm4AX/7JSCXA98U5Db4AFd5XUg/+9UNtnH75+Z9KxtpYiJZx36mUDVOwH83pl7yvCer6ewM3w==", "dev": true }, + "split2": { + "version": "4.2.0", + "resolved": "https://registry.npmjs.org/split2/-/split2-4.2.0.tgz", + "integrity": "sha512-UcjcJOWknrNkF6PLX83qcHM6KHgVKNkV62Y8a5uYDVv9ydGQVwAHMKqHdJje1VTWpljG0WYpCDhrCdAOYH4TWg==" + }, "stream-browserify": { "version": "3.0.0", "resolved": "https://registry.npmjs.org/stream-browserify/-/stream-browserify-3.0.0.tgz", @@ -7046,7 +7302,6 @@ "version": "1.3.0", "resolved": "https://registry.npmjs.org/string_decoder/-/string_decoder-1.3.0.tgz", "integrity": "sha512-hkRX8U1WjJFd8LsDJ2yQ/wWWxaopEsABU1XfkM8A+j0+85JAGppt16cr1Whg6KIbb4okU6Mql6BOj+uup/wKeA==", - "dev": true, 
"requires": { "safe-buffer": "~5.2.0" } @@ -7126,6 +7381,14 @@ "integrity": "sha512-N+8UisAXDGk8PFXP4HAzVR9nbfmVJ3zYLAWiTIoqC5v5isinhr+r5uaO8+7r3BMfuNIufIsA7RdpVgacC2cSpw==", "dev": true }, + "thread-stream": { + "version": "2.4.1", + "resolved": "https://registry.npmjs.org/thread-stream/-/thread-stream-2.4.1.tgz", + "integrity": "sha512-d/Ex2iWd1whipbT681JmTINKw0ZwOUBZm7+Gjs64DHuX34mmw8vJL2bFAaNacaW72zYiTJxSHi5abUuOi5nsfg==", + "requires": { + "real-require": "^0.2.0" + } + }, "through": { "version": "2.3.8", "resolved": "https://registry.npmjs.org/through/-/through-2.3.8.tgz", @@ -7204,7 +7467,7 @@ "version": "2.5.1", "resolved": "https://registry.npmjs.org/tslib/-/tslib-2.5.1.tgz", "integrity": "sha512-KaI6gPil5m9vF7DKaoXxx1ia9fxS4qG5YveErRRVknPDXXriu5M8h48YRjB6h5ZUOKuAKlSJYb0GaDe8I39fRw==", - "devOptional": true + "dev": true }, "tsutils": { "version": "3.21.0", diff --git a/package.json b/package.json index 0f10019..aed3061 100644 --- a/package.json +++ b/package.json @@ -92,7 +92,8 @@ "author": "Ewout Stortenbeker (http://appy.one)", "license": "MIT", "dependencies": { - "acebase-core": "^1.27.5", + "acebase-core": "file:../acebase-core", + "pino": "^8.15.7", "unidecode": "^0.1.8" }, "devDependencies": { diff --git a/src/api-local.ts b/src/api-local.ts index aaa364b..d0ea9f4 100644 --- a/src/api-local.ts +++ b/src/api-local.ts @@ -1,7 +1,7 @@ import { AceBaseBase, IStreamLike, Api, EventSubscriptionCallback, ReflectionType, IReflectionNodeInfo, IReflectionChildrenInfo, StreamReadFunction, StreamWriteFunction, TransactionLogFilter, - LoggingLevel, Query, QueryOptions } from 'acebase-core'; + LoggingLevel, Query, QueryOptions, LoggerPlugin } from 'acebase-core'; import { AceBaseStorage, AceBaseStorageSettings } from './storage/binary'; import { SQLiteStorage, SQLiteStorageSettings } from './storage/sqlite'; import { MSSQLStorage, MSSQLStorageSettings } from './storage/mssql'; @@ -19,12 +19,14 @@ export class LocalApi extends Api { public db: AceBaseBase; public storage: Storage; public logLevel: LoggingLevel; + public logger: LoggerPlugin; constructor(dbname = 'default', init: { db: AceBaseBase, settings: AceBaseLocalSettings }, readyCallback: () => any) { super(); this.db = init.db; + this.logger = init.db.logger; - const storageEnv: StorageEnv = { logLevel: init.settings.logLevel }; + const storageEnv: StorageEnv = { logLevel: init.settings.logLevel, logColors: init.settings.logColors, logger: init.settings.logger }; if (typeof init.settings.storage === 'object') { // settings.storage.logLevel = settings.logLevel; if (SQLiteStorageSettings && (init.settings.storage instanceof SQLiteStorageSettings)) { // || env.settings.storage.type === 'sqlite' diff --git a/src/btree/binary-tree.spec.ts b/src/btree/binary-tree.spec.ts index 2930c8f..e455599 100644 --- a/src/btree/binary-tree.spec.ts +++ b/src/btree/binary-tree.spec.ts @@ -1,365 +1,365 @@ -import { BPlusTree, BinaryWriter, BinaryBPlusTree, BlacklistingSearchOperator, BinaryBPlusTreeLeafEntry } from '.'; -import { DebugLogger, ID } from 'acebase-core'; -import { BinaryBPlusTreeLeafEntryValue } from './binary-tree-leaf-entry-value'; - -describe('Unique Binary B+Tree', () => { - // Tests basic operations of the BinaryBPlusTree implementation - const FILL_FACTOR = 95; // AceBase uses 95% fill factor for key indexes - const AUTO_GROW = false; // autoGrow is not used by AceBase atm - const debug = new DebugLogger('log', 'B+Tree'); - const createBinaryTree = async () => { - const tree = new BPlusTree(100, true); - - const bytes = [] as 
number[]; - await tree.toBinary(true, BinaryWriter.forArray(bytes)); - const binaryTree = new BinaryBPlusTree({ readFn: bytes, debug }); - binaryTree.id = ID.generate(); // Assign an id to allow edits (is enforced by tree to make sure multiple concurrent edits to the same source are sync locked) - binaryTree.autoGrow = AUTO_GROW; - return binaryTree; - }; - - const rebuildTree = async (tree: BinaryBPlusTree) => { - const bytes = [] as number[]; - const id = tree.id; - await tree.rebuild(BinaryWriter.forArray(bytes), { fillFactor: FILL_FACTOR, keepFreeSpace: true, increaseMaxEntries: true }); - tree = new BinaryBPlusTree({ readFn: bytes, debug }); - tree.id = id; - tree.autoGrow = AUTO_GROW; - return tree; - }; - - it('is an instance', async () => { - const tree = await createBinaryTree(); - expect(tree).toBeInstanceOf(BinaryBPlusTree); - }); - - it('entries can added & found', async () => { - const tree = await createBinaryTree(); - - // Add 1 key - const testRecordPointer = [1,2,3,4]; - await tree.add('key', testRecordPointer); - - // Lookup the entry & check its value - const value = await tree.find('key') as BinaryBPlusTreeLeafEntryValue; - expect(value).not.toBeNull(); - for (let i = 0; i < testRecordPointer.length; i++) { - expect(value.recordPointer[i]).toEqual(testRecordPointer[i]); - } - }); - - describe('entries', () => { - - const TEST_KEYS = 1000; // This simulates the amount of children to be added to an AceBase node - const keys = [] as string[]; - // Create random keys - for (let i = 0; i < TEST_KEYS; i++) { - keys.push(ID.generate()); - } - - let tree: BinaryBPlusTree; - beforeAll(async () => { - // Create tree - tree = await createBinaryTree(); - - let rebuilds = 0; - - // Add keys 1 by 1 - for (let i = 0; i < keys.length; i++) { - const key = keys[i]; - const recordPointer = Array.from(key).map(ch => ch.charCodeAt(0)); // Fake (unique) recordpointer - try { - await tree.add(key, recordPointer); - } - catch(err) { - // While the tree grows, this happens. Rebuild the tree and try again - rebuilds++; - tree = await rebuildTree(tree); - await tree.add(key, recordPointer); // Retry add - } - } - - console.log(`Created a tree with ${keys.length} entries, ${rebuilds} rebuilds were needed`); - }); - - // Lookup all added entries - it('can be found', async () => { - for (let i = 0; i < keys.length; i++) { - const key = keys[i]; - const value = await tree.find(key); - expect(value).not.toBeNull(); - } - }); - - // Iterate the leafs from start to end, confirm the right order - it('can be iterated in ascending order', async () => { - let leaf = await tree.getFirstLeaf(); - expect(leaf).not.toBeNull(); - let lastEntry, count = 0; - while (leaf) { - for (let i = 0; i < leaf.entries.length; i++) { - count++; - const entry = leaf.entries[i]; - if (i > 0) { - // key > last - expect(entry.key > lastEntry.key).toBeTrue(); - } - lastEntry = entry; - } - leaf = leaf.getNext ? await leaf.getNext() : null; - } - expect(count).toEqual(keys.length); - }); - - // Iterate the leafs from end to start - it('can be iterated in descending order', async () => { - let leaf = await tree.getLastLeaf(); - expect(leaf).not.toBeNull(); - let count = 0; - let lastEntry: BinaryBPlusTreeLeafEntry; - while (leaf) { - for (let i = leaf.entries.length - 1; i >= 0 ; i--) { - count++; - const entry = leaf.entries[i]; - if (i < leaf.entries.length - 1) { - // key < last - expect(entry.key < lastEntry.key).toBeTrue(); - } - lastEntry = entry; - } - leaf = leaf.getPrevious ? 
await leaf.getPrevious() : null; - } - expect(count).toEqual(keys.length); - }); - - describe('can be queried', () => { - - const options = { entries: true, keys: true, values: true, count: true }; - - const checkResults = ( - result: Awaited>, - expectedKeys: string[], - log: string - ) => { - log && console.log(log); - expect(result.keyCount).toEqual(expectedKeys.length); - expect(result.valueCount).toEqual(expectedKeys.length); // unique tree, 1 value per key - expect(result.entries.length).toEqual(expectedKeys.length); - expect(result.keys.length).toEqual(expectedKeys.length); - expect(result.values.length).toEqual(expectedKeys.length); - const allFound = expectedKeys.every(key => result.keys.includes(key)); - expect(allFound).toBeTrue(); - }; - - it('with "==" operator', async () => { - // Find first entry - let result = await tree.search('==', keys[0], options); - checkResults(result, [keys[0]], `== "${keys[0]}": expecting 1 result`); - - // Find a random entry - const randomKey = keys[Math.floor(Math.random() * keys.length)]; - result = await tree.search('==', randomKey, options); - checkResults(result, [randomKey], `== "${randomKey}": expecting 1 result`); - }); - - it('with "!=" operator', async () => { - // Find all except 1 random entry - const excludeIndex = Math.floor(Math.random() * keys.length); - const excludeKey = keys[excludeIndex]; - const expectedKeys = keys.slice(0, excludeIndex).concat(keys.slice(excludeIndex+1)); - const result = await tree.search('!=', excludeKey, options); - checkResults(result, expectedKeys, `!= "${excludeKey}": expecting ${expectedKeys.length} results`); - }); - - it('with "<" operator', async () => { - // Find first 10 keys - const expectedKeys = keys.slice(0, 11); // Take 11, use last as < - const lessThanKey = expectedKeys.pop(); - const result = await tree.search('<', lessThanKey, options); - checkResults(result, expectedKeys, `< "${lessThanKey}": expecting ${expectedKeys.length} results`); - }); - - it('with "<=" operator', async () => { - // Find first 10 keys - const expectedKeys = keys.slice(0, 10); - const key = expectedKeys.slice(-1)[0]; - const result = await tree.search('<=', key, options); - checkResults(result, expectedKeys, `<= "${key}": expecting ${expectedKeys.length} results`); - }); - - it('with ">" operator', async () => { - // Find last 10 keys - const expectedKeys = keys.slice(-11); // Take 11, use first as > - const greaterThanKey = expectedKeys.shift(); - const result = await tree.search('>', greaterThanKey, options); - checkResults(result, expectedKeys, `> "${greaterThanKey}": expecting ${expectedKeys.length} results`); - }); - - it('with ">=" operator', async () => { - // Find last 10 keys - const expectedKeys = keys.slice(-10); - const result = await tree.search('>=', expectedKeys[0], options); - checkResults(result, expectedKeys, `>= "${expectedKeys[0]}": expecting ${expectedKeys.length} results`); - }); - - it('with "like" operator', async () => { - // All keys that start with the same 10 characters as the first key - let str = keys[0].slice(0, 10); - let expectedKeys = keys.filter(key => key.startsWith(str)); - let result = await tree.search('like', `${str}*`, options); - checkResults(result, expectedKeys, `like "${str}*": expecting ${expectedKeys.length} keys to start with "${str}"`); - - // All keys that end with the same 3 last characters of the first key - str = keys[0].slice(-3); - expectedKeys = keys.filter(key => key.endsWith(str)); - result = await tree.search('like', `*${str}`, options); - 
checkResults(result, expectedKeys, `like "*${str}": expecting ${expectedKeys.length} keys to end with "${str}"`); - - // All keys that contain the last 2 characters of the first key - str = keys[0].slice(-2); - expectedKeys = keys.filter(key => key.includes(str)); - result = await tree.search('like', `*${str}*`, options); - checkResults(result, expectedKeys, `like "*${str}*": expecting ${expectedKeys.length} keys to contain "${str}"`); - }); - - it('with "between" operator', async () => { - // Find custom range of keys - const [startIndex, endIndex] = [Math.floor(Math.random() * (keys.length-1)), Math.floor(Math.random() * (keys.length-1))].sort((a,b) => a < b ? -1 : 1); - const expectedKeys = startIndex === endIndex ? [keys[startIndex]] : keys.slice(startIndex, endIndex); - const firstKey = expectedKeys[0], lastKey = expectedKeys.slice(-1)[0]; - - let result = await tree.search('between', [firstKey, lastKey], options); - checkResults(result, expectedKeys, `between "${firstKey}" and "${lastKey}": expecting ${expectedKeys.length} results`); - - result = await tree.search('between', [lastKey, firstKey], options); - checkResults(result, expectedKeys, `between "${lastKey}" and "${firstKey}" (reversed): expecting ${expectedKeys.length} results`); - }); - - it('with "!between" operator', async () => { - // Find custom range of keys (before and after given indexes) - const [startIndex, endIndex] = [Math.floor(Math.random() * keys.length), Math.floor(Math.random() * keys.length)].sort((a,b) => a-b); // eg: [2,6] - const expectedKeys = keys.filter((key, index) => index < startIndex || index > endIndex); // eg: expect [1,2, 7,8,9] for indexes 2 and 6 of keys [1,2,3,4,5,6,7,8,9] - const firstKey = keys[startIndex], lastKey = keys[endIndex]; // eg: 3 and 6 - - let result = await tree.search('!between', [firstKey, lastKey], options); - checkResults(result, expectedKeys, `!between "${firstKey}" and "${lastKey}": expecting ${expectedKeys.length} results`); - - result = await tree.search('!between', [lastKey, firstKey], options); - checkResults(result, expectedKeys, `!between "${lastKey}" and "${firstKey}" (reversed): expecting ${expectedKeys.length} results`); - }); - - it('with "in" operator', async () => { - // Find 5 random keys - const r = () => Math.floor(Math.random() * keys.length); - const randomIndexes = [r(), r(), r(), r(), r()].reduce((indexes, index) => ((!indexes.includes(index) ? indexes.push(index) : 1), indexes), []); - const expectedKeys = randomIndexes.map(index => keys[index]); - const result = await tree.search('in', expectedKeys, options); - checkResults(result, expectedKeys, `in [${expectedKeys.map(key => `"${key}"`).join(',')}]: expecting ${expectedKeys.length} results`); - }); - - it('with "!in" operator', async () => { - // Find 5 random keys - const r = () => Math.floor(Math.random() * keys.length); - const randomIndexes = [r(), r(), r(), r(), r()].reduce((indexes, index) => ((!indexes.includes(index) ? indexes.push(index) : 1), indexes), []); - const blacklistedKeys = randomIndexes.map(index => keys[index]); - const expectedKeys = keys.reduce((allowed, key) => (!blacklistedKeys.includes(key) ? 
allowed.push(key) : 1) && allowed, []); - const result = await tree.search('!in', blacklistedKeys, options); - checkResults(result, expectedKeys, `!in [${blacklistedKeys.map(key => `"${key}"`).join(',')}]: expecting ${expectedKeys.length} results`); - }); - - it('with "exists" operator', async () => { - // Finds all keys with a defined value, same as search("!=", undefined) - // --> all keys in our test - const result = await tree.search('exists', undefined, options); - checkResults(result, keys, `exists: expecting ${keys.length} (all) results`); - }); - - it('with "!exists" operator', async () => { - // Finds results for key with undefined value, same as search("==", undefined) - // --> no keys in our test - const result = await tree.search('!exists', undefined, options); - checkResults(result, [], `!exists: expecting NO results`); - }); - - it('with BlacklistingSearchOperator', async () => { - const keysToBlacklist = keys.filter(key => Math.random() > 0.25); // blacklist ~75% - const expectedKeys = keys.filter(key => !keysToBlacklist.includes(key)); - - const blacklisted = [] as BinaryBPlusTreeLeafEntry[]; - const op = new BlacklistingSearchOperator(entry => { - if (keysToBlacklist.includes(entry.key as string)) { - blacklisted.push(entry); - return entry.values; // Return all values (1) as array to be blacklisted - } - }); - - let result = await tree.search(op, undefined, options); - checkResults(result, expectedKeys, `BlacklistingSearchOperator: expecting ${expectedKeys.length} results`); - expect(blacklisted.length).toEqual(keysToBlacklist.length); - - // Run again, using the previous results as filter. This should yield the same results - // No additional entries should have been blacklisted (blacklisted.length should remain the same!) - const filteredOptions = { filter: result.entries }; - Object.assign(filteredOptions, options); - result = await tree.search(op, undefined, filteredOptions); - expect(blacklisted.length).toEqual(keysToBlacklist.length); - checkResults(result, expectedKeys, `BlacklistingSearchOperator + filter: expecting ${expectedKeys.length} results`); - - // Run again, using blacklisted results as filter. 
This should yield no results - filteredOptions.filter = blacklisted; - result = await tree.search(op, undefined, filteredOptions); - expect(blacklisted.length).toEqual(keysToBlacklist.length); - checkResults(result, [], `BlacklistingSearchOperator + blacklist filter: expecting 0 results`); - }); - - it('with "matches" operator', async () => { - const regex = /[a-z]{6}/; - const expectedKeys = keys.filter(key => regex.test(key)); - const result = await tree.search('matches', regex, options); - checkResults(result, expectedKeys, `matches /${regex.source}/${regex.flags}: expecting ${expectedKeys.length} results`); - }); - - it('with "!matches" operator', async () => { - const regex = /[a-z]{6}/; - const expectedKeys = keys.filter(key => !regex.test(key)); - const result = await tree.search('!matches', regex, options); - checkResults(result, expectedKeys, `!matches /${regex.source}/${regex.flags}: expecting ${expectedKeys.length} results`); - }); - - }); - - afterAll(async () => { - // Remove all entries - let rebuilds = 0; - for (let i = 0; i < keys.length; i++) { - const key = keys[i]; - try { - await tree.remove(key); - } - catch(err) { - rebuilds++; - tree = await rebuildTree(tree); - await tree.remove(key); // Try again - } - } - - console.log(`Removed ${keys.length} entries from tree, ${rebuilds} rebuilds were needed`); - - // Expect the tree to be empty now - const leafStats = await tree.getFirstLeaf({ stats: true }); - expect(leafStats.entries.length).toEqual(0); - }); - }); - - it('returns null for keys not present', async () => { - const tree = await createBinaryTree(); - const value = await tree.find('unknown'); - expect(value).toBeNull(); - }); - - it('must not accept duplicate keys', async () => { - const tree = await createBinaryTree(); - await tree.add('unique_key', [1]); - await expectAsync(tree.add('unique_key', [2])).toBeRejected(); - }); -}); +import { BPlusTree, BinaryWriter, BinaryBPlusTree, BlacklistingSearchOperator, BinaryBPlusTreeLeafEntry } from '.'; +import { DebugLogger, ID } from 'acebase-core'; +import { BinaryBPlusTreeLeafEntryValue } from './binary-tree-leaf-entry-value'; + +describe('Unique Binary B+Tree', () => { + // Tests basic operations of the BinaryBPlusTree implementation + const FILL_FACTOR = 95; // AceBase uses 95% fill factor for key indexes + const AUTO_GROW = false; // autoGrow is not used by AceBase atm + const debug = new DebugLogger('log', 'B+Tree'); + const createBinaryTree = async () => { + const tree = new BPlusTree(100, true); + + const bytes = [] as number[]; + await tree.toBinary(true, BinaryWriter.forArray(bytes)); + const binaryTree = new BinaryBPlusTree({ readFn: bytes, logger: debug }); + binaryTree.id = ID.generate(); // Assign an id to allow edits (is enforced by tree to make sure multiple concurrent edits to the same source are sync locked) + binaryTree.autoGrow = AUTO_GROW; + return binaryTree; + }; + + const rebuildTree = async (tree: BinaryBPlusTree) => { + const bytes = [] as number[]; + const id = tree.id; + await tree.rebuild(BinaryWriter.forArray(bytes), { fillFactor: FILL_FACTOR, keepFreeSpace: true, increaseMaxEntries: true }); + tree = new BinaryBPlusTree({ readFn: bytes, logger: debug }); + tree.id = id; + tree.autoGrow = AUTO_GROW; + return tree; + }; + + it('is an instance', async () => { + const tree = await createBinaryTree(); + expect(tree).toBeInstanceOf(BinaryBPlusTree); + }); + + it('entries can be added & found', async () => { + const tree = await createBinaryTree(); + + // Add 1 key + const testRecordPointer = 
[1,2,3,4]; + await tree.add('key', testRecordPointer); + + // Lookup the entry & check its value + const value = await tree.find('key') as BinaryBPlusTreeLeafEntryValue; + expect(value).not.toBeNull(); + for (let i = 0; i < testRecordPointer.length; i++) { + expect(value.recordPointer[i]).toEqual(testRecordPointer[i]); + } + }); + + describe('entries', () => { + + const TEST_KEYS = 1000; // This simulates the amount of children to be added to an AceBase node + const keys = [] as string[]; + // Create random keys + for (let i = 0; i < TEST_KEYS; i++) { + keys.push(ID.generate()); + } + + let tree: BinaryBPlusTree; + beforeAll(async () => { + // Create tree + tree = await createBinaryTree(); + + let rebuilds = 0; + + // Add keys 1 by 1 + for (let i = 0; i < keys.length; i++) { + const key = keys[i]; + const recordPointer = Array.from(key).map(ch => ch.charCodeAt(0)); // Fake (unique) recordpointer + try { + await tree.add(key, recordPointer); + } + catch(err) { + // While the tree grows, this happens. Rebuild the tree and try again + rebuilds++; + tree = await rebuildTree(tree); + await tree.add(key, recordPointer); // Retry add + } + } + + console.log(`Created a tree with ${keys.length} entries, ${rebuilds} rebuilds were needed`); + }); + + // Lookup all added entries + it('can be found', async () => { + for (let i = 0; i < keys.length; i++) { + const key = keys[i]; + const value = await tree.find(key); + expect(value).not.toBeNull(); + } + }); + + // Iterate the leafs from start to end, confirm the right order + it('can be iterated in ascending order', async () => { + let leaf = await tree.getFirstLeaf(); + expect(leaf).not.toBeNull(); + let lastEntry, count = 0; + while (leaf) { + for (let i = 0; i < leaf.entries.length; i++) { + count++; + const entry = leaf.entries[i]; + if (i > 0) { + // key > last + expect(entry.key > lastEntry.key).toBeTrue(); + } + lastEntry = entry; + } + leaf = leaf.getNext ? await leaf.getNext() : null; + } + expect(count).toEqual(keys.length); + }); + + // Iterate the leafs from end to start + it('can be iterated in descending order', async () => { + let leaf = await tree.getLastLeaf(); + expect(leaf).not.toBeNull(); + let count = 0; + let lastEntry: BinaryBPlusTreeLeafEntry; + while (leaf) { + for (let i = leaf.entries.length - 1; i >= 0 ; i--) { + count++; + const entry = leaf.entries[i]; + if (i < leaf.entries.length - 1) { + // key < last + expect(entry.key < lastEntry.key).toBeTrue(); + } + lastEntry = entry; + } + leaf = leaf.getPrevious ? 
await leaf.getPrevious() : null; + } + expect(count).toEqual(keys.length); + }); + + describe('can be queried', () => { + + const options = { entries: true, keys: true, values: true, count: true }; + + const checkResults = ( + result: Awaited>, + expectedKeys: string[], + log: string + ) => { + log && console.log(log); + expect(result.keyCount).toEqual(expectedKeys.length); + expect(result.valueCount).toEqual(expectedKeys.length); // unique tree, 1 value per key + expect(result.entries.length).toEqual(expectedKeys.length); + expect(result.keys.length).toEqual(expectedKeys.length); + expect(result.values.length).toEqual(expectedKeys.length); + const allFound = expectedKeys.every(key => result.keys.includes(key)); + expect(allFound).toBeTrue(); + }; + + it('with "==" operator', async () => { + // Find first entry + let result = await tree.search('==', keys[0], options); + checkResults(result, [keys[0]], `== "${keys[0]}": expecting 1 result`); + + // Find a random entry + const randomKey = keys[Math.floor(Math.random() * keys.length)]; + result = await tree.search('==', randomKey, options); + checkResults(result, [randomKey], `== "${randomKey}": expecting 1 result`); + }); + + it('with "!=" operator', async () => { + // Find all except 1 random entry + const excludeIndex = Math.floor(Math.random() * keys.length); + const excludeKey = keys[excludeIndex]; + const expectedKeys = keys.slice(0, excludeIndex).concat(keys.slice(excludeIndex+1)); + const result = await tree.search('!=', excludeKey, options); + checkResults(result, expectedKeys, `!= "${excludeKey}": expecting ${expectedKeys.length} results`); + }); + + it('with "<" operator', async () => { + // Find first 10 keys + const expectedKeys = keys.slice(0, 11); // Take 11, use last as < + const lessThanKey = expectedKeys.pop(); + const result = await tree.search('<', lessThanKey, options); + checkResults(result, expectedKeys, `< "${lessThanKey}": expecting ${expectedKeys.length} results`); + }); + + it('with "<=" operator', async () => { + // Find first 10 keys + const expectedKeys = keys.slice(0, 10); + const key = expectedKeys.slice(-1)[0]; + const result = await tree.search('<=', key, options); + checkResults(result, expectedKeys, `<= "${key}": expecting ${expectedKeys.length} results`); + }); + + it('with ">" operator', async () => { + // Find last 10 keys + const expectedKeys = keys.slice(-11); // Take 11, use first as > + const greaterThanKey = expectedKeys.shift(); + const result = await tree.search('>', greaterThanKey, options); + checkResults(result, expectedKeys, `> "${greaterThanKey}": expecting ${expectedKeys.length} results`); + }); + + it('with ">=" operator', async () => { + // Find last 10 keys + const expectedKeys = keys.slice(-10); + const result = await tree.search('>=', expectedKeys[0], options); + checkResults(result, expectedKeys, `>= "${expectedKeys[0]}": expecting ${expectedKeys.length} results`); + }); + + it('with "like" operator', async () => { + // All keys that start with the same 10 characters as the first key + let str = keys[0].slice(0, 10); + let expectedKeys = keys.filter(key => key.startsWith(str)); + let result = await tree.search('like', `${str}*`, options); + checkResults(result, expectedKeys, `like "${str}*": expecting ${expectedKeys.length} keys to start with "${str}"`); + + // All keys that end with the same 3 last characters of the first key + str = keys[0].slice(-3); + expectedKeys = keys.filter(key => key.endsWith(str)); + result = await tree.search('like', `*${str}`, options); + 
checkResults(result, expectedKeys, `like "*${str}": expecting ${expectedKeys.length} keys to end with "${str}"`); + + // All keys that contain the last 2 characters of the first key + str = keys[0].slice(-2); + expectedKeys = keys.filter(key => key.includes(str)); + result = await tree.search('like', `*${str}*`, options); + checkResults(result, expectedKeys, `like "*${str}*": expecting ${expectedKeys.length} keys to contain "${str}"`); + }); + + it('with "between" operator', async () => { + // Find custom range of keys + const [startIndex, endIndex] = [Math.floor(Math.random() * (keys.length-1)), Math.floor(Math.random() * (keys.length-1))].sort((a,b) => a < b ? -1 : 1); + const expectedKeys = startIndex === endIndex ? [keys[startIndex]] : keys.slice(startIndex, endIndex); + const firstKey = expectedKeys[0], lastKey = expectedKeys.slice(-1)[0]; + + let result = await tree.search('between', [firstKey, lastKey], options); + checkResults(result, expectedKeys, `between "${firstKey}" and "${lastKey}": expecting ${expectedKeys.length} results`); + + result = await tree.search('between', [lastKey, firstKey], options); + checkResults(result, expectedKeys, `between "${lastKey}" and "${firstKey}" (reversed): expecting ${expectedKeys.length} results`); + }); + + it('with "!between" operator', async () => { + // Find custom range of keys (before and after given indexes) + const [startIndex, endIndex] = [Math.floor(Math.random() * keys.length), Math.floor(Math.random() * keys.length)].sort((a,b) => a-b); // eg: [2,6] + const expectedKeys = keys.filter((key, index) => index < startIndex || index > endIndex); // eg: expect [1,2, 7,8,9] for indexes 2 and 6 of keys [1,2,3,4,5,6,7,8,9] + const firstKey = keys[startIndex], lastKey = keys[endIndex]; // eg: 3 and 6 + + let result = await tree.search('!between', [firstKey, lastKey], options); + checkResults(result, expectedKeys, `!between "${firstKey}" and "${lastKey}": expecting ${expectedKeys.length} results`); + + result = await tree.search('!between', [lastKey, firstKey], options); + checkResults(result, expectedKeys, `!between "${lastKey}" and "${firstKey}" (reversed): expecting ${expectedKeys.length} results`); + }); + + it('with "in" operator', async () => { + // Find 5 random keys + const r = () => Math.floor(Math.random() * keys.length); + const randomIndexes = [r(), r(), r(), r(), r()].reduce((indexes, index) => ((!indexes.includes(index) ? indexes.push(index) : 1), indexes), []); + const expectedKeys = randomIndexes.map(index => keys[index]); + const result = await tree.search('in', expectedKeys, options); + checkResults(result, expectedKeys, `in [${expectedKeys.map(key => `"${key}"`).join(',')}]: expecting ${expectedKeys.length} results`); + }); + + it('with "!in" operator', async () => { + // Find 5 random keys + const r = () => Math.floor(Math.random() * keys.length); + const randomIndexes = [r(), r(), r(), r(), r()].reduce((indexes, index) => ((!indexes.includes(index) ? indexes.push(index) : 1), indexes), []); + const blacklistedKeys = randomIndexes.map(index => keys[index]); + const expectedKeys = keys.reduce((allowed, key) => (!blacklistedKeys.includes(key) ? 
allowed.push(key) : 1) && allowed, []); + const result = await tree.search('!in', blacklistedKeys, options); + checkResults(result, expectedKeys, `!in [${blacklistedKeys.map(key => `"${key}"`).join(',')}]: expecting ${expectedKeys.length} results`); + }); + + it('with "exists" operator', async () => { + // Finds all keys with a defined value, same as search("!=", undefined) + // --> all keys in our test + const result = await tree.search('exists', undefined, options); + checkResults(result, keys, `exists: expecting ${keys.length} (all) results`); + }); + + it('with "!exists" operator', async () => { + // Finds results for key with undefined value, same as search("==", undefined) + // --> no keys in our test + const result = await tree.search('!exists', undefined, options); + checkResults(result, [], `!exists: expecting NO results`); + }); + + it('with BlacklistingSearchOperator', async () => { + const keysToBlacklist = keys.filter(() => Math.random() > 0.25); // blacklist ~75% + const expectedKeys = keys.filter(key => !keysToBlacklist.includes(key)); + + const blacklisted = [] as BinaryBPlusTreeLeafEntry[]; + const op = new BlacklistingSearchOperator(entry => { + if (keysToBlacklist.includes(entry.key as string)) { + blacklisted.push(entry); + return entry.values; // Return all values (1) as array to be blacklisted + } + }); + + let result = await tree.search(op, undefined, options); + checkResults(result, expectedKeys, `BlacklistingSearchOperator: expecting ${expectedKeys.length} results`); + expect(blacklisted.length).toEqual(keysToBlacklist.length); + + // Run again, using the previous results as filter. This should yield the same results + // No additional entries should have been blacklisted (blacklisted.length should remain the same!) + const filteredOptions = { filter: result.entries }; + Object.assign(filteredOptions, options); + result = await tree.search(op, undefined, filteredOptions); + expect(blacklisted.length).toEqual(keysToBlacklist.length); + checkResults(result, expectedKeys, `BlacklistingSearchOperator + filter: expecting ${expectedKeys.length} results`); + + // Run again, using blacklisted results as filter. 
This should yield no results + filteredOptions.filter = blacklisted; + result = await tree.search(op, undefined, filteredOptions); + expect(blacklisted.length).toEqual(keysToBlacklist.length); + checkResults(result, [], `BlacklistingSearchOperator + blacklist filter: expecting 0 results`); + }); + + it('with "matches" operator', async () => { + const regex = /[a-z]{6}/; + const expectedKeys = keys.filter(key => regex.test(key)); + const result = await tree.search('matches', regex, options); + checkResults(result, expectedKeys, `matches /${regex.source}/${regex.flags}: expecting ${expectedKeys.length} results`); + }); + + it('with "!matches" operator', async () => { + const regex = /[a-z]{6}/; + const expectedKeys = keys.filter(key => !regex.test(key)); + const result = await tree.search('!matches', regex, options); + checkResults(result, expectedKeys, `!matches /${regex.source}/${regex.flags}: expecting ${expectedKeys.length} results`); + }); + + }); + + afterAll(async () => { + // Remove all entries + let rebuilds = 0; + for (let i = 0; i < keys.length; i++) { + const key = keys[i]; + try { + await tree.remove(key); + } + catch(err) { + rebuilds++; + tree = await rebuildTree(tree); + await tree.remove(key); // Try again + } + } + + console.log(`Removed ${keys.length} entries from tree, ${rebuilds} rebuilds were needed`); + + // Expect the tree to be empty now + const leafStats = await tree.getFirstLeaf({ stats: true }); + expect(leafStats.entries.length).toEqual(0); + }); + }); + + it('returns null for keys not present', async () => { + const tree = await createBinaryTree(); + const value = await tree.find('unknown'); + expect(value).toBeNull(); + }); + + it('must not accept duplicate keys', async () => { + const tree = await createBinaryTree(); + await tree.add('unique_key', [1]); + await expectAsync(tree.add('unique_key', [2])).toBeRejected(); + }); +}); diff --git a/src/btree/binary-tree.ts b/src/btree/binary-tree.ts index 7dad614..c0ff138 100644 --- a/src/btree/binary-tree.ts +++ b/src/btree/binary-tree.ts @@ -1,4 +1,4 @@ -import { DebugLogger, Utils } from 'acebase-core'; +import { DebugLogger, LoggerPlugin, Utils } from 'acebase-core'; import { readByteLength, readSignedOffset, Uint8ArrayBuilder, writeByteLength, writeSignedOffset } from '../binary'; import { DetailedError } from '../detailed-error'; import { ThreadSafe, ThreadSafeLock } from '../thread-safe'; @@ -68,7 +68,7 @@ export class BinaryBPlusTree { private _fst: Array<{ index: number; length: number }>; public id: string; - private debug: DebugLogger; + private logger: LoggerPlugin; public info: { headerLength: number; @@ -110,12 +110,12 @@ export class BinaryBPlusTree { /** * logger instance */ - debug: DebugLogger; + logger: LoggerPlugin; }) { this._chunkSize = init.chunkSize ?? 
1024; this._autoGrow = false; this.id = init.id; - this.debug = init.debug; + this.logger = init.logger; if (init.readFn instanceof Array) { let data = init.readFn; if (BPlusTree.debugBinary) { @@ -153,7 +153,7 @@ export class BinaryBPlusTree { } static async test(data: number[], debug: DebugLogger) { - const tree = new BinaryBPlusTree({ readFn: data, debug }); + const tree = new BinaryBPlusTree({ readFn: data, logger: debug }); let leaf = await tree.getFirstLeaf(); while (leaf) { @@ -174,7 +174,7 @@ export class BinaryBPlusTree { set autoGrow(grow: boolean) { this._autoGrow = grow === true; // if (this._autoGrow) { - // this.debug.warn('autoGrow enabled for binary tree'); + // this.logger.warn('autoGrow enabled for binary tree'); // } } @@ -207,7 +207,7 @@ export class BinaryBPlusTree { metadataKeys: [], }; // if (!this.info.hasLargePtrs) { - // this.debug.warn(`Warning: tree "${this.id}" is read-only because it contains small ptrs. it needs to be rebuilt`); + // this.logger.warn(`Warning: tree "${this.id}" is read-only because it contains small ptrs. it needs to be rebuilt`); // } let additionalHeaderBytes = 0; if (this.info.hasFillFactor) { additionalHeaderBytes += 1; } @@ -576,7 +576,7 @@ export class BinaryBPlusTree { // const oldLeafExtFreeBytes = leaf.extData.freeBytes; leaf.extData.freeBytes -= requiredSpace.bytes; // leaf.extData.length - (newOffset + requiredSpace.length); - // this.debug.log(`addValue :: moving ext_block from index ${oldIndex} to ${entry.extData.index}, leaf's ext_data_free_bytes reduces from ${oldLeafExtFreeBytes} to ${leaf.extData.freeBytes} bytes`) + //this.logger.info(`addValue :: moving ext_block from index ${oldIndex} to ${entry.extData.index}, leaf's ext_data_free_bytes reduces from ${oldLeafExtFreeBytes} to ${leaf.extData.freeBytes} bytes`) extBlockMoves = true; } } @@ -621,8 +621,8 @@ export class BinaryBPlusTree { // const displayIndex = index => (index + 4096).toString(16).toUpperCase(); // const displayBytes = bytes => '[' + bytes.map(b => b.toString(16)).join(',').toUpperCase() + ']'; try { - // this.debug.log(`TreeWrite:ext_block_length(${entry.extData.length}), ext_block_free_length(${entry.extData.freeBytes})${extBlockMoves ? ', value_list' : ''} :: ${extDataBlock.length} bytes at index ${displayIndex(self.index)}: ${displayBytes(extDataBlock.slice(0,4))}, ${displayBytes(extDataBlock.slice(4,8))}${extBlockMoves ? ', [...]' : ''}`); - // this.debug.log(`TreeWrite:value_list_length(${self.totalValues + 1}) :: ${valueListLengthData.length} bytes at index ${displayIndex(self._listLengthIndex)}: ${displayBytes(valueListLengthData)}`); + //this.logger.info(`TreeWrite:ext_block_length(${entry.extData.length}), ext_block_free_length(${entry.extData.freeBytes})${extBlockMoves ? ', value_list' : ''} :: ${extDataBlock.length} bytes at index ${displayIndex(self.index)}: ${displayBytes(extDataBlock.slice(0,4))}, ${displayBytes(extDataBlock.slice(4,8))}${extBlockMoves ? 
', [...]' : ''}`); + //this.logger.info(`TreeWrite:value_list_length(${self.totalValues + 1}) :: ${valueListLengthData.length} bytes at index ${displayIndex(self._listLengthIndex)}: ${displayBytes(valueListLengthData)}`); const promises = [ // Write header (ext_block_length, ext_block_free_length) or entire ext_data_block to its index: tree._writeFn(extDataBlock, self.index), @@ -634,7 +634,7 @@ export class BinaryBPlusTree { // Write new ext_data_ptr in leaf entry's val_data let writeBytes = [0,0,0,0]; writeByteLength(writeBytes, 0, extDataOffset); - // this.debug.log(`TreeWrite:ext_data_ptr(${extDataOffset}) :: ${writeBytes.length} bytes at index ${displayIndex(self._listLengthIndex + 4)}: ${displayBytes(writeBytes)}`); + //this.logger.info(`TreeWrite:ext_data_ptr(${extDataOffset}) :: ${writeBytes.length} bytes at index ${displayIndex(self._listLengthIndex + 4)}: ${displayBytes(writeBytes)}`); let p = tree._writeFn(writeBytes, self._listLengthIndex + 4); promises.push(p); @@ -645,13 +645,13 @@ export class BinaryBPlusTree { + 4; // ext_byte_length writeBytes = [0,0,0,0]; writeByteLength(writeBytes, 0, leaf.extData.freeBytes); - // this.debug.log(`TreeWrite:ext_free_byte_length(${leaf.extData.freeBytes}) :: ${writeBytes.length} bytes at index ${displayIndex(leafExtFreeBytesIndex)}: ${displayBytes(writeBytes)}`); + //this.logger.info(`TreeWrite:ext_free_byte_length(${leaf.extData.freeBytes}) :: ${writeBytes.length} bytes at index ${displayIndex(leafExtFreeBytesIndex)}: ${displayBytes(writeBytes)}`); p = tree._writeFn(writeBytes, leafExtFreeBytesIndex); promises.push(p); } else { // write new value: - // this.debug.log(`TreeWrite:value :: ${extValueData.length} bytes at index ${displayIndex(newValueIndex)}: ${displayBytes(extValueData)}`); + //this.logger.info(`TreeWrite:value :: ${extValueData.length} bytes at index ${displayIndex(newValueIndex)}: ${displayBytes(extValueData)}`); const p = tree._writeFn(extValueData, newValueIndex); promises.push(p); } @@ -661,12 +661,12 @@ export class BinaryBPlusTree { // TEST // try { - // this.debug.log(`Values for entry '${entry.key}' updated: ${self.totalValues} values`); + // this.logger.info(`Values for entry '${entry.key}' updated: ${self.totalValues} values`); // await tree._testTree(); - // this.debug.log(`Successfully added value to entry '${entry.key}'`); + // this.logger.info(`Successfully added value to entry '${entry.key}'`); // } // catch (err) { - // this.debug.error(`Tree is broken after updating entry '${entry.key}': ${err.message}`); + // this.logger.error(`Tree is broken after updating entry '${entry.key}': ${err.message}`); // } } finally { @@ -678,7 +678,7 @@ export class BinaryBPlusTree { // await self.loadValues(); // } // catch (err) { - // this.debug.error(`Values are broken after updating entry '${entry.key}': ${err.message}`); + // this.logger.error(`Values are broken after updating entry '${entry.key}': ${err.message}`); // } }, @@ -782,8 +782,8 @@ export class BinaryBPlusTree { // If that fails too, move on to the next leaf until we get a succesful read. Using this strategy, data referenced from // broken leaf(s) will be skipped, following data will be able to be read again. 
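
The repair strategy described in the comment above, reduced to a sketch with simplified leaf shapes. The real code climbs `parentNode` entries to locate the next readable leaf; `skipToLeafAfter` is a hypothetical stand-in for that lookup:

```ts
type LeafLike = { entries: { key: string }[]; getNext(): Promise<LeafLike | null> };

async function readAllWithRepair(
    firstLeaf: LeafLike,
    skipToLeafAfter: (key: string) => Promise<LeafLike | null>, // hypothetical stand-in
    warn: (msg: string) => void,
): Promise<string[]> {
    const keys: string[] = [];
    let leaf: LeafLike | null = firstLeaf;
    while (leaf) {
        keys.push(...leaf.entries.map(e => e.key));
        try {
            leaf = await leaf.getNext();
        }
        catch (err) {
            // Broken leaf: data it referenced is skipped, but traversal resumes at the
            // first readable leaf holding keys greater than the last key read.
            const lastKey = keys[keys.length - 1];
            warn(`repair caught error: ${(err as Error).message}; resuming at key > "${lastKey}"`);
            leaf = await skipToLeafAfter(lastKey);
        }
    }
    return keys;
}
```
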
const lastKey = leaf.entries.slice(-1)[0].key; - this.debug.warn(`B+Tree repair caught error: ${err.message}`); - this.debug.warn(`B+Tree repair starting at key >= "${lastKey}"`); + this.logger.warn(`B+Tree repair caught error: ${err.message}`); + this.logger.warn(`B+Tree repair starting at key >= "${lastKey}"`); const currentLeaf = await (async () => { if (leaf.parentNode) { return leaf; } try { @@ -808,7 +808,7 @@ export class BinaryBPlusTree { } if (!currentLeaf.parentNode.parentNode) { // parent node has no parent itself, there is no next leaf - this.debug.warn(`B+Tree repair: no more leafs in tree`); + this.logger.warn(`B+Tree repair: no more leafs in tree`); return null; } } @@ -850,11 +850,11 @@ export class BinaryBPlusTree { const nodeInfo = currentNodeEntry ? await currentNodeEntry.getLtChild() : await currentNode.getGtChild(); assert(nodeInfo.isLeaf, 'not a leaf!'); const nextLeaf = new BinaryBPlusTreeLeaf(nodeInfo); - this.debug.warn(`B+Tree repair: using leaf for key ${entryKey} at index ${nextLeaf.index}`); + this.logger.warn(`B+Tree repair: using leaf for key ${entryKey} at index ${nextLeaf.index}`); return nextLeaf; } catch (err) { - this.debug.warn( + this.logger.warn( `B+Tree repair: failed to load leaf for key ${entryKey} at index ${currentNodeEntry?.ltChildIndex ?? currentNode.gtChildIndex}: ${err.message}. ` + `Proceeding with next node entry.` ); @@ -863,7 +863,7 @@ export class BinaryBPlusTree { } } // no more nodes - this.debug.warn(`B+Tree repair: there are no more leafs to load`); + this.logger.warn(`B+Tree repair: there are no more leafs to load`); return null; } throw err; @@ -1215,13 +1215,13 @@ export class BinaryBPlusTree { // }, []).sort(binaryCompare) : null; - let totalMatches = 0; - let totalAdded = 0; + // let totalMatches = 0; + // let totalAdded = 0; const valuePromises = [] as Promise[]; const emptyValue: LeafEntryRecordPointer = []; const add = (entry: BinaryBPlusTreeLeafEntry): Promise | any => { - totalMatches += entry.totalValues; + // totalMatches += entry.totalValues; const requireValues = filterRecordPointers || include.entries || include.values || op instanceof BlacklistingSearchOperator; if (requireValues && typeof entry.extData === 'object' && !entry.extData.loaded) { // We haven't got its values yet @@ -1387,13 +1387,13 @@ export class BinaryBPlusTree { results.keyCount++; results.valueCount += entry.totalValues; } - totalAdded += entry.totalValues; + // totalAdded += entry.totalValues; }; // const t1 = Date.now(); // const ret = () => { // const t2 = Date.now(); - // this.debug.log(`tree.search [${op} ${param}] took ${t2-t1}ms, matched ${totalMatches} values, returning ${totalAdded} values in ${results.entries.length} entries`); + // this.logger.info(`tree.search [${op} ${param}] took ${t2-t1}ms, matched ${totalMatches} values, returning ${totalAdded} values in ${results.entries.length} entries`); // return results; // }; const ret = () => { @@ -1880,12 +1880,12 @@ export class BinaryBPlusTree { if (!this._fst) { this._fst = []; } if (index + length === this.info.byteLength - this.info.freeSpace) { // Cancel free space allocated at the end of the file - // this.debug.log(`Freeing ${length} bytes from index ${index} (at end of file)`); + //this.logger.info(`Freeing ${length} bytes from index ${index} (at end of file)`); this.info.freeSpace += length; await this._writeFn(writeByteLength([], 0, this.info.freeSpace), this.info.freeSpaceIndex); // free_byte_length } else { - // this.debug.log(`Freeing ${length} bytes from index ${index} to 
${index+length}`); + //this.logger.info(`Freeing ${length} bytes from index ${index} to ${index+length}`); this._fst.push({ index, length }); // Normalize fst by joining adjacent blocks @@ -2066,7 +2066,7 @@ export class BinaryBPlusTree { const freedBytes = leaf.length + leaf.extData.length; - // this.debug.log(`Rebuilding leaf for entries "${leaf.entries[0].key}" to "${leaf.entries[leaf.entries.length-1].key}"`); + //this.logger.info(`Rebuilding leaf for entries "${leaf.entries[0].key}" to "${leaf.entries[leaf.entries.length-1].key}"`); options.applyChanges && options.applyChanges(newLeaf); // Start transaction @@ -2077,7 +2077,7 @@ export class BinaryBPlusTree { name: 'new leaf', action: async () => { const result = await this._writeLeaf(newLeaf); - // this.debug.log(`new leaf for entries "${newLeaf.entries[0].key}" to "${newLeaf.entries.slice(-1)[0].key}" was written successfully at index ${newLeaf.index} (used to be at ${leaf.index})`); + //this.logger.info(`new leaf for entries "${newLeaf.entries[0].key}" to "${newLeaf.entries.slice(-1)[0].key}" was written successfully at index ${newLeaf.index} (used to be at ${leaf.index})`); // // TEST leaf // const leaf = await this._findLeaf(newLeaf.entries[0].key); @@ -2232,7 +2232,7 @@ export class BinaryBPlusTree { const allocated = await this._requestFreeSpace(newNodeLength); - // this.debug.log(`Splitting node "${node.entries[0].key}" to "${node.entries.slice(-1)[0].key}", cutting at "${movingEntries[0].key}"`); + //this.logger.info(`Splitting node "${node.entries[0].key}" to "${node.entries.slice(-1)[0].key}", cutting at "${movingEntries[0].key}"`); // Create new node const newNode = new BinaryBPlusTreeNode({ @@ -2258,7 +2258,7 @@ export class BinaryBPlusTree { // newEntry.ltChildIndex = childIndex - newNode.index; // newNode.entries.push(newEntry); // }); - // this.debug.log(`Creating new node for ${movingEntries.length} entries`); + //this.logger.info(`Creating new node for ${movingEntries.length} entries`); // Update parent node entry pointing to this node const oldParentNode = new BinaryBPlusTreeNode({ @@ -2352,7 +2352,7 @@ export class BinaryBPlusTree { async _splitLeaf(leaf: BinaryBPlusTreeLeaf, options: { nextLeaf?: BinaryBPlusTreeLeaf; keepEntries?: number; cancelCallback?: () => unknown } = { nextLeaf: null, keepEntries: 0, cancelCallback: null }) { // split leaf if it could not be written. 
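
The free-space bookkeeping in the hunks above either shrinks the file's trailing free space or pushes a `{ index, length }` block onto the in-memory FST, which is then normalized by joining adjacent blocks. A minimal sketch of that merge step, with simplified shapes:

```ts
type FreeBlock = { index: number; length: number };

function normalizeFst(fst: FreeBlock[]): FreeBlock[] {
    // Sort blocks by file offset, then merge blocks that touch each other
    const sorted = [...fst].sort((a, b) => a.index - b.index);
    const merged: FreeBlock[] = [];
    for (const block of sorted) {
        const last = merged[merged.length - 1];
        if (last && last.index + last.length === block.index) {
            last.length += block.length; // adjacent: join into one block
        }
        else {
            merged.push({ ...block });
        }
    }
    return merged;
}

// normalizeFst([{index:8,length:4},{index:0,length:8},{index:20,length:4}])
// -> [{index:0,length:12},{index:20,length:4}]
```
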
- // this.debug.log('splitLeaf'); + //this.logger.info('splitLeaf'); // There needs to be enough free space to store another leaf the size of current leaf if (typeof options.cancelCallback !== 'function') { @@ -2440,7 +2440,7 @@ export class BinaryBPlusTree { const allocated = await this._requestFreeSpace(newLeafLength + newLeafExtDataLength); - // this.debug.log(`Splitting leaf "${leaf.entries[0].key}" to "${leaf.entries.slice(-1)[0].key}", cutting at "${movingEntries[0].key}"`); + //this.logger.info(`Splitting leaf "${leaf.entries[0].key}" to "${leaf.entries.slice(-1)[0].key}", cutting at "${movingEntries[0].key}"`); const nextLeaf = options.nextLeaf; @@ -2473,7 +2473,7 @@ export class BinaryBPlusTree { // move entries leaf.entries.splice(-movingEntries.length); newLeaf.entries.push(...movingEntries); - // this.debug.log(`Creating new leaf for ${movingEntries.length} entries`); + //this.logger.info(`Creating new leaf for ${movingEntries.length} entries`); // Update parent node entry pointing to this leaf const oldParentNode = new BinaryBPlusTreeNode({ @@ -2582,8 +2582,8 @@ export class BinaryBPlusTree { // leaf = await leaf.getNext(); // keys.push(...leaf.entries.map(e => e.key)); // } - // this.debug.warn(`TREE TEST: testing ${keys.length} keys`); - // // this.debug.warn(keys); + // this.logger.warn(`TREE TEST: testing ${keys.length} keys`); + // //this.logger.warn(keys); // for (let i = 0; i < keys.length - 1; i++) { // const key1 = keys[i], key2 = keys[i + 1]; // assert(_isLess(key1, key2), `Key "${key1}" must be smaller than "${key2}"`); @@ -2594,7 +2594,7 @@ export class BinaryBPlusTree { // const entry = leaf?.entries.find(e => e.key === key) // assert(entry, `Key "${key}" must be in leaf`); // } - // this.debug.warn(`TREE TEST: testing ext_data`); + // this.logger.warn(`TREE TEST: testing ext_data`); // leaf = await this._getFirstLeaf(); // while (leaf) { // if (leaf.hasExtData) { @@ -2617,7 +2617,7 @@ export class BinaryBPlusTree { // } // leaf = leaf.hasNext ? await leaf.getNext() : null; // } - // this.debug.warn(`TREE TEST SUCCESSFUL`); + // this.logger.warn(`TREE TEST SUCCESSFUL`); // } async add(key: NodeEntryKeyType, recordPointer: LeafEntryRecordPointer, metadata?: LeafEntryMetaData) { @@ -2662,7 +2662,7 @@ export class BinaryBPlusTree { catch(err) { // Something went wrong adding the value. ext_data_block is probably full // and needs to grow - // this.debug.log(`Leaf rebuild necessary - unable to add value to key "${key}": ${err.message}`); + //this.logger.info(`Leaf rebuild necessary - unable to add value to key "${key}": ${err.message}`); if (err.code !== 'max-extdata-size-reached') { throw err; @@ -2760,7 +2760,7 @@ export class BinaryBPlusTree { // .then(() => { // // TEST the tree adjustments by getting the leaf with the added key, // // and then previous and next leafs! - // this.debug.warn(`TESTING leaf adjustment after adding "${key}". Remove code when all is well!`); + // this.logger.warn(`TESTING leaf adjustment after adding "${key}". Remove code when all is well!`); // return this._findLeaf(key); // }) // .then(leaf => { @@ -2836,18 +2836,18 @@ export class BinaryBPlusTree { // Leaf too large to save, must split const cancelCallback = () => undo.splice(0).reverse().forEach(fn => fn()); const keepEntries = leaf.hasNext ? 
0 : this.info.entriesPerNode; - // this.debug.log('*process _splitLeaf'); + //this.logger.info('*process _splitLeaf'); await this._splitLeaf(leaf, { cancelCallback, keepEntries }); } else if (leaf.entries.length > 0 || !leaf.parentNode) { // Leaf has entries or is a single-leaf tree try { - // this.debug.log('*process _writeLeaf'); + //this.logger.info('*process _writeLeaf'); await this._writeLeaf(leaf); } catch (err) { // Leaf had no space left, try rebuilding it with more space - // this.debug.log('*process _rebuildLeaf'); + //this.logger.info('*process _rebuildLeaf'); await this._rebuildLeaf(leaf, { growData: true, growExtData: true, @@ -2857,7 +2857,7 @@ export class BinaryBPlusTree { } else if (leaf.parentNode.entries.length > 1) { // Remove leaf - // this.debug.log('*process _removeLeaf'); + //this.logger.info('*process _removeLeaf'); await this._removeLeaf(leaf); } else { @@ -2944,7 +2944,7 @@ export class BinaryBPlusTree { // debugRemoved.push(entry); leaf.entries.splice(entryIndex, 1); undo.push(() => { - // this.debug.log(`Undo remove ${entry.key}`); + //this.logger.info(`Undo remove ${entry.key}`); leaf.entries.splice(entryIndex, 0, entry); }); // if (entryIndex === 0 && !leaf.parentEntry) { @@ -3001,7 +3001,7 @@ export class BinaryBPlusTree { // for (let removedEntry of debugRemoved) { // const leaf = await this._findLeaf(removedEntry.key); // if (leaf.entries.find(e => _isEqual(e.key, removedEntry.key))) { - // this.debug.log(debugThrownError); + // this.logger.info(debugThrownError); // debugger; // } // } @@ -3360,7 +3360,7 @@ export class BinaryBPlusTree { } }; // let leafsSeen = 0; - // this.debug.log(`[${Date.toString()}] Starting tree rebuild`); + //this.logger.info(`[${Date.toString()}] Starting tree rebuild`); try { const getLeafStartKeys = async (entriesPerLeaf: number) => { mark('getLeafStartKeys.start'); @@ -3374,7 +3374,7 @@ export class BinaryBPlusTree { while (leaf) { mark(`getLeafStartKeys.loop${loop++}`); // leafsSeen++; - // this.debug.log(`Processing leaf with ${leaf.entries.length} entries, total=${totalEntries}`); + //this.logger.info(`Processing leaf with ${leaf.entries.length} entries, total=${totalEntries}`); // leafStats.debugEntries.push(...leaf.entries); if (leaf.entries.length === 0) { @@ -3415,7 +3415,7 @@ export class BinaryBPlusTree { } } - // this.debug.log(`Processed ${leafsSeen} leafs in source tree`); + //this.logger.info(`Processed ${leafsSeen} leafs in source tree`); leaf = leaf.getNext ? 
await leaf.getNext(options.repairMode) : null; } mark('getLeafStartKeys.end'); @@ -3476,7 +3476,7 @@ export class BinaryBPlusTree { allocatedBytes: options.allocatedBytes, keepFreeSpace: options.keepFreeSpace, reserveSpaceForNewEntries: options.reserveSpaceForNewEntries, - debug: this.debug, + logger: this.logger, }); mark('tree.createEnd'); @@ -3493,13 +3493,13 @@ export class BinaryBPlusTree { } finally { mark('end'); - // if (perf) { - // // inspect perf here - // this.debug.log(`[perf] tree rebuild took ${measure('start', 'end')}ms`); - // this.debug.log(`[perf] getLeafStartKeys: ${measure('getLeafStartKeys.start', 'getLeafStartKeys.end')}ms`); - // this.debug.log(`[perf] getEntries: ${measure('getEntries.first', 'getEntries.last')}ms`); - // this.debug.log(`[perf] tree.create: ${measure('tree.createStart', 'tree.createEnd')}ms`); - // } + if (perf) { + // inspect perf here + this.logger.trace(`[perf] tree rebuild took ${measure('start', 'end')}ms`); + this.logger.trace(`[perf] getLeafStartKeys: ${measure('getLeafStartKeys.start', 'getLeafStartKeys.end')}ms`); + this.logger.trace(`[perf] getEntries: ${measure('getEntries.first', 'getEntries.last')}ms`); + this.logger.trace(`[perf] tree.create: ${measure('tree.createStart', 'tree.createEnd')}ms`); + } } } @@ -3526,9 +3526,9 @@ export class BinaryBPlusTree { keepFreeSpace?: boolean; /** @default 0 */ reserveSpaceForNewEntries?: number; - debug: DebugLogger; + logger: LoggerPlugin; }) { - const { writer, debug } = options; + const { writer, logger } = options; if (typeof options.maxEntriesPerNode !== 'number') { options.maxEntriesPerNode = 255; } if (typeof options.fillFactor !== 'number') { options.fillFactor = 100; } @@ -3679,7 +3679,7 @@ export class BinaryBPlusTree { emptyLeaf = true; } - // debug.log(`Writing leaf with ${entries.length} entries at index ${index}, keys range: ["${entries[0].key}", "${entries[entries.length-1].key}"]`) + // logger.debug(`Writing leaf with ${entries.length} entries at index ${index}, keys range: ["${entries[0].key}", "${entries[entries.length-1].key}"]`) // assert(entries.every((entry, index, arr) => index === 0 || _isMoreOrEqual(entry.key, arr[index-1].key)), 'Leaf entries are not sorted ok'); const i = leafIndexes.length; // assert(emptyLeaf || _isEqual(leafStartKeys[i], entries[0].key), `first entry for leaf has wrong key, must be ${leafStartKeys[i]}!`); @@ -3774,12 +3774,12 @@ export class BinaryBPlusTree { // // }); // // }); // // debugTree.reverse(); // Now top-down - // // debug.error(debugTree); + // // logger.error(debugTree); // // debugTree.forEach((nodes, levelIndex) => { // // let allEntries = nodes.map(node => `[${node.entries.map(entry => entry.key).join(',')}]`).join(' | ') - // // debug.error(`node level ${levelIndex}: ${allEntries}`); + // // logger.error(`node level ${levelIndex}: ${allEntries}`); // // }); - // // debug.error(`leafs: [${leafStartKeys.join(`..] | [`)}]`); + // // logger.error(`leafs: [${leafStartKeys.join(`..] 
| [`)}]`); // }) // Now adjust the header data & write free bytes @@ -3887,7 +3887,7 @@ export class BinaryBPlusTree { allocatedBytes?: number; /** @default true */ keepFreeSpace?: boolean; - debug: DebugLogger; + logger: LoggerPlugin; }) { // Steps: // 1 - loop through all entries to calculate leaf start keys @@ -3895,7 +3895,7 @@ export class BinaryBPlusTree { // 3 - create leafs // const entriesPerLeaf = Math.round(options.maxEntriesPerNode * (options.fillFactor / 100)); - const { debug } = options; + const { logger } = options; const getLeafStartKeys = async (entriesPerLeaf: number) => { options.treeStatistics.totalEntries = 0; @@ -3995,7 +3995,7 @@ export class BinaryBPlusTree { keepFreeSpace: options.keepFreeSpace, maxEntriesPerNode: options.maxEntriesPerNode, metadataKeys: options.metadataKeys, - debug, + logger: logger, }); } diff --git a/src/data-index/array-index.ts b/src/data-index/array-index.ts index ed0aa33..d660836 100644 --- a/src/data-index/array-index.ts +++ b/src/data-index/array-index.ts @@ -1,274 +1,274 @@ -import { BlacklistingSearchOperator } from '../btree'; -import { DataIndex } from './data-index'; -import { DataIndexOptions } from './options'; -import type { Storage } from '../storage'; -import { IndexableValue, IndexableValueOrArray } from './shared'; -import { VALUE_TYPES } from '../node-value-types'; -import { IndexQueryResults } from './query-results'; -import { IndexQueryStats } from './query-stats'; -import { ArrayIndexQueryHint } from './array-index-query-hint'; - -/** - * An array index allows all values in an array node to be indexed and searched - */ -export class ArrayIndex extends DataIndex { - constructor(storage: Storage, path: string, key: string, options: DataIndexOptions) { - if (key === '{key}') { throw new Error('Cannot create array index on node keys'); } - super(storage, path, key, options); - } - - // get fileName() { - // return super.fileName.slice(0, -4) + '.array.idx'; - // } - - get type() { - return 'array'; - } - - async handleRecordUpdate(path: string, oldValue: unknown, newValue: unknown) { - const tmpOld = oldValue !== null && typeof oldValue === 'object' && this.key in oldValue ? (oldValue as any)[this.key] : null; - const tmpNew = newValue !== null && typeof newValue === 'object' && this.key in newValue ? 
(newValue as any)[this.key] : null; - - let oldEntries: IndexableValue[]; - if (tmpOld instanceof Array) { - // Only use unique values - oldEntries = tmpOld.reduce((unique, entry) => { - !unique.includes(entry) && unique.push(entry); - return unique; - }, []); - } - else { oldEntries = []; } - if (oldEntries.length === 0) { - // Add undefined entry to indicate empty array - oldEntries.push(undefined); - } - - let newEntries: IndexableValue[]; - if (tmpNew instanceof Array) { - // Only use unique values - newEntries = tmpNew.reduce((unique, entry) => { - !unique.includes(entry) && unique.push(entry); - return unique; - }, []); - } - else { newEntries = []; } - if (newEntries.length === 0) { - // Add undefined entry to indicate empty array - newEntries.push(undefined); - } - const removed = oldEntries.filter(entry => !newEntries.includes(entry)); - const added = newEntries.filter(entry => !oldEntries.includes(entry)); - - const mutated = { old: {} as any, new: {} as any }; - Object.assign(mutated.old, oldValue); - Object.assign(mutated.new, newValue); - - const promises = [] as Promise[]; - removed.forEach(entry => { - mutated.old[this.key] = entry; - mutated.new[this.key] = null; - const p = super.handleRecordUpdate(path, mutated.old, mutated.new); - promises.push(p); - }); - added.forEach(entry => { - mutated.old[this.key] = null; - mutated.new[this.key] = entry; - const p = super.handleRecordUpdate(path, mutated.old, mutated.new); - promises.push(p); - }); - await Promise.all(promises); - } - - build() { - return super.build({ - addCallback: (add, array: IndexableValue[], recordPointer, metadata) => { - if (!(array instanceof Array) || array.length === 0) { - // Add undefined entry to indicate empty array - add(undefined, recordPointer, metadata); - return []; - } - - // index unique items only - array.reduce((unique, value) => { - !unique.includes(value) && unique.push(value); - return unique; - }, []).forEach(value => { - add(value, recordPointer, metadata); - }); - return array; - }, - valueTypes: [VALUE_TYPES.ARRAY], - }); - } - - static get validOperators() { - // This is the only special index that does not use prefixed operators - // because these can also be used to query non-indexed arrays (but slower, of course..) 
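
The `handleRecordUpdate` above (re-added below with the logger changes) first reduces the old and new arrays to their unique values, representing an empty array as a single `undefined` entry, and then derives the removed/added sets. That dedupe-and-diff step in isolation:

```ts
// Same idea as the update handler, extracted as a plain function for illustration.
function diffArrayIndexEntries(oldArr: unknown[] | null, newArr: unknown[] | null) {
    const unique = (arr: unknown[] | null) => {
        const u = Array.isArray(arr) ? [...new Set(arr)] : [];
        // An empty array is indexed as a single undefined entry
        return u.length === 0 ? [undefined] : u;
    };
    const oldEntries = unique(oldArr);
    const newEntries = unique(newArr);
    return {
        removed: oldEntries.filter(e => !newEntries.includes(e)),
        added: newEntries.filter(e => !oldEntries.includes(e)),
    };
}

// diffArrayIndexEntries(['egg', 'bacon'], ['bacon', 'toast'])
// -> { removed: ['egg'], added: ['toast'] }
```
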
- return ['contains', '!contains']; - } - get validOperators() { - return ArrayIndex.validOperators; - } - - async query(op: BlacklistingSearchOperator): Promise; - async query(op: string, val: IndexableValueOrArray, options?: { filter?: IndexQueryResults; }): Promise; - /** - * @param op "contains" or "!contains" - * @param val value to search for - */ - async query(op: string | BlacklistingSearchOperator, val?: IndexableValueOrArray, options?: { filter?: IndexQueryResults; }) { - if (op instanceof BlacklistingSearchOperator) { - throw new Error(`Not implemented: Can't query array index with blacklisting operator yet`); - } - if (!ArrayIndex.validOperators.includes(op)) { - throw new Error(`Array indexes can only be queried with operators ${ArrayIndex.validOperators.map(op => `"${op}"`).join(', ')}`); - } - if (options) { - this.storage.debug.warn('Not implemented: query options for array indexes are ignored'); - } - - // Check cache - const cache = this.cache(op, val); - if (cache) { - // Use cached results - return cache; - } - - const stats = new IndexQueryStats('array_index_query', val, true); - - if ((op === 'contains' || op === '!contains') && val instanceof Array && val.length === 0) { - // Added for #135: empty compare array for contains/!contains must match all values - stats.type = 'array_index_scan'; - const results = await super.query(new BlacklistingSearchOperator((_) => [])); - stats.stop(results.length); - results.filterKey = this.key; - results.stats = stats; - // Don't cache results - return results; - } - else if (op === 'contains') { - if (val instanceof Array) { - // recipesIndex.query('contains', ['egg','bacon']) - - // Get result count for each value in array - const countPromises = val.map(value => { - const wildcardIndex = typeof value !== 'string' ? -1 : ~(~value.indexOf('*') || ~value.indexOf('?')); - const valueOp = ~wildcardIndex ? 'like' : '=='; - - const step = new IndexQueryStats('count', value, true); - stats.steps.push(step); - - return this.count(valueOp, value) - .then(count => { - step.stop(count); - return { value, count }; - }); - }); - const counts = await Promise.all(countPromises); - // Start with the smallest result set - counts.sort((a, b) => { - if (a.count < b.count) { return -1; } - else if (a.count > b.count) { return 1; } - return 0; - }); - - let results: IndexQueryResults; - - if (counts[0].count === 0) { - stats.stop(0); - - this.storage.debug.log(`Value "${counts[0].value}" not found in index, 0 results for query ${op} ${val}`); - results = new IndexQueryResults(0); - results.filterKey = this.key; - results.stats = stats; - - // Add query hints for each unknown item - counts.forEach(c => { - if (c.count === 0) { - const hint = new ArrayIndexQueryHint(ArrayIndexQueryHint.types.missingValue, c.value); - results.hints.push(hint); - } - }); - - // Cache the empty result set - this.cache(op, val, results); - return results; - } - const allValues = counts.map(c => c.value); - - // Query 1 value, then filter results further and further - // Start with the smallest result set - const queryValue = (value: IndexableValue, filter?: IndexQueryResults) => { - const wildcardIndex = typeof value !== 'string' ? -1 : ~(~value.indexOf('*') || ~value.indexOf('?')); - const valueOp = ~wildcardIndex ? 
'like' : '=='; - - return super.query(valueOp, value, { filter }) - .then(results => { - stats.steps.push(results.stats); - return results; - }); - }; - let valueIndex = 0; - // let resultsPerValue = new Array(values.length); - const nextValue = async () => { - const value = allValues[valueIndex]; - const fr = await queryValue(value, results); - results = fr; - valueIndex++; - if (results.length === 0 || valueIndex === allValues.length) { return; } - await nextValue(); - }; - await nextValue(); - results.filterKey = this.key; - - stats.stop(results.length); - results.stats = stats; - - // Cache results - delete results.entryValues; // No need to cache these. Free the memory - this.cache(op, val, results); - return results; - } - else { - // Single value query - const valueOp = - typeof val === 'string' && (val.includes('*') || val.includes('?')) - ? 'like' - : '=='; - const results = await super.query(valueOp, val); - stats.steps.push(results.stats); - results.stats = stats; - delete results.entryValues; - return results; - } - } - else if (op === '!contains') { - // DISABLED executing super.query('!=', val) because it returns false positives - // for arrays that "!contains" val, but does contain other values... - // Eg: an indexed array value of: ['bacon', 'egg', 'toast', 'sausage'], - // when executing index.query('!contains', 'bacon'), - // it will falsely match that record because the 2nd value 'egg' - // matches the filter ('egg' is not 'bacon') - - // NEW: BlacklistingSearchOperator will take all values in the index unless - // they are blacklisted along the way. Our callback determines whether to blacklist - // an entry's values, which we'll do if its key matches val - const customOp = new BlacklistingSearchOperator(entry => { - const blacklist = val === entry.key - || (val instanceof Array && val.includes(entry.key)); - if (blacklist) { return entry.values; } - }); - - stats.type = 'array_index_blacklist_scan'; - const results = await super.query(customOp); - stats.stop(results.length); - results.filterKey = this.key; - results.stats = stats; - - // Cache results - this.cache(op, val, results); - return results; - } - } -} - +import { BlacklistingSearchOperator } from '../btree'; +import { DataIndex } from './data-index'; +import { DataIndexOptions } from './options'; +import type { Storage } from '../storage'; +import { IndexableValue, IndexableValueOrArray } from './shared'; +import { VALUE_TYPES } from '../node-value-types'; +import { IndexQueryResults } from './query-results'; +import { IndexQueryStats } from './query-stats'; +import { ArrayIndexQueryHint } from './array-index-query-hint'; + +/** + * An array index allows all values in an array node to be indexed and searched + */ +export class ArrayIndex extends DataIndex { + constructor(storage: Storage, path: string, key: string, options: DataIndexOptions) { + if (key === '{key}') { throw new Error('Cannot create array index on node keys'); } + super(storage, path, key, options); + } + + // get fileName() { + // return super.fileName.slice(0, -4) + '.array.idx'; + // } + + get type() { + return 'array'; + } + + async handleRecordUpdate(path: string, oldValue: unknown, newValue: unknown) { + const tmpOld = oldValue !== null && typeof oldValue === 'object' && this.key in oldValue ? (oldValue as any)[this.key] : null; + const tmpNew = newValue !== null && typeof newValue === 'object' && this.key in newValue ? 
(newValue as any)[this.key] : null; + + let oldEntries: IndexableValue[]; + if (tmpOld instanceof Array) { + // Only use unique values + oldEntries = tmpOld.reduce((unique, entry) => { + !unique.includes(entry) && unique.push(entry); + return unique; + }, []); + } + else { oldEntries = []; } + if (oldEntries.length === 0) { + // Add undefined entry to indicate empty array + oldEntries.push(undefined); + } + + let newEntries: IndexableValue[]; + if (tmpNew instanceof Array) { + // Only use unique values + newEntries = tmpNew.reduce((unique, entry) => { + !unique.includes(entry) && unique.push(entry); + return unique; + }, []); + } + else { newEntries = []; } + if (newEntries.length === 0) { + // Add undefined entry to indicate empty array + newEntries.push(undefined); + } + const removed = oldEntries.filter(entry => !newEntries.includes(entry)); + const added = newEntries.filter(entry => !oldEntries.includes(entry)); + + const mutated = { old: {} as any, new: {} as any }; + Object.assign(mutated.old, oldValue); + Object.assign(mutated.new, newValue); + + const promises = [] as Promise[]; + removed.forEach(entry => { + mutated.old[this.key] = entry; + mutated.new[this.key] = null; + const p = super.handleRecordUpdate(path, mutated.old, mutated.new); + promises.push(p); + }); + added.forEach(entry => { + mutated.old[this.key] = null; + mutated.new[this.key] = entry; + const p = super.handleRecordUpdate(path, mutated.old, mutated.new); + promises.push(p); + }); + await Promise.all(promises); + } + + build() { + return super.build({ + addCallback: (add, array: IndexableValue[], recordPointer, metadata) => { + if (!(array instanceof Array) || array.length === 0) { + // Add undefined entry to indicate empty array + add(undefined, recordPointer, metadata); + return []; + } + + // index unique items only + array.reduce((unique, value) => { + !unique.includes(value) && unique.push(value); + return unique; + }, []).forEach(value => { + add(value, recordPointer, metadata); + }); + return array; + }, + valueTypes: [VALUE_TYPES.ARRAY], + }); + } + + static get validOperators() { + // This is the only special index that does not use prefixed operators + // because these can also be used to query non-indexed arrays (but slower, of course..) 
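
The multi-value `contains` branch below counts matches per value first, sorts ascending, and queries the rarest value before narrowing with the rest, so a zero count can short-circuit the whole query. A sketch of that strategy, with hypothetical `count`/`query` stand-ins for the index's own methods:

```ts
async function containsAll<T>(
    values: T[],
    count: (value: T) => Promise<number>,
    query: (value: T, filter?: string[]) => Promise<string[]>, // returns record keys
): Promise<string[]> {
    if (values.length === 0) { return []; }
    // Count results per value, then start with the smallest result set
    const counts = await Promise.all(values.map(async value => ({ value, count: await count(value) })));
    counts.sort((a, b) => a.count - b.count);
    if (counts[0].count === 0) { return []; } // a value missing from the index: nothing can match all
    let results: string[] | undefined;
    for (const { value } of counts) {
        results = await query(value, results); // each pass narrows the previous results
        if (results.length === 0) { break; }   // intersection already empty
    }
    return results ?? [];
}
```
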
+ return ['contains', '!contains']; + } + get validOperators() { + return ArrayIndex.validOperators; + } + + async query(op: BlacklistingSearchOperator): Promise; + async query(op: string, val: IndexableValueOrArray, options?: { filter?: IndexQueryResults; }): Promise; + /** + * @param op "contains" or "!contains" + * @param val value to search for + */ + async query(op: string | BlacklistingSearchOperator, val?: IndexableValueOrArray, options?: { filter?: IndexQueryResults; }) { + if (op instanceof BlacklistingSearchOperator) { + throw new Error(`Not implemented: Can't query array index with blacklisting operator yet`); + } + if (!ArrayIndex.validOperators.includes(op)) { + throw new Error(`Array indexes can only be queried with operators ${ArrayIndex.validOperators.map(op => `"${op}"`).join(', ')}`); + } + if (options) { + this.logger.warn('Not implemented: query options for array indexes are ignored'); + } + + // Check cache + const cache = this.cache(op, val); + if (cache) { + // Use cached results + return cache; + } + + const stats = new IndexQueryStats('array_index_query', val, true); + + if ((op === 'contains' || op === '!contains') && val instanceof Array && val.length === 0) { + // Added for #135: empty compare array for contains/!contains must match all values + stats.type = 'array_index_scan'; + const results = await super.query(new BlacklistingSearchOperator((_) => [])); + stats.stop(results.length); + results.filterKey = this.key; + results.stats = stats; + // Don't cache results + return results; + } + else if (op === 'contains') { + if (val instanceof Array) { + // recipesIndex.query('contains', ['egg','bacon']) + + // Get result count for each value in array + const countPromises = val.map(value => { + const wildcardIndex = typeof value !== 'string' ? -1 : ~(~value.indexOf('*') || ~value.indexOf('?')); + const valueOp = ~wildcardIndex ? 'like' : '=='; + + const step = new IndexQueryStats('count', value, true); + stats.steps.push(step); + + return this.count(valueOp, value) + .then(count => { + step.stop(count); + return { value, count }; + }); + }); + const counts = await Promise.all(countPromises); + // Start with the smallest result set + counts.sort((a, b) => { + if (a.count < b.count) { return -1; } + else if (a.count > b.count) { return 1; } + return 0; + }); + + let results: IndexQueryResults; + + if (counts[0].count === 0) { + stats.stop(0); + + this.logger.info(`Value "${counts[0].value}" not found in index, 0 results for query ${op} ${val}`); + results = new IndexQueryResults(0); + results.filterKey = this.key; + results.stats = stats; + + // Add query hints for each unknown item + counts.forEach(c => { + if (c.count === 0) { + const hint = new ArrayIndexQueryHint(ArrayIndexQueryHint.types.missingValue, c.value); + results.hints.push(hint); + } + }); + + // Cache the empty result set + this.cache(op, val, results); + return results; + } + const allValues = counts.map(c => c.value); + + // Query 1 value, then filter results further and further + // Start with the smallest result set + const queryValue = (value: IndexableValue, filter?: IndexQueryResults) => { + const wildcardIndex = typeof value !== 'string' ? -1 : ~(~value.indexOf('*') || ~value.indexOf('?')); + const valueOp = ~wildcardIndex ? 
'like' : '=='; + + return super.query(valueOp, value, { filter }) + .then(results => { + stats.steps.push(results.stats); + return results; + }); + }; + let valueIndex = 0; + // let resultsPerValue = new Array(values.length); + const nextValue = async () => { + const value = allValues[valueIndex]; + const fr = await queryValue(value, results); + results = fr; + valueIndex++; + if (results.length === 0 || valueIndex === allValues.length) { return; } + await nextValue(); + }; + await nextValue(); + results.filterKey = this.key; + + stats.stop(results.length); + results.stats = stats; + + // Cache results + delete results.entryValues; // No need to cache these. Free the memory + this.cache(op, val, results); + return results; + } + else { + // Single value query + const valueOp = + typeof val === 'string' && (val.includes('*') || val.includes('?')) + ? 'like' + : '=='; + const results = await super.query(valueOp, val); + stats.steps.push(results.stats); + results.stats = stats; + delete results.entryValues; + return results; + } + } + else if (op === '!contains') { + // DISABLED executing super.query('!=', val) because it returns false positives + // for arrays that "!contains" val, but does contain other values... + // Eg: an indexed array value of: ['bacon', 'egg', 'toast', 'sausage'], + // when executing index.query('!contains', 'bacon'), + // it will falsely match that record because the 2nd value 'egg' + // matches the filter ('egg' is not 'bacon') + + // NEW: BlacklistingSearchOperator will take all values in the index unless + // they are blacklisted along the way. Our callback determines whether to blacklist + // an entry's values, which we'll do if its key matches val + const customOp = new BlacklistingSearchOperator(entry => { + const blacklist = val === entry.key + || (val instanceof Array && val.includes(entry.key)); + if (blacklist) { return entry.values; } + }); + + stats.type = 'array_index_blacklist_scan'; + const results = await super.query(customOp); + stats.stop(results.length); + results.filterKey = this.key; + results.stats = stats; + + // Cache results + this.cache(op, val, results); + return results; + } + } +} + diff --git a/src/data-index/data-index.ts b/src/data-index/data-index.ts index bc2883c..666179a 100644 --- a/src/data-index/data-index.ts +++ b/src/data-index/data-index.ts @@ -1,4 +1,4 @@ -import { PathInfo, Utils, ID, ColorStyle, Transport } from 'acebase-core'; +import { PathInfo, Utils, ID, ColorStyle, Transport, type LoggerPlugin } from 'acebase-core'; import { ThreadSafe } from '../thread-safe'; import type { Storage } from '../storage'; import { pfs } from '../promise-fs'; @@ -198,6 +198,8 @@ export class DataIndex { private _fileName?: string; + public logger: LoggerPlugin; + /** * Creates a new index */ @@ -231,6 +233,8 @@ export class DataIndex { values: 0, }, }; + + this.logger = storage.logger; } get allMetadataKeys() { @@ -269,7 +273,7 @@ export class DataIndex { this._cache.set(op, opCache); } // let clear = () => { - // // this.storage.debug.log(`Index ${this.description}, cache clean for ${op} "${val}"`); + // // this.logger.info(`Index ${this.description}, cache clean for ${op} "${val}"`); // opCache.delete(val); // } const scheduleClear = () => { @@ -283,13 +287,13 @@ export class DataIndex { reads: 0, timeout: scheduleClear(), extendLife: () => { - // this.storage.debug.log(`Index ${this.description}, cache lifetime extended for ${op} "${val}". 
reads: ${cache.reads}`); + // this.logger.info(`Index ${this.description}, cache lifetime extended for ${op} "${val}". reads: ${cache.reads}`); clearTimeout(cache.timeout); cache.timeout = scheduleClear(); }, }; opCache.set(val, cache); - // this.storage.debug.log(`Index ${this.description}, cached ${results.length} results for ${op} "${val}"`); + // this.logger.info(`Index ${this.description}, cached ${results.length} results for ${op} "${val}"`); } } @@ -318,6 +322,7 @@ export class DataIndex { // Read an index from file const filePath = fileName.includes('/') ? fileName : `${storage.settings.path}/${storage.name}.acebase/${fileName}`; const fd = await pfs.open(filePath, pfs.flags.read); + const logger = storage.logger; try { // Read signature let result = await pfs.read(fd, Buffer.alloc(10)); @@ -447,7 +452,7 @@ export class DataIndex { return dataIndex; } catch(err) { - storage.debug.error(err); + logger.error(err); pfs.close(fd); throw err; } @@ -590,7 +595,7 @@ export class DataIndex { idx.release(); } catch(err) { - this.storage.debug.error('Index rebuild error: ', err); + this.logger.error('Index rebuild error: ', err); this.state = DataIndex.STATE.ERROR; this._buildError = err; idx.release(); @@ -616,7 +621,7 @@ export class DataIndex { } catch(err) { // Could not update index --> leaf full? - this.storage.debug.verbose(`Could not update index ${this.description}: ${err.message}`.colorize(ColorStyle.yellow)); + this.logger.trace(`Could not update index ${this.description}: ${err.message}`.colorize(ColorStyle.yellow)); if (retry > 0 && opsCount === operations.length) { throw new Error(`DEV ERROR: unable to process operations because tree was rebuilt, and that didn't help?! --> ${err.stack}`); @@ -625,7 +630,7 @@ export class DataIndex { await this._rebuild(idx); // rebuild calls idx.close() and .release() // Process left-over operations - this.storage.debug.verbose('Index was rebuilt, retrying pending operations'); + this.logger.trace('Index was rebuilt, retrying pending operations'); idx = await this._getTree('exclusive'); await go(retry + 1); return true; // "rebuilt" @@ -633,11 +638,11 @@ export class DataIndex { }; const rebuilt = await go(); - // this.storage.debug.log(`Released update lock on index ${this.description}`.colorize(ColorStyle.blue)); + // this.logger.info(`Released update lock on index ${this.description}`.colorize(ColorStyle.blue)); const doneTime = Date.now(); const ms = doneTime - startTime; const duration = ms < 5000 ? ms + 'ms' : Math.round(ms / 1000) + 's'; - this.storage.debug.verbose(`Index ${this.description} was ${rebuilt ? 'rebuilt' : 'updated'} successfully for "/${path}", took ${duration}`.colorize(ColorStyle.green)); + this.logger.trace(`Index ${this.description} was ${rebuilt ? 
'rebuilt' : 'updated'} successfully for "/${path}", took ${duration}`.colorize(ColorStyle.green)); // Process any queued updates return await this._processUpdateQueue(); @@ -667,7 +672,7 @@ export class DataIndex { } async handleRecordUpdate(path: string, oldValue: unknown, newValue: unknown, indexMetadata?: IndexMetaData): Promise { - this.storage.debug.verbose(`Handling index ${this.description} update request for "/${path}"`); + this.logger.trace(`Handling index ${this.description} update request for "/${path}"`); const getValues = (key: string, oldValue: unknown, newValue: unknown) => PathInfo.getPathKeys(key).reduce((values, key) => getChildValues(key, values.oldValue, values.newValue), { oldValue, newValue }) as { oldValue: IndexableValue; newValue: IndexableValue }; @@ -692,7 +697,7 @@ export class DataIndex { const includedValuesChanged = includedValues.some(values => compareValues(values.oldValue, values.newValue) !== 'identical'); if (!keyValueChanged && !includedValuesChanged) { - this.storage.debug.verbose(`Update on "/${path}" has no effective changes for index ${this.description}`); + this.logger.trace(`Update on "/${path}" has no effective changes for index ${this.description}`); return; } @@ -714,11 +719,11 @@ export class DataIndex { // Invalidate query cache this._cache.clear(); // Update the tree - this.storage.debug.verbose(`Updating index ${this.description} tree for "/${path}"`); + this.logger.trace(`Updating index ${this.description} tree for "/${path}"`); return await this._updateTree(path, keyValues.oldValue, keyValues.newValue, recordPointer, recordPointer, metadata); } else { - this.storage.debug.log(`Queueing index ${this.description} update for "/${path}"`); + this.logger.info(`Queueing index ${this.description} update for "/${path}"`); // Queue the update const update = { path, @@ -735,7 +740,7 @@ export class DataIndex { update.resolve = resolve; update.reject = reject; }).catch(err => { - this.storage.debug.error(`Unable to process queued update for "/${path}" on index ${this.description}:`, err); + this.logger.error(`Unable to process queued update for "/${path}" on index ${this.description}:`, err); }); this._updateQueue.push(update); @@ -1042,7 +1047,7 @@ export class DataIndex { const allowedKeyValueTypes = options && options.valueTypes ? options.valueTypes : indexableTypes; - this.storage.debug.log(`Index build ${this.description} started`.colorize(ColorStyle.blue)); + this.logger.info(`Index build ${this.description} started`.colorize(ColorStyle.blue)); let indexedValues = 0; // let addPromise; // let flushed = false; @@ -1080,7 +1085,7 @@ export class DataIndex { // }); // return; // } - this.storage.debug.log(`done writing values to ${buildFile}`); + this.logger.info(`done writing values to ${buildFile}`); if (streamState.wait) { buildWriteStream.once('drain', () => { buildWriteStream.end(resolve); @@ -1146,7 +1151,7 @@ export class DataIndex { } catch (reason) { // Record doesn't exist? 
No biggy - this.storage.debug.warn(`Could not get children of "/${path}": ${reason.message}`); + this.logger.warn(`Could not get children of "/${path}": ${reason.message}`); } // Iterate through the children in batches of max n nodes @@ -1342,7 +1347,7 @@ export class DataIndex { else { addIndexValue(keyValue, recordPointer, metadata); } - this.storage.debug.log(`Indexed "/${childPath}/${this.key}" value: '${keyValue}' (${typeof keyValue})`.colorize(ColorStyle.cyan)); + this.logger.info(`Indexed "/${childPath}/${this.key}" value: '${keyValue}' (${typeof keyValue})`.colorize(ColorStyle.cyan)); } // return addPromise; // Do we really have to wait for this? } @@ -1755,11 +1760,11 @@ export class DataIndex { // Done writing values to build file. // Now we have to group all values per key, sort them. // then create the binary B+tree. - this.storage.debug.log(`done writing build file ${buildFile}`); + this.logger.info(`done writing build file ${buildFile}`); await createMergeFile(); // Open merge file for reading, index file for writing - this.storage.debug.log(`done writing merge file ${mergeFile}`); + this.logger.info(`done writing merge file ${mergeFile}`); const [ readFD, writeFD ] = await Promise.all([ indexedValues === 0 ? -1 : pfs.open(mergeFile, pfs.flags.read), pfs.open(this.fileName, pfs.flags.write), @@ -1810,7 +1815,7 @@ export class DataIndex { isUnique: false, keepFreeSpace: true, metadataKeys: this.allMetadataKeys, - debug: this.storage.debug, + logger: this.logger, }, ); @@ -1825,11 +1830,11 @@ export class DataIndex { const doneTime = Date.now(); const duration = Math.round((doneTime - startTime) / 1000 / 60); - this.storage.debug.log(`Index ${this.description} was built successfully, took ${duration} minutes`.colorize(ColorStyle.green)); + this.logger.info(`Index ${this.description} was built successfully, took ${duration} minutes`.colorize(ColorStyle.green)); this.state = DataIndex.STATE.READY; } catch(err: unknown) { - this.storage.debug.error(`Error building index ${this.description}: ${(err as Error)?.message || err}`); + this.logger.error(`Error building index ${this.description}: ${(err as Error)?.message || err}`); this.state = DataIndex.STATE.ERROR; this._buildError = err as Error; throw err; @@ -2118,7 +2123,7 @@ export class DataIndex { await pfs.close(fd); } catch(err) { - this.storage.debug.error(err); + this.logger.error(err); throw err; } } @@ -2153,7 +2158,7 @@ export class DataIndex { readFn: reader, chunkSize: DISK_BLOCK_SIZE, writeFn: writer, - debug: this.storage.debug, + logger: this.logger, id: ID.generate(), // For tree locking }); tree.autoGrow = true; // Allow the tree to grow. DISABLE THIS IF THERE ARE MULTIPLE TREES IN THE INDEX FILE LATER! 
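
This changeset swaps DebugLogger for a LoggerPlugin that is threaded from `storage.logger` down into DataIndex and the B+tree. Assuming LoggerPlugin amounts to the `trace`/`debug`/`info`/`warn`/`error` methods these call sites use, and that a pino-backed implementation is the intent, a plugin could be wired up roughly like this (the adapter shape is an assumption, not the confirmed acebase-core interface):

```ts
import pino from 'pino';
import type { LoggerPlugin } from 'acebase-core';

// pino's level methods take a merging object first; the call sites in this diff pass
// a message first, so the adapter forwards (message, ...args) in pino's preferred order.
const log = pino({ level: 'trace' });
const logger: LoggerPlugin = {
    trace: (msg: string, ...args: unknown[]) => log.trace({ args }, msg),
    debug: (msg: string, ...args: unknown[]) => log.debug({ args }, msg),
    info: (msg: string, ...args: unknown[]) => log.info({ args }, msg),
    warn: (msg: string, ...args: unknown[]) => log.warn({ args }, msg),
    error: (msg: string, ...args: unknown[]) => log.error({ args }, msg),
};
```

If LoggerPlugin's actual shape in acebase-core differs, only the adapter object needs adjusting.
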
(which is not implemented yet) @@ -2168,7 +2173,7 @@ export class DataIndex { this._idx = null; await pfs.close(fd) .catch(err => { - this.storage.debug.warn(`Could not close index file "${this.fileName}":`, err); + this.logger.warn(`Could not close index file "${this.fileName}":`, err); }); }, /** Releases the acquired tree lock */ diff --git a/src/data-index/fulltext-index.ts b/src/data-index/fulltext-index.ts index b1a4e29..002e1b4 100644 --- a/src/data-index/fulltext-index.ts +++ b/src/data-index/fulltext-index.ts @@ -1,1041 +1,1041 @@ -import { DataIndex } from './data-index'; -import { DataIndexOptions } from './options'; -import { IndexQueryResults } from './query-results'; -import { Storage } from '../storage'; -import { IndexMetaData } from './shared'; -import { VALUE_TYPES } from '../node-value-types'; -import { BlacklistingSearchOperator } from '../btree'; -import { IndexQueryStats } from './query-stats'; -import { FullTextIndexQueryHint } from './fulltext-index-query-hint'; -import unidecode from '../unidecode'; -import { assert } from '../assert'; - -class WordInfo { - constructor(public word: string, public indexes: number[], public sourceIndexes: number[]) { } - get occurs() { - return this.indexes.length; - } -} - -// const _wordsRegex = /[\w']+/gmi; // TODO: should use a better pattern that supports non-latin characters -class TextInfo { - static get locales() { - return { - 'default': { - pattern: '[A-Za-z0-9\']+', - flags: 'gmi', - }, - 'en': { - // English stoplist from https://gist.github.com/sebleier/554280 - stoplist: ['i', 'me', 'my', 'myself', 'we', 'our', 'ours', 'ourselves', 'you', 'your', 'yours', 'yourself', 'yourselves', 'he', 'him', 'his', 'himself', 'she', 'her', 'hers', 'herself', 'it', 'its', 'itself', 'they', 'them', 'their', 'theirs', 'themselves', 'what', 'which', 'who', 'whom', 'this', 'that', 'these', 'those', 'am', 'is', 'are', 'was', 'were', 'be', 'been', 'being', 'have', 'has', 'had', 'having', 'do', 'does', 'did', 'doing', 'a', 'an', 'the', 'and', 'but', 'if', 'or', 'because', 'as', 'until', 'while', 'of', 'at', 'by', 'for', 'with', 'about', 'against', 'between', 'into', 'through', 'during', 'before', 'after', 'above', 'below', 'to', 'from', 'up', 'down', 'in', 'out', 'on', 'off', 'over', 'under', 'again', 'further', 'then', 'once', 'here', 'there', 'when', 'where', 'why', 'how', 'all', 'any', 'both', 'each', 'few', 'more', 'most', 'other', 'some', 'such', 'no', 'nor', 'not', 'only', 'own', 'same', 'so', 'than', 'too', 'very', 's', 't', 'can', 'will', 'just', 'don', 'should', 'now'], - }, - get(locale: string) { - const settings = {} as { pattern?: string, flags?: string, stoplist?: string[] }; - Object.assign(settings, this.default); - if (typeof this[locale] === 'undefined' && locale.indexOf('-') > 0) { - locale = locale.split('-')[1]; - } - if (typeof this[locale] === 'undefined') { - return settings; - } - Object.keys(this[locale]).forEach(key => { - (settings as any)[key] = this[locale][key]; - }); - return settings; - }, - }; - } - - public locale: string; - public words: Map; // WordInfo[]; - public ignored: string[]; - - getWordInfo(word: string): WordInfo { - return this.words.get(word); - } - - /** - * Reconstructs an array of words in the order they were encountered - */ - toSequence() { - const arr = [] as string[]; - for (const { word, indexes } of this.words.values()) { - for (const index of indexes) { - arr[index] = word; - } - } - return arr; - } - - /** - * Returns all unique words in an array - */ - toArray() { - const arr = [] 
as string[]; - for (const word of this.words.keys()) { - arr.push(word); - } - return arr; - } - - get uniqueWordCount() { - return this.words.size; //.length; - } - - get wordCount() { - let total = 0; - for (const wordInfo of this.words.values()) { - total += wordInfo.occurs; - } - return total; - // return this.words.reduce((total, word) => total + word.occurs, 0); - } - - constructor(text: string, options?: { - /** - * Set the text locale to accurately convert words to lowercase - * @default "en" - */ - locale?: string; - - /** - * Overrides the default RegExp pattern used - * @default "[\w']+" - */ - pattern?: RegExp | string; - - /** - * Add characters to the word detection regular expression. Useful to keep wildcards such as * and ? in query texts - */ - includeChars?: string; - - /** - * Overrides the default RegExp flags (`gmi`) used - * @default "gmi" - */ - flags?: string; - - /** - * Optional callback functions that pre-processes the value before performing word splitting. - */ - prepare?: (value: any, locale: string, keepChars: string) => string; - - /** - * Optional callback function that is able to perform word stemming. Will be executed before performing criteria checks - */ - stemming?: (word:string, locale:string) => string; - - /** - * Minimum length of words to include - * @default 1 - */ - minLength?: number; - - /** - * Maximum length of words to include, should be increased if you expect words in your texts - * like "antidisestablishmentarianism" (28), "floccinaucinihilipilification" (29) or "pneumonoultramicroscopicsilicovolcanoconiosis" (45) - * @default 25 - */ - maxLength?: number; - - /** - * Words to ignore. You can use a default stoplist from TextInfo.locales - */ - blacklist?: string[]; - - /** - * Words to include even if they do not meet the min & maxLength criteria - */ - whitelist?: string[]; - - /** - * Whether to use a default stoplist to blacklist words (if available for locale) - * @default false - */ - useStoplist?: boolean; - }) { - // this.text = text; // Be gone later... - this.locale = options.locale || 'en'; - const localeSettings = TextInfo.locales.get(this.locale); - let pattern = localeSettings.pattern; - if (options.pattern && options.pattern instanceof RegExp) { - pattern = options.pattern.source; - } - else if (typeof options.pattern === 'string') { - pattern = options.pattern; - } - if (options.includeChars) { - assert(pattern.indexOf('[') >= 0, 'pattern does not contain []'); - let insert = ''; - for (let i = 0; i < options.includeChars.length; i++) { - insert += '\\' + options.includeChars[i]; - } - let pos = -1; - while(true) { - const index = pattern.indexOf('[', pos + 1) + 1; - if (index === 0) { break; } - pattern = pattern.slice(0, index) + insert + pattern.slice(index); - pos = index; - } - } - let flags = localeSettings.flags; - if (typeof options.flags === 'string') { - flags = options.flags; - } - const re = new RegExp(pattern, flags); - const minLength = typeof options.minLength === 'number' ? options.minLength : 1; - const maxLength = typeof options.maxLength === 'number' ? options.maxLength : 25; - let blacklist = options.blacklist instanceof Array ? options.blacklist : []; - if (localeSettings.stoplist instanceof Array && options.useStoplist === true) { - blacklist = blacklist.concat(localeSettings.stoplist); - } - const whitelist = options.whitelist instanceof Array ? 
options.whitelist : []; - - const words = this.words = new Map(); - this.ignored = []; - if (text === null || typeof text === 'undefined') { return; } - - if (options.prepare) { - // Pre-process text. Allows decompression, decrypting, custom stemming etc - text = options.prepare(text, this.locale, `"${options.includeChars ?? ''}`); - } - - // Unidecode text to get ASCII characters only - function safe_unidecode (str: string) { - // Fix for occasional multi-pass issue, copied from https://github.com/FGRibreau/node-unidecode/issues/16 - let ret; - while (str !== (ret = unidecode(str))) { - str = ret; - } - return ret; - } - text = safe_unidecode(text); - - // Remove any single quotes, so "don't" will be stored as "dont", "isn't" as "isnt" etc - text = text.replace(/'/g, ''); - - // Process the text - // const wordsRegex = /[\w']+/gu; - let wordIndex = 0; - while(true) { - const match = re.exec(text); - if (match === null) { break; } - let word = match[0]; - - // TODO: use stemming such as snowball (https://www.npmjs.com/package/snowball-stemmers) - // to convert words like "having" to "have", and "cycles", "cycle", "cycling" to "cycl" - if (typeof options.stemming === 'function') { - // Let callback function perform word stemming - const stemmed = options.stemming(word, this.locale); - if (typeof stemmed !== 'string') { - // Ignore this word - if (this.ignored.indexOf(word) < 0) { - this.ignored.push(word); - } - // Do not increase wordIndex - continue; - } - word = stemmed; - } - - word = word.toLocaleLowerCase(this.locale); - - if (word.length < minLength || ~blacklist.indexOf(word)) { - // Word does not meet set criteria - if (!~whitelist.indexOf(word)) { - // Not whitelisted either - if (this.ignored.indexOf(word) < 0) { - this.ignored.push(word); - } - // Do not increase wordIndex - continue; - } - } - else if (word.length > maxLength) { - // Use the word, but cut it to the max length - word = word.slice(0, maxLength); - } - - let wordInfo = words.get(word); - if (wordInfo) { - wordInfo.indexes.push(wordIndex); - wordInfo.sourceIndexes.push(match.index); - } - else { - wordInfo = new WordInfo(word, [wordIndex], [match.index]); - words.set(word, wordInfo); - } - wordIndex++; - } - } - -} - -export interface FullTextIndexOptions extends DataIndexOptions { - /** - * FullText configuration settings. - * NOTE: these settings are not stored in the index file because they contain callback functions - * that might not work after a (de)serializion cycle. Besides this, it is also better for security - * reasons not to store executable code in index files! - * - * That means that in order to keep fulltext indexes working as intended, you will have to: - * - call `db.indexes.create` for fulltext indexes each time your app starts, even if the index exists already - * - rebuild the index if you change this config. (pass `rebuild: true` in the index options) - */ - config?: { - /** - * callback function that prepares a text value for indexing. - * Useful to perform any actions on the text before it is split into words, such as: - * - transforming compressed / encrypted data to strings - * - perform custom word stemming: allows you to replace strings like `I've` to `I have` - * Important: do not remove any of the characters passed in `keepChars` (`"*?`)! 
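
The `prepare`/`transform` contract documented in this interface (prepare runs before word splitting and must leave the `keepChars` wildcards `"*?` intact; transform may stem a word, or drop it by returning a non-string, as the constructor above shows) can be satisfied with a config along these lines. The shapes are inferred from the interface; the stemmer is deliberately naive and purely illustrative:

```ts
const config = {
    prepare: (value: any, locale: string, keepChars = ''): string => {
        // e.g. decompress or decrypt here; never strip characters listed in keepChars
        return String(value);
    },
    transform: (word: string, locale: string): string => {
        // Naive plural "stemming", for illustration only
        return word.endsWith('s') && word.length > 3 ? word.slice(0, -1) : word;
    },
    useStoplist: true, // blacklist locale stopwords such as "the", "and"
    minLength: 2,
    maxLength: 25,
};
```

Because these callbacks cannot be serialized into the index file, the interface notes that such a config must be re-supplied on every startup and the index rebuilt whenever it changes.
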
- */ - prepare?: (value: any, locale: string, keepChars?: string) => string; - - /** - * callback function that transforms (or filters) words being indexed - */ - transform?: (word: string, locale:string) => string; - - /** - * words to be ignored - */ - blacklist?: string[]; - - /** - * Uses a locale specific stoplist to automatically blacklist words - * @default true - */ - useStoplist?: boolean; - - /** - * Words to be included if they did not match other criteria - */ - whitelist?: string[]; - - /** - * Uses the value of a specific key as locale. Allows different languages to be indexed correctly, - * overrides options.textLocale - * @deprecated move to options.textLocaleKey - */ - localeKey?: string; - - /** - * Minimum length for words to be indexed (after transform) - */ - minLength?: number; - - /** - * Maximum length for words to be indexed (after transform) - */ - maxLength?: number; - } -} - -export interface FullTextContainsQueryOptions { - /** - * Locale to use for the words in the query. When omitted, the default index locale is used - */ - locale?: string; - - /** - * Used internally: treats the words in val as a phrase, eg: "word1 word2 word3": words need to occur in this exact order - */ - phrase?: boolean; - - /** - * Sets minimum amount of characters that have to be used for wildcard (sub)queries such as "a%" to guard the - * system against extremely large result sets. Length does not include the wildcard characters itself. Default - * value is 2 (allows "an*" but blocks "a*") - * @default 2 - */ - minimumWildcardWordLength?: number; -} - -/** - * A full text index allows all words in text nodes to be indexed and searched. - * Eg: "Every word in this text must be indexed." will be indexed with every word - * and can be queried with filters 'contains' and '!contains' a word, words or pattern. - * Eg: 'contains "text"', 'contains "text indexed"', 'contains "text in*"' will all match the text above. - * This does not use a thesauris or word lists (yet), so 'contains "query"' will not match. - * Each word will be stored and searched in lowercase - */ -export class FullTextIndex extends DataIndex { - - public config: FullTextIndexOptions['config']; - - constructor(storage: Storage, path: string, key: string, options: FullTextIndexOptions) { - if (key === '{key}') { throw new Error('Cannot create fulltext index on node keys'); } - super(storage, path, key, options); - // this.enableReverseLookup = true; - this.indexMetadataKeys = ['_occurs_']; //,'_indexes_' - this.config = options.config || {}; - if (this.config.localeKey) { - // localeKey is supported by all indexes now - storage.debug.warn(`fulltext index config option "localeKey" has been deprecated, as it is now supported for all indexes. Move the setting to the global index settings`); - this.textLocaleKey = this.config.localeKey; // Do use it now - } - } - - // get fileName() { - // return super.fileName.slice(0, -4) + '.fulltext.idx'; - // } - - get type() { - return 'fulltext'; - } - - getTextInfo(val: string, locale?: string) { - return new TextInfo(val, { - locale: locale ?? 
this.textLocale, - prepare: this.config.prepare, - stemming: this.config.transform, - blacklist: this.config.blacklist, - whitelist: this.config.whitelist, - useStoplist: this.config.useStoplist, - minLength: this.config.minLength, - maxLength: this.config.maxLength, - }); - } - - test(obj: any, op: 'fulltext:contains' | 'fulltext:!contains', val: string): boolean { - if (obj === null) { return op === 'fulltext:!contains'; } - const text = obj[this.key]; - if (typeof text === 'undefined') { return op === 'fulltext:!contains'; } - - const locale = obj?.[this.textLocaleKey] ?? this.textLocale; - const textInfo = this.getTextInfo(text, locale); - if (op === 'fulltext:contains') { - if (~val.indexOf(' OR ')) { - // split - const tests = val.split(' OR '); - return tests.some(val => this.test(text, op, val)); - } - else if (~val.indexOf('"')) { - // Phrase(s) used. We have to make sure the words used are not only in the text, - // but also in that exact order. - const phraseRegex = /"(.+?)"/g; - const phrases = []; - while (true) { - const match = phraseRegex.exec(val); - if (match === null) { break; } - const phrase = match[1]; - phrases.push(phrase); - val = val.slice(0, match.index) + val.slice(match.index + match[0].length); - phraseRegex.lastIndex = 0; - } - if (val.length > 0) { - phrases.push(val); - } - return phrases.every(phrase => { - const phraseInfo = this.getTextInfo(phrase, locale); - - // This was broken before TS port because WordInfo had an array of words that was not - // in the same order as the source words were. - // TODO: Thoroughly test this new code - const phraseWords = phraseInfo.toSequence(); - const occurrencesPerWord = phraseWords.map((word, i) => { - // Find word in text - const { indexes } = textInfo.words.get(word); - return indexes; - }); - const hasSequenceAtIndex = (wordIndex: number, occurrenceIndex: number): boolean => { - const startIndex = occurrencesPerWord[wordIndex]?.[occurrenceIndex]; - return occurrencesPerWord.slice(wordIndex + 1).every((occurences, i) => { - return occurences.some((index, j) => { - if (index !== startIndex + 1) { return false; } - return hasSequenceAtIndex(wordIndex + i, j); - }); - }); - }; - - // Find the existence of a sequence of words - // Loop: for each occurrence of the first word in text, remember its index - // Try to find second word in text with index+1 - // - found: try to find third word in text with index+2, etc (recursive) - // - not found: stop, proceed with next occurrence in main loop - return occurrencesPerWord[0].some((occurrence, i) => { - return hasSequenceAtIndex(0, i); - }); - - // const indexes = phraseInfo.words.map(word => textInfo.words.indexOf(word)); - // if (indexes[0] < 0) { return false; } - // for (let i = 1; i < indexes.length; i++) { - // if (indexes[i] - indexes[i-1] !== 1) { - // return false; - // } - // } - // return true; - }); - } - else { - // test 1 or more words - const wordsInfo = this.getTextInfo(val, locale); - return wordsInfo.toSequence().every(word => { - return textInfo.words.has(word); - }); - } - } - } - - async handleRecordUpdate(path: string, oldValue: any, newValue: any): Promise { - let oldText = oldValue !== null && typeof oldValue === 'object' && this.key in oldValue ? (oldValue as any)[this.key] : null; - let newText = newValue !== null && typeof newValue === 'object' && this.key in newValue ? (newValue as any)[this.key] : null; - - const oldLocale = oldValue?.[this.textLocaleKey] ?? this.textLocale, - newLocale = newValue?.[this.textLocaleKey] ?? 
this.textLocale; - - if (typeof oldText === 'object' && oldText instanceof Array) { - oldText = oldText.join(' '); - } - if (typeof newText === 'object' && newText instanceof Array) { - newText = newText.join(' '); - } - - const oldTextInfo = this.getTextInfo(oldText, oldLocale); - const newTextInfo = this.getTextInfo(newText, newLocale); - - // super._updateReverseLookupKey( - // path, - // oldText ? textEncoder.encode(oldText) : null, - // newText ? textEncoder.encode(newText) : null, - // metadata - // ); - - const oldWords = oldTextInfo.toArray(); //.words.map(w => w.word); - const newWords = newTextInfo.toArray(); //.words.map(w => w.word); - - const removed = oldWords.filter(word => newWords.indexOf(word) < 0); - const added = newWords.filter(word => oldWords.indexOf(word) < 0); - const changed = oldWords.filter(word => newWords.indexOf(word) >= 0).filter(word => { - const oldInfo = oldTextInfo.getWordInfo(word); - const newInfo = newTextInfo.getWordInfo(word); - return oldInfo.occurs !== newInfo.occurs || oldInfo.indexes.some((index, i) => newInfo.indexes[i] !== index); - }); - changed.forEach(word => { - // Word metadata changed. Simplest solution: remove and add again - removed.push(word); - added.push(word); - }); - const promises = [] as Promise[]; - // TODO: Prepare operations batch, then execute 1 tree update. - // Now every word is a seperate update which is not necessary! - removed.forEach(word => { - const p = super.handleRecordUpdate(path, { [this.key]: word }, { [this.key]: null }); - promises.push(p); - }); - added.forEach(word => { - const mutated: Record = { }; - Object.assign(mutated, newValue); - mutated[this.key] = word; - - const wordInfo = newTextInfo.getWordInfo(word); - // const indexMetadata = { - // '_occurs_': wordInfo.occurs, - // '_indexes_': wordInfo.indexes.join(',') - // }; - - let occurs = wordInfo.indexes.join(','); - if (occurs.length > 255) { - console.warn(`FullTextIndex ${this.description}: word "${word}" occurs too many times in "${path}/${this.key}" to store in index metadata. Truncating occurrences`); - const cutIndex = occurs.lastIndexOf(',', 255); - occurs = occurs.slice(0, cutIndex); - } - const indexMetadata = { - '_occurs_': occurs, - }; - const p = super.handleRecordUpdate(path, { [this.key]: null }, mutated, indexMetadata); - promises.push(p); - }); - await Promise.all(promises); - } - - build() { - return super.build({ - addCallback: (add, text: string | string[], recordPointer, metadata, env) => { - if (typeof text === 'object' && text instanceof Array) { - text = text.join(' '); - } - if (typeof text === 'undefined') { - text = ''; - } - const locale = env.locale || this.textLocale; - const textInfo = this.getTextInfo(text, locale); - if (textInfo.words.size === 0) { - this.storage.debug.warn(`No words found in "${typeof text === 'string' && text.length > 50 ? text.slice(0, 50) + '...' : text}" to fulltext index "${env.path}"`); - } - - // const revLookupKey = super._getRevLookupKey(env.path); - // tree.add(revLookupKey, textEncoder.encode(text), metadata); - - textInfo.words.forEach(wordInfo => { - - // IDEA: To enable fast '*word' queries (starting with wildcard), we can also store - // reversed words and run reversed query 'drow*' on it. 
we'd have to enable storing - // multiple B+Trees in a single index file: a 'forward' tree & a 'reversed' tree - - // IDEA: Following up on previous idea: being able to backtrack nodes within an index would - // help to speed up sorting queries on an indexed key, - // eg: query .take(10).filter('rating','>=', 8).sort('title') - // does not filter on key 'title', but can then use an index on 'title' for the sorting: - // it can take the results from the 'rating' index and backtrack the nodes' titles to quickly - // get a sorted top 10. We'd have to store a seperate 'backtrack' tree that uses recordPointers - // as the key, and 'title' values as recordPointers. Caveat: max string length for sorting would - // then be 255 ASCII chars, because that's the recordPointer size limit. - // The same boost can currently only be achieved by creating an index that includes 'title' in - // the index on 'rating' ==> db.indexes.create('movies', 'rating', { include: ['title'] }) - - // Extend metadata with more details about the word (occurrences, positions) - // const wordMetadata = { - // '_occurs_': wordInfo.occurs, - // '_indexes_': wordInfo.indexes.join(',') - // }; - - let occurs = wordInfo.indexes.join(','); - if (occurs.length > 255) { - console.warn(`FullTextIndex ${this.description}: word "${wordInfo.word}" occurs too many times to store in index metadata. Truncating occurrences`); - const cutIndex = occurs.lastIndexOf(',', 255); - occurs = occurs.slice(0, cutIndex); - } - const wordMetadata: IndexMetaData = { - '_occurs_': occurs, - }; - Object.assign(wordMetadata, metadata); - add(wordInfo.word, recordPointer, wordMetadata); - }); - return textInfo.toArray(); //words.map(info => info.word); - }, - valueTypes: [VALUE_TYPES.STRING], - }); - } - - static get validOperators() { - return ['fulltext:contains', 'fulltext:!contains']; - } - get validOperators() { - return FullTextIndex.validOperators; - } - - async query(op: string | BlacklistingSearchOperator, val?: string, options?: any) { - if (op instanceof BlacklistingSearchOperator) { - throw new Error(`Not implemented: Can't query fulltext index with blacklisting operator yet`); - } - if (op === 'fulltext:contains' || op === 'fulltext:!contains') { - return this.contains(op, val, options); - } - else { - throw new Error(`Fulltext indexes can only be queried with operators ${FullTextIndex.validOperators.map(op => `"${op}"`).join(', ')}`); - } - } - - /** - * - * @param op Operator to use, can be either "fulltext:contains" or "fulltext:!contains" - * @param val Text to search for. Can include * and ? wildcards, OR's for combined searches, and "quotes" for phrase searches - */ - async contains(op: 'fulltext:contains' | 'fulltext:!contains', val: string, options: FullTextContainsQueryOptions = { - phrase: false, - locale: undefined, - minimumWildcardWordLength: 2, - }): Promise { - if (!FullTextIndex.validOperators.includes(op)) { //if (op !== 'fulltext:contains' && op !== 'fulltext:not_contains') { - throw new Error(`Fulltext indexes can only be queried with operators ${FullTextIndex.validOperators.map(op => `"${op}"`).join(', ')}`); - } - - // Check cache - const cache = this.cache(op, val); - if (cache) { - // Use cached results - return Promise.resolve(cache); - } - - const stats = new IndexQueryStats(options.phrase ? 
'fulltext_phrase_query' : 'fulltext_query', val, true); - - // const searchWordRegex = /[\w'?*]+/g; // Use TextInfo to find and transform words using index settings - const getTextInfo = (text: string) => { - const info = new TextInfo(text, { - locale: options.locale || this.textLocale, - prepare: this.config.prepare, - stemming: this.config.transform, - minLength: this.config.minLength, - maxLength: this.config.maxLength, - blacklist: this.config.blacklist, - whitelist: this.config.whitelist, - useStoplist: this.config.useStoplist, - includeChars: '*?', - }); - - // Ignore any wildcard words that do not meet the set minimum length - // This is to safeguard the system against (possibly unwanted) very large - // result sets - const words = info.toArray(); - let i; - while (i = words.findIndex(w => /^[*?]+$/.test(w)), i >= 0) { - // Word is wildcards only. Ignore - const word = words[i]; - info.ignored.push(word); - info.words.delete(word); - } - - if (options.minimumWildcardWordLength > 0) { - for (const word of words) { - const starIndex = word.indexOf('*'); - // min = 2, word = 'an*', starIndex = 2, ok! - // min = 3: starIndex < min: not ok! - if (starIndex > 0 && starIndex < options.minimumWildcardWordLength) { - info.ignored.push(word); - info.words.delete(word); - i--; - } - } - } - return info; - }; - - if (val.includes(' OR ')) { - // Multiple searches in one query: 'secret OR confidential OR "don't tell"' - // TODO: chain queries instead of running simultanious? - const queries = val.split(' OR '); - const promises = queries.map(q => this.query(op, q, options)); - const resultSets = await Promise.all(promises); - stats.steps.push(...resultSets.map(results => results.stats)); - - const mergeStep = new IndexQueryStats('merge_expand', { sets: resultSets.length, results: resultSets.reduce((total, set) => total + set.length, 0) }, true); - stats.steps.push(mergeStep); - - const merged = resultSets[0]; - resultSets.slice(1).forEach(results => { - results.forEach(result => { - const exists = ~merged.findIndex(r => r.path === result.path); - if (!exists) { merged.push(result); } - }); - }); - const results = IndexQueryResults.fromResults(merged, this.key); - mergeStep.stop(results.length); - - stats.stop(results.length); - results.stats = stats; - results.hints.push(...resultSets.reduce((hints, set) => { hints.push(...set.hints); return hints; }, [])); - return results; - } - if (val.includes('"')) { - // Phrase(s) used. We have to make sure the words used are not only in the text, - // but also in that exact order. 
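            // Worked example (illustrative input): for val = 'fast "full text" search "exact order"',
            // the /"(.+?)"/g loop below collects the phrases 'full text' and 'exact order' and
            // strips them from val, leaving 'fast  search ' for the loose-word check that follows.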
- const phraseRegex = /"(.+?)"/g; - const phrases = []; - while (true) { - const match = phraseRegex.exec(val); - if (match === null) { break; } - const phrase = match[1]; - phrases.push(phrase); - val = val.slice(0, match.index) + val.slice(match.index + match[0].length); - phraseRegex.lastIndex = 0; - } - - const phraseOptions: typeof options = {}; - Object.assign(phraseOptions, options); - phraseOptions.phrase = true; - const promises = phrases.map(phrase => this.query(op, phrase, phraseOptions)); - - // Check if what is left over still contains words - if (val.length > 0 && getTextInfo(val).wordCount > 0) { //(val.match(searchWordRegex) !== null) { - // Add it - const promise = this.query(op, val, options); - promises.push(promise); - } - - const resultSets = await Promise.all(promises); - stats.steps.push(...resultSets.map(results => results.stats)); - - // Take shortest set, only keep results that are matched in all other sets - const mergeStep = new IndexQueryStats('merge_reduce', { sets: resultSets.length, results: resultSets.reduce((total, set) => total + set.length, 0) }, true); - resultSets.length > 1 && stats.steps.push(mergeStep); - - const shortestSet = resultSets.sort((a,b) => a.length < b.length ? -1 : 1)[0]; - const otherSets = resultSets.slice(1); - const matches = shortestSet.reduce((matches, match) => { - // Check if the key is present in the other result sets - const path = match.path; - const matchedInAllSets = otherSets.every(set => set.findIndex(match => match.path === path) >= 0); - if (matchedInAllSets) { matches.push(match); } - return matches; - }, new IndexQueryResults()); - matches.filterKey = this.key; - mergeStep.stop(matches.length); - - stats.stop(matches.length); - matches.stats = stats; - matches.hints.push(...resultSets.reduce((hints, set) => { hints.push(...set.hints); return hints; }, [])); - return matches; - } - - const info = getTextInfo(val); - - /** - * Add ignored words to the result hints - */ - function addIgnoredWordHints(results: IndexQueryResults) { - // Add hints for ignored words - info.ignored.forEach(word => { - const hint = new FullTextIndexQueryHint(FullTextIndexQueryHint.types.ignoredWord, word); - results.hints.push(hint); - }); - } - - const words = info.toArray(); - if (words.length === 0) { - // Resolve with empty array - stats.stop(0); - const results = IndexQueryResults.fromResults([], this.key); - results.stats = stats; - addIgnoredWordHints(results); - return results; - } - - if (op === 'fulltext:!contains') { - // NEW: Use BlacklistingSearchOperator that uses all (unique) values in the index, - // besides the ones that get blacklisted along the way by our callback function - const wordChecks = words.map(word => { - if (word.includes('*') || word.includes('?')) { - const pattern = '^' + word.replace(/\*/g, '.*').replace(/\?/g, '.') + '$'; - const re = new RegExp(pattern, 'i'); - return re; - } - return word; - }); - const customOp = new BlacklistingSearchOperator(entry => { - const blacklist = wordChecks.some(word => { - if (word instanceof RegExp) { - return word.test(entry.key as string); - } - return entry.key === word; - }); - if (blacklist) { return entry.values; } - }); - - stats.type = 'fulltext_blacklist_scan'; - const results = await super.query(customOp); - stats.stop(results.length); - results.filterKey = this.key; - results.stats = stats; - addIgnoredWordHints(results); - - // Cache results - this.cache(op, val, results); - return results; - } - - // op === 'fulltext:contains' - // Get result count for each word - 
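            // Illustrative, made-up counts: for the query words ["zebra", "press"], if "zebra"
            // matches 3 index entries and "press" matches 30000, sorting the counts ascending
            // lets the sequential scan start from the 3 "zebra" records and filter only those
            // against "press", instead of carrying 30000 candidates through every step.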
const countPromises = words.map(word => { - const wildcardIndex = ~(~word.indexOf('*') || ~word.indexOf('?')); // TODO: improve readability - const wordOp = wildcardIndex >= 0 ? 'like' : '=='; - const step = new IndexQueryStats('count', { op: wordOp, word }, true); - stats.steps.push(step); - return super.count(wordOp, word) - .then(count => { - step.stop(count); - return { word, count }; - }); - }); - const counts = await Promise.all(countPromises); - // Start with the smallest result set - counts.sort((a, b) => { - if (a.count < b.count) { return -1; } - else if (a.count > b.count) { return 1; } - return 0; - }); - - let results: IndexQueryResults; - - if (counts[0].count === 0) { - stats.stop(0); - - this.storage.debug.log(`Word "${counts[0].word}" not found in index, 0 results for query ${op} "${val}"`); - results = new IndexQueryResults(0); - results.filterKey = this.key; - results.stats = stats; - addIgnoredWordHints(results); - - // Add query hints for each unknown word - counts.forEach(c => { - if (c.count === 0) { - const hint = new FullTextIndexQueryHint(FullTextIndexQueryHint.types.missingWord, c.word); - results.hints.push(hint); - } - }); - - // Cache the empty result set - this.cache(op, val, results); - return results; - } - const allWords = counts.map(c => c.word); - - // Sequentual method: query 1 word, then filter results further and further - // More or less performs the same as parallel, but uses less memory - // NEW: Start with the smallest result set - - // OLD: Use the longest word to search with, then filter those results - // const allWords = words.slice().sort((a,b) => { - // if (a.length < b.length) { return 1; } - // else if (a.length > b.length) { return -1; } - // return 0; - // }); - - const queryWord = async (word: string, filter: IndexQueryResults) => { - const wildcardIndex = ~(~word.indexOf('*') || ~word.indexOf('?')); // TODO: improve readability - const wordOp = wildcardIndex >= 0 ? 
'like' : '=='; - // const step = new IndexQueryStats('query', { op: wordOp, word }, true); - // stats.steps.push(step); - const results = await super.query(wordOp, word, { filter }); - stats.steps.push(results.stats); - // step.stop(results.length); - return results; - }; - let wordIndex = 0; - const resultsPerWord: IndexQueryResults[] = new Array(words.length); - const nextWord = async () => { - const word = allWords[wordIndex]; - const t1 = Date.now(); - const fr = await queryWord(word, results); - const t2 = Date.now(); - this.storage.debug.log(`fulltext search for "${word}" took ${t2-t1}ms`); - resultsPerWord[words.indexOf(word)] = fr; - results = fr; - wordIndex++; - if (results.length === 0 || wordIndex === allWords.length) { return; } - await nextWord(); - }; - await nextWord(); - - type MetaDataWithOccursArray = IndexMetaData & { _occurs_: number[] }; - - if (options.phrase === true && allWords.length > 1) { - // Check which results have the words in the right order - const step = new IndexQueryStats('phrase_check', val, true); - stats.steps.push(step); - results = results.reduce((matches, match) => { - // the order of the resultsPerWord is in the same order as the given words, - // check if their metadata._occurs_ say the same about the indexed content - const path = match.path; - const wordMatches = resultsPerWord.map(results => { - return results.find(result => result.path === path); - }); - // Convert the _occurs_ strings to arrays we can use - wordMatches.forEach(match => { - (match.metadata as MetaDataWithOccursArray)._occurs_ = (match.metadata._occurs_ as string).split(',').map(parseInt); - }); - const check = (wordMatchIndex: number, prevWordIndex?: number): boolean => { - const sourceIndexes = (wordMatches[wordMatchIndex].metadata as MetaDataWithOccursArray)._occurs_; - if (typeof prevWordIndex !== 'number') { - // try with each sourceIndex of the first word - for (let i = 0; i < sourceIndexes.length; i++) { - const found = check(1, sourceIndexes[i]); - if (found) { return true; } - } - return false; - } - // We're in a recursive call on the 2nd+ word - if (sourceIndexes.includes(prevWordIndex + 1)) { - // This word came after the previous word, hooray! - // Proceed with next word, or report success if this was the last word to check - if (wordMatchIndex === wordMatches.length-1) { return true; } - return check(wordMatchIndex+1, prevWordIndex+1); - } - else { - return false; - } - }; - if (check(0)) { - matches.push(match); // Keep! - } - return matches; - }, new IndexQueryResults()); - step.stop(results.length); - } - results.filterKey = this.key; - - stats.stop(results.length); - results.stats = stats; - addIgnoredWordHints(results); - - // Cache results - delete results.entryValues; // No need to cache these. Free the memory - this.cache(op, val, results); - return results; - - // Parallel method: query all words at the same time, then combine results - // Uses more memory - // const promises = words.map(word => { - // const wildcardIndex = ~(~word.indexOf('*') || ~word.indexOf('?')); - // let wordOp; - // if (op === 'fulltext:contains') { - // wordOp = wildcardIndex >= 0 ? 'like' : '=='; - // } - // else if (op === 'fulltext:!contains') { - // wordOp = wildcardIndex >= 0 ? 
'!like' : '!='; - // } - // // return super.query(wordOp, word) - // return super.query(wordOp, word) - // }); - // return Promise.all(promises) - // .then(resultSets => { - // // Now only use matches that exist in all result sets - // const sortedSets = resultSets.slice().sort((a,b) => a.length < b.length ? -1 : 1) - // const shortestSet = sortedSets[0]; - // const otherSets = sortedSets.slice(1); - // let matches = shortestSet.reduce((matches, match) => { - // // Check if the key is present in the other result sets - // const path = match.path; - // const matchedInAllSets = otherSets.every(set => set.findIndex(match => match.path === path) >= 0); - // if (matchedInAllSets) { matches.push(match); } - // return matches; - // }, new IndexQueryResults()); - - // if (options.phrase === true && resultSets.length > 1) { - // // Check if the words are in the right order - // console.log(`Breakpoint time`); - // matches = matches.reduce((matches, match) => { - // // the order of the resultSets is in the same order as the given words, - // // check if their metadata._indexes_ say the same about the indexed content - // const path = match.path; - // const wordMatches = resultSets.map(set => { - // return set.find(match => match.path === path); - // }); - // // Convert the _indexes_ strings to arrays we can use - // wordMatches.forEach(match => { - // // match.metadata._indexes_ = match.metadata._indexes_.split(',').map(parseInt); - // match.metadata._occurs_ = match.metadata._occurs_.split(',').map(parseInt); - // }); - // const check = (wordMatchIndex, prevWordIndex) => { - // const sourceIndexes = wordMatches[wordMatchIndex].metadata._occurs_; //wordMatches[wordMatchIndex].metadata._indexes_; - // if (typeof prevWordIndex !== 'number') { - // // try with each sourceIndex of the first word - // for (let i = 0; i < sourceIndexes.length; i++) { - // const found = check(1, sourceIndexes[i]); - // if (found) { return true; } - // } - // return false; - // } - // // We're in a recursive call on the 2nd+ word - // if (~sourceIndexes.indexOf(prevWordIndex + 1)) { - // // This word came after the previous word, hooray! - // // Proceed with next word, or report success if this was the last word to check - // if (wordMatchIndex === wordMatches.length-1) { return true; } - // return check(wordMatchIndex+1, prevWordIndex+1); - // } - // else { - // return false; - // } - // } - // if (check(0)) { - // matches.push(match); // Keep! 
- // } - // return matches; - // }, new IndexQueryResults()); - // } - // matches.filterKey = this.key; - // return matches; - // }); - } -} +import { DataIndex } from './data-index'; +import { DataIndexOptions } from './options'; +import { IndexQueryResults } from './query-results'; +import { Storage } from '../storage'; +import { IndexMetaData } from './shared'; +import { VALUE_TYPES } from '../node-value-types'; +import { BlacklistingSearchOperator } from '../btree'; +import { IndexQueryStats } from './query-stats'; +import { FullTextIndexQueryHint } from './fulltext-index-query-hint'; +import unidecode from '../unidecode'; +import { assert } from '../assert'; + +class WordInfo { + constructor(public word: string, public indexes: number[], public sourceIndexes: number[]) { } + get occurs() { + return this.indexes.length; + } +} + +// const _wordsRegex = /[\w']+/gmi; // TODO: should use a better pattern that supports non-latin characters +class TextInfo { + static get locales() { + return { + 'default': { + pattern: '[A-Za-z0-9\']+', + flags: 'gmi', + }, + 'en': { + // English stoplist from https://gist.github.com/sebleier/554280 + stoplist: ['i', 'me', 'my', 'myself', 'we', 'our', 'ours', 'ourselves', 'you', 'your', 'yours', 'yourself', 'yourselves', 'he', 'him', 'his', 'himself', 'she', 'her', 'hers', 'herself', 'it', 'its', 'itself', 'they', 'them', 'their', 'theirs', 'themselves', 'what', 'which', 'who', 'whom', 'this', 'that', 'these', 'those', 'am', 'is', 'are', 'was', 'were', 'be', 'been', 'being', 'have', 'has', 'had', 'having', 'do', 'does', 'did', 'doing', 'a', 'an', 'the', 'and', 'but', 'if', 'or', 'because', 'as', 'until', 'while', 'of', 'at', 'by', 'for', 'with', 'about', 'against', 'between', 'into', 'through', 'during', 'before', 'after', 'above', 'below', 'to', 'from', 'up', 'down', 'in', 'out', 'on', 'off', 'over', 'under', 'again', 'further', 'then', 'once', 'here', 'there', 'when', 'where', 'why', 'how', 'all', 'any', 'both', 'each', 'few', 'more', 'most', 'other', 'some', 'such', 'no', 'nor', 'not', 'only', 'own', 'same', 'so', 'than', 'too', 'very', 's', 't', 'can', 'will', 'just', 'don', 'should', 'now'], + }, + get(locale: string) { + const settings = {} as { pattern?: string, flags?: string, stoplist?: string[] }; + Object.assign(settings, this.default); + if (typeof this[locale] === 'undefined' && locale.indexOf('-') > 0) { + locale = locale.split('-')[1]; + } + if (typeof this[locale] === 'undefined') { + return settings; + } + Object.keys(this[locale]).forEach(key => { + (settings as any)[key] = this[locale][key]; + }); + return settings; + }, + }; + } + + public locale: string; + public words: Map; // WordInfo[]; + public ignored: string[]; + + getWordInfo(word: string): WordInfo { + return this.words.get(word); + } + + /** + * Reconstructs an array of words in the order they were encountered + */ + toSequence() { + const arr = [] as string[]; + for (const { word, indexes } of this.words.values()) { + for (const index of indexes) { + arr[index] = word; + } + } + return arr; + } + + /** + * Returns all unique words in an array + */ + toArray() { + const arr = [] as string[]; + for (const word of this.words.keys()) { + arr.push(word); + } + return arr; + } + + get uniqueWordCount() { + return this.words.size; //.length; + } + + get wordCount() { + let total = 0; + for (const wordInfo of this.words.values()) { + total += wordInfo.occurs; + } + return total; + // return this.words.reduce((total, word) => total + word.occurs, 0); + } + + constructor(text: string, 
options?: { + /** + * Set the text locale to accurately convert words to lowercase + * @default "en" + */ + locale?: string; + + /** + * Overrides the default RegExp pattern used + * @default "[\w']+" + */ + pattern?: RegExp | string; + + /** + * Add characters to the word detection regular expression. Useful to keep wildcards such as * and ? in query texts + */ + includeChars?: string; + + /** + * Overrides the default RegExp flags (`gmi`) used + * @default "gmi" + */ + flags?: string; + + /** + * Optional callback functions that pre-processes the value before performing word splitting. + */ + prepare?: (value: any, locale: string, keepChars: string) => string; + + /** + * Optional callback function that is able to perform word stemming. Will be executed before performing criteria checks + */ + stemming?: (word:string, locale:string) => string; + + /** + * Minimum length of words to include + * @default 1 + */ + minLength?: number; + + /** + * Maximum length of words to include, should be increased if you expect words in your texts + * like "antidisestablishmentarianism" (28), "floccinaucinihilipilification" (29) or "pneumonoultramicroscopicsilicovolcanoconiosis" (45) + * @default 25 + */ + maxLength?: number; + + /** + * Words to ignore. You can use a default stoplist from TextInfo.locales + */ + blacklist?: string[]; + + /** + * Words to include even if they do not meet the min & maxLength criteria + */ + whitelist?: string[]; + + /** + * Whether to use a default stoplist to blacklist words (if available for locale) + * @default false + */ + useStoplist?: boolean; + }) { + // this.text = text; // Be gone later... + this.locale = options.locale || 'en'; + const localeSettings = TextInfo.locales.get(this.locale); + let pattern = localeSettings.pattern; + if (options.pattern && options.pattern instanceof RegExp) { + pattern = options.pattern.source; + } + else if (typeof options.pattern === 'string') { + pattern = options.pattern; + } + if (options.includeChars) { + assert(pattern.indexOf('[') >= 0, 'pattern does not contain []'); + let insert = ''; + for (let i = 0; i < options.includeChars.length; i++) { + insert += '\\' + options.includeChars[i]; + } + let pos = -1; + while(true) { + const index = pattern.indexOf('[', pos + 1) + 1; + if (index === 0) { break; } + pattern = pattern.slice(0, index) + insert + pattern.slice(index); + pos = index; + } + } + let flags = localeSettings.flags; + if (typeof options.flags === 'string') { + flags = options.flags; + } + const re = new RegExp(pattern, flags); + const minLength = typeof options.minLength === 'number' ? options.minLength : 1; + const maxLength = typeof options.maxLength === 'number' ? options.maxLength : 25; + let blacklist = options.blacklist instanceof Array ? options.blacklist : []; + if (localeSettings.stoplist instanceof Array && options.useStoplist === true) { + blacklist = blacklist.concat(localeSettings.stoplist); + } + const whitelist = options.whitelist instanceof Array ? options.whitelist : []; + + const words = this.words = new Map(); + this.ignored = []; + if (text === null || typeof text === 'undefined') { return; } + + if (options.prepare) { + // Pre-process text. Allows decompression, decrypting, custom stemming etc + text = options.prepare(text, this.locale, `"${options.includeChars ?? 
''}`); + } + + // Unidecode text to get ASCII characters only + function safe_unidecode (str: string) { + // Fix for occasional multi-pass issue, copied from https://github.com/FGRibreau/node-unidecode/issues/16 + let ret; + while (str !== (ret = unidecode(str))) { + str = ret; + } + return ret; + } + text = safe_unidecode(text); + + // Remove any single quotes, so "don't" will be stored as "dont", "isn't" as "isnt" etc + text = text.replace(/'/g, ''); + + // Process the text + // const wordsRegex = /[\w']+/gu; + let wordIndex = 0; + while(true) { + const match = re.exec(text); + if (match === null) { break; } + let word = match[0]; + + // TODO: use stemming such as snowball (https://www.npmjs.com/package/snowball-stemmers) + // to convert words like "having" to "have", and "cycles", "cycle", "cycling" to "cycl" + if (typeof options.stemming === 'function') { + // Let callback function perform word stemming + const stemmed = options.stemming(word, this.locale); + if (typeof stemmed !== 'string') { + // Ignore this word + if (this.ignored.indexOf(word) < 0) { + this.ignored.push(word); + } + // Do not increase wordIndex + continue; + } + word = stemmed; + } + + word = word.toLocaleLowerCase(this.locale); + + if (word.length < minLength || ~blacklist.indexOf(word)) { + // Word does not meet set criteria + if (!~whitelist.indexOf(word)) { + // Not whitelisted either + if (this.ignored.indexOf(word) < 0) { + this.ignored.push(word); + } + // Do not increase wordIndex + continue; + } + } + else if (word.length > maxLength) { + // Use the word, but cut it to the max length + word = word.slice(0, maxLength); + } + + let wordInfo = words.get(word); + if (wordInfo) { + wordInfo.indexes.push(wordIndex); + wordInfo.sourceIndexes.push(match.index); + } + else { + wordInfo = new WordInfo(word, [wordIndex], [match.index]); + words.set(word, wordInfo); + } + wordIndex++; + } + } + +} + +export interface FullTextIndexOptions extends DataIndexOptions { + /** + * FullText configuration settings. + * NOTE: these settings are not stored in the index file because they contain callback functions + * that might not work after a (de)serializion cycle. Besides this, it is also better for security + * reasons not to store executable code in index files! + * + * That means that in order to keep fulltext indexes working as intended, you will have to: + * - call `db.indexes.create` for fulltext indexes each time your app starts, even if the index exists already + * - rebuild the index if you change this config. (pass `rebuild: true` in the index options) + */ + config?: { + /** + * callback function that prepares a text value for indexing. + * Useful to perform any actions on the text before it is split into words, such as: + * - transforming compressed / encrypted data to strings + * - perform custom word stemming: allows you to replace strings like `I've` to `I have` + * Important: do not remove any of the characters passed in `keepChars` (`"*?`)! + */ + prepare?: (value: any, locale: string, keepChars?: string) => string; + + /** + * callback function that transforms (or filters) words being indexed + */ + transform?: (word: string, locale:string) => string; + + /** + * words to be ignored + */ + blacklist?: string[]; + + /** + * Uses a locale specific stoplist to automatically blacklist words + * @default true + */ + useStoplist?: boolean; + + /** + * Words to be included if they did not match other criteria + */ + whitelist?: string[]; + + /** + * Uses the value of a specific key as locale. 
Allows different languages to be indexed correctly, + * overrides options.textLocale + * @deprecated move to options.textLocaleKey + */ + localeKey?: string; + + /** + * Minimum length for words to be indexed (after transform) + */ + minLength?: number; + + /** + * Maximum length for words to be indexed (after transform) + */ + maxLength?: number; + } +} + +export interface FullTextContainsQueryOptions { + /** + * Locale to use for the words in the query. When omitted, the default index locale is used + */ + locale?: string; + + /** + * Used internally: treats the words in val as a phrase, eg: "word1 word2 word3": words need to occur in this exact order + */ + phrase?: boolean; + + /** + * Sets minimum amount of characters that have to be used for wildcard (sub)queries such as "a%" to guard the + * system against extremely large result sets. Length does not include the wildcard characters itself. Default + * value is 2 (allows "an*" but blocks "a*") + * @default 2 + */ + minimumWildcardWordLength?: number; +} + +/** + * A full text index allows all words in text nodes to be indexed and searched. + * Eg: "Every word in this text must be indexed." will be indexed with every word + * and can be queried with filters 'contains' and '!contains' a word, words or pattern. + * Eg: 'contains "text"', 'contains "text indexed"', 'contains "text in*"' will all match the text above. + * This does not use a thesauris or word lists (yet), so 'contains "query"' will not match. + * Each word will be stored and searched in lowercase + */ +export class FullTextIndex extends DataIndex { + + public config: FullTextIndexOptions['config']; + + constructor(storage: Storage, path: string, key: string, options: FullTextIndexOptions) { + if (key === '{key}') { throw new Error('Cannot create fulltext index on node keys'); } + super(storage, path, key, options); + // this.enableReverseLookup = true; + this.indexMetadataKeys = ['_occurs_']; //,'_indexes_' + this.config = options.config || {}; + if (this.config.localeKey) { + // localeKey is supported by all indexes now + this.logger.warn(`fulltext index config option "localeKey" has been deprecated, as it is now supported for all indexes. Move the setting to the global index settings`); + this.textLocaleKey = this.config.localeKey; // Do use it now + } + } + + // get fileName() { + // return super.fileName.slice(0, -4) + '.fulltext.idx'; + // } + + get type() { + return 'fulltext'; + } + + getTextInfo(val: string, locale?: string) { + return new TextInfo(val, { + locale: locale ?? this.textLocale, + prepare: this.config.prepare, + stemming: this.config.transform, + blacklist: this.config.blacklist, + whitelist: this.config.whitelist, + useStoplist: this.config.useStoplist, + minLength: this.config.minLength, + maxLength: this.config.maxLength, + }); + } + + test(obj: any, op: 'fulltext:contains' | 'fulltext:!contains', val: string): boolean { + if (obj === null) { return op === 'fulltext:!contains'; } + const text = obj[this.key]; + if (typeof text === 'undefined') { return op === 'fulltext:!contains'; } + + const locale = obj?.[this.textLocaleKey] ?? this.textLocale; + const textInfo = this.getTextInfo(text, locale); + if (op === 'fulltext:contains') { + if (~val.indexOf(' OR ')) { + // split + const tests = val.split(' OR '); + return tests.some(val => this.test(text, op, val)); + } + else if (~val.indexOf('"')) { + // Phrase(s) used. We have to make sure the words used are not only in the text, + // but also in that exact order. 
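                // Worked example: in a text "quick brown fox", the phrase query '"quick brown"'
                // matches because "brown" occurs at the word index directly after "quick";
                // '"brown quick"' has both words present but not consecutive, so it does not match.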
+ const phraseRegex = /"(.+?)"/g; + const phrases = []; + while (true) { + const match = phraseRegex.exec(val); + if (match === null) { break; } + const phrase = match[1]; + phrases.push(phrase); + val = val.slice(0, match.index) + val.slice(match.index + match[0].length); + phraseRegex.lastIndex = 0; + } + if (val.length > 0) { + phrases.push(val); + } + return phrases.every(phrase => { + const phraseInfo = this.getTextInfo(phrase, locale); + + // This was broken before TS port because WordInfo had an array of words that was not + // in the same order as the source words were. + // TODO: Thoroughly test this new code + const phraseWords = phraseInfo.toSequence(); + const occurrencesPerWord = phraseWords.map((word, i) => { + // Find word in text + const { indexes } = textInfo.words.get(word); + return indexes; + }); + const hasSequenceAtIndex = (wordIndex: number, occurrenceIndex: number): boolean => { + const startIndex = occurrencesPerWord[wordIndex]?.[occurrenceIndex]; + return occurrencesPerWord.slice(wordIndex + 1).every((occurences, i) => { + return occurences.some((index, j) => { + if (index !== startIndex + 1) { return false; } + return hasSequenceAtIndex(wordIndex + i, j); + }); + }); + }; + + // Find the existence of a sequence of words + // Loop: for each occurrence of the first word in text, remember its index + // Try to find second word in text with index+1 + // - found: try to find third word in text with index+2, etc (recursive) + // - not found: stop, proceed with next occurrence in main loop + return occurrencesPerWord[0].some((occurrence, i) => { + return hasSequenceAtIndex(0, i); + }); + + // const indexes = phraseInfo.words.map(word => textInfo.words.indexOf(word)); + // if (indexes[0] < 0) { return false; } + // for (let i = 1; i < indexes.length; i++) { + // if (indexes[i] - indexes[i-1] !== 1) { + // return false; + // } + // } + // return true; + }); + } + else { + // test 1 or more words + const wordsInfo = this.getTextInfo(val, locale); + return wordsInfo.toSequence().every(word => { + return textInfo.words.has(word); + }); + } + } + } + + async handleRecordUpdate(path: string, oldValue: any, newValue: any): Promise { + let oldText = oldValue !== null && typeof oldValue === 'object' && this.key in oldValue ? (oldValue as any)[this.key] : null; + let newText = newValue !== null && typeof newValue === 'object' && this.key in newValue ? (newValue as any)[this.key] : null; + + const oldLocale = oldValue?.[this.textLocaleKey] ?? this.textLocale, + newLocale = newValue?.[this.textLocaleKey] ?? this.textLocale; + + if (typeof oldText === 'object' && oldText instanceof Array) { + oldText = oldText.join(' '); + } + if (typeof newText === 'object' && newText instanceof Array) { + newText = newText.join(' '); + } + + const oldTextInfo = this.getTextInfo(oldText, oldLocale); + const newTextInfo = this.getTextInfo(newText, newLocale); + + // super._updateReverseLookupKey( + // path, + // oldText ? textEncoder.encode(oldText) : null, + // newText ? 
textEncoder.encode(newText) : null, + // metadata + // ); + + const oldWords = oldTextInfo.toArray(); //.words.map(w => w.word); + const newWords = newTextInfo.toArray(); //.words.map(w => w.word); + + const removed = oldWords.filter(word => newWords.indexOf(word) < 0); + const added = newWords.filter(word => oldWords.indexOf(word) < 0); + const changed = oldWords.filter(word => newWords.indexOf(word) >= 0).filter(word => { + const oldInfo = oldTextInfo.getWordInfo(word); + const newInfo = newTextInfo.getWordInfo(word); + return oldInfo.occurs !== newInfo.occurs || oldInfo.indexes.some((index, i) => newInfo.indexes[i] !== index); + }); + changed.forEach(word => { + // Word metadata changed. Simplest solution: remove and add again + removed.push(word); + added.push(word); + }); + const promises = [] as Promise[]; + // TODO: Prepare operations batch, then execute 1 tree update. + // Now every word is a seperate update which is not necessary! + removed.forEach(word => { + const p = super.handleRecordUpdate(path, { [this.key]: word }, { [this.key]: null }); + promises.push(p); + }); + added.forEach(word => { + const mutated: Record = { }; + Object.assign(mutated, newValue); + mutated[this.key] = word; + + const wordInfo = newTextInfo.getWordInfo(word); + // const indexMetadata = { + // '_occurs_': wordInfo.occurs, + // '_indexes_': wordInfo.indexes.join(',') + // }; + + let occurs = wordInfo.indexes.join(','); + if (occurs.length > 255) { + console.warn(`FullTextIndex ${this.description}: word "${word}" occurs too many times in "${path}/${this.key}" to store in index metadata. Truncating occurrences`); + const cutIndex = occurs.lastIndexOf(',', 255); + occurs = occurs.slice(0, cutIndex); + } + const indexMetadata = { + '_occurs_': occurs, + }; + const p = super.handleRecordUpdate(path, { [this.key]: null }, mutated, indexMetadata); + promises.push(p); + }); + await Promise.all(promises); + } + + build() { + return super.build({ + addCallback: (add, text: string | string[], recordPointer, metadata, env) => { + if (typeof text === 'object' && text instanceof Array) { + text = text.join(' '); + } + if (typeof text === 'undefined') { + text = ''; + } + const locale = env.locale || this.textLocale; + const textInfo = this.getTextInfo(text, locale); + if (textInfo.words.size === 0) { + this.logger.warn(`No words found in "${typeof text === 'string' && text.length > 50 ? text.slice(0, 50) + '...' : text}" to fulltext index "${env.path}"`); + } + + // const revLookupKey = super._getRevLookupKey(env.path); + // tree.add(revLookupKey, textEncoder.encode(text), metadata); + + textInfo.words.forEach(wordInfo => { + + // IDEA: To enable fast '*word' queries (starting with wildcard), we can also store + // reversed words and run reversed query 'drow*' on it. we'd have to enable storing + // multiple B+Trees in a single index file: a 'forward' tree & a 'reversed' tree + + // IDEA: Following up on previous idea: being able to backtrack nodes within an index would + // help to speed up sorting queries on an indexed key, + // eg: query .take(10).filter('rating','>=', 8).sort('title') + // does not filter on key 'title', but can then use an index on 'title' for the sorting: + // it can take the results from the 'rating' index and backtrack the nodes' titles to quickly + // get a sorted top 10. We'd have to store a seperate 'backtrack' tree that uses recordPointers + // as the key, and 'title' values as recordPointers. 
Caveat: max string length for sorting would + // then be 255 ASCII chars, because that's the recordPointer size limit. + // The same boost can currently only be achieved by creating an index that includes 'title' in + // the index on 'rating' ==> db.indexes.create('movies', 'rating', { include: ['title'] }) + + // Extend metadata with more details about the word (occurrences, positions) + // const wordMetadata = { + // '_occurs_': wordInfo.occurs, + // '_indexes_': wordInfo.indexes.join(',') + // }; + + let occurs = wordInfo.indexes.join(','); + if (occurs.length > 255) { + console.warn(`FullTextIndex ${this.description}: word "${wordInfo.word}" occurs too many times to store in index metadata. Truncating occurrences`); + const cutIndex = occurs.lastIndexOf(',', 255); + occurs = occurs.slice(0, cutIndex); + } + const wordMetadata: IndexMetaData = { + '_occurs_': occurs, + }; + Object.assign(wordMetadata, metadata); + add(wordInfo.word, recordPointer, wordMetadata); + }); + return textInfo.toArray(); //words.map(info => info.word); + }, + valueTypes: [VALUE_TYPES.STRING], + }); + } + + static get validOperators() { + return ['fulltext:contains', 'fulltext:!contains']; + } + get validOperators() { + return FullTextIndex.validOperators; + } + + async query(op: string | BlacklistingSearchOperator, val?: string, options?: any) { + if (op instanceof BlacklistingSearchOperator) { + throw new Error(`Not implemented: Can't query fulltext index with blacklisting operator yet`); + } + if (op === 'fulltext:contains' || op === 'fulltext:!contains') { + return this.contains(op, val, options); + } + else { + throw new Error(`Fulltext indexes can only be queried with operators ${FullTextIndex.validOperators.map(op => `"${op}"`).join(', ')}`); + } + } + + /** + * + * @param op Operator to use, can be either "fulltext:contains" or "fulltext:!contains" + * @param val Text to search for. Can include * and ? wildcards, OR's for combined searches, and "quotes" for phrase searches + */ + async contains(op: 'fulltext:contains' | 'fulltext:!contains', val: string, options: FullTextContainsQueryOptions = { + phrase: false, + locale: undefined, + minimumWildcardWordLength: 2, + }): Promise { + if (!FullTextIndex.validOperators.includes(op)) { //if (op !== 'fulltext:contains' && op !== 'fulltext:not_contains') { + throw new Error(`Fulltext indexes can only be queried with operators ${FullTextIndex.validOperators.map(op => `"${op}"`).join(', ')}`); + } + + // Check cache + const cache = this.cache(op, val); + if (cache) { + // Use cached results + return Promise.resolve(cache); + } + + const stats = new IndexQueryStats(options.phrase ? 'fulltext_phrase_query' : 'fulltext_query', val, true); + + // const searchWordRegex = /[\w'?*]+/g; // Use TextInfo to find and transform words using index settings + const getTextInfo = (text: string) => { + const info = new TextInfo(text, { + locale: options.locale || this.textLocale, + prepare: this.config.prepare, + stemming: this.config.transform, + minLength: this.config.minLength, + maxLength: this.config.maxLength, + blacklist: this.config.blacklist, + whitelist: this.config.whitelist, + useStoplist: this.config.useStoplist, + includeChars: '*?', + }); + + // Ignore any wildcard words that do not meet the set minimum length + // This is to safeguard the system against (possibly unwanted) very large + // result sets + const words = info.toArray(); + let i; + while (i = words.findIndex(w => /^[*?]+$/.test(w)), i >= 0) { + // Word is wildcards only. 
Ignore + const word = words[i]; + info.ignored.push(word); + info.words.delete(word); + } + + if (options.minimumWildcardWordLength > 0) { + for (const word of words) { + const starIndex = word.indexOf('*'); + // min = 2, word = 'an*', starIndex = 2, ok! + // min = 3: starIndex < min: not ok! + if (starIndex > 0 && starIndex < options.minimumWildcardWordLength) { + info.ignored.push(word); + info.words.delete(word); + i--; + } + } + } + return info; + }; + + if (val.includes(' OR ')) { + // Multiple searches in one query: 'secret OR confidential OR "don't tell"' + // TODO: chain queries instead of running simultanious? + const queries = val.split(' OR '); + const promises = queries.map(q => this.query(op, q, options)); + const resultSets = await Promise.all(promises); + stats.steps.push(...resultSets.map(results => results.stats)); + + const mergeStep = new IndexQueryStats('merge_expand', { sets: resultSets.length, results: resultSets.reduce((total, set) => total + set.length, 0) }, true); + stats.steps.push(mergeStep); + + const merged = resultSets[0]; + resultSets.slice(1).forEach(results => { + results.forEach(result => { + const exists = ~merged.findIndex(r => r.path === result.path); + if (!exists) { merged.push(result); } + }); + }); + const results = IndexQueryResults.fromResults(merged, this.key); + mergeStep.stop(results.length); + + stats.stop(results.length); + results.stats = stats; + results.hints.push(...resultSets.reduce((hints, set) => { hints.push(...set.hints); return hints; }, [])); + return results; + } + if (val.includes('"')) { + // Phrase(s) used. We have to make sure the words used are not only in the text, + // but also in that exact order. + const phraseRegex = /"(.+?)"/g; + const phrases = []; + while (true) { + const match = phraseRegex.exec(val); + if (match === null) { break; } + const phrase = match[1]; + phrases.push(phrase); + val = val.slice(0, match.index) + val.slice(match.index + match[0].length); + phraseRegex.lastIndex = 0; + } + + const phraseOptions: typeof options = {}; + Object.assign(phraseOptions, options); + phraseOptions.phrase = true; + const promises = phrases.map(phrase => this.query(op, phrase, phraseOptions)); + + // Check if what is left over still contains words + if (val.length > 0 && getTextInfo(val).wordCount > 0) { //(val.match(searchWordRegex) !== null) { + // Add it + const promise = this.query(op, val, options); + promises.push(promise); + } + + const resultSets = await Promise.all(promises); + stats.steps.push(...resultSets.map(results => results.stats)); + + // Take shortest set, only keep results that are matched in all other sets + const mergeStep = new IndexQueryStats('merge_reduce', { sets: resultSets.length, results: resultSets.reduce((total, set) => total + set.length, 0) }, true); + resultSets.length > 1 && stats.steps.push(mergeStep); + + const shortestSet = resultSets.sort((a,b) => a.length < b.length ? 
-1 : 1)[0]; + const otherSets = resultSets.slice(1); + const matches = shortestSet.reduce((matches, match) => { + // Check if the key is present in the other result sets + const path = match.path; + const matchedInAllSets = otherSets.every(set => set.findIndex(match => match.path === path) >= 0); + if (matchedInAllSets) { matches.push(match); } + return matches; + }, new IndexQueryResults()); + matches.filterKey = this.key; + mergeStep.stop(matches.length); + + stats.stop(matches.length); + matches.stats = stats; + matches.hints.push(...resultSets.reduce((hints, set) => { hints.push(...set.hints); return hints; }, [])); + return matches; + } + + const info = getTextInfo(val); + + /** + * Add ignored words to the result hints + */ + function addIgnoredWordHints(results: IndexQueryResults) { + // Add hints for ignored words + info.ignored.forEach(word => { + const hint = new FullTextIndexQueryHint(FullTextIndexQueryHint.types.ignoredWord, word); + results.hints.push(hint); + }); + } + + const words = info.toArray(); + if (words.length === 0) { + // Resolve with empty array + stats.stop(0); + const results = IndexQueryResults.fromResults([], this.key); + results.stats = stats; + addIgnoredWordHints(results); + return results; + } + + if (op === 'fulltext:!contains') { + // NEW: Use BlacklistingSearchOperator that uses all (unique) values in the index, + // besides the ones that get blacklisted along the way by our callback function + const wordChecks = words.map(word => { + if (word.includes('*') || word.includes('?')) { + const pattern = '^' + word.replace(/\*/g, '.*').replace(/\?/g, '.') + '$'; + const re = new RegExp(pattern, 'i'); + return re; + } + return word; + }); + const customOp = new BlacklistingSearchOperator(entry => { + const blacklist = wordChecks.some(word => { + if (word instanceof RegExp) { + return word.test(entry.key as string); + } + return entry.key === word; + }); + if (blacklist) { return entry.values; } + }); + + stats.type = 'fulltext_blacklist_scan'; + const results = await super.query(customOp); + stats.stop(results.length); + results.filterKey = this.key; + results.stats = stats; + addIgnoredWordHints(results); + + // Cache results + this.cache(op, val, results); + return results; + } + + // op === 'fulltext:contains' + // Get result count for each word + const countPromises = words.map(word => { + const wildcardIndex = ~(~word.indexOf('*') || ~word.indexOf('?')); // TODO: improve readability + const wordOp = wildcardIndex >= 0 ? 
'like' : '=='; + const step = new IndexQueryStats('count', { op: wordOp, word }, true); + stats.steps.push(step); + return super.count(wordOp, word) + .then(count => { + step.stop(count); + return { word, count }; + }); + }); + const counts = await Promise.all(countPromises); + // Start with the smallest result set + counts.sort((a, b) => { + if (a.count < b.count) { return -1; } + else if (a.count > b.count) { return 1; } + return 0; + }); + + let results: IndexQueryResults; + + if (counts[0].count === 0) { + stats.stop(0); + + this.logger.info(`Word "${counts[0].word}" not found in index, 0 results for query ${op} "${val}"`); + results = new IndexQueryResults(0); + results.filterKey = this.key; + results.stats = stats; + addIgnoredWordHints(results); + + // Add query hints for each unknown word + counts.forEach(c => { + if (c.count === 0) { + const hint = new FullTextIndexQueryHint(FullTextIndexQueryHint.types.missingWord, c.word); + results.hints.push(hint); + } + }); + + // Cache the empty result set + this.cache(op, val, results); + return results; + } + const allWords = counts.map(c => c.word); + + // Sequentual method: query 1 word, then filter results further and further + // More or less performs the same as parallel, but uses less memory + // NEW: Start with the smallest result set + + // OLD: Use the longest word to search with, then filter those results + // const allWords = words.slice().sort((a,b) => { + // if (a.length < b.length) { return 1; } + // else if (a.length > b.length) { return -1; } + // return 0; + // }); + + const queryWord = async (word: string, filter: IndexQueryResults) => { + const wildcardIndex = ~(~word.indexOf('*') || ~word.indexOf('?')); // TODO: improve readability + const wordOp = wildcardIndex >= 0 ? 'like' : '=='; + // const step = new IndexQueryStats('query', { op: wordOp, word }, true); + // stats.steps.push(step); + const results = await super.query(wordOp, word, { filter }); + stats.steps.push(results.stats); + // step.stop(results.length); + return results; + }; + let wordIndex = 0; + const resultsPerWord: IndexQueryResults[] = new Array(words.length); + const nextWord = async () => { + const word = allWords[wordIndex]; + const t1 = Date.now(); + const fr = await queryWord(word, results); + const t2 = Date.now(); + this.logger.info(`fulltext search for "${word}" took ${t2-t1}ms`); + resultsPerWord[words.indexOf(word)] = fr; + results = fr; + wordIndex++; + if (results.length === 0 || wordIndex === allWords.length) { return; } + await nextWord(); + }; + await nextWord(); + + type MetaDataWithOccursArray = IndexMetaData & { _occurs_: number[] }; + + if (options.phrase === true && allWords.length > 1) { + // Check which results have the words in the right order + const step = new IndexQueryStats('phrase_check', val, true); + stats.steps.push(step); + results = results.reduce((matches, match) => { + // the order of the resultsPerWord is in the same order as the given words, + // check if their metadata._occurs_ say the same about the indexed content + const path = match.path; + const wordMatches = resultsPerWord.map(results => { + return results.find(result => result.path === path); + }); + // Convert the _occurs_ strings to arrays we can use + wordMatches.forEach(match => { + (match.metadata as MetaDataWithOccursArray)._occurs_ = (match.metadata._occurs_ as string).split(',').map(parseInt); + }); + const check = (wordMatchIndex: number, prevWordIndex?: number): boolean => { + const sourceIndexes = (wordMatches[wordMatchIndex].metadata as 
MetaDataWithOccursArray)._occurs_; + if (typeof prevWordIndex !== 'number') { + // try with each sourceIndex of the first word + for (let i = 0; i < sourceIndexes.length; i++) { + const found = check(1, sourceIndexes[i]); + if (found) { return true; } + } + return false; + } + // We're in a recursive call on the 2nd+ word + if (sourceIndexes.includes(prevWordIndex + 1)) { + // This word came after the previous word, hooray! + // Proceed with next word, or report success if this was the last word to check + if (wordMatchIndex === wordMatches.length-1) { return true; } + return check(wordMatchIndex+1, prevWordIndex+1); + } + else { + return false; + } + }; + if (check(0)) { + matches.push(match); // Keep! + } + return matches; + }, new IndexQueryResults()); + step.stop(results.length); + } + results.filterKey = this.key; + + stats.stop(results.length); + results.stats = stats; + addIgnoredWordHints(results); + + // Cache results + delete results.entryValues; // No need to cache these. Free the memory + this.cache(op, val, results); + return results; + + // Parallel method: query all words at the same time, then combine results + // Uses more memory + // const promises = words.map(word => { + // const wildcardIndex = ~(~word.indexOf('*') || ~word.indexOf('?')); + // let wordOp; + // if (op === 'fulltext:contains') { + // wordOp = wildcardIndex >= 0 ? 'like' : '=='; + // } + // else if (op === 'fulltext:!contains') { + // wordOp = wildcardIndex >= 0 ? '!like' : '!='; + // } + // // return super.query(wordOp, word) + // return super.query(wordOp, word) + // }); + // return Promise.all(promises) + // .then(resultSets => { + // // Now only use matches that exist in all result sets + // const sortedSets = resultSets.slice().sort((a,b) => a.length < b.length ? 
-1 : 1) + // const shortestSet = sortedSets[0]; + // const otherSets = sortedSets.slice(1); + // let matches = shortestSet.reduce((matches, match) => { + // // Check if the key is present in the other result sets + // const path = match.path; + // const matchedInAllSets = otherSets.every(set => set.findIndex(match => match.path === path) >= 0); + // if (matchedInAllSets) { matches.push(match); } + // return matches; + // }, new IndexQueryResults()); + + // if (options.phrase === true && resultSets.length > 1) { + // // Check if the words are in the right order + // console.log(`Breakpoint time`); + // matches = matches.reduce((matches, match) => { + // // the order of the resultSets is in the same order as the given words, + // // check if their metadata._indexes_ say the same about the indexed content + // const path = match.path; + // const wordMatches = resultSets.map(set => { + // return set.find(match => match.path === path); + // }); + // // Convert the _indexes_ strings to arrays we can use + // wordMatches.forEach(match => { + // // match.metadata._indexes_ = match.metadata._indexes_.split(',').map(parseInt); + // match.metadata._occurs_ = match.metadata._occurs_.split(',').map(parseInt); + // }); + // const check = (wordMatchIndex, prevWordIndex) => { + // const sourceIndexes = wordMatches[wordMatchIndex].metadata._occurs_; //wordMatches[wordMatchIndex].metadata._indexes_; + // if (typeof prevWordIndex !== 'number') { + // // try with each sourceIndex of the first word + // for (let i = 0; i < sourceIndexes.length; i++) { + // const found = check(1, sourceIndexes[i]); + // if (found) { return true; } + // } + // return false; + // } + // // We're in a recursive call on the 2nd+ word + // if (~sourceIndexes.indexOf(prevWordIndex + 1)) { + // // This word came after the previous word, hooray! + // // Proceed with next word, or report success if this was the last word to check + // if (wordMatchIndex === wordMatches.length-1) { return true; } + // return check(wordMatchIndex+1, prevWordIndex+1); + // } + // else { + // return false; + // } + // } + // if (check(0)) { + // matches.push(match); // Keep! 
+ // } + // return matches; + // }, new IndexQueryResults()); + // } + // matches.filterKey = this.key; + // return matches; + // }); + } +} diff --git a/src/data-index/geo-index.ts b/src/data-index/geo-index.ts index ec11978..5dcab97 100644 --- a/src/data-index/geo-index.ts +++ b/src/data-index/geo-index.ts @@ -1,284 +1,284 @@ -import type { Storage } from '../storage'; -import { BlacklistingSearchOperator } from '../btree'; -import { VALUE_TYPES } from '../node-value-types'; -import { DataIndex } from './data-index'; -import { DataIndexOptions } from './options'; -import { IndexQueryResults } from './query-results'; -import { IndexQueryStats } from './query-stats'; -import { IndexableValueOrArray } from './shared'; -import * as Geohash from '../geohash'; - -function _getGeoRadiusPrecision(radiusM: number) { - if (typeof radiusM !== 'number') { return; } - if (radiusM < 0.01) { return 12; } - if (radiusM < 0.075) { return 11; } - if (radiusM < 0.6) { return 10; } - if (radiusM < 2.3) { return 9; } - if (radiusM < 19) { return 8; } - if (radiusM < 76) { return 7; } - if (radiusM < 610) { return 6; } - if (radiusM < 2400) { return 5; } - if (radiusM < 19500) { return 4; } - if (radiusM < 78700) { return 3; } - if (radiusM < 626000) { return 2; } - return 1; -} - -function _getGeoHash(obj: { lat: number; long: number }) { - if (typeof obj.lat !== 'number' || typeof obj.long !== 'number') { - return; - } - const precision = 10; //_getGeoRadiusPrecision(obj.radius); - const geohash = Geohash.encode(obj.lat, obj.long, precision); - return geohash; -} - -// Calculates which hashes (of different precisions) are within the radius of a point -function _hashesInRadius(lat: number, lon: number, radiusM: number, precision: number) { - - const isInCircle = (checkLat: number, checkLon: number, lat: number, lon: number, radiusM: number) => { - const deltaLon = checkLon - lon; - const deltaLat = checkLat - lat; - return Math.pow(deltaLon, 2) + Math.pow(deltaLat, 2) <= Math.pow(radiusM, 2); - }; - const getCentroid = (latitude: number, longitude: number, height: number, width: number) => { - const y_cen = latitude + (height / 2); - const x_cen = longitude + (width / 2); - return { x: x_cen, y: y_cen }; - }; - const convertToLatLon = (y: number, x: number, lat: number, lon: number) => { - const pi = 3.14159265359; - const r_earth = 6371000; - - const lat_diff = (y / r_earth) * (180 / pi); - const lon_diff = (x / r_earth) * (180 / pi) / Math.cos(lat * pi/180); - - const final_lat = lat + lat_diff; - const final_lon = lon + lon_diff; - - return { lat: final_lat, lon: final_lon }; - }; - - const x = 0; - const y = 0; - - const points = [] as Array<{ lat: number; lon: number; }>; - const geohashes = [] as string[]; - - const gridWidths = [5009400.0, 1252300.0, 156500.0, 39100.0, 4900.0, 1200.0, 152.9, 38.2, 4.8, 1.2, 0.149, 0.0370]; - const gridHeights = [4992600.0, 624100.0, 156000.0, 19500.0, 4900.0, 609.4, 152.4, 19.0, 4.8, 0.595, 0.149, 0.0199]; - - const height = gridHeights[precision-1] / 2; - const width = gridWidths[precision-1] / 2; - - const latMoves = Math.ceil(radiusM / height); - const lonMoves = Math.ceil(radiusM / width); - - for (let i = 0; i <= latMoves; i++) { - const tmpLat = y + height*i; - - for (let j = 0; j < lonMoves; j++) { - const tmpLon = x + width * j; - - if (isInCircle(tmpLat, tmpLon, y, x, radiusM)) { - const center = getCentroid(tmpLat, tmpLon, height, width); - points.push(convertToLatLon(center.y, center.x, lat, lon)); - points.push(convertToLatLon(-center.y, center.x, lat, 
lon)); - points.push(convertToLatLon(center.y, -center.x, lat, lon)); - points.push(convertToLatLon(-center.y, -center.x, lat, lon)); - } - } - } - - points.forEach(point => { - const hash = Geohash.encode(point.lat, point.lon, precision); - if (geohashes.indexOf(hash) < 0) { - geohashes.push(hash); - } - }); - - // Original optionally uses Georaptor compression of geohashes - // This is my simple implementation - geohashes.forEach((currentHash, index, arr) => { - const precision = currentHash.length; - const parentHash = currentHash.substr(0, precision-1); - let hashNeighbourMatches = 0; - const removeIndexes = []; - arr.forEach((otherHash, otherIndex) => { - if (otherHash.startsWith(parentHash)) { - removeIndexes.push(otherIndex); - if (otherHash.length == precision) { - hashNeighbourMatches++; - } - } - }); - if (hashNeighbourMatches === 32) { - // All 32 areas of a less precise geohash are included. - // Replace those with the less precise parent - for (let i = removeIndexes.length - 1; i >= 0; i--) { - arr.splice(i, 1); - } - arr.splice(index, 0, parentHash); - } - }); - - return geohashes; -} - -export class GeoIndex extends DataIndex { - constructor(storage: Storage, path: string, key: string, options: DataIndexOptions) { - if (key === '{key}') { throw new Error('Cannot create geo index on node keys'); } - super(storage, path, key, options); - } - - // get fileName() { - // return super.fileName.slice(0, -4) + '.geo.idx'; - // } - - get type() { - return 'geo'; - } - - async handleRecordUpdate(path: string, oldValue: unknown, newValue: unknown) { - const mutated = { old: {} as any, new: {} as any }; - oldValue !== null && typeof oldValue === 'object' && Object.assign(mutated.old, oldValue); - newValue !== null && typeof newValue === 'object' && Object.assign(mutated.new, newValue); - if (mutated.old[this.key] !== null && typeof mutated.old[this.key] === 'object') { - mutated.old[this.key] = _getGeoHash(mutated.old[this.key]); - } - if (mutated.new[this.key] !== null && typeof mutated.new[this.key] === 'object') { - mutated.new[this.key] = _getGeoHash(mutated.new[this.key]); - } - super.handleRecordUpdate(path, mutated.old, mutated.new); - } - - build() { - return super.build({ - addCallback: (add, obj: { lat: number; long: number; }, recordPointer, metadata) => { - if (typeof obj !== 'object') { - this.storage.debug.warn(`GeoIndex cannot index location because value "${obj}" is not an object`); - return; - } - if (typeof obj.lat !== 'number' || typeof obj.long !== 'number') { - this.storage.debug.warn(`GeoIndex cannot index location because lat (${obj.lat}) or long (${obj.long}) are invalid`); - return; - } - const geohash = _getGeoHash(obj); - add(geohash, recordPointer, metadata); - return geohash; - }, - valueTypes: [VALUE_TYPES.OBJECT], - }); - } - - static get validOperators() { - return ['geo:nearby']; - } - - get validOperators() { - return GeoIndex.validOperators; - } - - test(obj: any, op: 'geo:nearby', val: { lat: number; long: number; radius: number }) { - if (!this.validOperators.includes(op)) { - throw new Error(`Unsupported operator "${op}"`); - } - if (obj == null || typeof obj !== 'object') { - // No source object - return false; - } - const src = obj[this.key] as { lat: number; long: number }; - if (typeof src !== 'object' || typeof src.lat !== 'number' || typeof src.long !== 'number') { - // source object is not geo - return false; - } - if (typeof val !== 'object' || typeof val.lat !== 'number' || typeof val.long !== 'number' || typeof val.radius !== 'number') { - 
// compare object is not geo with radius - return false; - } - - const isInCircle = (checkLat: number, checkLon: number, lat: number, lon: number, radiusM: number) => { - const deltaLon = checkLon - lon; - const deltaLat = checkLat - lat; - return Math.pow(deltaLon, 2) + Math.pow(deltaLat, 2) <= Math.pow(radiusM, 2); - }; - return isInCircle(src.lat, src.long, val.lat, val.long, val.radius); - } - - async query(op: string | BlacklistingSearchOperator, val?: IndexableValueOrArray, options?: { filter?: IndexQueryResults; }) { - if (op instanceof BlacklistingSearchOperator) { - throw new Error(`Not implemented: Can't query geo index with blacklisting operator yet`); - } - if (options) { - this.storage.debug.warn('Not implemented: query options for geo indexes are ignored'); - } - if (op === 'geo:nearby') { - if (val === null || typeof val !== 'object' || !('lat' in val) || !('long' in val) || !('radius' in val) || typeof val.lat !== 'number' || typeof val.long !== 'number' || typeof val.radius !== 'number') { - throw new Error(`geo nearby query expects an object with numeric lat, long and radius properties`); - } - return this.nearby(val as { lat: number; long: number; radius: number }); - } - else { - throw new Error(`Geo indexes can only be queried with operators ${GeoIndex.validOperators.map(op => `"${op}"`).join(', ')}`); - } - } - /** - * @param op Only 'geo:nearby' is supported at the moment - */ - async nearby( - val: { - /** - * nearby query center latitude - */ - lat: number; - - /** - * nearby query center longitude - */ - long: number; - - /** - * nearby query radius in meters - */ - radius: number; - }, - ): Promise { - const op = 'geo:nearby'; - - // Check cache - const cached = this.cache(op, val); - if (cached) { - // Use cached results - return cached; - } - - if (typeof val.lat !== 'number' || typeof val.long !== 'number' || typeof val.radius !== 'number') { - throw new Error('geo:nearby query must supply an object with properties .lat, .long and .radius'); - } - const stats = new IndexQueryStats('geo_nearby_query', val, true); - - const precision = _getGeoRadiusPrecision(val.radius / 10); - const targetHashes = _hashesInRadius(val.lat, val.long, val.radius, precision); - - stats.queries = targetHashes.length; - - const promises = targetHashes.map(hash => { - return super.query('like', `${hash}*`); - }); - const resultSets= await Promise.all(promises); - - // Combine all results - const results = new IndexQueryResults(); - results.filterKey = this.key; - resultSets.forEach(set => { - set.forEach(match => results.push(match)); - }); - - stats.stop(results.length); - results.stats = stats; - - this.cache(op, val, results); - - return results; - } -} +import type { Storage } from '../storage'; +import { BlacklistingSearchOperator } from '../btree'; +import { VALUE_TYPES } from '../node-value-types'; +import { DataIndex } from './data-index'; +import { DataIndexOptions } from './options'; +import { IndexQueryResults } from './query-results'; +import { IndexQueryStats } from './query-stats'; +import { IndexableValueOrArray } from './shared'; +import * as Geohash from '../geohash'; + +function _getGeoRadiusPrecision(radiusM: number) { + if (typeof radiusM !== 'number') { return; } + if (radiusM < 0.01) { return 12; } + if (radiusM < 0.075) { return 11; } + if (radiusM < 0.6) { return 10; } + if (radiusM < 2.3) { return 9; } + if (radiusM < 19) { return 8; } + if (radiusM < 76) { return 7; } + if (radiusM < 610) { return 6; } + if (radiusM < 2400) { return 5; } + if (radiusM < 
19500) { return 4; } + if (radiusM < 78700) { return 3; } + if (radiusM < 626000) { return 2; } + return 1; +} + +function _getGeoHash(obj: { lat: number; long: number }) { + if (typeof obj.lat !== 'number' || typeof obj.long !== 'number') { + return; + } + const precision = 10; //_getGeoRadiusPrecision(obj.radius); + const geohash = Geohash.encode(obj.lat, obj.long, precision); + return geohash; +} + +// Calculates which hashes (of different precisions) are within the radius of a point +function _hashesInRadius(lat: number, lon: number, radiusM: number, precision: number) { + + const isInCircle = (checkLat: number, checkLon: number, lat: number, lon: number, radiusM: number) => { + const deltaLon = checkLon - lon; + const deltaLat = checkLat - lat; + return Math.pow(deltaLon, 2) + Math.pow(deltaLat, 2) <= Math.pow(radiusM, 2); + }; + const getCentroid = (latitude: number, longitude: number, height: number, width: number) => { + const y_cen = latitude + (height / 2); + const x_cen = longitude + (width / 2); + return { x: x_cen, y: y_cen }; + }; + const convertToLatLon = (y: number, x: number, lat: number, lon: number) => { + const pi = 3.14159265359; + const r_earth = 6371000; + + const lat_diff = (y / r_earth) * (180 / pi); + const lon_diff = (x / r_earth) * (180 / pi) / Math.cos(lat * pi/180); + + const final_lat = lat + lat_diff; + const final_lon = lon + lon_diff; + + return { lat: final_lat, lon: final_lon }; + }; + + const x = 0; + const y = 0; + + const points = [] as Array<{ lat: number; lon: number; }>; + const geohashes = [] as string[]; + + const gridWidths = [5009400.0, 1252300.0, 156500.0, 39100.0, 4900.0, 1200.0, 152.9, 38.2, 4.8, 1.2, 0.149, 0.0370]; + const gridHeights = [4992600.0, 624100.0, 156000.0, 19500.0, 4900.0, 609.4, 152.4, 19.0, 4.8, 0.595, 0.149, 0.0199]; + + const height = gridHeights[precision-1] / 2; + const width = gridWidths[precision-1] / 2; + + const latMoves = Math.ceil(radiusM / height); + const lonMoves = Math.ceil(radiusM / width); + + for (let i = 0; i <= latMoves; i++) { + const tmpLat = y + height*i; + + for (let j = 0; j < lonMoves; j++) { + const tmpLon = x + width * j; + + if (isInCircle(tmpLat, tmpLon, y, x, radiusM)) { + const center = getCentroid(tmpLat, tmpLon, height, width); + points.push(convertToLatLon(center.y, center.x, lat, lon)); + points.push(convertToLatLon(-center.y, center.x, lat, lon)); + points.push(convertToLatLon(center.y, -center.x, lat, lon)); + points.push(convertToLatLon(-center.y, -center.x, lat, lon)); + } + } + } + + points.forEach(point => { + const hash = Geohash.encode(point.lat, point.lon, precision); + if (geohashes.indexOf(hash) < 0) { + geohashes.push(hash); + } + }); + + // Original optionally uses Georaptor compression of geohashes + // This is my simple implementation + geohashes.forEach((currentHash, index, arr) => { + const precision = currentHash.length; + const parentHash = currentHash.substr(0, precision-1); + let hashNeighbourMatches = 0; + const removeIndexes = []; + arr.forEach((otherHash, otherIndex) => { + if (otherHash.startsWith(parentHash)) { + removeIndexes.push(otherIndex); + if (otherHash.length == precision) { + hashNeighbourMatches++; + } + } + }); + if (hashNeighbourMatches === 32) { + // All 32 areas of a less precise geohash are included. 
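+            // (geohashes use a base-32 alphabet, so a cell at precision p - 1 contains exactly 32 cells at precision p)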
+            // Replace those with the less precise parent
+            for (let i = removeIndexes.length - 1; i >= 0; i--) {
+                arr.splice(removeIndexes[i], 1);
+            }
+            arr.splice(index, 0, parentHash);
+        }
+    });
+
+    return geohashes;
+}
+
+export class GeoIndex extends DataIndex {
+    constructor(storage: Storage, path: string, key: string, options: DataIndexOptions) {
+        if (key === '{key}') { throw new Error('Cannot create geo index on node keys'); }
+        super(storage, path, key, options);
+    }
+
+    // get fileName() {
+    //     return super.fileName.slice(0, -4) + '.geo.idx';
+    // }
+
+    get type() {
+        return 'geo';
+    }
+
+    async handleRecordUpdate(path: string, oldValue: unknown, newValue: unknown) {
+        const mutated = { old: {} as any, new: {} as any };
+        oldValue !== null && typeof oldValue === 'object' && Object.assign(mutated.old, oldValue);
+        newValue !== null && typeof newValue === 'object' && Object.assign(mutated.new, newValue);
+        if (mutated.old[this.key] !== null && typeof mutated.old[this.key] === 'object') {
+            mutated.old[this.key] = _getGeoHash(mutated.old[this.key]);
+        }
+        if (mutated.new[this.key] !== null && typeof mutated.new[this.key] === 'object') {
+            mutated.new[this.key] = _getGeoHash(mutated.new[this.key]);
+        }
+        super.handleRecordUpdate(path, mutated.old, mutated.new);
+    }
+
+    build() {
+        return super.build({
+            addCallback: (add, obj: { lat: number; long: number; }, recordPointer, metadata) => {
+                if (typeof obj !== 'object') {
+                    this.logger.warn(`GeoIndex cannot index location because value "${obj}" is not an object`);
+                    return;
+                }
+                if (typeof obj.lat !== 'number' || typeof obj.long !== 'number') {
+                    this.logger.warn(`GeoIndex cannot index location because lat (${obj.lat}) or long (${obj.long}) is invalid`);
+                    return;
+                }
+                const geohash = _getGeoHash(obj);
+                add(geohash, recordPointer, metadata);
+                return geohash;
+            },
+            valueTypes: [VALUE_TYPES.OBJECT],
+        });
+    }
+
+    static get validOperators() {
+        return ['geo:nearby'];
+    }
+
+    get validOperators() {
+        return GeoIndex.validOperators;
+    }
+
+    test(obj: any, op: 'geo:nearby', val: { lat: number; long: number; radius: number }) {
+        if (!this.validOperators.includes(op)) {
+            throw new Error(`Unsupported operator "${op}"`);
+        }
+        if (obj == null || typeof obj !== 'object') {
+            // No source object
+            return false;
+        }
+        const src = obj[this.key] as { lat: number; long: number };
+        if (typeof src !== 'object' || typeof src.lat !== 'number' || typeof src.long !== 'number') {
+            // source object is not geo
+            return false;
+        }
+        if (typeof val !== 'object' || typeof val.lat !== 'number' || typeof val.long !== 'number' || typeof val.radius !== 'number') {
+            // compare object is not geo with radius
+            return false;
+        }
+
+        const isInCircle = (checkLat: number, checkLon: number, lat: number, lon: number, radiusM: number) => {
+            // Convert the lat/long deltas (degrees) to meters before comparing them with the
+            // radius (meters), using the same spherical approximation as _hashesInRadius
+            const pi = 3.14159265359;
+            const rEarth = 6371000;
+            const deltaLatM = (checkLat - lat) * (pi / 180) * rEarth;
+            const deltaLonM = (checkLon - lon) * (pi / 180) * rEarth * Math.cos(lat * pi / 180);
+            return Math.pow(deltaLonM, 2) + Math.pow(deltaLatM, 2) <= Math.pow(radiusM, 2);
+        };
+        return isInCircle(src.lat, src.long, val.lat, val.long, val.radius);
+    }
+
+    async query(op: string | BlacklistingSearchOperator, val?: IndexableValueOrArray, options?: { filter?: IndexQueryResults; }) {
+        if (op instanceof BlacklistingSearchOperator) {
+            throw new Error(`Not implemented: Can't query geo index with blacklisting operator yet`);
+        }
+        if (options) {
+            this.logger.warn('Not implemented: query options for geo indexes are ignored');
+        }
+        if (op === 'geo:nearby') {
+            if (val === null || typeof val !== 'object' || !('lat' in val) || !('long' in val) || !('radius' in val) || typeof
val.lat !== 'number' || typeof val.long !== 'number' || typeof val.radius !== 'number') {
+                throw new Error(`geo nearby query expects an object with numeric lat, long and radius properties`);
+            }
+            return this.nearby(val as { lat: number; long: number; radius: number });
+        }
+        else {
+            throw new Error(`Geo indexes can only be queried with operators ${GeoIndex.validOperators.map(op => `"${op}"`).join(', ')}`);
+        }
+    }
+    /**
+     * Finds all index matches within the radius of the given location
+     */
+    async nearby(
+        val: {
+            /**
+             * nearby query center latitude
+             */
+            lat: number;
+
+            /**
+             * nearby query center longitude
+             */
+            long: number;
+
+            /**
+             * nearby query radius in meters
+             */
+            radius: number;
+        },
+    ): Promise<IndexQueryResults> {
+        const op = 'geo:nearby';
+
+        // Check cache
+        const cached = this.cache(op, val);
+        if (cached) {
+            // Use cached results
+            return cached;
+        }
+
+        if (typeof val.lat !== 'number' || typeof val.long !== 'number' || typeof val.radius !== 'number') {
+            throw new Error('geo:nearby query must supply an object with properties .lat, .long and .radius');
+        }
+        const stats = new IndexQueryStats('geo_nearby_query', val, true);
+
+        const precision = _getGeoRadiusPrecision(val.radius / 10);
+        const targetHashes = _hashesInRadius(val.lat, val.long, val.radius, precision);
+
+        stats.queries = targetHashes.length;
+
+        const promises = targetHashes.map(hash => {
+            return super.query('like', `${hash}*`);
+        });
+        const resultSets = await Promise.all(promises);
+
+        // Combine all results
+        const results = new IndexQueryResults();
+        results.filterKey = this.key;
+        resultSets.forEach(set => {
+            set.forEach(match => results.push(match));
+        });
+
+        stats.stop(results.length);
+        results.stats = stats;
+
+        this.cache(op, val, results);
+
+        return results;
+    }
+}
diff --git a/src/index.ts b/src/index.ts
index 58d13e4..80cc932 100644
--- a/src/index.ts
+++ b/src/index.ts
@@ -38,6 +38,9 @@ export {
     PartialArray,
 } from 'acebase-core';
 
+import { AceBase } from './acebase-local';
+export default AceBase; // Use AceBase as default export, allows: `import AceBase from 'acebase'`
+
 export {
     AceBase,
     AceBaseLocalSettings,
diff --git a/src/ipc/browser.ts b/src/ipc/browser.ts
index 54bdcb6..c567e20 100644
--- a/src/ipc/browser.ts
+++ b/src/ipc/browser.ts
@@ -84,7 +84,7 @@ export class IPCPeer extends AceBaseIPCPeer {
         }
         else {
             // No localStorage either, this is probably an old browser running in a webworker
-            this.storage.debug.warn(`[BroadcastChannel] not supported`);
+            this.logger.warn(`[BroadcastChannel] not supported`);
             this.sendMessage = () => { /* No OP */};
             return;
         }
@@ -98,22 +98,22 @@ export class IPCPeer extends AceBaseIPCPeer {
                 return;
             }
 
-            storage.debug.verbose(`[BroadcastChannel] received: `, message);
+            this.logger.trace(`[BroadcastChannel] received: `, message);
 
             if (message.type === 'hello' && message.from < this.masterPeerId) {
                 // This peer was created before other peer we thought was the master
                 this.masterPeerId = message.from;
-                storage.debug.log(`[BroadcastChannel] Tab ${this.masterPeerId} is the master.`);
+                this.logger.info(`[BroadcastChannel] Tab ${this.masterPeerId} is the master.`);
             }
             else if (message.type === 'bye' && message.from === this.masterPeerId) {
                 // The master tab is leaving
-                storage.debug.log(`[BroadcastChannel] Master tab ${this.masterPeerId} is leaving`);
+                this.logger.info(`[BroadcastChannel] Master tab ${this.masterPeerId} is leaving`);
 
                 // Elect new master
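+                // Deterministic election: every remaining peer sorts the same list of ids and picks the first entry
                 const allPeerIds = this.peers.map(peer => peer.id).concat(this.id).filter(id => id !== this.masterPeerId); // All peers,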
including us, excluding the leaving master peer this.masterPeerId = allPeerIds.sort()[0]; - storage.debug.log(`[BroadcastChannel] ${this.masterPeerId === this.id ? 'We are' : `tab ${this.masterPeerId} is`} the new master. Requesting ${this._locks.length} locks (${this._locks.filter(r => !r.granted).length} pending)`); + this.logger.info(`[BroadcastChannel] ${this.masterPeerId === this.id ? 'We are' : `tab ${this.masterPeerId} is`} the new master. Requesting ${this._locks.length} locks (${this._locks.filter(r => !r.granted).length} pending)`); // Let the new master take over any locks and lock requests. const requests = this._locks.splice(0); // Copy and clear current lock requests before granted locks are requested again. @@ -158,7 +158,7 @@ export class IPCPeer extends AceBaseIPCPeer { } sendMessage(message: IMessage) { - this.storage.debug.verbose(`[BroadcastChannel] sending: `, message); + this.logger.trace(`[BroadcastChannel] sending: `, message); this.channel.postMessage(message); } diff --git a/src/ipc/index.ts b/src/ipc/index.ts index 66fb17f..74c300d 100644 --- a/src/ipc/index.ts +++ b/src/ipc/index.ts @@ -1,137 +1,137 @@ -import { AceBaseIPCPeer, IHelloMessage, IMessage } from './ipc'; -import { Storage } from '../storage'; -import * as Cluster from 'cluster'; -const cluster = Cluster.default ?? Cluster as any as typeof Cluster.default; // ESM and CJS compatible approach -export { RemoteIPCPeer, RemoteIPCServerConfig } from './remote'; -export { IPCSocketPeer, NetIPCServer } from './socket'; - -const masterPeerId = '[master]'; - -interface INodeIPCMessage extends IMessage { - /** - * name of the target database. Needed when multiple database use the same communication channel - */ - dbname: string -} - -interface EventEmitterLike { - addListener(event: string, handler: (...args: any[]) => any): any; - removeListener(event: string, handler: (...args: any[]) => any): any; -} - -/** - * Node cluster functionality - enables vertical scaling with forked processes. AceBase will enable IPC at startup, so - * any forked process will communicate database changes and events automatically. Locking of resources will be done by - * the cluster's primary (previously master) process. NOTE: if the master process dies, all peers stop working - */ -export class IPCPeer extends AceBaseIPCPeer { - - constructor(storage: Storage, dbname: string) { - - // Throw eror on PM2 clusters --> they should use an AceBase IPC server - const pm2id = process.env?.NODE_APP_INSTANCE || process.env?.pm_id; - if (typeof pm2id === 'string' && pm2id !== '0') { - throw new Error(`To use AceBase with pm2 in cluster mode, use an AceBase IPC server to enable interprocess communication.`); - } - - const peerId = cluster.isMaster ? 
masterPeerId : cluster.worker.id.toString(); - super(storage, peerId, dbname); - - this.masterPeerId = masterPeerId; - this.ipcType = 'node.cluster'; - - /** Adds an event handler to a Node.js EventEmitter that is automatically removed upon IPC exit */ - const bindEventHandler = (target: EventEmitterLike, event: string, handler: (...args: any[]) => any) => { - target.addListener(event, handler); - this.on('exit', () => target.removeListener(event, handler)); - }; - - // Setup process exit handler - bindEventHandler(process, 'SIGINT', () => { - this.exit(); - }); - - if (cluster.isMaster) { - bindEventHandler(cluster, 'online', (worker: Cluster.Worker) => { - // A new worker is started - // Do not add yet, wait for "hello" message - a forked process might not use the same db - bindEventHandler(worker, 'error', err => { - storage.debug.error(`Caught worker error:`, err); - }); - }); - - bindEventHandler(cluster, 'exit', (worker: Cluster.Worker) => { - // A worker has shut down - if (this.peers.find(peer => peer.id === worker.id.toString())) { - // Worker apparently did not have time to say goodbye, - // remove the peer ourselves - this.removePeer(worker.id.toString()); - - // Send "bye" message on their behalf - this.sayGoodbye(worker.id.toString()); - } - }); - } - - const handleMessage = (message: INodeIPCMessage) => { - if (typeof message !== 'object') { - // Ignore non-object IPC messages - return; - } - if (message.dbname !== this.dbname) { - // Ignore, message not meant for this database - return; - } - if (cluster.isMaster && message.to !== masterPeerId) { - // Message is meant for others (or all). Forward it - this.sendMessage(message); - } - if (message.to && message.to !== this.id) { - // Message is for somebody else. Ignore - return; - } - - return super.handleMessage(message); - }; - - if (cluster.isMaster) { - bindEventHandler(cluster, 'message', (worker: Cluster.Worker, message: INodeIPCMessage) => handleMessage(message)); - } - else { - bindEventHandler(cluster.worker, 'message', handleMessage); - } - - // if (!cluster.isMaster) { - // // Add master peer. Do we have to? - // this.addPeer(masterPeerId, false, false); - // } - - // Send hello to other peers - const helloMsg: IHelloMessage = { type: 'hello', from: this.id, data: undefined }; - this.sendMessage(helloMsg); - } - - public sendMessage(msg: IMessage) { - const message = msg as INodeIPCMessage; - message.dbname = this.dbname; - - if (cluster.isMaster) { - // If we are the master, send the message to the target worker(s) - this.peers - .filter(p => p.id !== message.from && (!message.to || p.id === message.to)) - .forEach(peer => { - const worker = cluster.workers[peer.id]; - worker && worker.send(message); // When debugging, worker might have stopped in the meantime - }); - } - else { - // Send the message to the master who will forward it to the target worker(s) - process.send(message); - } - } - - public async exit(code = 0) { - await super.exit(code); - } - -} +import { AceBaseIPCPeer, IHelloMessage, IMessage } from './ipc'; +import { Storage } from '../storage'; +import * as Cluster from 'cluster'; +const cluster = Cluster.default ?? Cluster as any as typeof Cluster.default; // ESM and CJS compatible approach +export { RemoteIPCPeer, RemoteIPCServerConfig } from './remote'; +export { IPCSocketPeer, NetIPCServer } from './socket'; + +const masterPeerId = '[master]'; + +interface INodeIPCMessage extends IMessage { + /** + * name of the target database. 
Needed when multiple databases use the same communication channel
+     */
+    dbname: string
+}
+
+interface EventEmitterLike {
+    addListener(event: string, handler: (...args: any[]) => any): any;
+    removeListener(event: string, handler: (...args: any[]) => any): any;
+}
+
+/**
+ * Node cluster functionality - enables vertical scaling with forked processes. AceBase will enable IPC at startup, so
+ * any forked process will communicate database changes and events automatically. Locking of resources will be done by
+ * the cluster's primary (previously master) process. NOTE: if the master process dies, all peers stop working
+ */
+export class IPCPeer extends AceBaseIPCPeer {
+
+    constructor(storage: Storage, dbname: string) {
+
+        // Throw error on PM2 clusters --> they should use an AceBase IPC server
+        const pm2id = process.env?.NODE_APP_INSTANCE || process.env?.pm_id;
+        if (typeof pm2id === 'string' && pm2id !== '0') {
+            throw new Error(`To use AceBase with pm2 in cluster mode, use an AceBase IPC server to enable interprocess communication.`);
+        }
+
+        const peerId = cluster.isMaster ? masterPeerId : cluster.worker.id.toString();
+        super(storage, peerId, dbname);
+
+        this.masterPeerId = masterPeerId;
+        this.ipcType = 'node.cluster';
+
+        /** Adds an event handler to a Node.js EventEmitter that is automatically removed upon IPC exit */
+        const bindEventHandler = (target: EventEmitterLike, event: string, handler: (...args: any[]) => any) => {
+            target.addListener(event, handler);
+            this.on('exit', () => target.removeListener(event, handler));
+        };
+
+        // Setup process exit handler
+        bindEventHandler(process, 'SIGINT', () => {
+            this.exit();
+        });
+
+        if (cluster.isMaster) {
+            bindEventHandler(cluster, 'online', (worker: Cluster.Worker) => {
+                // A new worker is started
+                // Do not add yet, wait for "hello" message - a forked process might not use the same db
+                bindEventHandler(worker, 'error', err => {
+                    this.logger.error(`Caught worker error:`, err);
+                });
+            });
+
+            bindEventHandler(cluster, 'exit', (worker: Cluster.Worker) => {
+                // A worker has shut down
+                if (this.peers.find(peer => peer.id === worker.id.toString())) {
+                    // Worker apparently did not have time to say goodbye,
+                    // remove the peer ourselves
+                    this.removePeer(worker.id.toString());
+
+                    // Send "bye" message on their behalf
+                    this.sayGoodbye(worker.id.toString());
+                }
+            });
+        }
+
+        const handleMessage = (message: INodeIPCMessage) => {
+            if (typeof message !== 'object') {
+                // Ignore non-object IPC messages
+                return;
+            }
+            if (message.dbname !== this.dbname) {
+                // Ignore, message not meant for this database
+                return;
+            }
+            if (cluster.isMaster && message.to !== masterPeerId) {
+                // Message is meant for others (or all). Forward it
+                this.sendMessage(message);
+            }
+            if (message.to && message.to !== this.id) {
+                // Message is for somebody else. Ignore
+                return;
+            }
+
+            return super.handleMessage(message);
+        };
+
+        if (cluster.isMaster) {
+            bindEventHandler(cluster, 'message', (worker: Cluster.Worker, message: INodeIPCMessage) => handleMessage(message));
+        }
+        else {
+            bindEventHandler(cluster.worker, 'message', handleMessage);
+        }
+
+        // if (!cluster.isMaster) {
+        //     // Add master peer. Do we have to?
+ // this.addPeer(masterPeerId, false, false); + // } + + // Send hello to other peers + const helloMsg: IHelloMessage = { type: 'hello', from: this.id, data: undefined }; + this.sendMessage(helloMsg); + } + + public sendMessage(msg: IMessage) { + const message = msg as INodeIPCMessage; + message.dbname = this.dbname; + + if (cluster.isMaster) { + // If we are the master, send the message to the target worker(s) + this.peers + .filter(p => p.id !== message.from && (!message.to || p.id === message.to)) + .forEach(peer => { + const worker = cluster.workers[peer.id]; + worker && worker.send(message); // When debugging, worker might have stopped in the meantime + }); + } + else { + // Send the message to the master who will forward it to the target worker(s) + process.send(message); + } + } + + public async exit(code = 0) { + await super.exit(code); + } + +} diff --git a/src/ipc/ipc.ts b/src/ipc/ipc.ts index 256dde1..398eb33 100644 --- a/src/ipc/ipc.ts +++ b/src/ipc/ipc.ts @@ -1,4 +1,4 @@ -import { ID, SimpleEventEmitter } from 'acebase-core'; +import { ID, LoggerPlugin, SimpleEventEmitter } from 'acebase-core'; import { NodeLocker, NodeLock, LOCK_STATE } from '../node-lock'; import { Storage } from '../storage'; @@ -23,15 +23,18 @@ export abstract class AceBaseIPCPeer extends SimpleEventEmitter { private _nodeLocker: NodeLocker; + public logger: LoggerPlugin; + constructor(protected storage: Storage, protected id: string, public dbname: string = storage.name) { super(); - this._nodeLocker = new NodeLocker(storage.debug, storage.settings.lockTimeout); + this._nodeLocker = new NodeLocker(storage.logger, storage.settings.lockTimeout); + this.logger = storage.logger; // Setup db event listeners storage.on('subscribe', (subscription: { path: string, event: string, callback: AceBaseSubscribeCallback }) => { // Subscription was added to db - storage.debug.verbose(`database subscription being added on peer ${this.id}`); + this.logger.trace(`database subscription being added on peer ${this.id}`); const remoteSubscription = this.remoteSubscriptions.find(sub => sub.callback === subscription.callback); if (remoteSubscription) { @@ -92,17 +95,17 @@ export abstract class AceBaseIPCPeer extends SimpleEventEmitter { return this.once('exit'); } this._exiting = true; - this.storage.debug.warn(`Received ${this.isMaster ? 'master' : 'worker ' + this.id} process exit request`); + this.logger.warn(`Received ${this.isMaster ? 'master' : 'worker ' + this.id} process exit request`); if (this._locks.length > 0) { - this.storage.debug.warn(`Waiting for ${this.isMaster ? 'master' : 'worker'} ${this.id} locks to clear`); + this.logger.warn(`Waiting for ${this.isMaster ? 'master' : 'worker'} ${this.id} locks to clear`); await this.once('locks-cleared'); } // Send "bye" this.sayGoodbye(this.id); - this.storage.debug.warn(`${this.isMaster ? 'Master' : 'Worker ' + this.id} will now exit`); + this.logger.warn(`${this.isMaster ? 'Master' : 'Worker ' + this.id} will now exit`); this.emitOnce('exit', code); } @@ -155,7 +158,7 @@ export abstract class AceBaseIPCPeer extends SimpleEventEmitter { protected addRemoteSubscription(peerId: string, details:ISubscriptionData) { if (this._exiting) { return; } - // this.storage.debug.log(`remote subscription being added`); + // this.logger.debug(`remote subscription being added`); if (this.remoteSubscriptions.some(sub => sub.for === peerId && sub.event === details.event && sub.path === details.path)) { // We're already serving this event for the other peer. 
Ignore @@ -393,7 +396,7 @@ export abstract class AceBaseIPCPeer extends SimpleEventEmitter { const removeLock = (lockDetails: InternalLockInfo) => { this._locks.splice(this._locks.indexOf(lockDetails), 1); if (this._locks.length === 0) { - // this.storage.debug.log(`No more locks in worker ${this.id}`); + // this.logger.debug(`No more locks in worker ${this.id}`); this.emit('locks-cleared'); } }; @@ -449,7 +452,7 @@ export abstract class AceBaseIPCPeer extends SimpleEventEmitter { const req: IUnlockRequestMessage = { type: 'unlock-request', id: ID.generate(), from: this.id, to: this.masterPeerId, data: { id: lockInfo.lock.id } }; await this.request(req); lockInfo.lock.state = LOCK_STATE.DONE; - this.storage.debug.verbose(`Worker ${this.id} released lock ${lockInfo.lock.id} (tid ${lockInfo.lock.tid}, ${lockInfo.lock.comment}, "/${lockInfo.lock.path}", ${lockInfo.lock.forWriting ? 'write' : 'read'})`); + this.logger.trace(`Worker ${this.id} released lock ${lockInfo.lock.id} (tid ${lockInfo.lock.tid}, ${lockInfo.lock.comment}, "/${lockInfo.lock.path}", ${lockInfo.lock.forWriting ? 'write' : 'read'})`); removeLock(lockInfo); }, moveToParent: async () => { @@ -468,7 +471,7 @@ export abstract class AceBaseIPCPeer extends SimpleEventEmitter { return lockInfo.lock; }, }; - // this.storage.debug.log(`Worker ${this.id} received lock ${lock.id} (tid ${lock.tid}, ${lock.comment}, "/${lock.path}", ${lock.forWriting ? 'write' : 'read'})`); + // this.logger.debug(`Worker ${this.id} received lock ${lock.id} (tid ${lock.tid}, ${lock.comment}, "/${lock.path}", ${lock.forWriting ? 'write' : 'read'})`); return lockInfo.lock; }; @@ -520,7 +523,7 @@ export abstract class AceBaseIPCPeer extends SimpleEventEmitter { const req: ICustomRequestMessage = { type: 'request', from: this.id, to: this.masterPeerId, id: ID.generate(), data: request }; return this.request(req) .catch(err => { - this.storage.debug.error(err); + this.logger.error(err); throw err; }); } @@ -551,7 +554,7 @@ export abstract class AceBaseIPCPeer extends SimpleEventEmitter { * Enables or disables ipc event handling. When disabled, incoming event messages will be ignored. */ set eventsEnabled(enabled: boolean) { - this.storage.debug.log(`ipc events ${enabled ? 'enabled' : 'disabled'}`); + this.logger.info(`ipc events ${enabled ? 'enabled' : 'disabled'}`); this._eventsEnabled = enabled; } diff --git a/src/ipc/remote.ts b/src/ipc/remote.ts index 8b4e22a..b643da6 100644 --- a/src/ipc/remote.ts +++ b/src/ipc/remote.ts @@ -1,326 +1,326 @@ -import { ID, Utils } from 'acebase-core'; -import { AceBaseIPCPeer, IMessage } from './ipc'; -import { Storage } from '../storage'; -import * as http from 'http'; - -import type * as wsTypes from 'ws'; // @types/ws must always available - -// type MessageEventCallback = (event: MessageEvent) => any; - -export interface RemoteIPCServerConfig { - dbname: string, - host?: string, - port: number, - ssl?: boolean, - token?: string, - role: 'master'|'worker', -} - -const masterPeerId = '[master]'; -const WS_CLOSE_PING_TIMEOUT = 1; -const WS_CLOSE_PROCESS_EXIT = 2; -// const WS_CLOSE_UNAUTHORIZED = 3; -// const WS_CLOSE_WRONG_CLIENT = 4; -// const WS_CLOSE_SERVER_ERROR = 5; - -/** - * Remote IPC using an external server. Database changes and events will be synchronized automatically. - * Locking of resources will be done by a single master that needs to be known up front. Preferably, the master - * is a process that handles no database updates itself and only manages data locking and allocation for workers. 
- * - * To use Remote IPC, you have to start the following processes: - * - 1 AceBase IPC Server process - * - 1 AceBase database master process (optional, used in example 1) - * - 1+ AceBase server worker processes - * - * NOTE if your IPC server will be running on a public host (not `localhost`), make sure to use `ssl` and a secret - * `token` in your IPC configuration. - * - * @example - * // IPC server process (start-ipc-server.js) - * const { AceBaseIPCServer } = require('acebase-ipc-server'); - * const server = new AceBaseIPCServer({ host: 'localhost', port: 9163 }) - * - * // Dedicated db master process (start-db-master.js) - * const { AceBase } = require('acebase'); - * const db = new AceBase('mydb', { storage: { ipc: { host: 'localhost', port: 9163, ssl: false, role: 'master' } } }); - * - * // Server worker processes (start-db-server.js) - * const { AceBaseServer } = require('acebase-server'); - * const server = new AceBaseServer('mydb', { host: 'localhost', port: 5757, storage: { ipc: { host: 'localhost', port: 9163, ssl: false, role: 'worker' } } }); - * - * // PM2 ecosystem.config.js: - * module.exports = { - * apps: [{ - * name: "AceBase IPC Server", - * script: "./start-ipc-server.js" - * }, { - * name: "AceBase database master", - * script: "./start-db-master.js" - * }, { - * name: "AceBase database server", - * script: "./start-db-server.js", - * instances: "-2", // Uses all CPUs minus 2 - * exec_mode: "cluster" // Enables PM2 load balancing, see https://pm2.keymetrics.io/docs/usage/cluster-mode/ - * }] - * } - * - * @description - * Instead of starting a dedicated db master process, you can also start 1 `AceBaseServer` with `role: "master"` manually. - * Note that the db master will also handle http requests for clients in this case, which might not be desirable because it - * also has to handle IPC master tasks for other clients. See the following example: - * - * @example - * // Another example using only 2 startup apps: - * - 1 instance: AceBase IPC server - * - Multiple instances of your app - * - * // IPC server process (start-ipc-server.js) - * const { AceBaseIPCServer } = require('acebase-ipc-server'); - * const server = new AceBaseIPCServer({ host: 'localhost', port: 9163 }) - * - * // Server worker processes (start-db-server.js) - * const { AceBaseServer } = require('acebase-server'); - * const role = process.env.NODE_APP_INSTANCE === '0' ? 'master' : 'worker'; - * const server = new AceBaseServer('mydb', { host: 'localhost', port: 5757, storage: { ipc: { host: 'localhost', port: 9163, ssl: false, role } } }); - * - * // PM2 ecosystem.config.js: - * module.exports = { - * apps: [{ - * name: "AceBase IPC Server", - * script: "./start-ipc-server.js", - * instances: 1 - * }, { - * name: "AceBase database server", - * script: "./start-db-server.js", - * instances: "-1", // Uses all CPUs minus 1 - * exec_mode: "cluster" // Enables PM2 load balancing - * }] - * } - */ -export class RemoteIPCPeer extends AceBaseIPCPeer { - - private get version() { return '1.0.0'; } - private ws: wsTypes.WebSocket; - private queue = true; - private pending: { - in: string[], - out: string[] - } = { in: [], out: [] }; - private maxPayload = 100; // Initial setting, will be overridden by server config once connected - - constructor(storage: Storage, private config: RemoteIPCServerConfig) { - super(storage, config.role === 'master' ? 
masterPeerId : ID.generate(), config.dbname); - this.masterPeerId = masterPeerId; - - this.connect().catch(err => { - storage.debug.error(err.message); - this.exit(); - }); - } - - private async connect(options?: { maxRetries?: number }) { - const ws = await (async () => { - try { - return import('ws'); - } - catch { - throw new Error(`ws package is not installed. To fix this, run: npm install ws`); - } - })(); - return new Promise((resolve, reject) => { - let connected = false; - this.ws = new ws.WebSocket(`ws${this.config.ssl ? 's' : ''}://${this.config.host || 'localhost'}:${this.config.port}/${this.config.dbname}/connect?v=${this.version}&id=${this.id}&t=${this.config.token}`); // &role=${this.config.role} - - // Handle connection success - this.ws.addEventListener('open', async (/*event*/) => { - connected = true; - // Send any pending messages - this.pending.out.forEach(msg => { - this.ws.send(msg); - }); - this.pending.out = []; - this.queue = false; - resolve(); - }); - - // // Handle unexpected response (is documented at https://github.com/websockets/ws/blob/master/doc/ws.md#event-unexpected-response but doesn't appear to be working) - // (this.ws as any).addEventListener('unexpected-response', (req: http.ClientRequest, res: http.IncomingMessage) => { - // console.error(`Invalid response: ${res.statusCode} ${res.statusMessage}`); - // let closeCode; - // switch (res.statusCode) { - // case 401: closeCode = WS_CLOSE_UNAUTHORIZED; break; - // case 409: closeCode = WS_CLOSE_WRONG_CLIENT; break; - // case 500: closeCode = WS_CLOSE_SERVER_ERROR; break; - // } - // reject(new Error(`${res.statusCode} ${res.statusMessage}`)); - // }); - - // Handle connection error - this.ws.addEventListener('error', event => { - if (!connected) { - // We had no connection yet - if (event.message.includes('403')) { - reject(new Error('Cannot connect to IPC server: unauthorized')); - } - else if (event.message.includes('409')) { - reject(new Error('Cannot connect to IPC server: unsupported client version (too new or old)')); - } - else if (event.message.includes('500')) { - reject(new Error('Cannot connect to IPC server: server error')); - } - else if (typeof options?.maxRetries === 'undefined' || typeof options?.maxRetries === 'number' && options?.maxRetries > 0) { - const retryMs = 1000; // ms - this.storage.debug.error(`Unable to connect to remote IPC server (${event.message}). Trying again in ${retryMs}ms`); - const retryOptions:{ maxRetries?: number } = {}; - if (typeof typeof options?.maxRetries === 'number') { retryOptions.maxRetries = options.maxRetries-1; } - const timeout = setTimeout(() => { this.connect(retryOptions); }, retryMs); - timeout.unref?.(); - } - else { - reject(event); - } - } - }); - - // Send pings if connection is idle to actively monitor connectivity - let lastMessageReceived = Date.now(); - const pingInterval = setInterval(() => { - if (this._exiting) { return; } - const ms = Date.now() - lastMessageReceived; - if (ms > 10000) { - // Timeout if we didn't get response within 10 seconds - this.ws.close(WS_CLOSE_PING_TIMEOUT); // close event that follows will reconnect - } - else if (ms > 5000) { - // No messages received for 5s. Sending ping to trigger pong response - this.ws.send('ping'); - } - }, 500); - pingInterval.unref?.(); - - // Close connection if we're exiting - process.once('exit', () => { - this.ws.close(WS_CLOSE_PROCESS_EXIT); - }); - - // Handle disconnect - this.ws.addEventListener('close', (/*event*/) => { - // Disconnected. 
Try reconnecting immediately - if (!connected) { return; } // We weren't connected yet. Don't reconnect here, retries will be executed automatically - if (this._exiting) { return; } - this.storage.debug.error(`Connection to remote IPC server was lost. Trying to reconnect`); - clearInterval(pingInterval); - this.storage.invalidateCache?.(true, '', true, 'ipc_ws_disconnect'); // Make sure the entire cache is invalidated (AceBase storage has such cache) - this.connect(); - }); - - // Handle incoming messages - this.ws.addEventListener('message', async event => { - lastMessageReceived = Date.now(); - let str = event.data.toString(); - console.log(str); - if (str === 'pong') { - // We got a ping reply from the server - return; - } - else if (str.startsWith('welcome:')) { - // Welcome message with config - const config = JSON.parse(str.slice(8)); - this.maxPayload = config.maxPayload; - } - else if (str.startsWith('connect:')) { - // A new peer connected to the IPC server - // Do not add yet, wait for our own "hello" message - } - else if (str.startsWith('disconnect:')) { - // A peer has disconnected from the IPC server - const id = str.slice(11); - if (this.peers.find(peer => peer.id === id)) { - // Peer apparently did not have time to say goodbye, - // remove the peer ourselves - this.removePeer(id); - - // Send "bye" message on their behalf - this.sayGoodbye(id); - } - } - else if (str.startsWith('get:')) { - // Large message we have to fetch - const msgId = str.slice(4); - try { - str = await this.fetch('GET', `/${this.config.dbname}/receive?id=${this.id}&msg=${msgId}&t=${this.config.token}`); - const msg = JSON.parse(str); - super.handleMessage(msg); - } - catch (err) { - this.storage.debug.error(`Failed to receive message ${msgId}:`, err); - } - } - else if (str.startsWith('{')) { - // Normal message - const msg = JSON.parse(str); - super.handleMessage(msg); - } - else { - // Unknown event - console.warn(`Received unknown IPC message: "${str}"`); - } - }); - }); - } - - sendMessage(message: IMessage) { - this.storage.debug.verbose(`[RemoteIPC] sending: `, message); - let json = JSON.stringify(message); - if (typeof message.to === 'string') { - // Send to specific peer only - json = `to:${message.to};${json}`; - } - if (this.queue) { - this.pending.out.push(json); - } - else if (json.length > this.maxPayload) { - this.fetch('POST', `/${this.dbname}/send?id=${this.id}&t=${this.config.token}`, json); - } - else { - this.ws.send(json); - } - } - - async fetch(method: 'GET'|'POST', path: string, postData?: string) { - const options = { - hostname: this.config.host || 'localhost', - port: this.config.port, - path, - method, - headers: { - 'Content-Type': 'application/json', - 'Content-Length': Buffer.byteLength(postData || ''), - }, - }; - return await new Promise((resolve, reject) => { - const req = http.request(options, (res) => { - // console.log(`STATUS: ${res.statusCode}`); - // console.log(`HEADERS: ${JSON.stringify(res.headers)}`); - res.setEncoding('utf8'); - - let data = ''; - res.on('data', (chunk) => { - data += chunk; - }); - res.on('end', () => { - resolve(data); - }); - }); - - req.on('error', reject); - - // Write data to request body - req.write(postData); - req.end(); - }); - } - - -} +import { ID, Utils } from 'acebase-core'; +import { AceBaseIPCPeer, IMessage } from './ipc'; +import { Storage } from '../storage'; +import * as http from 'http'; + +import type * as wsTypes from 'ws'; // @types/ws must always available + +// type MessageEventCallback = (event: MessageEvent) => 
any; + +export interface RemoteIPCServerConfig { + dbname: string, + host?: string, + port: number, + ssl?: boolean, + token?: string, + role: 'master'|'worker', +} + +const masterPeerId = '[master]'; +const WS_CLOSE_PING_TIMEOUT = 1; +const WS_CLOSE_PROCESS_EXIT = 2; +// const WS_CLOSE_UNAUTHORIZED = 3; +// const WS_CLOSE_WRONG_CLIENT = 4; +// const WS_CLOSE_SERVER_ERROR = 5; + +/** + * Remote IPC using an external server. Database changes and events will be synchronized automatically. + * Locking of resources will be done by a single master that needs to be known up front. Preferably, the master + * is a process that handles no database updates itself and only manages data locking and allocation for workers. + * + * To use Remote IPC, you have to start the following processes: + * - 1 AceBase IPC Server process + * - 1 AceBase database master process (optional, used in example 1) + * - 1+ AceBase server worker processes + * + * NOTE if your IPC server will be running on a public host (not `localhost`), make sure to use `ssl` and a secret + * `token` in your IPC configuration. + * + * @example + * // IPC server process (start-ipc-server.js) + * const { AceBaseIPCServer } = require('acebase-ipc-server'); + * const server = new AceBaseIPCServer({ host: 'localhost', port: 9163 }) + * + * // Dedicated db master process (start-db-master.js) + * const { AceBase } = require('acebase'); + * const db = new AceBase('mydb', { storage: { ipc: { host: 'localhost', port: 9163, ssl: false, role: 'master' } } }); + * + * // Server worker processes (start-db-server.js) + * const { AceBaseServer } = require('acebase-server'); + * const server = new AceBaseServer('mydb', { host: 'localhost', port: 5757, storage: { ipc: { host: 'localhost', port: 9163, ssl: false, role: 'worker' } } }); + * + * // PM2 ecosystem.config.js: + * module.exports = { + * apps: [{ + * name: "AceBase IPC Server", + * script: "./start-ipc-server.js" + * }, { + * name: "AceBase database master", + * script: "./start-db-master.js" + * }, { + * name: "AceBase database server", + * script: "./start-db-server.js", + * instances: "-2", // Uses all CPUs minus 2 + * exec_mode: "cluster" // Enables PM2 load balancing, see https://pm2.keymetrics.io/docs/usage/cluster-mode/ + * }] + * } + * + * @description + * Instead of starting a dedicated db master process, you can also start 1 `AceBaseServer` with `role: "master"` manually. + * Note that the db master will also handle http requests for clients in this case, which might not be desirable because it + * also has to handle IPC master tasks for other clients. See the following example: + * + * @example + * // Another example using only 2 startup apps: + * - 1 instance: AceBase IPC server + * - Multiple instances of your app + * + * // IPC server process (start-ipc-server.js) + * const { AceBaseIPCServer } = require('acebase-ipc-server'); + * const server = new AceBaseIPCServer({ host: 'localhost', port: 9163 }) + * + * // Server worker processes (start-db-server.js) + * const { AceBaseServer } = require('acebase-server'); + * const role = process.env.NODE_APP_INSTANCE === '0' ? 
'master' : 'worker'; + * const server = new AceBaseServer('mydb', { host: 'localhost', port: 5757, storage: { ipc: { host: 'localhost', port: 9163, ssl: false, role } } }); + * + * // PM2 ecosystem.config.js: + * module.exports = { + * apps: [{ + * name: "AceBase IPC Server", + * script: "./start-ipc-server.js", + * instances: 1 + * }, { + * name: "AceBase database server", + * script: "./start-db-server.js", + * instances: "-1", // Uses all CPUs minus 1 + * exec_mode: "cluster" // Enables PM2 load balancing + * }] + * } + */ +export class RemoteIPCPeer extends AceBaseIPCPeer { + + private get version() { return '1.0.0'; } + private ws: wsTypes.WebSocket; + private queue = true; + private pending: { + in: string[], + out: string[] + } = { in: [], out: [] }; + private maxPayload = 100; // Initial setting, will be overridden by server config once connected + + constructor(storage: Storage, private config: RemoteIPCServerConfig) { + super(storage, config.role === 'master' ? masterPeerId : ID.generate(), config.dbname); + this.masterPeerId = masterPeerId; + + this.connect().catch(err => { + this.logger.error(err.message); + this.exit(); + }); + } + + private async connect(options?: { maxRetries?: number }) { + const ws = await (async () => { + try { + return import('ws'); + } + catch { + throw new Error(`ws package is not installed. To fix this, run: npm install ws`); + } + })(); + return new Promise((resolve, reject) => { + let connected = false; + this.ws = new ws.WebSocket(`ws${this.config.ssl ? 's' : ''}://${this.config.host || 'localhost'}:${this.config.port}/${this.config.dbname}/connect?v=${this.version}&id=${this.id}&t=${this.config.token}`); // &role=${this.config.role} + + // Handle connection success + this.ws.addEventListener('open', async (/*event*/) => { + connected = true; + // Send any pending messages + this.pending.out.forEach(msg => { + this.ws.send(msg); + }); + this.pending.out = []; + this.queue = false; + resolve(); + }); + + // // Handle unexpected response (is documented at https://github.com/websockets/ws/blob/master/doc/ws.md#event-unexpected-response but doesn't appear to be working) + // (this.ws as any).addEventListener('unexpected-response', (req: http.ClientRequest, res: http.IncomingMessage) => { + // console.error(`Invalid response: ${res.statusCode} ${res.statusMessage}`); + // let closeCode; + // switch (res.statusCode) { + // case 401: closeCode = WS_CLOSE_UNAUTHORIZED; break; + // case 409: closeCode = WS_CLOSE_WRONG_CLIENT; break; + // case 500: closeCode = WS_CLOSE_SERVER_ERROR; break; + // } + // reject(new Error(`${res.statusCode} ${res.statusMessage}`)); + // }); + + // Handle connection error + this.ws.addEventListener('error', event => { + if (!connected) { + // We had no connection yet + if (event.message.includes('403')) { + reject(new Error('Cannot connect to IPC server: unauthorized')); + } + else if (event.message.includes('409')) { + reject(new Error('Cannot connect to IPC server: unsupported client version (too new or old)')); + } + else if (event.message.includes('500')) { + reject(new Error('Cannot connect to IPC server: server error')); + } + else if (typeof options?.maxRetries === 'undefined' || typeof options?.maxRetries === 'number' && options?.maxRetries > 0) { + const retryMs = 1000; // ms + this.logger.error(`Unable to connect to remote IPC server (${event.message}). 
Trying again in ${retryMs}ms`);
+                        const retryOptions: { maxRetries?: number } = {};
+                        if (typeof options?.maxRetries === 'number') { retryOptions.maxRetries = options.maxRetries - 1; }
+                        // Settle the original promise with the outcome of the retry attempt
+                        const timeout = setTimeout(() => { this.connect(retryOptions).then(resolve, reject); }, retryMs);
+                        timeout.unref?.();
+                    }
+                    else {
+                        reject(event);
+                    }
+                }
+            });
+
+            // Send pings if connection is idle to actively monitor connectivity
+            let lastMessageReceived = Date.now();
+            const pingInterval = setInterval(() => {
+                if (this._exiting) { return; }
+                const ms = Date.now() - lastMessageReceived;
+                if (ms > 10000) {
+                    // Timeout if we didn't get a response within 10 seconds
+                    this.ws.close(WS_CLOSE_PING_TIMEOUT); // close event that follows will reconnect
+                }
+                else if (ms > 5000) {
+                    // No messages received for 5s. Sending ping to trigger pong response
+                    this.ws.send('ping');
+                }
+            }, 500);
+            pingInterval.unref?.();
+
+            // Close connection if we're exiting
+            process.once('exit', () => {
+                this.ws.close(WS_CLOSE_PROCESS_EXIT);
+            });
+
+            // Handle disconnect
+            this.ws.addEventListener('close', (/*event*/) => {
+                // Disconnected. Try reconnecting immediately
+                if (!connected) { return; } // We weren't connected yet. Don't reconnect here, retries will be executed automatically
+                if (this._exiting) { return; }
+                this.logger.error(`Connection to remote IPC server was lost. Trying to reconnect`);
+                clearInterval(pingInterval);
+                this.storage.invalidateCache?.(true, '', true, 'ipc_ws_disconnect'); // Make sure the entire cache is invalidated (AceBase storage has such cache)
+                this.connect().catch(err => this.logger.error(err.message));
+            });
+
+            // Handle incoming messages
+            this.ws.addEventListener('message', async event => {
+                lastMessageReceived = Date.now();
+                let str = event.data.toString();
+                this.logger.trace(`[RemoteIPC] received: `, str);
+                if (str === 'pong') {
+                    // We got a ping reply from the server
+                    return;
+                }
+                else if (str.startsWith('welcome:')) {
+                    // Welcome message with config
+                    const config = JSON.parse(str.slice(8));
+                    this.maxPayload = config.maxPayload;
+                }
+                else if (str.startsWith('connect:')) {
+                    // A new peer connected to the IPC server
+                    // Do not add yet, wait for our own "hello" message
+                }
+                else if (str.startsWith('disconnect:')) {
+                    // A peer has disconnected from the IPC server
+                    const id = str.slice(11);
+                    if (this.peers.find(peer => peer.id === id)) {
+                        // Peer apparently did not have time to say goodbye,
+                        // remove the peer ourselves
+                        this.removePeer(id);
+
+                        // Send "bye" message on their behalf
+                        this.sayGoodbye(id);
+                    }
+                }
+                else if (str.startsWith('get:')) {
+                    // Large message we have to fetch
+                    const msgId = str.slice(4);
+                    try {
+                        str = await this.fetch('GET', `/${this.config.dbname}/receive?id=${this.id}&msg=${msgId}&t=${this.config.token}`);
+                        const msg = JSON.parse(str);
+                        super.handleMessage(msg);
+                    }
+                    catch (err) {
+                        this.logger.error(`Failed to receive message ${msgId}:`, err);
+                    }
+                }
+                else if (str.startsWith('{')) {
+                    // Normal message
+                    const msg = JSON.parse(str);
+                    super.handleMessage(msg);
+                }
+                else {
+                    // Unknown event
+                    this.logger.warn(`Received unknown IPC message: "${str}"`);
+                }
+            });
+        });
+    }
+
+    sendMessage(message: IMessage) {
+        this.logger.trace(`[RemoteIPC] sending: `, message);
+        let json = JSON.stringify(message);
+        if (typeof message.to === 'string') {
+            // Send to specific peer only
+            json = `to:${message.to};${json}`;
+        }
+        if (this.queue) {
+            this.pending.out.push(json);
+        }
+        else if (json.length > this.maxPayload) {
+            this.fetch('POST', `/${this.dbname}/send?id=${this.id}&t=${this.config.token}`, json)
+                .catch(err => this.logger.error(`Failed to send message:`, err));
+        }
+        else {
+
this.ws.send(json);
+        }
+    }
+
+    async fetch(method: 'GET'|'POST', path: string, postData?: string) {
+        const options = {
+            hostname: this.config.host || 'localhost',
+            port: this.config.port,
+            path,
+            method,
+            headers: {
+                'Content-Type': 'application/json',
+                'Content-Length': Buffer.byteLength(postData || ''),
+            },
+        };
+        return await new Promise<string>((resolve, reject) => {
+            const req = http.request(options, (res) => {
+                // console.log(`STATUS: ${res.statusCode}`);
+                // console.log(`HEADERS: ${JSON.stringify(res.headers)}`);
+                res.setEncoding('utf8');
+
+                let data = '';
+                res.on('data', (chunk) => {
+                    data += chunk;
+                });
+                res.on('end', () => {
+                    resolve(data);
+                });
+            });
+
+            req.on('error', reject);
+
+            // Write data to request body (if any)
+            if (postData) { req.write(postData); }
+            req.end();
+        });
+    }
+
+}
diff --git a/src/ipc/service/index.ts b/src/ipc/service/index.ts
index c6fd53d..d783f99 100644
--- a/src/ipc/service/index.ts
+++ b/src/ipc/service/index.ts
@@ -1,99 +1,113 @@
-import { createServer, Socket } from 'net';
-import { getSocketPath } from './shared';
-import { AceBase, type AceBaseLocalSettings } from '../../';
-import { DebugLogger } from 'acebase-core';
-
-const ERROR = Object.freeze({
-    ALREADY_RUNNING: { code: 'already_running', exitCode: 2 },
-    UNKNOWN: { code: 'unknown', exitCode: 3 },
-    NO_DB: { code: 'no_db', exitCode: 4 },
-});
-
-export async function startServer(dbFile: string, options: { logLevel: AceBaseLocalSettings['logLevel'], maxIdleTime: number, exit: (code: number) => void }) {
-    const fileMatch = dbFile.match(/^(?<storagePath>.*([\\\/]))(?<dbName>.+)\.acebase\2(?<storageType>[a-z]+)\.db$/);
-    if (!fileMatch) {
-        return options.exit(ERROR.NO_DB.exitCode);
-    }
-    const { storagePath, dbName, storageType } = fileMatch.groups;
-    const logger = new DebugLogger(options.logLevel, `[IPC service ${dbName}:${storageType}]`);
-    let db: AceBase; // Will be opened when listening
-
-    const sockets = [] as Socket[];
-
-    const socketPath = getSocketPath(dbFile);
-    logger.log(`[starting socket server on path ${socketPath}`);
-
-    const server = createServer();
-    server.listen({
-        path: socketPath,
-        readableAll: true,
-        writableAll: true,
-    });
-
-    server.on('listening', () => {
-        // Started successful
-        process.on('SIGINT', () => server.close());
-        process.on('exit', (code) => {
-            logger.log(`exiting with code ${code}`);
-        });
-
-        // Start the "master" IPC client
-        db = new AceBase(dbName, { logLevel: options.logLevel, storage: { type: storageType, path: storagePath, ipc: server } });
-    });
-
-    server.on('error', (err: Error & { code: string }) => {
-        if (err.code === 'EADDRINUSE') {
-            logger.log(`socket server already running`);
-            return options.exit(ERROR.ALREADY_RUNNING.exitCode);
-        }
-        logger.error(`socket server error ${err.code ?? err.message}`);
-        options.exit(ERROR.UNKNOWN.exitCode);
-    });
-
-    let connectionsMade = false;
-    server.on('connection', (socket) => {
-        // New socket connected handler
-        connectionsMade = true;
-        sockets.push(socket);
-        logger.log(`socket connected, total: ${sockets.length}`);
-
-        socket.on('close', (hadError) => {
-            // Socket is closed
-            sockets.splice(sockets.indexOf(socket), 1);
-            logger.log(`socket disconnected${hadError ? ' because of an error' : ''}, total: ${sockets.length}`);
-            if (sockets.length === 0) {
-                const stop = () => {
-                    logger.log(`closing server socket because there are no more connected clients, exiting with code 0`);
-                    // Stop socket server
-                    server.close((err) => {
-                        options.exit(err ? 
ERROR.UNKNOWN.exitCode : 0);
-                    });
-                };
-                if (options.maxIdleTime > 0) {
-                    setTimeout(() => {
-                        if (sockets.length === 0) { stop(); }
-                    }, 5000);
-                }
-                else {
-                    stop();
-                }
-            }
-        });
-    });
-
-    server.on('close', () => {
-        db.close();
-    });
-
-    if (options.maxIdleTime > 0) {
-        setTimeout(() => {
-            if (!connectionsMade) {
-                logger.log(`closing server socket because no clients connected, exiting with code 0`);
-                // Stop socket server
-                server.close((err) => {
-                    options.exit(err ? ERROR.UNKNOWN.exitCode : 0);
-                });
-            }
-        }, options.maxIdleTime).unref();
-    }
-}
+import { createServer, Socket } from 'net';
+import { getSocketPath } from './shared';
+import { AceBase, type AceBaseLocalSettings } from '../../';
+import { DebugLogger, LoggerPlugin } from 'acebase-core';
+
+const ERROR = Object.freeze({
+    ALREADY_RUNNING: { code: 'already_running', exitCode: 2 },
+    UNKNOWN: { code: 'unknown', exitCode: 3 },
+    NO_DB: { code: 'no_db', exitCode: 4 },
+});
+
+export async function startServer(
+    dbFile: string,
+    options: {
+        /** path to code that returns an initialized logger plugin */
+        loggerPluginPath?: string,
+        logLevel: AceBaseLocalSettings['logLevel'],
+        maxIdleTime: number,
+        exit: (code: number) => void
+    }
+) {
+    const fileMatch = dbFile.match(/^(?<storagePath>.*([\\\/]))(?<dbName>.+)\.acebase\2(?<storageType>[a-z]+)\.db$/);
+    if (!fileMatch) {
+        return options.exit(ERROR.NO_DB.exitCode);
+    }
+    const { storagePath, dbName, storageType } = fileMatch.groups;
+    const logger = options.loggerPluginPath
+        ? await (async () => {
+            const mod = await import(options.loggerPluginPath);
+            return (mod.default ?? mod) as LoggerPlugin;
+        })()
+        : new DebugLogger(options.logLevel, `[IPC service ${dbName}:${storageType}]`);
+    let db: AceBase; // Will be opened when listening
+
+    const sockets = [] as Socket[];
+
+    const socketPath = getSocketPath(dbFile);
+    logger.info(`starting socket server on path ${socketPath}`);
+
+    const server = createServer();
+    server.listen({
+        path: socketPath,
+        readableAll: true,
+        writableAll: true,
+    });
+
+    server.on('listening', () => {
+        // Started successfully
+        process.on('SIGINT', () => server.close());
+        process.on('exit', (code) => {
+            logger.info(`exiting with code ${code}`);
+        });
+
+        // Start the "master" IPC client
+        db = new AceBase(dbName, { logLevel: options.logLevel, logger, storage: { type: storageType, path: storagePath, ipc: server } });
+    });
+
+    server.on('error', (err: Error & { code: string }) => {
+        if (err.code === 'EADDRINUSE') {
+            logger.info(`socket server already running`);
+            return options.exit(ERROR.ALREADY_RUNNING.exitCode);
+        }
+        logger.error(`socket server error ${err.code ?? err.message}`);
+        options.exit(ERROR.UNKNOWN.exitCode);
+    });
+
+    let connectionsMade = false;
+    server.on('connection', (socket) => {
+        // New socket connected handler
+        connectionsMade = true;
+        sockets.push(socket);
+        logger.info(`socket connected, total: ${sockets.length}`);
+
+        socket.on('close', (hadError) => {
+            // Socket is closed
+            sockets.splice(sockets.indexOf(socket), 1);
+            logger.info(`socket disconnected${hadError ? ' because of an error' : ''}, total: ${sockets.length}`);
+            if (sockets.length === 0) {
+                const stop = () => {
+                    logger.info(`closing server socket because there are no more connected clients, exiting with code 0`);
+                    // Stop socket server
+                    server.close((err) => {
+                        options.exit(err ? 
ERROR.UNKNOWN.exitCode : 0); + }); + }; + if (options.maxIdleTime > 0) { + setTimeout(() => { + if (sockets.length === 0) { stop(); } + }, 5000); + } + else { + stop(); + } + } + }); + }); + + server.on('close', () => { + db.close(); + }); + + if (options.maxIdleTime > 0) { + setTimeout(() => { + if (!connectionsMade) { + logger.info(`closing server socket because no clients connected, exiting with code 0`); + // Stop socket server + server.close((err) => { + options.exit(err ? ERROR.UNKNOWN.exitCode : 0); + }); + } + }, options.maxIdleTime).unref(); + } +} diff --git a/src/ipc/service/start.ts b/src/ipc/service/start.ts index 783e77e..f97395f 100644 --- a/src/ipc/service/start.ts +++ b/src/ipc/service/start.ts @@ -1,27 +1,29 @@ -import { startServer } from '.'; -import { type AceBaseLocalSettings } from '../../'; - -(async () => { - try { - const dbFile = process.argv[2]; // full path to db storage file, eg '/home/ewout/project/db.acebase/data.db' - const settings = process.argv.slice(3).reduce((settings, arg, i, args) => { - switch (arg.toLowerCase()) { - case '--loglevel': settings.logLevel = args[i + 1] as AceBaseLocalSettings['logLevel']; break; - case '--maxidletime': settings.maxIdleTime = parseInt(args[i + 1]); break; - } - return settings; - }, { logLevel: 'log' as AceBaseLocalSettings['logLevel'], maxIdleTime: 5000 }); - - await startServer(dbFile, { - logLevel: settings.logLevel, - maxIdleTime: settings.maxIdleTime, - exit: (code) => { - process.exit(code); - }, - }); - } - catch (err) { - console.error(`Start error:`, err); - process.exit(1); - } -})(); +import { startServer } from '.'; +import { type AceBaseLocalSettings } from '../../'; + +(async () => { + try { + const dbFile = process.argv[2]; // full path to db storage file, eg '/home/ewout/project/db.acebase/data.db' + const settings = process.argv.slice(3).reduce((settings, arg, i, args) => { + switch (arg.toLowerCase()) { + case '--loglevel': settings.logLevel = args[i + 1] as AceBaseLocalSettings['logLevel']; break; + case '--maxidletime': settings.maxIdleTime = parseInt(args[i + 1]); break; + case '--logger': settings.loggerPluginPath = args[i + 1]; break; + } + return settings; + }, { logLevel: 'log', maxIdleTime: 5000 } as { loggerPluginPath?: string, logLevel: AceBaseLocalSettings['logLevel'], maxIdleTime: number }); + + await startServer(dbFile, { + loggerPluginPath: settings.loggerPluginPath, + logLevel: settings.logLevel, + maxIdleTime: settings.maxIdleTime, + exit: (code) => { + process.exit(code); + }, + }); + } + catch (err) { + console.error(`Start error:`, err); + process.exit(1); + } +})(); diff --git a/src/ipc/socket.ts b/src/ipc/socket.ts index 2a559a0..309abf1 100644 --- a/src/ipc/socket.ts +++ b/src/ipc/socket.ts @@ -1,262 +1,269 @@ -import { Socket, connect, Server } from 'net'; -import { resolve as resolvePath } from 'path'; -import { spawn } from 'child_process'; -import { AceBaseIPCPeer, IHelloMessage, IMessage } from './ipc'; -import { Storage } from '../storage'; -import { ID, Transport } from 'acebase-core'; -import { getSocketPath, MSG_DELIMITER } from './service/shared'; -// import { startServer } from './service'; -export { Server as NetIPCServer } from 'net'; - -const masterPeerId = '[master]'; - -interface EventEmitterLike { - addListener?(event: string, handler: (...args: any[]) => any): any; - removeListener?(event: string, handler: (...args: any[]) => any): any; - on?(event: string, handler: (...args: any[]) => any): any; - off?(event: string, handler: (...args: any[]) => any): any; -} - 
-/** - * Socket IPC implementation. Peers will attempt starting up a dedicated service process for the target database, - * or connect to an already running process. The service acts as the IPC master and governs over locks, space allocation - * and communication between peers. Communication between the processes is done using (very fast in-memory) Unix sockets. - * This IPC implementation allows different processes on a single machine to access the same database simultaniously without - * them having to explicitly configure their IPC settings. - * Currently can be used by passing `ipc: 'socket'` in AceBase's `storage` settings, will become the default soon. - */ -export class IPCSocketPeer extends AceBaseIPCPeer { - - public server?: Server; - - constructor(storage: Storage, ipcSettings: { ipcName: string; server: Server | null }) { - - const isMaster = storage.settings.ipc instanceof Server; - const peerId = isMaster ? masterPeerId : ID.generate(); - super(storage, peerId, ipcSettings.ipcName); - this.server = ipcSettings.server; - - this.masterPeerId = masterPeerId; - this.ipcType = 'node.socket'; - - const dbFile = resolvePath(storage.path, `${storage.settings.type}.db`); - const socketPath = getSocketPath(dbFile); - - /** Adds an event handler that is automatically removed upon IPC exit */ - const bindEventHandler = (target: EventEmitterLike, event: string, handler: (...args: any[]) => any) => { - (target.on ?? target.addListener).bind(target)(event, handler); - this.on('exit', () => (target.off ?? target.removeListener).bind(target)(event, handler)); - }; - - // Setup process exit handler - bindEventHandler(process, 'SIGINT', () => { - this.exit(); - }); - - if (!isMaster) { - // Try starting IPC service if it is not running yet. - // Use maxIdleTime 0 to allow tests to remove database files when done, make this configurable! - const service = spawn('node', [__dirname + '/service/start.js', dbFile, '--loglevel', storage.debug.level, '--maxidletime', '0'], { detached: true, stdio: 'ignore' }); - service.unref(); // Process is detached and allowed to keep running after we exit. Do not keep a reference to it, possibly preventing app exit. - - // For testing: - // startServer(dbFile, { - // maxIdleTime: 0, - // logLevel: storage.debug.level, - // exit: (code) => { - // storage.debug.log(`[IPC ${ipcSettings.ipcName}] service exited with code ${code}`); - // }, - // }); - } - - /** - * Socket connection with master (workers only) - */ - let socket: Socket | null = null; - let connected = false; - const queue = [] as IMessage[]; - - /** - * Maps peers to IPC sockets (master only) - */ - const peerSockets = isMaster ? new Map() : null; - - const handleMessage = (socket: Socket, message: IMessage) => { - if (typeof message !== 'object') { - // Ignore non-object IPC messages - return; - } - if (isMaster && message.to !== masterPeerId) { - // Message is meant for others (or all). Forward it - this.sendMessage(message); - } - if (message.to && message.to !== this.id) { - // Message is for somebody else. Ignore - return; - } - if (this.isMaster) { - if (message.type === 'hello') { - // Bind peer id to incoming socket - peerSockets.set(message.from, socket); - } - else if (message.type === 'bye') { - // Remove bound socket for peer - peerSockets.delete(message.from); - } - } - return super.handleMessage(message); - }; - - if (isMaster) { - this.server.on('connection', (socket) => { - // New socket connected. 
We don't know which peer it is until we get a "hello" message
-                let buffer = Buffer.alloc(0); // Buffer to store incomplete messages
-                socket.on('data', chunk => {
-                    // Received data from a worker
-                    buffer = Buffer.concat([buffer, chunk]);
-
-                    while (buffer.length > 0) {
-                        const delimiterIndex = buffer.indexOf(MSG_DELIMITER);
-                        if (delimiterIndex === -1) {
-                            break; // wait for more data
-                        }
-
-                        // Extract message from buffer
-                        const message = buffer.subarray(0, delimiterIndex);
-                        buffer = buffer.subarray(delimiterIndex + MSG_DELIMITER.length);
-
-                        try {
-                            const json = message.toString('utf-8');
-                            // storage.debug.log(`[IPC ${ipcSettings.ipcName}] Received socket message: `, json);
-                            const serialized = JSON.parse(json);
-                            const msg = Transport.deserialize2(serialized);
-                            handleMessage(socket, msg);
-                        }
-                        catch (err) {
-                            storage.debug.error(`[IPC ${ipcSettings.ipcName}] Error parsing message: ${err}`);
-                        }
-                    }
-                });
-                socket.on('close', (hadError) => {
-                    // socket has disconnected. Find registered peer
-                    for (const [peerId, peerSocket] of peerSockets.entries()) {
-                        if (peerSocket === socket) {
-                            // Worker apparently did not have time to say goodbye,
-                            // remove the peer ourselves
-                            this.removePeer(peerId);
-
-                            // Send "bye" message on their behalf
-                            this.sayGoodbye(peerId);
-                            break;
-                        }
-                    }
-                });
-            });
-        }
-        else {
-            const connectSocket = async (path: string) => {
-                const tryConnect = async (tries: number): Promise<void> => {
-                    try {
-                        if (this._exiting) { return; }
-                        const s = connect({ path });
-                        await new Promise<void>((resolve, reject) => {
-                            s.once('error', reject).unref();
-                            s.once('connect', resolve).unref();
-                        });
-                        storage.debug.log(`[IPC ${ipcSettings.ipcName}] peer ${this.id} successfully established connection to the service`);
-                        socket = s;
-                        connected = true;
-                    }
-                    catch (err) {
-                        if (tries < 100) {
-                            // Retry in 10ms
-                            await new Promise(resolve => setTimeout(resolve, 100));
-                            return tryConnect(tries + 1);
-                        }
-                        storage.debug.error(`[IPC ${ipcSettings.ipcName}] peer ${this.id} cannot connect to service: ${err.message}`);
-                        throw err;
-                    }
-                };
-                await tryConnect(1);
-
-                this.once('exit', () => {
-                    socket?.destroy();
-                });
-
-                bindEventHandler(socket, 'close', (hadError) => {
-                    // Connection to server closed
-                    storage.debug.log(`IPC peer ${this.id} lost its connection to the service${hadError ? 
' because of an error' : ''}`);
-                });
-
-                let buffer = Buffer.alloc(0); // Buffer to store incomplete messages
-                bindEventHandler(socket, 'data', chunk => {
-                    // Received data from server
-                    buffer = Buffer.concat([buffer, chunk]);
-
-                    while (buffer.length > 0) {
-                        const delimiterIndex = buffer.indexOf(MSG_DELIMITER);
-                        if (delimiterIndex === -1) {
-                            break; // wait for more data
-                        }
-
-                        // Extract message from buffer
-                        const message = buffer.subarray(0, delimiterIndex);
-                        buffer = buffer.subarray(delimiterIndex + MSG_DELIMITER.length);
-
-                        try {
-                            const json = message.toString('utf-8');
-                            // storage.debug.log(`Received server message: `, json);
-                            const serialized = JSON.parse(json);
-                            const msg = Transport.deserialize2(serialized);
-                            handleMessage(socket, msg);
-                        }
-                        catch (err) {
-                            storage.debug.error(`Error parsing message: ${err}`);
-                        }
-                    }
-                });
-
-                connected = true;
-                while (queue.length) {
-                    const message = queue.shift();
-                    this.sendMessage(message);
-                }
-            };
-            connectSocket(socketPath);
-        }
-
-        this.sendMessage = (message: IMessage) => {
-            const serialized = Transport.serialize2(message);
-            const buffer = Buffer.from(JSON.stringify(serialized) + MSG_DELIMITER);
-            if (this.isMaster) {
-                // We are the master, send the message to the target worker(s)
-                this.peers
-                    .filter(p => p.id !== message.from && (!message.to || p.id === message.to))
-                    .forEach(peer => {
-                        const socket = peerSockets.get(peer.id);
-                        socket?.write(buffer);
-                    });
-            }
-            else if (connected) {
-                // Send the message to the master who will forward it to the target worker(s)
-                socket.write(buffer);
-            }
-            else {
-                // Not connected yet, queue message
-                queue.push(message);
-            }
-        };
-
-        // Send hello to other peers
-        const helloMsg: IHelloMessage = { type: 'hello', from: this.id, data: undefined };
-        this.sendMessage(helloMsg);
-    }
-
-    // eslint-disable-next-line @typescript-eslint/no-unused-vars
-    sendMessage(message: IMessage) { throw new Error('Must be set by constructor'); }
-
-    public async exit(code = 0) {
-        await super.exit(code);
-    }
-
-}
+import { Socket, connect, Server } from 'net';
+import { resolve as resolvePath } from 'path';
+import { spawn } from 'child_process';
+import { AceBaseIPCPeer, IHelloMessage, IMessage } from './ipc';
+import { Storage } from '../storage';
+import { DebugLogger, ID, Transport } from 'acebase-core';
+import { getSocketPath, MSG_DELIMITER } from './service/shared';
+// import { startServer } from './service'; // uncomment together with the in-process testing block below
+export { Server as NetIPCServer } from 'net';
+
+const masterPeerId = '[master]';
+
+interface EventEmitterLike {
+    addListener?(event: string, handler: (...args: any[]) => any): any;
+    removeListener?(event: string, handler: (...args: any[]) => any): any;
+    on?(event: string, handler: (...args: any[]) => any): any;
+    off?(event: string, handler: (...args: any[]) => any): any;
+}
+
+/**
+ * Socket IPC implementation. Peers will attempt to start a dedicated service process for the target database,
+ * or connect to an already running process. The service acts as the IPC master and governs over locks, space allocation
+ * and communication between peers. Communication between the processes is done using (very fast in-memory) Unix sockets.
+ * This IPC implementation allows different processes on a single machine to access the same database simultaneously without
+ * them having to explicitly configure their IPC settings.
+ * It can currently be used by passing `ipc: 'socket'` in AceBase's `storage` settings and will become the default soon.
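+ *
+ * A minimal usage sketch (the './data' storage path below is only an example; any
+ * process that opens the same database with `ipc: 'socket'` joins through the
+ * shared service process):
+ *
+ * @example
+ * const { AceBase } = require('acebase');
+ * const db = new AceBase('mydb', { storage: { path: './data', ipc: 'socket' } });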
+ */
+export class IPCSocketPeer extends AceBaseIPCPeer {
+
+    public server?: Server;
+
+    constructor(storage: Storage, ipcSettings: { ipcName: string; server: Server | null; maxIdleTime?: number; loggerPluginPath?: string }) {
+
+        const isMaster = storage.settings.ipc instanceof Server;
+        const peerId = isMaster ? masterPeerId : ID.generate();
+        super(storage, peerId, ipcSettings.ipcName);
+        this.server = ipcSettings.server;
+
+        this.masterPeerId = masterPeerId;
+        this.ipcType = 'node.socket';
+
+        const dbFile = resolvePath(storage.path, `${storage.settings.type}.db`);
+        const socketPath = getSocketPath(dbFile);
+
+        /** Adds an event handler that is automatically removed upon IPC exit */
+        const bindEventHandler = (target: EventEmitterLike, event: string, handler: (...args: any[]) => any) => {
+            (target.on ?? target.addListener).bind(target)(event, handler);
+            this.on('exit', () => (target.off ?? target.removeListener).bind(target)(event, handler));
+        };
+
+        // Setup process exit handler
+        bindEventHandler(process, 'SIGINT', () => {
+            this.exit();
+        });
+
+        if (!isMaster) {
+            // Try starting IPC service if it is not running yet.
+            const args = [
+                __dirname + '/service/start.js',
+                dbFile,
+                ...(this.logger instanceof DebugLogger ? ['--loglevel', this.logger.level] : []),
+                ...(ipcSettings.loggerPluginPath ? ['--logger', ipcSettings.loggerPluginPath] : []),
+                '--maxidletime', ipcSettings.maxIdleTime?.toString() ?? '0', // Use maxIdleTime 0 to allow tests to remove database files when done
+            ];
+            const service = spawn('node', args, { detached: true, stdio: 'ignore' });
+            service.unref(); // Process is detached and allowed to keep running after we exit. Do not keep a reference to it, possibly preventing app exit.
+
+            // // For testing:
+            // startServer(dbFile, {
+            //     maxIdleTime: 0,
+            //     ...(this.logger instanceof DebugLogger && { logLevel: this.logger.level }),
+            //     ...(ipcSettings.loggerPluginPath && { loggerPluginPath: ipcSettings.loggerPluginPath }),
+            //     exit: (code) => {
+            //         this.logger.info(`[IPC ${ipcSettings.ipcName}] service exited with code ${code}`);
+            //     },
+            // });
+        }
+
+        /**
+         * Socket connection with master (workers only)
+         */
+        let socket: Socket | null = null;
+        let connected = false;
+        const queue = [] as IMessage[];
+
+        /**
+         * Maps peers to IPC sockets (master only)
+         */
+        const peerSockets = isMaster ? new Map<string, Socket>() : null;
+
+        const handleMessage = (socket: Socket, message: IMessage) => {
+            if (typeof message !== 'object') {
+                // Ignore non-object IPC messages
+                return;
+            }
+            if (isMaster && message.to !== masterPeerId) {
+                // Message is meant for others (or all). Forward it
+                this.sendMessage(message);
+            }
+            if (message.to && message.to !== this.id) {
+                // Message is for somebody else. Ignore
+                return;
+            }
+            if (this.isMaster) {
+                if (message.type === 'hello') {
+                    // Bind peer id to incoming socket
+                    peerSockets.set(message.from, socket);
+                }
+                else if (message.type === 'bye') {
+                    // Remove bound socket for peer
+                    peerSockets.delete(message.from);
+                }
+            }
+            return super.handleMessage(message);
+        };
+
+        if (isMaster) {
+            this.server.on('connection', (socket) => {
+                // New socket connected. 
We don't know which peer it is until we get a "hello" message
+                let buffer = Buffer.alloc(0); // Buffer to store incomplete messages
+                socket.on('data', chunk => {
+                    // Received data from a worker
+                    buffer = Buffer.concat([buffer, chunk]);
+
+                    while (buffer.length > 0) {
+                        const delimiterIndex = buffer.indexOf(MSG_DELIMITER);
+                        if (delimiterIndex === -1) {
+                            break; // wait for more data
+                        }
+
+                        // Extract message from buffer
+                        const message = buffer.subarray(0, delimiterIndex);
+                        buffer = buffer.subarray(delimiterIndex + MSG_DELIMITER.length);
+
+                        try {
+                            const json = message.toString('utf-8');
+                            // this.logger.debug(`[IPC ${ipcSettings.ipcName}] Received socket message: `, json);
+                            const serialized = JSON.parse(json);
+                            const msg = Transport.deserialize2(serialized);
+                            handleMessage(socket, msg);
+                        }
+                        catch (err) {
+                            this.logger.error(`[IPC ${ipcSettings.ipcName}] Error parsing message: ${err}`);
+                        }
+                    }
+                });
+                socket.on('close', (hadError) => {
+                    // socket has disconnected. Find registered peer
+                    for (const [peerId, peerSocket] of peerSockets.entries()) {
+                        if (peerSocket === socket) {
+                            // Worker apparently did not have time to say goodbye,
+                            // remove the peer ourselves
+                            this.removePeer(peerId);
+
+                            // Send "bye" message on their behalf
+                            this.sayGoodbye(peerId);
+                            break;
+                        }
+                    }
+                });
+            });
+        }
+        else {
+            const connectSocket = async (path: string) => {
+                const tryConnect = async (tries: number): Promise<void> => {
+                    try {
+                        if (this._exiting) { return; }
+                        const s = connect({ path });
+                        await new Promise<void>((resolve, reject) => {
+                            s.once('error', reject).unref();
+                            s.once('connect', resolve).unref();
+                        });
+                        this.logger.info(`[IPC ${ipcSettings.ipcName}] peer ${this.id} successfully established connection to the service`);
+                        socket = s;
+                        connected = true;
+                    }
+                    catch (err) {
+                        if (tries < 100) {
+                            // Retry in 100ms
+                            await new Promise(resolve => setTimeout(resolve, 100));
+                            return tryConnect(tries + 1);
+                        }
+                        this.logger.error(`[IPC ${ipcSettings.ipcName}] peer ${this.id} cannot connect to service: ${err.message}`);
+                        throw err;
+                    }
+                };
+                await tryConnect(1);
+
+                this.once('exit', () => {
+                    socket?.destroy();
+                });
+
+                bindEventHandler(socket, 'close', (hadError) => {
+                    // Connection to server closed
+                    this.logger.info(`IPC peer ${this.id} lost its connection to the service${hadError ? 
' because of an error' : ''}`); + }); + + let buffer = Buffer.alloc(0); // Buffer to store incomplete messages + bindEventHandler(socket, 'data', chunk => { + // Received data from server + buffer = Buffer.concat([buffer, chunk]); + + while (buffer.length > 0) { + const delimiterIndex = buffer.indexOf(MSG_DELIMITER); + if (delimiterIndex === -1) { + break; // wait for more data + } + + // Extract message from buffer + const message = buffer.subarray(0, delimiterIndex); + buffer = buffer.subarray(delimiterIndex + MSG_DELIMITER.length); + + try { + const json = message.toString('utf-8'); + // this.logger.debug(`Received server message: `, json); + const serialized = JSON.parse(json); + const msg = Transport.deserialize2(serialized); + handleMessage(socket, msg); + } + catch (err) { + this.logger.error(`Error parsing message: ${err}`); + } + } + }); + + connected = true; + while (queue.length) { + const message = queue.shift(); + this.sendMessage(message); + } + }; + connectSocket(socketPath); + } + + this.sendMessage = (message: IMessage) => { + const serialized = Transport.serialize2(message); + const buffer = Buffer.from(JSON.stringify(serialized) + MSG_DELIMITER); + if (this.isMaster) { + // We are the master, send the message to the target worker(s) + this.peers + .filter(p => p.id !== message.from && (!message.to || p.id === message.to)) + .forEach(peer => { + const socket = peerSockets.get(peer.id); + socket?.write(buffer); + }); + } + else if (connected) { + // Send the message to the master who will forward it to the target worker(s) + socket.write(buffer); + } + else { + // Not connected yet, queue message + queue.push(message); + } + }; + + // Send hello to other peers + const helloMsg: IHelloMessage = { type: 'hello', from: this.id, data: undefined }; + this.sendMessage(helloMsg); + } + + // eslint-disable-next-line @typescript-eslint/no-unused-vars + sendMessage(message: IMessage) { throw new Error('Must be set by constructor'); } + + public async exit(code = 0) { + await super.exit(code); + } + +} diff --git a/src/node-lock.ts b/src/node-lock.ts index 09958e3..590e29b 100644 --- a/src/node-lock.ts +++ b/src/node-lock.ts @@ -1,366 +1,364 @@ -import { PathInfo, ID, DebugLogger } from 'acebase-core'; -import { assert } from './assert'; - -const DEBUG_MODE = false; -const DEFAULT_LOCK_TIMEOUT = 120; // in seconds - -export const LOCK_STATE = { - PENDING: 'pending', - LOCKED: 'locked', - EXPIRED: 'expired', - DONE: 'done', -}; - -export class NodeLocker { - - private _locks: NodeLock[] = []; - private _lastTid = 0; - - /** - * When .quit() is called, will be set to the quit promise's resolve function - */ - private _quit: () => void; - private debug: DebugLogger; - public timeout: number; - - /** - * Provides locking mechanism for nodes, ensures no simultanious read and writes happen to overlapping paths - */ - constructor(debug: DebugLogger, lockTimeout = DEFAULT_LOCK_TIMEOUT) { - this.debug = debug; - this.timeout = lockTimeout * 1000; - } - - setTimeout(timeout: number) { - this.timeout = timeout * 1000; - } - - createTid() { - return DEBUG_MODE ? 
++this._lastTid : ID.generate(); - } - - _allowLock(path: string, tid: string|number, forWriting: boolean) { - /** - * Disabled path locking because of the following issue: - * - * Process 1 requests WRITE lock on "/users/ewout", is GRANTED - * Process 2 requests READ lock on "", is DENIED (process 1 writing to a descendant) - * Process 3 requests WRITE lock on "/posts/post1", is GRANTED - * Process 1 requests READ lock on "/" because of bound events, is DENIED (3 is writing to a descendant) - * Process 3 requests READ lock on "/" because of bound events, is DENIED (1 is writing to a descendant) - * - * --> DEADLOCK! - * - * Now simply makes sure one transaction has write access at the same time, - * might change again in the future... - */ - - const conflict = this._locks - .find(otherLock => { - return ( - otherLock.tid !== tid - && otherLock.state === LOCK_STATE.LOCKED - && (forWriting || otherLock.forWriting) - ); - }); - return { allow: !conflict, conflict }; - } - - quit() { - return new Promise(resolve => { - if (this._locks.length === 0) { return resolve(); } - this._quit = resolve; - }); - } - - /** - * Safely reject a pending lock, catching any unhandled promise rejections (that should not happen in the first place, obviously) - * @param lock - */ - _rejectLock(lock: NodeLock, err: Error) { - this._locks.splice(this._locks.indexOf(lock), 1); // Remove from queue - clearTimeout(lock.timeout); - try { - lock.reject(err); - } - catch(err) { - console.error(`Unhandled promise rejection:`, err); - } - } - - _processLockQueue() { - if (this._quit) { - // Reject all pending locks - const quitError = new Error('Quitting'); - this._locks - .filter(lock => lock.state === LOCK_STATE.PENDING) - .forEach(lock => this._rejectLock(lock, quitError)); - // Resolve quit promise if queue is empty: - if (this._locks.length === 0) { - this._quit(); - } - } - const pending = this._locks - .filter(lock => - lock.state === LOCK_STATE.PENDING, - // && (lock.waitingFor === null || lock.waitingFor.state !== LOCK_STATE.LOCKED) - // Commented out above, because waitingFor lock might have moved to a different non-conflicting path in the meantime - ) - .sort((a,b) => { - // // Writes get higher priority so all reads get the most recent data - // if (a.forWriting === b.forWriting) { - // if (a.requested < b.requested) { return -1; } - // else { return 1; } - // } - // else if (a.forWriting) { return -1; } - if (a.priority && !b.priority) { return -1; } - else if (!a.priority && b.priority) { return 1; } - return a.requested - b.requested; - }); - pending.forEach(lock => { - const check = this._allowLock(lock.path, lock.tid, lock.forWriting); - lock.waitingFor = check.conflict || null; - if (check.allow) { - this.lock(lock) - .then(lock.resolve) - .catch(err => this._rejectLock(lock, err)); - } - }); - } - - /** - * Locks a path for writing. While the lock is in place, it's value cannot be changed by other transactions. - * @param path path being locked - * @param tid a unique value to identify your transaction - * @param forWriting if the record will be written to. Multiple read locks can be granted access at the same time if there is no write lock. Once a write lock is granted, no others can read from or write to it. - * @returns returns a promise with the lock object once it is granted. 
It's .release method can be used as a shortcut to .unlock(path, tid) to release the lock
-     */
-    async lock(
-        path: string,
-        tid: string,
-        forWriting?: boolean,
-        comment?: string,
-        options?: { withPriority?: boolean; noTimeout?: boolean }
-    ): Promise<NodeLock>;
-    async lock(lock: NodeLock): Promise<NodeLock>;
-    async lock(
-        path: string|NodeLock,
-        tid?: string,
-        forWriting = true,
-        comment = '',
-        options: { withPriority?: boolean; noTimeout?: boolean } = { withPriority: false, noTimeout: false },
-    ): Promise<NodeLock> {
-        let lock: NodeLock, proceed: boolean;
-        if (path instanceof NodeLock) {
-            lock = path;
-            //lock.comment = `(retry: ${lock.comment})`;
-            proceed = true;
-        }
-        else if (this._locks.findIndex((l => l.tid === tid && l.state === LOCK_STATE.EXPIRED)) >= 0) {
-            throw new Error(`lock on tid ${tid} has expired, not allowed to continue`);
-        }
-        else if (this._quit && !options.withPriority) {
-            throw new Error(`Quitting`);
-        }
-        else {
-            DEBUG_MODE && console.error(`${forWriting ? 'write' : 'read'} lock requested on "${path}" by tid ${tid} (${comment})`);
-            // // Test the requested lock path
-            // let duplicateKeys = getPathKeys(path)
-            //     .reduce((r, key) => {
-            //         let i = r.findIndex(c => c.key === key);
-            //         if (i >= 0) { r[i].count++; }
-            //         else { r.push({ key, count: 1 }) }
-            //         return r;
-            //     }, [])
-            //     .filter(c => c.count > 1)
-            //     .map(c => c.key);
-            // if (duplicateKeys.length > 0) {
-            //     console.log(`ALERT: Duplicate keys found in path "/${path}"`.colorize([ColorStyle.dim, ColorStyle.bgRed]);
-            // }
-            lock = new NodeLock(this, path, tid, forWriting, options.withPriority === true);
-            lock.comment = comment;
-            this._locks.push(lock);
-            const check = this._allowLock(path, tid, forWriting);
-            lock.waitingFor = check.conflict || null;
-            proceed = check.allow;
-        }
-        if (proceed) {
-            DEBUG_MODE && console.error(`${lock.forWriting ? 'write' : 'read'} lock ALLOWED on "${lock.path}" by tid ${lock.tid} (${lock.comment})`);
-            lock.state = LOCK_STATE.LOCKED;
-            if (typeof lock.granted === 'number') {
-                //debug.warn(`lock :: ALLOWING ${lock.forWriting ? "write" : "read" } lock on path "/${lock.path}" by tid ${lock.tid}; ${lock.comment}`);
-            }
-            else {
-                lock.granted = Date.now();
-                if (options.noTimeout !== true) {
-                    lock.expires = Date.now() + this.timeout;
-                    //debug.warn(`lock :: GRANTED ${lock.forWriting ? "write" : "read" } lock on path "/${lock.path}" by tid ${lock.tid}; ${lock.comment}`);
-                    let timeoutCount = 0;
-                    const timeoutHandler = () => {
-                        // Autorelease timeouts must only fire when there is something wrong in the
-                        // executing (AceBase) code, eg an unhandled promise rejection causing a lock not
-                        // to be released. To guard against programming errors, we will issue 3 warning
-                        // messages before releasing the lock.
-
-                        if (lock.state !== LOCK_STATE.LOCKED) { return; }
-
-                        timeoutCount++;
-                        if (timeoutCount <= 3) {
-                            // Warn first.
-                            this.debug.warn(`${lock.forWriting ? 'write' : 'read' } lock on path "/${lock.path}" by tid ${lock.tid} (${lock.comment}) is taking a long time to complete [${timeoutCount}]`);
-                            lock.timeout = setTimeout(timeoutHandler, this.timeout / 4);
-                            return;
-                        }
-                        this.debug.error(`lock :: ${lock.forWriting ? 'write' : 'read' } lock on path "/${lock.path}" by tid ${lock.tid} (${lock.comment}) took too long`);
-                        lock.state = LOCK_STATE.EXPIRED;
-                        // let allTransactionLocks = _locks.filter(l => l.tid === lock.tid).sort((a,b) => a.requested < b.requested ? -1 : 1);
-                        // let transactionsDebug = allTransactionLocks.map(l => `${l.state} ${l.forWriting ? 
"WRITE" : "read"} ${l.comment}`).join("\n"); - // debug.error(transactionsDebug); - - this._processLockQueue(); - }; - - lock.timeout = setTimeout(timeoutHandler, this.timeout / 4); - } - } - return lock; - } - else { - // Keep pending until clashing lock(s) is/are removed - //debug.warn(`lock :: QUEUED ${lock.forWriting ? "write" : "read" } lock on path "/${lock.path}" by tid ${lock.tid}; ${lock.comment}`); - assert(lock.state === LOCK_STATE.PENDING); - return new Promise((resolve, reject) => { - lock.resolve = resolve; - lock.reject = reject; - }); - } - } - - unlock(lockOrId: NodeLock | NodeLock['id'], comment: string, processQueue = true) { - let lock, i; - if (lockOrId instanceof NodeLock) { - lock = lockOrId; - i = this._locks.indexOf(lock); - } - else { - const id = lockOrId; - i = this._locks.findIndex(l => l.id === id); - lock = this._locks[i]; - } - if (i < 0) { - const msg = `lock on "/${lock.path}" for tid ${lock.tid} wasn't found; ${comment}`; - // debug.error(`unlock :: ${msg}`); - throw new Error(msg); - } - lock.state = LOCK_STATE.DONE; - clearTimeout(lock.timeout); - this._locks.splice(i, 1); - DEBUG_MODE && console.error(`${lock.forWriting ? 'write' : 'read'} lock RELEASED on "${lock.path}" by tid ${lock.tid}`); - //debug.warn(`unlock :: RELEASED ${lock.forWriting ? "write" : "read" } lock on "/${lock.path}" for tid ${lock.tid}; ${lock.comment}; ${comment}`); - - processQueue && this._processLockQueue(); - return lock; - } - - list() { - return this._locks || []; - } - - isAllowed(path: string, tid: string | number, forWriting: boolean) { - return this._allowLock(path, tid, forWriting).allow; - } -} - -let lastid = 0; -export class NodeLock { - - static get LOCK_STATE() { return LOCK_STATE; } - - state = LOCK_STATE.PENDING; - requested: number = Date.now(); - granted: number; - expires: number; - comment = ''; - waitingFor: NodeLock = null; - id: number = ++lastid; - history: { action: string; path: string; forWriting: boolean; comment?: string }[] = []; - timeout: NodeJS.Timeout; - - resolve: (lock: NodeLock) => void; - reject: (err: Error) => void; - - /** - * Constructor for a record lock - * @param {NodeLocker} locker - * @param {string} path - * @param {string} tid - * @param {boolean} forWriting - * @param {boolean} priority - */ - constructor( - private locker: NodeLocker, - public path: string, - public tid: string, - public forWriting: boolean, - public priority = false) { - } - - async release(comment?: string) { - //return this.storage.unlock(this.path, this.tid, comment); - this.history.push({ action: 'release', path: this.path, forWriting: this.forWriting, comment }); - return this.locker.unlock(this, comment || this.comment); - } - - async moveToParent() { - const parentPath = PathInfo.get(this.path).parentPath; //getPathInfo(this.path).parent; - const allowed = this.locker.isAllowed(parentPath, this.tid, this.forWriting); //_allowLock(parentPath, this.tid, this.forWriting); - if (allowed) { - DEBUG_MODE && console.error(`moveToParent ALLOWED for ${this.forWriting ? 'write' : 'read'} lock on "${this.path}" by tid ${this.tid} (${this.comment})`); - this.history.push({ path: this.path, forWriting: this.forWriting, action: 'moving to parent' }); - this.waitingFor = null; - this.path = parentPath; - // this.comment = `moved to parent: ${this.comment}`; - return this; - } - else { - // Unlock without processing the queue - DEBUG_MODE && console.error(`moveToParent QUEUED for ${this.forWriting ? 
'write' : 'read'} lock on "${this.path}" by tid ${this.tid} (${this.comment})`);
-            this.locker.unlock(this, `moveLockToParent: ${this.comment}`, false);
-            // Lock parent node with priority to jump the queue
-            const newLock = await this.locker.lock(parentPath, this.tid, this.forWriting, this.comment, { withPriority: true });
-            DEBUG_MODE && console.error(`QUEUED moveToParent ALLOWED for ${this.forWriting ? 'write' : 'read'} lock on "${this.path}" by tid ${this.tid} (${this.comment})`);
-            newLock.history = this.history;
-            newLock.history.push({ path: this.path, forWriting: this.forWriting, action: 'moving to parent through queue (priority)' });
-            return newLock;
-        }
-    }
-
-    // /**
-    //  * Not used? Will be removed
-    //  */
-    // moveTo(otherPath: string, forWriting: boolean) {
-    //     //const check = _allowLock(otherPath, this.tid, forWriting);
-    //     const allowed = this.locker.isAllowed(otherPath, this.tid, forWriting);
-    //     if (allowed) {
-    //         this.history.push({ path: this.path, forWriting: this.forWriting, action: `moving to "${otherPath}"` });
-    //         this.waitingFor = null;
-    //         this.path = otherPath;
-    //         this.forWriting = forWriting;
-    //         // this.comment = `moved to "/${otherPath}": ${this.comment}`;
-    //         return Promise.resolve(this);
-    //     }
-    //     else {
-    //         // Unlock without processing the queue
-    //         this.locker.unlock(this, `moving to "/${otherPath}": ${this.comment}`, false);
-
-    //         // Lock other node with priority to jump the queue
-    //         return this.locker.lock(otherPath, this.tid, forWriting, this.comment, { withPriority: true }) // `moved to "/${otherPath}" (queued): ${this.comment}`
-    //             .then(newLock => {
-    //                 newLock.history = this.history;
-    //                 newLock.history.push({ path: this.path, forWriting: this.forWriting, action: `moved to "${otherPath}" through queue` });
-    //                 return newLock;
-    //             });
-    //     }
-    // }
-
-}
+import { PathInfo, ID, LoggerPlugin } from 'acebase-core';
+import { assert } from './assert';
+
+const DEBUG_MODE = false;
+const DEFAULT_LOCK_TIMEOUT = 120; // in seconds
+
+export const LOCK_STATE = {
+    PENDING: 'pending',
+    LOCKED: 'locked',
+    EXPIRED: 'expired',
+    DONE: 'done',
+};
+
+export class NodeLocker {
+
+    private _locks: NodeLock[] = [];
+    private _lastTid = 0;
+
+    /**
+     * When .quit() is called, will be set to the quit promise's resolve function
+     */
+    private _quit: () => void;
+    public timeout: number;
+
+    /**
+     * Provides locking mechanism for nodes, ensures no simultaneous reads and writes happen to overlapping paths
+     */
+    constructor(private logger: LoggerPlugin, lockTimeout = DEFAULT_LOCK_TIMEOUT) {
+        this.timeout = lockTimeout * 1000;
+    }
+
+    setTimeout(timeout: number) {
+        this.timeout = timeout * 1000;
+    }
+
+    createTid() {
+        return DEBUG_MODE ? ++this._lastTid : ID.generate();
+    }
+
+    _allowLock(path: string, tid: string|number, forWriting: boolean) {
+        /**
+         * Disabled path locking because of the following issue:
+         *
+         * Process 1 requests WRITE lock on "/users/ewout", is GRANTED
+         * Process 2 requests READ lock on "", is DENIED (process 1 writing to a descendant)
+         * Process 3 requests WRITE lock on "/posts/post1", is GRANTED
+         * Process 1 requests READ lock on "/" because of bound events, is DENIED (3 is writing to a descendant)
+         * Process 3 requests READ lock on "/" because of bound events, is DENIED (1 is writing to a descendant)
+         *
+         * --> DEADLOCK!
+         *
+         * Now simply makes sure one transaction has write access at the same time,
+         * might change again in the future... 
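+         *
+         * The conservative rule implemented below: a lock request conflicts with any
+         * LOCKED lock held by another transaction when either of the two is for
+         * writing. Multiple simultaneous readers are still allowed; a single writer
+         * excludes all other readers and writers, regardless of path.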
+         */
+
+        const conflict = this._locks
+            .find(otherLock => {
+                return (
+                    otherLock.tid !== tid
+                    && otherLock.state === LOCK_STATE.LOCKED
+                    && (forWriting || otherLock.forWriting)
+                );
+            });
+        return { allow: !conflict, conflict };
+    }
+
+    quit() {
+        return new Promise<void>(resolve => {
+            if (this._locks.length === 0) { return resolve(); }
+            this._quit = resolve;
+        });
+    }
+
+    /**
+     * Safely reject a pending lock, catching any unhandled promise rejections (that should not happen in the first place, obviously)
+     * @param lock
+     */
+    _rejectLock(lock: NodeLock, err: Error) {
+        this._locks.splice(this._locks.indexOf(lock), 1); // Remove from queue
+        clearTimeout(lock.timeout);
+        try {
+            lock.reject(err);
+        }
+        catch(err) {
+            console.error(`Unhandled promise rejection:`, err);
+        }
+    }
+
+    _processLockQueue() {
+        if (this._quit) {
+            // Reject all pending locks
+            const quitError = new Error('Quitting');
+            this._locks
+                .filter(lock => lock.state === LOCK_STATE.PENDING)
+                .forEach(lock => this._rejectLock(lock, quitError));
+            // Resolve quit promise if queue is empty:
+            if (this._locks.length === 0) {
+                this._quit();
+            }
+        }
+        const pending = this._locks
+            .filter(lock =>
+                lock.state === LOCK_STATE.PENDING,
+                // && (lock.waitingFor === null || lock.waitingFor.state !== LOCK_STATE.LOCKED)
+                // Commented out above, because waitingFor lock might have moved to a different non-conflicting path in the meantime
+            )
+            .sort((a,b) => {
+                // // Writes get higher priority so all reads get the most recent data
+                // if (a.forWriting === b.forWriting) {
+                //     if (a.requested < b.requested) { return -1; }
+                //     else { return 1; }
+                // }
+                // else if (a.forWriting) { return -1; }
+                if (a.priority && !b.priority) { return -1; }
+                else if (!a.priority && b.priority) { return 1; }
+                return a.requested - b.requested;
+            });
+        pending.forEach(lock => {
+            const check = this._allowLock(lock.path, lock.tid, lock.forWriting);
+            lock.waitingFor = check.conflict || null;
+            if (check.allow) {
+                this.lock(lock)
+                    .then(lock.resolve)
+                    .catch(err => this._rejectLock(lock, err));
+            }
+        });
+    }
+
+    /**
+     * Locks a path for writing. While the lock is in place, its value cannot be changed by other transactions.
+     * @param path path being locked
+     * @param tid a unique value to identify your transaction
+     * @param forWriting if the record will be written to. Multiple read locks can be granted access at the same time if there is no write lock. Once a write lock is granted, no others can read from or write to it.
+     * @returns a promise with the lock object once it is granted. 
Its .release method can be used as a shortcut to .unlock(path, tid) to release the lock
+     */
+    async lock(
+        path: string,
+        tid: string,
+        forWriting?: boolean,
+        comment?: string,
+        options?: { withPriority?: boolean; noTimeout?: boolean }
+    ): Promise<NodeLock>;
+    async lock(lock: NodeLock): Promise<NodeLock>;
+    async lock(
+        path: string|NodeLock,
+        tid?: string,
+        forWriting = true,
+        comment = '',
+        options: { withPriority?: boolean; noTimeout?: boolean } = { withPriority: false, noTimeout: false },
+    ): Promise<NodeLock> {
+        let lock: NodeLock, proceed: boolean;
+        if (path instanceof NodeLock) {
+            lock = path;
+            //lock.comment = `(retry: ${lock.comment})`;
+            proceed = true;
+        }
+        else if (this._locks.findIndex((l => l.tid === tid && l.state === LOCK_STATE.EXPIRED)) >= 0) {
+            throw new Error(`lock on tid ${tid} has expired, not allowed to continue`);
+        }
+        else if (this._quit && !options.withPriority) {
+            throw new Error(`Quitting`);
+        }
+        else {
+            DEBUG_MODE && console.error(`${forWriting ? 'write' : 'read'} lock requested on "${path}" by tid ${tid} (${comment})`);
+            // // Test the requested lock path
+            // let duplicateKeys = getPathKeys(path)
+            //     .reduce((r, key) => {
+            //         let i = r.findIndex(c => c.key === key);
+            //         if (i >= 0) { r[i].count++; }
+            //         else { r.push({ key, count: 1 }) }
+            //         return r;
+            //     }, [])
+            //     .filter(c => c.count > 1)
+            //     .map(c => c.key);
+            // if (duplicateKeys.length > 0) {
+            //     console.log(`ALERT: Duplicate keys found in path "/${path}"`.colorize([ColorStyle.dim, ColorStyle.bgRed]);
+            // }
+            lock = new NodeLock(this, path, tid, forWriting, options.withPriority === true);
+            lock.comment = comment;
+            this._locks.push(lock);
+            const check = this._allowLock(path, tid, forWriting);
+            lock.waitingFor = check.conflict || null;
+            proceed = check.allow;
+        }
+        if (proceed) {
+            DEBUG_MODE && console.error(`${lock.forWriting ? 'write' : 'read'} lock ALLOWED on "${lock.path}" by tid ${lock.tid} (${lock.comment})`);
+            lock.state = LOCK_STATE.LOCKED;
+            if (typeof lock.granted === 'number') {
+                //debug.warn(`lock :: ALLOWING ${lock.forWriting ? "write" : "read" } lock on path "/${lock.path}" by tid ${lock.tid}; ${lock.comment}`);
+            }
+            else {
+                lock.granted = Date.now();
+                if (options.noTimeout !== true) {
+                    lock.expires = Date.now() + this.timeout;
+                    //debug.warn(`lock :: GRANTED ${lock.forWriting ? "write" : "read" } lock on path "/${lock.path}" by tid ${lock.tid}; ${lock.comment}`);
+                    let timeoutCount = 0;
+                    const timeoutHandler = () => {
+                        // Autorelease timeouts must only fire when there is something wrong in the
+                        // executing (AceBase) code, eg an unhandled promise rejection causing a lock not
+                        // to be released. To guard against programming errors, we will issue 3 warning
+                        // messages before releasing the lock.
+
+                        if (lock.state !== LOCK_STATE.LOCKED) { return; }
+
+                        timeoutCount++;
+                        if (timeoutCount <= 3) {
+                            // Warn first.
+                            this.logger.warn(`${lock.forWriting ? 'write' : 'read' } lock on path "/${lock.path}" by tid ${lock.tid} (${lock.comment}) is taking a long time to complete [${timeoutCount}]`);
+                            lock.timeout = setTimeout(timeoutHandler, this.timeout / 4);
+                            return;
+                        }
+                        this.logger.error(`lock :: ${lock.forWriting ? 'write' : 'read' } lock on path "/${lock.path}" by tid ${lock.tid} (${lock.comment}) took too long`);
+                        lock.state = LOCK_STATE.EXPIRED;
+                        // let allTransactionLocks = _locks.filter(l => l.tid === lock.tid).sort((a,b) => a.requested < b.requested ? -1 : 1);
+                        // let transactionsDebug = allTransactionLocks.map(l => `${l.state} ${l.forWriting ? 
"WRITE" : "read"} ${l.comment}`).join("\n"); + // debug.error(transactionsDebug); + + this._processLockQueue(); + }; + + lock.timeout = setTimeout(timeoutHandler, this.timeout / 4); + } + } + return lock; + } + else { + // Keep pending until clashing lock(s) is/are removed + //debug.warn(`lock :: QUEUED ${lock.forWriting ? "write" : "read" } lock on path "/${lock.path}" by tid ${lock.tid}; ${lock.comment}`); + assert(lock.state === LOCK_STATE.PENDING); + return new Promise((resolve, reject) => { + lock.resolve = resolve; + lock.reject = reject; + }); + } + } + + unlock(lockOrId: NodeLock | NodeLock['id'], comment: string, processQueue = true) { + let lock, i; + if (lockOrId instanceof NodeLock) { + lock = lockOrId; + i = this._locks.indexOf(lock); + } + else { + const id = lockOrId; + i = this._locks.findIndex(l => l.id === id); + lock = this._locks[i]; + } + if (i < 0) { + const msg = `lock on "/${lock.path}" for tid ${lock.tid} wasn't found; ${comment}`; + // debug.error(`unlock :: ${msg}`); + throw new Error(msg); + } + lock.state = LOCK_STATE.DONE; + clearTimeout(lock.timeout); + this._locks.splice(i, 1); + DEBUG_MODE && console.error(`${lock.forWriting ? 'write' : 'read'} lock RELEASED on "${lock.path}" by tid ${lock.tid}`); + //debug.warn(`unlock :: RELEASED ${lock.forWriting ? "write" : "read" } lock on "/${lock.path}" for tid ${lock.tid}; ${lock.comment}; ${comment}`); + + processQueue && this._processLockQueue(); + return lock; + } + + list() { + return this._locks || []; + } + + isAllowed(path: string, tid: string | number, forWriting: boolean) { + return this._allowLock(path, tid, forWriting).allow; + } +} + +let lastid = 0; +export class NodeLock { + + static get LOCK_STATE() { return LOCK_STATE; } + + state = LOCK_STATE.PENDING; + requested: number = Date.now(); + granted: number; + expires: number; + comment = ''; + waitingFor: NodeLock = null; + id: number = ++lastid; + history: { action: string; path: string; forWriting: boolean; comment?: string }[] = []; + timeout: NodeJS.Timeout; + + resolve: (lock: NodeLock) => void; + reject: (err: Error) => void; + + /** + * Constructor for a record lock + * @param {NodeLocker} locker + * @param {string} path + * @param {string} tid + * @param {boolean} forWriting + * @param {boolean} priority + */ + constructor( + private locker: NodeLocker, + public path: string, + public tid: string, + public forWriting: boolean, + public priority = false) { + } + + async release(comment?: string) { + //return this.storage.unlock(this.path, this.tid, comment); + this.history.push({ action: 'release', path: this.path, forWriting: this.forWriting, comment }); + return this.locker.unlock(this, comment || this.comment); + } + + async moveToParent() { + const parentPath = PathInfo.get(this.path).parentPath; //getPathInfo(this.path).parent; + const allowed = this.locker.isAllowed(parentPath, this.tid, this.forWriting); //_allowLock(parentPath, this.tid, this.forWriting); + if (allowed) { + DEBUG_MODE && console.error(`moveToParent ALLOWED for ${this.forWriting ? 'write' : 'read'} lock on "${this.path}" by tid ${this.tid} (${this.comment})`); + this.history.push({ path: this.path, forWriting: this.forWriting, action: 'moving to parent' }); + this.waitingFor = null; + this.path = parentPath; + // this.comment = `moved to parent: ${this.comment}`; + return this; + } + else { + // Unlock without processing the queue + DEBUG_MODE && console.error(`moveToParent QUEUED for ${this.forWriting ? 
'write' : 'read'} lock on "${this.path}" by tid ${this.tid} (${this.comment})`); + this.locker.unlock(this, `moveLockToParent: ${this.comment}`, false); + // Lock parent node with priority to jump the queue + const newLock = await this.locker.lock(parentPath, this.tid, this.forWriting, this.comment, { withPriority: true }); + DEBUG_MODE && console.error(`QUEUED moveToParent ALLOWED for ${this.forWriting ? 'write' : 'read'} lock on "${this.path}" by tid ${this.tid} (${this.comment})`); + newLock.history = this.history; + newLock.history.push({ path: this.path, forWriting: this.forWriting, action: 'moving to parent through queue (priority)' }); + return newLock; + } + } + + // /** + // * Not used? Will be removed + // */ + // moveTo(otherPath: string, forWriting: boolean) { + // //const check = _allowLock(otherPath, this.tid, forWriting); + // const allowed = this.locker.isAllowed(otherPath, this.tid, forWriting); + // if (allowed) { + // this.history.push({ path: this.path, forWriting: this.forWriting, action: `moving to "${otherPath}"` }); + // this.waitingFor = null; + // this.path = otherPath; + // this.forWriting = forWriting; + // // this.comment = `moved to "/${otherPath}": ${this.comment}`; + // return Promise.resolve(this); + // } + // else { + // // Unlock without processing the queue + // this.locker.unlock(this, `moving to "/${otherPath}": ${this.comment}`, false); + + // // Lock other node with priority to jump the queue + // return this.locker.lock(otherPath, this.tid, forWriting, this.comment, { withPriority: true }) // `moved to "/${otherPath}" (queued): ${this.comment}` + // .then(newLock => { + // newLock.history = this.history; + // newLock.history.push({ path: this.path, forWriting: this.forWriting, action: `moved to "${otherPath}" through queue` }); + // return newLock; + // }); + // } + // } + +} diff --git a/src/query.ts b/src/query.ts index 7cf4068..f60652b 100644 --- a/src/query.ts +++ b/src/query.ts @@ -1,18 +1,10 @@ -import { AceBaseBase, ID, PathInfo } from 'acebase-core'; -import type { Api, EventSubscriptionCallback, Query, QueryOptions, QueryFilter, QueryOrder } from 'acebase-core'; +import { ID, PathInfo } from 'acebase-core'; +import type { EventSubscriptionCallback, Query, QueryOptions, QueryFilter, QueryOrder } from 'acebase-core'; import { VALUE_TYPES } from './node-value-types'; import { NodeNotFoundError } from './node-errors'; -import { Storage } from './storage'; import { DataIndex, FullTextIndex, IndexQueryResults } from './data-index'; import { AsyncTaskBatch } from './async-task-batch'; - -/** - * TODO: import once LocalApi has been ported to TypeScript - */ -type LocalApi = Api & { - db: AceBaseBase; - storage: Storage; -} +import type { LocalApi } from './api-local'; // eslint-disable-next-line @typescript-eslint/no-empty-function const noop = () => {}; @@ -86,7 +78,7 @@ export async function executeQuery( const val = node.value; if (val === null) { // Record was deleted, but index isn't updated yet? 
- api.storage.debug.warn(`Indexed result "/${path}" does not have a record!`); + api.logger.warn(`Indexed result "/${path}" does not have a record!`); // TODO: let index rebuild return; } @@ -310,7 +302,7 @@ export async function executeQuery( // const usingIndexes = ourFilters.map(filter => filter.index).filter(index => index); const indexDescriptions = usingIndexes.map(index => index.description).join(', '); - usingIndexes.length > 0 && api.storage.debug.log(`Using indexes for query: ${indexDescriptions}`); + usingIndexes.length > 0 && api.logger.info(`Using indexes for query: ${indexDescriptions}`); // Filters that should run on all nodes after indexed results: const tableScanFilters = queryFilters.filter(filter => !filter.index); @@ -388,7 +380,7 @@ export async function executeQuery( }; if (queryFilters.length === 0 && query.take === 0) { - api.storage.debug.warn(`Filterless queries must use .take to limit the results. Defaulting to 100 for query on path "${path}"`); + api.logger.warn(`Filterless queries must use .take to limit the results. Defaulting to 100 for query on path "${path}"`); query.take = 100; } @@ -396,7 +388,7 @@ export async function executeQuery( const sortIndex = querySort[0].index; const ascending = query.take < 0 ? !querySort[0].ascending : querySort[0].ascending; if (queryFilters.length === 0 && querySort.slice(1).every(s => sortIndex.allMetadataKeys.includes(s.key))) { - api.storage.debug.log(`Using index for sorting: ${sortIndex.description}`); + api.logger.info(`Using index for sorting: ${sortIndex.description}`); const metadataSort = querySort.slice(1).map(s => { s.index = sortIndex; // Assign index to skip later processing of this sort operation return { key: s.key, ascending: s.ascending }; @@ -547,10 +539,10 @@ export async function executeQuery( const batch = { promises: [] as Promise<any>[], - add(promise: Promise<any>) { + async add(promise: Promise<any>) { this.promises.push(promise); if (this.promises.length >= 1000) { - return Promise.all(this.promises.splice(0)).then(_ => undefined); + await Promise.all(this.promises.splice(0)); } }, }; @@ -616,7 +608,7 @@ export async function executeQuery( catch (reason) { // No record?
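The batch helper changed above bounds memory during large result loads: callers await add(), which flushes once 1000 promises have accumulated, back-pressuring the caller instead of buffering unbounded work. A standalone sketch of the same pattern (a hypothetical helper, not part of this patch):

    // Collect promises and flush with Promise.all once a threshold is hit
    async function addBatched<T>(pending: Promise<T>[], next: Promise<T>, limit = 1000): Promise<void> {
        pending.push(next);
        if (pending.length >= limit) {
            await Promise.all(pending.splice(0)); // empties `pending` and waits for the batch
        }
    }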
if (!(reason instanceof NodeNotFoundError)) { - api.storage.debug.warn(`Error getting child stream: ${reason}`); + api.logger.warn(`Error getting child stream: ${reason}`); } return []; } diff --git a/src/storage/binary/index.ts b/src/storage/binary/index.ts index b369e91..9f808e3 100644 --- a/src/storage/binary/index.ts +++ b/src/storage/binary/index.ts @@ -14,6 +14,7 @@ import { Uint8ArrayBuilder } from '../../binary'; import { IAceBaseIPCLock } from '../../ipc/ipc'; import { BinaryBPlusTreeTransactionOperation } from '../../btree/binary-tree-transaction-operation'; import { NodeLock } from '../../node-lock'; +import { LoggerPlugin } from 'acebase-core'; const { concatTypedArrays, bytesToNumber, bytesToBigint, numberToBytes, bigintToBytes, encodeString, decodeString, cloneObject } = Utils; const REMOVED_CHILD_DATA_IMPLEMENTED = false; // not used yet - allows marking of deleted children without having to rewrite the whole node @@ -144,7 +145,7 @@ export class AceBaseStorage extends Storage { if (this.type === 'data' && settings.transactions.log === true) { // Get/create storage for mutations logging const txSettings = new AceBaseStorageSettings({ type: 'transaction', path: settings.path, removeVoidProperties: true, transactions: settings.transactions, ipc: settings.ipc }); - this.txStorage = new AceBaseStorage(name, txSettings, { logLevel: 'error' }); + this.txStorage = new AceBaseStorage(name, txSettings, { logLevel: 'error', logColors: false, logger: this.logger }); } this.once('ready', () => { @@ -251,7 +252,7 @@ export class AceBaseStorage extends Storage { return -1; } if (/^[0-9]+$/.test(key)) { - return -1; //storage.debug.error(`Adding KIT key "${key}"?!!`); + return -1; //this.logger.error(`Adding KIT key "${key}"?!!`); } let index = KIT.keys.indexOf(key); if (index < 0) { @@ -320,7 +321,7 @@ export class AceBaseStorage extends Storage { load: async () => { const data = Buffer.alloc(KIT.length); const { bytesRead } = await pfs.read(this.file, data, 0, data.length, KIT.fileIndex).catch(err => { - this.debug.error('Error reading KIT from file: ', err); + this.logger.error('Error reading KIT from file: ', err); throw err; }); @@ -339,8 +340,8 @@ export class AceBaseStorage extends Storage { } KIT.bytesUsed = index; KIT.keys = keys; - this.debug.log(`KIT read, ${KIT.keys.length} keys indexed`.colorize(ColorStyle.bold)); - //storage.debug.log(keys); + this.logger.info(`KIT read, ${KIT.keys.length} keys indexed`.colorize(ColorStyle.bold)); + //this.logger.debug(keys); return keys; }, }; @@ -566,7 +567,7 @@ export class AceBaseStorage extends Storage { .sort((a, b) => a.end - a.start < b.end - b.start ? 
-1 : 1) .slice(0, n); const totalRecords = ranges.reduce((records, range) => records + (range.end - range.start), 0); - this.debug.warn(`FST grew too big to store in the database file, removing ${n} entries for ${totalRecords} records`); + this.logger.warn(`FST grew too big to store in the database file, removing ${n} entries for ${totalRecords} records`); ranges.forEach(range => { const i = FST.ranges.indexOf(range); FST.ranges.splice(i, 1); @@ -596,7 +597,7 @@ export class AceBaseStorage extends Storage { FST.bytesUsed = index; const promise = this.writeData(FST.fileIndex, data, 0, bytesToWrite).catch(err => { - this.debug.error('Error writing FST: ', err); + this.logger.error('Error writing FST: ', err); }); const writes = [promise]; if (updatedPageCount === true) { @@ -606,15 +607,15 @@ export class AceBaseStorage extends Storage { writes.push(promise); } await Promise.all(writes); - //this.debug.log(`FST saved, ${this.bytesUsed} bytes used for ${FST.ranges.length} ranges`); + //this.logger.debug(`FST saved, ${this.bytesUsed} bytes used for ${FST.ranges.length} ranges`); }, load: async () => { if (!this.ipc.isMaster) { return []; } const data = Buffer.alloc(FST.length); const { bytesRead } = await pfs.read(this.file, data, 0, data.length, this.FST.fileIndex).catch(err => { - this.debug.error('Error reading FST from file'); - this.debug.error(err); + this.logger.error('Error reading FST from file'); + this.logger.error(err); throw err; }); // Interpret the read data @@ -636,7 +637,7 @@ export class AceBaseStorage extends Storage { FST.pages = allocatedPages; FST.bytesUsed = index; FST.ranges = ranges; - this.debug.log(`FST read, ${allocatedPages} pages allocated, ${freeRangeCount} free ranges`.colorize(ColorStyle.bold)); + this.logger.info(`FST read, ${allocatedPages} pages allocated, ${freeRangeCount} free ranges`.colorize(ColorStyle.bold)); return ranges; }, }; @@ -666,7 +667,7 @@ export class AceBaseStorage extends Storage { rootRecord.pageNr = address.pageNr; rootRecord.recordNr = address.recordNr; rootRecord.exists = true; - // this.debug.log(`Root record address updated to ${address.pageNr}, ${address.recordNr}`.colorize(ColorStyle.bold)); + // this.logger.debug(`Root record address updated to ${address.pageNr}, ${address.recordNr}`.colorize(ColorStyle.bold)); if (!fromIPC) { // Notify others @@ -679,7 +680,7 @@ export class AceBaseStorage extends Storage { view.setUint16(4, address.recordNr); const bytesWritten = await this.writeData(HEADER_INDEXES.ROOT_RECORD_ADDRESS, bytes, 0, bytes.length); - this.debug.log(`Root record address updated to ${address.pageNr}, ${address.recordNr}`.colorize(ColorStyle.bold)); + this.logger.info(`Root record address updated to ${address.pageNr}, ${address.recordNr}`.colorize(ColorStyle.bold)); } }, }; @@ -697,8 +698,8 @@ export class AceBaseStorage extends Storage { const openDatabaseFile = async (justCreated = false) => { const handleError = (err: any, txt: string) => { - this.debug.error(txt); - this.debug.error(err); + this.logger.error(txt); + this.logger.error(err); if (this.file) { pfs.close(this.file).catch(err => { // ... 
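The LoggerPlugin type imported at the top of this file is not defined in this patch; judging from the calls it receives here (trace, debug, info, warn, error, each taking console-style arguments), a minimal compatible object would look like the following. This is an assumed shape, not the acebase-core definition:

    // Console-backed stand-in matching the logger calls made throughout this patch
    const consoleLogger = {
        trace: (...args: unknown[]) => console.debug(...args),
        debug: (...args: unknown[]) => console.debug(...args),
        info: (...args: unknown[]) => console.info(...args),
        warn: (...args: unknown[]) => console.warn(...args),
        error: (...args: unknown[]) => console.error(...args),
    };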
@@ -799,12 +800,12 @@ export class AceBaseStorage extends Storage { if (this.settings.maxInlineValueSize === 0) { this.settings.maxInlineValueSize = 65536; } const intro = ColorStyle.dim; - this.debug.log(`Database "${name}" details:`.colorize(intro)); - this.debug.log('- Type: AceBase binary'.colorize(intro)); - this.debug.log(`- Record size: ${this.settings.recordSize} bytes`.colorize(intro)); - this.debug.log(`- Page size: ${this.settings.pageSize} records (${this.settings.pageSize * this.settings.recordSize} bytes)`.colorize(intro)); - this.debug.log(`- Max inline value size: ${this.settings.maxInlineValueSize} bytes`.colorize(intro)); - this.debug.log(`- Root record address: ${this.rootRecord.pageNr}, ${this.rootRecord.recordNr}`.colorize(intro)); + this.logger.info(`Database "${name}" details:`.colorize(intro)); + this.logger.info('- Type: AceBase binary'.colorize(intro)); + this.logger.info(`- Record size: ${this.settings.recordSize} bytes`.colorize(intro)); + this.logger.info(`- Page size: ${this.settings.pageSize} records (${this.settings.pageSize * this.settings.recordSize} bytes)`.colorize(intro)); + this.logger.info(`- Max inline value size: ${this.settings.maxInlineValueSize} bytes`.colorize(intro)); + this.logger.info(`- Root record address: ${this.rootRecord.pageNr}, ${this.rootRecord.recordNr}`.colorize(intro)); await this.KIT.load(); // Read Key Index Table await this.FST.load(); // Read Free Space Table @@ -897,9 +898,9 @@ export class AceBaseStorage extends Storage { this.ipc.once('exit', code => { // Close database file - this.debug.log(`Closing db ${this.ipc.dbname}`); + this.logger.info(`Closing db ${this.ipc.dbname}`); pfs.close(this.file).catch(err => { - this.debug.error('Could not close database:', err); + this.logger.error('Could not close database:', err); }); }); } @@ -926,7 +927,7 @@ export class AceBaseStorage extends Storage { length = buffer.byteLength; } const { bytesWritten } = await pfs.write(this.file, buffer as Buffer, offset, length, fileIndex).catch(err => { - this.debug.error('Error writing to file', err); + this.logger.error('Error writing to file', err); throw err; }); this.stats.writes++; @@ -963,8 +964,8 @@ export class AceBaseStorage extends Storage { return bytesRead; } catch (err) { - this.debug.error('Error reading record', buffer, offset, length, fileIndex); - this.debug.error(err); + this.logger.error('Error reading record', buffer, offset, length, fileIndex); + this.logger.error(err); throw err; } } @@ -1108,7 +1109,7 @@ export class AceBaseStorage extends Storage { finally { if (targetNodeInfo) { const msg = `Node at path "${targetPath}" is not broken: it is a(n) ${targetNodeInfo.valueTypeName} stored ${targetNodeInfo.address ? `@${targetNodeInfo.address.pageNr},${targetNodeInfo.address.recordNr}` : 'inline'}${targetNodeInfo.value ? 
` with value ${targetNodeInfo.value}` : ''}`; - this.debug.warn(msg); + this.logger.warn(msg); if (!options.ignoreIntact) { throw new Error(msg); } @@ -1132,7 +1133,7 @@ export class AceBaseStorage extends Storage { const removedValueIndicator = '[[removed]]'; const isArray = nodeInfo.valueType === VALUE_TYPES.ARRAY; if (isArray && !options.markAsRemoved) { - this.debug.warn(`Node at path "${path}" is an Array, cannot remove entry at index ${key}: marking it as "${removedValueIndicator}" instead`); + this.logger.warn(`Node at path "${path}" is an Array, cannot remove entry at index ${key}: marking it as "${removedValueIndicator}" instead`); options.markAsRemoved = true; } const nodeReader = new NodeReader(this, nodeInfo.address, lock, false); @@ -1200,13 +1201,13 @@ export class AceBaseStorage extends Storage { await this.FST.release(nodeReader.recordInfo.allocation.ranges); } catch (err) { - this.debug.error(`Could not release previously allocated ranges for "/${path}": ${err}`); + this.logger.error(`Could not release previously allocated ranges for "/${path}": ${err}`); } // throw new Error(`Node at path "/${path}" was not rewritten at the same location. Fix failed`); } } - this.debug.log(`Successfully fixed node at path "${targetPath}" by ${options.markAsRemoved ? `marking key "${key}" of parent node "${path}" as removed ("${removedValueIndicator}")` : `removing key "${key}" from parent node "${path}"`}`); + this.logger.info(`Successfully fixed node at path "${targetPath}" by ${options.markAsRemoved ? `marking key "${key}" of parent node "${path}" as removed ("${removedValueIndicator}")` : `removing key "${key}" from parent node "${path}"`}`); // Make sure cached address is removed. this.invalidateCache(false, targetPath, true); @@ -1221,7 +1222,7 @@ export class AceBaseStorage extends Storage { * @param path */ async repairNodeTree(path: string) { - this.debug.warn(`Starting node tree repair for path "/${path}"`); + this.logger.warn(`Starting node tree repair for path "/${path}"`); const tid = this.createTid(); let lock = await this.nodeLocker.lock(path, tid.toString(), true, 'repairNodeTree'); try { @@ -1251,7 +1252,7 @@ export class AceBaseStorage extends Storage { } const tree = new BinaryBPlusTree({ readFn: nodeReader._treeDataReader.bind(nodeReader), - debug: this.debug, + logger: this.logger, id: `path:${path}`, }); const newRecordInfo = await _rebuildKeyTree(tree, nodeReader, { repairMode: true }); @@ -1271,14 +1272,14 @@ export class AceBaseStorage extends Storage { if (deallocate.totalAddresses > 0) { // Release record allocation marked for deallocation deallocate.normalize(); - this.debug.verbose(`Releasing ${deallocate.totalAddresses} addresses (${deallocate.ranges.length} ranges) previously used by node "/${path}" and/or descendants: ${deallocate}`.colorize(ColorStyle.grey)); + this.logger.trace(`Releasing ${deallocate.totalAddresses} addresses (${deallocate.ranges.length} ranges) previously used by node "/${path}" and/or descendants: ${deallocate}`.colorize(ColorStyle.grey)); await this.FST.release(deallocate.ranges); } } - this.debug.warn(`Successfully repaired node tree for path "/${path}"`); + this.logger.warn(`Successfully repaired node tree for path "/${path}"`); } catch (err) { - this.debug.error(`Failed to repair node tree for path "/${path}": ${err.stack}`); + this.logger.error(`Failed to repair node tree for path "/${path}": ${err.stack}`); } finally { lock.release(); @@ -1354,7 +1355,7 @@ export class AceBaseStorage extends Storage { await 
this._updateNode('history', { [cursor]: item }, { merge: true, _internal: true }); } catch(err) { - this.debug.error('Failed to add to transaction log: ', err); + this.logger.error('Failed to add to transaction log: ', err); } }; @@ -1956,7 +1957,7 @@ export class AceBaseStorage extends Storage { } catch(err) { if (!(err instanceof NodeNotFoundError)) { - this.debug.error(`Error getting children: ${err.stack}`); + this.logger.error(`Error getting children: ${err.stack}`); } throw err; } @@ -2008,7 +2009,7 @@ export class AceBaseStorage extends Storage { // TODO: release acebase-cli with ability to do that } else { - this.debug.error('DEBUG THIS: getNode error:', err); + this.logger.error('DEBUG THIS: getNode error:', err); } throw err; } @@ -2152,7 +2153,7 @@ export class AceBaseStorage extends Storage { return childInfo; } catch(err) { - this.debug.error('DEBUG THIS: getNodeInfo error', err); + this.logger.error('DEBUG THIS: getNodeInfo error', err); throw err; } finally { @@ -2268,7 +2269,7 @@ export class AceBaseStorage extends Storage { context: null, }, ): Promise { - // this.debug.log(`Update request for node "/${path}"`); + // this.logger.debug(`Update request for node "/${path}"`); const tid = options.tid || this.createTid(); // ID.generate(); const pathInfo = PathInfo.get(path); @@ -2356,7 +2357,7 @@ export class AceBaseStorage extends Storage { if (deallocate && deallocate.totalAddresses > 0) { // Release record allocation marked for deallocation deallocate.normalize(); - this.debug.verbose(`Releasing ${deallocate.totalAddresses} addresses (${deallocate.ranges.length} ranges) previously used by node "/${path}" and/or descendants: ${deallocate}`.colorize(ColorStyle.grey)); + this.logger.trace(`Releasing ${deallocate.totalAddresses} addresses (${deallocate.ranges.length} ranges) previously used by node "/${path}" and/or descendants: ${deallocate}`.colorize(ColorStyle.grey)); // // TEMP check, remove loop when all is good: // storage.nodeCache._cache.forEach((entry, path) => { @@ -2378,10 +2379,10 @@ export class AceBaseStorage extends Storage { } // catch(err) { // // if (err instanceof SchemaValidationError) { - // // !recursive && this.debug.error(`Schema validation error ${options.merge ? 'updating' : 'setting'} path "${path}": `, err.reason); + // // !recursive && this.logger.error(`Schema validation error ${options.merge ? 'updating' : 'setting'} path "${path}": `, err.reason); // // } // if (!(err instanceof SchemaValidationError)) { - // this.debug.error(`Node.update ERROR: `, err.message); + // this.logger.error(`Node.update ERROR: `, err.message); // } // throw err; //return false; // } @@ -2574,6 +2575,7 @@ class CorruptRecordError extends Error { } class NodeReader { recordInfo: RecordInfo = null; + logger: LoggerPlugin; constructor( public storage: AceBaseStorage, @@ -2585,6 +2587,7 @@ class NodeReader { if (!(address instanceof BinaryNodeAddress)) { throw new TypeError('address argument must be a BinaryNodeAddress'); } + this.logger = storage.logger; const key = `${address.pageNr},${address.recordNr}`; if (key in stack) { @@ -2613,7 +2616,7 @@ class NodeReader { const parentAddress = stack[Object.keys(stack).find(key => stack[key].path === pathInfo.parentPath)]; // const error = new CorruptRecordError(stack.slice(-1)[0], pathInfo.key, `Recursive read of record address ${clash.pageNr},${clash.recordNr}. 
Record "/${pathInfo.parentPath}" is corrupt: property "${pathInfo.key}" refers to the address belonging to path "/${clash.path}"`); const error = new CorruptRecordError(parentAddress, pathInfo.key, `CORRUPT RECORD: key "${pathInfo.key}" in "/${parentAddress.path}" (@${parentAddress.pageNr},${parentAddress.recordNr}) refers to address @${clash.pageNr},${clash.recordNr} which was already used to read "/${clash.path}". Recursive or repeated reading has been prevented.`); - this.storage.debug.error(error.message); + this.logger.error(error.message); throw error; } stack[key] = address; @@ -2741,7 +2744,7 @@ class NodeReader { await this.readHeader(); } - this.storage.debug.log(`Reading node "/${this.address.path}" from address ${this.address.pageNr},${this.address.recordNr}`.colorize(ColorStyle.magenta)); + this.logger.info(`Reading node "/${this.address.path}" from address ${this.address.pageNr},${this.address.recordNr}`.colorize(ColorStyle.magenta)); switch (this.recordInfo.valueType) { case VALUE_TYPES.STRING: { @@ -2850,18 +2853,18 @@ class NodeReader { // NodeCache.update(child.address, child.valueType); // Cache its address // } // // else if (!cachedAddress.equals(child.address)) { - // // this.storage.debug.warn(`Using cached address to read child node "/${child.address.path}" from address ${cachedAddress.pageNr},${cachedAddress.recordNr} instead of (${child.address.pageNr},${child.address.recordNr})`.colorize(ColorStyle.magenta)); + // // this.logger.warn(`Using cached address to read child node "/${child.address.path}" from address ${cachedAddress.pageNr},${cachedAddress.recordNr} instead of (${child.address.pageNr},${child.address.recordNr})`.colorize(ColorStyle.magenta)); // // child.address = cachedAddress; // // } // } - // this.storage.debug.log(`Reading child node "/${child.address.path}" from ${child.address.pageNr},${child.address.recordNr}`.colorize(ColorStyle.magenta)); + // this.logger.debug(`Reading child node "/${child.address.path}" from ${child.address.pageNr},${child.address.recordNr}`.colorize(ColorStyle.magenta)); const reader = new NodeReader(this.storage, child.address, childLock, this.updateCache, this.stack); const val = await reader.getValue(childOptions); (obj as any)[isArray ? 
child.index : child.key] = val; } catch (reason) { - this.storage.debug.error('NodeReader.getValue:child error: ', reason); + this.logger.error('NodeReader.getValue:child error: ', reason); throw reason; } finally { @@ -2906,7 +2909,7 @@ class NodeReader { return obj; } catch (err) { - this.storage.debug.error(err); + this.logger.error(err); throw err; } } @@ -3003,7 +3006,7 @@ class NodeReader { if (isLastChunk) { proceed = false; } let index = 1; while (proceed) { - //this.storage.debug.log(address.path); + //this.logger.debug(address.path); const chunk = chunks[index]; let fileIndex = this.storage.getRecordFileIndex(chunk.pageNr, chunk.recordNr); let length = chunk.length * bytesPerRecord; @@ -3092,7 +3095,7 @@ class NodeReader { const createStreamFromBinaryTree = async () => { const tree = new BinaryBPlusTree({ readFn: this._treeDataReader.bind(this), - debug: this.storage.debug, + logger: this.storage.logger, id: `path:${this.address.path}`, // Prefix to fix #168 }); @@ -3568,7 +3571,7 @@ class NodeReader { readFn: this._treeDataReader.bind(this), chunkSize: 1024 * 100, // 100KB reads/writes writeFn: this._treeDataWriter.bind(this), - debug: this.storage.debug, + logger: this.storage.logger, id: 'record@' + this.recordInfo.address.toString(), }); } @@ -3582,6 +3585,7 @@ async function _mergeNode(storage: AceBaseStorage, nodeInfo: BinaryNodeInfo, upd throw new TypeError('updates parameter must be an object'); } + const logger = storage.logger; let nodeReader = new NodeReader(storage, nodeInfo.address, lock, false); const affectedKeys: Array = Object.keys(updates); const changes = new NodeChangeTracker(nodeInfo.path); @@ -3670,7 +3674,7 @@ async function _mergeNode(storage: AceBaseStorage, nodeInfo: BinaryNodeInfo, upd }); if (changes.all.length === 0) { - storage.debug.log(`No effective changes to update node "/${nodeInfo.path}" with`.colorize(ColorStyle.yellow)); + logger.info(`No effective changes to update node "/${nodeInfo.path}" with`.colorize(ColorStyle.yellow)); return done(nodeReader.recordInfo); } @@ -3704,7 +3708,7 @@ async function _mergeNode(storage: AceBaseStorage, nodeInfo: BinaryNodeInfo, upd } const maxDebugItems = 10; - storage.debug.log(`Node "/${nodeInfo.path}" being updated:${isInternalUpdate ? ' (internal)' : ''} adding ${changes.inserts.length} keys (${changes.inserts.slice(0, maxDebugItems).map(ch => `"${ch.keyOrIndex}"`).join(',')}${changes.inserts.length > maxDebugItems ? '...' : ''}), updating ${changes.updates.length} keys (${changes.updates.slice(0, maxDebugItems).map(ch => `"${ch.keyOrIndex}"`).join(',')}${changes.updates.length > maxDebugItems ? '...' : ''}), removing ${changes.deletes.length} keys (${changes.deletes.slice(0, maxDebugItems).map(ch => `"${ch.keyOrIndex}"`).join(',')}${changes.deletes.length > maxDebugItems ? '...' : ''})`.colorize(ColorStyle.cyan)); + logger.info(`Node "/${nodeInfo.path}" being updated:${isInternalUpdate ? ' (internal)' : ''} adding ${changes.inserts.length} keys (${changes.inserts.slice(0, maxDebugItems).map(ch => `"${ch.keyOrIndex}"`).join(',')}${changes.inserts.length > maxDebugItems ? '...' : ''}), updating ${changes.updates.length} keys (${changes.updates.slice(0, maxDebugItems).map(ch => `"${ch.keyOrIndex}"`).join(',')}${changes.updates.length > maxDebugItems ? '...' : ''}), removing ${changes.deletes.length} keys (${changes.deletes.slice(0, maxDebugItems).map(ch => `"${ch.keyOrIndex}"`).join(',')}${changes.deletes.length > maxDebugItems ? '...' 
: ''})`.colorize(ColorStyle.cyan)); if (!isInternalUpdate) { // Update cache (remove entries or mark them as deleted) // const pathInfo = PathInfo.get(nodeInfo.path); @@ -3805,14 +3809,14 @@ async function _mergeNode(storage: AceBaseStorage, nodeInfo: BinaryNodeInfo, upd opCountsLog.push(operations.length); try { await tree.transaction(operations); - storage.debug.log(`Updated tree for node "/${nodeInfo.path}"`.colorize(ColorStyle.green)); + logger.info(`Updated tree for node "/${nodeInfo.path}"`.colorize(ColorStyle.green)); return recordInfo; // We do our own cleanup, return current allocation which is always the same as nodeReader.recordInfo } catch (err) { - storage.debug.log(`Could not update tree for "/${nodeInfo.path}"${retry > 0 ? ` (retry ${retry})` : ''}: ${err.message}, ${err.codes}`.colorize(ColorStyle.yellow)); + logger.info(`Could not update tree for "/${nodeInfo.path}"${retry > 0 ? ` (retry ${retry})` : ''}: ${err.message}, ${err.codes}`.colorize(ColorStyle.yellow)); if (err.hasErrorCode && err.hasErrorCode('tree-full-no-autogrow')) { - storage.debug.verbose('Tree needs more space'); + logger.trace('Tree needs more space'); const growBytes = Math.ceil(tree.info.byteLength * 0.1); // grow 10% const bytesRequired = tree.info.byteLength + growBytes; @@ -3852,7 +3856,7 @@ async function _mergeNode(storage: AceBaseStorage, nodeInfo: BinaryNodeInfo, upd } else { // Failed to update the binary data, we need to rebuild the tree - storage.debug.verbose(`B+Tree for path ${nodeInfo.path} needs rebuild`); + logger.trace(`B+Tree for path ${nodeInfo.path} needs rebuild`); fixHistory.push({ err, fix: 'rebuild' }); recordInfo = await _rebuildKeyTree(tree, nodeReader, { reserveSpaceForNewEntries: changes.inserts.length - changes.deletes.length }); } @@ -3870,7 +3874,7 @@ async function _mergeNode(storage: AceBaseStorage, nodeInfo: BinaryNodeInfo, upd readFn: nodeReader._treeDataReader.bind(nodeReader), chunkSize: 1024 * 100, // 100KB reads/writes writeFn: nodeReader._treeDataWriter.bind(nodeReader), - debug: storage.debug, + logger: storage.logger, id: 'record@' + nodeReader.recordInfo.address.toString(), }); @@ -3930,7 +3934,7 @@ async function _mergeNode(storage: AceBaseStorage, nodeInfo: BinaryNodeInfo, upd * Creates or overwrites a node */ async function _createNode(storage: AceBaseStorage, nodeInfo: BinaryNodeInfo, newValue: any, lock: IAceBaseIPCLock, invalidateCache = true) { - storage.debug.log(`Node "/${nodeInfo.path}" is being ${nodeInfo.exists ? 'overwritten' : 'created'}`.colorize(ColorStyle.cyan)); + storage.logger.info(`Node "/${nodeInfo.path}" is being ${nodeInfo.exists ? 
'overwritten' : 'created'}`.colorize(ColorStyle.cyan)); let currentAllocation: NodeAllocation = null; if (nodeInfo.exists && nodeInfo.address) { @@ -4384,7 +4388,7 @@ async function _write( // value_page_nr := 4 byte number // value_record_nr := 2 byte number // - + const logger = storage.logger; const bytesPerRecord = storage.settings.recordSize; let headerByteLength = 0, totalBytes = 0, requiredRecords = 0, lastChunkSize = 0; @@ -4427,13 +4431,13 @@ async function _write( : await storage.FST.allocate(requiredRecords); let allocation = new NodeAllocation(ranges); - !useExistingAllocation && storage.debug.verbose(`Allocated ${allocation.totalAddresses} addresses for node "/${path}": ${allocation}`.colorize(ColorStyle.grey)); + !useExistingAllocation && logger.trace(`Allocated ${allocation.totalAddresses} addresses for node "/${path}": ${allocation}`.colorize(ColorStyle.grey)); calculateStorageNeeds(allocation.ranges.length); if (requiredRecords < allocation.totalAddresses) { const addresses = allocation.addresses; const deallocate = addresses.splice(requiredRecords); - storage.debug.verbose(`Requested ${deallocate.length} too many addresses to store node "/${path}", releasing them`.colorize(ColorStyle.grey)); + logger.trace(`Requested ${deallocate.length} too many addresses to store node "/${path}", releasing them`.colorize(ColorStyle.grey)); storage.FST.release(NodeAllocation.fromAdresses(deallocate).ranges); allocation = NodeAllocation.fromAdresses(addresses); calculateStorageNeeds(allocation.ranges.length); @@ -4517,7 +4521,7 @@ async function _write( const nodeInfo = new BinaryNodeInfo({ path, type, exists: true, address }); storage.updateCache(false, nodeInfo, true); // hasMoved? - storage.debug.log(`Node "/${address.path}" saved at address ${address.pageNr},${address.recordNr} - ${allocation.totalAddresses} addresses, ${bytesWritten} bytes written in ${chunks} chunk(s)`.colorize(ColorStyle.green)); + logger.info(`Node "/${address.path}" saved at address ${address.pageNr},${address.recordNr} - ${allocation.totalAddresses} addresses, ${bytesWritten} bytes written in ${chunks} chunk(s)`.colorize(ColorStyle.green)); // storage.logwrite({ address: address, allocation, chunks, bytesWritten }); let recordInfo; @@ -4542,13 +4546,14 @@ async function _write( } catch (reason) { // If any write failed, what do we do? - storage.debug.error(`Failed to write node "/${path}": ${reason}`); + logger.error(`Failed to write node "/${path}": ${reason}`); throw reason; } } async function _rebuildKeyTree(tree: BinaryBPlusTree, nodeReader: NodeReader, options: Parameters[1]) { const storage = nodeReader.storage; + const logger = storage.logger; const path = nodeReader.address.path; const tempFilepath = `${storage.settings.path}/${storage.name}.acebase/tree-${ID.generate()}.tmp`; let bytesWritten = 0; @@ -4582,7 +4587,7 @@ async function _rebuildKeyTree(tree: BinaryBPlusTree, nodeReader: NodeReader, op .then(() => pfs.rm(tempFilepath)) .catch(err => { // Error removing the file? 
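_rebuildKeyTree above streams the rebuilt tree through a temporary file and removes it afterwards, logging cleanup failures instead of throwing. A simplified sketch of that write-use-clean-up pattern (hypothetical names, assuming Node's fs/promises):

    import { writeFile, rm } from 'fs/promises';

    async function withTempFile(
        tempPath: string,
        data: Uint8Array,
        use: (path: string) => Promise<void>,
        logger: { error: (msg: string, err: unknown) => void },
    ): Promise<void> {
        await writeFile(tempPath, data);
        await use(tempPath);
        // Cleanup failure is reported but does not fail the operation
        await rm(tempPath).catch(err => logger.error(`Can't remove temp file ${tempPath}: `, err));
    }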
- storage.debug.error(`Can't remove temp rebuild file ${tempFilepath}: `, err); + logger.error(`Can't remove temp rebuild file ${tempFilepath}: `, err); }); return newRecordInfo; diff --git a/src/storage/context.ts b/src/storage/context.ts index 4a3aeee..7697c63 100644 --- a/src/storage/context.ts +++ b/src/storage/context.ts @@ -1,11 +1,11 @@ -import { DebugLogger } from 'acebase-core'; -import { Storage } from '.'; -import { DataIndex } from '../data-index'; -import { AceBaseIPCPeer } from '../ipc/ipc'; - -export interface IndexesContext { - storage: Storage, - debug: DebugLogger, - ipc: AceBaseIPCPeer, - indexes: DataIndex[], -} +import type { LoggerPlugin } from 'acebase-core'; +import type { Storage } from '.'; +import type { DataIndex } from '../data-index'; +import type { AceBaseIPCPeer } from '../ipc/ipc'; + +export interface IndexesContext { + storage: Storage, + logger: LoggerPlugin, + ipc: AceBaseIPCPeer, + indexes: DataIndex[], +} diff --git a/src/storage/create-index.ts b/src/storage/create-index.ts index 0c542ba..786ca6f 100644 --- a/src/storage/create-index.ts +++ b/src/storage/create-index.ts @@ -1,127 +1,127 @@ -import { ColorStyle } from 'acebase-core'; -import { DataIndex, ArrayIndex, FullTextIndex, GeoIndex } from '../data-index'; -import { pfs } from '../promise-fs'; -import { IndexesContext } from './context'; - -export interface CreateIndexOptions { - rebuild?: boolean; - - /** - * special index to create: 'array', 'fulltext' or 'geo' - */ - type?: 'normal' | 'array' | 'fulltext' | 'geo'; - - /** - * keys to include with the indexed values. Can be used to speed up results sorting and - * to quickly apply additional filters. - */ - include?: string[]; - - /** - * Specifies whether texts should be indexed using case sensitivity. Setting this to `true` - * will cause words with mixed casings (eg "word", "Word" and "WORD") to be indexed separately. - * Default is `false` - * @default false - */ - caseSensitive?: boolean; - - /** - * Specifies the default locale of indexed texts. Used to convert indexed strings - * to lowercase if `caseSensitive` is set to `true`. - * Should be a 2-character language code such as "en" for English and "nl" for Dutch, - * or an LCID string for country specific locales such as "en-us" for American English, - * "en-gb" for British English, etc - */ - textLocale?: string; - - /** - * Specifies a key in the source data that contains the locale to use - * instead of the default specified in `textLocale` - */ - textLocaleKey?: string; - - /** - * additional index-specific configuration settings - */ - config?: any -} - -/** -* Creates an index on specified path and key(s) -* @param path location of objects to be indexed. Eg: "users" to index all children of the "users" node; or "chats/*\/members" to index all members of all chats -* @param key for now - one key to index. 
Once our B+tree implementation supports nested trees, we can allow multiple fields -*/ -export async function createIndex( - context: IndexesContext, - path: string, - key: string, - options: CreateIndexOptions, -): Promise { - if (!context.storage.indexes.supported) { - throw new Error('Indexes are not supported in current environment because it requires Node.js fs'); - } - // path = path.replace(/\/\*$/, ""); // Remove optional trailing "/*" - const { ipc, debug, indexes, storage } = context; - - const rebuild = options && options.rebuild === true; - const indexType = (options && options.type) || 'normal'; - let includeKeys = (options && options.include) || []; - if (typeof includeKeys === 'string') { includeKeys = [includeKeys]; } - const existingIndex = indexes.find(index => - index.path === path && index.key === key && index.type === indexType - && index.includeKeys.length === includeKeys.length - && index.includeKeys.every((key, index) => includeKeys[index] === key), - ); - - if (existingIndex && options.config) { - // Additional index config params are not saved to index files, apply them to the in-memory index now - (existingIndex as any).config = options.config; - } - - if (existingIndex && rebuild !== true) { - debug.log(`Index on "/${path}/*/${key}" already exists`.colorize(ColorStyle.inverse)); - return existingIndex; - } - - if (!ipc.isMaster) { - // Pass create request to master - const result = await ipc.sendRequest({ type: 'index.create', path, key, options }); - if (result.ok) { - return storage.indexes.add(result.fileName); - } - throw new Error(result.reason); - } - - await pfs.mkdir(`${storage.settings.path}/${storage.name}.acebase`).catch(err => { - if (err.code !== 'EEXIST') { - throw err; - } - }); - - const index = existingIndex || (() => { - const { include, caseSensitive, textLocale, textLocaleKey } = options; - const indexOptions = { include, caseSensitive, textLocale, textLocaleKey }; - switch (indexType) { - case 'array': return new ArrayIndex(storage, path, key, { ...indexOptions }); - case 'fulltext': return new FullTextIndex(storage, path, key, { ...indexOptions, config: options.config }); - case 'geo': return new GeoIndex(storage, path, key, { ...indexOptions }); - default: return new DataIndex(storage, path, key, { ...indexOptions }); - } - })(); - if (!existingIndex) { - indexes.push(index); - } - try { - await index.build(); - } - catch(err) { - context.debug.error(`Index build on "/${path}/*/${key}" failed: ${err.message} (code: ${err.code})`.colorize(ColorStyle.red)); - if (!existingIndex) { - // Only remove index if we added it. Build may have failed because someone tried creating the index more than once, or rebuilding it while it was building... - indexes.splice(indexes.indexOf(index), 1); - } - throw err; - } - ipc.sendNotification({ type: 'index.created', fileName: index.fileName, path, key, options }); - return index; -} +import { ColorStyle } from 'acebase-core'; +import { DataIndex, ArrayIndex, FullTextIndex, GeoIndex } from '../data-index'; +import { pfs } from '../promise-fs'; +import { IndexesContext } from './context'; + +export interface CreateIndexOptions { + rebuild?: boolean; + + /** + * special index to create: 'array', 'fulltext' or 'geo' + */ + type?: 'normal' | 'array' | 'fulltext' | 'geo'; + + /** + * keys to include with the indexed values. Can be used to speed up results sorting and + * to quickly apply additional filters. + */ + include?: string[]; + + /** + * Specifies whether texts should be indexed using case sensitivity. 
Setting this to `true` + * will cause words with mixed casings (e.g. "word", "Word" and "WORD") to be indexed separately. + * Default is `false` + * @default false + */ + caseSensitive?: boolean; + + /** + * Specifies the default locale of indexed texts. Used to convert indexed strings + * to lowercase if `caseSensitive` is set to `false`. + * Should be a 2-character language code such as "en" for English and "nl" for Dutch, + * or an LCID string for country specific locales such as "en-us" for American English, + * "en-gb" for British English, etc. + */ + textLocale?: string; + + /** + * Specifies a key in the source data that contains the locale to use + * instead of the default specified in `textLocale` + */ + textLocaleKey?: string; + + /** + * additional index-specific configuration settings + */ + config?: any +} + +/** +* Creates an index on specified path and key(s) +* @param path location of objects to be indexed. E.g. "users" to index all children of the "users" node; or "chats/*\/members" to index all members of all chats +* @param key for now - one key to index. Once our B+tree implementation supports nested trees, we can allow multiple fields +*/ +export async function createIndex( + context: IndexesContext, + path: string, + key: string, + options: CreateIndexOptions, +): Promise<DataIndex> { + if (!context.storage.indexes.supported) { + throw new Error('Indexes are not supported in current environment because it requires Node.js fs'); + } + // path = path.replace(/\/\*$/, ""); // Remove optional trailing "/*" + const { ipc, logger, indexes, storage } = context; + + const rebuild = options && options.rebuild === true; + const indexType = (options && options.type) || 'normal'; + let includeKeys = (options && options.include) || []; + if (typeof includeKeys === 'string') { includeKeys = [includeKeys]; } + const existingIndex = indexes.find(index => + index.path === path && index.key === key && index.type === indexType + && index.includeKeys.length === includeKeys.length + && index.includeKeys.every((key, index) => includeKeys[index] === key), + ); + + if (existingIndex && options.config) { + // Additional index config params are not saved to index files, apply them to the in-memory index now + (existingIndex as any).config = options.config; + } + + if (existingIndex && rebuild !== true) { + logger.info(`Index on "/${path}/*/${key}" already exists`.colorize(ColorStyle.inverse)); + return existingIndex; + } + + if (!ipc.isMaster) { + // Pass create request to master + const result = await ipc.sendRequest({ type: 'index.create', path, key, options }); + if (result.ok) { + return storage.indexes.add(result.fileName); + } + throw new Error(result.reason); + } + + await pfs.mkdir(`${storage.settings.path}/${storage.name}.acebase`).catch(err => { + if (err.code !== 'EEXIST') { + throw err; + } + }); + + const index = existingIndex || (() => { + const { include, caseSensitive, textLocale, textLocaleKey } = options; + const indexOptions = { include, caseSensitive, textLocale, textLocaleKey }; + switch (indexType) { + case 'array': return new ArrayIndex(storage, path, key, { ...indexOptions }); + case 'fulltext': return new FullTextIndex(storage, path, key, { ...indexOptions, config: options.config }); + case 'geo': return new GeoIndex(storage, path, key, { ...indexOptions }); + default: return new DataIndex(storage, path, key, { ...indexOptions }); + } + })(); + if (!existingIndex) { + indexes.push(index); + } + try { + await index.build(); + } + catch(err) { + context.logger.error(`Index build on
"/${path}/*/${key}" failed: ${err.message} (code: ${err.code})`.colorize(ColorStyle.red)); + if (!existingIndex) { + // Only remove index if we added it. Build may have failed because someone tried creating the index more than once, or rebuilding it while it was building... + indexes.splice(indexes.indexOf(index), 1); + } + throw err; + } + ipc.sendNotification({ type: 'index.created', fileName: index.fileName, path, key, options }); + return index; +} diff --git a/src/storage/custom/index.ts b/src/storage/custom/index.ts index 917337a..9f03aa7 100644 --- a/src/storage/custom/index.ts +++ b/src/storage/custom/index.ts @@ -304,11 +304,11 @@ export class CustomStorage extends Storage { } private async _init() { - this.debug.log(`Database "${this.name}" details:`.colorize(ColorStyle.dim)); - this.debug.log(`- Type: CustomStorage`.colorize(ColorStyle.dim)); - this.debug.log(`- Path: ${this.settings.path}`.colorize(ColorStyle.dim)); - this.debug.log(`- Max inline value size: ${this.settings.maxInlineValueSize}`.colorize(ColorStyle.dim)); - this.debug.log(`- Autoremove undefined props: ${this.settings.removeVoidProperties}`.colorize(ColorStyle.dim)); + this.logger.info(`Database "${this.name}" details:`.colorize(ColorStyle.dim)); + this.logger.info(`- Type: CustomStorage`.colorize(ColorStyle.dim)); + this.logger.info(`- Path: ${this.settings.path}`.colorize(ColorStyle.dim)); + this.logger.info(`- Max inline value size: ${this.settings.maxInlineValueSize}`.colorize(ColorStyle.dim)); + this.logger.info(`- Autoremove undefined props: ${this.settings.removeVoidProperties}`.colorize(ColorStyle.dim)); // Create root node if it's not there yet await this._customImplementation.ready(); @@ -632,7 +632,7 @@ export class CustomStorage extends Storage { const isArray = mainNode.type === VALUE_TYPES.ARRAY; if (currentRow) { // update - this.debug.log(`Node "/${path}" is being ${options.merge ? 'updated' : 'overwritten'}`.colorize(ColorStyle.cyan)); + this.logger.info(`Node "/${path}" is being ${options.merge ? 
'updated' : 'overwritten'}`.colorize(ColorStyle.cyan)); // If existing is an array or object, we have to find out which children are affected if (currentIsObjectOrArray || newIsObjectOrArray) { @@ -745,7 +745,7 @@ export class CustomStorage extends Storage { else { // Current node does not exist, create it and any child nodes // write all child nodes that must be stored in their own record - this.debug.log(`Node "/${path}" is being created`.colorize(ColorStyle.cyan)); + this.logger.info(`Node "/${path}" is being created`.colorize(ColorStyle.cyan)); if (isArray) { // Check if the array is "intact" (all entries have an index from 0 to the end with no gaps) @@ -786,7 +786,7 @@ export class CustomStorage extends Storage { */ private async _deleteNode(path: string, options: { transaction: CustomStorageTransaction }) { const pathInfo = PathInfo.get(path); - this.debug.log(`Node "/${path}" is being deleted`.colorize(ColorStyle.cyan)); + this.logger.info(`Node "/${path}" is being deleted`.colorize(ColorStyle.cyan)); const deletePaths = [path]; let checkExecuted = false; @@ -808,7 +808,7 @@ export class CustomStorage extends Storage { const transaction = options.transaction; await transaction.descendantsOf(path, { metadata: false, value: false }, includeDescendantCheck, addDescendant); - this.debug.log(`Nodes ${deletePaths.map(p => `"/${p}"`).join(',')} are being deleted`.colorize(ColorStyle.cyan)); + this.logger.info(`Nodes ${deletePaths.map(p => `"/${p}"`).join(',')} are being deleted`.colorize(ColorStyle.cyan)); return transaction.removeMultiple(deletePaths); } @@ -1091,7 +1091,7 @@ export class CustomStorage extends Storage { await transaction.descendantsOf(path, { metadata: true, value: true }, includeDescendantCheck, addDescendant); - this.debug.log(`Read node "/${path}" and ${filtered ? '(filtered) ' : ''}descendants from ${descRows.length + 1} records`.colorize(ColorStyle.magenta)); + this.logger.info(`Read node "/${path}" and ${filtered ? '(filtered) ' : ''}descendants from ${descRows.length + 1} records`.colorize(ColorStyle.magenta)); const result = targetNode; @@ -1143,7 +1143,7 @@ export class CustomStorage extends Storage { const mergePossible = typeof parent[key] === typeof nodeValue && [VALUE_TYPES.OBJECT, VALUE_TYPES.ARRAY].includes(nodeType); if (!mergePossible) { // Ignore the value in the child record, see issue #20: "Assertion failed: Merging child values can only be done if existing and current values are both an array or object" - this.debug.error(`The value stored in node "${otherNode.path}" cannot be merged with the parent node, value will be ignored. This error should disappear once the target node value is updated. See issue #20 for more information`, { path, parent, key, nodeType, nodeValue }); + this.logger.error(`The value stored in node "${otherNode.path}" cannot be merged with the parent node, value will be ignored. This error should disappear once the target node value is updated. 
See issue #20 for more information`, { path, parent, key, nodeType, nodeValue }); } else { Object.keys(nodeValue).forEach(childKey => { diff --git a/src/storage/index.ts b/src/storage/index.ts index 996e891..f460312 100644 --- a/src/storage/index.ts +++ b/src/storage/index.ts @@ -1,4 +1,4 @@ -import { Utils, DebugLogger, PathInfo, ID, PathReference, ascii85, SimpleEventEmitter, SchemaDefinition, DataRetrievalOptions, ISchemaCheckResult, LoggingLevel } from 'acebase-core'; +import { Utils, DebugLogger, PathInfo, ID, PathReference, ascii85, SimpleEventEmitter, SchemaDefinition, DataRetrievalOptions, ISchemaCheckResult, LoggingLevel, type LoggerPlugin } from 'acebase-core'; import { VALUE_TYPES } from '../node-value-types'; import { NodeRevisionError } from '../node-errors'; import { NodeInfo } from '../node-info'; @@ -30,7 +30,9 @@ export interface IWriteNodeResult { } /** - * Client config for usage with an acebase-ipc-server + * Client config for usage with an acebase-ipc-server. See https://github.com/appy-one/acebase-ipc-server + * Use this to horizontally scale database access: this allows multiple machines (or isolated instances of your app) to access and modify the + * database simultaneously. */ export interface IPCClientSettings { /** @@ -61,6 +63,28 @@ export interface IPCClientSettings { role: 'master' | 'worker'; } +/** + * IPC settings to automatically spawn (or connect to) a local service/daemon process. + * Use this to vertically scale database access: this allows multiple processes/threads on a single machine to access and modify the + * database simultaneously. + */ +export interface IPCSocketSettings { + /** + * Use 'socket' IPC service/daemon with additional options + */ + role: 'socket'; + + /** + * Max time in ms to keep started daemon running after the last client disconnects, defaults to 5000 (5s) + */ + maxIdleTime?: number; + + /** + * Path to code that returns an initialized logger plugin. Uses the built-in logger if not specified + */ + loggerPluginPath?: string; +} + export interface TransactionLogSettings { log?: boolean; maxAge?: number; @@ -112,7 +136,7 @@ export class StorageSettings { * IPC settings if you are using AceBase in pm2 or cloud-based clusters, or (NEW) `'socket'` to connect * to an automatically spawned IPC service ("daemon") on this machine */ - ipc?: IPCClientSettings | 'socket' | NetIPCServer; + ipc?: IPCClientSettings | 'socket' | IPCSocketSettings | NetIPCServer; /** * Settings for optional transaction logging @@ -133,6 +157,8 @@ export class StorageSettings { export interface StorageEnv { logLevel: LoggingLevel; + logColors: boolean; + logger?: LoggerPlugin; } export type SubscriptionCallback = (err: Error, path: string, newValue: any, oldValue: any, context: any) => void; @@ -141,7 +167,7 @@ export type InternalDataRetrievalOptions = DataRetrievalOptions & { tid?: strin export class Storage extends SimpleEventEmitter { - public debug: DebugLogger; + public logger: LoggerPlugin; public stats: any; public ipc: IPCPeer | RemoteIPCPeer | IPCSocketPeer; @@ -166,22 +192,30 @@ export class Storage extends SimpleEventEmitter { constructor(public name: string, public settings: StorageSettings, env: StorageEnv) { super(); - this.debug = new DebugLogger(env.logLevel, `[${name}${typeof settings.type === 'string' && settings.type !== 'data' ? `:${settings.type}` : ''}]`); // `├ ${name} ┤` // `[🧱${name}]` + this.logger = env.logger ?? new DebugLogger(env.logLevel, `[${name}${typeof settings.type === 'string' && settings.type !== 'data' ? 
`:${settings.type}` : ''}]`); // `├ ${name} ┤` // `[🧱${name}]` // Setup IPC to allow vertical scaling (multiple threads sharing locks and data) const ipcName = name + (typeof settings.type === 'string' ? `_${settings.type}` : ''); - if (settings.ipc === 'socket' || settings.ipc instanceof NetIPCServer) { - const ipcSettings = { ipcName, server: settings.ipc instanceof NetIPCServer ? settings.ipc : null }; + const ipcSocketSettings = typeof settings.ipc === 'object' && settings.ipc !== null && 'role' in settings.ipc && settings.ipc.role === 'socket' + ? settings.ipc + : null; + if (ipcSocketSettings || settings.ipc === 'socket' || settings.ipc instanceof NetIPCServer) { + const ipcSettings = { + ipcName, + server: settings.ipc instanceof NetIPCServer ? settings.ipc : null, + ...(ipcSocketSettings && { maxIdleTime: ipcSocketSettings.maxIdleTime, loggerPluginPath: ipcSocketSettings.loggerPluginPath }), + }; this.ipc = new IPCSocketPeer(this, ipcSettings); } else if (settings.ipc) { - if (typeof settings.ipc.port !== 'number') { + const ipcClientSettings = settings.ipc as IPCClientSettings; + if (typeof ipcClientSettings.port !== 'number') { throw new Error('IPC port number must be a number'); } - if (!['master','worker'].includes(settings.ipc.role)) { - throw new Error(`IPC client role must be either "master" or "worker", not "${settings.ipc.role}"`); + if (!['master','worker'].includes(ipcClientSettings.role)) { + throw new Error(`IPC client role must be either "master" or "worker", not "${ipcClientSettings.role}"`); } - const ipcSettings = Object.assign({ dbname: ipcName }, settings.ipc); + const ipcSettings = Object.assign({ dbname: ipcName }, ipcClientSettings); this.ipc = new RemoteIPCPeer(this, ipcSettings); } else { @@ -225,7 +259,7 @@ export class Storage extends SimpleEventEmitter { rebuild: false, }, ) => { - const context: IndexesContext = { storage: this, debug: this.debug, indexes: this._indexes, ipc: this.ipc }; + const context: IndexesContext = { storage: this, logger: this.logger, indexes: this._indexes, ipc: this.ipc }; return createIndex(context, path, key, options); }, @@ -298,7 +332,7 @@ export class Storage extends SimpleEventEmitter { if (err.code !== 'ENOENT') { // If the directory is not found, there are no file indexes. 
(probably not supported by used storage class) // Only complain if error is something else - this.debug.error(err); + this.logger.error(err); } } const promises = [] as Promise[]; @@ -334,7 +368,7 @@ export class Storage extends SimpleEventEmitter { return index; } catch(err) { - this.debug.error(err); + this.logger.error(err); return null; } }, @@ -361,7 +395,7 @@ export class Storage extends SimpleEventEmitter { close: async () => { // Close all indexes - const promises = this.indexes.list().map(index => index.close().catch(err => this.debug.error(err))); + const promises = this.indexes.list().map(index => index.close().catch(err => this.logger.error(err))); await Promise.all(promises); }, @@ -383,7 +417,7 @@ export class Storage extends SimpleEventEmitter { let pathSubs = this._eventSubscriptions[path]; if (!pathSubs) { pathSubs = this._eventSubscriptions[path] = []; } // if (pathSubs.findIndex(ps => ps.type === type && ps.callback === callback)) { - // storage.debug.warn(`Identical subscription of type ${type} on path "${path}" being added`); + // this.logger.warn(`Identical subscription of type ${type} on path "${path}" being added`); // } pathSubs.push({ created: Date.now(), type, callback }); this.emit('subscribe', { path, event: type, callback }); // Enables IPC peers to be notified @@ -754,7 +788,7 @@ export class Storage extends SimpleEventEmitter { valueOptions.include = keysFilter; } if (topEventPath === '' && typeof valueOptions.include === 'undefined') { - this.debug.warn('WARNING: One or more value event listeners on the root node are causing the entire database value to be read to facilitate change tracking. Using "value", "notify_value", "child_changed" and "notify_child_changed" events on the root node are a bad practice because of the significant performance impact. Use "mutated" or "mutations" events instead'); + this.logger.warn('WARNING: One or more value event listeners on the root node are causing the entire database value to be read to facilitate change tracking. Using "value", "notify_value", "child_changed" and "notify_child_changed" events on the root node are a bad practice because of the significant performance impact. Use "mutated" or "mutations" events instead'); } const node = await this.getNode(topEventPath, valueOptions); currentValue = node.value; @@ -1054,7 +1088,7 @@ export class Storage extends SimpleEventEmitter { const triggerAllEvents = () => { // Notify all event subscriptions, should be executed with a delay - // this.debug.verbose(`Triggering events caused by ${options && options.merge ? '(merge) ' : ''}write on "${path}":`, value); + // this.logger.debug(`Triggering events caused by ${options && options.merge ? '(merge) ' : ''}write on "${path}":`, value); eventSubscriptions .filter(sub => !['mutated','mutations','notify_mutated','notify_mutations'].includes(sub.type)) .map(sub => { @@ -1134,21 +1168,22 @@ export class Storage extends SimpleEventEmitter { const mutationEvents = eventSubscriptions.filter(sub => ['mutated', 'mutations', 'notify_mutated', 'notify_mutations'].includes(sub.type)); mutationEvents.forEach(sub => { // Get the target data this subscription is interested in - let currentPath = topEventPath; + const currentPath = topEventPath; // const trailPath = sub.eventPath.slice(currentPath.length).replace(/^\//, ''); // eventPath can contain vars and * ? 
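The IPCSocketSettings interface added earlier in this file is exercised by the commented-out example in src/test/tempdb.ts at the end of this patch. As a usage sketch (the values are illustrative):

    import { resolve } from 'path';

    // Spawn or connect to a local IPC daemon; options per the IPCSocketSettings interface above
    const ipc: IPCSocketSettings = {
        role: 'socket',
        maxIdleTime: 10_000, // keep the daemon alive 10s after the last client disconnects
        loggerPluginPath: resolve(__dirname, 'custom-logger.js'), // module exporting an initialized logger plugin
    };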
             const trailKeys = PathInfo.getPathKeys(sub.eventPath).slice(PathInfo.getPathKeys(currentPath).length); //PathInfo.getPathKeys(trailPath);
-
+
             const events = [] as Array<{
                 target: (string|number)[];
                 vars: Array<{ name: string; value: string|number }>;
                 oldValue: any;
-                newValue: any;
+                newValue: any;
             }>;
-            let oldValue = topEventData, newValue = newTopEventData;
+            const oldValue = topEventData;
+            const newValue = newTopEventData;
             const processNextTrailKey = (target: typeof trailKeys, currentTarget: typeof trailKeys, oldValue: any, newValue: any, vars: Array<{ name: string; value: string|number }>) => {
                 if (target.length === 0) {
                     // Add it
-                    return events.push({ target: currentTarget, oldValue, newValue, vars })
+                    return events.push({ target: currentTarget, oldValue, newValue, vars });
                 }
                 const subKey = target[0];
                 const keys = new Set();
@@ -1156,10 +1191,10 @@ export class Storage extends SimpleEventEmitter {
                 if (isWildcardKey) {
                     // Recursive for each key in oldValue and newValue
                     if (oldValue !== null && typeof oldValue === 'object') {
-                        Object.keys(oldValue).forEach(key => keys.add(key));
+                        Object.keys(oldValue).forEach(key => keys.add(key));
                     }
                     if (newValue !== null && typeof newValue === 'object') {
-                        Object.keys(newValue).forEach(key => keys.add(key));
+                        Object.keys(newValue).forEach(key => keys.add(key));
                     }
                 }
                 else {
@@ -1385,12 +1420,12 @@ export class Storage extends SimpleEventEmitter {
                 newValue = callback(node.value);
                 if (newValue instanceof Promise) {
                     newValue = await newValue.catch(err => {
-                        this.debug.error(`Error in transaction callback: ${err.message}`);
+                        this.logger.error(`Error in transaction callback: ${err.message}`);
                     });
                 }
             }
             catch (err) {
-                this.debug.error(`Error in transaction callback: ${err.message}`);
+                this.logger.error(`Error in transaction callback: ${err.message}`);
             }
             if (typeof newValue === 'undefined') {
                 // Callback did not return value. Cancel transaction
@@ -1526,7 +1561,7 @@ export class Storage extends SimpleEventEmitter {
                 return isMatch;
             }
             catch (err) {
-                this.debug.error(`Error matching on "${path}": `, err);
+                this.logger.error(`Error matching on "${path}": `, err);
                 throw err;
             }
         }; // checkNode
@@ -2249,7 +2284,7 @@ export class Storage extends SimpleEventEmitter {
         // Parse schema, add or update it
         const definition = new SchemaDefinition(schema, {
             warnOnly,
-            warnCallback: (message: string) => this.debug.warn(message),
+            warnCallback: (message: string) => this.logger.warn(message),
         });
         const item = this._schemas.find(s => s.path === path);
         if (item) {
diff --git a/src/storage/mssql/index.ts b/src/storage/mssql/index.ts
index 367eede..166ef3b 100644
--- a/src/storage/mssql/index.ts
+++ b/src/storage/mssql/index.ts
@@ -300,18 +300,18 @@ export class MSSQLStorage extends Storage {
             // Get root record info
             this.rootRecord = await this.getNodeInfo('');

-            this.debug.log(`Database "${this.name}" details:`.colorize(ColorStyle.dim));
-            this.debug.log(`- Type: MSSQL`.colorize(ColorStyle.dim));
-            this.debug.log(`- Server: ${this.settings.server}:${this.settings.port}`.colorize(ColorStyle.dim));
-            this.debug.log(`- Database: ${this.settings.database}`.colorize(ColorStyle.dim));
-            this.debug.log(`- Max inline value size: ${this.settings.maxInlineValueSize}`.colorize(ColorStyle.dim));
+            this.logger.info(`Database "${this.name}" details:`.colorize(ColorStyle.dim));
+            this.logger.info(`- Type: MSSQL`.colorize(ColorStyle.dim));
+            this.logger.info(`- Server: ${this.settings.server}:${this.settings.port}`.colorize(ColorStyle.dim));
+            this.logger.info(`- Database: ${this.settings.database}`.colorize(ColorStyle.dim));
+            this.logger.info(`- Max inline value size: ${this.settings.maxInlineValueSize}`.colorize(ColorStyle.dim));

             // Load indexes
             await this.indexes.load();

             this.emit('ready');
         }
         catch (err) {
-            this.debug.error(`Error initializing MSSQL database: ${err.message}`);
+            this.logger.error(`Error initializing MSSQL database: ${err.message}`);
             this.emit('error', err);
         }
     }
@@ -637,7 +637,7 @@ export class MSSQLStorage extends Storage {
         // Insert or update node
         if (currentRow) {
             // update
-            this.debug.log(`Node "/${path}" is being ${options.merge ? 'updated' : 'overwritten'}`.colorize(ColorStyle.cyan));
+            this.logger.info(`Node "/${path}" is being ${options.merge ? 'updated' : 'overwritten'}`.colorize(ColorStyle.cyan));

             const updateMainNode = () => {
                 const sql = `UPDATE nodes SET type = @type, text_value = @text_value, binary_value = @binary_value, json_value = @json_value, modified = @modified, revision_nr = revision_nr + 1, revision = @revision
@@ -720,7 +720,7 @@ export class MSSQLStorage extends Storage {
         else {
             // Current node does not exist, create it and any child nodes
             // write all child nodes that must be stored in their own record
-            this.debug.log(`Node "/${path}" is being created`.colorize(ColorStyle.cyan));
+            this.logger.info(`Node "/${path}" is being created`.colorize(ColorStyle.cyan));

             const childCreatePromises = Object.keys(childNodeValues).map(key => {
                 const childPath = PathInfo.getChildPath(path, key);
@@ -957,7 +957,7 @@ export class MSSQLStorage extends Storage {
             return result;
         }

-        this.debug.log(`Read node "/${path}" and ${filtered ? '(filtered) ' : ''}children from ${rows.length} records`.colorize(ColorStyle.magenta));
+        this.logger.info(`Read node "/${path}" and ${filtered ? '(filtered) ' : ''}children from ${rows.length} records`.colorize(ColorStyle.magenta));

         const targetPathKeys = PathInfo.getPathKeys(path);
         const targetRow = rows.find(row => row.path === path);
diff --git a/src/storage/sqlite/index.ts b/src/storage/sqlite/index.ts
index 1901c81..15e142a 100644
--- a/src/storage/sqlite/index.ts
+++ b/src/storage/sqlite/index.ts
@@ -317,9 +317,9 @@ export class SQLiteStorage extends Storage {
             // Get root record info
             this.rootRecord = await this.getNodeInfo('');

-            this.debug.log(`Database "${this.name}" details:`.colorize(ColorStyle.dim));
-            this.debug.log(`- Type: SQLite`.colorize(ColorStyle.dim));
-            this.debug.log(`- Max inline value size: ${this.settings.maxInlineValueSize}`.colorize(ColorStyle.dim));
+            this.logger.info(`Database "${this.name}" details:`.colorize(ColorStyle.dim));
+            this.logger.info(`- Type: SQLite`.colorize(ColorStyle.dim));
+            this.logger.info(`- Max inline value size: ${this.settings.maxInlineValueSize}`.colorize(ColorStyle.dim));

             // Load indexes
             await this.indexes.load();
@@ -523,7 +523,7 @@ export class SQLiteStorage extends Storage {
         // Insert or update node
         if (currentRow) {
             // update
-            this.debug.log(`Node "/${path}" is being ${options.merge ? 'updated' : 'overwritten'}`.colorize(ColorStyle.cyan));
+            this.logger.info(`Node "/${path}" is being ${options.merge ? 'updated' : 'overwritten'}`.colorize(ColorStyle.cyan));

             const updateMainNode = () => {
                 const sql = `UPDATE nodes SET type = $type, text_value = $text_value, binary_value = $binary_value, json_value = $json_value, modified = $modified, revision_nr = revision_nr + 1, revision = $revision
@@ -630,7 +630,7 @@ export class SQLiteStorage extends Storage {
         else {
             // Current node does not exist, create it and any child nodes
             // write all child nodes that must be stored in their own record
-            this.debug.log(`Node "/${path}" is being created`.colorize(ColorStyle.cyan));
+            this.logger.info(`Node "/${path}" is being created`.colorize(ColorStyle.cyan));

             const childCreatePromises = Object.keys(childNodeValues).map(async key => {
                 const childPath = PathInfo.getChildPath(path, key);
@@ -877,7 +877,7 @@ export class SQLiteStorage extends Storage {
             return result;
         }

-        this.debug.log(`Read node "/${path}" and ${filtered ? '(filtered) ' : ''}children from ${childRows.length} records`.colorize(ColorStyle.magenta));
+        this.logger.info(`Read node "/${path}" and ${filtered ? '(filtered) ' : ''}children from ${childRows.length} records`.colorize(ColorStyle.magenta));

         const targetPathKeys = PathInfo.getPathKeys(path);
         const targetRow = childRows.find(row => row.path === path);
diff --git a/src/test/custom-logger.ts b/src/test/custom-logger.ts
new file mode 100644
index 0000000..600b510
--- /dev/null
+++ b/src/test/custom-logger.ts
@@ -0,0 +1,3 @@
+import Pino from 'pino';
+const logger = Pino({ level: 'trace' });
+export default logger;
diff --git a/src/test/tempdb.ts b/src/test/tempdb.ts
index c263fa1..49a6c2b 100644
--- a/src/test/tempdb.ts
+++ b/src/test/tempdb.ts
@@ -1,5 +1,7 @@
 import { AceBase, ID, AceBaseLocalSettings } from '..';
 import { readdir, rm, rmdir } from 'fs/promises';
+// import { resolve as resolvePath } from 'path';
+import customLogger from './custom-logger';

 export async function createTempDB(enable: { transactionLogging?: boolean; logLevel?: 'verbose'|'log'|'warn'|'error'; config?: (options: any) => void } = {}) {
     // Create temp db
@@ -11,7 +13,10 @@ export async function createTempDB(enable: { transactionLogging?: boolean; logLe
     if (typeof enable.config === 'function') {
         enable.config(options);
     }
-    options.storage.ipc = 'socket';
+    // options.storage.ipc = 'socket';
+    // options.storage.ipc = { role: 'socket', maxIdleTime: 0, loggerPluginPath: resolvePath(__dirname, 'custom-logger.js') };
+    options.logger = customLogger;
+    // options.logColors = false;
     const db = new AceBase(dbname, options);
     await db.ready();
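
Taken together, these hunks swap the internal `debug` logger for a pluggable `logger` that the storage classes call through `this.logger.info/warn/error`, with Pino as the logger used in the test setup. Below is a minimal consumer-side sketch of the same wiring. Only the `Pino({ level: 'trace' })` construction and the `options.logger = customLogger` assignment are confirmed by this diff; the `acebase` package entry point, the database name, and the `any`-typed options object are illustrative assumptions.

// sketch.ts — a minimal sketch, assuming the published 'acebase' package exposes
// the same AceBase constructor used in src/test/tempdb.ts above
import { AceBase } from 'acebase';
import Pino from 'pino';

// Same construction as src/test/custom-logger.ts: a Pino instance at 'trace' level
const logger = Pino({ level: 'trace' });

async function main() {
    // `logger` is the new option assigned in tempdb.ts; the rest of the
    // options object is left untyped here because its shape is not shown in this diff
    const options: any = { logger };
    const db = new AceBase('mydb', options);
    await db.ready();
    // From here on, AceBase's internal log output is routed through Pino
    logger.info('AceBase is ready');
}

main().catch(err => logger.error(err));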