diff --git a/migrate.sh b/migrate.sh new file mode 100755 index 00000000..6e130613 --- /dev/null +++ b/migrate.sh @@ -0,0 +1,6 @@ +#!/bin/bash +set -o allexport +source ./migrate.env +set +o allexport +shift +node scripts/migrateMariadbToPostgresql.mjs \ No newline at end of file diff --git a/package-lock.json b/package-lock.json index 79eeec34..203a68d7 100644 --- a/package-lock.json +++ b/package-lock.json @@ -4498,6 +4498,17 @@ "integrity": "sha512-//oorEZjL6sbPcKUaCdIGlIUeH26mgzimjBB77G6XRgnDl/L5wOnpyBGRe/Mmf5CVW3PwEBE1NjiMZ/ssFh4wA==", "dev": true }, + "node_modules/@types/pg": { + "version": "8.6.5", + "resolved": "https://registry.npmjs.org/@types/pg/-/pg-8.6.5.tgz", + "integrity": "sha512-tOkGtAqRVkHa/PVZicq67zuujI4Oorfglsr2IbKofDwBSysnaqSx7W1mDqFqdkGE6Fbgh+PZAl0r/BWON/mozw==", + "dev": true, + "dependencies": { + "@types/node": "*", + "pg-protocol": "*", + "pg-types": "^2.2.0" + } + }, "node_modules/@types/prettier": { "version": "2.7.0", "resolved": "https://registry.npmjs.org/@types/prettier/-/prettier-2.7.0.tgz", @@ -7099,6 +7110,11 @@ "resolved": "https://registry.npmjs.org/boolbase/-/boolbase-1.0.0.tgz", "integrity": "sha512-JZOSA7Mo9sNGB8+UjSgzdLtokWAky1zbztM3WRLCbZ70/3cTANmQmOdR7y2g+J0e2WXywy1yS468tY+IruqEww==" }, + "node_modules/boolean": { + "version": "3.2.0", + "resolved": "https://registry.npmjs.org/boolean/-/boolean-3.2.0.tgz", + "integrity": "sha512-d0II/GO9uf9lfUHH2BQsjxzRJZBdsjgsBiW4BvhWk/3qoKwQFjIDVN19PfX8F2D/r9PCMTtLWjYVCFrpeYUzsw==" + }, "node_modules/bootstrap": { "version": "5.2.1", "resolved": "https://registry.npmjs.org/bootstrap/-/bootstrap-5.2.1.tgz", @@ -7251,14 +7267,29 @@ "node_modules/buffer-from": { "version": "1.1.2", "resolved": "https://registry.npmjs.org/buffer-from/-/buffer-from-1.1.2.tgz", - "integrity": "sha512-E+XQCRwSbaaiChtv6k6Dwgc+bx+Bs6vuKJHHl5kox/BaKbhiXzqQOwK4cO22yElGp2OCmjwVhT3HmxgyPGnJfQ==", - "dev": true + "integrity": 
"sha512-E+XQCRwSbaaiChtv6k6Dwgc+bx+Bs6vuKJHHl5kox/BaKbhiXzqQOwK4cO22yElGp2OCmjwVhT3HmxgyPGnJfQ==" }, "node_modules/buffer-shims": { "version": "1.0.0", "resolved": "https://registry.npmjs.org/buffer-shims/-/buffer-shims-1.0.0.tgz", "integrity": "sha512-Zy8ZXMyxIT6RMTeY7OP/bDndfj6bwCan7SS98CEndS6deHwWPpseeHlwarNcBim+etXnF9HBc1non5JgDaJU1g==" }, + "node_modules/buffer-writer": { + "version": "2.0.0", + "resolved": "https://registry.npmjs.org/buffer-writer/-/buffer-writer-2.0.0.tgz", + "integrity": "sha512-a7ZpuTZU1TRtnwyCNW3I5dc0wWNC3VR9S++Ewyk2HHZdrO3CQJqSpd+95Us590V6AL7JqUAH2IwZ/398PmNFgw==", + "engines": { + "node": ">=4" + } + }, + "node_modules/bufferput": { + "version": "0.1.3", + "resolved": "https://registry.npmjs.org/bufferput/-/bufferput-0.1.3.tgz", + "integrity": "sha512-nmPV88vDNzf0VMU1bdQ4A1oBlRR9y+CXfwWKfyKUgI2ZIkvreNzLMM3tkz0Lapb6f+Cz1V001UWRBsoGVCjqdw==", + "engines": { + "node": ">=0.3.0" + } + }, "node_modules/builtin-modules": { "version": "3.3.0", "resolved": "https://registry.npmjs.org/builtin-modules/-/builtin-modules-3.3.0.tgz", @@ -8375,6 +8406,33 @@ "resolved": "https://registry.npmjs.org/concat-map/-/concat-map-0.0.1.tgz", "integrity": "sha512-/Srv4dswyQNBfohGpz9o6Yb3Gz3SrUDqBH5rTuhGR7ahtlbYKnVxw2bCFMRljaA7EXHaXZ8wsHdodFvbkhKmqg==" }, + "node_modules/concat-stream": { + "version": "2.0.0", + "resolved": "https://registry.npmjs.org/concat-stream/-/concat-stream-2.0.0.tgz", + "integrity": "sha512-MWufYdFw53ccGjCA+Ol7XJYpAlW6/prSMzuPOTRnJGcGzuhLn4Scrz7qf6o8bROZ514ltazcIFJZevcfbo0x7A==", + "engines": [ + "node >= 6.0" + ], + "dependencies": { + "buffer-from": "^1.0.0", + "inherits": "^2.0.3", + "readable-stream": "^3.0.2", + "typedarray": "^0.0.6" + } + }, + "node_modules/concat-stream/node_modules/readable-stream": { + "version": "3.6.0", + "resolved": "https://registry.npmjs.org/readable-stream/-/readable-stream-3.6.0.tgz", + "integrity": "sha512-BViHy7LKeTz4oNnkcLJ+lVSL6vpiFeX6/d3oSH8zCW7UxP2onchk+vTGB143xuFjHS3deTgkKoXXymXqymiIdA==", + 
"dependencies": { + "inherits": "^2.0.3", + "string_decoder": "^1.1.1", + "util-deprecate": "^1.0.1" + }, + "engines": { + "node": ">= 6" + } + }, "node_modules/connect-history-api-fallback": { "version": "2.0.0", "resolved": "https://registry.npmjs.org/connect-history-api-fallback/-/connect-history-api-fallback-2.0.0.tgz", @@ -8636,7 +8694,6 @@ "version": "3.25.1", "resolved": "https://registry.npmjs.org/core-js/-/core-js-3.25.1.tgz", "integrity": "sha512-sr0FY4lnO1hkQ4gLDr24K0DGnweGO1QwSj5BpfQjpSJPdqWalja4cTps29Y/PJVG/P7FYlPDkH3hO+Tr0CvDgQ==", - "dev": true, "hasInstallScript": true, "funding": { "type": "opencollective", @@ -9351,7 +9408,6 @@ "version": "1.1.4", "resolved": "https://registry.npmjs.org/define-properties/-/define-properties-1.1.4.tgz", "integrity": "sha512-uckOqKcfaVvtBdsVkdPv3XjveQJsNQqmhXgRi8uhvWWuPYZCNlzT8qAyblUgNoXdHdjMTzAqeGjAoli8f+bzPA==", - "dev": true, "dependencies": { "has-property-descriptors": "^1.0.0", "object-keys": "^1.1.1" @@ -9505,6 +9561,11 @@ "node": ">=8" } }, + "node_modules/discontinuous-range": { + "version": "1.0.0", + "resolved": "https://registry.npmjs.org/discontinuous-range/-/discontinuous-range-1.0.0.tgz", + "integrity": "sha512-c68LpLbO+7kP/b1Hr1qs8/BJ09F5khZGTxqxZuhzxpmwJKOgRFHJWIb9/KmqnqHhLdO55aOxFH/EGBvUQbL/RQ==" + }, "node_modules/dns-equal": { "version": "1.0.0", "resolved": "https://registry.npmjs.org/dns-equal/-/dns-equal-1.0.0.tgz", @@ -9949,6 +10010,11 @@ "url": "https://github.com/sponsors/ljharb" } }, + "node_modules/es6-error": { + "version": "4.1.1", + "resolved": "https://registry.npmjs.org/es6-error/-/es6-error-4.1.1.tgz", + "integrity": "sha512-Um/+FxMr9CISWh0bi5Zv0iOD+4cFh5qLeks1qhAopKVAJw3drgKbKySikp7wGhDL0HPeaja0P5ULZrxLkniUVg==" + }, "node_modules/escalade": { "version": "3.1.1", "resolved": "https://registry.npmjs.org/escalade/-/escalade-3.1.1.tgz", @@ -11138,11 +11204,61 @@ "resolved": "https://registry.npmjs.org/fast-json-stable-stringify/-/fast-json-stable-stringify-2.1.0.tgz", "integrity": 
"sha512-lhd/wF+Lk98HZoTCtlVraHtfh5XYijIjalXck7saUtuanSDyLMxnHhSXEDJqHxD7msR8D0uCmqlkwjCV8xvwHw==" }, + "node_modules/fast-json-stringify": { + "version": "2.7.13", + "resolved": "https://registry.npmjs.org/fast-json-stringify/-/fast-json-stringify-2.7.13.tgz", + "integrity": "sha512-ar+hQ4+OIurUGjSJD1anvYSDcUflywhKjfxnsW4TBTD7+u0tJufv6DKRWoQk3vI6YBOWMoz0TQtfbe7dxbQmvA==", + "dependencies": { + "ajv": "^6.11.0", + "deepmerge": "^4.2.2", + "rfdc": "^1.2.0", + "string-similarity": "^4.0.1" + }, + "engines": { + "node": ">= 10.0.0" + } + }, + "node_modules/fast-json-stringify/node_modules/ajv": { + "version": "6.12.6", + "resolved": "https://registry.npmjs.org/ajv/-/ajv-6.12.6.tgz", + "integrity": "sha512-j3fVLgvTo527anyYyJOGTYJbG+vnnQYvE0m5mmkc1TK+nxAppkCLMIL0aZ4dblVCNoGShhm+kzE4ZUykBoMg4g==", + "dependencies": { + "fast-deep-equal": "^3.1.1", + "fast-json-stable-stringify": "^2.0.0", + "json-schema-traverse": "^0.4.1", + "uri-js": "^4.2.2" + }, + "funding": { + "type": "github", + "url": "https://github.com/sponsors/epoberezkin" + } + }, + "node_modules/fast-json-stringify/node_modules/json-schema-traverse": { + "version": "0.4.1", + "resolved": "https://registry.npmjs.org/json-schema-traverse/-/json-schema-traverse-0.4.1.tgz", + "integrity": "sha512-xbbCH5dCYU5T8LcEhhuh7HJ88HXuW3qsI3Y0zOZFKfZEHcpWiHU/Jxzk629Brsab/mMiHQti9wMP+845RPe3Vg==" + }, "node_modules/fast-levenshtein": { "version": "2.0.6", "resolved": "https://registry.npmjs.org/fast-levenshtein/-/fast-levenshtein-2.0.6.tgz", "integrity": "sha512-DCXu6Ifhqcks7TZKY3Hxp3y6qphY5SJZmrWMDrKcERSOXWQdMhU9Ig/PYrzyw/ul9jOIyh0N4M0tbC5hodg8dw==" }, + "node_modules/fast-printf": { + "version": "1.6.9", + "resolved": "https://registry.npmjs.org/fast-printf/-/fast-printf-1.6.9.tgz", + "integrity": "sha512-FChq8hbz65WMj4rstcQsFB0O7Cy++nmbNfLYnD9cYv2cRn8EG6k/MGn9kO/tjO66t09DLDugj3yL+V2o6Qftrg==", + "dependencies": { + "boolean": "^3.1.4" + }, + "engines": { + "node": ">=10.0" + } + }, + "node_modules/fast-safe-stringify": { 
+ "version": "2.1.1", + "resolved": "https://registry.npmjs.org/fast-safe-stringify/-/fast-safe-stringify-2.1.1.tgz", + "integrity": "sha512-W+KJc2dmILlPplD/H4K9l9LcAHAfPtP6BY84uVLXQ6Evcz9Lcg33Y2z1IVblT6xdY54PXYVHEv+0Wpq8Io6zkA==" + }, "node_modules/fastq": { "version": "1.13.0", "resolved": "https://registry.npmjs.org/fastq/-/fastq-1.13.0.tgz", @@ -11833,6 +11949,52 @@ "node": ">=8.0.0" } }, + "node_modules/get-stack-trace": { + "version": "2.1.1", + "resolved": "https://registry.npmjs.org/get-stack-trace/-/get-stack-trace-2.1.1.tgz", + "integrity": "sha512-dhqSDD9lHU/6FvIZ9KbXGmVK6IKr9ZskZtNOUvhlCiONlnqatu4FmAeRbxCfJJVuQ0NWfz6dAbibKQg19B7AmQ==", + "dependencies": { + "bluebird": "^3.7.1", + "source-map": "^0.8.0-beta.0" + }, + "engines": { + "node": ">=8.0" + } + }, + "node_modules/get-stack-trace/node_modules/source-map": { + "version": "0.8.0-beta.0", + "resolved": "https://registry.npmjs.org/source-map/-/source-map-0.8.0-beta.0.tgz", + "integrity": "sha512-2ymg6oRBpebeZi9UUNsgQ89bhx01TcTkmNTGnNO88imTmbSgy4nfujrgVEFKWpMTEGA11EDkTt7mqObTPdigIA==", + "dependencies": { + "whatwg-url": "^7.0.0" + }, + "engines": { + "node": ">= 8" + } + }, + "node_modules/get-stack-trace/node_modules/tr46": { + "version": "1.0.1", + "resolved": "https://registry.npmjs.org/tr46/-/tr46-1.0.1.tgz", + "integrity": "sha512-dTpowEjclQ7Kgx5SdBkqRzVhERQXov8/l9Ft9dVM9fmg0W0KQSVaXX9T4i6twCPNtYiZM53lpSSUAwJbFPOHxA==", + "dependencies": { + "punycode": "^2.1.0" + } + }, + "node_modules/get-stack-trace/node_modules/webidl-conversions": { + "version": "4.0.2", + "resolved": "https://registry.npmjs.org/webidl-conversions/-/webidl-conversions-4.0.2.tgz", + "integrity": "sha512-YQ+BmxuTgd6UXZW3+ICGfyqRyHXVlD5GtQr5+qjiNW7bF0cqrzX500HVXPBOvgXb5YnzDd+h0zqyv61KUD7+Sg==" + }, + "node_modules/get-stack-trace/node_modules/whatwg-url": { + "version": "7.1.0", + "resolved": "https://registry.npmjs.org/whatwg-url/-/whatwg-url-7.1.0.tgz", + "integrity": 
"sha512-WUu7Rg1DroM7oQvGWfOiAK21n74Gg+T4elXEQYkOhtyLeWiJFoOGLXPKI/9gzIie9CtwVLm8wtw6YJdKyxSjeg==", + "dependencies": { + "lodash.sortby": "^4.7.0", + "tr46": "^1.0.1", + "webidl-conversions": "^4.0.2" + } + }, "node_modules/get-stream": { "version": "6.0.1", "resolved": "https://registry.npmjs.org/get-stream/-/get-stream-6.0.1.tgz", @@ -12036,6 +12198,20 @@ "url": "https://github.com/sponsors/sindresorhus" } }, + "node_modules/globalthis": { + "version": "1.0.3", + "resolved": "https://registry.npmjs.org/globalthis/-/globalthis-1.0.3.tgz", + "integrity": "sha512-sFdI5LyBiNTHjRd7cGPWapiHWMOXKyuBNX/cWJ3NfzrZQVa8GI/8cofCl74AOVqq9W5kNmguTIzJ/1s2gyI9wA==", + "dependencies": { + "define-properties": "^1.1.3" + }, + "engines": { + "node": ">= 0.4" + }, + "funding": { + "url": "https://github.com/sponsors/ljharb" + } + }, "node_modules/globalyzer": { "version": "0.1.0", "resolved": "https://registry.npmjs.org/globalyzer/-/globalyzer-0.1.0.tgz", @@ -12224,7 +12400,6 @@ "version": "1.0.0", "resolved": "https://registry.npmjs.org/has-property-descriptors/-/has-property-descriptors-1.0.0.tgz", "integrity": "sha512-62DVLZGoiEBDHQyqG4w9xCuZ7eJEwNmJRWw2VY84Oedb7WFcA27fiEVe8oUQx9hAUJ4ekurquucTGwsyO1XGdQ==", - "dev": true, "dependencies": { "get-intrinsic": "^1.1.1" }, @@ -12666,6 +12841,15 @@ "node": ">=10.17.0" } }, + "node_modules/hyperid": { + "version": "2.3.1", + "resolved": "https://registry.npmjs.org/hyperid/-/hyperid-2.3.1.tgz", + "integrity": "sha512-mIbI7Ymn6MCdODaW1/6wdf5lvvXzmPsARN4zTLakMmcziBOuP4PxCBJvHF6kbAIHX6H4vAELx/pDmt0j6Th5RQ==", + "dependencies": { + "uuid": "^8.3.2", + "uuid-parse": "^1.1.0" + } + }, "node_modules/iconv-lite": { "version": "0.4.24", "resolved": "https://registry.npmjs.org/iconv-lite/-/iconv-lite-0.4.24.tgz", @@ -12934,6 +13118,14 @@ "node": ">=4" } }, + "node_modules/int64-buffer": { + "version": "0.99.1007", + "resolved": "https://registry.npmjs.org/int64-buffer/-/int64-buffer-0.99.1007.tgz", + "integrity": 
"sha512-XDBEu44oSTqlvCSiOZ/0FoUkpWu/vwjJLGSKDabNISPQNZ5wub1FodGHBljRsrR0IXRPq7SslshZYMuA55CgTQ==", + "engines": { + "node": ">= 4.5.0" + } + }, "node_modules/internal-slot": { "version": "1.0.3", "resolved": "https://registry.npmjs.org/internal-slot/-/internal-slot-1.0.3.tgz", @@ -13268,7 +13460,6 @@ "version": "5.0.0", "resolved": "https://registry.npmjs.org/is-plain-object/-/is-plain-object-5.0.0.tgz", "integrity": "sha512-VRSzKkbMm5jMDoKLbltAkFQ5Qr7VDiTFGXxYFXXowVj387GeGNOCsOH6Msy00SGZ3Fp84b1Naa1psqgcCIEP5Q==", - "dev": true, "engines": { "node": ">=0.10.0" } @@ -13440,6 +13631,11 @@ "integrity": "sha512-RHxMLp9lnKHGHRng9QFhRCMbYAcVpn69smSGcq3f36xjgVVWThj4qqLbTLlq7Ssj8B+fIQ1EuCEGI2lKsyQeIw==", "dev": true }, + "node_modules/iso8601-duration": { + "version": "1.3.0", + "resolved": "https://registry.npmjs.org/iso8601-duration/-/iso8601-duration-1.3.0.tgz", + "integrity": "sha512-K4CiUBzo3YeWk76FuET/dQPH03WE04R94feo5TSKQCXpoXQt9E4yx2CnY737QZnSAI3PI4WlKo/zfqizGx52QQ==" + }, "node_modules/isobject": { "version": "4.0.0", "resolved": "https://registry.npmjs.org/isobject/-/isobject-4.0.0.tgz", @@ -15015,8 +15211,7 @@ "node_modules/lodash.sortby": { "version": "4.7.0", "resolved": "https://registry.npmjs.org/lodash.sortby/-/lodash.sortby-4.7.0.tgz", - "integrity": "sha512-HDWXG8isMntAyRF5vZ7xKuEvOhT4AhlRt/3czTSjvGUxjYCBVRQY48ViDHyfYz9VIoBkW4TMGQNapx+l3RUwdA==", - "dev": true + "integrity": "sha512-HDWXG8isMntAyRF5vZ7xKuEvOhT4AhlRt/3czTSjvGUxjYCBVRQY48ViDHyfYz9VIoBkW4TMGQNapx+l3RUwdA==" }, "node_modules/lodash.uniq": { "version": "4.5.0", @@ -15631,6 +15826,11 @@ "resolved": "https://registry.npmjs.org/monotonic-timestamp/-/monotonic-timestamp-0.0.8.tgz", "integrity": "sha512-3fQw+dAni/JJ4rkvMY7EZOz+tM+yuhrY3tKLJk74YOp/DQR0Ip+9yiKzZrC40uQ+Kin86s5TOjmL6UmxljOAfA==" }, + "node_modules/moo": { + "version": "0.5.1", + "resolved": "https://registry.npmjs.org/moo/-/moo-0.5.1.tgz", + "integrity": 
"sha512-I1mnb5xn4fO80BH9BLcF0yLypy2UKl+Cb01Fu0hJRkJjlCRtxZMWkTdAtDd5ZqCOxtCkhmRwyI57vWT+1iZ67w==" + }, "node_modules/mri": { "version": "1.2.0", "resolved": "https://registry.npmjs.org/mri/-/mri-1.2.0.tgz", @@ -15653,6 +15853,11 @@ "resolved": "https://registry.npmjs.org/ms/-/ms-2.1.2.tgz", "integrity": "sha512-sGkPx+VjMtmA6MX27oA4FBFELFCZZ4S4XqeGOXCv68tT+jb3vk/RyaKWP0PTKyWtmLSM0b+adUTEvbs1PEaH2w==" }, + "node_modules/multi-fork": { + "version": "0.0.2", + "resolved": "https://registry.npmjs.org/multi-fork/-/multi-fork-0.0.2.tgz", + "integrity": "sha512-SHWGuze0cZNiH+JGJQFlB1k7kZLGFCvW1Xo5Fcpe86KICkC3aVTJWpjUcmyYcLCB0I6gdzKLCia/bTIw2ggl8A==" + }, "node_modules/multicast-dns": { "version": "7.2.5", "resolved": "https://registry.npmjs.org/multicast-dns/-/multicast-dns-7.2.5.tgz", @@ -15714,6 +15919,32 @@ "integrity": "sha512-OWND8ei3VtNC9h7V60qff3SVobHr996CTwgxubgyQYEpg290h9J0buyECNNJexkFm5sOajh5G116RYA1c8ZMSw==", "dev": true }, + "node_modules/nearley": { + "version": "2.20.1", + "resolved": "https://registry.npmjs.org/nearley/-/nearley-2.20.1.tgz", + "integrity": "sha512-+Mc8UaAebFzgV+KpI5n7DasuuQCHA89dmwm7JXw3TV43ukfNQ9DnBH3Mdb2g/I4Fdxc26pwimBWvjIw0UAILSQ==", + "dependencies": { + "commander": "^2.19.0", + "moo": "^0.5.0", + "railroad-diagrams": "^1.0.0", + "randexp": "0.4.6" + }, + "bin": { + "nearley-railroad": "bin/nearley-railroad.js", + "nearley-test": "bin/nearley-test.js", + "nearley-unparse": "bin/nearley-unparse.js", + "nearleyc": "bin/nearleyc.js" + }, + "funding": { + "type": "individual", + "url": "https://nearley.js.org/#give-to-nearley" + } + }, + "node_modules/nearley/node_modules/commander": { + "version": "2.20.3", + "resolved": "https://registry.npmjs.org/commander/-/commander-2.20.3.tgz", + "integrity": "sha512-GpVkmM8vF2vQUkj2LvZmD35JxeJOLCwJ9cUkugyk2nuhbv3+mJvpLYYt+0+USMxE+oj+ey/lJEnhZw75x/OMcQ==" + }, "node_modules/negotiator": { "version": "0.6.3", "resolved": "https://registry.npmjs.org/negotiator/-/negotiator-0.6.3.tgz", @@ -18485,7 
+18716,6 @@ "version": "1.1.1", "resolved": "https://registry.npmjs.org/object-keys/-/object-keys-1.1.1.tgz", "integrity": "sha512-NuAESUOUMrlIXOfHKzD6bpPu3tYt3xvjNdRIQ+FeT0lNb4K8WR70CaDxhuNguS2XG+GjkyMwOzsN5ZktImfhLA==", - "dev": true, "engines": { "node": ">= 0.4" } @@ -18528,8 +18758,7 @@ "node_modules/obuf": { "version": "1.1.2", "resolved": "https://registry.npmjs.org/obuf/-/obuf-1.1.2.tgz", - "integrity": "sha512-PX1wu0AmAdPqOL1mWhqmlOd8kOIZQwGZw6rh7uby9fTc5lhaOWFLX3I6R1hrF9k3zUY40e6igsLGkDXK92LJNg==", - "dev": true + "integrity": "sha512-PX1wu0AmAdPqOL1mWhqmlOd8kOIZQwGZw6rh7uby9fTc5lhaOWFLX3I6R1hrF9k3zUY40e6igsLGkDXK92LJNg==" }, "node_modules/on-finished": { "version": "2.4.1", @@ -18745,6 +18974,14 @@ "url": "https://github.com/sponsors/sindresorhus" } }, + "node_modules/p-defer": { + "version": "3.0.0", + "resolved": "https://registry.npmjs.org/p-defer/-/p-defer-3.0.0.tgz", + "integrity": "sha512-ugZxsxmtTln604yeYd29EGrNhazN2lywetzpKhfmQjW/VJmhpDmWbiX+h0zL8V91R0UXkhb3KtPmyq9PZw3aYw==", + "engines": { + "node": ">=8" + } + }, "node_modules/p-each-series": { "version": "2.2.0", "resolved": "https://registry.npmjs.org/p-each-series/-/p-each-series-2.2.0.tgz", @@ -18857,6 +19094,11 @@ "node": ">=6" } }, + "node_modules/packet-reader": { + "version": "1.0.0", + "resolved": "https://registry.npmjs.org/packet-reader/-/packet-reader-1.0.0.tgz", + "integrity": "sha512-HAKu/fG3HpHFO0AA8WE8q2g+gBJaZ9MG7fcKk+IJPLTGAD6Psw4443l+9DGRbOIh3/aXr7Phy0TjilYivJo5XQ==" + }, "node_modules/param-case": { "version": "3.0.4", "resolved": "https://registry.npmjs.org/param-case/-/param-case-3.0.4.tgz", @@ -19017,6 +19259,151 @@ "resolved": "https://registry.npmjs.org/performance-now/-/performance-now-2.1.0.tgz", "integrity": "sha512-7EAHlyLHI56VEIdK57uwHdHKIaAGbnXPiw0yWbarQZOKaKpvUIgW0jWRVLiatnM+XXlSwsanIBH/hzGMJulMow==" }, + "node_modules/pg": { + "version": "8.8.0", + "resolved": "https://registry.npmjs.org/pg/-/pg-8.8.0.tgz", + "integrity": 
"sha512-UXYN0ziKj+AeNNP7VDMwrehpACThH7LUl/p8TDFpEUuSejCUIwGSfxpHsPvtM6/WXFy6SU4E5RG4IJV/TZAGjw==", + "dependencies": { + "buffer-writer": "2.0.0", + "packet-reader": "1.0.0", + "pg-connection-string": "^2.5.0", + "pg-pool": "^3.5.2", + "pg-protocol": "^1.5.0", + "pg-types": "^2.1.0", + "pgpass": "1.x" + }, + "engines": { + "node": ">= 8.0.0" + }, + "peerDependencies": { + "pg-native": ">=3.0.1" + }, + "peerDependenciesMeta": { + "pg-native": { + "optional": true + } + } + }, + "node_modules/pg-connection-string": { + "version": "2.5.0", + "resolved": "https://registry.npmjs.org/pg-connection-string/-/pg-connection-string-2.5.0.tgz", + "integrity": "sha512-r5o/V/ORTA6TmUnyWZR9nCj1klXCO2CEKNRlVuJptZe85QuhFayC7WeMic7ndayT5IRIR0S0xFxFi2ousartlQ==" + }, + "node_modules/pg-copy-streams": { + "version": "6.0.4", + "resolved": "https://registry.npmjs.org/pg-copy-streams/-/pg-copy-streams-6.0.4.tgz", + "integrity": "sha512-FH6q2nFo0n2cFacLyIKorjDz8AOYtxrAANx1XMvYbKWNM2geY731gZstuP4mXMlqO6urRl9oIscFxf3GMIg3Ng==", + "dependencies": { + "obuf": "^1.1.2" + } + }, + "node_modules/pg-copy-streams-binary": { + "version": "2.2.0", + "resolved": "https://registry.npmjs.org/pg-copy-streams-binary/-/pg-copy-streams-binary-2.2.0.tgz", + "integrity": "sha512-jPCWgTR8004wz5XOI2sc09+IMwE7YMeINYCabwPMCPtlgj2ay81VLCClMkj/u+xOeisRcN8vCrIZ4FrqlaTyBQ==", + "dependencies": { + "bl": "^4.0.3", + "bufferput": "^0.1.3", + "ieee754": "^1.1.13", + "int64-buffer": "^0.99.1007", + "multi-fork": "0.0.2", + "through2": "^3.0.1" + } + }, + "node_modules/pg-copy-streams-binary/node_modules/through2": { + "version": "3.0.2", + "resolved": "https://registry.npmjs.org/through2/-/through2-3.0.2.tgz", + "integrity": "sha512-enaDQ4MUyP2W6ZyT6EsMzqBPZaM/avg8iuo+l2d3QCs0J+6RaqkHV/2/lOwDTueBHeJ/2LG9lrLW3d5rWPucuQ==", + "dependencies": { + "inherits": "^2.0.4", + "readable-stream": "2 || 3" + } + }, + "node_modules/pg-cursor": { + "version": "2.7.4", + "resolved": 
"https://registry.npmjs.org/pg-cursor/-/pg-cursor-2.7.4.tgz", + "integrity": "sha512-CNWwOzTTZ9QvphoOL+Wg/7pmVr9GnAWBjPbuK2FRclrB4A/WRO/ssCJ9BlkzIGmmofK2M/LyokNHgsLSn+fMHA==", + "peerDependencies": { + "pg": "^8" + } + }, + "node_modules/pg-int8": { + "version": "1.0.1", + "resolved": "https://registry.npmjs.org/pg-int8/-/pg-int8-1.0.1.tgz", + "integrity": "sha512-WCtabS6t3c8SkpDBUlb1kjOs7l66xsGdKpIPZsg4wR+B3+u9UAum2odSsF9tnvxg80h4ZxLWMy4pRjOsFIqQpw==", + "engines": { + "node": ">=4.0.0" + } + }, + "node_modules/pg-numeric": { + "version": "1.0.2", + "resolved": "https://registry.npmjs.org/pg-numeric/-/pg-numeric-1.0.2.tgz", + "integrity": "sha512-BM/Thnrw5jm2kKLE5uJkXqqExRUY/toLHda65XgFTBTFYZyopbKjBe29Ii3RbkvlsMoFwD+tHeGaCjjv0gHlyw==", + "engines": { + "node": ">=4" + } + }, + "node_modules/pg-pool": { + "version": "3.5.2", + "resolved": "https://registry.npmjs.org/pg-pool/-/pg-pool-3.5.2.tgz", + "integrity": "sha512-His3Fh17Z4eg7oANLob6ZvH8xIVen3phEZh2QuyrIl4dQSDVEabNducv6ysROKpDNPSD+12tONZVWfSgMvDD9w==", + "peerDependencies": { + "pg": ">=8.0" + } + }, + "node_modules/pg-protocol": { + "version": "1.5.0", + "resolved": "https://registry.npmjs.org/pg-protocol/-/pg-protocol-1.5.0.tgz", + "integrity": "sha512-muRttij7H8TqRNu/DxrAJQITO4Ac7RmX3Klyr/9mJEOBeIpgnF8f9jAfRz5d3XwQZl5qBjF9gLsUtMPJE0vezQ==" + }, + "node_modules/pg-query-stream": { + "version": "4.2.4", + "resolved": "https://registry.npmjs.org/pg-query-stream/-/pg-query-stream-4.2.4.tgz", + "integrity": "sha512-Et3gTrWn4C2rj4LVioNq1QDd7aH/3mSJcBm79jZALv3wopvx9bWENtbOYZbHQ6KM+IkfFxs0JF1ZLjMDJ9/N6Q==", + "dependencies": { + "pg-cursor": "^2.7.4" + } + }, + "node_modules/pg-types": { + "version": "2.2.0", + "resolved": "https://registry.npmjs.org/pg-types/-/pg-types-2.2.0.tgz", + "integrity": "sha512-qTAAlrEsl8s4OiEQY69wDvcMIdQN6wdz5ojQiOy6YRMuynxenON0O5oCpJI6lshc6scgAY8qvJ2On/p+CXY0GA==", + "dependencies": { + "pg-int8": "1.0.1", + "postgres-array": "~2.0.0", + "postgres-bytea": "~1.0.0", + "postgres-date": 
"~1.0.4", + "postgres-interval": "^1.1.0" + }, + "engines": { + "node": ">=4" + } + }, + "node_modules/pgpass": { + "version": "1.0.5", + "resolved": "https://registry.npmjs.org/pgpass/-/pgpass-1.0.5.tgz", + "integrity": "sha512-FdW9r/jQZhSeohs1Z3sI1yxFQNFvMcnmfuj4WBMUTxOrAyLMaTcE1aAMBiTlbMNaXvBCQuVi0R7hd8udDSP7ug==", + "dependencies": { + "split2": "^4.1.0" + } + }, + "node_modules/pgpass/node_modules/split2": { + "version": "4.1.0", + "resolved": "https://registry.npmjs.org/split2/-/split2-4.1.0.tgz", + "integrity": "sha512-VBiJxFkxiXRlUIeyMQi8s4hgvKCSjtknJv/LVYbrgALPwf5zSKmEwV9Lst25AkvMDnvxODugjdl6KZgwKM1WYQ==", + "engines": { + "node": ">= 10.x" + } + }, + "node_modules/pgsql-ast-parser": { + "version": "11.0.0", + "resolved": "https://registry.npmjs.org/pgsql-ast-parser/-/pgsql-ast-parser-11.0.0.tgz", + "integrity": "sha512-vY5dr03b1ZnobWBPi5dy0fovK/kgXkueTLKWwVaC+Ql6VITZCURAHAnT3yAczZAjsbOyaP5Jr6A+Ol+pru8e9A==", + "dependencies": { + "moo": "^0.5.1", + "nearley": "^2.19.5" + } + }, "node_modules/picocolors": { "version": "1.0.0", "resolved": "https://registry.npmjs.org/picocolors/-/picocolors-1.0.0.tgz", @@ -19816,6 +20203,46 @@ "integrity": "sha512-1NNCs6uurfkVbeXG4S8JFT9t19m45ICnif8zWLd5oPSZ50QnwMfK+H3jv408d4jw/7Bttv5axS5IiHoLaVNHeQ==", "dev": true }, + "node_modules/postgres-array": { + "version": "2.0.0", + "resolved": "https://registry.npmjs.org/postgres-array/-/postgres-array-2.0.0.tgz", + "integrity": "sha512-VpZrUqU5A69eQyW2c5CA1jtLecCsN2U/bD6VilrFDWq5+5UIEVO7nazS3TEcHf1zuPYO/sqGvUvW62g86RXZuA==", + "engines": { + "node": ">=4" + } + }, + "node_modules/postgres-bytea": { + "version": "1.0.0", + "resolved": "https://registry.npmjs.org/postgres-bytea/-/postgres-bytea-1.0.0.tgz", + "integrity": "sha512-xy3pmLuQqRBZBXDULy7KbaitYqLcmxigw14Q5sj8QBVLqEwXfeybIKVWiqAXTlcvdvb0+xkOtDbfQMOf4lST1w==", + "engines": { + "node": ">=0.10.0" + } + }, + "node_modules/postgres-date": { + "version": "1.0.7", + "resolved": 
"https://registry.npmjs.org/postgres-date/-/postgres-date-1.0.7.tgz", + "integrity": "sha512-suDmjLVQg78nMK2UZ454hAG+OAW+HQPZ6n++TNDUX+L0+uUlLywnoxJKDou51Zm+zTCjrCl0Nq6J9C5hP9vK/Q==", + "engines": { + "node": ">=0.10.0" + } + }, + "node_modules/postgres-interval": { + "version": "1.2.0", + "resolved": "https://registry.npmjs.org/postgres-interval/-/postgres-interval-1.2.0.tgz", + "integrity": "sha512-9ZhXKM/rw350N1ovuWHbGxnGh/SNJ4cnxHiM0rxE4VN41wsg8P8zWn9hv/buK00RP4WvlOyr/RBDiptyxVbkZQ==", + "dependencies": { + "xtend": "^4.0.0" + }, + "engines": { + "node": ">=0.10.0" + } + }, + "node_modules/postgres-range": { + "version": "1.1.3", + "resolved": "https://registry.npmjs.org/postgres-range/-/postgres-range-1.1.3.tgz", + "integrity": "sha512-VdlZoocy5lCP0c/t66xAfclglEapXPCIVhqqJRncYpvbCgImF0w67aPKfbqUMr72tO2k5q0TdTZwCLjPTI6C9g==" + }, "node_modules/prelude-ls": { "version": "1.2.1", "resolved": "https://registry.npmjs.org/prelude-ls/-/prelude-ls-1.2.1.tgz", @@ -20376,6 +20803,23 @@ "node": ">=8" } }, + "node_modules/railroad-diagrams": { + "version": "1.0.0", + "resolved": "https://registry.npmjs.org/railroad-diagrams/-/railroad-diagrams-1.0.0.tgz", + "integrity": "sha512-cz93DjNeLY0idrCNOH6PviZGRN9GJhsdm9hpn1YCS879fj4W+x5IFJhhkRZcwVgMmFF7R82UA/7Oh+R8lLZg6A==" + }, + "node_modules/randexp": { + "version": "0.4.6", + "resolved": "https://registry.npmjs.org/randexp/-/randexp-0.4.6.tgz", + "integrity": "sha512-80WNmd9DA0tmZrw9qQa62GPPWfuXJknrmVmLcxvq4uZBdYqb1wYoKTmnlGUchvVWe0XiLupYkBoXVOxz3C8DYQ==", + "dependencies": { + "discontinuous-range": "1.0.0", + "ret": "~0.1.10" + }, + "engines": { + "node": ">=0.12" + } + }, "node_modules/randombytes": { "version": "2.1.0", "resolved": "https://registry.npmjs.org/randombytes/-/randombytes-2.1.0.tgz", @@ -21090,6 +21534,14 @@ "node": ">=4" } }, + "node_modules/ret": { + "version": "0.1.15", + "resolved": "https://registry.npmjs.org/ret/-/ret-0.1.15.tgz", + "integrity": 
"sha512-TTlYpa+OL+vMMNG24xSlQGEJ3B/RzEfUlLct7b5G/ytav+wPrplCpVMFuwzXbkecJrb6IYo1iFb0S9v37754mg==", + "engines": { + "node": ">=0.12" + } + }, "node_modules/retry": { "version": "0.13.1", "resolved": "https://registry.npmjs.org/retry/-/retry-0.13.1.tgz", @@ -21109,6 +21561,11 @@ "node": ">=0.10.0" } }, + "node_modules/rfdc": { + "version": "1.3.0", + "resolved": "https://registry.npmjs.org/rfdc/-/rfdc-1.3.0.tgz", + "integrity": "sha512-V2hovdzFbOi77/WajaSMXk2OLm+xNIeQdMMuB7icj7bk6zi2F8GGAxigcnDFpJHbNyNcgyJDiP+8nOrY5cZGrA==" + }, "node_modules/rimraf": { "version": "3.0.2", "resolved": "https://registry.npmjs.org/rimraf/-/rimraf-3.0.2.tgz", @@ -21123,6 +21580,22 @@ "url": "https://github.com/sponsors/isaacs" } }, + "node_modules/roarr": { + "version": "7.12.3", + "resolved": "https://registry.npmjs.org/roarr/-/roarr-7.12.3.tgz", + "integrity": "sha512-EhX9kTlWj4wTqQ0qVX2XvnYZqXhyi3zO/Rq7zAkCcmnawBAZrRpii71PiFbjyWS8yi1TrxWrShpcGBT6WGdCAw==", + "dependencies": { + "boolean": "^3.1.4", + "fast-json-stringify": "^2.7.10", + "fast-printf": "^1.6.9", + "fast-safe-stringify": "^2.1.1", + "globalthis": "^1.0.2", + "semver-compare": "^1.0.0" + }, + "engines": { + "node": ">=12.0" + } + }, "node_modules/rollup": { "version": "2.79.0", "resolved": "https://registry.npmjs.org/rollup/-/rollup-2.79.0.tgz", @@ -21460,6 +21933,11 @@ "node": ">=10" } }, + "node_modules/semver-compare": { + "version": "1.0.0", + "resolved": "https://registry.npmjs.org/semver-compare/-/semver-compare-1.0.0.tgz", + "integrity": "sha512-YM3/ITh2MJ5MtzaM429anh+x2jiLVjqILF4m4oyQB18W7Ggea7BfqdH/wGMK7dDiMghv/6WG7znWMwUDzJiXow==" + }, "node_modules/semver-diff": { "version": "3.1.1", "resolved": "https://registry.npmjs.org/semver-diff/-/semver-diff-3.1.1.tgz", @@ -21545,6 +22023,20 @@ "resolved": "https://registry.npmjs.org/ms/-/ms-2.1.3.tgz", "integrity": "sha512-6FlzubTLZG3J2a/NVCAleEhjzq5oxgHyaCU9yYXvcLsvoVaHJq/s5xXI6/XXP6tz7R9xAOtHnSO/tXtF3WRTlA==" }, + "node_modules/serialize-error": { + "version": 
"8.1.0", + "resolved": "https://registry.npmjs.org/serialize-error/-/serialize-error-8.1.0.tgz", + "integrity": "sha512-3NnuWfM6vBYoy5gZFvHiYsVbafvI9vZv/+jlIigFn4oP4zjNPK3LhcY0xSCgeb1a5L8jO71Mit9LlNoi2UfDDQ==", + "dependencies": { + "type-fest": "^0.20.2" + }, + "engines": { + "node": ">=10" + }, + "funding": { + "url": "https://github.com/sponsors/sindresorhus" + } + }, "node_modules/serialize-javascript": { "version": "6.0.0", "resolved": "https://registry.npmjs.org/serialize-javascript/-/serialize-javascript-6.0.0.tgz", @@ -21863,6 +22355,122 @@ "resolved": "https://registry.npmjs.org/sliced/-/sliced-0.0.5.tgz", "integrity": "sha512-9bYT917D6H3+q8GlQBJmLVz3bc4OeVGfZ2BB12wvLnluTGfG6/8UdOUbKJDW1EEx9SZMDbjnatkau5/XcUeyOw==" }, + "node_modules/slonik": { + "version": "31.2.1", + "resolved": "https://registry.npmjs.org/slonik/-/slonik-31.2.1.tgz", + "integrity": "sha512-yxkHVGpz/Wn+1trwV8Sk5YsQgFWekNC/aVTBkOgpP+qtOPADs9/Qp/uJdxPL1l2h5zS9RuxKazuqr9yv4qEAQw==", + "dependencies": { + "concat-stream": "^2.0.0", + "es6-error": "^4.1.1", + "fast-safe-stringify": "^2.1.1", + "get-stack-trace": "^2.1.1", + "hyperid": "^2.3.1", + "is-plain-object": "^5.0.0", + "iso8601-duration": "^1.3.0", + "p-defer": "^3.0.0", + "pg": "^8.8.0", + "pg-copy-streams": "^6.0.4", + "pg-copy-streams-binary": "^2.2.0", + "pg-cursor": "^2.7.4", + "pg-protocol": "^1.5.0", + "pg-types": "^4.0.0", + "postgres-array": "^3.0.1", + "postgres-interval": "^4.0.0", + "roarr": "^7.12.3", + "serialize-error": "^8.0.0", + "through2": "^4.0.2", + "zod": "^3.19.0" + }, + "engines": { + "node": ">=10.0" + } + }, + "node_modules/slonik-interceptor-field-name-transformation": { + "version": "1.6.5", + "resolved": "https://registry.npmjs.org/slonik-interceptor-field-name-transformation/-/slonik-interceptor-field-name-transformation-1.6.5.tgz", + "integrity": "sha512-SF7EenJQDKQBrMnnrvFPhZu2W9d8I9Egajw7hzemklqwboA7yK12jtSqKOHiFPpacH0lVP1xHdUh4aZaZKmbYw==", + "dependencies": { + "camelcase": "^6.2.1", + "core-js": 
"^3.19.3" + }, + "engines": { + "node": ">=8.0" + }, + "peerDependencies": { + "slonik": ">=27.0.0" + } + }, + "node_modules/slonik-interceptor-field-name-transformation/node_modules/camelcase": { + "version": "6.3.0", + "resolved": "https://registry.npmjs.org/camelcase/-/camelcase-6.3.0.tgz", + "integrity": "sha512-Gmy6FhYlCY7uOElZUSbxo2UCDH8owEk996gkbrpsgGtrJLM3J7jGxl9Ic7Qwwj4ivOE5AWZWRMecDdF7hqGjFA==", + "engines": { + "node": ">=10" + }, + "funding": { + "url": "https://github.com/sponsors/sindresorhus" + } + }, + "node_modules/slonik/node_modules/pg-types": { + "version": "4.0.0", + "resolved": "https://registry.npmjs.org/pg-types/-/pg-types-4.0.0.tgz", + "integrity": "sha512-q4I7zG+d2mDg52WdrOA0TmBvab9ZBC8DE8+opl3gSegnH5ml+0pKbICOfRKXgwQ5aa6NRjLoF5pEDs0YpGvFrw==", + "dependencies": { + "pg-int8": "1.0.1", + "pg-numeric": "1.0.2", + "postgres-array": "~3.0.1", + "postgres-bytea": "~3.0.0", + "postgres-date": "~2.0.1", + "postgres-interval": "^3.0.0", + "postgres-range": "^1.1.1" + }, + "engines": { + "node": ">=10" + } + }, + "node_modules/slonik/node_modules/pg-types/node_modules/postgres-interval": { + "version": "3.0.0", + "resolved": "https://registry.npmjs.org/postgres-interval/-/postgres-interval-3.0.0.tgz", + "integrity": "sha512-BSNDnbyZCXSxgA+1f5UU2GmwhoI0aU5yMxRGO8CdFEcY2BQF9xm/7MqKnYoM1nJDk8nONNWDk9WeSmePFhQdlw==", + "engines": { + "node": ">=12" + } + }, + "node_modules/slonik/node_modules/postgres-array": { + "version": "3.0.1", + "resolved": "https://registry.npmjs.org/postgres-array/-/postgres-array-3.0.1.tgz", + "integrity": "sha512-h7i53Dw2Yq3a1uuZ6lbVFAkvMMwssJ8jkzeAg0XaZm1XIFF/t/s+tockdqbWTymyEm07dVenOQbFisEi+kj8uA==", + "engines": { + "node": ">=12" + } + }, + "node_modules/slonik/node_modules/postgres-bytea": { + "version": "3.0.0", + "resolved": "https://registry.npmjs.org/postgres-bytea/-/postgres-bytea-3.0.0.tgz", + "integrity": "sha512-CNd4jim9RFPkObHSjVHlVrxoVQXz7quwNFpz7RY1okNNme49+sVyiTvTRobiLV548Hx/hb1BG+iE7h9493WzFw==", + 
"dependencies": { + "obuf": "~1.1.2" + }, + "engines": { + "node": ">= 6" + } + }, + "node_modules/slonik/node_modules/postgres-date": { + "version": "2.0.1", + "resolved": "https://registry.npmjs.org/postgres-date/-/postgres-date-2.0.1.tgz", + "integrity": "sha512-YtMKdsDt5Ojv1wQRvUhnyDJNSr2dGIC96mQVKz7xufp07nfuFONzdaowrMHjlAzY6GDLd4f+LUHHAAM1h4MdUw==", + "engines": { + "node": ">=12" + } + }, + "node_modules/slonik/node_modules/postgres-interval": { + "version": "4.0.0", + "resolved": "https://registry.npmjs.org/postgres-interval/-/postgres-interval-4.0.0.tgz", + "integrity": "sha512-OWeL7kyEKJiY7mCmVY+c7/6uhAlt/colA/Nl/Mgls/M3jssrQzFra04iNWnD/qAmG7TsCSgWAASCyiaoBOP/sg==", + "engines": { + "node": ">=12" + } + }, "node_modules/snappy": { "version": "7.1.1", "resolved": "https://registry.npmjs.org/snappy/-/snappy-7.1.1.tgz", @@ -22274,6 +22882,11 @@ "node": ">=10" } }, + "node_modules/string-similarity": { + "version": "4.0.4", + "resolved": "https://registry.npmjs.org/string-similarity/-/string-similarity-4.0.4.tgz", + "integrity": "sha512-/q/8Q4Bl4ZKAPjj8WerIBJWALKkaPRfrvhfF8k/B23i4nzrlRj2/go1m90In7nG/3XDSbOo0+pu6RvCTM9RGMQ==" + }, "node_modules/string-width": { "version": "2.1.1", "resolved": "https://registry.npmjs.org/string-width/-/string-width-2.1.1.tgz", @@ -23238,7 +23851,6 @@ "version": "4.0.2", "resolved": "https://registry.npmjs.org/through2/-/through2-4.0.2.tgz", "integrity": "sha512-iOqSav00cVxEEICeD7TjLB1sueEL+81Wpzp2bY17uZjZN0pWZPuo4suZ/61VujxmqSGFfgOcNuTZ85QJwNZQpw==", - "dev": true, "dependencies": { "readable-stream": "3" } @@ -23247,7 +23859,6 @@ "version": "3.6.0", "resolved": "https://registry.npmjs.org/readable-stream/-/readable-stream-3.6.0.tgz", "integrity": "sha512-BViHy7LKeTz4oNnkcLJ+lVSL6vpiFeX6/d3oSH8zCW7UxP2onchk+vTGB143xuFjHS3deTgkKoXXymXqymiIdA==", - "dev": true, "dependencies": { "inherits": "^2.0.3", "string_decoder": "^1.1.1", @@ -23601,7 +24212,6 @@ "version": "0.20.2", "resolved": 
"https://registry.npmjs.org/type-fest/-/type-fest-0.20.2.tgz", "integrity": "sha512-Ne+eE4r0/iWnpAxD852z3A+N0Bt5RN//NjJwRd2VFHEmrywxf5vsZlh4R6lixl6B+wz/8d+maTSAkN1FIkI3LQ==", - "dev": true, "engines": { "node": ">=10" }, @@ -23621,6 +24231,11 @@ "node": ">= 0.6" } }, + "node_modules/typedarray": { + "version": "0.0.6", + "resolved": "https://registry.npmjs.org/typedarray/-/typedarray-0.0.6.tgz", + "integrity": "sha512-/aCDEGatGvZ2BIk+HmLf4ifCJFwvKFNb9/JeZPMulfgFracn9QFcAf5GO8B/mweUjSoblS5In0cWhqpfs/5PQA==" + }, "node_modules/typescript": { "version": "4.8.3", "resolved": "https://registry.npmjs.org/typescript/-/typescript-4.8.3.tgz", @@ -23865,11 +24480,15 @@ "version": "8.3.2", "resolved": "https://registry.npmjs.org/uuid/-/uuid-8.3.2.tgz", "integrity": "sha512-+NYs2QeMWy+GWFOEm9xnn6HCDp0l7QBD7ml8zLUmJ+93Q5NF0NocErnwkTkXVFNiX3/fpC6afS8Dhb/gz7R7eg==", - "dev": true, "bin": { "uuid": "dist/bin/uuid" } }, + "node_modules/uuid-parse": { + "version": "1.1.0", + "resolved": "https://registry.npmjs.org/uuid-parse/-/uuid-parse-1.1.0.tgz", + "integrity": "sha512-OdmXxA8rDsQ7YpNVbKSJkNzTw2I+S5WsbMDnCtIWSQaosNAcWtFuI/YK1TjzUI6nbkgiqEyh8gWngfcv8Asd9A==" + }, "node_modules/v8-compile-cache-lib": { "version": "3.0.1", "resolved": "https://registry.npmjs.org/v8-compile-cache-lib/-/v8-compile-cache-lib-3.0.1.tgz", @@ -25547,7 +26166,6 @@ "version": "4.0.2", "resolved": "https://registry.npmjs.org/xtend/-/xtend-4.0.2.tgz", "integrity": "sha512-LKYU1iAXJXUgAXn9URjiu+MWhyUXHsvfp7mcuYm9dSUKK0/CjtrUwFAxD82/mCWbtLsGjFIad0wIsod4zrTAEQ==", - "dev": true, "engines": { "node": ">=0.4" } @@ -25848,9 +26466,17 @@ "integrity": "sha512-GpVkmM8vF2vQUkj2LvZmD35JxeJOLCwJ9cUkugyk2nuhbv3+mJvpLYYt+0+USMxE+oj+ey/lJEnhZw75x/OMcQ==", "optional": true }, + "node_modules/zod": { + "version": "3.19.1", + "resolved": "https://registry.npmjs.org/zod/-/zod-3.19.1.tgz", + "integrity": "sha512-LYjZsEDhCdYET9ikFu6dVPGp2YH9DegXjdJToSzD9rO6fy4qiRYFoyEYwps88OseJlPyl2NOe2iJuhEhL7IpEA==", + "funding": { + "url": 
"https://github.com/sponsors/colinhacks" + } + }, "packages/core": { "name": "enterprise-core", - "version": "2.22.0", + "version": "2.22.1", "dependencies": { "ajv": "^8.11.0", "bcryptjs": "^2.4.3", @@ -25860,7 +26486,12 @@ "jsonschema": "^1.4.1", "mysql": "^2.18.1", "node-cache": "^5.1.2", + "pg": "^8.8.0", + "pg-query-stream": "^4.2.4", + "pgsql-ast-parser": "^11.0.0", "promise-mysql": "^5.2.0", + "slonik": "^31.2.1", + "slonik-interceptor-field-name-transformation": "^1.6.5", "tslib": "^2.4.0", "uuid": "^9.0.0", "validate.js": "^0.13.1", @@ -25875,6 +26506,7 @@ "@types/mysql": "^2.15.21", "@types/node": "^18.7.16", "@types/node-cache": "^4.2.5", + "@types/pg": "8.6.5", "@types/triple-beam": "^1.3.2", "@types/uuid": "^8.3.4", "typescript": "^4.8.3" @@ -25890,7 +26522,7 @@ }, "packages/scraper": { "name": "enterprise-scraper", - "version": "2.22.0", + "version": "2.22.1", "dependencies": { "axios": "^0.27.2", "axios-cookiejar-support": "^4.0.3", @@ -25923,7 +26555,7 @@ }, "packages/server": { "name": "enterprise-server", - "version": "2.22.0", + "version": "2.22.1", "dependencies": { "ajv-formats": "^2.1.1", "brotli": "^1.3.3", @@ -25968,7 +26600,7 @@ }, "packages/website": { "name": "enterprise-website", - "version": "2.22.0", + "version": "2.22.1", "dependencies": { "@fortawesome/fontawesome-free": "^6.2.0", "@fortawesome/fontawesome-svg-core": "^6.2.0", @@ -29403,6 +30035,17 @@ "integrity": "sha512-//oorEZjL6sbPcKUaCdIGlIUeH26mgzimjBB77G6XRgnDl/L5wOnpyBGRe/Mmf5CVW3PwEBE1NjiMZ/ssFh4wA==", "dev": true }, + "@types/pg": { + "version": "8.6.5", + "resolved": "https://registry.npmjs.org/@types/pg/-/pg-8.6.5.tgz", + "integrity": "sha512-tOkGtAqRVkHa/PVZicq67zuujI4Oorfglsr2IbKofDwBSysnaqSx7W1mDqFqdkGE6Fbgh+PZAl0r/BWON/mozw==", + "dev": true, + "requires": { + "@types/node": "*", + "pg-protocol": "*", + "pg-types": "^2.2.0" + } + }, "@types/prettier": { "version": "2.7.0", "resolved": "https://registry.npmjs.org/@types/prettier/-/prettier-2.7.0.tgz", @@ -31483,6 
+32126,11 @@ "resolved": "https://registry.npmjs.org/boolbase/-/boolbase-1.0.0.tgz", "integrity": "sha512-JZOSA7Mo9sNGB8+UjSgzdLtokWAky1zbztM3WRLCbZ70/3cTANmQmOdR7y2g+J0e2WXywy1yS468tY+IruqEww==" }, + "boolean": { + "version": "3.2.0", + "resolved": "https://registry.npmjs.org/boolean/-/boolean-3.2.0.tgz", + "integrity": "sha512-d0II/GO9uf9lfUHH2BQsjxzRJZBdsjgsBiW4BvhWk/3qoKwQFjIDVN19PfX8F2D/r9PCMTtLWjYVCFrpeYUzsw==" + }, "bootstrap": { "version": "5.2.1", "resolved": "https://registry.npmjs.org/bootstrap/-/bootstrap-5.2.1.tgz", @@ -31578,14 +32226,23 @@ "buffer-from": { "version": "1.1.2", "resolved": "https://registry.npmjs.org/buffer-from/-/buffer-from-1.1.2.tgz", - "integrity": "sha512-E+XQCRwSbaaiChtv6k6Dwgc+bx+Bs6vuKJHHl5kox/BaKbhiXzqQOwK4cO22yElGp2OCmjwVhT3HmxgyPGnJfQ==", - "dev": true + "integrity": "sha512-E+XQCRwSbaaiChtv6k6Dwgc+bx+Bs6vuKJHHl5kox/BaKbhiXzqQOwK4cO22yElGp2OCmjwVhT3HmxgyPGnJfQ==" }, "buffer-shims": { "version": "1.0.0", "resolved": "https://registry.npmjs.org/buffer-shims/-/buffer-shims-1.0.0.tgz", "integrity": "sha512-Zy8ZXMyxIT6RMTeY7OP/bDndfj6bwCan7SS98CEndS6deHwWPpseeHlwarNcBim+etXnF9HBc1non5JgDaJU1g==" }, + "buffer-writer": { + "version": "2.0.0", + "resolved": "https://registry.npmjs.org/buffer-writer/-/buffer-writer-2.0.0.tgz", + "integrity": "sha512-a7ZpuTZU1TRtnwyCNW3I5dc0wWNC3VR9S++Ewyk2HHZdrO3CQJqSpd+95Us590V6AL7JqUAH2IwZ/398PmNFgw==" + }, + "bufferput": { + "version": "0.1.3", + "resolved": "https://registry.npmjs.org/bufferput/-/bufferput-0.1.3.tgz", + "integrity": "sha512-nmPV88vDNzf0VMU1bdQ4A1oBlRR9y+CXfwWKfyKUgI2ZIkvreNzLMM3tkz0Lapb6f+Cz1V001UWRBsoGVCjqdw==" + }, "builtin-modules": { "version": "3.3.0", "resolved": "https://registry.npmjs.org/builtin-modules/-/builtin-modules-3.3.0.tgz", @@ -32465,6 +33122,29 @@ "resolved": "https://registry.npmjs.org/concat-map/-/concat-map-0.0.1.tgz", "integrity": "sha512-/Srv4dswyQNBfohGpz9o6Yb3Gz3SrUDqBH5rTuhGR7ahtlbYKnVxw2bCFMRljaA7EXHaXZ8wsHdodFvbkhKmqg==" }, + "concat-stream": { + 
"version": "2.0.0", + "resolved": "https://registry.npmjs.org/concat-stream/-/concat-stream-2.0.0.tgz", + "integrity": "sha512-MWufYdFw53ccGjCA+Ol7XJYpAlW6/prSMzuPOTRnJGcGzuhLn4Scrz7qf6o8bROZ514ltazcIFJZevcfbo0x7A==", + "requires": { + "buffer-from": "^1.0.0", + "inherits": "^2.0.3", + "readable-stream": "^3.0.2", + "typedarray": "^0.0.6" + }, + "dependencies": { + "readable-stream": { + "version": "3.6.0", + "resolved": "https://registry.npmjs.org/readable-stream/-/readable-stream-3.6.0.tgz", + "integrity": "sha512-BViHy7LKeTz4oNnkcLJ+lVSL6vpiFeX6/d3oSH8zCW7UxP2onchk+vTGB143xuFjHS3deTgkKoXXymXqymiIdA==", + "requires": { + "inherits": "^2.0.3", + "string_decoder": "^1.1.1", + "util-deprecate": "^1.0.1" + } + } + } + }, "connect-history-api-fallback": { "version": "2.0.0", "resolved": "https://registry.npmjs.org/connect-history-api-fallback/-/connect-history-api-fallback-2.0.0.tgz", @@ -32655,8 +33335,7 @@ "core-js": { "version": "3.25.1", "resolved": "https://registry.npmjs.org/core-js/-/core-js-3.25.1.tgz", - "integrity": "sha512-sr0FY4lnO1hkQ4gLDr24K0DGnweGO1QwSj5BpfQjpSJPdqWalja4cTps29Y/PJVG/P7FYlPDkH3hO+Tr0CvDgQ==", - "dev": true + "integrity": "sha512-sr0FY4lnO1hkQ4gLDr24K0DGnweGO1QwSj5BpfQjpSJPdqWalja4cTps29Y/PJVG/P7FYlPDkH3hO+Tr0CvDgQ==" }, "core-js-compat": { "version": "3.25.1", @@ -33170,7 +33849,6 @@ "version": "1.1.4", "resolved": "https://registry.npmjs.org/define-properties/-/define-properties-1.1.4.tgz", "integrity": "sha512-uckOqKcfaVvtBdsVkdPv3XjveQJsNQqmhXgRi8uhvWWuPYZCNlzT8qAyblUgNoXdHdjMTzAqeGjAoli8f+bzPA==", - "dev": true, "requires": { "has-property-descriptors": "^1.0.0", "object-keys": "^1.1.1" @@ -33280,6 +33958,11 @@ "path-type": "^4.0.0" } }, + "discontinuous-range": { + "version": "1.0.0", + "resolved": "https://registry.npmjs.org/discontinuous-range/-/discontinuous-range-1.0.0.tgz", + "integrity": "sha512-c68LpLbO+7kP/b1Hr1qs8/BJ09F5khZGTxqxZuhzxpmwJKOgRFHJWIb9/KmqnqHhLdO55aOxFH/EGBvUQbL/RQ==" + }, "dns-equal": { "version": "1.0.0", 
"resolved": "https://registry.npmjs.org/dns-equal/-/dns-equal-1.0.0.tgz", @@ -33542,6 +34225,7 @@ "@types/mysql": "^2.15.21", "@types/node": "^18.7.16", "@types/node-cache": "^4.2.5", + "@types/pg": "8.6.5", "@types/triple-beam": "^1.3.2", "@types/uuid": "^8.3.4", "ajv": "^8.11.0", @@ -33552,7 +34236,12 @@ "jsonschema": "^1.4.1", "mysql": "^2.18.1", "node-cache": "^5.1.2", + "pg": "^8.8.0", + "pg-query-stream": "^4.2.4", + "pgsql-ast-parser": "^11.0.0", "promise-mysql": "^5.2.0", + "slonik": "^31.2.1", + "slonik-interceptor-field-name-transformation": "^1.6.5", "tslib": "^2.4.0", "typescript": "^4.8.3", "uuid": "^9.0.0", @@ -33785,6 +34474,11 @@ "is-symbol": "^1.0.2" } }, + "es6-error": { + "version": "4.1.1", + "resolved": "https://registry.npmjs.org/es6-error/-/es6-error-4.1.1.tgz", + "integrity": "sha512-Um/+FxMr9CISWh0bi5Zv0iOD+4cFh5qLeks1qhAopKVAJw3drgKbKySikp7wGhDL0HPeaja0P5ULZrxLkniUVg==" + }, "escalade": { "version": "3.1.1", "resolved": "https://registry.npmjs.org/escalade/-/escalade-3.1.1.tgz", @@ -34614,11 +35308,53 @@ "resolved": "https://registry.npmjs.org/fast-json-stable-stringify/-/fast-json-stable-stringify-2.1.0.tgz", "integrity": "sha512-lhd/wF+Lk98HZoTCtlVraHtfh5XYijIjalXck7saUtuanSDyLMxnHhSXEDJqHxD7msR8D0uCmqlkwjCV8xvwHw==" }, + "fast-json-stringify": { + "version": "2.7.13", + "resolved": "https://registry.npmjs.org/fast-json-stringify/-/fast-json-stringify-2.7.13.tgz", + "integrity": "sha512-ar+hQ4+OIurUGjSJD1anvYSDcUflywhKjfxnsW4TBTD7+u0tJufv6DKRWoQk3vI6YBOWMoz0TQtfbe7dxbQmvA==", + "requires": { + "ajv": "^6.11.0", + "deepmerge": "^4.2.2", + "rfdc": "^1.2.0", + "string-similarity": "^4.0.1" + }, + "dependencies": { + "ajv": { + "version": "6.12.6", + "resolved": "https://registry.npmjs.org/ajv/-/ajv-6.12.6.tgz", + "integrity": "sha512-j3fVLgvTo527anyYyJOGTYJbG+vnnQYvE0m5mmkc1TK+nxAppkCLMIL0aZ4dblVCNoGShhm+kzE4ZUykBoMg4g==", + "requires": { + "fast-deep-equal": "^3.1.1", + "fast-json-stable-stringify": "^2.0.0", + "json-schema-traverse": 
"^0.4.1", + "uri-js": "^4.2.2" + } + }, + "json-schema-traverse": { + "version": "0.4.1", + "resolved": "https://registry.npmjs.org/json-schema-traverse/-/json-schema-traverse-0.4.1.tgz", + "integrity": "sha512-xbbCH5dCYU5T8LcEhhuh7HJ88HXuW3qsI3Y0zOZFKfZEHcpWiHU/Jxzk629Brsab/mMiHQti9wMP+845RPe3Vg==" + } + } + }, "fast-levenshtein": { "version": "2.0.6", "resolved": "https://registry.npmjs.org/fast-levenshtein/-/fast-levenshtein-2.0.6.tgz", "integrity": "sha512-DCXu6Ifhqcks7TZKY3Hxp3y6qphY5SJZmrWMDrKcERSOXWQdMhU9Ig/PYrzyw/ul9jOIyh0N4M0tbC5hodg8dw==" }, + "fast-printf": { + "version": "1.6.9", + "resolved": "https://registry.npmjs.org/fast-printf/-/fast-printf-1.6.9.tgz", + "integrity": "sha512-FChq8hbz65WMj4rstcQsFB0O7Cy++nmbNfLYnD9cYv2cRn8EG6k/MGn9kO/tjO66t09DLDugj3yL+V2o6Qftrg==", + "requires": { + "boolean": "^3.1.4" + } + }, + "fast-safe-stringify": { + "version": "2.1.1", + "resolved": "https://registry.npmjs.org/fast-safe-stringify/-/fast-safe-stringify-2.1.1.tgz", + "integrity": "sha512-W+KJc2dmILlPplD/H4K9l9LcAHAfPtP6BY84uVLXQ6Evcz9Lcg33Y2z1IVblT6xdY54PXYVHEv+0Wpq8Io6zkA==" + }, "fastq": { "version": "1.13.0", "resolved": "https://registry.npmjs.org/fastq/-/fastq-1.13.0.tgz", @@ -35137,6 +35873,48 @@ "dev": true, "peer": true }, + "get-stack-trace": { + "version": "2.1.1", + "resolved": "https://registry.npmjs.org/get-stack-trace/-/get-stack-trace-2.1.1.tgz", + "integrity": "sha512-dhqSDD9lHU/6FvIZ9KbXGmVK6IKr9ZskZtNOUvhlCiONlnqatu4FmAeRbxCfJJVuQ0NWfz6dAbibKQg19B7AmQ==", + "requires": { + "bluebird": "^3.7.1", + "source-map": "^0.8.0-beta.0" + }, + "dependencies": { + "source-map": { + "version": "0.8.0-beta.0", + "resolved": "https://registry.npmjs.org/source-map/-/source-map-0.8.0-beta.0.tgz", + "integrity": "sha512-2ymg6oRBpebeZi9UUNsgQ89bhx01TcTkmNTGnNO88imTmbSgy4nfujrgVEFKWpMTEGA11EDkTt7mqObTPdigIA==", + "requires": { + "whatwg-url": "^7.0.0" + } + }, + "tr46": { + "version": "1.0.1", + "resolved": "https://registry.npmjs.org/tr46/-/tr46-1.0.1.tgz", + 
"integrity": "sha512-dTpowEjclQ7Kgx5SdBkqRzVhERQXov8/l9Ft9dVM9fmg0W0KQSVaXX9T4i6twCPNtYiZM53lpSSUAwJbFPOHxA==", + "requires": { + "punycode": "^2.1.0" + } + }, + "webidl-conversions": { + "version": "4.0.2", + "resolved": "https://registry.npmjs.org/webidl-conversions/-/webidl-conversions-4.0.2.tgz", + "integrity": "sha512-YQ+BmxuTgd6UXZW3+ICGfyqRyHXVlD5GtQr5+qjiNW7bF0cqrzX500HVXPBOvgXb5YnzDd+h0zqyv61KUD7+Sg==" + }, + "whatwg-url": { + "version": "7.1.0", + "resolved": "https://registry.npmjs.org/whatwg-url/-/whatwg-url-7.1.0.tgz", + "integrity": "sha512-WUu7Rg1DroM7oQvGWfOiAK21n74Gg+T4elXEQYkOhtyLeWiJFoOGLXPKI/9gzIie9CtwVLm8wtw6YJdKyxSjeg==", + "requires": { + "lodash.sortby": "^4.7.0", + "tr46": "^1.0.1", + "webidl-conversions": "^4.0.2" + } + } + } + }, "get-stream": { "version": "6.0.1", "resolved": "https://registry.npmjs.org/get-stream/-/get-stream-6.0.1.tgz", @@ -35296,6 +36074,14 @@ "type-fest": "^0.20.2" } }, + "globalthis": { + "version": "1.0.3", + "resolved": "https://registry.npmjs.org/globalthis/-/globalthis-1.0.3.tgz", + "integrity": "sha512-sFdI5LyBiNTHjRd7cGPWapiHWMOXKyuBNX/cWJ3NfzrZQVa8GI/8cofCl74AOVqq9W5kNmguTIzJ/1s2gyI9wA==", + "requires": { + "define-properties": "^1.1.3" + } + }, "globalyzer": { "version": "0.1.0", "resolved": "https://registry.npmjs.org/globalyzer/-/globalyzer-0.1.0.tgz", @@ -35439,7 +36225,6 @@ "version": "1.0.0", "resolved": "https://registry.npmjs.org/has-property-descriptors/-/has-property-descriptors-1.0.0.tgz", "integrity": "sha512-62DVLZGoiEBDHQyqG4w9xCuZ7eJEwNmJRWw2VY84Oedb7WFcA27fiEVe8oUQx9hAUJ4ekurquucTGwsyO1XGdQ==", - "dev": true, "requires": { "get-intrinsic": "^1.1.1" } @@ -35755,6 +36540,15 @@ "integrity": "sha512-B4FFZ6q/T2jhhksgkbEW3HBvWIfDW85snkQgawt07S7J5QXTk6BkNV+0yAeZrM5QpMAdYlocGoljn0sJ/WQkFw==", "dev": true }, + "hyperid": { + "version": "2.3.1", + "resolved": "https://registry.npmjs.org/hyperid/-/hyperid-2.3.1.tgz", + "integrity": 
"sha512-mIbI7Ymn6MCdODaW1/6wdf5lvvXzmPsARN4zTLakMmcziBOuP4PxCBJvHF6kbAIHX6H4vAELx/pDmt0j6Th5RQ==", + "requires": { + "uuid": "^8.3.2", + "uuid-parse": "^1.1.0" + } + }, "iconv-lite": { "version": "0.4.24", "resolved": "https://registry.npmjs.org/iconv-lite/-/iconv-lite-0.4.24.tgz", @@ -35948,6 +36742,11 @@ } } }, + "int64-buffer": { + "version": "0.99.1007", + "resolved": "https://registry.npmjs.org/int64-buffer/-/int64-buffer-0.99.1007.tgz", + "integrity": "sha512-XDBEu44oSTqlvCSiOZ/0FoUkpWu/vwjJLGSKDabNISPQNZ5wub1FodGHBljRsrR0IXRPq7SslshZYMuA55CgTQ==" + }, "internal-slot": { "version": "1.0.3", "resolved": "https://registry.npmjs.org/internal-slot/-/internal-slot-1.0.3.tgz", @@ -36181,8 +36980,7 @@ "is-plain-object": { "version": "5.0.0", "resolved": "https://registry.npmjs.org/is-plain-object/-/is-plain-object-5.0.0.tgz", - "integrity": "sha512-VRSzKkbMm5jMDoKLbltAkFQ5Qr7VDiTFGXxYFXXowVj387GeGNOCsOH6Msy00SGZ3Fp84b1Naa1psqgcCIEP5Q==", - "dev": true + "integrity": "sha512-VRSzKkbMm5jMDoKLbltAkFQ5Qr7VDiTFGXxYFXXowVj387GeGNOCsOH6Msy00SGZ3Fp84b1Naa1psqgcCIEP5Q==" }, "is-potential-custom-element-name": { "version": "1.0.1", @@ -36303,6 +37101,11 @@ "integrity": "sha512-RHxMLp9lnKHGHRng9QFhRCMbYAcVpn69smSGcq3f36xjgVVWThj4qqLbTLlq7Ssj8B+fIQ1EuCEGI2lKsyQeIw==", "dev": true }, + "iso8601-duration": { + "version": "1.3.0", + "resolved": "https://registry.npmjs.org/iso8601-duration/-/iso8601-duration-1.3.0.tgz", + "integrity": "sha512-K4CiUBzo3YeWk76FuET/dQPH03WE04R94feo5TSKQCXpoXQt9E4yx2CnY737QZnSAI3PI4WlKo/zfqizGx52QQ==" + }, "isobject": { "version": "4.0.0", "resolved": "https://registry.npmjs.org/isobject/-/isobject-4.0.0.tgz", @@ -37564,8 +38367,7 @@ "lodash.sortby": { "version": "4.7.0", "resolved": "https://registry.npmjs.org/lodash.sortby/-/lodash.sortby-4.7.0.tgz", - "integrity": "sha512-HDWXG8isMntAyRF5vZ7xKuEvOhT4AhlRt/3czTSjvGUxjYCBVRQY48ViDHyfYz9VIoBkW4TMGQNapx+l3RUwdA==", - "dev": true + "integrity": 
"sha512-HDWXG8isMntAyRF5vZ7xKuEvOhT4AhlRt/3czTSjvGUxjYCBVRQY48ViDHyfYz9VIoBkW4TMGQNapx+l3RUwdA==" }, "lodash.uniq": { "version": "4.5.0", @@ -38027,6 +38829,11 @@ "resolved": "https://registry.npmjs.org/monotonic-timestamp/-/monotonic-timestamp-0.0.8.tgz", "integrity": "sha512-3fQw+dAni/JJ4rkvMY7EZOz+tM+yuhrY3tKLJk74YOp/DQR0Ip+9yiKzZrC40uQ+Kin86s5TOjmL6UmxljOAfA==" }, + "moo": { + "version": "0.5.1", + "resolved": "https://registry.npmjs.org/moo/-/moo-0.5.1.tgz", + "integrity": "sha512-I1mnb5xn4fO80BH9BLcF0yLypy2UKl+Cb01Fu0hJRkJjlCRtxZMWkTdAtDd5ZqCOxtCkhmRwyI57vWT+1iZ67w==" + }, "mri": { "version": "1.2.0", "resolved": "https://registry.npmjs.org/mri/-/mri-1.2.0.tgz", @@ -38043,6 +38850,11 @@ "resolved": "https://registry.npmjs.org/ms/-/ms-2.1.2.tgz", "integrity": "sha512-sGkPx+VjMtmA6MX27oA4FBFELFCZZ4S4XqeGOXCv68tT+jb3vk/RyaKWP0PTKyWtmLSM0b+adUTEvbs1PEaH2w==" }, + "multi-fork": { + "version": "0.0.2", + "resolved": "https://registry.npmjs.org/multi-fork/-/multi-fork-0.0.2.tgz", + "integrity": "sha512-SHWGuze0cZNiH+JGJQFlB1k7kZLGFCvW1Xo5Fcpe86KICkC3aVTJWpjUcmyYcLCB0I6gdzKLCia/bTIw2ggl8A==" + }, "multicast-dns": { "version": "7.2.5", "resolved": "https://registry.npmjs.org/multicast-dns/-/multicast-dns-7.2.5.tgz", @@ -38092,6 +38904,24 @@ "integrity": "sha512-OWND8ei3VtNC9h7V60qff3SVobHr996CTwgxubgyQYEpg290h9J0buyECNNJexkFm5sOajh5G116RYA1c8ZMSw==", "dev": true }, + "nearley": { + "version": "2.20.1", + "resolved": "https://registry.npmjs.org/nearley/-/nearley-2.20.1.tgz", + "integrity": "sha512-+Mc8UaAebFzgV+KpI5n7DasuuQCHA89dmwm7JXw3TV43ukfNQ9DnBH3Mdb2g/I4Fdxc26pwimBWvjIw0UAILSQ==", + "requires": { + "commander": "^2.19.0", + "moo": "^0.5.0", + "railroad-diagrams": "^1.0.0", + "randexp": "0.4.6" + }, + "dependencies": { + "commander": { + "version": "2.20.3", + "resolved": "https://registry.npmjs.org/commander/-/commander-2.20.3.tgz", + "integrity": "sha512-GpVkmM8vF2vQUkj2LvZmD35JxeJOLCwJ9cUkugyk2nuhbv3+mJvpLYYt+0+USMxE+oj+ey/lJEnhZw75x/OMcQ==" + } + } + }, 
"negotiator": { "version": "0.6.3", "resolved": "https://registry.npmjs.org/negotiator/-/negotiator-0.6.3.tgz", @@ -40031,8 +40861,7 @@ "object-keys": { "version": "1.1.1", "resolved": "https://registry.npmjs.org/object-keys/-/object-keys-1.1.1.tgz", - "integrity": "sha512-NuAESUOUMrlIXOfHKzD6bpPu3tYt3xvjNdRIQ+FeT0lNb4K8WR70CaDxhuNguS2XG+GjkyMwOzsN5ZktImfhLA==", - "dev": true + "integrity": "sha512-NuAESUOUMrlIXOfHKzD6bpPu3tYt3xvjNdRIQ+FeT0lNb4K8WR70CaDxhuNguS2XG+GjkyMwOzsN5ZktImfhLA==" }, "object.assign": { "version": "4.1.4", @@ -40060,8 +40889,7 @@ "obuf": { "version": "1.1.2", "resolved": "https://registry.npmjs.org/obuf/-/obuf-1.1.2.tgz", - "integrity": "sha512-PX1wu0AmAdPqOL1mWhqmlOd8kOIZQwGZw6rh7uby9fTc5lhaOWFLX3I6R1hrF9k3zUY40e6igsLGkDXK92LJNg==", - "dev": true + "integrity": "sha512-PX1wu0AmAdPqOL1mWhqmlOd8kOIZQwGZw6rh7uby9fTc5lhaOWFLX3I6R1hrF9k3zUY40e6igsLGkDXK92LJNg==" }, "on-finished": { "version": "2.4.1", @@ -40221,6 +41049,11 @@ } } }, + "p-defer": { + "version": "3.0.0", + "resolved": "https://registry.npmjs.org/p-defer/-/p-defer-3.0.0.tgz", + "integrity": "sha512-ugZxsxmtTln604yeYd29EGrNhazN2lywetzpKhfmQjW/VJmhpDmWbiX+h0zL8V91R0UXkhb3KtPmyq9PZw3aYw==" + }, "p-each-series": { "version": "2.2.0", "resolved": "https://registry.npmjs.org/p-each-series/-/p-each-series-2.2.0.tgz", @@ -40294,6 +41127,11 @@ "integrity": "sha512-R4nPAVTAU0B9D35/Gk3uJf/7XYbQcyohSKdvAxIRSNghFl4e71hVoGnBNQz9cWaXxO2I10KTC+3jMdvvoKw6dQ==", "dev": true }, + "packet-reader": { + "version": "1.0.0", + "resolved": "https://registry.npmjs.org/packet-reader/-/packet-reader-1.0.0.tgz", + "integrity": "sha512-HAKu/fG3HpHFO0AA8WE8q2g+gBJaZ9MG7fcKk+IJPLTGAD6Psw4443l+9DGRbOIh3/aXr7Phy0TjilYivJo5XQ==" + }, "param-case": { "version": "3.0.4", "resolved": "https://registry.npmjs.org/param-case/-/param-case-3.0.4.tgz", @@ -40417,6 +41255,128 @@ "resolved": "https://registry.npmjs.org/performance-now/-/performance-now-2.1.0.tgz", "integrity": 
"sha512-7EAHlyLHI56VEIdK57uwHdHKIaAGbnXPiw0yWbarQZOKaKpvUIgW0jWRVLiatnM+XXlSwsanIBH/hzGMJulMow==" }, + "pg": { + "version": "8.8.0", + "resolved": "https://registry.npmjs.org/pg/-/pg-8.8.0.tgz", + "integrity": "sha512-UXYN0ziKj+AeNNP7VDMwrehpACThH7LUl/p8TDFpEUuSejCUIwGSfxpHsPvtM6/WXFy6SU4E5RG4IJV/TZAGjw==", + "requires": { + "buffer-writer": "2.0.0", + "packet-reader": "1.0.0", + "pg-connection-string": "^2.5.0", + "pg-pool": "^3.5.2", + "pg-protocol": "^1.5.0", + "pg-types": "^2.1.0", + "pgpass": "1.x" + } + }, + "pg-connection-string": { + "version": "2.5.0", + "resolved": "https://registry.npmjs.org/pg-connection-string/-/pg-connection-string-2.5.0.tgz", + "integrity": "sha512-r5o/V/ORTA6TmUnyWZR9nCj1klXCO2CEKNRlVuJptZe85QuhFayC7WeMic7ndayT5IRIR0S0xFxFi2ousartlQ==" + }, + "pg-copy-streams": { + "version": "6.0.4", + "resolved": "https://registry.npmjs.org/pg-copy-streams/-/pg-copy-streams-6.0.4.tgz", + "integrity": "sha512-FH6q2nFo0n2cFacLyIKorjDz8AOYtxrAANx1XMvYbKWNM2geY731gZstuP4mXMlqO6urRl9oIscFxf3GMIg3Ng==", + "requires": { + "obuf": "^1.1.2" + } + }, + "pg-copy-streams-binary": { + "version": "2.2.0", + "resolved": "https://registry.npmjs.org/pg-copy-streams-binary/-/pg-copy-streams-binary-2.2.0.tgz", + "integrity": "sha512-jPCWgTR8004wz5XOI2sc09+IMwE7YMeINYCabwPMCPtlgj2ay81VLCClMkj/u+xOeisRcN8vCrIZ4FrqlaTyBQ==", + "requires": { + "bl": "^4.0.3", + "bufferput": "^0.1.3", + "ieee754": "^1.1.13", + "int64-buffer": "^0.99.1007", + "multi-fork": "0.0.2", + "through2": "^3.0.1" + }, + "dependencies": { + "through2": { + "version": "3.0.2", + "resolved": "https://registry.npmjs.org/through2/-/through2-3.0.2.tgz", + "integrity": "sha512-enaDQ4MUyP2W6ZyT6EsMzqBPZaM/avg8iuo+l2d3QCs0J+6RaqkHV/2/lOwDTueBHeJ/2LG9lrLW3d5rWPucuQ==", + "requires": { + "inherits": "^2.0.4", + "readable-stream": "2 || 3" + } + } + } + }, + "pg-cursor": { + "version": "2.7.4", + "resolved": "https://registry.npmjs.org/pg-cursor/-/pg-cursor-2.7.4.tgz", + "integrity": 
"sha512-CNWwOzTTZ9QvphoOL+Wg/7pmVr9GnAWBjPbuK2FRclrB4A/WRO/ssCJ9BlkzIGmmofK2M/LyokNHgsLSn+fMHA==", + "requires": {} + }, + "pg-int8": { + "version": "1.0.1", + "resolved": "https://registry.npmjs.org/pg-int8/-/pg-int8-1.0.1.tgz", + "integrity": "sha512-WCtabS6t3c8SkpDBUlb1kjOs7l66xsGdKpIPZsg4wR+B3+u9UAum2odSsF9tnvxg80h4ZxLWMy4pRjOsFIqQpw==" + }, + "pg-numeric": { + "version": "1.0.2", + "resolved": "https://registry.npmjs.org/pg-numeric/-/pg-numeric-1.0.2.tgz", + "integrity": "sha512-BM/Thnrw5jm2kKLE5uJkXqqExRUY/toLHda65XgFTBTFYZyopbKjBe29Ii3RbkvlsMoFwD+tHeGaCjjv0gHlyw==" + }, + "pg-pool": { + "version": "3.5.2", + "resolved": "https://registry.npmjs.org/pg-pool/-/pg-pool-3.5.2.tgz", + "integrity": "sha512-His3Fh17Z4eg7oANLob6ZvH8xIVen3phEZh2QuyrIl4dQSDVEabNducv6ysROKpDNPSD+12tONZVWfSgMvDD9w==", + "requires": {} + }, + "pg-protocol": { + "version": "1.5.0", + "resolved": "https://registry.npmjs.org/pg-protocol/-/pg-protocol-1.5.0.tgz", + "integrity": "sha512-muRttij7H8TqRNu/DxrAJQITO4Ac7RmX3Klyr/9mJEOBeIpgnF8f9jAfRz5d3XwQZl5qBjF9gLsUtMPJE0vezQ==" + }, + "pg-query-stream": { + "version": "4.2.4", + "resolved": "https://registry.npmjs.org/pg-query-stream/-/pg-query-stream-4.2.4.tgz", + "integrity": "sha512-Et3gTrWn4C2rj4LVioNq1QDd7aH/3mSJcBm79jZALv3wopvx9bWENtbOYZbHQ6KM+IkfFxs0JF1ZLjMDJ9/N6Q==", + "requires": { + "pg-cursor": "^2.7.4" + } + }, + "pg-types": { + "version": "2.2.0", + "resolved": "https://registry.npmjs.org/pg-types/-/pg-types-2.2.0.tgz", + "integrity": "sha512-qTAAlrEsl8s4OiEQY69wDvcMIdQN6wdz5ojQiOy6YRMuynxenON0O5oCpJI6lshc6scgAY8qvJ2On/p+CXY0GA==", + "requires": { + "pg-int8": "1.0.1", + "postgres-array": "~2.0.0", + "postgres-bytea": "~1.0.0", + "postgres-date": "~1.0.4", + "postgres-interval": "^1.1.0" + } + }, + "pgpass": { + "version": "1.0.5", + "resolved": "https://registry.npmjs.org/pgpass/-/pgpass-1.0.5.tgz", + "integrity": "sha512-FdW9r/jQZhSeohs1Z3sI1yxFQNFvMcnmfuj4WBMUTxOrAyLMaTcE1aAMBiTlbMNaXvBCQuVi0R7hd8udDSP7ug==", + "requires": { + 
"split2": "^4.1.0" + }, + "dependencies": { + "split2": { + "version": "4.1.0", + "resolved": "https://registry.npmjs.org/split2/-/split2-4.1.0.tgz", + "integrity": "sha512-VBiJxFkxiXRlUIeyMQi8s4hgvKCSjtknJv/LVYbrgALPwf5zSKmEwV9Lst25AkvMDnvxODugjdl6KZgwKM1WYQ==" + } + } + }, + "pgsql-ast-parser": { + "version": "11.0.0", + "resolved": "https://registry.npmjs.org/pgsql-ast-parser/-/pgsql-ast-parser-11.0.0.tgz", + "integrity": "sha512-vY5dr03b1ZnobWBPi5dy0fovK/kgXkueTLKWwVaC+Ql6VITZCURAHAnT3yAczZAjsbOyaP5Jr6A+Ol+pru8e9A==", + "requires": { + "moo": "^0.5.1", + "nearley": "^2.19.5" + } + }, "picocolors": { "version": "1.0.0", "resolved": "https://registry.npmjs.org/picocolors/-/picocolors-1.0.0.tgz", @@ -40938,6 +41898,34 @@ "integrity": "sha512-1NNCs6uurfkVbeXG4S8JFT9t19m45ICnif8zWLd5oPSZ50QnwMfK+H3jv408d4jw/7Bttv5axS5IiHoLaVNHeQ==", "dev": true }, + "postgres-array": { + "version": "2.0.0", + "resolved": "https://registry.npmjs.org/postgres-array/-/postgres-array-2.0.0.tgz", + "integrity": "sha512-VpZrUqU5A69eQyW2c5CA1jtLecCsN2U/bD6VilrFDWq5+5UIEVO7nazS3TEcHf1zuPYO/sqGvUvW62g86RXZuA==" + }, + "postgres-bytea": { + "version": "1.0.0", + "resolved": "https://registry.npmjs.org/postgres-bytea/-/postgres-bytea-1.0.0.tgz", + "integrity": "sha512-xy3pmLuQqRBZBXDULy7KbaitYqLcmxigw14Q5sj8QBVLqEwXfeybIKVWiqAXTlcvdvb0+xkOtDbfQMOf4lST1w==" + }, + "postgres-date": { + "version": "1.0.7", + "resolved": "https://registry.npmjs.org/postgres-date/-/postgres-date-1.0.7.tgz", + "integrity": "sha512-suDmjLVQg78nMK2UZ454hAG+OAW+HQPZ6n++TNDUX+L0+uUlLywnoxJKDou51Zm+zTCjrCl0Nq6J9C5hP9vK/Q==" + }, + "postgres-interval": { + "version": "1.2.0", + "resolved": "https://registry.npmjs.org/postgres-interval/-/postgres-interval-1.2.0.tgz", + "integrity": "sha512-9ZhXKM/rw350N1ovuWHbGxnGh/SNJ4cnxHiM0rxE4VN41wsg8P8zWn9hv/buK00RP4WvlOyr/RBDiptyxVbkZQ==", + "requires": { + "xtend": "^4.0.0" + } + }, + "postgres-range": { + "version": "1.1.3", + "resolved": 
"https://registry.npmjs.org/postgres-range/-/postgres-range-1.1.3.tgz", + "integrity": "sha512-VdlZoocy5lCP0c/t66xAfclglEapXPCIVhqqJRncYpvbCgImF0w67aPKfbqUMr72tO2k5q0TdTZwCLjPTI6C9g==" + }, "prelude-ls": { "version": "1.2.1", "resolved": "https://registry.npmjs.org/prelude-ls/-/prelude-ls-1.2.1.tgz", @@ -41308,6 +42296,20 @@ "integrity": "sha512-ARhCpm70fzdcvNQfPoy49IaanKkTlRWF2JMzqhcJbhSFRZv7nPTvZJdcY7301IPmvW+/p0RgIWnQDLJxifsQ7g==", "dev": true }, + "railroad-diagrams": { + "version": "1.0.0", + "resolved": "https://registry.npmjs.org/railroad-diagrams/-/railroad-diagrams-1.0.0.tgz", + "integrity": "sha512-cz93DjNeLY0idrCNOH6PviZGRN9GJhsdm9hpn1YCS879fj4W+x5IFJhhkRZcwVgMmFF7R82UA/7Oh+R8lLZg6A==" + }, + "randexp": { + "version": "0.4.6", + "resolved": "https://registry.npmjs.org/randexp/-/randexp-0.4.6.tgz", + "integrity": "sha512-80WNmd9DA0tmZrw9qQa62GPPWfuXJknrmVmLcxvq4uZBdYqb1wYoKTmnlGUchvVWe0XiLupYkBoXVOxz3C8DYQ==", + "requires": { + "discontinuous-range": "1.0.0", + "ret": "~0.1.10" + } + }, "randombytes": { "version": "2.1.0", "resolved": "https://registry.npmjs.org/randombytes/-/randombytes-2.1.0.tgz", @@ -41867,6 +42869,11 @@ } } }, + "ret": { + "version": "0.1.15", + "resolved": "https://registry.npmjs.org/ret/-/ret-0.1.15.tgz", + "integrity": "sha512-TTlYpa+OL+vMMNG24xSlQGEJ3B/RzEfUlLct7b5G/ytav+wPrplCpVMFuwzXbkecJrb6IYo1iFb0S9v37754mg==" + }, "retry": { "version": "0.13.1", "resolved": "https://registry.npmjs.org/retry/-/retry-0.13.1.tgz", @@ -41879,6 +42886,11 @@ "integrity": "sha512-U9nH88a3fc/ekCF1l0/UP1IosiuIjyTh7hBvXVMHYgVcfGvt897Xguj2UOLDeI5BG2m7/uwyaLVT6fbtCwTyzw==", "dev": true }, + "rfdc": { + "version": "1.3.0", + "resolved": "https://registry.npmjs.org/rfdc/-/rfdc-1.3.0.tgz", + "integrity": "sha512-V2hovdzFbOi77/WajaSMXk2OLm+xNIeQdMMuB7icj7bk6zi2F8GGAxigcnDFpJHbNyNcgyJDiP+8nOrY5cZGrA==" + }, "rimraf": { "version": "3.0.2", "resolved": "https://registry.npmjs.org/rimraf/-/rimraf-3.0.2.tgz", @@ -41887,6 +42899,19 @@ "glob": "^7.1.3" } }, + 
"roarr": { + "version": "7.12.3", + "resolved": "https://registry.npmjs.org/roarr/-/roarr-7.12.3.tgz", + "integrity": "sha512-EhX9kTlWj4wTqQ0qVX2XvnYZqXhyi3zO/Rq7zAkCcmnawBAZrRpii71PiFbjyWS8yi1TrxWrShpcGBT6WGdCAw==", + "requires": { + "boolean": "^3.1.4", + "fast-json-stringify": "^2.7.10", + "fast-printf": "^1.6.9", + "fast-safe-stringify": "^2.1.1", + "globalthis": "^1.0.2", + "semver-compare": "^1.0.0" + } + }, "rollup": { "version": "2.79.0", "resolved": "https://registry.npmjs.org/rollup/-/rollup-2.79.0.tgz", @@ -42148,6 +43173,11 @@ "lru-cache": "^6.0.0" } }, + "semver-compare": { + "version": "1.0.0", + "resolved": "https://registry.npmjs.org/semver-compare/-/semver-compare-1.0.0.tgz", + "integrity": "sha512-YM3/ITh2MJ5MtzaM429anh+x2jiLVjqILF4m4oyQB18W7Ggea7BfqdH/wGMK7dDiMghv/6WG7znWMwUDzJiXow==" + }, "semver-diff": { "version": "3.1.1", "resolved": "https://registry.npmjs.org/semver-diff/-/semver-diff-3.1.1.tgz", @@ -42218,6 +43248,14 @@ } } }, + "serialize-error": { + "version": "8.1.0", + "resolved": "https://registry.npmjs.org/serialize-error/-/serialize-error-8.1.0.tgz", + "integrity": "sha512-3NnuWfM6vBYoy5gZFvHiYsVbafvI9vZv/+jlIigFn4oP4zjNPK3LhcY0xSCgeb1a5L8jO71Mit9LlNoi2UfDDQ==", + "requires": { + "type-fest": "^0.20.2" + } + }, "serialize-javascript": { "version": "6.0.0", "resolved": "https://registry.npmjs.org/serialize-javascript/-/serialize-javascript-6.0.0.tgz", @@ -42487,6 +43525,95 @@ "resolved": "https://registry.npmjs.org/sliced/-/sliced-0.0.5.tgz", "integrity": "sha512-9bYT917D6H3+q8GlQBJmLVz3bc4OeVGfZ2BB12wvLnluTGfG6/8UdOUbKJDW1EEx9SZMDbjnatkau5/XcUeyOw==" }, + "slonik": { + "version": "31.2.1", + "resolved": "https://registry.npmjs.org/slonik/-/slonik-31.2.1.tgz", + "integrity": "sha512-yxkHVGpz/Wn+1trwV8Sk5YsQgFWekNC/aVTBkOgpP+qtOPADs9/Qp/uJdxPL1l2h5zS9RuxKazuqr9yv4qEAQw==", + "requires": { + "concat-stream": "^2.0.0", + "es6-error": "^4.1.1", + "fast-safe-stringify": "^2.1.1", + "get-stack-trace": "^2.1.1", + "hyperid": "^2.3.1", + 
"is-plain-object": "^5.0.0", + "iso8601-duration": "^1.3.0", + "p-defer": "^3.0.0", + "pg": "^8.8.0", + "pg-copy-streams": "^6.0.4", + "pg-copy-streams-binary": "^2.2.0", + "pg-cursor": "^2.7.4", + "pg-protocol": "^1.5.0", + "pg-types": "^4.0.0", + "postgres-array": "^3.0.1", + "postgres-interval": "^4.0.0", + "roarr": "^7.12.3", + "serialize-error": "^8.0.0", + "through2": "^4.0.2", + "zod": "^3.19.0" + }, + "dependencies": { + "pg-types": { + "version": "4.0.0", + "resolved": "https://registry.npmjs.org/pg-types/-/pg-types-4.0.0.tgz", + "integrity": "sha512-q4I7zG+d2mDg52WdrOA0TmBvab9ZBC8DE8+opl3gSegnH5ml+0pKbICOfRKXgwQ5aa6NRjLoF5pEDs0YpGvFrw==", + "requires": { + "pg-int8": "1.0.1", + "pg-numeric": "1.0.2", + "postgres-array": "~3.0.1", + "postgres-bytea": "~3.0.0", + "postgres-date": "~2.0.1", + "postgres-interval": "^3.0.0", + "postgres-range": "^1.1.1" + }, + "dependencies": { + "postgres-interval": { + "version": "3.0.0", + "resolved": "https://registry.npmjs.org/postgres-interval/-/postgres-interval-3.0.0.tgz", + "integrity": "sha512-BSNDnbyZCXSxgA+1f5UU2GmwhoI0aU5yMxRGO8CdFEcY2BQF9xm/7MqKnYoM1nJDk8nONNWDk9WeSmePFhQdlw==" + } + } + }, + "postgres-array": { + "version": "3.0.1", + "resolved": "https://registry.npmjs.org/postgres-array/-/postgres-array-3.0.1.tgz", + "integrity": "sha512-h7i53Dw2Yq3a1uuZ6lbVFAkvMMwssJ8jkzeAg0XaZm1XIFF/t/s+tockdqbWTymyEm07dVenOQbFisEi+kj8uA==" + }, + "postgres-bytea": { + "version": "3.0.0", + "resolved": "https://registry.npmjs.org/postgres-bytea/-/postgres-bytea-3.0.0.tgz", + "integrity": "sha512-CNd4jim9RFPkObHSjVHlVrxoVQXz7quwNFpz7RY1okNNme49+sVyiTvTRobiLV548Hx/hb1BG+iE7h9493WzFw==", + "requires": { + "obuf": "~1.1.2" + } + }, + "postgres-date": { + "version": "2.0.1", + "resolved": "https://registry.npmjs.org/postgres-date/-/postgres-date-2.0.1.tgz", + "integrity": "sha512-YtMKdsDt5Ojv1wQRvUhnyDJNSr2dGIC96mQVKz7xufp07nfuFONzdaowrMHjlAzY6GDLd4f+LUHHAAM1h4MdUw==" + }, + "postgres-interval": { + "version": "4.0.0", + 
"resolved": "https://registry.npmjs.org/postgres-interval/-/postgres-interval-4.0.0.tgz", + "integrity": "sha512-OWeL7kyEKJiY7mCmVY+c7/6uhAlt/colA/Nl/Mgls/M3jssrQzFra04iNWnD/qAmG7TsCSgWAASCyiaoBOP/sg==" + } + } + }, + "slonik-interceptor-field-name-transformation": { + "version": "1.6.5", + "resolved": "https://registry.npmjs.org/slonik-interceptor-field-name-transformation/-/slonik-interceptor-field-name-transformation-1.6.5.tgz", + "integrity": "sha512-SF7EenJQDKQBrMnnrvFPhZu2W9d8I9Egajw7hzemklqwboA7yK12jtSqKOHiFPpacH0lVP1xHdUh4aZaZKmbYw==", + "requires": { + "camelcase": "^6.2.1", + "core-js": "^3.19.3" + }, + "dependencies": { + "camelcase": { + "version": "6.3.0", + "resolved": "https://registry.npmjs.org/camelcase/-/camelcase-6.3.0.tgz", + "integrity": "sha512-Gmy6FhYlCY7uOElZUSbxo2UCDH8owEk996gkbrpsgGtrJLM3J7jGxl9Ic7Qwwj4ivOE5AWZWRMecDdF7hqGjFA==" + } + } + }, "snappy": { "version": "7.1.1", "resolved": "https://registry.npmjs.org/snappy/-/snappy-7.1.1.tgz", @@ -42830,6 +43957,11 @@ "strip-ansi": "^6.0.0" } }, + "string-similarity": { + "version": "4.0.4", + "resolved": "https://registry.npmjs.org/string-similarity/-/string-similarity-4.0.4.tgz", + "integrity": "sha512-/q/8Q4Bl4ZKAPjj8WerIBJWALKkaPRfrvhfF8k/B23i4nzrlRj2/go1m90In7nG/3XDSbOo0+pu6RvCTM9RGMQ==" + }, "string-width": { "version": "2.1.1", "resolved": "https://registry.npmjs.org/string-width/-/string-width-2.1.1.tgz", @@ -43550,7 +44682,6 @@ "version": "4.0.2", "resolved": "https://registry.npmjs.org/through2/-/through2-4.0.2.tgz", "integrity": "sha512-iOqSav00cVxEEICeD7TjLB1sueEL+81Wpzp2bY17uZjZN0pWZPuo4suZ/61VujxmqSGFfgOcNuTZ85QJwNZQpw==", - "dev": true, "requires": { "readable-stream": "3" }, @@ -43559,7 +44690,6 @@ "version": "3.6.0", "resolved": "https://registry.npmjs.org/readable-stream/-/readable-stream-3.6.0.tgz", "integrity": "sha512-BViHy7LKeTz4oNnkcLJ+lVSL6vpiFeX6/d3oSH8zCW7UxP2onchk+vTGB143xuFjHS3deTgkKoXXymXqymiIdA==", - "dev": true, "requires": { "inherits": "^2.0.3", 
"string_decoder": "^1.1.1", @@ -43812,8 +44942,7 @@ "type-fest": { "version": "0.20.2", "resolved": "https://registry.npmjs.org/type-fest/-/type-fest-0.20.2.tgz", - "integrity": "sha512-Ne+eE4r0/iWnpAxD852z3A+N0Bt5RN//NjJwRd2VFHEmrywxf5vsZlh4R6lixl6B+wz/8d+maTSAkN1FIkI3LQ==", - "dev": true + "integrity": "sha512-Ne+eE4r0/iWnpAxD852z3A+N0Bt5RN//NjJwRd2VFHEmrywxf5vsZlh4R6lixl6B+wz/8d+maTSAkN1FIkI3LQ==" }, "type-is": { "version": "1.6.18", @@ -43824,6 +44953,11 @@ "mime-types": "~2.1.24" } }, + "typedarray": { + "version": "0.0.6", + "resolved": "https://registry.npmjs.org/typedarray/-/typedarray-0.0.6.tgz", + "integrity": "sha512-/aCDEGatGvZ2BIk+HmLf4ifCJFwvKFNb9/JeZPMulfgFracn9QFcAf5GO8B/mweUjSoblS5In0cWhqpfs/5PQA==" + }, "typescript": { "version": "4.8.3", "resolved": "https://registry.npmjs.org/typescript/-/typescript-4.8.3.tgz", @@ -44008,8 +45142,12 @@ "uuid": { "version": "8.3.2", "resolved": "https://registry.npmjs.org/uuid/-/uuid-8.3.2.tgz", - "integrity": "sha512-+NYs2QeMWy+GWFOEm9xnn6HCDp0l7QBD7ml8zLUmJ+93Q5NF0NocErnwkTkXVFNiX3/fpC6afS8Dhb/gz7R7eg==", - "dev": true + "integrity": "sha512-+NYs2QeMWy+GWFOEm9xnn6HCDp0l7QBD7ml8zLUmJ+93Q5NF0NocErnwkTkXVFNiX3/fpC6afS8Dhb/gz7R7eg==" + }, + "uuid-parse": { + "version": "1.1.0", + "resolved": "https://registry.npmjs.org/uuid-parse/-/uuid-parse-1.1.0.tgz", + "integrity": "sha512-OdmXxA8rDsQ7YpNVbKSJkNzTw2I+S5WsbMDnCtIWSQaosNAcWtFuI/YK1TjzUI6nbkgiqEyh8gWngfcv8Asd9A==" }, "v8-compile-cache-lib": { "version": "3.0.1", @@ -45390,8 +46528,7 @@ "xtend": { "version": "4.0.2", "resolved": "https://registry.npmjs.org/xtend/-/xtend-4.0.2.tgz", - "integrity": "sha512-LKYU1iAXJXUgAXn9URjiu+MWhyUXHsvfp7mcuYm9dSUKK0/CjtrUwFAxD82/mCWbtLsGjFIad0wIsod4zrTAEQ==", - "dev": true + "integrity": "sha512-LKYU1iAXJXUgAXn9URjiu+MWhyUXHsvfp7mcuYm9dSUKK0/CjtrUwFAxD82/mCWbtLsGjFIad0wIsod4zrTAEQ==" }, "y18n": { "version": "5.0.8", @@ -45622,6 +46759,11 @@ "optional": true } } + }, + "zod": { + "version": "3.19.1", + "resolved": 
"https://registry.npmjs.org/zod/-/zod-3.19.1.tgz", + "integrity": "sha512-LYjZsEDhCdYET9ikFu6dVPGp2YH9DegXjdJToSzD9rO6fy4qiRYFoyEYwps88OseJlPyl2NOe2iJuhEhL7IpEA==" } } } diff --git a/packages/core/package.json b/packages/core/package.json index 81736533..fe1f0109 100644 --- a/packages/core/package.json +++ b/packages/core/package.json @@ -18,7 +18,12 @@ "jsonschema": "^1.4.1", "mysql": "^2.18.1", "node-cache": "^5.1.2", + "pg": "^8.8.0", + "pg-query-stream": "^4.2.4", + "pgsql-ast-parser": "^11.0.0", "promise-mysql": "^5.2.0", + "slonik": "^31.2.1", + "slonik-interceptor-field-name-transformation": "^1.6.5", "tslib": "^2.4.0", "uuid": "^9.0.0", "validate.js": "^0.13.1", @@ -33,6 +38,7 @@ "@types/mysql": "^2.15.21", "@types/node": "^18.7.16", "@types/node-cache": "^4.2.5", + "@types/pg": "8.6.5", "@types/triple-beam": "^1.3.2", "@types/uuid": "^8.3.4", "typescript": "^4.8.3" diff --git a/packages/core/src/database/contexts/appEventContext.ts b/packages/core/src/database/contexts/appEventContext.ts index 6e2b0631..fd18ac14 100644 --- a/packages/core/src/database/contexts/appEventContext.ts +++ b/packages/core/src/database/contexts/appEventContext.ts @@ -1,21 +1,24 @@ -import { AppEvent, AppEventFilter } from "../../types"; -import { SubContext } from "./subContext"; +import { sql } from "slonik"; +import { AppEventFilter } from "../../types"; +import { appEvent, AppEvent } from "../databaseTypes"; +import { joinAnd, joinIdentifier } from "./helper"; +import { QueryContext } from "./queryContext"; -export class AppEventContext extends SubContext { - public async addAppEvent(event: AppEvent): Promise { - const newEvent = await this.query( - "INSERT INTO app_events (`program`, `date`, `type`) VALUES (?,?,?) 
RETURNING `id`, `program`, `date`, `type`", - [event.program, event.date, event.type], +export class AppEventContext extends QueryContext { + public async addAppEvent(event: Readonly): Promise { + return this.con.one( + sql.type(appEvent)` + INSERT INTO app_events (program, date, type) + VALUES (${event.program},${sql.timestamp(event.date)},${event.type}) + RETURNING id, program, date, type`, ); - return newEvent[0]; } - public async updateAppEvent(event: AppEvent): Promise { + public async updateAppEvent(event: Readonly): Promise { await this.update( "app_events", - (updates, values) => { - updates.push("`date` = ?"); - values.push(event.date); + () => { + return [sql`date = ${sql.timestamp(event.date)}`]; }, { column: "id", @@ -24,48 +27,40 @@ export class AppEventContext extends SubContext { ); } - public async getAppEvents(filter: AppEventFilter = {}): Promise { - let sort = ""; - - if (Array.isArray(filter.sortOrder)) { - sort = filter.sortOrder.join(","); - } else if (filter.sortOrder) { - sort = filter.sortOrder; + public async getAppEvents(filter: AppEventFilter = {}): Promise { + if (!Array.isArray(filter.sortOrder) && filter.sortOrder) { + filter.sortOrder = [filter.sortOrder]; } - const where = []; - const values = []; - - if (Array.isArray(filter.program)) { - where.push("type IN (" + filter.program.map(() => "?").join(",") + ")"); - values.push(...filter.program); - } else if (filter.program) { - where.push("program = ?"); - values.push(filter.program); + if (!Array.isArray(filter.program) && filter.program) { + filter.program = [filter.program]; } - - if (Array.isArray(filter.type)) { - where.push("type IN (" + filter.type.map(() => "?").join(",") + ")"); - values.push(...filter.type); - } else if (filter.type) { - where.push("type = ?"); - values.push(filter.type); + if (!Array.isArray(filter.type) && filter.type) { + filter.type = [filter.type]; } + const whereFilter = []; + if (filter.fromDate) { - where.push("date >= ?"); - 
values.push(filter.fromDate); + whereFilter.push(sql`date >= ${sql.timestamp(filter.fromDate)}`); } - if (filter.toDate) { - where.push("date <= ?"); - values.push(filter.toDate); + whereFilter.push(sql`date >= ${sql.timestamp(filter.toDate)}`); + } + if (Array.isArray(filter.program)) { + const array = sql.array(filter.program, "text"); + whereFilter.push(sql`program = ANY(${array})`); } + if (Array.isArray(filter.type)) { + const array = sql.array(filter.type, "text"); + whereFilter.push(sql`program = ANY(${array})`); + } + + const whereClause = whereFilter.length ? sql` WHERE ${joinAnd(whereFilter)}` : sql``; + const orderClause = filter.sortOrder?.length ? sql` ORDER BY ${joinIdentifier(filter.sortOrder)}` : sql``; - return this.query( - `SELECT id, program, date, type FROM app_events${where.length ? " WHERE " + where.join(" AND ") : ""}${ - sort ? " ORDER BY " + sort : "" - };`, - values, + return this.con.any( + sql.type(appEvent)` + SELECT id, program, date, type FROM app_events${whereClause}${orderClause};`, ); } } diff --git a/packages/core/src/database/contexts/customHookContext.ts b/packages/core/src/database/contexts/customHookContext.ts index 8370f75f..dad8d56a 100644 --- a/packages/core/src/database/contexts/customHookContext.ts +++ b/packages/core/src/database/contexts/customHookContext.ts @@ -1,53 +1,47 @@ -import { SubContext } from "./subContext"; -import { isInvalidId } from "../../tools"; -import { storeModifications } from "../sqlTools"; -import { CustomHook } from "@/types"; import { ValidationError } from "../../error"; +import { QueryContext } from "./queryContext"; +import { customHook, entity, CustomHook } from "../databaseTypes"; +import { sql } from "slonik"; +import { isString } from "validate.js"; -export class CustomHookContext extends SubContext { - public async addHook(value: CustomHook): Promise { +export class CustomHookContext extends QueryContext { + public async addHook(value: Readonly): Promise { if (value.id) { throw new 
ValidationError("Cannot add Hook with id already defined"); } - if (typeof value.state === "object") { - value.state = JSON.stringify(value.state); - } - let result = await this.query( - "INSERT IGNORE INTO custom_hook (name, state, hookState, comment) VALUES (?,?,?,?);", - [value.name, value.state, value.hookState, value.comment], + const state = isString(value.state) ? JSON.parse(value.state as string) : value.enabled; + + const id = await this.con.oneFirst( + sql.type(entity)` + INSERT INTO custom_hook (name, state, enabled, comment) + VALUES (${value.name},${sql.jsonb(state)},${value.enabled},${value.comment}) + ON CONFLICT DO NOTHING RETURNING id;`, ); - if (!Number.isInteger(result.insertId) || result.insertId === 0) { - throw new ValidationError(`invalid ID ${result.insertId + ""}`); - } - storeModifications("custom_hook", "insert", result); - result = { ...value, id: result.insertId }; - return result; + // FIXME: storeModifications("custom_hook", "insert", result); + + return { ...value, id }; } - public async getHooks(): Promise { - return this.query("SELECT id, name, state, updated_at, hookState, comment FROM custom_hook;"); + public async getHooks(): Promise { + return this.con.any( + sql.type(customHook)` + SELECT id, name, state, updated_at, enabled, comment FROM custom_hook;`, + ); } public async updateHook(value: CustomHook): Promise { - if (isInvalidId(value.id)) { - throw new ValidationError(`Invalid id: '${value.id}'`); - } - const updateResult = await this.update( + customHook.parse(value); + await this.update( "custom_hook", - (updates, values) => { - updates.push("comment = ?"); - values.push(value.comment); - - updates.push("hookState = ?"); - values.push(value.hookState); - - updates.push("name = ?"); - values.push(value.name); - - updates.push("state = ?"); - values.push(value.state); + () => { + return [ + sql`comment = ${value.comment}`, + sql`enabled = ${value.enabled}`, + sql`name = ${value.name}`, + sql`state = 
${sql.jsonb(value.state)}`, + ]; }, { column: "id", @@ -55,7 +49,7 @@ export class CustomHookContext extends SubContext { }, ); - storeModifications("custom_hook", "update", updateResult); + // FIXME storeModifications("custom_hook", "update", updateResult); return value; } diff --git a/packages/core/src/database/contexts/databaseContext.ts b/packages/core/src/database/contexts/databaseContext.ts index a8036a91..45aea807 100644 --- a/packages/core/src/database/contexts/databaseContext.ts +++ b/packages/core/src/database/contexts/databaseContext.ts @@ -1,93 +1,137 @@ -import { ignore } from "../../tools"; import { Trigger } from "../trigger"; -import mySql from "promise-mysql"; -import { DbTrigger, QueryContext } from "./queryContext"; -import { SubContext } from "./subContext"; +import { QueryContext } from "./queryContext"; import { EmptyPromise } from "../../types"; +import { dbTrigger, DbTrigger } from "../databaseTypes"; +import { sql } from "slonik"; +import { joinIdentifier } from "./helper"; const database = "enterprise"; -export class DatabaseContext extends SubContext { - public constructor(parentContext: QueryContext) { - super(parentContext); - } - - public getDatabaseVersion(): Promise> { - return this.query("SELECT version FROM enterprise_database_info LIMIT 1;"); +export class DatabaseContext extends QueryContext { + public async getDatabaseVersion(): Promise { + return this.con.oneFirst<{ version: number }>( + sql`SELECT version FROM enterprise_database_info ORDER BY version DESC LIMIT 1;`, + ); } - public getServerVersion(): Promise<[{ version: string }]> { - return this.query("SELECT version() as version"); + public async getServerVersion(): Promise { + return this.con.oneFirst<{ version: string }>(sql`SELECT version()`); } public async startMigration(): Promise { - return ( - this.query("UPDATE enterprise_database_info SET migrating=1;") - // eslint-disable-next-line @typescript-eslint/no-unsafe-member-access - .then((value) => value && 
value.changedRows === 1) - ); + const value = await this.con.query(sql`UPDATE enterprise_database_info SET migrating=false;`); + return value.rowCount === 1; } public async stopMigration(): Promise { - return ( - this.query("UPDATE enterprise_database_info SET migrating=0;") - // eslint-disable-next-line @typescript-eslint/no-unsafe-member-access - .then((value) => value && value.changedRows === 1) - ); + const value = await this.con.query(sql`UPDATE enterprise_database_info SET migrating=false;`); + return value.rowCount === 1; } public async updateDatabaseVersion(version: number): EmptyPromise { - return this.query("UPDATE enterprise_database_info SET version=?;", version).then(ignore); + await this.con.query(sql`UPDATE enterprise_database_info SET version=${version};`); } - public createDatabase(): EmptyPromise { - return this.query(`CREATE DATABASE ${database};`).then(ignore); + public async createDatabase(): EmptyPromise { + await this.con.query(sql`CREATE DATABASE ${sql.identifier([database])};`); } - public getTables(): Promise { - return this.query("SHOW TABLES;") as Promise; + public getTables(): Promise { + return this.con.any(sql`SHOW TABLES;`); } - public getTriggers(): Promise { - return this.query("SHOW TRIGGERS;") as Promise; + public getTablesPg(): Promise> { + return this.con.any<{ schemaname: string; tablename: string }>( + sql`select schemaname, tablename from pg_catalog.pg_tables;`, + ); + } + + public async getTriggers(): Promise { + return this.con.any(sql.type(dbTrigger)`SHOW TRIGGERS;`); + } + + public async getTriggersPg(): Promise { + return this.con.any(sql` + SELECT + action_timing as timing, + trigger_schema as table, + trigger_name as trigger, + event_manipulation as event + FROM + information_schema.triggers + `); + } + + public async getIndices(): Promise> { + return this.con.any(sql` + select + t.relname as table_name, + i.relname as index_name, + array_agg(a.attname) as column_names + from + pg_catalog.pg_class t + join 
pg_catalog.pg_attribute a on t.oid = a.attrelid + join pg_catalog.pg_index ix on t.oid = ix.indrelid + join pg_catalog.pg_class i on a.attnum = any(ix.indkey) + and i.oid = ix.indexrelid + join pg_catalog.pg_namespace n on n.oid = t.relnamespace + where t.relkind = 'r' and n.nspname = 'public' + group by t.relname, i.relname; + `); } public createTrigger(trigger: Trigger): Promise { const schema = trigger.createSchema(); - return this.query(schema); + return this.con.query(sql.literalValue(schema)); } - public dropTrigger(trigger: string): EmptyPromise { - return this.query(`DROP TRIGGER IF EXISTS ${mySql.escapeId(trigger)};`).then(ignore); + public async dropTrigger(trigger: string): EmptyPromise { + await this.con.query(sql`DROP TRIGGER IF EXISTS ${sql.identifier([trigger])};`); } - public createTable(table: string, columns: string[]): EmptyPromise { - return this.query(`CREATE TABLE ${mySql.escapeId(table)} (${columns.join(", ")});`).then(ignore); + public async createTable(table: string, columns: string[]): EmptyPromise { + // FIXME: this will probably not work + await this.con.query(sql`CREATE TABLE ${sql.identifier([table])} (${sql.literalValue(columns.join(", "))});`); } - public addColumn(tableName: string, columnDefinition: string): EmptyPromise { - return this.query(`ALTER TABLE ${tableName} ADD COLUMN ${columnDefinition};`).then(ignore); + public async addColumn(tableName: string, columnDefinition: string): EmptyPromise { + // FIXME: this will probably not work + await this.con.query( + sql`ALTER TABLE ${sql.identifier([tableName])} ADD COLUMN ${sql.literalValue(columnDefinition)};`, + ); } - public alterColumn(tableName: string, columnDefinition: string): EmptyPromise { - return this.query(`ALTER TABLE ${tableName} MODIFY COLUMN ${columnDefinition};`).then(ignore); + public async alterColumn(tableName: string, columnDefinition: string): EmptyPromise { + await this.con.query( + sql`ALTER TABLE ${sql.identifier([tableName])} MODIFY COLUMN 
${sql.literalValue(columnDefinition)};`, + ); } - public changeColumn(tableName: string, oldName: string, newName: string, columnDefinition: string): EmptyPromise { - return this.query(`ALTER TABLE ${tableName} CHANGE COLUMN ${oldName} ${newName} ${columnDefinition};`).then(ignore); + public async changeColumn( + tableName: string, + oldName: string, + newName: string, + columnDefinition: string, + ): EmptyPromise { + await this.con.query( + sql` + ALTER TABLE ${sql.identifier([tableName])} + CHANGE COLUMN ${sql.identifier([oldName])} ${sql.identifier([newName])} ${sql.literalValue(columnDefinition)};`, + ); } - public addUnique(tableName: string, indexName: string, ...columns: string[]): EmptyPromise { - columns = columns.map((value) => mySql.escapeId(value)); - const index = mySql.escapeId(indexName); - const table = mySql.escapeId(tableName); - return this.query(`CREATE UNIQUE INDEX ${index} ON ${table} (${columns.join(", ")});`).then(ignore); + public async addUnique(tableName: string, indexName: string, ...columns: string[]): EmptyPromise { + const index = sql.identifier([indexName]); + const table = sql.identifier([tableName]); + + await this.con.query(sql`CREATE UNIQUE INDEX IF NOT EXISTS ${index} ON ${table} (${joinIdentifier(columns)});`); } - public dropIndex(tableName: string, indexName: string): EmptyPromise { - const index = mySql.escapeId(indexName); - const table = mySql.escapeId(tableName); - return this.query(`DROP INDEX IF EXISTS ${index} ON ${table};`).then(ignore); + public async dropIndex(tableName: string, indexName: string): EmptyPromise { + const index = sql.identifier([indexName]); + const table = sql.identifier([tableName]); + + await this.con.query(sql`DROP INDEX IF EXISTS ${index} ON ${table};`); } /** @@ -97,16 +141,16 @@ export class DatabaseContext extends SubContext { * * @param tableName the table to create the index on * @param indexName the name for the index - * @param columnNames the columns the index should be build for + * 
@param columns the columns the index should be build for */ - public addIndex(tableName: string, indexName: string, columnNames: string[]): EmptyPromise { - const index = mySql.escapeId(indexName); - const table = mySql.escapeId(tableName); - const columns = columnNames.map((name) => mySql.escapeId(name)).join(","); - return this.query(`CREATE INDEX IF NOT EXISTS ${index} ON ${table} (${columns});`).then(ignore); + public async addIndex(tableName: string, indexName: string, columns: string[]): EmptyPromise { + const index = sql.identifier([indexName]); + const table = sql.identifier([tableName]); + + await this.con.query(sql`CREATE INDEX IF NOT EXISTS ${index} ON ${table} (${joinIdentifier(columns)});`); } - public addForeignKey( + public async addForeignKey( tableName: string, constraintName: string, column: string, @@ -115,37 +159,37 @@ export class DatabaseContext extends SubContext { onDelete?: string, onUpdate?: string, ): EmptyPromise { - const index = mySql.escapeId(column); - const table = mySql.escapeId(tableName); - const refTable = mySql.escapeId(referencedTable); - const refColumn = mySql.escapeId(referencedColumn); - const name = mySql.escapeId(constraintName); - let query = `ALTER TABLE ${table} ADD FOREIGN KEY ${name} (${index}) REFERENCES ${refTable} (${refColumn})`; + const index = this.escapeIdentifier(column); + const table = this.escapeIdentifier(tableName); + const refTable = this.escapeIdentifier(referencedTable); + const refColumn = this.escapeIdentifier(referencedColumn); + const name = this.escapeIdentifier(constraintName); + + const onUpdateAction = onUpdate ? sql` ON UPDATE ${sql.literalValue(onUpdate)}` : sql``; + const onDeleteAction = onDelete ? 
sql` ON DELETE ${sql.literalValue(onDelete)}` : sql``; - if (onDelete) { - query += " ON DELETE " + onDelete; - } - if (onUpdate) { - query += " ON UPDATE " + onUpdate; - } - return this.query(query + ";").then(ignore); + await this.con.query(sql` + ALTER TABLE ${table} + ADD FOREIGN KEY ${name} (${index}) REFERENCES ${refTable} (${refColumn})${onUpdateAction}${onDeleteAction};`); } - public dropForeignKey(tableName: string, indexName: string): EmptyPromise { - const index = mySql.escapeId(indexName); - const table = mySql.escapeId(tableName); - return this.query(`ALTER TABLE ${table} DROP FOREIGN KEY ${index}`).then(ignore); + public async dropForeignKey(tableName: string, indexName: string): EmptyPromise { + const index = this.escapeIdentifier(indexName); + const table = this.escapeIdentifier(tableName); + + await this.con.query(sql`ALTER TABLE ${table} DROP FOREIGN KEY ${index}`); } - public addPrimaryKey(tableName: string, ...columns: string[]): EmptyPromise { - columns = columns.map((value) => mySql.escapeId(value)); + public async addPrimaryKey(tableName: string, ...columns: string[]): EmptyPromise { + const table = this.escapeIdentifier(tableName); + const primaryColumns = joinIdentifier(columns); - const table = mySql.escapeId(tableName); - return this.query(`ALTER TABLE ${table} ADD PRIMARY KEY (${columns.join(", ")})`).then(ignore); + await this.con.query(sql`ALTER TABLE ${table} ADD PRIMARY KEY (${primaryColumns})`); } - public dropPrimaryKey(tableName: string): EmptyPromise { - const table = mySql.escapeId(tableName); - return this.query(`ALTER TABLE ${table} DROP PRIMARY KEY`).then(ignore); + public async dropPrimaryKey(tableName: string): EmptyPromise { + const table = this.escapeIdentifier(tableName); + + await this.con.query(sql`ALTER TABLE ${table} DROP PRIMARY KEY`); } } diff --git a/packages/core/src/database/contexts/episodeContext.ts b/packages/core/src/database/contexts/episodeContext.ts index 755e9e2f..a892249f 100644 --- 
a/packages/core/src/database/contexts/episodeContext.ts +++ b/packages/core/src/database/contexts/episodeContext.ts @@ -1,261 +1,85 @@ -import { SubContext } from "./subContext"; +import { Uuid, EmptyPromise, Nullable, TypedQuery, Insert } from "../../types"; +import { checkIndices, combiIndex, getElseSet, separateIndex } from "../../tools"; +import { DatabaseError, isDuplicateError, ValidationError } from "../../error"; +import { QueryContext } from "./queryContext"; import { + entity, Episode, + episodeContentData, EpisodeContentData, - EpisodeRelease, - MetaResult, - ProgressResult, - ReadEpisode, - Result, + PureEpisode, + pureEpisode, SimpleEpisode, + simpleEpisode, + SimpleEpisodeReleases, + SimpleReadEpisode, SimpleRelease, - DisplayReleasesResponse, - MediumRelease, - Uuid, - EmptyPromise, - PromiseMultiSingle, - MultiSingleValue, - Optional, - Nullable, - UpdateMedium, - TypedQuery, - PureEpisode, -} from "../../types"; -import { - checkIndices, - combiIndex, - getElseSet, - ignore, - MediaType, - multiSingle, - promiseMultiSingle, - separateIndex, - batch, - hasPropType, -} from "../../tools"; -import logger from "../../logger"; -import { MysqlServerError } from "../mysqlError"; -import { escapeLike } from "../storages/storageTools"; -import { OkPacket } from "mysql"; -import { storeModifications, toSqlList } from "../sqlTools"; -import { DatabaseError, ValidationError } from "../../error"; +} from "../databaseTypes"; +import { sql } from "slonik"; +import { EpisodeReleaseContext } from "./episodeReleaseContext"; -export class EpisodeContext extends SubContext { +export class EpisodeContext extends QueryContext { /** * Return a Query of all episodes and together with the read progress and date of the given user uuid. 
* @param uuid uuid to check the progress of */ public async getAll(uuid: Uuid): Promise> { - return this.queryStream( - "SELECT episode.id, episode.partialIndex, episode.totalIndex, episode.combiIndex, " + - "episode.part_id as partId, coalesce(progress, 0) as progress, read_date as readDate " + - "FROM episode LEFT JOIN user_episode ON episode.id=user_episode.episode_id " + - "AND user_uuid IS NULL OR user_uuid=?", - uuid, - ); - } - - public async getAllReleases(): Promise> { - return this.queryStream( - "SELECT episode_id as episodeId, source_type as sourceType, toc_id as tocId, releaseDate, locked, url, title FROM episode_release", - ); - } - - public async getDisplayReleases( - latestDate: Date, - untilDate: Nullable, - read: Nullable, - uuid: Uuid, - ignoredLists: number[], - requiredLists: number[], - ignoredMedia: number[], - requiredMedia: number[], - ): Promise { - const progressCondition = read == null ? "1" : read ? "progress = 1" : "(progress IS NULL OR progress < 1)"; - - let additionalMainQuery = ""; - - if (requiredMedia.length) { - additionalMainQuery += ` AND part.medium_id IN (${toSqlList(requiredMedia)}) `; - } - - if (ignoredMedia.length) { - additionalMainQuery += ` AND part.medium_id NOT IN (${toSqlList(ignoredMedia)}) `; - } - - let filterQuery = ""; - - if (requiredLists.length) { - filterQuery += ` AND part.medium_id IN (SELECT medium_id FROM list_medium WHERE list_id IN (${toSqlList( - requiredLists, - )})) `; - } else if (ignoredLists.length) { - filterQuery += ` AND part.medium_id NOT IN (SELECT medium_id FROM list_medium WHERE list_id IN (${toSqlList( - ignoredLists, - )})) `; - } - - const releasePromise = this.query( - "SELECT er.episode_id as episodeId, er.title, er.url as link, er.releaseDate as date, er.locked, medium_id as mediumId, progress " + - "FROM (SELECT * FROM episode_release WHERE releaseDate < ? AND (? IS NULL OR releaseDate > ?) 
ORDER BY releaseDate DESC LIMIT 10000) as er " + - "INNER JOIN episode ON episode.id=er.episode_id " + - "LEFT JOIN (SELECT * FROM user_episode WHERE user_uuid = ?) as ue ON episode.id=ue.episode_id " + - "INNER JOIN part ON part.id=part_id " + - additionalMainQuery + - `WHERE ${progressCondition}${filterQuery} LIMIT 500;`, - [latestDate, untilDate, untilDate, uuid, read, read], - ); - const mediaPromise: Promise> = this.query( - "SELECT id, title, medium FROM medium;", - ); - const latestReleaseResult: Array<{ releaseDate: string }> = await this.query( - "SELECT releaseDate FROM episode_release ORDER BY releaseDate LIMIT 1;", - ); - const releases = await releasePromise; - - const mediaIds: Set = new Set(); - - for (const release of releases) { - mediaIds.add(release.mediumId); - } - const media = (await mediaPromise).filter((value) => mediaIds.has(value.id)); - - return { - latest: latestReleaseResult.length ? new Date(latestReleaseResult[0].releaseDate) : new Date(0), - media, - releases, - }; - } - - public async getMediumReleases(mediumId: number, uuid: Uuid): Promise { - return this.query( - "SELECT er.episode_id as episodeId, er.title, er.url as link, er.releaseDate as date, er.locked, episode.combiIndex, progress " + - "FROM episode_release as er " + - "INNER JOIN episode ON episode.id=er.episode_id " + - "LEFT JOIN (SELECT * FROM user_episode WHERE user_uuid = ?) 
as ue ON episode.id=ue.episode_id " + - "INNER JOIN part ON part.id=part_id " + - "WHERE part.medium_id = ?;", - [uuid, mediumId], + return this.stream( + sql.type(pureEpisode)`SELECT + episode.id, episode.partial_index, episode.total_index, + episode.combi_index, episode.part_id, + coalesce(progress, 0) as progress, read_date + FROM episode LEFT JOIN user_episode ON episode.id=user_episode.episode_id AND user_uuid IS NULL OR user_uuid=${uuid}`, ); } public async getAssociatedEpisode(url: string): Promise { - const result: Array> = await this.query( - "SELECT id FROM episode INNER JOIN episode_release ON episode.id=episode_release.episode_id WHERE url=?", - url, + const result = await this.con.maybeOneFirst( + sql.type(entity)` + SELECT id FROM episode + INNER JOIN episode_release ON episode.id=episode_release.episode_id + WHERE url=${url}`, ); - if (result.length === 1) { - return result[0].id; - } - return 0; + return result ?? 0; } /** * */ - public async getLatestReleases(mediumId: number): Promise { - const resultArray: any[] = await this.query( - "SELECT episode.* FROM episode_release " + - "INNER JOIN episode ON episode.id=episode_release.episode_id " + - "INNER JOIN part ON part.id=episode.part_id " + - "WHERE medium_id=? 
" + - "GROUP BY episode_id " + - "ORDER BY episode.totalIndex DESC, episode.partialIndex DESC " + - "LIMIT 5;", - mediumId, - ); - return Promise.all( - resultArray.map(async (rawEpisode) => { - const releases = await this.getReleases(rawEpisode.id); - return { - id: rawEpisode.id, - partialIndex: rawEpisode.partialIndex, - partId: rawEpisode.part_id, - totalIndex: rawEpisode.totalIndex, - combiIndex: rawEpisode.combiIndex, - releases, - }; - }), - ); - } - - public async getReleases(episodeId: number | number[]): Promise { - if (!episodeId || (Array.isArray(episodeId) && !episodeId.length)) { - return []; - } - const resultArray: Optional = await this.queryInList( - "SELECT * FROM episode_release WHERE episode_id IN (??)", - [episodeId], - ); - if (!resultArray?.length) { - return []; + public async getLatestReleases(mediumId: number): Promise { + const resultArray = await this.con.any( + sql.type(simpleEpisode)` + SELECT episode.* FROM episode_release + INNER JOIN episode ON episode.id=episode_release.episode_id + INNER JOIN part ON part.id=episode.part_id + WHERE medium_id=${mediumId} + GROUP BY episode.id + ORDER BY episode.total_index DESC, episode.partial_index DESC + LIMIT 5;`, + ); + const releases = await this.getContext(EpisodeReleaseContext).getReleases(resultArray.map((value) => value.id)); + const episodeMap = new Map(); + + for (const episode of resultArray) { + episodeMap.set(episode.id, { + id: episode.id, + partialIndex: episode.partialIndex, + partId: episode.partId, + totalIndex: episode.totalIndex, + combiIndex: episode.combiIndex, + releases: [], + }); } - return resultArray.map((value: any): EpisodeRelease => { - return { - episodeId: value.episode_id, - sourceType: value.source_type, - releaseDate: value.releaseDate, - locked: !!value.locked, - url: value.url, - title: value.title, - tocId: value.toc_id, - }; - }); - } - public async getReleasesByHost(episodeId: number | number[], host: string): Promise { - if (!episodeId || 
(Array.isArray(episodeId) && !episodeId.length)) { - return []; - } - const resultArray: Optional = await this.queryInList( - "SELECT * FROM episode_release WHERE locate(?, url) = 1 AND episode_id IN (??);", - [host, episodeId], - ); - if (!resultArray?.length) { - return []; + for (const release of releases) { + episodeMap.get(release.episodeId)?.releases.push(release); } - return resultArray.map((value: any): EpisodeRelease => { - return { - episodeId: value.episode_id, - sourceType: value.source_type, - releaseDate: value.releaseDate, - locked: !!value.locked, - url: value.url, - title: value.title, - }; - }); - } - - public async getMediumReleasesByHost(mediumId: number, host: string): Promise { - const resultArray: any[] = await this.query( - ` - SELECT er.* FROM episode_release as er - INNER JOIN episode as e ON e.id=er.episode_id - INNER JOIN part as p ON p.id=e.part_id - WHERE medium_id = ? - AND locate(?, url) = 1 - `, - [mediumId, host], - ); - return resultArray.map((value: any): EpisodeRelease => { - return { - episodeId: value.episode_id, - sourceType: value.source_type, - releaseDate: value.releaseDate, - locked: !!value.locked, - url: value.url, - title: value.title, - tocId: value.toc_id, - }; - }); + return [...episodeMap.values()].sort((a, b) => b.combiIndex - a.combiIndex); } - public async getPartsEpisodeIndices( - partId: number | number[], - ): Promise> { - const result: Optional> = await this.queryInList( - "SELECT part_id, combiIndex as combinedIndex " + "FROM episode WHERE part_id IN (??)", - [partId], + public async getPartsEpisodeIndices(partId: number[]): Promise> { + const result = await this.con.any<{ part_id: number; combiindex: number }>( + sql`SELECT part_id, combi_index FROM episode WHERE part_id = ANY(${sql.array(partId, "int8")});`, ); if (!result) { return []; @@ -265,7 +89,7 @@ export class EpisodeContext extends SubContext { const partValue = getElseSet(idMap, value.part_id, () => { return { partId: value.part_id, episodes: [] 
}; }); - partValue.episodes.push(value.combinedIndex); + partValue.episodes.push(value.combiindex); }); if (Array.isArray(partId)) { partId.forEach((value) => { @@ -288,19 +112,23 @@ export class EpisodeContext extends SubContext { */ public async addProgress( uuid: Uuid, - episodeId: number | number[], + episodeId: number[], progress: number, readDate: Nullable, ): Promise { if (progress < 0 || progress > 1) { return Promise.reject(new ValidationError(`Invalid Progress: ${progress}`)); } - const results = await this.multiInsert( - "REPLACE INTO user_episode " + "(user_uuid, episode_id, progress, read_date) " + "VALUES ", - episodeId, - (value) => [uuid, value, progress, readDate || new Date()], + readDate ??= new Date(); + + await this.con.query( + sql` + INSERT INTO user_episode (user_uuid, episode_id, progress, read_date) + SELECT ${uuid},id,${progress},${sql.timestamp(readDate)} + FROM ${sql.unnest([episodeId], ["int8"])} as insert_data(id) + ON CONFLICT DO UPDATE SET progress=EXCLUDED.progress, read_date=EXCLUDED.read_date;`, ); - multiSingle(results, (value: OkPacket) => storeModifications("progress", "update", value)); + // FIXME: multiSingle(results, (value) => storeModifications("progress", "update", value)); return true; } @@ -319,30 +147,8 @@ export class EpisodeContext extends SubContext { value: episodeId, }, ); - storeModifications("progress", "delete", result); - return result.affectedRows > 0; - } - - /** - * Sets the progress of an user in regard to an episode with one or multiple progressResult objects. - */ - public setProgress(uuid: Uuid, progressResult: ProgressResult | ProgressResult[]): EmptyPromise { - return promiseMultiSingle(progressResult, async (value: ProgressResult) => { - const resultArray: any[] = await this.query( - "SELECT episode_id FROM result_episode WHERE novel=? AND (chapter=? 
OR chapIndex=?)", - [value.novel, value.chapter, value.chapIndex], - ); - const episodeId: Optional = resultArray[0]?.episode_id; - - if (episodeId == null) { - const msg = `could not find an episode for '${value.novel}', '${value.chapter + ""}', '${ - value.chapIndex + "" - }'`; - logger.info(msg); - return; - } - await this.addProgress(uuid, episodeId, value.progress, value.readDate); - }).then(ignore); + // FIXME: storeModifications("progress", "delete", result); + return result.rowCount > 0; } /** @@ -350,12 +156,10 @@ export class EpisodeContext extends SubContext { * Defaults to zero if no entry is found. */ public async getProgress(uuid: Uuid, episodeId: number): Promise { - const result = await this.query("SELECT * FROM user_episode " + "WHERE user_uuid = ? " + "AND episode_id = ?", [ - uuid, - episodeId, - ]); - - return result[0]?.progress || 0; + const result = await this.con.maybeOneFirst<{ progress: number }>( + sql`SELECT progress FROM user_episode WHERE user_uuid = ${uuid} AND episode_id = ${episodeId}`, + ); + return result ?? 0; } /** @@ -363,599 +167,233 @@ export class EpisodeContext extends SubContext { */ public updateProgress(uuid: Uuid, episodeId: number, progress: number, readDate: Nullable): Promise { // TODO for now its the same as calling addProgress, but somehow do it better maybe? - return this.addProgress(uuid, episodeId, progress, readDate); + return this.addProgress(uuid, [episodeId], progress, readDate); + } + + public async getEpisodeContentData(chapterLink: string): Promise { + const result = await this.con.maybeOne( + sql.type(episodeContentData)` + SELECT + episode_release.title as episode_title, episode.combi_index as index, medium.title as medium_title + FROM episode_release + INNER JOIN episode ON episode.id=episode_release.episode_id + INNER JOIN part ON part.id=episode.part_id + INNER JOIN medium ON medium.id=part.medium_id + WHERE episode_release.url=${chapterLink}`, + ); + + return ( + result ?? 
{ + episodeTitle: "", + index: 0, + mediumTitle: "", + } + ); } /** - * Marks an Episode as read and adds it into Storage if the episode does not exist yet. + * Adds a episode of a part to the storage. */ - public async markEpisodeRead(uuid: Uuid, result: Result): EmptyPromise { - if (!result.accept) { - return; - } - const teaserMatcher = /\(?teaser\)?$|(\s+$)/i; - return promiseMultiSingle(result.result, async (value: MetaResult): EmptyPromise => { - // TODO what if it is not a serial medium but only an article? should it even save such things? - if ( - !value.novel || - (!value.chapIndex && !value.chapter) || - // do not mark episode if they are a teaser only - value.chapter?.match(teaserMatcher) - ) { - return; - } + public async addEpisode(episodes: Array>): Promise { + // FIXME: storeModifications("episode", "insert", result); + const values = episodes.map((value) => [value.partId, value.combiIndex, value.totalIndex, value.partialIndex]); - const resultArray: any[] = await this.query( - "SELECT episode_id FROM result_episode WHERE novel=? AND (chapter=? 
OR chapIndex=?);", - [value.novel, value.chapter, value.chapIndex], - ); - // if a similar/same result was mapped to an episode before, get episode_id and update read - if (resultArray[0]?.episode_id != null) { - const insertResult = await this.query( - "INSERT IGNORE INTO user_episode (user_uuid, episode_id,progress) VALUES (?,?,0);", - [uuid, resultArray[0].episode_id], - ); - storeModifications("progress", "insert", insertResult); - return; - } + const insertedEpisodes = await this.con.any( + sql.type(simpleEpisode)`INSERT INTO episode + (part_id, combi_index, total_index, partial_index) + SELECT * FROM ${sql.unnest(values, ["int8", "float8", "int8", "int8"])} + RETURNING id, part_id, combi_index, total_index, partial_index;`, + ); - const escapedNovel = escapeLike(value.novel, { singleQuotes: true, noBoundaries: true }); - const media: Array<{ - title: string; - id: number; - synonym?: string; - }> = await this.query( - "SELECT title, id,synonym FROM medium " + - "LEFT JOIN medium_synonyms ON medium.id=medium_synonyms.medium_id " + - "WHERE medium.title LIKE ? OR medium_synonyms.synonym LIKE ?;", - [escapedNovel, escapedNovel], + if (insertedEpisodes.length !== episodes.length) { + throw new DatabaseError( + `returned rows length does not match inserted rows length: ${insertedEpisodes.length} != ${episodes.length}`, ); - // TODO for now only get the first medium?, later test it against each other - let bestMedium = media[0]; - - if (!bestMedium) { - const addedMedium = await this.parentContext.mediumContext.addMedium( - { - title: value.novel, - medium: MediaType.TEXT, - }, - uuid, - ); - bestMedium = { id: addedMedium.id as number, title: value.novel }; - // TODO add medium if it is not known? 
- } - - let volumeId; - - // if there is either an volume or volIndex in result - // search or add the given volume to link the episode to the part/volume - let volumeTitle = value.volume; - // if there is no volume yet, with the given volumeTitle or index, add one - let volIndex = Number(value.volIndex); - - if (volIndex || volumeTitle) { - // TODO: do i need to convert volIndex from a string to a number for the query? - const volumeArray: Array<{ id: number }> = await this.query( - "SELECT id FROM part WHERE medium_id=? AND title LIKE ? OR totalIndex=?)", - [ - bestMedium.id, - volumeTitle && - escapeLike(volumeTitle, { - singleQuotes: true, - noBoundaries: true, - }), - volIndex, - ], - ); + } - const volume = volumeArray[0]; + const insertReleases: Array> = []; - if (volume) { - volumeId = volume.id; - } else { - if (Number.isNaN(volIndex)) { - const lowestIndexArray: Array<{ totalIndex: number }> = await this.query( - "SELECT MIN(totalIndex) as totalIndex FROM part WHERE medium_id=?", - bestMedium.id, - ); - // TODO look if totalIndex incremential needs to be replaced with combiIndex - const lowestIndexObj = lowestIndexArray[0]; - // if the lowest available totalIndex not indexed, decrement, else take -2 - // -1 is reserved for all episodes, which do not have any volume/part assigned - volIndex = lowestIndexObj && lowestIndexObj.totalIndex < 0 ? --lowestIndexObj.totalIndex : -2; - } - volumeTitle ??= "Volume " + volIndex; - const addedVolume = await this.parentContext.partContext.addPart( - // @ts-expect-error - { title: volumeTitle, totalIndex: volIndex, mediumId: bestMedium.id }, - ); - volumeId = addedVolume.id; - } - } else { - // check if there is a part/volume, with index -1, reserved for all episodes, which are not indexed - const volumeArray: Array<{ - id: number; - }> = await this.query("SELECT id FROM part WHERE medium_id=? 
AND totalIndex=?", [bestMedium.id, -1]); - const volume = volumeArray[0]; + // assume that the order of returned rows is the same as the order of inserted episodes + for (let index = 0; index < episodes.length; index++) { + const episode = episodes[index]; + const insertedEpisode = insertedEpisodes[index]; - if (!volume) { - volumeId = (await this.parentContext.partContext.createStandardPart(bestMedium.id)).id; - } else { - volumeId = volume.id; - } - } - - if (!Number.isInteger(volumeId) || volumeId <= 0) { - throw new ValidationError("no volume id available"); + if ( + episode.partId !== insertedEpisode.partId || + episode.totalIndex !== insertedEpisode.totalIndex || + // eslint-disable-next-line eqeqeq + episode.partialIndex != insertedEpisode.partialIndex + ) { + throw new DatabaseError("returned rows order does not match inserted rows order!"); } - const episodeSelectArray: Array<{ id: number; part_id: number; link: string }> = await this.query( - "SELECT id, part_id, url FROM episode " + - "LEFT JOIN episode_release " + - "ON episode.id=episode_release.episode_id " + - "WHERE title LIKE ? 
OR totalIndex=?", - [ - value.chapter && - escapeLike(value.chapter, { - noBoundaries: true, - singleQuotes: true, - }), - value.chapIndex, - ], - ); - - const episodeSelect = episodeSelectArray[0]; - - let episodeId = episodeSelect?.id; - - if (episodeId == null) { - let episodeIndex = Number(value.chapIndex); - - // if there is no index, decrement the minimum index available for this medium - if (Number.isNaN(episodeIndex)) { - const latestEpisodeArray: Array<{ totalIndex: number }> = await this.query( - "SELECT MIN(totalIndex) as totalIndex FROM episode " + - "WHERE part_id EXISTS (SELECT id from part WHERE medium_id=?);", - bestMedium.id, - ); - const latestEpisode = latestEpisodeArray[0]; - - // TODO: 23.07.2019 look if totalIndex needs to be replaced with combiIndex - // if the lowest available totalIndex not indexed, decrement, else take -1 - episodeIndex = latestEpisode && latestEpisode.totalIndex < 0 ? --latestEpisode.totalIndex : -1; - } - - const chapter = value.chapter ?? "Chapter " + episodeIndex; - - const episode = await this.addEpisode({ - id: 0, - partId: volumeId, - totalIndex: episodeIndex, - releases: [ - { - title: chapter, - url: result.url, - releaseDate: new Date(), - // TODO get source type - sourceType: "", - episodeId: 0, - }, - ], - }); - episodeId = episode.id; + for (const release of episode.releases) { + release.episodeId = insertedEpisode.id; } - // now after setting the storage up, so that all data is 'consistent' with this result, - // mark the episode as read - // normally the progress should be updated by messages of the tracker - // it should be inserted only, if there does not exist any progress - let queryResult = await this.query( - "INSERT IGNORE INTO user_episode (user_uuid, episode_id, progress) VALUES (?,?,0);", - [uuid, episodeId], - ); - storeModifications("progress", "insert", queryResult); - queryResult = await this.query( - "INSERT INTO result_episode (novel, chapter, chapIndex, volume, volIndex, episode_id) " + - 
"VALUES (?,?,?,?,?,?);", - [value.novel, value.chapter, value.chapIndex, value.volume, value.volIndex, episodeId], - ); - storeModifications("result_episode", "insert", queryResult); - }).then(ignore); - } - - public async addRelease(releases: T[]): Promise; - public async addRelease(releases: T): Promise; - - public async addRelease>(releases: T): Promise { - const results = await this.multiInsert( - "INSERT IGNORE INTO episode_release " + - "(episode_id, title, url, source_type, releaseDate, locked, toc_id) " + - "VALUES", - releases, - (release) => { - if (!release.episodeId) { - throw new ValidationError("missing episodeId on release"); - } - return [ - release.episodeId, - release.title, - release.url, - release.sourceType, - release.releaseDate, - release.locked, - release.tocId, - ]; - }, - ); - multiSingle(results, (value: OkPacket) => storeModifications("release", "insert", value)); - return releases; - } - - public getEpisodeLinks(episodeIds: number[]): Promise { - return this.queryInList("SELECT episode_id as episodeId, url FROM episode_release WHERE episode_id IN (??)", [ - episodeIds, - ]) as Promise; - } - - public getEpisodeLinksByMedium(mediumId: number): Promise { - return this.query( - "SELECT episode_id as episodeId, url FROM episode_release " + - "inner join episode on episode.id=episode_release.episode_id " + - "inner join part on part.id=episode.part_id " + - "WHERE medium_id = ?;", - mediumId, - ) as Promise; - } - - public getSourcedReleases( - sourceType: string, - mediumId: number, - ): Promise> { - return this.query( - "SELECT url, episode_release.title FROM episode_release " + - "INNER JOIN episode ON episode.id=episode_release.episode_id " + - "INNER JOIN part ON part.id=episode.part_id " + - "WHERE source_type=? 
AND medium_id=?;", - [sourceType, mediumId], - ).then((resultArray) => - resultArray.map((value: any) => { - value.sourceType = sourceType; - value.mediumId = mediumId; - return value; - }), - ); - } - - public async updateRelease(releases: MultiSingleValue): EmptyPromise { - if (!Array.isArray(releases)) { - releases = [releases]; - } - const batches = batch(releases, 100); - await Promise.all( - batches.map(async (releaseBatch) => { - const params = releaseBatch.flatMap((release) => { - return [ - release.episodeId, - release.url, - release.title, - release.releaseDate, - release.sourceType, - !!release.locked, - release.tocId, - ]; - }); - const result: OkPacket = await this.query( - ` - INSERT INTO episode_release - (episode_id, url, title, releaseDate, source_type, locked, toc_id) - VALUES ${"(?,?,?,?,?,?,?),".repeat(releaseBatch.length).slice(0, -1)} - ON DUPLICATE KEY UPDATE - title = VALUES(title), - releaseDate = VALUES(releaseDate), - source_type = VALUES(source_type), - locked = VALUES(locked), - toc_id = VALUES(toc_id); - `, - params, - ); - storeModifications("release", "update", result); - }), - ); - } - - public async deleteRelease(release: EpisodeRelease | EpisodeRelease[]): EmptyPromise { - if (Array.isArray(release)) { - await Promise.all( - batch(release, 100).map((releaseBatch) => { - return this.query( - `DELETE FROM episode_release WHERE (episode_id, url) in (${releaseBatch.map(() => "(?,?)").join(",")})`, - releaseBatch.flatMap((item) => [item.episodeId, item.url]), - ); - }), - ); - } else { - const result = await this.delete( - "episode_release", - { - column: "episode_id", - value: release.episodeId, - }, - { - column: "url", - value: release.url, - }, - ); - storeModifications("release", "delete", result); + insertReleases.push(...episode.releases); } - } - public async getEpisodeContentData(chapterLink: string): Promise { - const results: EpisodeContentData[] = await this.query( - "SELECT episode_release.title as episodeTitle, 
episode.combiIndex as `index`, " + - "medium.title as mediumTitle FROM episode_release " + - "INNER JOIN episode ON episode.id=episode_release.episode_id " + - "INNER JOIN part ON part.id=episode.part_id INNER JOIN medium ON medium.id=part.medium_id " + - "WHERE episode_release.url=?", - chapterLink, - ); + const insertedReleases = await this.getContext(EpisodeReleaseContext).addReleases(insertReleases); + const idReleaseMap = new Map(); - if (!results?.length) { - return { - episodeTitle: "", - index: 0, - mediumTitle: "", - }; + for (const release of insertedReleases) { + getElseSet(idReleaseMap, release.episodeId, () => []).push(release); } - return { - episodeTitle: results[0].episodeTitle, - index: results[0].index, - mediumTitle: results[0].mediumTitle, - }; - } - public addEpisode(episode: SimpleEpisode): Promise; - public addEpisode(episode: SimpleEpisode[]): Promise; - - /** - * Adds a episode of a part to the storage. - */ - public addEpisode>(episodes: T): PromiseMultiSingle { - // TODO: 29.06.2019 insert multiple rows, what happens with insertId? - const insertReleases: EpisodeRelease[] = []; - // @ts-expect-error - return promiseMultiSingle(episodes, async (episode: SimpleEpisode): Promise => { - if (episode.partId == null || episode.partId <= 0) { - throw new ValidationError(`episode without partId: ${episode.partId}`); - } - let insertId: Optional; - const episodeCombiIndex = episode.combiIndex == null ? 
combiIndex(episode) : episode.combiIndex; - try { - const result: any = await this.query( - "INSERT INTO episode " + "(part_id, totalIndex, partialIndex, combiIndex) " + "VALUES (?,?,?,?);", - [episode.partId, episode.totalIndex, episode.partialIndex, episodeCombiIndex], - ); - storeModifications("episode", "insert", result); - insertId = result.insertId; - } catch (e) { - // do not catch if it isn't an duplicate key error - // eslint-disable-next-line @typescript-eslint/no-unsafe-member-access - if ( - !e || - (hasPropType(e, "errno") && - e.errno !== MysqlServerError.ER_DUP_KEY && - e.errno !== MysqlServerError.ER_DUP_ENTRY) - ) { - throw e; - } - const result = await this.query("SELECT id from episode where part_id=? and combiIndex=?", [ - episode.partId, - combiIndex(episode), - ]); - insertId = result[0].id; - } - if (!Number.isInteger(insertId)) { - throw new ValidationError(`invalid ID ${insertId + ""}`); - } - - if (episode.releases) { - // @ts-expect-error - episode.releases.forEach((value) => (value.episodeId = insertId)); - insertReleases.push(...episode.releases); - } + return insertedEpisodes.map((episode) => { return { - id: insertId as number, - partId: episode.partId, - partialIndex: episode.partialIndex, - totalIndex: episode.totalIndex, - combiIndex: episodeCombiIndex, - releases: episode.releases, - progress: 0, - readDate: null, + ...episode, + releases: idReleaseMap.get(episode.id) ?? [], }; - }).then(async (value: MultiSingleValue) => { - if (insertReleases.length) { - await this.addRelease(insertReleases); - } - return value; }); } - public getEpisode(id: number, uuid: Uuid): Promise; - public getEpisode(id: number[], uuid: Uuid): Promise; - /** * Gets an episode from the storage. 
*/ - public async getEpisode(id: number | number[], uuid: Uuid): Promise { - const episodes: Optional = await this.queryInList( - "SELECT episode.*, ue.progress, ue.read_date FROM episode LEFT JOIN user_episode ue ON episode.id=ue.episode_id " + - "WHERE (user_uuid IS NULL OR user_uuid=?) AND episode.id IN (??);", - [uuid, id], - ); - if (!episodes?.length) { + public async getEpisode(id: number[], uuid: Uuid): Promise { + const episodes = await this.con.any( + sql.type(pureEpisode)` + SELECT e.id, e.part_id, e.combi_index, e.total_index, e.partial_index, + coalesce(ue.progress, 0), ue.read_date + FROM episode e + LEFT JOIN user_episode ue ON episode.id=ue.episode_id + WHERE (user_uuid IS NULL OR user_uuid=${uuid}) AND episode.id = ANY(${sql.array(id, "int8")});`, + ); + if (!episodes.length) { return []; } - const idMap = new Map(); - const releases = await this.getReleases( - episodes.map((value: any): number => { - idMap.set(value.id, value); - return value.id; - }), + const idMap = new Map(); + const releases = await this.getContext(EpisodeReleaseContext).getReleases( + episodes.map((value): number => value.id), ); releases.forEach((value) => { - const episode = idMap.get(value.episodeId); - if (!episode) { - throw new DatabaseError("episode missing for queried release"); - } - if (!episode.releases) { - episode.releases = []; - } - episode.releases.push(value); + getElseSet(idMap, value.episodeId, () => []).push(value); }); return episodes.map((episode) => { return { progress: episode.progress != null ? episode.progress : 0, - readDate: episode.progress != null ? episode.read_date : null, + readDate: episode.progress != null ? episode.readDate : null, id: episode.id, partialIndex: episode.partialIndex, - partId: episode.part_id, + partId: episode.partId, totalIndex: episode.totalIndex, combiIndex: episode.combiIndex, - releases: episode.releases || [], + releases: idMap.get(episode.id) ?? 
[], }; }); } - public async getPartMinimalEpisodes(partId: number): Promise> { - return this.query("SELECT id, combiIndex FROM episode WHERE part_id=?", partId); + public async getPartMinimalEpisodes(partId: number): Promise> { + return this.con.any<{ id: number; combiIndex: number }>( + sql`SELECT id, combi_index FROM episode WHERE part_id=${partId}`, + ); } - public async getPartEpisodePerIndex(partId: number, index: number | number[]): Promise { - const episodes: Optional = await this.queryInList( - "SELECT * FROM episode WHERE part_id = ? AND combiIndex IN (??);", - [partId, index], + /** + * Get an episode for each index requested. + * If an episode is not found for the given index and part_id, + * a dummy episode is returned, with id and part_id set to zero + * and empty release array. + * + * @param partId limit episodes to part_id + * @param indices filters episode by combi_index + * @returns an episode for each index + */ + public async getPartEpisodePerIndex(partId: number, indices: number[]): Promise { + const episodes = await this.con.any( + sql.type(simpleEpisode)` + SELECT e.id, e.part_id, e.combi_index, e.total_index, e.partial_index + FROM episode WHERE part_id = ${partId} AND combi_index = ANY(${sql.array(indices, "int8")});`, ); - if (!episodes?.length) { + + if (!episodes.length) { return []; } - const availableIndices: number[] = []; - const idMap = new Map(); - const episodeIds = episodes.map((value: any) => { - availableIndices.push(value.combiIndex); - idMap.set(value.id, value); - return value.id; - }); - const releases = await this.getReleases(episodeIds); - releases.forEach((value) => { - const episode = idMap.get(value.episodeId); - if (!episode) { - throw new DatabaseError("missing episode for release"); - } - if (!episode.releases) { - episode.releases = []; - } - episode.releases.push(value); - }); - - multiSingle(index, (value: number) => { - if (!availableIndices.includes(value)) { - const separateValue = separateIndex(value); - 
checkIndices(separateValue); - episodes.push(separateValue); - } - }); - return episodes.map((value) => { - checkIndices(value); - return { - id: value.id, - partId, - totalIndex: value.totalIndex, - partialIndex: value.partialIndex, - combiIndex: value.combiIndex, - releases: value.releases || [], - }; - }); + return this.toSimpleEpisodeReleases(episodes, indices); } - public async getMediumEpisodes(mediumId: number): Promise> { - const episodes: any[] = await this.query( - ` - SELECT episode.* - FROM episode - INNER JOIN part ON part.id=episode.part_id - WHERE medium_id = ?; - `, - mediumId, + public async getMediumEpisodes(mediumId: number): Promise { + return this.con.any( + sql.type(simpleEpisode)` + SELECT + episode.id, + episode.part_id, + episode.combi_index, + episode.total_index, + episode.partial_index + FROM episode + INNER JOIN part ON part.id=episode.part_id + WHERE medium_id = ${mediumId}; + `, ); - if (!episodes?.length) { - return []; - } - return episodes.map((value) => { - checkIndices(value); - return { - id: value.id, - partId: value.part_id, - totalIndex: value.totalIndex, - partialIndex: value.partialIndex, - combiIndex: value.combiIndex || combiIndex(value), - releases: [], - }; - }); } - public getMediumEpisodePerIndex(mediumId: number, index: number, ignoreRelease?: boolean): Promise; - public getMediumEpisodePerIndex(mediumId: number, index: number[], ignoreRelease?: boolean): Promise; - public async getMediumEpisodePerIndex( mediumId: number, - index: number | number[], + indices: number[], ignoreRelease = false, - ): Promise { - const episodes: Optional = await this.queryInList( - "SELECT episode.* FROM episode INNER JOIN part ON part.id=episode.part_id " + - "WHERE medium_id = ? 
AND episode.combiIndex IN (??);", - [mediumId, index], - ); - if (!episodes?.length) { + ): Promise { + const episodes = await this.con.any( + sql.type(simpleEpisode)`SELECT + episode.id, + episode.part_id, + episode.combi_index, + episode.total_index, + episode.partial_index + FROM episode + INNER JOIN part ON part.id=episode.part_id + WHERE medium_id = ${mediumId} AND episode.combiIndex = ANY(${sql.array(indices, "int8")});`, + [mediumId, indices], + ); + if (!episodes.length) { return []; } - const availableIndices: number[] = []; - const idMap = new Map(); + if (ignoreRelease) { + return episodes as SimpleEpisodeReleases[]; + } + return this.toSimpleEpisodeReleases(episodes, indices); + } + + private async toSimpleEpisodeReleases( + episodes: readonly SimpleEpisode[], + requestedIndices: number[], + ): Promise { + const availableIndices = new Set(); + const episodeIds = episodes.map((value: any) => { - availableIndices.push(value.combiIndex); - idMap.set(value.id, value); + availableIndices.add(value.combiindex); return value.id; }); - const releases = ignoreRelease ? [] : await this.getReleases(episodeIds); - releases.forEach((value) => { - const episode = idMap.get(value.episodeId); - if (!episode) { - throw new DatabaseError("missing episode for release"); - } - if (!episode.releases) { - episode.releases = []; - } - episode.releases.push(value); + + const releases = await this.getContext(EpisodeReleaseContext).getReleases(episodeIds); + + const idMap = new Map(); + releases.forEach((value) => getElseSet(idMap, value.episodeId, () => []).push(value)); + + const result = episodes.map((episode): SimpleEpisodeReleases => { + const value = episode as SimpleEpisodeReleases; + value.releases = idMap.get(episode.id) ?? 
[]; + return value; }); - multiSingle(index, (value: number) => { - if (!availableIndices.includes(value)) { - const separateValue = separateIndex(value); - episodes.push(separateValue); + requestedIndices.forEach((index: number) => { + if (!availableIndices.has(index)) { + const separateValue = separateIndex(index); + checkIndices(separateValue); + result.push({ + combiIndex: index, + id: 0, + partId: 0, + releases: [], + totalIndex: separateValue.totalIndex, + partialIndex: separateValue.partialIndex, + }); } }); - return episodes.map((value) => { - checkIndices(value); - return { - id: value.id, - partId: value.part_id, - totalIndex: value.totalIndex, - partialIndex: value.partialIndex, - combiIndex: value.combiIndex, - releases: value.releases || [], - }; - }); + return result; } /** @@ -964,33 +402,31 @@ export class EpisodeContext extends SubContext { public async updateEpisode(episode: SimpleEpisode): Promise { const result = await this.update( "episode", - (updates, values) => { + () => { + const updates = []; if (episode.partId) { - updates.push("part_id = ?"); - values.push(episode.partId); + updates.push(sql`part_id = ${episode.partId}`); } if (episode.partialIndex != null) { - updates.push("partialIndex = ?"); - values.push(episode.partialIndex); + updates.push(sql`partial_index = ${episode.partialIndex}`); } if (episode.totalIndex != null) { - updates.push("totalIndex = ?"); - values.push(episode.totalIndex); + updates.push(sql`total_index = ${episode.totalIndex}`); } if (episode.totalIndex || episode.partialIndex) { - updates.push("combiIndex = ?"); - values.push(episode.combiIndex == null ? combiIndex(episode) : episode.combiIndex); + updates.push(sql`combi_index = ${episode.combiIndex == null ? 
combiIndex(episode) : episode.combiIndex}`); } + return updates; }, { column: "id", value: episode.id, }, ); - storeModifications("episode", "update", result); - return result.changedRows > 0; + // FIXME: storeModifications("episode", "update", result); + return result.rowCount > 0; } /** @@ -1000,43 +436,47 @@ export class EpisodeContext extends SubContext { if (!oldPartId || !newPartId) { return false; } - const replaceIds: Array<{ + const replaceIds = await this.con.any<{ oldId: number; newId: number; - }> = await this.query( - "SELECT oldEpisode.id as oldId, newEpisode.id as newId FROM " + - "(Select * from episode where part_id=?) as oldEpisode " + - "inner join (Select * from episode where part_id=?) as newEpisode " + - "ON oldEpisode.combiIndex=newEpisode.combiIndex", - [oldPartId, newPartId], + }>( + sql` + SELECT oldEpisode.id as old_id, newEpisode.id as new_id + FROM + ( + Select id, combi_index from episode where part_id=${oldPartId} + ) as oldEpisode + inner join ( + Select id, combi_index from episode where part_id=${newPartId} + ) as newEpisode + ON oldEpisode.combi_index=newEpisode.combi_index`, ); - const changePartIdsResult: any[] = await this.query( - "SELECT id FROM episode WHERE combiIndex IN " + - "(SELECT combiIndex FROM episode WHERE part_id = ?) AND part_id = ?;", - [newPartId, oldPartId], + const changePartIds = await this.con.anyFirst( + sql.type(entity)` + SELECT id FROM episode + WHERE combi_index IN (SELECT combi_index FROM episode WHERE part_id = ${newPartId}) + AND part_id = ${oldPartId};`, ); - const changePartIds: number[] = changePartIdsResult.map((value) => value.id); - let result = await this.queryInList("UPDATE episode SET part_id= ? " + "WHERE part_id= ? 
AND combiIndex IN (??);", [ - newPartId, - oldPartId, - changePartIds, - ]); - multiSingle(result, (value) => storeModifications("release", "update", value)); + await this.con.query( + sql`UPDATE episode SET part_id=${newPartId} + WHERE part_id=${oldPartId} AND combi_index = ANY(${sql.array(changePartIds, "int8")});`, + ); + // FIXME: multiSingle(result, (value) => storeModifications("release", "update", value)); + if (!replaceIds.length) { return true; } const deleteReleaseIds: number[] = []; + await Promise.all( replaceIds.map((replaceId) => { - return this.query("UPDATE episode_release set episode_id=? where episode_id=?", [ - replaceId.newId, - replaceId.oldId, - ]) - .then((value) => storeModifications("release", "update", value)) + // FIXME: .then((value) => storeModifications("release", "update", value)) + return this.con + .query(sql`UPDATE episode_release set episode_id=${replaceId.newId} where episode_id=${replaceId.oldId}`) .catch((reason) => { - if (reason && MysqlServerError.ER_DUP_ENTRY === reason.errno) { + if (isDuplicateError(reason)) { deleteReleaseIds.push(replaceId.oldId); } else { throw reason; @@ -1048,10 +488,11 @@ export class EpisodeContext extends SubContext { await Promise.all( replaceIds.map((replaceId) => { - return this.query("UPDATE user_episode set episode_id=? 
where episode_id=?", [replaceId.newId, replaceId.oldId]) - .then((value) => storeModifications("progress", "update", value)) + // FIXME: .then((value) => storeModifications("progress", "update", value)) + return this.con + .query(sql`UPDATE user_episode set episode_id=${replaceId.newId} where episode_id=${replaceId.oldId}`) .catch((reason) => { - if (reason && MysqlServerError.ER_DUP_ENTRY === reason.errno) { + if (isDuplicateError(reason)) { deleteProgressIds.push(replaceId.oldId); } else { throw reason; @@ -1059,37 +500,22 @@ export class EpisodeContext extends SubContext { }); }), ); - const deleteResultIds: number[] = []; - - await Promise.all( - replaceIds.map((replaceId) => { - return this.query("UPDATE result_episode set episode_id=? where episode_id=?", [ - replaceId.newId, - replaceId.oldId, - ]) - .then((value) => storeModifications("result_episode", "update", value)) - .catch((reason) => { - if (reason && MysqlServerError.ER_DUP_ENTRY === reason.errno) { - deleteResultIds.push(replaceId.oldId); - } else { - throw reason; - } - }); - }), - ); const oldIds = replaceIds.map((value) => value.oldId); // TODO: 26.08.2019 this does not go quite well, throws error with 'cannot delete parent reference' - result = await this.queryInList("DELETE FROM episode_release WHERE episode_id IN (??);", [deleteReleaseIds]); - multiSingle(result, (value) => storeModifications("release", "delete", value)); - - result = await this.queryInList("DELETE FROM user_episode WHERE episode_id IN (??);", [deleteProgressIds]); - multiSingle(result, (value) => storeModifications("progress", "delete", value)); + await this.con.query( + sql`DELETE FROM episode_release WHERE episode_id = ANY(${sql.array(deleteReleaseIds, "int8")});`, + ); + // FIXME: multiSingle(result, (value) => storeModifications("release", "delete", value)); - result = await this.queryInList("DELETE FROM result_episode WHERE episode_id IN (??);", [deleteResultIds]); - multiSingle(result, (value) => 
storeModifications("result_episode", "delete", value)); + await this.con.query( + sql`DELETE FROM user_episode WHERE episode_id = ANY(${sql.array(deleteProgressIds, "int8")});`, + ); + // FIXME: multiSingle(result, (value) => storeModifications("progress", "delete", value)); - result = await this.queryInList("DELETE FROM episode WHERE part_id= ? AND id IN (??);", [oldPartId, oldIds]); - multiSingle(result, (value) => storeModifications("episode", "delete", value)); + await this.con.query( + sql`DELETE FROM episode WHERE part_id=${oldPartId} AND id = ANY(${sql.array(oldIds, "int8")});`, + ); + // FIXME: multiSingle(result, (value) => storeModifications("episode", "delete", value)); return true; } @@ -1098,57 +524,47 @@ export class EpisodeContext extends SubContext { */ public async deleteEpisode(id: number): Promise { // remove episode from progress first - let result = await this.delete("user_episode", { column: "episode_id", value: id }); - storeModifications("progress", "delete", result); + await this.delete("user_episode", { column: "episode_id", value: id }); + // FIXME: storeModifications("progress", "delete", result); - result = await this.delete("episode_release", { column: "episode_id", value: id }); - storeModifications("release", "delete", result); + await this.delete("episode_release", { column: "episode_id", value: id }); + // FIXME: storeModifications("release", "delete", result); // lastly remove episode itself - result = await this.delete("episode", { column: "id", value: id }); - storeModifications("episode", "delete", result); - return result.affectedRows > 0; + await this.delete("episode", { column: "id", value: id }); + // FIXME: storeModifications("episode", "delete", result); + return true; } - public async getChapterIndices(mediumId: number): Promise { - const result: any[] = await this.query( - "SELECT episode.combiIndex FROM episode INNER JOIN part ON episode.part_id=part.id WHERE medium_id=?", - mediumId, + public async 
getChapterIndices(mediumId: number): Promise { + return this.con.anyFirst<{ combiIndex: number }>( + sql`SELECT episode.combi_index FROM episode INNER JOIN part ON episode.part_id=part.id WHERE medium_id=${mediumId}`, ); - return result.map((value) => value.combiIndex); } - public async getAllChapterLinks(mediumId: number): Promise { - const result: any[] = await this.query( - "SELECT url FROM episode " + - "INNER JOIN episode_release ON episode.id=episode_release.episode_id " + - "INNER JOIN part ON episode.part_id=part.id WHERE medium_id=?", - mediumId, + public async getAllChapterLinks(mediumId: number): Promise { + return this.con.anyFirst<{ url: string }>( + sql`SELECT url FROM episode + INNER JOIN episode_release ON episode.id=episode_release.episode_id + INNER JOIN part ON episode.part_id=part.id WHERE medium_id=${mediumId}`, ); - return result.map((value) => value.url).filter((value) => value); } - public async getUnreadChapter(uuid: Uuid): Promise { - const resultArray = await this.query( - "SELECT id FROM episode WHERE id NOT IN " + - "(SELECT episode_id FROM user_episode WHERE progress >= 1 AND user_uuid=?);", - uuid, + public async getUnreadChapter(uuid: Uuid): Promise { + return this.con.anyFirst( + sql.type(entity)` + SELECT id FROM episode WHERE id NOT IN + ( + SELECT episode_id FROM user_episode WHERE progress >= 1 AND user_uuid=${uuid} + );`, ); - return resultArray.map((value: any) => value.id); } - public async getReadToday(uuid: Uuid): Promise { - const resultArray = await this.query( - "SELECT * FROM user_episode WHERE read_date > (NOW() - INTERVAL 1 DAY) AND user_uuid=?;", - uuid, + public async getReadToday(uuid: Uuid): Promise { + return this.con.any( + sql`SELECT episode_id, read_date, progress + FROM user_episode WHERE read_date > (NOW() - INTERVAL 1 DAY) AND user_uuid=${uuid};`, ); - return resultArray.map((value: any): ReadEpisode => { - return { - episodeId: value.episode_id, - readDate: value.read_date, - progress: value.progress, - }; 
- }); } /** @@ -1173,30 +589,30 @@ export class EpisodeContext extends SubContext { // TODO: 09.03.2020 rework query and input, for now the episodeIndices are only relative to their parts mostly, // not always relative to the medium // first update existing user-episode-progress where it not marked as read - let result = await this.query( - "UPDATE user_episode, episode, part " + - "SET user_episode.progress=1, user_episode.read_date=NOW() " + - "WHERE user_episode.progress != 1 " + - "AND user_episode.user_uuid = ? " + - "AND user_episode.episode_id=episode.id " + - "AND episode.part_id=part.id " + - "AND part.medium_id=? " + - "AND (? IS NULL OR part.combiIndex < ?) " + - "AND episode.combiIndex < ?", - [uuid, mediumId, partIndex, partIndex, episodeIndex], - ); - storeModifications("progress", "update", result); + await this.con.query( + sql`UPDATE user_episode, episode, part + SET user_episode.progress=1, user_episode.read_date=NOW() + WHERE user_episode.progress != 1 + AND user_episode.user_uuid = ${uuid} + AND user_episode.episode_id=episode.id + AND episode.part_id=part.id + AND part.medium_id=${mediumId} + ${partIndex ? sql`AND part.combi_index < ${partIndex}` : sql``} + ${episodeIndex ? sql`AND episode.combi_index < ${episodeIndex}` : sql``}`, + ); + // FIXME: storeModifications("progress", "update", result); // then insert non-existing user-episode-progress as read - result = await this.query( - "INSERT IGNORE INTO user_episode (user_uuid, episode_id, progress, read_date) " + - "SELECT ?, episode.id, 1, NOW() FROM episode, part " + - "WHERE episode.part_id=part.id " + - "AND part.medium_id=? " + - "AND (? IS NULL OR part.combiIndex < ?) " + - "AND episode.combiIndex < ?", - [uuid, mediumId, partIndex, partIndex, episodeIndex], - ); - storeModifications("progress", "insert", result); + // TODO: cant both queries be collapsed in the one below? 
update happens on conflict + await this.con.query( + sql`INSERT INTO user_episode (user_uuid, episode_id, progress, read_date) + SELECT ${uuid}, episode.id, 1, NOW() FROM episode, part + WHERE episode.part_id=part.id + AND part.medium_id=${mediumId} + ${partIndex ? sql`AND part.combi_index < ${partIndex}` : sql``} + ${episodeIndex ? sql`AND episode.combi_index < ${episodeIndex}` : sql``} + ON CONFLICT DO NOTHING`, + ); + // FIXME: storeModifications("progress", "insert", result); } } diff --git a/packages/core/src/database/contexts/episodeReleaseContext.ts b/packages/core/src/database/contexts/episodeReleaseContext.ts new file mode 100644 index 00000000..fe252b24 --- /dev/null +++ b/packages/core/src/database/contexts/episodeReleaseContext.ts @@ -0,0 +1,272 @@ +import { TypedQuery, DisplayReleasesResponse, Nullable, Uuid, Insert, EmptyPromise } from "../../types"; +import { sql } from "slonik"; +import { + displayRelease, + minimalMedium, + mediumRelease, + SimpleRelease, + MediumRelease, + simpleRelease, + minimalRelease, + MinimalRelease, +} from "../databaseTypes"; +import { QueryContext } from "./queryContext"; +import { joinAnd } from "./helper"; + +export class EpisodeReleaseContext extends QueryContext { + public async getAllReleases(): Promise> { + return this.stream( + sql`SELECT + episode_id, source_type, toc_id, + release_date, locked, url, title + FROM episode_release;`, + ); + } + + public async getDisplayReleases( + latestDate: Date, + untilDate: Nullable, + read: Nullable, + uuid: Uuid, + ignoredLists: number[], + requiredLists: number[], + ignoredMedia: number[], + requiredMedia: number[], + ): Promise { + const whereClause = []; + whereClause.push(read == null ? sql`1` : read ? 
sql`progress >= 1` : sql`(progress IS NULL OR progress < 1)`); + + if (requiredLists.length) { + const array = sql.array(requiredLists, "int8"); + whereClause.push(sql`part.medium_id IN (SELECT medium_id FROM list_medium WHERE list_id = ANY(${array}))`); + } else if (ignoredLists.length) { + const array = sql.array(ignoredLists, "int8"); + whereClause.push(sql`part.medium_id NOT IN (SELECT medium_id FROM list_medium WHERE list_id = ANY(${array}))`); + } + + // part of the join condition + const additionalMainQueries = []; + + if (requiredMedia.length) { + const array = sql.array(requiredMedia, "int8"); + additionalMainQueries.push(sql`part.medium_id = ANY(${array})`); + } + + if (ignoredMedia.length) { + const array = sql.array(ignoredMedia, "int8"); + additionalMainQueries.push(sql`part.medium_id != ALL(${array})`); + } + + const additionalMainQuery = additionalMainQueries.length ? sql`AND ${joinAnd(additionalMainQueries)}` : sql``; + const lowerDateLimitQuery = untilDate ? sql`AND release_date > ${sql.timestamp(untilDate)}` : sql``; + + const releasePromise = this.con.any( + sql.type(displayRelease)`SELECT + er.id, er.episode_id, er.title, er.url, + er.release_date as date, er.locked, medium_id, coalesce(progress, 0) as progress + FROM ( + SELECT * FROM episode_release + WHERE release_date < ${sql.timestamp(latestDate)}${lowerDateLimitQuery} + ORDER BY release_date DESC LIMIT 10000 + ) as er + INNER JOIN episode ON episode.id=er.episode_id + LEFT JOIN (SELECT * FROM user_episode WHERE user_uuid = ${uuid}) as ue ON episode.id=ue.episode_id + INNER JOIN part ON part.id=part_id ${additionalMainQuery} + WHERE ${joinAnd(whereClause)} + LIMIT 500;`, + ); + + const mediaPromise = this.con.any(sql.type(minimalMedium)`SELECT id, title, medium FROM medium;`); + + const latestReleaseResult = await this.con.oneFirst<{ releaseDate: string }>( + sql`SELECT release_date FROM episode_release ORDER BY release_date LIMIT 1;`, + ); + let releases; + // eslint-disable-next-line 
no-useless-catch + try { + releases = await releasePromise; + } catch (error) { + throw error; + } + + const mediaIds: Set = new Set(); + + for (const release of releases) { + mediaIds.add(release.mediumId); + } + const media = (await mediaPromise).filter((value) => mediaIds.has(value.id)); + + return { + latest: latestReleaseResult ? new Date(latestReleaseResult) : new Date(0), + media, + releases, + }; + } + + public async getMediumReleases(mediumId: number, uuid: Uuid): Promise { + return this.con.any( + sql.type(mediumRelease)` + SELECT + er.episode_id, er.title, er.url, + er.release_date as date, er.locked, episode.combi_index, progress + FROM episode_release as er + INNER JOIN episode ON episode.id=er.episode_id + LEFT JOIN ( + SELECT * FROM user_episode WHERE user_uuid = ${uuid} + ) as ue ON episode.id=ue.episode_id + INNER JOIN part ON part.id=part_id + WHERE part.medium_id = ${mediumId};`, + ); + } + + public async getReleases(episodeId: number[]): Promise { + if (!episodeId || (Array.isArray(episodeId) && !episodeId.length)) { + return []; + } + if (!Array.isArray(episodeId)) { + episodeId = [episodeId]; + } + + const resultArray = await this.con.any( + sql.type(simpleRelease)` + SELECT id, episode_id, source_type, toc_id, + release_date, locked, url, title + FROM episode_release + WHERE episode_id = ANY(${sql.array(episodeId, "int8")});`, + ); + + if (!resultArray.length) { + return []; + } + // always ensure that locked is set + resultArray.forEach((value) => (value.locked = !!value.locked)); + return resultArray; + } + + public async getReleasesByHost(episodeId: number[], host: string): Promise { + if (!episodeId.length) { + return []; + } + return this.con.any( + sql.type(simpleRelease)` + SELECT id, episode_id, source_type, toc_id, + release_date, locked, url, title + FROM episode_release + WHERE strpos(url, ${host}) = 1 AND episode_id = ANY(${sql.array(episodeId, "int8")});`, + ); + } + + public async getMediumReleasesByHost(mediumId: number, host: 
string): Promise { + return this.con.any( + sql.type(simpleRelease)` + SELECT er.id, er.episode_id, er.source_type, er.toc_id, + er.release_date, er.locked, er.url, er.title + FROM episode_release as er + INNER JOIN episode as e ON e.id=er.episode_id + INNER JOIN part as p ON p.id=e.part_id + WHERE medium_id = ${mediumId} + AND strpos(url, ${host}) = 1 + `, + ); + } + + public async addReleases(releases: Array>): Promise { + const insert = releases.map((value) => [ + value.episodeId, + value.title, + value.url, + value.sourceType, + value.releaseDate.toISOString(), + value.locked, + value.tocId, + ]); + + // FIXME: multiSingle(results, (value) => storeModifications("release", "insert", value)); + return this.con.any( + sql.type(simpleRelease)` + INSERT INTO episode_release (episode_id, title, url, source_type, release_date, locked, toc_id) + SELECT * FROM ${sql.unnest(insert, ["int8", "text", "text", "text", "timestamptz", "bool", "int8"])} + ON CONFLICT DO NOTHING + RETURNING episode_id, title, url, source_type, release_date, locked, toc_id;`, + ); + } + + public async getEpisodeLinks(episodeIds: number[]): Promise { + return this.con.any( + sql.type(minimalRelease)` + SELECT episode_id, url FROM episode_release + WHERE episode_id = ANY(${sql.array(episodeIds, "int8")});`, + ); + } + + public async getEpisodeLinksByMedium(mediumId: number): Promise { + return this.con.any( + sql.type(minimalRelease)` + SELECT + episode_id, url + FROM episode_release + inner join episode on episode.id=episode_release.episode_id + inner join part on part.id=episode.part_id + WHERE medium_id = ${mediumId};`, + ); + } + + public async getSourcedReleases( + sourceType: string, + mediumId: number, + ): Promise> { + const resultArray = await this.con.any( + sql` + SELECT url, episode_release.title + FROM episode_release + INNER JOIN episode ON episode.id=episode_release.episode_id + INNER JOIN part ON part.id=episode.part_id + WHERE source_type=${sourceType} AND medium_id=${mediumId};`, 
+ ); + return resultArray.map((value: any) => { + value.sourceType = sourceType; + value.mediumId = mediumId; + return value; + }); + } + + /** + * Currently does an upsert instead of purely an insert. + * @param releases releases to update + */ + public async updateReleases(releases: SimpleRelease[]): EmptyPromise { + const values = releases.map((value) => [ + value.episodeId, + value.title, + value.url, + value.sourceType, + value.releaseDate.toISOString(), + value.locked, + value.tocId, + ]); + + await this.con.query( + sql` + INSERT INTO episode_release + (episode_id, title, url, source_type, release_date, locked, toc_id) + SELECT * FROM ${sql.unnest(values, ["int8", "text", "text", "text", "timestamptz", "bool", "int8"])} + ON CONFLICT (episode_id, url) DO UPDATE SET + title = EXCLUDED.title, + release_date = EXCLUDED.release_date, + source_type = EXCLUDED.source_type, + locked = EXCLUDED.locked, + toc_id = EXCLUDED.toc_id; + `, + ); + // FIXME: storeModifications("release", "update", result); + } + + public async deleteReleases(release: SimpleRelease[]): EmptyPromise { + // FIXME: storeModifications("release", "delete", result); + const ids = release.map((value) => value.id); + await this.con.query( + sql`DELETE FROM episode_release + WHERE id = ANY(${sql.array(ids, "int8")});`, + ); + } +} diff --git a/packages/core/src/database/contexts/externalListContext.ts b/packages/core/src/database/contexts/externalListContext.ts index fbd03fbc..d66fb494 100644 --- a/packages/core/src/database/contexts/externalListContext.ts +++ b/packages/core/src/database/contexts/externalListContext.ts @@ -1,74 +1,61 @@ -import { SubContext } from "./subContext"; -import { ExternalList, Id, Insert, Uuid } from "../../types"; -import { promiseMultiSingle, multiSingle } from "../../tools"; -import { storeModifications } from "../sqlTools"; -import { OkPacket } from "mysql"; -import { DatabaseError, ValidationError } from "../../error"; +import { Id, Insert, Uuid } from "../../types"; 
+import { promiseMultiSingle } from "../../tools"; +import { QueryContext } from "./queryContext"; +import { sql } from "slonik"; +import { entity, ExternalList, simpleExternalList, SimpleExternalList } from "../databaseTypes"; +import { DatabaseError } from "../../error"; export type UpdateExternalList = Partial & { id: Id }; -export class ExternalListContext extends SubContext { +export class ExternalListContext extends QueryContext { public async getAll(uuid: Uuid): Promise { - // FIXME: 03.03.2020 this query is invalid - const result = await this.query( - "SELECT el.id, el.user_uuid as uuid, el.name, el.medium, el.url " + - "FROM external_reading_list as el " + - "INNER JOIN external_user as eu ON el.user_uuid=eu.uuid " + - "WHERE eu.local_uuid = ?;", - uuid, + // FIXME: 03.03.2020 this query is invalid, really??? + const result = await this.con.any( + sql`SELECT + el.id, el.user_uuid, el.name, el.medium, el.url + FROM external_reading_list as el + INNER JOIN external_user as eu ON el.user_uuid=eu.uuid + WHERE eu.local_uuid = ${uuid};`, ); return Promise.all(result.map((value: any) => this.createShallowExternalList(value))); } /** * Adds an external list of an user to the storage. 
- * - * @param {string} userUuid - * @param {ExternalList} externalList - * @return {Promise} */ - public async addExternalList(userUuid: Uuid, externalList: Insert): Promise { - const result = await this.query( - "INSERT INTO external_reading_list " + "(name, user_uuid, medium, url) " + "VALUES(?,?,?,?);", - [externalList.name, userUuid, externalList.medium, externalList.url], + public async addExternalList( + externalUserUuid: Uuid, + externalList: Insert, + ): Promise { + return this.con.one( + sql.type(simpleExternalList)` + INSERT INTO external_reading_list (name, user_uuid, medium, url) + VALUES(${externalList.name},${externalUserUuid},${externalList.medium},${externalList.url}) + RETURNING id, name, user_uuid, medium, url;`, ); - storeModifications("external_list", "insert", result); - const insertId = result.insertId; - - if (!Number.isInteger(insertId)) { - throw new DatabaseError(`invalid ID ${insertId + ""}`); - } - - return { - id: insertId, - name: externalList.name, - medium: externalList.medium, - url: externalList.url, - items: [], - }; } /** * Updates an external list. */ public async updateExternalList(externalList: UpdateExternalList): Promise { - const result = await this.update( + await this.update( "external_reading_list", - (updates, values) => { + () => { + const updates = []; if (externalList.medium) { - updates.push("medium = ?"); - values.push(externalList.medium); + updates.push(sql`medium = ${externalList.medium}`); } if (externalList.name) { - updates.push("name = ?"); - values.push(externalList.name); + updates.push(sql`name = ${externalList.name}`); } + return updates; }, { column: "user_uuid", value: externalList.id }, ); - storeModifications("external_list", "delete", result); - return result.changedRows > 0; + // FIXME: storeModifications("external_list", "delete", result); + return false; } /** @@ -78,14 +65,14 @@ export class ExternalListContext extends SubContext { // TODO: 29.06.2019 replace with id IN (...) 
and list_id IN (...) const results = await promiseMultiSingle(externalListId, async (item) => { // first delete any references of externalList: list-media links - let result = await this.delete("external_list_medium", { + await this.delete("external_list_medium", { column: "list_id", value: item, }); - storeModifications("external_list_item", "delete", result); + // FIXME: storeModifications("external_list_item", "delete", result); // then delete list itself - result = await this.delete( + await this.delete( "external_reading_list", { column: "user_uuid", @@ -96,8 +83,8 @@ export class ExternalListContext extends SubContext { value: item, }, ); - storeModifications("external_list", "delete", result); - return result.affectedRows > 0; + // FIXME: storeModifications("external_list", "delete", result); + return false; }); return Array.isArray(results) ? results.some((v) => v) : results; } @@ -109,8 +96,13 @@ export class ExternalListContext extends SubContext { * @return {Promise} */ public async getExternalList(id: number): Promise { - const result = await this.query("SELECT * FROM external_reading_list WHERE id = ?", id); - return this.createShallowExternalList(result[0]); + const result = await this.con.one( + sql.type(simpleExternalList)` + SELECT id, name, user_uuid, medium, url + FROM external_reading_list + WHERE id = ${id}`, + ); + return this.createShallowExternalList(result); } /** @@ -120,20 +112,24 @@ export class ExternalListContext extends SubContext { * @param {ExternalList} storageList * @return {Promise} */ - public async createShallowExternalList(storageList: ExternalList): Promise { - const result = await this.query("SELECT * FROM external_list_medium WHERE list_id = ?;", storageList.id); - storageList.items = result.map((value: any) => value.medium_id); - // TODO return input or copy object? 
- return storageList; + public async createShallowExternalList(storageList: SimpleExternalList): Promise { + const result = await this.con.anyFirst( + sql.type(entity)`SELECT medium_id as id FROM external_list_medium WHERE list_id = ${storageList.id};`, + ); + return { + ...storageList, + items: [...result], + }; } /** * Gets all external lists from the externalUser from the storage. */ public async getExternalUserLists(uuid: Uuid): Promise { - const result = await this.query( - "SELECT id, name, user_uuid as uuid, medium, url" + " FROM external_reading_list WHERE user_uuid = ?;", - uuid, + const result = await this.con.any( + sql`SELECT id, name, user_uuid, medium, url + FROM external_reading_list + WHERE user_uuid = ${uuid};`, ); return Promise.all(result.map((value: any) => this.createShallowExternalList(value))); } @@ -141,13 +137,10 @@ export class ExternalListContext extends SubContext { /** * Adds a medium to an external list in the storage. */ - public async addItemToExternalList(listId: number, mediumId: number): Promise { - const result = await this.query("INSERT INTO external_list_medium " + "(list_id, medium_id) " + "VALUES (?,?)", [ - listId, - mediumId, - ]); - storeModifications("external_list_item", "insert", result); - return result.affectedRows > 0; + public async addItemToList(listId: number, mediumId: number): Promise { + await this.con.query(sql`INSERT INTO external_list_medium (list_id, medium_id) VALUES (${listId},${mediumId});`); + // FIXME: storeModifications("external_list_item", "insert", result); + return false; } /** @@ -156,41 +149,43 @@ export class ExternalListContext extends SubContext { * If no listId is available it selects the * 'Standard' List of the given user and adds it there. 
*/ - public async addItemToList(medium: { id: number | number[]; listId?: number }, uuid?: Uuid): Promise { - // if list_ident is not a number, - // then take it as uuid from user and get the standard listId of 'Standard' list - if (medium.listId == null || !Number.isInteger(medium.listId)) { - if (!uuid) { - throw new ValidationError("missing uuid parameter"); - } - const idResult = await this.query( - "SELECT id FROM reading_list WHERE `name` = 'Standard' AND user_uuid = ?;", - uuid, - ); - medium.listId = idResult[0].id; + public async addItemsToList(mediumIds: number[], listId: number, uuid: Uuid): Promise { + const ownsList = await this.con.exists( + sql.type(entity)` + SELECT id + FROM external_reading_list as el + INNER JOIN external_user as eu ON el.user_uuid=eu.uuid + WHERE eu.local_uuid = ${uuid};`, + ); + + if (!ownsList) { + throw new DatabaseError("uuid does not own list"); } - const result = await this.multiInsert( - "INSERT IGNORE INTO external_list_medium (list_id, medium_id) VALUES", - medium.id, - (value) => [medium.listId, value], + + const values = mediumIds.map((value) => [listId, value]); + + await this.con.query( + sql`INSERT INTO external_list_medium (list_id, medium_id) + SELECT * FROM ${sql.unnest(values, ["int8", "in8"])} + ON CONFLICT DO NOTHING;`, ); - let added = false; + // let added = false; - multiSingle(result, (value: OkPacket) => { - storeModifications("external_list_item", "insert", value); + // multiSingle(result, (value) => { + // // FIXME: storeModifications("external_list_item", "insert", value); - if (value.affectedRows > 0) { - added = true; - } - }); - return added; + // if (value.rowCount > 0) { + // added = true; + // } + // }); + return false; } /** * Removes an item from an external list. 
*/ - public removeMedium(listId: number, mediumId: number | number[]): Promise { - return promiseMultiSingle(mediumId, async (value) => { + public async removeMedium(listId: number, mediumId: number | number[]): Promise { + const changed = await promiseMultiSingle(mediumId, async (value) => { const result = await this.delete( "external_list_medium", { @@ -202,8 +197,9 @@ export class ExternalListContext extends SubContext { value, }, ); - storeModifications("external_list_item", "delete", result); - return result.affectedRows > 0; - }).then(() => true); + // FIXME: storeModifications("external_list_item", "delete", result); + return result.rowCount > 0; + }); + return Array.isArray(changed) ? changed.reduce((p, c) => p || c) : changed; } } diff --git a/packages/core/src/database/contexts/externalUserContext.ts b/packages/core/src/database/contexts/externalUserContext.ts index a0321056..b43d68d9 100644 --- a/packages/core/src/database/contexts/externalUserContext.ts +++ b/packages/core/src/database/contexts/externalUserContext.ts @@ -1,59 +1,64 @@ -import { SubContext } from "./subContext"; +import { Uuid, Insert } from "../../types"; +import { v1 as uuidGenerator } from "uuid"; +import { DuplicateEntityError, MissingEntityError } from "../../error"; +import { QueryContext } from "./queryContext"; import { - ExternalUser, - Uuid, - MultiSingleValue, - PromiseMultiSingle, + BasicDisplayExternalUser, + basicDisplayExternalUser, DisplayExternalUser, - TypedQuery, - ExternalStorageUser, -} from "../../types"; -import { promiseMultiSingle } from "../../tools"; -import { v1 as uuidGenerator } from "uuid"; -import { storeModifications } from "../sqlTools"; -import { DatabaseError, DuplicateEntityError, MissingEntityError } from "../../error"; - -export class ExternalUserContext extends SubContext { - public async getAll(uuid: Uuid): Promise> { - const lists = await this.parentContext.externalListContext.getAll(uuid); - return this.queryStream( - "SELECT uuid, local_uuid as 
localUuid, name as identifier, service as type FROM external_user " + - "WHERE local_uuid = ?;", - uuid, - ).on("result", (row) => { - row.lists = []; + simpleExternalUser, + SimpleExternalUser, + SimpleExternalUserListed, +} from "../databaseTypes"; +import { sql } from "slonik"; +import { ExternalListContext } from "./externalListContext"; + +export class ExternalUserContext extends QueryContext { + public async getAll(uuid: Uuid): Promise { + const lists = await this.getContext(ExternalListContext).getAll(uuid); + const users = await this.con.any( + sql.type(basicDisplayExternalUser)` + SELECT + uuid, local_uuid, identifier, type + FROM external_user + WHERE local_uuid = ${uuid};`, + ); + + return users.map((user): DisplayExternalUser => { + const value = user as DisplayExternalUser; + value.lists = []; for (const list of lists) { - if (list.uuid === row.uuid) { - row.lists.push(list); + if (list.userUuid === user.uuid) { + value.lists.push(list); } } + return value; }); } /** * Adds an external user of an user to the storage. */ - public async addExternalUser(localUuid: Uuid, externalUser: ExternalUser): Promise { - let result = await this.query( - "SELECT * FROM external_user " + "WHERE name = ? " + "AND local_uuid = ? 
" + "AND service = ?", - [externalUser.identifier, localUuid, externalUser.type], + public async addExternalUser(localUuid: Uuid, externalUser: Insert): Promise { + const result = await this.con.exists( + sql`SELECT uuid FROM external_user + WHERE identifier = ${externalUser.identifier} AND local_uuid = ${localUuid} AND type = ${externalUser.type}`, ); - if (result.length) { + if (result) { throw new DuplicateEntityError("Duplicate ExternalUser"); } const uuid = uuidGenerator(); - result = await this.query( - "INSERT INTO external_user " + "(name, uuid, local_uuid, service, cookies) " + "VALUES (?,?,?,?,?);", - [externalUser.identifier, uuid, localUuid, externalUser.type, externalUser.cookies], + await this.con.query( + sql` + INSERT INTO external_user (identifier, uuid, local_uuid, type, cookies) + VALUES (${externalUser.identifier},${uuid},${localUuid},${externalUser.type},${externalUser.cookies ?? null});`, ); - storeModifications("external_user", "insert", result); - if (!result.affectedRows) { - return Promise.reject(new DatabaseError("Insert failed")); - } + // FIXME: storeModifications("external_user", "insert", insert); + externalUser.localUuid = localUuid; - return externalUser; + return externalUser as SimpleExternalUser; } /** @@ -65,125 +70,108 @@ export class ExternalUserContext extends SubContext { // because deleting top-down // would violate the foreign keys restraints + const ownsExternalUser = await this.con.exists( + sql`SELECT uuid from external_user WHERE uuid = ${externalUuid} AND user_uuid = ${userUuid}`, + ); + + if (!ownsExternalUser) { + throw Error("trying to delete unowned externalUser"); + } + // first delete list - medium links - let result = await this.query( - "DELETE FROM external_list_medium " + - "WHERE list_id " + - "IN (SELECT id FROM external_reading_list " + - "WHERE user_uuid =?);", - externalUuid, + await this.con.query( + sql`DELETE FROM external_list_medium + WHERE list_id + IN (SELECT id FROM external_reading_list + WHERE 
user_uuid =${externalUuid});`, ); - storeModifications("external_list_item", "delete", result); + // FIXME: storeModifications("external_list_item", "delete", result); // proceed to delete lists of external user - result = await this.delete("external_reading_list", { column: "user_uuid", value: externalUuid }); - storeModifications("external_list", "delete", result); + await this.delete("external_reading_list", { column: "user_uuid", value: externalUuid }); + // FIXME: storeModifications("external_list", "delete", result); // finish by deleting external user itself - result = await this.delete("external_user", { column: "uuid", value: externalUuid }); - storeModifications("external_user", "delete", result); - return result.affectedRows > 0; + await this.delete("external_user", { column: "uuid", value: externalUuid }); + // FIXME: storeModifications("external_user", "delete", result); + return false; } /** * Gets an external user. */ - public async getExternalUser>(externalUuid: T): PromiseMultiSingle { - return promiseMultiSingle(externalUuid, async (value) => { - const resultArray: any[] = await this.query("SELECT * FROM external_user WHERE uuid = ?;", value); - if (!resultArray.length) { - throw new MissingEntityError("No result found for given uuid"); - } - return this.createShallowExternalUser(resultArray[0]); - }); + public async getExternalUser(externalUuid: Uuid[]): Promise { + const resultArray = await this.con.any( + sql.type(basicDisplayExternalUser)`SELECT identifier, uuid, local_uuid, type, cookies + FROM external_user WHERE uuid = ANY(${sql.array(externalUuid, "text")});`, + ); + if (resultArray.length !== externalUuid.length) { + throw new MissingEntityError("missing queried externalUser"); + } + return Promise.all(resultArray.map((user) => this.createShallowExternalUser(user))); } /** * Gets an external user with cookies, without items. 
*/ - public async getExternalUserWithCookies(uuid: Uuid): Promise { - const value = await this.query( - "SELECT uuid, local_uuid, service, cookies FROM external_user WHERE uuid = ?;", - uuid, + public async getSimpleExternalUser(uuid: Uuid): Promise { + return this.con.one( + sql.type(simpleExternalUser)` + SELECT uuid, local_uuid, identifier, type, last_scrape, cookies + FROM external_user + WHERE uuid = ${uuid};`, ); - return { - uuid: value[0].uuid, - userUuid: value[0].local_uuid, - type: value[0].service, - cookies: value[0].cookies, - }; } /** * Return all ExternalUser not scraped in the last seven days. */ - public async getScrapeExternalUser(): Promise { - const result = await this.query( - "SELECT uuid, local_uuid, service, cookies, name, last_scrape FROM external_user " + - "WHERE last_scrape IS NULL OR last_scrape < TIMESTAMPADD(day, -7, now())", + public async getScrapeExternalUser(): Promise { + return this.con.any( + sql.type(simpleExternalUser)` + SELECT uuid, local_uuid, type, cookies, identifier, last_scrape FROM external_user + WHERE last_scrape IS NULL OR last_scrape < TIMESTAMPADD(day, -7, now())`, ); - - return result.map((value: any): ExternalUser => { - return { - uuid: value.uuid, - localUuid: value.local_uuid, - type: value.service, - cookies: value.cookies, - identifier: value.name, - lastScrape: value.last_scrape && new Date(value.last_scrape), - lists: [], - }; - }); } /** * Creates a ExternalUser with * shallow lists. 
*/ - public async createShallowExternalUser(storageUser: { - name: string; - uuid: Uuid; - service: number; - local_uuid: Uuid; - }): Promise { - const externalUser: ExternalUser = { - identifier: storageUser.name, - uuid: storageUser.uuid, - type: storageUser.service, - lists: [], - localUuid: storageUser.local_uuid, - }; - externalUser.lists = await this.parentContext.externalListContext.getExternalUserLists(externalUser.uuid); - return externalUser; + public async createShallowExternalUser(storageUser: BasicDisplayExternalUser): Promise { + const lists = await this.getContext(ExternalListContext).getExternalUserLists(storageUser.uuid); + const result = storageUser as DisplayExternalUser; + result.lists = lists; + return result; } /** * Updates an external user. */ - public async updateExternalUser(externalUser: ExternalUser): Promise { - const result = await this.update( + public async updateExternalUser(externalUser: SimpleExternalUser): Promise { + await this.update( "external_user", - (updates, values) => { + () => { + const updates = []; + if (externalUser.identifier) { - updates.push("name = ?"); - values.push(externalUser.identifier); + updates.push(sql`identifier = ${externalUser.identifier}`); } if (externalUser.lastScrape) { - updates.push("last_scrape = ?"); - values.push(externalUser.lastScrape); + updates.push(sql`last_scrape = ${externalUser.lastScrape ? 
sql.date(externalUser.lastScrape) : null}`); } if (externalUser.cookies) { - updates.push("cookies = ?"); - values.push(externalUser.cookies); + updates.push(sql`cookies = ${externalUser.cookies}`); } else if (externalUser.cookies == null) { - updates.push("cookies = NULL"); + updates.push(sql`cookies = NULL`); } + return updates; }, { column: "uuid", value: externalUser.uuid }, ); - storeModifications("external_user", "update", result); - return result.changedRows > 0; + // FIXME: storeModifications("external_user", "update", result); + return false; } } diff --git a/packages/core/src/database/contexts/genericContext.ts b/packages/core/src/database/contexts/genericContext.ts new file mode 100644 index 00000000..73127ab3 --- /dev/null +++ b/packages/core/src/database/contexts/genericContext.ts @@ -0,0 +1,285 @@ +import { getElseSetObj, getElseSet } from "../../tools"; +import { PageInfo, EmptyPromise, Uuid, NewData, DataStats, QueryItems, QueryItemsResult } from "../../types"; +import { NotImplementedError, ValidationError } from "../../error"; +import { QueryContext } from "./queryContext"; +import * as validate from "validate.js"; +import { sql } from "slonik"; +import { + basicDisplayExternalUser, + mediumInWait, + pureEpisode, + simpleExternalList, + simpleMedium, + simpleMediumToc, + simplePart, + simpleRelease, + userList, +} from "../databaseTypes"; +import { EpisodeReleaseContext } from "./episodeReleaseContext"; +import { EpisodeContext } from "./episodeContext"; +import { PartContext } from "./partContext"; +import { InternalListContext } from "./internalListContext"; +import { MediumContext } from "./mediumContext"; +import { MediumTocContext } from "./mediumTocContext"; +import { ExternalUserContext } from "./externalUserContext"; +import { ExternalListContext } from "./externalListContext"; + +/** + * Query Methods which do not pertain to a single particular entity. 
+ */ +export class GenericContext extends QueryContext { + public async getPageInfo(link: string, key: string): Promise { + if (!validate.isString(link) || !link || !key || !validate.isString(key)) { + throw new ValidationError("invalid link or key"); + } + const query = await this.con.anyFirst<{ value: string }>( + sql`SELECT value FROM page_info WHERE link=${link} AND key_string=${key}`, + ); + return { + link, + key, + values: query.filter((value) => value), + }; + } + + public async updatePageInfo(link: string, key: string, values: string[], toDeleteValues?: string[]): EmptyPromise { + if (!validate.isString(link) || !link || !key || !validate.isString(key)) { + throw new ValidationError("invalid link or key"); + } + await this.removePageInfo(link, key, toDeleteValues); + + const insertValues = values.map((value) => [link, key, value]); + await this.con.query( + sql` + INSERT INTO page_info (link, key_string, value) + SELECT * FROM ${sql.unnest(insertValues, ["text", "text", "text"])}`, + ); + } + + public async removePageInfo(link: string, key?: string, toDeleteValues?: string[]): EmptyPromise { + if (!validate.isString(link) || !link || (key && !validate.isString(key))) { + throw new ValidationError("invalid link or key"); + } + if (key) { + if (toDeleteValues) { + await this.con.query( + sql` + DELETE FROM page_info + WHERE link=${link} AND keyString=${key} AND value=ANY(${sql.array(toDeleteValues, "text")});`, + ); + } else { + await this.delete("page_info", { column: "link", value: link }, { column: "key_string", value: key }); + } + } else { + await this.delete("page_info", { column: "link", value: link }); + } + } + + public async queueNewTocs(): EmptyPromise { + throw new NotImplementedError("queueNewTocs not supported"); + } + + public async getNew(uuid: Uuid, date = new Date(0)): Promise { + const sqlTimestamp = sql.timestamp(date); + + const episodeReleasePromise = this.con.any( + sql.type(simpleRelease)` + SELECT id, episode_id, title, url, 
release_date, locked, toc_id, source_type + FROM episode_release WHERE updated_at > ${sqlTimestamp};`, + ); + const episodePromise = this.con.any( + sql.type(pureEpisode)` + SELECT episode.id, part_id, total_index, partial_index, + user_episode.progress, user_episode.read_date + FROM episode + LEFT JOIN user_episode ON episode.id=user_episode.episode_id + WHERE (user_episode.user_uuid IS NULL OR user_episode.user_uuid = ${uuid}) + AND (updated_at > ${sqlTimestamp} OR read_date > ${sqlTimestamp});`, + ); + const partPromise = this.con.any( + sql.type(simplePart)` + SELECT id, title, medium_id, total_index, partial_index, combi_index + FROM part WHERE updated_at > ${sqlTimestamp};`, + ); + + const mediumPromise = this.con.any( + sql.type(simpleMedium)`SELECT + id, country_of_origin, language_of_origin, author, artist, title, + medium, lang, state_origin, state_tl, series, universe + FROM medium WHERE updated_at > ${sqlTimestamp}`, + ); + const listPromise = this.con.any( + sql.type(userList)` + SELECT id, name, medium FROM reading_list WHERE user_uuid=${uuid} AND updated_at > ${sqlTimestamp};`, + ); + const exListPromise = this.con.any( + sql.type(simpleExternalList)` + SELECT list.id, list.name, list.user_uuid, list.medium, list.url + FROM external_user INNER JOIN external_reading_list as list ON uuid=user_uuid + WHERE local_uuid=${uuid} AND list.updated_at > ${sqlTimestamp};`, + ); + const exUserPromise = this.con.any( + sql.type(basicDisplayExternalUser)` + SELECT identifier, uuid, type, local_uuid + FROM external_user WHERE local_uuid = ${uuid} AND updated_at > ${sqlTimestamp}`, + ); + const mediumInWaitPromise = this.con.any( + sql.type(mediumInWait)`SELECT title, medium, link FROM medium_in_wait WHERE updated_at > ${sqlTimestamp}`, + ); + const newsPromise = this.con.any( + sql`SELECT id, title, link, date, CASE WHEN user_id IS NULL THEN false ELSE true END as read + FROM news_board LEFT JOIN news_user ON id=news_id + WHERE (user_id IS NULL OR user_id = 
${uuid}) AND updated_at > ${sqlTimestamp}`, + ); + const tocPromise = this.con.any( + sql.type(simpleMediumToc)` + SELECT id, medium_id, link, country_of_origin, language_of_origin, author, artist, title, + medium, lang, state_origin, state_tl, series, universe + FROM medium_toc WHERE updated_at > ${sqlTimestamp}`, + ); + return { + tocs: await tocPromise, + media: await mediumPromise, + releases: await episodeReleasePromise, + episodes: await episodePromise, + parts: await partPromise, + lists: await listPromise, + extLists: await exListPromise, + extUser: await exUserPromise, + mediaInWait: await mediumInWaitPromise, + news: await newsPromise, + }; + } + + public async getStat(uuid: Uuid): Promise { + const episodePromise = this.con.query<{ + part_id: number; + episodeCount: number; + episodeSum: number; + releaseCount: number; + }>( + sql` + SELECT part_id, count(distinct episode.id) as episode_count, sum(distinct episode.id) as episode_sum, count(url) as release_count + FROM episode LEFT JOIN episode_release ON episode.id=episode_release.episode_id + GROUP BY part_id`, + ); + const partPromise = this.con.query<{ id: number; medium_id: number }>(sql`SELECT part.id, medium_id FROM part;`); + const listPromise = this.con.query<{ id: number; medium_id: number }>( + sql`SELECT id, medium_id FROM reading_list LEFT JOIN list_medium ON reading_list.id=list_id WHERE user_uuid=${uuid}`, + ); + const exListPromise = this.con.query<{ id: number; medium_id: number }>( + sql` + SELECT id, medium_id + FROM external_user + INNER JOIN external_reading_list ON uuid=user_uuid + LEFT JOIN external_list_medium ON external_reading_list.id=list_id + WHERE local_uuid=${uuid}`, + ); + const extUserPromise = this.con.query<{ id: number; uuid: string }>( + sql`SELECT uuid, id FROM external_user LEFT JOIN external_reading_list ON uuid=user_uuid WHERE local_uuid=${uuid}`, + ); + const tocPromise = this.con.query<{ medium_id: number; count: number }>( + sql`SELECT medium_id, count(link) as 
"count" FROM medium_toc GROUP BY medium_id;`, + ); + + const tocs = await tocPromise; + const parts = await partPromise; + const episodes = await episodePromise; + const emptyPart = { episodeCount: 0, episodeSum: 0, releaseCount: 0 }; + const partMap = new Map(); + + for (const episode of episodes.rows) { + partMap.set(episode.part_id, episode); + // @ts-expect-error + delete episode.part_id; + } + const media = {}; + const mediaStats = {}; + const lists = {}; + const extLists = {}; + const extUser = {}; + + for (const toc of tocs.rows) { + const medium = getElseSetObj(mediaStats, toc.medium_id, () => { + return { + tocs: 0, + }; + }); + medium.tocs = toc.count; + } + + for (const part of parts.rows) { + const mediumParts: any = getElseSetObj(media, part.medium_id, () => ({})); + mediumParts[part.id] = getElseSet(partMap, part.id, () => emptyPart); + } + + for (const list of (await listPromise).rows) { + const listMedia: number[] = getElseSetObj(lists, list.id, () => []); + if (list.medium_id != null) { + listMedia.push(list.medium_id); + } + } + + for (const list of (await exListPromise).rows) { + const listMedia: number[] = getElseSetObj(extLists, list.id, () => []); + + if (list.medium_id != null) { + listMedia.push(list.medium_id); + } + } + + for (const user of (await extUserPromise).rows) { + const userLists: number[] = getElseSetObj(extUser, user.uuid, () => []); + userLists.push(user.id); + } + return { + media, + mediaStats, + lists, + extLists, + extUser, + }; + } + + public async queryItems(uuid: Uuid, query: QueryItems): Promise { + const [ + externalUser, + externalMediaLists, + mediaLists, + mediaTocs, + tocs, + media, + parts, + partReleases, + partEpisodes, + episodes, + episodeReleases, + ] = await Promise.all([ + this.getContext(ExternalUserContext).getExternalUser(query.externalUser), + Promise.all(query.externalMediaLists.map((id) => this.getContext(ExternalListContext).getExternalList(id))), + 
this.getContext(InternalListContext).getShallowList(query.mediaLists, uuid), + this.getContext(MediumTocContext).getTocsByMediumIds(query.mediaTocs), + this.getContext(MediumTocContext).getTocsByIds(query.tocs), + this.getContext(MediumContext).getSimpleMedium(query.media), + this.getContext(PartContext).getParts(query.parts, uuid, false), + this.getContext(PartContext).getPartReleases(query.partReleases), + this.getContext(PartContext).getPartItems(query.partEpisodes), + this.getContext(EpisodeContext).getEpisode(query.episodes, uuid), + this.getContext(EpisodeReleaseContext).getReleases(query.episodeReleases), + ]); + + return { + episodeReleases, // by episode id + episodes, + partEpisodes, // by part id + partReleases, // by part id + parts, + media, + tocs, // by toc id + mediaTocs, // by medium id + mediaLists, + externalMediaLists, + externalUser, + }; + } +} diff --git a/packages/core/src/database/contexts/helper.ts b/packages/core/src/database/contexts/helper.ts new file mode 100644 index 00000000..720ba276 --- /dev/null +++ b/packages/core/src/database/contexts/helper.ts @@ -0,0 +1,16 @@ +import { sql, ValueExpression } from "slonik"; + +export function joinIdentifier(identifier: string[]) { + return sql.join( + identifier.map((value) => sql.identifier([value])), + sql`,`, + ); +} + +export function joinComma(values: ValueExpression[]) { + return sql.join(values, sql`,`); +} + +export function joinAnd(values: ValueExpression[]) { + return sql.join(values, sql` AND `); +} diff --git a/packages/core/src/database/contexts/internalListContext.ts b/packages/core/src/database/contexts/internalListContext.ts index a8b8bd4a..b3b08558 100644 --- a/packages/core/src/database/contexts/internalListContext.ts +++ b/packages/core/src/database/contexts/internalListContext.ts @@ -1,85 +1,84 @@ -import { SubContext } from "./subContext"; -import { List, Uuid, MultiSingleNumber, MinList, StorageList, ListMedia, PromiseMultiSingle } from "../../types"; -import { 
promiseMultiSingle, multiSingle } from "../../tools"; -import { storeModifications } from "../sqlTools"; -import { DatabaseError, MissingEntityError, ValidationError } from "../../error"; - -export class InternalListContext extends SubContext { +import { List, Uuid, ListMedia, Id, Insert } from "../../types"; +import { promiseMultiSingle } from "../../tools"; +import { MissingEntityError, ValidationError } from "../../error"; +import { QueryContext } from "./queryContext"; +import { sql } from "slonik"; +import { entity, SimpleList, simpleList } from "../databaseTypes"; +import { MediumContext } from "./mediumContext"; + +export class InternalListContext extends QueryContext { /** * Adds a list to the storage and * links it to the user of the uuid. */ - public async addList(uuid: Uuid, { name, medium }: MinList): Promise { - const result = await this.query("INSERT INTO reading_list (user_uuid, name, medium) VALUES (?,?,?)", [ - uuid, - name, - medium, - ]); - storeModifications("list", "insert", result); - if (!Number.isInteger(result.insertId)) { - throw new DatabaseError(`insert failed, invalid ID: ${result.insertId + ""}`); - } - return { - id: result.insertId, - items: [], - name, - medium, - userUuid: uuid, - }; + public async addList({ name, medium, userUuid }: Insert): Promise { + return this.con.one( + sql.type(simpleList)` + INSERT INTO reading_list (user_uuid, name, medium) + VALUES (${userUuid},${name},${medium}) RETURNING id, name, medium, user_uuid`, + ); } /** * Returns all mediums of a list with * the list_id. */ - public async getList(listId: T, media: number[], uuid: Uuid): Promise { + public async getLists(listId: number[], media: number[], uuid: Uuid): Promise { const toLoadMedia: Set = new Set(); - // TODO: 29.06.2019 replace with id IN (...) 
- const lists = await promiseMultiSingle(listId, async (id: number) => { - const result = await this.query("SELECT * FROM reading_list WHERE id = ?;", id); - const list = await this.createShallowList(result[0]); - - for (const itemId of list.items) { - if (!media.includes(itemId)) { - toLoadMedia.add(itemId); + + const simpleLists = await this.con.any( + sql.type(simpleList)` + SELECT id, name, medium, user_uuid + FROM reading_list + WHERE id = ANY(${sql.array(listId, "int8")});`, + ); + + const lists = await Promise.all( + simpleLists.map(async (item) => { + const list = await this.createShallowList(item); + + for (const itemId of list.items) { + if (!media.includes(itemId)) { + toLoadMedia.add(itemId); + } } - } - return list; - }); + return list; + }), + ); - const loadedMedia = await this.parentContext.mediumContext.getMedium([...toLoadMedia], uuid); + const loadedMedia = await this.getContext(MediumContext).getMedium([...toLoadMedia], uuid); return { list: lists, media: loadedMedia }; } - public async getShallowList(listId: T, uuid: Uuid): PromiseMultiSingle { - // TODO: 29.06.2019 replace with id IN (...) - return promiseMultiSingle(listId, async (id: number) => { - const result = await this.query("SELECT * FROM reading_list WHERE uuid = ? AND id = ?;", [uuid, id]); - return this.createShallowList(result[0]); - }); + public async getShallowList(listId: number[], uuid: Uuid): Promise { + const result = await this.con.any( + sql.type(simpleList)` + SELECT id, name, medium, user_uuid + FROM reading_list + WHERE uuid = ${uuid} AND id = ANY(${sql.array(listId, "int8")});`, + ); + return Promise.all(result.map((list) => this.createShallowList(list))); } /** * Recreates a list from storage. 
*/ - public async createShallowList(storageList: StorageList): Promise { + public async createShallowList(storageList: SimpleList): Promise { if (!storageList.name) { throw new ValidationError("Missing List Name"); } - const list: List = { - items: [], + const result = await this.con.anyFirst( + sql.type(entity)`SELECT medium_id as id FROM list_medium WHERE list_id = ${storageList.id};`, + ); + return { + items: [...result], name: storageList.name, medium: storageList.medium, id: storageList.id, - userUuid: storageList.user_uuid, + userUuid: storageList.userUuid, }; - - const result = await this.query("SELECT medium_id FROM list_medium WHERE list_id = ?", storageList.id); - list.items.push(...result.map((value: any) => value.medium_id)); - - return list; } /** @@ -91,44 +90,47 @@ export class InternalListContext extends SubContext { } const result = await this.update( "reading_list", - (updates, values) => { + () => { + const updates = []; + if (list.name) { - updates.push("name = ?"); - values.push(list.name); + updates.push(sql`name = ${list.name}`); } if (list.medium) { - updates.push("medium = ?"); - values.push(list.medium); + updates.push(sql`medium = ${list.medium}`); } + return updates; }, { column: "id", value: list.id, }, ); - storeModifications("list", "update", result); - return result.changedRows > 0; + // FIXME: storeModifications("list", "update", result); + return result.rowCount > 0; } /** * Deletes a single list irreversibly. */ public async deleteList(listId: number, uuid: Uuid): Promise { - const result = await this.query("SELECT id FROM reading_list WHERE id = ? 
AND user_uuid = ?", [listId, uuid]); + const exists = await this.con.exists( + sql.type(entity)`SELECT id FROM reading_list WHERE id = ${listId} AND user_uuid = ${uuid}`, + ); // first check if such a list does exist for the given user - if (!result.length) { + if (!exists) { return Promise.reject(new MissingEntityError(`List ${listId}-${uuid} does not exist`)); } // first remove all links between a list and their media - let deleteResult = await this.delete("list_medium", { column: "list_id", value: listId }); - storeModifications("list_item", "delete", deleteResult); + await this.delete("list_medium", { column: "list_id", value: listId }); + // FIXME: storeModifications("list_item", "delete", deleteResult); // lastly delete the list itself - deleteResult = await this.delete("reading_list", { column: "id", value: listId }); - storeModifications("list", "delete", deleteResult); - return deleteResult.affectedRows > 0; + await this.delete("reading_list", { column: "id", value: listId }); + // FIXME: storeModifications("list", "delete", deleteResult); + return false; } /** @@ -136,10 +138,12 @@ export class InternalListContext extends SubContext { */ public async getUserLists(uuid: Uuid): Promise { // query all available lists for user - const result = await this.query("SELECT * FROM reading_list WHERE reading_list.user_uuid = ?;", [uuid, uuid]); + const result = await this.con.any( + sql.type(simpleList)` + SELECT id, name, medium, user_uuid FROM reading_list WHERE reading_list.user_uuid = ${uuid};`, + ); // query a shallow list, so that only the id´s of their media is contained - // @ts-expect-error return Promise.all(result.map((value) => this.createShallowList(value))); } @@ -149,36 +153,45 @@ export class InternalListContext extends SubContext { * If no listId is available it selects the * 'Standard' List of the given user and adds it there. 
*/ - public async addItemToList(medium: { id: number | number[]; listId?: number }, uuid?: Uuid): Promise { + public async addItemsToList(mediumIds: number[], uuid: Uuid, targetListId?: Id): Promise { // TODO: 27.02.2020 use uuid to check that listId is owned by uuid + let listId: number; // if list_ident is not a number, // then take it as uuid from user and get the standard listId of 'Standard' list - if (medium.listId == null || !Number.isInteger(medium.listId)) { - if (!uuid) { - throw new ValidationError("Missing uuid"); - } - const idResult = await this.query( - "SELECT id FROM reading_list WHERE `name` = 'Standard' AND user_uuid = ?;", - uuid, + if (!targetListId) { + const idResult = await this.con.oneFirst( + sql.type(entity)`SELECT id FROM reading_list WHERE name = 'Standard' AND user_uuid = ${uuid};`, + ); + listId = idResult; + } else { + const ownedByUuid = await this.con.exists( + sql`SELECT 1 FROM reading_list WHERE id = ${targetListId} AND user_uuid = ${uuid};`, ); - medium.listId = idResult[0].id; - } - const result = await this.multiInsert( - "INSERT IGNORE INTO list_medium (list_id, medium_id) VALUES", - medium.id, - (value) => [medium.listId, value], - ); - let added = false; - // @ts-expect-error - multiSingle(result, (value: OkPacket) => { - storeModifications("list_item", "insert", value); - if (value.affectedRows > 0) { - added = true; + if (!ownedByUuid) { + throw Error("cannot add item to list it does not own"); } - }); - return added; + + listId = targetListId; + } + const values = mediumIds.map((mediumId) => [listId, mediumId]); + + await this.con.query( + sql` + INSERT INTO list_medium (list_id, medium_id) + SELECT * FROM ${sql.unnest(values, ["int8", "int8"])} + ON CONFLICT DO NOTHING`, + ); + // FIXME: let added = false; + // multiSingle(result, (value) => { + // storeModifications("list_item", "insert", value); + + // if (value.rowCount > 0) { + // added = true; + // } + // }); + return false; } /** @@ -186,11 +199,11 @@ export class 
InternalListContext extends SubContext { * * @return {Promise} */ - public async moveMedium(oldListId: number, newListId: number, mediumId: number | number[]): Promise { + public async moveMedium(oldListId: number, newListId: number, mediumIds: number[], uuid: Uuid): Promise { // first remove medium from old list - await this.removeMedium(oldListId, mediumId); + await this.removeMedium(oldListId, mediumIds); // add item to new list - return this.addItemToList({ listId: newListId, id: mediumId }); + return this.addItemsToList(mediumIds, uuid, newListId); } /** @@ -209,8 +222,8 @@ export class InternalListContext extends SubContext { value, }, ); - storeModifications("list_item", "delete", result); - return result.affectedRows > 0; + // FIXME: storeModifications("list_item", "delete", result); + return result.rowCount > 0; }); return Array.isArray(results) ? results.some((v) => v) : results; } diff --git a/packages/core/src/database/contexts/jobContext.ts b/packages/core/src/database/contexts/jobContext.ts index 48c28556..ef04b72e 100644 --- a/packages/core/src/database/contexts/jobContext.ts +++ b/packages/core/src/database/contexts/jobContext.ts @@ -1,16 +1,11 @@ -import { SubContext } from "./subContext"; import { - JobItem, JobRequest, JobState, JobStats, AllJobStats, EmptyPromise, - MultiSingleValue, - PromiseMultiSingle, Optional, JobDetails, - JobHistoryItem, JobStatFilter, BasicJobStats, TimeBucket, @@ -23,13 +18,22 @@ import { QueryJobHistory, Paginated, } from "../../types"; -import { isString, promiseMultiSingle, multiSingle, defaultNetworkTrack } from "../../tools"; +import { isString, promiseMultiSingle, defaultNetworkTrack } from "../../tools"; import logger from "../../logger"; -import mysql from "promise-mysql"; import { escapeLike } from "../storages/storageTools"; import { requireStore, StoreKey } from "../../asyncStorage"; -import { storeModifications } from "../sqlTools"; import { DatabaseError, ValidationError } from "../../error"; +import { 
QueryContext } from "./queryContext"; +import { sql } from "slonik"; +import { + SimpleJob, + simpleJob, + SimpleJobHistory, + simpleJobHistory, + SimpleJobStatSummary, + simpleJobStatSummary, +} from "../databaseTypes"; +import { joinAnd, joinComma } from "./helper"; interface CountValue { count: number; @@ -49,9 +53,9 @@ export interface JobQuery { limit?: number; } -export class JobContext extends SubContext { +export class JobContext extends QueryContext { public async getJobsStats(): Promise { - const results: AllJobStats[] = await this.getStats(); + const results: readonly AllJobStats[] = await this.getStats(); return results[0]; } @@ -64,59 +68,72 @@ export class JobContext extends SubContext { } private async getStats(statFilter?: JobStatFilter): Promise { - let filterColumn = ""; - let groupBy = ""; + const filterColumn = []; + const groupBy = []; let minMax = true; if (statFilter?.type === "named") { - filterColumn = "name,"; - groupBy = "group by name"; + filterColumn.push(sql`name`); + groupBy.push(sql.identifier(["name"])); } else if (statFilter?.type === "timed") { minMax = false; - groupBy = "group by timepoint"; + groupBy.push(sql.identifier(["timepoint"])); if (statFilter.groupByDomain) { - groupBy += ", name"; - filterColumn += "name,"; + groupBy.push(sql`name`); + filterColumn.push(sql`name`); } if (statFilter.unit === "day") { - filterColumn += - "TIMESTAMPADD(second, -SECOND(`start`)-MINUTE(`start`)*60-HOUR(start)*3600, start) as timepoint,"; + filterColumn.push(sql`name`); + filterColumn.push(sql`date_trunc('day', start) as timepoint`); } else if (statFilter.unit === "hour") { - filterColumn += "TIMESTAMPADD(second, -SECOND(`start`)-MINUTE(`start`)*60, start) as timepoint,"; + filterColumn.push(sql`date_trunc('hour', start) as timepoint`); } else if (statFilter.unit === "minute") { - filterColumn += "TIMESTAMPADD(second, -SECOND(`start`), start) as timepoint,"; + filterColumn.push(sql`date_trunc('minute', start) as timepoint`); } } - const 
values = await this.query(` - SELECT - ${filterColumn} + const empty = sql``; + const values = (await this.con.any( + sql` + SELECT + ${filterColumn.length ? sql`${joinComma(filterColumn)},` : sql``} + AVG(network_queries) as avgnetwork, AVG(network_queries) as avgnetwork, - ${minMax ? "MIN(network_queries) as minnetwork," : ""} - ${minMax ? "MAX(network_queries) as maxnetwork, " : ""} + AVG(network_queries) as avgnetwork, + ${minMax ? sql`MIN(network_queries) as minnetwork,` : empty} + ${minMax ? sql`MAX(network_queries) as maxnetwork, ` : empty} + AVG(network_received) as avgreceived, AVG(network_received) as avgreceived, - ${minMax ? "MIN(network_received) as minreceived," : ""} - ${minMax ? "MAX(network_received) as maxreceived, " : ""} + AVG(network_received) as avgreceived, + ${minMax ? sql`MIN(network_received) as minreceived,` : empty} + ${minMax ? sql`MAX(network_received) as maxreceived, ` : empty} + AVG(network_send) as avgsend, AVG(network_send) as avgsend, - ${minMax ? "MIN(network_send) as minsend, " : ""} - ${minMax ? "MAX(network_send) as maxsend, " : ""} + AVG(network_send) as avgsend, + ${minMax ? sql`MIN(network_send) as minsend, ` : empty} + ${minMax ? sql`MAX(network_send) as maxsend, ` : empty} + AVG(duration) as avgduration, AVG(duration) as avgduration, - ${minMax ? "MAX(duration) maxD, " : ""} - ${minMax ? "MIN(duration) minD," : ""} - Count(*) as count, - AVG(lagging) as avglagging, - SUM(updated) as allupdate, - SUM(created) as allcreate, - SUM(deleted) as alldelete, - AVG(CASE WHEN \`result\` = 'failed' THEN 1 ELSE 0 END) as failed, - AVG(CASE WHEN \`result\` = 'success' THEN 1 ELSE 0 END) as succeeded, - AVG(queries) as queries${minMax ? "," : ""} - ${minMax ? "MAX(queries) maxQ, " : ""} - ${minMax ? "MIN(CASE WHEN queries = 0 THEN NULL ELSE queries END) minQ" : ""} - FROM job_history - ${groupBy} - ;`); + AVG(duration) as avgduration, + ${minMax ? sql`MAX(duration) maxD, ` : empty} + ${minMax ? 
sql`MIN(duration) minD,` : empty} + Count(*) as count, + AVG(lagging) as avglagging, + SUM(updated) as allupdate, + SUM(created) as allcreate, + SUM(deleted) as alldelete, + AVG(CASE WHEN "result" = 'failed' THEN 1 ELSE 0 END) as failed, + AVG(CASE WHEN "result" = 'failed' THEN 1 ELSE 0 END) as failed, + AVG(CASE WHEN "result" = 'failed' THEN 1 ELSE 0 END) as failed, + AVG(CASE WHEN "result" = 'success' THEN 1 ELSE 0 END) as succeeded, + AVG(queries) as queries${minMax ? sql`,` : empty} + ${minMax ? sql`MAX(queries) maxQ, ` : empty} + ${minMax ? sql`MIN(CASE WHEN queries = 0 THEN NULL ELSE queries END) minQ` : empty} + FROM job_history + ${groupBy.length ? sql`GROUP BY ${joinComma(groupBy)}` : empty};`, + )) as unknown as Array; + if (statFilter?.type === "timed") { for (const value of values) { value.timepoint = new Date(value.timepoint); @@ -145,7 +162,7 @@ export class JobContext extends SubContext { "succeeded", ]; - for (const value of values as Array) { + for (const value of values) { let match = tocJob.exec(value.name); let group = dateMapping.get(value.timepoint.getTime()); @@ -227,31 +244,35 @@ export class JobContext extends SubContext { value.value.domain[domain] = domainValue.value; } - values.push(value.value); + values.push(value.value as any); } } - return values; + return values as any[]; } - public async getJobsStatsSummary(): Promise { - return this.query("SELECT * FROM job_stat_summary;"); + public getJobsStatsSummary(): Promise { + // TODO: typing this beast + return this.con.any(sql`SELECT * FROM job_stat_summary;`); } public async getJobDetails(id: number): Promise { - const jobPromise: Promise = this.query("SELECT * FROM jobs WHERE id = ?", id); - const historyPromise: Promise = this.query( - ` - SELECT * FROM job_history - WHERE name = (SELECT name FROM job_history WHERE id = ? 
LIMIT 1) + const jobPromise = this.con.maybeOne(sql.type(simpleJob)`SELECT * FROM jobs WHERE id = ${id}`); + const historyPromise = this.con.any( + sql.type(simpleJobHistory)` + SELECT + id, name, type, start, "end", arguments, result, message, context, + scheduled_at, created, updated, deleted, queries, network_queries, + network_received, network_send, lagging, duration + FROM job_history + WHERE name = (SELECT name FROM job_history WHERE id = ${id} LIMIT 1) ORDER BY start DESC; `, - id, ); - const [jobs, history] = await Promise.all([jobPromise, historyPromise]); + const [job, history] = await Promise.all([jobPromise, historyPromise]); return { - job: jobs[0], + job, history, }; } @@ -269,157 +290,181 @@ export class JobContext extends SubContext { noBoundaries: true, singleQuotes: true, }); - const result = await this.query(`DELETE FROM jobs WHERE ${mysql.escapeId(column)} LIKE ?`, like); - storeModifications("job", "delete", result); + await this.con.query(sql`DELETE FROM jobs WHERE ${sql.identifier([column])} LIKE ${like};`); + // FIXME: storeModifications("job", "delete", result); } } - public async getJobs(limit = 50): Promise { + public async getJobs(limit = 50): Promise { if (limit <= 0 || !limit) { limit = 50; } - return this.query( - "SELECT * FROM jobs WHERE (nextRun IS NULL OR nextRun < NOW()) AND state = 'waiting' AND job_state != 'disabled' order by nextRun LIMIT ?", - limit, + return this.con.any( + sql.type(simpleJob)` + SELECT id, type, name, state, interval, delete_after_run, + running_since, run_after, last_run, next_run, arguments, enabled + FROM jobs + WHERE (nextRun IS NULL OR nextRun < NOW()) AND state = 'waiting' AND enabled != 'disabled' order by nextRun LIMIT ${limit};`, ); } - public async queryJobs({ limit }: JobQuery = {}): Promise { - const values = []; - if (limit) { - values.push(limit); - } - return this.query( - "SELECT * FROM jobs " + - "WHERE (nextRun IS NULL OR nextRun < NOW()) " + - "AND state = 'waiting' AND job_state != 
'disabled' " + - "order by nextRun" + - (limit ? " LIMIT ?" : ""), - values, + public async queryJobs({ limit }: JobQuery = {}): Promise { + return this.con.any( + sql.type(simpleJob)` + SELECT id, type, name, state, interval, delete_after_run, + running_since, run_after, last_run, next_run, arguments, enabled + FROM jobs + WHERE (next_run IS NULL OR next_run < NOW()) + AND state = 'waiting' AND enabled + order by next_run + ${limit ? sql` LIMIT ${limit}` : sql``}`, ); } - public async getAllJobs(): Promise { - return this.query("SELECT id, name, state, runningSince, nextRun, job_state, `interval`, type FROM jobs;"); + public async getAllJobs(): Promise { + return this.con.any( + sql.type(simpleJob)` + SELECT id, type, name, state, interval, delete_after_run, + running_since, run_after, last_run, next_run, arguments, enabled + FROM jobs;`, + ); } - public getJobsById(jobIds: number[]): Promise { - return this.queryInList("SELECT * FROM jobs WHERE id IN (??);", [jobIds]) as Promise; + public async getJobsById(jobIds: readonly number[]): Promise { + return this.con.any( + sql.type(simpleJob)` + SELECT id, type, name, state, interval, delete_after_run, + running_since, run_after, last_run, next_run, arguments, enabled + FROM jobs + WHERE id = ANY(${sql.array(jobIds, "int8")});`, + ); + } + + public async getJobsByName(names: readonly string[]): Promise { + return this.con.any( + sql.type(simpleJob)` + SELECT id, type, name, state, interval, delete_after_run, + running_since, run_after, last_run, next_run, arguments, enabled + FROM jobs + WHERE (next_run IS NULL OR next_run < NOW()) AND state = 'waiting' AND name = ANY(${sql.array(names, "text")});`, + ); } - public getJobsByName(names: string[]): Promise { - return this.queryInList( - "SELECT * FROM jobs WHERE (nextRun IS NULL OR nextRun < NOW()) AND state = 'waiting' AND name IN (??);", - [names], - ) as Promise; + public async getJobsInState(state: JobState): Promise { + return this.con.any( + sql.type(simpleJob)` + 
SELECT id, type, name, state, interval, delete_after_run, + running_since, run_after, last_run, next_run, arguments, enabled + FROM jobs + WHERE state = ${state} order by next_run`, + ); } public async stopJobs(): EmptyPromise { - await this.query("UPDATE jobs SET state = ?", JobState.WAITING); - await this.query("CREATE TEMPORARY TABLE tmp_jobs (id INT UNSIGNED NOT NULL)"); - await this.query("INSERT INTO tmp_jobs SELECT id from jobs"); - await this.query("DELETE FROM jobs WHERE runAfter IS NOT NULL AND runAfter NOT IN (SELECT id FROM tmp_jobs)"); - await this.query("DROP TEMPORARY TABLE tmp_jobs"); + await this.con.query(sql`UPDATE jobs SET state = ${JobState.WAITING}`); + await this.con.query(sql`CREATE TEMPORARY TABLE tmp_jobs (id INT NOT NULL)`); + await this.con.query(sql`INSERT INTO tmp_jobs SELECT id from jobs`); + await this.con.query( + sql`DELETE FROM jobs WHERE run_after IS NOT NULL AND run_after NOT IN (SELECT id FROM tmp_jobs)`, + ); + await this.con.query(sql`DROP TABLE tmp_jobs`); } - public async getAfterJobs(id: number): Promise { - return this.query("SELECT * FROM jobs WHERE `runAfter` = ? 
AND `state` != 'running'", id); + public async getAfterJobs(id: number): Promise { + return this.con.any( + sql.type(simpleJob)` + SELECT id, type, name, state, interval, delete_after_run, + running_since, run_after, last_run, next_run, arguments, enabled + FROM jobs + WHERE run_after = ${id} AND state != 'running';`, + ); } - public async addJobs>(jobs: T): PromiseMultiSingle { + public async addJobs(jobs: JobRequest[]): Promise { const now = new Date(); - const currentJobs: Array<{ id: number; name: string }> = await this.query("SELECT id, name FROM jobs"); - return promiseMultiSingle(jobs, async (value: JobRequest): Promise => { - let args = value.arguments; - if (value.arguments && !isString(value.arguments)) { - args = JSON.stringify(value.arguments); - } - let runAfter: Optional; + const currentJobs = await this.getAllJobs(); - // @ts-expect-error - if (value.runAfter?.id && Number.isInteger(value.runAfter.id)) { - // @ts-expect-error - runAfter = value.runAfter.id; - } - const nextRun = value.runImmediately ? 
now : null; - const foundJob = currentJobs.find((job) => job.name === value.name); + return Promise.all( + jobs.map(async (value: JobRequest): Promise => { + let args = value.arguments; + if (value.arguments && !isString(value.arguments)) { + args = JSON.stringify(value.arguments); + } + let runAfter: Optional; - if (foundJob) { // @ts-expect-error - value.id = foundJob.id; - } else { - const result = await this.query( - "INSERT IGNORE INTO jobs " + - "(`type`, `name`, `state`, `interval`, `deleteAfterRun`, `runAfter`, `arguments`, `nextRun`, `job_state`) " + - "VALUES (?,?,?,?,?,?,?,?, 'enabled')", - [value.type, value.name, JobState.WAITING, value.interval, value.deleteAfterRun, runAfter, args, nextRun], - ); - // the only reason it should fail to insert is when its name constraint is violated - if (!result.insertId) { - throw new DatabaseError("could not add job: " + JSON.stringify(value) + " nor find it"); - } else { + if (value.runAfter?.id && Number.isInteger(value.runAfter.id)) { // @ts-expect-error - value.id = result.insertId; + runAfter = value.runAfter.id; } - storeModifications("job", "insert", result); - } - // @ts-expect-error - delete value.runImmediately; - return value as unknown as JobItem; - }); + const foundJob = currentJobs.find((job) => job.name === value.name); + + if (foundJob) { + return foundJob; + } else { + const nextRun = value.runImmediately ? sql.timestamp(now) : null; + const maybeJob = await this.con.maybeOne( + sql.type(simpleJob)`INSERT INTO jobs + (type, name, state, interval, delete_after_run, run_after, arguments, next_run, enabled) + VALUES ( + ${value.type},${value.name},${JobState.WAITING},${value.interval}, + ${value.deleteAfterRun},${runAfter ?? null},${args ?? 
null},${nextRun}, true + ) ON CONFLICT DO NOTHING + RETURNING id, type, name, state, interval, delete_after_run, run_after, arguments, next_run, enabled;`, + ); + // the only reason it should fail to insert is when its name constraint is violated + if (!maybeJob) { + throw new DatabaseError("could not add job: " + JSON.stringify(value) + " nor find it"); + } + return maybeJob; + // FIXME: storeModifications("job", "insert", result); + } + }), + ); } - public async removeJobs(jobs: JobItem | JobItem[], finished?: Date): EmptyPromise { - const params = multiSingle(jobs, (val) => val.id); - const result = await this.queryInList("DELETE FROM jobs WHERE id IN (??);", [params]); - multiSingle(result, (value) => storeModifications("job", "delete", value)); + public async removeJobs(jobs: readonly SimpleJob[]): EmptyPromise { + const params = jobs.map((val) => val.id); + await this.con.query(sql`DELETE FROM jobs WHERE id = ANY(${sql.array(params, "int8")});`); + // FIXME: multiSingle(result, (value) => storeModifications("job", "delete", value)); + } - if (finished) { - await this.addJobHistory(jobs, finished); - } + public async removeFinishedJob(job: SimpleJob, finished: Date, previousScheduledAt: Date | undefined): EmptyPromise { + await this.con.query(sql`DELETE FROM jobs WHERE id = ${job.id};`); + // FIXME: multiSingle(result, (value) => storeModifications("job", "delete", value)); + await this.addJobHistory(job, finished, previousScheduledAt); } public async removeJob(key: string | number): EmptyPromise { - let result; - if (isString(key)) { - result = await this.query("DELETE FROM jobs WHERE `name` = ?", key); - } else { - result = await this.query("DELETE FROM jobs WHERE id = ?", key); - } - storeModifications("job", "delete", result); + await this.con.query(sql`DELETE FROM jobs WHERE ${isString(key) ? 
sql`name` : sql`id`} = ${key}`); + // FIXME: storeModifications("job", "delete", result); } public async updateJobsEnable(id: Id, enabled: boolean): EmptyPromise { - await this.query("UPDATE jobs SET job_state = ? WHERE id = ?", [enabled ? "enabled" : "disabled", id]); + await this.con.query(sql`UPDATE jobs SET enabled = ${enabled} WHERE id = ${id}`); } - public async updateJobs(jobs: JobItem | JobItem[], finished?: Date): EmptyPromise { - await promiseMultiSingle(jobs, (value: JobItem) => { + public async updateJobs(job: SimpleJob | SimpleJob[]): EmptyPromise { + await promiseMultiSingle(job, (value: SimpleJob) => { return this.update( "jobs", - (updates, values) => { - updates.push("state = ?"); - values.push(value.state); - + () => { // for now updateJobs is used only to switch between the running states running and waiting - updates.push("runningSince = ?"); if (value.state === JobState.RUNNING && !value.runningSince) { throw new ValidationError("No running_since value on running job!"); } - values.push(value.runningSince); - - updates.push("lastRun = ?"); - values.push(value.lastRun); - - updates.push("nextRun = ?"); - values.push(value.nextRun); - - updates.push("arguments = ?"); let args = value.arguments; + if (value.arguments && !isString(value.arguments)) { args = JSON.stringify(value.arguments); } - values.push(args); + const updates = []; + updates.push(sql`state = ${value.state}`); + updates.push(sql`running_since = ${value.runningSince ? sql.timestamp(value.runningSince) : null}`); + updates.push(sql`last_run = ${value.lastRun ? sql.timestamp(value.lastRun) : null}`); + updates.push(sql`next_run = ${value.nextRun ? sql.timestamp(value.nextRun) : null}`); + updates.push(sql`arguments = ${args ?? 
null}`); + return updates; }, { column: "id", @@ -427,14 +472,11 @@ export class JobContext extends SubContext { }, ); }); - - if (finished) { - await this.addJobHistory(jobs, finished); - } } - public getJobsInState(state: JobState): Promise { - return this.query("SELECT * FROM jobs WHERE state = ? order by nextRun", state); + public async updateFinishedJob(job: SimpleJob, finished: Date, previousScheduledAt: Date | undefined): EmptyPromise { + await this.updateJobs(job); + await this.addJobHistory(job, finished, previousScheduledAt); } /** @@ -444,19 +486,29 @@ export class JobContext extends SubContext { * @param limit max number of results or all if negative * @returns a Query object */ - public async getJobHistoryStream(since: Date, limit: number): Promise> { - let query = "SELECT * FROM job_history WHERE start < ? ORDER BY start DESC"; - const values = [since.toISOString()] as any[]; - - if (limit >= 0) { - query += " LIMIT ?;"; - values.push(limit); - } - return this.queryStream(query, values); + public async getJobHistoryStream(since: Date, limit: number): Promise> { + return this.stream(sql.type(simpleJobHistory)` + SELECT + id, arguments, message, context, scheduled_at, + created, updated, deleted, queries, + network_queries, network_received, network_send, + lagging, duration, type, name, start, "end", result + FROM job_history + WHERE start < ${sql.timestamp(since)} + ORDER BY start DESC${limit >= 0 ? 
sql` LIMIT ${limit}` : sql``}`); } - public async getJobHistory(): Promise { - return this.query("SELECT * FROM job_history ORDER BY start;"); + public async getJobHistory(): Promise { + return this.con.any( + sql.type(simpleJobHistory)` + SELECT + id, arguments, message, context, scheduled_at, + created, updated, deleted, queries, + network_queries, network_received, network_send, + lagging, duration, type, name, start, "end", result + FROM job_history + ORDER BY start`, + ); } /** @@ -466,38 +518,42 @@ export class JobContext extends SubContext { * @param filter the query filter * @returns an array of items */ - public async getJobHistoryPaginated(filter: QueryJobHistory): Promise> { - let conditions = "WHERE start < ?"; + public async getJobHistoryPaginated(filter: QueryJobHistory): Promise> { // to transform the date into the correct form in the local timezone // else the database misses it with the timezoneoffset const since = new Date(filter.since); since.setMinutes(since.getMinutes() - since.getTimezoneOffset()); - const values: any[] = [since.toISOString()]; + + const conditions = [sql`start < ${sql.timestamp(since)}`]; if (filter.name) { - values.push(`%${filter.name}%`); - conditions += " AND name like ?"; + conditions.push(sql`name like ${"%" + filter.name + "%"}`); } if (filter.type) { - values.push(filter.type); - conditions += " AND type = ?"; + conditions.push(sql`type = ${filter.type}`); } if (filter.result) { - values.push(filter.result); - conditions += " AND result = ?"; + conditions.push(sql`type = ${filter.result}`); } - conditions += " ORDER BY start DESC"; - const countValues = [...values]; - - const limit = " LIMIT ?;"; - values.push(Math.max(Math.min(filter.limit, 1000), 5)); - - const totalPromise = this.query("SELECT count(*) as total FROM job_history " + conditions, countValues); - const items: JobHistoryItem[] = await this.query("SELECT * FROM job_history " + conditions + limit, values); - const [{ total }]: [{ total: number }] = await 
totalPromise; + const totalPromise = this.con.oneFirst<{ total: number }>( + sql`SELECT count(*) as total FROM job_history WHERE ${joinAnd(conditions)}`, + ); + const items = await this.con.any( + sql.type(simpleJobHistory)` + SELECT + id, arguments, message, context, scheduled_at, + created, updated, deleted, queries, + network_queries, network_received, network_send, + lagging, duration, type, name, start, "end", result + FROM job_history + WHERE ${joinAnd(conditions)} + ORDER BY start DESC + LIMIT ${Math.max(Math.min(filter.limit, 1000), 5)}`, + ); + const total = await totalPromise; return { items, @@ -506,179 +562,143 @@ export class JobContext extends SubContext { }; } - private async addJobHistory(jobs: JobItem | JobItem[], finished: Date): EmptyPromise { - await promiseMultiSingle(jobs, async (value: JobItem) => { - let args = value.arguments; - if (value.arguments && !isString(value.arguments)) { - args = JSON.stringify(value.arguments); - } - const store = requireStore(); - const result = store.get(StoreKey.RESULT) || "success"; - const message = store.get(StoreKey.MESSAGE) || JSON.stringify({ message: "No Message" }); - - const jobTrack: JobTrack = { - modifications: store.get(StoreKey.MODIFICATIONS) || {}, - network: store.get(StoreKey.NETWORK) || defaultNetworkTrack(), - queryCount: store.get(StoreKey.QUERY_COUNT) || 0, - }; - - let [item] = (await this.query("SELECT * FROM job_stat_summary WHERE name = ?", [ - value.name, - ])) as JobStatSummary[]; - - item ??= { - name: value.name, - type: value.type, - count: 0, - failed: 0, - succeeded: 0, - network_requests: 0, - network_send: 0, - network_received: 0, - duration: 0, - updated: 0, - created: 0, - deleted: 0, - sql_queries: 0, - lagging: 0, - min_network_requests: 0, - min_network_send: 0, - min_network_received: 0, - min_duration: 0, - min_updated: 0, - min_created: 0, - min_deleted: 0, - min_sql_queries: 0, - min_lagging: 0, - max_network_requests: 0, - max_network_send: 0, - 
max_network_received: 0, - max_duration: 0, - max_updated: 0, - max_created: 0, - max_deleted: 0, - max_sql_queries: 0, - max_lagging: 0, - }; - item.count++; - const modifications = Object.values(jobTrack.modifications); - modifications.forEach((modification) => { - item.created += modification.created; - item.min_created = Math.min(item.min_created, modification.created); - item.max_created = Math.max(item.max_created, modification.created); - - item.updated += modification.updated; - item.min_updated = Math.min(item.min_updated, modification.updated); - item.max_updated = Math.max(item.max_updated, modification.updated); - - item.deleted += modification.deleted; - item.min_deleted = Math.min(item.min_deleted, modification.deleted); - item.max_deleted = Math.max(item.max_deleted, modification.deleted); - }); - item.sql_queries = jobTrack.queryCount; - item.min_sql_queries = Math.min(jobTrack.queryCount, item.min_sql_queries); - item.max_sql_queries = Math.max(jobTrack.queryCount, item.max_sql_queries); + private async addJobHistory(job: SimpleJob, finished: Date, previousScheduledAt: Date | undefined): EmptyPromise { + let args = job.arguments; - item.failed += result === "failed" ? 1 : 0; - item.succeeded += result === "success" ? 
1 : 0; + if (job.arguments && !isString(job.arguments)) { + args = JSON.stringify(job.arguments); + } - const startTime = value.runningSince?.getTime() || 0; + const store = requireStore(); + const result = store.get(StoreKey.RESULT) || "success"; + const message = store.get(StoreKey.MESSAGE) || JSON.stringify({ message: "No Message" }); - const lagging = (value.runningSince?.getTime() || 0) - (value.previousScheduledAt?.getTime() || startTime); - item.lagging = lagging; - item.min_lagging = Math.min(item.min_lagging, lagging); - item.max_lagging = Math.max(item.max_lagging, lagging); + const jobTrack: JobTrack = { + modifications: store.get(StoreKey.MODIFICATIONS) || {}, + network: store.get(StoreKey.NETWORK) || defaultNetworkTrack(), + queryCount: store.get(StoreKey.QUERY_COUNT) || 0, + }; - const duration = finished.getTime() - startTime; - item.duration = duration; - item.min_duration = Math.min(item.min_duration, duration); - item.max_duration = Math.max(item.max_duration, duration); + let item = await this.con.maybeOne( + sql.type(simpleJobStatSummary)`SELECT * FROM job_stat_summary WHERE name = ${job.name}`, + ); - const keys = [ - "name", - "type", - "count", - "failed", - "succeeded", - "network_requests", - "min_network_requests", - "max_network_requests", - "network_send", - "min_network_send", - "max_network_send", - "network_received", - "min_network_received", - "max_network_received", - "duration", - "min_duration", - "max_duration", - "lagging", - "min_lagging", - "max_lagging", - "updated", - "min_updated", - "max_updated", - "created", - "min_created", - "max_created", - "deleted", - "min_deleted", - "max_deleted", - "sql_queries", - "min_sql_queries", - "max_sql_queries", - ] as Array; - - const insertColumns = keys.join(", "); - const params = keys.map(() => "?").join(", "); - const updates = keys.map((key) => key + " = VALUES(" + key + ")").join(", "); - const values = keys.map((key) => item[key]); - - await this.query( - `INSERT INTO 
job_stat_summary (${insertColumns}) VALUES (${params}) ON DUPLICATE KEY UPDATE ${updates}`, - values, - ); + item ??= { + name: job.name, + type: job.type, + count: 0, + failed: 0, + succeeded: 0, + networkRequests: 0, + minNetworkRequests: 0, + maxNetworkRequests: 0, + networkSend: 0, + minNetworkSend: 0, + maxNetworkSend: 0, + networkReceived: 0, + minNetworkReceived: 0, + maxNetworkReceived: 0, + duration: 0, + minDuration: 0, + maxDuration: 0, + updated: 0, + minUpdated: 0, + maxUpdated: 0, + created: 0, + minCreated: 0, + maxCreated: 0, + deleted: 0, + minDeleted: 0, + maxDeleted: 0, + sqlQueries: 0, + minSqlQueries: 0, + maxSqlQueries: 0, + lagging: 0, + minLagging: 0, + maxLagging: 0, + }; + item.count++; - let created = 0; - let updated = 0; - let deleted = 0; - modifications.forEach((modification) => { - created += modification.created; - updated += modification.updated; - deleted += modification.deleted; - }); + let created = 0; + let updated = 0; + let deleted = 0; - const queries = jobTrack.queryCount; - const networkReceived = jobTrack.network.received || 0; - const networkSend = jobTrack.network.sent || 0; - const networkRequests = jobTrack.network.count || 0; - - return this.query( - "INSERT INTO job_history (id, type, name, deleteAfterRun, runAfter, scheduled_at, start, end, result, message, context, arguments, created, updated, deleted, queries, network_queries, network_received, network_send)" + - " VALUES (?,?,?,?,?,?,?,?,?,?,?,?,?,?,?,?,?,?,?);", - [ - value.id, - value.type, - value.name, - value.deleteAfterRun, - value.runAfter, - value.previousScheduledAt || value.runningSince, - value.runningSince, - finished, - result, - message, - // context take too much space, ignore it for now - "", - args, - created, - updated, - deleted, - queries, - networkRequests, - networkReceived, - networkSend, - ], - ); - }); + for (const modification of Object.values(jobTrack.modifications)) { + created += modification.created; + updated += 
modification.updated; + deleted += modification.deleted; + + item.created += modification.created; + item.minCreated = Math.min(item.minCreated, modification.created); + item.maxCreated = Math.max(item.maxCreated, modification.created); + + item.updated += modification.updated; + item.minUpdated = Math.min(item.minUpdated, modification.updated); + item.maxUpdated = Math.max(item.maxUpdated, modification.updated); + + item.deleted += modification.deleted; + item.minDeleted = Math.min(item.minDeleted, modification.deleted); + item.maxDeleted = Math.max(item.maxDeleted, modification.deleted); + } + + item.sqlQueries = jobTrack.queryCount; + item.minSqlQueries = Math.min(jobTrack.queryCount, item.minSqlQueries); + item.maxSqlQueries = Math.max(jobTrack.queryCount, item.maxSqlQueries); + + item.failed += result === "failed" ? 1 : 0; + item.succeeded += result === "success" ? 1 : 0; + + const startTime = job.runningSince?.getTime() || 0; + + const lagging = (job.runningSince?.getTime() || 0) - (previousScheduledAt?.getTime() || startTime); + item.lagging = lagging; + item.minLagging = Math.min(item.minLagging, lagging); + item.maxLagging = Math.max(item.maxLagging, lagging); + + const duration = finished.getTime() - startTime; + item.duration = duration; + item.minDuration = Math.min(item.minDuration, duration); + item.maxDuration = Math.max(item.maxDuration, duration); + + const insertColumns = []; + const insertValues = []; + const updates = []; + + for (const key of Object.keys(item)) { + const identifier = sql.identifier([key]); + insertColumns.push(sql.identifier([key])); + insertValues.push(sql`${item[key as keyof SimpleJobStatSummary]}`); + updates.push(sql`${identifier} = EXCLUDED.${identifier}`); + } + + // update job stat summary + await this.con.query( + sql` + INSERT INTO job_stat_summary + (${joinComma(insertColumns)}) + VALUES (${joinComma(insertValues)}) + ON CONFLICT (name) DO UPDATE SET ${joinComma(updates)}`, + ); + + const queries = 
jobTrack.queryCount; + const networkReceived = jobTrack.network.received || 0; + const networkSend = jobTrack.network.sent || 0; + const networkRequests = jobTrack.network.count || 0; + + const scheduledAt = previousScheduledAt ?? job.runningSince; + // eslint-disable-next-line @typescript-eslint/no-non-null-assertion + const runningSince = sql.timestamp(job.runningSince!); + + await this.con.query( + sql`INSERT INTO job_history + ( + id, type, name, scheduled_at, start, "end", result, message, + context, arguments, created, updated, deleted, queries, network_queries, network_received, network_send + ) VALUES ( + ${job.id},${job.name},${job.name},${scheduledAt ? sql.timestamp(scheduledAt) : null}, + ${runningSince},${sql.timestamp(finished)},${result},${sql.jsonb(message)},${""},${args ?? null}, + ${created},${updated},${deleted},${queries},${networkRequests},${networkReceived},${networkSend} + );`, + ); } } diff --git a/packages/core/src/database/contexts/mediumContext.ts b/packages/core/src/database/contexts/mediumContext.ts index 9bab5c9b..99bc6f49 100644 --- a/packages/core/src/database/contexts/mediumContext.ts +++ b/packages/core/src/database/contexts/mediumContext.ts @@ -1,115 +1,93 @@ -import { SubContext } from "./subContext"; import { - FullMediumToc, LikeMedium, LikeMediumQuery, - Medium, - SimpleMedium, Synonyms, - TocSearchMedium, - UpdateMedium, Uuid, - SecondaryMedium, - VoidablePromise, - EmptyPromise, PromiseMultiSingle, MultiSingleValue, MultiSingleNumber, - MediumToc, TypedQuery, Id, + Insert, + TocSearchMedium, + Medium, + SecondaryMedium, + UpdateMedium, } from "../../types"; -import { count, getElseSet, isInvalidId, multiSingle, promiseMultiSingle } from "../../tools"; +import { count, getElseSet, multiSingle, promiseMultiSingle } from "../../tools"; import { escapeLike } from "../storages/storageTools"; -import { OkPacket } from "mysql"; -import { storeModifications } from "../sqlTools"; import { DatabaseError, MissingEntityError, 
ValidationError } from "../../error"; - -export class MediumContext extends SubContext { - public async getSpecificToc(id: number, link: string): VoidablePromise { - const tocs = (await this.query( - "SELECT id, medium_id as mediumId, link, " + - "countryOfOrigin, languageOfOrigin, author, title," + - "medium, artist, lang, stateOrigin, stateTL, series, universe " + - "FROM medium_toc WHERE medium_id = ? AND link = ?;", - [id, link], - )) as FullMediumToc[]; - return tocs[0]; - } - - public async removeToc(tocLink: string): EmptyPromise { - const result: any[] = await this.query("SELECT medium_id FROM medium_toc WHERE link = ?", tocLink); - await Promise.all( - result.map((value) => { - return this.removeMediumToc(value.medium_id, tocLink); - }), - ); - } - +import { QueryContext } from "./queryContext"; +import { entity, mediumSynonym, simpleMedium, SimpleMedium, softInsertEntity } from "../databaseTypes"; +import { sql } from "slonik"; +import { PartContext } from "./partContext"; +import { JobContext } from "./jobContext"; +import { EpisodeReleaseContext } from "./episodeReleaseContext"; +import { MediumTocContext } from "./mediumTocContext"; +import { EpisodeContext } from "./episodeContext"; +import { InternalListContext } from "./internalListContext"; + +export class MediumContext extends QueryContext { /** * Adds a medium to the storage. 
*/ - public async addMedium(medium: SimpleMedium, uuid?: Uuid): Promise { + public async addMedium(medium: Insert, uuid?: Uuid): Promise { + simpleMedium.parse(medium); + if (!medium?.medium || !medium?.title) { return Promise.reject(new ValidationError(`Invalid Medium: ${medium?.title}-${medium?.medium}`)); } - const result = await this.query("INSERT INTO medium(medium, title) VALUES (?,?);", [medium.medium, medium.title]); - if (!Number.isInteger(result.insertId)) { - throw new DatabaseError(`insert failed, invalid ID: ${result.insertId + ""}`); - } - storeModifications("medium", "insert", result); + const id = await this.con.oneFirst( + sql.type(entity)` + INSERT INTO medium(medium, title) + VALUES (${medium.medium},${medium.title}) + RETURNING id;`, + ); + // FIXME: storeModifications("medium", "insert", result); - await this.parentContext.partContext.createStandardPart(result.insertId); + await this.getContext(PartContext).createStandardPart(id); const newMedium = { ...medium, - id: result.insertId, + id, }; // if it should be added to an list, do it right away if (uuid) { // add item to listId of medium or the standard list - await this.parentContext.internalListContext.addItemToList(newMedium, uuid); + await this.getContext(InternalListContext).addItemsToList([newMedium.id], uuid); } return newMedium; } - public getSimpleMedium(id: T): PromiseMultiSingle { - // TODO: 29.06.2019 replace with id IN (...) 
- return promiseMultiSingle(id, async (mediumId) => { - const resultArray: any[] = await this.query("SELECT * FROM medium WHERE medium.id =?;", mediumId); - const result = resultArray[0]; + public async getSimpleMedium(ids: number[]): Promise { + const resultArray = await this.con.any( + sql.type(simpleMedium)` + SELECT id, country_of_origin, language_of_origin, + author, title, medium, artist, lang, state_origin, + state_tl, series, universe + FROM medium + WHERE id = ANY(${sql.array(ids, "int8")});`, + ); + const requestedIds = new Set(ids); + const missingIds = resultArray.filter((medium) => !requestedIds.has(medium.id)).map((medium) => medium.id); - if (!result) { - throw new MissingEntityError(`Medium with id ${mediumId} does not exist`); - } - return { - id: result.id, - countryOfOrigin: result.countryOfOrigin, - languageOfOrigin: result.languageOfOrigin, - author: result.author, - title: result.title, - medium: result.medium, - artist: result.artist, - lang: result.lang, - stateOrigin: result.stateOrigin, - stateTL: result.stateTL, - series: result.series, - universe: result.universe, - }; - }); + if (missingIds.length) { + throw new MissingEntityError(`Media with ids ${JSON.stringify(missingIds)} do not exist`); + } + return resultArray; } public async getTocSearchMedia(): Promise { - const result: Array<{ host?: string; mediumId: number; title: string; medium: number }> = await this.query( + const result = await this.con.any<{ host: string | null; mediumId: number; title: string; medium: number }>( // eslint-disable-next-line @typescript-eslint/quotes - 'SELECT substring(episode_release.url, 1, locate("/",episode_release.url,9)) as host, ' + - "medium.id as mediumId, medium.title, medium.medium " + - "FROM medium " + - "LEFT JOIN part ON part.medium_id=medium.id " + - "LEFT JOIN episode ON part_id=part.id " + - "LEFT JOIN episode_release ON episode_release.episode_id=episode.id " + - "GROUP BY mediumId, host;", + sql`SELECT substring(episode_release.url, 
1, 8 + strpos(substring(url from 9), '/')) as host, + medium.id as medium_id, medium.title, medium.medium + FROM medium + LEFT JOIN part ON part.medium_id=medium.id + LEFT JOIN episode ON part_id=part.id + LEFT JOIN episode_release ON episode_release.episode_id=episode.id + GROUP BY medium_id, host;`, ); const idMap = new Map(); const tocSearchMedia = result @@ -134,7 +112,7 @@ export class MediumContext extends SubContext { idMap.set(value.mediumId, searchMedium); return searchMedium; }) - .filter((value) => value) as any[] as TocSearchMedium[]; + .filter((value): value is TocSearchMedium => !!value); const synonyms = await this.getSynonyms(tocSearchMedia.map((value) => value.mediumId)); synonyms.forEach((value) => { @@ -156,9 +134,15 @@ export class MediumContext extends SubContext { } public async getTocSearchMedium(id: number): Promise { - const resultArray: any[] = await this.query("SELECT * FROM medium WHERE medium.id =?;", id); - const result = resultArray[0]; - const synonyms: Synonyms[] = await this.getSynonyms(id); + const result = await this.con.one( + sql.type(simpleMedium)` + SELECT id, country_of_origin, language_of_origin, + author, title, medium, artist, lang, state_origin, + state_tl, series, universe + FROM medium + WHERE medium.id =${id};`, + ); + const synonyms: Synonyms[] = await this.getSynonyms([id]); return { mediumId: result.id, @@ -174,80 +158,90 @@ export class MediumContext extends SubContext { public getMedium(id: T, uuid: Uuid): PromiseMultiSingle { // TODO: 29.06.2019 replace with id IN (...) 
return promiseMultiSingle(id, async (mediumId: number): Promise => { - let result = await this.query("SELECT * FROM medium WHERE medium.id=?;", mediumId); - result = result[0]; - - const latestReleasesResult = await this.parentContext.episodeContext.getLatestReleases(mediumId); - - const currentReadResult = await this.query( - "SELECT user_episode.episode_id FROM " + - "(SELECT * FROM user_episode " + - "WHERE episode_id IN (SELECT id from episode " + - "WHERE part_id IN (SELECT id FROM part " + - "WHERE medium_id=?))) as user_episode " + - "INNER JOIN episode ON user_episode.episode_id=episode.id " + - "WHERE user_uuid=? " + - "ORDER BY totalIndex DESC, partialIndex DESC LIMIT 1", - [mediumId, uuid], + const result = await this.con.one( + sql.type(simpleMedium)` + SELECT id, country_of_origin, language_of_origin, + author, title, medium, artist, lang, state_origin, + state_tl, series, universe + FROM medium + WHERE medium.id=${mediumId}`, + ); + + const latestReleasesResult = await this.getContext(EpisodeContext).getLatestReleases(mediumId); + + const currentReadResult = await this.con.maybeOneFirst( + sql.type(entity)` + SELECT user_episode.episode_id as id + FROM ( + SELECT episode_id FROM user_episode + WHERE episode_id IN ( + SELECT id from episode + WHERE part_id IN ( + SELECT id FROM part + WHERE medium_id=${mediumId} + ) + ) + AND user_uuid=${uuid} + ) as user_episode + INNER JOIN episode ON user_episode.episode_id=episode.id + ORDER BY total_index DESC, partial_index DESC LIMIT 1`, ); - const unReadResult = await this.query( - "SELECT * FROM episode WHERE part_id IN (SELECT id FROM part WHERE medium_id=?) " + - "AND id NOT IN (SELECT episode_id FROM user_episode WHERE user_uuid=?) 
" + - "ORDER BY totalIndex DESC, partialIndex DESC;", - [mediumId, uuid], + const unReadResult = await this.con.anyFirst( + sql.type(entity)` + SELECT episode.id as id + FROM episode + WHERE part_id IN ( + SELECT id FROM part WHERE medium_id=${mediumId} + ) + AND id NOT IN ( + SELECT episode_id FROM user_episode WHERE user_uuid=${uuid} + ) + ORDER BY total_index DESC, partial_index DESC;`, ); - const partsResult = await this.query("SELECT id FROM part WHERE medium_id=?;", mediumId); + const partsResult = await this.con.anyFirst(sql.type(entity)`SELECT id FROM part WHERE medium_id=${mediumId};`); return { - id: result.id, - countryOfOrigin: result.countryOfOrigin, - languageOfOrigin: result.languageOfOrigin, - author: result.author, - title: result.title, - medium: result.medium, - artist: result.artist, - lang: result.lang, - stateOrigin: result.stateOrigin, - stateTL: result.stateTL, - series: result.series, - universe: result.universe, - parts: partsResult.map((packet: any) => packet.id), - currentRead: currentReadResult[0] ? 
currentReadResult[0].episode_id : undefined, - latestReleased: latestReleasesResult.map((packet: any) => packet.id), - unreadEpisodes: unReadResult.map((packet: any) => packet.id), + ...result, + parts: partsResult, + currentRead: currentReadResult, + latestReleased: latestReleasesResult.map((packet) => packet.id), + unreadEpisodes: unReadResult, }; }); } public async getAllMediaFull(): Promise> { - return this.queryStream( - "SELECT " + - "id, countryOfOrigin, languageOfOrigin, author, title," + - "medium, artist, lang, stateOrigin, stateTL, series, universe " + - "FROM medium", + return this.stream( + sql.type(simpleMedium)`SELECT + id, country_of_origin, language_of_origin, + author, title, medium, artist, lang, + state_origin, state_tl, series, universe + FROM medium`, ); } public async getAllSecondary(uuid: Uuid): Promise { - const readStatsPromise = this.query( - "SELECT part.medium_id as id, COUNT(*) as totalEpisodes , COUNT(case when episode.id in (select episode_id from user_episode where ? 
= user_uuid and progress = 1) then 1 else null end) as readEpisodes " + - "FROM part " + - "INNER JOIN episode ON part.id=episode.part_id " + - "GROUP BY part.medium_id;", - uuid, + const readStatsPromise = this.con.any<{ id: number; totalEpisode: number; readEpisodes: number }>( + sql` + SELECT part.medium_id as id, COUNT(*) as total_episodes , + COUNT(case when episode.id in ( + select episode_id from user_episode where user_uuid = ${uuid} and progress = 1 + ) then 1 else null end) as read_episodes + FROM part + INNER JOIN episode ON part.id=episode.part_id + GROUP BY part.medium_id;`, ); - const tocs = (await this.query( - "SELECT id, medium_id as mediumId, link, " + - "countryOfOrigin, languageOfOrigin, author, title," + - "medium, artist, lang, stateOrigin, stateTL, series, universe " + - "FROM medium_toc;", - )) as FullMediumToc[]; - const readStats: Array<{ id: number; totalEpisode: number; readEpisodes: number }> = await readStatsPromise; + const tocs = await this.getContext(MediumTocContext).getTocs(); + const readStats = await readStatsPromise; const idMap = new Map(); for (const value of readStats) { - const secondary = value as unknown as SecondaryMedium; - secondary.tocs = []; + const secondary: SecondaryMedium = { + id: value.id, + readEpisodes: value.readEpisodes, + tocs: [], + totalEpisodes: value.totalEpisode, + }; idMap.set(value.id, secondary); } @@ -264,9 +258,8 @@ export class MediumContext extends SubContext { return [...idMap.values()]; } - public async getAllMedia(): Promise { - const result: Array<{ id: number }> = await this.query("SELECT id FROM medium"); - return result.map((value) => value.id); + public async getAllMedia(): Promise { + return this.con.anyFirst(sql.type(entity)`SELECT id FROM medium`); } /** @@ -277,76 +270,31 @@ export class MediumContext extends SubContext { const escapedLinkQuery = escapeLike(value.link || "", { noRightBoundary: true }); const escapedTitle = escapeLike(value.title, { singleQuotes: true }); - let 
result: any[] = await this.query( - "SELECT id,medium FROM medium WHERE title LIKE ? OR id IN " + - "(SELECT medium_id FROM medium_toc WHERE medium_id IS NOT NULL AND link LIKE ?);", - [escapedTitle, escapedLinkQuery], + const result = await this.con.any( + sql` + SELECT id, medium + FROM medium + WHERE title LIKE ${escapedTitle} OR id IN ( + SELECT medium_id + FROM medium_toc + WHERE link LIKE ${escapedLinkQuery} + )${value.type != null ? sql`medium = ${value.type}` : sql``} + LIMIT 1;`, ); - if (value.type != null) { - result = result.filter((medium: any) => medium.medium === value.type); - } return { - medium: result[0], + medium: result[0] as unknown as LikeMedium["medium"], title: value.title, - link: value.link || "", + link: value.link ?? "", }; }); } - /** - * Updates a medium from the storage. - */ - public async updateMediumToc(mediumToc: FullMediumToc): Promise { - const keys: Array = [ - "countryOfOrigin", - "languageOfOrigin", - "author", - "title", - "medium", - "artist", - "lang", - "stateOrigin", - "stateTL", - "series", - "universe", - ]; - - if (isInvalidId(mediumToc.mediumId) || !mediumToc.link) { - throw new ValidationError("invalid medium_id or link is invalid: " + JSON.stringify(mediumToc)); - } - const conditions = []; - - if (isInvalidId(mediumToc.id)) { - conditions.push({ column: "medium_id", value: mediumToc.mediumId }); - conditions.push({ column: "link", value: mediumToc.link }); - } else { - conditions.push({ column: "id", value: mediumToc.id }); - } - const result = await this.update( - "medium_toc", - (updates, values) => { - for (const key of keys) { - const value = mediumToc[key]; - - if (value === null) { - updates.push(`${key} = NULL`); - } else if (value != null) { - updates.push(`${key} = ?`); - values.push(value); - } - } - }, - ...conditions, - ); - storeModifications("toc", "update", result); - return result.changedRows > 0; - } - /** * Updates a medium from the storage. 
*/ public async updateMedium(medium: UpdateMedium): Promise { + // define updatable keys const keys: Array = [ "countryOfOrigin", "languageOfOrigin", @@ -356,7 +304,7 @@ export class MediumContext extends SubContext { "artist", "lang", "stateOrigin", - "stateTL", + "stateTl", "series", "universe", ]; @@ -372,26 +320,32 @@ export class MediumContext extends SubContext { } const result = await this.update( "medium", - (updates, values) => { + () => { + const updates = []; for (const key of keys) { const value = medium[key]; if (value === null) { - updates.push(`${key} = NULL`); + updates.push(sql`${sql.identifier([key])} = NULL`); } else if (value != null) { - updates.push(`${key} = ?`); - values.push(value); + updates.push(sql`${sql.identifier([key])} = ${value}`); } } + return updates; }, { column: "id", value: medium.id }, ); - storeModifications("medium", "update", result); - return result.changedRows > 0; + // FIXME: storeModifications("medium", "update", result); + return result.rowCount > 0; } - public async getSynonyms(mediumId: number | number[]): Promise { - const synonyms = await this.queryInList("SELECT * FROM medium_synonyms WHERE medium_id IN (??);", [mediumId]); + public async getSynonyms(mediumId: number[]): Promise { + const synonyms = await this.con.any( + sql.type(mediumSynonym)` + SELECT medium_id, synonym + FROM medium_synonyms + WHERE medium_id = ANY(${sql.array(mediumId, "int8")});`, + ); if (!synonyms) { return []; } @@ -403,8 +357,8 @@ export class MediumContext extends SubContext { return [...synonymMap.values()]; } - public removeSynonyms(synonyms: Synonyms | Synonyms[]): Promise { - return promiseMultiSingle(synonyms, (value: Synonyms) => { + public async removeSynonyms(synonyms: Synonyms | Synonyms[]): Promise { + await promiseMultiSingle(synonyms, (value: Synonyms) => { return promiseMultiSingle(value.synonym, async (item) => { const result = await this.delete( "medium_synonyms", @@ -417,10 +371,11 @@ export class MediumContext extends 
SubContext { value: value.mediumId, }, ); - storeModifications("synonym", "delete", result); - return result.affectedRows > 0; + // FIXME: storeModifications("synonym", "delete", result); + return result.rowCount > 0; }); - }).then(() => true); + }); + return true; } public async addSynonyms>(synonyms: T): Promise { @@ -430,131 +385,15 @@ export class MediumContext extends SubContext { params.push([value.mediumId, item]); }); }); - const result = await this.multiInsert( - "INSERT IGNORE INTO medium_synonyms (medium_id, synonym) VALUES", - params, - (value) => value, + await this.con.query( + sql`INSERT INTO medium_synonyms (medium_id, synonym) + SELECT * FROM ${sql.unnest(params, ["int8", "text"])} + ON CONFLICT DO NOTHING;`, ); - multiSingle(result, (value) => storeModifications("synonym", "insert", value)); + // FIXME: multiSingle(result, (value) => storeModifications("synonym", "insert", value)); return true; } - public async addToc(mediumId: number, link: string): Promise { - const result: OkPacket = await this.query("INSERT IGNORE INTO medium_toc (medium_id, link) VAlUES (?,?)", [ - mediumId, - link, - ]); - storeModifications("toc", "insert", result); - return result.insertId; - } - - public async getToc(mediumId: number): Promise { - const resultArray: any[] = await this.query("SELECT link FROM medium_toc WHERE medium_id=?", mediumId); - return resultArray.map((value) => value.link).filter((value) => value); - } - - public getMediumTocs(mediumId: number[]): Promise { - return this.queryInList( - "SELECT id, medium_id as mediumId, link, " + - "countryOfOrigin, languageOfOrigin, author, title," + - "medium, artist, lang, stateOrigin, stateTL, series, universe " + - "FROM medium_toc WHERE medium_id IN (??);", - [mediumId], - ) as Promise; - } - - public getTocs(tocIds: number[]): Promise { - return this.queryInList( - "SELECT id, medium_id as mediumId, link, " + - "countryOfOrigin, languageOfOrigin, author, title," + - "medium, artist, lang, stateOrigin, 
stateTL, series, universe " + - "FROM medium_toc WHERE id IN (??);", - [tocIds], - ) as Promise; - } - - public async removeMediumToc(mediumId: number, link: string): Promise { - const domainRegMatch = /https?:\/\/(.+?)(\/|$)/.exec(link); - - if (!domainRegMatch) { - throw new ValidationError("Invalid link, Unable to extract Domain: " + link); - } - - await this.parentContext.jobContext.removeJobLike("name", `toc-${mediumId}-${link}`); - const domain = domainRegMatch[1]; - - const releases = await this.parentContext.episodeContext.getEpisodeLinksByMedium(mediumId); - const episodeMap: Map = new Map(); - const valueCb = () => []; - - for (const release of releases) { - getElseSet(episodeMap, release.episodeId, valueCb).push(release.url); - } - const removeEpisodesAfter: number[] = []; - - for (const [episodeId, links] of episodeMap.entries()) { - const toMoveCount = count(links, (value) => value.includes(domain)); - - if (toMoveCount) { - if (links.length === toMoveCount) { - removeEpisodesAfter.push(episodeId); - } - } - } - const deletedReleaseResult = await this.query( - "DELETE er FROM episode_release as er, episode as e, part as p" + - " WHERE er.episode_id = e.id" + - " AND e.part_id = p.id" + - " AND p.medium_id = ?" + - " AND locate(?,er.url) > 0;", - [mediumId, domain], - ); - storeModifications("release", "delete", deletedReleaseResult); - - const deletedProgressResult = await this.queryInList( - "DELETE ue FROM user_episode as ue, episode as e, part as p" + - " WHERE ue.episode_id = e.id" + - " AND e.part_id = p.id" + - " AND p.medium_id = ?" + - " AND e.id IN (??);", - [mediumId, removeEpisodesAfter], - ); - multiSingle(deletedProgressResult, (value) => storeModifications("progress", "delete", value)); - - const deletedResultResult = await this.queryInList( - "DELETE re FROM result_episode as re, episode as e, part as p" + - " WHERE re.episode_id = e.id" + - " AND e.part_id = p.id" + - " AND p.medium_id = ?" 
+ - " AND e.id IN (??);", - [mediumId, removeEpisodesAfter], - ); - multiSingle(deletedResultResult, (value) => storeModifications("result_episode", "delete", value)); - - const deletedEpisodesResult = await this.queryInList("DELETE FROM episode WHERE episode.id IN (??);", [ - removeEpisodesAfter, - ]); - multiSingle(deletedEpisodesResult, (value) => storeModifications("episode", "delete", value)); - - const result = await this.delete( - "medium_toc", - { column: "medium_id", value: mediumId }, - { column: "link", value: link }, - ); - storeModifications("toc", "delete", result); - return result.affectedRows > 0; - } - - public getAllMediaTocs(): Promise> { - return this.query( - "SELECT medium.id, medium_toc.link FROM medium LEFT JOIN medium_toc ON medium.id=medium_toc.medium_id", - ); - } - - public getAllTocs(): Promise { - return this.query("SELECT medium_id as mediumId, link FROM medium_toc"); - } - public async mergeMedia(sourceMediumId: number, destMediumId: number): Promise { // transfer all tocs from source to dest and with it all associated episodes // add source title as synonym for dest @@ -565,88 +404,69 @@ export class MediumContext extends SubContext { // the tocs will be transferred and do not need to be moved manually here // transferring the tocs should remove any related jobs, // and toc jobs should be the only jobs related directly to an medium - const sourceTocs = await this.getToc(sourceMediumId); - const destTocs = await this.getToc(destMediumId); + const sourceTocs = await this.getContext(MediumTocContext).getTocLinkByMediumId(sourceMediumId); + const destTocs = await this.getContext(MediumTocContext).getTocLinkByMediumId(destMediumId); // transfer unknown tocs and all related episodes await Promise.all( sourceTocs .filter((toc) => !destTocs.includes(toc)) - .map((toc) => this.transferToc(sourceMediumId, destMediumId, toc)), + .map((tocLink) => this.transferToc(sourceMediumId, destMediumId, tocLink)), ); // remove all tocs of source - let 
result = await this.delete("medium_toc", { column: "medium_id", value: sourceMediumId }); - storeModifications("toc", "delete", result); + await this.delete("medium_toc", { column: "medium_id", value: sourceMediumId }); + // FIXME: storeModifications("toc", "delete", result); - result = await this.query("UPDATE IGNORE list_medium SET medium_id=? WHERE medium_id=?", [ - destMediumId, - sourceMediumId, - ]); - storeModifications("list_item", "update", result); + await this.con.query(sql`UPDATE list_medium SET medium_id=${destMediumId} WHERE medium_id=${sourceMediumId}`); + // FIXME: storeModifications("list_item", "update", result); - result = await this.delete("list_medium", { column: "medium_id", value: sourceMediumId }); - storeModifications("list_item", "delete", result); + await this.delete("list_medium", { column: "medium_id", value: sourceMediumId }); + // FIXME: storeModifications("list_item", "delete", result); - result = await this.query("UPDATE IGNORE external_list_medium SET medium_id=? WHERE medium_id=?", [ - destMediumId, - sourceMediumId, - ]); - storeModifications("external_list_item", "update", result); + await this.con.query( + sql`UPDATE external_list_medium SET medium_id=${destMediumId} WHERE medium_id=${sourceMediumId}`, + ); + // FIXME: storeModifications("external_list_item", "update", result); - result = await this.delete("external_list_medium", { column: "medium_id", value: sourceMediumId }); - storeModifications("external_list_item", "delete", result); + await this.delete("external_list_medium", { column: "medium_id", value: sourceMediumId }); + // FIXME: storeModifications("external_list_item", "delete", result); - result = await this.query("UPDATE IGNORE medium_synonyms SET medium_id=? 
WHERE medium_id=?", [ - destMediumId, - sourceMediumId, - ]); - storeModifications("synonym", "update", result); + await this.con.query(sql`UPDATE medium_synonyms SET medium_id=${destMediumId} WHERE medium_id=${sourceMediumId}`); + // FIXME: storeModifications("synonym", "update", result); - result = await this.delete("medium_synonyms", { column: "medium_id", value: sourceMediumId }); - storeModifications("synonym", "delete", result); + await this.delete("medium_synonyms", { column: "medium_id", value: sourceMediumId }); + // FIXME: storeModifications("synonym", "delete", result); - await this.query("UPDATE IGNORE news_medium SET medium_id=? WHERE medium_id=?", [destMediumId, sourceMediumId]); + await this.con.query(sql`UPDATE news_medium SET medium_id=${destMediumId} WHERE medium_id=${sourceMediumId}`); await this.delete("news_medium", { column: "medium_id", value: sourceMediumId }); - const deletedReleaseResult = await this.query( - "DELETE er FROM episode_release as er, episode as e, part as p" + - " WHERE er.episode_id = e.id" + - " AND e.part_id = p.id" + - " AND p.medium_id = ?", - sourceMediumId, - ); - storeModifications("release", "delete", deletedReleaseResult); - - const deletedProgressResult = await this.query( - "DELETE ue FROM user_episode as ue, episode as e, part as p" + - " WHERE ue.episode_id = e.id" + - " AND e.part_id = p.id" + - " AND p.medium_id = ?", - sourceMediumId, + await this.con.query( + sql`DELETE er FROM episode_release as er, episode as e, part as p + WHERE er.episode_id = e.id + AND e.part_id = p.id + AND p.medium_id = ${sourceMediumId}`, ); - storeModifications("progress", "delete", deletedProgressResult); - - const deletedResultResult = await this.query( - "DELETE re FROM result_episode as re, episode as e, part as p" + - " WHERE re.episode_id = e.id" + - " AND e.part_id = p.id" + - " AND p.medium_id = ?", - sourceMediumId, + // FIXME: storeModifications("release", "delete", deletedReleaseResult); + + await this.con.query( + 
sql`DELETE ue FROM user_episode as ue, episode as e, part as p + WHERE ue.episode_id = e.id + AND e.part_id = p.id + AND p.medium_id = ${sourceMediumId}`, ); - storeModifications("result_episode", "delete", deletedResultResult); + // FIXME: storeModifications("progress", "delete", deletedProgressResult); - const deletedEpisodesResult = await this.query( - "DELETE e FROM episode as e, part as p" + " WHERE e.part_id = p.id" + " AND p.medium_id = ?", - sourceMediumId, + await this.con.query( + sql`DELETE e FROM episode as e, part as p WHERE e.part_id = p.id AND p.medium_id = ${sourceMediumId}`, ); - storeModifications("episode", "delete", deletedEpisodesResult); + // FIXME: storeModifications("episode", "delete", deletedEpisodesResult); - const deletedPartResult = await this.query("DELETE FROM part" + " WHERE medium_id = ?", sourceMediumId); - storeModifications("part", "delete", deletedPartResult); + await this.con.query(sql`DELETE FROM part WHERE medium_id = ${sourceMediumId}`); + // FIXME: storeModifications("part", "delete", deletedPartResult); - const deletedMediumResult = await this.query("DELETE FROM medium" + " WHERE id = ?", sourceMediumId); - storeModifications("medium", "delete", deletedMediumResult); + await this.con.query(sql`DELETE FROM medium WHERE medium_id = ${sourceMediumId}`); + // FIXME: storeModifications("medium", "delete", deletedMediumResult); return true; } @@ -657,30 +477,24 @@ export class MediumContext extends SubContext { new ValidationError(`Invalid destination Medium: ${destMedium?.title}-${destMedium?.medium}`), ); } - const result = await this.query("INSERT IGNORE INTO medium(medium, title) VALUES (?,?);", [ - destMedium.medium, - destMedium.title, - ]); - if (!Number.isInteger(result.insertId)) { - throw new ValidationError(`insert failed, invalid ID: ${result.insertId + ""}`); - } - storeModifications("medium", "insert", result); + const id = await this.con.oneFirst( + sql.type(softInsertEntity)` + INSERT INTO medium(medium, title) + 
VALUES (${destMedium.medium},${destMedium.title}) + ON CONFLICT DO NOTHING + RETURNING id;`, + ); + + // FIXME: storeModifications("medium", "insert", result); let mediumId: number; // medium exists already if insertId == 0 - if (result.insertId === 0) { - const realMedium: Array<{ - id: number; - }> = await this.query("SELECT id FROM medium WHERE (medium, title) = (?,?);", [ - destMedium.medium, - destMedium.title, - ]); - if (!realMedium.length) { - throw new MissingEntityError("Expected a MediumId, but got nothing"); - } - mediumId = realMedium[0].id; + if (id === 0) { + mediumId = await this.con.oneFirst( + sql.type(entity)`SELECT id FROM medium WHERE (medium, title) = (${destMedium.medium},${destMedium.title});`, + ); } else { - await this.parentContext.partContext.createStandardPart(result.insertId); - mediumId = result.insertId; + await this.getContext(PartContext).createStandardPart(id); + mediumId = id; } const success = await this.transferToc(sourceMediumId, mediumId, toc); return success ? mediumId : 0; @@ -695,20 +509,19 @@ export class MediumContext extends SubContext { const domain = domainRegMatch[1]; - await this.parentContext.jobContext.removeJobLike("name", `toc-${sourceMediumId}-${toc}`); - const standardPartId = await this.parentContext.partContext.getStandardPartId(destMediumId); + await this.getContext(JobContext).removeJobLike("name", `toc-${sourceMediumId}-${toc}`); + const standardPartId = await this.getContext(PartContext).getStandardPartId(destMediumId); if (!standardPartId) { throw new DatabaseError("medium does not have a standard part"); } - const updatedTocResult = await this.query( - "UPDATE IGNORE medium_toc SET medium_id = ? 
WHERE (medium_id, link) = (?,?);", - [destMediumId, sourceMediumId, toc], + await this.con.query( + sql`UPDATE medium_toc SET medium_id = ${destMediumId} WHERE (medium_id, link) = (${sourceMediumId},${toc});`, ); - storeModifications("toc", "update", updatedTocResult); + // FIXME: storeModifications("toc", "update", updatedTocResult); - const releases = await this.parentContext.episodeContext.getEpisodeLinksByMedium(sourceMediumId); + const releases = await this.getContext(EpisodeReleaseContext).getEpisodeLinksByMedium(sourceMediumId); const episodeMap: Map = new Map(); const valueCb = () => []; @@ -730,108 +543,70 @@ export class MediumContext extends SubContext { } } // add the episodes of the releases - const copyEpisodesResult = await this.queryInList( - "INSERT IGNORE INTO episode" + - " (part_id, totalIndex, partialIndex, combiIndex, updated_at)" + - " SELECT ?, episode.totalIndex, episode.partialIndex, episode.combiIndex, episode.updated_at" + - " FROM episode INNER JOIN part ON part.id=episode.part_id" + - " WHERE part.medium_id = ? AND episode.id IN (??);", - [standardPartId, sourceMediumId, copyEpisodes], - ); - multiSingle(copyEpisodesResult, (value) => storeModifications("episode", "insert", value)); - - const updatedReleaseResult = await this.query( - "UPDATE IGNORE episode_release, episode as src_e, episode as dest_e, part" + - " SET episode_release.episode_id = dest_e.id" + - " WHERE episode_release.episode_id = src_e.id" + - " AND src_e.part_id = part.id" + - " AND part.medium_id = ?" + - " AND dest_e.part_id = ?" 
+ - " AND src_e.combiIndex = dest_e.combiIndex" + - " AND locate(?,episode_release.url) > 0;", - [sourceMediumId, standardPartId, domain], + await this.con.query( + sql`INSERT INTO episode + (part_id, total_index, partial_index, combi_index, updated_at) + SELECT ${standardPartId}, episode.total_index, episode.partial_index, episode.combi_index, episode.updated_at + FROM episode INNER JOIN part ON part.id=episode.part_id + WHERE part.medium_id = ${sourceMediumId} AND episode.id = ANY(${sql.array(copyEpisodes, "int8")}) + ON CONFLICT DO NOTHING;`, ); - storeModifications("release", "update", updatedReleaseResult); - - const updatedProgressResult = await this.queryInList( - "UPDATE IGNORE user_episode, episode as src_e, episode as dest_e, part" + - " SET user_episode.episode_id = dest_e.id" + - " WHERE user_episode.episode_id = src_e.id" + - " AND src_e.part_id = part.id" + - " AND part.medium_id = ?" + - " AND dest_e.part_id = ?" + - " AND src_e.combiIndex = dest_e.combiIndex" + - " AND src_e.id IN (??);", - [sourceMediumId, standardPartId, removeEpisodesAfter], + // FIXME: multiSingle(copyEpisodesResult, (value) => storeModifications("episode", "insert", value)); + + await this.con.query( + sql`UPDATE episode_release, episode as src_e, episode as dest_e, part + SET episode_release.episode_id = dest_e.id + WHERE episode_release.episode_id = src_e.id + AND src_e.part_id = part.id + AND part.medium_id = ${sourceMediumId} + AND dest_e.part_id = ${standardPartId} + AND src_e.combiIndex = dest_e.combiIndex + AND strpos(episode_release.url, ${domain}) > 0;`, ); - multiSingle(updatedProgressResult, (value) => storeModifications("progress", "update", value)); - - const updatedResultResult = await this.queryInList( - "UPDATE IGNORE result_episode, episode as src_e, episode as dest_e, part" + - " SET result_episode.episode_id = dest_e.id" + - " WHERE result_episode.episode_id = src_e.id" + - " AND src_e.part_id = part.id" + - " AND part.medium_id = ?" 
+ - " AND dest_e.part_id = ?" + - " AND src_e.combiIndex = dest_e.combiIndex" + - " AND src_e.id IN (??);", - [sourceMediumId, standardPartId, removeEpisodesAfter], + // FIXME: storeModifications("release", "update", updatedReleaseResult); + + await this.con.query( + sql`UPDATE user_episode, episode as src_e, episode as dest_e, part + SET user_episode.episode_id = dest_e.id + WHERE user_episode.episode_id = src_e.id + AND src_e.part_id = part.id + AND part.medium_id = ${sourceMediumId} + AND dest_e.part_id = ${standardPartId} + AND src_e.combiIndex = dest_e.combiIndex + AND src_e.id = ANY(${sql.array(removeEpisodesAfter, "int8")});`, ); - multiSingle(updatedResultResult, (value) => storeModifications("result_episode", "update", value)); + // FIXME: multiSingle(updatedProgressResult, (value) => storeModifications("progress", "update", value)); - const deletedReleasesResult = await this.queryInList("DELETE FROM episode_release" + " WHERE episode_id IN (??);", [ - removeEpisodesAfter, - ]); - multiSingle(deletedReleasesResult, (value) => storeModifications("release", "delete", value)); - - const deletedUserEpisodesResult = await this.queryInList( - "DELETE FROM user_episode" + " WHERE episode_id IN (??);", - [removeEpisodesAfter], + await this.con.query( + sql`DELETE FROM episode_release WHERE episode_id = ANY(${sql.array(removeEpisodesAfter, "int8")});`, ); - multiSingle(deletedUserEpisodesResult, (value) => storeModifications("progress", "delete", value)); + // FIXME: multiSingle(deletedReleasesResult, (value) => storeModifications("release", "delete", value)); - const deletedResultEpisodesResult = await this.queryInList( - "DELETE FROM result_episode" + " WHERE episode_id IN (??);", - [removeEpisodesAfter], + await this.con.query( + sql`DELETE FROM user_episode WHERE episode_id = ANY(${sql.array(removeEpisodesAfter, "int8")});`, ); - multiSingle(deletedResultEpisodesResult, (value) => storeModifications("result_episode", "delete", value)); + // FIXME: 
multiSingle(deletedUserEpisodesResult, (value) => storeModifications("progress", "delete", value)); - const deletedEpisodesResult = await this.queryInList("DELETE FROM episode" + " WHERE id IN (??);", [ + await this.con.query(sql`DELETE FROM episode WHERE id = ANY(${sql.array(removeEpisodesAfter, "int8")});`, [ removeEpisodesAfter, ]); - multiSingle(deletedEpisodesResult, (value) => storeModifications("episode", "delete", value)); + // FIXME: multiSingle(deletedEpisodesResult, (value) => storeModifications("episode", "delete", value)); const copiedOnlyEpisodes: number[] = copyEpisodes.filter((value) => !removeEpisodesAfter.includes(value)); - const copiedProgressResult = await this.queryInList( - " IGNORE INTO user_episode" + - " (user_uuid, episode_id, progress, read_date)" + - " SELECT user_episode.user_uuid, dest_e.id, user_episode.progress, user_episode.read_date" + - " FROM user_episode, episode as src_e, episode as dest_e, part" + - " WHERE user_episode.episode_id = src_e.id" + - " AND src_e.part_id = part.id" + - " AND part.medium_id = ?" + - " AND dest_e.part_id = ?" + - " AND src_e.combiIndex = dest_e.combiIndex" + - " AND src_e.id IN (??);", - [sourceMediumId, standardPartId, copiedOnlyEpisodes], - ); - multiSingle(copiedProgressResult, (value) => storeModifications("progress", "insert", value)); - - const copiedResultResult = await this.queryInList( - "INSERT IGNORE INTO result_episode" + - " (novel, chapter, chapIndex, volIndex, volume, episode_id)" + - " SELECT result_episode.novel, result_episode.chapter, result_episode.chapIndex," + - " result_episode.volIndex, result_episode.volume, dest_e.id" + - " FROM result_episode, episode as src_e, episode as dest_e, part" + - " WHERE result_episode.episode_id = src_e.id" + - " AND src_e.part_id = part.id" + - " AND part.medium_id = ?" + - " AND dest_e.part_id = ?" 
+ - " AND src_e.combiIndex = dest_e.combiIndex" + - " AND src_e.id IN (??);", - [sourceMediumId, standardPartId, copiedOnlyEpisodes], + await this.con.query( + sql`INSERT INTO user_episode + (user_uuid, episode_id, progress, read_date) + SELECT user_episode.user_uuid, dest_e.id, user_episode.progress, user_episode.read_date + FROM user_episode, episode as src_e, episode as dest_e, part + WHERE user_episode.episode_id = src_e.id + AND src_e.part_id = part.id + AND part.medium_id = ${sourceMediumId} + AND dest_e.part_id = ${standardPartId} + AND src_e.combiIndex = dest_e.combiIndex + AND src_e.id = ANY(${sql.array(copiedOnlyEpisodes, "int8")}) + ON CONFLICT DO NOTHING;`, ); - multiSingle(copiedResultResult, (value) => storeModifications("result_episode", "insert", value)); + // FIXME: multiSingle(copiedProgressResult, (value) => storeModifications("progress", "insert", value)); return true; } } diff --git a/packages/core/src/database/contexts/mediumInWaitContext.ts b/packages/core/src/database/contexts/mediumInWaitContext.ts index 96a7bacf..5dc79834 100644 --- a/packages/core/src/database/contexts/mediumInWaitContext.ts +++ b/packages/core/src/database/contexts/mediumInWaitContext.ts @@ -1,4 +1,3 @@ -import { SubContext } from "./subContext"; import { Medium, SimpleMedium, @@ -7,16 +6,28 @@ import { MediumInWait, TypedQuery, MediumInWaitSearch, + Uuid, } from "../../types"; -import { equalsIgnore, ignore, promiseMultiSingle, sanitizeString, multiSingle } from "../../tools"; -import { storeModifications } from "../sqlTools"; +import { equalsIgnore, promiseMultiSingle, sanitizeString } from "../../tools"; import { escapeLike } from "../storages/storageTools"; import { DatabaseError } from "../../error"; - -export class MediumInWaitContext extends SubContext { - public async createFromMediaInWait(medium: MediumInWait, same?: MediumInWait[], listId?: number): Promise { +import { QueryContext } from "./queryContext"; +import { sql } from "slonik"; +import { 
MediumTocContext } from "./mediumTocContext"; +import { MediumContext } from "./mediumContext"; +import { PartContext } from "./partContext"; +import { InternalListContext } from "./internalListContext"; +import { joinAnd } from "./helper"; + +export class MediumInWaitContext extends QueryContext { + public async createFromMediaInWait( + medium: MediumInWait, + uuid: Uuid, + same?: MediumInWait[], + listId?: number, + ): Promise { const title = sanitizeString(medium.title); - const newMedium: SimpleMedium = await this.parentContext.mediumContext.addMedium({ + const newMedium: SimpleMedium = await this.getContext(MediumContext).addMedium({ title, medium: medium.medium, }); @@ -31,7 +42,7 @@ export class MediumInWaitContext extends SubContext { await Promise.all( same .filter((value) => value && value.medium === medium.medium) - .map((value) => this.parentContext.mediumContext.addToc(id, value.link)), + .map((value) => this.getContext(MediumTocContext).addToc(id, value.link)), ); const synonyms: string[] = same @@ -39,19 +50,19 @@ export class MediumInWaitContext extends SubContext { .filter((value) => !equalsIgnore(value, medium.title)); if (synonyms.length) { - await this.parentContext.mediumContext.addSynonyms({ mediumId: id, synonym: synonyms }); + await this.getContext(MediumContext).addSynonyms({ mediumId: id, synonym: synonyms }); } toDeleteMediaInWaits.push(...same); } if (listId) { - await this.parentContext.internalListContext.addItemToList({ id, listId }); + await this.getContext(InternalListContext).addItemsToList([id], uuid, listId); } if (medium.link) { - await this.parentContext.mediumContext.addToc(id, medium.link); + await this.getContext(MediumTocContext).addToc(id, medium.link); } await this.deleteMediaInWait(toDeleteMediaInWaits); - const parts = await this.parentContext.partContext.getMediumParts(id); + const parts = await this.getContext(PartContext).getMediumParts(id); return { ...newMedium, parts: parts.map((value) => value.id), @@ -66,40 
+77,35 @@ export class MediumInWaitContext extends SubContext { return false; } await Promise.all( - same.filter((value) => value).map((value) => this.parentContext.mediumContext.addToc(mediumId, value.link)), + same.filter((value) => value).map((value) => this.getContext(MediumTocContext).addToc(mediumId, value.link)), ); const synonyms: string[] = same.map((value) => sanitizeString(value.title)); - await this.parentContext.mediumContext.addSynonyms({ mediumId, synonym: synonyms }); + await this.getContext(MediumContext).addSynonyms({ mediumId, synonym: synonyms }); await this.deleteMediaInWait(same); return true; } public async getMediaInWait(search?: MediumInWaitSearch): Promise> { - const limit = search?.limit && search.limit > 0 ? ` LIMIT ${search.limit}` : ""; + const limit = search?.limit && search.limit > 0 ? sql` LIMIT ${search.limit}` : sql``; const whereFilter = []; - const values = []; if (search?.medium) { - whereFilter.push("medium = ?"); - values.push(search.medium); + whereFilter.push(sql`medium = ${search.medium}`); } if (search?.link && search.link !== "undefined") { - whereFilter.push(`link like '%${escapeLike(search.link)}%'`); - values.push(search.link); + whereFilter.push(sql`link like ${"%" + escapeLike(search.link) + "%"}`); } if (search?.title && search.title !== "undefined") { - whereFilter.push(`link like '%${escapeLike(search.title)}%'`); - values.push(search.title); + whereFilter.push(sql`title like ${"%" + escapeLike(search.title) + "%"}`); } - return this.queryStream( - `SELECT * FROM medium_in_wait${ - whereFilter.length ? " WHERE " + whereFilter.join(" AND ") : "" + return this.stream( + sql`SELECT title, medium, link FROM medium_in_wait${ + whereFilter.length ? 
sql` WHERE ${joinAnd(whereFilter)}` : sql`` } ORDER BY title${limit}`, - values, ); } @@ -107,8 +113,8 @@ export class MediumInWaitContext extends SubContext { if (!mediaInWait) { return; } - return promiseMultiSingle(mediaInWait, async (value: MediumInWait) => { - const result = await this.delete( + await promiseMultiSingle(mediaInWait, async (value: MediumInWait) => { + await this.delete( "medium_in_wait", { column: "title", @@ -123,24 +129,25 @@ export class MediumInWaitContext extends SubContext { value: value.link, }, ); - storeModifications("medium_in_wait", "delete", result); - return result.affectedRows > 0; - }).then(ignore); + // FIXME: storeModifications("medium_in_wait", "delete", result); + }); } - public async addMediumInWait(mediaInWait: MultiSingleValue): EmptyPromise { - const results = await this.multiInsert( - "INSERT IGNORE INTO medium_in_wait (title, medium, link) VALUES ", - mediaInWait, - (value: any) => [value.title, value.medium, value.link], + public async addMediumInWait(mediaInWait: readonly MediumInWait[]): EmptyPromise { + const values = mediaInWait.map((value) => [value.title, value.medium, value.link]); + await this.con.query( + sql` + INSERT INTO medium_in_wait (title, medium, link) + SELECT * FROM ${sql.unnest(values, ["text", "int8", "text"])} + ON CONFLICT DO NOTHING`, ); - multiSingle(results, (result) => storeModifications("medium_in_wait", "insert", result)); + // FIXME: multiSingle(results, (result) => storeModifications("medium_in_wait", "insert", result)); } public async deleteUsedMediumInWait() { - const result = await this.dmlQuery( - "delete from medium_in_wait where (title, medium, link) in (select title, medium, link from medium_toc);", + await this.con.query( + sql`delete from medium_in_wait where (title, medium, link) in (select title, medium, link from medium_toc);`, ); - storeModifications("medium_in_wait", "delete", result); + // FIXME: storeModifications("medium_in_wait", "delete", result); } } diff --git 
a/packages/core/src/database/contexts/mediumTocContext.ts b/packages/core/src/database/contexts/mediumTocContext.ts new file mode 100644 index 00000000..4a558ab6 --- /dev/null +++ b/packages/core/src/database/contexts/mediumTocContext.ts @@ -0,0 +1,190 @@ +import { ValidationError } from "../../error"; +import { count, getElseSet, isInvalidId } from "../../tools"; +import { EmptyPromise } from "../../types"; +import { sql } from "slonik"; +import { + entity, + linkValue, + minimalMediumtoc, + MinimalMediumtoc, + SimpleMediumToc, + simpleMediumToc, +} from "../databaseTypes"; +import { QueryContext } from "./queryContext"; +import { EpisodeReleaseContext } from "./episodeReleaseContext"; +import { JobContext } from "./jobContext"; + +function selectAllColumns() { + return sql`SELECT id, medium_id, link, country_of_origin, + language_of_origin, author, title, medium, + artist, lang, state_origin, state_tl, series, universe + FROM medium_toc`; +} + +export class MediumTocContext extends QueryContext { + public async getSpecificToc(id: number, link: string): Promise { + return this.con.maybeOne( + sql.type(simpleMediumToc)`${selectAllColumns()} WHERE medium_id = ${id} AND link = ${link};`, + ); + } + + public async removeToc(tocLink: string): EmptyPromise { + const result = await this.con.anyFirst( + sql.type(entity)`SELECT medium_id as id FROM medium_toc WHERE link = ${tocLink}`, + ); + await Promise.all( + result.map((value) => { + return this.removeMediumToc(value, tocLink); + }), + ); + } + + public getTocs(): Promise { + return this.con.any( + sql.type(simpleMediumToc)` + ${selectAllColumns()};`, + ); + } + + /** + * Updates a mediumToc from the storage. 
+ */ + public async updateMediumToc(mediumToc: SimpleMediumToc): Promise { + // define the updatable keys + const keys: Array = [ + "countryOfOrigin", + "languageOfOrigin", + "author", + "title", + "medium", + "artist", + "lang", + "stateOrigin", + "stateTl", + "series", + "universe", + ]; + + if (isInvalidId(mediumToc.mediumId) || !mediumToc.link) { + throw new ValidationError("invalid medium_id or link is invalid: " + JSON.stringify(mediumToc)); + } + const conditions = []; + + if (isInvalidId(mediumToc.id)) { + conditions.push({ column: "medium_id", value: mediumToc.mediumId }); + conditions.push({ column: "link", value: mediumToc.link }); + } else { + conditions.push({ column: "id", value: mediumToc.id }); + } + const result = await this.update( + "medium_toc", + () => { + const updates = []; + for (const key of keys) { + const value = mediumToc[key]; + + if (value === null) { + updates.push(sql`${sql.identifier([key])} = NULL`); + } else if (value != null) { + updates.push(sql`${sql.identifier([key])} = ${value}`); + } + } + return updates; + }, + ...conditions, + ); + // FIXME: storeModifications("toc", "update", result); + return result.rowCount > 0; + } + + public async addToc(mediumId: number, link: string): Promise { + const result = await this.con.oneFirst( + sql.type(entity)` + INSERT INTO medium_toc (medium_id, link, title, medium) + VAlUES (${mediumId},${link},${""},${0}) + RETURNING id`, + ); + // FIXME: storeModifications("toc", "insert", result); + return result; + } + + public async getTocLinkByMediumId(mediumId: number): Promise { + return this.con.anyFirst(sql.type(linkValue)`SELECT link FROM medium_toc WHERE medium_id=${mediumId}`); + } + + public async getTocsByMediumIds(mediumId: number[]): Promise { + return this.con.any( + sql.type(simpleMediumToc)` + ${selectAllColumns()} WHERE medium_id = ANY(${sql.array(mediumId, "int8")});`, + ); + } + + public getTocsByIds(tocIds: number[]): Promise { + return 
this.con.any(sql.type(simpleMediumToc)`${selectAllColumns()} WHERE id = ANY(${sql.array(tocIds, "int8")});`); + } + + public async removeMediumToc(mediumId: number, link: string): Promise { + const domainRegMatch = /https?:\/\/(.+?)(\/|$)/.exec(link); + + if (!domainRegMatch) { + throw new ValidationError("Invalid link, Unable to extract Domain: " + link); + } + + await this.getContext(JobContext).removeJobLike("name", `toc-${mediumId}-${link}`); + const domain = domainRegMatch[1]; + + const releases = await this.getContext(EpisodeReleaseContext).getEpisodeLinksByMedium(mediumId); + const episodeMap: Map = new Map(); + const valueCb = () => []; + + for (const release of releases) { + getElseSet(episodeMap, release.episodeId, valueCb).push(release.url); + } + const removeEpisodesAfter: number[] = []; + + for (const [episodeId, links] of episodeMap.entries()) { + const toMoveCount = count(links, (value) => value.includes(domain)); + + if (toMoveCount) { + if (links.length === toMoveCount) { + removeEpisodesAfter.push(episodeId); + } + } + } + await this.con.query( + sql`DELETE FROM episode_release er USING episode e, part p + WHERE er.episode_id = e.id + AND e.part_id = p.id + AND p.medium_id = ${mediumId} + AND strpos(er.url, ${domain}) 
> 0;`, + + ); + // FIXME: storeModifications("release", "delete", deletedReleaseResult); + + await this.con.query( + sql`DELETE FROM user_episode ue USING episode e, part p + WHERE ue.episode_id = e.id + AND e.part_id = p.id + AND p.medium_id = ${mediumId} + AND e.id = ANY(${sql.array(removeEpisodesAfter, "int8")});`, + ); + // FIXME: multiSingle(deletedProgressResult, (value) => storeModifications("progress", "delete", value)); + + await this.con.query(sql`DELETE FROM episode WHERE episode.id = ANY(${sql.array(removeEpisodesAfter, "int8")});`); + // FIXME: multiSingle(deletedEpisodesResult, (value) => storeModifications("episode", "delete", value)); + + await this.delete("medium_toc", { column: "medium_id", value: mediumId }, { column: "link", value: link }); + // FIXME: storeModifications("toc", "delete", result); + return false; + } + + public getAllMediaTocs(): Promise> { + return this.con.any<{ link?: string; id: number }>( + sql`SELECT medium.id, medium_toc.link FROM medium LEFT JOIN medium_toc ON medium.id=medium_toc.medium_id`, + ); + } + + public async getAllTocs(): Promise { + return this.con.any(sql.type(minimalMediumtoc)`SELECT id, medium_id, link FROM medium_toc;`); + } +} diff --git a/packages/core/src/database/contexts/newsContext.ts b/packages/core/src/database/contexts/newsContext.ts index a52f6c3d..9f60092f 100644 --- a/packages/core/src/database/contexts/newsContext.ts +++ b/packages/core/src/database/contexts/newsContext.ts @@ -1,10 +1,9 @@ -import { SubContext } from "./subContext"; -import { News, Uuid, MultiSingleValue, PromiseMultiSingle, Optional } from "../../types"; -import { promiseMultiSingle } from "../../tools"; -import { storeModifications } from "../sqlTools"; -import { DatabaseError, ValidationError } from "../../error"; +import { Uuid, Insert } from "../../types"; +import { QueryContext } from "./queryContext"; +import { sql } from "slonik"; +import { entity, News, news, SimpleNews, simpleNews } from 
"../databaseTypes"; -export class NewsContext extends SubContext { +export class NewsContext extends QueryContext { /** * Inserts a News item into the Storage. * Returns a News item with id if insert was successful. @@ -14,139 +13,124 @@ export class NewsContext extends SubContext { * @param {News|Array} news * @return {Promise>} */ - public async addNews>(news: T): PromiseMultiSingle> { - // TODO: 29.06.2019 if inserting multiple rows in a single insert, what happens with result.insertId? - return promiseMultiSingle(news, async (value: News) => { - // an empty link may be the result of a faulty link (e.g. a link which leads to 404 error) - if (!value.link) { - return; - } - if (!value.title || !value.date) { - return Promise.reject(new ValidationError(`Invalid News: ${value.title}-${value.date + ""}`)); - } - let result = await this.query("INSERT IGNORE INTO news_board (title, link, date) VALUES (?,?,?);", [ - value.title, - value.link, - value.date, - ]); - if (!Number.isInteger(result.insertId)) { - throw new DatabaseError(`failed insert, invalid ID ${result.insertId + ""}`); - } - storeModifications("news", "insert", result); - if (!result.affectedRows) { - return; - } - result = { ...value, id: result.insertId }; - return result; + public async addNews(news: ReadonlyArray>): Promise { + // an empty link may be the result of a faulty link (e.g. a link which leads to 404 error) + const values = news.map((value) => { + return [value.title, value.link, value.date ? 
value.date.toISOString() : null]; }); - } - public getLatestNews(domain: string): Promise { - return this.query("SELECT * FROM news_board WHERE locate(?, link) < 9 ORDER BY date DESC LIMIT 10", domain); + const result = await this.con.any( + sql.type(simpleNews)` + INSERT INTO news_board (title, link, date) + SELECT * FROM ${sql.unnest(values, ["text", "text", "timestamptz"])} + RETURNING id, title, link, date;`, + ); + // FIXME: storeModifications("news", "insert", result); + return result; } - public async getAll(uuid: Uuid): Promise { - const newsResult: any[] = await this.query( - "SELECT * FROM news_board LEFT JOIN " + - "(SELECT news_id,1 AS read_news FROM news_user WHERE user_id=?) as news_user " + - "ON news_user.news_id=news_board.id " + - "WHERE id IN (" + - "SELECT news_id FROM news_medium WHERE medium_id IN(" + - "SELECT medium_id FROM list_medium WHERE list_id IN (" + - "SELECT id FROM reading_list WHERE user_uuid = ?) UNION SELECT medium_id FROM external_list_medium " + - "WHERE list_id IN (SELECT id from external_reading_list " + - "WHERE user_uuid IN (SELECT uuid FROM external_user WHERE local_uuid = ?" 
+ - "))))" + - "ORDER BY date DESC LIMIT 100", - [uuid, uuid, uuid], + public getLatestNews(domain: string): Promise { + return this.con.any( + sql.type(simpleNews)`SELECT * FROM news_board WHERE strpos(link, ${domain}) < 9 ORDER BY date DESC LIMIT 10`, ); + } - return newsResult.map((value): News => { - return { - title: value.title, - date: value.date, - link: value.link, - id: value.id, - read: Boolean(value.read_news), - }; - }); + public async getAll(uuid: Uuid): Promise { + return this.con.any( + sql.type(news)` + SELECT news_board.id, news_board.title, news_board.link, news_board.id, news_user.read + FROM news_board + LEFT JOIN ( + SELECT news_id,true AS read FROM news_user WHERE user_id=${uuid} + ) as news_user ON news_user.news_id=news_board.id + WHERE id IN ( + SELECT news_id FROM news_medium WHERE medium_id IN ( + SELECT medium_id FROM list_medium WHERE list_id IN ( + SELECT id FROM reading_list WHERE user_uuid = ${uuid} + ) + UNION SELECT medium_id FROM external_list_medium + WHERE list_id IN ( + SELECT id from external_reading_list + WHERE user_uuid IN ( + SELECT uuid FROM external_user WHERE local_uuid = ${uuid} + ) + ) + ) + ) + ORDER BY date DESC LIMIT 100;`, + ); } /** * */ - public async getNews(uuid: Uuid, since?: Date, till?: Date, newsIds?: number[]): Promise { - let parameter: Array | string; - let query: string; + public async getNews(uuid: Uuid, since?: Date, till?: Date, newsIds?: number[]): Promise { + let query; if (newsIds) { if (!newsIds.length || newsIds.some((newsId) => !Number.isInteger(newsId) && newsId > 0)) { return []; } - query = - "SELECT * FROM news_board " + - "LEFT JOIN (SELECT news_id,1 AS read_news FROM news_user WHERE user_id=?) 
" + - "as news_user ON news_user.news_id=news_board.id " + - "WHERE id IN (" + - newsIds.join(", ") + - ");"; - parameter = uuid; + query = sql.type(news)`SELECT id, title, link, date, read FROM news_board + LEFT JOIN ( + SELECT news_id,true AS read FROM news_user WHERE user_id=${uuid} + ) + as news_user ON news_user.news_id=news_board.id + WHERE id = ANY(${sql.array(newsIds, "int8")});`; } else { // TODO query looks horrible, replace it with something better? // a time based query - query = - "SELECT * FROM news_board " + - "LEFT JOIN (SELECT news_id,1 AS read_news FROM news_user WHERE user_id=?) " + - "as news_user ON news_user.news_id=news_board.id " + - // where date between since and till - `WHERE ${since ? "? < date AND " : ""} ? > date AND id IN ` + - "(SELECT news_id FROM news_medium WHERE medium_id IN" + - // and news id from either an medium in user list or external list - "(SELECT medium_id FROM list_medium WHERE list_id IN " + - "(SELECT id FROM reading_list WHERE user_uuid = ?) " + - "UNION SELECT medium_id FROM external_list_medium WHERE list_id IN " + - "(SELECT id from external_reading_list WHERE user_uuid IN " + - "(SELECT uuid FROM external_user WHERE local_uuid = ?))))" + - "ORDER BY date DESC LIMIT 100"; - - till ??= new Date(); - parameter = [till, uuid, uuid]; - if (since) { - parameter.unshift(since); - } - parameter.unshift(uuid); + query = sql.type(news)`SELECT id, title, link, date, read FROM news_board + LEFT JOIN ( + SELECT news_id,true AS read FROM news_user WHERE user_id=${uuid} + ) + as news_user ON news_user.news_id=news_board.id + -- where date between since and till + WHERE ${since ? sql`date > ${sql.timestamp(since)} AND ` : sql``} + date < ${sql.timestamp(till ?? 
new Date())} + AND id IN ( + SELECT news_id FROM news_medium WHERE medium_id IN ( + -- and news id from either an medium in user list or external list + SELECT medium_id FROM list_medium WHERE list_id IN ( + SELECT id FROM reading_list WHERE user_uuid = ${uuid} + ) + UNION SELECT medium_id FROM external_list_medium WHERE list_id IN ( + SELECT id from external_reading_list WHERE user_uuid IN ( + SELECT uuid FROM external_user WHERE local_uuid = ${uuid} + ) + ) + ) + ) + ORDER BY date DESC LIMIT 100`; } - const newsResult: any[] = await this.query(query, parameter); - - return newsResult.map((value): News => { - return { - title: value.title, - date: value.date, - link: value.link, - id: value.id, - read: Boolean(value.read_news), - }; - }); + return this.con.any(query); } /** * */ public async deleteOldNews(): Promise { - await this.query( - "DELETE FROM news_medium WHERE news_id IN " + - "(SELECT news_id FROM news_board WHERE date < NOW() - INTERVAL 30 DAY);", + await this.con.query( + sql`DELETE FROM news_medium WHERE news_id IN ( + SELECT news_id FROM news_board WHERE date < NOW() - INTERVAL '30 days' + );`, ); - const result = await this.query("DELETE FROM news_board WHERE date < NOW() - INTERVAL 30 DAY;"); - storeModifications("news", "delete", result); - return result.affectedRows > 0; + await this.con.query(sql`DELETE FROM news_board WHERE date < NOW() - INTERVAL '30 days';`); + // FIXME: storeModifications("news", "delete", result); + return false; } /** * Marks these news as read for the given user. 
*/ public async markRead(uuid: Uuid, news: number[]): Promise { - await this.multiInsert("INSERT IGNORE INTO news_user (user_id, news_id) VALUES", news, (value) => [uuid, value]); + const values = news.map((value) => [uuid, value]); + await this.con.query( + sql` + INSERT INTO news_user (user_id, news_id) + SELECT * FROM ${sql.unnest(values, ["text", "int8"])} + ON CONFLICT DO NOTHING`, + ); return true; } @@ -154,21 +138,19 @@ export class NewsContext extends SubContext { * */ public async checkUnreadNewsCount(uuid: Uuid): Promise { - const result = await this.query( - "SELECT COUNT(*) AS count FROM news_board WHERE id NOT IN " + - "(SELECT news_id FROM news_user WHERE user_id = ?);", - uuid, + return this.con.oneFirst<{ count: number }>( + sql`SELECT COUNT(*) AS count FROM news_board WHERE id NOT IN (SELECT news_id FROM news_user WHERE user_id = ${uuid});`, ); - return result[0].count; } /** * */ - public checkUnreadNews(uuid: Uuid): Promise { - return this.query( - "SELECT * FROM news_board WHERE id NOT IN (SELECT news_id FROM news_user WHERE user_id = ?);", - uuid, + public checkUnreadNews(uuid: Uuid): Promise { + return this.con.anyFirst( + sql.type(entity)`SELECT id FROM news_board WHERE id NOT IN ( + SELECT news_id FROM news_user WHERE user_id = ${uuid} + );`, ); } @@ -177,12 +159,13 @@ export class NewsContext extends SubContext { */ public async linkNewsToMedium(): Promise { // TODO maybe implement this with a trigger - const result = await this.query( - "INSERT IGNORE INTO news_medium (medium_id, news_id)" + - "SELECT medium.id, news_board.id FROM medium,news_board " + - "WHERE locate(medium.title, news_board.title) > 0", + await this.con.query( + sql`INSERT INTO news_medium (medium_id, news_id) + SELECT medium.id, news_board.id FROM medium,news_board + WHERE strpos(news_board.title, medium.title) > 0 + ON CONFLICT DO NOTHING`, ); - return result.affectedRows > 0; + return false; } /** @@ -202,6 +185,6 @@ export class NewsContext extends SubContext { 
value: mediumId, }); } - return this.delete("news_medium", ...columns).then((value) => value.affectedRows > 0); + return this.delete("news_medium", ...columns).then((value) => value.rowCount > 0); } } diff --git a/packages/core/src/database/contexts/notificationContext.ts b/packages/core/src/database/contexts/notificationContext.ts index f850b830..3682c382 100644 --- a/packages/core/src/database/contexts/notificationContext.ts +++ b/packages/core/src/database/contexts/notificationContext.ts @@ -1,16 +1,20 @@ -import { Id, Insert, Notification, UserNotification, Uuid } from "@/types"; -import { SubContext } from "./subContext"; +import { Id, Insert, Uuid } from "../../types"; +import { sql } from "slonik"; +import { entity, Notification, userNotification, UserNotification } from "../databaseTypes"; +import { QueryContext } from "./queryContext"; -export class NotificationContext extends SubContext { +export class NotificationContext extends QueryContext { private readonly tableName = "notifications"; public async insertNotification(notification: Insert): Promise { - const result = await this.dmlQuery( - "INSERT INTO notifications (`title`, `content`, `date`, `key`, `type`) VALUES (?,?,?,?,?);", - [notification.title, notification.content, notification.date, notification.key, notification.type], + notification.id = await this.con.oneFirst( + sql.type(entity)`INSERT INTO notifications (title, content, date, key, type) + VALUES ( + ${notification.title},${notification.content},${sql.timestamp(notification.date)}, + ${notification.key},${notification.type} + ) + RETURNING id;`, ); - - notification.id = result.insertId; return notification as Notification; } @@ -19,82 +23,72 @@ export class NotificationContext extends SubContext { column: "id", value: notification.id, }); - return result.affectedRows > 0; + return result.rowCount > 0; } public async updateNotification(notification: Notification): Promise { const result = await this.update( this.tableName, - (updates, 
values) => { - updates.push("title = ?"); - values.push(notification.title); - - updates.push("content = ?"); - values.push(notification.content); - - updates.push("date = ?"); - values.push(notification.date); - - updates.push("key = ?"); - values.push(notification.key); - - updates.push("type = ?"); - values.push(notification.type); + () => { + const updates = []; + updates.push(sql`title = ${notification.title}`); + updates.push(sql`content = ${notification.content}`); + updates.push(sql`date = ${sql.timestamp(notification.date)}`); + updates.push(sql`key = ${notification.key}`); + updates.push(sql`type = ${notification.type}`); + return updates; }, { column: "id", value: notification.id, }, ); - return result.affectedRows > 0; + return result.rowCount > 0; } - public async getNotifications(date: Date, uuid: Uuid, read: boolean, size?: number): Promise { - const args = [date, uuid] as any[]; - const limit = size && size > 0 ? " LIMIT ?" : ""; - - if (limit) { - args.push(size); - } + public async getNotifications( + date: Date, + uuid: Uuid, + read: boolean, + size?: number, + ): Promise { + const limit = size && size > 0 ? sql` LIMIT ${size}` : sql``; - if (read) { - return this.query( - "SELECT n.*, true as `read` FROM notifications as n WHERE date > ? AND id IN (select id from notifications_read where uuid = ?) ORDER BY date desc" + - limit, - args, - ); - } else { - return this.query( - "SELECT n.*, false as `read` FROM notifications as n WHERE date > ? AND id NOT IN (select id from notifications_read where uuid = ?) ORDER BY date desc" + - limit, - args, - ); - } + return this.con.any( + sql.type(userNotification)` + SELECT n.title, n.content, n.date, n.key, n.type, ${read ? sql`true` : sql`false`} as read + FROM notifications as n + WHERE date > ${sql.timestamp(date)} AND id ${read ? 
sql`NOT ` : sql``}IN ( + select id from notifications_read where uuid = ${uuid} + ) ORDER BY date desc${limit}`, + ); } public async readNotification(id: Id, uuid: Uuid): Promise { - const result = await this.dmlQuery("INSERT IGNORE INTO notifications_read (id, uuid) VALUES (?, ?)", [id, uuid]); - return result.affectedRows > 0; + await this.con.query( + sql`INSERT INTO notifications_read (id, uuid) + VALUES (${id}, ${uuid}) + ON CONFLICT DO NOTHING`, + ); + return false; } public async countNotifications(uuid: Uuid, read: boolean): Promise { - let result; if (read) { - result = await this.query("SELECT count(id) as count FROM notifications_read WHERE uuid = ?", uuid); + return this.con.oneFirst<{ count: number }>( + sql`SELECT count(id) as count FROM notifications_read WHERE uuid = ${uuid}`, + ); } else { - result = await this.query( - "SELECT count(id) as count FROM notifications WHERE id not in (SELECT id as count FROM notifications_read WHERE uuid = ?)", - uuid, + return this.con.oneFirst<{ count: number }>( + sql`SELECT count(id) as count FROM notifications WHERE id not in (SELECT id as count FROM notifications_read WHERE uuid = ${uuid})`, ); } - return result[0].count; } public async readAllNotifications(uuid: Uuid): Promise { - const result = await this.dmlQuery( - "INSERT IGNORE INTO notifications_read (id, uuid) SELECT id, ? 
FROM notifications", - uuid, + const result = await this.con.query( + sql`INSERT INTO notifications_read (id, uuid) SELECT id, ${uuid} FROM notifications ON CONFLICT DO NOTHING`, ); - return result.affectedRows > 0; + return result.rowCount > 0; } } diff --git a/packages/core/src/database/contexts/partContext.ts b/packages/core/src/database/contexts/partContext.ts index 7e158e27..c0c531c4 100644 --- a/packages/core/src/database/contexts/partContext.ts +++ b/packages/core/src/database/contexts/partContext.ts @@ -1,103 +1,97 @@ -import { SubContext } from "./subContext"; import { Episode, FullPart, - MinPart, Part, ShallowPart, Uuid, - MultiSingleNumber, - Optional, VoidablePromise, SimpleRelease, TypedQuery, AddPart, } from "../../types"; -import { combiIndex, getElseSetObj, hasPropType, multiSingle, separateIndex } from "../../tools"; -import { MysqlServerError } from "../mysqlError"; -import { storeModifications } from "../sqlTools"; -import { DatabaseError, MissingEntityError } from "../../error"; +import { combiIndex, getElseSetObj, multiSingle, separateIndex } from "../../tools"; +import { isDuplicateError, MissingEntityError } from "../../error"; +import { QueryContext } from "./queryContext"; +import { sql } from "slonik"; +import { entity, SimpleEpisodeReleases, simplePart, SimplePart } from "../databaseTypes"; +import { EpisodeContext } from "./episodeContext"; interface MinEpisode { id: number; partId: number; } -export class PartContext extends SubContext { - public async getAll(): Promise> { - return this.queryStream("SELECT id, totalIndex, partialIndex, title, medium_id as mediumId FROM part"); +export class PartContext extends QueryContext { + public async getAll(): Promise> { + return this.stream( + sql.type(simplePart)` + SELECT id, total_index, partial_index, combi_index, title, medium_id + FROM part`, + ); } - public async getStandardPartId(mediumId: number): VoidablePromise { - const [standardPartResult]: any = await this.query( - "SELECT id FROM 
part WHERE medium_id = ? AND totalIndex=-1", - mediumId, + public async getStandardPartId(mediumId: number): Promise { + return this.con.maybeOneFirst( + sql.type(entity)`SELECT id FROM part WHERE medium_id = ${mediumId} AND total_index=-1`, ); - return standardPartResult ? standardPartResult.id : undefined; } public async getStandardPart(mediumId: number): VoidablePromise { - const [standardPartResult]: any = await this.query( - "SELECT * FROM part WHERE medium_id = ? AND totalIndex=-1", - mediumId, + const standardPartResult = await this.con.maybeOne( + sql.type(simplePart)`SELECT id, total_index, partial_index, combi_index, title, medium_id + FROM part WHERE medium_id = ${mediumId} AND total_index=-1`, ); if (!standardPartResult) { return; } - const episodesIds: MinEpisode[] = await this.queryInList( - "SELECT id, part_id as partId FROM episode WHERE part_id IN (??)", - standardPartResult.id, + const episodesIds = await this.con.anyFirst( + sql.type(entity)`SELECT id FROM episode WHERE part_id = ${standardPartResult.id}`, ); const standardPart: ShallowPart = { - id: standardPartResult.id, - totalIndex: standardPartResult.totalIndex, - partialIndex: standardPartResult.partialIndex, - title: standardPartResult.title, - episodes: [], - mediumId: standardPartResult.medium_id, + ...standardPartResult, + episodes: episodesIds, }; - episodesIds.forEach((value) => standardPart.episodes.push(value.id)); return standardPart; } - public async getMediumPartIds(mediumId: number): Promise { - const result: any[] = await this.query("SELECT id FROM part WHERE medium_id = ?;", mediumId); - return result.map((value) => value.id); + public async getMediumPartIds(mediumId: number): Promise { + return this.con.anyFirst(sql.type(entity)`SELECT id FROM part WHERE medium_id = ${mediumId};`); } /** * Returns all parts of an medium. 
*/ - public async getMediumParts(mediumId: number, uuid?: Uuid): Promise { - const parts: any[] = await this.query("SELECT * FROM part WHERE medium_id = ?", mediumId); + public async getMediumParts(mediumId: number, uuid?: Uuid): Promise { + const parts = await this.con.any( + sql.type(simplePart)` + SELECT id, total_index, partial_index, combi_index, title, medium_id + FROM part WHERE medium_id = ${mediumId}`, + ); const idMap = new Map(); // recreate shallow parts const fullParts = parts.map((value) => { const part = { - id: value.id, - totalIndex: value.totalIndex, - partialIndex: value.partialIndex, - title: value.title, + ...value, episodes: [], - mediumId: value.medium_id, }; idMap.set(value.id, part); return part; }); - const episodesIds: MinEpisode[] = await this.queryInList( - "SELECT id, part_id as partId FROM episode WHERE part_id IN (??);", - [parts.map((v) => v.id)], - ); + const episodesIds = (await this.con.any( + sql`SELECT id, part_id + FROM episode + WHERE part_id = ANY(${sql.array([...idMap.keys()], "int8")});`, + )) as unknown as readonly MinEpisode[]; if (episodesIds.length) { if (uuid) { const values = episodesIds.map((episode: any): number => episode.id); - const episodes = await this.parentContext.episodeContext.getEpisode(values, uuid); + const episodes = await this.getContext(EpisodeContext).getEpisode(values, uuid); episodes.forEach((value) => { const part = idMap.get(value.partId); if (!part) { @@ -127,81 +121,83 @@ export class PartContext extends SubContext { * Returns all parts of an medium with specific totalIndex. * If there is no such part, it returns an object with only the totalIndex as property. */ - public async getMediumPartsPerIndex(mediumId: number, partCombiIndex: MultiSingleNumber): Promise { - const parts: Optional = await this.queryInList( - "SELECT * FROM part WHERE medium_id = ? 
AND combiIndex IN (??);", + public async getMediumPartsPerIndex(mediumId: number, partCombiIndex: number[]): Promise { + const parts = await this.con.any( + sql.type(simplePart)`SELECT id, total_index, partial_index, combi_index, title, medium_id + FROM part + WHERE medium_id = ${mediumId} AND combiIndex = ANY(${sql.array(partCombiIndex, "int8")});`, [mediumId, partCombiIndex], ); - if (!parts?.length) { + if (!parts.length) { return []; } + const result = [...parts]; + multiSingle(partCombiIndex, (combinedIndex: number) => { if (parts.every((part) => part.combiIndex !== combinedIndex)) { const separateValue = separateIndex(combinedIndex); - parts.push(separateValue); + result.push({ + id: 0, + combiIndex: combinedIndex, + mediumId: 0, + totalIndex: separateValue.totalIndex, + partialIndex: separateValue.partialIndex, + title: "unknown", + }); } }); - return parts.map((value): MinPart => { - return { - id: value.id, - totalIndex: value.totalIndex, - partialIndex: value.partialIndex, - title: value.title, - mediumId: value.medium_id, - }; - }); + return result; } /** * Returns all parts of an medium. 
*/ - public async getParts(partId: T, uuid: Uuid, full = true): Promise { - const parts: Optional = await this.queryInList("SELECT * FROM part WHERE id IN (??);", [partId]); - if (!parts?.length) { + public async getParts(partId: number[], uuid: Uuid, full = true): Promise { + const parts = await this.con.any( + sql.type(simplePart)`SELECT id, total_index, partial_index, combi_index, title, medium_id + FROM part WHERE id = ANY(${sql.array(partId, "int8")});`, + ); + if (!parts.length) { return []; } - const partIdMap = new Map(); - const episodesResult: Optional = await this.queryInList( - "SELECT id, part_id FROM episode WHERE part_id IN (??);", - [ - parts.map((value) => { - partIdMap.set(value.id, value); - return value.id; - }), - ], + const partIdMap = new Map(); + const ids = parts.map((value) => { + partIdMap.set(value.id, []); + return value.id; + }); + const episodesResult = await this.con.any<{ id: number; part_id: number }>( + sql`SELECT id, part_id FROM episode WHERE part_id = ANY(${sql.array(ids, "int8")});`, ); - const episodes: Array<{ id: number; part_id: number }> = episodesResult || []; - if (full) { - const episodeIds = episodes.map((value) => value.id); - const fullEpisodes = await this.parentContext.episodeContext.getEpisode(episodeIds, uuid); + const episodeIds = episodesResult.map((value) => value.id); + const fullEpisodes = await this.getContext(EpisodeContext).getEpisode(episodeIds, uuid); fullEpisodes.forEach((value) => { - const part = partIdMap.get(value.partId); - if (!part) { + const values = partIdMap.get(value.partId) as Episode[]; + if (!values) { throw new MissingEntityError("missing part for queried episode"); } - if (!part.episodes) { - part.episodes = []; - } - part.episodes.push(value); + values.push(value); }); } else { - episodes.forEach((value) => { - const part: Part = partIdMap.get(value.part_id); - (part.episodes as number[]).push(value.id); + episodesResult.forEach((value) => { + const values = 
partIdMap.get(value.part_id) as number[]; + if (!values) { + throw new MissingEntityError("missing part for queried episode"); + } + values.push(value.id); }); } - return parts.map((part) => { + return parts.map((part): Part => { return { id: part.id, totalIndex: part.totalIndex, partialIndex: part.partialIndex, title: part.title, - episodes: part.episodes || [], - mediumId: part.medium_id, + episodes: partIdMap.get(part.id) ?? [], + mediumId: part.mediumId, }; }); } @@ -213,9 +209,9 @@ export class PartContext extends SubContext { if (!partIds.length) { return {}; } - const episodesResult: MinEpisode[] = await this.queryInList( - "SELECT id, part_id as partId FROM episode WHERE part_id IN (??);", - [partIds], + // @ts-expect-error + const episodesResult = await this.con.any( + sql`SELECT id, part_id FROM episode WHERE part_id = ANY(${sql.array(partIds, "int8")});`, ); const result = {}; @@ -236,15 +232,18 @@ export class PartContext extends SubContext { if (!partIds.length) { return {}; } - const episodesResult: Array = await this.queryInList( - "SELECT episode.id as episodeId, part_id, url FROM episode_release INNER JOIN episode ON episode.id = episode_id WHERE part_id IN (??);", - [partIds], + // @ts-expect-error + const episodesResult = await this.con.any( + sql`SELECT episode_id, url, part_id + FROM episode_release + INNER JOIN episode ON episode.id = episode_id + WHERE part_id = ANY(${sql.array(partIds, "int8")});`, ); const result = {}; episodesResult.forEach((value) => { - const items = getElseSetObj(result, value.part_id, () => []) as any[]; + const items = getElseSetObj(result, value.partId, () => []) as any[]; // @ts-expect-error delete value.part_id; items.push(value); @@ -256,20 +255,18 @@ export class PartContext extends SubContext { return result; } - public async getOverLappingParts(standardId: number, nonStandardPartIds: number[]): Promise { + public async getOverLappingParts(standardId: number, nonStandardPartIds: number[]): Promise { if 
(!nonStandardPartIds.length) { return []; } - const results = await this.queryInList( - "SELECT part_id FROM episode WHERE combiIndex IN" + - "(SELECT combiIndex FROM episode WHERE part_id = ?) " + - "AND part_id IN (??) GROUP BY part_id;", - [standardId, nonStandardPartIds], + return this.con.anyFirst<{ partId: number }>( + sql` + SELECT part_id FROM episode WHERE combiIndex IN ( + SELECT combiIndex FROM episode WHERE part_id = ${standardId} + ) + AND part_id = ANY(${sql.array(nonStandardPartIds, "in8")}) + GROUP BY part_id;`, ); - if (!results) { - return []; - } - return results.map((value) => value.part_id); } /** @@ -277,39 +274,32 @@ export class PartContext extends SubContext { */ public async addPart(part: AddPart): Promise { if (part.totalIndex === -1) { + // @ts-expect-error return this.createStandardPart(part.mediumId); } let partId: number; const partCombiIndex = combiIndex(part); try { - const result = await this.query( - "INSERT INTO part (medium_id, title, totalIndex, partialIndex, combiIndex) VALUES (?,?,?,?,?);", - [part.mediumId, part.title, part.totalIndex, part.partialIndex, partCombiIndex], + partId = await this.con.oneFirst( + sql.type(entity)`INSERT INTO part (medium_id, title, total_index, partial_index, combi_index) + VALUES ( + ${part.mediumId},${part.title ?? null},${part.totalIndex},${part.partialIndex ?? null},${partCombiIndex} + ) + RETURNING id;`, ); - partId = result.insertId; - storeModifications("part", "insert", result); + // FIXME: storeModifications("part", "insert", result); } catch (e) { // do not catch if it isn't an duplicate key error - if ( - !e || - (hasPropType(e, "errno") && - e.errno !== MysqlServerError.ER_DUP_KEY && - e.errno !== MysqlServerError.ER_DUP_ENTRY) - ) { + if (!e || !isDuplicateError(e)) { throw e; } - const result = await this.query("SELECT id from part where medium_id=? 
and combiIndex=?", [ - part.mediumId, - partCombiIndex, - ]); - partId = result[0].id; + partId = await this.con.oneFirst( + sql.type(entity)`SELECT id from part where medium_id=${part.mediumId} and combiIndex=${partCombiIndex}`, + ); } - if (!Number.isInteger(partId) || partId <= 0) { - throw new DatabaseError(`invalid ID ${partId}`); - } - let episodes: Episode[]; + let episodes: readonly SimpleEpisodeReleases[]; if (part.episodes?.length) { if (!Number.isInteger(part.episodes[0])) { @@ -317,7 +307,8 @@ export class PartContext extends SubContext { episode.partId = partId; return episode; }); - episodes = await this.parentContext.episodeContext.addEpisode(part.episodes); + // TODO: how to handle this?, also insert release? or separately? + episodes = await this.getContext(EpisodeContext).addEpisode(part.episodes); } else { episodes = []; } @@ -340,33 +331,34 @@ export class PartContext extends SubContext { public async updatePart(part: Part): Promise { const result = await this.update( "part", - (updates, values) => { + () => { + const updates = []; if (part.title) { - updates.push("title = ?"); - values.push(part.title); - } else { - if (part.title === null) { - updates.push("title = NULL"); - } + updates.push(sql`title = ${part.title}`); + } else if (part.title === null) { + updates.push(sql`title = NULL`); } if (part.partialIndex) { - updates.push("partialIndex = ?"); - values.push(part.partialIndex); + updates.push(sql`partial_index = ${part.partialIndex ?? 
null}`); } if (part.totalIndex) { - updates.push("totalIndex = ?"); - values.push(part.totalIndex); + updates.push(sql`total_index = ${part.totalIndex}`); + } + + if (part.totalIndex || part.partialIndex) { + updates.push(sql`combi_index = ${combiIndex(part)}`); } + return updates; }, { column: "id", value: part.id, }, ); - storeModifications("part", "update", result); - return result.changedRows > 0; + // FIXME: storeModifications("part", "update", result); + return result.rowCount > 0; } /** @@ -377,22 +369,21 @@ export class PartContext extends SubContext { return false; } - public createStandardPart(mediumId: number): Promise { + public async createStandardPart(mediumId: number): Promise { const partName = "Non Indexed Volume"; - return this.query("INSERT IGNORE INTO part (medium_id,title, totalIndex, combiIndex) VALUES (?,?,?,?);", [ + const id = await this.con.oneFirst( + sql.type(entity)` + INSERT INTO part (medium_id,title, total_index, combi_index) + VALUES (${mediumId},${partName},-1,-1) + RETURNING id;`, + ); + // FIXME: storeModifications("part", "insert", value); + return { + totalIndex: -1, + title: partName, + id, mediumId, - partName, - -1, - -1, - ]).then((value): ShallowPart => { - storeModifications("part", "insert", value); - return { - totalIndex: -1, - title: partName, - id: value.insertId, - mediumId, - episodes: [], - }; - }); + episodes: [], + }; } } diff --git a/packages/core/src/database/contexts/queryContext.ts b/packages/core/src/database/contexts/queryContext.ts index d3a09079..4df71f65 100644 --- a/packages/core/src/database/contexts/queryContext.ts +++ b/packages/core/src/database/contexts/queryContext.ts @@ -1,51 +1,24 @@ -import mySql, { Connection } from "promise-mysql"; +import { EmptyPromise, Primitive, DBEntity, TypedQuery } from "../../types"; +import { getElseSet } from "../../tools"; +import { ValidationError } from "../../error"; import { - Invalidation, - MetaResult, - Result, - Uuid, - EmptyPromise, - MultiSingleValue, - 
Nullable, - UnpackArray, - PromiseMultiSingle, - Optional, - PageInfo, - Primitive, - DataStats, - NewData, - QueryItems, - QueryItemsResult, -} from "../../types"; -import { getElseSet, getElseSetObj, ignore, multiSingle, promiseMultiSingle, batch } from "../../tools"; -import logger from "../../logger"; -import * as validate from "validate.js"; -import { Query, OkPacket } from "mysql"; -import { DatabaseContext } from "./databaseContext"; -import { UserContext } from "./userContext"; -import { ExternalUserContext } from "./externalUserContext"; -import { InternalListContext } from "./internalListContext"; -import { ExternalListContext } from "./externalListContext"; -import { NewsContext } from "./newsContext"; -import { EpisodeContext } from "./episodeContext"; -import { MediumContext } from "./mediumContext"; -import { PartContext } from "./partContext"; -import { JobContext } from "./jobContext"; -import { MediumInWaitContext } from "./mediumInWaitContext"; + DatabaseConnection, + DatabaseTransactionConnection, + QueryResult, + QueryResultRow, + sql, + TaggedTemplateLiteralInvocation, + ValueExpression, +} from "slonik"; import { ConnectionContext } from "../databaseTypes"; -import env from "../../env"; -import { setContext, removeContext, StoreKey } from "../../asyncStorage"; -import { storeCount } from "../sqlTools"; -import { ScraperHookContext } from "./scraperHookContext"; -import { AppEventContext } from "./appEventContext"; -import { CustomHookContext } from "./customHookContext"; -import { DatabaseError, NotImplementedError, UnsupportedError, ValidationError } from "../../error"; -import { NotificationContext } from "./notificationContext"; +import { joinAnd, joinComma } from "./helper"; +import { Readable } from "stream"; const database = "enterprise"; -type ParamCallback = (value: UnpackArray) => any[] | any; -type UpdateCallback = (updates: string[], values: any[]) => undefined | EmptyPromise; +// eslint-disable-next-line 
@typescript-eslint/no-invalid-void-type +type UpdateCallback = () => ValueExpression[] | Promise; + export type SqlPrimitive = Primitive | Date; export type QueryValue = SqlPrimitive | SqlPrimitive[]; export type QueryInValue = SqlPrimitive | Array; @@ -62,111 +35,71 @@ export interface Condition { value: any; } -function emptyPacket() { +function emptyPacket(): QueryResult { return { - affectedRows: 0, - changedRows: 0, - fieldCount: 0, - insertId: 0, - message: "Not queried", - protocol41: false, + notices: [], + fields: [], + rowCount: 0, + rows: [], + // @ts-expect-error + command: "", }; } +export interface ContextConfig { + connection: DatabaseConnection | DatabaseTransactionConnection; + subClass: Map any, any>; +} + +export type ContextConstructor = new (context: ContextConfig) => T; + /** * A Class for consecutive queries on the same connection. */ export class QueryContext implements ConnectionContext { - private readonly con: Connection; - private readonly subClassMap: Map any, any> = new Map(); - - private getSubInstanceLazy(constructor: new (parentContext: QueryContext) => T): T { - return getElseSet(this.subClassMap, constructor, () => new constructor(this)); - } - - public get databaseContext(): DatabaseContext { - return this.getSubInstanceLazy(DatabaseContext); - } - - public get userContext(): UserContext { - return this.getSubInstanceLazy(UserContext); - } - - public get partContext(): PartContext { - return this.getSubInstanceLazy(PartContext); - } - - public get mediumContext(): MediumContext { - return this.getSubInstanceLazy(MediumContext); - } - - public get episodeContext(): EpisodeContext { - return this.getSubInstanceLazy(EpisodeContext); - } + private readonly contextConfig: ContextConfig; - public get newsContext(): NewsContext { - return this.getSubInstanceLazy(NewsContext); + public constructor(context: ContextConfig) { + this.contextConfig = context; } - public get externalListContext(): ExternalListContext { - return 
this.getSubInstanceLazy(ExternalListContext); + public getContext(constructor: ContextConstructor): T { + return getElseSet(this.contextConfig.subClass, constructor, () => new constructor(this.contextConfig)); } - public get externalUserContext(): ExternalUserContext { - return this.getSubInstanceLazy(ExternalUserContext); + public escapeIdentifier(str: string) { + return sql.identifier([str]); } - public get internalListContext(): InternalListContext { - return this.getSubInstanceLazy(InternalListContext); - } - - public get jobContext(): JobContext { - return this.getSubInstanceLazy(JobContext); - } - - public get mediumInWaitContext(): MediumInWaitContext { - return this.getSubInstanceLazy(MediumInWaitContext); - } - - public get scraperHookContext(): ScraperHookContext { - return this.getSubInstanceLazy(ScraperHookContext); - } - - public get appEventContext(): AppEventContext { - return this.getSubInstanceLazy(AppEventContext); - } - - public get customHookContext(): CustomHookContext { - return this.getSubInstanceLazy(CustomHookContext); - } + private isAborted = false; - public get notificationContext(): NotificationContext { - return this.getSubInstanceLazy(NotificationContext); + public markAborted() { + this.isAborted = true; } - public constructor(con: Connection) { - this.con = con; + public aborted() { + return this.isAborted; } /** * */ - public startTransaction(): EmptyPromise { - return this.query("START TRANSACTION;").then(ignore); + public async startTransaction(): EmptyPromise { + await this.con.query(sql`START TRANSACTION;`); } /** * */ - public commit(): EmptyPromise { - return this.query("COMMIT;").then(ignore); + public async commit(): EmptyPromise { + await this.con.query(sql`COMMIT;`); } /** * */ - public rollback(): EmptyPromise { - return this.query("ROLLBACK;").then(ignore); + public async rollback(): EmptyPromise { + await this.con.query(sql`ROLLBACK;`); } /** @@ -176,7 +109,7 @@ export class QueryContext implements ConnectionContext { 
public async start(): EmptyPromise { const exists = await this.databaseExists(); if (!exists) { - await this.query(`CREATE DATABASE ${database};`); + await this.con.query(sql`CREATE DATABASE ${sql.identifier([database])};`); } } @@ -184,592 +117,67 @@ export class QueryContext implements ConnectionContext { * Checks whether the main database exists currently. */ public async databaseExists(): Promise { - const databases = await this.query("SHOW DATABASES;"); - return databases.find((data: { Database: string }) => data.Database === database) != null; - } - - public processResult(result: Result): Promise>> { - if (!result.preliminary) { - return Promise.reject(new ValidationError("Invalid Result: missing preliminary value")); - } - return promiseMultiSingle(result.result, async (value: MetaResult) => { - const resultArray: any[] = await this.query( - "SELECT episode_id FROM result_episode WHERE novel=? AND (chapter=? OR chapIndex=?)", - [value.novel, value.chapter, value.chapIndex], - ); - if (resultArray[0]?.episode_id != null) { - return null; - } - // TODO implement - return value; - }); + const databases = await this.con.query<{ database: string }>(sql`SHOW DATABASES;`); + return databases.rows.find((data) => data.database === database) != null; } - public saveResult(result: Result): Promise>> { - if (!result.preliminary) { - return Promise.reject(new ValidationError("Invalid Result: missing preliminary value")); - } - return promiseMultiSingle(result.result, async (value) => { - if (!result.accept) { - return null; - } - // if there is a title, search a medium which matches - - // TODO implement - return value; - }); + public get con(): DatabaseConnection | DatabaseTransactionConnection { + return this.contextConfig.connection; } - public async getPageInfo(link: string, key: string): Promise { - if (!validate.isString(link) || !link || !key || !validate.isString(key)) { - throw new ValidationError("invalid link or key"); - } - const query: any[] = await 
this.query("SELECT value FROM page_info WHERE link=? AND keyString=?", [link, key]); - return { - link, - key, - values: query.map((value) => value.value).filter((value) => value), - }; - } - - public async updatePageInfo(link: string, key: string, values: string[], toDeleteValues?: string[]): EmptyPromise { - if (!validate.isString(link) || !link || !key || !validate.isString(key)) { - throw new ValidationError("invalid link or key"); - } - await this.removePageInfo(link, key, toDeleteValues); - - await Promise.all( - values.map((value) => { - if (!value || !validate.isString(value)) { - throw new TypeError("value is not a string: " + typeof value); - } - return this.query("INSERT INTO page_info (link, keyString, value) VALUES(?,?,?)", [link, key, value]); - }), - ); - } - - public async removePageInfo(link: string, key?: string, toDeleteValues?: string[]): EmptyPromise { - if (!validate.isString(link) || !link || (key && !validate.isString(key))) { - throw new ValidationError("invalid link or key"); - } - if (key) { - if (toDeleteValues) { - await Promise.all( - toDeleteValues.map((value) => { - if (!value || !validate.isString(value)) { - throw new ValidationError("value not a string: " + typeof value); - } - // TODO: 29.06.2019 use 'value IN (list)' - return this.query("DELETE FROM page_info WHERE link=? AND keyString=? AND value=?", [link, key, value]); - }), - ); - } else { - await this.query("DELETE FROM page_info WHERE link=? 
AND keyString=?", [link, key]); - } - } else { - await this.query("DELETE FROM page_info WHERE link=?", link); - } - } - - public async queueNewTocs(): EmptyPromise { - throw new NotImplementedError("queueNewTocs not supported"); - } - - public async getInvalidated(uuid: Uuid): Promise { - const result: any[] = await this.query("SELECT * FROM user_data_invalidation WHERE uuid=?", uuid); - await this.query("DELETE FROM user_data_invalidation WHERE uuid=?;", uuid).catch((reason) => logger.error(reason)); - return result.map((value: any): Invalidation => { - return { - externalListId: value.external_list_id, - externalUuid: value.external_uuid, - mediumId: value.medium_id, - partId: value.part_id, - episodeId: value.episode_id, - userUuid: !!value.user_uuid, - listId: value.list_id, - newsId: value.news_id, - uuid, - }; - }); - } - - public async getInvalidatedStream(uuid: Uuid): Promise { - return this.queryStream( - "SELECT " + - "external_list_id as externalListId, external_uuid as externalUuid, medium_id as mediumId, " + - "part_id as partId, episode_id as episodeId, user_uuid as userUuid," + - "list_id as listId, news_id as newsId, uuid " + - "FROM user_data_invalidation WHERE uuid=?", - uuid, - ).on("end", () => { - this.query("DELETE FROM user_data_invalidation WHERE uuid=?;", uuid).catch((reason) => logger.error(reason)); - }); - } - - public clearInvalidationTable(): EmptyPromise { - return this.query("TRUNCATE user_data_invalidation"); - } - - /** - * - * @param query - * @param parameter - */ - public async query(query: string, parameter?: any | any[]): Promise { - if (query.length > 20 && env.development) { - logger.debug(query.replace(/\n+/g, "").replace(/\s+/g, " ").substring(0, 80)); - } - const start = Date.now(); - let result; - try { - setContext("sql-query"); - result = await this.con.query(query, parameter); - storeCount(StoreKey.QUERY_COUNT); - } finally { - removeContext("sql-query"); - } - - if (Array.isArray(result) && result.length > 10) { - 
logger.debug(`[${Date.now() - start}ms] ${result.length} Results for ${query} - Parameter: '${parameter + ""}'`); - } - return result; - } - - /** - * Convenience function for correct return type. - * Should only be used for data manipulation queries like INSERT, UPDATE, DELETE. - * - * @param query sql query - * @param parameter parameter for the sql query - */ - public async dmlQuery(query: string, parameter?: any | any[]): Promise { - return this.query(query, parameter); + public async stream(query: TaggedTemplateLiteralInvocation): Promise> { + // FIXME: it does not seem possible to return a stream from within: + // pool.transaction((con) => new Promise(resolve => con.stream(query, resolve))) + // this will never resolve because the library is shit for streaming + // so fake it, and just get all data in memory + const result = await this.con.any(query); + return Readable.from(result); } /** * Deletes one or multiple entries from one specific table, * with only one conditional. */ - public async delete(table: string, ...condition: Condition[]): Promise { + public async delete( + table: string, + ...condition: Condition[] + ): Promise>> { if (!condition || (Array.isArray(condition) && !condition.length)) { return Promise.reject(new ValidationError("Invalid delete condition")); } - let query = `DELETE FROM ${mySql.escapeId(table)} WHERE `; - const values: any[] = []; - - multiSingle(condition, (value: any, _, last) => { - query += `${mySql.escapeId(value.column)} = ?`; - if (last) { - query += ";"; - } else { - query += " AND "; - } - values.push(value.value); - }); + const query = sql`DELETE FROM ${this.escapeIdentifier(table)} WHERE ${joinAnd( + condition.map((value) => { + return sql`${sql.identifier([value.column])} = ${value.value}`; + }), + )};`; - return this.query(query, values); + // @ts-expect-error + return this.con().query(query); } /** * Updates data from the storage. * May return a empty OkPacket if no values are to be updated. 
*/ - public async update(table: string, cb: UpdateCallback, ...condition: Condition[]): Promise { + public async update( + table: string, + cb: UpdateCallback, + ...condition: Condition[] + ): Promise>> { if (!condition || (Array.isArray(condition) && !condition.length)) { return Promise.reject(new ValidationError("Invalid update condition")); } - const updates: string[] = []; - const values: any[] = []; - const updatePromise = cb(updates, values); - if (updatePromise) { - await updatePromise; - } + const valueExpressions = await cb(); - if (!updates.length) { + if (!valueExpressions.length) { return Promise.resolve(emptyPacket()); } - let query = `UPDATE ${mySql.escapeId(table)} - SET ${updates.join(", ")} - WHERE `; - multiSingle(condition, (value: any, _, last) => { - query += `${mySql.escapeId(value.column)} = ?`; - if (last) { - query += ";"; - } else { - query += " AND "; - } - values.push(value.value); - }); - return this.query(query, values); - } - - public multiInsert>( - query: string, - value: T, - paramCallback: ParamCallback, - ): PromiseMultiSingle { - if (!value || (Array.isArray(value) && !value.length)) { - return Promise.resolve(Array.isArray(value) ? 
[] : emptyPacket()) as any; - } - if (Array.isArray(value) && value.length > 100) { - return this._batchFunction( - value, - query, - paramCallback, - // @ts-expect-error - (q, v, p) => this.multiInsert(q, v, p) as Promise, - ) as any; - } - let valuesQuery = ""; - let valuesQueries = ""; - let paramCount = -1; - const param: any[] = []; - - multiSingle(value, (item, _index, lastItem) => { - const items = paramCallback(item); - if (Array.isArray(items)) { - param.push(...items); - } else { - param.push(items); - } - - if (paramCount !== items.length) { - paramCount = items.length; - valuesQuery = "("; - if (items.length > 1) { - valuesQuery += "?,".repeat(items.length - 1); - } - valuesQuery += "?)"; - } - - valuesQueries += valuesQuery; - - if (!lastItem) { - valuesQueries += ","; - } - }); - return this.query(`${query} ${valuesQueries};`, param); - } - - /** - * Expects either a primitive value or a list of primitive values and at most one list of primitive values. - * Currently it does a simple String Search for the List Placeholder '??' (instead the normal sql one '?'), - * so one needs to take care of not using a '??' string in the sql query itself, use parameter. - * - * Example: - * queryInList( - * "SELECT * FROM example_table WHERE id = ? AND setting IN ?? ORDER BY date;", - * [1, [1,2,3,4,5,6,7]] - * ) // normal query - * - * queryInList( - * "SELECT * FROM example_table WHERE id = ? ORDER BY date;", - * [1] // or only 1 - * ) // normal query - * - * queryInList( - * "SELECT * FROM example_table WHERE id = ? AND setting IN ?? ORDER BY date;", - * [1, []] - * ) // no nested list values, return an empty list by default - * - * queryInList( - * "SELECT * FROM example_table WHERE id = ? AND setting IN ?? ORDER BY date;", - * undefined - * ) // no value given returns an empty list by default - * - * queryInList( - * "SELECT * FROM example_table WHERE id = ? AND setting IN ?? ORDER BY date;", - * [1, 1,2,3,4,5,6,7] - * ) // list placeholder '??' 
is present but no nested list results in a thrown error - * - * queryInList( - * "SELECT * FROM example_table WHERE id = ? AND setting IN ?? AND value IN ?? ORDER BY date;", - * [1, [1,2,3,4,5],[6,7]] - * ) // multiple list placeholder '??' are present but currently not allowd and results in a thrown error - * - * @param query the sql query string - * @param value placeholder values - */ - public async queryInList(query: string, value: QueryInValue): Promise { - if (!value || (Array.isArray(value) && !value.length)) { - return []; - } - if (!Array.isArray(value)) { - value = [value]; - } - const placeHolderValues = value; - - const listPlaceholderIndex = query.indexOf("??"); - - if (listPlaceholderIndex !== query.lastIndexOf("??")) { - throw new UnsupportedError("Invalid Query: multiple Listplaceholder are currently not allowed"); - } - const params: Array<[string, any[]]> = []; - const listParams = placeHolderValues - .map((param, index) => (Array.isArray(param) ? [param, index] : undefined)) - .filter((v) => v) as Array<[any[], number]>; + const query = sql`UPDATE ${sql.identifier([table])} SET ${joinComma(valueExpressions)} WHERE ${joinAnd( + condition.map((value) => sql`${sql.identifier([value.column])} = ${value.value}`), + )};`; - if (listParams.length > 1) { - throw new UnsupportedError("Using multiple ListParams is not supported"); - } - if (listParams.length) { - const [listParam, index] = listParams[0]; - - if (!listParam.length) { - return []; - } - batch(listParam, 100).forEach((param: any[]) => { - const values = [ - // placeholder values before the listParam - ...placeHolderValues.slice(0, index), - ...param, - // placeholder values after the listParam, is empty if index + 1 is greater than the array - ...placeHolderValues.slice(index + 1), - ]; - const placeholder = "?,".repeat(param.length).slice(0, -1); - params.push([placeholder, values]); - }); - } else { - // if no listParam was found, it is used with a primitive value if that index exists - 
const placeholder = listPlaceholderIndex < 0 ? "" : "?"; - params.push([placeholder, placeHolderValues]); - } - if (!params.length) { - throw new DatabaseError(`no params for '${query}'`); - } - const result: any[][] = await Promise.all( - params.map((param) => { - const [placeholder, values] = param; - const newQuery = query.replace("??", placeholder); - return this.query(newQuery, values); - }), - ); - return result.flat(1); - } - - public queryStream(query: string, parameter?: any | any[]): Query { - if (query.length > 20 && env.development) { - logger.debug(`${query} - ${(parameter + "").replace(/\n+/g, "").replace(/\s+/g, " ").substring(0, 30)}`); - } - return this.con.queryStream(query, parameter); - } - - public async getNew(uuid: Uuid, date = new Date(0)): Promise { - const episodeReleasePromise = this.query( - "SELECT episode_id as episodeId, title, url, releaseDate, locked, toc_id as tocId " + - "FROM episode_release WHERE updated_at > ?", - date, - ); - const episodePromise = this.query( - "SELECT episode.id, part_id as partId, totalIndex, partialIndex, " + - "user_episode.progress, user_episode.read_date as readDate " + - "FROM episode LEFT JOIN user_episode ON episode.id=user_episode.episode_id " + - "WHERE (user_episode.user_uuid IS NULL OR user_episode.user_uuid = ?) " + - "AND (updated_at > ? OR read_date > ?)", - [uuid, date, date], - ); - const partPromise = this.query( - "SELECT id, title, medium_id as mediumId, totalIndex, partialIndex FROM part WHERE updated_at > ?", - date, - ); - const mediumPromise = this.query( - "SELECT id, countryOfOrigin, languageOfOrigin, author, artist, title, " + - "medium, lang, stateOrigin, stateTL, series, universe " + - "FROM medium WHERE updated_at > ?", - date, - ); - const listPromise = this.query("SELECT id, name, medium FROM reading_list WHERE user_uuid=? 
AND updated_at > ?", [ - uuid, - date, - ]); - const exListPromise = this.query( - "SELECT list.id, list.name, list.user_uuid as uuid, list.medium, list.url " + - "FROM external_user INNER JOIN external_reading_list as list ON uuid=user_uuid " + - "WHERE local_uuid=? AND list.updated_at > ?", - [uuid, date], - ); - const exUserPromise = this.query( - "SELECT name as identifier, uuid, service as type, local_uuid as localUuid " + - "FROM external_user WHERE local_uuid = ? AND updated_at > ?", - [uuid, date], - ); - const mediumInWaitPromise = this.query("SELECT title, medium, link FROM medium_in_wait WHERE updated_at > ?", date); - const newsPromise = this.query( - "SELECT id, title, link, date, CASE WHEN user_id IS NULL THEN 0 ELSE 1 END as `read` " + - "FROM news_board LEFT JOIN news_user ON id=news_id " + - "WHERE (user_id IS NULL OR user_id = ?) AND updated_at > ?", - [uuid, date], - ); - const tocPromise = this.query( - "SELECT id, medium_id as mediumId, link, " + - "countryOfOrigin, languageOfOrigin, author, title," + - "medium, artist, lang, stateOrigin, stateTL, series, universe " + - "FROM medium_toc WHERE updated_at > ?", - date, - ); - return { - tocs: await tocPromise, - media: await mediumPromise, - releases: await episodeReleasePromise, - episodes: await episodePromise, - parts: await partPromise, - lists: await listPromise, - extLists: await exListPromise, - extUser: await exUserPromise, - mediaInWait: await mediumInWaitPromise, - news: await newsPromise.then((values: any[]) => { - values.forEach((value) => (value.read = value.read === 1)); - return values; - }), - }; - } - - public async getStat(uuid: Uuid): Promise { - const episodePromise = this.query( - "SELECT part_id, count(distinct episode.id) as episodeCount, sum(distinct episode.id) as episodeSum, count(url) as releaseCount " + - "FROM episode LEFT JOIN episode_release ON episode.id=episode_release.episode_id " + - "GROUP BY part_id", - ); - const partPromise = this.query("SELECT part.id, 
medium_id FROM part "); - const listPromise = this.query( - "SELECT id, medium_id FROM reading_list LEFT JOIN list_medium ON reading_list.id=list_id WHERE user_uuid=?", - uuid, - ); - const exListPromise = this.query( - "SELECT id, medium_id FROM external_user INNER JOIN external_reading_list ON uuid=user_uuid LEFT JOIN external_list_medium ON external_reading_list.id=list_id WHERE local_uuid=?", - uuid, - ); - const extUserPromise = this.query( - "SELECT uuid, id FROM external_user LEFT JOIN external_reading_list ON uuid=user_uuid WHERE local_uuid=?", - uuid, - ); - const tocPromise: Promise> = this.query( - "SELECT medium_id, count(link) as count FROM medium_toc GROUP BY medium_id;", - ); - - const tocs = await tocPromise; - const parts = await partPromise; - const episodes = await episodePromise; - const emptyPart = { episodeCount: 0, episodeSum: 0, releaseCount: 0 }; - const partMap = new Map(); - - for (const episode of episodes) { - partMap.set(episode.part_id, episode); - delete episode.part_id; - } - const media = {}; - const mediaStats = {}; - const lists = {}; - const extLists = {}; - const extUser = {}; - - for (const toc of tocs) { - const medium = getElseSetObj(mediaStats, toc.medium_id, () => { - return { - tocs: 0, - }; - }); - medium.tocs = toc.count; - } - - for (const part of parts) { - const mediumParts: any = getElseSetObj(media, part.medium_id, () => ({})); - mediumParts[part.id] = getElseSet(partMap, part.id, () => emptyPart); - } - - for (const list of await listPromise) { - const listMedia: number[] = getElseSetObj(lists, list.id, () => []); - if (list.medium_id != null) { - listMedia.push(list.medium_id); - } - } - - for (const list of await exListPromise) { - const listMedia: number[] = getElseSetObj(extLists, list.id, () => []); - - if (list.medium_id != null) { - listMedia.push(list.medium_id); - } - } - - for (const user of await extUserPromise) { - const userLists: number[] = getElseSetObj(extUser, user.uuid, () => []); - 
userLists.push(user.id); - } - return { - media, - mediaStats, - lists, - extLists, - extUser, - }; - } - - public async queryItems(uuid: Uuid, query: QueryItems): Promise { - const [ - externalUser, - externalMediaLists, - mediaLists, - mediaTocs, - tocs, - media, - parts, - partReleases, - partEpisodes, - episodes, - episodeReleases, - ] = await Promise.all([ - this.externalUserContext.getExternalUser(query.externalUser), - Promise.all(query.externalMediaLists.map((id) => this.externalListContext.getExternalList(id))), - this.internalListContext.getShallowList(query.mediaLists, uuid), - this.mediumContext.getMediumTocs(query.mediaTocs), - this.mediumContext.getTocs(query.tocs), - this.mediumContext.getSimpleMedium(query.media), - this.partContext.getParts(query.parts, uuid, false), - this.partContext.getPartReleases(query.partReleases), - this.partContext.getPartItems(query.partEpisodes), - this.episodeContext.getEpisode(query.episodes, uuid), - this.episodeContext.getReleases(query.episodeReleases), - ]); - - return { - episodeReleases, // by episode id - episodes, - partEpisodes, // by part id - partReleases, // by part id - parts, - media, - tocs, // by toc id - mediaTocs, // by medium id - mediaLists, - externalMediaLists, - externalUser, - }; - } - - private async _batchFunction( - value: T[], - query: string, - paramCallback: Optional>, - func: (query: string, values: T[], paramCallback?: ParamCallback) => Promise, - ): Promise { - const length = value.length; - const resultsPromises = []; - - const batchLimit = 100; - - for (let i = 0; i < length; i += batchLimit) { - let subList: T[]; - if (length < batchLimit) { - subList = value.slice(i, length); - } else { - subList = value.slice(i, i + batchLimit); - } - resultsPromises.push(func(query, subList, paramCallback)); - } - const results = await Promise.all(resultsPromises); - return results.flat(); + return this.con.query(query) as Promise>>; } } diff --git 
a/packages/core/src/database/contexts/scraperHookContext.ts b/packages/core/src/database/contexts/scraperHookContext.ts index cab9cfb5..f2387915 100644 --- a/packages/core/src/database/contexts/scraperHookContext.ts +++ b/packages/core/src/database/contexts/scraperHookContext.ts @@ -1,63 +1,65 @@ -import { SubContext } from "./subContext"; -import { ScraperHook, TypedQuery } from "../../types"; -import { storeModifications } from "../sqlTools"; +import { Insert, ScraperHook, TypedQuery } from "../../types"; import { escapeLike } from "../storages/storageTools"; import { ValidationError } from "../../error"; +import { QueryContext } from "./queryContext"; +import { sql } from "slonik"; +import { simpleScraperHook } from "../databaseTypes"; -export class ScraperHookContext extends SubContext { +export class ScraperHookContext extends QueryContext { public async getAllStream(): Promise> { - return this.queryStream("SELECT id, name, state, message FROM scraper_hook"); + return this.stream(sql.type(simpleScraperHook)`SELECT id, name, enabled, message FROM scraper_hook`); } - public async getAll(): Promise { - return this.query("SELECT id, name, state, message FROM scraper_hook"); + public async getAll(): Promise { + return this.con.any(sql.type(simpleScraperHook)`SELECT id, name, enabled, message FROM scraper_hook`); } /** * Adds a scraper_hook of an medium to the storage. 
*/ - public async addScraperHook(scraperHook: ScraperHook): Promise { - const result = await this.query("INSERT INTO scraper_hook (id, name, state, message) VALUES (?,?,?,?);", [ - scraperHook.id, - scraperHook.name, - scraperHook.state, - scraperHook.message, - ]); - storeModifications("scraper_hook", "insert", result); + public async addScraperHook(scraperHook: Insert): Promise { + await this.con.query( + sql` + INSERT INTO scraper_hook (name, enabled, message) + VALUES (${scraperHook.name},${scraperHook.enabled},${scraperHook.message});`, + ); + // FIXME: storeModifications("scraper_hook", "insert", result); } /** * Updates a scraperHook. */ public async updateScraperHook(scraperHook: ScraperHook): Promise { - let result = await this.update( + const result = await this.update( "scraper_hook", - (updates, values) => { + () => { + const updates = []; + if (scraperHook.message) { - updates.push("message = ?"); - values.push(scraperHook.message); + updates.push(sql`message = ${scraperHook.message}`); } else if (scraperHook.message === null) { throw new ValidationError("Cannot set the message of scraper_hook to null"); } - if (scraperHook.state) { - updates.push("state = ?"); - values.push(scraperHook.state); - } else if (scraperHook.state === null) { - throw new ValidationError("Cannot set the state of scraper_hook to null"); + if (scraperHook.enabled) { + updates.push(sql`enabled = ${scraperHook.enabled}`); + } else if (scraperHook.enabled === null) { + throw new ValidationError("Cannot set the enabled of scraper_hook to null"); } + return updates; }, { column: "id", value: scraperHook.id, }, ); - storeModifications("scraper_hook", "update", result); - result = await this.query( - `UPDATE jobs SET job_state = ? 
WHERE name LIKE '%${escapeLike(scraperHook.name)}%'`, - scraperHook.state, + // FIXME: storeModifications("scraper_hook", "update", result); + await this.con.query( + sql`UPDATE jobs SET job_enabled = ${scraperHook.enabled} WHERE name LIKE ${ + "%" + escapeLike(scraperHook.name) + "%" + }`, ); - storeModifications("job", "update", result); - return result.changedRows > 0; + // FIXME: storeModifications("job", "update", result); + return result.rowCount > 0; } /** @@ -65,7 +67,7 @@ export class ScraperHookContext extends SubContext { */ public async deleteScraperHook(id: number): Promise { const result = await this.delete("scraper_hook", { column: "id", value: id }); - storeModifications("scraper_hook", "delete", result); - return result.affectedRows > 0; + // FIXME: storeModifications("scraper_hook", "delete", result); + return result.rowCount > 0; } } diff --git a/packages/core/src/database/contexts/subContext.ts b/packages/core/src/database/contexts/subContext.ts deleted file mode 100644 index 5bc03427..00000000 --- a/packages/core/src/database/contexts/subContext.ts +++ /dev/null @@ -1,62 +0,0 @@ -import { QueryContext, Condition, QueryInValue } from "./queryContext"; -import { Query, OkPacket } from "mysql"; -import { ConnectionContext } from "../databaseTypes"; -import { MultiSingleValue, EmptyPromise, UnpackArray } from "../../types"; - -type ParamCallback = (value: UnpackArray) => any[] | any; -type UpdateCallback = (updates: string[], values: any[]) => void; - -export class SubContext implements ConnectionContext { - public constructor(public readonly parentContext: QueryContext) {} - - public commit(): EmptyPromise { - return this.parentContext.commit(); - } - - public rollback(): EmptyPromise { - return this.parentContext.rollback(); - } - - public startTransaction(): EmptyPromise { - return this.parentContext.startTransaction(); - } - - public query(query: string, parameter?: any | any[]): Promise { - return this.parentContext.query(query, parameter); - 
} - - public dmlQuery(query: string, parameter?: any | any[]): Promise { - return this.parentContext.dmlQuery(query, parameter); - } - - /** - * Deletes one or multiple entries from one specific table, - * with only one conditional. - */ - protected async delete(table: string, ...condition: Condition[]): Promise { - return this.parentContext.delete(table, ...condition); - } - - /** - * Updates data from the storage. - */ - protected async update(table: string, cb: UpdateCallback, ...condition: Condition[]): Promise { - return this.parentContext.update(table, cb as any, ...condition); - } - - protected multiInsert>( - query: string, - value: T, - paramCallback: ParamCallback, - ): Promise { - return this.parentContext.multiInsert(query, value, paramCallback); - } - - protected async queryInList(query: string, value: QueryInValue): Promise { - return this.parentContext.queryInList(query, value); - } - - protected queryStream(query: string, parameter?: any | any[]): Query { - return this.parentContext.queryStream(query, parameter); - } -} diff --git a/packages/core/src/database/contexts/userContext.ts b/packages/core/src/database/contexts/userContext.ts index ade7739c..41c64d28 100644 --- a/packages/core/src/database/contexts/userContext.ts +++ b/packages/core/src/database/contexts/userContext.ts @@ -1,15 +1,11 @@ -import { SubContext } from "./subContext"; import { SimpleUser, User, Uuid, Nullable, UpdateUser } from "../../types"; import { allTypes, BcryptHash, Errors, Hasher, Hashes } from "../../tools"; import { v1 as uuidGenerator, v4 as sessionGenerator } from "uuid"; -import { - CredentialError, - DatabaseError, - DuplicateEntityError, - MissingEntityError, - SessionError, - ValidationError, -} from "../../error"; +import { CredentialError, DuplicateEntityError, SessionError, ValidationError } from "../../error"; +import { QueryContext } from "./queryContext"; +import { sql } from "slonik"; +import { simpleUser } from "../databaseTypes"; +import { 
InternalListContext } from "./internalListContext"; /** * Checks whether the password equals to the given hash @@ -24,7 +20,7 @@ import { * @return {boolean} * @private */ -const verifyPassword = (password: string, hash: string, alg: string, salt: string): Promise => { +const verifyPassword = (password: string, hash: string, alg: string, salt?: string): Promise => { const hashAlgorithm = Hashes.find((value) => value.tag === alg); if (!hashAlgorithm) { @@ -37,7 +33,7 @@ const verifyPassword = (password: string, hash: string, alg: string, salt: strin const StandardHash: Hasher = BcryptHash; const standardListName = "Standard"; -export class UserContext extends SubContext { +export class UserContext extends QueryContext { /** * Registers an User if the userName is free. * Returns a Error Code if userName is already @@ -53,9 +49,9 @@ export class UserContext extends SubContext { if (!userName || !password) { return Promise.reject(new ValidationError("missing username or password")); } - const user = await this.query("SELECT * FROM user WHERE name = ?;", userName); - // if there is a result in array, userName is not new, so abort - if (user.length) { + const userExists = await this.con.exists(sql`SELECT name FROM user WHERE name = ${userName};`); + // userName is not new, so abort + if (userExists) { return Promise.reject(new DuplicateEntityError(Errors.USER_EXISTS_ALREADY)); } // if userName is new, proceed to register @@ -63,17 +59,18 @@ export class UserContext extends SubContext { const { salt, hash } = await StandardHash.hash(password); // insert the full user and loginUser right after - await this.query("INSERT INTO user (name, uuid, salt, alg, password) VALUES (?,?,?,?,?);", [ - userName, - id, - salt, - StandardHash.tag, - hash, - ]); + await this.con.query( + sql`INSERT INTO user (name, uuid, salt, alg, password) + VALUES (${userName},${id},${salt ?? 
null},${StandardHash.tag},${hash});`, + ); // every user gets a standard list for everything that got no list assigned // this standard list name 'Standard' is reserved for this purpose - await this.parentContext.internalListContext.addList(id, { name: standardListName, medium: allTypes() }); + await this.getContext(InternalListContext).addList({ + name: standardListName, + medium: allTypes(), + userUuid: id, + }); return this.loginUser(userName, password, ip); } @@ -88,18 +85,13 @@ export class UserContext extends SubContext { if (!userName || !password) { return Promise.reject(new ValidationError("missing username or password")); } - const result = await this.query("SELECT * FROM user WHERE name = ?;", userName); - - if (!result.length) { - return Promise.reject(new MissingEntityError(Errors.USER_DOES_NOT_EXIST)); - } else if (result.length !== 1) { - return Promise.reject(new DatabaseError("got multiple user for the same name")); - } + const user = await this.con.one( + sql.type(simpleUser)`SELECT uuid, name, alg, password, salt FROM user WHERE name = ${userName};`, + ); - const user = result[0]; const uuid = user.uuid; - if (!(await verifyPassword(password, user.password, user.alg, user.salt))) { + if (!(await verifyPassword(password, user.password, user.alg, user.salt ?? 
undefined))) { return Promise.reject(new CredentialError(Errors.INVALID_CREDENTIALS)); } // if there exists a session already for that device, remove it @@ -109,12 +101,9 @@ export class UserContext extends SubContext { const session = sessionGenerator(); const date = new Date().toISOString(); - await this.query("INSERT INTO user_log (user_uuid, ip, session_key, acquisition_date) VALUES (?,?,?,?);", [ - uuid, - ip, - session, - date, - ]); + await this.con.query( + sql`INSERT INTO user_log (user_uuid, ip, session_key, acquisition_date) VALUES (${uuid},${ip},${session},${date});`, + ); return this._getUser(uuid, session); } @@ -126,18 +115,18 @@ export class UserContext extends SubContext { * the session key of the user for the ip. */ public async userLoginStatus(ip: string, uuid?: Uuid, session?: string): Promise { - const result = await this.query("SELECT * FROM user_log WHERE ip = ?;", ip); - - const sessionRecord = result[0]; + const sessionRecord = await this.con.maybeOne<{ sessionKey: string; userUuid: string }>( + sql`SELECT user_uuid, session_key FROM user_log WHERE ip = ${ip};`, + ); if (!sessionRecord) { return false; } - const currentSession = sessionRecord.session_key; + const currentSession = sessionRecord.sessionKey; if (session) { - return session === currentSession && uuid === sessionRecord.user_uuid; + return session === currentSession && uuid === sessionRecord.userUuid; } return !!currentSession; } @@ -146,41 +135,42 @@ export class UserContext extends SubContext { if (!ip) { return null; } - const result = await this.query( - "SELECT name, uuid, session_key FROM user_log " + "INNER JOIN user ON user.uuid=user_log.user_uuid WHERE ip = ?;", - ip, + const result = await this.con.query<{ name: string; uuid: string; sessionKey: string }>( + sql`SELECT name, uuid, session_key FROM user_log + INNER JOIN user ON user.uuid=user_log.user_uuid + WHERE ip = ${ip};`, ); - const userRecord = result[0]; + const userRecord = result.rows[0]; - if (!userRecord || 
!ip || !userRecord.session_key || !userRecord.name || !userRecord.uuid) { + if (!userRecord || !ip || !userRecord.sessionKey || !userRecord.name || !userRecord.uuid) { return null; } return { name: userRecord.name, - session: userRecord.session_key, + session: userRecord.sessionKey, uuid: userRecord.uuid, }; } public async getUser(uuid: Uuid, ip: string): Promise { - const result = await this.query("SELECT * FROM user_log WHERE user_uuid = ? AND ip = ?;", [uuid, ip]); - - const sessionRecord = result[0]; + const hasSession = await this.con.maybeOne<{ sessionKey: string }>( + sql`SELECT session_key FROM user_log WHERE user_uuid = ${uuid} AND ip = ${ip};`, + ); - if (!sessionRecord?.session_key) { + if (!hasSession) { throw new SessionError("user has no session"); } - return this._getUser(uuid, sessionRecord.session_key); + return this._getUser(uuid, hasSession.sessionKey); } /** * Logs a user out. */ public logoutUser(uuid: Uuid, ip: string): Promise { - return this.delete("user_log", { column: "ip", value: ip }).then((v) => v.affectedRows > 0); + return this.delete("user_log", { column: "ip", value: ip }).then((v) => v.rowCount > 0); } /** @@ -199,29 +189,27 @@ export class UserContext extends SubContext { await this.delete("user_log", { column: "user_uuid", value: uuid }); // delete reading lists contents - await this.query( - "DELETE FROM list_medium WHERE list_id in (SELECT id FROM reading_list WHERE user_uuid = ?);", - uuid, + await this.con.query( + sql`DELETE FROM list_medium WHERE list_id in (SELECT id FROM reading_list WHERE user_uuid = ${uuid});`, ); // delete lists await this.delete("reading_list", { column: "user_uuid", value: uuid }); // delete external reading lists contents - await this.query( - "DELETE FROM external_list_medium " + - "WHERE list_id " + - "IN (SELECT id FROM external_reading_list " + - "WHERE user_uuid " + - "IN (SELECT uuid FROM external_user " + - "WHERE local_uuid = ?));", - uuid, + await this.con.query( + sql`DELETE FROM 
external_list_medium + WHERE list_id IN ( + SELECT id FROM external_reading_list + WHERE user_uuid IN ( + SELECT uuid FROM external_user WHERE local_uuid = ${uuid} + ) + );`, ); // delete external lists - await this.query( - "DELETE FROM external_reading_list " + - "WHERE user_uuid " + - "IN (SELECT uuid FROM external_user WHERE local_uuid = ?);", - uuid, + await this.con.query( + sql`DELETE FROM external_reading_list + WHERE user_uuid + IN (SELECT uuid FROM external_user WHERE local_uuid = ${uuid});`, ); // delete external user await this.delete("external_user", { column: "local_uuid", value: uuid }); @@ -234,7 +222,7 @@ export class UserContext extends SubContext { // in case the deletion was unsuccessful, just 'ban' any further access to that account // and delete it manually? const result = await this.delete("user", { column: "uuid", value: uuid }); - return result.affectedRows > 0; + return result.rowCount > 0; } /** @@ -251,10 +239,10 @@ export class UserContext extends SubContext { } return this.update( "user", - async (updates, values) => { + async () => { + const updates = []; if (user.name) { - updates.push("name = ?"); - values.push(user.name); + updates.push(sql`name = ${user.name}`); } if (user.newPassword) { @@ -264,21 +252,17 @@ export class UserContext extends SubContext { } const { salt, hash } = await StandardHash.hash(user.newPassword); - updates.push("alg = ?"); - values.push(StandardHash.tag); - - updates.push("salt = ?"); - values.push(salt); - - updates.push("password = ?"); - values.push(hash); + updates.push(sql`alg = ${StandardHash.tag}`); + updates.push(sql`salt = ${salt ?? 
null}`); + updates.push(sql`password = ${hash}`); } + return updates; }, { column: "uuid", value: uuid, }, - ).then((value) => value.changedRows > 0); + ).then((value) => value.rowCount > 0); } /** @@ -290,8 +274,9 @@ export class UserContext extends SubContext { * @return {Promise} */ public async verifyPassword(uuid: Uuid, password: string): Promise { - const result = await this.query("SELECT password, alg, salt FROM user WHERE uuid = ?", uuid); - const user = result[0]; + const user = await this.con.one<{ password: string; alg: string; salt: string }>( + sql`SELECT password, alg, salt FROM user WHERE uuid = ${uuid}`, + ); return verifyPassword(password, user.password, user.alg, user.salt); } @@ -313,11 +298,13 @@ export class UserContext extends SubContext { session, }; // query for user - const userPromise = this.query("SELECT * FROM user WHERE uuid = ?;", uuid).then((value: any[]) => { - // add user metadata - user.name = value[0].name; - user.uuid = uuid; - }); + const userPromise = this.con + .query<{ name: string }>(sql`SELECT name FROM user WHERE uuid = ${uuid};`) + .then((value) => { + // add user metadata + user.name = value.rows[0].name; + user.uuid = uuid; + }); await userPromise; return user; } diff --git a/packages/core/src/database/databaseBuilder.ts b/packages/core/src/database/databaseBuilder.ts index d1e8876e..ad36197a 100644 --- a/packages/core/src/database/databaseBuilder.ts +++ b/packages/core/src/database/databaseBuilder.ts @@ -1,22 +1,76 @@ -import { TableBuilder } from "./tableBuilder"; -import { DatabaseSchema, InvalidationType, Migration } from "./databaseTypes"; +import { ColumnType, DatabaseSchema, Migration } from "./databaseTypes"; import { TableSchema } from "./tableSchema"; -import { Trigger } from "./trigger"; import { TriggerBuilder } from "./triggerBuilder"; -import { DatabaseError, SchemaError } from "../error"; +import { NotImplementedError, SchemaError } from "../error"; +import { sql, SqlSqlToken } from "slonik"; +import { 
parseFirst } from "pgsql-ast-parser"; +import { ColumnSchema } from "./columnSchema"; -interface InvalidationSchema { - table: TableSchema; - type: InvalidationType; - tableName?: string; +function parseTableWithAst(statement: string) { + const parsed = parseFirst(statement); + + if (parsed.type !== "create table") { + throw new SchemaError("invalid statement: expected a 'create table' statement"); + } + + const columns: ColumnSchema[] = []; + + for (const column of parsed.columns) { + if (column.kind === "like table") { + throw new SchemaError("'create table ... like ...' is forbidden in this package"); + } + // TODO: implement better transform or directly use ast type + columns.push(new ColumnSchema(column.name.name, column.dataType as unknown as ColumnType, [])); + } + return { + name: parsed.name.name, + columns, + }; +} + +function parseTable(statement: string) { + try { + return parseTableWithAst(statement); + } catch (error) { + // ignore error + } + + const match = [...statement.matchAll(/create\s+table\s*(if\s+not\s+exists)?\s+(\w+)\s+\(([^;]+)\);/gim)]; + + if (!match[0]) { + throw new SchemaError("could not parse 'create table' statement"); + } + + const name = match[0][2]; + const columns: ColumnSchema[] = []; + const columnDefinitions = match[0][3].replaceAll(/\s+/g, " "); + + for (const definition of columnDefinitions.split(",").map((s) => s.trim().toLowerCase())) { + if ( + definition.startsWith("primary key") || + definition.startsWith("unique") || + definition.startsWith("foreign key") + ) { + continue; + } + + const columnDefinition = definition.split(" "); + const name = columnDefinition[0].replaceAll('"', ""); + columns.push(new ColumnSchema(name, columnDefinition[1] as ColumnType, [])); + } + return { + name, + columns, + }; } export class DataBaseBuilder { public readonly tables: TableSchema[] = []; - private readonly triggers: Trigger[] = []; - private readonly invalidations: InvalidationSchema[] = []; + private readonly triggers: Array> = 
[]; private readonly migrations: Migration[] = []; + private readonly procedures: Array> = []; private readonly version: number; + private autoUpdatedAt = false; public constructor(version: number) { this.version = version; @@ -26,88 +80,13 @@ export class DataBaseBuilder { if (this.version <= 0 || !Number.isInteger(this.version)) { throw new TypeError("invalid database version"); } - this.invalidations.forEach((value) => { - let table: TableSchema; - if (value.tableName) { - const foundTable = this.tables.find((t) => t.name === value.tableName); - - if (!foundTable) { - throw new DatabaseError(`table '${value.tableName}' not found`); - } - table = foundTable; - } else { - table = value.table; - } - value.table.invalidations.push({ table, type: value.type }); - }); - let mainTable; - let invalidationTable; - - for (const table of this.tables) { - if (table.main) { - if (mainTable) { - throw new SchemaError("only one main table allowed"); - } - mainTable = table; - } else if (table.invalidationTable) { - if (invalidationTable) { - throw new SchemaError("only one invalidation table allowed"); - } - invalidationTable = table; - } - } - - if (!mainTable) { - throw new SchemaError("no main table specified"); - } - if (mainTable.primaryKeys.length !== 1) { - throw new SchemaError("main table does not have exact one primary key"); - } - if (!invalidationTable) { - throw new SchemaError("no invalidation table specified"); - } - if (invalidationTable === mainTable) { - throw new SchemaError("invalidation table and main table cannot be the same"); - } - const mainPrimaryKey = mainTable.primaryKeys[0]; - for (const table of this.tables) { - if (table.foreignKeys.some((value) => value.foreignKey === mainPrimaryKey)) { - table.mainDependent = true; - } - } - let marked; - // mark all tables which have foreign keys to mainDependant tables as mainDependant - while (marked) { - marked = false; - - for (const table of this.tables) { - if ( - table.foreignKeys.some((column) => { - 
const foreignKey = column.foreignKey; - if (foreignKey) { - if (!foreignKey.table) { - const name = foreignKey.name; - throw new SchemaError(`foreign key '${name}' of '${column.name}' in '${table.name}' has no table`); - } - if (foreignKey.table.mainDependent && !table.mainDependent) { - marked = table.mainDependent = true; - } - } - return false; - }) - ) { - table.mainDependent = true; - } - } - } return { version: this.version, triggers: this.triggers, tables: [...this.tables], - invalidationTable, - mainTable, migrations: this.migrations, + procedures: this.procedures, }; } @@ -115,20 +94,57 @@ export class DataBaseBuilder { this.migrations.push(...migrations); } - public addTrigger(trigger: Trigger): void { - this.triggers.push(trigger); + /** + * Automatically add a specific trigger on each table with a + * "updated_at" column. + * Requires a trigger procedure named "trigger_set_update_at". + */ + public setAutoUpdatedAt(enabled = true) { + this.autoUpdatedAt = enabled; } - public addTable(table: TableSchema, invalidations: Array<{ type: InvalidationType; table?: string }>): this { - this.tables.push(table); - for (const value of invalidations) { - this.invalidations.push({ tableName: value.table, table, type: value.type }); + public addTable(tableSchema: SqlSqlToken, config?: { indices?: string[][]; updated_at?: boolean }): this { + const parsed = parseTable(tableSchema.sql); + + if (this.autoUpdatedAt && parsed.columns.find((column) => column.name === "updated_at")) { + this.triggers.push(sql`CREATE OR REPLACE TRIGGER set_timestamp + BEFORE UPDATE ON ${sql.identifier([parsed.name])} + FOR EACH ROW + EXECUTE PROCEDURE trigger_set_update_at();`); } + const indices = []; + + if (config?.indices) { + for (const indexIdentifiers of config.indices) { + const indexColumns = []; + + for (const identifier of indexIdentifiers) { + const hasColumn = parsed.columns.find((value) => value.name === identifier); + + if (!hasColumn) { + throw new SchemaError(`index column 
identifier '${identifier}' not defined on table ${parsed.name}`); + } + indexColumns.push(identifier); + } + + if (indexColumns.length) { + indices.push(indexColumns); + } + } + } + this.tables.push(new TableSchema(parsed.columns, parsed.name, tableSchema, undefined, indices)); + return this; + } + + public addProcedure(procedure: SqlSqlToken): this { + this.procedures.push(procedure); return this; } - public getTableBuilder(): TableBuilder { - return new TableBuilder(this); + public addTrigger(triggerSchema: SqlSqlToken): this { + throw new NotImplementedError( + "implement add trigger - currently 'pgsql-ast-parser' cannot parse create trigger statements", + ); } public getTriggerBuilder(): TriggerBuilder { diff --git a/packages/core/src/database/databaseSchema.ts b/packages/core/src/database/databaseSchema.ts index 32c80d6c..489ede59 100644 --- a/packages/core/src/database/databaseSchema.ts +++ b/packages/core/src/database/databaseSchema.ts @@ -1,436 +1,474 @@ +import { sql } from "slonik"; import { DataBaseBuilder } from "./databaseBuilder"; import { Migrations } from "./migrations"; -const dataBaseBuilder = new DataBaseBuilder(19); - -dataBaseBuilder - .getTableBuilder() - .setName("user") - .setMain() - .parseColumn("name VARCHAR(200) NOT NULL UNIQUE") - .parseColumn("uuid CHAR(36) NOT NULL") - .parseColumn("salt VARCHAR(200)") - .parseColumn("password VARCHAR(200) NOT NULL") - .parseColumn("alg VARCHAR(100) NOT NULL") - .parseMeta("PRIMARY KEY(uuid)") - .build(); - -dataBaseBuilder - .getTableBuilder() - .setName("external_user") - .parseColumn("name VARCHAR(200) NOT NULL") - .parseColumn("uuid CHAR(36) NOT NULL") - .parseColumn("local_uuid CHAR(36) NOT NULL") - .parseColumn("service INT NOT NULL") - .parseColumn("cookies TEXT") - .parseColumn("last_scrape DATETIME") - .parseColumn("updated_at TIMESTAMP DEFAULT CURRENT_TIMESTAMP") - .parseMeta("PRIMARY KEY(uuid)") - .parseMeta("FOREIGN KEY(local_uuid) REFERENCES user(uuid)") - .build(); - -dataBaseBuilder - 
.getTableBuilder() - .setName("user_log") - .parseColumn("user_uuid CHAR(36) NOT NULL") - .parseColumn("ip VARCHAR(100)") - .parseColumn("session_key CHAR(36)") - .parseColumn("acquisition_date VARCHAR(40)") - .parseMeta("PRIMARY KEY(session_key)") - .parseMeta("FOREIGN KEY(user_uuid) REFERENCES user(uuid)") - .build(); - -dataBaseBuilder - .getTableBuilder() - .setName("reading_list") - .parseColumn("id INT UNSIGNED NOT NULL AUTO_INCREMENT") - .parseColumn("name VARCHAR(200) NOT NULL") - .parseColumn("user_uuid CHAR(36) NOT NULL") - .parseColumn("medium INT NOT NULL") - .parseColumn("updated_at TIMESTAMP DEFAULT CURRENT_TIMESTAMP ON UPDATE CURRENT_TIMESTAMP") - .parseMeta("PRIMARY KEY(id)") - .parseMeta("FOREIGN KEY(user_uuid) REFERENCES user(uuid)") - .build(); - -dataBaseBuilder - .getTableBuilder() - .setName("external_reading_list") - .parseColumn("id INT UNSIGNED NOT NULL AUTO_INCREMENT") - .parseColumn("name VARCHAR(200) NOT NULL") - .parseColumn("user_uuid CHAR(36) NOT NULL") - .parseColumn("medium INT NOT NULL") - .parseColumn("url VARCHAR(200) NOT NULL") - .parseColumn("updated_at TIMESTAMP DEFAULT CURRENT_TIMESTAMP ON UPDATE CURRENT_TIMESTAMP") - .parseMeta("PRIMARY KEY(id)") - .parseMeta("FOREIGN KEY(user_uuid) REFERENCES external_user(uuid)") - .build(); - -dataBaseBuilder - .getTableBuilder() - .setName("medium") - .parseColumn("id INT UNSIGNED NOT NULL AUTO_INCREMENT") - .parseColumn("countryOfOrigin VARCHAR(200)") - .parseColumn("languageOfOrigin VARCHAR(200)") - .parseColumn("author VARCHAR(200)") - .parseColumn("artist VARCHAR(200)") - .parseColumn("title VARCHAR(200) NOT NULL") - .parseColumn("medium INT NOT NULL") - .parseColumn("lang VARCHAR(200)") - .parseColumn("stateOrigin INT") - .parseColumn("stateTL INT") - .parseColumn("series VARCHAR(200)") - .parseColumn("universe VARCHAR(200)") - .parseColumn("updated_at TIMESTAMP DEFAULT CURRENT_TIMESTAMP ON UPDATE CURRENT_TIMESTAMP") - .parseMeta("PRIMARY KEY(id)") - .parseMeta("UNIQUE(title, 
medium)") - .build(); - -dataBaseBuilder - .getTableBuilder() - .setName("medium_synonyms") - .parseColumn("medium_id INT UNSIGNED") - .parseColumn("synonym VARCHAR(200) NOT NULL") - .parseMeta("PRIMARY KEY(medium_id, synonym)") - .parseMeta("FOREIGN KEY(medium_id) REFERENCES medium(id)") - .build(); - -dataBaseBuilder - .getTableBuilder() - .setName("medium_toc") - .parseColumn("medium_id INT UNSIGNED") - .parseColumn("link VARCHAR(767) NOT NULL") - .parseColumn("id INT UNSIGNED NOT NULL AUTO_INCREMENT") - .parseColumn("countryOfOrigin VARCHAR(200)") - .parseColumn("languageOfOrigin VARCHAR(200)") - .parseColumn("author VARCHAR(200)") - .parseColumn("artist VARCHAR(200)") - .parseColumn("title VARCHAR(200) NOT NULL") - .parseColumn("medium INT NOT NULL") - .parseColumn("lang VARCHAR(200)") - .parseColumn("stateOrigin INT") - .parseColumn("stateTL INT") - .parseColumn("series VARCHAR(200)") - .parseColumn("universe VARCHAR(200)") - .parseColumn("updated_at TIMESTAMP DEFAULT CURRENT_TIMESTAMP ON UPDATE CURRENT_TIMESTAMP") - .parseMeta("PRIMARY KEY(id)") - .parseMeta("UNIQUE(medium_id, link)") - .parseMeta("FOREIGN KEY(medium_id) REFERENCES medium(id)") - .build(); - -dataBaseBuilder - .getTableBuilder() - .setName("medium_in_wait") - .parseColumn("title VARCHAR(180) NOT NULL") - .parseColumn("medium INT NOT NULL") - .parseColumn("link VARCHAR(767) NOT NULL") - .parseColumn("updated_at TIMESTAMP DEFAULT CURRENT_TIMESTAMP ON UPDATE CURRENT_TIMESTAMP") - .parseMeta("PRIMARY KEY(title, medium, link(500))") - .build(); - -dataBaseBuilder - .getTableBuilder() - .setName("list_medium") - .parseColumn("list_id INT UNSIGNED NOT NULL") - .parseColumn("medium_id INT UNSIGNED NOT NULL") - .parseMeta("PRIMARY KEY(list_id, medium_id)") - .parseMeta("FOREIGN KEY(list_id) REFERENCES reading_list(id)") - .parseMeta("FOREIGN KEY(medium_id) REFERENCES medium(id)") - .build(); - -dataBaseBuilder - .getTableBuilder() - .setName("external_list_medium") - .parseColumn("list_id INT 
UNSIGNED NOT NULL") - .parseColumn("medium_id INT UNSIGNED NOT NULL") - .parseMeta("PRIMARY KEY(list_id, medium_id)") - .parseMeta("FOREIGN KEY(list_id) REFERENCES external_reading_list(id)") - .parseMeta("FOREIGN KEY(medium_id) REFERENCES medium(id)") - .build(); - -dataBaseBuilder - .getTableBuilder() - .setName("part") - .parseColumn("id INT UNSIGNED NOT NULL AUTO_INCREMENT") - .parseColumn("medium_id INT UNSIGNED NOT NULL") - .parseColumn("title VARCHAR(200)") - .parseColumn("totalIndex INT NOT NULL") - .parseColumn("partialIndex INT") - // TODO: change default to coalesce(totalindex, 0) ... - .parseColumn("combiIndex DOUBLE NOT NULL DEFAULT 0") - .parseColumn("updated_at TIMESTAMP DEFAULT CURRENT_TIMESTAMP ON UPDATE CURRENT_TIMESTAMP") - .parseMeta("PRIMARY KEY(id)") - .parseMeta("FOREIGN KEY(medium_id) REFERENCES medium(id)") - .parseMeta("UNIQUE(medium_id, combiIndex)") - .build(); - -dataBaseBuilder - .getTableBuilder() - .setName("episode") - .parseColumn("id INT UNSIGNED NOT NULL AUTO_INCREMENT") - .parseColumn("part_id INT UNSIGNED NOT NULL") - .parseColumn("totalIndex INT NOT NULL") - .parseColumn("partialIndex INT") - // TODO: change default to coalesce(totalindex, 0) ... 
- .parseColumn("combiIndex DOUBLE NOT NULL DEFAULT 0") - .parseColumn("updated_at TIMESTAMP DEFAULT CURRENT_TIMESTAMP ON UPDATE CURRENT_TIMESTAMP") - .parseMeta("PRIMARY KEY(id)") - .parseMeta("FOREIGN KEY(part_id) REFERENCES part(id)") - .parseMeta("UNIQUE(part_id, combiIndex)") - .build(); - -dataBaseBuilder - .getTableBuilder() - .setName("episode_release") - .parseColumn("id INT UNSIGNED NOT NULL AUTO_INCREMENT") - .parseColumn("episode_id INT UNSIGNED NOT NULL") - // TODO: look through all ~35000 releases without toc_id and set this to "not null" if possible - .parseColumn("toc_id INT UNSIGNED") - .parseColumn("title TEXT NOT NULL") - .parseColumn("url VARCHAR(767) NOT NULL") - .parseColumn("source_type VARCHAR(200)") - .parseColumn("releaseDate DATETIME NOT NULL") - .parseColumn("locked BOOLEAN DEFAULT 0") - .parseColumn("updated_at TIMESTAMP DEFAULT CURRENT_TIMESTAMP ON UPDATE CURRENT_TIMESTAMP") - .parseMeta("PRIMARY KEY(id)") - .parseMeta("UNIQUE (episode_id, url)") - .parseMeta("FOREIGN KEY(episode_id) REFERENCES episode(id)") - .parseMeta("FOREIGN KEY(toc_id) REFERENCES medium_toc(id)") - .build(); - -dataBaseBuilder - .getTableBuilder() - .setName("user_episode") - .parseColumn("user_uuid CHAR(36) NOT NULL") - .parseColumn("episode_id INT UNSIGNED NOT NULL") - .parseColumn("progress FLOAT UNSIGNED NOT NULL") - .parseColumn("read_date DATETIME NOT NULL DEFAULT NOW()") - .parseMeta("PRIMARY KEY(user_uuid, episode_id)") - .parseMeta("FOREIGN KEY(user_uuid) REFERENCES user(uuid)") - .parseMeta("FOREIGN KEY(episode_id) REFERENCES episode(id)") - .build(); - -dataBaseBuilder - .getTableBuilder() - .setName("scrape_board") - .parseColumn("link VARCHAR(500) NOT NULL") - .parseColumn("next_scrape DATETIME NOT NULL") - .parseColumn("type INT UNSIGNED NOT NULL") - .parseColumn("uuid CHAR(36)") - .parseColumn("external_uuid CHAR(36)") - .parseColumn("info TEXT") - .parseColumn("medium_id INT UNSIGNED") - .parseMeta("PRIMARY KEY(link, type)") - .parseMeta("FOREIGN 
KEY(uuid) REFERENCES user(uuid)") - .parseMeta("FOREIGN KEY(external_uuid) REFERENCES external_user(uuid)") - .parseMeta("FOREIGN KEY(medium_id) REFERENCES medium(id)") - .build(); - -dataBaseBuilder - .getTableBuilder() - .setName("news_board") - .parseColumn("id INT UNSIGNED NOT NULL AUTO_INCREMENT") - .parseColumn("title TEXT NOT NULL") - .parseColumn("link VARCHAR(700) UNIQUE NOT NULL") - .parseColumn("date DATETIME NOT NULL") - .parseColumn("updated_at TIMESTAMP DEFAULT CURRENT_TIMESTAMP ON UPDATE CURRENT_TIMESTAMP") - .parseMeta("PRIMARY KEY (id)") - .build(); - -dataBaseBuilder - .getTableBuilder() - .setName("news_user") - .parseColumn("news_id INT UNSIGNED NOT NULL") - .parseColumn("user_id CHAR(36) NOT NULL") - .parseMeta("FOREIGN KEY (user_id) REFERENCES user(uuid)") - .parseMeta("FOREIGN KEY (news_id) REFERENCES news_board(id)") - .parseMeta("PRIMARY KEY (news_id, user_id)") - .build(); - -dataBaseBuilder - .getTableBuilder() - .setName("news_medium") - .parseColumn("news_id INT UNSIGNED NOT NULL") - .parseColumn("medium_id INT UNSIGNED NOT NULL") - .parseMeta("FOREIGN KEY(medium_id) REFERENCES medium(id)") - .parseMeta("FOREIGN KEY (news_id) REFERENCES news_board(id)") - .parseMeta("PRIMARY KEY(news_id, medium_id)") - .build(); - -dataBaseBuilder - .getTableBuilder() - .setName("meta_corrections") - .parseColumn("link VARCHAR(767) NOT NULL") - .parseColumn("replaced TEXT NOT NULL") - .parseColumn("startIndex INT UNSIGNED NOT NULL") - .parseColumn("endIndex INT UNSIGNED NOT NULL") - .parseColumn("fieldKey INT UNSIGNED NOT NULL") - .parseMeta("PRIMARY KEY (link(367), replaced(367), startIndex, endIndex)") - .build(); - -dataBaseBuilder - .getTableBuilder() - .setName("result_episode") - .parseColumn("novel VARCHAR(300) NOT NULL") - .parseColumn("chapter VARCHAR(300)") - .parseColumn("chapIndex INT UNSIGNED") - .parseColumn("volIndex INT UNSIGNED") - .parseColumn("volume VARCHAR(300)") - .parseColumn("episode_id INT UNSIGNED NOT NULL") - 
.parseMeta("FOREIGN KEY(episode_id) REFERENCES episode(id)") - .parseMeta("PRIMARY KEY(novel, chapter, chapIndex)") - .build(); - -dataBaseBuilder - .getTableBuilder() - .setName("page_info") - .parseColumn("link VARCHAR(767) NOT NULL") - .parseColumn("keyString VARCHAR(200) NOT NULL") - .parseColumn("value TEXT NOT NULL") - .build(); - -dataBaseBuilder - .getTableBuilder() - .setName("enterprise_database_info") - .parseColumn("version INT UNSIGNED NOT NULL") - .parseColumn("migrating BOOLEAN NOT NULL DEFAULT 0") - .build(); - -dataBaseBuilder - .getTableBuilder() - .setName("jobs") - .parseColumn("id INT UNSIGNED NOT NULL AUTO_INCREMENT") - .parseColumn("type VARCHAR(200) NOT NULL") - .parseColumn("name VARCHAR(200) UNIQUE") - .parseColumn("state VARCHAR(200) NOT NULL") - .parseColumn("job_state VARCHAR(200) NOT NULL") - .parseColumn("interval INT NOT NULL") - .parseColumn("deleteAfterRun INT NOT NULL") - .parseColumn("runAfter INT") - .parseColumn("runningSince DATETIME") - .parseColumn("lastRun DATETIME") - .parseColumn("nextRun DATETIME") - .parseColumn("arguments TEXT") - .parseMeta("PRIMARY KEY(id)") - .build(); - -dataBaseBuilder - .getTableBuilder() - .setName("job_history") - .parseColumn("id INT UNSIGNED NOT NULL") - .parseColumn("type VARCHAR(200) NOT NULL") - .parseColumn("name VARCHAR(200) NOT NULL") - .parseColumn("deleteAfterRun BOOLEAN NOT NULL") - .parseColumn("runAfter INT") - .parseColumn("scheduled_at DATETIME NOT NULL") - .parseColumn("start DATETIME NOT NULL") - .parseColumn("end DATETIME NOT NULL") - .parseColumn("result VARCHAR(100) NOT NULL") - .parseColumn("message VARCHAR(200) NOT NULL") - .parseColumn("context TEXT NOT NULL") - .parseColumn("created INT NOT NULL DEFAULT 0") - .parseColumn("updated INT NOT NULL DEFAULT 0") - .parseColumn("deleted INT NOT NULL DEFAULT 0") - .parseColumn("queries INT NOT NULL DEFAULT 0") - .parseColumn("network_queries INT NOT NULL DEFAULT 0") - .parseColumn("network_received INT NOT NULL DEFAULT 0") - 
.parseColumn("network_send INT NOT NULL DEFAULT 0") - // .parseColumn("duration INT NOT NULL AS (end - start) PERSISTENT") // currently not supported in parseColumn - // .parseColumn("lagging INT NOT NULL AS (start - scheduled_at) PERSISTENT") // currently not supported in parseColumn - .parseColumn("arguments TEXT") - .parseMeta("PRIMARY KEY(id, start)") - .build(); - -dataBaseBuilder - .getTableBuilder() - .setName("scraper_hook") - .parseColumn("id INT UNSIGNED NOT NULL AUTO_INCREMENT") - .parseColumn("name VARCHAR(200) NOT NULL UNIQUE") - .parseColumn("state VARCHAR(200) NOT NULL") - .parseColumn("message VARCHAR(200) NOT NULL") - .parseMeta("PRIMARY KEY(id)") - .build(); - -dataBaseBuilder - .getTableBuilder() - .setName("custom_hook") - .parseColumn("id INT UNSIGNED NOT NULL AUTO_INCREMENT") - .parseColumn("name VARCHAR(200) NOT NULL UNIQUE") - .parseColumn("hookState VARCHAR(200) NOT NULL") - .parseColumn("comment TEXT NOT NULL") - .parseColumn("state TEXT NOT NULL") - .parseColumn("updated_at TIMESTAMP DEFAULT CURRENT_TIMESTAMP ON UPDATE CURRENT_TIMESTAMP") - .parseMeta("PRIMARY KEY(id)") - .build(); - -dataBaseBuilder - .getTableBuilder() - .setName("app_events") - .parseColumn("id INT UNSIGNED NOT NULL AUTO_INCREMENT") - .parseColumn("program VARCHAR(200) NOT NULL") - .parseColumn("date DATETIME NOT NULL") - .parseColumn("type VARCHAR(200) NOT NULL") - .parseMeta("PRIMARY KEY(id)") - .build(); - -dataBaseBuilder - .getTableBuilder() - .setName("job_stat_summary") - .parseColumn("name VARCHAR(200) NOT NULL") - .parseColumn("type VARCHAR(200) NOT NULL") - .parseColumn("count INT NOT NULL") - .parseColumn("failed INT NOT NULL") - .parseColumn("succeeded INT NOT NULL") - .parseColumn("network_requests INT NOT NULL") - .parseColumn("min_network_requests INT NOT NULL") - .parseColumn("max_network_requests INT NOT NULL") - .parseColumn("network_send INT NOT NULL") - .parseColumn("min_network_send INT NOT NULL") - .parseColumn("max_network_send INT NOT NULL") - 
.parseColumn("network_received INT NOT NULL") - .parseColumn("min_network_received INT NOT NULL") - .parseColumn("max_network_received INT NOT NULL") - .parseColumn("duration INT NOT NULL") - .parseColumn("min_duration INT NOT NULL") - .parseColumn("max_duration INT NOT NULL") - .parseColumn("lagging INT NOT NULL") - .parseColumn("min_lagging INT NOT NULL") - .parseColumn("max_lagging INT NOT NULL") - .parseColumn("updated INT NOT NULL") - .parseColumn("min_updated INT NOT NULL") - .parseColumn("max_updated INT NOT NULL") - .parseColumn("created INT NOT NULL") - .parseColumn("min_created INT NOT NULL") - .parseColumn("max_created INT NOT NULL") - .parseColumn("deleted INT NOT NULL") - .parseColumn("min_deleted INT NOT NULL") - .parseColumn("max_deleted INT NOT NULL") - .parseColumn("sql_queries INT NOT NULL") - .parseColumn("min_sql_queries INT NOT NULL") - .parseColumn("max_sql_queries INT NOT NULL") - .parseMeta("PRIMARY KEY(name)") - .build(); - -dataBaseBuilder - .getTableBuilder() - .setName("notifications") - .parseColumn("id INT UNSIGNED NOT NULL AUTO_INCREMENT") - .parseColumn("title VARCHAR(200) NOT NULL") - .parseColumn("content VARCHAR(500) NOT NULL") - .parseColumn("date DATETIME NOT NULL") - .parseColumn("type VARCHAR(200) NOT NULL") - .parseColumn("key VARCHAR(200) NOT NULL") - .parseMeta("PRIMARY KEY(id)") - .build(); - -dataBaseBuilder - .getTableBuilder() - .setName("notifications_read") - .parseColumn("id INT UNSIGNED NOT NULL") - .parseColumn("uuid CHAR(36) NOT NULL") - .parseMeta("PRIMARY KEY(id, uuid)") - .parseMeta("FOREIGN KEY (uuid) REFERENCES user(uuid)") - .parseMeta("FOREIGN KEY (id) REFERENCES notifications(id)") - .build(); +const dataBaseBuilder = new DataBaseBuilder(1); + +dataBaseBuilder.setAutoUpdatedAt(true); +dataBaseBuilder.addProcedure(sql` +CREATE OR REPLACE FUNCTION trigger_set_update_at() +RETURNS TRIGGER AS $$ +BEGIN + NEW.updated_at = NOW(); + RETURN NEW; +END; +$$ LANGUAGE plpgsql; +`); + +dataBaseBuilder + .addTable( + 
sql` +CREATE TABLE IF NOT EXISTS app_events ( + id bigserial NOT NULL, + "program" varchar(200) NOT NULL, + "date" timestamptz NOT NULL, + "type" varchar(200) NOT NULL, + PRIMARY KEY (id) +); +`, + ) + .addTable( + sql` +CREATE TABLE IF NOT EXISTS custom_hook ( + id bigserial NOT NULL, + "name" varchar(200) NOT NULL, + state jsonb NOT NULL, + enabled bool NOT NULL, + "comment" text NOT NULL, + updated_at timestamptz NOT NULL DEFAULT NOW(), + PRIMARY KEY (id), + UNIQUE("name") +); +`, + ) + .addTable( + sql` +CREATE TABLE IF NOT EXISTS enterprise_database_info ( + "version" int8 NOT NULL, + migrating bool NOT NULL DEFAULT false +); +`, + ) + .addTable( + sql` +CREATE TABLE IF NOT EXISTS "user" ( + "name" varchar(200) NOT NULL, + uuid bpchar(36) NOT NULL, + salt varchar(200) NULL DEFAULT NULL::character varying, + "password" varchar(200) NOT NULL, + alg varchar(100) NOT NULL, + PRIMARY KEY (uuid), + UNIQUE(name) +); +`, + ) + .addTable( + sql` +CREATE TABLE IF NOT EXISTS medium ( + id bigserial NOT NULL, + title varchar(200) NOT NULL, + medium int4 NOT NULL, + country_of_origin varchar(200) NULL DEFAULT NULL::character varying, + language_of_origin varchar(200) NULL DEFAULT NULL::character varying, + author varchar(200) NULL DEFAULT NULL::character varying, + artist varchar(200) NULL DEFAULT NULL::character varying, + lang varchar(200) NULL DEFAULT NULL::character varying, + state_origin int8 NULL, + state_tl int8 NULL, + series varchar(200) NULL DEFAULT NULL::character varying, + universe varchar(200) NULL DEFAULT NULL::character varying, + updated_at timestamptz NOT NULL DEFAULT NOW(), + PRIMARY KEY (id), + UNIQUE(title, medium) +); +`, + ) + .addTable( + sql` +CREATE TABLE IF NOT EXISTS medium_toc ( + id bigserial NOT NULL, + medium_id int8 NOT NULL, + link varchar(767) NOT NULL, + title varchar(200) NOT NULL, + medium int8 NOT NULL, + country_of_origin varchar(200) NULL DEFAULT NULL::character varying, + language_of_origin varchar(200) NULL DEFAULT 
NULL::character varying, + author varchar(200) NULL DEFAULT NULL::character varying, + artist varchar(200) NULL DEFAULT NULL::character varying, + lang varchar(200) NULL DEFAULT NULL::character varying, + state_origin int8 NULL, + state_tl int8 NULL, + series varchar(200) NULL DEFAULT NULL::character varying, + universe varchar(200) NULL DEFAULT NULL::character varying, + updated_at timestamptz NOT NULL DEFAULT NOW(), + PRIMARY KEY (id), + UNIQUE(link, medium_id) +); +`, + { indices: [["medium_id"]] }, + ) + .addTable( + sql` +CREATE TABLE IF NOT EXISTS part ( + id bigserial NOT NULL, + medium_id int8 NOT NULL, + title varchar(200) NULL DEFAULT NULL::character varying, + combi_index float8 NOT NULL, + total_index int8 NOT NULL, + partial_index int8 NULL, + updated_at timestamptz NOT NULL DEFAULT NOW(), + PRIMARY KEY (id), + FOREIGN KEY (medium_id) REFERENCES medium(id) ON DELETE RESTRICT ON UPDATE RESTRICT, + UNIQUE(medium_id, combi_index) +); +`, + ) + .addTable( + sql` +CREATE TABLE IF NOT EXISTS episode ( + id bigserial NOT NULL, + part_id int8 NOT NULL, + total_index int8 NOT NULL, + partial_index int8 NULL, + combi_index float8 NOT NULL, + updated_at timestamptz NOT NULL DEFAULT NOW(), + PRIMARY KEY (id), + FOREIGN KEY (part_id) REFERENCES part(id) ON DELETE RESTRICT ON UPDATE RESTRICT, + UNIQUE(part_id, combi_index) +); +`, + ) + .addTable( + sql` +CREATE TABLE IF NOT EXISTS episode_release ( + id bigserial NOT NULL, + episode_id int8 NOT NULL, + url varchar(767) NOT NULL, + title text NOT NULL, + source_type varchar(200) NULL DEFAULT NULL::character varying, + release_date timestamptz NULL, + "locked" bool NOT NULL DEFAULT false, + toc_id int8 NULL, + updated_at timestamptz NOT NULL DEFAULT NOW(), + PRIMARY KEY (id), + FOREIGN KEY (episode_id) REFERENCES episode(id) ON DELETE RESTRICT ON UPDATE RESTRICT, + FOREIGN KEY (toc_id) REFERENCES medium_toc(id) ON DELETE RESTRICT ON UPDATE RESTRICT, + UNIQUE(episode_id, url) +); +`, + ) + .addTable( + sql` +CREATE 
TABLE IF NOT EXISTS external_user ( + identifier varchar(200) NOT NULL, + uuid bpchar(36) NOT NULL, + local_uuid bpchar(36) NOT NULL, + "type" int8 NOT NULL, + cookies text NULL, + last_scrape timestamptz NULL, + updated_at timestamptz NOT NULL DEFAULT NOW(), + PRIMARY KEY (uuid), + FOREIGN KEY (local_uuid) REFERENCES "user"(uuid) ON DELETE RESTRICT ON UPDATE RESTRICT +); +`, + { indices: [["local_uuid"]] }, + ) + .addTable( + sql` +CREATE TABLE IF NOT EXISTS external_reading_list ( + id bigserial NOT NULL, + "name" varchar(200) NOT NULL, + user_uuid bpchar(36) NOT NULL, + medium int8 NOT NULL, + url varchar(200) NOT NULL, + updated_at timestamptz NOT NULL DEFAULT NOW(), + PRIMARY KEY (id), + FOREIGN KEY (user_uuid) REFERENCES external_user(uuid) ON DELETE RESTRICT ON UPDATE RESTRICT +); +`, + { indices: [["user_uuid"]] }, + ) + .addTable( + sql` +CREATE TABLE IF NOT EXISTS external_list_medium ( + list_id int8 NOT NULL, + medium_id int8 NOT NULL, + PRIMARY KEY (list_id, medium_id), + FOREIGN KEY (list_id) REFERENCES external_reading_list(id) ON DELETE RESTRICT ON UPDATE RESTRICT, + FOREIGN KEY (medium_id) REFERENCES medium(id) ON DELETE RESTRICT ON UPDATE RESTRICT +); +`, + { indices: [["medium_id"], ["list_id"]] }, + ) + .addTable( + sql` +CREATE TABLE IF NOT EXISTS job_history ( + id int8 NOT NULL, + "type" varchar(200) NOT NULL, + "name" varchar(200) NOT NULL, + "start" timestamptz NOT NULL, + "end" timestamptz NOT NULL, + arguments text NULL, + "result" varchar(100) NOT NULL, + message jsonb NOT NULL, + context text NOT NULL, + scheduled_at timestamptz NOT NULL, + created int8 NOT NULL DEFAULT '0'::bigint, + updated int8 NOT NULL DEFAULT '0'::bigint, + deleted int8 NOT NULL DEFAULT '0'::bigint, + queries int8 NOT NULL DEFAULT '0'::bigint, + network_queries int8 NOT NULL DEFAULT '0'::bigint, + network_received int8 NOT NULL DEFAULT '0'::bigint, + network_send int8 NOT NULL DEFAULT '0'::bigint, + lagging int8 GENERATED ALWAYS AS (extract(epoch from "start" - 
"scheduled_at")) STORED, + duration int8 GENERATED ALWAYS AS (extract(epoch from "end" - "start")) STORED, + PRIMARY KEY (id, start) +); +`, + { indices: [["end"], ["name"], ["result"], ["start"]] }, + ) + .addTable( + sql` +CREATE TABLE IF NOT EXISTS job_stat_summary ( + "name" varchar(200) NOT NULL, + "type" varchar(200) NOT NULL, + count int8 NOT NULL, + failed int8 NOT NULL, + succeeded int8 NOT NULL, + network_requests int8 NOT NULL, + min_network_requests int8 NOT NULL, + max_network_requests int8 NOT NULL, + network_send int8 NOT NULL, + min_network_send int8 NOT NULL, + max_network_send int8 NOT NULL, + network_received int8 NOT NULL, + min_network_received int8 NOT NULL, + max_network_received int8 NOT NULL, + duration int8 NOT NULL, + min_duration int8 NOT NULL, + max_duration int8 NOT NULL, + lagging int8 NOT NULL, + min_lagging int8 NOT NULL, + max_lagging int8 NOT NULL, + updated int8 NOT NULL, + min_updated int8 NOT NULL, + max_updated int8 NOT NULL, + created int8 NOT NULL, + min_created int8 NOT NULL, + max_created int8 NOT NULL, + deleted int8 NOT NULL, + min_deleted int8 NOT NULL, + max_deleted int8 NOT NULL, + sql_queries int8 NOT NULL, + min_sql_queries int8 NOT NULL, + max_sql_queries int8 NOT NULL, + PRIMARY KEY (name) +); +`, + ) + .addTable( + sql` +CREATE TABLE IF NOT EXISTS jobs ( + id bigserial NOT NULL, + "type" varchar(200) NOT NULL, + "name" varchar(200) NOT NULL, + state varchar(200) NOT NULL, + "interval" int8 NOT NULL, + delete_after_run bool NOT NULL, + running_since timestamptz NULL, + run_after int8 NULL, + last_run timestamptz NULL, + next_run timestamptz NULL, + arguments text NULL, + enabled bool NOT NULL, + PRIMARY KEY (id), + UNIQUE(name) +); +`, + ) + .addTable( + sql` +CREATE TABLE IF NOT EXISTS medium_in_wait ( + title varchar(180) NOT NULL, + medium int4 NOT NULL, + link varchar(767) NOT NULL, + updated_at timestamptz NOT NULL DEFAULT NOW(), + PRIMARY KEY (title, medium, link) +); +`, + ) + .addTable( + sql` +CREATE 
TABLE IF NOT EXISTS medium_synonyms ( + medium_id int8 NOT NULL, + synonym varchar(200) NOT NULL, + PRIMARY KEY (medium_id, synonym), + FOREIGN KEY (medium_id) REFERENCES medium(id) ON DELETE RESTRICT ON UPDATE RESTRICT +); +`, + ) + .addTable( + sql` +CREATE TABLE IF NOT EXISTS news_board ( + id bigserial NOT NULL, + title text NOT NULL, + link varchar(700) NOT NULL, + "date" timestamptz NULL, + updated_at timestamptz NOT NULL DEFAULT NOW(), + PRIMARY KEY (id), + UNIQUE(link) +); +`, + ) + .addTable( + sql` +CREATE TABLE IF NOT EXISTS news_medium ( + news_id int8 NOT NULL, + medium_id int8 NOT NULL, + PRIMARY KEY (news_id, medium_id), + FOREIGN KEY (medium_id) REFERENCES medium(id) ON DELETE RESTRICT ON UPDATE RESTRICT, + FOREIGN KEY (news_id) REFERENCES news_board(id) ON DELETE RESTRICT ON UPDATE RESTRICT +); +`, + { indices: [["medium_id"]] }, + ) + .addTable( + sql` +CREATE TABLE IF NOT EXISTS news_user ( + news_id int8 NOT NULL, + user_id bpchar(36) NOT NULL, + PRIMARY KEY (news_id, user_id), + FOREIGN KEY (user_id) REFERENCES "user"(uuid) ON DELETE RESTRICT ON UPDATE RESTRICT, + FOREIGN KEY (news_id) REFERENCES news_board(id) ON DELETE RESTRICT ON UPDATE RESTRICT +); +`, + { indices: [["user_id"]] }, + ) + .addTable( + sql` +CREATE TABLE IF NOT EXISTS notifications ( + id bigserial NOT NULL, + title varchar(200) NOT NULL, + "content" varchar(500) NOT NULL, + "date" timestamptz NOT NULL, + "type" varchar(200) NOT NULL, + "key" varchar(200) NOT NULL, + PRIMARY KEY (id) +); +`, + ) + .addTable( + sql` +CREATE TABLE IF NOT EXISTS notifications_read ( + id int8 NOT NULL, + uuid bpchar(36) NOT NULL, + PRIMARY KEY (id, uuid), + FOREIGN KEY (id) REFERENCES notifications(id) ON DELETE RESTRICT ON UPDATE RESTRICT, + FOREIGN KEY (uuid) REFERENCES "user"(uuid) ON DELETE RESTRICT ON UPDATE RESTRICT +); +`, + { indices: [["uuid"]] }, + ) + .addTable( + sql` +CREATE TABLE IF NOT EXISTS page_info ( + link varchar(767) NOT NULL, + key_string varchar(200) NOT NULL, + value 
text NOT NULL +); +`, + ) + .addTable( + sql` +CREATE TABLE IF NOT EXISTS part ( + id bigserial NOT NULL, + medium_id int8 NOT NULL, + title varchar(200) NULL DEFAULT NULL::character varying, + combi_index float8 NOT NULL, + total_index int8 NOT NULL, + partial_index int8 NULL, + updated_at timestamptz NOT NULL DEFAULT NOW(), + PRIMARY KEY (id), + FOREIGN KEY (medium_id) REFERENCES medium(id) ON DELETE RESTRICT ON UPDATE RESTRICT, + UNIQUE(medium_id, combi_index) +); +`, + ) + .addTable( + sql` +CREATE TABLE IF NOT EXISTS reading_list ( + id bigserial NOT NULL, + "name" varchar(200) NOT NULL, + user_uuid bpchar(36) NULL DEFAULT NULL::bpchar, + medium int8 NOT NULL, + updated_at timestamptz NOT NULL DEFAULT NOW(), + PRIMARY KEY (id), + FOREIGN KEY (user_uuid) REFERENCES "user"(uuid) ON DELETE RESTRICT ON UPDATE RESTRICT +); +`, + { indices: [["user_uuid"]] }, + ) + .addTable( + sql` +CREATE TABLE IF NOT EXISTS list_medium ( + list_id int8 NOT NULL, + medium_id int8 NOT NULL, + PRIMARY KEY (list_id, medium_id), + FOREIGN KEY (list_id) REFERENCES reading_list(id) ON DELETE RESTRICT ON UPDATE RESTRICT, + FOREIGN KEY (medium_id) REFERENCES medium(id) ON DELETE RESTRICT ON UPDATE RESTRICT +); +`, + { indices: [["medium_id"]] }, + ) + .addTable( + sql` +CREATE TABLE IF NOT EXISTS scraper_hook ( + id bigserial NOT NULL, + "name" varchar(200) NOT NULL, + enabled boolean NOT NULL, + message varchar(200) NOT NULL, + PRIMARY KEY (id), + UNIQUE(name) +); +`, + ) + .addTable( + sql` +CREATE TABLE IF NOT EXISTS user_episode ( + id bigserial NOT NULL, + user_uuid bpchar(36) NOT NULL, + episode_id int8 NOT NULL, + progress float8 NOT NULL, + read_date timestamptz NOT NULL, + PRIMARY KEY (id), + FOREIGN KEY (user_uuid) REFERENCES "user"(uuid) ON DELETE RESTRICT ON UPDATE RESTRICT, + FOREIGN KEY (episode_id) REFERENCES episode(id) ON DELETE RESTRICT ON UPDATE RESTRICT, + UNIQUE(episode_id, user_uuid) +); +`, + { + indices: [ + ["episode_id"], + ["episode_id", "user_uuid", 
"progress"], + ["episode_id", "progress"], + ["progress"], + ["user_uuid", "progress"], + ], + }, + ) + .addTable( + sql` +CREATE TABLE IF NOT EXISTS user_log ( + user_uuid bpchar(36) NULL DEFAULT NULL::bpchar, + ip varchar(255) NULL DEFAULT NULL::character varying, + session_key varchar(255) NOT NULL, + acquisition_date varchar(40) NULL DEFAULT NULL::character varying, + PRIMARY KEY (session_key), + FOREIGN KEY (user_uuid) REFERENCES "user"(uuid) ON DELETE RESTRICT ON UPDATE RESTRICT +); +`, + { indices: [["user_uuid"]] }, + ); dataBaseBuilder.addMigrations(...Migrations); @@ -473,31 +511,4 @@ dataBaseBuilder.getTableBuilder() .parseColumn("stringified_settings TEXT") .parseMeta("FOREIGN KEY(uuid) REFERENCES user(uuid)"); */ -dataBaseBuilder - .getTableBuilder() - .setName("user_data_invalidation") - .setInvalidationTable() - .parseColumn("uuid CHAR(36) NOT NULL") - .parseColumn("user_uuid BOOLEAN") - .parseColumn("news_id INT UNSIGNED ") - .parseColumn("medium_id INT UNSIGNED ") - .parseColumn("part_id INT UNSIGNED ") - .parseColumn("episode_id INT UNSIGNED ") - .parseColumn("list_id INT UNSIGNED ") - .parseColumn("external_list_id INT UNSIGNED ") - .parseColumn("external_uuid CHAR(36)") - .parseMeta("FOREIGN KEY(uuid) REFERENCES user(uuid)") - .parseMeta("FOREIGN KEY(news_id) REFERENCES news_board(id)") - .parseMeta("FOREIGN KEY(medium_id) REFERENCES medium(id)") - .parseMeta("FOREIGN KEY(part_id) REFERENCES part(id)") - .parseMeta("FOREIGN KEY(episode_id) REFERENCES episode(id)") - .parseMeta("FOREIGN KEY(list_id) REFERENCES reading_list(id)") - .parseMeta("FOREIGN KEY(external_list_id) REFERENCES external_reading_list(id)") - .parseMeta("FOREIGN KEY(external_uuid) REFERENCES external_user(uuid)") - .parseMeta( - "PRIMARY KEY(uuid, user_uuid, news_id, medium_id, part_id," + - "episode_id, list_id, external_list_id, external_uuid)", - ) - .build(); - export const databaseSchema = dataBaseBuilder.build(); diff --git a/packages/core/src/database/databaseTypes.ts 
b/packages/core/src/database/databaseTypes.ts index f06a96f4..fda79343 100644 --- a/packages/core/src/database/databaseTypes.ts +++ b/packages/core/src/database/databaseTypes.ts @@ -1,15 +1,16 @@ import { TableSchema } from "./tableSchema"; -import { Trigger } from "./trigger"; import { DatabaseContext } from "./contexts/databaseContext"; -import { Uuid, EmptyPromise } from "../types"; +import { Uuid, EmptyPromise, ReleaseState, ScrapeName, JobState } from "../types"; +import z from "zod"; +import { MediaType } from "../tools"; +import { SqlSqlToken } from "slonik"; export interface DatabaseSchema { readonly version: number; - readonly triggers: readonly Trigger[]; + readonly triggers: ReadonlyArray>; readonly tables: readonly TableSchema[]; - readonly mainTable: TableSchema; - readonly invalidationTable: TableSchema; readonly migrations: readonly Migration[]; + readonly procedures: ReadonlyArray>; } // for operations which alter things, like tables and cannot be done by simple insert or delete operations @@ -47,21 +48,16 @@ export enum ColumnType { INT = "INT", } -export enum InvalidationType { - INSERT = 0x1, - UPDATE = 0x2, - DELETE = 0x4, - INSERT_OR_UPDATE = INSERT | UPDATE, - INSERT_OR_DELETE = INSERT | DELETE, - ANY = INSERT | UPDATE | DELETE, -} - export interface ConnectionContext { startTransaction(): EmptyPromise; commit(): EmptyPromise; rollback(): EmptyPromise; + + markAborted(): void; + + aborted(): boolean; } export interface ChangeUser { @@ -76,3 +72,422 @@ export interface NewsItemRequest { till?: Date; newsIds?: number[]; } + +const id = z.number().int().min(1); + +// Copied from https://github.com/colinhacks/zod#json-type +const literalSchema = z.union([z.string(), z.number(), z.boolean(), z.null()]); +type Literal = z.infer; +type Json = Literal | { [key: string]: Json } | Json[]; +const jsonSchema: z.ZodType = z.lazy(() => z.union([literalSchema, z.array(jsonSchema), z.record(jsonSchema)])); + +/** + * What one would get from select * from 
app_events; + */ +export const appEvent = z.object({ + id, + program: z.enum(["server", "crawler"]), + date: z.date(), + type: z.enum(["end", "start"]), +}); + +export interface AppEvent extends z.infer {} + +export const entity = z.object({ + id, +}); + +export interface Entity extends z.infer {} + +/** + * What one would get from select * from custom_hook; + */ +export const customHook = z.object({ + id, + name: z.string().min(1), + state: jsonSchema, + updatedAt: z.date().optional(), + enabled: z.boolean(), + comment: z.string(), +}); + +export interface CustomHook extends z.infer {} + +export const dbTrigger = z.object({ + table: z.string().min(1), + event: z.string().min(1), + timing: z.string().min(1), + trigger: z.string().min(1), +}); + +export interface DbTrigger extends z.infer {} + +export const minimalRelease = z.object({ + episodeId: id, + url: z.string().url(), +}); + +export interface MinimalRelease extends z.infer {} + +/** + * What one would get from select * from episode_release; + */ +export const simpleRelease = minimalRelease.extend({ + id, + title: z.string().min(1), + releaseDate: z.date(), + locked: z.boolean(), + sourceType: z.string().nullish(), + tocId: z.number().int().min(1).nullish(), +}); + +export interface SimpleRelease extends z.infer {} + +export const displayRelease = minimalRelease.extend({ + title: z.string(), + mediumId: id, + locked: z.boolean(), + date: z.date(), + progress: z.number().min(0).max(1), +}); + +export interface DisplayRelease extends z.infer {} + +export const minimalMedium = z.object({ + title: z.string(), + medium: z.nativeEnum(MediaType), + id, +}); + +export interface MinimalMedium extends z.infer {} + +export const mediumRelease = minimalRelease.extend({ + title: z.string().min(1), + combiIndex: z.number(), + locked: z.boolean(), + date: z.date(), +}); + +export interface MediumRelease extends z.infer {} + +/** + * What one would get from select * from episode; + */ +export const simpleEpisode = z.object({ 
+ id, + partId: id, + combiIndex: z.number(), + totalIndex: z.number().int(), + partialIndex: z.number().int().nullish(), +}); + +export interface SimpleEpisode extends z.infer {} + +export const simpleEpisodeReleases = simpleEpisode.extend({ + releases: z.array(simpleRelease), +}); + +export interface SimpleEpisodeReleases extends z.infer {} + +export const pureEpisode = simpleEpisode.extend({ + readDate: z.date().nullable(), + progress: z.number().min(0).max(1), +}); + +export interface PureEpisode extends z.infer {} + +export const episode = pureEpisode.extend({ + releases: z.array(simpleRelease), +}); + +export interface Episode extends z.infer {} + +export const episodeContentData = z.object({ + episodeTitle: z.string(), + index: z.number(), + mediumTitle: z.string(), +}); + +export interface EpisodeContentData extends z.infer {} + +export const simpleReadEpisode = z.object({}); + +export interface SimpleReadEpisode extends z.infer {} + +export const simpleExternalList = z.object({ + id, + userUuid: z.string().uuid(), + name: z.string(), + medium: z.number(), + url: z.string().url(), +}); + +export interface SimpleExternalList extends z.infer {} + +export const externalList = simpleExternalList.extend({ + items: z.array(id), +}); + +export interface ExternalList extends z.infer {} + +export const simpleExternalUser = z.object({ + localUuid: z.string().uuid(), + uuid: z.string().uuid(), + identifier: z.string().min(1), + type: z.number(), + lastScrape: z.date().nullish(), + cookies: z.string().nullish().nullish(), +}); + +export interface SimpleExternalUser extends z.infer {} + +export const basicDisplayExternalUser = z.object({ + localUuid: z.string().uuid(), + uuid: z.string().uuid(), + identifier: z.string().min(1), + type: z.number(), +}); + +export interface BasicDisplayExternalUser extends z.infer {} + +export const displayExternalUser = basicDisplayExternalUser.extend({ + lists: z.array(externalList), +}); + +export interface DisplayExternalUser extends 
z.infer {} + +export const simpleExternalUserListed = simpleExternalUser.extend({ + lists: z.array(externalList), +}); + +export interface SimpleExternalUserListed extends z.infer {} + +/** + * What one would get from select * from part; + */ +export const simplePart = z.object({ + id, + mediumId: id, + title: z.string(), + combiIndex: z.number(), + totalIndex: z.number().int(), + partialIndex: z.number().int().nullish(), +}); + +export interface SimplePart extends z.infer {} + +export const simpleMedium = z.object({ + id, + title: z.string().min(1), + medium: z.nativeEnum(MediaType), + countryOfOrigin: z.string().nullish(), + languageOfOrigin: z.string().nullish(), + author: z.string().nullish(), + artist: z.string().nullish(), + lang: z.string().nullish(), + stateOrigin: z.nativeEnum(ReleaseState).nullish(), + stateTl: z.nativeEnum(ReleaseState).nullish(), + series: z.string().nullish(), + universe: z.string().nullish(), +}); + +export interface SimpleMedium extends z.infer {} + +export const simpleList = z.object({ + id, + userUuid: z.string().uuid(), + name: z.string().min(1), + medium: z.number().min(0).int(), +}); + +export interface SimpleList extends z.infer {} + +export const userList = z.object({ + id, + medium: z.number().min(0).int(), + name: z.string().min(1), +}); + +export interface UserList extends z.infer {} + +export const mediumInWait = z.object({ + title: z.string(), + medium: z.nativeEnum(MediaType), + link: z.string().url(), +}); + +export interface MediumInWait extends z.infer {} + +export const minimalMediumtoc = z.object({ + id, + mediumId: id, + link: z.string().url(), +}); + +export interface MinimalMediumtoc extends z.infer {} + +export const simpleMediumToc = minimalMediumtoc.extend({ + title: z.string(), + medium: z.nativeEnum(MediaType).or(z.literal(0)), + countryOfOrigin: z.string().nullish(), + languageOfOrigin: z.string().nullish(), + author: z.string().nullish(), + artist: z.string().nullish(), + lang: z.string().nullish(), + 
stateOrigin: z.nativeEnum(ReleaseState).nullish(), + stateTl: z.nativeEnum(ReleaseState).nullish(), + series: z.string().nullish(), + universe: z.string().nullish(), +}); + +export interface SimpleMediumToc extends z.infer {} + +export const simpleJob = z.object({ + id, + enabled: z.boolean(), + name: z.string().min(1), + type: z.nativeEnum(ScrapeName), + state: z.nativeEnum(JobState), + deleteAfterRun: z.boolean(), + interval: z.number().int().min(60000), + // TODO: maybe remove this column, i dont need it either way + runAfter: z.number().int().nullish(), + runningSince: z.date().nullish(), + nextRun: z.date().nullish(), + lastRun: z.date().nullish(), + arguments: z.string().nullish(), +}); + +export interface SimpleJob extends z.infer {} + +export const simpleJobHistory = z.object({ + id, + name: z.string().min(1), + type: z.nativeEnum(ScrapeName), + arguments: z.string().nullish(), + scheduledAt: z.date().nullish(), + start: z.date(), + end: z.date(), + result: z.enum(["warning", "failed", "success"]), + message: jsonSchema, + context: z.string(), + created: z.number().int(), + updated: z.number().int(), + deleted: z.number().int(), + queries: z.number().int(), + networkQueries: z.number().int(), + networkReceived: z.number().int(), + networkSend: z.number().int(), + // TODO: previously generated columns in mariadb + lagging: z.number().int().optional(), + duration: z.number().int().optional(), +}); + +export interface SimpleJobHistory extends z.infer {} + +export const simpleJobStatSummary = z.object({ + name: z.string().min(1), + type: z.string().min(1), + count: z.number().int(), + failed: z.number().int(), + succeeded: z.number().int(), + networkRequests: z.number().int(), + minNetworkRequests: z.number().int(), + maxNetworkRequests: z.number().int(), + networkSend: z.number().int(), + minNetworkSend: z.number().int(), + maxNetworkSend: z.number().int(), + networkReceived: z.number().int(), + minNetworkReceived: z.number().int(), + maxNetworkReceived: 
z.number().int(), + duration: z.number().int(), + minDuration: z.number().int(), + maxDuration: z.number().int(), + updated: z.number().int(), + minUpdated: z.number().int(), + maxUpdated: z.number().int(), + created: z.number().int(), + minCreated: z.number().int(), + maxCreated: z.number().int(), + deleted: z.number().int(), + minDeleted: z.number().int(), + maxDeleted: z.number().int(), + sqlQueries: z.number().int(), + minSqlQueries: z.number().int(), + maxSqlQueries: z.number().int(), + lagging: z.number().int(), + minLagging: z.number().int(), + maxLagging: z.number().int(), +}); + +export interface SimpleJobStatSummary extends z.infer {} + +export const mediumSynonym = z.object({ + id, + synonym: z.string().min(1), +}); + +export interface MediumSynonym extends z.infer {} + +export const linkValue = z.object({ + link: z.string().url(), +}); + +export interface LinkValue extends z.infer {} + +// contrary to `entity`, also allow zero as id +export const softInsertEntity = z.object({ + id: z.number().int().min(0), +}); + +export interface SoftInsertEntity extends z.infer {} + +export const simpleNews = z.object({ + id, + title: z.string(), + link: z.string().url(), + // TODO: this needs a default value of NOW() + date: z.date().nullish(), +}); + +export interface SimpleNews extends z.infer {} + +export const news = simpleNews.extend({ + read: z.boolean(), +}); + +export interface News extends z.infer {} + +export const notification = z.object({ + id, + title: z.string(), + content: z.string(), + date: z.date(), + key: z.string(), + type: z.string(), +}); + +export interface Notification extends z.infer {} + +export const userNotification = notification.extend({ + read: z.boolean(), +}); + +export interface UserNotification extends z.infer {} + +export const simpleScraperHook = z.object({ + id, + name: z.string().min(1), + enabled: z.boolean(), + message: z.string(), +}); + +export interface SimpleScraperHook extends z.infer {} + +export const simpleUser = 
z.object({ + name: z.string(), + uuid: z.string(), + password: z.string(), + alg: z.string(), + salt: z.string().nullable(), +}); diff --git a/packages/core/src/database/databaseValidator.ts b/packages/core/src/database/databaseValidator.ts deleted file mode 100644 index 2eea20b9..00000000 --- a/packages/core/src/database/databaseValidator.ts +++ /dev/null @@ -1,733 +0,0 @@ -import logger from "../logger"; -import { ColumnType, DatabaseSchema, InvalidationType, Modifier } from "./databaseTypes"; -import { TableSchema } from "./tableSchema"; -import { ColumnSchema } from "./columnSchema"; -import { parseDataColumn, parseForeignKey, parsePrimaryKey } from "./tableParser"; -import { equalsIgnore, getElseSet, isString, unique } from "../tools"; -import mySql from "promise-mysql"; -import { Uuid, MultiSingleValue, EmptyPromise, Optional, Nullable } from "../types"; -import { DatabaseContext } from "./contexts/databaseContext"; -import validate from "validate.js"; -import { Counter } from "../counter"; -import { DatabaseError, SchemaError } from "../error"; - -interface StateProcessorInterface { - addSql(query: string, parameter: MultiSingleValue, value: T, uuid?: Uuid): T; - - startRound(): Promise; - - checkTables(tables: any, track: string[], ignore: string[]): void; - - initTableSchema(database: DatabaseSchema): void; - - checkTableSchema(context: DatabaseContext): EmptyPromise; - - validateQuery(query: string, parameter: any): EmptyPromise; -} - -interface Trigger { - table: TableSchema; - - triggerType: InvalidationType; - - updateInvalidationMap(query: Query, invalidationMap: Map): void; -} - -interface StateProcessorImpl extends StateProcessorInterface { - databaseName: string; - workingPromise: EmptyPromise; - readonly sqlHistory: RawQuery[]; - tables: TableSchema[]; - invalidationTable: Nullable; - mainTable: Nullable; - trigger: Trigger[]; - - _process(): Promise; - - startRound(): Promise; - - checkTableSchema(context: DatabaseContext): EmptyPromise; - - 
checkTables(tables: any, track: string[], ignore: string[]): void; - - initTableSchema(database: DatabaseSchema): void; - - addSql(query: string, parameter: MultiSingleValue, value: T, uuid?: Uuid): T; - - validateQuery(query: string, parameter: any): EmptyPromise; -} - -interface Invalidation { - table: string; - foreignColumn: string; - keyColumn: string; - uuid?: Uuid; - values: any[]; -} - -interface RawQuery { - rawQuery: string; - - parameter: MultiSingleValue; - - changedRows: number; - - affectedRows: number; - - uuid?: Uuid; -} - -interface Query extends RawQuery { - operation: InvalidationType; - target: TableSchema; - columnTarget: Array>; -} - -interface Parser { - parse(value: RawQuery): Nullable; -} - -const UpdateParser: Parser = { - parse(rawQuery: RawQuery): Nullable { - const query = rawQuery.rawQuery; - const exec = /update\s+(\w+)\s+set.+\s+WHERE\s+(\w+)\s*=\s*\?/i.exec(query); - - if (!exec) { - logger.warn(`could not parse update query: '${query}'`); - return null; - } - const [, table, idConditionColumn] = exec; - const tableMeta = stateProcessorImpl.tables.find((value) => value.name === table); - - if (!tableMeta) { - logger.warn(`unknown table: '${table}'`); - return null; - } - - let idConditionColumnName = idConditionColumn.trim(); - // if column was escaped, remove the escape characters - if (idConditionColumnName.startsWith("`") && idConditionColumnName.endsWith("`")) { - idConditionColumnName = idConditionColumnName.substring(1, idConditionColumnName.length - 1).trim(); - } - - const column = tableMeta.primaryKeys.find((value) => value.name === idConditionColumnName); - - if (!column) { - // TODO: 21.06.2019 somehow do this - // search for other possible keys, like foreign keys which are primary keys? 
- logger.warn(`condition column is not a primary key: '${idConditionColumnName}'`); - return null; - } - let idValue; - - if (Array.isArray(rawQuery.parameter)) { - idValue = rawQuery.parameter[rawQuery.parameter.length - 1]; - } else { - logger.warn(`suspicious update query: '${query}' with less than two parameter: '${rawQuery.parameter + ""}'`); - return null; - } - return { - rawQuery: query, - parameter: rawQuery.parameter, - affectedRows: rawQuery.affectedRows, - changedRows: rawQuery.changedRows, - operation: InvalidationType.UPDATE, - target: tableMeta, - uuid: rawQuery.uuid, - columnTarget: [{ column, value: idValue }], - }; - }, -}; -const InsertParser: Parser = { - parse(rawQuery: RawQuery): Nullable { - const query = rawQuery.rawQuery; - const exec = /insert.+into\s+`?(\w+)`?\s*(\(.+\))?\s+VALUES\s*\((.+)\);?/i.exec(query); - if (!exec) { - // warn only if it is not a 'insert ... into .... select ...' query - if (!/insert.+into\s+`?(\w+)`?\s*(\(.+\))?\s*select.+?/i.test(query)) { - logger.warn(`could not parse insert query: '${query}'`); - } - return null; - } - const [, tableName, insertColumns, insertValues] = exec; - const table = stateProcessorImpl.tables.find((value) => value.name === tableName); - - if (!table) { - logger.warn(`unknown table: '${tableName}'`); - return null; - } - let columns; - if (!insertColumns) { - columns = table.columns.map((value) => value.name); - } else { - columns = insertColumns - // remove the parenthesis - .substring(1, insertColumns.length - 1) - .split(",") - .map((value) => { - value = value.trim(); - // if column was escaped, remove the escape characters - if (value.startsWith("`") && value.endsWith("`")) { - value = value.substring(1, value.length - 1).trim(); - } - return value; - }); - } - const values: string[] = insertValues.split(",").map((value) => value.trim()); - - const columnLength = columns.length; - const valueLength = values.length; - if (valueLength < columnLength) { - logger.warn(`not enough values 
for the columns: expected ${columnLength}, got ${valueLength}`); - return null; - } else if (!(valueLength % columnLength)) { - logger.warn(`mismatching number of values for columns: expected ${columnLength}, got ${valueLength}`); - return null; - } - const columnTargets: Array> = []; - let singleParamUsed = false; - - for (let i = 0; i < columnLength; i++) { - const columnName = columns[i]; - let value = values[i]; - - if (value === "?") { - const parameter = rawQuery.parameter; - - if (Array.isArray(parameter)) { - if (!parameter.length) { - logger.warn(`not enough values for insert query: '${query}', Parameter: ${parameter + ""}`); - return null; - } - value = parameter.shift(); - } else { - if (singleParamUsed) { - logger.warn(`not enough values for insert query: '${query}', Parameter: ${parameter + ""}`); - return null; - } - singleParamUsed = true; - value = parameter; - } - } - const column = table.columns.find((tableColumn) => tableColumn.name === columnName); - if (!column) { - logger.warn(`could not find any columns for '${columnName}' in '${table.name}'`); - return null; - } - columnTargets.push({ column, value }); - } - return { - rawQuery: query, - parameter: rawQuery.parameter, - affectedRows: rawQuery.affectedRows, - changedRows: rawQuery.changedRows, - operation: InvalidationType.INSERT, - target: table, - uuid: rawQuery.uuid, - columnTarget: columnTargets, - }; - }, -}; -const DeleteParser: Parser = { - parse(rawQuery: RawQuery): Nullable { - const query = rawQuery.rawQuery; - const exec = /delete\s+from\s+(\w+)\s*(where\s+(.+))?;?/i.exec(query); - - if (!exec) { - logger.warn(`could not parse delete query: '${query}'`); - return null; - } - const [, tableName, , deleteCondition] = exec; - const table = stateProcessorImpl.tables.find((value) => value.name === tableName); - - if (!table) { - logger.warn(`unknown table: '${tableName}'`); - return null; - } - const columnTargets: any[] = []; - - if (deleteCondition) { - const conditionsParts = 
deleteCondition.split(/s+/); - - const column = /^`?\w+`?$/; - const equals = /^=$/; - const value = /^\?$/; - const concatenation = /^AND$/i; - const finish = /^;$/i; - - let previousState: Nullable = null; - let currentColumn: Nullable = null; - - for (const conditionPart of conditionsParts) { - if (previousState == null || previousState === concatenation) { - previousState = column; - - if (!column.test(conditionPart)) { - return null; - } - const columnName = conditionPart.substring(1, conditionPart.length - 1); - const matchedColumn = table.columns.find((tableColumn) => tableColumn.name === columnName); - - if (!matchedColumn) { - logger.warn(`unknown column: '${columnName}'`); - return null; - } - currentColumn = matchedColumn; - } else if (previousState === column) { - previousState = equals; - - if (!equals.test(conditionPart)) { - return null; - } - } else if (previousState === equals) { - previousState = value; - - if (!value.test(conditionPart)) { - return null; - } - columnTargets.push({ column: currentColumn, value: conditionPart }); - } else if (previousState === value) { - if (concatenation.test(conditionPart)) { - previousState = concatenation; - } else if (finish.test(conditionPart)) { - break; - } else { - return null; - } - } else { - return null; - } - } - } - return { - rawQuery: query, - parameter: rawQuery.parameter, - affectedRows: rawQuery.affectedRows, - changedRows: rawQuery.changedRows, - operation: InvalidationType.DELETE, - target: table, - uuid: rawQuery.uuid, - columnTarget: columnTargets, - }; - }, -}; - -// FIXME all parser cannot parse escaped id´s and values - -interface ColumnTarget { - value: any; - column: T; -} - -type ColumnConverter = (query: Query, triggeredColumn: ColumnTarget) => ColumnTarget; - -function createTrigger( - watchTable: TableSchema, - targetTable: TableSchema, - invalidationTable: TableSchema, - mainPrimaryKey: string, - columnConverter: ColumnConverter, - triggerType: InvalidationType, -): Trigger { - if 
(targetTable.primaryKeys.length !== 1) { - throw new SchemaError("targeted table does not has exact one primary key"); - } - - if (invalidationTable === watchTable || invalidationTable === targetTable) { - throw new SchemaError("invalidation table is not valid"); - } - return { - table: watchTable, - triggerType, - updateInvalidationMap(query: Query, invalidations: Map): void { - const triggeredColumn = query.columnTarget.find((value) => { - if (value.column.foreignKey && targetTable.primaryKeys.includes(value.column.foreignKey)) { - return true; - } - return targetTable.primaryKeys.includes(value.column); - }); - - if (!triggeredColumn) { - logger.warn("an trigger insert statement without the referenced key of target"); - return; - } - const invalidationColumn = columnConverter(query, triggeredColumn); - const key = `${invalidationTable.name}$${mainPrimaryKey}$${invalidationColumn.column}`; - const invalidation = getElseSet(invalidations, key, () => { - return { - table: invalidationTable.name, - foreignColumn: invalidationColumn.column, - keyColumn: mainPrimaryKey, - values: [], - }; - }); - - if (watchTable.mainDependent) { - if (!query.uuid) { - throw new SchemaError("missing uuid on dependant table"); - } - invalidation.uuid = mySql.escape(query.uuid); - } - if (!invalidation.values.includes(invalidationColumn.value)) { - invalidation.values.push(invalidationColumn.value); - } - }, - }; -} - -const queryTableReg = /((select .+? 
from)|(update )|(delete.+?from)|(insert.+?into )|(.+?join))\s*(\w+)/gi; -const queryColumnReg = /(((\w+\.)?(\w+))|\?)\s*(like|is|=|<|>|<>|<=|>=)\s*(((\w+\.)?(\w+))|\?)/gi; -const counter = new Counter(); -const stateProcessorImpl: StateProcessorImpl = { - databaseName: "", - workingPromise: Promise.resolve(), - sqlHistory: [], - tables: [], - mainTable: null, - invalidationTable: null, - trigger: [], - - async validateQuery(query: string, parameter: any) { - if (query.length > 20 && counter.count(query) === 100) { - console.log(`Query: '${query}' executed 100 times`); - } - if (counter.count("query") % 100 === 0) { - console.log(`Database queried ${counter.getCount("query")} times`); - } - let tableExec = queryTableReg.exec(query); - - if (!tableExec) { - return; - } - const tables = []; - while (tableExec) { - if (tables.length > this.tables.length * 5) { - throw new SchemaError("too many tables: regExp is faulty"); - } - if (tableExec[7]) { - tables.push(tableExec[7]); - } - tableExec = queryTableReg.exec(query); - } - let columnExec = queryColumnReg.exec(query); - const columns = []; - - while (columnExec) { - if (columnExec[1] === "?") { - columns.push(columnExec[6]); - } - if (columnExec[6] === "?") { - columns.push(columnExec[1]); - } - columnExec = queryColumnReg.exec(query); - } - const referencedTables = unique( - tables - .map((name) => { - const foundTable = this.tables.find((value) => equalsIgnore(value.name, name)); - - if (!foundTable) { - throw new DatabaseError(`Unknown Table: '${name}'`); - } - - return foundTable; - }) - .filter((value) => value), - ); - - for (let i = 0; i < columns.length; i++) { - const columnName = columns[i]; - - const separator = columnName.indexOf("."); - - let columnSchema: Optional; - if (separator >= 0) { - const tableName = columnName.substring(0, separator); - const foundTable = this.tables.find((value) => equalsIgnore(value.name, tableName)); - - if (!foundTable) { - throw new DatabaseError(`Unknown Table: 
'${tableName}'`); - } - const realColumnName = columnName.substring(separator + 1); - columnSchema = foundTable.columns.find((schema) => { - return equalsIgnore(schema.name, realColumnName); - }); - } else { - for (const referencedTable of referencedTables) { - const foundColumn = referencedTable.columns.find((schema) => { - return equalsIgnore(schema.name, columnName); - }); - - if (foundColumn) { - columnSchema = foundColumn; - break; - } - } - } - - if (!columnSchema) { - // TODO look into why he cant find it - logger.silly(`Unknown Column: '${columnName}', no reference found in query: '${query}'`); - return; - } - let columnValue: any; - - if (Array.isArray(parameter)) { - columnValue = parameter[i]; - } else { - if (i === 0) { - columnValue = parameter; - } else { - throw new SchemaError("Number of Values and Placeholders do not match, one value but multiple placeholders"); - } - } - const columnTable = columnSchema.table?.name; - - const notNull = columnValue != null; - - if (columnSchema.type === ColumnType.INT && notNull && !Number.isInteger(columnValue)) { - throw new SchemaError(`non integer value on int column: '${columnName}' in table '${columnTable + ""}'`); - } - - if (columnSchema.type === ColumnType.FLOAT && notNull && !validate.isNumber(columnValue)) { - throw new SchemaError(`non number value on float column: '${columnName}' in table '${columnTable + ""}'`); - } - - if (columnSchema.type === ColumnType.BOOLEAN && notNull && !validate.isBoolean(columnValue)) { - throw new SchemaError(`non boolean value on boolean column: '${columnName}' in table '${columnTable + ""}'`); - } - - if ( - columnSchema.type === ColumnType.DATETIME && - notNull && - (!validate.isDate(columnValue) || Number.isNaN(columnValue.getDate())) - ) { - throw new SchemaError(`no valid date value on date column: '${columnName}' in table '${columnTable + ""}'`); - } - - if ( - (columnSchema.type === ColumnType.TEXT || columnSchema.type === ColumnType.VARCHAR) && - notNull && - 
!isString(columnValue) - ) { - throw new SchemaError(`no string value on string column: '${columnName}' in table '${columnTable + ""}'`); - } - - if (columnSchema.modifiers.includes(Modifier.NOT_NULL) && !notNull) { - throw new SchemaError(`null/undefined on not nullable column: '${columnName}' in table '${columnTable + ""}'`); - } - - if (columnSchema.modifiers.includes(Modifier.UNSIGNED) && notNull && columnValue < 0) { - throw new SchemaError(`negative number on unsigned column: '${columnName}' in table '${columnTable + ""}'`); - } - } - }, - - addSql(query: string, parameter: MultiSingleValue, value: any, uuid?: string): any { - if ( - value && - ((Number.isInteger(value.affectedRows) && value.affectedRows) || - (Number.isInteger(value.changedRows) && value.changedRows)) - ) { - this.sqlHistory.push({ - rawQuery: query, - parameter, - changedRows: value.changedRows, - affectedRows: value.affectedRows, - uuid, - }); - logger.debug(`Query: '${query}', Parameter: '${parameter + ""}'`); - } - return value; - }, - - startRound(): Promise { - return new Promise((resolve, reject) => { - this.workingPromise = this.workingPromise - .then(() => this._process()) - .then((value) => { - resolve(value); - }) - .catch((reason) => reject(reason)); - }); - }, - - async _process(): Promise { - if (!this.sqlHistory.length) { - return []; - } - const rawQueries = [...this.sqlHistory]; - this.sqlHistory.length = 0; - const updateReg = /^\s*Update/i; - const insertReg = /^\s*Insert/i; - const deleteReg = /^\s*Delete/i; - - const queries: Query[] = []; - - for (const rawQuery of rawQueries) { - let parser: Parser; - if (updateReg.test(rawQuery.rawQuery)) { - parser = UpdateParser; - } else if (insertReg.test(rawQuery.rawQuery)) { - parser = InsertParser; - } else if (deleteReg.test(rawQuery.rawQuery)) { - parser = DeleteParser; - } else { - continue; - } - const query = parser.parse(rawQuery); - if (query) { - queries.push(query); - } - } - const invalidationMap: Map = new Map(); - - 
for (const query of queries) { - this.trigger - .filter((value) => value.triggerType === query.operation && value.table === query.target) - .forEach((value) => value.updateInvalidationMap(query, invalidationMap)); - } - const invalidationQueries: string[] = []; - for (const value of invalidationMap.values()) { - let sqlQuery = - `INSERT IGNORE INTO ${mySql.escapeId(value.table)} ` + - `(${mySql.escapeId(value.foreignColumn)}, ${mySql.escapeId(value.keyColumn)}) `; - - if (value.uuid) { - const values = value.values.map((v) => `SELECT ${mySql.escape(v)},${mySql.escape(value.uuid)}`).join(" UNION "); - sqlQuery += `VALUES (${values})`; - } else { - const values = value.values.map((v) => `SELECT ${mySql.escape(v)}`).join(" UNION "); - sqlQuery += ` SELECT * FROM (${values}) AS value JOIN (SELECT uuid FROM user) AS user`; - } - invalidationQueries.push(sqlQuery); - } - return invalidationQueries; - }, - - initTableSchema(database: DatabaseSchema): void { - this.tables = [...database.tables]; - const mainTable = database.mainTable; - const invalidationTable = database.invalidationTable; - - this.mainTable = database.mainTable; - this.invalidationTable = database.invalidationTable; - - const primaryKey = mainTable.primaryKeys[0]; - const mainPrimaryKey = primaryKey.name; - - // TODO inline this function to create trigger? 
- const columnConverter: ColumnConverter = (query, triggeredColumn) => { - let value = triggeredColumn.value; - - const tableKey = triggeredColumn.column; - - if (tableKey === primaryKey) { - value = true; - } - const found = invalidationTable.foreignKeys.find( - (column) => column.foreignKey === tableKey || column.foreignKey === tableKey.foreignKey, - ); - if (!found) { - throw new SchemaError(`no corresponding foreign key in invalidationTable for column '${tableKey.name}'`); - } - return { column: found.name, value }; - }; - - for (const table of this.tables) { - for (const invalidation of table.invalidations) { - const watchTable = invalidation.table; - - if (invalidation.type & InvalidationType.INSERT) { - const trigger = createTrigger( - watchTable, - table, - invalidationTable, - mainPrimaryKey, - columnConverter, - InvalidationType.INSERT, - ); - this.trigger.push(trigger); - } - if (invalidation.type & InvalidationType.UPDATE) { - const trigger = createTrigger( - watchTable, - table, - invalidationTable, - mainPrimaryKey, - columnConverter, - InvalidationType.UPDATE, - ); - this.trigger.push(trigger); - } - if (invalidation.type & InvalidationType.DELETE) { - const trigger = createTrigger( - watchTable, - table, - invalidationTable, - mainPrimaryKey, - columnConverter, - InvalidationType.DELETE, - ); - this.trigger.push(trigger); - } - } - } - }, - - async checkTableSchema(context: DatabaseContext): EmptyPromise { - // display all current tables - const tables: any[] = await context.getTables(); - - const enterpriseTableProperty = `Tables_in_${this.databaseName}`; - - // create tables which do not exist - await Promise.all( - this.tables - .filter((tableSchema) => !tables.find((table: any) => table[enterpriseTableProperty] === tableSchema.name)) - .map((tableSchema) => { - const schema = tableSchema.getTableSchema(); - return context.createTable(schema.name, schema.columns); - }), - ); - }, - - checkTables(tables: any, track: string[], ignore: string[]) { - 
const separator = /\s+/; - - for (const [tablesKey, tableValue] of Object.entries(tables)) { - // @ts-expect-error - const tableDeclaration: string = tableValue; - - if (ignore.includes(tableDeclaration)) { - continue; - } - const table = new TableSchema([], tablesKey); - - for (const declaration of tableDeclaration.trim().split(",")) { - const declarationParts: string[] = declaration.trim().split(separator); - - if (!declarationParts.length) { - logger.warn(`${tablesKey}has empty declaration`); - continue; - } - const keyPart = `${declarationParts[0]} ${declarationParts[1]}`.toUpperCase(); - - if (keyPart === "PRIMARY KEY") { - parsePrimaryKey(table, this.tables, declaration); - } else if (keyPart === "FOREIGN KEY") { - parseForeignKey(table, this.tables, declaration); - } else { - const column = parseDataColumn(table, this.tables, declaration); - - if (column) { - table.columns.push(column); - } - } - } - this.tables.push(table); - } - }, -}; - -export const StateProcessor: StateProcessorInterface = stateProcessorImpl; diff --git a/packages/core/src/database/migrations.ts b/packages/core/src/database/migrations.ts index 53a42cc9..7d3ce0ee 100644 --- a/packages/core/src/database/migrations.ts +++ b/packages/core/src/database/migrations.ts @@ -1,320 +1,3 @@ import { Migration } from "./databaseTypes"; -import { MysqlServerError } from "./mysqlError"; -import { DatabaseContext } from "./contexts/databaseContext"; -import { EmptyPromise } from "../types"; -function ignoreError(func: () => EmptyPromise, ignoreErrno: number[]): EmptyPromise { - return func().catch((reason) => { - if (reason && Number.isInteger(reason.errno) && !ignoreErrno.includes(reason.errno)) { - throw reason; - } - }); -} - -export const Migrations: Migration[] = [ - { - fromVersion: 0, - toVersion: 1, - async migrate(context: DatabaseContext): EmptyPromise { - await ignoreError(async () => { - await context.addColumn("episode", "combiIndex double DEFAULT 0"); - await context.query( - "UPDATE 
episode SET combiIndex=(concat(`totalIndex`, '.', coalesce(`partialIndex`, 0)) + 0)", - ); - }, [MysqlServerError.ER_DUP_FIELDNAME]); - await ignoreError(() => context.addColumn("scrape_board", "info TEXT"), [MysqlServerError.ER_DUP_FIELDNAME]); - await ignoreError( - () => context.addColumn("scrape_board", "external_uuid char(36)"), - [MysqlServerError.ER_DUP_FIELDNAME], - ); - await ignoreError(async () => { - await context.addColumn("part", "combiIndex double DEFAULT 0"); - await context.query("UPDATE part SET combiIndex=(concat(`totalIndex`, '.', coalesce(`partialIndex`, 0)) + 0)"); - }, [MysqlServerError.ER_DUP_FIELDNAME]); - await context.alterColumn("external_user", "uuid char(36)"); - await context.alterColumn("scrape_board", "link varchar(500)"); - await context.alterColumn("user_data_invalidation", "external_uuid char(36)"); - - await context.addUnique("episode", "UNIQUE_EPISODE", "part_id", "combiIndex"); - - await context.addUnique("medium", "UNIQUE", "title", "medium"); - - await context.dropIndex("news_board", "link"); - await context.addUnique("news_board", "link_UNIQUE", "link"); - - await context.dropForeignKey("news_user", "news_user_ibfk_2"); - await context.addForeignKey("news_user", "news_user_ibfk_1", "user_id", "user", "uuid"); - await context.addForeignKey("news_user", "news_user_ibfk_2", "news_id", "news_board", "id"); - - await context.addUnique("part", "UNIQUE_PART", "medium_id", "combiIndex"); - - await context.dropPrimaryKey("scrape_board"); - await context.addPrimaryKey("scrape_board", "link", "type"); - - // tslint:disable-next-line - await context.addForeignKey("scrape_board", "scrape_board_ibfk_1", "external_uuid", "external_user", "uuid"); - await context.addForeignKey("scrape_board", "scrape_board_ibfk_3", "uuid", "user", "uuid"); - - await context.parentContext.clearInvalidationTable(); - await ignoreError( - () => context.dropPrimaryKey("user_data_invalidation"), - [MysqlServerError.ER_CANT_DROP_FIELD_OR_KEY], - ); - await 
context.addUnique("user_data_invalidation", "UNIQUE_NEWS", "news_id", "uuid"); - await context.addUnique("user_data_invalidation", "UNIQUE_MEDIUM", "medium_id", "uuid"); - await context.addUnique("user_data_invalidation", "UNIQUE_PART", "part_id", "uuid"); - await context.addUnique("user_data_invalidation", "UNIQUE_EPISODE", "episode_id", "uuid"); - await context.addUnique("user_data_invalidation", "UNIQUE_LIST", "list_id", "uuid"); - await context.addUnique("user_data_invalidation", "UNIQUE_EXTERNAL_LIST", "external_list_id", "uuid"); - await context.addUnique("user_data_invalidation", "UNIQUE_EXTERNAL_USER", "external_uuid", "uuid"); - await context.addUnique("user_data_invalidation", "UNIQUE_USER", "user_uuid", "uuid"); - }, - }, - { - fromVersion: 1, - toVersion: 2, - async migrate(context: DatabaseContext): EmptyPromise { - await ignoreError( - () => context.addColumn("episode_release", "locked BOOLEAN DEFAULT 0"), - [MysqlServerError.ER_DUP_FIELDNAME], - ); - }, - }, - { - fromVersion: 2, - toVersion: 3, - async migrate(context: DatabaseContext): EmptyPromise { - await ignoreError( - () => context.changeColumn("scrape_board", "last_date", "next_scrape", "datetime"), - [MysqlServerError.ER_BAD_FIELD_ERROR], - ); - }, - }, - { - fromVersion: 3, - toVersion: 4, - async migrate(context: DatabaseContext): EmptyPromise { - await context.alterColumn("episode_release", "url varchar(767) not null"); - await context.alterColumn("meta_corrections", "link VARCHAR(767) NOT NULL"); - await context.alterColumn("page_info", "link VARCHAR(767) NOT NULL"); - await context.alterColumn("medium_toc", "link VARCHAR(767) NOT NULL"); - await context.alterColumn("medium_in_wait", "link VARCHAR(767) NOT NULL"); - }, - }, - { - fromVersion: 4, - toVersion: 5, - async migrate(): EmptyPromise { - // empty migration as it adds trigger only - }, - }, - { - fromVersion: 5, - toVersion: 6, - async migrate(context: DatabaseContext): EmptyPromise { - await context.addColumn("jobs", 
"runningSince DATETIME"); - }, - }, - { - fromVersion: 6, - toVersion: 7, - async migrate(context: DatabaseContext): EmptyPromise { - // implicit drop of all triggers which insert rows in user_data_invalidation table - await Promise.all( - [ - "reading_list", - "external_reading_list", - "medium", - "part", - "episode", - "episode_release", - "news_board", - "medium_in_wait", - ].map((value) => - ignoreError( - () => - context.addColumn(value, "updated_at TIMESTAMP DEFAULT CURRENT_TIMESTAMP ON UPDATE CURRENT_TIMESTAMP"), - [MysqlServerError.ER_DUP_FIELDNAME], - ), - ), - ); - await ignoreError( - () => context.addColumn("external_user", "updated_at TIMESTAMP DEFAULT CURRENT_TIMESTAMP"), - [MysqlServerError.ER_DUP_FIELDNAME], - ); - }, - }, - { - fromVersion: 7, - toVersion: 8, - async migrate(context: DatabaseContext): EmptyPromise { - await ignoreError( - () => - context.addColumn("medium_toc", "updated_at TIMESTAMP DEFAULT CURRENT_TIMESTAMP ON UPDATE CURRENT_TIMESTAMP"), - [MysqlServerError.ER_DUP_FIELDNAME], - ); - }, - }, - { - fromVersion: 8, - toVersion: 9, - async migrate(context: DatabaseContext): EmptyPromise { - await ignoreError( - () => context.dropForeignKey("medium_toc", "medium_toc_ibfk_1"), - [MysqlServerError.ER_CANT_DROP_FIELD_OR_KEY], - ); - await ignoreError(() => context.dropPrimaryKey("medium_toc"), [MysqlServerError.ER_CANT_DROP_FIELD_OR_KEY]); - await Promise.all( - [ - "id INT UNSIGNED NOT NULL AUTO_INCREMENT PRIMARY KEY", - "countryOfOrigin VARCHAR(200)", - "languageOfOrigin VARCHAR(200)", - "author VARCHAR(200)", - "artist VARCHAR(200)", - "title VARCHAR(200) NOT NULL", - "medium INT NOT NULL", - "lang VARCHAR(200)", - "stateOrigin INT", - "stateTL INT", - "series VARCHAR(200)", - "universe VARCHAR(200)", - ].map((value) => - ignoreError(() => context.addColumn("medium_toc", value), [MysqlServerError.ER_DUP_FIELDNAME]), - ), - ); - // no error should occur as primary is dropped before if available - // context.addPrimaryKey("medium_toc", 
"id"); - // no error should occur as foreign key is dropped before if available - context.addForeignKey("medium_toc", "medium_toc_ibfk_1", "medium_id", "medium", "id"); - await ignoreError( - () => context.addUnique("medium_toc", "UNIQUE_TOC", "medium_id", "link"), - [MysqlServerError.ER_DUP_KEYNAME], - ); - - await ignoreError( - () => context.addColumn("episode_release", "toc_id INT UNSIGNED"), - [MysqlServerError.ER_DUP_FIELDNAME], - ); - await ignoreError( - () => context.addForeignKey("episode_release", "episode_release_ibfk_2", "toc_id", "medium_toc", "id"), - [MysqlServerError.ER_DUP_FIELDNAME], - ); - }, - }, - { - fromVersion: 9, - toVersion: 10, - async migrate(context: DatabaseContext): EmptyPromise { - // add index to speed up queries on episode_release where releaseDate is a big factor - await context.addIndex("episode_release", "episode_release_releaseDate_Index", ["releaseDate"]); - }, - }, - { - fromVersion: 10, - toVersion: 11, - async migrate(context: DatabaseContext): EmptyPromise { - // TODO: should i ask for user input before? 
- // remove all data, because this change is destructive - // one cannot/should not simulate the data for the new columns - await context.query("TRUNCATE job_history;"); - // add columns and ignore duplicate column error - await Promise.all( - ["result VARCHAR(100) NOT NULL", "message VARCHAR(200) NOT NULL", "context TEXT NOT NULL"].map((value) => - ignoreError(() => context.addColumn("job_history", value), [MysqlServerError.ER_DUP_FIELDNAME]), - ), - ); - // add not null restraint - await context.alterColumn("job_history", "start DATETIME NOT NULL"); - // add not null restraint - await context.alterColumn("job_history", "end DATETIME NOT NULL"); - }, - }, - { - fromVersion: 11, - toVersion: 12, - async migrate(context: DatabaseContext): EmptyPromise { - // Table 'scraper_hook is automatically added' - - // add columns and ignore duplicate column error - await Promise.all( - ["job_state VARCHAR(200) NOT NULL"].map((value) => - ignoreError(() => context.addColumn("jobs", value), [MysqlServerError.ER_DUP_FIELDNAME]), - ), - ); - }, - }, - { - fromVersion: 12, - toVersion: 13, - async migrate(context: DatabaseContext): EmptyPromise { - // add columns and ignore duplicate column error - await Promise.all( - ["scheduled_at DATETIME NOT NULL DEFAULT start"].map((value) => - ignoreError(() => context.addColumn("job_history", value), [MysqlServerError.ER_DUP_FIELDNAME]), - ), - ); - }, - }, - { - fromVersion: 13, - toVersion: 14, - async migrate(): EmptyPromise { - // empty migration as it adds trigger only - }, - }, - { - fromVersion: 14, - toVersion: 15, - async migrate(): EmptyPromise { - // Table 'custom_hook is automatically added' - }, - }, - { - fromVersion: 15, - toVersion: 16, - async migrate(context: DatabaseContext): EmptyPromise { - // add columns and ignore duplicate column error - await Promise.all( - ["hookState VARCHAR(200) NOT NULL", "comment TEXT NOT NULL"].map((value) => - ignoreError(() => context.addColumn("custom_hook", value), 
[MysqlServerError.ER_DUP_FIELDNAME]), - ), - ); - }, - }, - { - fromVersion: 16, - toVersion: 17, - async migrate(context: DatabaseContext): EmptyPromise { - // add columns and ignore duplicate column error - await ignoreError( - () => context.dropForeignKey("episode_release", "episode_release_ibfk_1"), - [MysqlServerError.ER_CANT_DROP_FIELD_OR_KEY], - ); - await ignoreError(() => context.dropPrimaryKey("episode_release"), [MysqlServerError.ER_CANT_DROP_FIELD_OR_KEY]); - - await context.addForeignKey("episode_release", "episode_release_ibfk_1", "episode_id", "episode", "id"); - await context.addUnique("episode_release", "UNIQUE_RELEASE", "episode_id", "url"); - - await Promise.all( - ["id INT UNSIGNED NOT NULL AUTO_INCREMENT PRIMARY KEY"].map((value) => - ignoreError(() => context.addColumn("episode_release", value), [MysqlServerError.ER_DUP_FIELDNAME]), - ), - ); - }, - }, - { - fromVersion: 17, - toVersion: 18, - async migrate(): EmptyPromise { - // Table 'notifications' is automatically added - }, - }, - { - fromVersion: 18, - toVersion: 19, - async migrate(): EmptyPromise { - // Table 'notifications_read' is automatically added - // foreign keys included - }, - }, -]; +export const Migrations: Migration[] = []; diff --git a/packages/core/src/database/schemaManager.ts b/packages/core/src/database/schemaManager.ts index 38fbfa06..7bc8ac7c 100644 --- a/packages/core/src/database/schemaManager.ts +++ b/packages/core/src/database/schemaManager.ts @@ -1,27 +1,28 @@ import { DatabaseSchema, Migration } from "./databaseTypes"; import { TableSchema } from "./tableSchema"; -import { Trigger } from "./trigger"; -import { delay, equalsIgnore } from "../tools"; +import { delay } from "../tools"; import { DatabaseContext } from "./contexts/databaseContext"; import logger from "../logger"; -import { EmptyPromise, Nullable } from "../types"; +import { EmptyPromise } from "../types"; import { MigrationError } from "../error"; +import { sql, SqlSqlToken } from "slonik"; +import 
{ joinIdentifier } from "./contexts/helper"; export class SchemaManager { private databaseName = ""; private dataBaseVersion = 0; private readonly tables: TableSchema[] = []; - private mainTable: Nullable = null; - private readonly trigger: Trigger[] = []; + private readonly trigger: SqlSqlToken[] = []; + private readonly procedures: SqlSqlToken[] = []; private readonly migrations: Migration[] = []; public initTableSchema(database: DatabaseSchema, databaseName: string): void { this.databaseName = databaseName; this.dataBaseVersion = database.version; - this.mainTable = database.mainTable; this.tables.push(...database.tables); this.trigger.push(...database.triggers); this.migrations.push(...database.migrations); + this.procedures.push(...database.procedures); } public async checkTableSchema(context: DatabaseContext): EmptyPromise { @@ -43,6 +44,7 @@ export class SchemaManager { try { await this.migrate(context); } finally { + // FIXME: if transaction/migrate failed, then this will fail with a more confusing error, masking the original one await context.stopMigration(); } } @@ -59,62 +61,87 @@ export class SchemaManager { private async createMissing(context: DatabaseContext): EmptyPromise { logger.info("Check on missing database structures"); - const tables: any[] = await context.getTables(); + let proceduresCreated = 0; - const enterpriseTableProperty = `Tables_in_${this.databaseName}`; + await Promise.all( + this.procedures.map(async (procedure) => { + const result = await context.con.query(procedure); + proceduresCreated += result.rowCount; + }), + ); + + const tables = await context.getTablesPg(); let tablesCreated = 0; // create tables which do not exist await Promise.all( this.tables - .filter((tableSchema) => !tables.find((table: any) => table[enterpriseTableProperty] === tableSchema.name)) + .filter((tableSchema) => !tables.find((table) => table.tablename === tableSchema.name)) .map(async (tableSchema) => { - const schema = tableSchema.getTableSchema(); - 
await context.createTable(schema.name, schema.columns); + const schema = tableSchema.schema; + + // create schema if it does not exist + await context.con.query(schema); + // sloppy fix to add a single row to the table, to get "startMigration" to work on empty table - if (schema.name === "enterprise_database_info") { - await context.query("INSERT INTO enterprise_database_info (version) VALUES (0)"); + if (tableSchema.name === "enterprise_database_info") { + await context.con.query(sql`INSERT INTO enterprise_database_info (version) VALUES (0)`); } tablesCreated++; }), ); - const dbTriggers = await context.getTriggers(); + const indices = await context.getIndices(); + let indicesCreated = 0; - let triggerCreated = 0; - let triggerDeleted = 0; - // create triggers which do not exist, and drops triggers which are not in schema + // create indices which do not yet exist on all tables await Promise.all( - this.trigger - .filter((trigger) => { - for (let i = 0; i < dbTriggers.length; i++) { - const dbTrigger = dbTriggers[i]; - - if ( - equalsIgnore(dbTrigger.Trigger, trigger.name) && - equalsIgnore(dbTrigger.Event, trigger.event) && - equalsIgnore(dbTrigger.Timing, trigger.timing) && - equalsIgnore(dbTrigger.Table, trigger.table) - ) { - dbTriggers.splice(i, 1); + this.tables.map(async (table) => { + if (!table.indices.length) { + return; + } + + for (const indexColumns of table.indices) { + const indexMatch = indices.find((dbIndex) => { + if (dbIndex.tableName !== table.name || dbIndex.columnNames.length !== indexColumns.length) { return false; } + + return dbIndex.columnNames.every((column) => indexColumns.includes(column)); + }); + + // create every index which does not exist with this column set combination on that table + if (!indexMatch) { + await context.con.query( + sql`CREATE INDEX ON ${sql.identifier([table.name])} (${joinIdentifier(indexColumns)});`, + ); + indicesCreated++; } - return true; - }) - .map((trigger) => context.createTrigger(trigger).then(() => 
triggerCreated++)), + } + }), + ); + + const dbTriggers = [...(await context.getTriggersPg())]; + + let triggerCreated = 0; + let triggerDeleted = 0; + // create triggers which do not exist, and drops triggers which are not in schema + await Promise.all( + this.trigger.map((trigger) => context.con.query(trigger).then((result) => (triggerCreated += result.rowCount))), ); // every trigger that is left over, is not in schema and ready to be dropped await Promise.all( dbTriggers - .filter((value) => this.tables.find((table) => table.name === value.Table)) - .map((value) => context.dropTrigger(value.Trigger).then(() => triggerDeleted++)), + .filter((value) => this.tables.find((table) => table.name === value.table)) + .map((value) => context.dropTrigger(value.trigger).then(() => triggerDeleted++)), ); logger.info("db check missing", { created_trigger: triggerCreated, deleted_trigger: triggerDeleted, tables_created: tablesCreated, + indices_created: indicesCreated, + procedures_created: proceduresCreated, }); } @@ -122,8 +149,8 @@ export class SchemaManager { const versionResult = await context.getDatabaseVersion(); let previousVersion = 0; - if (versionResult?.[0] && versionResult[0].version > 0) { - previousVersion = versionResult[0].version; + if (versionResult > 0) { + previousVersion = versionResult; } const currentVersion = this.dataBaseVersion; diff --git a/packages/core/src/database/sqlTools.ts b/packages/core/src/database/sqlTools.ts index 4c313620..ff87f00f 100644 --- a/packages/core/src/database/sqlTools.ts +++ b/packages/core/src/database/sqlTools.ts @@ -1,4 +1,4 @@ -import { OkPacket } from "mysql"; +import { QueryResult } from "pg"; import { getStore, StoreKey } from "../asyncStorage"; import { getElseSet, getElseSetObj } from "../tools"; @@ -11,7 +11,6 @@ interface Modification { export type QueryType = "select" | "update" | "insert" | "delete"; export type ModificationKey = | "progress" - | "result_episode" | "medium" | "part" | "episode" @@ -35,8 +34,8 @@ 
export type ModificationKey = * * @param key */ -export function storeModifications(key: ModificationKey, queryType: QueryType, result: OkPacket): void { - if (!result.affectedRows || (!result.changedRows && queryType === "update")) { +export function storeModifications(key: ModificationKey, queryType: QueryType, result: QueryResult): void { + if (!result.rowCount) { return; } const store = getStore(); @@ -50,11 +49,11 @@ export function storeModifications(key: ModificationKey, queryType: QueryType, r }); if (queryType === "delete") { - modification.deleted += result.affectedRows; + modification.deleted += result.rowCount; } else if (queryType === "insert") { - modification.created += result.affectedRows; + modification.created += result.rowCount; } else if (queryType === "update") { - modification.updated += result.changedRows; + modification.updated += result.rowCount; } } diff --git a/packages/core/src/database/storages/storage.ts b/packages/core/src/database/storages/storage.ts index 692b967b..28cf5342 100644 --- a/packages/core/src/database/storages/storage.ts +++ b/packages/core/src/database/storages/storage.ts @@ -1,28 +1,11 @@ -import mySql from "promise-mysql"; import env from "../../env"; -import { - Invalidation, - MetaResult, - Result, - Uuid, - PropertyNames, - StringKeys, - PromiseFunctions, - EmptyPromise, - MultiSingleValue, - Nullable, - DataStats, - NewData, - QueryItems, - QueryItemsResult, -} from "../../types"; +import { StringKeys, PromiseFunctions, EmptyPromise } from "../../types"; import logger from "../../logger"; import { databaseSchema } from "../databaseSchema"; -import { delay, isQuery, isString } from "../../tools"; +import { delay, isString } from "../../tools"; import { SchemaManager } from "../schemaManager"; -import { Query } from "mysql"; import { ContextCallback, ContextProvider, queryContextProvider } from "./storageTools"; -import { QueryContext } from "../contexts/queryContext"; +import { ContextConstructor, QueryContext } 
from "../contexts/queryContext"; import { ConnectionContext } from "../databaseTypes"; import { MysqlServerError } from "../mysqlError"; import { MediumContext } from "../contexts/mediumContext"; @@ -36,12 +19,32 @@ import { InternalListContext } from "../contexts/internalListContext"; import { ExternalUserContext } from "../contexts/externalUserContext"; import { ExternalListContext } from "../contexts/externalListContext"; import { ScraperHookContext } from "../contexts/scraperHookContext"; -import { SubContext } from "../contexts/subContext"; import { AppEventContext } from "../contexts/appEventContext"; import { CustomHookContext } from "../contexts/customHookContext"; import { DatabaseContext } from "../contexts/databaseContext"; import { DatabaseConnectionError } from "../../error"; import { NotificationContext } from "../contexts/notificationContext"; +import { types } from "pg"; +import { + ClientConfigurationInput, + ConnectionOptions, + createBigintTypeParser, + createIntervalTypeParser, + createNumericTypeParser, + createPool, + DatabasePool, + stringifyDsn, +} from "slonik"; +import { Readable } from "stream"; +import { GenericContext } from "../contexts/genericContext"; +import { createFieldNameTransformationInterceptor } from "slonik-interceptor-field-name-transformation"; +import { MediumTocContext } from "../contexts/mediumTocContext"; +import { EpisodeReleaseContext } from "../contexts/episodeReleaseContext"; + +// parse float by default +types.setTypeParser(1700, parseFloat); +// parse int8 as normal number instead of bigint +types.setTypeParser(20, parseInt); function inContext(callback: ContextCallback, transaction = true) { return storageInContext(callback, (con) => queryContextProvider(con), transaction); @@ -69,60 +72,20 @@ export async function storageInContext( await poolProvider.startPromise; } const pool = await poolProvider.provide(); - const con = await getConnection(pool); - const context = provider(con); let result; try { - result = 
await doTransaction(callback, context, transaction); + result = await pool.transaction(async (con) => { + const result = await callback(provider(con)); + return result; + }); } catch (e) { console.log(e); throw e; - } finally { - if (isQuery(result)) { - result.on("end", () => con.release()); - } else { - // release connection into the pool - con.release(); - } } return result; } -async function getConnection(pool: mySql.Pool): Promise { - let attempt = 0; - const maxAttempts = 10; - - while (attempt < maxAttempts) { - try { - return await pool.getConnection(); - } catch (error: unknown) { - // check if it is any network or mysql error - if (typeof error === "object" && error && "code" in error) { - const code = (error as any).code; - - if (code === "ECONNREFUSED") { - logger.debug(`Database not up yet. Attempt ${attempt + 1}/${maxAttempts}`); - // the service may not be up right now, so wait - await delay(1000); - } else if (error instanceof Error) { - throw error; - } else { - throw Error(JSON.stringify(error)); - } - } else { - console.log("Error rethrown"); - // throw it is an unknown type of error - throw error; - } - attempt++; - } - } - throw new DatabaseConnectionError( - `Could not connect to Database, Maximum Attempts reached: ${attempt}/${maxAttempts}`, - ); -} - async function catchTransactionError( transaction: boolean, context: C, @@ -163,12 +126,11 @@ async function doTransaction( // let callback run with context result = await callback(context); - if (isQuery(result)) { - const query: Query = result; + if (result instanceof Readable) { let error = false; // TODO: 31.08.2019 returning query object does not allow normal error handling, // maybe return own stream where the control is completely in my own hands - query + result .on("error", (err) => { error = true; if (transaction) { @@ -186,20 +148,26 @@ async function doTransaction( await context.commit(); } } catch (e) { - return await catchTransactionError(transaction, context, e, attempts, 
callback); + if (transaction) { + context.markAborted(); + } + return catchTransactionError(transaction, context, e, attempts, callback); } return result; } +type ProviderConfig = ClientConfigurationInput & ConnectionOptions; +type ProviderPool = DatabasePool; + class SqlPoolProvider { private remake = true; - private pool?: Promise; - private config?: mySql.PoolConfig; + private pool?: Promise; + private config?: ProviderConfig; public running = false; public errorAtStart = false; public startPromise = Promise.resolve(); - public provide(): Promise { + public provide(): Promise { if (!this.pool || this.remake) { this.remake = false; this.pool = this.createPool(); @@ -212,7 +180,7 @@ class SqlPoolProvider { this.errorAtStart = false; } - public useConfig(config: mySql.PoolConfig) { + public useConfig(config: ProviderConfig) { this.config = { ...this.defaultConfig(), ...config }; this.remake = true; } @@ -232,20 +200,21 @@ class SqlPoolProvider { this.running = true; try { const manager = new SchemaManager(); - const database = this.getConfig().database; + const database = this.getConfig().databaseName; if (!database) { this.startPromise = Promise.reject(new Error("No database name defined")); return; } manager.initTableSchema(databaseSchema, database); - this.startPromise = inContext((context) => manager.checkTableSchema(context.databaseContext), true).catch( - (error) => { - logger.error(error); - this.errorAtStart = true; - return Promise.reject(new Error("Database error occurred while starting")); - }, - ); + this.startPromise = inContext( + (context) => manager.checkTableSchema(context.getContext(DatabaseContext)), + true, + ).catch((error) => { + logger.error(error); + this.errorAtStart = true; + return Promise.reject(new Error("Database error occurred while starting")); + }); } catch (e) { this.errorAtStart = true; logger.error(e); @@ -270,45 +239,88 @@ class SqlPoolProvider { return this.config || this.defaultConfig(); } - private async createPool(): Promise 
{ + private async createPool(): Promise { // stop previous pool if available if (this.pool) { await this.stop(); } const config = this.getConfig(); - return mySql.createPool(config); + + return createPool( + stringifyDsn({ + applicationName: "enterprise", + databaseName: config.databaseName, + host: config.host, + password: config.password, + port: config.port, + username: config.username, + }), + { + maximumPoolSize: config.maximumPoolSize, + interceptors: [ + // transform snake_case columns to camelCase in QueryResult + createFieldNameTransformationInterceptor({ + format: "CAMEL_CASE", + }), + { + queryExecutionError(_queryContext, query, error, notices) { + console.log(query, error, notices); + return null; + }, + afterQueryExecution(queryContext, query, result) { + // console.log(queryContext, query, result); + return null; + }, + }, + ], + typeParsers: [ + createBigintTypeParser(), + createNumericTypeParser(), + createIntervalTypeParser(), + { + name: "timestamp", + parse(value) { + return new Date(value + " UTC"); + }, + }, + { + name: "timestamptz", + parse(value) { + return new Date(value); + }, + }, + { + name: "date", + parse(value) { + return new Date(value); + }, + }, + ], + captureStackTrace: true, + }, + ); } - private defaultConfig(): mySql.PoolConfig { + private defaultConfig(): ProviderConfig { return { - connectionLimit: env.dbConLimit, + maximumPoolSize: env.dbConLimit, host: env.dbHost, - user: env.dbUser, + username: env.dbUser, password: env.dbPassword, - // charset/collation of the current database and tables - charset: "utf8mb4", // we assume that the database exists already - database: "enterprise", + databaseName: "enterprise", port: env.dbPort, - typeCast(field, next) { - if (field.type === "TINY" && field.length === 1) { - return field.string() === "1"; // 1 = true, 0 = false - } else { - return next(); - } - }, }; } } const poolProvider = new SqlPoolProvider(); -// poolProvider.provide().catch(console.error); class SqlPoolConfigUpdater 
{ /** - * Creates new Mysql Connection Pool with the given Config. + * Creates new Mysql Connection ProviderPool with the given Config. */ - public update(config: Partial): void { + public update(config: Partial): void { poolProvider.useConfig(config); } @@ -326,78 +338,14 @@ class SqlPoolConfigUpdater { export const poolConfig = new SqlPoolConfigUpdater(); -export class Storage { - public getPageInfo(link: string, key: string): Promise<{ link: string; key: string; values: string[] }> { - return inContext((context) => context.getPageInfo(link, key)); - } - - public updatePageInfo(link: string, key: string, values: string[], toDeleteValues?: string[]): EmptyPromise { - return inContext((context) => context.updatePageInfo(link, key, values, toDeleteValues)); - } - - public removePageInfo(link: string, key?: string): EmptyPromise { - return inContext((context) => context.removePageInfo(link, key)); - } - - public queueNewTocs(): EmptyPromise { - return inContext((context) => context.queueNewTocs()); - } - - public getStats(uuid: Uuid): Promise { - return inContext((context) => context.getStat(uuid)); - } - - public getNew(uuid: Uuid, date?: Date): Promise { - return inContext((context) => context.getNew(uuid, date)); - } - - public queryItems(uuid: Uuid, query: QueryItems): Promise { - return inContext((context) => context.queryItems(uuid, query)); - } - - /** - * - * @param result - */ - public processResult(result: Result): Promise>> { - return inContext((context) => context.processResult(result)); - } - - /** - * - * @param result - */ - public saveResult(result: Result): Promise>> { - return inContext((context) => context.saveResult(result)); - } - - /** - * - */ - public getInvalidated(uuid: Uuid): Promise { - return inContext((context) => context.getInvalidated(uuid)); - } - - /** - * - */ - public getInvalidatedStream(uuid: Uuid): Promise { - return inContext((context) => context.getInvalidatedStream(uuid)); - } -} - -/** - * Property names of QueryContext 
whose type extends from SubContext. - */ -type ContextName = PropertyNames; -type ContextProxy> = new () => PromiseFunctions; +type ContextProxy> = new () => PromiseFunctions; -function inContextGeneric(callback: ContextCallback, context: ContextName) { - return storageInContext(callback, (con) => queryContextProvider(con)[context] as unknown as C, true); +function inContextGeneric(callback: ContextCallback, context: ContextConstructor) { + return storageInContext(callback, (con) => queryContextProvider(con).getContext(context), true); } -export function ContextProxyFactory>( - contextName: ContextName, +export function ContextProxyFactory>( + contextConstructor: ContextConstructor, omitted: K[], ): ContextProxy { const hiddenProps: K[] = [...omitted]; @@ -413,22 +361,19 @@ export function ContextProxyFactory inContextGeneric((context) => context[prop](...args), contextName); + return (...args: any[]) => inContextGeneric((context) => context[prop](...args), contextConstructor); }, }, ); } as unknown as ContextProxy; } -export function SubContextProxyFactory = keyof SubContext>( - context: ContextName, +export function SubContextProxyFactory = keyof QueryContext>( + context: ContextConstructor, omitted?: K[], ): ContextProxy { - return ContextProxyFactory(context, [ + return ContextProxyFactory(context, [ "commit", - "dmlQuery", - "parentContext", - "query", "rollback", "startTransaction", ...(omitted || []), @@ -458,28 +403,30 @@ export function SubContextProxyFactory = keyof SubContext>( - context: ContextName, +export function createStorage = keyof QueryContext>( + context: ContextConstructor, ): PromiseFunctions { return new (SubContextProxyFactory(context))(); } -export const storage = new Storage(); -export const databaseStorage = createStorage("databaseContext"); -export const mediumStorage = createStorage("mediumContext"); -export const partStorage = createStorage("partContext"); -export const episodeStorage = createStorage("episodeContext"); -export const 
newsStorage = createStorage("newsContext"); -export const mediumInWaitStorage = createStorage("mediumInWaitContext"); -export const userStorage = createStorage("userContext"); -export const jobStorage = createStorage("jobContext"); -export const internalListStorage = createStorage("internalListContext"); -export const externalUserStorage = createStorage("externalUserContext"); -export const externalListStorage = createStorage("externalListContext"); -export const hookStorage = createStorage("scraperHookContext"); -export const appEventStorage = createStorage("appEventContext"); -export const customHookStorage = createStorage("customHookContext"); -export const notificationStorage = createStorage("notificationContext"); +export const databaseStorage = createStorage(DatabaseContext); +export const mediumStorage = createStorage(MediumContext); +export const partStorage = createStorage(PartContext); +export const episodeStorage = createStorage(EpisodeContext); +export const newsStorage = createStorage(NewsContext); +export const mediumInWaitStorage = createStorage(MediumInWaitContext); +export const userStorage = createStorage(UserContext); +export const jobStorage = createStorage(JobContext); +export const internalListStorage = createStorage(InternalListContext); +export const externalUserStorage = createStorage(ExternalUserContext); +export const externalListStorage = createStorage(ExternalListContext); +export const hookStorage = createStorage(ScraperHookContext); +export const appEventStorage = createStorage(AppEventContext); +export const customHookStorage = createStorage(CustomHookContext); +export const notificationStorage = createStorage(NotificationContext); +export const episodeReleaseStorage = createStorage(EpisodeReleaseContext); +export const mediumTocStorage = createStorage(MediumTocContext); +export const storage = createStorage(GenericContext); /** * diff --git a/packages/core/src/database/storages/storageTools.ts 
b/packages/core/src/database/storages/storageTools.ts index 1efe5ce9..0352017b 100644 --- a/packages/core/src/database/storages/storageTools.ts +++ b/packages/core/src/database/storages/storageTools.ts @@ -1,10 +1,13 @@ -import { Connection } from "promise-mysql"; +import { DatabaseConnection, DatabaseTransactionConnection } from "slonik"; import { QueryContext } from "../contexts/queryContext"; import { ConnectionContext } from "../databaseTypes"; export type ContextCallback = (context: C) => Promise; -export type ContextProvider = (con: Connection) => C; -export const queryContextProvider: ContextProvider = (con) => new QueryContext(con); +export type ContextProvider = ( + con: DatabaseConnection | DatabaseTransactionConnection, +) => C; +export const queryContextProvider: ContextProvider = (con) => + new QueryContext({ connection: con, subClass: new Map() }); /** * Escapes the Characters for an Like with the '|' char. diff --git a/packages/core/src/database/tableBuilder.ts b/packages/core/src/database/tableBuilder.ts index 73a76978..461a9aff 100644 --- a/packages/core/src/database/tableBuilder.ts +++ b/packages/core/src/database/tableBuilder.ts @@ -3,29 +3,22 @@ import { ColumnBuilder } from "./columnBuilder"; import { TableSchema } from "./tableSchema"; import { ColumnSchema } from "./columnSchema"; import { parseDataColumn, parseForeignKey, parsePrimaryKey, parseUnique } from "./tableParser"; -import { InvalidationType } from "./databaseTypes"; import { SchemaError } from "../error"; +import { sql } from "slonik"; export class TableBuilder { private readonly columns: ColumnSchema[] = []; private name?: string; private main?: boolean; - private invalidationTable?: boolean; private readonly invalidationColumn?: string; private readonly databaseBuilder: DataBaseBuilder; - private readonly stubTable = new TableSchema([], ""); - private readonly invalidations: Array<{ type: InvalidationType; table?: string }> = []; + private readonly stubTable = new TableSchema([], 
"", sql``); private readonly uniqueIndices: ColumnSchema[][] = []; public constructor(databaseBuilder: DataBaseBuilder) { this.databaseBuilder = databaseBuilder; } - public setInvalidationTable(): this { - this.invalidationTable = true; - return this; - } - public parseColumn(column: string): this { const dataColumn = parseDataColumn(this.stubTable, this.databaseBuilder.tables, column); if (!dataColumn) { @@ -73,25 +66,12 @@ export class TableBuilder { return this; } - public addInvalidation(type: InvalidationType, tableName?: string): this { - this.invalidations.push({ type, table: tableName }); - return this; - } - public build(): TableSchema { if (!this.name) { throw new SchemaError("table has no name"); } - const table = new TableSchema( - [...this.columns, ...this.stubTable.columns], - this.name, - this.main, - this.invalidationColumn, - this.invalidationTable, - this.uniqueIndices, - ); + const table = new TableSchema([...this.columns, ...this.stubTable.columns], this.name, sql``, this.uniqueIndices); table.columns.forEach((value) => (value.table = table)); - this.databaseBuilder.addTable(table, this.invalidations); return table; } } diff --git a/packages/core/src/database/tableSchema.ts b/packages/core/src/database/tableSchema.ts index 18873256..ffe35e8a 100644 --- a/packages/core/src/database/tableSchema.ts +++ b/packages/core/src/database/tableSchema.ts @@ -1,35 +1,30 @@ -import { InvalidationType } from "./databaseTypes"; import { ColumnSchema } from "./columnSchema"; import { SchemaError } from "../error"; +import { SqlSqlToken } from "slonik"; export class TableSchema { public readonly columns: ColumnSchema[]; public readonly foreignKeys: ColumnSchema[]; public readonly primaryKeys: ColumnSchema[]; public readonly name: string; - public readonly invalidations: Array<{ type: InvalidationType; table: TableSchema }> = []; - public readonly main: boolean; - public readonly invalidationColumn?: string; - public readonly invalidationTable: boolean; public 
readonly uniqueIndices: ColumnSchema[][]; - public mainDependent?: boolean; + public readonly indices: string[][]; + public readonly schema: Readonly>; public constructor( columns: ColumnSchema[], name: string, - main = false, - invalidationCol?: string, - invalidTable = false, + tableSchema: SqlSqlToken, uniqueIndices: ColumnSchema[][] = [], + indices: string[][] = [], ) { this.columns = columns; this.primaryKeys = this.columns.filter((value) => value.primaryKey); this.foreignKeys = this.columns.filter((value) => value.foreignKey); this.uniqueIndices = uniqueIndices; this.name = name; - this.main = main; - this.invalidationColumn = invalidationCol; - this.invalidationTable = invalidTable; + this.indices = indices; + this.schema = tableSchema; } public getTableSchema(): { name: string; columns: string[] } { @@ -91,7 +86,7 @@ export class TableSchema { return { name: this.name, columns: schemata }; } - public getSchema(): string { + public getGeneratedSchema(): string { const tableSchema = this.getTableSchema(); return `CREATE TABLE ${this.name} (${tableSchema.columns.join(", ")});`; } diff --git a/packages/core/src/database/triggerBuilder.ts b/packages/core/src/database/triggerBuilder.ts index 763ca685..c8c74648 100644 --- a/packages/core/src/database/triggerBuilder.ts +++ b/packages/core/src/database/triggerBuilder.ts @@ -44,8 +44,6 @@ export class TriggerBuilder { if (!this._name || !this._body || !this._table || !this._event || !this._timing) { throw new SchemaError("invalid trigger"); } - const trigger = new Trigger(this._name, this._timing, this._event, this._table, this._body); - this.databaseBuilder.addTrigger(trigger); - return trigger; + return new Trigger(this._name, this._timing, this._event, this._table, this._body); } } diff --git a/packages/core/src/error.ts b/packages/core/src/error.ts index 6799a578..d8220ccd 100644 --- a/packages/core/src/error.ts +++ b/packages/core/src/error.ts @@ -1,3 +1,5 @@ +import { DatabaseError as PgDatabaseError } from 
"pg"; + export class ParseError extends Error { public constructor(message: string) { super(message); @@ -117,3 +119,7 @@ export class ConfigurationError extends Error { Error.captureStackTrace(this, ConfigurationError); } } + +export function isDuplicateError(params: unknown): params is PgDatabaseError { + return params instanceof PgDatabaseError && params.code === "23505"; // unique violation +} diff --git a/packages/core/src/test/database/context/contextHelper.ts b/packages/core/src/test/database/context/contextHelper.ts index a6d43d84..c34e5042 100644 --- a/packages/core/src/test/database/context/contextHelper.ts +++ b/packages/core/src/test/database/context/contextHelper.ts @@ -2,10 +2,11 @@ import * as storageTools from "../../../database/storages/storageTools"; import * as storage from "../../../database/storages/storage"; import { QueryContext } from "../../../database/contexts/queryContext"; import { MediaType } from "../../../tools"; -import { EmptyPromise, EpisodeRelease, SimpleEpisode } from "../../../types"; -import { escapeId, Query } from "mysql"; +import { EmptyPromise, EpisodeRelease, SimpleEpisode, TypedQuery } from "../../../types"; import bcrypt from "bcryptjs"; import { MissingEntityError } from "../../../error"; +import { SimpleRelease } from "../../../database/databaseTypes"; +import { sql } from "slonik"; function inContext(callback: storageTools.ContextCallback, transaction = true) { return storage.storageInContext(callback, (con) => storageTools.queryContextProvider(con), transaction); @@ -25,7 +26,7 @@ async function recreateStorage() { export async function setupTestDatabase(): EmptyPromise { // assume the enterprise_test already exists - storage.poolConfig.update({ database: "enterprise_test", host: "localhost" }); + storage.poolConfig.update({ databaseName: "enterprise_test", host: "localhost" }); // database setup finished if recreation is successful if (await recreateStorage()) { @@ -36,12 +37,12 @@ export async function 
setupTestDatabase(): EmptyPromise { storage.poolConfig.update({ host: "localhost" }); await recreateStorage(); - await inContext((context) => context.query("CREATE DATABASE IF NOT EXISTS enterprise_test;")); - storage.poolConfig.update({ database: "enterprise_test", host: "localhost" }); + await inContext((context) => context.con.query(sql`CREATE DATABASE IF NOT EXISTS enterprise_test;`)); + storage.poolConfig.update({ databaseName: "enterprise_test", host: "localhost" }); await recreateStorage(); } -export function checkEmptyQuery(query: Query): EmptyPromise { +export function checkEmptyQuery(query: TypedQuery): EmptyPromise { return new Promise((resolve, reject) => { let rejected = false; query.on("result", () => { @@ -60,12 +61,12 @@ export function checkEmptyQuery(query: Query): EmptyPromise { }); } -export function resultFromQuery(query: Query): Promise { +export function resultFromQuery(query: TypedQuery): Promise { return new Promise((resolve, reject) => { const rows: any[] = []; let rejected = false; query - .on("result", (row: unknown) => rows.push(row)) + .on("data", (row: unknown) => rows.push(row)) .on("error", (error) => { rejected = true; reject(error); @@ -84,7 +85,7 @@ export function resultFromQuery(query: Query): Promise { interface StaticData { media: Array<{ id: number; medium: MediaType }>; parts: Array<{ id: number; mediumId: number; totalIndex: number }>; - episodes: Array<{ id: number; partId: number; totalIndex: number; releases: EpisodeRelease[] }>; + episodes: Array<{ id: number; partId: number; totalIndex: number; combiIndex: number; releases: EpisodeRelease[] }>; releases: Array<{ url: string; episodeId: number; title: string; releaseDate: Date }>; media_in_waits: Array<{ id: number; medium: MediaType }>; news: Array<{ id: number; medium: MediaType }>; @@ -112,6 +113,7 @@ const data: StaticData = { id: 1, partId: 1, totalIndex: 1, + combiIndex: 1, releases: [], }, ], @@ -209,13 +211,13 @@ export function getDatabaseData(): [Record, 
StaticData] { } export async function tearDownTestDatabase(): EmptyPromise { - return inContext((context) => context.query("DROP DATABASE enterprise_test;")); + await inContext((context) => context.con.query(sql`DROP DATABASE enterprise_test;`)); } export async function fillUserTable(): Promise { const dummy = user[0]; await inContext((context) => - context.query("INSERT IGNORE INTO user (name, uuid, password, alg) VALUES (?,?,?,?);", [ + context.con.query(sql`INSERT IGNORE INTO user (name, uuid, password, alg) VALUES (?,?,?,?);`, [ dummy.name, dummy.uuid, dummy.password, @@ -235,7 +237,7 @@ export async function fillUserTable(): Promise { export async function fillMediumTable(): EmptyPromise { const dummy = data.media[0]; await inContext((context) => - context.query("INSERT IGNORE INTO medium (id, medium) VALUES (?,?);", [dummy.id, dummy.medium]), + context.con.query(sql`INSERT IGNORE INTO medium (id, medium) VALUES (?,?);`, [dummy.id, dummy.medium]), ); if (!databaseData[1].media.find((value) => value.id === dummy.id)) { @@ -247,7 +249,7 @@ export async function fillPartTable(): Promise { await fillMediumTable(); const dummy = data.parts[0]; await inContext((context) => - context.query("INSERT IGNORE INTO part (id, medium_id, totalIndex) VALUES (?,?,?);", [ + context.con.query(sql`INSERT IGNORE INTO part (id, medium_id, totalIndex) VALUES (?,?,?);`, [ dummy.id, dummy.mediumId, dummy.totalIndex, @@ -265,7 +267,7 @@ export async function fillEpisodeTable(): Promise { const dummy = data.episodes[0]; await inContext((context) => - context.query("INSERT IGNORE INTO episode (id, part_id, totalIndex, combiIndex) VALUES (?,?,?,?);", [ + context.con.query(sql`INSERT IGNORE INTO episode (id, part_id, totalIndex, combiIndex) VALUES (?,?,?,?);`, [ dummy.id, dummy.partId, dummy.totalIndex, @@ -279,24 +281,25 @@ export async function fillEpisodeTable(): Promise { return [dummy]; } -export async function fillEpisodeReleaseTable(): Promise { +export async function 
fillEpisodeReleaseTable(): Promise { await fillEpisodeTable(); - const dummy = { + const dummy: SimpleRelease = { episodeId: 1, url: "https://my.url/release", title: "", releaseDate: new Date(), + locked: false, + id: 0, }; // set ms part to 0, as this is not saved in database dummy.releaseDate.setMilliseconds(0); await inContext((context) => - context.query("INSERT IGNORE INTO episode_release (episode_id, url, title, releaseDate) VALUES (?,?,?,?);", [ - dummy.episodeId, - dummy.url, - dummy.title, - dummy.releaseDate, - ]), + context.con.query( + sql` + INSERT IGNORE INTO episode_release (episode_id, url, title, releaseDate) + VALUES (${dummy.episodeId},${dummy.url},${dummy.title},${sql.timestamp(dummy.releaseDate)});`, + ), ); if (!databaseData[1].releases.find((value) => value.episodeId === dummy.episodeId && value.url === dummy.url)) { @@ -315,12 +318,13 @@ export async function fillUserEpisodeTable(): Promise { dummy.readDate = now; await inContext((context) => - context.query("INSERT IGNORE INTO user_episode (user_uuid, episode_id, progress, read_date) VALUES (?,?,?,?);", [ - dummy.uuid, - dummy.episodeId, - dummy.progress, - dummy.readDate, - ]), + context.con.query( + sql` + INSERT IGNORE INTO user_episode (user_uuid, episode_id, progress, read_date) + VALUES ( + ${dummy.uuid},${dummy.episodeId},${dummy.progress},${dummy.readDate ? 
sql.timestamp(dummy.readDate) : null} + );`, + ), ); if ( !databaseData[0][user[0].uuid].progress.find( @@ -333,36 +337,37 @@ export async function fillUserEpisodeTable(): Promise { } export async function dumpTable(table: string): Promise { - return inContext((context) => context.query(`SELECT * FROM ${escapeId(table)};`)); + const result = await inContext((context) => context.con.query(sql`SELECT * FROM ${sql.identifier([table])};`)); + return result.rows as any[]; } export async function cleanUser(): EmptyPromise { - await inContext((context) => context.query("DELETE FROM user;")); + await inContext((context) => context.con.query(sql`DELETE FROM user;`)); databaseData[0] = {}; } export async function cleanUserEpisode(): EmptyPromise { - await inContext((context) => context.query("TRUNCATE user_episode;")); + await inContext((context) => context.con.query(sql`TRUNCATE user_episode;`)); Object.values(databaseData[0]).forEach((value) => (value.progress = [])); } export async function cleanEpisodeRelease(): EmptyPromise { - await inContext((context) => context.query("TRUNCATE episode_release;")); + await inContext((context) => context.con.query(sql`TRUNCATE episode_release;`)); databaseData[1].releases = []; } export async function cleanEpisode(): EmptyPromise { - await inContext((context) => context.query("DELETE FROM episode;")); + await inContext((context) => context.con.query(sql`DELETE FROM episode;`)); databaseData[1].episodes = []; } export async function cleanPart(): EmptyPromise { - await inContext((context) => context.query("DELETE FROM part;")); + await inContext((context) => context.con.query(sql`DELETE FROM part;`)); databaseData[1].parts = []; } export async function cleanMedium(): EmptyPromise { - await inContext((context) => context.query("DELETE FROM medium;")); + await inContext((context) => context.con.query(sql`DELETE FROM medium;`)); databaseData[1].media = []; } diff --git a/packages/core/src/test/database/context/episodeContext.spec.ts 
b/packages/core/src/test/database/context/episodeContext.spec.ts index c89f99e6..01e00996 100644 --- a/packages/core/src/test/database/context/episodeContext.spec.ts +++ b/packages/core/src/test/database/context/episodeContext.spec.ts @@ -1,7 +1,7 @@ import * as tools from "../../../tools"; import { internetTester } from "../../../internetTester"; import * as storage from "../../../database/storages/storage"; -import { episodeStorage } from "../../../database/storages/storage"; +import { episodeStorage, episodeReleaseStorage } from "../../../database/storages/storage"; import { setupTestDatabase, checkEmptyQuery, @@ -17,7 +17,8 @@ import { getMediumOfEpisode, getEpisode, } from "./contextHelper"; -import { EpisodeRelease, ReadEpisode, SimpleRelease } from "../../../types"; +import { ReadEpisode } from "../../../types"; +import { SimpleRelease } from "../../../database/databaseTypes"; jest.setTimeout(60000); @@ -39,10 +40,10 @@ describe("episodeContext", () => { describe("getDisplayReleases", () => { it("should not throw, when using valid parameters", async () => { await expect( - episodeStorage.getDisplayReleases(new Date(), new Date(), true, "12", [], [], [], []), + episodeReleaseStorage.getDisplayReleases(new Date(), new Date(), true, "12", [], [], [], []), ).resolves.toBeDefined(); await expect( - episodeStorage.getDisplayReleases(new Date(), null, null, "12", [], [], [], []), + episodeReleaseStorage.getDisplayReleases(new Date(), null, null, "12", [], [], [], []), ).resolves.toBeDefined(); }); }); @@ -114,7 +115,7 @@ describe("episodeContext", () => { describe("getAllReleases", () => { it("should not throw", async () => { // TODO: write better test - await expect(episodeStorage.getAllReleases()).resolves.toBeDefined(); + await expect(episodeReleaseStorage.getAllReleases()).resolves.toBeDefined(); }); }); @@ -123,7 +124,7 @@ describe("episodeContext", () => { await fillEpisodeReleaseTable(); const mediumId = getDatabaseData()[1].media[0].id; const uuid = 
Object.keys(getDatabaseData()[0])[0]; - const result = await episodeStorage.getMediumReleases(mediumId, uuid); + const result = await episodeReleaseStorage.getMediumReleases(mediumId, uuid); expect(result.length).toBeGreaterThan(0); }); }); @@ -156,7 +157,7 @@ describe("episodeContext", () => { describe("getReleases", () => { it("should not throw when using valid parameters", async () => { const values = await fillEpisodeReleaseTable(); - const result = await episodeStorage.getReleases(values.map((release) => release.episodeId)); + const result = await episodeReleaseStorage.getReleases(values.map((release) => release.episodeId)); expect(values.length).toBe(result.length); }); }); @@ -164,7 +165,7 @@ describe("episodeContext", () => { describe("getReleasesByHost", () => { it("should not throw when using valid parameters", async () => { const values = await fillEpisodeReleaseTable(); - const result = await episodeStorage.getReleasesByHost(values[0].episodeId, values[0].url); + const result = await episodeReleaseStorage.getReleasesByHost([values[0].episodeId], values[0].url); expect(result.length).toBeGreaterThan(0); }); }); @@ -173,7 +174,7 @@ describe("episodeContext", () => { it("should not throw when using valid parameters", async () => { const [value] = await fillEpisodeReleaseTable(); const medium = getMediumOfEpisode(value.episodeId); - const result = await episodeStorage.getMediumReleasesByHost(medium.id, value.url); + const result = await episodeReleaseStorage.getMediumReleasesByHost(medium.id, value.url); expect(result.length).toBeGreaterThan(0); }); }); @@ -201,16 +202,6 @@ describe("episodeContext", () => { }); }); - /** - * Sets the progress of an user in regard to an episode with one or multiple progressResult objects. 
- */ - describe("setProgress", () => { - it("should not throw when using valid parameters", async () => { - // FIXME remove this test and the whole method - await expect(episodeStorage.setProgress("", [])).resolves.toBeUndefined(); - }); - }); - /** * Get the progress of an user in regard to an episode. */ @@ -238,25 +229,17 @@ describe("episodeContext", () => { }); }); - /** - * Marks an Episode as read and adds it into Storage if the episode does not exist yet. - */ - describe("markEpisodeRead", () => { - it("should not throw when using valid parameters", async () => { - // FIXME remote this test and the whole method - await expect(episodeStorage.markEpisodeRead("", { result: [], url: "", accept: true })).resolves.toBeUndefined(); - }); - }); describe("addRelease", () => { it("should not throw when using valid parameters", async () => { const [episode] = await fillEpisodeTable(); - const release: EpisodeRelease = { + const release = { episodeId: episode.id, releaseDate: new Date(), title: "hi", url: "https://book.url/test/", + locked: false, }; - await expect(episodeStorage.addRelease(release)).resolves.toEqual(release); + await expect(episodeReleaseStorage.addReleases([release])).resolves.toEqual(release); }); }); @@ -264,7 +247,7 @@ describe("episodeContext", () => { it("should not throw when using valid parameters", async () => { const [release] = await fillEpisodeReleaseTable(); const medium = getMediumOfEpisode(release.episodeId); - const result = await episodeStorage.getEpisodeLinksByMedium(medium.id); + const result = await episodeReleaseStorage.getEpisodeLinksByMedium(medium.id); expect(result).toContainEqual({ episodeId: release.episodeId, url: release.url }); }); }); @@ -272,25 +255,27 @@ describe("episodeContext", () => { describe("getSourcedReleases", () => { it("should not throw when using valid parameters", async () => { // TODO: write better test - await expect(episodeStorage.getSourcedReleases("", 0)).resolves.toBeDefined(); + await 
expect(episodeReleaseStorage.getSourcedReleases("", 0)).resolves.toBeDefined(); }); }); describe("updateRelease", () => { it("should not throw when using valid parameters", async () => { const [episode] = await fillEpisodeTable(); - const release: EpisodeRelease = { + const release: SimpleRelease = { + id: 0, episodeId: episode.id, releaseDate: new Date(), title: "hi", url: "https://book.url/test/", + locked: false, }; release.releaseDate.setMilliseconds(0); // FIXME: currently getReleases returns null values for properties which do not contain the null type // either allow null, or remove keys with null values - await episodeStorage.addRelease(release); - await expect(episodeStorage.getReleases(release.episodeId)).resolves.toEqual([ + await episodeReleaseStorage.addReleases([release]); + await expect(episodeReleaseStorage.getReleases([release.episodeId])).resolves.toEqual([ { ...release, sourceType: null, @@ -302,8 +287,10 @@ describe("episodeContext", () => { release.releaseDate.setHours(release.releaseDate.getHours() - 1); release.sourceType = "2"; release.title = "2"; - await expect(episodeStorage.updateRelease(release)).resolves.toBeUndefined(); - await expect(episodeStorage.getReleases(release.episodeId)).resolves.toEqual([{ ...release, tocId: null }]); + await expect(episodeReleaseStorage.updateReleases([release])).resolves.toBeUndefined(); + await expect(episodeReleaseStorage.getReleases([release.episodeId])).resolves.toEqual([ + { ...release, tocId: null }, + ]); }); }); @@ -311,16 +298,16 @@ describe("episodeContext", () => { it("should not throw when using valid parameters", async () => { const [release] = await fillEpisodeReleaseTable(); - await expect(episodeStorage.getReleases(release.episodeId)).resolves.toContainEqual({ + await expect(episodeReleaseStorage.getReleases([release.episodeId])).resolves.toContainEqual({ ...release, sourceType: null, tocId: null, locked: false, }); - await 
expect(episodeStorage.deleteRelease(release)).resolves.toBeUndefined(); + await expect(episodeReleaseStorage.deleteReleases([release])).resolves.toBeUndefined(); - await expect(episodeStorage.getReleases(release.episodeId)).resolves.not.toContainEqual({ + await expect(episodeReleaseStorage.getReleases([release.episodeId])).resolves.not.toContainEqual({ ...release, sourceType: null, tocId: null, @@ -349,10 +336,11 @@ describe("episodeContext", () => { const episode = { id: 0, partId: 1, - releases: [], totalIndex: 0, + combiIndex: 0, + releases: [], }; - const firstResult = await episodeStorage.addEpisode(episode); + const [firstResult] = await episodeStorage.addEpisode([episode]); expect(firstResult.id).not.toBe(episode.id); expect(firstResult).toEqual({ ...episode, @@ -360,8 +348,9 @@ describe("episodeContext", () => { combiIndex: episode.totalIndex, progress: 0, readDate: null, + releases: [], }); - const secondResult = await episodeStorage.addEpisode(episode); + const [secondResult] = await episodeStorage.addEpisode([episode]); expect(secondResult.id).not.toBe(episode.id); expect(secondResult.id).toBeGreaterThan(firstResult.id); expect(secondResult).toEqual({ @@ -382,7 +371,7 @@ describe("episodeContext", () => { // for this test both episode and progress must have the same id expect(episode.id).toBe(progress.episodeId); - await expect(episodeStorage.getEpisode(progress.episodeId, progress.uuid)).resolves.toEqual([ + await expect(episodeStorage.getEpisode([progress.episodeId], progress.uuid)).resolves.toEqual([ { ...episode, combiIndex: episode.totalIndex, @@ -410,7 +399,7 @@ describe("episodeContext", () => { describe("getPartEpisodePerIndex", () => { it("should not throw when using valid parameters", async () => { const [episode] = await fillEpisodeTable(); - await expect(episodeStorage.getPartEpisodePerIndex(episode.partId, episode.totalIndex)).resolves.toEqual([ + await expect(episodeStorage.getPartEpisodePerIndex(episode.partId, 
[episode.totalIndex])).resolves.toEqual([ { ...episode, partialIndex: null, @@ -425,7 +414,7 @@ describe("episodeContext", () => { const [episode] = await fillEpisodeTable(); const medium = getMediumOfEpisode(episode.id); - await expect(episodeStorage.getMediumEpisodePerIndex(medium.id, episode.totalIndex)).resolves.toEqual([ + await expect(episodeStorage.getMediumEpisodePerIndex(medium.id, [episode.totalIndex])).resolves.toEqual([ { ...episode, partialIndex: null, @@ -444,15 +433,16 @@ describe("episodeContext", () => { const episode = { id: 0, partId: part.id, - releases: [], totalIndex: 0, + combiIndex: 0, + releases: [], }; await expect(episodeStorage.updateEpisode(episode)).resolves.toBe(false); - const result = await episodeStorage.addEpisode(episode); + const [result] = await episodeStorage.addEpisode([episode]); episode.id = result.id; episode.totalIndex = 1; await expect(episodeStorage.updateEpisode(episode)).resolves.toBe(true); - await expect(episodeStorage.getEpisode(episode.id, "")).resolves.toEqual([ + await expect(episodeStorage.getEpisode([episode.id], "")).resolves.toEqual([ { ...episode, combiIndex: episode.totalIndex, @@ -480,7 +470,7 @@ describe("episodeContext", () => { describe("deleteEpisode", () => { it("should not throw when using valid parameters", async () => { const [episode] = await fillEpisodeTable(); - await expect(episodeStorage.getEpisode(episode.id, "")).resolves.toEqual([ + await expect(episodeStorage.getEpisode([episode.id], "")).resolves.toEqual([ { ...episode, combiIndex: episode.totalIndex, @@ -490,7 +480,7 @@ describe("episodeContext", () => { }, ]); await expect(episodeStorage.deleteEpisode(episode.id)).resolves.toBeDefined(); - await expect(episodeStorage.getEpisode(episode.id, "")).resolves.toEqual([]); + await expect(episodeStorage.getEpisode([episode.id], "")).resolves.toEqual([]); }); }); @@ -517,7 +507,7 @@ describe("episodeContext", () => { const [episode] = await fillEpisodeTable(); await 
expect(episodeStorage.getUnreadChapter(user.uuid)).resolves.toEqual([episode.id]); - await episodeStorage.addProgress(user.uuid, episode.id, 1, null); + await episodeStorage.addProgress(user.uuid, [episode.id], 1, null); await expect(episodeStorage.getUnreadChapter(user.uuid)).resolves.toEqual([]); }); }); @@ -531,7 +521,7 @@ describe("episodeContext", () => { const date = new Date(); date.setMilliseconds(0); - await episodeStorage.addProgress(user.uuid, episode.id, 1, date); + await episodeStorage.addProgress(user.uuid, [episode.id], 1, date); await expect(episodeStorage.getReadToday(user.uuid)).resolves.toStrictEqual([ { episodeId: episode.id, @@ -564,7 +554,7 @@ describe("episodeContext", () => { it("should not throw when using valid parameters", async () => { const [release] = await fillEpisodeReleaseTable(); - await expect(episodeStorage.getEpisodeLinks([release.episodeId])).resolves.toEqual([ + await expect(episodeReleaseStorage.getEpisodeLinks([release.episodeId])).resolves.toEqual([ { episodeId: release.episodeId, url: release.url, @@ -583,16 +573,16 @@ describe("episodeContext", () => { { id: 0, partId: episode.partId, - releases: [], totalIndex: episode.totalIndex + 1, combiIndex: episode.totalIndex + 1, + releases: [], }, { id: 0, partId: episode.partId, - releases: [], totalIndex: episode.totalIndex + 2, combiIndex: episode.totalIndex + 2, + releases: [], }, ]); diff --git a/packages/core/src/test/toolTest.spec.ts b/packages/core/src/test/toolTest.spec.ts index 81965dd7..ada9aca5 100644 --- a/packages/core/src/test/toolTest.spec.ts +++ b/packages/core/src/test/toolTest.spec.ts @@ -95,7 +95,6 @@ describe("testing tool.js", () => { it(`should always return true if same string - '${hashTool.tag}'`, async () => { for (const testString of testStrings) { const hash = await hashTool.hash(testString); - // @ts-expect-error await expect(hashTool.equals(testString, hash.hash, hash.salt)).resolves.toBe(true); } }); @@ -680,15 +679,19 @@ describe("testing tool.js", 
() => { const now = new Date(); const testRelease = { + id: 1, episodeId: 1, releaseDate: now, title: "none", url: "google.de", + locked: false, }; // should always equals itself expect(tools.equalsRelease(testRelease, testRelease)).toBe(true); expect( tools.equalsRelease(testRelease, { + id: 1, + locked: false, episodeId: 1, releaseDate: now, title: "none", @@ -697,6 +700,7 @@ describe("testing tool.js", () => { ).toBe(true); expect( tools.equalsRelease(testRelease, { + id: 1, episodeId: 1, releaseDate: now, title: "none", @@ -707,6 +711,7 @@ describe("testing tool.js", () => { expect( tools.equalsRelease(testRelease, { + id: 1, episodeId: 1, releaseDate: now, title: "none", diff --git a/packages/core/src/tools.ts b/packages/core/src/tools.ts index ffce3341..cba1dce0 100644 --- a/packages/core/src/tools.ts +++ b/packages/core/src/tools.ts @@ -10,32 +10,33 @@ import { Indexable, ExtractedIndex, NetworkTrack, + ReadonlyNullish, } from "./types"; import crypto from "crypto"; import bcrypt from "bcryptjs"; import emojiRegex from "emoji-regex"; import * as fs from "fs"; import * as path from "path"; -import { Query } from "mysql"; import { validate as validateUuid } from "uuid"; import { isNumber } from "validate.js"; import { setTimeout as setTimeoutPromise } from "timers/promises"; import { ParseError, ValidationError } from "./error"; import { networkInterfaces } from "os"; +import QueryStream from "pg-query-stream"; export function isAbortError(error: unknown): error is Error { return error instanceof Error && error.name === "AbortError"; } -export function isNumberOrArray(value: number | any[]): boolean { +export function isNumberOrArray(value: number | readonly any[]): boolean { return Array.isArray(value) ? 
!!value.length : Number.isInteger(value); } -export function isInvalidId(id: unknown): boolean { +export function isInvalidId(id: ReadonlyNullish): boolean { return !Number.isInteger(id) || (id as number) < 1; } -export function toArray(value: string): Nullable { +export function toArray(value: string): Nullable { try { return JSON.parse(value); } catch (error) { @@ -43,7 +44,7 @@ export function toArray(value: string): Nullable { } } -export function isInvalidSimpleMedium(value: unknown): boolean { +export function isInvalidSimpleMedium(value: ReadonlyNullish): boolean { if (typeof value !== "object" || !value) { return true; } @@ -59,7 +60,7 @@ export function isInvalidSimpleMedium(value: unknown): boolean { ); } -export function remove(array: T[], item: T): boolean { +export function remove>(array: T[], item: T): boolean { const index = array.indexOf(item); if (index < 0) { return false; @@ -68,7 +69,7 @@ export function remove(array: T[], item: T): boolean { return true; } -export function removeLike(array: T[], equals: (item: T) => boolean): boolean { +export function removeLike>(array: T[], equals: (item: T) => boolean): boolean { const index = array.findIndex((value) => equals(value)); if (index < 0) { return false; @@ -79,7 +80,7 @@ export function removeLike(array: T[], equals: (item: T) => boolean): boolean export type ArrayCallback = (value: T, index: number) => void; -export function forEachArrayLike(arrayLike: ArrayLike, callback: ArrayCallback, start = 0): void { +export function forEachArrayLike(arrayLike: Readonly>, callback: ArrayCallback, start = 0): void { for (let i = start; i < arrayLike.length; i++) { callback(arrayLike[i], i); } @@ -135,7 +136,11 @@ export function multiSingle(item: T, cb: multiSingleCallback(array: T[], item: T | T[], allowNull?: boolean): void { +export function addMultiSingle>( + array: T[], + item: T | T[], + allowNull?: boolean, +): void { if (item != null || allowNull) { if (Array.isArray(item)) { array.push(...item); 
@@ -153,7 +158,11 @@ export function addMultiSingle(array: T[], item: T | T[], allowNull?: boolean * @param item item or items to remove from the array * @param allowNull if a null-ish item value can be removed from the array */ -export function removeMultiSingle(array: T[], item: T | T[], allowNull?: boolean): void { +export function removeMultiSingle>( + array: T[], + item: T | T[], + allowNull?: boolean, +): void { if (item != null || allowNull) { if (Array.isArray(item)) { item.forEach((value) => remove(array, value)); @@ -172,7 +181,7 @@ export function removeMultiSingle(array: T[], item: T | T[], allowNull?: bool * @param key a key value for the map * @param valueCb value supplier if no non-null-ish value is mapped to the key */ -export function getElseSet(map: Map, key: K, valueCb: () => V): V { +export function getElseSet, V>(map: Map, key: K, valueCb: () => V): V { let value = map.get(key); if (value == null) { value = valueCb(); @@ -198,7 +207,10 @@ export function getElseSetObj(map: Record, k * @param array array to filter all duplicates out * @param isEqualCb alternative predicate determining if two values are equal */ -export function unique(array: ArrayLike, isEqualCb?: (value: T, other: T) => boolean): T[] { +export function unique>( + array: ArrayLike, + isEqualCb?: (value: T, other: T) => boolean, +): T[] { const uniques: T[] = []; if (isEqualCb) { @@ -231,7 +243,12 @@ export function unique(array: ArrayLike, isEqualCb?: (value: T, other: T) * @param start startIndex of the search (inclusively), a number greater than zero * @param end endIndex of the search (exclusively), a number smaller or equal to the length of the array-like */ -export function some(array: ArrayLike, predicate: Predicate, start = 0, end = array.length): boolean { +export function some>( + array: ArrayLike, + predicate: Predicate, + start = 0, + end = array.length, +): boolean { if (start < 0 || end > array.length) { throw RangeError(`Invalid Search Range, Valid: 
0-${array.length}, Given: ${start}-${end}`); } @@ -281,7 +298,7 @@ export function contains(s1: string, s2: string): boolean { * * @param array array to count the value occurrences of */ -export function countOccurrence(array: T[]): Map { +export function countOccurrence>(array: readonly T[]): ReadonlyMap { const occurrenceMap: Map = new Map(); for (const value of array) { const counted = occurrenceMap.get(value) ?? 0; @@ -290,9 +307,9 @@ export function countOccurrence(array: T[]): Map { return occurrenceMap; } -export type Predicate = (value: T, index: number) => boolean; +export type Predicate> = (value: T, index: number) => boolean; -export function count(array: T[], condition: Predicate): number { +export function count>(array: readonly T[], condition: Predicate): number { let countNumber = 0; for (let i = 0; i < array.length; i++) { if (condition(array[i], i)) { @@ -302,9 +319,9 @@ export function count(array: T[], condition: Predicate): number { return countNumber; } -export type Comparator = (previous: T, current: T) => number; +export type Comparator> = (previous: T, current: T) => number; -function createComparator(key: keyof T): Comparator { +function createComparator>(key: keyof T): Comparator { // @ts-expect-error return (previousValue: T, currentValue: T) => previousValue[key] - currentValue[key]; } @@ -319,7 +336,10 @@ function createComparator(key: keyof T): Comparator { * @param array array to inspect * @param comparator field comparator or value comparator to compare values with */ -export function max(array: T[], comparator: keyof T | Comparator): Optional { +export function max>( + array: readonly T[], + comparator: keyof T | Comparator, +): Optional { if (!array.length) { return; } @@ -339,7 +359,7 @@ export function max(array: T[], comparator: keyof T | Comparator): Optiona * * @param array array to inspect */ -export function maxValue(array: T[]): Optional { +export function maxValue(array: readonly T[]): Optional { if (!array.length) { 
return; } @@ -355,7 +375,7 @@ export function maxValue(array: T[]): Optional { * * @param array array to inspect */ -export function minValue(array: T[]): Optional { +export function minValue(array: readonly T[]): Optional { if (!array.length) { return; } @@ -374,7 +394,10 @@ export function minValue(array: T[]): Optional { * @param array array to inspect * @param comparator field comparator or value comparator to compare values with */ -export function min(array: T[], comparator: keyof T | Comparator): Optional { +export function min>( + array: readonly T[], + comparator: keyof T | Comparator, +): Optional { if (!array.length) { return; } @@ -448,7 +471,10 @@ export const delay = setTimeoutPromise; * @param firstRelease first release * @param secondRelease second release */ -export function equalsRelease(firstRelease?: EpisodeRelease, secondRelease?: EpisodeRelease): boolean { +export function equalsRelease( + firstRelease?: Readonly, + secondRelease?: Readonly, +): boolean { return ( // eslint-disable-next-line eqeqeq firstRelease == secondRelease || @@ -485,7 +511,7 @@ export function stringify(object: unknown): string { }); } -export function jsonReplacer(key: unknown, value: unknown): unknown { +export function jsonReplacer(_key: unknown, value: ReadonlyNullish): unknown { if (value instanceof Error) { const error: any = {}; @@ -549,7 +575,7 @@ export function isString(value: unknown): value is string { * * @param s string to parse */ -export function stringToNumberList(s: string): number[] { +export function stringToNumberList(s: string): readonly number[] { s = s.trim(); if (!s.startsWith("[") || !s.endsWith("]")) { return []; @@ -568,9 +594,9 @@ interface Hash { export interface Hasher { tag: string; - hash(text: string, saltLength?: number): Promise; + hash(text: string, saltLength?: number): Promise>; - equals(text: string, hash: string, salt: string): Promise; + equals(text: string, hash: string, salt?: string): Promise; } interface ShaHasher extends 
Hasher { @@ -608,7 +634,7 @@ export const ShaHash: ShaHasher = { /** * Checks whether the text hashes to the same hash. */ - equals(text, hash, salt) { + equals(text, hash, salt: string) { return promisify(() => this.innerHash(text, salt) === hash); }, }; @@ -725,7 +751,7 @@ export function promisify(callback: () => T): Promise { * * @param value object to combine */ -export function combiIndex(value: Indexable): number { +export function combiIndex(value: Readonly): number { const combi = Number(`${value.totalIndex}.${value.partialIndex || 0}`); if (Number.isNaN(combi)) { throw new ParseError(`invalid argument: total: '${value.totalIndex}', partial: '${value.partialIndex + ""}'`); @@ -740,7 +766,7 @@ export function combiIndex(value: Indexable): number { * * @param value value to check the Indices from */ -export function checkIndices(value: Indexable): void { +export function checkIndices(value: Readonly): void { if (value.totalIndex == null || value.totalIndex < -1 || !Number.isInteger(value.totalIndex)) { throw new ValidationError("invalid toc content, totalIndex invalid"); } @@ -750,11 +776,11 @@ export function checkIndices(value: Indexable): void { } export function extractIndices( - groups: string[], + groups: readonly string[], allPosition: number, totalPosition: number, partialPosition: number, -): Nullable { +): Nullable> { const whole = Number(groups[allPosition]); if (Number.isNaN(whole)) { @@ -779,7 +805,7 @@ const indexRegex = /(-?\d+)(\.(\d+))?/; * * @param value the number to separate */ -export function separateIndex(value: number): Indexable { +export function separateIndex(value: number): Readonly { if (!isNumber(value)) { throw new TypeError("not a number"); } @@ -876,13 +902,8 @@ export function findAbsoluteProjectDirPath(dir = process.cwd()): string { return dir; } -export function isQuery(value: unknown): value is Query { - return ( - typeof value === "object" && - !!value && - typeof (value as any).on === "function" && - typeof (value as 
any).stream === "function" - ); +export function isQuery(value: ReadonlyNullish): value is QueryStream { + return value instanceof QueryStream; } /** @@ -894,7 +915,7 @@ export function isQuery(value: unknown): value is Query { * * @param value value to validate as an uuid */ -export function validUuid(value: unknown): value is Uuid { +export function validUuid(value: ReadonlyNullish): value is Uuid { return isString(value) && value.length === 36 && validateUuid(value); } @@ -907,7 +928,10 @@ export function getDate(value: string): Nullable { * Return 0 <= i <= array.length such that !pred(array[i - 1]) && pred(array[i]). * From Stackoverflow: https://stackoverflow.com/a/41956372 */ -export function binarySearch(array: T[], pred: (value: T) => boolean): number { +export function binarySearch>( + array: readonly T[], + pred: (value: T) => boolean, +): number { let lo = -1; let hi = array.length; while (1 + lo < hi) { @@ -929,7 +953,7 @@ export function binarySearch(array: T[], pred: (value: T) => boolean): number * @param array the array to batch * @param batchSize the maximum size of a batch */ -export function batch(array: T[], batchSize: number): T[][] { +export function batch(array: readonly T[], batchSize: number): T[][] { const batches = []; let currentBatch = []; diff --git a/packages/core/src/types.ts b/packages/core/src/types.ts index dae6b63a..77f4577b 100644 --- a/packages/core/src/types.ts +++ b/packages/core/src/types.ts @@ -1,5 +1,14 @@ import { MediaType } from "./tools"; -import { FieldInfo, MysqlError, Query } from "mysql"; +import { Readable } from "stream"; +import { SimpleEpisodeReleases, SimpleJob, SimpleJobHistory } from "./database/databaseTypes"; + +export type DBEntity = { + [K in keyof T as Lowercase]: T[K]; +}; + +export interface Entity { + id: number; +} export interface ExternalStorageUser { userUuid: Uuid; @@ -74,17 +83,17 @@ export interface MinMedium { */ export interface SimpleMedium { id?: Id; - countryOfOrigin?: string; - 
languageOfOrigin?: string; - author?: string; title: string; medium: MediaType; - artist?: string; - lang?: string; - stateOrigin?: ReleaseState; - stateTL?: ReleaseState; - series?: string; - universe?: string; + countryOfOrigin?: string | null; + languageOfOrigin?: string | null; + author?: string | null; + artist?: string | null; + lang?: string | null; + stateOrigin?: ReleaseState | null; + stateTl?: ReleaseState | null; + series?: string | null; + universe?: string | null; } /** @@ -195,10 +204,10 @@ export type UpdateMedium = Partial & { * $ref: "#/components/schemas/Id" */ export interface Medium extends SimpleMedium { - parts?: Id[]; - latestReleased: Id[]; - currentRead: Id; - unreadEpisodes: Id[]; + parts?: readonly Id[]; + latestReleased: readonly Id[]; + currentRead: Id | null; + unreadEpisodes: readonly Id[]; } export interface TocSearchMedium { @@ -260,7 +269,7 @@ export interface ExtractedIndex { export interface Indexable { totalIndex: number; - partialIndex?: number; + partialIndex?: number | null; } /** @@ -310,7 +319,7 @@ export interface MinPart extends Indexable { * $ref: "#/components/schemas/Episode" */ export interface Part extends MinPart { - episodes: Episode[] | Id[]; + episodes: readonly SimpleEpisodeReleases[] | Id[]; } /** @@ -336,15 +345,16 @@ export interface Part extends MinPart { * $ref: "#/components/schemas/SimpleEpisode" */ export interface AddPart extends MinPart { - episodes: SimpleEpisode[]; + episodes: SimpleEpisodeReleases[]; } export interface FullPart extends Part { - episodes: Episode[]; + episodes: SimpleEpisodeReleases[]; } +// @ts-expect-error export interface ShallowPart extends Part { - episodes: Id[]; + episodes: readonly Id[]; } /** @@ -372,7 +382,7 @@ export interface ShallowPart extends Part { export interface SimpleEpisode extends Indexable { id: Id; partId: Id; - combiIndex?: number; + combiIndex: number; releases: EpisodeRelease[]; } @@ -469,11 +479,12 @@ export interface SimpleRelease { * type: string */ 
export interface EpisodeRelease extends SimpleRelease { + id: number; title: string; releaseDate: Date; - locked?: boolean; - sourceType?: string; - tocId?: Id; + locked: boolean; + sourceType?: string | null; + tocId?: Id | null; } /** @@ -523,7 +534,7 @@ export type PureDisplayRelease = Omit; export interface DisplayRelease { episodeId: Id; title: string; - link: Link; + url: Link; mediumId: Id; locked?: boolean; date: Date; @@ -531,8 +542,8 @@ export interface DisplayRelease { } export interface DisplayReleasesResponse { - releases: DisplayRelease[]; - media: MinMedium[]; + releases: readonly DisplayRelease[]; + media: readonly MinMedium[]; latest: Date; } @@ -777,8 +788,8 @@ export interface ExternalUser { identifier: string; type: number; lists: ExternalList[]; - lastScrape?: Date; - cookies?: Nullable; + lastScrape?: Date | null; + cookies?: string | null; } /** @@ -1123,6 +1134,12 @@ export type Json> = { export type Primitive = string | number | boolean; +export type ReadonlyNullish = T extends null | undefined + ? T + : T extends unknown + ? Readonly | null | undefined + : Readonly; + export interface Invalidation { mediumId?: Id; partId?: Id; @@ -1177,7 +1194,11 @@ export type Link = string; */ export type Id = number; -export type Insert = Omit & Partial>; +export type Insert = T extends { id: Id } + ? Omit & Partial> + : T extends { uuid: Uuid } + ? 
Omit & Partial> + : never; export enum ScrapeName { searchForToc = "searchForToc", @@ -1249,14 +1270,9 @@ export interface JobItem { previousScheduledAt?: Date; } -export interface JobRequest { - type: ScrapeName; - interval: number; - deleteAfterRun: boolean; +export interface JobRequest + extends Pick { runImmediately: boolean; - name?: string; - runAfter?: JobRequest | JobItem; - arguments?: string; } export interface BasicJobStats { @@ -1493,7 +1509,7 @@ export type JobHistoryItem = Pick { - items: T[]; + items: readonly T[]; next: T[K]; total: number; } @@ -1551,9 +1567,10 @@ export interface JobTrack { * $ref: "#/components/schemas/JobHistoryItem" */ export interface JobDetails { - job?: JobItem; - history: JobHistoryItem[]; + job: SimpleJob | null; + history: readonly SimpleJobHistory[]; } + export type JobStatFilter = NamedJobStatFilter | TimeJobStatFilter; export interface NamedJobStatFilter { @@ -1575,16 +1592,15 @@ export enum MilliTime { DAY = 86400000, } -export interface TypedQuery extends Query { - on(ev: "packet", callback: (packet: any) => void): Query; - - on(ev: "result", callback: (row: Packet, index: number) => void): Query; - - on(ev: "error", callback: (err: MysqlError) => void): Query; - - on(ev: "fields", callback: (fields: FieldInfo[], index: number) => void): Query; - - on(ev: "end", callback: () => void): Query; +export interface TypedQuery extends Readable { + on(event: "close", listener: () => void): this; + on(event: "end", listener: () => void): this; + on(event: "data", callback: (row: Packet) => void): this; + on(event: "error", listener: (err: Error) => void): this; + on(event: "pause", listener: () => void): this; + on(event: "readable", listener: () => void): this; + on(event: "resume", listener: () => void): this; + on(event: string | symbol, listener: (...args: any[]) => void): this; } /** @@ -1713,16 +1729,16 @@ export interface DataStats { * $ref: "#/components/schemas/PureNews" */ export interface NewData { - tocs: 
FullMediumToc[]; - media: SimpleMedium[]; - releases: PureDisplayRelease[]; - episodes: PureEpisode[]; - parts: MinPart[]; - lists: UserList[]; - extLists: PureExternalList[]; - extUser: PureExternalUser[]; - mediaInWait: MediumInWait[]; - news: PureNews[]; + tocs: readonly FullMediumToc[]; + media: readonly SimpleMedium[]; + releases: readonly PureDisplayRelease[]; + episodes: readonly PureEpisode[]; + parts: readonly MinPart[]; + lists: readonly UserList[]; + extLists: readonly PureExternalList[]; + extUser: readonly PureExternalUser[]; + mediaInWait: readonly MediumInWait[]; + news: readonly PureNews[]; } /** @@ -1771,7 +1787,7 @@ export interface MediumInWaitSearch { export interface ScraperHook { id: number; name: string; - state: string; + enabled: boolean; message: string; } @@ -1901,17 +1917,12 @@ export type JobStatSummary = { | "lagging" >; -export enum HookState { - ENABLED = "enabled", - DISABLED = "disabled", -} - export interface CustomHook { id: number; name: string; state: string; updated_at?: Date; - hookState: HookState; + enabled: boolean; comment: string; } @@ -1930,17 +1941,17 @@ export interface QueryItems { } export interface QueryItemsResult { - episodeReleases: EpisodeRelease[]; // by episode id - episodes: Episode[]; - partEpisodes: Record; // by part id - partReleases: Record; // by part id - parts: Part[]; - media: SimpleMedium[]; - tocs: FullMediumToc[]; // by toc id - mediaTocs: FullMediumToc[]; // by medium id - mediaLists: List[]; - externalMediaLists: ExternalList[]; - externalUser: ExternalUser[]; + episodeReleases: readonly EpisodeRelease[]; // by episode id + episodes: readonly Episode[]; + partEpisodes: Readonly>; // by part id + partReleases: Readonly>; // by part id + parts: readonly Part[]; + media: readonly SimpleMedium[]; + tocs: readonly FullMediumToc[]; // by toc id + mediaTocs: readonly FullMediumToc[]; // by medium id + mediaLists: readonly List[]; + externalMediaLists: readonly ExternalList[]; + externalUser: readonly 
ExternalUser[]; } export interface Notification { diff --git a/packages/core/tsconfig.json b/packages/core/tsconfig.json index 6847418b..95756fda 100644 --- a/packages/core/tsconfig.json +++ b/packages/core/tsconfig.json @@ -20,11 +20,6 @@ "jest", ], "outDir": "./dist/", - "paths": { - "@/*": [ - "./src/*" - ] - } }, "include": [ "src/**/*.ts", diff --git a/packages/scraper/src/externals/direct/undergroundScraper.ts b/packages/scraper/src/externals/direct/undergroundScraper.ts index 52c45ecb..971daccc 100644 --- a/packages/scraper/src/externals/direct/undergroundScraper.ts +++ b/packages/scraper/src/externals/direct/undergroundScraper.ts @@ -1,12 +1,18 @@ import { EpisodeContent, Hook, NewsScrapeResult } from "../types"; -import { EpisodeRelease, News, SimpleEpisode, EmptyPromise, VoidablePromise } from "enterprise-core/dist/types"; +import { News, SimpleEpisode, EmptyPromise, VoidablePromise } from "enterprise-core/dist/types"; import logger from "enterprise-core/dist/logger"; import { max, MediaType, sanitizeString } from "enterprise-core/dist/tools"; -import { episodeStorage, mediumStorage, partStorage } from "enterprise-core/dist/database/storages/storage"; +import { + episodeReleaseStorage, + episodeStorage, + mediumStorage, + partStorage, +} from "enterprise-core/dist/database/storages/storage"; import request from "../request"; import { ScraperError } from "../errors"; import { scraperLog, LogType, getText } from "./directTools"; import { storeHookName } from "../scraperTools"; +import { SimpleRelease } from "enterprise-core/dist/database/databaseTypes"; export const sourceType = "qidian_underground"; @@ -140,15 +146,17 @@ async function processMediumNews(mediumTitle: string, potentialNews: News[]): Em } }); - const sourcedReleases = await episodeStorage.getSourcedReleases(sourceType, mediumId); + const sourcedReleases = await episodeReleaseStorage.getSourcedReleases(sourceType, mediumId); const toUpdateReleases = oldReleases - .map((value): EpisodeRelease 
=> { + .map((value): SimpleRelease => { return { title: value.title, url: value.link, releaseDate: value.date, sourceType, episodeId: 0, + id: 0, + locked: false, }; }) .filter((value) => { @@ -161,7 +169,7 @@ async function processMediumNews(mediumTitle: string, potentialNews: News[]): Em return foundRelease.url !== value.url; }); if (toUpdateReleases.length) { - episodeStorage.updateRelease(toUpdateReleases).catch(logger.error); + episodeReleaseStorage.updateReleases(toUpdateReleases).catch(logger.error); } } else { news = potentialNews; @@ -175,13 +183,16 @@ async function processMediumNews(mediumTitle: string, potentialNews: News[]): Em const totalIndex = Number(exec[1]); return { totalIndex, + combiIndex: totalIndex, releases: [ { + id: 0, episodeId: 0, sourceType, releaseDate: value.date, url: value.link, title: value.title, + locked: false, }, ], id: 0, diff --git a/packages/scraper/src/externals/hookManager.ts b/packages/scraper/src/externals/hookManager.ts index 9887979c..ee5bc52c 100644 --- a/packages/scraper/src/externals/hookManager.ts +++ b/packages/scraper/src/externals/hookManager.ts @@ -1,4 +1,4 @@ -import { CustomHook as CustomHookEntity, EmptyPromise } from "enterprise-core/dist/types"; +import { EmptyPromise } from "enterprise-core/dist/types"; import logger from "enterprise-core/dist/logger"; import { getHook as getWWHook } from "./direct/wuxiaworldScraper"; import { getHook as getGogoAnimeHook } from "./direct/gogoAnimeScraper"; @@ -11,7 +11,7 @@ import { getHook as getOpenLibraryHook } from "./direct/openLibraryScraper"; import { ContentDownloader, Hook, NewsScraper, SearchScraper, TocScraper, TocSearchScraper } from "./types"; import { customHookStorage, hookStorage } from "enterprise-core/dist/database/storages/storage"; import { getListManagerHooks } from "./listManager"; -import { MediaType, multiSingle } from "enterprise-core/dist/tools"; +import { isString, MediaType } from "enterprise-core/dist/tools"; import { HookConfig } from 
"./custom/types"; import { HookConfig as HookConfigV2 } from "./customv2/types"; import { createHook as createHookV2 } from "./customv2"; @@ -47,32 +47,32 @@ const nameHookMap = new Map(); const disabledHooks = new Set(); let timeoutId: NodeJS.Timeout | undefined; -export enum HookState { - ENABLED = "enabled", - DISABLED = "disabled", -} - function isHookConfigV2(config: HookConfig | HookConfigV2): config is HookConfigV2 { return "version" in config && config.version === 2; } async function loadCustomHooks(): Promise<{ custom: Hook[]; disabled: Set }> { - const hooks: CustomHookEntity[] = await customHookStorage.getHooks(); + const hooks = await customHookStorage.getHooks(); const loadedCustomHooks: Hook[] = []; const disabled = new Set(); for (const hookEntity of hooks) { - if (hookEntity.hookState === HookState.DISABLED) { + if (!hookEntity.enabled) { disabled.add(hookEntity.name); continue; } let hookConfig: HookConfig | HookConfigV2; - try { - hookConfig = JSON.parse(hookEntity.state); - } catch (error) { - logger.warn("Could not parse HookState of CustomHook", { hook_name: hookEntity.name }); - continue; + + if (isString(hookEntity.state)) { + try { + hookConfig = JSON.parse(hookEntity.state); + } catch (error) { + logger.warn("Could not parse HookConfig of CustomHook", { hook_name: hookEntity.name }); + continue; + } + } else { + hookConfig = hookEntity.state as unknown as HookConfig | HookConfigV2; } let customHook; @@ -99,7 +99,7 @@ async function loadCustomHooks(): Promise<{ custom: Hook[]; disabled: Set hook.name === value.name); @@ -109,10 +109,10 @@ async function loadRawHooks() { id: 0, message: "Newly discovered Hook", name: hook.name, - state: HookState.ENABLED, + enabled: true, }); } else { - if (storageHooks[index].state === HookState.DISABLED) { + if (!storageHooks[index].enabled) { disableHook(hook); } // remove all found storage hooks, so we can remove the superflous ones @@ -186,8 +186,8 @@ function disableHook(hook: Hook): Hook { return hook; 
} -function registerHooks(hook: Hook[] | Hook): void { - multiSingle(hook, (value: Hook) => { +function registerHooks(hooks: readonly Hook[]): void { + hooks.forEach((value: Hook) => { if (!value.name) { throw new ValidationError("hook without name!"); } diff --git a/packages/scraper/src/externals/scraperTools.ts b/packages/scraper/src/externals/scraperTools.ts index d9ef10b2..ee882f2d 100644 --- a/packages/scraper/src/externals/scraperTools.ts +++ b/packages/scraper/src/externals/scraperTools.ts @@ -5,7 +5,7 @@ import logger from "enterprise-core/dist/logger"; import { ContentDownloader, DownloadContent, EpisodeContent, Hook, Toc, TocContent } from "./types"; import { Cache } from "enterprise-core/dist/cache"; import env from "enterprise-core/dist/env"; -import { episodeStorage } from "enterprise-core/dist/database/storages/storage"; +import { episodeReleaseStorage, episodeStorage } from "enterprise-core/dist/database/storages/storage"; import { MissingResourceError } from "./errors"; import { episodeDownloaderEntries, @@ -167,7 +167,7 @@ export async function search(title: string, medium: number): Promise value) as unknown as SearchResult[]; } -export async function downloadEpisodes(episodes: Episode[]): Promise { +export async function downloadEpisodes(episodes: readonly Episode[]): Promise { await checkHooks(); const entries = episodeDownloaderEntries(); @@ -228,7 +228,7 @@ export async function downloadEpisodes(episodes: Episode[]): Promise { if (value.locked && value.index === combiIndex(episode)) { release.locked = true; - episodeStorage.updateRelease(release).catch(logger.error); + episodeReleaseStorage.updateReleases([release]).catch(logger.error); return false; } return value.content.filter((s) => s).length; diff --git a/packages/scraper/src/jobs/checkToc.ts b/packages/scraper/src/jobs/checkToc.ts index ad79e5e1..c900a9cc 100644 --- a/packages/scraper/src/jobs/checkToc.ts +++ b/packages/scraper/src/jobs/checkToc.ts @@ -1,11 +1,11 @@ -import { 
mediumStorage, episodeStorage } from "enterprise-core/dist/database/storages/storage"; +import { mediumStorage, episodeStorage, mediumTocStorage } from "enterprise-core/dist/database/storages/storage"; import logger from "enterprise-core/dist/logger"; import { getElseSet, hasMediaType, maxValue } from "enterprise-core/dist/tools"; -import { TocSearchMedium, JobRequest, ScrapeName, Optional } from "enterprise-core/dist/types"; +import { TocSearchMedium, JobRequest, ScrapeName } from "enterprise-core/dist/types"; import { tocScraperEntries, tocDiscoveryEntries, getHooks } from "../externals/hookManager"; -export const checkTocsJob = async (): Promise => { - const mediaTocs = await mediumStorage.getAllMediaTocs(); +export const checkTocsJob = async (): Promise => { + const mediaTocs = await mediumTocStorage.getAllMediaTocs(); const tocSearchMedia = await mediumStorage.getTocSearchMedia(); const mediaWithTocs: Map = new Map(); @@ -67,7 +67,7 @@ export const checkTocsJob = async (): Promise => { } }) .flat(2); - const newJobs2: Array> = await Promise.all(promises); + const newJobs2 = await Promise.all(promises); return ( [newJobs1, newJobs2] // flaten to one dimensional array @@ -77,7 +77,11 @@ export const checkTocsJob = async (): Promise => { ); }; -function searchTocJob(id: number, tocSearch?: TocSearchMedium, availableTocs?: string[]): JobRequest[] { +function searchTocJob( + id: number, + tocSearch?: Readonly, + availableTocs?: readonly string[], +): readonly JobRequest[] { const consumed: RegExp[] = []; if (availableTocs) { @@ -132,7 +136,7 @@ function searchTocJob(id: number, tocSearch?: TocSearchMedium, availableTocs?: s job.runImmediately = true; continue; } - job.runAfter = searchJobs[i - 1]; + // FIXME: job.runAfter = searchJobs[i - 1]; } return searchJobs; } diff --git a/packages/scraper/src/jobs/feed.ts b/packages/scraper/src/jobs/feed.ts index 11399394..19e2a6af 100644 --- a/packages/scraper/src/jobs/feed.ts +++ b/packages/scraper/src/jobs/feed.ts @@ -3,7 
+3,7 @@ import { NewsResult } from "enterprise-core/dist/types"; import feedParserPromised from "feedparser-promised"; import { checkLink } from "../externals/scraperTools"; -export const feed = async (feedLink: string): Promise => { +export const feed = async (feedLink: string): Promise> => { logger.info("scraping feed", { url: feedLink }); const startTime = Date.now(); // noinspection JSValidateTypes diff --git a/packages/scraper/src/jobs/news.ts b/packages/scraper/src/jobs/news.ts index 86aa95d8..7d5dca5c 100644 --- a/packages/scraper/src/jobs/news.ts +++ b/packages/scraper/src/jobs/news.ts @@ -1,8 +1,11 @@ +import { SimpleEpisodeReleases, SimpleRelease } from "enterprise-core/dist/database/databaseTypes"; import { mediumInWaitStorage, mediumStorage, episodeStorage, partStorage, + episodeReleaseStorage, + mediumTocStorage, } from "enterprise-core/dist/database/storages/storage"; import { DatabaseError, MissingEntityError, ValidationError } from "enterprise-core/dist/error"; import logger from "enterprise-core/dist/logger"; @@ -13,7 +16,6 @@ import { MediumInWait, Optional, LikeMedium, - SimpleEpisode, EpisodeRelease, News, } from "enterprise-core/dist/types"; @@ -22,7 +24,7 @@ import { sourceType } from "../externals/direct/undergroundScraper"; import { getHook } from "../externals/hookManager"; import { NewsScraper } from "../externals/types"; -export const scrapeNewsJob = async (name: string): Promise => { +export const scrapeNewsJob = async (name: string): Promise> => { const hook = getHook(name); if (!hook.newsAdapter) { @@ -32,7 +34,7 @@ export const scrapeNewsJob = async (name: string): Promise => { return scrapeNews(hook.newsAdapter); }; -export const scrapeNews = async (adapter: NewsScraper): Promise => { +export const scrapeNews = async (adapter: NewsScraper): Promise> => { if (!adapter.link || !validate.isString(adapter.link)) { throw new ValidationError("missing link on newsScraper"); } @@ -79,7 +81,7 @@ async function processMediumNews( title: 
string, type: MediaType, tocLink: Optional, - potentialNews: EpisodeNews[], + potentialNews: readonly EpisodeNews[], update = false, ): Promise { const likeMedium: LikeMedium = await mediumStorage.getLikeMedium({ title, type }); @@ -91,7 +93,7 @@ async function processMediumNews( return; } const mediumId = likeMedium.medium.id; - const latestReleases: SimpleEpisode[] = await episodeStorage.getLatestReleases(mediumId); + const latestReleases = await episodeStorage.getLatestReleases(mediumId); const latestRelease = max(latestReleases, (previous, current) => { const maxPreviousRelease = max(previous.releases, "releaseDate"); @@ -107,7 +109,7 @@ async function processMediumNews( throw new DatabaseError(`could not create standard part for mediumId: '${mediumId}'`); } - let newEpisodeNews: EpisodeNews[]; + let newEpisodeNews: readonly EpisodeNews[]; if (latestRelease) { const oldReleases: EpisodeNews[] = []; @@ -126,8 +128,9 @@ async function processMediumNews( title: value.episodeTitle, url: value.link, releaseDate: value.date, - locked: value.locked, + locked: !!value.locked, episodeId: 0, + id: 0, }); return value.episodeIndex; }); @@ -149,7 +152,6 @@ async function processMediumNews( if (!value.id) { return episodeStorage .addEpisode({ - id: 0, // @ts-expect-error partId: standardPart.id, partialIndex: value.partialIndex, @@ -159,21 +161,22 @@ async function processMediumNews( .then(() => undefined); } release.episodeId = value.id; - return episodeStorage.addRelease(release).then(() => undefined); + return episodeReleaseStorage.addReleases([release]).then(() => undefined); }); await Promise.all(promises); } if (update) { - const sourcedReleases = await episodeStorage.getSourcedReleases(sourceType, mediumId); + const sourcedReleases = await episodeReleaseStorage.getSourcedReleases(sourceType, mediumId); const toUpdateReleases = oldReleases - .map((value): EpisodeRelease => { + .map((value): SimpleRelease => { return { title: value.episodeTitle, url: value.link, 
releaseDate: value.date, - locked: value.locked, + locked: !!value.locked, sourceType, episodeId: 0, + id: 0, }; }) .filter((value) => { @@ -186,22 +189,24 @@ async function processMediumNews( return foundRelease.url !== value.url; }); if (toUpdateReleases.length) { - episodeStorage.updateRelease(toUpdateReleases).catch(logger.error); + episodeReleaseStorage.updateReleases(toUpdateReleases).catch(logger.error); } } } else { newEpisodeNews = potentialNews; } - const newEpisodes = newEpisodeNews.map((value): SimpleEpisode => { + const newEpisodes = newEpisodeNews.map((value): SimpleEpisodeReleases => { return { totalIndex: value.episodeTotalIndex, partialIndex: value.episodePartialIndex, + combiIndex: value.episodeIndex, releases: [ { + id: 0, episodeId: 0, releaseDate: value.date, url: value.link, - locked: value.locked, + locked: !!value.locked, title: value.episodeTitle, }, ], @@ -215,6 +220,10 @@ async function processMediumNews( await episodeStorage.addEpisode(newEpisodes); } if (tocLink) { - await mediumStorage.addToc(mediumId, tocLink); + const links = await mediumTocStorage.getTocLinkByMediumId(mediumId); + + if (!links.includes(tocLink)) { + await mediumTocStorage.addToc(mediumId, tocLink); + } } } diff --git a/packages/scraper/src/jobs/queueExternalUser.ts b/packages/scraper/src/jobs/queueExternalUser.ts index ea06ac35..0f96db06 100644 --- a/packages/scraper/src/jobs/queueExternalUser.ts +++ b/packages/scraper/src/jobs/queueExternalUser.ts @@ -1,13 +1,14 @@ +import { SimpleExternalUser } from "enterprise-core/dist/database/databaseTypes"; import { externalUserStorage } from "enterprise-core/dist/database/storages/storage"; -import { JobRequest, ExternalUser, ScrapeName } from "enterprise-core/dist/types"; +import { JobRequest, ScrapeName } from "enterprise-core/dist/types"; import { factory } from "../externals/listManager"; -export async function queueExternalUser(): Promise { +export async function queueExternalUser(): Promise { // 
eslint-disable-next-line prefer-rest-params console.log("queueing all external user", arguments); - const externalUser: ExternalUser[] = await externalUserStorage.getScrapeExternalUser(); + const externalUser = await externalUserStorage.getScrapeExternalUser(); - const promises: Array> = []; + const promises: Array> = []; for (const user of externalUser) { const listManager = factory(user.type, user.cookies == null ? undefined : user.cookies); promises.push( @@ -17,7 +18,7 @@ export async function queueExternalUser(): Promise { }), ); } - const results: Array<[boolean, ExternalUser]> = await Promise.all(promises); + const results: Array<[boolean, SimpleExternalUser]> = await Promise.all(promises); return results .filter((value) => value[0]) diff --git a/packages/scraper/src/jobs/queueTocs.ts b/packages/scraper/src/jobs/queueTocs.ts index 186d27be..c0fc773b 100644 --- a/packages/scraper/src/jobs/queueTocs.ts +++ b/packages/scraper/src/jobs/queueTocs.ts @@ -1,4 +1,4 @@ -import { mediumStorage, storage } from "enterprise-core/dist/database/storages/storage"; +import { mediumTocStorage, storage } from "enterprise-core/dist/database/storages/storage"; import { EmptyPromise, JobRequest, MilliTime, ScrapeName } from "enterprise-core/dist/types"; import { TocRequest } from "../externals/types"; @@ -8,7 +8,7 @@ export const queueTocs = async (): EmptyPromise => { export const queueTocsJob = async (): Promise => { // TODO: 02.09.2019 a perfect candidate to use stream on - const tocs = await mediumStorage.getAllTocs(); + const tocs = await mediumTocStorage.getAllTocs(); return tocs.map((value): JobRequest => { const tocRequest: TocRequest = { mediumId: value.mediumId, url: value.link }; return { diff --git a/packages/scraper/src/jobs/searchForToc.ts b/packages/scraper/src/jobs/searchForToc.ts index d8f6dff0..033a945b 100644 --- a/packages/scraper/src/jobs/searchForToc.ts +++ b/packages/scraper/src/jobs/searchForToc.ts @@ -25,7 +25,9 @@ export async function searchForToc(item: 
TocSearchMedium, searcher: TocSearchScr const pageInfoKey = "search" + item.mediumId; const result = await storage.getPageInfo(link, pageInfoKey); - const dates = result.values.map((value) => new Date(value)).filter((value) => !Number.isNaN(value.getDate())); + const dates: Date[] = result.values + .map((value: string) => new Date(value)) + .filter((value: Date) => !Number.isNaN(value.getDate())); const maxDate = maxValue(dates); if (maxDate && maxDate.toDateString() === new Date().toDateString()) { diff --git a/packages/scraper/src/jobs/toc.ts b/packages/scraper/src/jobs/toc.ts index cd4aec78..6e8e0166 100644 --- a/packages/scraper/src/jobs/toc.ts +++ b/packages/scraper/src/jobs/toc.ts @@ -6,7 +6,7 @@ import { tocScraperEntries } from "../externals/hookManager"; import { TocRequest, TocResult, Toc, TocScraper } from "../externals/types"; import { isTocPart } from "../tools"; -export const toc = async (value: TocRequest): Promise => { +export const toc = async (value: Readonly): Promise> => { const result = await oneTimeToc(value); if (!result.tocs.length) { throw new ScraperError(`could not find toc for: url=${value.url} mediumId=${value.mediumId || ""}`); @@ -18,7 +18,12 @@ export const toc = async (value: TocRequest): Promise => { }; }; -export const oneTimeToc = async ({ url: link, uuid, mediumId, lastRequest }: TocRequest): Promise => { +export const oneTimeToc = async ({ + url: link, + uuid, + mediumId, + lastRequest, +}: Readonly): Promise> => { logger.info("scraping one time toc", { url: link }); const path = new URL(link).pathname; diff --git a/packages/scraper/src/scheduler/job.ts b/packages/scraper/src/scheduler/job.ts index c4112ee8..43554f8f 100644 --- a/packages/scraper/src/scheduler/job.ts +++ b/packages/scraper/src/scheduler/job.ts @@ -1,10 +1,11 @@ import { channel } from "diagnostics_channel"; import { runAsync, Store, StoreKey } from "enterprise-core/dist/asyncStorage"; +import { SimpleJob } from "enterprise-core/dist/database/databaseTypes"; 
import { jobStorage } from "enterprise-core/dist/database/storages/storage"; import { JobError } from "enterprise-core/dist/error"; import logger from "enterprise-core/dist/logger"; import { defaultNetworkTrack, stringify } from "enterprise-core/dist/tools"; -import { JobItem, JobState, Optional, ScrapeName } from "enterprise-core/dist/types"; +import { JobState, Optional, ScrapeName } from "enterprise-core/dist/types"; import { EndJobChannelMessage, StartJobChannelMessage } from "../externals/types"; import { scrapeMapping } from "./scrapeJobs"; @@ -20,7 +21,7 @@ type EventListener = { [K in keyof Events]: Array; }; -export function createJob(item: JobItem): Job | undefined { +export function createJob(item: Readonly): Job | undefined { let args: Optional; switch (item.type) { @@ -81,9 +82,9 @@ export class Job { private status: JobStatus = "waiting"; private readonly events: EventListener = Object.create(null); private startRun = 0; - public readonly currentItem: JobItem; + public readonly currentItem: SimpleJob; - public constructor(private readonly job: () => any | Promise, private readonly original: JobItem) { + public constructor(private readonly job: () => any | Promise, private readonly original: SimpleJob) { this.store.set(StoreKey.ABORT, this.controller.signal); if (original.lastRun) { @@ -196,21 +197,20 @@ export class Job { await this.emit("after"); const item = this.currentItem; + const previousScheduledAt = item.nextRun ?? 
undefined; if (item.deleteAfterRun) { - await jobStorage.removeJobs(item, end); + await jobStorage.removeFinishedJob(item, end, previousScheduledAt); } else { item.lastRun = new Date(); - item.previousScheduledAt = item.nextRun; - if (item.interval > 0) { if (item.interval < 60000) { item.interval = 60000; } - item.nextRun = new Date(item.lastRun.getTime() + item.interval); + item.nextRun = new Date(Date.now() + item.interval); } item.state = JobState.WAITING; - await jobStorage.updateJobs(item, end); + await jobStorage.updateFinishedJob(item, end, previousScheduledAt); } logger.info("Job finished now", { job_name: item.name, job_id: item.id }); diff --git a/packages/scraper/src/scheduler/jobHandler.ts b/packages/scraper/src/scheduler/jobHandler.ts index 34d016be..ff0b6409 100644 --- a/packages/scraper/src/scheduler/jobHandler.ts +++ b/packages/scraper/src/scheduler/jobHandler.ts @@ -10,7 +10,6 @@ import { import { isTocEpisode, isTocPart } from "../tools"; import { ScrapeList, ScrapeMedium } from "../externals/listManager"; import { - EpisodeRelease, ExternalList, Uuid, JobRequest, @@ -19,12 +18,10 @@ import { MinPart, News, ScrapeName, - SimpleEpisode, SimpleMedium, EmptyPromise, Optional, NewsResult, - CombinedEpisode, } from "enterprise-core/dist/types"; import logger from "enterprise-core/dist/logger"; import { ScrapeType, Toc, TocEpisode, TocPart, TocResult, ExternalListResult, ScrapeItem } from "../externals/types"; @@ -32,11 +29,13 @@ import * as validate from "validate.js"; import { checkTocContent } from "../externals/scraperTools"; import { DefaultJobScraper } from "./jobScheduler"; import { + episodeReleaseStorage, episodeStorage, externalListStorage, externalUserStorage, jobStorage, mediumStorage, + mediumTocStorage, newsStorage, partStorage, storage, @@ -47,6 +46,7 @@ import { MissingEntityError, ValidationError } from "enterprise-core/dist/error" import { DisabledHookError } from "../externals/hookManager"; import { registerOnExitHandler } from 
"enterprise-core/dist/exit"; import { remapMediumPart } from "../jobs/remapMediumParts"; +import { SimpleRelease, SimpleEpisode, SimpleEpisodeReleases } from "enterprise-core/dist/database/databaseTypes"; const scraper = DefaultJobScraper; @@ -54,7 +54,7 @@ const scraper = DefaultJobScraper; /** * */ -async function processNews({ link, rawNews }: NewsResult): EmptyPromise { +async function processNews({ link, rawNews }: Readonly): EmptyPromise { if (!link || !validate.isString(link)) { throw new ValidationError("link is not a string: " + typeof link); } @@ -70,7 +70,7 @@ async function processNews({ link, rawNews }: NewsResult): EmptyPromise { rawNews.filter(async (value) => { const newsHash = (await Md5Hash.hash(value.title + value.date)).hash; - const index = newsPageInfo.values.findIndex((hash) => newsHash === hash); + const index = newsPageInfo.values.findIndex((hash: string) => newsHash === hash); if (index >= 0) { newsPageInfo.values.splice(index, 1); @@ -98,7 +98,7 @@ async function processNews({ link, rawNews }: NewsResult): EmptyPromise { // await Storage.linkNewsToMedium(); } -async function feedHandler(result: NewsResult): EmptyPromise { +async function feedHandler(result: Readonly): EmptyPromise { result.rawNews.forEach((value) => { value.title = value.title.replace(/(\s|\n|\t)+/g, " "); }); @@ -129,7 +129,7 @@ async function getTocMedium(toc: Toc, uuid?: Uuid): Promise { let medium: Optional; if (toc.mediumId) { - medium = await mediumStorage.getSimpleMedium(toc.mediumId); + [medium] = await mediumStorage.getSimpleMedium([toc.mediumId]); } else { // get likemedium with similar title and same media type const likeMedium = await mediumStorage.getLikeMedium({ title: toc.title, type: toc.mediumType, link: "" }); @@ -147,23 +147,25 @@ async function getTocMedium(toc: Toc, uuid?: Uuid): Promise { uuid, ); - await mediumStorage.addToc(medium.id as number, toc.link); + await mediumTocStorage.addToc(medium.id as number, toc.link); } const mediumId = medium.id 
as number; - let currentToc = await mediumStorage.getSpecificToc(mediumId, toc.link); + let currentToc = await mediumTocStorage.getSpecificToc(mediumId, toc.link); // add toc if it does not still exist, instead of throwing an error currentToc ??= { link: toc.link, mediumId, - id: await mediumStorage.addToc(mediumId, toc.link), + medium: toc.mediumType, + title: toc.title, + id: await mediumTocStorage.addToc(mediumId, toc.link), }; // TODO: how to handle multiple authors, artists?, json array, csv, own table? const author = toc.authors?.length ? toc.authors[0].name : undefined; const artist = toc.artists?.length ? toc.artists[0].name : undefined; // update toc specific values - await mediumStorage.updateMediumToc({ + await mediumTocStorage.updateMediumToc({ id: currentToc.id, title: toc.title, mediumId, @@ -172,7 +174,7 @@ async function getTocMedium(toc: Toc, uuid?: Uuid): Promise { author, artist, stateOrigin: toc.statusCOO, - stateTL: toc.statusTl, + stateTl: toc.statusTl, languageOfOrigin: toc.langCOO, lang: toc.langTL, }); @@ -231,16 +233,16 @@ interface TocPartMapping { } interface PartChanges { - newEpisodes: SimpleEpisode[]; - newReleases: EpisodeRelease[]; - updateReleases: EpisodeRelease[]; - unchangedReleases: EpisodeRelease[]; + newEpisodes: SimpleEpisodeReleases[]; + newReleases: SimpleRelease[]; + updateReleases: SimpleRelease[]; + unchangedReleases: SimpleRelease[]; } function partEpisodesReleaseChanges( value: TocPartMapping, - storageEpisodes: Readonly, - storageReleases: Readonly, + storageEpisodes: Readonly, + storageReleases: Readonly, ): PartChanges { if (!value.part?.id) { throw new ValidationError(`something went wrong. 
got no part for tocPart ${value.tocPart.combiIndex}`); @@ -275,7 +277,7 @@ function partEpisodesReleaseChanges( }); const nonNewIndices: number[] = []; - const allEpisodes: SimpleEpisode[] = [...episodeMap.keys()] + const allEpisodes: SimpleEpisodeReleases[] = [...episodeMap.keys()] .filter((index) => { const notInStorage = episodes.every((episode) => combiIndex(episode) !== index || !episode.id); @@ -285,8 +287,8 @@ function partEpisodesReleaseChanges( nonNewIndices.push(index); return false; }) - .map((episodeIndex): SimpleEpisode => { - const episodeToc = episodeMap.get(episodeIndex); + .map((episodeCombiIndex): SimpleEpisodeReleases => { + const episodeToc = episodeMap.get(episodeCombiIndex); if (!episodeToc) { throw new ValidationError("something went wrong. got no value at this episode index"); @@ -297,12 +299,15 @@ function partEpisodesReleaseChanges( partId: value.part.id, totalIndex: episodeToc.tocEpisode.totalIndex, partialIndex: episodeToc.tocEpisode.partialIndex, + combiIndex: episodeCombiIndex, releases: [ { + id: 0, episodeId: 0, title: episodeToc.tocEpisode.title, url: episodeToc.tocEpisode.url, releaseDate: getLatestDate(episodeToc.tocEpisode.releaseDate || new Date()), + locked: false, }, ], }; @@ -338,16 +343,18 @@ function partEpisodesReleaseChanges( (release) => release.url === episodeValue.tocEpisode.url && release.episodeId === id, ); - const tocRelease: EpisodeRelease = { + const tocRelease: SimpleRelease = { episodeId: id, releaseDate: getLatestDate(episodeValue.tocEpisode.releaseDate || new Date()), title: episodeValue.tocEpisode.title, url: episodeValue.tocEpisode.url, - locked: episodeValue.tocEpisode.locked, + locked: !!episodeValue.tocEpisode.locked, tocId: episodeValue.tocEpisode.tocId, + id: 0, }; if (foundRelease) { + tocRelease.id = foundRelease.id; const date = foundRelease.releaseDate < tocRelease.releaseDate ? 
foundRelease.releaseDate : tocRelease.releaseDate; @@ -371,9 +378,9 @@ function partEpisodesReleaseChanges( return result; } -function filterToDeleteReleases(tocId: number, changes: PartChanges, releases: EpisodeRelease[]) { - const deleteReleases: EpisodeRelease[] = []; - const episodeReleasesMap = new Map(); +function filterToDeleteReleases(tocId: number, changes: PartChanges, releases: readonly SimpleRelease[]) { + const deleteReleases: SimpleRelease[] = []; + const episodeReleasesMap = new Map(); changes.newReleases.forEach((release) => { // map scraped toc @@ -408,7 +415,7 @@ function filterToDeleteReleases(tocId: number, changes: PartChanges, releases: E return deleteReleases; } -export async function saveToc(tocContent: MediumTocContent): EmptyPromise { +export async function saveToc(tocContent: Readonly): EmptyPromise { const mediumId = tocContent.medium.id as number; const tocParts = tocContent.parts; @@ -483,7 +490,7 @@ export async function saveToc(tocContent: MediumTocContent): EmptyPromise { throw new ValidationError("invalid url for release: " + tocContent.url); } - const releases: EpisodeRelease[] = await episodeStorage.getMediumReleasesByHost(mediumId, exec[0]); + const releases = await episodeReleaseStorage.getMediumReleasesByHost(mediumId, exec[0]); const changes: PartChanges[] = []; for (const mapping of indexPartsMap.values()) { @@ -501,13 +508,13 @@ export async function saveToc(tocContent: MediumTocContent): EmptyPromise { const deleteReleases = filterToDeleteReleases(tocContent.tocId, mergedChanges, releases); if (mergedChanges.newReleases.length) { - await episodeStorage.addRelease(mergedChanges.newReleases); + await episodeReleaseStorage.addReleases(mergedChanges.newReleases); } if (mergedChanges.updateReleases.length) { - await episodeStorage.updateRelease(mergedChanges.updateReleases); + await episodeReleaseStorage.updateReleases(mergedChanges.updateReleases); } if (deleteReleases.length) { - await 
episodeStorage.deleteRelease(deleteReleases); + await episodeReleaseStorage.deleteReleases(deleteReleases); } if (mergedChanges.newEpisodes.length) { await episodeStorage.addEpisode(mergedChanges.newEpisodes); @@ -571,7 +578,7 @@ function getLatestDate(date: Date): Date { return lastRun; } -async function addFeeds(feeds: string[]): EmptyPromise { +async function addFeeds(feeds: readonly string[]): EmptyPromise { if (!feeds.length) { return; } @@ -607,7 +614,7 @@ interface StoredMedium { /** * */ -async function processMedia(media: ScrapeMedium[], _listType: number, _userUuid: Uuid): Promise { +async function processMedia(media: readonly ScrapeMedium[], _listType: number, _userUuid: Uuid): Promise { const likeMedia = media.map((value) => { return { title: value.title.text, @@ -640,7 +647,7 @@ async function processMedia(media: ScrapeMedium[], _listType: number, _userUuid: if (likeMedium.medium?.id) { if (value.title.link) { - updateMediaPromises.push(mediumStorage.addToc(likeMedium.medium.id, value.title.link).then(ignore)); + updateMediaPromises.push(mediumTocStorage.addToc(likeMedium.medium.id, value.title.link).then(ignore)); } // TODO: 09.03.2020 episode Indices are not relative to the medium, which makes it unusable atm // if (value.current && ("partIndex" in value.current || "episodeIndex" in value.current)) { @@ -669,7 +676,7 @@ async function processMedia(media: ScrapeMedium[], _listType: number, _userUuid: .then(async (value) => { if (value.id) { if (scrapeMedium.title.link) { - await mediumStorage.addToc(value.id, scrapeMedium.title.link); + await mediumTocStorage.addToc(value.id, scrapeMedium.title.link); } } return { @@ -715,8 +722,9 @@ async function updateDatabase({ removedLists, result, addedLists, renamedLists, name: value.name, url: value.link, medium: value.medium, - items: value.media as number[], + userUuid: result.external.userUuid, }) + .then((list) => externalListStorage.addItemsToList(value.media as number[], list.id, 
result.external.userUuid)) .catch((error) => logger.error(error)), ), ); @@ -754,7 +762,7 @@ async function updateDatabase({ removedLists, result, addedLists, renamedLists, promisePool.push( ...newMedia.map((mediumId: number) => - externalListStorage.addItemToExternalList(externalList.id, mediumId).catch((error) => logger.error(error)), + externalListStorage.addItemToList(externalList.id, mediumId).catch((error) => logger.error(error)), ), ); }); @@ -877,11 +885,11 @@ async function tocErrorHandler(error: Error) { try { if (error instanceof MissingResourceError) { logger.warn("toc will be removed", { reason: "resource was seemingly deleted", resource: error.resource }); - await mediumStorage.removeToc(error.resource); + await mediumTocStorage.removeToc(error.resource); await jobStorage.removeJobLike("name", error.resource); } else if (error instanceof UrlError) { logger.warn("toc will be removed", { reason: "url is not what the scraper expected", url: error.url }); - await mediumStorage.removeToc(error.url); + await mediumTocStorage.removeToc(error.url); await jobStorage.removeJobLike("name", error.url); } else if (error instanceof DisabledHookError) { logger.warn(error.message); diff --git a/packages/scraper/src/scheduler/jobQueue.ts b/packages/scraper/src/scheduler/jobQueue.ts index 4ddb4116..1af38290 100644 --- a/packages/scraper/src/scheduler/jobQueue.ts +++ b/packages/scraper/src/scheduler/jobQueue.ts @@ -9,7 +9,7 @@ import { Job } from "./job"; const queueChannel = channel("enterprise-jobqueue"); -function createJobMessage(store: Map) { +function createJobMessage(store: ReadonlyMap) { const message = { modifications: store.get("modifications") || {}, queryCount: store.get("queryCount") || 0, @@ -199,7 +199,7 @@ export class JobQueue { * @param job the jobInfo of the job to remove * @return boolean true if there was a job removed from the active or waiting queue */ - public removeJob(job: Job): boolean { + public removeJob(job: Readonly): boolean { const 
predicate = (value: InternJob) => value.job.id === job.id; return removeLike(this.waitingJobs, predicate) || removeLike(this.activeJobs, predicate); } @@ -278,7 +278,7 @@ export class JobQueue { * * @return Array an array of the internal jobs. */ - public getJobs(): OutsideJob[] { + public getJobs(): ReadonlyArray> { const jobs = []; for (const job of this.activeJobs) { jobs.push({ @@ -485,7 +485,7 @@ export class JobQueue { } interface InternJob { - readonly job: Job; + readonly job: Readonly; startRun?: number; running?: boolean; active: boolean; diff --git a/packages/scraper/src/scheduler/jobScheduler.ts b/packages/scraper/src/scheduler/jobScheduler.ts index 221cf687..a39bcf3d 100644 --- a/packages/scraper/src/scheduler/jobScheduler.ts +++ b/packages/scraper/src/scheduler/jobScheduler.ts @@ -2,15 +2,7 @@ import { ScraperHelper } from "../externals/scraperTools"; import { JobQueue, OutsideJob } from "./jobQueue"; import { getElseSet, isString, maxValue, removeLike, stringify } from "enterprise-core/dist/tools"; import logger from "enterprise-core/dist/logger"; -import { - JobItem, - JobRequest, - JobState, - MilliTime, - ScrapeName, - EmptyPromise, - Optional, -} from "enterprise-core/dist/types"; +import { JobRequest, JobState, MilliTime, ScrapeName, EmptyPromise, Optional } from "enterprise-core/dist/types"; import { jobStorage, notificationStorage } from "enterprise-core/dist/database/storages/storage"; import * as dns from "dns"; import { getStore, StoreKey } from "enterprise-core/dist/asyncStorage"; @@ -21,19 +13,20 @@ import { channel } from "diagnostics_channel"; import { SchedulingStrategy, Strategies } from "./scheduling"; import { gracefulShutdown } from "enterprise-core/dist/exit"; import { createJob, Job } from "./job"; +import { SimpleJob } from "enterprise-core/dist/database/databaseTypes"; const missingConnections = new Set(); const jobChannel = channel("enterprise-jobs"); export class JobScheduler { public automatic = true; - public filter: 
undefined | ((item: JobItem) => boolean); + public filter: undefined | ((item: Readonly) => boolean); private readonly helper = new ScraperHelper(); private readonly queue = new JobQueue({ maxActive: 50 }); private fetching = false; private paused = true; - private readonly jobMap = new Map(); + private readonly jobMap = new Map>(); /** * Jobs of currently queued or running jobs @@ -187,14 +180,14 @@ export class JobScheduler { * Mainly for test purposes * @param jobIds */ - public async runJobs(...jobIds: number[]): EmptyPromise { + public async runJobs(...jobIds: readonly number[]): EmptyPromise { logger.info("start fetching jobs", { running: this.queue.runningJobs, schedulable: this.queue.schedulableJobs, total: this.queue.totalJobs, }); const jobs = await jobStorage.getJobsById(jobIds); - this.processJobItems(jobs); + this.processSimpleJobs(jobs); logger.info("fetched jobs", { running: this.queue.runningJobs, schedulable: this.queue.schedulableJobs, @@ -202,7 +195,7 @@ export class JobScheduler { }); } - public async addJobs(...jobs: JobRequest[]): EmptyPromise { + public async addJobs(...jobs: readonly JobRequest[]): EmptyPromise { let waitForOtherRequest: JobRequest[] = []; const addJobs = jobs.filter((value) => { if (value.runAfter) { @@ -217,7 +210,7 @@ export class JobScheduler { await jobStorage.addJobs(addJobs); addJobs.length = 0; waitForOtherRequest = waitForOtherRequest.filter((value) => { - if (isJobItem(value.runAfter)) { + if (isSimpleJob(value.runAfter)) { addJobs.push(value); return false; } else { @@ -231,7 +224,7 @@ export class JobScheduler { } } - public getJobs(): OutsideJob[] { + public getJobs(): readonly OutsideJob[] { return this.queue.getJobs(); } @@ -252,7 +245,7 @@ export class JobScheduler { }); } - private addDependant(jobsMap: Map): void { + private addDependant(jobsMap: ReadonlyMap): void { for (const [key, value] of jobsMap.entries()) { for (const job of value) { // skip jobs which are already known to be running/queued @@ 
-274,10 +267,10 @@ export class JobScheduler { } private async checkCurrentVsStorage() { - const runningJobs: JobItem[] = await jobStorage.getJobsInState(JobState.RUNNING); + const runningJobs = await jobStorage.getJobsInState(JobState.RUNNING); // jobs which are marked as running in storage, while not running - const invalidJobs: JobItem[] = []; + const invalidJobs: SimpleJob[] = []; const jobs = this.queue.getJobs(); const currentlyRunningJobIds = new Set(); @@ -294,7 +287,7 @@ export class JobScheduler { if (invalidJobs.length) { // TODO: what to do with these variables? const identifier = []; - const removeJobs: JobItem[] = []; + const removeJobs: SimpleJob[] = []; const updateJobs = invalidJobs.filter((value) => { identifier.push(value.name ? value.name : value.id); @@ -309,7 +302,7 @@ export class JobScheduler { if (value.interval < 60000) { value.interval = 60000; } - value.nextRun = new Date(value.lastRun.getTime() + value.interval); + value.nextRun = new Date(Date.now() + value.interval); } value.state = JobState.WAITING; return true; @@ -381,7 +374,7 @@ export class JobScheduler { } private async checkRunningStorageJobs() { - const runningJobs: JobItem[] = await jobStorage.getJobsInState(JobState.RUNNING); + const runningJobs = await jobStorage.getJobsInState(JobState.RUNNING); const twoHoursAgo = new Date(); twoHoursAgo.setHours(twoHoursAgo.getHours() - 2); @@ -419,11 +412,11 @@ export class JobScheduler { schedulable: this.queue.schedulableJobs, total: this.queue.totalJobs, }); - const jobs: JobItem[] = await this.schedulingStrategy( + const jobs = await this.schedulingStrategy( this.queue, this.jobs.map((job) => job.currentItem), ); - this.processJobItems(jobs); + this.processSimpleJobs(jobs); logger.info("fetched jobs", { running: this.queue.runningJobs, schedulable: this.queue.schedulableJobs, @@ -432,7 +425,7 @@ export class JobScheduler { this.fetching = false; } - private processJobItems(items: JobItem[]) { + private processSimpleJobs(items: 
readonly SimpleJob[]) { const jobMap = new Map(); items.forEach((value) => { const job = createJob(value); @@ -528,12 +521,12 @@ export class JobScheduler { removeLike(this.jobs, (value) => value.id === item.id); this.jobMap.delete(item.id); const newJobs = await jobStorage.getAfterJobs(item.id); - this.processJobItems(newJobs); + this.processSimpleJobs(newJobs); }); } } -function isJobItem(value: any): value is JobItem { +function isSimpleJob(value: any): value is SimpleJob { return value?.id; } diff --git a/packages/scraper/src/scheduler/scheduling.ts b/packages/scraper/src/scheduler/scheduling.ts index 872b7492..eb09af29 100644 --- a/packages/scraper/src/scheduler/scheduling.ts +++ b/packages/scraper/src/scheduler/scheduling.ts @@ -1,18 +1,18 @@ -import { JobItem } from "enterprise-core/dist/types"; import { jobStorage } from "enterprise-core/dist/database/storages/storage"; import { getElseSet, stringify } from "enterprise-core/dist/tools"; import { JobQueue } from "./jobQueue"; import { getQueueKey } from "../externals/queueRequest"; import { writeFile } from "fs/promises"; import { ValidationError } from "enterprise-core/dist/error"; +import { SimpleJob } from "enterprise-core/dist/database/databaseTypes"; -export type SchedulingStrategy = (queue: JobQueue, items: JobItem[]) => Promise; +export type SchedulingStrategy = (queue: Readonly, items: SimpleJob[]) => Promise; function create>(value: T): T { return value; } -function firstComeFirstServed(): Promise { +function firstComeFirstServed(): Promise { return jobStorage.getJobs(); } @@ -26,8 +26,8 @@ const UNKNOWN_QUEUE = "UNKNOWN"; * @param items jobs to group * @returns a mapping of request queue key to the jobs */ -function getRequestQueueShare(items: JobItem[]): Map { - const countingMap = new Map(); +function getRequestQueueShare(items: readonly SimpleJob[]): Map { + const countingMap = new Map(); for (const item of items) { if (!item) { @@ -62,7 +62,10 @@ function getRequestQueueShare(items: JobItem[]): 
Map { * @param currentItems jobs which are currently running or queued * @returns array of ordered jobs to queue */ -async function requestQueueBalanced(queue: JobQueue, currentItems: JobItem[]): Promise { +async function requestQueueBalanced( + queue: Readonly, + currentItems: readonly SimpleJob[], +): Promise { // grouping of current items const currentShares = getRequestQueueShare(currentItems); @@ -85,7 +88,7 @@ async function requestQueueBalanced(queue: JobQueue, currentItems: JobItem[]): P } // for debugging purpose only - const futureOnlyShares = new Map([...futureShares.keys()].map((key) => [key, []])); + const futureOnlyShares = new Map([...futureShares.keys()].map((key) => [key, []])); // number of items to queue const maximumSchedulableJobs = Math.max(queue.maxActive - queue.queuedJobs, 0); @@ -156,7 +159,7 @@ async function requestQueueBalanced(queue: JobQueue, currentItems: JobItem[]): P * @param queue current used queue * @returns a baseline of jobs per minute, maxed at the maximum of the queue */ -async function calculateBaseLine(queue: JobQueue): Promise { +async function calculateBaseLine(queue: Readonly): Promise { const allJobs = await jobStorage.getAllJobs(); const averageJobsPerMinute = allJobs.reduce((previous, current) => { if (!current.interval) { @@ -192,7 +195,10 @@ let previousBaseLineCalculation = 0; * * Do not prefer jobs of queue {@link UNKNOWN_QUEUE} over known queues. 
*/ -async function jobsQueueForcedBalance(queue: JobQueue, currentItems: JobItem[]): Promise { +async function jobsQueueForcedBalance( + queue: Readonly, + currentItems: readonly SimpleJob[], +): Promise { const now = Date.now(); // tolerate a delay 10% of the interval const delayTolerance = 0.1; diff --git a/packages/server/src/api/apiTools.ts b/packages/server/src/api/apiTools.ts index 37150663..64e8955d 100644 --- a/packages/server/src/api/apiTools.ts +++ b/packages/server/src/api/apiTools.ts @@ -1,6 +1,6 @@ import { RestResponseError } from "../errors"; import logger from "enterprise-core/dist/logger"; -import { isQuery, Errors, isError, isString } from "enterprise-core/dist/tools"; +import { Errors, ignore, isError, isString } from "enterprise-core/dist/tools"; import { Handler, NextFunction, Request, Response } from "express"; import stringify from "stringify-stream"; import { ValidationError } from "enterprise-core/dist/error"; @@ -8,6 +8,7 @@ import { JSONSchemaType } from "enterprise-core/dist/validation"; import { Validator } from "express-json-validator-middleware"; import addFormats from "ajv-formats"; import * as validationSchemata from "../validation"; +import { pipeline, Readable } from "stream"; export function castQuery>(req: Request): T { return req.query as T; @@ -20,11 +21,8 @@ export function stopper(_req: Request, _res: Response, next: NextFunction): any export function sendResult(res: Response, promise: Promise): void { promise .then((result) => { - if (isQuery(result)) { - result - .stream({ objectMode: true, highWaterMark: 10 }) - .pipe(stringify({ open: "[", close: "]" })) - .pipe(res); + if (result instanceof Readable) { + pipeline(result, stringify({ open: "[", close: "]" }), res, ignore); } else { res.json(result); } diff --git a/packages/server/src/api/episode.ts b/packages/server/src/api/episode.ts index 4ea62c58..9520213e 100644 --- a/packages/server/src/api/episode.ts +++ b/packages/server/src/api/episode.ts @@ -1,4 +1,4 @@ -import 
{ episodeStorage } from "enterprise-core/dist/database/storages/storage"; +import { episodeReleaseStorage, episodeStorage } from "enterprise-core/dist/database/storages/storage"; import { Errors, getDate } from "enterprise-core/dist/tools"; import { Router } from "express"; import { @@ -65,7 +65,7 @@ export const getAllEpisodes = createHandler((req) => { }); export const getAllReleases = createHandler(() => { - return episodeStorage.getAllReleases(); + return episodeReleaseStorage.getAllReleases(); }); export const getDisplayReleases = createHandler( @@ -88,7 +88,7 @@ export const getDisplayReleases = createHandler( return Promise.reject(Errors.INVALID_INPUT); } - return episodeStorage.getDisplayReleases( + return episodeReleaseStorage.getDisplayReleases( latestDate, untilDate, read ?? null, diff --git a/packages/server/src/api/externalUser.ts b/packages/server/src/api/externalUser.ts index f69e946f..828a2098 100644 --- a/packages/server/src/api/externalUser.ts +++ b/packages/server/src/api/externalUser.ts @@ -1,7 +1,7 @@ import { externalUserStorage, jobStorage } from "enterprise-core/dist/database/storages/storage"; import { factory } from "enterprise-scraper/dist/externals/listManager"; import { Errors } from "enterprise-core/dist/tools"; -import { DisplayExternalUser, ExternalUser, ScrapeName } from "enterprise-core/dist/types"; +import { Insert, ScrapeName } from "enterprise-core/dist/types"; import { Router } from "express"; import { castQuery, createHandler, extractQueryParam } from "./apiTools"; import { ValidationError } from "enterprise-core/dist/error"; @@ -15,14 +15,19 @@ import { RefreshExternalUser, refreshExternalUserSchema, } from "../validation"; +import { + BasicDisplayExternalUser, + DisplayExternalUser, + SimpleExternalUser, +} from "enterprise-core/dist/database/databaseTypes"; -function toDisplayExternalUser(value: ExternalUser): DisplayExternalUser { +function toDisplayExternalUser(value: BasicDisplayExternalUser): DisplayExternalUser { 
return { identifier: value.identifier, - lists: value.lists, localUuid: value.localUuid, type: value.type, uuid: value.uuid, + lists: [], }; } @@ -44,9 +49,8 @@ export const postExternalUser = createHandler( if (!valid) { throw new ValidationError(Errors.INVALID_DATA); } - const addExternalUser: ExternalUser = { + const addExternalUser: Insert = { identifier: externalUser.identifier, - lists: [], localUuid: uuid, type: externalUser.type, uuid: "", @@ -81,21 +85,23 @@ export const refreshExternalUser = createHandler( async (req) => { const { externalUuid } = castQuery(req); - const externalUserWithCookies = await externalUserStorage.getExternalUserWithCookies(externalUuid); + const externalUserWithCookies = await externalUserStorage.getSimpleExternalUser(externalUuid); - await jobStorage.addJobs({ - type: ScrapeName.oneTimeUser, - interval: -1, - deleteAfterRun: true, - runImmediately: true, - name: `${ScrapeName.oneTimeUser}-${externalUserWithCookies.uuid}`, - arguments: JSON.stringify({ - link: externalUserWithCookies.uuid, - userId: externalUserWithCookies.userUuid, - externalUserId: externalUserWithCookies.uuid, - info: externalUserWithCookies.cookies, - }), - }); + await jobStorage.addJobs([ + { + type: ScrapeName.oneTimeUser, + interval: -1, + deleteAfterRun: true, + runImmediately: true, + name: `${ScrapeName.oneTimeUser}-${externalUserWithCookies.uuid}`, + arguments: JSON.stringify({ + link: externalUserWithCookies.uuid, + userId: externalUserWithCookies.localUuid, + externalUserId: externalUserWithCookies.uuid, + info: externalUserWithCookies.cookies, + }), + }, + ]); return true; }, { query: refreshExternalUserSchema }, diff --git a/packages/server/src/api/list.ts b/packages/server/src/api/list.ts index 1a01e757..6f9623f9 100644 --- a/packages/server/src/api/list.ts +++ b/packages/server/src/api/list.ts @@ -22,15 +22,15 @@ import { castQuery, createHandler, extractQueryParam } from "./apiTools"; export const getList = createHandler( (req) => { const { 
listId, media, uuid } = castQuery(req); - return internalListStorage.getList(listId, media || [], uuid); + return internalListStorage.getLists(listId, media ?? [], uuid); }, { query: getListSchema }, ); export const postList = createHandler( (req) => { - const { uuid, list }: PostList = req.body; - return internalListStorage.addList(uuid, list); + const { list }: PostList = req.body; + return internalListStorage.addList(list); }, { body: postListSchema }, ); @@ -54,7 +54,7 @@ export const deleteList = createHandler( export const getListMedium = createHandler( (req) => { const { listId, media, uuid } = castQuery(req); - return internalListStorage.getList(listId, media, uuid); + return internalListStorage.getLists([listId], media, uuid); }, { query: getListMediumSchema }, ); @@ -62,15 +62,15 @@ export const getListMedium = createHandler( export const postListMedium = createHandler( (req) => { const { listId, mediumId, uuid } = req.body; - return internalListStorage.addItemToList({ listId, id: mediumId }, uuid); + return internalListStorage.addItemsToList(mediumId, uuid, listId); }, { body: postListMediumSchema }, ); export const putListMedium = createHandler( (req) => { - const { oldListId, newListId, mediumId }: PutListMedium = req.body; - return internalListStorage.moveMedium(oldListId, newListId, mediumId); + const { oldListId, newListId, mediumId, uuid }: PutListMedium = req.body; + return internalListStorage.moveMedium(oldListId, newListId, mediumId, uuid); }, { body: putListMediumSchema }, ); diff --git a/packages/server/src/api/medium.ts b/packages/server/src/api/medium.ts index d41d95ae..bb08bb39 100644 --- a/packages/server/src/api/medium.ts +++ b/packages/server/src/api/medium.ts @@ -1,4 +1,9 @@ -import { mediumStorage, mediumInWaitStorage, episodeStorage } from "enterprise-core/dist/database/storages/storage"; +import { + mediumStorage, + mediumInWaitStorage, + episodeStorage, + episodeReleaseStorage, +} from 
"enterprise-core/dist/database/storages/storage"; import logger from "enterprise-core/dist/logger"; import { Errors, getDate } from "enterprise-core/dist/tools"; import { MediumInWaitSearch } from "enterprise-core/dist/types"; @@ -69,8 +74,8 @@ export const putConsumeUnusedMedia = createHandler( export const postCreateFromUnusedMedia = createHandler( (req) => { - const { createMedium, tocsMedia, listId }: PostCreateFromUnusedMedia = req.body; - return mediumInWaitStorage.createFromMediaInWait(createMedium, tocsMedia, listId); + const { createMedium, tocsMedia, listId, uuid }: PostCreateFromUnusedMedia = req.body; + return mediumInWaitStorage.createFromMediaInWait(createMedium, uuid, tocsMedia, listId); }, { body: postCreateFromUnusedMediaSchema }, ); @@ -160,7 +165,7 @@ export const deleteProgress = createHandler( export const getMediumReleases = createHandler( (req) => { const { id, uuid } = castQuery(req); - return episodeStorage.getMediumReleases(id, uuid); + return episodeReleaseStorage.getMediumReleases(id, uuid); }, { query: getMediumReleasesSchema }, ); diff --git a/packages/server/src/api/process.ts b/packages/server/src/api/process.ts deleted file mode 100644 index 0f1aad31..00000000 --- a/packages/server/src/api/process.ts +++ /dev/null @@ -1,105 +0,0 @@ -import { episodeStorage, storage } from "enterprise-core/dist/database/storages/storage"; -import { Errors } from "enterprise-core/dist/tools"; -import { Router } from "express"; -import { createHandler } from "./apiTools"; - -// TODO: deprecate/remove this api -export const processReadEpisode = createHandler((req) => { - const { uuid, result } = req.body; - if (!result) { - return Promise.reject(Errors.INVALID_INPUT); - } - return episodeStorage.markEpisodeRead(uuid, result); -}); - -export const processProgress = createHandler((req) => { - const { uuid, progress } = req.body; - if (!progress) { - return Promise.reject(Errors.INVALID_INPUT); - } - return episodeStorage.setProgress(uuid, progress); -}); 
- -export const processResult = createHandler((req) => { - if (!req.body) { - return Promise.reject(Errors.INVALID_INPUT); - } - return storage.processResult(req.body); -}); - -/** - * @openapi - * tags: - * name: Process - * description: API for Process - */ -export function processRouter(): Router { - const router = Router(); - - /** - * @openapi - * /api/user/process/result: - * post: - * tags: [Process] - * description: Process a Result, TODO - * responses: - * 200: - * content: - * application/json: - * schema: - * type: boolean - * description: true if update succeeded - */ - router.post("/result", processResult); - - /** - * @openapi - * /api/user/process/read: - * post: - * tags: [Process] - * description: Process Result. - * requestBody: - * content: - * application/json: - * schema: - * type: object - * properties: - * uuid: - * type: string - * session: - * type: string - * result: - * $ref: "#/components/schemas/Result" - * required: true - * responses: - * 200: - * description: no body - */ - router.post("/read", processReadEpisode); - - /** - * @openapi - * /api/user/process/progress: - * post: - * tags: [Process] - * description: Process Progress Result. 
- * requestBody: - * content: - * application/json: - * schema: - * type: object - * properties: - * uuid: - * type: string - * session: - * type: string - * progress: - * $ref: "#/components/schemas/ProgressResult" - * required: true - * responses: - * 200: - * description: no body - */ - router.post("/progress", processProgress); - return router; -} diff --git a/packages/server/src/api/user.ts b/packages/server/src/api/user.ts index dcdd672b..ba0c5c0d 100644 --- a/packages/server/src/api/user.ts +++ b/packages/server/src/api/user.ts @@ -3,7 +3,7 @@ import { databaseStorage, episodeStorage, jobStorage, - mediumStorage, + mediumTocStorage, notificationStorage, storage, userStorage, @@ -26,7 +26,6 @@ import { jobsRouter } from "./jobs"; import { getAllLists, listRouter } from "./list"; import { mediumRouter } from "./medium"; import { newsRouter } from "./news"; -import { processRouter } from "./process"; import { crawlerRouter } from "./crawler"; import { CrawlerStatus, DatabaseStatus, Status } from "../types"; import os from "os"; @@ -153,14 +152,16 @@ export const addToc = createHandler( mediumId, }; // TODO: directly request the scrape and insert immediately - await jobStorage.addJobs({ - name: `${ScrapeName.oneTimeToc}-${toc}`, - type: ScrapeName.oneTimeToc, - runImmediately: true, - deleteAfterRun: true, - interval: -1, - arguments: JSON.stringify(tocRequest), - }); + await jobStorage.addJobs([ + { + name: `${ScrapeName.oneTimeToc}-${toc}`, + type: ScrapeName.oneTimeToc, + runImmediately: true, + deleteAfterRun: true, + interval: -1, + arguments: JSON.stringify(tocRequest), + }, + ]); return true; }, { body: addTocSchema }, @@ -195,7 +196,7 @@ export const search = createHandler( export const getStats = createHandler((req) => { const uuid = extractQueryParam(req, "uuid"); - return storage.getStats(uuid); + return storage.getStat(uuid); }); export const getNew = createHandler((req) => { @@ -217,7 +218,7 @@ export const getToc = createHandler( media = [media]; } 
- return mediumStorage.getMediumTocs(media); + return mediumTocStorage.getTocsByMediumIds(media); }, { query: getTocSchema }, ); @@ -225,7 +226,7 @@ export const getToc = createHandler( export const deleteToc = createHandler( (req) => { const { mediumId, link }: DeleteToc = req.body; - return mediumStorage.removeMediumToc(mediumId, link); + return mediumTocStorage.removeMediumToc(mediumId, link); }, { body: deleteTocSchema }, ); @@ -292,7 +293,7 @@ async function getDatabaseStatus(): Promise { status: "available", host: `${appConfig.dbHost}:${appConfig.dbPort}`, type: "mariadb", - version: dbVersion.version, + version: dbVersion, }; } catch (error) { return { @@ -898,7 +899,6 @@ export function userRouter(): Router { router.use("/hook", hooksRouter()); router.use("/news", newsRouter()); router.use("/list", listRouter()); - router.use("/process", processRouter()); router.use("/externalUser", externalUserRouter()); router.use("/crawler", crawlerRouter()); return router; diff --git a/packages/server/src/misc/fillSystemEvents.ts b/packages/server/src/misc/fillSystemEvents.ts index 8a1d0ca9..aa26e868 100644 --- a/packages/server/src/misc/fillSystemEvents.ts +++ b/packages/server/src/misc/fillSystemEvents.ts @@ -130,7 +130,6 @@ async function streamed() { await Promise.allSettled(promises); process.exit(0); }); - jobs.start(); } main(); diff --git a/packages/server/src/updgradeToHttps.ts b/packages/server/src/updgradeToHttps.ts index f3119981..d4398aec 100644 --- a/packages/server/src/updgradeToHttps.ts +++ b/packages/server/src/updgradeToHttps.ts @@ -1,5 +1,6 @@ +import { SimpleJob, SimpleRelease } from "enterprise-core/dist/database/databaseTypes"; import * as storage from "enterprise-core/dist/database/storages/storage"; -import { EpisodeRelease, JobItem, JobRequest, JobState, ScrapeName, MediumToc } from "enterprise-core/dist/types"; +import { JobRequest, ScrapeName, MediumToc } from "enterprise-core/dist/types"; async function updateReleaseProtocol(domainReg: 
RegExp, toc: MediumToc, values: MediumToc[]) { const domainMatch = domainReg.exec(toc.link); @@ -22,9 +23,10 @@ async function updateReleaseProtocol(domainReg: RegExp, toc: MediumToc, values: } const partIds = await storage.partStorage.getMediumParts(id); const episodeIds: number[] = partIds.flatMap((value) => value.episodes as number[]); - const mediumReleases = await storage.episodeStorage.getReleases(episodeIds); + const mediumReleases = await storage.episodeReleaseStorage.getReleases(episodeIds); + + const episodeMap: Map = new Map(); - const episodeMap: Map = new Map(); for (const release of mediumReleases) { const releaseDomainMatch = domainReg.exec(release.url); if (!releaseDomainMatch) { @@ -37,12 +39,13 @@ async function updateReleaseProtocol(domainReg: RegExp, toc: MediumToc, values: if (!episodeMap.has(release.episodeId)) { episodeMap.set(release.episodeId, []); } - const links = episodeMap.get(release.episodeId) as EpisodeRelease[]; + const links = episodeMap.get(release.episodeId) as SimpleRelease[]; links.push(release); } } - const deleteReleases = []; - const updateToHttpsReleases = []; + + const deleteReleases: SimpleRelease[] = []; + const updateToHttpsReleases: SimpleRelease[] = []; for (const [, releases] of episodeMap.entries()) { if (releases.length > 1) { @@ -76,18 +79,17 @@ async function updateReleaseProtocol(domainReg: RegExp, toc: MediumToc, values: async function updateHttps(): Promise { const httpsOnly = ["http://novelfull.com"]; - const jobItems = await storage.jobStorage.getJobsInState(JobState.WAITING); - jobItems.push(...(await storage.jobStorage.getJobsInState(JobState.RUNNING))); - const allTocs: MediumToc[] = await storage.mediumStorage.getAllTocs(); + const jobItems = await storage.jobStorage.getAllJobs(); + const allTocs = await storage.mediumTocStorage.getAllTocs(); const tocMap: Map = new Map(); const regExp = /https?:\/\/(.+)/; const domainReg = /https?:\/\/(.+?)(\/|$)/; - const removeJobs: JobItem[] = []; - const addJobs: 
JobItem[] = []; - const addReleases: EpisodeRelease[] = []; - const removeReleases: EpisodeRelease[] = []; + const removeJobs: SimpleJob[] = []; + const addJobs: SimpleJob[] = []; + const addReleases: SimpleRelease[] = []; + const removeReleases: SimpleRelease[] = []; const removeTocs: MediumToc[] = []; const addTocs: MediumToc[] = []; @@ -262,12 +264,12 @@ async function updateHttps(): Promise { } interface Change { - addJobs: JobItem[]; + addJobs: SimpleJob[]; removeTocs: MediumToc[]; addTocs: MediumToc[]; - removeReleases: EpisodeRelease[]; - removeJobs: JobItem[]; - addReleases: EpisodeRelease[]; + removeReleases: SimpleRelease[]; + removeJobs: SimpleJob[]; + addReleases: SimpleRelease[]; } async function executeChange(changes: Change) { @@ -288,12 +290,12 @@ async function executeChange(changes: Change) { }); } await storage.jobStorage.addJobs(jobRequests); - await Promise.all(changes.addTocs.map((value) => storage.mediumStorage.addToc(value.mediumId, value.link))); - await storage.episodeStorage.addRelease(changes.addReleases); - await Promise.all(changes.removeReleases.map((value) => storage.episodeStorage.deleteRelease(value))); + await Promise.all(changes.addTocs.map((value) => storage.mediumTocStorage.addToc(value.mediumId, value.link))); + await storage.episodeReleaseStorage.addReleases(changes.addReleases); + await Promise.all(changes.removeReleases.map((value) => storage.episodeReleaseStorage.deleteReleases([value]))); await storage.jobStorage.removeJobs(changes.removeJobs); const removeTocLinks = [...new Set(changes.removeTocs.map((value) => value.link))]; - await Promise.all(removeTocLinks.map((value) => storage.mediumStorage.removeToc(value))); + await Promise.all(removeTocLinks.map((value) => storage.mediumTocStorage.removeToc(value))); } async function makeChanges() { diff --git a/packages/server/src/validation.ts b/packages/server/src/validation.ts index 60f4716c..59d3c58f 100644 --- a/packages/server/src/validation.ts +++ 
b/packages/server/src/validation.ts @@ -4,13 +4,13 @@ import { AppEventProgram, AppEventType, Id, + Insert, JobHistoryResult, Json, Link, List, MediumInWait, MediumInWaitSearch, - MinList, QueryItems, ScrapeName, SimpleEpisode, @@ -33,6 +33,8 @@ import { uuid, uuidArray, } from "enterprise-core/dist/validation"; +import { SimpleList, SimpleMedium as SimpleDBMedium } from "enterprise-core/dist/database/databaseTypes"; +import { MediaType } from "enterprise-core/dist/tools"; export interface ReadNotification { id: Id; @@ -350,7 +352,7 @@ export const simpleEpisodeSchema: JSONSchemaType> = { properties: { id: id(), partId: id(), - combiIndex: { type: "integer", nullable: true }, + combiIndex: { type: "integer" }, totalIndex: { type: "integer" }, partialIndex: { type: "integer", nullable: true }, releases: { @@ -358,11 +360,12 @@ export const simpleEpisodeSchema: JSONSchemaType> = { items: { type: "object", properties: { + id: id(), episodeId: id(), url: link(), title: string(), releaseDate: string(), - locked: { type: "boolean", nullable: true }, + locked: { type: "boolean" }, sourceType: { type: "string", nullable: true }, tocId: { type: "integer", nullable: true }, }, @@ -551,16 +554,40 @@ export const simpleMediumSchema: JSONSchemaType = { artist: { ...string(), nullable: true }, lang: { ...string(), nullable: true }, stateOrigin: { ...integer(), nullable: true }, - stateTL: { ...integer(), nullable: true }, + stateTl: { ...integer(), nullable: true }, series: { ...string(), nullable: true }, universe: { ...string(), nullable: true }, }, required: ["title", "medium"], }; +// eslint-disable-next-line @typescript-eslint/prefer-ts-expect-error +// @ts-ignore +export const simpleDBMediumSchema: JSONSchemaType = { + type: "object", + properties: { + id: id(), + title: string(), + medium: { + type: "integer", + enum: [MediaType.AUDIO, MediaType.IMAGE, MediaType.VIDEO, MediaType.TEXT], + }, + countryOfOrigin: { ...string(), nullable: true }, + languageOfOrigin: { 
...string(), nullable: true }, + author: { ...string(), nullable: true }, + artist: { ...string(), nullable: true }, + lang: { ...string(), nullable: true }, + stateOrigin: { ...integer(), nullable: true }, + stateTl: { ...integer(), nullable: true }, + series: { ...string(), nullable: true }, + universe: { ...string(), nullable: true }, + }, + required: ["title", "medium", "id"], +}; + export interface PostSplitMedium { sourceId: Id; - destinationMedium: SimpleMedium; + destinationMedium: SimpleDBMedium; toc: Link; } @@ -569,7 +596,7 @@ export const postSplitMediumSchema: JSONSchemaType = { type: "object", properties: { sourceId: id(), - destinationMedium: simpleMediumSchema, + destinationMedium: simpleDBMediumSchema, toc: link(), }, required: ["destinationMedium", "toc", "sourceId"], @@ -617,6 +644,7 @@ export interface PostCreateFromUnusedMedia { listId: Id; createMedium: MediumInWait; tocsMedia?: MediumInWait[]; + uuid: Uuid; } export const postCreateFromUnusedMediaSchema: JSONSchemaType = { @@ -626,8 +654,9 @@ export const postCreateFromUnusedMediaSchema: JSONSchemaType = { @@ -687,7 +716,7 @@ export const updateMediumSchema: JSONSchemaType = { artist: { ...string(), nullable: true }, lang: { ...string(), nullable: true }, stateOrigin: { ...integer(), nullable: true }, - stateTL: { ...integer(), nullable: true }, + stateTl: { ...integer(), nullable: true }, series: { ...string(), nullable: true }, universe: { ...string(), nullable: true }, }, @@ -938,18 +967,20 @@ export const getListSchema: JSONSchemaType = { }; export interface PostList { - list: MinList; - uuid: Uuid; + list: Insert; } export const postListSchema: JSONSchemaType = { $id: "/PostList", type: "object", properties: { - list: { type: "object", properties: { medium: integer(), name: string() }, required: ["medium", "name"] }, - uuid: uuid(), + list: { + type: "object", + properties: { medium: integer(), name: string(), userUuid: uuid(), id: { ...id(), nullable: true } }, + required: ["medium", 
"name"], + }, }, - required: ["list", "uuid"], + required: ["list"], }; export interface PutList { diff --git a/packages/website/src/Httpclient.ts b/packages/website/src/Httpclient.ts index e1221a2b..895c3620 100644 --- a/packages/website/src/Httpclient.ts +++ b/packages/website/src/Httpclient.ts @@ -18,7 +18,6 @@ import { TimeBucket, MediaType, SearchResult, - ScraperHook, MediumInWait, MediumInWaitSearch, Part, @@ -34,9 +33,10 @@ import { PostList, PostListMedium, } from "enterprise-server/dist/validation"; -import { CustomHook, Id, Notification, Nullable, Paginated, SimpleUser } from "enterprise-core/dist/types"; +import { Id, Notification, Nullable, Paginated, ScraperHook, SimpleUser } from "enterprise-core/dist/types"; import qs from "qs"; import { useUserStore } from "./store/store"; +import { CustomHook } from "enterprise-core/dist/database/databaseTypes"; /** * Allowed Methods for the API. diff --git a/packages/website/src/components/customHook/custom-hook-form.vue b/packages/website/src/components/customHook/custom-hook-form.vue index f81b6b2c..23c94b16 100644 --- a/packages/website/src/components/customHook/custom-hook-form.vue +++ b/packages/website/src/components/customHook/custom-hook-form.vue @@ -125,7 +125,7 @@