diff --git a/README.md b/README.md
index 7e15159eb9..4895254926 100644
--- a/README.md
+++ b/README.md
@@ -73,4 +73,4 @@ Please see the [CONTRIBUTING](CONTRIBUTING.md) file for information on contribut
 
 The code in this repository is released under the GNU AFFERO GENERAL PUBLIC LICENSE, version 3. A copy can be found in the [`LICENSE`](LICENSE) file.
 
-Copyright (c) Overleaf, 2014-2024.
+Copyright (c) Overleaf, 2014-2025.
diff --git a/services/web/docker/mongodb-init-replica-set.js b/bin/shared/mongodb-init-replica-set.js
similarity index 100%
rename from services/web/docker/mongodb-init-replica-set.js
rename to bin/shared/mongodb-init-replica-set.js
diff --git a/develop/README.md b/develop/README.md
index 49f7d63350..568259c4e3 100644
--- a/develop/README.md
+++ b/develop/README.md
@@ -11,12 +11,6 @@ bin/build
 > [!NOTE]
 > If Docker is running out of RAM while building the services in parallel, create a `.env` file in this directory containing `COMPOSE_PARALLEL_LIMIT=1`.
 
-Next, initialize the database:
-
-```shell
-bin/init
-```
-
 Then start the services:
 
 ```shell
diff --git a/develop/bin/init b/develop/bin/init
deleted file mode 100755
index 4cb2a4eae1..0000000000
--- a/develop/bin/init
+++ /dev/null
@@ -1,6 +0,0 @@
-#!/usr/bin/env bash
-
-docker compose up --detach mongo
-curl --max-time 10 --retry 5 --retry-delay 5 --retry-all-errors --silent --output /dev/null localhost:27017
-docker compose exec mongo mongosh --eval "rs.initiate({ _id: 'overleaf', members: [{ _id: 0, host: 'mongo:27017' }] })"
-docker compose down mongo
diff --git a/develop/docker-compose.yml b/develop/docker-compose.yml
index 075583cb1a..d0dc8ec6da 100644
--- a/develop/docker-compose.yml
+++ b/develop/docker-compose.yml
@@ -94,6 +94,14 @@ services:
       - "127.0.0.1:27017:27017" # for debugging
     volumes:
       - mongo-data:/data/db
+      - ../bin/shared/mongodb-init-replica-set.js:/docker-entrypoint-initdb.d/mongodb-init-replica-set.js
+    environment:
+      MONGO_INITDB_DATABASE: sharelatex
+    extra_hosts:
+      # Required when using the automatic database setup for initializing the
+      # replica set. This override is not needed when running the setup after
+      # starting up mongo.
+      - mongo:127.0.0.1
 
   notifications:
     build:
diff --git a/docker-compose.yml b/docker-compose.yml
index a762bf73e1..08d6db6fe7 100644
--- a/docker-compose.yml
+++ b/docker-compose.yml
@@ -103,7 +103,7 @@ services:
         command: '--replSet overleaf'
         volumes:
             - ~/mongo_data:/data/db
-            - ./server-ce/mongodb-init-replica-set.js:/docker-entrypoint-initdb.d/mongodb-init-replica-set.js
+            - ./bin/shared/mongodb-init-replica-set.js:/docker-entrypoint-initdb.d/mongodb-init-replica-set.js
        environment:
            MONGO_INITDB_DATABASE: sharelatex
        extra_hosts:
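Note on the replica-set setup above: the diff records only the rename of `mongodb-init-replica-set.js` into `bin/shared/` and the compose wiring, not the script body itself. Going by the `rs.initiate` call in the removed `develop/bin/init`, a minimal sketch of what such an init script would contain (an assumption for illustration, not the renamed file's verified contents) is:

```js
// Hypothetical sketch of bin/shared/mongodb-init-replica-set.js.
// The official mongo image runs .js files from /docker-entrypoint-initdb.d via
// mongosh on first startup, which is why both compose files mount the script
// there and add the mongo:127.0.0.1 extra_host so the member hostname resolves.
rs.initiate({ _id: 'overleaf', members: [{ _id: 0, host: 'mongo:27017' }] })
```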
diff --git a/libraries/fetch-utils/index.js b/libraries/fetch-utils/index.js
index f5073e826d..643dcc752b 100644
--- a/libraries/fetch-utils/index.js
+++ b/libraries/fetch-utils/index.js
@@ -95,6 +95,19 @@ async function fetchNothing(url, opts = {}) {
  * @throws {RequestFailedError} if the response has a non redirect status code or missing Location header
  */
 async function fetchRedirect(url, opts = {}) {
+  const { location } = await fetchRedirectWithResponse(url, opts)
+  return location
+}
+
+/**
+ * Make a request and extract the redirect from the response.
+ *
+ * @param {string | URL} url - request URL
+ * @param {object} opts - fetch options
+ * @return {Promise<{location: string, response: Response}>}
+ * @throws {RequestFailedError} if the response has a non redirect status code or missing Location header
+ */
+async function fetchRedirectWithResponse(url, opts = {}) {
   const { fetchOpts } = parseOpts(opts)
   fetchOpts.redirect = 'manual'
   const response = await performRequest(url, fetchOpts)
@@ -112,7 +125,7 @@ async function fetchRedirect(url, opts = {}) {
     )
   }
   await discardResponseBody(response)
-  return location
+  return { location, response }
 }
 
 /**
@@ -297,6 +310,7 @@ module.exports = {
   fetchStreamWithResponse,
   fetchNothing,
   fetchRedirect,
+  fetchRedirectWithResponse,
   fetchString,
   fetchStringWithResponse,
   RequestFailedError,
diff --git a/libraries/mongo-utils/batchedUpdate.js b/libraries/mongo-utils/batchedUpdate.js
index 5e97f45aca..7e3ad677db 100644
--- a/libraries/mongo-utils/batchedUpdate.js
+++ b/libraries/mongo-utils/batchedUpdate.js
@@ -16,6 +16,7 @@ let VERBOSE_LOGGING
 let BATCH_RANGE_START
 let BATCH_RANGE_END
 let BATCH_MAX_TIME_SPAN_IN_MS
+let BATCHED_UPDATE_RUNNING = false
 
 /**
  * @typedef {import("mongodb").Collection} Collection
@@ -211,57 +212,66 @@ async function batchedUpdate(
   findOptions,
   batchedUpdateOptions
 ) {
-  ID_EDGE_PAST = await getIdEdgePast(collection)
-  if (!ID_EDGE_PAST) {
-    console.warn(
-      `The collection ${collection.collectionName} appears to be empty.`
-    )
-    return 0
+  // only a single batchedUpdate can run at a time due to global variables
+  if (BATCHED_UPDATE_RUNNING) {
+    throw new Error('batchedUpdate is already running')
   }
-  refreshGlobalOptionsForBatchedUpdate(batchedUpdateOptions)
-
-  findOptions = findOptions || {}
-  findOptions.readPreference = READ_PREFERENCE_SECONDARY
-
-  projection = projection || { _id: 1 }
-  let nextBatch
-  let updated = 0
-  let start = BATCH_RANGE_START
-
-  while (start !== BATCH_RANGE_END) {
-    let end = getNextEnd(start)
-    nextBatch = await getNextBatch(
-      collection,
-      query,
-      start,
-      end,
-      projection,
-      findOptions
-    )
-    if (nextBatch.length > 0) {
-      end = nextBatch[nextBatch.length - 1]._id
-      updated += nextBatch.length
-
-      if (VERBOSE_LOGGING) {
-        console.log(
-          `Running update on batch with ids ${JSON.stringify(
-            nextBatch.map(entry => entry._id)
-          )}`
-        )
-      } else {
-        console.error(`Running update on batch ending ${renderObjectId(end)}`)
-      }
-
-      if (typeof update === 'function') {
-        await update(nextBatch)
-      } else {
-        await performUpdate(collection, nextBatch, update)
-      }
+  try {
+    BATCHED_UPDATE_RUNNING = true
+    ID_EDGE_PAST = await getIdEdgePast(collection)
+    if (!ID_EDGE_PAST) {
+      console.warn(
+        `The collection ${collection.collectionName} appears to be empty.`
+      )
+      return 0
     }
-    console.error(`Completed batch ending ${renderObjectId(end)}`)
-    start = end
+    refreshGlobalOptionsForBatchedUpdate(batchedUpdateOptions)
+
+    findOptions = findOptions || {}
+    findOptions.readPreference = READ_PREFERENCE_SECONDARY
+
+    projection = projection || { _id: 1 }
+    let nextBatch
+    let updated = 0
+    let start = BATCH_RANGE_START
+
+    while (start !== BATCH_RANGE_END) {
+      let end = getNextEnd(start)
+      nextBatch = await getNextBatch(
+        collection,
+        query,
+        start,
+        end,
+        projection,
+        findOptions
+      )
+      if (nextBatch.length > 0) {
+        end = nextBatch[nextBatch.length - 1]._id
+        updated += nextBatch.length
+
+        if (VERBOSE_LOGGING) {
+          console.log(
+            `Running update on batch with ids ${JSON.stringify(
+              nextBatch.map(entry => entry._id)
+            )}`
+          )
+        } else {
+          console.error(`Running update on batch ending ${renderObjectId(end)}`)
+        }
+
+        if (typeof update === 'function') {
+          await update(nextBatch)
+        } else {
+          await performUpdate(collection, nextBatch, update)
+        }
+      }
+      console.error(`Completed batch ending ${renderObjectId(end)}`)
+      start = end
+    }
+    return updated
+  } finally {
+    BATCHED_UPDATE_RUNNING = false
   }
-  return updated
 }
 
 /**
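The batchedUpdate change above adds a process-wide re-entrancy guard: the helper keeps its batching state in module-level globals (BATCH_RANGE_START, ID_EDGE_PAST, and so on), so a second run started while one is in flight would share that state. With the guard it fails fast instead. A minimal sketch of the observable behaviour (illustrative only; the require path and the collection passed in are assumptions, not part of this diff):

```js
// Sketch: a second concurrent batchedUpdate now throws instead of sharing state.
const { batchedUpdate } = require('@overleaf/mongo-utils/batchedUpdate')

async function demo(collection) {
  // First run starts normally and sets BATCHED_UPDATE_RUNNING = true.
  const firstRun = batchedUpdate(collection, { deleted: true }, { $set: { archived: true } })

  try {
    // A second run started before the first finishes is rejected immediately.
    await batchedUpdate(collection, {}, { $set: { touched: true } })
  } catch (err) {
    console.error(err.message) // 'batchedUpdate is already running'
  }

  await firstRun // the finally block resets the flag, so later runs are allowed again
}
```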
diff --git a/libraries/object-persistor/src/PerProjectEncryptedS3Persistor.js b/libraries/object-persistor/src/PerProjectEncryptedS3Persistor.js
index 0395bcaa48..7bd4bb93e5 100644
--- a/libraries/object-persistor/src/PerProjectEncryptedS3Persistor.js
+++ b/libraries/object-persistor/src/PerProjectEncryptedS3Persistor.js
@@ -414,6 +414,16 @@ class CachedPerProjectEncryptedS3Persistor {
     return await this.sendStream(bucketName, path, fs.createReadStream(fsPath))
   }
 
+  /**
+   *
+   * @param {string} bucketName
+   * @param {string} path
+   * @return {Promise<number>}
+   */
+  async getObjectSize(bucketName, path) {
+    return await this.#parent.getObjectSize(bucketName, path)
+  }
+
   /**
    * @param {string} bucketName
    * @param {string} path
diff --git a/libraries/promise-utils/index.js b/libraries/promise-utils/index.js
index c71b17127b..557210ae26 100644
--- a/libraries/promise-utils/index.js
+++ b/libraries/promise-utils/index.js
@@ -13,6 +13,7 @@ module.exports = {
   expressify,
   expressifyErrorHandler,
   promiseMapWithLimit,
+  promiseMapSettledWithLimit,
 }
 
 /**
@@ -264,3 +265,19 @@ async function promiseMapWithLimit(concurrency, array, fn) {
   const limit = pLimit(concurrency)
   return await Promise.all(array.map(x => limit(() => fn(x))))
 }
+
+/**
+ * Map values in `array` with the async function `fn`
+ *
+ * Limit the number of unresolved promises to `concurrency`.
+ *
+ * @template T, U
+ * @param {number} concurrency
+ * @param {Array<T>} array
+ * @param {(T) => Promise<U>} fn
+ * @return {Promise<Array<PromiseSettledResult<U>>>}
+ */
+function promiseMapSettledWithLimit(concurrency, array, fn) {
+  const limit = pLimit(concurrency)
+  return Promise.allSettled(array.map(x => limit(() => fn(x))))
+}
diff --git a/package-lock.json b/package-lock.json
index 1827290165..5f0eb0baf1 100644
--- a/package-lock.json
+++ b/package-lock.json
@@ -30,7 +30,8 @@
         "services/third-party-datastore",
         "services/third-party-references",
         "services/tpdsworker",
-        "services/web"
+        "services/web",
+        "tools/saas-e2e"
       ],
       "dependencies": {
         "patch-package": "^8.0.0"
@@ -514,9 +515,9 @@
       }
     },
     "node_modules/@adobe/css-tools": {
-      "version": "4.4.1",
-      "resolved": "https://registry.npmjs.org/@adobe/css-tools/-/css-tools-4.4.1.tgz",
-      "integrity": "sha512-12WGKBQzjUAI4ayyF4IAtfw2QR/IDoqk6jTddXDhtYTJF9ASmoE1zst7cVtP0aL/F1jUJL5r+JxKXKEgHNbEUQ==",
+      "version": "4.4.2",
+      "resolved": "https://registry.npmjs.org/@adobe/css-tools/-/css-tools-4.4.2.tgz",
+      "integrity": "sha512-baYZExFpsdkBNuvGKTKWCwKH57HRZLVtycZS05WTQNVOiXVSeAki3nU35zlRbToeMW8aHlJfyS+1C4BOv27q0A==",
       "dev": true,
       "license": "MIT"
     },
@@ -1307,12 +1308,14 @@
       "dev": true
     },
     "node_modules/@babel/code-frame": {
-      "version": "7.24.7",
-      "resolved": "https://registry.npmjs.org/@babel/code-frame/-/code-frame-7.24.7.tgz",
-      "integrity": "sha512-BcYH1CVJBO9tvyIZ2jVeXgSIMvGZ2FDRvDdOIVQyuklNKSsx+eppDEBq/g47Ayw+RqNFE+URvOShmf+f/qwAlA==",
+      "version": "7.26.2",
+      "resolved": "https://registry.npmjs.org/@babel/code-frame/-/code-frame-7.26.2.tgz",
+      "integrity": "sha512-RJlIHRueQgwWitWgF8OdFYGZX328Ax5BCemNGlqHfplnRT9ESi8JkFlvaVYbS+UubVY6dpv87Fs2u5M29iNFVQ==",
       "dev": true,
+      "license": "MIT",
       "dependencies": {
-        "@babel/highlight": "^7.24.7",
+        "@babel/helper-validator-identifier":
"^7.25.9", + "js-tokens": "^4.0.0", "picocolors": "^1.0.0" }, "engines": { @@ -1320,30 +1323,32 @@ } }, "node_modules/@babel/compat-data": { - "version": "7.25.2", - "resolved": "https://registry.npmjs.org/@babel/compat-data/-/compat-data-7.25.2.tgz", - "integrity": "sha512-bYcppcpKBvX4znYaPEeFau03bp89ShqNMLs+rmdptMw+heSZh9+z84d2YG+K7cYLbWwzdjtDoW/uqZmPjulClQ==", + "version": "7.26.8", + "resolved": "https://registry.npmjs.org/@babel/compat-data/-/compat-data-7.26.8.tgz", + "integrity": "sha512-oH5UPLMWR3L2wEFLnFJ1TZXqHufiTKAiLfqw5zkhS4dKXLJ10yVztfil/twG8EDTA4F/tvVNw9nOl4ZMslB8rQ==", "dev": true, + "license": "MIT", "engines": { "node": ">=6.9.0" } }, "node_modules/@babel/core": { - "version": "7.25.2", - "resolved": "https://registry.npmjs.org/@babel/core/-/core-7.25.2.tgz", - "integrity": "sha512-BBt3opiCOxUr9euZ5/ro/Xv8/V7yJ5bjYMqG/C1YAo8MIKAnumZalCN+msbci3Pigy4lIQfPUpfMM27HMGaYEA==", + "version": "7.26.9", + "resolved": "https://registry.npmjs.org/@babel/core/-/core-7.26.9.tgz", + "integrity": "sha512-lWBYIrF7qK5+GjY5Uy+/hEgp8OJWOD/rpy74GplYRhEauvbHDeFB8t5hPOZxCZ0Oxf4Cc36tK51/l3ymJysrKw==", "dev": true, + "license": "MIT", "dependencies": { "@ampproject/remapping": "^2.2.0", - "@babel/code-frame": "^7.24.7", - "@babel/generator": "^7.25.0", - "@babel/helper-compilation-targets": "^7.25.2", - "@babel/helper-module-transforms": "^7.25.2", - "@babel/helpers": "^7.25.0", - "@babel/parser": "^7.25.0", - "@babel/template": "^7.25.0", - "@babel/traverse": "^7.25.2", - "@babel/types": "^7.25.2", + "@babel/code-frame": "^7.26.2", + "@babel/generator": "^7.26.9", + "@babel/helper-compilation-targets": "^7.26.5", + "@babel/helper-module-transforms": "^7.26.0", + "@babel/helpers": "^7.26.9", + "@babel/parser": "^7.26.9", + "@babel/template": "^7.26.9", + "@babel/traverse": "^7.26.9", + "@babel/types": "^7.26.9", "convert-source-map": "^2.0.0", "debug": "^4.1.0", "gensync": "^1.0.0-beta.2", @@ -1365,15 +1370,17 @@ "dev": true }, "node_modules/@babel/generator": { - "version": "7.25.0", - "resolved": "https://registry.npmjs.org/@babel/generator/-/generator-7.25.0.tgz", - "integrity": "sha512-3LEEcj3PVW8pW2R1SR1M89g/qrYk/m/mB/tLqn7dn4sbBUQyTqnlod+II2U4dqiGtUmkcnAmkMDralTFZttRiw==", + "version": "7.26.9", + "resolved": "https://registry.npmjs.org/@babel/generator/-/generator-7.26.9.tgz", + "integrity": "sha512-kEWdzjOAUMW4hAyrzJ0ZaTOu9OmpyDIQicIh0zg0EEcEkYXZb2TjtBhnHi2ViX7PKwZqF4xwqfAm299/QMP3lg==", "dev": true, + "license": "MIT", "dependencies": { - "@babel/types": "^7.25.0", + "@babel/parser": "^7.26.9", + "@babel/types": "^7.26.9", "@jridgewell/gen-mapping": "^0.3.5", "@jridgewell/trace-mapping": "^0.3.25", - "jsesc": "^2.5.1" + "jsesc": "^3.0.2" }, "engines": { "node": ">=6.9.0" @@ -1418,14 +1425,15 @@ } }, "node_modules/@babel/helper-compilation-targets": { - "version": "7.25.2", - "resolved": "https://registry.npmjs.org/@babel/helper-compilation-targets/-/helper-compilation-targets-7.25.2.tgz", - "integrity": "sha512-U2U5LsSaZ7TAt3cfaymQ8WHh0pxvdHoEk6HVpaexxixjyEquMh0L0YNJNM6CTGKMXV1iksi0iZkGw4AcFkPaaw==", + "version": "7.26.5", + "resolved": "https://registry.npmjs.org/@babel/helper-compilation-targets/-/helper-compilation-targets-7.26.5.tgz", + "integrity": "sha512-IXuyn5EkouFJscIDuFF5EsiSolseme1s0CZB+QxVugqJLYmKdxI1VfIBOst0SUu4rnk2Z7kqTwmoO1lp3HIfnA==", "dev": true, + "license": "MIT", "dependencies": { - "@babel/compat-data": "^7.25.2", - "@babel/helper-validator-option": "^7.24.8", - "browserslist": "^4.23.1", + "@babel/compat-data": "^7.26.5", + "@babel/helper-validator-option": 
"^7.25.9", + "browserslist": "^4.24.0", "lru-cache": "^5.1.1", "semver": "^6.3.1" }, @@ -1501,28 +1509,29 @@ } }, "node_modules/@babel/helper-module-imports": { - "version": "7.24.7", - "resolved": "https://registry.npmjs.org/@babel/helper-module-imports/-/helper-module-imports-7.24.7.tgz", - "integrity": "sha512-8AyH3C+74cgCVVXow/myrynrAGv+nTVg5vKu2nZph9x7RcRwzmh0VFallJuFTZ9mx6u4eSdXZfcOzSqTUm0HCA==", + "version": "7.25.9", + "resolved": "https://registry.npmjs.org/@babel/helper-module-imports/-/helper-module-imports-7.25.9.tgz", + "integrity": "sha512-tnUA4RsrmflIM6W6RFTLFSXITtl0wKjgpnLgXyowocVPrbYrLUXSBXDgTs8BlbmIzIdlBySRQjINYs2BAkiLtw==", "dev": true, + "license": "MIT", "dependencies": { - "@babel/traverse": "^7.24.7", - "@babel/types": "^7.24.7" + "@babel/traverse": "^7.25.9", + "@babel/types": "^7.25.9" }, "engines": { "node": ">=6.9.0" } }, "node_modules/@babel/helper-module-transforms": { - "version": "7.25.2", - "resolved": "https://registry.npmjs.org/@babel/helper-module-transforms/-/helper-module-transforms-7.25.2.tgz", - "integrity": "sha512-BjyRAbix6j/wv83ftcVJmBt72QtHI56C7JXZoG2xATiLpmoC7dpd8WnkikExHDVPpi/3qCmO6WY1EaXOluiecQ==", + "version": "7.26.0", + "resolved": "https://registry.npmjs.org/@babel/helper-module-transforms/-/helper-module-transforms-7.26.0.tgz", + "integrity": "sha512-xO+xu6B5K2czEnQye6BHA7DolFFmS3LB7stHZFaOLb1pAwO1HWLS8fXA+eh0A2yIvltPVmx3eNNDBJA2SLHXFw==", "dev": true, + "license": "MIT", "dependencies": { - "@babel/helper-module-imports": "^7.24.7", - "@babel/helper-simple-access": "^7.24.7", - "@babel/helper-validator-identifier": "^7.24.7", - "@babel/traverse": "^7.25.2" + "@babel/helper-module-imports": "^7.25.9", + "@babel/helper-validator-identifier": "^7.25.9", + "@babel/traverse": "^7.25.9" }, "engines": { "node": ">=6.9.0" @@ -1613,17 +1622,19 @@ } }, "node_modules/@babel/helper-string-parser": { - "version": "7.24.8", - "resolved": "https://registry.npmjs.org/@babel/helper-string-parser/-/helper-string-parser-7.24.8.tgz", - "integrity": "sha512-pO9KhhRcuUyGnJWwyEgnRJTSIZHiT+vMD0kPeD+so0l7mxkMT19g3pjY9GTnHySck/hDzq+dtW/4VgnMkippsQ==", + "version": "7.25.9", + "resolved": "https://registry.npmjs.org/@babel/helper-string-parser/-/helper-string-parser-7.25.9.tgz", + "integrity": "sha512-4A/SCr/2KLd5jrtOMFzaKjVtAei3+2r/NChoBNoZ3EyP/+GlhoaEGoWOZUmFmoITP7zOJyHIMm+DYRd8o3PvHA==", + "license": "MIT", "engines": { "node": ">=6.9.0" } }, "node_modules/@babel/helper-validator-identifier": { - "version": "7.24.7", - "resolved": "https://registry.npmjs.org/@babel/helper-validator-identifier/-/helper-validator-identifier-7.24.7.tgz", - "integrity": "sha512-rR+PBcQ1SMQDDyF6X0wxtG8QyLCgUB0eRAGguqRLfkCA87l7yAP7ehq8SNj96OOGTO8OBV70KhuFYcIkHXOg0w==", + "version": "7.25.9", + "resolved": "https://registry.npmjs.org/@babel/helper-validator-identifier/-/helper-validator-identifier-7.25.9.tgz", + "integrity": "sha512-Ed61U6XJc3CVRfkERJWDz4dJwKe7iLmmJsbOGu9wSloNSFttHV0I8g6UAgb7qnK5ly5bGLPd4oXZlxCdANBOWQ==", + "license": "MIT", "engines": { "node": ">=6.9.0" } @@ -1652,13 +1663,14 @@ } }, "node_modules/@babel/helpers": { - "version": "7.25.0", - "resolved": "https://registry.npmjs.org/@babel/helpers/-/helpers-7.25.0.tgz", - "integrity": "sha512-MjgLZ42aCm0oGjJj8CtSM3DB8NOOf8h2l7DCTePJs29u+v7yO/RBX9nShlKMgFnRks/Q4tBAe7Hxnov9VkGwLw==", + "version": "7.26.9", + "resolved": "https://registry.npmjs.org/@babel/helpers/-/helpers-7.26.9.tgz", + "integrity": "sha512-Mz/4+y8udxBKdmzt/UjPACs4G3j5SshJJEFFKxlCGPydG4JAHXxjWjAwjd09tf6oINvl1VfMJo+nB7H2YKQ0dA==", "dev": true, + "license": 
"MIT", "dependencies": { - "@babel/template": "^7.25.0", - "@babel/types": "^7.25.0" + "@babel/template": "^7.26.9", + "@babel/types": "^7.26.9" }, "engines": { "node": ">=6.9.0" @@ -1680,11 +1692,12 @@ } }, "node_modules/@babel/parser": { - "version": "7.25.3", - "resolved": "https://registry.npmjs.org/@babel/parser/-/parser-7.25.3.tgz", - "integrity": "sha512-iLTJKDbJ4hMvFPgQwwsVoxtHyWpKKPBrxkANrSYewDPaPpT5py5yeVkgPIJ7XYXhndxJpaA3PyALSXQ7u8e/Dw==", + "version": "7.26.9", + "resolved": "https://registry.npmjs.org/@babel/parser/-/parser-7.26.9.tgz", + "integrity": "sha512-81NWa1njQblgZbQHxWHpxxCzNsa3ZwvFqpUg7P+NNUU6f3UU2jBEg4OlF/J6rl8+PQGh1q6/zWScd001YwcA5A==", + "license": "MIT", "dependencies": { - "@babel/types": "^7.25.2" + "@babel/types": "^7.26.9" }, "bin": { "parser": "bin/babel-parser.js" @@ -3258,6 +3271,7 @@ "version": "7.16.7", "resolved": "https://registry.npmjs.org/@babel/runtime-corejs2/-/runtime-corejs2-7.16.7.tgz", "integrity": "sha512-ec0BM0J/9M5Cncha++AlgvvDlk+uM+m6f7K0t74ClcYzsE8LgX4RstRreksMSCI82o3LJS//UswmA0pUWkJpqg==", + "dev": true, "dependencies": { "core-js": "^2.6.5", "regenerator-runtime": "^0.13.4" @@ -3271,6 +3285,7 @@ "resolved": "https://registry.npmjs.org/core-js/-/core-js-2.6.12.tgz", "integrity": "sha512-Kb2wC0fvsWfQrgk8HU5lW6U/Lcs8+9aaYcy4ZFc6DDlo4nZ7n70dEgE5rtR0oG6ufKDUnrwfWL1mXR5ljDatrQ==", "deprecated": "core-js@<3.4 is no longer maintained and not recommended for usage due to the number of issues. Because of the V8 engine whims, feature detection in old core-js versions could cause a slowdown up to 100x even if nothing is polyfilled. Please, upgrade your dependencies to the actual version of core-js.", + "dev": true, "hasInstallScript": true }, "node_modules/@babel/runtime-corejs3": { @@ -3291,30 +3306,32 @@ "integrity": "sha512-srw17NI0TUWHuGa5CFGGmhfNIeja30WMBfbslPNhf6JrqQlLN5gcrvig1oqPxiVaXb0oW0XRKtH6Nngs5lKCIA==" }, "node_modules/@babel/template": { - "version": "7.25.0", - "resolved": "https://registry.npmjs.org/@babel/template/-/template-7.25.0.tgz", - "integrity": "sha512-aOOgh1/5XzKvg1jvVz7AVrx2piJ2XBi227DHmbY6y+bM9H2FlN+IfecYu4Xl0cNiiVejlsCri89LUsbj8vJD9Q==", + "version": "7.26.9", + "resolved": "https://registry.npmjs.org/@babel/template/-/template-7.26.9.tgz", + "integrity": "sha512-qyRplbeIpNZhmzOysF/wFMuP9sctmh2cFzRAZOn1YapxBsE1i9bJIY586R/WBLfLcmcBlM8ROBiQURnnNy+zfA==", "dev": true, + "license": "MIT", "dependencies": { - "@babel/code-frame": "^7.24.7", - "@babel/parser": "^7.25.0", - "@babel/types": "^7.25.0" + "@babel/code-frame": "^7.26.2", + "@babel/parser": "^7.26.9", + "@babel/types": "^7.26.9" }, "engines": { "node": ">=6.9.0" } }, "node_modules/@babel/traverse": { - "version": "7.25.3", - "resolved": "https://registry.npmjs.org/@babel/traverse/-/traverse-7.25.3.tgz", - "integrity": "sha512-HefgyP1x754oGCsKmV5reSmtV7IXj/kpaE1XYY+D9G5PvKKoFfSbiS4M77MdjuwlZKDIKFCffq9rPU+H/s3ZdQ==", + "version": "7.26.9", + "resolved": "https://registry.npmjs.org/@babel/traverse/-/traverse-7.26.9.tgz", + "integrity": "sha512-ZYW7L+pL8ahU5fXmNbPF+iZFHCv5scFak7MZ9bwaRPLUhHh7QQEMjZUg0HevihoqCM5iSYHN61EyCoZvqC+bxg==", "dev": true, + "license": "MIT", "dependencies": { - "@babel/code-frame": "^7.24.7", - "@babel/generator": "^7.25.0", - "@babel/parser": "^7.25.3", - "@babel/template": "^7.25.0", - "@babel/types": "^7.25.2", + "@babel/code-frame": "^7.26.2", + "@babel/generator": "^7.26.9", + "@babel/parser": "^7.26.9", + "@babel/template": "^7.26.9", + "@babel/types": "^7.26.9", "debug": "^4.3.1", "globals": "^11.1.0" }, @@ -3323,18 +3340,24 @@ } }, 
"node_modules/@babel/types": { - "version": "7.25.2", - "resolved": "https://registry.npmjs.org/@babel/types/-/types-7.25.2.tgz", - "integrity": "sha512-YTnYtra7W9e6/oAZEHj0bJehPRUlLH9/fbpT5LfB0NhQXyALCRkRs3zH9v07IYhkgpqX6Z78FnuccZr/l4Fs4Q==", + "version": "7.26.9", + "resolved": "https://registry.npmjs.org/@babel/types/-/types-7.26.9.tgz", + "integrity": "sha512-Y3IR1cRnOxOCDvMmNiym7XpXQ93iGDDPHx+Zj+NM+rg0fBaShfQLkg+hKPaZCEvg5N/LeCo4+Rj/i3FuJsIQaw==", + "license": "MIT", "dependencies": { - "@babel/helper-string-parser": "^7.24.8", - "@babel/helper-validator-identifier": "^7.24.7", - "to-fast-properties": "^2.0.0" + "@babel/helper-string-parser": "^7.25.9", + "@babel/helper-validator-identifier": "^7.25.9" }, "engines": { "node": ">=6.9.0" } }, + "node_modules/@balena/dockerignore": { + "version": "1.0.2", + "resolved": "https://registry.npmjs.org/@balena/dockerignore/-/dockerignore-1.0.2.tgz", + "integrity": "sha512-wMue2Sy4GAVTk6Ic4tJVcnfdau+gx2EnG7S+uAEe+TWJFqE4YoWN4/H8MSLj4eYJKxGg26lZwboEniNiNwZQ6Q==", + "license": "Apache-2.0" + }, "node_modules/@bcoe/v8-coverage": { "version": "0.2.3", "resolved": "https://registry.npmjs.org/@bcoe/v8-coverage/-/v8-coverage-0.2.3.tgz", @@ -4459,9 +4482,9 @@ } }, "node_modules/@esbuild/aix-ppc64": { - "version": "0.24.0", - "resolved": "https://registry.npmjs.org/@esbuild/aix-ppc64/-/aix-ppc64-0.24.0.tgz", - "integrity": "sha512-WtKdFM7ls47zkKHFVzMz8opM7LkcsIp9amDUBIAWirg70RM71WRSjdILPsY5Uv1D42ZpUfaPILDlfactHgsRkw==", + "version": "0.25.0", + "resolved": "https://registry.npmjs.org/@esbuild/aix-ppc64/-/aix-ppc64-0.25.0.tgz", + "integrity": "sha512-O7vun9Sf8DFjH2UtqK8Ku3LkquL9SZL8OLY1T5NZkA34+wG3OQF7cl4Ql8vdNzM6fzBbYfLaiRLIOZ+2FOCgBQ==", "cpu": [ "ppc64" ], @@ -4476,9 +4499,9 @@ } }, "node_modules/@esbuild/android-arm": { - "version": "0.24.0", - "resolved": "https://registry.npmjs.org/@esbuild/android-arm/-/android-arm-0.24.0.tgz", - "integrity": "sha512-arAtTPo76fJ/ICkXWetLCc9EwEHKaeya4vMrReVlEIUCAUncH7M4bhMQ+M9Vf+FFOZJdTNMXNBrWwW+OXWpSew==", + "version": "0.25.0", + "resolved": "https://registry.npmjs.org/@esbuild/android-arm/-/android-arm-0.25.0.tgz", + "integrity": "sha512-PTyWCYYiU0+1eJKmw21lWtC+d08JDZPQ5g+kFyxP0V+es6VPPSUhM6zk8iImp2jbV6GwjX4pap0JFbUQN65X1g==", "cpu": [ "arm" ], @@ -4493,9 +4516,9 @@ } }, "node_modules/@esbuild/android-arm64": { - "version": "0.24.0", - "resolved": "https://registry.npmjs.org/@esbuild/android-arm64/-/android-arm64-0.24.0.tgz", - "integrity": "sha512-Vsm497xFM7tTIPYK9bNTYJyF/lsP590Qc1WxJdlB6ljCbdZKU9SY8i7+Iin4kyhV/KV5J2rOKsBQbB77Ab7L/w==", + "version": "0.25.0", + "resolved": "https://registry.npmjs.org/@esbuild/android-arm64/-/android-arm64-0.25.0.tgz", + "integrity": "sha512-grvv8WncGjDSyUBjN9yHXNt+cq0snxXbDxy5pJtzMKGmmpPxeAmAhWxXI+01lU5rwZomDgD3kJwulEnhTRUd6g==", "cpu": [ "arm64" ], @@ -4510,9 +4533,9 @@ } }, "node_modules/@esbuild/android-x64": { - "version": "0.24.0", - "resolved": "https://registry.npmjs.org/@esbuild/android-x64/-/android-x64-0.24.0.tgz", - "integrity": "sha512-t8GrvnFkiIY7pa7mMgJd7p8p8qqYIz1NYiAoKc75Zyv73L3DZW++oYMSHPRarcotTKuSs6m3hTOa5CKHaS02TQ==", + "version": "0.25.0", + "resolved": "https://registry.npmjs.org/@esbuild/android-x64/-/android-x64-0.25.0.tgz", + "integrity": "sha512-m/ix7SfKG5buCnxasr52+LI78SQ+wgdENi9CqyCXwjVR2X4Jkz+BpC3le3AoBPYTC9NHklwngVXvbJ9/Akhrfg==", "cpu": [ "x64" ], @@ -4527,9 +4550,9 @@ } }, "node_modules/@esbuild/darwin-arm64": { - "version": "0.24.0", - "resolved": "https://registry.npmjs.org/@esbuild/darwin-arm64/-/darwin-arm64-0.24.0.tgz", - "integrity": 
"sha512-CKyDpRbK1hXwv79soeTJNHb5EiG6ct3efd/FTPdzOWdbZZfGhpbcqIpiD0+vwmpu0wTIL97ZRPZu8vUt46nBSw==", + "version": "0.25.0", + "resolved": "https://registry.npmjs.org/@esbuild/darwin-arm64/-/darwin-arm64-0.25.0.tgz", + "integrity": "sha512-mVwdUb5SRkPayVadIOI78K7aAnPamoeFR2bT5nszFUZ9P8UpK4ratOdYbZZXYSqPKMHfS1wdHCJk1P1EZpRdvw==", "cpu": [ "arm64" ], @@ -4544,9 +4567,9 @@ } }, "node_modules/@esbuild/darwin-x64": { - "version": "0.24.0", - "resolved": "https://registry.npmjs.org/@esbuild/darwin-x64/-/darwin-x64-0.24.0.tgz", - "integrity": "sha512-rgtz6flkVkh58od4PwTRqxbKH9cOjaXCMZgWD905JOzjFKW+7EiUObfd/Kav+A6Gyud6WZk9w+xu6QLytdi2OA==", + "version": "0.25.0", + "resolved": "https://registry.npmjs.org/@esbuild/darwin-x64/-/darwin-x64-0.25.0.tgz", + "integrity": "sha512-DgDaYsPWFTS4S3nWpFcMn/33ZZwAAeAFKNHNa1QN0rI4pUjgqf0f7ONmXf6d22tqTY+H9FNdgeaAa+YIFUn2Rg==", "cpu": [ "x64" ], @@ -4561,9 +4584,9 @@ } }, "node_modules/@esbuild/freebsd-arm64": { - "version": "0.24.0", - "resolved": "https://registry.npmjs.org/@esbuild/freebsd-arm64/-/freebsd-arm64-0.24.0.tgz", - "integrity": "sha512-6Mtdq5nHggwfDNLAHkPlyLBpE5L6hwsuXZX8XNmHno9JuL2+bg2BX5tRkwjyfn6sKbxZTq68suOjgWqCicvPXA==", + "version": "0.25.0", + "resolved": "https://registry.npmjs.org/@esbuild/freebsd-arm64/-/freebsd-arm64-0.25.0.tgz", + "integrity": "sha512-VN4ocxy6dxefN1MepBx/iD1dH5K8qNtNe227I0mnTRjry8tj5MRk4zprLEdG8WPyAPb93/e4pSgi1SoHdgOa4w==", "cpu": [ "arm64" ], @@ -4578,9 +4601,9 @@ } }, "node_modules/@esbuild/freebsd-x64": { - "version": "0.24.0", - "resolved": "https://registry.npmjs.org/@esbuild/freebsd-x64/-/freebsd-x64-0.24.0.tgz", - "integrity": "sha512-D3H+xh3/zphoX8ck4S2RxKR6gHlHDXXzOf6f/9dbFt/NRBDIE33+cVa49Kil4WUjxMGW0ZIYBYtaGCa2+OsQwQ==", + "version": "0.25.0", + "resolved": "https://registry.npmjs.org/@esbuild/freebsd-x64/-/freebsd-x64-0.25.0.tgz", + "integrity": "sha512-mrSgt7lCh07FY+hDD1TxiTyIHyttn6vnjesnPoVDNmDfOmggTLXRv8Id5fNZey1gl/V2dyVK1VXXqVsQIiAk+A==", "cpu": [ "x64" ], @@ -4595,9 +4618,9 @@ } }, "node_modules/@esbuild/linux-arm": { - "version": "0.24.0", - "resolved": "https://registry.npmjs.org/@esbuild/linux-arm/-/linux-arm-0.24.0.tgz", - "integrity": "sha512-gJKIi2IjRo5G6Glxb8d3DzYXlxdEj2NlkixPsqePSZMhLudqPhtZ4BUrpIuTjJYXxvF9njql+vRjB2oaC9XpBw==", + "version": "0.25.0", + "resolved": "https://registry.npmjs.org/@esbuild/linux-arm/-/linux-arm-0.25.0.tgz", + "integrity": "sha512-vkB3IYj2IDo3g9xX7HqhPYxVkNQe8qTK55fraQyTzTX/fxaDtXiEnavv9geOsonh2Fd2RMB+i5cbhu2zMNWJwg==", "cpu": [ "arm" ], @@ -4612,9 +4635,9 @@ } }, "node_modules/@esbuild/linux-arm64": { - "version": "0.24.0", - "resolved": "https://registry.npmjs.org/@esbuild/linux-arm64/-/linux-arm64-0.24.0.tgz", - "integrity": "sha512-TDijPXTOeE3eaMkRYpcy3LarIg13dS9wWHRdwYRnzlwlA370rNdZqbcp0WTyyV/k2zSxfko52+C7jU5F9Tfj1g==", + "version": "0.25.0", + "resolved": "https://registry.npmjs.org/@esbuild/linux-arm64/-/linux-arm64-0.25.0.tgz", + "integrity": "sha512-9QAQjTWNDM/Vk2bgBl17yWuZxZNQIF0OUUuPZRKoDtqF2k4EtYbpyiG5/Dk7nqeK6kIJWPYldkOcBqjXjrUlmg==", "cpu": [ "arm64" ], @@ -4629,9 +4652,9 @@ } }, "node_modules/@esbuild/linux-ia32": { - "version": "0.24.0", - "resolved": "https://registry.npmjs.org/@esbuild/linux-ia32/-/linux-ia32-0.24.0.tgz", - "integrity": "sha512-K40ip1LAcA0byL05TbCQ4yJ4swvnbzHscRmUilrmP9Am7//0UjPreh4lpYzvThT2Quw66MhjG//20mrufm40mA==", + "version": "0.25.0", + "resolved": "https://registry.npmjs.org/@esbuild/linux-ia32/-/linux-ia32-0.25.0.tgz", + "integrity": "sha512-43ET5bHbphBegyeqLb7I1eYn2P/JYGNmzzdidq/w0T8E2SsYL1U6un2NFROFRg1JZLTzdCoRomg8Rvf9M6W6Gg==", 
"cpu": [ "ia32" ], @@ -4646,9 +4669,9 @@ } }, "node_modules/@esbuild/linux-loong64": { - "version": "0.24.0", - "resolved": "https://registry.npmjs.org/@esbuild/linux-loong64/-/linux-loong64-0.24.0.tgz", - "integrity": "sha512-0mswrYP/9ai+CU0BzBfPMZ8RVm3RGAN/lmOMgW4aFUSOQBjA31UP8Mr6DDhWSuMwj7jaWOT0p0WoZ6jeHhrD7g==", + "version": "0.25.0", + "resolved": "https://registry.npmjs.org/@esbuild/linux-loong64/-/linux-loong64-0.25.0.tgz", + "integrity": "sha512-fC95c/xyNFueMhClxJmeRIj2yrSMdDfmqJnyOY4ZqsALkDrrKJfIg5NTMSzVBr5YW1jf+l7/cndBfP3MSDpoHw==", "cpu": [ "loong64" ], @@ -4663,9 +4686,9 @@ } }, "node_modules/@esbuild/linux-mips64el": { - "version": "0.24.0", - "resolved": "https://registry.npmjs.org/@esbuild/linux-mips64el/-/linux-mips64el-0.24.0.tgz", - "integrity": "sha512-hIKvXm0/3w/5+RDtCJeXqMZGkI2s4oMUGj3/jM0QzhgIASWrGO5/RlzAzm5nNh/awHE0A19h/CvHQe6FaBNrRA==", + "version": "0.25.0", + "resolved": "https://registry.npmjs.org/@esbuild/linux-mips64el/-/linux-mips64el-0.25.0.tgz", + "integrity": "sha512-nkAMFju7KDW73T1DdH7glcyIptm95a7Le8irTQNO/qtkoyypZAnjchQgooFUDQhNAy4iu08N79W4T4pMBwhPwQ==", "cpu": [ "mips64el" ], @@ -4680,9 +4703,9 @@ } }, "node_modules/@esbuild/linux-ppc64": { - "version": "0.24.0", - "resolved": "https://registry.npmjs.org/@esbuild/linux-ppc64/-/linux-ppc64-0.24.0.tgz", - "integrity": "sha512-HcZh5BNq0aC52UoocJxaKORfFODWXZxtBaaZNuN3PUX3MoDsChsZqopzi5UupRhPHSEHotoiptqikjN/B77mYQ==", + "version": "0.25.0", + "resolved": "https://registry.npmjs.org/@esbuild/linux-ppc64/-/linux-ppc64-0.25.0.tgz", + "integrity": "sha512-NhyOejdhRGS8Iwv+KKR2zTq2PpysF9XqY+Zk77vQHqNbo/PwZCzB5/h7VGuREZm1fixhs4Q/qWRSi5zmAiO4Fw==", "cpu": [ "ppc64" ], @@ -4697,9 +4720,9 @@ } }, "node_modules/@esbuild/linux-riscv64": { - "version": "0.24.0", - "resolved": "https://registry.npmjs.org/@esbuild/linux-riscv64/-/linux-riscv64-0.24.0.tgz", - "integrity": "sha512-bEh7dMn/h3QxeR2KTy1DUszQjUrIHPZKyO6aN1X4BCnhfYhuQqedHaa5MxSQA/06j3GpiIlFGSsy1c7Gf9padw==", + "version": "0.25.0", + "resolved": "https://registry.npmjs.org/@esbuild/linux-riscv64/-/linux-riscv64-0.25.0.tgz", + "integrity": "sha512-5S/rbP5OY+GHLC5qXp1y/Mx//e92L1YDqkiBbO9TQOvuFXM+iDqUNG5XopAnXoRH3FjIUDkeGcY1cgNvnXp/kA==", "cpu": [ "riscv64" ], @@ -4714,9 +4737,9 @@ } }, "node_modules/@esbuild/linux-s390x": { - "version": "0.24.0", - "resolved": "https://registry.npmjs.org/@esbuild/linux-s390x/-/linux-s390x-0.24.0.tgz", - "integrity": "sha512-ZcQ6+qRkw1UcZGPyrCiHHkmBaj9SiCD8Oqd556HldP+QlpUIe2Wgn3ehQGVoPOvZvtHm8HPx+bH20c9pvbkX3g==", + "version": "0.25.0", + "resolved": "https://registry.npmjs.org/@esbuild/linux-s390x/-/linux-s390x-0.25.0.tgz", + "integrity": "sha512-XM2BFsEBz0Fw37V0zU4CXfcfuACMrppsMFKdYY2WuTS3yi8O1nFOhil/xhKTmE1nPmVyvQJjJivgDT+xh8pXJA==", "cpu": [ "s390x" ], @@ -4731,9 +4754,9 @@ } }, "node_modules/@esbuild/linux-x64": { - "version": "0.24.0", - "resolved": "https://registry.npmjs.org/@esbuild/linux-x64/-/linux-x64-0.24.0.tgz", - "integrity": "sha512-vbutsFqQ+foy3wSSbmjBXXIJ6PL3scghJoM8zCL142cGaZKAdCZHyf+Bpu/MmX9zT9Q0zFBVKb36Ma5Fzfa8xA==", + "version": "0.25.0", + "resolved": "https://registry.npmjs.org/@esbuild/linux-x64/-/linux-x64-0.25.0.tgz", + "integrity": "sha512-9yl91rHw/cpwMCNytUDxwj2XjFpxML0y9HAOH9pNVQDpQrBxHy01Dx+vaMu0N1CKa/RzBD2hB4u//nfc+Sd3Cw==", "cpu": [ "x64" ], @@ -4747,10 +4770,27 @@ "node": ">=18" } }, + "node_modules/@esbuild/netbsd-arm64": { + "version": "0.25.0", + "resolved": "https://registry.npmjs.org/@esbuild/netbsd-arm64/-/netbsd-arm64-0.25.0.tgz", + "integrity": 
"sha512-RuG4PSMPFfrkH6UwCAqBzauBWTygTvb1nxWasEJooGSJ/NwRw7b2HOwyRTQIU97Hq37l3npXoZGYMy3b3xYvPw==", + "cpu": [ + "arm64" + ], + "dev": true, + "license": "MIT", + "optional": true, + "os": [ + "netbsd" + ], + "engines": { + "node": ">=18" + } + }, "node_modules/@esbuild/netbsd-x64": { - "version": "0.24.0", - "resolved": "https://registry.npmjs.org/@esbuild/netbsd-x64/-/netbsd-x64-0.24.0.tgz", - "integrity": "sha512-hjQ0R/ulkO8fCYFsG0FZoH+pWgTTDreqpqY7UnQntnaKv95uP5iW3+dChxnx7C3trQQU40S+OgWhUVwCjVFLvg==", + "version": "0.25.0", + "resolved": "https://registry.npmjs.org/@esbuild/netbsd-x64/-/netbsd-x64-0.25.0.tgz", + "integrity": "sha512-jl+qisSB5jk01N5f7sPCsBENCOlPiS/xptD5yxOx2oqQfyourJwIKLRA2yqWdifj3owQZCL2sn6o08dBzZGQzA==", "cpu": [ "x64" ], @@ -4765,9 +4805,9 @@ } }, "node_modules/@esbuild/openbsd-arm64": { - "version": "0.24.0", - "resolved": "https://registry.npmjs.org/@esbuild/openbsd-arm64/-/openbsd-arm64-0.24.0.tgz", - "integrity": "sha512-MD9uzzkPQbYehwcN583yx3Tu5M8EIoTD+tUgKF982WYL9Pf5rKy9ltgD0eUgs8pvKnmizxjXZyLt0z6DC3rRXg==", + "version": "0.25.0", + "resolved": "https://registry.npmjs.org/@esbuild/openbsd-arm64/-/openbsd-arm64-0.25.0.tgz", + "integrity": "sha512-21sUNbq2r84YE+SJDfaQRvdgznTD8Xc0oc3p3iW/a1EVWeNj/SdUCbm5U0itZPQYRuRTW20fPMWMpcrciH2EJw==", "cpu": [ "arm64" ], @@ -4782,9 +4822,9 @@ } }, "node_modules/@esbuild/openbsd-x64": { - "version": "0.24.0", - "resolved": "https://registry.npmjs.org/@esbuild/openbsd-x64/-/openbsd-x64-0.24.0.tgz", - "integrity": "sha512-4ir0aY1NGUhIC1hdoCzr1+5b43mw99uNwVzhIq1OY3QcEwPDO3B7WNXBzaKY5Nsf1+N11i1eOfFcq+D/gOS15Q==", + "version": "0.25.0", + "resolved": "https://registry.npmjs.org/@esbuild/openbsd-x64/-/openbsd-x64-0.25.0.tgz", + "integrity": "sha512-2gwwriSMPcCFRlPlKx3zLQhfN/2WjJ2NSlg5TKLQOJdV0mSxIcYNTMhk3H3ulL/cak+Xj0lY1Ym9ysDV1igceg==", "cpu": [ "x64" ], @@ -4799,9 +4839,9 @@ } }, "node_modules/@esbuild/sunos-x64": { - "version": "0.24.0", - "resolved": "https://registry.npmjs.org/@esbuild/sunos-x64/-/sunos-x64-0.24.0.tgz", - "integrity": "sha512-jVzdzsbM5xrotH+W5f1s+JtUy1UWgjU0Cf4wMvffTB8m6wP5/kx0KiaLHlbJO+dMgtxKV8RQ/JvtlFcdZ1zCPA==", + "version": "0.25.0", + "resolved": "https://registry.npmjs.org/@esbuild/sunos-x64/-/sunos-x64-0.25.0.tgz", + "integrity": "sha512-bxI7ThgLzPrPz484/S9jLlvUAHYMzy6I0XiU1ZMeAEOBcS0VePBFxh1JjTQt3Xiat5b6Oh4x7UC7IwKQKIJRIg==", "cpu": [ "x64" ], @@ -4816,9 +4856,9 @@ } }, "node_modules/@esbuild/win32-arm64": { - "version": "0.24.0", - "resolved": "https://registry.npmjs.org/@esbuild/win32-arm64/-/win32-arm64-0.24.0.tgz", - "integrity": "sha512-iKc8GAslzRpBytO2/aN3d2yb2z8XTVfNV0PjGlCxKo5SgWmNXx82I/Q3aG1tFfS+A2igVCY97TJ8tnYwpUWLCA==", + "version": "0.25.0", + "resolved": "https://registry.npmjs.org/@esbuild/win32-arm64/-/win32-arm64-0.25.0.tgz", + "integrity": "sha512-ZUAc2YK6JW89xTbXvftxdnYy3m4iHIkDtK3CLce8wg8M2L+YZhIvO1DKpxrd0Yr59AeNNkTiic9YLf6FTtXWMw==", "cpu": [ "arm64" ], @@ -4833,9 +4873,9 @@ } }, "node_modules/@esbuild/win32-ia32": { - "version": "0.24.0", - "resolved": "https://registry.npmjs.org/@esbuild/win32-ia32/-/win32-ia32-0.24.0.tgz", - "integrity": "sha512-vQW36KZolfIudCcTnaTpmLQ24Ha1RjygBo39/aLkM2kmjkWmZGEJ5Gn9l5/7tzXA42QGIoWbICfg6KLLkIw6yw==", + "version": "0.25.0", + "resolved": "https://registry.npmjs.org/@esbuild/win32-ia32/-/win32-ia32-0.25.0.tgz", + "integrity": "sha512-eSNxISBu8XweVEWG31/JzjkIGbGIJN/TrRoiSVZwZ6pkC6VX4Im/WV2cz559/TXLcYbcrDN8JtKgd9DJVIo8GA==", "cpu": [ "ia32" ], @@ -4850,9 +4890,9 @@ } }, "node_modules/@esbuild/win32-x64": { - "version": "0.24.0", - "resolved": 
"https://registry.npmjs.org/@esbuild/win32-x64/-/win32-x64-0.24.0.tgz", - "integrity": "sha512-7IAFPrjSQIJrGsK6flwg7NFmwBoSTyF3rl7If0hNUFQU4ilTsEPL6GuMuU9BfIWVVGuRnuIidkSMC+c0Otu8IA==", + "version": "0.25.0", + "resolved": "https://registry.npmjs.org/@esbuild/win32-x64/-/win32-x64-0.25.0.tgz", + "integrity": "sha512-ZENoHJBxA20C2zFzh6AI4fT6RraMzjYw4xKWemRTRmRVtN9c5DcH9r/f2ihEkMjOW5eGgrwCslG/+Y/3bL+DHQ==", "cpu": [ "x64" ], @@ -6408,6 +6448,29 @@ "url": "https://github.com/chalk/wrap-ansi?sponsor=1" } }, + "node_modules/@isomorphic-git/idb-keyval": { + "version": "3.3.2", + "resolved": "https://registry.npmjs.org/@isomorphic-git/idb-keyval/-/idb-keyval-3.3.2.tgz", + "integrity": "sha512-r8/AdpiS0/WJCNR/t/gsgL+M8NMVj/ek7s60uz3LmpCaTF2mEVlZJlB01ZzalgYzRLXwSPC92o+pdzjM7PN/pA==", + "dev": true, + "license": "Apache-2.0" + }, + "node_modules/@isomorphic-git/lightning-fs": { + "version": "4.6.0", + "resolved": "https://registry.npmjs.org/@isomorphic-git/lightning-fs/-/lightning-fs-4.6.0.tgz", + "integrity": "sha512-tfon8f1h6LawjFI/d8lZPWRPTxmdvyTMbkT/j5yo6dB0hALhKw5D9JsdCcUu/D1pAcMMiU7GZFDsDGqylerr7g==", + "dev": true, + "license": "MIT", + "dependencies": { + "@isomorphic-git/idb-keyval": "3.3.2", + "isomorphic-textencoder": "1.0.1", + "just-debounce-it": "1.1.0", + "just-once": "1.1.0" + }, + "bin": { + "superblocktxt": "src/superblocktxt.js" + } + }, "node_modules/@istanbuljs/schema": { "version": "0.1.3", "resolved": "https://registry.npmjs.org/@istanbuljs/schema/-/schema-0.1.3.tgz", @@ -7233,18 +7296,6 @@ "resolved": "https://registry.npmjs.org/ms/-/ms-2.1.2.tgz", "integrity": "sha512-sGkPx+VjMtmA6MX27oA4FBFELFCZZ4S4XqeGOXCv68tT+jb3vk/RyaKWP0PTKyWtmLSM0b+adUTEvbs1PEaH2w==" }, - "node_modules/@node-saml/node-saml/node_modules/xml-crypto": { - "version": "3.2.0", - "resolved": "https://registry.npmjs.org/xml-crypto/-/xml-crypto-3.2.0.tgz", - "integrity": "sha512-qVurBUOQrmvlgmZqIVBqmb06TD2a/PpEUfFPgD7BuBfjmoH4zgkqaWSIJrnymlCvM2GGt9x+XtJFA+ttoAufqg==", - "dependencies": { - "@xmldom/xmldom": "^0.8.8", - "xpath": "0.0.32" - }, - "engines": { - "node": ">=4.0.0" - } - }, "node_modules/@node-saml/node-saml/node_modules/xml-encryption": { "version": "3.0.2", "resolved": "https://registry.npmjs.org/xml-encryption/-/xml-encryption-3.0.2.tgz", @@ -7421,32 +7472,6 @@ "@opentelemetry/api": ">=1.0.0 <1.5.0" } }, - "node_modules/@opentelemetry/context-zone": { - "version": "1.15.2", - "resolved": "https://registry.npmjs.org/@opentelemetry/context-zone/-/context-zone-1.15.2.tgz", - "integrity": "sha512-VdzdaETT7Tm7OXRGLe+I01L0MERR+eMcKK7KLbIyLQFA4ThVWK5TUW+A28jQQ1P0UfHRPw/kub7162yUui5YKw==", - "dev": true, - "dependencies": { - "@opentelemetry/context-zone-peer-dep": "1.15.2", - "zone.js": "^0.11.0" - }, - "engines": { - "node": ">=14" - } - }, - "node_modules/@opentelemetry/context-zone-peer-dep": { - "version": "1.15.2", - "resolved": "https://registry.npmjs.org/@opentelemetry/context-zone-peer-dep/-/context-zone-peer-dep-1.15.2.tgz", - "integrity": "sha512-AEi2rTyLCL6y8jjD33lSQ6tEUMOT4QJH6Ep1RpT56UdkrPQbf60uUSuSx5Ufpms0DNZt2AnFZYShzfYUZmvOJQ==", - "dev": true, - "engines": { - "node": ">=14" - }, - "peerDependencies": { - "@opentelemetry/api": ">=1.0.0 <1.5.0", - "zone.js": "^0.10.2 || ^0.11.0" - } - }, "node_modules/@opentelemetry/core": { "version": "1.15.2", "resolved": "https://registry.npmjs.org/@opentelemetry/core/-/core-1.15.2.tgz", @@ -8271,24 +8296,6 @@ "@opentelemetry/api": "^1.3.0" } }, - "node_modules/@opentelemetry/instrumentation-xml-http-request": { - "version": "0.41.2", - "resolved": 
"https://registry.npmjs.org/@opentelemetry/instrumentation-xml-http-request/-/instrumentation-xml-http-request-0.41.2.tgz", - "integrity": "sha512-lRj9JPSVoE/lReUQ8afekoCyEAGdIWNrzF42Kv63cf5CCMqB/aoHH+NdnSXZ5ANsOvNQ9H65qTsqCbwkzn1x6g==", - "dev": true, - "dependencies": { - "@opentelemetry/core": "1.15.2", - "@opentelemetry/instrumentation": "0.41.2", - "@opentelemetry/sdk-trace-web": "1.15.2", - "@opentelemetry/semantic-conventions": "1.15.2" - }, - "engines": { - "node": ">=14" - }, - "peerDependencies": { - "@opentelemetry/api": "^1.0.0" - } - }, "node_modules/@opentelemetry/instrumentation/node_modules/lru-cache": { "version": "6.0.0", "resolved": "https://registry.npmjs.org/lru-cache/-/lru-cache-6.0.0.tgz", @@ -8642,23 +8649,6 @@ "resolved": "https://registry.npmjs.org/yallist/-/yallist-4.0.0.tgz", "integrity": "sha512-3wdGidZyq5PB084XLES5TpOSRA3wjXAlIWMhum2kRcv/41Sn2emQ0dycQW4uZXLejwKvg6EsvbdlVL+FYEct7A==" }, - "node_modules/@opentelemetry/sdk-trace-web": { - "version": "1.15.2", - "resolved": "https://registry.npmjs.org/@opentelemetry/sdk-trace-web/-/sdk-trace-web-1.15.2.tgz", - "integrity": "sha512-OjCrwtu4b+cAt540wyIr7d0lCA/cY9y42lmYDFUfJ8Ixj2bByIUJ4yyd9M7mXHpQHdiR/Kq2vzsgS14Uj+RU0Q==", - "dev": true, - "dependencies": { - "@opentelemetry/core": "1.15.2", - "@opentelemetry/sdk-trace-base": "1.15.2", - "@opentelemetry/semantic-conventions": "1.15.2" - }, - "engines": { - "node": ">=14" - }, - "peerDependencies": { - "@opentelemetry/api": ">=1.0.0 <1.5.0" - } - }, "node_modules/@opentelemetry/semantic-conventions": { "version": "1.15.2", "resolved": "https://registry.npmjs.org/@opentelemetry/semantic-conventions/-/semantic-conventions-1.15.2.tgz", @@ -8838,6 +8828,10 @@ "resolved": "services/references", "link": true }, + "node_modules/@overleaf/saas-e2e": { + "resolved": "tools/saas-e2e", + "link": true + }, "node_modules/@overleaf/settings": { "resolved": "libraries/settings", "link": true @@ -9107,7 +9101,6 @@ "version": "2.11.8", "resolved": "https://registry.npmjs.org/@popperjs/core/-/core-2.11.8.tgz", "integrity": "sha512-P1st0aksCrn9sGZhp8GMYwBnQsbvAWsZAX44oXNNvLHGqAOcoVxmjZiohstwQ7SqKnbR47akdNi+uleWD8+g6A==", - "dev": true, "funding": { "type": "opencollective", "url": "https://opencollective.com/popperjs" @@ -10096,13 +10089,14 @@ } }, "node_modules/@storybook/addon-a11y": { - "version": "8.4.7", - "resolved": "https://registry.npmjs.org/@storybook/addon-a11y/-/addon-a11y-8.4.7.tgz", - "integrity": "sha512-GpUvXp6n25U1ZSv+hmDC+05BEqxWdlWjQTb/GaboRXZQeMBlze6zckpVb66spjmmtQAIISo0eZxX1+mGcVR7lA==", + "version": "8.6.4", + "resolved": "https://registry.npmjs.org/@storybook/addon-a11y/-/addon-a11y-8.6.4.tgz", + "integrity": "sha512-B3/d2cRlnpAlE3kh+OBaly6qrWN9DEqwDyZsNeobaiXnNp11xoHZP2OWjEwXldc0pKls41jeOksXyXrILfvTng==", "dev": true, "license": "MIT", "dependencies": { - "@storybook/addon-highlight": "8.4.7", + "@storybook/addon-highlight": "8.6.4", + "@storybook/test": "8.6.4", "axe-core": "^4.2.0" }, "funding": { @@ -10110,13 +10104,13 @@ "url": "https://opencollective.com/storybook" }, "peerDependencies": { - "storybook": "^8.4.7" + "storybook": "^8.6.4" } }, "node_modules/@storybook/addon-actions": { - "version": "8.4.7", - "resolved": "https://registry.npmjs.org/@storybook/addon-actions/-/addon-actions-8.4.7.tgz", - "integrity": "sha512-mjtD5JxcPuW74T6h7nqMxWTvDneFtokg88p6kQ5OnC1M259iAXb//yiSZgu/quunMHPCXSiqn4FNOSgASTSbsA==", + "version": "8.6.4", + "resolved": "https://registry.npmjs.org/@storybook/addon-actions/-/addon-actions-8.6.4.tgz", + "integrity": 
"sha512-mCcyfkeb19fJX0dpQqqZCnWBwjVn0/27xcpR0mbm/KW2wTByU6bKFFujgrHsX3ONl97IcIaUnmwwUwBr1ebZXw==", "dev": true, "license": "MIT", "dependencies": { @@ -10131,7 +10125,7 @@ "url": "https://opencollective.com/storybook" }, "peerDependencies": { - "storybook": "^8.4.7" + "storybook": "^8.6.4" } }, "node_modules/@storybook/addon-actions/node_modules/uuid": { @@ -10149,9 +10143,9 @@ } }, "node_modules/@storybook/addon-backgrounds": { - "version": "8.4.7", - "resolved": "https://registry.npmjs.org/@storybook/addon-backgrounds/-/addon-backgrounds-8.4.7.tgz", - "integrity": "sha512-I4/aErqtFiazcoWyKafOAm3bLpxTj6eQuH/woSbk1Yx+EzN+Dbrgx1Updy8//bsNtKkcrXETITreqHC+a57DHQ==", + "version": "8.6.4", + "resolved": "https://registry.npmjs.org/@storybook/addon-backgrounds/-/addon-backgrounds-8.6.4.tgz", + "integrity": "sha512-lRYGumlYdd1RptQJvOTRMx/q2pDmg2MO5GX4la7VfI8KrUyeuC1ZOSRDEcXeTuAZWJztqmtymg6bB7cAAoxCFA==", "dev": true, "license": "MIT", "dependencies": { @@ -10164,13 +10158,13 @@ "url": "https://opencollective.com/storybook" }, "peerDependencies": { - "storybook": "^8.4.7" + "storybook": "^8.6.4" } }, "node_modules/@storybook/addon-controls": { - "version": "8.4.7", - "resolved": "https://registry.npmjs.org/@storybook/addon-controls/-/addon-controls-8.4.7.tgz", - "integrity": "sha512-377uo5IsJgXLnQLJixa47+11V+7Wn9KcDEw+96aGCBCfLbWNH8S08tJHHnSu+jXg9zoqCAC23MetntVp6LetHA==", + "version": "8.6.4", + "resolved": "https://registry.npmjs.org/@storybook/addon-controls/-/addon-controls-8.6.4.tgz", + "integrity": "sha512-oMMP9Bj0RMfYmaitjFt6oBSjKH4titUqP+wE6PrZ3v+Om56f4buqfNKXRf80As2OrsZn0pjj95muWzVVHqIhyQ==", "dev": true, "license": "MIT", "dependencies": { @@ -10183,22 +10177,22 @@ "url": "https://opencollective.com/storybook" }, "peerDependencies": { - "storybook": "^8.4.7" + "storybook": "^8.6.4" } }, "node_modules/@storybook/addon-docs": { - "version": "8.4.7", - "resolved": "https://registry.npmjs.org/@storybook/addon-docs/-/addon-docs-8.4.7.tgz", - "integrity": "sha512-NwWaiTDT5puCBSUOVuf6ME7Zsbwz7Y79WF5tMZBx/sLQ60vpmJVQsap6NSjvK1Ravhc21EsIXqemAcBjAWu80w==", + "version": "8.6.4", + "resolved": "https://registry.npmjs.org/@storybook/addon-docs/-/addon-docs-8.6.4.tgz", + "integrity": "sha512-+kbcjvEAH0Xs+k+raAwfC0WmJilWhxBYnLLeazP3m5AkVI3sIjbzuuZ78NR0DCdRkw9BpuuXMHv5o4tIvLIUlw==", "dev": true, "license": "MIT", "dependencies": { "@mdx-js/react": "^3.0.0", - "@storybook/blocks": "8.4.7", - "@storybook/csf-plugin": "8.4.7", - "@storybook/react-dom-shim": "8.4.7", - "react": "^16.8.0 || ^17.0.0 || ^18.0.0", - "react-dom": "^16.8.0 || ^17.0.0 || ^18.0.0", + "@storybook/blocks": "8.6.4", + "@storybook/csf-plugin": "8.6.4", + "@storybook/react-dom-shim": "8.6.4", + "react": "^16.8.0 || ^17.0.0 || ^18.0.0 || ^19.0.0", + "react-dom": "^16.8.0 || ^17.0.0 || ^18.0.0 || ^19.0.0", "ts-dedent": "^2.0.0" }, "funding": { @@ -10206,25 +10200,25 @@ "url": "https://opencollective.com/storybook" }, "peerDependencies": { - "storybook": "^8.4.7" + "storybook": "^8.6.4" } }, "node_modules/@storybook/addon-essentials": { - "version": "8.4.7", - "resolved": "https://registry.npmjs.org/@storybook/addon-essentials/-/addon-essentials-8.4.7.tgz", - "integrity": "sha512-+BtZHCBrYtQKILtejKxh0CDRGIgTl9PumfBOKRaihYb4FX1IjSAxoV/oo/IfEjlkF5f87vouShWsRa8EUauFDw==", + "version": "8.6.4", + "resolved": "https://registry.npmjs.org/@storybook/addon-essentials/-/addon-essentials-8.6.4.tgz", + "integrity": "sha512-3pF0ZDl5EICqe0eOupPQq6PxeupwkLsfTWANuuJUYTJur82kvJd3Chb7P9vqw0A0QBx6106mL6PIyjrFJJMhLg==", "dev": true, "license": "MIT", 
"dependencies": { - "@storybook/addon-actions": "8.4.7", - "@storybook/addon-backgrounds": "8.4.7", - "@storybook/addon-controls": "8.4.7", - "@storybook/addon-docs": "8.4.7", - "@storybook/addon-highlight": "8.4.7", - "@storybook/addon-measure": "8.4.7", - "@storybook/addon-outline": "8.4.7", - "@storybook/addon-toolbars": "8.4.7", - "@storybook/addon-viewport": "8.4.7", + "@storybook/addon-actions": "8.6.4", + "@storybook/addon-backgrounds": "8.6.4", + "@storybook/addon-controls": "8.6.4", + "@storybook/addon-docs": "8.6.4", + "@storybook/addon-highlight": "8.6.4", + "@storybook/addon-measure": "8.6.4", + "@storybook/addon-outline": "8.6.4", + "@storybook/addon-toolbars": "8.6.4", + "@storybook/addon-viewport": "8.6.4", "ts-dedent": "^2.0.0" }, "funding": { @@ -10232,13 +10226,13 @@ "url": "https://opencollective.com/storybook" }, "peerDependencies": { - "storybook": "^8.4.7" + "storybook": "^8.6.4" } }, "node_modules/@storybook/addon-highlight": { - "version": "8.4.7", - "resolved": "https://registry.npmjs.org/@storybook/addon-highlight/-/addon-highlight-8.4.7.tgz", - "integrity": "sha512-whQIDBd3PfVwcUCrRXvCUHWClXe9mQ7XkTPCdPo4B/tZ6Z9c6zD8JUHT76ddyHivixFLowMnA8PxMU6kCMAiNw==", + "version": "8.6.4", + "resolved": "https://registry.npmjs.org/@storybook/addon-highlight/-/addon-highlight-8.6.4.tgz", + "integrity": "sha512-jFREXnSE/7VuBR8kbluN+DBVkMXEV7MGuCe8Ytb1/D2Q0ohgJe395dfVgEgSMXErOwsn//NV/NgJp6JNXH2DrA==", "dev": true, "license": "MIT", "dependencies": { @@ -10249,19 +10243,19 @@ "url": "https://opencollective.com/storybook" }, "peerDependencies": { - "storybook": "^8.4.7" + "storybook": "^8.6.4" } }, "node_modules/@storybook/addon-interactions": { - "version": "8.4.7", - "resolved": "https://registry.npmjs.org/@storybook/addon-interactions/-/addon-interactions-8.4.7.tgz", - "integrity": "sha512-fnufT3ym8ht3HHUIRVXAH47iOJW/QOb0VSM+j269gDuvyDcY03D1civCu1v+eZLGaXPKJ8vtjr0L8zKQ/4P0JQ==", + "version": "8.6.4", + "resolved": "https://registry.npmjs.org/@storybook/addon-interactions/-/addon-interactions-8.6.4.tgz", + "integrity": "sha512-MZAAZjyvmJXCvM35zEiPpXz7vK+fimovt+WZKAMayAbXy5fT+7El0c9dDyTQ2norNKNj9QU/8hiU/1zARSUELQ==", "dev": true, "license": "MIT", "dependencies": { "@storybook/global": "^5.0.0", - "@storybook/instrumenter": "8.4.7", - "@storybook/test": "8.4.7", + "@storybook/instrumenter": "8.6.4", + "@storybook/test": "8.6.4", "polished": "^4.2.2", "ts-dedent": "^2.2.0" }, @@ -10270,17 +10264,16 @@ "url": "https://opencollective.com/storybook" }, "peerDependencies": { - "storybook": "^8.4.7" + "storybook": "^8.6.4" } }, "node_modules/@storybook/addon-links": { - "version": "8.4.7", - "resolved": "https://registry.npmjs.org/@storybook/addon-links/-/addon-links-8.4.7.tgz", - "integrity": "sha512-L/1h4dMeMKF+MM0DanN24v5p3faNYbbtOApMgg7SlcBT/tgo3+cAjkgmNpYA8XtKnDezm+T2mTDhB8mmIRZpIQ==", + "version": "8.6.4", + "resolved": "https://registry.npmjs.org/@storybook/addon-links/-/addon-links-8.6.4.tgz", + "integrity": "sha512-TaSIteYLJ12+dVBk7fW96ZvNIFizKs+Vo/YuNAe4xTzFJRrjLkFj9htLVi/dusMfn7lYo5DHIns08LuM+po1Dg==", "dev": true, "license": "MIT", "dependencies": { - "@storybook/csf": "^0.1.11", "@storybook/global": "^5.0.0", "ts-dedent": "^2.0.0" }, @@ -10290,7 +10283,7 @@ }, "peerDependencies": { "react": "^16.8.0 || ^17.0.0 || ^18.0.0 || ^19.0.0-beta", - "storybook": "^8.4.7" + "storybook": "^8.6.4" }, "peerDependenciesMeta": { "react": { @@ -10299,9 +10292,9 @@ } }, "node_modules/@storybook/addon-measure": { - "version": "8.4.7", - "resolved": 
"https://registry.npmjs.org/@storybook/addon-measure/-/addon-measure-8.4.7.tgz", - "integrity": "sha512-QfvqYWDSI5F68mKvafEmZic3SMiK7zZM8VA0kTXx55hF/+vx61Mm0HccApUT96xCXIgmwQwDvn9gS4TkX81Dmw==", + "version": "8.6.4", + "resolved": "https://registry.npmjs.org/@storybook/addon-measure/-/addon-measure-8.6.4.tgz", + "integrity": "sha512-IpVL1rTy1tO8sy140eU3GdVB1QJ6J62+V6GSstcmqTLxDJQk5jFfg7hVbPEAZZ2sPFmeyceP9AMoBBo0EB355A==", "dev": true, "license": "MIT", "dependencies": { @@ -10313,13 +10306,13 @@ "url": "https://opencollective.com/storybook" }, "peerDependencies": { - "storybook": "^8.4.7" + "storybook": "^8.6.4" } }, "node_modules/@storybook/addon-outline": { - "version": "8.4.7", - "resolved": "https://registry.npmjs.org/@storybook/addon-outline/-/addon-outline-8.4.7.tgz", - "integrity": "sha512-6LYRqUZxSodmAIl8icr585Oi8pmzbZ90aloZJIpve+dBAzo7ydYrSQxxoQEVltXbKf3VeVcrs64ouAYqjisMYA==", + "version": "8.6.4", + "resolved": "https://registry.npmjs.org/@storybook/addon-outline/-/addon-outline-8.6.4.tgz", + "integrity": "sha512-28nAslKTy0zWMdxAZcipMDYrEp1TkXVooAsqMGY5AMXMiORi1ObjhmjTLhVt1dXp+aDg0X+M3B6PqoingmHhqQ==", "dev": true, "license": "MIT", "dependencies": { @@ -10331,7 +10324,7 @@ "url": "https://opencollective.com/storybook" }, "peerDependencies": { - "storybook": "^8.4.7" + "storybook": "^8.6.4" } }, "node_modules/@storybook/addon-styling-webpack": { @@ -10348,9 +10341,9 @@ } }, "node_modules/@storybook/addon-toolbars": { - "version": "8.4.7", - "resolved": "https://registry.npmjs.org/@storybook/addon-toolbars/-/addon-toolbars-8.4.7.tgz", - "integrity": "sha512-OSfdv5UZs+NdGB+nZmbafGUWimiweJ/56gShlw8Neo/4jOJl1R3rnRqqY7MYx8E4GwoX+i3GF5C3iWFNQqlDcw==", + "version": "8.6.4", + "resolved": "https://registry.npmjs.org/@storybook/addon-toolbars/-/addon-toolbars-8.6.4.tgz", + "integrity": "sha512-PU2lvgwCKDn93zpp5MEog103UUmSSugcxDf18xaoa9D15Qtr+YuQHd2hXbxA7+dnYL9lA7MLYsstfxE91ieM4Q==", "dev": true, "license": "MIT", "funding": { @@ -10358,13 +10351,13 @@ "url": "https://opencollective.com/storybook" }, "peerDependencies": { - "storybook": "^8.4.7" + "storybook": "^8.6.4" } }, "node_modules/@storybook/addon-viewport": { - "version": "8.4.7", - "resolved": "https://registry.npmjs.org/@storybook/addon-viewport/-/addon-viewport-8.4.7.tgz", - "integrity": "sha512-hvczh/jjuXXcOogih09a663sRDDSATXwbE866al1DXgbDFraYD/LxX/QDb38W9hdjU9+Qhx8VFIcNWoMQns5HQ==", + "version": "8.6.4", + "resolved": "https://registry.npmjs.org/@storybook/addon-viewport/-/addon-viewport-8.6.4.tgz", + "integrity": "sha512-O5Ij+SRVg6grY6JOL5lOpsFyopZxuZEl2GHfh2SUf9hfowNS0QAgFpJupqXkwZzRSrlf9uKrLkjB6ulLgN2gOQ==", "dev": true, "license": "MIT", "dependencies": { @@ -10375,30 +10368,30 @@ "url": "https://opencollective.com/storybook" }, "peerDependencies": { - "storybook": "^8.4.7" + "storybook": "^8.6.4" } }, "node_modules/@storybook/addon-webpack5-compiler-babel": { - "version": "3.0.3", - "resolved": "https://registry.npmjs.org/@storybook/addon-webpack5-compiler-babel/-/addon-webpack5-compiler-babel-3.0.3.tgz", - "integrity": "sha512-rVQTTw+oxJltbVKaejIWSHwVKOBJs3au21f/pYXhV0aiNgNhxEa3vr79t/j0j8ox8uJtzM8XYOb7FlkvGfHlwQ==", + "version": "3.0.5", + "resolved": "https://registry.npmjs.org/@storybook/addon-webpack5-compiler-babel/-/addon-webpack5-compiler-babel-3.0.5.tgz", + "integrity": "sha512-9dlc5PrehEFUHqkgj8x+aKtOY9XH9Zk6WBbtpgY/JCQ7waJ2VvhyDnrgJeXfek+WYlSkJElnta6SlqP+XRG0PQ==", "dev": true, + "license": "MIT", "dependencies": { - "@babel/core": "^7.23.7", - "babel-loader": "^9.1.3" + "@babel/core": "^7.26.0", + 
"babel-loader": "^9.2.1" }, "engines": { "node": ">=18" } }, "node_modules/@storybook/blocks": { - "version": "8.4.7", - "resolved": "https://registry.npmjs.org/@storybook/blocks/-/blocks-8.4.7.tgz", - "integrity": "sha512-+QH7+JwXXXIyP3fRCxz/7E2VZepAanXJM7G8nbR3wWsqWgrRp4Wra6MvybxAYCxU7aNfJX5c+RW84SNikFpcIA==", + "version": "8.6.4", + "resolved": "https://registry.npmjs.org/@storybook/blocks/-/blocks-8.6.4.tgz", + "integrity": "sha512-+oPXwT3KzJzsdkQuGEzBqOKTIFlb6qmlCWWbDwAnP0SEqYHoTVRTAIa44icFP0EZeIe+ypFVAm1E7kWTLmw1hQ==", "dev": true, "license": "MIT", "dependencies": { - "@storybook/csf": "^0.1.11", "@storybook/icons": "^1.2.12", "ts-dedent": "^2.0.0" }, @@ -10407,9 +10400,9 @@ "url": "https://opencollective.com/storybook" }, "peerDependencies": { - "react": "^16.8.0 || ^17.0.0 || ^18.0.0 || ^19.0.0-beta", - "react-dom": "^16.8.0 || ^17.0.0 || ^18.0.0 || ^19.0.0-beta", - "storybook": "^8.4.7" + "react": "^16.8.0 || ^17.0.0 || ^18.0.0 || ^19.0.0", + "react-dom": "^16.8.0 || ^17.0.0 || ^18.0.0 || ^19.0.0", + "storybook": "^8.6.4" }, "peerDependenciesMeta": { "react": { @@ -10421,14 +10414,13 @@ } }, "node_modules/@storybook/builder-webpack5": { - "version": "8.4.7", - "resolved": "https://registry.npmjs.org/@storybook/builder-webpack5/-/builder-webpack5-8.4.7.tgz", - "integrity": "sha512-O8LpsQ+4g2x5kh7rI9+jEUdX8k1a5egBQU1lbudmHchqsV0IKiVqBD9LL5Gj3wpit4vB8coSW4ZWTFBw8FQb4Q==", + "version": "8.6.4", + "resolved": "https://registry.npmjs.org/@storybook/builder-webpack5/-/builder-webpack5-8.6.4.tgz", + "integrity": "sha512-6fhjt3uiBZeapRbF477bkJ+ln+yA8vOz0qR86XTq79VrYY5AbBL6F8swVMk9LG1t49vYPR/UuPjYBxsUNKK8MQ==", "dev": true, "license": "MIT", "dependencies": { - "@storybook/core-webpack": "8.4.7", - "@types/node": "^22.0.0", + "@storybook/core-webpack": "8.6.4", "@types/semver": "^7.3.4", "browser-assert": "^1.2.1", "case-sensitive-paths-webpack-plugin": "^2.4.0", @@ -10458,7 +10450,7 @@ "url": "https://opencollective.com/storybook" }, "peerDependencies": { - "storybook": "^8.4.7" + "storybook": "^8.6.4" }, "peerDependenciesMeta": { "typescript": { @@ -10466,16 +10458,6 @@ } } }, - "node_modules/@storybook/builder-webpack5/node_modules/@types/node": { - "version": "22.10.2", - "resolved": "https://registry.npmjs.org/@types/node/-/node-22.10.2.tgz", - "integrity": "sha512-Xxr6BBRCAOQixvonOye19wnzyDiUtTeqldOOmj3CkeblonbccA12PFwlufvRdrpjXxqnmUaeiU5EOA+7s5diUQ==", - "dev": true, - "license": "MIT", - "dependencies": { - "undici-types": "~6.20.0" - } - }, "node_modules/@storybook/builder-webpack5/node_modules/ajv": { "version": "8.17.1", "resolved": "https://registry.npmjs.org/ajv/-/ajv-8.17.1.tgz", @@ -10541,9 +10523,9 @@ } }, "node_modules/@storybook/builder-webpack5/node_modules/semver": { - "version": "7.6.3", - "resolved": "https://registry.npmjs.org/semver/-/semver-7.6.3.tgz", - "integrity": "sha512-oVekP1cKtI+CTDvHWYFUcMtsK/00wmAEfyqKfNdARm8u1wNVhSgaX7A8d4UuIlUI5e84iEwOhs7ZPYRmzU9U6A==", + "version": "7.7.1", + "resolved": "https://registry.npmjs.org/semver/-/semver-7.7.1.tgz", + "integrity": "sha512-hlq8tAfn0m/61p4BVRcPzIGr6LKiMwo4VM6dGi6pt4qcRkmNzTcWq6eCEjEh+qXjkMDvPlOFFSGwQjoEa6gyMA==", "dev": true, "license": "ISC", "bin": { @@ -10597,18 +10579,18 @@ } }, "node_modules/@storybook/cli": { - "version": "8.4.7", - "resolved": "https://registry.npmjs.org/@storybook/cli/-/cli-8.4.7.tgz", - "integrity": "sha512-eqHhO30FLxFuoSA+wKWB+aGvQOVcCkGLbJ4RaffjCbSbC9S2YfKLvd3Sb6gFwy6e8x+MnEkvv3g0h8LixT/C9Q==", + "version": "8.6.4", + "resolved": 
"https://registry.npmjs.org/@storybook/cli/-/cli-8.6.4.tgz", + "integrity": "sha512-iVw4B2Pe4/ERDkDeaXtXamFXatNgvtiA6G9p3wUpVSlxjgKW/JbjSwKAMTCsgDIj4dCMm8i0fzmiYXeg5Yprng==", "dev": true, "license": "MIT", "dependencies": { "@babel/core": "^7.24.4", "@babel/types": "^7.24.0", - "@storybook/codemod": "8.4.7", + "@storybook/codemod": "8.6.4", "@types/semver": "^7.3.4", "commander": "^12.1.0", - "create-storybook": "8.4.7", + "create-storybook": "8.6.4", "cross-spawn": "^7.0.3", "envinfo": "^7.7.3", "fd-package-json": "^1.2.0", @@ -10618,9 +10600,10 @@ "globby": "^14.0.1", "jscodeshift": "^0.15.1", "leven": "^3.1.0", + "p-limit": "^6.2.0", "prompts": "^2.4.0", "semver": "^7.3.7", - "storybook": "8.4.7", + "storybook": "8.6.4", "tiny-invariant": "^1.3.1", "ts-dedent": "^2.0.0" }, @@ -10686,13 +10669,13 @@ } }, "node_modules/@storybook/cli/node_modules/foreground-child": { - "version": "3.3.0", - "resolved": "https://registry.npmjs.org/foreground-child/-/foreground-child-3.3.0.tgz", - "integrity": "sha512-Ld2g8rrAyMYFXBhEqMz8ZAHBi4J4uS1i/CxGMDnjyFWddMXLVcDp051DZfu+t7+ab7Wv6SMqpWmyFIj5UbfFvg==", + "version": "3.3.1", + "resolved": "https://registry.npmjs.org/foreground-child/-/foreground-child-3.3.1.tgz", + "integrity": "sha512-gIXjKqtFuWEgzFRJA9WCQeSJLZDjgJUOMCMzxtvFq/37KojM1BFGufqsCy0r4qSQmYLsZYMeyRqzIWOMup03sw==", "dev": true, "license": "ISC", "dependencies": { - "cross-spawn": "^7.0.0", + "cross-spawn": "^7.0.6", "signal-exit": "^4.0.1" }, "engines": { @@ -10724,18 +10707,18 @@ } }, "node_modules/@storybook/cli/node_modules/globby": { - "version": "14.0.2", - "resolved": "https://registry.npmjs.org/globby/-/globby-14.0.2.tgz", - "integrity": "sha512-s3Fq41ZVh7vbbe2PN3nrW7yC7U7MFVc5c98/iTl9c2GawNMKx/J648KQRW6WKkuU8GIbbh2IXfIRQjOZnXcTnw==", + "version": "14.1.0", + "resolved": "https://registry.npmjs.org/globby/-/globby-14.1.0.tgz", + "integrity": "sha512-0Ia46fDOaT7k4og1PDW4YbodWWr3scS2vAr2lTbsplOt2WkKp0vQbkI9wKis/T5LV/dqPjO3bpS/z6GTJB82LA==", "dev": true, "license": "MIT", "dependencies": { "@sindresorhus/merge-streams": "^2.1.0", - "fast-glob": "^3.3.2", - "ignore": "^5.2.4", - "path-type": "^5.0.0", + "fast-glob": "^3.3.3", + "ignore": "^7.0.3", + "path-type": "^6.0.0", "slash": "^5.1.0", - "unicorn-magic": "^0.1.0" + "unicorn-magic": "^0.3.0" }, "engines": { "node": ">=18" @@ -10744,6 +10727,16 @@ "url": "https://github.com/sponsors/sindresorhus" } }, + "node_modules/@storybook/cli/node_modules/ignore": { + "version": "7.0.3", + "resolved": "https://registry.npmjs.org/ignore/-/ignore-7.0.3.tgz", + "integrity": "sha512-bAH5jbK/F3T3Jls4I0SO1hmPR0dKU0a7+SY6n1yzRtG54FLO8d6w/nxLFX2Nb7dBu6cCWXPaAME6cYqFUMmuCA==", + "dev": true, + "license": "MIT", + "engines": { + "node": ">= 4" + } + }, "node_modules/@storybook/cli/node_modules/jscodeshift": { "version": "0.15.2", "resolved": "https://registry.npmjs.org/jscodeshift/-/jscodeshift-0.15.2.tgz", @@ -10810,23 +10803,39 @@ "node": ">=16 || 14 >=14.17" } }, + "node_modules/@storybook/cli/node_modules/p-limit": { + "version": "6.2.0", + "resolved": "https://registry.npmjs.org/p-limit/-/p-limit-6.2.0.tgz", + "integrity": "sha512-kuUqqHNUqoIWp/c467RI4X6mmyuojY5jGutNU0wVTmEOOfcuwLqyMVoAi9MKi2Ak+5i9+nhmrK4ufZE8069kHA==", + "dev": true, + "license": "MIT", + "dependencies": { + "yocto-queue": "^1.1.1" + }, + "engines": { + "node": ">=18" + }, + "funding": { + "url": "https://github.com/sponsors/sindresorhus" + } + }, "node_modules/@storybook/cli/node_modules/path-type": { - "version": "5.0.0", - "resolved": 
"https://registry.npmjs.org/path-type/-/path-type-5.0.0.tgz", - "integrity": "sha512-5HviZNaZcfqP95rwpv+1HDgUamezbqdSYTyzjTvwtJSnIH+3vnbmWsItli8OFEndS984VT55M3jduxZbX351gg==", + "version": "6.0.0", + "resolved": "https://registry.npmjs.org/path-type/-/path-type-6.0.0.tgz", + "integrity": "sha512-Vj7sf++t5pBD637NSfkxpHSMfWaeig5+DKWLhcqIYx6mWQz5hdJTGDVMQiJcw1ZYkhs7AazKDGpRVji1LJCZUQ==", "dev": true, "license": "MIT", "engines": { - "node": ">=12" + "node": ">=18" }, "funding": { "url": "https://github.com/sponsors/sindresorhus" } }, "node_modules/@storybook/cli/node_modules/semver": { - "version": "7.6.3", - "resolved": "https://registry.npmjs.org/semver/-/semver-7.6.3.tgz", - "integrity": "sha512-oVekP1cKtI+CTDvHWYFUcMtsK/00wmAEfyqKfNdARm8u1wNVhSgaX7A8d4UuIlUI5e84iEwOhs7ZPYRmzU9U6A==", + "version": "7.7.1", + "resolved": "https://registry.npmjs.org/semver/-/semver-7.7.1.tgz", + "integrity": "sha512-hlq8tAfn0m/61p4BVRcPzIGr6LKiMwo4VM6dGi6pt4qcRkmNzTcWq6eCEjEh+qXjkMDvPlOFFSGwQjoEa6gyMA==", "dev": true, "license": "ISC", "bin": { @@ -10894,18 +10903,30 @@ "dev": true, "license": "ISC" }, + "node_modules/@storybook/cli/node_modules/yocto-queue": { + "version": "1.2.0", + "resolved": "https://registry.npmjs.org/yocto-queue/-/yocto-queue-1.2.0.tgz", + "integrity": "sha512-KHBC7z61OJeaMGnF3wqNZj+GGNXOyypZviiKpQeiHirG5Ib1ImwcLBH70rbMSkKfSmUNBsdf2PwaEJtKvgmkNw==", + "dev": true, + "license": "MIT", + "engines": { + "node": ">=12.20" + }, + "funding": { + "url": "https://github.com/sponsors/sindresorhus" + } + }, "node_modules/@storybook/codemod": { - "version": "8.4.7", - "resolved": "https://registry.npmjs.org/@storybook/codemod/-/codemod-8.4.7.tgz", - "integrity": "sha512-VpYEZCj1EXCcqlOqI8lL58dlHJALW+OMAE1yB72GT8RaT5zSP43jK5t80cPhh70zyaPqS27wKOROcpaRS7eNRA==", + "version": "8.6.4", + "resolved": "https://registry.npmjs.org/@storybook/codemod/-/codemod-8.6.4.tgz", + "integrity": "sha512-HVB7py6vKB9OMzQ02aAhcqmyT/IDlYrT1960HO6LWRhcpztnBlOHAAlhM91DN8yqN0K47B+GsaN5eDzCT8ggBw==", "dev": true, "license": "MIT", "dependencies": { "@babel/core": "^7.24.4", "@babel/preset-env": "^7.24.4", "@babel/types": "^7.24.0", - "@storybook/core": "8.4.7", - "@storybook/csf": "^0.1.11", + "@storybook/core": "8.6.4", "@types/cross-spawn": "^6.0.2", "cross-spawn": "^7.0.3", "es-toolkit": "^1.22.0", @@ -10954,18 +10975,18 @@ } }, "node_modules/@storybook/codemod/node_modules/globby": { - "version": "14.0.2", - "resolved": "https://registry.npmjs.org/globby/-/globby-14.0.2.tgz", - "integrity": "sha512-s3Fq41ZVh7vbbe2PN3nrW7yC7U7MFVc5c98/iTl9c2GawNMKx/J648KQRW6WKkuU8GIbbh2IXfIRQjOZnXcTnw==", + "version": "14.1.0", + "resolved": "https://registry.npmjs.org/globby/-/globby-14.1.0.tgz", + "integrity": "sha512-0Ia46fDOaT7k4og1PDW4YbodWWr3scS2vAr2lTbsplOt2WkKp0vQbkI9wKis/T5LV/dqPjO3bpS/z6GTJB82LA==", "dev": true, "license": "MIT", "dependencies": { "@sindresorhus/merge-streams": "^2.1.0", - "fast-glob": "^3.3.2", - "ignore": "^5.2.4", - "path-type": "^5.0.0", + "fast-glob": "^3.3.3", + "ignore": "^7.0.3", + "path-type": "^6.0.0", "slash": "^5.1.0", - "unicorn-magic": "^0.1.0" + "unicorn-magic": "^0.3.0" }, "engines": { "node": ">=18" @@ -10974,6 +10995,16 @@ "url": "https://github.com/sponsors/sindresorhus" } }, + "node_modules/@storybook/codemod/node_modules/ignore": { + "version": "7.0.3", + "resolved": "https://registry.npmjs.org/ignore/-/ignore-7.0.3.tgz", + "integrity": "sha512-bAH5jbK/F3T3Jls4I0SO1hmPR0dKU0a7+SY6n1yzRtG54FLO8d6w/nxLFX2Nb7dBu6cCWXPaAME6cYqFUMmuCA==", + "dev": true, + "license": "MIT", + "engines": { + 
"node": ">= 4" + } + }, "node_modules/@storybook/codemod/node_modules/jscodeshift": { "version": "0.15.2", "resolved": "https://registry.npmjs.org/jscodeshift/-/jscodeshift-0.15.2.tgz", @@ -11015,13 +11046,13 @@ } }, "node_modules/@storybook/codemod/node_modules/path-type": { - "version": "5.0.0", - "resolved": "https://registry.npmjs.org/path-type/-/path-type-5.0.0.tgz", - "integrity": "sha512-5HviZNaZcfqP95rwpv+1HDgUamezbqdSYTyzjTvwtJSnIH+3vnbmWsItli8OFEndS984VT55M3jduxZbX351gg==", + "version": "6.0.0", + "resolved": "https://registry.npmjs.org/path-type/-/path-type-6.0.0.tgz", + "integrity": "sha512-Vj7sf++t5pBD637NSfkxpHSMfWaeig5+DKWLhcqIYx6mWQz5hdJTGDVMQiJcw1ZYkhs7AazKDGpRVji1LJCZUQ==", "dev": true, "license": "MIT", "engines": { - "node": ">=12" + "node": ">=18" }, "funding": { "url": "https://github.com/sponsors/sindresorhus" @@ -11066,9 +11097,9 @@ } }, "node_modules/@storybook/components": { - "version": "8.4.7", - "resolved": "https://registry.npmjs.org/@storybook/components/-/components-8.4.7.tgz", - "integrity": "sha512-uyJIcoyeMWKAvjrG9tJBUCKxr2WZk+PomgrgrUwejkIfXMO76i6jw9BwLa0NZjYdlthDv30r9FfbYZyeNPmF0g==", + "version": "8.6.4", + "resolved": "https://registry.npmjs.org/@storybook/components/-/components-8.6.4.tgz", + "integrity": "sha512-91VEVFWOgHkEFoNFMk6gs1AuOE9Yp7N283BXQOW+AgP+atpzED6t/fIBPGqJ2ewAuzLJ+cFOrasSzoNwVfg3Jg==", "dev": true, "license": "MIT", "funding": { @@ -11080,16 +11111,16 @@ } }, "node_modules/@storybook/core": { - "version": "8.4.7", - "resolved": "https://registry.npmjs.org/@storybook/core/-/core-8.4.7.tgz", - "integrity": "sha512-7Z8Z0A+1YnhrrSXoKKwFFI4gnsLbWzr8fnDCU6+6HlDukFYh8GHRcZ9zKfqmy6U3hw2h8H5DrHsxWfyaYUUOoA==", + "version": "8.6.4", + "resolved": "https://registry.npmjs.org/@storybook/core/-/core-8.6.4.tgz", + "integrity": "sha512-glDbjEBi3wokw1T+KQtl93irHO9N0LCwgylWfWVXYDdQjUJ7pGRQGnw73gPX7Ds9tg3myXFC83GjmY94UYSMbA==", "dev": true, "license": "MIT", "dependencies": { - "@storybook/csf": "^0.1.11", + "@storybook/theming": "8.6.4", "better-opn": "^3.0.2", "browser-assert": "^1.2.1", - "esbuild": "^0.18.0 || ^0.19.0 || ^0.20.0 || ^0.21.0 || ^0.22.0 || ^0.23.0 || ^0.24.0", + "esbuild": "^0.18.0 || ^0.19.0 || ^0.20.0 || ^0.21.0 || ^0.22.0 || ^0.23.0 || ^0.24.0 || ^0.25.0", "esbuild-register": "^3.5.0", "jsdoc-type-pratt-parser": "^4.0.0", "process": "^0.11.10", @@ -11112,13 +11143,12 @@ } }, "node_modules/@storybook/core-webpack": { - "version": "8.4.7", - "resolved": "https://registry.npmjs.org/@storybook/core-webpack/-/core-webpack-8.4.7.tgz", - "integrity": "sha512-Tj+CjQLpFyBJxhhMms+vbPT3+gTRAiQlrhY3L1IEVwBa3wtRMS0qjozH26d1hK4G6mUIEdwu13L54HMU/w33Sg==", + "version": "8.6.4", + "resolved": "https://registry.npmjs.org/@storybook/core-webpack/-/core-webpack-8.6.4.tgz", + "integrity": "sha512-/E+NDs4Ls2KQhQJyEbqyddvcevPGCNbBIRoR691gq2lnZV7lYFfhpGfYlXL1uSoA3WUWmql/gBsa2/O3vB+HKg==", "dev": true, "license": "MIT", "dependencies": { - "@types/node": "^22.0.0", "ts-dedent": "^2.0.0" }, "funding": { @@ -11126,23 +11156,13 @@ "url": "https://opencollective.com/storybook" }, "peerDependencies": { - "storybook": "^8.4.7" - } - }, - "node_modules/@storybook/core-webpack/node_modules/@types/node": { - "version": "22.10.2", - "resolved": "https://registry.npmjs.org/@types/node/-/node-22.10.2.tgz", - "integrity": "sha512-Xxr6BBRCAOQixvonOye19wnzyDiUtTeqldOOmj3CkeblonbccA12PFwlufvRdrpjXxqnmUaeiU5EOA+7s5diUQ==", - "dev": true, - "license": "MIT", - "dependencies": { - "undici-types": "~6.20.0" + "storybook": "^8.6.4" } }, 
"node_modules/@storybook/core/node_modules/semver": { - "version": "7.6.3", - "resolved": "https://registry.npmjs.org/semver/-/semver-7.6.3.tgz", - "integrity": "sha512-oVekP1cKtI+CTDvHWYFUcMtsK/00wmAEfyqKfNdARm8u1wNVhSgaX7A8d4UuIlUI5e84iEwOhs7ZPYRmzU9U6A==", + "version": "7.7.1", + "resolved": "https://registry.npmjs.org/semver/-/semver-7.7.1.tgz", + "integrity": "sha512-hlq8tAfn0m/61p4BVRcPzIGr6LKiMwo4VM6dGi6pt4qcRkmNzTcWq6eCEjEh+qXjkMDvPlOFFSGwQjoEa6gyMA==", "dev": true, "license": "ISC", "bin": { @@ -11152,19 +11172,10 @@ "node": ">=10" } }, - "node_modules/@storybook/csf": { - "version": "0.1.11", - "resolved": "https://registry.npmjs.org/@storybook/csf/-/csf-0.1.11.tgz", - "integrity": "sha512-dHYFQH3mA+EtnCkHXzicbLgsvzYjcDJ1JWsogbItZogkPHgSJM/Wr71uMkcvw8v9mmCyP4NpXJuu6bPoVsOnzg==", - "dev": true, - "dependencies": { - "type-fest": "^2.19.0" - } - }, "node_modules/@storybook/csf-plugin": { - "version": "8.4.7", - "resolved": "https://registry.npmjs.org/@storybook/csf-plugin/-/csf-plugin-8.4.7.tgz", - "integrity": "sha512-Fgogplu4HImgC+AYDcdGm1rmL6OR1rVdNX1Be9C/NEXwOCpbbBwi0BxTf/2ZxHRk9fCeaPEcOdP5S8QHfltc1g==", + "version": "8.6.4", + "resolved": "https://registry.npmjs.org/@storybook/csf-plugin/-/csf-plugin-8.6.4.tgz", + "integrity": "sha512-7UpEp4PFTy1iKjZiRaYMG7zvnpLIRPyD0+lUJUlLYG4UIemV3onvnIi1Je1tSZ4hfTup+ulom7JLztVSHZGRMg==", "dev": true, "license": "MIT", "dependencies": { @@ -11175,31 +11186,20 @@ "url": "https://opencollective.com/storybook" }, "peerDependencies": { - "storybook": "^8.4.7" - } - }, - "node_modules/@storybook/csf/node_modules/type-fest": { - "version": "2.19.0", - "resolved": "https://registry.npmjs.org/type-fest/-/type-fest-2.19.0.tgz", - "integrity": "sha512-RAH822pAdBgcNMAfWnCBU3CFZcfZ/i1eZjwFU/dsLKumyuuP3niueg2UAukXYF0E2AAoc82ZSSf9J0WQBinzHA==", - "dev": true, - "engines": { - "node": ">=12.20" - }, - "funding": { - "url": "https://github.com/sponsors/sindresorhus" + "storybook": "^8.6.4" } }, "node_modules/@storybook/global": { "version": "5.0.0", "resolved": "https://registry.npmjs.org/@storybook/global/-/global-5.0.0.tgz", "integrity": "sha512-FcOqPAXACP0I3oJ/ws6/rrPT9WGhu915Cg8D02a9YxLo0DE9zI+a9A5gRGvmQ09fiWPukqI8ZAEoQEdWUKMQdQ==", - "dev": true + "dev": true, + "license": "MIT" }, "node_modules/@storybook/icons": { - "version": "1.3.0", - "resolved": "https://registry.npmjs.org/@storybook/icons/-/icons-1.3.0.tgz", - "integrity": "sha512-Nz/UzeYQdUZUhacrPyfkiiysSjydyjgg/p0P9HxB4p/WaJUUjMAcaoaLgy3EXx61zZJ3iD36WPuDkZs5QYrA0A==", + "version": "1.3.2", + "resolved": "https://registry.npmjs.org/@storybook/icons/-/icons-1.3.2.tgz", + "integrity": "sha512-t3xcbCKkPvqyef8urBM0j/nP6sKtnlRkVgC+8JTbTAZQjaTmOjes3byEgzs89p4B/K6cJsg9wLW2k3SknLtYJw==", "dev": true, "license": "MIT", "engines": { @@ -11211,9 +11211,9 @@ } }, "node_modules/@storybook/instrumenter": { - "version": "8.4.7", - "resolved": "https://registry.npmjs.org/@storybook/instrumenter/-/instrumenter-8.4.7.tgz", - "integrity": "sha512-k6NSD3jaRCCHAFtqXZ7tw8jAzD/yTEWXGya+REgZqq5RCkmJ+9S4Ytp/6OhQMPtPFX23gAuJJzTQVLcCr+gjRg==", + "version": "8.6.4", + "resolved": "https://registry.npmjs.org/@storybook/instrumenter/-/instrumenter-8.6.4.tgz", + "integrity": "sha512-8OtIWLhayTUdqJEeXiPm6l3LTdSkWgQzzV2l2HIe4Adedeot+Rkwu6XHmyRDpnb0+Ish6zmMDqtJBxC2PQsy6Q==", "dev": true, "license": "MIT", "dependencies": { @@ -11225,13 +11225,13 @@ "url": "https://opencollective.com/storybook" }, "peerDependencies": { - "storybook": "^8.4.7" + "storybook": "^8.6.4" } }, "node_modules/@storybook/manager-api": { - "version": 
"8.4.7", - "resolved": "https://registry.npmjs.org/@storybook/manager-api/-/manager-api-8.4.7.tgz", - "integrity": "sha512-ELqemTviCxAsZ5tqUz39sDmQkvhVAvAgiplYy9Uf15kO0SP2+HKsCMzlrm2ue2FfkUNyqbDayCPPCB0Cdn/mpQ==", + "version": "8.6.4", + "resolved": "https://registry.npmjs.org/@storybook/manager-api/-/manager-api-8.6.4.tgz", + "integrity": "sha512-w/Nn/VznfbIg2oezDfzZNwSTDY5kBZbzxVBHLCnIcyu2AKt2Yto3pfGi60SikFcTrsClaAKT7D92kMQ9qdQNQQ==", "dev": true, "license": "MIT", "funding": { @@ -11253,16 +11253,15 @@ } }, "node_modules/@storybook/preset-react-webpack": { - "version": "8.4.7", - "resolved": "https://registry.npmjs.org/@storybook/preset-react-webpack/-/preset-react-webpack-8.4.7.tgz", - "integrity": "sha512-geTSBKyrBagVihil5MF7LkVFynbfHhCinvnbCZZqXW7M1vgcxvatunUENB+iV8eWg/0EJ+8O7scZL+BAxQ/2qg==", + "version": "8.6.4", + "resolved": "https://registry.npmjs.org/@storybook/preset-react-webpack/-/preset-react-webpack-8.6.4.tgz", + "integrity": "sha512-rFd1NvSE2ZP5ZFEqH7wdXXlvnyNChSMp+w4FyGSCgFQOwQKZhhWPPyloi3gGSWztFV9qpzC/ri7TTvG6ptqPPw==", "dev": true, "license": "MIT", "dependencies": { - "@storybook/core-webpack": "8.4.7", - "@storybook/react": "8.4.7", + "@storybook/core-webpack": "8.6.4", + "@storybook/react": "8.6.4", "@storybook/react-docgen-typescript-plugin": "1.0.6--canary.9.0c3f3b7.0", - "@types/node": "^22.0.0", "@types/semver": "^7.3.4", "find-up": "^5.0.0", "magic-string": "^0.30.5", @@ -11282,7 +11281,7 @@ "peerDependencies": { "react": "^16.8.0 || ^17.0.0 || ^18.0.0 || ^19.0.0-beta", "react-dom": "^16.8.0 || ^17.0.0 || ^18.0.0 || ^19.0.0-beta", - "storybook": "^8.4.7" + "storybook": "^8.6.4" }, "peerDependenciesMeta": { "typescript": { @@ -11290,20 +11289,10 @@ } } }, - "node_modules/@storybook/preset-react-webpack/node_modules/@types/node": { - "version": "22.10.2", - "resolved": "https://registry.npmjs.org/@types/node/-/node-22.10.2.tgz", - "integrity": "sha512-Xxr6BBRCAOQixvonOye19wnzyDiUtTeqldOOmj3CkeblonbccA12PFwlufvRdrpjXxqnmUaeiU5EOA+7s5diUQ==", - "dev": true, - "license": "MIT", - "dependencies": { - "undici-types": "~6.20.0" - } - }, "node_modules/@storybook/preset-react-webpack/node_modules/semver": { - "version": "7.6.3", - "resolved": "https://registry.npmjs.org/semver/-/semver-7.6.3.tgz", - "integrity": "sha512-oVekP1cKtI+CTDvHWYFUcMtsK/00wmAEfyqKfNdARm8u1wNVhSgaX7A8d4UuIlUI5e84iEwOhs7ZPYRmzU9U6A==", + "version": "7.7.1", + "resolved": "https://registry.npmjs.org/semver/-/semver-7.7.1.tgz", + "integrity": "sha512-hlq8tAfn0m/61p4BVRcPzIGr6LKiMwo4VM6dGi6pt4qcRkmNzTcWq6eCEjEh+qXjkMDvPlOFFSGwQjoEa6gyMA==", "dev": true, "license": "ISC", "bin": { @@ -11329,9 +11318,9 @@ } }, "node_modules/@storybook/preview-api": { - "version": "8.4.7", - "resolved": "https://registry.npmjs.org/@storybook/preview-api/-/preview-api-8.4.7.tgz", - "integrity": "sha512-0QVQwHw+OyZGHAJEXo6Knx+6/4er7n2rTDE5RYJ9F2E2Lg42E19pfdLlq2Jhoods2Xrclo3wj6GWR//Ahi39Eg==", + "version": "8.6.4", + "resolved": "https://registry.npmjs.org/@storybook/preview-api/-/preview-api-8.6.4.tgz", + "integrity": "sha512-5HBfxggzxGz0dg2c61NpPiQJav7UAmzsQlzmI5SzWOS6lkaylcDG8giwKzASVCXVWBxNji9qIDFM++UH090aDg==", "dev": true, "license": "MIT", "funding": { @@ -11343,18 +11332,18 @@ } }, "node_modules/@storybook/react": { - "version": "8.4.7", - "resolved": "https://registry.npmjs.org/@storybook/react/-/react-8.4.7.tgz", - "integrity": "sha512-nQ0/7i2DkaCb7dy0NaT95llRVNYWQiPIVuhNfjr1mVhEP7XD090p0g7eqUmsx8vfdHh2BzWEo6CoBFRd3+EXxw==", + "version": "8.6.4", + "resolved": 
"https://registry.npmjs.org/@storybook/react/-/react-8.6.4.tgz", + "integrity": "sha512-pfv4hMhu3AScOh0l86uIzmXLSQ0XA/e0reIVwQcxKht6miaKArhx9GkS4mMp6SO23ZoV5G/nfLgUaMVPVE0ZPg==", "dev": true, "license": "MIT", "dependencies": { - "@storybook/components": "8.4.7", + "@storybook/components": "8.6.4", "@storybook/global": "^5.0.0", - "@storybook/manager-api": "8.4.7", - "@storybook/preview-api": "8.4.7", - "@storybook/react-dom-shim": "8.4.7", - "@storybook/theming": "8.4.7" + "@storybook/manager-api": "8.6.4", + "@storybook/preview-api": "8.6.4", + "@storybook/react-dom-shim": "8.6.4", + "@storybook/theming": "8.6.4" }, "engines": { "node": ">=18.0.0" @@ -11364,10 +11353,10 @@ "url": "https://opencollective.com/storybook" }, "peerDependencies": { - "@storybook/test": "8.4.7", + "@storybook/test": "8.6.4", "react": "^16.8.0 || ^17.0.0 || ^18.0.0 || ^19.0.0-beta", "react-dom": "^16.8.0 || ^17.0.0 || ^18.0.0 || ^19.0.0-beta", - "storybook": "^8.4.7", + "storybook": "^8.6.4", "typescript": ">= 4.2.x" }, "peerDependenciesMeta": { @@ -11434,9 +11423,9 @@ } }, "node_modules/@storybook/react-dom-shim": { - "version": "8.4.7", - "resolved": "https://registry.npmjs.org/@storybook/react-dom-shim/-/react-dom-shim-8.4.7.tgz", - "integrity": "sha512-6bkG2jvKTmWrmVzCgwpTxwIugd7Lu+2btsLAqhQSzDyIj2/uhMNp8xIMr/NBDtLgq3nomt9gefNa9xxLwk/OMg==", + "version": "8.6.4", + "resolved": "https://registry.npmjs.org/@storybook/react-dom-shim/-/react-dom-shim-8.6.4.tgz", + "integrity": "sha512-kTGJ3aFdmfCFzYaDFGmZWfTXr9xhbUaf0tJ6+nEjc4tME6mFwMI+tTUT6U/J6mJhZuc2DjvIRA7bM0x77dIDqw==", "dev": true, "license": "MIT", "funding": { @@ -11446,20 +11435,19 @@ "peerDependencies": { "react": "^16.8.0 || ^17.0.0 || ^18.0.0 || ^19.0.0-beta", "react-dom": "^16.8.0 || ^17.0.0 || ^18.0.0 || ^19.0.0-beta", - "storybook": "^8.4.7" + "storybook": "^8.6.4" } }, "node_modules/@storybook/react-webpack5": { - "version": "8.4.7", - "resolved": "https://registry.npmjs.org/@storybook/react-webpack5/-/react-webpack5-8.4.7.tgz", - "integrity": "sha512-T9GLqlsP4It4El7cC8rSkBPRWvORAsTDULeWlO36RST2TrYnmBOUytsi22mk7cAAAVhhD6rTrs1YdqWRMpfa1w==", + "version": "8.6.4", + "resolved": "https://registry.npmjs.org/@storybook/react-webpack5/-/react-webpack5-8.6.4.tgz", + "integrity": "sha512-kH439Atpp94+hWF/xftOJ4ZCy7bnNWuLSni7sWvOGkYZpzzzkLXfACanvK6ZY9wUxAh0bbdGfbc3McMvIWfYlw==", "dev": true, "license": "MIT", "dependencies": { - "@storybook/builder-webpack5": "8.4.7", - "@storybook/preset-react-webpack": "8.4.7", - "@storybook/react": "8.4.7", - "@types/node": "^22.0.0" + "@storybook/builder-webpack5": "8.6.4", + "@storybook/preset-react-webpack": "8.6.4", + "@storybook/react": "8.6.4" }, "engines": { "node": ">=18.0.0" @@ -11471,7 +11459,7 @@ "peerDependencies": { "react": "^16.8.0 || ^17.0.0 || ^18.0.0 || ^19.0.0-beta", "react-dom": "^16.8.0 || ^17.0.0 || ^18.0.0 || ^19.0.0-beta", - "storybook": "^8.4.7", + "storybook": "^8.6.4", "typescript": ">= 4.2.x" }, "peerDependenciesMeta": { @@ -11480,26 +11468,15 @@ } } }, - "node_modules/@storybook/react-webpack5/node_modules/@types/node": { - "version": "22.10.2", - "resolved": "https://registry.npmjs.org/@types/node/-/node-22.10.2.tgz", - "integrity": "sha512-Xxr6BBRCAOQixvonOye19wnzyDiUtTeqldOOmj3CkeblonbccA12PFwlufvRdrpjXxqnmUaeiU5EOA+7s5diUQ==", - "dev": true, - "license": "MIT", - "dependencies": { - "undici-types": "~6.20.0" - } - }, "node_modules/@storybook/test": { - "version": "8.4.7", - "resolved": "https://registry.npmjs.org/@storybook/test/-/test-8.4.7.tgz", - "integrity": 
"sha512-AhvJsu5zl3uG40itSQVuSy5WByp3UVhS6xAnme4FWRwgSxhvZjATJ3AZkkHWOYjnnk+P2/sbz/XuPli1FVCWoQ==", + "version": "8.6.4", + "resolved": "https://registry.npmjs.org/@storybook/test/-/test-8.6.4.tgz", + "integrity": "sha512-JPjfbaMMuCBT47pg3/MDD9vYFF5OGPAOWEB9nJWJ9IjYAb2Nd8OYJQIDoYJQNT+aLkTVLtvzGnVNwdxpouAJcQ==", "dev": true, "license": "MIT", "dependencies": { - "@storybook/csf": "^0.1.11", "@storybook/global": "^5.0.0", - "@storybook/instrumenter": "8.4.7", + "@storybook/instrumenter": "8.6.4", "@testing-library/dom": "10.4.0", "@testing-library/jest-dom": "6.5.0", "@testing-library/user-event": "14.5.2", @@ -11511,7 +11488,7 @@ "url": "https://opencollective.com/storybook" }, "peerDependencies": { - "storybook": "^8.4.7" + "storybook": "^8.6.4" } }, "node_modules/@storybook/test/node_modules/@testing-library/dom": { @@ -11598,9 +11575,9 @@ } }, "node_modules/@storybook/theming": { - "version": "8.4.7", - "resolved": "https://registry.npmjs.org/@storybook/theming/-/theming-8.4.7.tgz", - "integrity": "sha512-99rgLEjf7iwfSEmdqlHkSG3AyLcK0sfExcr0jnc6rLiAkBhzuIsvcHjjUwkR210SOCgXqBPW0ZA6uhnuyppHLw==", + "version": "8.6.4", + "resolved": "https://registry.npmjs.org/@storybook/theming/-/theming-8.6.4.tgz", + "integrity": "sha512-g9Ns4uenC9oAWETaJ/tEKEIPMdS+CqjNWZz5Wbw1bLNhXwADZgKrVqawzZi64+bYYtQ+i8VCTjPoFa6s2eHiDQ==", "dev": true, "license": "MIT", "funding": { @@ -11611,6 +11588,29 @@ "storybook": "^8.2.0 || ^8.3.0-0 || ^8.4.0-0 || ^8.5.0-0 || ^8.6.0-0" } }, + "node_modules/@stripe/react-stripe-js": { + "version": "3.5.0", + "resolved": "https://registry.npmjs.org/@stripe/react-stripe-js/-/react-stripe-js-3.5.0.tgz", + "integrity": "sha512-oo5J2SNbuAUjE9XmQv/SOD7vgZCa1Y9OcZyRAfvQPkyrDrru35sg5c64ANdHEmOWUibism3+25rKdARSw3HOfA==", + "license": "MIT", + "dependencies": { + "prop-types": "^15.7.2" + }, + "peerDependencies": { + "@stripe/stripe-js": ">=1.44.1 <7.0.0", + "react": ">=16.8.0 <20.0.0", + "react-dom": ">=16.8.0 <20.0.0" + } + }, + "node_modules/@stripe/stripe-js": { + "version": "5.10.0", + "resolved": "https://registry.npmjs.org/@stripe/stripe-js/-/stripe-js-5.10.0.tgz", + "integrity": "sha512-PTigkxMdMUP6B5ISS7jMqJAKhgrhZwjprDqR1eATtFfh0OpKVNp110xiH+goeVdrJ29/4LeZJR4FaHHWstsu0A==", + "license": "MIT", + "engines": { + "node": ">=12.16" + } + }, "node_modules/@swc/helpers": { "version": "0.5.11", "resolved": "https://registry.npmjs.org/@swc/helpers/-/helpers-0.5.11.tgz", @@ -11946,6 +11946,16 @@ "@types/node": "*" } }, + "node_modules/@types/adm-zip": { + "version": "0.5.7", + "resolved": "https://registry.npmjs.org/@types/adm-zip/-/adm-zip-0.5.7.tgz", + "integrity": "sha512-DNEs/QvmyRLurdQPChqq0Md4zGvPwHerAJYWk9l2jCbD1VPpnzRJorOdiq4zsw09NFbYnhfsoEhWtxIzXpn2yw==", + "dev": true, + "license": "MIT", + "dependencies": { + "@types/node": "*" + } + }, "node_modules/@types/aria-query": { "version": "4.2.2", "resolved": "https://registry.npmjs.org/@types/aria-query/-/aria-query-4.2.2.tgz", @@ -12528,6 +12538,13 @@ "integrity": "sha512-ZkC5IUqqIFPXx3ASTTybTzmQdwHwe2C0u3eL75ldQ6T9E9IWFJodn6hIfbZGab73DfyiHN4Xw15gNxUq2FbvBA==", "dev": true }, + "node_modules/@types/pdf-parse": { + "version": "1.1.4", + "resolved": "https://registry.npmjs.org/@types/pdf-parse/-/pdf-parse-1.1.4.tgz", + "integrity": "sha512-+gbBHbNCVGGYw1S9lAIIvrHW47UYOhMIFUsJcMkMrzy1Jf0vulBN3XQIjPgnoOXveMuHnF3b57fXROnY/Or7eg==", + "dev": true, + "license": "MIT" + }, "node_modules/@types/pg": { "version": "8.6.1", "resolved": "https://registry.npmjs.org/@types/pg/-/pg-8.6.1.tgz", @@ -13888,9 +13905,9 @@ } }, 
"node_modules/@vitest/expect/node_modules/chai": { - "version": "5.1.2", - "resolved": "https://registry.npmjs.org/chai/-/chai-5.1.2.tgz", - "integrity": "sha512-aGtmf24DW6MLHHG5gCx4zaI3uBq3KRtxeVs0DjFH6Z0rDNbsvTxFASFvdj79pxjxZ8/5u3PIiN3IwEIQkiiuPw==", + "version": "5.2.0", + "resolved": "https://registry.npmjs.org/chai/-/chai-5.2.0.tgz", + "integrity": "sha512-mCuXncKXk5iCLhfhwTc0izo0gtEmpz5CtG2y8GiOINBlMVS6v8TMRc5TaLWKS6692m9+dVVfzgeVxR5UxWHTYw==", "dev": true, "license": "MIT", "dependencies": { @@ -13925,9 +13942,9 @@ } }, "node_modules/@vitest/expect/node_modules/loupe": { - "version": "3.1.2", - "resolved": "https://registry.npmjs.org/loupe/-/loupe-3.1.2.tgz", - "integrity": "sha512-23I4pFZHmAemUnz8WZXbYRSKYj801VDaNv9ETuMh7IrMc7VuVVSo+Z9iLE3ni30+U48iDWfi30d3twAXBYmnCg==", + "version": "3.1.3", + "resolved": "https://registry.npmjs.org/loupe/-/loupe-3.1.3.tgz", + "integrity": "sha512-kkIp7XSkP78ZxJEsSxW3712C6teJVoeHHwgo9zJ380de7IYyJ2ISlxojcH2pC5OFLewESmnRi/+XCDIEEVyoug==", "dev": true, "license": "MIT" }, @@ -13942,9 +13959,9 @@ } }, "node_modules/@vitest/pretty-format": { - "version": "2.1.8", - "resolved": "https://registry.npmjs.org/@vitest/pretty-format/-/pretty-format-2.1.8.tgz", - "integrity": "sha512-9HiSZ9zpqNLKlbIDRWOnAWqgcA7xu+8YxXSekhr0Ykab7PAYFkhkwoqVArPOtJhPmYeE2YHgKZlj3CP36z2AJQ==", + "version": "2.1.9", + "resolved": "https://registry.npmjs.org/@vitest/pretty-format/-/pretty-format-2.1.9.tgz", + "integrity": "sha512-KhRIdGV2U9HOUzxfiHmY8IFHTdqtOhIzCpd8WRdJiE7D/HUcZVD0EgQCVjm+Q9gkUXWgBvMmTtZgIG48wq7sOQ==", "dev": true, "license": "MIT", "dependencies": { @@ -13968,13 +13985,13 @@ } }, "node_modules/@vitest/utils": { - "version": "2.1.8", - "resolved": "https://registry.npmjs.org/@vitest/utils/-/utils-2.1.8.tgz", - "integrity": "sha512-dwSoui6djdwbfFmIgbIjX2ZhIoG7Ex/+xpxyiEgIGzjliY8xGkcpITKTlp6B4MgtGkF2ilvm97cPM96XZaAgcA==", + "version": "2.1.9", + "resolved": "https://registry.npmjs.org/@vitest/utils/-/utils-2.1.9.tgz", + "integrity": "sha512-v0psaMSkNJ3A2NMrUEHFRzJtDPFn+/VWZ5WxImB21T9fjucJRmS7xCS3ppEnARb9y11OAzaD+P2Ps+b+BGX5iQ==", "dev": true, "license": "MIT", "dependencies": { - "@vitest/pretty-format": "2.1.8", + "@vitest/pretty-format": "2.1.9", "loupe": "^3.1.2", "tinyrainbow": "^1.2.0" }, @@ -13983,9 +14000,9 @@ } }, "node_modules/@vitest/utils/node_modules/loupe": { - "version": "3.1.2", - "resolved": "https://registry.npmjs.org/loupe/-/loupe-3.1.2.tgz", - "integrity": "sha512-23I4pFZHmAemUnz8WZXbYRSKYj801VDaNv9ETuMh7IrMc7VuVVSo+Z9iLE3ni30+U48iDWfi30d3twAXBYmnCg==", + "version": "3.1.3", + "resolved": "https://registry.npmjs.org/loupe/-/loupe-3.1.3.tgz", + "integrity": "sha512-kkIp7XSkP78ZxJEsSxW3712C6teJVoeHHwgo9zJ380de7IYyJ2ISlxojcH2pC5OFLewESmnRi/+XCDIEEVyoug==", "dev": true, "license": "MIT" }, @@ -14618,6 +14635,16 @@ "node": ">=8.9" } }, + "node_modules/adm-zip": { + "version": "0.5.16", + "resolved": "https://registry.npmjs.org/adm-zip/-/adm-zip-0.5.16.tgz", + "integrity": "sha512-TGw5yVi4saajsSEgz25grObGHEUaDrniwvA2qwSC060KfqGPdglhvPMA2lPIoxs3PQIItj2iag35fONcQqgUaQ==", + "dev": true, + "license": "MIT", + "engines": { + "node": ">=12.0" + } + }, "node_modules/agent-base": { "version": "6.0.2", "resolved": "https://registry.npmjs.org/agent-base/-/agent-base-6.0.2.tgz", @@ -15207,6 +15234,15 @@ "resolved": "https://registry.npmjs.org/asap/-/asap-2.0.6.tgz", "integrity": "sha1-5QNHYR1+aQlDIIu9r+vLwvuGbUY=" }, + "node_modules/asn1": { + "version": "0.2.6", + "resolved": "https://registry.npmjs.org/asn1/-/asn1-0.2.6.tgz", + "integrity": 
"sha512-ix/FxPn0MDjeyJ7i/yoHGFt/EX6LyNbxSEhPPXODPL+KB0VPk86UYfL0lMdy+KCnv+fmvIzySwaK5COwqVbWTQ==", + "license": "MIT", + "dependencies": { + "safer-buffer": "~2.1.0" + } + }, "node_modules/assert-never": { "version": "1.2.1", "resolved": "https://registry.npmjs.org/assert-never/-/assert-never-1.2.1.tgz", @@ -15270,6 +15306,13 @@ "resolved": "https://registry.npmjs.org/async/-/async-3.2.5.tgz", "integrity": "sha512-baNZyqaaLhyLVKm/DlvdW051MSgO6b8eVfIezl9E5PqWxFgzLm/wQntEW4zOytVburDEr0JlALEpdOFwvErLsg==" }, + "node_modules/async-lock": { + "version": "1.4.1", + "resolved": "https://registry.npmjs.org/async-lock/-/async-lock-1.4.1.tgz", + "integrity": "sha512-Az2ZTpuytrtqENulXwO3GGv1Bztugx6TT37NIo7imr/Qo0gsYiGtSdBa2B6fsXhTpVZDNfu1Qn3pk531e3q+nQ==", + "dev": true, + "license": "MIT" + }, "node_modules/async-retry": { "version": "1.3.3", "resolved": "https://registry.npmjs.org/async-retry/-/async-retry-1.3.3.tgz", @@ -15431,9 +15474,10 @@ } }, "node_modules/axios": { - "version": "1.7.4", - "resolved": "https://registry.npmjs.org/axios/-/axios-1.7.4.tgz", - "integrity": "sha512-DukmaFRnY6AzAALSH4J2M3k6PkaC+MfaAGdEERRWcC9q3/TWQwLpHR8ZRLKTdQ3aBDL64EdluRDjJqKw+BPZEw==", + "version": "1.8.4", + "resolved": "https://registry.npmjs.org/axios/-/axios-1.8.4.tgz", + "integrity": "sha512-eBSYY4Y68NNlHbHBMdeDmKNtDgXWhQsJcGqzO3iLUM0GraQFSS9cVgPX5I9b3lbdFKyYoAEGAZF1DwhTaljNAw==", + "license": "MIT", "dependencies": { "follow-redirects": "^1.15.6", "form-data": "^4.0.0", @@ -15464,10 +15508,11 @@ } }, "node_modules/babel-loader": { - "version": "9.1.3", - "resolved": "https://registry.npmjs.org/babel-loader/-/babel-loader-9.1.3.tgz", - "integrity": "sha512-xG3ST4DglodGf8qSwv0MdeWLhrDsw/32QMdTO5T1ZIp9gQur0HkCyFs7Awskr10JKXFXwpAhiCuYX5oGXnRGbw==", + "version": "9.2.1", + "resolved": "https://registry.npmjs.org/babel-loader/-/babel-loader-9.2.1.tgz", + "integrity": "sha512-fqe8naHt46e0yIdkjUZYqddSXfej3AHajX+CSO5X7oy0EmPc6o5Xh+RClNoHjnieWz9AW4kZxW9yyFMhVB1QLA==", "dev": true, + "license": "MIT", "dependencies": { "find-cache-dir": "^4.0.0", "schema-utils": "^4.0.0" @@ -16376,6 +16421,15 @@ "node": ">= 0.10.x" } }, + "node_modules/buildcheck": { + "version": "0.0.6", + "resolved": "https://registry.npmjs.org/buildcheck/-/buildcheck-0.0.6.tgz", + "integrity": "sha512-8f9ZJCUXyT1M35Jx7MkBgmBMo3oHTTBIPLiY9xyL0pl3T5RwcPEY8cUHr5LBNfu/fk6c2T4DJZuVM/8ZZT2D2A==", + "optional": true, + "engines": { + "node": ">=10.0.0" + } + }, "node_modules/builtin-modules": { "version": "3.3.0", "resolved": "https://registry.npmjs.org/builtin-modules/-/builtin-modules-3.3.0.tgz", @@ -17244,7 +17298,8 @@ "node_modules/classnames": { "version": "2.5.1", "resolved": "https://registry.npmjs.org/classnames/-/classnames-2.5.1.tgz", - "integrity": "sha512-saHYOzhIQs6wy2sVxTM6bUDsQO4F50V9RQ22qBpEdCW+I+/Wmke2HOl6lS6dTpdxVhb88/I6+Hs+438c3lfUow==" + "integrity": "sha512-saHYOzhIQs6wy2sVxTM6bUDsQO4F50V9RQ22qBpEdCW+I+/Wmke2HOl6lS6dTpdxVhb88/I6+Hs+438c3lfUow==", + "dev": true }, "node_modules/clean-css": { "version": "5.3.0", @@ -17258,6 +17313,13 @@ "node": ">= 10.0" } }, + "node_modules/clean-git-ref": { + "version": "2.0.1", + "resolved": "https://registry.npmjs.org/clean-git-ref/-/clean-git-ref-2.0.1.tgz", + "integrity": "sha512-bLSptAy2P0s6hU4PzuIMKmMJJSE6gLXGH1cntDu7bWJUksvuM+7ReOK61mozULErYvP6a15rnYl0zFDef+pyPw==", + "dev": true, + "license": "Apache-2.0" + }, "node_modules/clean-regexp": { "version": "1.0.0", "resolved": "https://registry.npmjs.org/clean-regexp/-/clean-regexp-1.0.0.tgz", @@ -17315,19 +17377,6 @@ "node": ">=8" } }, - 
"node_modules/cli-spinners": { - "version": "2.9.2", - "resolved": "https://registry.npmjs.org/cli-spinners/-/cli-spinners-2.9.2.tgz", - "integrity": "sha512-ywqV+5MmyL4E7ybXgKys4DugZbX0FC6LnwrhjuykIjnK9k8OQacQ7axGKnjDXWNhns0xot3bZI5h55H8yo9cJg==", - "dev": true, - "license": "MIT", - "engines": { - "node": ">=6" - }, - "funding": { - "url": "https://github.com/sponsors/sindresorhus" - } - }, "node_modules/cli-table3": { "version": "0.6.1", "resolved": "https://registry.npmjs.org/cli-table3/-/cli-table3-0.6.1.tgz", @@ -17742,9 +17791,9 @@ } }, "node_modules/consola": { - "version": "3.2.3", - "resolved": "https://registry.npmjs.org/consola/-/consola-3.2.3.tgz", - "integrity": "sha512-I5qxpzLv+sJhTVEoLYNcTW+bThDCPsit0vLNKShZx6rLtpilNpmmeTPaeqJb9ZE9dV3DGaeby6Vuhrw38WjeyQ==", + "version": "3.4.0", + "resolved": "https://registry.npmjs.org/consola/-/consola-3.4.0.tgz", + "integrity": "sha512-EiPU8G6dQG0GFHNR8ljnZFki/8a+cQwEQ+7wpxdChl02Q8HXlwEZWD5lqAF8vC2sEC3Tehr8hy7vErz88LHyUA==", "dev": true, "license": "MIT", "engines": { @@ -18142,19 +18191,6 @@ "node": ">=10" } }, - "node_modules/cpu-features": { - "version": "0.0.2", - "resolved": "https://registry.npmjs.org/cpu-features/-/cpu-features-0.0.2.tgz", - "integrity": "sha512-/2yieBqvMcRj8McNzkycjW2v3OIUOibBfd2dLEJ0nWts8NobAxwiyw9phVNS6oDL8x8tz9F7uNVFEVpJncQpeA==", - "hasInstallScript": true, - "optional": true, - "dependencies": { - "nan": "^2.14.1" - }, - "engines": { - "node": ">=8.0.0" - } - }, "node_modules/crc-32": { "version": "1.2.2", "resolved": "https://registry.npmjs.org/crc-32/-/crc-32-1.2.2.tgz", @@ -18167,24 +18203,14 @@ } }, "node_modules/create-storybook": { - "version": "8.4.7", - "resolved": "https://registry.npmjs.org/create-storybook/-/create-storybook-8.4.7.tgz", - "integrity": "sha512-Q2DkZEWkIUGv5EACT4SRsHnKO5WDZQAu772B/WeyYr1g38ksJziOut2auzS5sks5dWBmUgYssW8htSELuVRLGQ==", + "version": "8.6.4", + "resolved": "https://registry.npmjs.org/create-storybook/-/create-storybook-8.6.4.tgz", + "integrity": "sha512-YwxtA+CtGHWYvQrFh1dat3Q/kXWHekok0MAqaorD9/Mf/cpybA8afHDsdq2PDq0LXg0Od3QI4Ha04+eaB6F8gA==", "dev": true, "license": "MIT", "dependencies": { - "@types/semver": "^7.3.4", - "commander": "^12.1.0", - "execa": "^5.0.0", - "fd-package-json": "^1.2.0", - "find-up": "^5.0.0", - "ora": "^5.4.1", - "prettier": "^3.1.1", - "prompts": "^2.4.0", - "semver": "^7.3.7", - "storybook": "8.4.7", - "tiny-invariant": "^1.3.1", - "ts-dedent": "^2.0.0" + "recast": "^0.23.5", + "semver": "^7.6.2" }, "bin": { "create-storybook": "bin/index.cjs" @@ -18194,20 +18220,10 @@ "url": "https://opencollective.com/storybook" } }, - "node_modules/create-storybook/node_modules/commander": { - "version": "12.1.0", - "resolved": "https://registry.npmjs.org/commander/-/commander-12.1.0.tgz", - "integrity": "sha512-Vw8qHK3bZM9y/P10u3Vib8o/DdkvA2OtPtZvD871QKjy74Wj1WSKFILMPRPSdUSx5RFK1arlJzEtA4PkFgnbuA==", - "dev": true, - "license": "MIT", - "engines": { - "node": ">=18" - } - }, "node_modules/create-storybook/node_modules/semver": { - "version": "7.6.3", - "resolved": "https://registry.npmjs.org/semver/-/semver-7.6.3.tgz", - "integrity": "sha512-oVekP1cKtI+CTDvHWYFUcMtsK/00wmAEfyqKfNdARm8u1wNVhSgaX7A8d4UuIlUI5e84iEwOhs7ZPYRmzU9U6A==", + "version": "7.7.1", + "resolved": "https://registry.npmjs.org/semver/-/semver-7.7.1.tgz", + "integrity": "sha512-hlq8tAfn0m/61p4BVRcPzIGr6LKiMwo4VM6dGi6pt4qcRkmNzTcWq6eCEjEh+qXjkMDvPlOFFSGwQjoEa6gyMA==", "dev": true, "license": "ISC", "bin": { @@ -19376,6 +19392,22 @@ "node": ">=0.10" } }, + "node_modules/decompress-response": 
{ + "version": "6.0.0", + "resolved": "https://registry.npmjs.org/decompress-response/-/decompress-response-6.0.0.tgz", + "integrity": "sha512-aW35yZM6Bb/4oJlZncMH2LCoZtJXTRxES17vE3hoRiowU2kWHaJKFkSBDnDR+cm9J+9QhXmREyIfv0pji9ejCQ==", + "dev": true, + "license": "MIT", + "dependencies": { + "mimic-response": "^3.1.0" + }, + "engines": { + "node": ">=10" + }, + "funding": { + "url": "https://github.com/sponsors/sindresorhus" + } + }, "node_modules/dedent": { "version": "0.7.0", "resolved": "https://registry.npmjs.org/dedent/-/dedent-0.7.0.tgz", @@ -19525,6 +19557,7 @@ "resolved": "https://registry.npmjs.org/define-lazy-prop/-/define-lazy-prop-2.0.0.tgz", "integrity": "sha512-Ds09qNh8yw3khSjiJjiUInaGX9xlqZDY7JVryGxdxV7NPeuqQfplOpQ66yJFZut3jLa5zOwkXw1g9EI2uKh4Og==", "dev": true, + "license": "MIT", "engines": { "node": ">=8" } @@ -19680,6 +19713,13 @@ "integrity": "sha512-UfwfKGxT/Wm2KaxFSZsp7/+YOnAgIzxQXs86zu1IFMLU/+3ouxnEvXqPDn3yxSxlsO4r1B+I2GGQHsIthp6/7Q==", "license": "Apache-2.0" }, + "node_modules/diff3": { + "version": "0.0.3", + "resolved": "https://registry.npmjs.org/diff3/-/diff3-0.0.3.tgz", + "integrity": "sha512-iSq8ngPOt0K53A6eVr4d5Kn6GNrM2nQZtC740pzIriHtn4pOQ2lyzEXQMBeVcWERN0ye7fhBsk9PbLLQOnUx/g==", + "dev": true, + "license": "MIT" + }, "node_modules/dijkstrajs": { "version": "1.0.2", "resolved": "https://registry.npmjs.org/dijkstrajs/-/dijkstrajs-1.0.2.tgz", @@ -19731,32 +19771,6 @@ "node": ">=6" } }, - "node_modules/docker-modem": { - "version": "3.0.3", - "resolved": "https://registry.npmjs.org/docker-modem/-/docker-modem-3.0.3.tgz", - "integrity": "sha512-Tgkn2a+yiNP9FoZgMa/D9Wk+D2Db///0KOyKSYZRJa8w4+DzKyzQMkczKSdR/adQ0x46BOpeNkoyEOKjPhCzjw==", - "dependencies": { - "debug": "^4.1.1", - "readable-stream": "^3.5.0", - "split-ca": "^1.0.1", - "ssh2": "^1.4.0" - }, - "engines": { - "node": ">= 8.0" - } - }, - "node_modules/dockerode": { - "version": "3.3.1", - "resolved": "https://registry.npmjs.org/dockerode/-/dockerode-3.3.1.tgz", - "integrity": "sha512-AS2mr8Lp122aa5n6d99HkuTNdRV1wkkhHwBdcnY6V0+28D3DSYwhxAk85/mM9XwD3RMliTxyr63iuvn5ZblFYQ==", - "dependencies": { - "docker-modem": "^3.0.0", - "tar-fs": "~2.0.1" - }, - "engines": { - "node": ">= 8.0" - } - }, "node_modules/doctrine": { "version": "3.0.0", "resolved": "https://registry.npmjs.org/doctrine/-/doctrine-3.0.0.tgz", @@ -19792,6 +19806,7 @@ "version": "3.4.0", "resolved": "https://registry.npmjs.org/dom-helpers/-/dom-helpers-3.4.0.tgz", "integrity": "sha512-LnuPJ+dwqKDIyotW1VzmOZ5TONUN7CwkCR5hrgawTUbkBGYdeoNLZo6nNfGkCrjtE1nXXaj7iMMpDa8/d9WoIA==", + "dev": true, "dependencies": { "@babel/runtime": "^7.1.2" } @@ -20440,9 +20455,9 @@ } }, "node_modules/es-toolkit": { - "version": "1.30.1", - "resolved": "https://registry.npmjs.org/es-toolkit/-/es-toolkit-1.30.1.tgz", - "integrity": "sha512-ZXflqanzH8BpHkDhFa10bBf6ONDCe84EPUm7SSICGzuuROSluT2ynTPtwn9PcRelMtorCRozSknI/U0MNYp0Uw==", + "version": "1.32.0", + "resolved": "https://registry.npmjs.org/es-toolkit/-/es-toolkit-1.32.0.tgz", + "integrity": "sha512-ZfSfHP1l6ubgW/B/FRtqb9bYdMvI6jizbOSfbwwJNcOQ1QE6TFsC3jpQkZ900uUPSR3t3SU5Ds7UWKnYz+uP8Q==", "dev": true, "license": "MIT", "workspaces": [ @@ -20502,9 +20517,9 @@ } }, "node_modules/esbuild": { - "version": "0.24.0", - "resolved": "https://registry.npmjs.org/esbuild/-/esbuild-0.24.0.tgz", - "integrity": "sha512-FuLPevChGDshgSicjisSooU0cemp/sGXR841D5LHMB7mTVOmsEHcAxaH3irL53+8YDIeVNQEySh4DaYU/iuPqQ==", + "version": "0.25.0", + "resolved": "https://registry.npmjs.org/esbuild/-/esbuild-0.25.0.tgz", + "integrity": 
"sha512-BXq5mqc8ltbaN34cDqWuYKyNhX8D/Z0J1xdtdQ8UcIIIyJyz+ZMKUt58tF3SrZ85jcfN/PZYhjR5uDQAYNVbuw==", "dev": true, "hasInstallScript": true, "license": "MIT", @@ -20515,30 +20530,31 @@ "node": ">=18" }, "optionalDependencies": { - "@esbuild/aix-ppc64": "0.24.0", - "@esbuild/android-arm": "0.24.0", - "@esbuild/android-arm64": "0.24.0", - "@esbuild/android-x64": "0.24.0", - "@esbuild/darwin-arm64": "0.24.0", - "@esbuild/darwin-x64": "0.24.0", - "@esbuild/freebsd-arm64": "0.24.0", - "@esbuild/freebsd-x64": "0.24.0", - "@esbuild/linux-arm": "0.24.0", - "@esbuild/linux-arm64": "0.24.0", - "@esbuild/linux-ia32": "0.24.0", - "@esbuild/linux-loong64": "0.24.0", - "@esbuild/linux-mips64el": "0.24.0", - "@esbuild/linux-ppc64": "0.24.0", - "@esbuild/linux-riscv64": "0.24.0", - "@esbuild/linux-s390x": "0.24.0", - "@esbuild/linux-x64": "0.24.0", - "@esbuild/netbsd-x64": "0.24.0", - "@esbuild/openbsd-arm64": "0.24.0", - "@esbuild/openbsd-x64": "0.24.0", - "@esbuild/sunos-x64": "0.24.0", - "@esbuild/win32-arm64": "0.24.0", - "@esbuild/win32-ia32": "0.24.0", - "@esbuild/win32-x64": "0.24.0" + "@esbuild/aix-ppc64": "0.25.0", + "@esbuild/android-arm": "0.25.0", + "@esbuild/android-arm64": "0.25.0", + "@esbuild/android-x64": "0.25.0", + "@esbuild/darwin-arm64": "0.25.0", + "@esbuild/darwin-x64": "0.25.0", + "@esbuild/freebsd-arm64": "0.25.0", + "@esbuild/freebsd-x64": "0.25.0", + "@esbuild/linux-arm": "0.25.0", + "@esbuild/linux-arm64": "0.25.0", + "@esbuild/linux-ia32": "0.25.0", + "@esbuild/linux-loong64": "0.25.0", + "@esbuild/linux-mips64el": "0.25.0", + "@esbuild/linux-ppc64": "0.25.0", + "@esbuild/linux-riscv64": "0.25.0", + "@esbuild/linux-s390x": "0.25.0", + "@esbuild/linux-x64": "0.25.0", + "@esbuild/netbsd-arm64": "0.25.0", + "@esbuild/netbsd-x64": "0.25.0", + "@esbuild/openbsd-arm64": "0.25.0", + "@esbuild/openbsd-x64": "0.25.0", + "@esbuild/sunos-x64": "0.25.0", + "@esbuild/win32-arm64": "0.25.0", + "@esbuild/win32-ia32": "0.25.0", + "@esbuild/win32-x64": "0.25.0" } }, "node_modules/esbuild-register": { @@ -21485,18 +21501,6 @@ "url": "https://github.com/sponsors/sindresorhus" } }, - "node_modules/eslint-plugin-unicorn/node_modules/jsesc": { - "version": "3.0.2", - "resolved": "https://registry.npmjs.org/jsesc/-/jsesc-3.0.2.tgz", - "integrity": "sha512-xKqzzWXDttJuOcawBt4KnKHHIf5oQ/Cxax+0PWFG+DFDgHNAdi+TXECADI+RYiFUMmx8792xsMbbgXj4CwnP4g==", - "dev": true, - "bin": { - "jsesc": "bin/jsesc" - }, - "engines": { - "node": ">=6" - } - }, "node_modules/eslint-plugin-unicorn/node_modules/regjsparser": { "version": "0.10.0", "resolved": "https://registry.npmjs.org/regjsparser/-/regjsparser-0.10.0.tgz", @@ -21923,50 +21927,6 @@ "node": ">=14.18" } }, - "node_modules/execa": { - "version": "5.1.1", - "resolved": "https://registry.npmjs.org/execa/-/execa-5.1.1.tgz", - "integrity": "sha512-8uSpZZocAZRBAPIEINJj3Lo9HyGitllczc27Eh5YYojjMFMn8yHMDMaUHE2Jqfq05D/wucwI4JGURyXt1vchyg==", - "dev": true, - "dependencies": { - "cross-spawn": "^7.0.3", - "get-stream": "^6.0.0", - "human-signals": "^2.1.0", - "is-stream": "^2.0.0", - "merge-stream": "^2.0.0", - "npm-run-path": "^4.0.1", - "onetime": "^5.1.2", - "signal-exit": "^3.0.3", - "strip-final-newline": "^2.0.0" - }, - "engines": { - "node": ">=10" - }, - "funding": { - "url": "https://github.com/sindresorhus/execa?sponsor=1" - } - }, - "node_modules/execa/node_modules/get-stream": { - "version": "6.0.1", - "resolved": "https://registry.npmjs.org/get-stream/-/get-stream-6.0.1.tgz", - "integrity": 
"sha512-ts6Wi+2j3jQjqi70w5AlN8DFnkSwC+MqmxEzdEALB2qXZYV3X/b1CTfgPLGJNMeAWxdPfU8FO1ms3NUfaHCPYg==", - "dev": true, - "engines": { - "node": ">=10" - }, - "funding": { - "url": "https://github.com/sponsors/sindresorhus" - } - }, - "node_modules/execa/node_modules/human-signals": { - "version": "2.1.0", - "resolved": "https://registry.npmjs.org/human-signals/-/human-signals-2.1.0.tgz", - "integrity": "sha512-B4FFZ6q/T2jhhksgkbEW3HBvWIfDW85snkQgawt07S7J5QXTk6BkNV+0yAeZrM5QpMAdYlocGoljn0sJ/WQkFw==", - "dev": true, - "engines": { - "node": ">=10.17.0" - } - }, "node_modules/executable": { "version": "4.1.1", "resolved": "https://registry.npmjs.org/executable/-/executable-4.1.1.tgz", @@ -22613,16 +22573,17 @@ "dev": true }, "node_modules/fast-glob": { - "version": "3.3.2", - "resolved": "https://registry.npmjs.org/fast-glob/-/fast-glob-3.3.2.tgz", - "integrity": "sha512-oX2ruAFQwf/Orj8m737Y5adxDQO0LAB7/S5MnxCdTNDd4p6BsyIVsv9JQsATbTSq8KHRpLwIHbVlUNatxd+1Ow==", + "version": "3.3.3", + "resolved": "https://registry.npmjs.org/fast-glob/-/fast-glob-3.3.3.tgz", + "integrity": "sha512-7MptL8U0cqcFdzIzwOTHoilX9x5BrNqye7Z/LuC7kCMRio1EMSyqRK3BEAUD7sXRq4iT4AzTVuZdhgQ2TCvYLg==", "dev": true, + "license": "MIT", "dependencies": { "@nodelib/fs.stat": "^2.0.2", "@nodelib/fs.walk": "^1.2.3", "glob-parent": "^5.1.2", "merge2": "^1.3.0", - "micromatch": "^4.0.4" + "micromatch": "^4.0.8" }, "engines": { "node": ">=8.6.0" @@ -23305,9 +23266,9 @@ } }, "node_modules/fork-ts-checker-webpack-plugin/node_modules/semver": { - "version": "7.6.3", - "resolved": "https://registry.npmjs.org/semver/-/semver-7.6.3.tgz", - "integrity": "sha512-oVekP1cKtI+CTDvHWYFUcMtsK/00wmAEfyqKfNdARm8u1wNVhSgaX7A8d4UuIlUI5e84iEwOhs7ZPYRmzU9U6A==", + "version": "7.7.1", + "resolved": "https://registry.npmjs.org/semver/-/semver-7.7.1.tgz", + "integrity": "sha512-hlq8tAfn0m/61p4BVRcPzIGr6LKiMwo4VM6dGi6pt4qcRkmNzTcWq6eCEjEh+qXjkMDvPlOFFSGwQjoEa6gyMA==", "dev": true, "license": "ISC", "bin": { @@ -23763,20 +23724,19 @@ } }, "node_modules/giget": { - "version": "1.2.3", - "resolved": "https://registry.npmjs.org/giget/-/giget-1.2.3.tgz", - "integrity": "sha512-8EHPljDvs7qKykr6uw8b+lqLiUc/vUg+KVTI0uND4s63TdsZM2Xus3mflvF0DDG9SiM4RlCkFGL+7aAjRmV7KA==", + "version": "1.2.5", + "resolved": "https://registry.npmjs.org/giget/-/giget-1.2.5.tgz", + "integrity": "sha512-r1ekGw/Bgpi3HLV3h1MRBIlSAdHoIMklpaQ3OQLFcRw9PwAj2rqigvIbg+dBUI51OxVI2jsEtDywDBjSiuf7Ug==", "dev": true, "license": "MIT", "dependencies": { "citty": "^0.1.6", - "consola": "^3.2.3", + "consola": "^3.4.0", "defu": "^6.1.4", - "node-fetch-native": "^1.6.3", - "nypm": "^0.3.8", - "ohash": "^1.1.3", - "pathe": "^1.1.2", - "tar": "^6.2.0" + "node-fetch-native": "^1.6.6", + "nypm": "^0.5.4", + "pathe": "^2.0.3", + "tar": "^6.2.1" }, "bin": { "giget": "dist/cli.mjs" @@ -25715,6 +25675,7 @@ "version": "2.2.4", "resolved": "https://registry.npmjs.org/invariant/-/invariant-2.2.4.tgz", "integrity": "sha512-phJfQVBuaJM5raOpJjSfkiD6BpbCE4Ns//LaXl6wGYtUBY83nWS6Rf9tXm2e8VaK60JEjYldbPif/A2B1C2gNA==", + "dev": true, "dependencies": { "loose-envify": "^1.0.0" } @@ -26231,16 +26192,6 @@ "url": "https://github.com/sponsors/sindresorhus" } }, - "node_modules/is-interactive": { - "version": "1.0.0", - "resolved": "https://registry.npmjs.org/is-interactive/-/is-interactive-1.0.0.tgz", - "integrity": "sha512-2HvIEKRoqS62guEC+qBjpvRubdX910WCMuJTZ+I9yvqKU2/12eSL549HMwtabb4oupdj2sMP50k+XJfB/8JE6w==", - "dev": true, - "license": "MIT", - "engines": { - "node": ">=8" - } - }, "node_modules/is-ip": { "version": "2.0.0", 
"resolved": "https://registry.npmjs.org/is-ip/-/is-ip-2.0.0.tgz", @@ -26599,6 +26550,79 @@ "node": ">=0.10.0" } }, + "node_modules/isomorphic-git": { + "version": "1.29.0", + "resolved": "https://registry.npmjs.org/isomorphic-git/-/isomorphic-git-1.29.0.tgz", + "integrity": "sha512-zWGqk8901cicvVEhVpN76AwKrS/TzHak2NQCtNXIAavpMIy/yqh+d/JtC9A8AUKZAauUdOyEWKI29tuCLAL+Zg==", + "dev": true, + "license": "MIT", + "dependencies": { + "async-lock": "^1.4.1", + "clean-git-ref": "^2.0.1", + "crc-32": "^1.2.0", + "diff3": "0.0.3", + "ignore": "^5.1.4", + "minimisted": "^2.0.0", + "pako": "^1.0.10", + "path-browserify": "^1.0.1", + "pify": "^4.0.1", + "readable-stream": "^3.4.0", + "sha.js": "^2.4.9", + "simple-get": "^4.0.1" + }, + "bin": { + "isogit": "cli.cjs" + }, + "engines": { + "node": ">=12" + } + }, + "node_modules/isomorphic-git/node_modules/pify": { + "version": "4.0.1", + "resolved": "https://registry.npmjs.org/pify/-/pify-4.0.1.tgz", + "integrity": "sha512-uB80kBFb/tfd68bVleG9T5GGsGPjJrLAUpR5PZIrhBnIaRTQRjqdJSsIKkOP6OAIFbj7GOrcudc5pNjZ+geV2g==", + "dev": true, + "license": "MIT", + "engines": { + "node": ">=6" + } + }, + "node_modules/isomorphic-git/node_modules/simple-get": { + "version": "4.0.1", + "resolved": "https://registry.npmjs.org/simple-get/-/simple-get-4.0.1.tgz", + "integrity": "sha512-brv7p5WgH0jmQJr1ZDDfKDOSeWWg+OVypG99A/5vYGPqJ6pxiaHLy8nxtFjBA7oMa01ebA9gfh1uMCFqOuXxvA==", + "dev": true, + "funding": [ + { + "type": "github", + "url": "https://github.com/sponsors/feross" + }, + { + "type": "patreon", + "url": "https://www.patreon.com/feross" + }, + { + "type": "consulting", + "url": "https://feross.org/support" + } + ], + "license": "MIT", + "dependencies": { + "decompress-response": "^6.0.0", + "once": "^1.3.1", + "simple-concat": "^1.0.0" + } + }, + "node_modules/isomorphic-textencoder": { + "version": "1.0.1", + "resolved": "https://registry.npmjs.org/isomorphic-textencoder/-/isomorphic-textencoder-1.0.1.tgz", + "integrity": "sha512-676hESgHullDdHDsj469hr+7t3i/neBKU9J7q1T4RHaWwLAsaQnywC0D1dIUId0YZ+JtVrShzuBk1soo0+GVcQ==", + "dev": true, + "license": "MIT", + "dependencies": { + "fast-text-encoding": "^1.0.0" + } + }, "node_modules/isstream": { "version": "0.1.2", "resolved": "https://registry.npmjs.org/isstream/-/isstream-0.1.2.tgz", @@ -27028,19 +27052,6 @@ } } }, - "node_modules/jscodeshift/node_modules/micromatch": { - "version": "4.0.8", - "resolved": "https://registry.npmjs.org/micromatch/-/micromatch-4.0.8.tgz", - "integrity": "sha512-PXwfBhYu0hBCPw8Dn0E+WDYb7af3dSLVWKi3HGv84IdF4TyFoC0ysxFd0Goxw7nSv4T/PzEJQxsYsEiFCKo2BA==", - "dev": true, - "dependencies": { - "braces": "^3.0.3", - "picomatch": "^2.3.1" - }, - "engines": { - "node": ">=8.6" - } - }, "node_modules/jscodeshift/node_modules/rimraf": { "version": "2.6.3", "resolved": "https://registry.npmjs.org/rimraf/-/rimraf-2.6.3.tgz", @@ -27217,15 +27228,16 @@ } }, "node_modules/jsesc": { - "version": "2.5.2", - "resolved": "https://registry.npmjs.org/jsesc/-/jsesc-2.5.2.tgz", - "integrity": "sha512-OYu7XEzjkCQ3C5Ps3QIZsQfNpqoJyZZA99wd9aWd05NCtC5pWOkShK2mkL6HXQR6/Cy2lbNdPlZBpuQHXE63gA==", + "version": "3.1.0", + "resolved": "https://registry.npmjs.org/jsesc/-/jsesc-3.1.0.tgz", + "integrity": "sha512-/sM3dO2FOzXjKQhJuo0Q173wf2KOo8t4I8vHy6lF9poUp7bKT0/NHE8fPX23PwfhnykfqnC2xRxOnVw5XuGIaA==", "dev": true, + "license": "MIT", "bin": { "jsesc": "bin/jsesc" }, "engines": { - "node": ">=4" + "node": ">=6" } }, "node_modules/json-bigint": { @@ -27517,12 +27529,26 @@ "node": ">=4.0" } }, + "node_modules/just-debounce-it": { + 
"version": "1.1.0", + "resolved": "https://registry.npmjs.org/just-debounce-it/-/just-debounce-it-1.1.0.tgz", + "integrity": "sha512-87Nnc0qZKgBZuhFZjYVjSraic0x7zwjhaTMrCKlj0QYKH6lh0KbFzVnfu6LHan03NO7J8ygjeBeD0epejn5Zcg==", + "dev": true, + "license": "MIT" + }, "node_modules/just-extend": { "version": "4.2.1", "resolved": "https://registry.npmjs.org/just-extend/-/just-extend-4.2.1.tgz", "integrity": "sha512-g3UB796vUFIY90VIv/WX3L2c8CS2MdWUww3CNrYmqza1Fg0DURc2K/O4YrnklBdQarSJ/y8JnJYDGc+1iumQjg==", "dev": true }, + "node_modules/just-once": { + "version": "1.1.0", + "resolved": "https://registry.npmjs.org/just-once/-/just-once-1.1.0.tgz", + "integrity": "sha512-+rZVpl+6VyTilK7vB/svlMPil4pxqIJZkbnN7DKZTOzyXfun6ZiFeq2Pk4EtCEHZ0VU4EkdFzG8ZK5F3PErcDw==", + "dev": true, + "license": "MIT" + }, "node_modules/jwa": { "version": "2.0.0", "resolved": "https://registry.npmjs.org/jwa/-/jwa-2.0.0.tgz", @@ -27554,7 +27580,8 @@ "node_modules/keycode": { "version": "2.2.1", "resolved": "https://registry.npmjs.org/keycode/-/keycode-2.2.1.tgz", - "integrity": "sha512-Rdgz9Hl9Iv4QKi8b0OlCRQEzp4AgVxyCtz5S/+VIHezDmrDhkp2N2TqBWOLz0/gbeREXOOiI9/4b8BY9uw2vFg==" + "integrity": "sha512-Rdgz9Hl9Iv4QKi8b0OlCRQEzp4AgVxyCtz5S/+VIHezDmrDhkp2N2TqBWOLz0/gbeREXOOiI9/4b8BY9uw2vFg==", + "dev": true }, "node_modules/keyv": { "version": "4.5.4", @@ -28666,6 +28693,32 @@ "@jridgewell/sourcemap-codec": "^1.5.0" } }, + "node_modules/mailtrap": { + "version": "3.4.0", + "resolved": "https://registry.npmjs.org/mailtrap/-/mailtrap-3.4.0.tgz", + "integrity": "sha512-gegg90/gMY8hvfxB+WMtE8RRZyhQr90jUw00QOLApIAomItumqFBCpZv5IfG51EUKThu9+p7X4QdNA4buryenw==", + "dev": true, + "license": "MIT", + "dependencies": { + "axios": ">=0.27" + }, + "engines": { + "node": ">=16.20.1", + "yarn": ">=1.22.17" + }, + "peerDependencies": { + "@types/nodemailer": "^6.4.9", + "nodemailer": "^6.9.4" + }, + "peerDependenciesMeta": { + "@types/nodemailer": { + "optional": true + }, + "nodemailer": { + "optional": true + } + } + }, "node_modules/make-dir": { "version": "2.1.0", "resolved": "https://registry.npmjs.org/make-dir/-/make-dir-2.1.0.tgz", @@ -29404,11 +29457,12 @@ ] }, "node_modules/micromatch": { - "version": "4.0.5", - "resolved": "https://registry.npmjs.org/micromatch/-/micromatch-4.0.5.tgz", - "integrity": "sha512-DMy+ERcEW2q8Z2Po+WNXuw3c5YaUSFjAO5GsJqfEl7UjvtIuFKO6ZrKvcItdy98dwFI2N1tg3zNIdKaQT+aNdA==", + "version": "4.0.8", + "resolved": "https://registry.npmjs.org/micromatch/-/micromatch-4.0.8.tgz", + "integrity": "sha512-PXwfBhYu0hBCPw8Dn0E+WDYb7af3dSLVWKi3HGv84IdF4TyFoC0ysxFd0Goxw7nSv4T/PzEJQxsYsEiFCKo2BA==", + "license": "MIT", "dependencies": { - "braces": "^3.0.2", + "braces": "^3.0.3", "picomatch": "^2.3.1" }, "engines": { @@ -29464,6 +29518,19 @@ "node": ">=6" } }, + "node_modules/mimic-response": { + "version": "3.1.0", + "resolved": "https://registry.npmjs.org/mimic-response/-/mimic-response-3.1.0.tgz", + "integrity": "sha512-z0yWI+4FDrrweS8Zmt4Ej5HdJmky15+L2e6Wgn3+iK5fWzb6T3fhNFq2+MeTRb064c6Wr4N/wv0DzQTjNzHNGQ==", + "dev": true, + "license": "MIT", + "engines": { + "node": ">=10" + }, + "funding": { + "url": "https://github.com/sponsors/sindresorhus" + } + }, "node_modules/min-document": { "version": "2.19.0", "resolved": "https://registry.npmjs.org/min-document/-/min-document-2.19.0.tgz", @@ -29579,6 +29646,16 @@ "url": "https://github.com/sponsors/ljharb" } }, + "node_modules/minimisted": { + "version": "2.0.1", + "resolved": "https://registry.npmjs.org/minimisted/-/minimisted-2.0.1.tgz", + "integrity": 
"sha512-1oPjfuLQa2caorJUM8HV8lGgWCc0qqAO1MNv/k05G4qslmsndV/5WdNZrqCiyqiz3wohia2Ij2B7w2Dr7/IyrA==", + "dev": true, + "license": "MIT", + "dependencies": { + "minimist": "^1.2.5" + } + }, "node_modules/minipass": { "version": "3.1.6", "resolved": "https://registry.npmjs.org/minipass/-/minipass-3.1.6.tgz", @@ -29665,22 +29742,22 @@ "integrity": "sha512-gKLcREMhtuZRwRAfqP3RFW+TK4JqApVBtOIftVgjuABpAtpxhPGaDcfvbhNvD0B8iD1oUr/txX35NjcaY6Ns/A==" }, "node_modules/mlly": { - "version": "1.7.3", - "resolved": "https://registry.npmjs.org/mlly/-/mlly-1.7.3.tgz", - "integrity": "sha512-xUsx5n/mN0uQf4V548PKQ+YShA4/IW0KI1dZhrNrPCLG+xizETbHTkOa1f8/xut9JRPp8kQuMnz0oqwkTiLo/A==", + "version": "1.7.4", + "resolved": "https://registry.npmjs.org/mlly/-/mlly-1.7.4.tgz", + "integrity": "sha512-qmdSIPC4bDJXgZTCR7XosJiNKySV7O215tsPtDN9iEO/7q/76b/ijtgRu/+epFXSJhijtTCCGp3DWS549P3xKw==", "dev": true, "license": "MIT", "dependencies": { "acorn": "^8.14.0", - "pathe": "^1.1.2", - "pkg-types": "^1.2.1", + "pathe": "^2.0.1", + "pkg-types": "^1.3.0", "ufo": "^1.5.4" } }, "node_modules/mlly/node_modules/acorn": { - "version": "8.14.0", - "resolved": "https://registry.npmjs.org/acorn/-/acorn-8.14.0.tgz", - "integrity": "sha512-cl669nCJTZBsL97OF4kUQm5g5hC2uihk0NxY3WENAC0TYdILVkAyHymAntgxGkl7K+t0cXIrH5siy5S4XkFycA==", + "version": "8.14.1", + "resolved": "https://registry.npmjs.org/acorn/-/acorn-8.14.1.tgz", + "integrity": "sha512-OvQ/2pUDKmgfCg++xsTX1wGxfTaszcHVcTctW4UJB4hibJx2HXxxO5UmVgyjMa+ZDsiaf5wWLXYpRWMmBI0QHg==", "dev": true, "license": "MIT", "bin": { @@ -30390,6 +30467,13 @@ "node": ">=10.5.0" } }, + "node_modules/node-ensure": { + "version": "0.0.0", + "resolved": "https://registry.npmjs.org/node-ensure/-/node-ensure-0.0.0.tgz", + "integrity": "sha512-DRI60hzo2oKN1ma0ckc6nQWlHU69RH6xN0sjQTjMpChPfTYvKZdcQFfdYK2RWbJcKyUizSIy/l8OTGxMAM1QDw==", + "dev": true, + "license": "MIT" + }, "node_modules/node-fetch": { "version": "2.7.0", "resolved": "https://registry.npmjs.org/node-fetch/-/node-fetch-2.7.0.tgz", @@ -30410,9 +30494,9 @@ } }, "node_modules/node-fetch-native": { - "version": "1.6.4", - "resolved": "https://registry.npmjs.org/node-fetch-native/-/node-fetch-native-1.6.4.tgz", - "integrity": "sha512-IhOigYzAKHd244OC0JIMIUrjzctirCmPkaIfhDeGcEETWof5zKYUW7e7MYvChGWh/4CJeXEgsRyGzuF334rOOQ==", + "version": "1.6.6", + "resolved": "https://registry.npmjs.org/node-fetch-native/-/node-fetch-native-1.6.6.tgz", + "integrity": "sha512-8Mc2HhqPdlIfedsuZoc3yioPuzp6b+L5jRCRY1QzuWZh2EGJVQrGppC6V6cF0bLdbW0+O2YpqCA25aF/1lvipQ==", "dev": true, "license": "MIT" }, @@ -30701,17 +30785,17 @@ "dev": true }, "node_modules/nypm": { - "version": "0.3.12", - "resolved": "https://registry.npmjs.org/nypm/-/nypm-0.3.12.tgz", - "integrity": "sha512-D3pzNDWIvgA+7IORhD/IuWzEk4uXv6GsgOxiid4UU3h9oq5IqV1KtPDi63n4sZJ/xcWlr88c0QM2RgN5VbOhFA==", + "version": "0.5.4", + "resolved": "https://registry.npmjs.org/nypm/-/nypm-0.5.4.tgz", + "integrity": "sha512-X0SNNrZiGU8/e/zAB7sCTtdxWTMSIO73q+xuKgglm2Yvzwlo8UoC5FNySQFCvl84uPaeADkqHUZUkWy4aH4xOA==", "dev": true, "license": "MIT", "dependencies": { "citty": "^0.1.6", - "consola": "^3.2.3", - "execa": "^8.0.1", - "pathe": "^1.1.2", - "pkg-types": "^1.2.0", + "consola": "^3.4.0", + "pathe": "^2.0.3", + "pkg-types": "^1.3.1", + "tinyexec": "^0.3.2", "ufo": "^1.5.4" }, "bin": { @@ -30721,150 +30805,6 @@ "node": "^14.16.0 || >=16.10.0" } }, - "node_modules/nypm/node_modules/execa": { - "version": "8.0.1", - "resolved": "https://registry.npmjs.org/execa/-/execa-8.0.1.tgz", - "integrity": 
"sha512-VyhnebXciFV2DESc+p6B+y0LjSm0krU4OgJN44qFAhBY0TJ+1V61tYD2+wHusZ6F9n5K+vl8k0sTy7PEfV4qpg==", - "dev": true, - "license": "MIT", - "dependencies": { - "cross-spawn": "^7.0.3", - "get-stream": "^8.0.1", - "human-signals": "^5.0.0", - "is-stream": "^3.0.0", - "merge-stream": "^2.0.0", - "npm-run-path": "^5.1.0", - "onetime": "^6.0.0", - "signal-exit": "^4.1.0", - "strip-final-newline": "^3.0.0" - }, - "engines": { - "node": ">=16.17" - }, - "funding": { - "url": "https://github.com/sindresorhus/execa?sponsor=1" - } - }, - "node_modules/nypm/node_modules/get-stream": { - "version": "8.0.1", - "resolved": "https://registry.npmjs.org/get-stream/-/get-stream-8.0.1.tgz", - "integrity": "sha512-VaUJspBffn/LMCJVoMvSAdmscJyS1auj5Zulnn5UoYcY531UWmdwhRWkcGKnGU93m5HSXP9LP2usOryrBtQowA==", - "dev": true, - "license": "MIT", - "engines": { - "node": ">=16" - }, - "funding": { - "url": "https://github.com/sponsors/sindresorhus" - } - }, - "node_modules/nypm/node_modules/human-signals": { - "version": "5.0.0", - "resolved": "https://registry.npmjs.org/human-signals/-/human-signals-5.0.0.tgz", - "integrity": "sha512-AXcZb6vzzrFAUE61HnN4mpLqd/cSIwNQjtNWR0euPm6y0iqx3G4gOXaIDdtdDwZmhwe82LA6+zinmW4UBWVePQ==", - "dev": true, - "license": "Apache-2.0", - "engines": { - "node": ">=16.17.0" - } - }, - "node_modules/nypm/node_modules/is-stream": { - "version": "3.0.0", - "resolved": "https://registry.npmjs.org/is-stream/-/is-stream-3.0.0.tgz", - "integrity": "sha512-LnQR4bZ9IADDRSkvpqMGvt/tEJWclzklNgSw48V5EAaAeDd6qGvN8ei6k5p0tvxSR171VmGyHuTiAOfxAbr8kA==", - "dev": true, - "license": "MIT", - "engines": { - "node": "^12.20.0 || ^14.13.1 || >=16.0.0" - }, - "funding": { - "url": "https://github.com/sponsors/sindresorhus" - } - }, - "node_modules/nypm/node_modules/mimic-fn": { - "version": "4.0.0", - "resolved": "https://registry.npmjs.org/mimic-fn/-/mimic-fn-4.0.0.tgz", - "integrity": "sha512-vqiC06CuhBTUdZH+RYl8sFrL096vA45Ok5ISO6sE/Mr1jRbGH4Csnhi8f3wKVl7x8mO4Au7Ir9D3Oyv1VYMFJw==", - "dev": true, - "license": "MIT", - "engines": { - "node": ">=12" - }, - "funding": { - "url": "https://github.com/sponsors/sindresorhus" - } - }, - "node_modules/nypm/node_modules/npm-run-path": { - "version": "5.3.0", - "resolved": "https://registry.npmjs.org/npm-run-path/-/npm-run-path-5.3.0.tgz", - "integrity": "sha512-ppwTtiJZq0O/ai0z7yfudtBpWIoxM8yE6nHi1X47eFR2EWORqfbu6CnPlNsjeN683eT0qG6H/Pyf9fCcvjnnnQ==", - "dev": true, - "license": "MIT", - "dependencies": { - "path-key": "^4.0.0" - }, - "engines": { - "node": "^12.20.0 || ^14.13.1 || >=16.0.0" - }, - "funding": { - "url": "https://github.com/sponsors/sindresorhus" - } - }, - "node_modules/nypm/node_modules/onetime": { - "version": "6.0.0", - "resolved": "https://registry.npmjs.org/onetime/-/onetime-6.0.0.tgz", - "integrity": "sha512-1FlR+gjXK7X+AsAHso35MnyN5KqGwJRi/31ft6x0M194ht7S+rWAvd7PHss9xSKMzE0asv1pyIHaJYq+BbacAQ==", - "dev": true, - "license": "MIT", - "dependencies": { - "mimic-fn": "^4.0.0" - }, - "engines": { - "node": ">=12" - }, - "funding": { - "url": "https://github.com/sponsors/sindresorhus" - } - }, - "node_modules/nypm/node_modules/path-key": { - "version": "4.0.0", - "resolved": "https://registry.npmjs.org/path-key/-/path-key-4.0.0.tgz", - "integrity": "sha512-haREypq7xkM7ErfgIyA0z+Bj4AGKlMSdlQE2jvJo6huWD1EdkKYV+G/T4nq0YEF2vgTT8kqMFKo1uHn950r4SQ==", - "dev": true, - "license": "MIT", - "engines": { - "node": ">=12" - }, - "funding": { - "url": "https://github.com/sponsors/sindresorhus" - } - }, - "node_modules/nypm/node_modules/signal-exit": { - "version": 
"4.1.0", - "resolved": "https://registry.npmjs.org/signal-exit/-/signal-exit-4.1.0.tgz", - "integrity": "sha512-bzyZ1e88w9O1iNJbKnOlvYTrWPDl46O1bG0D3XInv+9tkPrxrN8jUUTiFlDkkmKWgn1M6CfIA13SuGqOa9Korw==", - "dev": true, - "license": "ISC", - "engines": { - "node": ">=14" - }, - "funding": { - "url": "https://github.com/sponsors/isaacs" - } - }, - "node_modules/nypm/node_modules/strip-final-newline": { - "version": "3.0.0", - "resolved": "https://registry.npmjs.org/strip-final-newline/-/strip-final-newline-3.0.0.tgz", - "integrity": "sha512-dOESqjYr96iWYylGObzd39EuNTa5VJxyvVAEm5Jnh7KGo75V43Hk1odPQkNDyXNmUR6k+gEiDVXnjB8HJ3crXw==", - "dev": true, - "license": "MIT", - "engines": { - "node": ">=12" - }, - "funding": { - "url": "https://github.com/sponsors/sindresorhus" - } - }, "node_modules/oauth": { "version": "0.9.15", "resolved": "https://registry.npmjs.org/oauth/-/oauth-0.9.15.tgz", @@ -31141,13 +31081,6 @@ "node": ">0.4.11" } }, - "node_modules/ohash": { - "version": "1.1.4", - "resolved": "https://registry.npmjs.org/ohash/-/ohash-1.1.4.tgz", - "integrity": "sha512-FlDryZAahJmEF3VR3w1KogSEdWX3WhA5GPakFx4J81kEAiHyLMpdLLElS8n8dfNadMgAne/MywcvmogzscVt4g==", - "dev": true, - "license": "MIT" - }, "node_modules/on-finished": { "version": "2.3.0", "resolved": "https://registry.npmjs.org/on-finished/-/on-finished-2.3.0.tgz", @@ -31191,10 +31124,11 @@ } }, "node_modules/open": { - "version": "8.4.0", - "resolved": "https://registry.npmjs.org/open/-/open-8.4.0.tgz", - "integrity": "sha512-XgFPPM+B28FtCCgSb9I+s9szOC1vZRSwgWsRUA5ylIxRTgKozqjOCrVOqGsYABPYK5qnfqClxZTFBa8PKt2v6Q==", + "version": "8.4.2", + "resolved": "https://registry.npmjs.org/open/-/open-8.4.2.tgz", + "integrity": "sha512-7x81NCL719oNbsq/3mh+hVrAWmFuEYUqrq/Iw3kUzH8ReypT9QQ0BLoJS7/G9k6N81XjW4qHWtjWwe/9eLy1EQ==", "dev": true, + "license": "MIT", "dependencies": { "define-lazy-prop": "^2.0.0", "is-docker": "^2.1.1", @@ -31286,76 +31220,6 @@ "node": ">=0.4.0" } }, - "node_modules/ora": { - "version": "5.4.1", - "resolved": "https://registry.npmjs.org/ora/-/ora-5.4.1.tgz", - "integrity": "sha512-5b6Y85tPxZZ7QytO+BQzysW31HJku27cRIlkbAXaNx+BdcVi+LlRFmVXzeF6a7JCwJpyw5c4b+YSVImQIrBpuQ==", - "dev": true, - "license": "MIT", - "dependencies": { - "bl": "^4.1.0", - "chalk": "^4.1.0", - "cli-cursor": "^3.1.0", - "cli-spinners": "^2.5.0", - "is-interactive": "^1.0.0", - "is-unicode-supported": "^0.1.0", - "log-symbols": "^4.1.0", - "strip-ansi": "^6.0.0", - "wcwidth": "^1.0.1" - }, - "engines": { - "node": ">=10" - }, - "funding": { - "url": "https://github.com/sponsors/sindresorhus" - } - }, - "node_modules/ora/node_modules/ansi-styles": { - "version": "4.3.0", - "resolved": "https://registry.npmjs.org/ansi-styles/-/ansi-styles-4.3.0.tgz", - "integrity": "sha512-zbB9rCJAT1rbjiVDb2hqKFHNYLxgtk8NURxZ3IZwD3F6NtxbXZQCnnSi1Lkx+IDohdPlFp222wVALIheZJQSEg==", - "dev": true, - "license": "MIT", - "dependencies": { - "color-convert": "^2.0.1" - }, - "engines": { - "node": ">=8" - }, - "funding": { - "url": "https://github.com/chalk/ansi-styles?sponsor=1" - } - }, - "node_modules/ora/node_modules/chalk": { - "version": "4.1.2", - "resolved": "https://registry.npmjs.org/chalk/-/chalk-4.1.2.tgz", - "integrity": "sha512-oKnbhFyRIXpUuez8iBMmyEa4nbj4IOQyuhc/wy9kY7/WVPcwIO9VA668Pu8RkO7+0G76SLROeyw9CpQ061i4mA==", - "dev": true, - "license": "MIT", - "dependencies": { - "ansi-styles": "^4.1.0", - "supports-color": "^7.1.0" - }, - "engines": { - "node": ">=10" - }, - "funding": { - "url": "https://github.com/chalk/chalk?sponsor=1" - } - }, - 
"node_modules/ora/node_modules/supports-color": { - "version": "7.2.0", - "resolved": "https://registry.npmjs.org/supports-color/-/supports-color-7.2.0.tgz", - "integrity": "sha512-qpCAvRl9stuOHveKsn7HncJRvv501qIacKzQlO/+Lwxc9+0q2wLyv4Dfvt80/DPn2pqOBsJdDiogXGR9+OvwRw==", - "dev": true, - "license": "MIT", - "dependencies": { - "has-flag": "^4.0.0" - }, - "engines": { - "node": ">=8" - } - }, "node_modules/os-tmpdir": { "version": "1.0.2", "resolved": "https://registry.npmjs.org/os-tmpdir/-/os-tmpdir-1.0.2.tgz", @@ -31559,6 +31423,13 @@ "resolved": "https://registry.npmjs.org/packet-reader/-/packet-reader-1.0.0.tgz", "integrity": "sha512-HAKu/fG3HpHFO0AA8WE8q2g+gBJaZ9MG7fcKk+IJPLTGAD6Psw4443l+9DGRbOIh3/aXr7Phy0TjilYivJo5XQ==" }, + "node_modules/pako": { + "version": "1.0.11", + "resolved": "https://registry.npmjs.org/pako/-/pako-1.0.11.tgz", + "integrity": "sha512-4hLB8Py4zZce5s4yd9XzopqwVv/yGNhV1Bl8NTmCq1763HeK2+EwVTv+leGeL13Dnh2wfbqowVPXCIO0z4taYw==", + "dev": true, + "license": "(MIT AND Zlib)" + }, "node_modules/param-case": { "version": "3.0.4", "resolved": "https://registry.npmjs.org/param-case/-/param-case-3.0.4.tgz", @@ -32008,9 +31879,9 @@ } }, "node_modules/pathe": { - "version": "1.1.2", - "resolved": "https://registry.npmjs.org/pathe/-/pathe-1.1.2.tgz", - "integrity": "sha512-whLdWMYL2TwI08hn8/ZqAbrVemu0LNaNNJZX73O6qaIdCTfXutsLhMkjdENX0qhsQ9uIimo4/aQOmXkoon2nDQ==", + "version": "2.0.3", + "resolved": "https://registry.npmjs.org/pathe/-/pathe-2.0.3.tgz", + "integrity": "sha512-WUjGcAqP1gQacoQe+OBJsFA7Ld4DyXuUIjZ5cc75cLHvJ7dtNsTugphxIADwspS+AraAUePCKrSVtPLFj/F88w==", "dev": true, "license": "MIT" }, @@ -32028,6 +31899,30 @@ "resolved": "https://registry.npmjs.org/pause/-/pause-0.0.1.tgz", "integrity": "sha1-HUCLP9t2kjuVQ9lvtMnf1TXZy10=" }, + "node_modules/pdf-parse": { + "version": "1.1.1", + "resolved": "https://registry.npmjs.org/pdf-parse/-/pdf-parse-1.1.1.tgz", + "integrity": "sha512-v6ZJ/efsBpGrGGknjtq9J/oC8tZWq0KWL5vQrk2GlzLEQPUDB1ex+13Rmidl1neNN358Jn9EHZw5y07FFtaC7A==", + "dev": true, + "license": "MIT", + "dependencies": { + "debug": "^3.1.0", + "node-ensure": "^0.0.0" + }, + "engines": { + "node": ">=6.8.1" + } + }, + "node_modules/pdf-parse/node_modules/debug": { + "version": "3.2.7", + "resolved": "https://registry.npmjs.org/debug/-/debug-3.2.7.tgz", + "integrity": "sha512-CFjzYYAi4ThfiQvizrFQevTTXHtnCqWfe7x1AhgEscTz6ZbLbfoLRLPugTQyBth6f8ZERVUSyWHFD/7Wu4t1XQ==", + "dev": true, + "license": "MIT", + "dependencies": { + "ms": "^2.1.1" + } + }, "node_modules/pdfjs-dist": { "version": "4.10.38", "resolved": "https://registry.npmjs.org/pdfjs-dist/-/pdfjs-dist-4.10.38.tgz", @@ -32266,15 +32161,15 @@ } }, "node_modules/pkg-types": { - "version": "1.2.1", - "resolved": "https://registry.npmjs.org/pkg-types/-/pkg-types-1.2.1.tgz", - "integrity": "sha512-sQoqa8alT3nHjGuTjuKgOnvjo4cljkufdtLMnO2LBP/wRwuDlo1tkaEdMxCRhyGRPacv/ztlZgDPm2b7FAmEvw==", + "version": "1.3.1", + "resolved": "https://registry.npmjs.org/pkg-types/-/pkg-types-1.3.1.tgz", + "integrity": "sha512-/Jm5M4RvtBFVkKWRu2BLUTNP8/M2a+UwuAX+ae4770q1qVGtfjG+WTCupoZixokjmHiry8uI+dlY8KXYV5HVVQ==", "dev": true, "license": "MIT", "dependencies": { "confbox": "^0.1.8", - "mlly": "^1.7.2", - "pathe": "^1.1.2" + "mlly": "^1.7.4", + "pathe": "^2.0.1" } }, "node_modules/pkg-up": { @@ -32383,10 +32278,11 @@ } }, "node_modules/polished": { - "version": "4.2.2", - "resolved": "https://registry.npmjs.org/polished/-/polished-4.2.2.tgz", - "integrity": 
"sha512-Sz2Lkdxz6F2Pgnpi9U5Ng/WdWAUZxmHrNPoVlm3aAemxoy2Qy7LGjQg4uf8qKelDAUW94F4np3iH2YPf2qefcQ==", + "version": "4.3.1", + "resolved": "https://registry.npmjs.org/polished/-/polished-4.3.1.tgz", + "integrity": "sha512-OBatVyC/N7SCW/FaDHrSd+vn0o5cS855TOmYi4OkdWUMSJCET/xip//ch8xGUvtr3i44X9LVyWwQlRMTN3pwSA==", "dev": true, + "license": "MIT", "dependencies": { "@babel/runtime": "^7.17.8" }, @@ -32394,17 +32290,6 @@ "node": ">=10" } }, - "node_modules/popper.js": { - "version": "1.16.1", - "resolved": "https://registry.npmjs.org/popper.js/-/popper.js-1.16.1.tgz", - "integrity": "sha512-Wb4p1J4zyFTbM+u6WuO4XstYx4Ky9Cewe4DWrel7B0w6VVICvPwdOpotjzcf6eD8TsckVnIMNONQyPIUFOUbCQ==", - "deprecated": "You can find the new Popper v2 at @popperjs/core, this package is dedicated to the legacy v1", - "peer": true, - "funding": { - "type": "opencollective", - "url": "https://opencollective.com/popperjs" - } - }, "node_modules/posix-character-classes": { "version": "0.1.1", "resolved": "https://registry.npmjs.org/posix-character-classes/-/posix-character-classes-0.1.1.tgz", @@ -33983,6 +33868,7 @@ "version": "1.1.1", "resolved": "https://registry.npmjs.org/prop-types-extra/-/prop-types-extra-1.1.1.tgz", "integrity": "sha512-59+AHNnHYCdiC+vMwY52WmvP5dM3QLeoumYuEyceQDi9aEhtwN9zIQ2ZNo25sMyXnbh32h+P1ezDsUpUH3JAew==", + "dev": true, "dependencies": { "react-is": "^16.3.2", "warning": "^4.0.0" @@ -33994,12 +33880,14 @@ "node_modules/prop-types-extra/node_modules/react-is": { "version": "16.13.1", "resolved": "https://registry.npmjs.org/react-is/-/react-is-16.13.1.tgz", - "integrity": "sha512-24e6ynE2H+OKt4kqsOvNd8kBpV65zoxbA4BVsEOB3ARVWQki/DHzaUoC5KuON/BiccDaCCTZBuOcfZs70kR8bQ==" + "integrity": "sha512-24e6ynE2H+OKt4kqsOvNd8kBpV65zoxbA4BVsEOB3ARVWQki/DHzaUoC5KuON/BiccDaCCTZBuOcfZs70kR8bQ==", + "dev": true }, "node_modules/prop-types-extra/node_modules/warning": { "version": "4.0.3", "resolved": "https://registry.npmjs.org/warning/-/warning-4.0.3.tgz", "integrity": "sha512-rpJyN222KWIvHJ/F53XSZv0Zl/accqHR8et1kpaMTD/fLCRxtV8iX8czMzY7sVZupTI3zcUTg8eycS2kNF9l6w==", + "dev": true, "dependencies": { "loose-envify": "^1.0.0" } @@ -34736,6 +34624,7 @@ "version": "0.33.1", "resolved": "https://registry.npmjs.org/react-bootstrap/-/react-bootstrap-0.33.1.tgz", "integrity": "sha512-qWTRravSds87P8WC82tETy2yIso8qDqlIm0czsrduCaYAFtHuyLu0XDbUlfLXeRzqgwm5sRk2wRaTNoiVkk/YQ==", + "dev": true, "dependencies": { "@babel/runtime-corejs2": "^7.0.0", "classnames": "^2.2.5", @@ -34829,9 +34718,9 @@ } }, "node_modules/react-docgen": { - "version": "7.1.0", - "resolved": "https://registry.npmjs.org/react-docgen/-/react-docgen-7.1.0.tgz", - "integrity": "sha512-APPU8HB2uZnpl6Vt/+0AFoVYgSRtfiP6FLrZgPPTDmqSb2R4qZRbgd0A3VzIFxDt5e+Fozjx79WjLWnF69DK8g==", + "version": "7.1.1", + "resolved": "https://registry.npmjs.org/react-docgen/-/react-docgen-7.1.1.tgz", + "integrity": "sha512-hlSJDQ2synMPKFZOsKo9Hi8WWZTC7POR8EmWvTSjow+VDgKzkmjQvFm2fk0tmRw+f0vTOIYKlarR0iL4996pdg==", "dev": true, "license": "MIT", "dependencies": { @@ -34938,7 +34827,8 @@ "node_modules/react-lifecycles-compat": { "version": "3.0.4", "resolved": "https://registry.npmjs.org/react-lifecycles-compat/-/react-lifecycles-compat-3.0.4.tgz", - "integrity": "sha512-fBASbA6LnOU9dOU2eW7aQ8xmYBSXUIWr+UmF9b1efZBazGNO+rcXT/icdKnYm2pTwcRylVUYwW7H1PHfLekVzA==" + "integrity": "sha512-fBASbA6LnOU9dOU2eW7aQ8xmYBSXUIWr+UmF9b1efZBazGNO+rcXT/icdKnYm2pTwcRylVUYwW7H1PHfLekVzA==", + "dev": true }, "node_modules/react-linkify": { "version": "1.0.0-alpha", @@ -34963,6 +34853,7 @@ "version": "0.9.3", 
"resolved": "https://registry.npmjs.org/react-overlays/-/react-overlays-0.9.3.tgz", "integrity": "sha512-u2T7nOLnK+Hrntho4p0Nxh+BsJl0bl4Xuwj/Y0a56xywLMetgAfyjnDVrudLXsNcKGaspoC+t3C1V80W9QQTdQ==", + "dev": true, "dependencies": { "classnames": "^2.2.5", "dom-helpers": "^3.2.1", @@ -34980,6 +34871,7 @@ "version": "0.4.0", "resolved": "https://registry.npmjs.org/react-prop-types/-/react-prop-types-0.4.0.tgz", "integrity": "sha1-+ZsL+0AGkpya8gUefBQUpcdbk9A=", + "dev": true, "dependencies": { "warning": "^3.0.0" }, @@ -35101,6 +34993,7 @@ "version": "2.9.0", "resolved": "https://registry.npmjs.org/react-transition-group/-/react-transition-group-2.9.0.tgz", "integrity": "sha512-+HzNTCHpeQyl4MJ/bdE0u6XRMe9+XG/+aL4mCxVN4DnPBQ0/5bfHWPDuOZUzYdMj94daZaZdCCc1Dzt9R/xSSg==", + "dev": true, "dependencies": { "dom-helpers": "^3.4.0", "loose-envify": "^1.4.0", @@ -36058,6 +35951,28 @@ "node": ">=12" } }, + "node_modules/saml/node_modules/xml-crypto": { + "version": "2.1.6", + "resolved": "https://registry.npmjs.org/xml-crypto/-/xml-crypto-2.1.6.tgz", + "integrity": "sha512-jjvpO8vHNV8QFhW5bMypP+k4BjBqHe/HrpIwpPcdUnUTIJakSIuN96o3Sdah4tKu2z64kM/JHEH8iEHGCc6Gyw==", + "license": "MIT", + "dependencies": { + "@xmldom/xmldom": "^0.7.9", + "xpath": "0.0.32" + }, + "engines": { + "node": ">=0.4.0" + } + }, + "node_modules/saml/node_modules/xml-crypto/node_modules/xpath": { + "version": "0.0.32", + "resolved": "https://registry.npmjs.org/xpath/-/xpath-0.0.32.tgz", + "integrity": "sha512-rxMJhSIoiO8vXcWvSifKqhvV96GjiD5wYb8/QHdoRyQvraTpp4IEv944nhGausZZ3u7dhQXteZuZbaqfpB7uYw==", + "license": "MIT", + "engines": { + "node": ">=0.6.0" + } + }, "node_modules/saml/node_modules/xml-name-validator": { "version": "2.0.1", "resolved": "https://registry.npmjs.org/xml-name-validator/-/xml-name-validator-2.0.1.tgz", @@ -36083,6 +35998,28 @@ "node": ">=12" } }, + "node_modules/samlp/node_modules/xml-crypto": { + "version": "2.1.6", + "resolved": "https://registry.npmjs.org/xml-crypto/-/xml-crypto-2.1.6.tgz", + "integrity": "sha512-jjvpO8vHNV8QFhW5bMypP+k4BjBqHe/HrpIwpPcdUnUTIJakSIuN96o3Sdah4tKu2z64kM/JHEH8iEHGCc6Gyw==", + "license": "MIT", + "dependencies": { + "@xmldom/xmldom": "^0.7.9", + "xpath": "0.0.32" + }, + "engines": { + "node": ">=0.4.0" + } + }, + "node_modules/samlp/node_modules/xml-crypto/node_modules/xpath": { + "version": "0.0.32", + "resolved": "https://registry.npmjs.org/xpath/-/xpath-0.0.32.tgz", + "integrity": "sha512-rxMJhSIoiO8vXcWvSifKqhvV96GjiD5wYb8/QHdoRyQvraTpp4IEv944nhGausZZ3u7dhQXteZuZbaqfpB7uYw==", + "license": "MIT", + "engines": { + "node": ">=0.6.0" + } + }, "node_modules/samlp/node_modules/xtend": { "version": "1.0.3", "resolved": "https://registry.npmjs.org/xtend/-/xtend-1.0.3.tgz", @@ -36746,6 +36683,20 @@ "resolved": "https://registry.npmjs.org/setprototypeof/-/setprototypeof-1.2.0.tgz", "integrity": "sha512-E5LDX7Wrp85Kil5bhZv46j8jOeboKq5JMmYM3gVGdGH8xFpPWXUMsNrlODCrkoxMEeNi/XZIwuRvY4XNwYMJpw==" }, + "node_modules/sha.js": { + "version": "2.4.11", + "resolved": "https://registry.npmjs.org/sha.js/-/sha.js-2.4.11.tgz", + "integrity": "sha512-QMEp5B7cftE7APOjk5Y6xgrbWu+WkLVQwk8JNjZ8nKRciZaByEW6MubieAiToS7+dwvrjGhH8jRXz3MVd0AYqQ==", + "dev": true, + "license": "(MIT AND BSD-3-Clause)", + "dependencies": { + "inherits": "^2.0.1", + "safe-buffer": "^5.0.1" + }, + "bin": { + "sha.js": "bin.js" + } + }, "node_modules/shallow-clone": { "version": "3.0.1", "resolved": "https://registry.npmjs.org/shallow-clone/-/shallow-clone-3.0.1.tgz", @@ -36848,9 +36799,7 @@ "type": "consulting", "url": 
"https://feross.org/support" } - ], - "optional": true, - "peer": true + ] }, "node_modules/simple-get": { "version": "3.1.1", @@ -37505,31 +37454,6 @@ "es5-ext": "^0.10.53" } }, - "node_modules/ssh2": { - "version": "1.6.0", - "resolved": "https://registry.npmjs.org/ssh2/-/ssh2-1.6.0.tgz", - "integrity": "sha512-lxc+uvXqOxyQ99N2M7k5o4pkYDO5GptOTYduWw7hIM41icxvoBcCNHcj+LTKrjkL0vFcAl+qfZekthoSFRJn2Q==", - "hasInstallScript": true, - "dependencies": { - "asn1": "^0.2.4", - "bcrypt-pbkdf": "^1.0.2" - }, - "engines": { - "node": ">=10.16.0" - }, - "optionalDependencies": { - "cpu-features": "0.0.2", - "nan": "^2.15.0" - } - }, - "node_modules/ssh2/node_modules/asn1": { - "version": "0.2.6", - "resolved": "https://registry.npmjs.org/asn1/-/asn1-0.2.6.tgz", - "integrity": "sha512-ix/FxPn0MDjeyJ7i/yoHGFt/EX6LyNbxSEhPPXODPL+KB0VPk86UYfL0lMdy+KCnv+fmvIzySwaK5COwqVbWTQ==", - "dependencies": { - "safer-buffer": "~2.1.0" - } - }, "node_modules/sshpk": { "version": "1.17.0", "resolved": "https://registry.npmjs.org/sshpk/-/sshpk-1.17.0.tgz", @@ -37554,14 +37478,6 @@ "node": ">=0.10.0" } }, - "node_modules/sshpk/node_modules/asn1": { - "version": "0.2.6", - "resolved": "https://registry.npmjs.org/asn1/-/asn1-0.2.6.tgz", - "integrity": "sha512-ix/FxPn0MDjeyJ7i/yoHGFt/EX6LyNbxSEhPPXODPL+KB0VPk86UYfL0lMdy+KCnv+fmvIzySwaK5COwqVbWTQ==", - "dependencies": { - "safer-buffer": "~2.1.0" - } - }, "node_modules/sshpk/node_modules/assert-plus": { "version": "1.0.0", "resolved": "https://registry.npmjs.org/assert-plus/-/assert-plus-1.0.0.tgz", @@ -37649,13 +37565,13 @@ } }, "node_modules/storybook": { - "version": "8.4.7", - "resolved": "https://registry.npmjs.org/storybook/-/storybook-8.4.7.tgz", - "integrity": "sha512-RP/nMJxiWyFc8EVMH5gp20ID032Wvk+Yr3lmKidoegto5Iy+2dVQnUoElZb2zpbVXNHWakGuAkfI0dY1Hfp/vw==", + "version": "8.6.4", + "resolved": "https://registry.npmjs.org/storybook/-/storybook-8.6.4.tgz", + "integrity": "sha512-XXh1Acvf1r3BQX0BDLQw6yhZ7yUGvYxIcKOBuMdetnX7iXtczipJTfw0uyFwk0ltkKEE9PpJvivYmARF3u64VQ==", "dev": true, "license": "MIT", "dependencies": { - "@storybook/core": "8.4.7" + "@storybook/core": "8.6.4" }, "bin": { "getstorybook": "bin/index.cjs", @@ -37929,6 +37845,19 @@ "url": "https://github.com/sponsors/sindresorhus" } }, + "node_modules/stripe": { + "version": "17.7.0", + "resolved": "https://registry.npmjs.org/stripe/-/stripe-17.7.0.tgz", + "integrity": "sha512-aT2BU9KkizY9SATf14WhhYVv2uOapBWX0OFWF4xvcj1mPaNotlSc2CsxpS4DS46ZueSppmCF5BX1sNYBtwBvfw==", + "license": "MIT", + "dependencies": { + "@types/node": ">=8.1.0", + "qs": "^6.11.0" + }, + "engines": { + "node": ">=12.*" + } + }, "node_modules/strnum": { "version": "1.0.5", "resolved": "https://registry.npmjs.org/strnum/-/strnum-1.0.5.tgz", @@ -38792,17 +38721,6 @@ "node": ">=10" } }, - "node_modules/tar-fs": { - "version": "2.0.1", - "resolved": "https://registry.npmjs.org/tar-fs/-/tar-fs-2.0.1.tgz", - "integrity": "sha512-6tzWDMeroL87uF/+lin46k+Q+46rAJ0SyPGz7OW7wTgblI273hsBqk2C1j0/xNadNLKDTUL9BukSjB7cwgmlPA==", - "dependencies": { - "chownr": "^1.1.1", - "mkdirp-classic": "^0.5.2", - "pump": "^3.0.0", - "tar-stream": "^2.0.0" - } - }, "node_modules/tar-stream": { "version": "2.2.0", "resolved": "https://registry.npmjs.org/tar-stream/-/tar-stream-2.2.0.tgz", @@ -39296,6 +39214,13 @@ "integrity": "sha512-XPaBkWQJdsf3pLKJV9p4qN/S+fm2Oj8AIPo1BTUhg5oxkvm9+SVEGFdhyOz7tTdUTfvxMiAs4sp6/eZO2Ew+pw==", "dev": true }, + "node_modules/tinyexec": { + "version": "0.3.2", + "resolved": "https://registry.npmjs.org/tinyexec/-/tinyexec-0.3.2.tgz", + 
"integrity": "sha512-KQQR9yN7R5+OSwaK0XQoj22pwHoTlgYqmUscPYoknOoWCWfj/5/ABTMRi69FrKU5ffPVh5QcFikpWJI/P1ocHA==", + "dev": true, + "license": "MIT" + }, "node_modules/tinyrainbow": { "version": "1.2.0", "resolved": "https://registry.npmjs.org/tinyrainbow/-/tinyrainbow-1.2.0.tgz", @@ -39339,14 +39264,6 @@ "node": ">=14.14" } }, - "node_modules/to-fast-properties": { - "version": "2.0.0", - "resolved": "https://registry.npmjs.org/to-fast-properties/-/to-fast-properties-2.0.0.tgz", - "integrity": "sha1-3F5pjL0HkmW8c+A3doGk5Og/YW4=", - "engines": { - "node": ">=4" - } - }, "node_modules/to-object-path": { "version": "0.3.0", "resolved": "https://registry.npmjs.org/to-object-path/-/to-object-path-0.3.0.tgz", @@ -39586,6 +39503,7 @@ "resolved": "https://registry.npmjs.org/ts-dedent/-/ts-dedent-2.2.0.tgz", "integrity": "sha512-q5W7tVM71e2xjHZTlgfTDoPF/SmqKG5hddq9SzR49CH2hayqRKJtQ4mtRlSxKaJlR/+9rEM+mnBHf7I2/BQcpQ==", "dev": true, + "license": "MIT", "engines": { "node": ">=6.10" } @@ -39888,6 +39806,7 @@ "version": "7.2.1", "resolved": "https://registry.npmjs.org/uncontrollable/-/uncontrollable-7.2.1.tgz", "integrity": "sha512-svtcfoTADIB0nT9nltgjujTi7BzVmwjZClOmskKu/E8FW9BXzg9os8OLr4f8Dlnk0rYWJIWr4wv9eKUXiQvQwQ==", + "dev": true, "dependencies": { "@babel/runtime": "^7.6.3", "@types/react": ">=16.9.11", @@ -39909,13 +39828,6 @@ "resolved": "https://registry.npmjs.org/underscore/-/underscore-1.13.6.tgz", "integrity": "sha512-+A5Sja4HP1M08MaXya7p5LvjuM7K6q/2EaC0+iovj/wOcMsTzMvDFbasi/oSapiwOlt252IqsKqPjCl7huKS0A==" }, - "node_modules/undici-types": { - "version": "6.20.0", - "resolved": "https://registry.npmjs.org/undici-types/-/undici-types-6.20.0.tgz", - "integrity": "sha512-Ny6QZ2Nju20vw1SRHe3d9jVu6gJ+4e3+MMpqu7pqE5HT6WsTSlce++GQmK5UXS8mzV8DSYHrQH+Xrf2jVcuKNg==", - "dev": true, - "license": "MIT" - }, "node_modules/uni-global": { "version": "1.0.0", "resolved": "https://registry.npmjs.org/uni-global/-/uni-global-1.0.0.tgz", @@ -39965,9 +39877,9 @@ } }, "node_modules/unicorn-magic": { - "version": "0.1.0", - "resolved": "https://registry.npmjs.org/unicorn-magic/-/unicorn-magic-0.1.0.tgz", - "integrity": "sha512-lRfVq8fE8gz6QMBuDM6a+LO3IAzTi05H6gCVaUpir2E1Rwpo4ZUog45KpNXKC/Mn3Yb9UDuHumeFTo9iV/D9FQ==", + "version": "0.3.0", + "resolved": "https://registry.npmjs.org/unicorn-magic/-/unicorn-magic-0.3.0.tgz", + "integrity": "sha512-+QBBXBCvifc56fsbuxZQ6Sic3wqqc3WWaqxs58gvJrcOuN83HGTCwz3oS5phzU9LthRNE9VrJCFCLUgHeeFnfA==", "dev": true, "license": "MIT", "engines": { @@ -40018,9 +39930,9 @@ } }, "node_modules/unplugin": { - "version": "1.16.0", - "resolved": "https://registry.npmjs.org/unplugin/-/unplugin-1.16.0.tgz", - "integrity": "sha512-5liCNPuJW8dqh3+DM6uNM2EI3MLLpCKp/KY+9pB5M2S2SR2qvvDHhKgBOaTWEbZTAws3CXfB0rKTIolWKL05VQ==", + "version": "1.16.1", + "resolved": "https://registry.npmjs.org/unplugin/-/unplugin-1.16.1.tgz", + "integrity": "sha512-4/u/j4FrCKdi17jaxuJA0jClGxB1AvU2hw/IuayPc4ay1XGaJs/rbb4v5WKwAjNifjmXK9PIFyuPiaK8azyR9w==", "dev": true, "license": "MIT", "dependencies": { @@ -40032,9 +39944,9 @@ } }, "node_modules/unplugin/node_modules/acorn": { - "version": "8.14.0", - "resolved": "https://registry.npmjs.org/acorn/-/acorn-8.14.0.tgz", - "integrity": "sha512-cl669nCJTZBsL97OF4kUQm5g5hC2uihk0NxY3WENAC0TYdILVkAyHymAntgxGkl7K+t0cXIrH5siy5S4XkFycA==", + "version": "8.14.1", + "resolved": "https://registry.npmjs.org/acorn/-/acorn-8.14.1.tgz", + "integrity": "sha512-OvQ/2pUDKmgfCg++xsTX1wGxfTaszcHVcTctW4UJB4hibJx2HXxxO5UmVgyjMa+ZDsiaf5wWLXYpRWMmBI0QHg==", "dev": true, "license": "MIT", "bin": { @@ 
-40594,6 +40506,7 @@ "version": "3.0.0", "resolved": "https://registry.npmjs.org/warning/-/warning-3.0.0.tgz", "integrity": "sha1-MuU3fLVy3kqwR1O9+IIcAe1gW3w=", + "dev": true, "dependencies": { "loose-envify": "^1.0.0" } @@ -40620,16 +40533,6 @@ "minimalistic-assert": "^1.0.0" } }, - "node_modules/wcwidth": { - "version": "1.0.1", - "resolved": "https://registry.npmjs.org/wcwidth/-/wcwidth-1.0.1.tgz", - "integrity": "sha512-XHPEwS0q6TaxcvG85+8EYkbiCux2XtWG2mkc47Ng2A77BQu9+DqIOJldST4HgPkuea7dvKSj5VgX3P1d4rW8Tg==", - "dev": true, - "license": "MIT", - "dependencies": { - "defaults": "^1.0.3" - } - }, "node_modules/web-streams-polyfill": { "version": "4.0.0-beta.1", "resolved": "https://registry.npmjs.org/web-streams-polyfill/-/web-streams-polyfill-4.0.0-beta.1.tgz", @@ -41675,15 +41578,25 @@ } }, "node_modules/xml-crypto": { - "version": "2.1.3", - "resolved": "https://registry.npmjs.org/xml-crypto/-/xml-crypto-2.1.3.tgz", - "integrity": "sha512-MpXZwnn9JK0mNPZ5mnFIbNnQa+8lMGK4NtnX2FlJMfMWR60sJdFO9X72yO6ji068pxixzk53O7x0/iSKh6IhyQ==", + "version": "3.2.1", + "resolved": "https://registry.npmjs.org/xml-crypto/-/xml-crypto-3.2.1.tgz", + "integrity": "sha512-0GUNbPtQt+PLMsC5HoZRONX+K6NBJEqpXe/lsvrFj0EqfpGPpVfJKGE7a5jCg8s2+Wkrf/2U1G41kIH+zC9eyQ==", + "license": "MIT", "dependencies": { - "@xmldom/xmldom": "^0.7.0", + "@xmldom/xmldom": "^0.8.8", "xpath": "0.0.32" }, "engines": { - "node": ">=0.4.0" + "node": ">=4.0.0" + } + }, + "node_modules/xml-crypto/node_modules/@xmldom/xmldom": { + "version": "0.8.10", + "resolved": "https://registry.npmjs.org/@xmldom/xmldom/-/xmldom-0.8.10.tgz", + "integrity": "sha512-2WALfTl4xo2SkGCYRt6rDTFfk9R1czmBvUQy12gK2KuRKIpWEhcbbzy8EZXtz/jkRqHX8bFEc6FC1HjX4TUWYw==", + "license": "MIT", + "engines": { + "node": ">=10.0.0" } }, "node_modules/xml-crypto/node_modules/xpath": { @@ -41996,15 +41909,6 @@ "node": ">=0.2.0" } }, - "node_modules/zone.js": { - "version": "0.11.4", - "resolved": "https://registry.npmjs.org/zone.js/-/zone.js-0.11.4.tgz", - "integrity": "sha512-DDh2Ab+A/B+9mJyajPjHFPWfYU1H+pdun4wnnk0OcQTNjem1XQSZ2CDW+rfZEUDjv5M19SBqAkjZi0x5wuB5Qw==", - "dev": true, - "dependencies": { - "tslib": "^2.0.0" - } - }, "services/analytics": { "name": "@overleaf/analytics", "dependencies": { @@ -42112,7 +42016,7 @@ "body-parser": "^1.20.3", "bunyan": "^1.8.15", "diskusage": "^1.1.3", - "dockerode": "^3.1.0", + "dockerode": "^4.0.5", "express": "^4.21.2", "lodash": "^4.17.21", "p-limit": "^3.1.0", @@ -42163,6 +42067,33 @@ "node": ">= 0.6" } }, + "services/clsi/node_modules/@grpc/grpc-js": { + "version": "1.13.2", + "resolved": "https://registry.npmjs.org/@grpc/grpc-js/-/grpc-js-1.13.2.tgz", + "integrity": "sha512-nnR5nmL6lxF8YBqb6gWvEgLdLh/Fn+kvAdX5hUOnt48sNSb0riz/93ASd2E5gvanPA41X6Yp25bIfGRp1SMb2g==", + "license": "Apache-2.0", + "dependencies": { + "@grpc/proto-loader": "^0.7.13", + "@js-sdsl/ordered-map": "^4.4.2" + }, + "engines": { + "node": ">=12.10.0" + } + }, + "services/clsi/node_modules/cpu-features": { + "version": "0.0.10", + "resolved": "https://registry.npmjs.org/cpu-features/-/cpu-features-0.0.10.tgz", + "integrity": "sha512-9IkYqtX3YHPCzoVg1Py+o9057a3i0fp7S530UWokCSaFVTc7CwXPRiOjRjBQQ18ZCNafx78YfnG+HALxtVmOGA==", + "hasInstallScript": true, + "optional": true, + "dependencies": { + "buildcheck": "~0.0.6", + "nan": "^2.19.0" + }, + "engines": { + "node": ">=10.0.0" + } + }, "services/clsi/node_modules/diff": { "version": "4.0.2", "resolved": "https://registry.npmjs.org/diff/-/diff-4.0.2.tgz", @@ -42172,6 +42103,70 @@ "node": ">=0.3.1" } }, + 
"services/clsi/node_modules/docker-modem": { + "version": "5.0.6", + "resolved": "https://registry.npmjs.org/docker-modem/-/docker-modem-5.0.6.tgz", + "integrity": "sha512-ens7BiayssQz/uAxGzH8zGXCtiV24rRWXdjNha5V4zSOcxmAZsfGVm/PPFbwQdqEkDnhG+SyR9E3zSHUbOKXBQ==", + "license": "Apache-2.0", + "dependencies": { + "debug": "^4.1.1", + "readable-stream": "^3.5.0", + "split-ca": "^1.0.1", + "ssh2": "^1.15.0" + }, + "engines": { + "node": ">= 8.0" + } + }, + "services/clsi/node_modules/dockerode": { + "version": "4.0.5", + "resolved": "https://registry.npmjs.org/dockerode/-/dockerode-4.0.5.tgz", + "integrity": "sha512-ZPmKSr1k1571Mrh7oIBS/j0AqAccoecY2yH420ni5j1KyNMgnoTh4Nu4FWunh0HZIJmRSmSysJjBIpa/zyWUEA==", + "license": "Apache-2.0", + "dependencies": { + "@balena/dockerignore": "^1.0.2", + "@grpc/grpc-js": "^1.11.1", + "@grpc/proto-loader": "^0.7.13", + "docker-modem": "^5.0.6", + "protobufjs": "^7.3.2", + "tar-fs": "~2.1.2", + "uuid": "^10.0.0" + }, + "engines": { + "node": ">= 8.0" + } + }, + "services/clsi/node_modules/nan": { + "version": "2.22.2", + "resolved": "https://registry.npmjs.org/nan/-/nan-2.22.2.tgz", + "integrity": "sha512-DANghxFkS1plDdRsX0X9pm0Z6SJNN6gBdtXfanwoZ8hooC5gosGFSBGRYHUVPz1asKA/kMRqDRdHrluZ61SpBQ==", + "license": "MIT", + "optional": true + }, + "services/clsi/node_modules/protobufjs": { + "version": "7.4.0", + "resolved": "https://registry.npmjs.org/protobufjs/-/protobufjs-7.4.0.tgz", + "integrity": "sha512-mRUWCc3KUU4w1jU8sGxICXH/gNS94DvI1gxqDvBzhj1JpcsimQkYiOJfwsPUykUI5ZaspFbSgmBLER8IrQ3tqw==", + "hasInstallScript": true, + "license": "BSD-3-Clause", + "dependencies": { + "@protobufjs/aspromise": "^1.1.2", + "@protobufjs/base64": "^1.1.2", + "@protobufjs/codegen": "^2.0.4", + "@protobufjs/eventemitter": "^1.1.0", + "@protobufjs/fetch": "^1.1.0", + "@protobufjs/float": "^1.0.2", + "@protobufjs/inquire": "^1.1.0", + "@protobufjs/path": "^1.1.2", + "@protobufjs/pool": "^1.1.0", + "@protobufjs/utf8": "^1.1.0", + "@types/node": ">=13.7.0", + "long": "^5.0.0" + }, + "engines": { + "node": ">=12.0.0" + } + }, "services/clsi/node_modules/sinon": { "version": "9.0.3", "resolved": "https://registry.npmjs.org/sinon/-/sinon-9.0.3.tgz", @@ -42191,6 +42186,23 @@ "url": "https://opencollective.com/sinon" } }, + "services/clsi/node_modules/ssh2": { + "version": "1.16.0", + "resolved": "https://registry.npmjs.org/ssh2/-/ssh2-1.16.0.tgz", + "integrity": "sha512-r1X4KsBGedJqo7h8F5c4Ybpcr5RjyP+aWIG007uBPRjmdQWfEiVLzSK71Zji1B9sKxwaCvD8y8cwSkYrlLiRRg==", + "hasInstallScript": true, + "dependencies": { + "asn1": "^0.2.6", + "bcrypt-pbkdf": "^1.0.2" + }, + "engines": { + "node": ">=10.16.0" + }, + "optionalDependencies": { + "cpu-features": "~0.0.10", + "nan": "^2.20.0" + } + }, "services/clsi/node_modules/supports-color": { "version": "7.2.0", "resolved": "https://registry.npmjs.org/supports-color/-/supports-color-7.2.0.tgz", @@ -42203,6 +42215,31 @@ "node": ">=8" } }, + "services/clsi/node_modules/tar-fs": { + "version": "2.1.2", + "resolved": "https://registry.npmjs.org/tar-fs/-/tar-fs-2.1.2.tgz", + "integrity": "sha512-EsaAXwxmx8UB7FRKqeozqEPop69DXcmYwTQwXvyAPF352HJsPdkVhvTaDPYqfNgruveJIJy3TA2l+2zj8LJIJA==", + "license": "MIT", + "dependencies": { + "chownr": "^1.1.1", + "mkdirp-classic": "^0.5.2", + "pump": "^3.0.0", + "tar-stream": "^2.1.4" + } + }, + "services/clsi/node_modules/uuid": { + "version": "10.0.0", + "resolved": "https://registry.npmjs.org/uuid/-/uuid-10.0.0.tgz", + "integrity": 
"sha512-8XkAphELsDnEGrDxUOHB3RGvXz6TeuYSGEZBOjtTtPm2lwhGBjLgOzLHB63IUWfBpNucQjND6d3AOudO+H3RWQ==", + "funding": [ + "https://github.com/sponsors/broofa", + "https://github.com/sponsors/ctavan" + ], + "license": "MIT", + "bin": { + "uuid": "dist/bin/uuid" + } + }, "services/contacts": { "name": "@overleaf/contacts", "dependencies": { @@ -42861,7 +42898,7 @@ "@overleaf/o-error": "*", "aws-sdk": "^2.1174.0", "body-parser": "^1.20.3", - "bootstrap": "^4.3.1", + "bootstrap": "^5.3.3", "compression": "^1.7.1", "cookie-parser": "^1.4.6", "cross-env": "^4.0.0", @@ -42876,7 +42913,6 @@ "method-override": "^2.3.10", "prop-types": "^15.8.1", "react": "^17.0.2", - "react-bootstrap": "^0.33.1", "react-cookie": "^7.2.0", "react-dom": "^17.0.2", "react-dropzone": "^14.2.3", @@ -43117,9 +43153,9 @@ } }, "services/latexqc/node_modules/bootstrap": { - "version": "4.6.2", - "resolved": "https://registry.npmjs.org/bootstrap/-/bootstrap-4.6.2.tgz", - "integrity": "sha512-51Bbp/Uxr9aTuy6ca/8FbFloBUJZLHwnhTcnjIeRn2suQWsWzcuJhGjKDB5eppVte/8oCdOL3VuwxvZDUggwGQ==", + "version": "5.3.3", + "resolved": "https://registry.npmjs.org/bootstrap/-/bootstrap-5.3.3.tgz", + "integrity": "sha512-8HLCdWgyoMguSO9o+aH+iuZ+aht+mzW0u3HIMzVu7Srrpv7EBBxTnrFlSCskwdY1+EOFQSm7uMJhNQHkdPcmjg==", "funding": [ { "type": "github", @@ -43130,9 +43166,9 @@ "url": "https://opencollective.com/bootstrap" } ], + "license": "MIT", "peerDependencies": { - "jquery": "1.9.1 - 3", - "popper.js": "^1.16.1" + "@popperjs/core": "^2.11.8" } }, "services/latexqc/node_modules/chai": { @@ -43846,6 +43882,7 @@ "minimist": "^1.2.8", "mongodb-legacy": "6.1.3", "overleaf-editor-core": "*", + "p-queue": "^8.1.0", "request": "^2.88.2" }, "devDependencies": { @@ -44292,6 +44329,8 @@ "@overleaf/settings": "*", "@phosphor-icons/react": "^2.1.7", "@slack/webhook": "^7.0.2", + "@stripe/react-stripe-js": "^3.1.1", + "@stripe/stripe-js": "^5.6.0", "@xmldom/xmldom": "^0.7.13", "accepts": "^1.3.7", "ajv": "^8.12.0", @@ -44325,6 +44364,7 @@ "express-session": "^1.17.1", "globby": "^5.0.0", "helmet": "^6.0.1", + "https-proxy-agent": "^7.0.6", "i18next": "^23.10.0", "i18next-fs-backend": "^2.3.1", "i18next-http-middleware": "^3.5.0", @@ -44367,13 +44407,14 @@ "request": "^2.88.2", "requestretry": "^7.1.0", "sanitize-html": "^2.8.1", + "stripe": "^17.7.0", "tough-cookie": "^4.0.0", "tsscmp": "^1.0.6", "uid-safe": "^2.1.5", "utf-8-validate": "^5.0.2", "valid-data-url": "^2.0.0", "valid-url": "^1.0.9", - "xml-crypto": "^2.1.2", + "xml-crypto": "^2.1.6", "xml2js": "^0.6.2", "xregexp": "^4.3.0", "yauzl": "^2.10.0" @@ -44399,17 +44440,6 @@ "@lezer/highlight": "^1.2.1", "@lezer/lr": "^1.4.2", "@lezer/markdown": "^1.3.2", - "@opentelemetry/api": "^1.4.1", - "@opentelemetry/auto-instrumentations-web": "^0.33.1", - "@opentelemetry/context-zone": "^1.15.2", - "@opentelemetry/exporter-trace-otlp-http": "^0.41.2", - "@opentelemetry/instrumentation": "^0.41.2", - "@opentelemetry/instrumentation-document-load": "^0.33.1", - "@opentelemetry/instrumentation-xml-http-request": "^0.41.2", - "@opentelemetry/resources": "^1.15.2", - "@opentelemetry/sdk-trace-base": "^1.15.2", - "@opentelemetry/sdk-trace-web": "^1.15.2", - "@opentelemetry/semantic-conventions": "^1.15.2", "@overleaf/codemirror-tree-view": "^0.1.3", "@overleaf/dictionaries": "https://github.com/overleaf/dictionaries/archive/refs/tags/v0.0.3.tar.gz", "@overleaf/ranges-tracker": "*", @@ -44422,16 +44452,16 @@ "@replit/codemirror-indentation-markers": "overleaf/codemirror-indentation-markers#78264032eb286bc47871569ae87bff5ca1c6c161", 
"@replit/codemirror-vim": "overleaf/codemirror-vim#1bef138382d948018f3f9b8a4d7a70ab61774e4b", "@sentry/browser": "7.46.0", - "@storybook/addon-a11y": "^8.4.7", - "@storybook/addon-essentials": "^8.4.7", - "@storybook/addon-interactions": "^8.4.7", - "@storybook/addon-links": "^8.4.7", + "@storybook/addon-a11y": "^8.6.4", + "@storybook/addon-essentials": "^8.6.4", + "@storybook/addon-interactions": "^8.6.4", + "@storybook/addon-links": "^8.6.4", "@storybook/addon-styling-webpack": "^1.0.1", - "@storybook/addon-webpack5-compiler-babel": "^3.0.3", - "@storybook/cli": "^8.4.7", - "@storybook/react": "^8.4.7", - "@storybook/react-webpack5": "^8.4.7", - "@storybook/theming": "^8.4.7", + "@storybook/addon-webpack5-compiler-babel": "^3.0.5", + "@storybook/cli": "^8.6.4", + "@storybook/react": "^8.6.4", + "@storybook/react-webpack5": "^8.6.4", + "@storybook/theming": "^8.6.4", "@testing-library/cypress": "^10.0.1", "@testing-library/dom": "^9.3.0", "@testing-library/react": "^12.1.5", @@ -44563,7 +44593,7 @@ "sinon": "^7.5.0", "sinon-chai": "^3.7.0", "sinon-mongoose": "^2.3.0", - "storybook": "^8.4.7", + "storybook": "^8.6.4", "stylelint-config-standard-scss": "^13.1.0", "terser-webpack-plugin": "^5.3.9", "thread-loader": "^4.0.2", @@ -44684,80 +44714,6 @@ "node": ">=12" } }, - "services/web/node_modules/@opentelemetry/auto-instrumentations-web": { - "version": "0.33.1", - "resolved": "https://registry.npmjs.org/@opentelemetry/auto-instrumentations-web/-/auto-instrumentations-web-0.33.1.tgz", - "integrity": "sha512-0Tz4cnFEa49Opm74TTHpwGuwk9geWoYBEQxOGoUYOe8A3qFaEnW9jl69AnyEPfODhfUHcunwbzOjge4r3j2nHg==", - "dev": true, - "dependencies": { - "@opentelemetry/instrumentation": "^0.41.2", - "@opentelemetry/instrumentation-document-load": "^0.33.1", - "@opentelemetry/instrumentation-fetch": "^0.41.2", - "@opentelemetry/instrumentation-user-interaction": "^0.33.1", - "@opentelemetry/instrumentation-xml-http-request": "^0.41.2" - }, - "engines": { - "node": ">=14" - }, - "peerDependencies": { - "@opentelemetry/api": "^1.3.0" - } - }, - "services/web/node_modules/@opentelemetry/instrumentation-document-load": { - "version": "0.33.1", - "resolved": "https://registry.npmjs.org/@opentelemetry/instrumentation-document-load/-/instrumentation-document-load-0.33.1.tgz", - "integrity": "sha512-FyLe5i85likVEp36ZewtM8jIZ8/7w55yz9tVoBBJHlTRCVZutff2EhVVCnGHx4etnMvuF+Es8CU2tTsnlCtl5g==", - "dev": true, - "dependencies": { - "@opentelemetry/core": "^1.8.0", - "@opentelemetry/instrumentation": "^0.41.2", - "@opentelemetry/sdk-trace-base": "^1.0.0", - "@opentelemetry/sdk-trace-web": "^1.15.0", - "@opentelemetry/semantic-conventions": "^1.0.0" - }, - "engines": { - "node": ">=14" - }, - "peerDependencies": { - "@opentelemetry/api": "^1.3.0" - } - }, - "services/web/node_modules/@opentelemetry/instrumentation-fetch": { - "version": "0.41.2", - "resolved": "https://registry.npmjs.org/@opentelemetry/instrumentation-fetch/-/instrumentation-fetch-0.41.2.tgz", - "integrity": "sha512-L4jx7kq0R5XWAf5YcekSQ3Zm/6PE/+p/6rZe4NdtC+gp9u1lrQ/Vr0lwexxubS1odghbUSFo6PXKrqc25c+2hA==", - "dev": true, - "dependencies": { - "@opentelemetry/core": "1.15.2", - "@opentelemetry/instrumentation": "0.41.2", - "@opentelemetry/sdk-trace-web": "1.15.2", - "@opentelemetry/semantic-conventions": "1.15.2" - }, - "engines": { - "node": ">=14" - }, - "peerDependencies": { - "@opentelemetry/api": "^1.0.0" - } - }, - "services/web/node_modules/@opentelemetry/instrumentation-user-interaction": { - "version": "0.33.1", - "resolved": 
"https://registry.npmjs.org/@opentelemetry/instrumentation-user-interaction/-/instrumentation-user-interaction-0.33.1.tgz", - "integrity": "sha512-2oun4gwWpqtCW+qydC51jqSSUZSOuNCBXOWinTuTmO6w/sd4DJaQ6kEboSx5gENT/56qnCM9jQZlHI+zKbL63w==", - "dev": true, - "dependencies": { - "@opentelemetry/core": "^1.8.0", - "@opentelemetry/instrumentation": "^0.41.2", - "@opentelemetry/sdk-trace-web": "^1.8.0" - }, - "engines": { - "node": ">=14" - }, - "peerDependencies": { - "@opentelemetry/api": "^1.3.0", - "zone.js": "0.11.4" - } - }, "services/web/node_modules/@overleaf/dictionaries": { "version": "0.0.3", "resolved": "https://github.com/overleaf/dictionaries/archive/refs/tags/v0.0.3.tar.gz", @@ -45160,6 +45116,15 @@ "@uppy/core": "^3.8.0" } }, + "services/web/node_modules/agent-base": { + "version": "7.1.3", + "resolved": "https://registry.npmjs.org/agent-base/-/agent-base-7.1.3.tgz", + "integrity": "sha512-jRR5wdylq8CkOe6hei19GGZnxM6rBGwFl3Bg0YItGDimvjGtAvdZk4Pu6Cl4u4Igsws4a1fd1Vq3ezrhn4KmFw==", + "license": "MIT", + "engines": { + "node": ">= 14" + } + }, "services/web/node_modules/ajv": { "version": "8.16.0", "resolved": "https://registry.npmjs.org/ajv/-/ajv-8.16.0.tgz", @@ -45413,6 +45378,19 @@ "node": ">=14.0.0" } }, + "services/web/node_modules/https-proxy-agent": { + "version": "7.0.6", + "resolved": "https://registry.npmjs.org/https-proxy-agent/-/https-proxy-agent-7.0.6.tgz", + "integrity": "sha512-vK9P5/iUfdl95AI+JVyUuIcVtd4ofvtrOr3HNtM2yxC9bnMbEdp3x01OhQNnjb8IJYi38VlTE3mBXwcfvywuSw==", + "license": "MIT", + "dependencies": { + "agent-base": "^7.1.2", + "debug": "4" + }, + "engines": { + "node": ">= 14" + } + }, "services/web/node_modules/ieee754": { "version": "1.2.1", "resolved": "https://registry.npmjs.org/ieee754/-/ieee754-1.2.1.tgz", @@ -45817,6 +45795,31 @@ "node": ">=12" } }, + "services/web/node_modules/teeny-request/node_modules/agent-base": { + "version": "6.0.2", + "resolved": "https://registry.npmjs.org/agent-base/-/agent-base-6.0.2.tgz", + "integrity": "sha512-RZNwNclF7+MS/8bDg70amg32dyeZGZxiDuQmZxKLAlQjr3jGyLx+4Kkk58UO7D2QdgFIQCovuSuZESne6RG6XQ==", + "license": "MIT", + "dependencies": { + "debug": "4" + }, + "engines": { + "node": ">= 6.0.0" + } + }, + "services/web/node_modules/teeny-request/node_modules/https-proxy-agent": { + "version": "5.0.1", + "resolved": "https://registry.npmjs.org/https-proxy-agent/-/https-proxy-agent-5.0.1.tgz", + "integrity": "sha512-dFcAjpTQFgoLMzC2VwU+C/CbS7uRL0lWmxDITmqm7C+7F0Odmj6s9l6alZc6AELXhrnggM2CeWSXHGOdX2YtwA==", + "license": "MIT", + "dependencies": { + "agent-base": "6", + "debug": "4" + }, + "engines": { + "node": ">= 6" + } + }, "services/web/node_modules/terser-webpack-plugin": { "version": "5.3.11", "resolved": "https://registry.npmjs.org/terser-webpack-plugin/-/terser-webpack-plugin-5.3.11.tgz", @@ -45873,10 +45876,63 @@ "loose-envify": "^1.0.0" } }, + "services/web/node_modules/xml-crypto": { + "version": "2.1.6", + "resolved": "https://registry.npmjs.org/xml-crypto/-/xml-crypto-2.1.6.tgz", + "integrity": "sha512-jjvpO8vHNV8QFhW5bMypP+k4BjBqHe/HrpIwpPcdUnUTIJakSIuN96o3Sdah4tKu2z64kM/JHEH8iEHGCc6Gyw==", + "license": "MIT", + "dependencies": { + "@xmldom/xmldom": "^0.7.9", + "xpath": "0.0.32" + }, + "engines": { + "node": ">=0.4.0" + } + }, + "services/web/node_modules/xpath": { + "version": "0.0.32", + "resolved": "https://registry.npmjs.org/xpath/-/xpath-0.0.32.tgz", + "integrity": "sha512-rxMJhSIoiO8vXcWvSifKqhvV96GjiD5wYb8/QHdoRyQvraTpp4IEv944nhGausZZ3u7dhQXteZuZbaqfpB7uYw==", + "license": "MIT", + "engines": { + "node": 
">=0.6.0" + } + }, "services/web/node_modules/yallist": { "version": "4.0.0", "resolved": "https://registry.npmjs.org/yallist/-/yallist-4.0.0.tgz", "integrity": "sha512-3wdGidZyq5PB084XLES5TpOSRA3wjXAlIWMhum2kRcv/41Sn2emQ0dycQW4uZXLejwKvg6EsvbdlVL+FYEct7A==" + }, + "tools/saas-e2e": { + "name": "@overleaf/saas-e2e", + "devDependencies": { + "@isomorphic-git/lightning-fs": "^4.6.0", + "@testing-library/cypress": "^10.0.1", + "@types/adm-zip": "^0.5.5", + "@types/pdf-parse": "^1.1.4", + "@types/uuid": "^9.0.8", + "adm-zip": "^0.5.12", + "cypress": "13.13.2", + "isomorphic-git": "^1.25.10", + "mailtrap": "^3.4.0", + "pdf-parse": "^1.1.1", + "typescript": "^5.0.4", + "uuid": "^9.0.1" + } + }, + "tools/saas-e2e/node_modules/uuid": { + "version": "9.0.1", + "resolved": "https://registry.npmjs.org/uuid/-/uuid-9.0.1.tgz", + "integrity": "sha512-b+1eJOlsR9K8HJpow9Ok3fiWOWSIcIzXodvv0rQjVoOVNpWMpxf1wZNpt4y9h10odCNrqnYp1OBzRktckBe3sA==", + "dev": true, + "funding": [ + "https://github.com/sponsors/broofa", + "https://github.com/sponsors/ctavan" + ], + "license": "MIT", + "bin": { + "uuid": "dist/bin/uuid" + } } } } diff --git a/package.json b/package.json index a18f518f8e..ae25e19029 100644 --- a/package.json +++ b/package.json @@ -73,6 +73,7 @@ "services/third-party-datastore", "services/third-party-references", "services/tpdsworker", - "services/web" + "services/web", + "tools/saas-e2e" ] } diff --git a/server-ce/Dockerfile-base b/server-ce/Dockerfile-base index 6c25f69cf8..ca3c45cc1d 100644 --- a/server-ce/Dockerfile-base +++ b/server-ce/Dockerfile-base @@ -10,7 +10,7 @@ ENV TEXMFVAR=/var/lib/overleaf/tmp/texmf-var # Update to ensure dependencies are updated # ------------------------------------------ -ENV REBUILT_AFTER="2024-15-10" +ENV REBUILT_AFTER="2025-03-27" # Install dependencies # -------------------- diff --git a/server-ce/bin/shared b/server-ce/bin/shared new file mode 120000 index 0000000000..418b1bc599 --- /dev/null +++ b/server-ce/bin/shared @@ -0,0 +1 @@ +../../bin/shared/ \ No newline at end of file diff --git a/server-ce/hotfix/4.2.9/Dockerfile b/server-ce/hotfix/4.2.9/Dockerfile new file mode 100644 index 0000000000..43ca479826 --- /dev/null +++ b/server-ce/hotfix/4.2.9/Dockerfile @@ -0,0 +1 @@ +FROM sharelatex/sharelatex:4.2.8 diff --git a/server-ce/hotfix/5.3.3/Dockerfile b/server-ce/hotfix/5.3.3/Dockerfile new file mode 100644 index 0000000000..034eafbe76 --- /dev/null +++ b/server-ce/hotfix/5.3.3/Dockerfile @@ -0,0 +1 @@ +FROM sharelatex/sharelatex:5.3.2 diff --git a/server-ce/mongodb-init-replica-set.js b/server-ce/mongodb-init-replica-set.js deleted file mode 100644 index 8d993774c7..0000000000 --- a/server-ce/mongodb-init-replica-set.js +++ /dev/null @@ -1 +0,0 @@ -rs.initiate({ _id: "overleaf", members: [ { _id: 0, host: "mongo:27017" } ] }) diff --git a/server-ce/nginx/clsi-nginx.conf b/server-ce/nginx/clsi-nginx.conf index 94ce060706..aac976ecd8 100644 --- a/server-ce/nginx/clsi-nginx.conf +++ b/server-ce/nginx/clsi-nginx.conf @@ -30,7 +30,7 @@ server { application/pdf pdf; } # handle output files for specific users - location ~ ^/project/([0-9a-f]+)/user/([0-9a-f]+)/build/([0-9a-f-]+)/output/output\.([a-z]+)$ { + location ~ ^/project/([0-9a-f]+)/user/([0-9a-f]+)/build/([0-9a-f-]+)/output/output\.([a-z.]+)$ { alias /var/lib/overleaf/data/output/$1-$2/generated-files/$3/output.$4; } # handle .blg files for specific users @@ -38,7 +38,7 @@ server { alias /var/lib/overleaf/data/output/$1-$2/generated-files/$3/$4.blg; } # handle output files for anonymous users - location ~ 
^/project/([0-9a-f]+)/build/([0-9a-f-]+)/output/output\.([a-z]+)$ { + location ~ ^/project/([0-9a-f]+)/build/([0-9a-f-]+)/output/output\.([a-z.]+)$ { alias /var/lib/overleaf/data/output/$1/generated-files/$2/output.$3; } # handle .blg files for anonymous users diff --git a/server-ce/nginx/overleaf.conf b/server-ce/nginx/overleaf.conf index 78af603c1e..77e59df5a0 100644 --- a/server-ce/nginx/overleaf.conf +++ b/server-ce/nginx/overleaf.conf @@ -47,12 +47,12 @@ server { } # handle output files for specific users - location ~ ^/project/([0-9a-f]+)/user/([0-9a-f]+)/build/([0-9a-f-]+)/output/output\.([a-z]+)$ { + location ~ ^/project/([0-9a-f]+)/user/([0-9a-f]+)/build/([0-9a-f-]+)/output/output\.([a-z.]+)$ { proxy_pass http://127.0.0.1:8080; # clsi-nginx.conf proxy_http_version 1.1; } # handle output files for anonymous users - location ~ ^/project/([0-9a-f]+)/build/([0-9a-f-]+)/output/output\.([a-z]+)$ { + location ~ ^/project/([0-9a-f]+)/build/([0-9a-f-]+)/output/output\.([a-z.]+)$ { proxy_pass http://127.0.0.1:8080; # clsi-nginx.conf proxy_http_version 1.1; } diff --git a/server-ce/server-ce b/server-ce/server-ce deleted file mode 120000 index 945c9b46d6..0000000000 --- a/server-ce/server-ce +++ /dev/null @@ -1 +0,0 @@ -. \ No newline at end of file diff --git a/server-ce/test/create-and-compile-project.spec.ts b/server-ce/test/create-and-compile-project.spec.ts index afee2072b5..20f8f0dd6d 100644 --- a/server-ce/test/create-and-compile-project.spec.ts +++ b/server-ce/test/create-and-compile-project.spec.ts @@ -95,7 +95,9 @@ describe('Project creation and compilation', function () { cy.findByText('Share').click() cy.findByRole('dialog').within(() => { - cy.get('input').type('collaborator@example.com,') + cy.findByTestId('collaborator-email-input').type( + 'collaborator@example.com,' + ) cy.findByText('Invite').click({ force: true }) cy.findByText('Invite not yet accepted.') }) diff --git a/server-ce/test/docker-compose.yml b/server-ce/test/docker-compose.yml index 0e5d520fae..43f494a084 100644 --- a/server-ce/test/docker-compose.yml +++ b/server-ce/test/docker-compose.yml @@ -38,7 +38,7 @@ services: image: mongo:6.0 command: '--replSet overleaf' volumes: - - ../mongodb-init-replica-set.js:/docker-entrypoint-initdb.d/mongodb-init-replica-set.js + - ../bin/shared/mongodb-init-replica-set.js:/docker-entrypoint-initdb.d/mongodb-init-replica-set.js environment: MONGO_INITDB_DATABASE: sharelatex extra_hosts: diff --git a/server-ce/test/editor.spec.ts b/server-ce/test/editor.spec.ts index d12a55427e..648c55a907 100644 --- a/server-ce/test/editor.spec.ts +++ b/server-ce/test/editor.spec.ts @@ -149,10 +149,10 @@ describe('editor', () => { openFile(fileName, 'static') cy.log('reject changes') - cy.findByText('Review').click() + cy.contains('.toolbar-item', 'Review').click() cy.get('.cm-content').should('not.contain.text', oldContent) - cy.findByText('Reject').click({ force: true }) - cy.findByText('Review').click() + cy.findByText('Reject change').click({ force: true }) + cy.contains('.toolbar-item', 'Review').click() cy.log('recompile to force flush') recompile() @@ -205,10 +205,10 @@ describe('editor', () => { openFile(fileName, 'static') cy.log('reject changes') - cy.findByText('Review').click() + cy.contains('.toolbar-item', 'Review').click() cy.get('.cm-content').should('not.contain.text', oldContent) - cy.findAllByText('Reject').first().click({ force: true }) - cy.findByText('Review').click() + cy.findAllByText('Reject change').first().click({ force: true }) + cy.contains('.toolbar-item', 
'Review').click() cy.log('recompile to force flush') recompile() diff --git a/server-ce/test/external-auth.spec.ts b/server-ce/test/external-auth.spec.ts index a285f97034..f26947e8a8 100644 --- a/server-ce/test/external-auth.spec.ts +++ b/server-ce/test/external-auth.spec.ts @@ -59,7 +59,7 @@ describe('LDAP', () => { it('login', () => { cy.visit('/') - cy.findByText('Login LDAP') + cy.findByText('Log in LDAP') cy.get('input[name="login"]').type('fry') cy.get('input[name="password"]').type('fry') diff --git a/server-ce/test/git-bridge.spec.ts b/server-ce/test/git-bridge.spec.ts index 010b8ccf74..071091bdfd 100644 --- a/server-ce/test/git-bridge.spec.ts +++ b/server-ce/test/git-bridge.spec.ts @@ -136,7 +136,7 @@ describe('git-bridge', function () { shareProjectByEmailAndAcceptInviteViaDash( projectName, 'collaborator-rw@example.com', - 'Can edit' + 'Editor' ) maybeClearAllTokens() openProjectByName(projectName) @@ -147,7 +147,7 @@ describe('git-bridge', function () { shareProjectByEmailAndAcceptInviteViaDash( projectName, 'collaborator-ro@example.com', - 'Can view' + 'Viewer' ) maybeClearAllTokens() openProjectByName(projectName) diff --git a/server-ce/test/helpers/compile.ts b/server-ce/test/helpers/compile.ts index 066fc4f9d3..9f0c9e4150 100644 --- a/server-ce/test/helpers/compile.ts +++ b/server-ce/test/helpers/compile.ts @@ -24,7 +24,7 @@ export function prepareWaitForNextCompileSlot() { queueReset() triggerCompile() cy.log('Wait for compile to finish') - cy.findByText('Recompile') + cy.findByText('Recompile').should('be.visible') }) } function recompile() { diff --git a/server-ce/test/helpers/project.ts b/server-ce/test/helpers/project.ts index 15f801634f..662327d6f2 100644 --- a/server-ce/test/helpers/project.ts +++ b/server-ce/test/helpers/project.ts @@ -100,7 +100,7 @@ export function openProjectViaInviteNotification(projectName: string) { function shareProjectByEmail( projectName: string, email: string, - level: 'Can view' | 'Can edit' + level: 'Viewer' | 'Editor' ) { openProjectByName(projectName) cy.findByText('Share').click() @@ -108,7 +108,13 @@ function shareProjectByEmail( cy.findByLabelText('Add people', { selector: 'input' }).type(`${email},`) cy.findByLabelText('Add people', { selector: 'input' }) .parents('form') - .within(() => cy.findByText('Can edit').parent().select(level)) + .within(() => { + cy.findByTestId('add-collaborator-select') + .click() + .then(() => { + cy.findByText(level).click() + }) + }) cy.findByText('Invite').click({ force: true }) cy.findByText('Invite not yet accepted.') }) @@ -117,7 +123,7 @@ function shareProjectByEmail( export function shareProjectByEmailAndAcceptInviteViaDash( projectName: string, email: string, - level: 'Can view' | 'Can edit' + level: 'Viewer' | 'Editor' ) { shareProjectByEmail(projectName, email, level) @@ -128,7 +134,7 @@ export function shareProjectByEmailAndAcceptInviteViaDash( export function shareProjectByEmailAndAcceptInviteViaEmail( projectName: string, email: string, - level: 'Can view' | 'Can edit' + level: 'Viewer' | 'Editor' ) { shareProjectByEmail(projectName, email, level) @@ -212,11 +218,11 @@ export function createNewFile() { export function toggleTrackChanges(state: boolean) { cy.findByText('Review').click() - cy.get('.rp-tc-state-collapse').then(el => { - // TODO: simplify this in the frontend? 
- if (el.hasClass('rp-tc-state-collapse-on')) { - // make track-changes switches visible - cy.get('.rp-tc-state-collapse').click() + cy.get('.track-changes-menu-button').then(el => { + // when the menu is expanded renders the `expand_more` icon, + // and the `chevron_right` icon when it's collapsed + if (!el.text().includes('expand_more')) { + el.click() } }) @@ -241,5 +247,5 @@ export function toggleTrackChanges(state: boolean) { cy.wait(alias) }) }) - cy.findByText('Review').click() + cy.contains('.toolbar-item', 'Review').click() } diff --git a/server-ce/test/project-sharing.spec.ts b/server-ce/test/project-sharing.spec.ts index 44887f2fd2..e26439264b 100644 --- a/server-ce/test/project-sharing.spec.ts +++ b/server-ce/test/project-sharing.spec.ts @@ -154,7 +154,7 @@ describe('Project Sharing', function () { beforeEach(function () { login('user@example.com') - shareProjectByEmailAndAcceptInviteViaEmail(projectName, email, 'Can view') + shareProjectByEmailAndAcceptInviteViaEmail(projectName, email, 'Viewer') }) it('should grant the collaborator read access', () => { @@ -169,7 +169,7 @@ describe('Project Sharing', function () { beforeWithReRunOnTestRetry(function () { login('user@example.com') - shareProjectByEmailAndAcceptInviteViaDash(projectName, email, 'Can view') + shareProjectByEmailAndAcceptInviteViaDash(projectName, email, 'Viewer') }) it('should grant the collaborator read access', () => { @@ -186,7 +186,7 @@ describe('Project Sharing', function () { beforeWithReRunOnTestRetry(function () { login('user@example.com') - shareProjectByEmailAndAcceptInviteViaDash(projectName, email, 'Can edit') + shareProjectByEmailAndAcceptInviteViaDash(projectName, email, 'Editor') }) it('should grant the collaborator write access', () => { diff --git a/server-ce/test/sandboxed-compiles.spec.ts b/server-ce/test/sandboxed-compiles.spec.ts index f95caa503c..505f8cffd2 100644 --- a/server-ce/test/sandboxed-compiles.spec.ts +++ b/server-ce/test/sandboxed-compiles.spec.ts @@ -204,9 +204,9 @@ describe('SandboxedCompiles', function () { cy.log('wait for compile') cy.get('.pdf-viewer').should('contain.text', 'sandboxed') - cy.log('Check which compiler version was used, expect 2024') + cy.log('Check which compiler version was used, expect 2025') cy.get('[aria-label="View logs"]').click() - cy.findByText(/This is pdfTeX, Version .+ \(TeX Live 2024\) /) + cy.findByText(/This is pdfTeX, Version .+ \(TeX Live 2025\) /) cy.log('Check that there is no TeX Live version toggle') cy.get('header').findByText('Menu').click() diff --git a/services/chat/Makefile b/services/chat/Makefile index 0eb3adea63..94f0afb567 100644 --- a/services/chat/Makefile +++ b/services/chat/Makefile @@ -116,13 +116,6 @@ test_acceptance_clean: $(DOCKER_COMPOSE_TEST_ACCEPTANCE) down -v -t 0 test_acceptance_pre_run: - $(DOCKER_COMPOSE_TEST_ACCEPTANCE) up -d mongo - $(DOCKER_COMPOSE_TEST_ACCEPTANCE) exec -T mongo sh -c ' \ - while ! 
mongosh --eval "db.version()" > /dev/null; do \ - echo "Waiting for Mongo..."; \ - sleep 1; \ - done; \ - mongosh --eval "rs.initiate({ _id: \"overleaf\", members: [ { _id: 0, host: \"mongo:27017\" } ] })"' ifneq (,$(wildcard test/acceptance/js/scripts/pre-run)) $(DOCKER_COMPOSE_TEST_ACCEPTANCE) run --rm test_acceptance test/acceptance/js/scripts/pre-run endif diff --git a/services/chat/docker-compose.ci.yml b/services/chat/docker-compose.ci.yml index 6f1a608534..51eb64d126 100644 --- a/services/chat/docker-compose.ci.yml +++ b/services/chat/docker-compose.ci.yml @@ -26,7 +26,7 @@ services: NODE_OPTIONS: "--unhandled-rejections=strict" depends_on: mongo: - condition: service_healthy + condition: service_started user: node command: npm run test:acceptance @@ -41,7 +41,12 @@ services: mongo: image: mongo:6.0.13 command: --replSet overleaf - healthcheck: - test: "mongosh --quiet localhost/test --eval 'quit(db.runCommand({ ping: 1 }).ok ? 0 : 1)'" - interval: 1s - retries: 20 + volumes: + - ../../bin/shared/mongodb-init-replica-set.js:/docker-entrypoint-initdb.d/mongodb-init-replica-set.js + environment: + MONGO_INITDB_DATABASE: sharelatex + extra_hosts: + # Required when using the automatic database setup for initializing the + # replica set. This override is not needed when running the setup after + # starting up mongo. + - mongo:127.0.0.1 diff --git a/services/chat/docker-compose.yml b/services/chat/docker-compose.yml index 8fec13cbb2..b830d25453 100644 --- a/services/chat/docker-compose.yml +++ b/services/chat/docker-compose.yml @@ -38,14 +38,19 @@ services: user: node depends_on: mongo: - condition: service_healthy + condition: service_started command: npm run --silent test:acceptance mongo: image: mongo:6.0.13 command: --replSet overleaf - healthcheck: - test: "mongosh --quiet localhost/test --eval 'quit(db.runCommand({ ping: 1 }).ok ? 0 : 1)'" - interval: 1s - retries: 20 + volumes: + - ../../bin/shared/mongodb-init-replica-set.js:/docker-entrypoint-initdb.d/mongodb-init-replica-set.js + environment: + MONGO_INITDB_DATABASE: sharelatex + extra_hosts: + # Required when using the automatic database setup for initializing the + # replica set. This override is not needed when running the setup after + # starting up mongo. + - mongo:127.0.0.1 diff --git a/services/clsi/app.js b/services/clsi/app.js index f8db431f53..8715802a0e 100644 --- a/services/clsi/app.js +++ b/services/clsi/app.js @@ -309,6 +309,10 @@ const loadTcpServer = net.createServer(function (socket) { } else { // Ready will cancel the maint state. socket.write(`up, ready, ${Math.max(freeLoadPercentage, 1)}%\n`, 'ASCII') + if (freeLoadPercentage <= 0) { + // This metric records how often we would have gone into maintenance mode. 
+ Metrics.inc('clsi-prevented-maint') + } } socket.end() } else { diff --git a/services/clsi/app/js/OutputCacheManager.js b/services/clsi/app/js/OutputCacheManager.js index a7338c356c..1e9a10c921 100644 --- a/services/clsi/app/js/OutputCacheManager.js +++ b/services/clsi/app/js/OutputCacheManager.js @@ -98,12 +98,11 @@ module.exports = OutputCacheManager = { CONTENT_SUBDIR: 'content', CACHE_SUBDIR: 'generated-files', ARCHIVE_SUBDIR: 'archived-logs', - // build id is HEXDATE-HEXRANDOM from Date.now()and RandomBytes - // for backwards compatibility, make the randombytes part optional - BUILD_REGEX: /^[0-9a-f]+(-[0-9a-f]+)?$/, - CONTENT_REGEX: /^[0-9a-f]+(-[0-9a-f]+)?$/, + // build id is HEXDATE-HEXRANDOM from Date.now() and RandomBytes + BUILD_REGEX: /^[0-9a-f]+-[0-9a-f]+$/, + CONTENT_REGEX: /^[0-9a-f]+-[0-9a-f]+$/, CACHE_LIMIT: 2, // maximum number of cache directories - CACHE_AGE: 60 * 60 * 1000, // up to one hour old + CACHE_AGE: 90 * 60 * 1000, // up to 90 minutes old init, queueDirOperation: callbackify(queueDirOperation), @@ -137,7 +136,11 @@ module.exports = OutputCacheManager = { outputDir, callback ) { - OutputCacheManager.generateBuildId(function (err, buildId) { + const getBuildId = cb => { + if (request.buildId) return cb(null, request.buildId) + OutputCacheManager.generateBuildId(cb) + } + getBuildId(function (err, buildId) { if (err) { return callback(err) } diff --git a/services/clsi/app/js/OutputFileArchiveManager.js b/services/clsi/app/js/OutputFileArchiveManager.js index 3c5a6c8197..a64d634e12 100644 --- a/services/clsi/app/js/OutputFileArchiveManager.js +++ b/services/clsi/app/js/OutputFileArchiveManager.js @@ -7,7 +7,7 @@ const { NotFoundError } = require('./Errors') const logger = require('@overleaf/logger') // NOTE: Updating this list requires a corresponding change in -// * services/web/frontend/js/features/pdf-preview/util/file-list.js +// * services/web/frontend/js/features/pdf-preview/util/file-list.ts const ignoreFiles = ['output.fls', 'output.fdb_latexmk'] function getContentDir(projectId, userId) { diff --git a/services/clsi/app/js/ProjectPersistenceManager.js b/services/clsi/app/js/ProjectPersistenceManager.js index 66c6be5108..7d4f071d2c 100644 --- a/services/clsi/app/js/ProjectPersistenceManager.js +++ b/services/clsi/app/js/ProjectPersistenceManager.js @@ -13,6 +13,7 @@ const CompileManager = require('./CompileManager') const async = require('async') const logger = require('@overleaf/logger') const oneDay = 24 * 60 * 60 * 1000 +const Metrics = require('@overleaf/metrics') const Settings = require('@overleaf/settings') const diskusage = require('diskusage') const { callbackify } = require('node:util') @@ -22,33 +23,48 @@ const fs = require('node:fs') // projectId -> timestamp mapping. 
const LAST_ACCESS = new Map() -async function refreshExpiryTimeout() { +async function collectDiskStats() { const paths = [ Settings.path.compilesDir, Settings.path.outputDir, Settings.path.clsiCacheDir, ] + + const diskStats = {} for (const path of paths) { try { const stats = await diskusage.check(path) - const lowDisk = stats.available / stats.total < 0.1 - - const lowerExpiry = ProjectPersistenceManager.EXPIRY_TIMEOUT * 0.9 - if (lowDisk && Settings.project_cache_length_ms / 2 < lowerExpiry) { - logger.warn( - { - stats, - newExpiryTimeoutInDays: (lowerExpiry / oneDay).toFixed(2), - }, - 'disk running low on space, modifying EXPIRY_TIMEOUT' - ) - ProjectPersistenceManager.EXPIRY_TIMEOUT = lowerExpiry - break - } + const diskAvailablePercent = (stats.available / stats.total) * 100 + Metrics.gauge('disk_available_percent', diskAvailablePercent, 1, { + path, + }) + const lowDisk = diskAvailablePercent < 10 + diskStats[path] = { stats, lowDisk } } catch (err) { logger.err({ err, path }, 'error getting disk usage') } } + return diskStats +} + +async function refreshExpiryTimeout() { + for (const [path, { stats, lowDisk }] of Object.entries( + await collectDiskStats() + )) { + const lowerExpiry = ProjectPersistenceManager.EXPIRY_TIMEOUT * 0.9 + if (lowDisk && Settings.project_cache_length_ms / 2 < lowerExpiry) { + logger.warn( + { + path, + stats, + newExpiryTimeoutInDays: (lowerExpiry / oneDay).toFixed(2), + }, + 'disk running low on space, modifying EXPIRY_TIMEOUT' + ) + ProjectPersistenceManager.EXPIRY_TIMEOUT = lowerExpiry + break + } + } } module.exports = ProjectPersistenceManager = { @@ -103,6 +119,13 @@ module.exports = ProjectPersistenceManager = { } ) }) + + // Collect disk stats frequently to have them ready the next time /metrics is scraped (60s +- jitter). 
+ setInterval(() => { + collectDiskStats().catch(err => { + logger.err({ err }, 'low level error collecting disk stats') + }) + }, 50_000) }, markProjectAsJustAccessed(projectId, callback) { diff --git a/services/clsi/app/js/RequestParser.js b/services/clsi/app/js/RequestParser.js index 28e182ea44..f5c07d3bcf 100644 --- a/services/clsi/app/js/RequestParser.js +++ b/services/clsi/app/js/RequestParser.js @@ -1,4 +1,5 @@ const settings = require('@overleaf/settings') +const OutputCacheManager = require('./OutputCacheManager') const VALID_COMPILERS = ['pdflatex', 'latex', 'xelatex', 'lualatex'] const MAX_TIMEOUT = 600 @@ -135,6 +136,11 @@ function parse(body, callback) { } ) response.rootResourcePath = _checkPath(rootResourcePath) + + response.buildId = _parseAttribute('buildId', compile.options.buildId, { + type: 'string', + regex: OutputCacheManager.BUILD_REGEX, + }) } catch (error1) { const error = error1 return callback(error) @@ -199,6 +205,13 @@ function _parseAttribute(name, attribute, options) { throw new Error(`${name} attribute should be a ${options.type}`) } } + if (options.type === 'string' && options.regex instanceof RegExp) { + if (!options.regex.test(attribute)) { + throw new Error( + `${name} attribute does not match regex ${options.regex}` + ) + } + } } else { if (options.default != null) { return options.default diff --git a/services/clsi/app/js/ResourceWriter.js b/services/clsi/app/js/ResourceWriter.js index 6fa6f85e1f..1db1c2baac 100644 --- a/services/clsi/app/js/ResourceWriter.js +++ b/services/clsi/app/js/ResourceWriter.js @@ -200,73 +200,22 @@ module.exports = ResourceWriter = { return OutputFileFinder.findOutputFiles( resources, basePath, - function (error, outputFiles, allFiles) { + (error, outputFiles, allFiles) => { if (error != null) { return callback(error) } const jobs = [] - for (const file of Array.from(outputFiles || [])) { - ;(function (file) { - const { path } = file - let shouldDelete = true - if ( - path.match(/^output\./) || - path.match(/\.aux$/) || - path.match(/^cache\//) - ) { - // knitr cache - shouldDelete = false - } - if (path.match(/^output-.*/)) { - // Tikz cached figures (default case) - shouldDelete = false - } - if (path.match(/\.(pdf|dpth|md5)$/)) { - // Tikz cached figures (by extension) - shouldDelete = false - } - if ( - path.match(/\.(pygtex|pygstyle)$/) || - path.match(/(^|\/)_minted-[^\/]+\//) - ) { - // minted files/directory - shouldDelete = false - } - if ( - path.match(/\.md\.tex$/) || - path.match(/(^|\/)_markdown_[^\/]+\//) - ) { - // markdown files/directory - shouldDelete = false - } - if (path.match(/-eps-converted-to\.pdf$/)) { - // Epstopdf generated files - shouldDelete = false - } - if ( - path === 'output.pdf' || - path === 'output.dvi' || - path === 'output.log' || - path === 'output.xdv' || - path === 'output.stdout' || - path === 'output.stderr' - ) { - shouldDelete = true - } - if (path === 'output.tex') { - // created by TikzManager if present in output files - shouldDelete = true - } - if (shouldDelete) { - return jobs.push(callback => - ResourceWriter._deleteFileIfNotDirectory( - Path.join(basePath, path), - callback - ) + for (const { path } of outputFiles || []) { + const shouldDelete = ResourceWriter.isExtraneousFile(path) + if (shouldDelete) { + jobs.push(callback => + ResourceWriter._deleteFileIfNotDirectory( + Path.join(basePath, path), + callback ) - } - })(file) + ) + } } return async.series(jobs, function (error) { @@ -279,6 +228,58 @@ module.exports = ResourceWriter = { ) }, + isExtraneousFile(path) { + 
let shouldDelete = true + if ( + path.match(/^output\./) || + path.match(/\.aux$/) || + path.match(/^cache\//) + ) { + // knitr cache + shouldDelete = false + } + if (path.match(/^output-.*/)) { + // Tikz cached figures (default case) + shouldDelete = false + } + if (path.match(/\.(pdf|dpth|md5)$/)) { + // Tikz cached figures (by extension) + shouldDelete = false + } + if ( + path.match(/\.(pygtex|pygstyle)$/) || + path.match(/(^|\/)_minted-[^\/]+\//) + ) { + // minted files/directory + shouldDelete = false + } + if (path.match(/\.md\.tex$/) || path.match(/(^|\/)_markdown_[^\/]+\//)) { + // markdown files/directory + shouldDelete = false + } + if (path.match(/-eps-converted-to\.pdf$/)) { + // Epstopdf generated files + shouldDelete = false + } + if ( + path === 'output.synctex.gz' || + path === 'output.pdfxref' || + path === 'output.pdf' || + path === 'output.dvi' || + path === 'output.log' || + path === 'output.xdv' || + path === 'output.stdout' || + path === 'output.stderr' + ) { + shouldDelete = true + } + if (path === 'output.tex') { + // created by TikzManager if present in output files + shouldDelete = true + } + return shouldDelete + }, + _deleteFileIfNotDirectory(path, callback) { if (callback == null) { callback = function () {} diff --git a/services/clsi/nginx.conf b/services/clsi/nginx.conf index 2290aeb444..604eb93fbf 100644 --- a/services/clsi/nginx.conf +++ b/services/clsi/nginx.conf @@ -46,7 +46,7 @@ server { } # handle output files for specific users - location ~ ^/project/([0-9a-f]+)/user/([0-9a-f]+)/build/([0-9a-f-]+)/output/output\.([a-z]+)$ { + location ~ ^/project/([0-9a-f]+)/user/([0-9a-f]+)/build/([0-9a-f-]+)/output/output\.([a-z.]+)$ { if ($request_method = 'OPTIONS') { # handle OPTIONS method for CORS requests add_header 'Allow' 'GET,HEAD'; @@ -64,7 +64,7 @@ server { alias /output/$1-$2/generated-files/$3/$4.blg; } # handle output files for anonymous users - location ~ ^/project/([0-9a-f]+)/build/([0-9a-f-]+)/output/output\.([a-z]+)$ { + location ~ ^/project/([0-9a-f]+)/build/([0-9a-f-]+)/output/output\.([a-z.]+)$ { if ($request_method = 'OPTIONS') { # handle OPTIONS method for CORS requests add_header 'Allow' 'GET,HEAD'; diff --git a/services/clsi/package.json b/services/clsi/package.json index 5df3f0e8b9..3f05ab543d 100644 --- a/services/clsi/package.json +++ b/services/clsi/package.json @@ -28,7 +28,7 @@ "body-parser": "^1.20.3", "bunyan": "^1.8.15", "diskusage": "^1.1.3", - "dockerode": "^3.1.0", + "dockerode": "^4.0.5", "express": "^4.21.2", "lodash": "^4.17.21", "p-limit": "^3.1.0", diff --git a/services/clsi/test/acceptance/js/BrokenLatexFileTests.js b/services/clsi/test/acceptance/js/BrokenLatexFileTests.js index 07c690738d..d22c142cff 100644 --- a/services/clsi/test/acceptance/js/BrokenLatexFileTests.js +++ b/services/clsi/test/acceptance/js/BrokenLatexFileTests.js @@ -107,7 +107,6 @@ Hello world 'output.fdb_latexmk', 'output.fls', 'output.log', - 'output.pdfxref', 'output.stderr', 'output.stdout', ]) diff --git a/services/clsi/test/unit/js/DockerLockManagerTests.js b/services/clsi/test/unit/js/DockerLockManagerTests.js index c494a85c66..f69179443c 100644 --- a/services/clsi/test/unit/js/DockerLockManagerTests.js +++ b/services/clsi/test/unit/js/DockerLockManagerTests.js @@ -16,7 +16,7 @@ const modulePath = require('node:path').join( '../../../app/js/DockerLockManager' ) -describe('LockManager', function () { +describe('DockerLockManager', function () { beforeEach(function () { return (this.LockManager = SandboxedModule.require(modulePath, { requires: { 
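Extracting the delete/keep rules into `ResourceWriter.isExtraneousFile(path)` makes them testable in isolation and keeps the `findOutputFiles` callback short. A usage sketch of the predicate, with made-up file paths; it assumes it is run from within `services/clsi`.

```js
// Illustrative use of the extracted predicate; the paths are made up.
// Assumes the working directory is services/clsi.
const ResourceWriter = require('./app/js/ResourceWriter')

const outputFiles = [
  { path: 'output.pdf' },                  // always deleted between compiles
  { path: 'output.synctex.gz' },           // now explicitly deleted too
  { path: 'cache/chunk-1.rdb' },           // knitr cache - kept
  { path: '_minted-main/xyz.pygtex' },     // minted cache - kept
  { path: 'figure-eps-converted-to.pdf' }, // epstopdf output - kept
]

const toDelete = outputFiles
  .map(f => f.path)
  .filter(path => ResourceWriter.isExtraneousFile(path))

console.log(toDelete) // [ 'output.pdf', 'output.synctex.gz' ]
```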
diff --git a/services/clsi/test/unit/js/LockManagerTests.js b/services/clsi/test/unit/js/LockManagerTests.js index 64238ea0c6..7005b3e5a3 100644 --- a/services/clsi/test/unit/js/LockManagerTests.js +++ b/services/clsi/test/unit/js/LockManagerTests.js @@ -21,6 +21,7 @@ describe('LockManager', function () { compileConcurrencyLimit: 5, }), './Errors': (this.Erros = Errors), + './RequestParser': { MAX_TIMEOUT: 600 }, }, }) }) diff --git a/services/clsi/test/unit/js/ProjectPersistenceManagerTests.js b/services/clsi/test/unit/js/ProjectPersistenceManagerTests.js index b36b9245e4..2504d266ca 100644 --- a/services/clsi/test/unit/js/ProjectPersistenceManagerTests.js +++ b/services/clsi/test/unit/js/ProjectPersistenceManagerTests.js @@ -23,6 +23,7 @@ describe('ProjectPersistenceManager', function () { beforeEach(function () { this.ProjectPersistenceManager = SandboxedModule.require(modulePath, { requires: { + '@overleaf/metrics': (this.Metrics = { gauge: sinon.stub() }), './UrlCache': (this.UrlCache = {}), './CompileManager': (this.CompileManager = {}), diskusage: (this.diskusage = { check: sinon.stub() }), @@ -49,6 +50,10 @@ describe('ProjectPersistenceManager', function () { }) this.ProjectPersistenceManager.refreshExpiryTimeout(() => { + this.Metrics.gauge.should.have.been.calledWith( + 'disk_available_percent', + 40 + ) this.ProjectPersistenceManager.EXPIRY_TIMEOUT.should.equal( this.settings.project_cache_length_ms ) @@ -63,6 +68,10 @@ describe('ProjectPersistenceManager', function () { }) this.ProjectPersistenceManager.refreshExpiryTimeout(() => { + this.Metrics.gauge.should.have.been.calledWith( + 'disk_available_percent', + 5 + ) this.ProjectPersistenceManager.EXPIRY_TIMEOUT.should.equal(900) done() }) @@ -75,6 +84,10 @@ describe('ProjectPersistenceManager', function () { }) this.ProjectPersistenceManager.EXPIRY_TIMEOUT = 500 this.ProjectPersistenceManager.refreshExpiryTimeout(() => { + this.Metrics.gauge.should.have.been.calledWith( + 'disk_available_percent', + 5 + ) this.ProjectPersistenceManager.EXPIRY_TIMEOUT.should.equal(500) done() }) diff --git a/services/clsi/test/unit/js/RequestParserTests.js b/services/clsi/test/unit/js/RequestParserTests.js index 5fdae7ed3d..437c3c4fbe 100644 --- a/services/clsi/test/unit/js/RequestParserTests.js +++ b/services/clsi/test/unit/js/RequestParserTests.js @@ -30,6 +30,7 @@ describe('RequestParser', function () { this.RequestParser = SandboxedModule.require(modulePath, { requires: { '@overleaf/settings': (this.settings = {}), + './OutputCacheManager': { BUILD_REGEX: /^[0-9a-f]+-[0-9a-f]+$/ }, }, }) }) @@ -274,6 +275,37 @@ describe('RequestParser', function () { }) }) + describe('with a valid buildId', function () { + beforeEach(function (done) { + this.validRequest.compile.options.buildId = '195a4869176-a4ad60bee7bf35e4' + this.RequestParser.parse(this.validRequest, (error, data) => { + if (error) return done(error) + this.data = data + done() + }) + }) + + it('should return an error', function () { + this.data.buildId.should.equal('195a4869176-a4ad60bee7bf35e4') + }) + }) + + describe('with a bad buildId', function () { + beforeEach(function () { + this.validRequest.compile.options.buildId = 'foo/bar' + this.RequestParser.parse(this.validRequest, this.callback) + }) + + it('should return an error', function () { + this.callback + .calledWithMatch({ + message: + 'buildId attribute does not match regex /^[0-9a-f]+-[0-9a-f]+$/', + }) + .should.equal(true) + }) + }) + describe('with a resource with a valid date', function () { beforeEach(function () { 
this.date = '12:00 01/02/03' diff --git a/services/contacts/Makefile b/services/contacts/Makefile index 351070e0b7..97a348d219 100644 --- a/services/contacts/Makefile +++ b/services/contacts/Makefile @@ -116,13 +116,6 @@ test_acceptance_clean: $(DOCKER_COMPOSE_TEST_ACCEPTANCE) down -v -t 0 test_acceptance_pre_run: - $(DOCKER_COMPOSE_TEST_ACCEPTANCE) up -d mongo - $(DOCKER_COMPOSE_TEST_ACCEPTANCE) exec -T mongo sh -c ' \ - while ! mongosh --eval "db.version()" > /dev/null; do \ - echo "Waiting for Mongo..."; \ - sleep 1; \ - done; \ - mongosh --eval "rs.initiate({ _id: \"overleaf\", members: [ { _id: 0, host: \"mongo:27017\" } ] })"' ifneq (,$(wildcard test/acceptance/js/scripts/pre-run)) $(DOCKER_COMPOSE_TEST_ACCEPTANCE) run --rm test_acceptance test/acceptance/js/scripts/pre-run endif diff --git a/services/contacts/docker-compose.ci.yml b/services/contacts/docker-compose.ci.yml index 6f1a608534..51eb64d126 100644 --- a/services/contacts/docker-compose.ci.yml +++ b/services/contacts/docker-compose.ci.yml @@ -26,7 +26,7 @@ services: NODE_OPTIONS: "--unhandled-rejections=strict" depends_on: mongo: - condition: service_healthy + condition: service_started user: node command: npm run test:acceptance @@ -41,7 +41,12 @@ services: mongo: image: mongo:6.0.13 command: --replSet overleaf - healthcheck: - test: "mongosh --quiet localhost/test --eval 'quit(db.runCommand({ ping: 1 }).ok ? 0 : 1)'" - interval: 1s - retries: 20 + volumes: + - ../../bin/shared/mongodb-init-replica-set.js:/docker-entrypoint-initdb.d/mongodb-init-replica-set.js + environment: + MONGO_INITDB_DATABASE: sharelatex + extra_hosts: + # Required when using the automatic database setup for initializing the + # replica set. This override is not needed when running the setup after + # starting up mongo. + - mongo:127.0.0.1 diff --git a/services/contacts/docker-compose.yml b/services/contacts/docker-compose.yml index 8cdedbe1ef..310220bd20 100644 --- a/services/contacts/docker-compose.yml +++ b/services/contacts/docker-compose.yml @@ -38,14 +38,19 @@ services: user: node depends_on: mongo: - condition: service_healthy + condition: service_started command: npm run --silent test:acceptance mongo: image: mongo:6.0.13 command: --replSet overleaf - healthcheck: - test: "mongosh --quiet localhost/test --eval 'quit(db.runCommand({ ping: 1 }).ok ? 0 : 1)'" - interval: 1s - retries: 20 + volumes: + - ../../bin/shared/mongodb-init-replica-set.js:/docker-entrypoint-initdb.d/mongodb-init-replica-set.js + environment: + MONGO_INITDB_DATABASE: sharelatex + extra_hosts: + # Required when using the automatic database setup for initializing the + # replica set. This override is not needed when running the setup after + # starting up mongo. + - mongo:127.0.0.1 diff --git a/services/docstore/Makefile b/services/docstore/Makefile index f06ad3c14c..6efd053025 100644 --- a/services/docstore/Makefile +++ b/services/docstore/Makefile @@ -116,13 +116,6 @@ test_acceptance_clean: $(DOCKER_COMPOSE_TEST_ACCEPTANCE) down -v -t 0 test_acceptance_pre_run: - $(DOCKER_COMPOSE_TEST_ACCEPTANCE) up -d mongo - $(DOCKER_COMPOSE_TEST_ACCEPTANCE) exec -T mongo sh -c ' \ - while ! 
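The Makefile hunks above stop bootstrapping the replica set with an ad-hoc `mongosh` wait loop and instead mount the shared `bin/shared/mongodb-init-replica-set.js` into `/docker-entrypoint-initdb.d/`. The script itself is not shown in this hunk; judging from the `rs.initiate` command being removed from the per-service Makefiles, it presumably looks roughly like this sketch.

```js
// bin/shared/mongodb-init-replica-set.js (approximate contents, inferred from
// the rs.initiate command removed from the per-service Makefiles).
// Scripts in /docker-entrypoint-initdb.d/ run once on first container start,
// which is why the compose files can drop the healthcheck-based wait loop.
rs.initiate({ _id: 'overleaf', members: [{ _id: 0, host: 'mongo:27017' }] })
```

The `extra_hosts: mongo:127.0.0.1` override in the compose files exists because, per the inline comment, the init script runs inside the `mongo` container itself, so the `mongo:27017` member address has to resolve locally during the automatic setup.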
mongosh --eval "db.version()" > /dev/null; do \ - echo "Waiting for Mongo..."; \ - sleep 1; \ - done; \ - mongosh --eval "rs.initiate({ _id: \"overleaf\", members: [ { _id: 0, host: \"mongo:27017\" } ] })"' ifneq (,$(wildcard test/acceptance/js/scripts/pre-run)) $(DOCKER_COMPOSE_TEST_ACCEPTANCE) run --rm test_acceptance test/acceptance/js/scripts/pre-run endif diff --git a/services/docstore/app.js b/services/docstore/app.js index b4a26fc24a..76659e8411 100644 --- a/services/docstore/app.js +++ b/services/docstore/app.js @@ -88,14 +88,17 @@ app.get('/status', (req, res) => res.send('docstore is alive')) app.use(handleValidationErrors()) app.use(function (error, req, res, next) { - logger.error({ err: error, req }, 'request errored') if (error instanceof Errors.NotFoundError) { + logger.warn({ req }, 'not found') res.sendStatus(404) } else if (error instanceof Errors.DocModifiedError) { + logger.warn({ req }, 'conflict: doc modified') res.sendStatus(409) } else if (error instanceof Errors.DocVersionDecrementedError) { + logger.warn({ req }, 'conflict: doc version decremented') res.sendStatus(409) } else { + logger.error({ err: error, req }, 'request errored') res.status(500).send('Oops, something went wrong') } }) diff --git a/services/docstore/docker-compose.ci.yml b/services/docstore/docker-compose.ci.yml index a8847e8996..a1a9995f60 100644 --- a/services/docstore/docker-compose.ci.yml +++ b/services/docstore/docker-compose.ci.yml @@ -29,7 +29,7 @@ services: NODE_OPTIONS: "--unhandled-rejections=strict" depends_on: mongo: - condition: service_healthy + condition: service_started gcs: condition: service_healthy user: node @@ -46,10 +46,15 @@ services: mongo: image: mongo:6.0.13 command: --replSet overleaf - healthcheck: - test: "mongosh --quiet localhost/test --eval 'quit(db.runCommand({ ping: 1 }).ok ? 0 : 1)'" - interval: 1s - retries: 20 + volumes: + - ../../bin/shared/mongodb-init-replica-set.js:/docker-entrypoint-initdb.d/mongodb-init-replica-set.js + environment: + MONGO_INITDB_DATABASE: sharelatex + extra_hosts: + # Required when using the automatic database setup for initializing the + # replica set. This override is not needed when running the setup after + # starting up mongo. + - mongo:127.0.0.1 gcs: image: fsouza/fake-gcs-server:1.45.2 command: ["--port=9090", "--scheme=http"] diff --git a/services/docstore/docker-compose.yml b/services/docstore/docker-compose.yml index b170934f48..93a029b00a 100644 --- a/services/docstore/docker-compose.yml +++ b/services/docstore/docker-compose.yml @@ -41,7 +41,7 @@ services: user: node depends_on: mongo: - condition: service_healthy + condition: service_started gcs: condition: service_healthy command: npm run --silent test:acceptance @@ -49,10 +49,15 @@ services: mongo: image: mongo:6.0.13 command: --replSet overleaf - healthcheck: - test: "mongosh --quiet localhost/test --eval 'quit(db.runCommand({ ping: 1 }).ok ? 0 : 1)'" - interval: 1s - retries: 20 + volumes: + - ../../bin/shared/mongodb-init-replica-set.js:/docker-entrypoint-initdb.d/mongodb-init-replica-set.js + environment: + MONGO_INITDB_DATABASE: sharelatex + extra_hosts: + # Required when using the automatic database setup for initializing the + # replica set. This override is not needed when running the setup after + # starting up mongo. 
+ - mongo:127.0.0.1 gcs: image: fsouza/fake-gcs-server:1.45.2 diff --git a/services/document-updater/Makefile b/services/document-updater/Makefile index b8a3e1a02e..55f483fc89 100644 --- a/services/document-updater/Makefile +++ b/services/document-updater/Makefile @@ -116,13 +116,6 @@ test_acceptance_clean: $(DOCKER_COMPOSE_TEST_ACCEPTANCE) down -v -t 0 test_acceptance_pre_run: - $(DOCKER_COMPOSE_TEST_ACCEPTANCE) up -d mongo - $(DOCKER_COMPOSE_TEST_ACCEPTANCE) exec -T mongo sh -c ' \ - while ! mongosh --eval "db.version()" > /dev/null; do \ - echo "Waiting for Mongo..."; \ - sleep 1; \ - done; \ - mongosh --eval "rs.initiate({ _id: \"overleaf\", members: [ { _id: 0, host: \"mongo:27017\" } ] })"' ifneq (,$(wildcard test/acceptance/js/scripts/pre-run)) $(DOCKER_COMPOSE_TEST_ACCEPTANCE) run --rm test_acceptance test/acceptance/js/scripts/pre-run endif diff --git a/services/document-updater/app.js b/services/document-updater/app.js index 2932bba87d..65c9895377 100644 --- a/services/document-updater/app.js +++ b/services/document-updater/app.js @@ -147,6 +147,10 @@ app.post( '/project/:project_id/get_and_flush_if_old', HttpController.getProjectDocsAndFlushIfOld ) +app.get( + '/project/:project_id/last_updated_at', + HttpController.getProjectLastUpdatedAt +) app.post('/project/:project_id/clearState', HttpController.clearProjectState) app.post('/project/:project_id/doc/:doc_id', HttpController.setDoc) app.post('/project/:project_id/doc/:doc_id/append', HttpController.appendToDoc) diff --git a/services/document-updater/app/js/HttpController.js b/services/document-updater/app/js/HttpController.js index 4a1539b07c..95fe9b7ba9 100644 --- a/services/document-updater/app/js/HttpController.js +++ b/services/document-updater/app/js/HttpController.js @@ -129,6 +129,22 @@ function getProjectDocsAndFlushIfOld(req, res, next) { ) } +function getProjectLastUpdatedAt(req, res, next) { + const projectId = req.params.project_id + ProjectManager.getProjectDocsTimestamps(projectId, (err, timestamps) => { + if (err) return next(err) + + // Filter out nulls. This can happen when + // - docs get flushed between the listing and getting the individual docs ts + // - a doc flush failed half way (doc keys removed, project tracking not updated) + timestamps = timestamps.filter(ts => !!ts) + + timestamps = timestamps.map(ts => parseInt(ts, 10)) + timestamps.sort((a, b) => (a > b ? 1 : -1)) + res.json({ lastUpdatedAt: timestamps.pop() }) + }) +} + function clearProjectState(req, res, next) { const projectId = req.params.project_id const timer = new Metrics.Timer('http.clearProjectState') @@ -521,6 +537,7 @@ module.exports = { getDoc, peekDoc, getProjectDocsAndFlushIfOld, + getProjectLastUpdatedAt, clearProjectState, appendToDoc, setDoc, diff --git a/services/document-updater/docker-compose.ci.yml b/services/document-updater/docker-compose.ci.yml index 332a9710ca..bdf10c9732 100644 --- a/services/document-updater/docker-compose.ci.yml +++ b/services/document-updater/docker-compose.ci.yml @@ -29,7 +29,7 @@ services: NODE_OPTIONS: "--unhandled-rejections=strict" depends_on: mongo: - condition: service_healthy + condition: service_started redis: condition: service_healthy user: node @@ -53,7 +53,12 @@ services: mongo: image: mongo:6.0.13 command: --replSet overleaf - healthcheck: - test: "mongosh --quiet localhost/test --eval 'quit(db.runCommand({ ping: 1 }).ok ? 
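The new `GET /project/:project_id/last_updated_at` route returns the newest per-doc timestamp currently held in Redis, or an empty object when no docs are loaded (the handler serialises `{ lastUpdatedAt: undefined }`, which JSON drops). A hedged sketch of calling it from Node; the host and port follow the acceptance-test helper, and the project id is illustrative.

```js
// Query the new endpoint; assumes Node 18+ (global fetch) and that
// document-updater listens on 127.0.0.1:3003 as in the acceptance tests.
const projectId = '5f0c9d7a1c2b3a0012345678' // illustrative

fetch(`http://127.0.0.1:3003/project/${projectId}/last_updated_at`)
  .then(res => res.json())
  .then(body => {
    if (body.lastUpdatedAt == null) {
      console.log('no docs currently loaded for this project')
    } else {
      console.log('last edit at', new Date(body.lastUpdatedAt).toISOString())
    }
  })
```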
0 : 1)'" - interval: 1s - retries: 20 + volumes: + - ../../bin/shared/mongodb-init-replica-set.js:/docker-entrypoint-initdb.d/mongodb-init-replica-set.js + environment: + MONGO_INITDB_DATABASE: sharelatex + extra_hosts: + # Required when using the automatic database setup for initializing the + # replica set. This override is not needed when running the setup after + # starting up mongo. + - mongo:127.0.0.1 diff --git a/services/document-updater/docker-compose.yml b/services/document-updater/docker-compose.yml index 7770e52d26..7dd27c9a39 100644 --- a/services/document-updater/docker-compose.yml +++ b/services/document-updater/docker-compose.yml @@ -41,7 +41,7 @@ services: user: node depends_on: mongo: - condition: service_healthy + condition: service_started redis: condition: service_healthy command: npm run --silent test:acceptance @@ -56,8 +56,13 @@ services: mongo: image: mongo:6.0.13 command: --replSet overleaf - healthcheck: - test: "mongosh --quiet localhost/test --eval 'quit(db.runCommand({ ping: 1 }).ok ? 0 : 1)'" - interval: 1s - retries: 20 + volumes: + - ../../bin/shared/mongodb-init-replica-set.js:/docker-entrypoint-initdb.d/mongodb-init-replica-set.js + environment: + MONGO_INITDB_DATABASE: sharelatex + extra_hosts: + # Required when using the automatic database setup for initializing the + # replica set. This override is not needed when running the setup after + # starting up mongo. + - mongo:127.0.0.1 diff --git a/services/document-updater/test/acceptance/js/ApplyingUpdatesToADocTests.js b/services/document-updater/test/acceptance/js/ApplyingUpdatesToADocTests.js index 8de7f091a8..73e22aace7 100644 --- a/services/document-updater/test/acceptance/js/ApplyingUpdatesToADocTests.js +++ b/services/document-updater/test/acceptance/js/ApplyingUpdatesToADocTests.js @@ -109,11 +109,40 @@ describe('Applying updates to a doc', function () { ) }) + it('should yield last updated time', function (done) { + DocUpdaterClient.getProjectLastUpdatedAt( + this.project_id, + (error, res, body) => { + if (error != null) { + throw error + } + res.statusCode.should.equal(200) + body.lastUpdatedAt.should.be.within(this.startTime, Date.now()) + done() + } + ) + }) + + it('should yield no last updated time for another project', function (done) { + DocUpdaterClient.getProjectLastUpdatedAt( + DocUpdaterClient.randomId(), + (error, res, body) => { + if (error != null) { + throw error + } + res.statusCode.should.equal(200) + body.should.deep.equal({}) + done() + } + ) + }) + describe('when sending another update', function () { before(function (done) { - this.timeout = 10000 - this.second_update = Object.create(this.update) + this.timeout(10000) + this.second_update = Object.assign({}, this.update) this.second_update.v = this.version + 1 + this.secondStartTime = Date.now() DocUpdaterClient.sendUpdate( this.project_id, this.doc_id, @@ -127,6 +156,24 @@ describe('Applying updates to a doc', function () { ) }) + it('should update the doc', function (done) { + DocUpdaterClient.getDoc( + this.project_id, + this.doc_id, + (error, res, doc) => { + if (error) done(error) + doc.lines.should.deep.equal([ + 'one', + 'one and a half', + 'one and a half', + 'two', + 'three', + ]) + done() + } + ) + }) + it('should not change the first op timestamp', function (done) { rclientProjectHistory.get( ProjectHistoryKeys.projectHistoryFirstOpTimestamp({ @@ -142,6 +189,23 @@ describe('Applying updates to a doc', function () { } ) }) + + it('should yield last updated time', function (done) { + 
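Two small but meaningful fixes in the test above: `this.timeout(10000)` actually calls Mocha's timeout setter (assigning to `this.timeout` had no effect), and `Object.assign({}, this.update)` makes a shallow copy with own properties, whereas `Object.create(this.update)` only produced an object that inherits from the update, so serialisation saw nothing but the overridden `v`. A quick illustration of that difference:

```js
const update = { doc: 'doc-id', op: [{ i: 'one and a half\n', p: 4 }], v: 0 }

// Prototype link only: the copy has no own properties except what we set.
const viaCreate = Object.create(update)
viaCreate.v = 1
console.log(JSON.stringify(viaCreate)) // {"v":1} - doc and op are lost

// Shallow copy: own properties are carried over, then v is overridden.
const viaAssign = Object.assign({}, update, { v: 1 })
console.log(JSON.stringify(viaAssign)) // {"doc":"doc-id","op":[...],"v":1}
```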
DocUpdaterClient.getProjectLastUpdatedAt( + this.project_id, + (error, res, body) => { + if (error != null) { + throw error + } + res.statusCode.should.equal(200) + body.lastUpdatedAt.should.be.within( + this.secondStartTime, + Date.now() + ) + done() + } + ) + }) }) }) diff --git a/services/document-updater/test/acceptance/js/helpers/DocUpdaterClient.js b/services/document-updater/test/acceptance/js/helpers/DocUpdaterClient.js index 4ed4f929de..0a4ec8922e 100644 --- a/services/document-updater/test/acceptance/js/helpers/DocUpdaterClient.js +++ b/services/document-updater/test/acceptance/js/helpers/DocUpdaterClient.js @@ -119,6 +119,18 @@ module.exports = DocUpdaterClient = { ) }, + getProjectLastUpdatedAt(projectId, callback) { + request.get( + `http://127.0.0.1:3003/project/${projectId}/last_updated_at`, + (error, res, body) => { + if (body != null && res.statusCode >= 200 && res.statusCode < 300) { + body = JSON.parse(body) + } + callback(error, res, body) + } + ) + }, + preloadDoc(projectId, docId, callback) { DocUpdaterClient.getDoc(projectId, docId, callback) }, diff --git a/services/git-bridge/README.md b/services/git-bridge/README.md index 13b24cc6d0..eadc2abc4f 100644 --- a/services/git-bridge/README.md +++ b/services/git-bridge/README.md @@ -76,12 +76,10 @@ The configuration file is in `.json` format. "postbackBaseUrl" (string): the postback url, "serviceName" (string): current name of writeLaTeX in case it ever changes, - "oauth2" (object): { null or missing if oauth2 shouldn't be used - "oauth2ClientID" (string): oauth2 client ID, - "oauth2ClientSecret" (string): oauth2 client secret, - "oauth2Server" (string): oauth2 server, - with protocol and - without trailing slash + "oauth2Server" (string): oauth2 server, + with protocol and + without trailing slash, + null or missing if oauth2 shouldn't be used }, "repoStore" (object, optional): { configure the repo store "maxFileSize" (long, optional): maximum size of a file, inclusive diff --git a/services/git-bridge/conf/envsubst_template.json b/services/git-bridge/conf/envsubst_template.json index 6aa91be700..4ede5bab7f 100644 --- a/services/git-bridge/conf/envsubst_template.json +++ b/services/git-bridge/conf/envsubst_template.json @@ -7,11 +7,7 @@ "apiBaseUrl": "${GIT_BRIDGE_API_BASE_URL:-https://localhost/api/v0}", "postbackBaseUrl": "${GIT_BRIDGE_POSTBACK_BASE_URL:-https://localhost}", "serviceName": "${GIT_BRIDGE_SERVICE_NAME:-Overleaf}", - "oauth2": { - "oauth2ClientID": "${GIT_BRIDGE_OAUTH2_CLIENT_ID}", - "oauth2ClientSecret": "${GIT_BRIDGE_OAUTH2_CLIENT_SECRET}", - "oauth2Server": "${GIT_BRIDGE_OAUTH2_SERVER:-https://localhost}" - }, + "oauth2Server": "${GIT_BRIDGE_OAUTH2_SERVER:-https://localhost}", "userPasswordEnabled": ${GIT_BRIDGE_USER_PASSWORD_ENABLED:-false}, "repoStore": { "maxFileNum": ${GIT_BRIDGE_REPOSTORE_MAX_FILE_NUM:-2000}, diff --git a/services/git-bridge/conf/example_config.json b/services/git-bridge/conf/example_config.json index 1e5b95e5a6..76b82eb6a0 100644 --- a/services/git-bridge/conf/example_config.json +++ b/services/git-bridge/conf/example_config.json @@ -7,11 +7,7 @@ "apiBaseUrl": "https://localhost/api/v0", "postbackBaseUrl": "https://localhost", "serviceName": "Overleaf", - "oauth2": { - "oauth2ClientID": "asdf", - "oauth2ClientSecret": "asdf", - "oauth2Server": "https://localhost" - }, + "oauth2Server": "https://localhost", "repoStore": { "maxFileNum": 2000, "maxFileSize": 52428800 diff --git a/services/git-bridge/conf/local.json b/services/git-bridge/conf/local.json index 69eb31ab2f..c4de48d819 
100644 --- a/services/git-bridge/conf/local.json +++ b/services/git-bridge/conf/local.json @@ -7,11 +7,7 @@ "apiBaseUrl": "http://v2.overleaf.test:3000/api/v0", "postbackBaseUrl": "http://git-bridge:8000", "serviceName": "Overleaf", - "oauth2": { - "oauth2ClientID": "264c723c925c13590880751f861f13084934030c13b4452901e73bdfab226edc", - "oauth2ClientSecret": "e6b2e9eee7ae2bb653823250bb69594a91db0547fe3790a7135acb497108e62d", - "oauth2Server": "http://v2.overleaf.test:3000" - }, + "oauth2Server": "http://v2.overleaf.test:3000", "repoStore": { "maxFileNum": 2000, "maxFileSize": 52428800 diff --git a/services/git-bridge/src/main/java/uk/ac/ic/wlgitbridge/application/config/Config.java b/services/git-bridge/src/main/java/uk/ac/ic/wlgitbridge/application/config/Config.java index 492302721b..d5b530100e 100644 --- a/services/git-bridge/src/main/java/uk/ac/ic/wlgitbridge/application/config/Config.java +++ b/services/git-bridge/src/main/java/uk/ac/ic/wlgitbridge/application/config/Config.java @@ -30,7 +30,7 @@ public class Config implements JSONSource { config.apiBaseURL, config.postbackURL, config.serviceName, - Oauth2.asSanitised(config.oauth2), + config.oauth2Server, config.userPasswordEnabled, config.repoStore, SwapStoreConfig.sanitisedCopy(config.swapStore), @@ -46,7 +46,7 @@ public class Config implements JSONSource { private String apiBaseURL; private String postbackURL; private String serviceName; - @Nullable private Oauth2 oauth2; + @Nullable private String oauth2Server; private boolean userPasswordEnabled; @Nullable private RepoStoreConfig repoStore; @Nullable private SwapStoreConfig swapStore; @@ -70,7 +70,7 @@ public class Config implements JSONSource { String apiBaseURL, String postbackURL, String serviceName, - Oauth2 oauth2, + String oauth2Server, boolean userPasswordEnabled, RepoStoreConfig repoStore, SwapStoreConfig swapStore, @@ -84,7 +84,7 @@ public class Config implements JSONSource { this.apiBaseURL = apiBaseURL; this.postbackURL = postbackURL; this.serviceName = serviceName; - this.oauth2 = oauth2; + this.oauth2Server = oauth2Server; this.userPasswordEnabled = userPasswordEnabled; this.repoStore = repoStore; this.swapStore = swapStore; @@ -116,7 +116,7 @@ public class Config implements JSONSource { if (!postbackURL.endsWith("/")) { postbackURL += "/"; } - oauth2 = new Gson().fromJson(configObject.get("oauth2"), Oauth2.class); + oauth2Server = getOptionalString(configObject, "oauth2Server"); userPasswordEnabled = getOptionalString(configObject, "userPasswordEnabled").equals("true"); repoStore = new Gson().fromJson(configObject.get("repoStore"), RepoStoreConfig.class); swapStore = new Gson().fromJson(configObject.get("swapStore"), SwapStoreConfig.class); @@ -166,19 +166,12 @@ public class Config implements JSONSource { return postbackURL; } - public boolean isUsingOauth2() { - return oauth2 != null; - } - public boolean isUserPasswordEnabled() { return userPasswordEnabled; } - public Oauth2 getOauth2() { - if (!isUsingOauth2()) { - throw new AssertionError("Getting oauth2 when not using it"); - } - return oauth2; + public String getOauth2Server() { + return oauth2Server; } public Optional getRepoStore() { diff --git a/services/git-bridge/src/main/java/uk/ac/ic/wlgitbridge/application/config/Oauth2.java b/services/git-bridge/src/main/java/uk/ac/ic/wlgitbridge/application/config/Oauth2.java deleted file mode 100644 index 1db7d3b4d2..0000000000 --- a/services/git-bridge/src/main/java/uk/ac/ic/wlgitbridge/application/config/Oauth2.java +++ /dev/null @@ -1,33 +0,0 @@ -package 
uk.ac.ic.wlgitbridge.application.config; - -/* - * Created by winston on 25/10/15. - */ -public class Oauth2 { - - private final String oauth2ClientID; - private final String oauth2ClientSecret; - private final String oauth2Server; - - public Oauth2(String oauth2ClientID, String oauth2ClientSecret, String oauth2Server) { - this.oauth2ClientID = oauth2ClientID; - this.oauth2ClientSecret = oauth2ClientSecret; - this.oauth2Server = oauth2Server; - } - - public String getOauth2ClientID() { - return oauth2ClientID; - } - - public String getOauth2ClientSecret() { - return oauth2ClientSecret; - } - - public String getOauth2Server() { - return oauth2Server; - } - - public static Oauth2 asSanitised(Oauth2 oauth2) { - return new Oauth2("", "", oauth2.oauth2Server); - } -} diff --git a/services/git-bridge/src/main/java/uk/ac/ic/wlgitbridge/server/GitBridgeServer.java b/services/git-bridge/src/main/java/uk/ac/ic/wlgitbridge/server/GitBridgeServer.java index c576e2e9d8..57d1b34a7b 100644 --- a/services/git-bridge/src/main/java/uk/ac/ic/wlgitbridge/server/GitBridgeServer.java +++ b/services/git-bridge/src/main/java/uk/ac/ic/wlgitbridge/server/GitBridgeServer.java @@ -151,9 +151,9 @@ public class GitBridgeServer { throws ServletException { final ServletContextHandler servletContextHandler = new ServletContextHandler(ServletContextHandler.SESSIONS); - if (config.isUsingOauth2()) { + if (config.getOauth2Server() != null) { Filter filter = - new Oauth2Filter(snapshotApi, config.getOauth2(), config.isUserPasswordEnabled()); + new Oauth2Filter(snapshotApi, config.getOauth2Server(), config.isUserPasswordEnabled()); servletContextHandler.addFilter( new FilterHolder(filter), "/*", EnumSet.of(DispatcherType.REQUEST)); } diff --git a/services/git-bridge/src/main/java/uk/ac/ic/wlgitbridge/server/Oauth2Filter.java b/services/git-bridge/src/main/java/uk/ac/ic/wlgitbridge/server/Oauth2Filter.java index 5bd3904e47..586a21ab3f 100644 --- a/services/git-bridge/src/main/java/uk/ac/ic/wlgitbridge/server/Oauth2Filter.java +++ b/services/git-bridge/src/main/java/uk/ac/ic/wlgitbridge/server/Oauth2Filter.java @@ -13,7 +13,6 @@ import javax.servlet.*; import javax.servlet.http.HttpServletRequest; import javax.servlet.http.HttpServletResponse; import org.apache.commons.codec.binary.Base64; -import uk.ac.ic.wlgitbridge.application.config.Oauth2; import uk.ac.ic.wlgitbridge.bridge.snapshot.SnapshotApi; import uk.ac.ic.wlgitbridge.util.Instance; import uk.ac.ic.wlgitbridge.util.Log; @@ -28,13 +27,13 @@ public class Oauth2Filter implements Filter { private final SnapshotApi snapshotApi; - private final Oauth2 oauth2; + private final String oauth2Server; private final boolean isUserPasswordEnabled; - public Oauth2Filter(SnapshotApi snapshotApi, Oauth2 oauth2, boolean isUserPasswordEnabled) { + public Oauth2Filter(SnapshotApi snapshotApi, String oauth2Server, boolean isUserPasswordEnabled) { this.snapshotApi = snapshotApi; - this.oauth2 = oauth2; + this.oauth2Server = oauth2Server; this.isUserPasswordEnabled = isUserPasswordEnabled; } @@ -108,7 +107,7 @@ public class Oauth2Filter implements Filter { // fail later (for example, in the unlikely event that the token // expired between the two requests). In that case, JGit will // return a 401 without a custom error message. 
- int statusCode = checkAccessToken(oauth2, password, getClientIp(request)); + int statusCode = checkAccessToken(this.oauth2Server, password, getClientIp(request)); if (statusCode == 429) { handleRateLimit(projectId, username, request, response); return; @@ -238,10 +237,9 @@ public class Oauth2Filter implements Filter { "your Overleaf Account Settings.")); } - private int checkAccessToken(Oauth2 oauth2, String accessToken, String clientIp) + private int checkAccessToken(String oauth2Server, String accessToken, String clientIp) throws IOException { - GenericUrl url = - new GenericUrl(oauth2.getOauth2Server() + "/oauth/token/info?client_ip=" + clientIp); + GenericUrl url = new GenericUrl(oauth2Server + "/oauth/token/info?client_ip=" + clientIp); HttpRequest request = Instance.httpRequestFactory.buildGetRequest(url); HttpHeaders headers = new HttpHeaders(); headers.setAuthorization("Bearer " + accessToken); diff --git a/services/git-bridge/src/test/java/uk/ac/ic/wlgitbridge/application/WLGitBridgeIntegrationTest.java b/services/git-bridge/src/test/java/uk/ac/ic/wlgitbridge/application/WLGitBridgeIntegrationTest.java index 8491aa8055..f706d98edf 100644 --- a/services/git-bridge/src/test/java/uk/ac/ic/wlgitbridge/application/WLGitBridgeIntegrationTest.java +++ b/services/git-bridge/src/test/java/uk/ac/ic/wlgitbridge/application/WLGitBridgeIntegrationTest.java @@ -1495,13 +1495,9 @@ public class WLGitBridgeIntegrationTest { + port + "\",\n" + " \"serviceName\": \"Overleaf\",\n" - + " \"oauth2\": {\n" - + " \"oauth2ClientID\": \"clientID\",\n" - + " \"oauth2ClientSecret\": \"oauth2 client secret\",\n" - + " \"oauth2Server\": \"http://127.0.0.1:" + + " \"oauth2Server\": \"http://127.0.0.1:" + apiPort - + "\"\n" - + " }"; + + "\""; if (swapCfg != null) { cfgStr += ",\n" @@ -1524,7 +1520,6 @@ public class WLGitBridgeIntegrationTest { + ",\n" + " \"intervalMillis\": " + swapCfg.getIntervalMillis() - + "\n" + " }\n"; } cfgStr += "}\n"; diff --git a/services/git-bridge/src/test/java/uk/ac/ic/wlgitbridge/application/config/ConfigTest.java b/services/git-bridge/src/test/java/uk/ac/ic/wlgitbridge/application/config/ConfigTest.java index cbb4265d5b..8c102dbda3 100644 --- a/services/git-bridge/src/test/java/uk/ac/ic/wlgitbridge/application/config/ConfigTest.java +++ b/services/git-bridge/src/test/java/uk/ac/ic/wlgitbridge/application/config/ConfigTest.java @@ -23,11 +23,7 @@ public class ConfigTest { + " \"apiBaseUrl\": \"http://127.0.0.1:60000/api/v0\",\n" + " \"postbackBaseUrl\": \"http://127.0.0.1\",\n" + " \"serviceName\": \"Overleaf\",\n" - + " \"oauth2\": {\n" - + " \"oauth2ClientID\": \"clientID\",\n" - + " \"oauth2ClientSecret\": \"oauth2 client secret\",\n" - + " \"oauth2Server\": \"https://www.overleaf.com\"\n" - + " }\n" + + " \"oauth2Server\": \"https://www.overleaf.com\"\n" + "}\n"); Config config = new Config(reader); assertEquals(80, config.getPort()); @@ -35,10 +31,7 @@ public class ConfigTest { assertEquals("http://127.0.0.1:60000/api/v0/", config.getAPIBaseURL()); assertEquals("http://127.0.0.1/", config.getPostbackURL()); assertEquals("Overleaf", config.getServiceName()); - assertTrue(config.isUsingOauth2()); - assertEquals("clientID", config.getOauth2().getOauth2ClientID()); - assertEquals("oauth2 client secret", config.getOauth2().getOauth2ClientSecret()); - assertEquals("https://www.overleaf.com", config.getOauth2().getOauth2Server()); + assertEquals("https://www.overleaf.com", config.getOauth2Server()); } @Test(expected = AssertionError.class) @@ -53,7 +46,7 @@ public class ConfigTest 
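With the `oauth2` object gone, the filter only needs the server base URL to validate a token: the check is a plain HTTP call to `/oauth/token/info` with the password sent as a Bearer token, and the filter branches on the status code. Sketched below in Node for consistency with the rest of the repo (the Java code uses Google's HTTP client); the server URL and token are illustrative.

```js
// Sketch of the token check Oauth2Filter performs, expressed in Node rather
// than Java. oauth2Server, accessToken and clientIp are illustrative values.
async function checkAccessToken(oauth2Server, accessToken, clientIp) {
  const url = `${oauth2Server}/oauth/token/info?client_ip=${encodeURIComponent(clientIp)}`
  const response = await fetch(url, {
    headers: { Authorization: `Bearer ${accessToken}` },
  })
  // The filter only inspects the status code (e.g. 429 -> rate limited,
  // other non-2xx -> reject the git request), so return it unchanged.
  return response.status
}

checkAccessToken('http://v2.overleaf.test:3000', 'example-access-token', '10.0.0.1')
  .then(status => console.log('token info status:', status))
```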
{ + " \"apiBaseUrl\": \"http://127.0.0.1:60000/api/v0\",\n" + " \"postbackBaseUrl\": \"http://127.0.0.1\",\n" + " \"serviceName\": \"Overleaf\",\n" - + " \"oauth2\": null\n" + + " \"oauth2Server\": null\n" + "}\n"); Config config = new Config(reader); assertEquals(80, config.getPort()); @@ -61,8 +54,7 @@ public class ConfigTest { assertEquals("http://127.0.0.1:60000/api/v0/", config.getAPIBaseURL()); assertEquals("http://127.0.0.1/", config.getPostbackURL()); assertEquals("Overleaf", config.getServiceName()); - assertFalse(config.isUsingOauth2()); - config.getOauth2(); + assertNull(config.getOauth2Server()); } @Test @@ -77,11 +69,7 @@ public class ConfigTest { + " \"apiBaseUrl\": \"http://127.0.0.1:60000/api/v0\",\n" + " \"postbackBaseUrl\": \"http://127.0.0.1\",\n" + " \"serviceName\": \"Overleaf\",\n" - + " \"oauth2\": {\n" - + " \"oauth2ClientID\": \"my oauth2 client id\",\n" - + " \"oauth2ClientSecret\": \"my oauth2 client secret\",\n" - + " \"oauth2Server\": \"https://www.overleaf.com\"\n" - + " }\n" + + " \"oauth2Server\": \"https://www.overleaf.com\"\n" + "}\n"); Config config = new Config(reader); String expected = @@ -94,11 +82,7 @@ public class ConfigTest { + " \"apiBaseURL\": \"http://127.0.0.1:60000/api/v0/\",\n" + " \"postbackURL\": \"http://127.0.0.1/\",\n" + " \"serviceName\": \"Overleaf\",\n" - + " \"oauth2\": {\n" - + " \"oauth2ClientID\": \"\",\n" - + " \"oauth2ClientSecret\": \"\",\n" - + " \"oauth2Server\": \"https://www.overleaf.com\"\n" - + " },\n" + + " \"oauth2Server\": \"https://www.overleaf.com\",\n" + " \"userPasswordEnabled\": false,\n" + " \"repoStore\": null,\n" + " \"swapStore\": null,\n" diff --git a/services/history-v1/Makefile b/services/history-v1/Makefile index b9ab568b48..1f03a21f18 100644 --- a/services/history-v1/Makefile +++ b/services/history-v1/Makefile @@ -116,13 +116,6 @@ test_acceptance_clean: $(DOCKER_COMPOSE_TEST_ACCEPTANCE) down -v -t 0 test_acceptance_pre_run: - $(DOCKER_COMPOSE_TEST_ACCEPTANCE) up -d mongo - $(DOCKER_COMPOSE_TEST_ACCEPTANCE) exec -T mongo sh -c ' \ - while ! mongosh --eval "db.version()" > /dev/null; do \ - echo "Waiting for Mongo..."; \ - sleep 1; \ - done; \ - mongosh --eval "rs.initiate({ _id: \"overleaf\", members: [ { _id: 0, host: \"mongo:27017\" } ] })"' ifneq (,$(wildcard test/acceptance/js/scripts/pre-run)) $(DOCKER_COMPOSE_TEST_ACCEPTANCE) run --rm test_acceptance test/acceptance/js/scripts/pre-run endif diff --git a/services/history-v1/api/controllers/project_import.js b/services/history-v1/api/controllers/project_import.js index 84bd9e0768..5dec84d843 100644 --- a/services/history-v1/api/controllers/project_import.js +++ b/services/history-v1/api/controllers/project_import.js @@ -22,6 +22,7 @@ const BlobStore = storage.BlobStore const chunkStore = storage.chunkStore const HashCheckBlobStore = storage.HashCheckBlobStore const persistChanges = storage.persistChanges +const InvalidChangeError = storage.InvalidChangeError const render = require('./render') @@ -113,7 +114,8 @@ async function importChanges(req, res, next) { err instanceof File.NotEditableError || err instanceof FileMap.PathnameError || err instanceof Snapshot.EditMissingFileError || - err instanceof chunkStore.ChunkVersionConflictError + err instanceof chunkStore.ChunkVersionConflictError || + err instanceof InvalidChangeError ) { // If we failed to apply operations, that's probably because they were // invalid. 
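In `project_import.js`, `InvalidChangeError` (re-exported from `storage/index.js` later in this diff) is now treated like the other "the submitted changes are bad" errors instead of surfacing as an unexpected failure. A generic, hedged sketch of that expected-vs-unexpected split; the error class and `applyChanges` function here are stand-ins, not the real storage API.

```js
// Stand-in types to illustrate the expected-vs-unexpected error split.
class InvalidChangeError extends Error {}

async function applyChanges(changes) {
  // placeholder for the real persistChanges(); rejects invalid input
  if (!Array.isArray(changes) || changes.length === 0) {
    throw new InvalidChangeError('empty change set')
  }
  return { endVersion: changes.length }
}

async function importChanges(changes) {
  try {
    return await applyChanges(changes)
  } catch (err) {
    if (err instanceof InvalidChangeError) {
      // expected client error: report it as such instead of a server failure
      return { status: 'unprocessable', reason: err.message }
    }
    throw err // anything else is a genuine server-side failure
  }
}

importChanges([]).then(console.log)
// => { status: 'unprocessable', reason: 'empty change set' }
```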
diff --git a/services/history-v1/backup-verifier-app.mjs b/services/history-v1/backup-verifier-app.mjs index de427a3765..3949e6a62d 100644 --- a/services/history-v1/backup-verifier-app.mjs +++ b/services/history-v1/backup-verifier-app.mjs @@ -4,17 +4,24 @@ import '@overleaf/metrics/initialize.js' import http from 'node:http' import { fileURLToPath } from 'node:url' import { promisify } from 'node:util' +import { setTimeout } from 'node:timers/promises' import express from 'express' import logger from '@overleaf/logger' import Metrics from '@overleaf/metrics' +import { healthCheck } from './backupVerifier/healthCheck.mjs' import { BackupCorruptedError, - healthCheck, verifyBlob, } from './storage/lib/backupVerifier.mjs' import { mongodb } from './storage/index.js' import { expressify } from '@overleaf/promise-utils' import { Blob } from 'overleaf-editor-core' +import { loadGlobalBlobs } from './storage/lib/blob_store/index.js' +import { EventEmitter } from 'node:events' +import { + loopRandomProjects, + setWriteMetrics, +} from './backupVerifier/ProjectVerifier.mjs' const app = express() @@ -64,20 +71,46 @@ app.use((err, req, res, next) => { next(err) }) +const shutdownEmitter = new EventEmitter() + +shutdownEmitter.once('shutdown', async code => { + logger.info({ code }, 'shutting down') + await mongodb.client.close() + await setTimeout(100) + process.exit(code) +}) + +process.on('SIGTERM', () => { + shutdownEmitter.emit('shutdown', 0) +}) + +process.on('SIGINT', () => { + shutdownEmitter.emit('shutdown', 0) +}) + /** * @param {number} port * @return {Promise} */ export async function startApp(port) { await mongodb.client.connect() + await loadGlobalBlobs() await healthCheck() const server = http.createServer(app) await promisify(server.listen.bind(server, port))() + loopRandomProjects(shutdownEmitter) return server } +setWriteMetrics(true) + // Run this if we're called directly if (process.argv[1] === fileURLToPath(import.meta.url)) { const PORT = parseInt(process.env.PORT || '3102', 10) - await startApp(PORT) + try { + await startApp(PORT) + } catch (error) { + shutdownEmitter.emit('shutdown', 1) + logger.error({ error }, 'error starting app') + } } diff --git a/services/history-v1/backup-worker-app.mjs b/services/history-v1/backup-worker-app.mjs index 584ee71b01..b21e55aafe 100644 --- a/services/history-v1/backup-worker-app.mjs +++ b/services/history-v1/backup-worker-app.mjs @@ -38,10 +38,10 @@ app.use((err, req, res, next) => { }) async function triggerGracefulShutdown(server, signal) { - logger.warn({ signal }, 'graceful shutdown: started shutdown sequence') + logger.info({ signal }, 'graceful shutdown: started shutdown sequence') await drainQueue() server.close(function () { - logger.warn({ signal }, 'graceful shutdown: closed server') + logger.info({ signal }, 'graceful shutdown: closed server') setTimeout(() => { process.exit(0) }, 1000) diff --git a/services/history-v1/backupVerifier/ProjectMetrics.mjs b/services/history-v1/backupVerifier/ProjectMetrics.mjs new file mode 100644 index 0000000000..ff37085787 --- /dev/null +++ b/services/history-v1/backupVerifier/ProjectMetrics.mjs @@ -0,0 +1,33 @@ +import Metrics from '@overleaf/metrics' +import { objectIdFromDate } from './utils.mjs' +import { db } from '../storage/lib/mongodb.js' + +const projectsCollection = db.collection('projects') + +/** + * + * @param {Date} beforeTime + * @return {Promise} + */ +export async function measurePendingChangesBeforeTime(beforeTime) { + const pendingChangeCount = await 
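The verifier app now funnels SIGINT, SIGTERM and startup failures through a single `shutdownEmitter`, so the Mongo client is closed exactly once and long-running loops (see `loopRandomProjects`) get a chance to stop. A minimal sketch of the same pattern, with a dummy resource standing in for the Mongo client:

```js
// Minimal sketch of the single-shutdown-path pattern; closeResources() stands
// in for mongodb.client.close() and any background loops.
import { EventEmitter } from 'node:events'
import { setTimeout } from 'node:timers/promises'

const shutdownEmitter = new EventEmitter()

shutdownEmitter.once('shutdown', async code => {
  console.log('shutting down with code', code)
  await closeResources()
  await setTimeout(100) // give in-flight logs a moment to flush
  process.exit(code)
})

process.on('SIGTERM', () => shutdownEmitter.emit('shutdown', 0))
process.on('SIGINT', () => shutdownEmitter.emit('shutdown', 0))

async function closeResources() {
  /* close clients, stop loops */
}

// Background work can listen for the same event and stop cleanly:
shutdownEmitter.once('shutdown', () => console.log('worker loop stopping'))
```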
projectsCollection.countDocuments({ + 'overleaf.backup.pendingChangeAt': { + $lt: beforeTime, + }, + }) + + Metrics.gauge('backup_verification_pending_changes', pendingChangeCount) +} + +/** + * + * @param {Date} graceTime + * @return {Promise} + */ +export async function measureNeverBackedUpProjects(graceTime) { + const neverBackedUpCount = await projectsCollection.countDocuments({ + 'overleaf.backup.lastBackedUpVersion': null, + _id: { $lt: objectIdFromDate(graceTime) }, + }) + Metrics.gauge('backup_verification_never_backed_up', neverBackedUpCount) +} diff --git a/services/history-v1/backupVerifier/ProjectSampler.mjs b/services/history-v1/backupVerifier/ProjectSampler.mjs new file mode 100644 index 0000000000..93d9a1a31f --- /dev/null +++ b/services/history-v1/backupVerifier/ProjectSampler.mjs @@ -0,0 +1,79 @@ +// @ts-check +import { objectIdFromDate } from './utils.mjs' +import { db } from '../storage/lib/mongodb.js' +import config from 'config' + +const projectsCollection = db.collection('projects') + +const HAS_PROJECTS_WITHOUT_HISTORY = + config.get('hasProjectsWithoutHistory') === 'true' + +/** + * @param {Date} start + * @param {Date} end + * @param {number} N + * @yields {string} + */ +export async function* getProjectsCreatedInDateRangeCursor(start, end, N) { + yield* getSampleProjectsCursor(N, [ + { + $match: { + _id: { + $gt: objectIdFromDate(start), + $lte: objectIdFromDate(end), + }, + }, + }, + ]) +} + +export async function* getProjectsUpdatedInDateRangeCursor(start, end, N) { + yield* getSampleProjectsCursor(N, [ + { + $match: { + 'overleaf.history.updatedAt': { + $gt: start, + $lte: end, + }, + }, + }, + ]) +} + +/** + * @typedef {import('mongodb').Document} Document + */ + +/** + * + * @generator + * @param {number} N + * @param {Array} preSampleAggregationStages + * @yields {string} + */ +export async function* getSampleProjectsCursor( + N, + preSampleAggregationStages = [] +) { + const cursor = projectsCollection.aggregate([ + ...preSampleAggregationStages, + { $sample: { size: N } }, + { $project: { 'overleaf.history.id': 1 } }, + ]) + + let validProjects = 0 + let hasInvalidProject = false + + for await (const project of cursor) { + if (HAS_PROJECTS_WITHOUT_HISTORY && !project.overleaf?.history?.id) { + hasInvalidProject = true + continue + } + validProjects++ + yield project.overleaf.history.id.toString() + } + + if (validProjects === 0 && hasInvalidProject) { + yield* getSampleProjectsCursor(N, preSampleAggregationStages) + } +} diff --git a/services/history-v1/backupVerifier/ProjectVerifier.mjs b/services/history-v1/backupVerifier/ProjectVerifier.mjs new file mode 100644 index 0000000000..1e4086b700 --- /dev/null +++ b/services/history-v1/backupVerifier/ProjectVerifier.mjs @@ -0,0 +1,320 @@ +// @ts-check +import { verifyProjectWithErrorContext } from '../storage/lib/backupVerifier.mjs' +import { promiseMapSettledWithLimit } from '@overleaf/promise-utils' +import logger from '@overleaf/logger' +import metrics from '@overleaf/metrics' +import { + getSampleProjectsCursor, + getProjectsCreatedInDateRangeCursor, + getProjectsUpdatedInDateRangeCursor, +} from './ProjectSampler.mjs' +import OError from '@overleaf/o-error' +import { setTimeout } from 'node:timers/promises' + +const MS_PER_30_DAYS = 30 * 24 * 60 * 60 * 1000 + +const failureCounter = new metrics.prom.Counter({ + name: 'backup_project_verification_failed', + help: 'Number of projects that failed verification', + labelNames: ['name'], +}) + +const successCounter = new metrics.prom.Counter({ + name: 
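`ProjectSampler` builds on MongoDB's `$sample` stage and exposes async generators, so callers can stream history ids without materialising the whole sample. A hedged standalone sketch of the same idea against a local database; the connection string, collection name and field paths mirror the diff, but the data is illustrative.

```js
// Standalone sketch of sampling history ids with $sample, streamed through
// an async generator. The connection string is illustrative.
import { MongoClient } from 'mongodb'

const client = new MongoClient('mongodb://127.0.0.1:27017/sharelatex')
await client.connect()
const projects = client.db().collection('projects')

async function* sampleHistoryIds(n, preStages = []) {
  const cursor = projects.aggregate([
    ...preStages,
    { $sample: { size: n } },                // random sample, server-side
    { $project: { 'overleaf.history.id': 1 } },
  ])
  for await (const project of cursor) {
    const historyId = project.overleaf?.history?.id
    if (historyId != null) yield historyId.toString()
  }
}

// Usage: pick up to 5 random projects, then close the connection.
for await (const historyId of sampleHistoryIds(5)) {
  console.log('would verify', historyId)
}
await client.close()
```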
'backup_project_verification_succeeded', + help: 'Number of projects that succeeded verification', +}) + +let WRITE_METRICS = false + +/** + * @typedef {import('node:events').EventEmitter} EventEmitter + */ + +/** + * Allows writing metrics to be enabled or disabled. + * @param {Boolean} writeMetrics + */ +export function setWriteMetrics(writeMetrics) { + WRITE_METRICS = writeMetrics +} + +/** + * + * @param {Error|unknown} error + * @param {string} historyId + */ +function handleVerificationError(error, historyId) { + const name = error instanceof Error ? error.name : 'UnknownError' + logger.error({ historyId, error, name }, 'error verifying project backup') + + WRITE_METRICS && failureCounter.inc({ name }) + + return name +} + +/** + * + * @param {Date} startDate + * @param {Date} endDate + * @param {number} interval + * @returns {Array} + */ +function splitJobs(startDate, endDate, interval) { + /** @type {Array} */ + const jobs = [] + while (startDate < endDate) { + const nextStart = new Date( + Math.min(startDate.getTime() + interval, endDate.getTime()) + ) + jobs.push({ startDate, endDate: nextStart }) + startDate = nextStart + } + return jobs +} + +/** + * + * @param {AsyncGenerator} historyIdCursor + * @param {EventEmitter} [eventEmitter] + * @param {number} [delay] - Allows a delay between each verification + * @return {Promise<{verified: number, total: number, errorTypes: *[], hasFailure: boolean}>} + */ +async function verifyProjectsFromCursor( + historyIdCursor, + eventEmitter, + delay = 0 +) { + const errorTypes = [] + let verified = 0 + let total = 0 + let receivedShutdownSignal = false + if (eventEmitter) { + eventEmitter.once('shutdown', () => { + receivedShutdownSignal = true + }) + } + for await (const historyId of historyIdCursor) { + if (receivedShutdownSignal) { + break + } + total++ + try { + await verifyProjectWithErrorContext(historyId) + logger.debug({ historyId }, 'verified project backup successfully') + WRITE_METRICS && successCounter.inc() + verified++ + } catch (error) { + const errorType = handleVerificationError(error, historyId) + errorTypes.push(errorType) + } + if (delay > 0) { + await setTimeout(delay) + } + } + return { + verified, + total, + errorTypes, + hasFailure: errorTypes.length > 0, + } +} + +/** + * + * @param {number} nProjectsToSample + * @param {EventEmitter} [signal] + * @param {number} [delay] + * @return {Promise} + */ +export async function verifyRandomProjectSample( + nProjectsToSample, + signal, + delay = 0 +) { + const historyIds = await getSampleProjectsCursor(nProjectsToSample) + return await verifyProjectsFromCursor(historyIds, signal, delay) +} + +/** + * Samples projects with history IDs between the specified dates and verifies them. 
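`splitJobs` turns a long verification window into consecutive sub-ranges (30 days each by default, via `MS_PER_30_DAYS`) that can then be processed with limited concurrency. A quick worked example of what it produces, using a smaller interval:

```js
// Same logic as splitJobs() above, shown standalone with a small interval.
function splitJobs(startDate, endDate, interval) {
  const jobs = []
  while (startDate < endDate) {
    const nextStart = new Date(
      Math.min(startDate.getTime() + interval, endDate.getTime())
    )
    jobs.push({ startDate, endDate: nextStart })
    startDate = nextStart
  }
  return jobs
}

const DAY = 24 * 60 * 60 * 1000
console.log(splitJobs(new Date('2025-01-01'), new Date('2025-01-10'), 4 * DAY))
// => three jobs: 01-01..01-05, 01-05..01-09, 01-09..01-10 (last one is shorter)
```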
+ * + * @param {Date} startDate + * @param {Date} endDate + * @param {number} projectsPerRange + * @param {EventEmitter} [signal] + * @return {Promise} + */ +async function verifyRange(startDate, endDate, projectsPerRange, signal) { + logger.info({ startDate, endDate }, 'verifying range') + + const results = await verifyProjectsFromCursor( + getProjectsCreatedInDateRangeCursor(startDate, endDate, projectsPerRange), + signal + ) + + if (results.total === 0) { + logger.debug( + { start: startDate, end: endDate }, + 'No projects found in range' + ) + } + + const jobStatus = { + ...results, + startDate, + endDate, + } + + logger.debug( + { ...jobStatus, errorTypes: Array.from(new Set(jobStatus.errorTypes)) }, + 'Verified range' + ) + return jobStatus +} + +/** + * @typedef {Object} VerificationJobSpecification + * @property {Date} startDate + * @property {Date} endDate + */ + +/** + * @typedef {import('./types.d.ts').VerificationJobStatus} VerificationJobStatus + */ + +/** + * @typedef {Object} VerifyDateRangeOptions + * @property {Date} startDate + * @property {Date} endDate + * @property {number} [interval] + * @property {number} [projectsPerRange] + * @property {number} [concurrency] + * @property {EventEmitter} [signal] + */ + +/** + * + * @param {VerifyDateRangeOptions} options + * @return {Promise} + */ +export async function verifyProjectsCreatedInDateRange({ + concurrency = 0, + projectsPerRange = 10, + startDate, + endDate, + interval = MS_PER_30_DAYS, + signal, +}) { + const jobs = splitJobs(startDate, endDate, interval) + if (jobs.length === 0) { + throw new OError('Time range could not be split into jobs', { + start: startDate, + end: endDate, + interval, + }) + } + const settlements = await promiseMapSettledWithLimit( + concurrency, + jobs, + ({ startDate, endDate }) => + verifyRange(startDate, endDate, projectsPerRange, signal) + ) + return settlements.reduce( + /** + * + * @param {VerificationJobStatus} acc + * @param settlement + * @return {VerificationJobStatus} + */ + (acc, settlement) => { + if (settlement.status !== 'rejected') { + if (settlement.value.hasFailure) { + acc.hasFailure = true + } + acc.total += settlement.value.total + acc.verified += settlement.value.verified + acc.errorTypes = acc.errorTypes.concat(settlement.value.errorTypes) + } else { + logger.error({ ...settlement.reason }, 'Error processing range') + } + return acc + }, + /** @type {VerificationJobStatus} */ + { + startDate, + endDate, + verified: 0, + total: 0, + hasFailure: false, + errorTypes: [], + } + ) +} + +/** + * Verifies that projects that have recently gone out of RPO have been updated. 
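`verifyProjectsCreatedInDateRange` fans the sub-ranges out with `promiseMapSettledWithLimit` and then folds the settlements into a single `VerificationJobStatus`, so one rejected range does not abort the rest. A hedged sketch of that aggregation, using `Promise.allSettled` as a stand-in for the promise-utils helper (which additionally caps concurrency):

```js
// Aggregating per-range results while tolerating individual failures.
// Promise.allSettled stands in for promiseMapSettledWithLimit here.
async function verifyRanges(jobs, verifyRange) {
  const settlements = await Promise.allSettled(jobs.map(job => verifyRange(job)))
  return settlements.reduce(
    (acc, settlement) => {
      if (settlement.status === 'fulfilled') {
        acc.total += settlement.value.total
        acc.verified += settlement.value.verified
        acc.errorTypes = acc.errorTypes.concat(settlement.value.errorTypes)
        if (settlement.value.hasFailure) acc.hasFailure = true
      } else {
        console.error('range failed outright', settlement.reason)
      }
      return acc
    },
    { verified: 0, total: 0, hasFailure: false, errorTypes: [] }
  )
}

// Usage with a fake verifier: one range succeeds, one rejects.
verifyRanges([{ ok: true }, { ok: false }], async job => {
  if (!job.ok) throw new Error('boom')
  return { total: 10, verified: 9, hasFailure: true, errorTypes: ['NotFoundError'] }
}).then(console.log)
// => { verified: 9, total: 10, hasFailure: true, errorTypes: [ 'NotFoundError' ] }
```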
+ * + * @param {Date} startDate + * @param {Date} endDate + * @param {number} nProjects + * @param {EventEmitter} [signal] + * @return {Promise} + */ +export async function verifyProjectsUpdatedInDateRange( + startDate, + endDate, + nProjects, + signal +) { + logger.debug( + { startDate, endDate, nProjects }, + 'Sampling projects updated in date range' + ) + const results = await verifyProjectsFromCursor( + getProjectsUpdatedInDateRangeCursor(startDate, endDate, nProjects), + signal + ) + + if (results.total === 0) { + logger.debug( + { start: startDate, end: endDate }, + 'No projects updated recently' + ) + } + + const jobStatus = { + ...results, + startDate, + endDate, + } + + logger.debug( + { ...jobStatus, errorTypes: Array.from(new Set(jobStatus.errorTypes)) }, + 'Verified recently updated projects' + ) + return jobStatus +} + +/** + * + * @param {EventEmitter} signal + * @return {void} + */ +export function loopRandomProjects(signal) { + let shutdown = false + signal.on('shutdown', function () { + shutdown = true + }) + async function loop() { + do { + try { + const result = await verifyRandomProjectSample(100, signal, 2_000) + logger.debug({ result }, 'verified random project sample') + } catch (error) { + logger.error({ error }, 'error verifying random project sample') + } + // eslint-disable-next-line no-unmodified-loop-condition + } while (!shutdown) + } + loop() +} diff --git a/services/history-v1/backupVerifier/healthCheck.mjs b/services/history-v1/backupVerifier/healthCheck.mjs new file mode 100644 index 0000000000..af998748b5 --- /dev/null +++ b/services/history-v1/backupVerifier/healthCheck.mjs @@ -0,0 +1,32 @@ +import config from 'config' +import { verifyProjectWithErrorContext } from '../storage/lib/backupVerifier.mjs' +import { + measureNeverBackedUpProjects, + measurePendingChangesBeforeTime, +} from './ProjectMetrics.mjs' +import { getEndDateForRPO, RPO } from './utils.mjs' + +/** @type {Array} */ +const HEALTH_CHECK_PROJECTS = JSON.parse(config.get('healthCheckProjects')) + +export async function healthCheck() { + if (!Array.isArray(HEALTH_CHECK_PROJECTS)) { + throw new Error('expected healthCheckProjects to be an array') + } + if (HEALTH_CHECK_PROJECTS.length !== 2) { + throw new Error('expected 2 healthCheckProjects') + } + if (!HEALTH_CHECK_PROJECTS.some(id => id.length === 24)) { + throw new Error('expected mongo id in healthCheckProjects') + } + if (!HEALTH_CHECK_PROJECTS.some(id => id.length < 24)) { + throw new Error('expected postgres id in healthCheckProjects') + } + + for (const historyId of HEALTH_CHECK_PROJECTS) { + await verifyProjectWithErrorContext(historyId) + } + + await measurePendingChangesBeforeTime(getEndDateForRPO(2)) + await measureNeverBackedUpProjects(getEndDateForRPO(2)) +} diff --git a/services/history-v1/backupVerifier/types.d.ts b/services/history-v1/backupVerifier/types.d.ts new file mode 100644 index 0000000000..7bfa4a85ff --- /dev/null +++ b/services/history-v1/backupVerifier/types.d.ts @@ -0,0 +1,8 @@ +export type VerificationJobStatus = { + verified: number + total: number + startDate?: Date + endDate?: Date + hasFailure: boolean + errorTypes: Array +} diff --git a/services/history-v1/backupVerifier/utils.mjs b/services/history-v1/backupVerifier/utils.mjs new file mode 100644 index 0000000000..b2d7ed2d3c --- /dev/null +++ b/services/history-v1/backupVerifier/utils.mjs @@ -0,0 +1,35 @@ +import { ObjectId } from 'mongodb' +import config from 'config' + +export const RPO = parseInt(config.get('backupRPOInMS'), 10) + +/** + * @param {Date} 
time + * @return {ObjectId} + */ +export function objectIdFromDate(time) { + return ObjectId.createFromTime(time.getTime() / 1000) +} + +/** + * @param {number} [factor] - Multiply RPO by this factor, default is 1 + * @return {Date} + */ +export function getEndDateForRPO(factor = 1) { + return new Date(Date.now() - RPO * factor) +} + +/** + * Creates a startDate, endDate pair that checks a period of time before the RPO horizon + * + * @param {number} offset - How many seconds we should check + * @return {{endDate: Date, startDate: Date}} + */ +export function getDatesBeforeRPO(offset) { + const now = new Date() + const endDate = new Date(now.getTime() - RPO) + return { + endDate, + startDate: new Date(endDate.getTime() - offset * 1000), + } +} diff --git a/services/history-v1/buildscript.txt b/services/history-v1/buildscript.txt index 3b1c7d83de..f8c895901b 100644 --- a/services/history-v1/buildscript.txt +++ b/services/history-v1/buildscript.txt @@ -7,4 +7,4 @@ history-v1 --node-version=20.18.2 --public-repo=False --script-version=4.5.0 ---tsconfig-extra-includes=backup-deletion-app.mjs,backup-verifier-app.mjs,api/**/*,migrations/**/*,storage/**/* +--tsconfig-extra-includes=backup-deletion-app.mjs,backup-verifier-app.mjs,backup-worker-app.mjs,api/**/*,migrations/**/*,storage/**/* diff --git a/services/history-v1/config/custom-environment-variables.json b/services/history-v1/config/custom-environment-variables.json index f2ec342444..daf804251c 100644 --- a/services/history-v1/config/custom-environment-variables.json +++ b/services/history-v1/config/custom-environment-variables.json @@ -66,6 +66,7 @@ }, "healthCheckBlobs": "HEALTH_CHECK_BLOBS", "healthCheckProjects": "HEALTH_CHECK_PROJECTS", + "backupRPOInMS": "BACKUP_RPO_IN_MS", "minSoftDeletionPeriodDays": "MIN_SOFT_DELETION_PERIOD_DAYS", "mongo": { "uri": "MONGO_CONNECTION_STRING" diff --git a/services/history-v1/config/default.json b/services/history-v1/config/default.json index 74c5bcd237..5222b84d87 100644 --- a/services/history-v1/config/default.json +++ b/services/history-v1/config/default.json @@ -23,12 +23,14 @@ } } }, + "backupRPOInMS": "3600000", "chunkStore": { "historyStoreConcurrency": "4" }, "zipStore": { "zipTimeoutMs": "360000" }, + "hasProjectsWithoutHistory": false, "minSoftDeletionPeriodDays": "90", "maxDeleteKeys": "1000", "useDeleteObjects": "true", diff --git a/services/history-v1/config/test.json b/services/history-v1/config/test.json index d9b91ffbc0..c38e28e564 100644 --- a/services/history-v1/config/test.json +++ b/services/history-v1/config/test.json @@ -36,6 +36,7 @@ }, "healthCheckBlobs": "[\"42/f70d7bba4ae1f07682e0358bd7a2068094fc023b\",\"000000000000000000000042/98d5521fe746bc2d11761edab5d0829bee286009\"]", "healthCheckProjects": "[\"42\",\"000000000000000000000042\"]", + "backupRPOInMS": "360000", "maxDeleteKeys": "3", "useDeleteObjects": "false", "mongo": { diff --git a/services/history-v1/docker-compose.ci.yml b/services/history-v1/docker-compose.ci.yml index 35c40bb7bb..7245ef14e2 100644 --- a/services/history-v1/docker-compose.ci.yml +++ b/services/history-v1/docker-compose.ci.yml @@ -40,7 +40,7 @@ services: - ./test/acceptance/certs:/certs depends_on: mongo: - condition: service_healthy + condition: service_started redis: condition: service_healthy postgres: @@ -74,10 +74,15 @@ services: mongo: image: mongo:6.0.13 command: --replSet overleaf - healthcheck: - test: "mongosh --quiet localhost/test --eval 'quit(db.runCommand({ ping: 1 }).ok ? 
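The helpers above convert the configured `backupRPOInMS` into time windows and into `ObjectId` boundaries (Mongo `_id`s encode their creation time, which is what `measureNeverBackedUpProjects` and the samplers rely on). A small worked example with the RPO hard-coded to the default one hour instead of read from config:

```js
// Worked example of the RPO date helpers, with RPO hard-coded to one hour
// (the default config value, 3600000 ms) instead of read from config.
import { ObjectId } from 'mongodb'

const RPO = 3_600_000

const objectIdFromDate = time => ObjectId.createFromTime(time.getTime() / 1000)
const getEndDateForRPO = (factor = 1) => new Date(Date.now() - RPO * factor)

// Anything created before this ObjectId is at least twice the RPO old and
// should therefore have been backed up already.
const graceTime = getEndDateForRPO(2)
console.log(graceTime.toISOString(), objectIdFromDate(graceTime).toHexString())

// A 10-minute window ending at the RPO horizon, as getDatesBeforeRPO(600) builds:
const endDate = new Date(Date.now() - RPO)
const startDate = new Date(endDate.getTime() - 600 * 1000)
console.log({ startDate, endDate })
```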
0 : 1)'" - interval: 1s - retries: 20 + volumes: + - ../../bin/shared/mongodb-init-replica-set.js:/docker-entrypoint-initdb.d/mongodb-init-replica-set.js + environment: + MONGO_INITDB_DATABASE: sharelatex + extra_hosts: + # Required when using the automatic database setup for initializing the + # replica set. This override is not needed when running the setup after + # starting up mongo. + - mongo:127.0.0.1 postgres: image: postgres:10 environment: diff --git a/services/history-v1/docker-compose.yml b/services/history-v1/docker-compose.yml index 17dad0cb99..608dd1d325 100644 --- a/services/history-v1/docker-compose.yml +++ b/services/history-v1/docker-compose.yml @@ -57,7 +57,7 @@ services: user: node depends_on: mongo: - condition: service_healthy + condition: service_started redis: condition: service_healthy postgres: @@ -82,10 +82,15 @@ services: mongo: image: mongo:6.0.13 command: --replSet overleaf - healthcheck: - test: "mongosh --quiet localhost/test --eval 'quit(db.runCommand({ ping: 1 }).ok ? 0 : 1)'" - interval: 1s - retries: 20 + volumes: + - ../../bin/shared/mongodb-init-replica-set.js:/docker-entrypoint-initdb.d/mongodb-init-replica-set.js + environment: + MONGO_INITDB_DATABASE: sharelatex + extra_hosts: + # Required when using the automatic database setup for initializing the + # replica set. This override is not needed when running the setup after + # starting up mongo. + - mongo:127.0.0.1 postgres: image: postgres:10 diff --git a/services/history-v1/storage/index.js b/services/history-v1/storage/index.js index 7fd1d589ea..238cd12a38 100644 --- a/services/history-v1/storage/index.js +++ b/services/history-v1/storage/index.js @@ -15,3 +15,6 @@ exports.zipStore = require('./lib/zip_store') const { BlobStore, loadGlobalBlobs } = require('./lib/blob_store') exports.BlobStore = BlobStore exports.loadGlobalBlobs = loadGlobalBlobs + +const { InvalidChangeError } = require('./lib/errors') +exports.InvalidChangeError = InvalidChangeError diff --git a/services/history-v1/storage/lib/backupVerifier.mjs b/services/history-v1/storage/lib/backupVerifier.mjs index 55247b91d7..14b84cd320 100644 --- a/services/history-v1/storage/lib/backupVerifier.mjs +++ b/services/history-v1/storage/lib/backupVerifier.mjs @@ -1,14 +1,24 @@ // @ts-check -import config from 'config' import OError from '@overleaf/o-error' -import { backupPersistor, projectBlobsBucket } from './backupPersistor.mjs' -import { Blob } from 'overleaf-editor-core' -import { BlobStore, makeProjectKey } from './blob_store/index.js' +import chunkStore from '../lib/chunk_store/index.js' +import { + backupPersistor, + chunksBucket, + projectBlobsBucket, +} from './backupPersistor.mjs' +import { Blob, Chunk, History } from 'overleaf-editor-core' +import { BlobStore, GLOBAL_BLOBS, makeProjectKey } from './blob_store/index.js' import blobHash from './blob_hash.js' import { NotFoundError } from '@overleaf/object-persistor/src/Errors.js' +import logger from '@overleaf/logger' +import path from 'node:path' +import projectKey from './project_key.js' +import streams from './streams.js' +import objectPersistor from '@overleaf/object-persistor' +import { getEndDateForRPO } from '../../backupVerifier/utils.mjs' /** - * @typedef {import("@overleaf/object-persistor/src/PerProjectEncryptedS3Persistor").CachedPerProjectEncryptedS3Persistor} CachedPerProjectEncryptedS3Persistor + * @typedef {import("@overleaf/object-persistor/src/PerProjectEncryptedS3Persistor.js").CachedPerProjectEncryptedS3Persistor} CachedPerProjectEncryptedS3Persistor */ /** @@ 
-20,13 +30,13 @@ export async function verifyBlob(historyId, hash) { } /** + * * @param {string} historyId - * @param {Array} hashes + * @return {Promise} */ -export async function verifyBlobs(historyId, hashes) { - let projectCache +async function getProjectPersistor(historyId) { try { - projectCache = await backupPersistor.forProjectRO( + return await backupPersistor.forProjectRO( projectBlobsBucket, makeProjectKey(historyId, '') ) @@ -36,16 +46,19 @@ export async function verifyBlobs(historyId, hashes) { } throw err } - await verifyBlobsWithCache(historyId, projectCache, hashes) } /** * @param {string} historyId - * @param {CachedPerProjectEncryptedS3Persistor} projectCache * @param {Array} hashes + * @param {CachedPerProjectEncryptedS3Persistor} [projectCache] */ -export async function verifyBlobsWithCache(historyId, projectCache, hashes) { +export async function verifyBlobs(historyId, hashes, projectCache) { if (hashes.length === 0) throw new Error('bug: empty hashes') + + if (!projectCache) { + projectCache = await getProjectPersistor(historyId) + } const blobStore = new BlobStore(historyId) for (const hash of hashes) { const path = makeProjectKey(historyId, hash) @@ -58,41 +71,146 @@ export async function verifyBlobsWithCache(historyId, projectCache, hashes) { }) } catch (err) { if (err instanceof NotFoundError) { - throw new BackupCorruptedError('missing blob') + throw new BackupCorruptedMissingBlobError('missing blob', { + path, + hash, + }) } throw err } const backupHash = await blobHash.fromStream(blob.getByteLength(), stream) if (backupHash !== hash) { - throw new BackupCorruptedError('hash mismatch for backed up blob', { - path, - hash, - backupHash, - }) + throw new BackupCorruptedInvalidBlobError( + 'hash mismatch for backed up blob', + { + path, + hash, + backupHash, + } + ) } } } -export class BackupCorruptedError extends OError {} - -export async function healthCheck() { - /** @type {Array} */ - const HEALTH_CHECK_BLOBS = JSON.parse(config.get('healthCheckBlobs')) - if (HEALTH_CHECK_BLOBS.length !== 2) { - throw new Error('expected 2 healthCheckBlobs') - } - if (!HEALTH_CHECK_BLOBS.some(path => path.split('/')[0].length === 24)) { - throw new Error('expected mongo id in healthCheckBlobs') - } - if (!HEALTH_CHECK_BLOBS.some(path => path.split('/')[0].length < 24)) { - throw new Error('expected postgres id in healthCheckBlobs') - } - if (HEALTH_CHECK_BLOBS.some(path => path.split('/')[1]?.length !== 40)) { - throw new Error('expected hash in healthCheckBlobs') - } - - for (const path of HEALTH_CHECK_BLOBS) { - const [historyId, hash] = path.split('/') - await verifyBlob(historyId, hash) +/** + * @param {string} historyId + * @param {Date} [endTimestamp] + */ +export async function verifyProjectWithErrorContext( + historyId, + endTimestamp = getEndDateForRPO() +) { + try { + await verifyProject(historyId, endTimestamp) + } catch (err) { + // @ts-ignore err is Error instance + throw OError.tag(err, 'verifyProject', { historyId, endTimestamp }) } } + +/** + * + * @param {string} historyId + * @param {number} startVersion + * @param {CachedPerProjectEncryptedS3Persistor} backupPersistorForProject + * @return {Promise} + */ +async function loadChunk(historyId, startVersion, backupPersistorForProject) { + const key = path.join( + projectKey.format(historyId), + projectKey.pad(startVersion) + ) + try { + const buf = await streams.gunzipStreamToBuffer( + await backupPersistorForProject.getObjectStream(chunksBucket, key) + ) + return JSON.parse(buf.toString('utf-8')) + } catch (err) 
{ + if (err instanceof objectPersistor.Errors.NotFoundError) { + throw new Chunk.NotPersistedError(historyId) + } + if (err instanceof Error) { + throw OError.tag(err, 'Failed to load chunk', { historyId, startVersion }) + } + throw err + } +} + +/** + * @param {string} historyId + * @param {Date} endTimestamp + */ +export async function verifyProject(historyId, endTimestamp) { + const backend = chunkStore.getBackend(historyId) + const [first, last] = await Promise.all([ + backend.getFirstChunkBeforeTimestamp(historyId, endTimestamp), + backend.getLastActiveChunkBeforeTimestamp(historyId, endTimestamp), + ]) + + const chunksRecordsToVerify = [ + { + chunkId: first.id, + chunkLabel: 'first', + }, + ] + if (first.startVersion !== last.startVersion) { + chunksRecordsToVerify.push({ + chunkId: last.id, + chunkLabel: 'last before RPO', + }) + } + + const projectCache = await getProjectPersistor(historyId) + + const chunks = await Promise.all( + chunksRecordsToVerify.map(async chunk => { + try { + return History.fromRaw( + await loadChunk(historyId, chunk.startVersion, projectCache) + ) + } catch (err) { + if (err instanceof Chunk.NotPersistedError) { + throw new BackupRPOViolationChunkNotBackedUpError( + 'BackupRPOviolation: chunk not backed up', + chunk + ) + } + throw err + } + }) + ) + const seenBlobs = new Set() + const blobsToVerify = [] + for (const chunk of chunks) { + /** @type {Set} */ + const chunkBlobs = new Set() + chunk.findBlobHashes(chunkBlobs) + let hasAddedBlobFromThisChunk = false + for (const blobHash of chunkBlobs) { + if (seenBlobs.has(blobHash)) continue // old blob + if (GLOBAL_BLOBS.has(blobHash)) continue // global blob + seenBlobs.add(blobHash) + if (!hasAddedBlobFromThisChunk) { + blobsToVerify.push(blobHash) + hasAddedBlobFromThisChunk = true + } + } + } + if (blobsToVerify.length === 0) { + logger.debug( + { + historyId, + chunksRecordsToVerify: chunksRecordsToVerify.map(c => c.chunkId), + }, + 'chunks contain no blobs to verify' + ) + return + } + await verifyBlobs(historyId, blobsToVerify, projectCache) +} + +export class BackupCorruptedError extends OError {} +export class BackupRPOViolationError extends OError {} +export class BackupCorruptedMissingBlobError extends BackupCorruptedError {} +export class BackupCorruptedInvalidBlobError extends BackupCorruptedError {} +export class BackupRPOViolationChunkNotBackedUpError extends OError {} diff --git a/services/history-v1/storage/lib/backup_store/index.js b/services/history-v1/storage/lib/backup_store/index.js index 37770c702f..da7944786a 100644 --- a/services/history-v1/storage/lib/backup_store/index.js +++ b/services/history-v1/storage/lib/backup_store/index.js @@ -3,8 +3,18 @@ const { projects, backedUpBlobs } = require('../mongodb') const OError = require('@overleaf/o-error') // List projects with pending backups older than the specified interval -function listPendingBackups(timeIntervalMs = 0) { +function listPendingBackups(timeIntervalMs = 0, limit = null) { const cutoffTime = new Date(Date.now() - timeIntervalMs) + const options = { + projection: { 'overleaf.backup.pendingChangeAt': 1 }, + sort: { 'overleaf.backup.pendingChangeAt': 1 }, + } + + // Apply limit if provided + if (limit) { + options.limit = limit + } + const cursor = projects.find( { 'overleaf.backup.pendingChangeAt': { @@ -12,10 +22,30 @@ function listPendingBackups(timeIntervalMs = 0) { $lt: cutoffTime, }, }, + options + ) + return cursor +} + +// List projects that have never been backed up and are older than the specified interval +function 
listUninitializedBackups(timeIntervalMs = 0, limit = null) { + const cutoffTimeInSeconds = (Date.now() - timeIntervalMs) / 1000 + const options = { + projection: { _id: 1 }, + sort: { _id: 1 }, + } + // Apply limit if provided + if (limit) { + options.limit = limit + } + const cursor = projects.find( { - projection: { 'overleaf.backup': 1, 'overleaf.history': 1 }, - sort: { 'overleaf.backup.pendingChangeAt': 1 }, - } + 'overleaf.backup.lastBackedUpVersion': null, + _id: { + $lt: ObjectId.createFromTime(cutoffTimeInSeconds), + }, + }, + options ) return cursor } @@ -176,6 +206,7 @@ module.exports = { updateCurrentMetadataIfNotSet, updatePendingChangeTimestamp, listPendingBackups, + listUninitializedBackups, getBackedUpBlobHashes, unsetBackedUpBlobHashes, } diff --git a/services/history-v1/storage/lib/chunk_store/index.js b/services/history-v1/storage/lib/chunk_store/index.js index e367df44bf..c1fbb9d607 100644 --- a/services/history-v1/storage/lib/chunk_store/index.js +++ b/services/history-v1/storage/lib/chunk_store/index.js @@ -155,15 +155,22 @@ async function loadAtTimestamp(projectId, timestamp) { * * @param {string} projectId * @param {Chunk} chunk + * @param {Date} [earliestChangeTimestamp] * @return {Promise.} for the chunkId of the inserted chunk */ -async function create(projectId, chunk) { +async function create(projectId, chunk, earliestChangeTimestamp) { assert.projectId(projectId, 'bad projectId') assert.instance(chunk, Chunk, 'bad chunk') + assert.maybe.date(earliestChangeTimestamp, 'bad timestamp') const backend = getBackend(projectId) const chunkId = await uploadChunk(projectId, chunk) - await backend.confirmCreate(projectId, chunk, chunkId) + await backend.confirmCreate( + projectId, + chunk, + chunkId, + earliestChangeTimestamp + ) } /** @@ -195,18 +202,31 @@ async function uploadChunk(projectId, chunk) { * @param {string} projectId * @param {number} oldEndVersion * @param {Chunk} newChunk + * @param {Date} [earliestChangeTimestamp] * @return {Promise} */ -async function update(projectId, oldEndVersion, newChunk) { +async function update( + projectId, + oldEndVersion, + newChunk, + earliestChangeTimestamp +) { assert.projectId(projectId, 'bad projectId') assert.integer(oldEndVersion, 'bad oldEndVersion') assert.instance(newChunk, Chunk, 'bad newChunk') + assert.maybe.date(earliestChangeTimestamp, 'bad timestamp') const backend = getBackend(projectId) const oldChunkId = await getChunkIdForVersion(projectId, oldEndVersion) const newChunkId = await uploadChunk(projectId, newChunk) - await backend.confirmUpdate(projectId, oldChunkId, newChunk, newChunkId) + await backend.confirmUpdate( + projectId, + oldChunkId, + newChunk, + newChunkId, + earliestChangeTimestamp + ) } /** diff --git a/services/history-v1/storage/lib/chunk_store/mongo.js b/services/history-v1/storage/lib/chunk_store/mongo.js index 6090c555bb..bb93679fec 100644 --- a/services/history-v1/storage/lib/chunk_store/mongo.js +++ b/services/history-v1/storage/lib/chunk_store/mongo.js @@ -54,6 +54,35 @@ async function getChunkForVersion(projectId, version) { return chunkFromRecord(record) } +/** + * Get the metadata for the chunk that contains the given version before the endTime. 
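This lookup, together with getLastActiveChunkBeforeTimestamp further down, exists for the backup verifier: verifyProject earlier in this patch fetches both records and only treats the last active chunk separately when it differs from the first. A condensed sketch of that call pattern (same chunkStore import as backupVerifier.mjs; a sketch, not code from the patch):

// Sketch of how the verifier consumes the two new backend lookups.
import chunkStore from '../lib/chunk_store/index.js'

async function chunkRecordsToVerify(historyId, endTimestamp) {
  const backend = chunkStore.getBackend(historyId)
  const [first, last] = await Promise.all([
    backend.getFirstChunkBeforeTimestamp(historyId, endTimestamp),
    backend.getLastActiveChunkBeforeTimestamp(historyId, endTimestamp),
  ])
  // The first chunk is always checked; the last active one only if distinct.
  return first.startVersion === last.startVersion ? [first] : [first, last]
}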
+ */ +async function getFirstChunkBeforeTimestamp(projectId, timestamp) { + assert.mongoId(projectId, 'bad projectId') + assert.date(timestamp, 'bad timestamp') + + const recordActive = await getChunkForVersion(projectId, 0) + if (recordActive && recordActive.endTimestamp <= timestamp) { + return recordActive + } + + // fallback to deleted chunk + const recordDeleted = await mongodb.chunks.findOne( + { + projectId: new ObjectId(projectId), + state: 'deleted', + startVersion: 0, + updatedAt: { $lte: timestamp }, // indexed for state=deleted + endTimestamp: { $lte: timestamp }, + }, + { sort: { updatedAt: -1 } } + ) + if (recordDeleted) { + return chunkFromRecord(recordDeleted) + } + throw new Chunk.BeforeTimestampNotFoundError(projectId, timestamp) +} + /** * Get the metadata for the chunk that contains the version that was current at * the given timestamp. @@ -86,6 +115,39 @@ async function getChunkForTimestamp(projectId, timestamp) { return chunkFromRecord(record) } +/** + * Get the metadata for the chunk that contains the version that was current before + * the given timestamp. + */ +async function getLastActiveChunkBeforeTimestamp(projectId, timestamp) { + assert.mongoId(projectId, 'bad projectId') + assert.date(timestamp, 'bad timestamp') + + const record = await mongodb.chunks.findOne( + { + projectId: new ObjectId(projectId), + state: 'active', + $or: [ + { + endTimestamp: { + $lte: timestamp, + }, + }, + { + endTimestamp: null, + }, + ], + }, + // We use the index on the startVersion for sorting records. This assumes + // that timestamps go up with each version. + { sort: { startVersion: -1 } } + ) + if (record == null) { + throw new Chunk.BeforeTimestampNotFoundError(projectId, timestamp) + } + return chunkFromRecord(record) +} + /** * Get all of a project's chunk ids */ @@ -137,7 +199,13 @@ async function insertPendingChunk(projectId, chunk) { /** * Record that a new chunk was created. */ -async function confirmCreate(projectId, chunk, chunkId, mongoOpts = {}) { +async function confirmCreate( + projectId, + chunk, + chunkId, + earliestChangeTimestamp, + mongoOpts = {} +) { assert.mongoId(projectId, 'bad projectId') assert.instance(chunk, Chunk, 'bad chunk') assert.mongoId(chunkId, 'bad chunkId') @@ -166,13 +234,23 @@ async function confirmCreate(projectId, chunk, chunkId, mongoOpts = {}) { if (result.matchedCount === 0) { throw new OError('pending chunk not found', { projectId, chunkId }) } - await updateProjectRecord(projectId, chunk, mongoOpts) + await updateProjectRecord( + projectId, + chunk, + earliestChangeTimestamp, + mongoOpts + ) } /** * Write the metadata to the project record */ -async function updateProjectRecord(projectId, chunk, mongoOpts = {}) { +async function updateProjectRecord( + projectId, + chunk, + earliestChangeTimestamp, + mongoOpts = {} +) { // record the end version against the project await mongodb.projects.updateOne( { @@ -189,7 +267,7 @@ async function updateProjectRecord(projectId, chunk, mongoOpts = {}) { // be cleared every time a backup is completed. $min: { 'overleaf.backup.pendingChangeAt': - chunk.getEndTimestamp() || new Date(), + earliestChangeTimestamp || chunk.getEndTimestamp() || new Date(), }, }, mongoOpts @@ -199,7 +277,13 @@ async function updateProjectRecord(projectId, chunk, mongoOpts = {}) { /** * Record that a chunk was replaced by a new one. 
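confirmCreate above now threads earliestChangeTimestamp through to updateProjectRecord, and confirmUpdate below does the same inside its transaction; the $min on overleaf.backup.pendingChangeAt therefore tracks the oldest change that has not been backed up yet, which is what the scheduler's --queue-pending query and the worker's replication-lag metric later in this patch read. A minimal sketch of how the marker value is chosen (the same expression as in updateProjectRecord, not new behaviour):

// Sketch only: prefer the earliest change in the flush, fall back to the
// chunk's end timestamp, then to "now". $min keeps whichever stored value is
// oldest, so the field measures the longest-waiting pending change.
function pendingChangeMarker(earliestChangeTimestamp, chunk) {
  return earliestChangeTimestamp || chunk.getEndTimestamp() || new Date()
}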
*/ -async function confirmUpdate(projectId, oldChunkId, newChunk, newChunkId) { +async function confirmUpdate( + projectId, + oldChunkId, + newChunk, + newChunkId, + earliestChangeTimestamp +) { assert.mongoId(projectId, 'bad projectId') assert.mongoId(oldChunkId, 'bad oldChunkId') assert.instance(newChunk, Chunk, 'bad newChunk') @@ -209,7 +293,13 @@ async function confirmUpdate(projectId, oldChunkId, newChunk, newChunkId) { try { await session.withTransaction(async () => { await deleteChunk(projectId, oldChunkId, { session }) - await confirmCreate(projectId, newChunk, newChunkId, { session }) + await confirmCreate( + projectId, + newChunk, + newChunkId, + earliestChangeTimestamp, + { session } + ) }) } finally { await session.endSession() @@ -310,6 +400,8 @@ function chunkFromRecord(record) { module.exports = { getLatestChunk, + getFirstChunkBeforeTimestamp, + getLastActiveChunkBeforeTimestamp, getChunkForVersion, getChunkForTimestamp, getProjectChunkIds, diff --git a/services/history-v1/storage/lib/chunk_store/postgres.js b/services/history-v1/storage/lib/chunk_store/postgres.js index 072f1f1ce6..0964b0ecca 100644 --- a/services/history-v1/storage/lib/chunk_store/postgres.js +++ b/services/history-v1/storage/lib/chunk_store/postgres.js @@ -46,6 +46,59 @@ async function getChunkForVersion(projectId, version) { return chunkFromRecord(record) } +/** + * Get the metadata for the chunk that contains the given version. + */ +async function getFirstChunkBeforeTimestamp(projectId, timestamp) { + assert.date(timestamp, 'bad timestamp') + + const recordActive = await getChunkForVersion(projectId, 0) + // projectId must be valid if getChunkForVersion did not throw + projectId = parseInt(projectId, 10) + if (recordActive && recordActive.endTimestamp <= timestamp) { + return recordActive + } + + // fallback to deleted chunk + const recordDeleted = await knex('old_chunks') + .where('doc_id', projectId) + .where('start_version', '=', 0) + .where('end_timestamp', '<=', timestamp) + .orderBy('end_version', 'desc') + .first() + if (recordDeleted) { + return chunkFromRecord(recordDeleted) + } + throw new Chunk.BeforeTimestampNotFoundError(projectId, timestamp) +} + +/** + * Get the metadata for the chunk that contains the version that was current at + * the given timestamp. + */ +async function getLastActiveChunkBeforeTimestamp(projectId, timestamp) { + assert.date(timestamp, 'bad timestamp') + assert.postgresId(projectId, 'bad projectId') + projectId = parseInt(projectId, 10) + + const query = knex('chunks') + .where('doc_id', projectId) + .where(function () { + this.where('end_timestamp', '<=', timestamp).orWhere( + 'end_timestamp', + null + ) + }) + .orderBy('end_version', 'desc', 'last') + + const record = await query.first() + + if (!record) { + throw new Chunk.BeforeTimestampNotFoundError(projectId, timestamp) + } + return chunkFromRecord(record) +} + /** * Get the metadata for the chunk that contains the version that was current at * the given timestamp. @@ -140,7 +193,12 @@ async function insertPendingChunk(projectId, chunk) { /** * Record that a new chunk was created. 
*/ -async function confirmCreate(projectId, chunk, chunkId) { +async function confirmCreate( + projectId, + chunk, + chunkId, + earliestChangeTimestamp +) { assert.postgresId(projectId, `bad projectId ${projectId}`) projectId = parseInt(projectId, 10) @@ -149,14 +207,20 @@ async function confirmCreate(projectId, chunk, chunkId) { _deletePendingChunk(tx, projectId, chunkId), _insertChunk(tx, projectId, chunk, chunkId), ]) - await updateProjectRecord(projectId, chunk) + await updateProjectRecord(projectId, chunk, earliestChangeTimestamp) }) } /** * Record that a chunk was replaced by a new one. */ -async function confirmUpdate(projectId, oldChunkId, newChunk, newChunkId) { +async function confirmUpdate( + projectId, + oldChunkId, + newChunk, + newChunkId, + earliestChangeTimestamp +) { assert.postgresId(projectId, `bad projectId ${projectId}`) projectId = parseInt(projectId, 10) @@ -166,7 +230,7 @@ async function confirmUpdate(projectId, oldChunkId, newChunk, newChunkId) { _deletePendingChunk(tx, projectId, newChunkId), _insertChunk(tx, projectId, newChunk, newChunkId), ]) - await updateProjectRecord(projectId, newChunk) + await updateProjectRecord(projectId, newChunk, earliestChangeTimestamp) }) } @@ -280,6 +344,8 @@ async function generateProjectId() { module.exports = { getLatestChunk, + getFirstChunkBeforeTimestamp, + getLastActiveChunkBeforeTimestamp, getChunkForVersion, getChunkForTimestamp, getProjectChunkIds, diff --git a/services/history-v1/storage/lib/persist_changes.js b/services/history-v1/storage/lib/persist_changes.js index 0b0d8db16b..8a848aa214 100644 --- a/services/history-v1/storage/lib/persist_changes.js +++ b/services/history-v1/storage/lib/persist_changes.js @@ -65,6 +65,9 @@ async function persistChanges(projectId, allChanges, limits, clientEndVersion) { const blobStore = new BlobStore(projectId) + const earliestChangeTimestamp = + allChanges.length > 0 ? allChanges[0].getTimestamp() : null + let currentChunk /** @@ -78,12 +81,6 @@ async function persistChanges(projectId, allChanges, limits, clientEndVersion) { let originalEndVersion let changesToPersist - /** - * It's only useful to log validation errors once per flush. When we enforce - * content hash validation, it will stop the flush right away anyway. 
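With this comment and the try/catch below it removed, validateContentHash failures are no longer logged and skipped: an InvalidChangeError now aborts the flush. A hedged sketch of how a caller could restore the tolerant behaviour for itself, using the InvalidChangeError that storage/index.js starts re-exporting earlier in this patch (the require path, and the assumption that persistChanges is exported alongside it, are illustrative only):

// Illustrative only: opt back into warn-and-continue at the call site.
const { InvalidChangeError, persistChanges } = require('../storage') // paths assumed
const logger = require('@overleaf/logger')

async function persistTolerantly(projectId, changes, limits, clientEndVersion) {
  try {
    return await persistChanges(projectId, changes, limits, clientEndVersion)
  } catch (err) {
    if (err instanceof InvalidChangeError) {
      logger.warn({ err, projectId }, 'content hash mismatch, changes rejected')
      return null
    }
    throw err
  }
}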
- */ - let validationErrorLogged = false - limits = limits || {} _.defaults(limits, { changeBucketMinutes: 60, @@ -128,22 +125,7 @@ async function persistChanges(projectId, allChanges, limits, clientEndVersion) { for (const operation of change.iterativelyApplyTo(currentSnapshot, { strict: true, })) { - try { - await validateContentHash(operation) - } catch (err) { - // Temporary: skip validation errors - if (err instanceof InvalidChangeError) { - if (!validationErrorLogged) { - logger.warn( - { err, projectId }, - 'content snapshot mismatch (ignored)' - ) - validationErrorLogged = true - } - } else { - throw err - } - } + await validateContentHash(operation) } chunk.pushChanges([change]) @@ -220,7 +202,12 @@ async function persistChanges(projectId, allChanges, limits, clientEndVersion) { checkElapsedTime(timer) - await chunkStore.update(projectId, originalEndVersion, currentChunk) + await chunkStore.update( + projectId, + originalEndVersion, + currentChunk, + earliestChangeTimestamp + ) } async function createNewChunksAsNeeded() { @@ -234,7 +221,7 @@ async function persistChanges(projectId, allChanges, limits, clientEndVersion) { if (changesPushed) { checkElapsedTime(timer) currentChunk = chunk - await chunkStore.create(projectId, chunk) + await chunkStore.create(projectId, chunk, earliestChangeTimestamp) } else { throw new Error('failed to fill empty chunk') } diff --git a/services/history-v1/storage/scripts/backup.mjs b/services/history-v1/storage/scripts/backup.mjs index f5bbe0bc70..474192dc74 100644 --- a/services/history-v1/storage/scripts/backup.mjs +++ b/services/history-v1/storage/scripts/backup.mjs @@ -2,8 +2,12 @@ import logger from '@overleaf/logger' import commandLineArgs from 'command-line-args' -import { History } from 'overleaf-editor-core' -import { getProjectChunks, loadLatestRaw } from '../lib/chunk_store/index.js' +import { Chunk, History, Snapshot } from 'overleaf-editor-core' +import { + getProjectChunks, + loadLatestRaw, + create, +} from '../lib/chunk_store/index.js' import { client } from '../lib/mongodb.js' import knex from '../lib/knex.js' import { historyStore } from '../lib/history_store.js' @@ -30,7 +34,7 @@ import { projectBlobsBucket, } from '../lib/backupPersistor.mjs' import { backupGenerator } from '../lib/backupGenerator.mjs' -import { promises as fs } from 'node:fs' +import { promises as fs, createWriteStream } from 'node:fs' import os from 'node:os' import path from 'node:path' import projectKey from '../lib/project_key.js' @@ -89,7 +93,7 @@ process.on('SIGTERM', handleSignal) function handleSignal() { gracefulShutdownInitiated = true - console.warn('graceful shutdown initiated, draining queue') + logger.info({}, 'graceful shutdown initiated, draining queue') } async function retry(fn, times, delayMs) { @@ -321,12 +325,18 @@ const optionDefinitions = [ description: 'Time interval in seconds for pending backups (default: 3600)', defaultValue: 3600, }, + { + name: 'fix', + type: Number, + description: 'Fix projects without chunks', + }, { name: 'init', alias: 'I', type: Boolean, description: 'Initialize backups for all projects.', }, + { name: 'output', alias: 'o', type: String, description: 'Output file' }, { name: 'start-date', type: String, @@ -366,6 +376,7 @@ function handleOptions() { !options.list && !options.pending && !options.init && + !(options.fix >= 0) && !(options.compare && options['start-date'] && options['end-date']) if (projectIdRequired && !options.projectId) { @@ -680,19 +691,68 @@ function convertToISODate(dateStr) { return new 
Date(dateStr + 'T00:00:00.000Z').toISOString() } +export async function fixProjectsWithoutChunks(options) { + const limit = options.fix || 1 + const query = { + 'overleaf.history.id': { $exists: true }, + 'overleaf.backup.lastBackedUpVersion': { $in: [null] }, + } + const cursor = client + .db() + .collection('projects') + .find(query, { + projection: { _id: 1, 'overleaf.history.id': 1 }, + readPreference: READ_PREFERENCE_SECONDARY, + }) + .limit(limit) + for await (const project of cursor) { + const historyId = project.overleaf.history.id.toString() + const chunks = await getProjectChunks(historyId) + if (chunks.length > 0) { + continue + } + if (DRY_RUN) { + console.log( + 'Would create new chunk for Project ID:', + project._id.toHexString(), + 'History ID:', + historyId, + 'Chunks:', + chunks + ) + } else { + console.log( + 'Creating new chunk for Project ID:', + project._id.toHexString(), + 'History ID:', + historyId, + 'Chunks:', + chunks + ) + const snapshot = new Snapshot() + const history = new History(snapshot, []) + const chunk = new Chunk(history, 0) + await create(historyId, chunk) + const newChunks = await getProjectChunks(historyId) + console.log('New chunk:', newChunks) + } + } +} + export async function initializeProjects(options) { await ensureGlobalBlobsLoaded() let totalErrors = 0 let totalProjects = 0 const query = { - 'overleaf.history.id': { $exists: true }, - 'overleaf.backup.lastBackedUpVersion': { $exists: false }, - 'overleaf.backup.pendingChangeAt': { $exists: false }, - _id: { + 'overleaf.backup.lastBackedUpVersion': { $in: [null] }, + } + + if (options['start-date'] && options['end-date']) { + query._id = { $gte: objectIdFromInput(convertToISODate(options['start-date'])), $lt: objectIdFromInput(convertToISODate(options['end-date'])), - }, + } } const cursor = client @@ -703,6 +763,18 @@ export async function initializeProjects(options) { readPreference: READ_PREFERENCE_SECONDARY, }) + if (options.output) { + console.log("Writing project IDs to file: '" + options.output + "'") + const output = createWriteStream(options.output) + for await (const project of cursor) { + output.write(project._id.toHexString() + '\n') + totalProjects++ + } + output.end() + console.log('Wrote ' + totalProjects + ' project IDs to file') + return + } + for await (const project of cursor) { if (gracefulShutdownInitiated) { console.warn('graceful shutdown: stopping project initialization') @@ -969,11 +1041,12 @@ async function main() { const options = handleOptions() await ensureGlobalBlobsLoaded() const projectId = options.projectId - if (options.status) { await displayBackupStatus(projectId) } else if (options.list) { await displayPendingBackups(options) + } else if (options.fix !== undefined) { + await fixProjectsWithoutChunks(options) } else if (options.pending) { await backupPendingProjects(options) } else if (options.init) { diff --git a/services/history-v1/storage/scripts/backup_blob.mjs b/services/history-v1/storage/scripts/backup_blob.mjs new file mode 100644 index 0000000000..2a777d0074 --- /dev/null +++ b/services/history-v1/storage/scripts/backup_blob.mjs @@ -0,0 +1,171 @@ +// @ts-check +import commandLineArgs from 'command-line-args' +import { backupBlob, downloadBlobToDir } from '../lib/backupBlob.mjs' +import withTmpDir from '../../api/controllers/with_tmp_dir.js' +import { + BlobStore, + GLOBAL_BLOBS, + loadGlobalBlobs, +} from '../lib/blob_store/index.js' +import assert from '../lib/assert.js' +import knex from '../lib/knex.js' +import { client } from '../lib/mongodb.js' 
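// Input contract for this script, as implemented by parseCSVRow and
// initialiseJobs below: either pass --historyId and --hash directly, or pass
// --input pointing at a CSV with one "historyId,hash" pair per line, e.g.
//   000000000000000000000042,98d5521fe746bc2d11761edab5d0829bee286009
// CSV rows whose hash is a global blob, and rows that fail validation, are
// logged and skipped rather than aborting the run.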
+import { setTimeout } from 'node:timers/promises' +import fs from 'node:fs' + +await loadGlobalBlobs() + +/** + * Gracefully shutdown the process + * @return {Promise} + */ +async function gracefulShutdown() { + console.log('Gracefully shutting down') + await knex.destroy() + await client.close() + await setTimeout(100) + process.exit() +} + +/** + * + * @param {string} row + * @return {BackupBlobJob} + */ +function parseCSVRow(row) { + const [historyId, hash] = row.split(',') + validateBackedUpBlobJob({ historyId, hash }) + return { historyId, hash } +} + +/** + * + * @param {BackupBlobJob} job + */ +function validateBackedUpBlobJob(job) { + assert.projectId(job.historyId) + assert.blobHash(job.hash) +} + +/** + * + * @param {string} path + * @return {Promise>} + */ +async function readCSV(path) { + let fh + /** @type {Array} */ + const rows = [] + try { + fh = await fs.promises.open(path, 'r') + } catch (error) { + console.error(`Could not open file: ${error}`) + throw error + } + for await (const line of fh.readLines()) { + try { + const row = parseCSVRow(line) + if (GLOBAL_BLOBS.has(row.hash)) { + console.log(`Skipping global blob: ${line}`) + continue + } + rows.push(row) + } catch (error) { + console.error(error instanceof Error ? error.message : error) + console.log(`Skipping invalid row: ${line}`) + } + } + return rows +} + +/** + * @typedef {Object} BackupBlobJob + * @property {string} hash + * @property {string} historyId + */ + +/** + * @param {Object} options + * @property {string} [options.historyId] + * @property {string} [options.hash] + * @property {string} [options.input] + * @return {Promise>} + */ +async function initialiseJobs({ historyId, hash, input }) { + if (input) { + return await readCSV(input) + } + + if (!historyId) { + console.error('historyId is required') + process.exitCode = 1 + await gracefulShutdown() + } + + if (!hash) { + console.error('hash is required') + process.exitCode = 1 + await gracefulShutdown() + } + + validateBackedUpBlobJob({ historyId, hash }) + + if (GLOBAL_BLOBS.has(hash)) { + console.error(`Blob ${hash} is a global blob; not backing up`) + process.exitCode = 1 + await gracefulShutdown() + } + return [{ hash, historyId }] +} + +/** + * + * @param {string} historyId + * @param {string} hash + * @return {Promise} + */ +export async function downloadAndBackupBlob(historyId, hash) { + const blobStore = new BlobStore(historyId) + const blob = await blobStore.getBlob(hash) + if (!blob) { + throw new Error(`Blob ${hash} could not be loaded`) + } + await withTmpDir(`blob-${hash}`, async tmpDir => { + const filePath = await downloadBlobToDir(historyId, blob, tmpDir) + console.log(`Downloaded blob ${hash} to ${filePath}`) + await backupBlob(historyId, blob, filePath) + console.log('Backed up blob') + }) +} + +let jobs + +const options = commandLineArgs([ + { name: 'historyId', type: String }, + { name: 'hash', type: String }, + { name: 'input', type: String }, +]) + +try { + jobs = await initialiseJobs(options) +} catch (error) { + console.error(error) + await gracefulShutdown() +} + +if (!Array.isArray(jobs)) { + // This is mostly to satisfy typescript + process.exitCode = 1 + await gracefulShutdown() + process.exit(1) +} + +for (const { historyId, hash } of jobs) { + try { + await downloadAndBackupBlob(historyId, hash) + } catch (error) { + console.error(error) + process.exitCode = 1 + } +} +await gracefulShutdown() diff --git a/services/history-v1/storage/scripts/backup_sample.mjs b/services/history-v1/storage/scripts/backup_sample.mjs index 
06a2e4a375..35ee1e93f8 100644 --- a/services/history-v1/storage/scripts/backup_sample.mjs +++ b/services/history-v1/storage/scripts/backup_sample.mjs @@ -32,34 +32,18 @@ async function takeSample(sampleSize) { [ { $sample: { size: sampleSize } }, { - $project: { - _id: 0, - hasBackup: { - $ifNull: ['$overleaf.backup.lastBackedUpVersion', false], - }, - }, + $match: { 'overleaf.backup.lastBackedUpVersion': { $exists: true } }, }, { - $group: { - _id: null, - totalSampled: { $sum: 1 }, - backedUp: { - $sum: { - $cond: ['$hasBackup', 1, 0], - }, - }, - }, + $count: 'total', }, ], { readPreference: READ_PREFERENCE_SECONDARY } ) .toArray() - if (results.length === 0) { - return { totalSampled: 0, backedUp: 0 } - } - - return results[0] + const count = results[0]?.total || 0 + return { totalSampled: sampleSize, backedUp: count } } function calculateStatistics( @@ -67,7 +51,7 @@ function calculateStatistics( cumulativeBackedUp, totalPopulation ) { - const proportion = cumulativeBackedUp / cumulativeSampled + const proportion = Math.max(1, cumulativeBackedUp) / cumulativeSampled // Standard error with finite population correction const fpc = Math.sqrt( diff --git a/services/history-v1/storage/scripts/backup_scheduler.mjs b/services/history-v1/storage/scripts/backup_scheduler.mjs index 44c6b7c4ec..164512701e 100644 --- a/services/history-v1/storage/scripts/backup_scheduler.mjs +++ b/services/history-v1/storage/scripts/backup_scheduler.mjs @@ -2,6 +2,11 @@ import Queue from 'bull' import config from 'config' import commandLineArgs from 'command-line-args' import logger from '@overleaf/logger' +import { + listPendingBackups, + listUninitializedBackups, + getBackupStatus, +} from '../lib/backup_store/index.js' logger.initialize('backup-queue') @@ -28,16 +33,100 @@ const optionDefinitions = [ description: 'Project IDs or date range in YYYY-MM-DD:YYYY-MM-DD format', }, { name: 'monitor', type: Boolean }, + { + name: 'queue-pending', + type: Number, + description: + 'Find projects with pending changes older than N seconds and add them to the queue', + }, + { + name: 'show-pending', + type: Number, + description: + 'Show count of pending projects older than N seconds without adding to queue', + }, + { + name: 'limit', + type: Number, + description: 'Limit the number of jobs to be added', + }, + { + name: 'interval', + type: Number, + description: 'Time in seconds to spread jobs over (default: 300)', + defaultValue: 300, + }, + { + name: 'backoff-delay', + type: Number, + description: + 'Backoff delay in milliseconds for failed jobs (default: 1000)', + defaultValue: 1000, + }, + { + name: 'attempts', + type: Number, + description: 'Number of retry attempts for failed jobs (default: 3)', + defaultValue: 3, + }, + { + name: 'warn-threshold', + type: Number, + description: 'Warn about any project exceeding this pending age', + defaultValue: 2 * 3600, // 2 hours + }, + { + name: 'verbose', + alias: 'v', + type: Boolean, + description: 'Show detailed information when used with --show-pending', + }, ] // Parse command line arguments const options = commandLineArgs(optionDefinitions) +const WARN_THRESHOLD = options['warn-threshold'] // Helper to validate date format function isValidDateFormat(dateStr) { return /^\d{4}-\d{2}-\d{2}$/.test(dateStr) } +// Helper to validate the pending time parameter +function validatePendingTime(option, value) { + if (typeof value !== 'number' || value <= 0) { + console.error( + `Error: --${option} requires a positive numeric TIME argument in seconds` + ) + console.error(`Example: 
--${option} 3600`) + process.exit(1) + } + return value +} + +// Helper to format the pending time display +function formatPendingTime(timestamp) { + const now = new Date() + const diffMs = now - timestamp + const seconds = Math.floor(diffMs / 1000) + return `${timestamp.toISOString()} (${seconds} seconds ago)` +} + +// Helper to add a job to the queue, checking for duplicates +async function addJobWithCheck(queue, data, options) { + const jobId = options.jobId + + // Check if the job already exists + const existingJob = await queue.getJob(jobId) + + if (existingJob) { + return { job: existingJob, added: false } + } else { + const job = await queue.add(data, options) + return { job, added: true } + } +} + // Setup queue event listeners function setupMonitoring() { console.log('Starting queue monitoring. Press Ctrl+C to exit.') @@ -99,15 +188,125 @@ async function addDateRangeJob(input) { ) return } - const job = await backupQueue.add( + + const jobId = `backup-${startDate}-to-${endDate}` + const { job, added } = await addJobWithCheck( + backupQueue, { startDate, endDate }, - { jobId: `backup-${startDate}-to-${endDate}` } + { jobId } ) + console.log( - `Added date range backup job: ${startDate} to ${endDate}, job ID: ${job.id}` + `${added ? 'Added' : 'Already exists'}: date range backup job: ${startDate} to ${endDate}, job ID: ${job.id}` ) } +// Helper to list pending and uninitialized backups +// This function combines the two cursors into a single generator +// to yield projects from both lists +async function* pendingCursor(timeIntervalMs, limit) { + for await (const project of listPendingBackups(timeIntervalMs, limit)) { + yield project + } + for await (const project of listUninitializedBackups(timeIntervalMs, limit)) { + yield project + } +} + +// Process pending projects with changes older than the specified seconds +async function processPendingProjects( + age, + showOnly, + limit, + verbose, + jobInterval, + jobOpts = {} +) { + const timeIntervalMs = age * 1000 + console.log( + `Finding projects with pending changes older than ${age} seconds${showOnly ? 
' (count only)' : ''}` + ) + + let count = 0 + let addedCount = 0 + let existingCount = 0 + // Pass the limit directly to MongoDB query for better performance + const changeTimes = [] + for await (const project of pendingCursor(timeIntervalMs, limit)) { + const projectId = project._id.toHexString() + const pendingAt = + project.overleaf?.backup?.pendingChangeAt || project._id.getTimestamp() + if (pendingAt) { + changeTimes.push(pendingAt) + const pendingAge = Math.floor((Date.now() - pendingAt.getTime()) / 1000) + if (pendingAge > WARN_THRESHOLD) { + const backupStatus = await getBackupStatus(projectId) + logger.warn( + { + projectId, + pendingAt, + pendingAge, + backupStatus, + warnThreshold: WARN_THRESHOLD, + }, + `pending change exceeds rpo warning threshold` + ) + } + } + if (showOnly && verbose) { + console.log( + `Project: ${projectId} (pending since: ${formatPendingTime(pendingAt)})` + ) + } else if (!showOnly) { + const delay = Math.floor(Math.random() * jobInterval * 1000) // add random delay to avoid all jobs running simultaneously + const { job, added } = await addJobWithCheck( + backupQueue, + { projectId, pendingChangeAt: pendingAt.getTime() }, + { ...jobOpts, delay, jobId: projectId } + ) + + if (added) { + if (verbose) { + console.log( + `Added job for project: ${projectId}, job ID: ${job.id} (pending since: ${formatPendingTime(pendingAt)})` + ) + } + addedCount++ + } else { + if (verbose) { + console.log( + `Job already exists for project: ${projectId}, job ID: ${job.id} (pending since: ${formatPendingTime(pendingAt)})` + ) + } + existingCount++ + } + } + + count++ + if (count % 1000 === 0) { + console.log( + `Processed ${count} projects`, + showOnly ? '' : `(${addedCount} added, ${existingCount} existing)` + ) + } + } + + const oldestChange = changeTimes.reduce((min, time) => + time < min ? time : min + ) + + if (showOnly) { + console.log( + `Found ${count} projects with pending changes (not added to queue)` + ) + } else { + console.log(`Found ${count} projects with pending changes:`) + console.log(` ${addedCount} jobs added to queue`) + console.log(` ${existingCount} jobs already existed in queue`) + console.log(` Oldest pending change: ${formatPendingTime(oldestChange)}`) + } +} + // Main execution block async function run() { const optionCount = [ @@ -115,6 +314,8 @@ async function run() { options.status, options.add, options.monitor, + options['queue-pending'] !== undefined, + options['show-pending'] !== undefined, ].filter(Boolean).length if (optionCount > 1) { console.error('Only one option can be specified') @@ -141,24 +342,65 @@ async function run() { await addDateRangeJob(input) } else { // Handle project ID format - const job = await backupQueue.add( + const { job, added } = await addJobWithCheck( + backupQueue, { projectId: input }, { jobId: input } ) - console.log(`Added job for project: ${input}, job ID: ${job.id}`) + console.log( + `${added ? 
'Added' : 'Already exists'}: job for project: ${input}, job ID: ${job.id}` + ) } } } else if (options.monitor) { setupMonitoring() + } else if (options['queue-pending'] !== undefined) { + const age = validatePendingTime('queue-pending', options['queue-pending']) + await processPendingProjects( + age, + false, + options.limit, + options.verbose, + options.interval, + { + attempts: options.attempts, + backoff: { + type: 'exponential', + delay: options['backoff-delay'], + }, + } + ) + } else if (options['show-pending'] !== undefined) { + const age = validatePendingTime('show-pending', options['show-pending']) + await processPendingProjects(age, true, options.limit, options.verbose) } else { console.log('Usage:') - console.log(' --clean Clean up completed and failed jobs') - console.log(' --status Show current job counts') - console.log(' --add [projectId] Add a job for the specified projectId') + console.log(' --clean Clean up completed and failed jobs') + console.log(' --status Show current job counts') + console.log(' --add [projectId] Add a job for the specified projectId') console.log( ' --add [YYYY-MM-DD:YYYY-MM-DD] Add a job for the specified date range' ) - console.log(' --monitor Monitor queue events') + console.log(' --monitor Monitor queue events') + console.log( + ' --queue-pending TIME Find projects with changes older than TIME seconds and add them to the queue' + ) + console.log( + ' --show-pending TIME Show count of pending projects older than TIME seconds' + ) + console.log(' --limit N Limit the number of jobs to be added') + console.log( + ' --interval TIME Time interval in seconds to spread jobs over' + ) + console.log( + ' --backoff-delay TIME Backoff delay in milliseconds for failed jobs (default: 1000)' + ) + console.log( + ' --attempts N Number of retry attempts for failed jobs (default: 3)' + ) + console.log( + ' --verbose, -v Show detailed information when used with --show-pending' + ) } } diff --git a/services/history-v1/storage/scripts/backup_worker.mjs b/services/history-v1/storage/scripts/backup_worker.mjs index f09381e3d3..1097bb04b9 100644 --- a/services/history-v1/storage/scripts/backup_worker.mjs +++ b/services/history-v1/storage/scripts/backup_worker.mjs @@ -9,8 +9,12 @@ import { } from './backup.mjs' const CONCURRENCY = 15 +const WARN_THRESHOLD = 2 * 60 * 60 * 1000 // warn if projects are older than this const redisOptions = config.get('redis.queue') -const TIME_BUCKETS = [10, 100, 500, 1000, 5000, 10000, 30000, 60000] +const JOB_TIME_BUCKETS = [10, 100, 500, 1000, 5000, 10000, 30000, 60000] // milliseconds +const LAG_TIME_BUCKETS_HRS = [ + 0.5, 0.6, 0.7, 0.8, 0.9, 1.0, 1.1, 1.2, 1.3, 1.4, 1.5, 1.75, 2, 3, 4, 5, 6, +] // hours // Configure backup settings to match worker concurrency configureBackup({ concurrency: 50, useSecondary: true }) @@ -27,12 +31,12 @@ const backupQueue = new Queue('backup', { // Log queue events backupQueue.on('active', job => { - logger.info({ job }, 'job is now active') + logger.debug({ job }, 'job is now active') }) backupQueue.on('completed', (job, result) => { metrics.inc('backup_worker_job', 1, { status: 'completed' }) - logger.info({ job, result }, 'job completed') + logger.debug({ job, result }, 'job completed') }) backupQueue.on('failed', (job, err) => { @@ -41,7 +45,7 @@ backupQueue.on('failed', (job, err) => { }) backupQueue.on('waiting', jobId => { - logger.info({ jobId }, 'job is waiting') + logger.debug({ jobId }, 'job is waiting') }) backupQueue.on('error', error => { @@ -69,7 +73,7 @@ backupQueue.process(CONCURRENCY, 
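// The processor below passes job.data through to runBackup so that
// pendingChangeAt (stamped onto the job by the scheduler when the project was
// queued) can be reported as replication lag, roughly
// (Date.now() - pendingChangeAt) converted to hours, via the
// backup_worker_replication_lag_in_hours histogram; projects that have been
// pending for longer than WARN_THRESHOLD (two hours) are also logged as
// warnings.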
async job => { const { projectId, startDate, endDate } = job.data if (projectId) { - return await runBackup(projectId) + return await runBackup(projectId, job.data, job) } else if (startDate && endDate) { return await runInit(startDate, endDate) } else { @@ -77,23 +81,40 @@ backupQueue.process(CONCURRENCY, async job => { } }) -async function runBackup(projectId) { +async function runBackup(projectId, data, job) { + const { pendingChangeAt } = data + // record the time it takes to run the backup job const timer = new metrics.Timer( 'backup_worker_job_duration', 1, {}, - TIME_BUCKETS + JOB_TIME_BUCKETS ) + const pendingAge = Date.now() - pendingChangeAt + if (pendingAge > WARN_THRESHOLD) { + logger.warn( + { projectId, pendingAge, job }, + 'project has been pending for a long time' + ) + } try { - logger.info({ projectId }, 'processing backup for project') - const { errors, completed } = await backupProject(projectId, {}) - metrics.inc('backup_worker_project', completed - errors, { + logger.debug({ projectId }, 'processing backup for project') + await backupProject(projectId, {}) + metrics.inc('backup_worker_project', 1, { status: 'success', }) - metrics.inc('backup_worker_project', errors, { status: 'failed' }) timer.done() - return `backup completed ${projectId} (${errors} failed in ${completed} projects)` + // record the replication lag (time from change to backup) + if (pendingChangeAt) { + metrics.histogram( + 'backup_worker_replication_lag_in_hours', + (Date.now() - pendingChangeAt) / (3600 * 1000), + LAG_TIME_BUCKETS_HRS + ) + } + return `backup completed ${projectId}` } catch (err) { + metrics.inc('backup_worker_project', 1, { status: 'failed' }) logger.error({ projectId, err }, 'backup failed') throw err // Re-throw to mark job as failed } diff --git a/services/history-v1/storage/scripts/verify_blob_backed_up_by_path_bulk.mjs b/services/history-v1/storage/scripts/verify_blob_backed_up_by_path_bulk.mjs new file mode 100644 index 0000000000..c699b61b13 --- /dev/null +++ b/services/history-v1/storage/scripts/verify_blob_backed_up_by_path_bulk.mjs @@ -0,0 +1,177 @@ +import fs from 'node:fs' +import { makeProjectKey } from '../lib/blob_store/index.js' +import { backupPersistor, projectBlobsBucket } from '../lib/backupPersistor.mjs' +import { NotFoundError } from '@overleaf/object-persistor/src/Errors.js' +import commandLineArgs from 'command-line-args' +import OError from '@overleaf/o-error' +import assert from '../lib/assert.js' +import { client, projects } from '../lib/mongodb.js' +import { ObjectId } from 'mongodb' +import { setTimeout } from 'node:timers/promises' + +const { input, verbose } = commandLineArgs([ + { name: 'input', type: String }, + { name: 'verbose', type: Boolean, defaultValue: false }, +]) + +function parseCSVRow(row) { + const [path] = row.split(',') + const pathSegments = path.split('/') + const historyId = `${pathSegments[0]}${pathSegments[1]}${pathSegments[2]}` + .split('') + .reverse() + .join('') + + return { historyId, path, hash: `${pathSegments[3]}${pathSegments[4]}` } +} + +async function* readCSV(path) { + let fh + try { + fh = await fs.promises.open(path, 'r') + } catch (error) { + console.error(`Could not open file: ${error}`) + throw error + } + for await (const line of fh.readLines()) { + try { + const row = parseCSVRow(line) + yield row + } catch (error) { + console.error(error instanceof Error ? 
error.message : error) + console.log(`Skipping invalid row: ${line}`) + } + } +} + +class MissingDEKError extends OError {} +class InvalidHistoryIdError extends OError {} +class MissingProjectError extends OError {} +class MissingBlobError extends OError {} + +async function getProjectPersistor(historyId) { + try { + return await backupPersistor.forProjectRO( + projectBlobsBucket, + makeProjectKey(historyId, '') + ) + } catch (err) { + if (err instanceof NotFoundError) { + throw new MissingDEKError('dek does not exist', { historyId }, err) + } + throw err + } +} + +async function checkBlobExists(path, historyId) { + const persistor = await getProjectPersistor(historyId) + return await persistor.getObjectSize(projectBlobsBucket, path) +} + +let total = 0 +const errors = { + invalidProjectId: 0, + notBackedUpProjectId: 0, + missingBlob: 0, + notInMongo: 0, + unknown: 0, +} + +const notInMongoProjectIds = new Set() +const notBackedUpProjectIds = new Set() + +let stopping = false + +process.on('SIGTERM', () => { + console.log('SIGTERM received') + stopping = true +}) + +process.on('SIGINT', () => { + console.log('SIGINT received') + stopping = true +}) + +/** + * + * @param {string} historyId + * @param {string} path + * @param {string} hash + * @return {Promise} + */ +async function checkPath(historyId, path, hash) { + try { + assert.mongoId(historyId) + } catch (error) { + throw InvalidHistoryIdError('invalid history id', { historyId }) + } + if (notInMongoProjectIds.has(historyId)) { + throw new MissingProjectError('project not in mongo', { historyId }) + } + if (notBackedUpProjectIds.has(historyId)) { + throw new MissingDEKError('project not backed up', { historyId }) + } + + const project = await projects.findOne({ _id: new ObjectId(historyId) }) + if (!project) { + notInMongoProjectIds.add(historyId) + throw new MissingProjectError('project not in mongo', { historyId }) + } + try { + await checkBlobExists(path, historyId) + } catch (error) { + if (error instanceof NotFoundError) { + throw new MissingBlobError('missing blob', { historyId, hash }) + } + if (error instanceof MissingDEKError) { + notBackedUpProjectIds.add(historyId) + } + throw error + } +} + +for await (const line of readCSV(input)) { + if (stopping) break + total++ + if (total % 10_000 === 0) { + console.log(`checked ${total}`) + } + const { historyId, path, hash } = line + try { + await checkPath(historyId, path, hash) + if (verbose) { + console.log(`✓ Project ${historyId} has ${hash} backed up`) + } + } catch (error) { + if (error instanceof InvalidHistoryIdError) { + errors.invalidProjectId++ + console.warn(`invalid historyId ${historyId}`) + continue + } else if (error instanceof MissingProjectError) { + errors.notInMongo++ + console.warn(`✗ project ${historyId} not in mongo`) + continue + } else if (error instanceof MissingDEKError) { + errors.notBackedUpProjectId++ + console.error(`✗ Project DEK ${historyId} not found`) + continue + } else if (error instanceof MissingBlobError) { + errors.missingBlob++ + console.error(`✗ missing blob ${hash} from project ${historyId}`) + continue + } + errors.unknown++ + console.error(error) + } +} + +console.log(`total checked: ${total}`) +console.log(`invalid project id: ${errors.invalidProjectId}`) +console.log(`not found in mongo: ${errors.notInMongo}`) +console.log(`missing blob: ${errors.missingBlob}`) +console.log(`project not backed up: ${errors.notBackedUpProjectId}`) +console.log(`unknown errors: ${errors.unknown}`) + +await client.close() +await setTimeout(100) 
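// For reference, this script is driven entirely by the two flags declared at
// the top and would typically be run from the service directory as
//   node storage/scripts/verify_blob_backed_up_by_path_bulk.mjs --input FILE [--verbose]
// where FILE is a CSV whose first column is a project-blob object path; the
// history id is recovered by reversing the first three path segments and the
// hash by joining the last two (see parseCSVRow above). The counters printed
// above summarise failures without stopping the scan.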
+process.exit() diff --git a/services/history-v1/storage/scripts/verify_project.mjs b/services/history-v1/storage/scripts/verify_project.mjs new file mode 100644 index 0000000000..6e1cb9de89 --- /dev/null +++ b/services/history-v1/storage/scripts/verify_project.mjs @@ -0,0 +1,33 @@ +import commandLineArgs from 'command-line-args' +import { verifyProjectWithErrorContext } from '../lib/backupVerifier.mjs' +import knex from '../lib/knex.js' +import { client } from '../lib/mongodb.js' +import { setTimeout } from 'node:timers/promises' +import { loadGlobalBlobs } from '../lib/blob_store/index.js' + +const { historyId } = commandLineArgs([{ name: 'historyId', type: String }]) + +async function gracefulShutdown(code = process.exitCode) { + await knex.destroy() + await client.close() + await setTimeout(1_000) + process.exit(code) +} + +if (!historyId) { + console.error('missing --historyId') + process.exitCode = 1 + await gracefulShutdown() +} + +await loadGlobalBlobs() + +try { + await verifyProjectWithErrorContext(historyId) + console.log('OK') +} catch (error) { + console.error('error verifying', error) + process.exitCode = 1 +} finally { + await gracefulShutdown() +} diff --git a/services/history-v1/storage/scripts/verify_sampled_projects.mjs b/services/history-v1/storage/scripts/verify_sampled_projects.mjs new file mode 100644 index 0000000000..e5b2d0c347 --- /dev/null +++ b/services/history-v1/storage/scripts/verify_sampled_projects.mjs @@ -0,0 +1,215 @@ +// @ts-check +import commandLineArgs from 'command-line-args' +import { + setWriteMetrics, + verifyProjectsCreatedInDateRange, + verifyRandomProjectSample, + verifyProjectsUpdatedInDateRange, +} from '../../backupVerifier/ProjectVerifier.mjs' +import knex from '../lib/knex.js' +import { client } from '../lib/mongodb.js' +import { setTimeout } from 'node:timers/promises' +import logger from '@overleaf/logger' +import { loadGlobalBlobs } from '../lib/blob_store/index.js' +import { getDatesBeforeRPO } from '../../backupVerifier/utils.mjs' +import { EventEmitter } from 'node:events' +import { mongodb } from '../index.js' + +logger.logger.level('fatal') + +const usageMessage = [ + 'Usage: node verify_sampled_projects.mjs [--startDate ] [--endDate ] [--nProjects ] [--verbose] [--usage] [--writeMetrics] [--concurrency ] [--strategy ]', + 'strategy: defaults to "range"; startDate and endDate are required for "range" strategy', +].join('\n') + +/** + * Gracefully shutdown the process + * @param code + * @return {Promise} + */ +async function gracefulShutdown(code = process.exitCode) { + await knex.destroy() + await client.close() + await setTimeout(1_000) + process.exit(code) +} + +const STATS = { + verifiable: 0, + unverifiable: 0, +} + +/** + * @typedef {Object} CLIOptions + * @property {(signal: EventEmitter) => Promise} projectVerifier + * @property {boolean} verbose + */ + +/** + * @typedef {import('../../backupVerifier/types.d.ts').VerificationJobStatus} VerificationJobStatus + */ + +/** + * + * @return {CLIOptions} + */ +function getOptions() { + const { + startDate, + endDate, + concurrency, + writeMetrics, + verbose, + nProjects, + strategy, + usage, + } = commandLineArgs([ + { name: 'startDate', type: String }, + { name: 'endDate', type: String }, + { name: 'concurrency', type: Number, defaultValue: 1 }, + { name: 'verbose', type: Boolean, defaultValue: false }, + { name: 'nProjects', type: Number, defaultValue: 10 }, + { name: 'usage', type: Boolean, defaultValue: false }, + { name: 'writeMetrics', type: Boolean, defaultValue: false }, + 
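// The strategy option declared next selects one of three samplers (see the
// switch in getOptions below):
//   range  (default) - verifyProjectsCreatedInDateRange between --startDate
//                      and --endDate, --nProjects per range, honouring
//                      --concurrency
//   random           - verifyRandomProjectSample of --nProjects projects
//   recent           - verifyProjectsUpdatedInDateRange over a three-hour
//                      window ending at the RPO horizon (getDatesBeforeRPO)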
{ name: 'strategy', type: String, defaultValue: 'range' }, + ]) + + if (usage) { + console.log(usageMessage) + process.exit(0) + } + + if (!['range', 'random', 'recent'].includes(strategy)) { + throw new Error(`Invalid strategy: ${strategy}`) + } + + setWriteMetrics(writeMetrics) + + switch (strategy) { + case 'random': + console.log('Verifying random projects') + return { + verbose, + projectVerifier: signal => verifyRandomProjectSample(nProjects, signal), + } + case 'recent': + return { + verbose, + projectVerifier: async signal => { + const { startDate, endDate } = getDatesBeforeRPO(3 * 3600) + return await verifyProjectsUpdatedInDateRange( + startDate, + endDate, + nProjects, + signal + ) + }, + } + case 'range': + default: { + if (!startDate || !endDate) { + throw new Error(usageMessage) + } + const start = Date.parse(startDate) + const end = Date.parse(endDate) + if (Number.isNaN(start)) { + throw new Error(`Invalid start date: ${startDate}`) + } + + if (Number.isNaN(end)) { + throw new Error(`Invalid end date: ${endDate}`) + } + if (verbose) { + console.log(`Verifying from ${startDate} to ${endDate}`) + console.log(`Concurrency: ${concurrency}`) + } + STATS.ranges = 0 + return { + projectVerifier: signal => + verifyProjectsCreatedInDateRange({ + startDate: new Date(start), + endDate: new Date(end), + projectsPerRange: nProjects, + concurrency, + signal, + }), + verbose, + } + } + } +} + +/** + * @type {CLIOptions} + */ +let options +try { + options = getOptions() +} catch (error) { + console.error(error) + process.exitCode = 1 + await gracefulShutdown(1) + process.exit() // just here so the type checker knows that the process will exit +} + +const { projectVerifier, verbose } = options + +if (verbose) { + logger.logger.level('debug') +} + +/** + * + * @param {Array} array + * @param {string} matchString + * @return {*} + */ +function sumStringInstances(array, matchString) { + return array.reduce((total, string) => { + return string === matchString ? 
total + 1 : total + }, 0) +} + +/** + * + * @param {VerificationJobStatus} stats + */ +function displayStats(stats) { + console.log(`Verified projects: ${stats.verified}`) + console.log(`Total projects sampled: ${stats.total}`) + if (stats.errorTypes.length > 0) { + console.log('Errors:') + for (const error of new Set(stats.errorTypes)) { + console.log(`${error}: ${sumStringInstances(stats.errorTypes, error)}`) + } + } +} + +const shutdownEmitter = new EventEmitter() + +shutdownEmitter.on('shutdown', async () => { + await gracefulShutdown() +}) + +process.on('SIGTERM', () => { + shutdownEmitter.emit('shutdown') +}) + +process.on('SIGINT', () => { + shutdownEmitter.emit('shutdown') +}) + +await loadGlobalBlobs() + +try { + const stats = await projectVerifier(shutdownEmitter) + displayStats(stats) + console.log(`completed`) +} catch (error) { + console.error(error) + console.log('completed with errors') + process.exitCode = 1 +} finally { + console.log('shutting down') + await gracefulShutdown() +} diff --git a/services/history-v1/test/acceptance/js/api/backupVerifier.test.mjs b/services/history-v1/test/acceptance/js/api/backupVerifier.test.mjs index 54a801a919..fe3a4d1591 100644 --- a/services/history-v1/test/acceptance/js/api/backupVerifier.test.mjs +++ b/services/history-v1/test/acceptance/js/api/backupVerifier.test.mjs @@ -6,23 +6,63 @@ import { expect } from 'chai' import testProjects from './support/test_projects.js' import { backupPersistor, + chunksBucket, projectBlobsBucket, } from '../../../../storage/lib/backupPersistor.mjs' import { BlobStore, makeProjectKey, } from '../../../../storage/lib/blob_store/index.js' -import Stream from 'stream' +import Stream from 'node:stream' import * as zlib from 'node:zlib' import { promisify } from 'node:util' import { execFile } from 'node:child_process' import { NotFoundError } from '@overleaf/object-persistor/src/Errors.js' +import { chunkStore } from '../../../../storage/index.js' +import { Change, File, Operation } from 'overleaf-editor-core' +import Crypto from 'node:crypto' +import path from 'node:path' +import projectKey from '../../../../storage/lib/project_key.js' +import { historyStore } from '../../../../storage/lib/history_store.js' /** * @typedef {import("node-fetch").Response} Response * @typedef {import("overleaf-editor-core").Blob} Blob */ +async function verifyProjectScript(historyId) { + try { + const result = await promisify(execFile)( + process.argv0, + ['storage/scripts/verify_project.mjs', `--historyId=${historyId}`], + { + encoding: 'utf-8', + timeout: 5_000, + env: { + ...process.env, + LOG_LEVEL: 'warn', + }, + } + ) + return { status: 0, stdout: result.stdout, stderr: result.stderr } + } catch (err) { + if ( + err && + typeof err === 'object' && + 'stdout' in err && + 'code' in err && + 'stderr' in err + ) { + return { + stdout: typeof err.stdout === 'string' ? err.stdout : '', + status: typeof err.code === 'number' ? err.code : -1, + stderr: typeof err.stdout === 'string' ? 
err.stderr : '', + } + } + throw err + } +} + /** * @param {string} historyId * @param {string} hash @@ -69,22 +109,84 @@ async function verifyBlobHTTP(historyId, hash) { ) } +async function backupChunk(historyId) { + const newChunk = await chunkStore.loadLatestRaw(historyId) + const { buffer: chunkBuffer } = await historyStore.loadRawWithBuffer( + historyId, + newChunk.id + ) + const md5 = Crypto.createHash('md5').update(chunkBuffer) + await backupPersistor.sendStream( + chunksBucket, + path.join( + projectKey.format(historyId), + projectKey.pad(newChunk.startVersion) + ), + Stream.Readable.from([chunkBuffer]), + { + contentType: 'application/json', + contentEncoding: 'gzip', + contentLength: chunkBuffer.byteLength, + sourceMd5: md5.digest('hex'), + } + ) +} + +const FIFTEEN_MINUTES_IN_MS = 900_000 + +async function addFileInNewChunk( + fileContents, + filePath, + historyId, + { creationDate = new Date() } +) { + const chunk = await chunkStore.loadLatest(historyId) + const operation = Operation.addFile( + `${historyId}.txt`, + File.fromString(fileContents) + ) + const changes = [new Change([operation], creationDate, [])] + chunk.pushChanges(changes) + await chunkStore.update(historyId, 0, chunk) +} + /** * @param {string} historyId + * @param {Object} [backup] * @return {Promise} */ -async function prepareProjectAndBlob(historyId) { +async function prepareProjectAndBlob( + historyId, + { shouldBackupBlob, shouldBackupChunk, shouldCreateChunk } = { + shouldBackupBlob: true, + shouldBackupChunk: true, + shouldCreateChunk: true, + } +) { await testProjects.createEmptyProject(historyId) const blobStore = new BlobStore(historyId) - const blob = await blobStore.putString(historyId) - const gzipped = zlib.gzipSync(Buffer.from(historyId)) - await backupPersistor.sendStream( - projectBlobsBucket, - makeProjectKey(historyId, blob.getHash()), - Stream.Readable.from([gzipped]), - { contentLength: gzipped.byteLength, contentEncoding: 'gzip' } - ) - await checkDEKExists(historyId) + const fileContents = historyId + const blob = await blobStore.putString(fileContents) + if (shouldCreateChunk) { + await addFileInNewChunk(fileContents, `${historyId}.txt`, historyId, { + creationDate: new Date(new Date().getTime() - FIFTEEN_MINUTES_IN_MS), + }) + } + + if (shouldBackupBlob) { + const gzipped = zlib.gzipSync(Buffer.from(historyId)) + await backupPersistor.sendStream( + projectBlobsBucket, + makeProjectKey(historyId, blob.getHash()), + Stream.Readable.from([gzipped]), + { contentLength: gzipped.byteLength, contentEncoding: 'gzip' } + ) + await checkDEKExists(historyId) + } + if (shouldCreateChunk && shouldBackupChunk) { + await backupChunk(historyId) + } + return blob.getHash() } @@ -123,6 +225,53 @@ describe('backupVerifier', function () { const response = await fetch(testServer.testUrl('/health_check')) expect(response.status).to.equal(200) }) + describe('storage/scripts/verify_project.mjs', function () { + describe('when the project is appropriately backed up', function () { + it('should return 0', async function () { + const response = await verifyProjectScript(historyIdPostgres) + expect(response.status).to.equal(0) + }) + }) + describe('when the project chunk is not backed up', function () { + let response + beforeEach(async function () { + await prepareProjectAndBlob('000000000000000000000043', { + shouldBackupChunk: false, + shouldBackupBlob: true, + shouldCreateChunk: true, + }) + response = await verifyProjectScript('000000000000000000000043') + }) + it('should return 1', async function () { + 
expect(response.status).to.equal(1) + }) + it('should emit an error message referring to a missing chunk', async function () { + const stderr = response.stderr + expect(stderr).to.include('BackupRPOViolationChunkNotBackedUpError') + }) + }) + describe('when a project blob is not backed up', function () { + let response + beforeEach(async function () { + await prepareProjectAndBlob('43', { + shouldBackupChunk: true, + shouldBackupBlob: false, + shouldCreateChunk: true, + }) + response = await verifyProjectScript('43') + }) + + it('should return 1', function () { + expect(response.status).to.equal(1) + }) + + it('includes a BackupCorruptedError in stderr', function () { + expect(response.stderr).to.include( + 'BackupCorruptedMissingBlobError: missing blob' + ) + }) + }) + }) describe('storage/scripts/verify_backup_blob.mjs', function () { it('throws and does not create DEK if missing', async function () { const historyId = '404' diff --git a/services/history-v1/test/acceptance/js/storage/backup.test.mjs b/services/history-v1/test/acceptance/js/storage/backup.test.mjs index 5c85d38057..83087a1384 100644 --- a/services/history-v1/test/acceptance/js/storage/backup.test.mjs +++ b/services/history-v1/test/acceptance/js/storage/backup.test.mjs @@ -201,7 +201,12 @@ describe('backup script', function () { textOperation: [newContentString.length, ' even more'], // Keep existing content, append ' even more' }) const additionalEditOp = Operation.editFile('main.tex', additionalTextOp) - const additionalChange = new Change([additionalEditOp], new Date(), []) + const firstTimestamp = new Date() + const additionalChange = new Change( + [additionalEditOp], + firstTimestamp, + [] + ) // add the nonbmp file const blobStore = new BlobStore(historyId) @@ -222,7 +227,12 @@ describe('backup script', function () { 'non_bmp.txt', File.fromHash(testFiles.NON_BMP_TXT_HASH) ) - const additionalChange2 = new Change([addNonBmpFileOp], new Date(), []) + const secondTimestamp = new Date() + const additionalChange2 = new Change( + [addNonBmpFileOp], + secondTimestamp, + [] + ) await persistChanges( historyId, @@ -242,10 +252,11 @@ describe('backup script', function () { expect(afterChangeResult.backupStatus.lastBackedUpAt) .to.be.an.instanceOf(Date) .and.to.deep.equal(result1.backupStatus.lastBackedUpAt) - // but it should update the pendingChangeAt timestamp + // but it should update the pendingChangeAt timestamp to the timestamp of the + // first change which modified the project expect(afterChangeResult.backupStatus.pendingChangeAt) .to.be.an.instanceOf(Date) - .and.to.be.greaterThan(result1.backupStatus.lastBackedUpAt) + .and.to.deep.equal(firstTimestamp) // Second backup const { stdout: stdout2 } = await runBackupScript([ @@ -410,12 +421,18 @@ describe('backup script', function () { }) describe('with complex project content', function () { + let beforeInitializationTimestamp + let afterInitializationTimestamp + beforeEach(async function () { // Create initial project await projectsCollection.insertOne(project) // Initialize project in chunk store + // bracket the initialisation with two timestamps to check the pendingChangeAt field + beforeInitializationTimestamp = new Date() await ChunkStore.initializeProject(historyId) + afterInitializationTimestamp = new Date() const blobStore = new BlobStore(historyId) @@ -528,6 +545,14 @@ describe('backup script', function () { ) }) + it('persistChanges should set the pendingChangeAt field to the time of snapshot initialisation', async function () { + const result = await 
getBackupStatus(projectId) + expect(result.backupStatus.pendingChangeAt).to.be.an.instanceOf(Date) + expect(result.backupStatus.pendingChangeAt) + .to.be.greaterThan(beforeInitializationTimestamp) + .and.to.be.lessThan(afterInitializationTimestamp) + }) + it('should backup all chunks and blobs from a complex project history', async function () { // Run backup script const { stdout } = await runBackupScript(['--projectId', projectId]) diff --git a/services/history-v1/test/acceptance/js/storage/chunk_store.test.js b/services/history-v1/test/acceptance/js/storage/chunk_store.test.js index 9228b2b58e..50341fdcb5 100644 --- a/services/history-v1/test/acceptance/js/storage/chunk_store.test.js +++ b/services/history-v1/test/acceptance/js/storage/chunk_store.test.js @@ -58,12 +58,42 @@ describe('chunkStore', function () { expect(chunk.getEndTimestamp()).not.to.exist }) + describe('creating a chunk', async function () { + const pendingChangeTimestamp = new Date('2014-01-01T00:00:00') + const lastChangeTimestamp = new Date('2015-01-01T00:00:00') + beforeEach(async function () { + const chunk = makeChunk( + [ + makeChange( + Operation.addFile('main.tex', File.fromString('abc')), + lastChangeTimestamp + ), + ], + 1 + ) + await chunkStore.create(projectId, chunk, pendingChangeTimestamp) + }) + it('creates a chunk and inserts the pending change timestamp', async function () { + const project = await projects.findOne({ + _id: new ObjectId(projectRecord.insertedId), + }) + expect(project.overleaf.history.currentEndVersion).to.equal(2) + expect(project.overleaf.history.currentEndTimestamp).to.deep.equal( + lastChangeTimestamp + ) + expect(project.overleaf.backup.pendingChangeAt).to.deep.equal( + pendingChangeTimestamp + ) + }) + }) + describe('adding and editing a blank file', function () { const testPathname = 'foo.txt' const testTextOperation = TextOperation.fromJSON({ textOperation: ['a'], }) // insert an a let lastChangeTimestamp + const pendingChangeTimestamp = new Date() beforeEach(async function () { const chunk = await chunkStore.loadLatest(projectId) @@ -74,7 +104,12 @@ describe('chunkStore', function () { ] lastChangeTimestamp = changes[1].getTimestamp() chunk.pushChanges(changes) - await chunkStore.update(projectId, oldEndVersion, chunk) + await chunkStore.update( + projectId, + oldEndVersion, + chunk, + pendingChangeTimestamp + ) }) it('records the correct metadata in db readOnly=false', async function () { @@ -132,13 +167,14 @@ describe('chunkStore', function () { lastChangeTimestamp ) expect(project.overleaf.backup.pendingChangeAt).to.deep.equal( - lastChangeTimestamp + pendingChangeTimestamp ) }) }) describe('multiple chunks', async function () { // Two chunks are 1 year apart + const pendingChangeTimestamp = new Date('2014-01-01T00:00:00') const firstChunkTimestamp = new Date('2015-01-01T00:00:00') const secondChunkTimestamp = new Date('2016-01-01T00:00:00') const thirdChunkTimestamp = new Date('2017-01-01T00:00:00') @@ -158,7 +194,12 @@ describe('chunkStore', function () { ], 0 ) - await chunkStore.update(projectId, 0, firstChunk) + await chunkStore.update( + projectId, + 0, + firstChunk, + pendingChangeTimestamp + ) firstChunk = await chunkStore.loadLatest(projectId) secondChunk = makeChunk( @@ -268,7 +309,7 @@ describe('chunkStore', function () { _id: new ObjectId(projectRecord.insertedId), }) expect(project.overleaf.backup.pendingChangeAt).to.deep.equal( - firstChunkTimestamp + pendingChangeTimestamp ) }) @@ -322,7 +363,7 @@ describe('chunkStore', function () { _id: new 
ObjectId(projectRecord.insertedId), }) expect(project.overleaf.backup.pendingChangeAt).to.deep.equal( - firstChunkTimestamp + pendingChangeTimestamp ) }) }) diff --git a/services/history-v1/test/acceptance/js/storage/persist_changes.test.js b/services/history-v1/test/acceptance/js/storage/persist_changes.test.js index 50eb505681..aa56dc8c2a 100644 --- a/services/history-v1/test/acceptance/js/storage/persist_changes.test.js +++ b/services/history-v1/test/acceptance/js/storage/persist_changes.test.js @@ -213,7 +213,7 @@ describe('persistChanges', function () { expect(result.numberOfChangesPersisted).to.equal(1) }) - it('acccepts a change with an invalid hash (only logs for now)', async function () { + it('rejects a change with an invalid hash', async function () { const limitsToPersistImmediately = { minChangeTimestamp: farFuture, maxChangeTimestamp: farFuture, @@ -235,13 +235,9 @@ describe('persistChanges', function () { ) const changes = [change] - const result = await persistChanges( - projectId, - changes, - limitsToPersistImmediately, - 0 - ) - expect(result.numberOfChangesPersisted).to.equal(1) + await expect( + persistChanges(projectId, changes, limitsToPersistImmediately, 0) + ).to.be.rejectedWith(storage.InvalidChangeError) }) }) }) diff --git a/services/history-v1/test/setup.js b/services/history-v1/test/setup.js index 20f891ceb6..38c1b283ad 100644 --- a/services/history-v1/test/setup.js +++ b/services/history-v1/test/setup.js @@ -10,10 +10,12 @@ require('mongodb').ObjectId.cacheHexString = true chai.use(chaiAsPromised) async function setupPostgresDatabase() { + this.timeout(60_000) await knex.migrate.latest() } async function setupMongoDatabase() { + this.timeout(60_000) await mongodb.db.collection('projectHistoryChunks').createIndexes([ { key: { projectId: 1, startVersion: 1 }, @@ -30,6 +32,7 @@ async function setupMongoDatabase() { } async function createGcsBuckets() { + this.timeout(60_000) for (const bucket of [ config.get('blobStore.globalBucket'), config.get('blobStore.projectBucket'), diff --git a/services/history-v1/tsconfig.json b/services/history-v1/tsconfig.json index f130ebba8f..0e20309d3d 100644 --- a/services/history-v1/tsconfig.json +++ b/services/history-v1/tsconfig.json @@ -15,4 +15,4 @@ "test/**/*", "types" ] -} \ No newline at end of file +} diff --git a/services/notifications/Makefile b/services/notifications/Makefile index 82b5a3839a..8ca3f983ff 100644 --- a/services/notifications/Makefile +++ b/services/notifications/Makefile @@ -116,13 +116,6 @@ test_acceptance_clean: $(DOCKER_COMPOSE_TEST_ACCEPTANCE) down -v -t 0 test_acceptance_pre_run: - $(DOCKER_COMPOSE_TEST_ACCEPTANCE) up -d mongo - $(DOCKER_COMPOSE_TEST_ACCEPTANCE) exec -T mongo sh -c ' \ - while ! 
mongosh --eval "db.version()" > /dev/null; do \ - echo "Waiting for Mongo..."; \ - sleep 1; \ - done; \ - mongosh --eval "rs.initiate({ _id: \"overleaf\", members: [ { _id: 0, host: \"mongo:27017\" } ] })"' ifneq (,$(wildcard test/acceptance/js/scripts/pre-run)) $(DOCKER_COMPOSE_TEST_ACCEPTANCE) run --rm test_acceptance test/acceptance/js/scripts/pre-run endif diff --git a/services/notifications/docker-compose.ci.yml b/services/notifications/docker-compose.ci.yml index 6f1a608534..51eb64d126 100644 --- a/services/notifications/docker-compose.ci.yml +++ b/services/notifications/docker-compose.ci.yml @@ -26,7 +26,7 @@ services: NODE_OPTIONS: "--unhandled-rejections=strict" depends_on: mongo: - condition: service_healthy + condition: service_started user: node command: npm run test:acceptance @@ -41,7 +41,12 @@ services: mongo: image: mongo:6.0.13 command: --replSet overleaf - healthcheck: - test: "mongosh --quiet localhost/test --eval 'quit(db.runCommand({ ping: 1 }).ok ? 0 : 1)'" - interval: 1s - retries: 20 + volumes: + - ../../bin/shared/mongodb-init-replica-set.js:/docker-entrypoint-initdb.d/mongodb-init-replica-set.js + environment: + MONGO_INITDB_DATABASE: sharelatex + extra_hosts: + # Required when using the automatic database setup for initializing the + # replica set. This override is not needed when running the setup after + # starting up mongo. + - mongo:127.0.0.1 diff --git a/services/notifications/docker-compose.yml b/services/notifications/docker-compose.yml index ba54a3d38e..c0902fee2d 100644 --- a/services/notifications/docker-compose.yml +++ b/services/notifications/docker-compose.yml @@ -38,14 +38,19 @@ services: user: node depends_on: mongo: - condition: service_healthy + condition: service_started command: npm run --silent test:acceptance mongo: image: mongo:6.0.13 command: --replSet overleaf - healthcheck: - test: "mongosh --quiet localhost/test --eval 'quit(db.runCommand({ ping: 1 }).ok ? 0 : 1)'" - interval: 1s - retries: 20 + volumes: + - ../../bin/shared/mongodb-init-replica-set.js:/docker-entrypoint-initdb.d/mongodb-init-replica-set.js + environment: + MONGO_INITDB_DATABASE: sharelatex + extra_hosts: + # Required when using the automatic database setup for initializing the + # replica set. This override is not needed when running the setup after + # starting up mongo. + - mongo:127.0.0.1 diff --git a/services/project-history/Makefile b/services/project-history/Makefile index 746c03fc86..5cde05ea46 100644 --- a/services/project-history/Makefile +++ b/services/project-history/Makefile @@ -116,13 +116,6 @@ test_acceptance_clean: $(DOCKER_COMPOSE_TEST_ACCEPTANCE) down -v -t 0 test_acceptance_pre_run: - $(DOCKER_COMPOSE_TEST_ACCEPTANCE) up -d mongo - $(DOCKER_COMPOSE_TEST_ACCEPTANCE) exec -T mongo sh -c ' \ - while ! 
mongosh --eval "db.version()" > /dev/null; do \ - echo "Waiting for Mongo..."; \ - sleep 1; \ - done; \ - mongosh --eval "rs.initiate({ _id: \"overleaf\", members: [ { _id: 0, host: \"mongo:27017\" } ] })"' ifneq (,$(wildcard test/acceptance/js/scripts/pre-run)) $(DOCKER_COMPOSE_TEST_ACCEPTANCE) run --rm test_acceptance test/acceptance/js/scripts/pre-run endif diff --git a/services/project-history/app/js/ErrorRecorder.js b/services/project-history/app/js/ErrorRecorder.js index 5f90d7b62a..648b53f569 100644 --- a/services/project-history/app/js/ErrorRecorder.js +++ b/services/project-history/app/js/ErrorRecorder.js @@ -1,54 +1,57 @@ +// @ts-check + import { callbackify } from 'node:util' import logger from '@overleaf/logger' import metrics from '@overleaf/metrics' +import OError from '@overleaf/o-error' import { db } from './mongodb.js' +/** + * @import { ProjectHistoryFailure } from './mongo-types' + */ + +/** + * @param {string} projectId + * @param {number} queueSize + * @param {Error} error + * @return {Promise} the failure record + */ async function record(projectId, queueSize, error) { - if (error != null) { - const errorRecord = { - queueSize, - error: error.toString(), - stack: error.stack, - ts: new Date(), - } - logger.debug( - { projectId, errorRecord }, - 'recording failed attempt to process updates' - ) - try { - await db.projectHistoryFailures.updateOne( - { project_id: projectId }, - { - $set: errorRecord, - $inc: { attempts: 1 }, - $push: { - history: { - $each: [errorRecord], - $position: 0, - $slice: 10, - }, - }, // only keep recent failures - }, - { upsert: true } - ) - } catch (mongoError) { - logger.error( - { projectId, mongoError }, - 'failed to change project statues in mongo' - ) - } - throw error - } else { - try { - await db.projectHistoryFailures.deleteOne({ project_id: projectId }) - } catch (mongoError) { - logger.error( - { projectId, mongoError }, - 'failed to change project statues in mongo' - ) - } - return queueSize + const errorRecord = { + queueSize, + error: error.toString(), + stack: error.stack ?? 
'', + ts: new Date(), } + logger.debug( + { projectId, errorRecord }, + 'recording failed attempt to process updates' + ) + const result = await db.projectHistoryFailures.findOneAndUpdate( + { project_id: projectId }, + { + $set: errorRecord, + $inc: { attempts: 1 }, + $push: { + history: { + $each: [errorRecord], + $position: 0, + // only keep recent failures + $slice: 10, + }, + }, + }, + { upsert: true, returnDocument: 'after', includeResultMetadata: true } + ) + if (result.value == null) { + // Since we upsert, the result should always have a value + throw new OError('no value returned when recording an error', { projectId }) + } + return result.value +} + +async function clearError(projectId) { + await db.projectHistoryFailures.deleteOne({ project_id: projectId }) } async function setForceDebug(projectId, state) { @@ -85,7 +88,6 @@ async function recordSyncStart(projectId) { /** * @param projectId - * @return {Promise<{error: string, forceDebug?: boolean}|null>} */ async function getFailureRecord(projectId) { return await db.projectHistoryFailures.findOne({ project_id: projectId }) @@ -238,6 +240,7 @@ const getFailureRecordCb = callbackify(getFailureRecord) const getFailuresCb = callbackify(getFailures) const getLastFailureCb = callbackify(getLastFailure) const recordCb = callbackify(record) +const clearErrorCb = callbackify(clearError) const recordSyncStartCb = callbackify(recordSyncStart) const setForceDebugCb = callbackify(setForceDebug) @@ -247,6 +250,7 @@ export { getLastFailureCb as getLastFailure, getFailuresCb as getFailures, recordCb as record, + clearErrorCb as clearError, recordSyncStartCb as recordSyncStart, setForceDebugCb as setForceDebug, } @@ -257,6 +261,7 @@ export const promises = { getLastFailure, getFailures, record, + clearError, recordSyncStart, setForceDebug, } diff --git a/services/project-history/app/js/HttpController.js b/services/project-history/app/js/HttpController.js index d69585c29e..766fb4a414 100644 --- a/services/project-history/app/js/HttpController.js +++ b/services/project-history/app/js/HttpController.js @@ -604,9 +604,7 @@ export function deleteProject(req, res, next) { if (err) { return next(err) } - // The third parameter to the following call is the error. Calling it - // with null will remove any failure record for this project. 
- ErrorRecorder.record(projectId, 0, null, err => { + ErrorRecorder.clearError(projectId, err => { if (err) { return next(err) } diff --git a/services/project-history/app/js/RedisManager.js b/services/project-history/app/js/RedisManager.js index fe17508452..2f79a10a91 100644 --- a/services/project-history/app/js/RedisManager.js +++ b/services/project-history/app/js/RedisManager.js @@ -298,6 +298,26 @@ async function getFirstOpTimestamp(projectId) { return firstOpTimestamp } +async function getFirstOpTimestamps(projectIds) { + const keys = projectIds.map(projectId => + Keys.projectHistoryFirstOpTimestamp({ project_id: projectId }) + ) + const results = await rclient.mget(keys) + const timestamps = results.map(result => { + // convert stored time back to a numeric timestamp + const timestamp = parseInt(result, 10) + + // check for invalid timestamp + if (isNaN(timestamp)) { + return null + } + + // convert numeric timestamp to a date object + return new Date(timestamp) + }) + return timestamps +} + async function clearFirstOpTimestamp(projectId) { const key = Keys.projectHistoryFirstOpTimestamp({ project_id: projectId }) await rclient.del(key) @@ -357,6 +377,7 @@ const getProjectIdsWithHistoryOpsCountCb = callbackify( ) const setFirstOpTimestampCb = callbackify(setFirstOpTimestamp) const getFirstOpTimestampCb = callbackify(getFirstOpTimestamp) +const getFirstOpTimestampsCb = callbackify(getFirstOpTimestamps) const clearFirstOpTimestampCb = callbackify(clearFirstOpTimestamp) const getProjectIdsWithFirstOpTimestampsCb = callbackify( getProjectIdsWithFirstOpTimestamps @@ -394,6 +415,7 @@ export { getProjectIdsWithHistoryOpsCountCb as getProjectIdsWithHistoryOpsCount, setFirstOpTimestampCb as setFirstOpTimestamp, getFirstOpTimestampCb as getFirstOpTimestamp, + getFirstOpTimestampsCb as getFirstOpTimestamps, clearFirstOpTimestampCb as clearFirstOpTimestamp, getProjectIdsWithFirstOpTimestampsCb as getProjectIdsWithFirstOpTimestamps, clearDanglingFirstOpTimestampCb as clearDanglingFirstOpTimestamp, @@ -413,6 +435,7 @@ export const promises = { getProjectIdsWithHistoryOpsCount, setFirstOpTimestamp, getFirstOpTimestamp, + getFirstOpTimestamps, clearFirstOpTimestamp, getProjectIdsWithFirstOpTimestamps, clearDanglingFirstOpTimestamp, diff --git a/services/project-history/app/js/RetryManager.js b/services/project-history/app/js/RetryManager.js index 4ae6ce22fc..b146da29f9 100644 --- a/services/project-history/app/js/RetryManager.js +++ b/services/project-history/app/js/RetryManager.js @@ -73,11 +73,11 @@ function isTemporaryFailure(failure) { return TEMPORARY_FAILURES.includes(failure.error) } -function isHardFailure(failure) { +export function isHardFailure(failure) { return HARD_FAILURES.includes(failure.error) } -function isFirstFailure(failure) { +export function isFirstFailure(failure) { return failure.attempts <= 1 } @@ -147,7 +147,7 @@ async function resyncProject(projectId, options = {}) { try { if (!/^[0-9a-f]{24}$/.test(projectId)) { logger.debug({ projectId }, 'clearing bad project id') - await ErrorRecorder.promises.record(projectId, 0, null) + await ErrorRecorder.promises.clearError(projectId) return } diff --git a/services/project-history/app/js/SnapshotManager.js b/services/project-history/app/js/SnapshotManager.js index f699e834cc..e735fd334b 100644 --- a/services/project-history/app/js/SnapshotManager.js +++ b/services/project-history/app/js/SnapshotManager.js @@ -73,7 +73,11 @@ async function getRangesSnapshot(projectId, version, pathname) { }) } if (!file.isEditable()) { - throw new 
Error('File is not editable') + // A binary file has no tracked changes or comments + return { + changes: [], + comments: [], + } } const historyId = await WebApiManager.promises.getHistoryId(projectId) await file.load('eager', HistoryStoreManager.getBlobStore(historyId)) diff --git a/services/project-history/app/js/SyncManager.js b/services/project-history/app/js/SyncManager.js index 271057cf25..ef8caf69eb 100644 --- a/services/project-history/app/js/SyncManager.js +++ b/services/project-history/app/js/SyncManager.js @@ -58,10 +58,8 @@ async function startResync(projectId, options = {}) { ) } catch (error) { // record error in starting sync ("sync ongoing") - try { + if (error instanceof Error) { await ErrorRecorder.promises.record(projectId, -1, error) - } catch (err) { - // swallow any error thrown by ErrorRecorder.record() } throw error } @@ -81,7 +79,9 @@ async function startHardResync(projectId, options = {}) { ) } catch (error) { // record error in starting sync ("sync ongoing") - await ErrorRecorder.promises.record(projectId, -1, error) + if (error instanceof Error) { + await ErrorRecorder.promises.record(projectId, -1, error) + } throw error } } diff --git a/services/project-history/app/js/UpdatesProcessor.js b/services/project-history/app/js/UpdatesProcessor.js index 44acbdb269..b52fac7af6 100644 --- a/services/project-history/app/js/UpdatesProcessor.js +++ b/services/project-history/app/js/UpdatesProcessor.js @@ -16,6 +16,7 @@ import * as SyncManager from './SyncManager.js' import * as Versions from './Versions.js' import * as Errors from './Errors.js' import * as Metrics from './Metrics.js' +import * as RetryManager from './RetryManager.js' import { Profiler } from './Profiler.js' const keys = Settings.redis.lock.key_schema @@ -84,11 +85,29 @@ export function startResyncAndProcessUpdatesUnderLock( }) }) }, - (error, queueSize) => { - if (error) { - OError.tag(error) + (flushError, queueSize) => { + if (flushError) { + OError.tag(flushError) + ErrorRecorder.record(projectId, queueSize, flushError, recordError => { + if (recordError) { + logger.error( + { err: recordError, projectId }, + 'failed to record error' + ) + } + callback(flushError) + }) + } else { + ErrorRecorder.clearError(projectId, clearError => { + if (clearError) { + logger.error( + { err: clearError, projectId }, + 'failed to clear error' + ) + } + callback() + }) } - ErrorRecorder.record(projectId, queueSize, error, callback) if (queueSize > 0) { const duration = (Date.now() - startTimeMs) / 1000 Metrics.historyFlushDurationSeconds.observe(duration) @@ -113,11 +132,44 @@ export function processUpdatesForProject(projectId, callback) { releaseLock ) }, - (error, queueSize) => { - if (error) { - OError.tag(error) + (flushError, queueSize) => { + if (flushError) { + OError.tag(flushError) + ErrorRecorder.record( + projectId, + queueSize, + flushError, + (recordError, failure) => { + if (recordError) { + logger.error( + { err: recordError, projectId }, + 'failed to record error' + ) + callback(recordError) + } else if ( + RetryManager.isFirstFailure(failure) && + RetryManager.isHardFailure(failure) + ) { + // This is the first failed flush since the last successful flush. + // Immediately attempt a resync. 
+ logger.warn({ projectId }, 'Flush failed, attempting resync') + resyncProject(projectId, callback) + } else { + callback(flushError) + } + } + ) + } else { + ErrorRecorder.clearError(projectId, clearError => { + if (clearError) { + logger.error( + { err: clearError, projectId }, + 'failed to clear error' + ) + } + callback() + }) } - ErrorRecorder.record(projectId, queueSize, error, callback) if (queueSize > 0) { const duration = (Date.now() - startTimeMs) / 1000 Metrics.historyFlushDurationSeconds.observe(duration) @@ -129,6 +181,57 @@ export function processUpdatesForProject(projectId, callback) { ) } +export function resyncProject(projectId, callback) { + SyncManager.startHardResync(projectId, {}, error => { + if (error != null) { + return callback(OError.tag(error)) + } + // Flush the sync operations; this will not loop indefinitely + // because any failure won't be the first failure anymore. + LockManager.runWithLock( + keys.projectHistoryLock({ project_id: projectId }), + (extendLock, releaseLock) => { + _countAndProcessUpdates( + projectId, + extendLock, + REDIS_READ_BATCH_SIZE, + releaseLock + ) + }, + (flushError, queueSize) => { + if (flushError) { + ErrorRecorder.record( + projectId, + queueSize, + flushError, + (recordError, failure) => { + if (recordError) { + logger.error( + { err: recordError, projectId }, + 'failed to record error' + ) + callback(OError.tag(recordError)) + } else { + callback(OError.tag(flushError)) + } + } + ) + } else { + ErrorRecorder.clearError(projectId, clearError => { + if (clearError) { + logger.error( + { err: clearError, projectId }, + 'failed to clear error' + ) + } + callback() + }) + } + } + ) + }) +} + export function processUpdatesForProjectUsingBisect( projectId, amountToProcess, @@ -144,21 +247,29 @@ export function processUpdatesForProjectUsingBisect( releaseLock ) }, - (error, queueSize) => { + (flushError, queueSize) => { if (amountToProcess === 0 || queueSize === 0) { // no further processing possible - if (error != null) { + if (flushError != null) { ErrorRecorder.record( projectId, queueSize, - OError.tag(error), - callback + OError.tag(flushError), + recordError => { + if (recordError) { + logger.error( + { err: recordError, projectId }, + 'failed to record error' + ) + } + callback(flushError) + } ) } else { callback() } } else { - if (error != null) { + if (flushError != null) { // decrease the batch size when we hit an error processUpdatesForProjectUsingBisect( projectId, @@ -187,13 +298,31 @@ export function processSingleUpdateForProject(projectId, callback) { ) => { _countAndProcessUpdates(projectId, extendLock, 1, releaseLock) }, - ( - error, - queueSize // no need to clear the flush marker when single stepping - ) => { + (flushError, queueSize) => { + // no need to clear the flush marker when single stepping // it will be cleared up on the next background flush if // the queue is empty - ErrorRecorder.record(projectId, queueSize, error, callback) + if (flushError) { + ErrorRecorder.record(projectId, queueSize, flushError, recordError => { + if (recordError) { + logger.error( + { err: recordError, projectId }, + 'failed to record error' + ) + } + callback(flushError) + }) + } else { + ErrorRecorder.clearError(projectId, clearError => { + if (clearError) { + logger.error( + { err: clearError, projectId }, + 'failed to clear error' + ) + } + callback() + }) + } } ) } diff --git a/services/project-history/app/js/mongo-types.ts b/services/project-history/app/js/mongo-types.ts new file mode 100644 index 0000000000..9894e653d2 --- 
/dev/null +++ b/services/project-history/app/js/mongo-types.ts @@ -0,0 +1,22 @@ +import { ObjectId } from 'mongodb-legacy' + +export type ProjectHistoryFailure = { + _id: ObjectId + project_id: string + attempts: number + resyncAttempts: number + resyncStartedAt: Date + requestCount?: number + history: (ErrorRecord | SyncStartRecord)[] +} & ErrorRecord + +type ErrorRecord = { + error: string + stack: string + queueSize: number + ts: Date +} + +type SyncStartRecord = { + resyncStartedAt: Date +} diff --git a/services/project-history/app/js/mongodb.js b/services/project-history/app/js/mongodb.js index 98fe2a8ffe..d639903ce2 100644 --- a/services/project-history/app/js/mongodb.js +++ b/services/project-history/app/js/mongodb.js @@ -3,6 +3,10 @@ import Settings from '@overleaf/settings' import mongodb from 'mongodb-legacy' const { MongoClient, ObjectId } = mongodb +/** + * @import { ProjectHistoryFailure } from './mongo-types.ts' + */ + export { ObjectId } export const mongoClient = new MongoClient( @@ -16,6 +20,7 @@ Metrics.mongodb.monitor(mongoClient) export const db = { deletedProjects: mongoDb.collection('deletedProjects'), projects: mongoDb.collection('projects'), + /** @type {mongodb.Collection} */ projectHistoryFailures: mongoDb.collection('projectHistoryFailures'), projectHistoryLabels: mongoDb.collection('projectHistoryLabels'), projectHistorySyncState: mongoDb.collection('projectHistorySyncState'), diff --git a/services/project-history/config/settings.defaults.cjs b/services/project-history/config/settings.defaults.cjs index 2338718203..9e5a39868a 100644 --- a/services/project-history/config/settings.defaults.cjs +++ b/services/project-history/config/settings.defaults.cjs @@ -41,6 +41,9 @@ module.exports = { 10 ), }, + project_history: { + url: `http://${process.env.PROJECT_HISTORY_HOST || '127.0.0.1'}:3054`, + }, }, redis: { lock: { diff --git a/services/project-history/docker-compose.ci.yml b/services/project-history/docker-compose.ci.yml index 332a9710ca..bdf10c9732 100644 --- a/services/project-history/docker-compose.ci.yml +++ b/services/project-history/docker-compose.ci.yml @@ -29,7 +29,7 @@ services: NODE_OPTIONS: "--unhandled-rejections=strict" depends_on: mongo: - condition: service_healthy + condition: service_started redis: condition: service_healthy user: node @@ -53,7 +53,12 @@ services: mongo: image: mongo:6.0.13 command: --replSet overleaf - healthcheck: - test: "mongosh --quiet localhost/test --eval 'quit(db.runCommand({ ping: 1 }).ok ? 0 : 1)'" - interval: 1s - retries: 20 + volumes: + - ../../bin/shared/mongodb-init-replica-set.js:/docker-entrypoint-initdb.d/mongodb-init-replica-set.js + environment: + MONGO_INITDB_DATABASE: sharelatex + extra_hosts: + # Required when using the automatic database setup for initializing the + # replica set. This override is not needed when running the setup after + # starting up mongo. + - mongo:127.0.0.1 diff --git a/services/project-history/docker-compose.yml b/services/project-history/docker-compose.yml index c5b410b4fd..39c7ed9009 100644 --- a/services/project-history/docker-compose.yml +++ b/services/project-history/docker-compose.yml @@ -41,7 +41,7 @@ services: user: node depends_on: mongo: - condition: service_healthy + condition: service_started redis: condition: service_healthy command: npm run --silent test:acceptance @@ -56,8 +56,13 @@ services: mongo: image: mongo:6.0.13 command: --replSet overleaf - healthcheck: - test: "mongosh --quiet localhost/test --eval 'quit(db.runCommand({ ping: 1 }).ok ? 
0 : 1)'" - interval: 1s - retries: 20 + volumes: + - ../../bin/shared/mongodb-init-replica-set.js:/docker-entrypoint-initdb.d/mongodb-init-replica-set.js + environment: + MONGO_INITDB_DATABASE: sharelatex + extra_hosts: + # Required when using the automatic database setup for initializing the + # replica set. This override is not needed when running the setup after + # starting up mongo. + - mongo:127.0.0.1 diff --git a/services/project-history/package.json b/services/project-history/package.json index 96cae3d726..2a54a807d3 100644 --- a/services/project-history/package.json +++ b/services/project-history/package.json @@ -37,6 +37,7 @@ "minimist": "^1.2.8", "mongodb-legacy": "6.1.3", "overleaf-editor-core": "*", + "p-queue": "^8.1.0", "request": "^2.88.2" }, "devDependencies": { diff --git a/services/project-history/scripts/flush_old.js b/services/project-history/scripts/flush_old.js new file mode 100644 index 0000000000..6dc140196e --- /dev/null +++ b/services/project-history/scripts/flush_old.js @@ -0,0 +1,191 @@ +#!/usr/bin/env node + +import Settings from '@overleaf/settings' +import minimist from 'minimist' +import logger from '@overleaf/logger' +import PQueue from 'p-queue' +import * as RedisManager from '../app/js/RedisManager.js' +import * as ErrorRecorder from '../app/js/ErrorRecorder.js' + +logger.logger.level('fatal') + +function usage() { + console.log(` +Usage: flush_old.js [options] + +Options: + -b, --batch-size Number of projects to process in each batch (default: 100) + -a, --max-age Maximum age of projects to keep (default: 3600) + -i, --interval Interval to spread the processing over (default: 300) + -c, --concurrency Number of concurrent jobs (default: 10) + -u, --buffer Buffer time in seconds to reserve at end (default: 15) + -n, --dry-run Show what would be done without making changes + -h, --help Show this help message + +Examples: + # Flush projects older than 24 hours with 5 concurrent jobs + flush_old.js --batch-size 100 --max-age 86400 -c 5 + + # Dry run to see what would be flushed + flush_old.js --max-age 3600 --dry-run +`) + process.exit(0) +} + +const argv = minimist(process.argv.slice(2), { + boolean: ['dry-run', 'help'], + alias: { + b: 'batch-size', + a: 'max-age', + i: 'interval', + c: 'concurrency', + n: 'dry-run', + u: 'buffer', + h: 'help', + }, + default: { + 'batch-size': 100, + 'max-age': 3600, + interval: 300, + concurrency: 10, + 'dry-run': false, + buffer: 15, + help: false, + }, +}) + +if (argv.help || process.argv.length === 2) { + usage() +} + +const batchSize = parseInt(argv['batch-size'], 10) +const maxAge = argv['max-age'] ? 
parseInt(argv['max-age'], 10) : null +const interval = parseInt(argv.interval, 10) || 300 +const concurrency = parseInt(argv.concurrency, 10) || 10 +const bufferTime = parseInt(argv.buffer, 10) || 15 +const dryRun = argv['dry-run'] + +/** + * Generator function that yields batches of items from an array + * @param {Array} array - The array to batch + * @param {number} size - The size of each batch + * @yields {Array} A batch of items + */ +function* getBatches(array, size) { + for (let i = 0; i < array.length; i += size) { + yield array.slice(i, i + size) + } +} + +let flushCount = 0 + +async function flushProject({ projectId, timestamp }) { + const url = `${Settings.apis.project_history.url}/project/${projectId}/flush` + if (dryRun) { + console.log(`[DRY RUN] would flush project ${projectId}`) + return + } + const response = await fetch(url, { + method: 'POST', + }) + flushCount++ + if (flushCount % 100 === 0) { + console.log('flushed', flushCount, 'projects, up to', timestamp) + } + if (!response.ok) { + throw new Error(`failed to flush project ${projectId}`) + } +} + +const SCRIPT_START_TIME = Date.now() // current time in milliseconds from start of script + +function olderThan(maxAge, timestamp) { + const age = (SCRIPT_START_TIME - timestamp) / 1000 + return age > maxAge +} + +async function main() { + const projectIds = await RedisManager.promises.getProjectIdsWithHistoryOps() + const failedProjects = await ErrorRecorder.promises.getFailedProjects() + const failedProjectIds = new Set(failedProjects.map(p => p.project_id)) + + const projectIdsToProcess = projectIds.filter(p => !failedProjectIds.has(p)) + console.log('number of projects with history ops', projectIds.length) + console.log( + 'number of failed projects to exclude', + projectIds.length - projectIdsToProcess.length + ) + const collectedProjects = [] + let nullCount = 0 + // iterate over the project ids in batches of doing a redis MGET to retrieve the first op timestamps + for (const batch of getBatches(projectIdsToProcess, batchSize)) { + const timestamps = await RedisManager.promises.getFirstOpTimestamps(batch) + const newProjects = batch + .map((projectId, idx) => { + return { projectId, timestamp: timestamps[idx] } + }) + .filter(({ timestamp }) => { + if (!timestamp) { + nullCount++ + } + return timestamp ? 
olderThan(maxAge, timestamp) : true + }) + collectedProjects.push(...newProjects) + } + // sort the collected projects by ascending timestamp + collectedProjects.sort((a, b) => a.timestamp - b.timestamp) + + console.log('number of projects to flush', collectedProjects.length) + console.log('number with null timestamps', nullCount) + + const elapsedTime = Math.floor((Date.now() - SCRIPT_START_TIME) / 1000) + console.log('elapsed time', elapsedTime, 'seconds, buffer time', bufferTime) + const remainingTime = Math.max(interval - elapsedTime - bufferTime, 0) + console.log('remaining time', remainingTime, 'seconds') + + const jobsPerSecond = Math.max( + Math.ceil(collectedProjects.length / Math.max(remainingTime, 60)), + 1 + ) + console.log('interval', interval, 'seconds') + console.log('jobs per second', jobsPerSecond) + console.log('concurrency', concurrency) + + const queue = new PQueue({ + concurrency, + interval: 1000, + intervalCap: jobsPerSecond, + }) + + const taskFns = collectedProjects.map(project => { + return async () => { + try { + await flushProject(project) + return { status: 'fulfilled', value: project } + } catch (error) { + return { status: 'rejected', reason: error, project } + } + } + }) + + const results = await queue.addAll(taskFns) + + console.log( + 'finished after', + Math.floor((Date.now() - SCRIPT_START_TIME) / 1000), + 'seconds' + ) + // count the number of successful and failed flushes + const success = results.filter(r => r.status === 'fulfilled').length + const failed = results.filter(r => r.status === 'rejected').length + console.log('completed', { success, failed }) +} + +main() + .then(() => { + process.exit(0) + }) + .catch(err => { + console.error(err) + process.exit(1) + }) diff --git a/services/project-history/test/unit/js/ErrorRecorder/ErrorRecorderTest.js b/services/project-history/test/unit/js/ErrorRecorder/ErrorRecorderTest.js index db6d767e58..79af1a8ce1 100644 --- a/services/project-history/test/unit/js/ErrorRecorder/ErrorRecorderTest.js +++ b/services/project-history/test/unit/js/ErrorRecorder/ErrorRecorderTest.js @@ -1,5 +1,4 @@ import sinon from 'sinon' -import { expect } from 'chai' import { strict as esmock } from 'esmock' import tk from 'timekeeper' @@ -12,7 +11,9 @@ describe('ErrorRecorder', function () { this.db = { projectHistoryFailures: { deleteOne: sinon.stub().resolves(), - updateOne: sinon.stub().resolves(), + findOneAndUpdate: sinon + .stub() + .resolves({ value: { failure: 'record' } }), }, } this.mongodb = { db: this.db } @@ -31,75 +32,65 @@ describe('ErrorRecorder', function () { }) describe('record', function () { - describe('with an error', function () { - beforeEach(async function () { - this.error = new Error('something bad') - await expect( - this.ErrorRecorder.promises.record( - this.project_id, - this.queueSize, - this.error - ) - ).to.be.rejected - }) - - it('should record the error to mongo', function () { - this.db.projectHistoryFailures.updateOne - .calledWithMatch( - { - project_id: this.project_id, - }, - { - $set: { - queueSize: this.queueSize, - error: this.error.toString(), - stack: this.error.stack, - ts: this.now, - }, - $inc: { - attempts: 1, - }, - $push: { - history: { - $each: [ - { - queueSize: this.queueSize, - error: this.error.toString(), - stack: this.error.stack, - ts: this.now, - }, - ], - $position: 0, - $slice: 10, - }, - }, - }, - { - upsert: true, - } - ) - .should.equal(true) - }) + beforeEach(async function () { + this.error = new Error('something bad') + await this.ErrorRecorder.promises.record( + 
this.project_id, + this.queueSize, + this.error + ) }) - describe('without an error', function () { - beforeEach(async function () { - this.result = await this.ErrorRecorder.promises.record( - this.project_id, - this.queueSize, - this.error + it('should record the error to mongo', function () { + this.db.projectHistoryFailures.findOneAndUpdate + .calledWithMatch( + { + project_id: this.project_id, + }, + { + $set: { + queueSize: this.queueSize, + error: this.error.toString(), + stack: this.error.stack, + ts: this.now, + }, + $inc: { + attempts: 1, + }, + $push: { + history: { + $each: [ + { + queueSize: this.queueSize, + error: this.error.toString(), + stack: this.error.stack, + ts: this.now, + }, + ], + $position: 0, + $slice: 10, + }, + }, + }, + { + upsert: true, + } ) - }) + .should.equal(true) + }) + }) - it('should remove any error from mongo', function () { - this.db.projectHistoryFailures.deleteOne - .calledWithMatch({ project_id: this.project_id }) - .should.equal(true) - }) + describe('clearError', function () { + beforeEach(async function () { + this.result = await this.ErrorRecorder.promises.clearError( + this.project_id + ) + }) - it('should return the queue size', function () { - expect(this.result).to.equal(this.queueSize) - }) + it('should remove any error from mongo', function () { + this.db.projectHistoryFailures.deleteOne + .calledWithMatch({ project_id: this.project_id }) + .should.equal(true) }) }) }) diff --git a/services/project-history/test/unit/js/HttpController/HttpControllerTests.js b/services/project-history/test/unit/js/HttpController/HttpControllerTests.js index 683fd9cea8..1b7adf0ef5 100644 --- a/services/project-history/test/unit/js/HttpController/HttpControllerTests.js +++ b/services/project-history/test/unit/js/HttpController/HttpControllerTests.js @@ -40,7 +40,7 @@ describe('HttpController', function () { clearCachedHistoryId: sinon.stub().yields(), } this.ErrorRecorder = { - record: sinon.stub().yields(), + clearError: sinon.stub().yields(), } this.LabelsManager = { createLabel: sinon.stub(), @@ -567,11 +567,7 @@ describe('HttpController', function () { }) it('should clear any failure record', function () { - this.ErrorRecorder.record.should.have.been.calledWith( - this.projectId, - 0, - null - ) + this.ErrorRecorder.clearError.should.have.been.calledWith(this.projectId) }) }) }) diff --git a/services/project-history/test/unit/js/UpdatesManager/UpdatesProcessorTests.js b/services/project-history/test/unit/js/UpdatesManager/UpdatesProcessorTests.js index adebf4d1b3..137169bfcf 100644 --- a/services/project-history/test/unit/js/UpdatesManager/UpdatesProcessorTests.js +++ b/services/project-history/test/unit/js/UpdatesManager/UpdatesProcessorTests.js @@ -1,17 +1,3 @@ -/* eslint-disable - mocha/no-nested-tests, - no-return-assign, - no-undef, - no-unused-vars, -*/ -// TODO: This file was created by bulk-decaffeinate. -// Fix any style issues and re-enable lint. 
-/* - * decaffeinate suggestions: - * DS102: Remove unnecessary code created because of implicit returns - * DS207: Consider shorter variations of null checks - * Full docs: https://github.com/decaffeinate/decaffeinate/blob/master/docs/suggestions.md - */ import sinon from 'sinon' import { expect } from 'chai' import { strict as esmock } from 'esmock' @@ -53,7 +39,11 @@ describe('UpdatesProcessor', function () { } this.ErrorRecorder = { getLastFailure: sinon.stub(), - record: sinon.stub().yields(), + record: sinon.stub().yields(null, { attempts: 1 }), + } + this.RetryManager = { + isFirstFailure: sinon.stub().returns(true), + isHardFailure: sinon.stub().returns(false), } this.Profiler = { Profiler: class { @@ -101,6 +91,7 @@ describe('UpdatesProcessor', function () { '../../../../app/js/SyncManager.js': this.SyncManager, '../../../../app/js/ErrorRecorder.js': this.ErrorRecorder, '../../../../app/js/Profiler.js': this.Profiler, + '../../../../app/js/RetryManager.js': this.RetryManager, '../../../../app/js/Errors.js': Errors, '@overleaf/metrics': this.Metrics, '@overleaf/settings': this.Settings, @@ -109,7 +100,7 @@ describe('UpdatesProcessor', function () { this.project_id = 'project-id-123' this.ol_project_id = 'ol-project-id-234' this.callback = sinon.stub() - return (this.temporary = 'temp-mock') + this.temporary = 'temp-mock' }) describe('processUpdatesForProject', function () { @@ -124,20 +115,20 @@ describe('UpdatesProcessor', function () { describe('when there is no existing error', function () { beforeEach(function (done) { this.ErrorRecorder.getLastFailure.yields() - return this.UpdatesProcessor.processUpdatesForProject( - this.project_id, - done - ) + this.UpdatesProcessor.processUpdatesForProject(this.project_id, err => { + expect(err).to.equal(this.error) + done() + }) }) it('processes updates', function () { - return this.UpdatesProcessor._mocks._countAndProcessUpdates + this.UpdatesProcessor._mocks._countAndProcessUpdates .calledWith(this.project_id) .should.equal(true) }) - return it('records errors', function () { - return this.ErrorRecorder.record + it('records errors', function () { + this.ErrorRecorder.record .calledWith(this.project_id, this.queueSize, this.error) .should.equal(true) }) @@ -154,14 +145,14 @@ describe('UpdatesProcessor', function () { this.WebApiManager.getHistoryId.yields(null) }) - return it('returns null', function (done) { - return this.UpdatesProcessor._getHistoryId( + it('returns null', function (done) { + this.UpdatesProcessor._getHistoryId( this.project_id, this.updates, (error, projectHistoryId) => { expect(error).to.be.null expect(projectHistoryId).to.be.null - return done() + done() } ) }) @@ -169,102 +160,102 @@ describe('UpdatesProcessor', function () { describe('projectHistoryId is not present in updates', function () { beforeEach(function () { - return (this.updates = [ + this.updates = [ { p: 0, i: 'a' }, { p: 1, i: 's' }, - ]) + ] }) it('returns the id from web', function (done) { this.projectHistoryId = '1234' this.WebApiManager.getHistoryId.yields(null, this.projectHistoryId) - return this.UpdatesProcessor._getHistoryId( + this.UpdatesProcessor._getHistoryId( this.project_id, this.updates, (error, projectHistoryId) => { expect(error).to.be.null expect(projectHistoryId).equal(this.projectHistoryId) - return done() + done() } ) }) - return it('returns errors from web', function (done) { + it('returns errors from web', function (done) { this.error = new Error('oh no!') this.WebApiManager.getHistoryId.yields(this.error) - return 
this.UpdatesProcessor._getHistoryId( + this.UpdatesProcessor._getHistoryId( this.project_id, this.updates, error => { expect(error).to.equal(this.error) - return done() + done() } ) }) }) - return describe('projectHistoryId is present in some updates', function () { + describe('projectHistoryId is present in some updates', function () { beforeEach(function () { this.projectHistoryId = '1234' - return (this.updates = [ + this.updates = [ { p: 0, i: 'a' }, { p: 1, i: 's', projectHistoryId: this.projectHistoryId }, { p: 2, i: 'd', projectHistoryId: this.projectHistoryId }, - ]) + ] }) it('returns an error if the id is inconsistent between updates', function (done) { this.updates[1].projectHistoryId = 2345 - return this.UpdatesProcessor._getHistoryId( + this.UpdatesProcessor._getHistoryId( this.project_id, this.updates, error => { expect(error.message).to.equal( 'inconsistent project history id between updates' ) - return done() + done() } ) }) it('returns an error if the id is inconsistent between updates and web', function (done) { this.WebApiManager.getHistoryId.yields(null, 2345) - return this.UpdatesProcessor._getHistoryId( + this.UpdatesProcessor._getHistoryId( this.project_id, this.updates, error => { expect(error.message).to.equal( 'inconsistent project history id between updates and web' ) - return done() + done() } ) }) it('returns the id if it is consistent between updates and web', function (done) { this.WebApiManager.getHistoryId.yields(null, this.projectHistoryId) - return this.UpdatesProcessor._getHistoryId( + this.UpdatesProcessor._getHistoryId( this.project_id, this.updates, (error, projectHistoryId) => { expect(error).to.be.null expect(projectHistoryId).equal(this.projectHistoryId) - return done() + done() } ) }) - return it('returns the id if it is consistent between updates but unavaiable in web', function (done) { + it('returns the id if it is consistent between updates but unavaiable in web', function (done) { this.WebApiManager.getHistoryId.yields(new Error('oh no!')) - return this.UpdatesProcessor._getHistoryId( + this.UpdatesProcessor._getHistoryId( this.project_id, this.updates, (error, projectHistoryId) => { expect(error).to.be.null expect(projectHistoryId).equal(this.projectHistoryId) - return done() + done() } ) }) @@ -332,21 +323,21 @@ describe('UpdatesProcessor', function () { }) it('should get the latest version id', function () { - return this.HistoryStoreManager.getMostRecentVersion.should.have.been.calledWith( + this.HistoryStoreManager.getMostRecentVersion.should.have.been.calledWith( this.project_id, this.ol_project_id ) }) it('should skip updates when resyncing', function () { - return this.SyncManager.skipUpdatesDuringSync.should.have.been.calledWith( + this.SyncManager.skipUpdatesDuringSync.should.have.been.calledWith( this.project_id, this.rawUpdates ) }) it('should expand sync updates', function () { - return this.SyncManager.expandSyncUpdates.should.have.been.calledWith( + this.SyncManager.expandSyncUpdates.should.have.been.calledWith( this.project_id, this.ol_project_id, this.mostRecentChunk, @@ -356,13 +347,13 @@ describe('UpdatesProcessor', function () { }) it('should compress updates', function () { - return this.UpdateCompressor.compressRawUpdates.should.have.been.calledWith( + this.UpdateCompressor.compressRawUpdates.should.have.been.calledWith( this.expandedUpdates ) }) it('should create any blobs for the updates', function () { - return this.BlobManager.createBlobsForUpdates.should.have.been.calledWith( + 
this.BlobManager.createBlobsForUpdates.should.have.been.calledWith( this.project_id, this.ol_project_id, this.compressedUpdates @@ -370,14 +361,14 @@ describe('UpdatesProcessor', function () { }) it('should convert the updates into a change requests', function () { - return this.UpdateTranslator.convertToChanges.should.have.been.calledWith( + this.UpdateTranslator.convertToChanges.should.have.been.calledWith( this.project_id, this.updatesWithBlobs ) }) it('should send the change request to the history store', function () { - return this.HistoryStoreManager.sendChanges.should.have.been.calledWith( + this.HistoryStoreManager.sendChanges.should.have.been.calledWith( this.project_id, this.ol_project_id, ['change'] @@ -385,14 +376,14 @@ describe('UpdatesProcessor', function () { }) it('should set the sync state', function () { - return this.SyncManager.setResyncState.should.have.been.calledWith( + this.SyncManager.setResyncState.should.have.been.calledWith( this.project_id, this.newSyncState ) }) it('should call the callback with no error', function () { - return this.callback.should.have.been.called + this.callback.should.have.been.called }) }) @@ -420,7 +411,7 @@ describe('UpdatesProcessor', function () { }) }) - return describe('_skipAlreadyAppliedUpdates', function () { + describe('_skipAlreadyAppliedUpdates', function () { before(function () { this.UpdateTranslator.isProjectStructureUpdate.callsFake( update => update.version != null @@ -436,16 +427,15 @@ describe('UpdatesProcessor', function () { { doc: 'id', v: 3 }, { doc: 'id', v: 4 }, ] - return (this.updatesToApply = - this.UpdatesProcessor._skipAlreadyAppliedUpdates( - this.project_id, - this.updates, - { docs: {} } - )) + this.updatesToApply = this.UpdatesProcessor._skipAlreadyAppliedUpdates( + this.project_id, + this.updates, + { docs: {} } + ) }) - return it('should return the original updates', function () { - return expect(this.updatesToApply).to.eql(this.updates) + it('should return the original updates', function () { + expect(this.updatesToApply).to.eql(this.updates) }) }) @@ -457,16 +447,15 @@ describe('UpdatesProcessor', function () { { version: 3 }, { version: 4 }, ] - return (this.updatesToApply = - this.UpdatesProcessor._skipAlreadyAppliedUpdates( - this.project_id, - this.updates, - { docs: {} } - )) + this.updatesToApply = this.UpdatesProcessor._skipAlreadyAppliedUpdates( + this.project_id, + this.updates, + { docs: {} } + ) }) - return it('should return the original updates', function () { - return expect(this.updatesToApply).to.eql(this.updates) + it('should return the original updates', function () { + expect(this.updatesToApply).to.eql(this.updates) }) }) @@ -486,16 +475,15 @@ describe('UpdatesProcessor', function () { { version: 3 }, { version: 4 }, ] - return (this.updatesToApply = - this.UpdatesProcessor._skipAlreadyAppliedUpdates( - this.project_id, - this.updates, - { docs: {} } - )) + this.updatesToApply = this.UpdatesProcessor._skipAlreadyAppliedUpdates( + this.project_id, + this.updates, + { docs: {} } + ) }) - return it('should return the original updates', function () { - return expect(this.updatesToApply).to.eql(this.updates) + it('should return the original updates', function () { + expect(this.updatesToApply).to.eql(this.updates) }) }) @@ -512,25 +500,25 @@ describe('UpdatesProcessor', function () { '_skipAlreadyAppliedUpdates' ) try { - return (this.updatesToApply = + this.updatesToApply = this.UpdatesProcessor._skipAlreadyAppliedUpdates( this.project_id, this.updates, { docs: {} } - )) + ) } catch 
(error) {} }) after(function () { - return this.skipFn.restore() + this.skipFn.restore() }) - return it('should throw an exception', function () { - return this.skipFn.threw('OpsOutOfOrderError').should.equal(true) + it('should throw an exception', function () { + this.skipFn.threw('OpsOutOfOrderError').should.equal(true) }) }) - return describe('with project ops out of order', function () { + describe('with project ops out of order', function () { before(function () { this.updates = [ { version: 1 }, @@ -543,21 +531,21 @@ describe('UpdatesProcessor', function () { '_skipAlreadyAppliedUpdates' ) try { - return (this.updatesToApply = + this.updatesToApply = this.UpdatesProcessor._skipAlreadyAppliedUpdates( this.project_id, this.updates, { docs: {} } - )) + ) } catch (error) {} }) after(function () { - return this.skipFn.restore() + this.skipFn.restore() }) - return it('should throw an exception', function () { - return this.skipFn.threw('OpsOutOfOrderError').should.equal(true) + it('should throw an exception', function () { + this.skipFn.threw('OpsOutOfOrderError').should.equal(true) }) }) }) diff --git a/services/real-time/app/js/ConnectedUsersManager.js b/services/real-time/app/js/ConnectedUsersManager.js index 299a4b870a..1421e8eeef 100644 --- a/services/real-time/app/js/ConnectedUsersManager.js +++ b/services/real-time/app/js/ConnectedUsersManager.js @@ -3,6 +3,7 @@ const Settings = require('@overleaf/settings') const logger = require('@overleaf/logger') const redis = require('@overleaf/redis-wrapper') const OError = require('@overleaf/o-error') +const Metrics = require('@overleaf/metrics') const rclient = redis.createClient(Settings.redis.realtime) const Keys = Settings.redis.realtime.key_schema @@ -13,6 +14,20 @@ const FOUR_DAYS_IN_S = ONE_DAY_IN_S * 4 const USER_TIMEOUT_IN_S = ONE_HOUR_IN_S / 4 const REFRESH_TIMEOUT_IN_S = 10 // only show clients which have responded to a refresh request in the last 10 seconds +function recordProjectNotEmptySinceMetric(res, status) { + const diff = Date.now() / 1000 - parseInt(res, 10) + const BUCKETS = [ + 0, + ONE_HOUR_IN_S, + 2 * ONE_HOUR_IN_S, + ONE_DAY_IN_S, + 2 * ONE_DAY_IN_S, + 7 * ONE_DAY_IN_S, + 30 * ONE_DAY_IN_S, + ] + Metrics.histogram('project_not_empty_since', diff, BUCKETS, { status }) +} + module.exports = { // Use the same method for when a user connects, and when a user sends a cursor // update. This way we don't care if the connected_user key has expired when @@ -23,6 +38,7 @@ module.exports = { const multi = rclient.multi() multi.sadd(Keys.clientsInProject({ project_id: projectId }), clientId) + multi.scard(Keys.clientsInProject({ project_id: projectId })) multi.expire( Keys.clientsInProject({ project_id: projectId }), FOUR_DAYS_IN_S @@ -66,10 +82,15 @@ module.exports = { USER_TIMEOUT_IN_S ) - multi.exec(function (err) { + multi.exec(function (err, res) { if (err) { err = new OError('problem marking user as connected').withCause(err) } + const [, nConnectedClients] = res + Metrics.inc('editing_session_mode', 1, { + method: cursorData ? 'update' : 'connect', + status: nConnectedClients === 1 ? 
'single' : 'multi', + }) callback(err) }) }, @@ -100,6 +121,7 @@ module.exports = { logger.debug({ projectId, clientId }, 'marking user as disconnected') const multi = rclient.multi() multi.srem(Keys.clientsInProject({ project_id: projectId }), clientId) + multi.scard(Keys.clientsInProject({ project_id: projectId })) multi.expire( Keys.clientsInProject({ project_id: projectId }), FOUR_DAYS_IN_S @@ -107,10 +129,56 @@ module.exports = { multi.del( Keys.connectedUser({ project_id: projectId, client_id: clientId }) ) - multi.exec(function (err) { + multi.exec(function (err, res) { if (err) { err = new OError('problem marking user as disconnected').withCause(err) } + const [, nConnectedClients] = res + const status = + nConnectedClients === 0 + ? 'empty' + : nConnectedClients === 1 + ? 'single' + : 'multi' + Metrics.inc('editing_session_mode', 1, { + method: 'disconnect', + status, + }) + if (status === 'empty') { + rclient.getdel(Keys.projectNotEmptySince({ projectId }), (err, res) => { + if (err) { + logger.warn( + { err, projectId }, + 'could not collect projectNotEmptySince' + ) + } else if (res) { + recordProjectNotEmptySinceMetric(res, status) + } + }) + } else { + // Only populate projectNotEmptySince when more clients remain connected. + const nowInSeconds = Math.ceil(Date.now() / 1000).toString() + // We can go back to SET GET after upgrading to redis 7.0+ + const multi = rclient.multi() + multi.get(Keys.projectNotEmptySince({ projectId })) + multi.set( + Keys.projectNotEmptySince({ projectId }), + nowInSeconds, + 'NX', + 'EX', + 31 * ONE_DAY_IN_S + ) + multi.exec((err, res) => { + if (err) { + logger.warn( + { err, projectId }, + 'could not get/set projectNotEmptySince' + ) + } else if (res[0]) { + recordProjectNotEmptySinceMetric(res[0], status) + } + }) + } callback(err) }) }, diff --git a/services/real-time/app/js/Router.js b/services/real-time/app/js/Router.js index e99369955f..238dc386a3 100644 --- a/services/real-time/app/js/Router.js +++ b/services/real-time/app/js/Router.js @@ -216,18 +216,6 @@ module.exports = Router = { }) metrics.gauge('socket-io.clients', io.sockets.clients().length) - const info = { - session, - publicId: client.publicId, - clientId: client.id, - isDebugging, - } - if (isDebugging) { - logger.info(info, 'client connected') - } else { - logger.debug(info, 'client connected') - } - let user if (session && session.passport && session.passport.user) { ;({ user } = session.passport) @@ -238,6 +226,20 @@ module.exports = Router = { user = { _id: 'anonymous-user', anonymousAccessToken } } + const info = { + userId: user._id, + projectId, + transport: client.transport, + publicId: client.publicId, + clientId: client.id, + isDebugging, + } + if (isDebugging) { + logger.info(info, 'client connected') + } else { + logger.debug(info, 'client connected') + } + const connectionDetails = { userId: user._id, projectId, @@ -577,7 +579,6 @@ module.exports = Router = { if (err) { Router._handleError(callback, err, client, 'applyOtUpdate', { doc_id: docId, - update, }) } else { callback() diff --git a/services/real-time/config/settings.defaults.js b/services/real-time/config/settings.defaults.js index 1cc0c0f107..96c116fb2e 100644 --- a/services/real-time/config/settings.defaults.js +++ b/services/real-time/config/settings.defaults.js @@ -38,6 +38,9 @@ const settings = { connectedUser({ project_id, client_id }) { return `connected_user:{${project_id}}:${client_id}` }, + projectNotEmptySince({ projectId }) { + return `projectNotEmptySince:{${projectId}}` + }, }, 
maxRetriesPerRequest: parseInt( process.env.REAL_TIME_REDIS_MAX_RETRIES_PER_REQUEST || diff --git a/services/real-time/test/unit/js/ConnectedUsersManagerTests.js b/services/real-time/test/unit/js/ConnectedUsersManagerTests.js index 9026d0bb42..dd4aeb35c9 100644 --- a/services/real-time/test/unit/js/ConnectedUsersManagerTests.js +++ b/services/real-time/test/unit/js/ConnectedUsersManagerTests.js @@ -30,12 +30,18 @@ describe('ConnectedUsersManager', function () { connectedUser({ project_id: projectId, client_id: clientId }) { return `connected_user:${projectId}:${clientId}` }, + projectNotEmptySince({ projectId }) { + return `projectNotEmptySince:{${projectId}}` + }, }, }, }, } this.rClient = { auth() {}, + getdel: sinon.stub(), + scard: sinon.stub(), + set: sinon.stub(), setex: sinon.stub(), sadd: sinon.stub(), get: sinon.stub(), @@ -51,10 +57,15 @@ describe('ConnectedUsersManager', function () { }, } tk.freeze(new Date()) + this.Metrics = { + inc: sinon.stub(), + histogram: sinon.stub(), + } this.ConnectedUsersManager = SandboxedModule.require(modulePath, { requires: { '@overleaf/settings': this.settings, + '@overleaf/metrics': this.Metrics, '@overleaf/redis-wrapper': { createClient: () => { return this.rClient @@ -83,7 +94,7 @@ describe('ConnectedUsersManager', function () { describe('updateUserPosition', function () { beforeEach(function () { - return this.rClient.exec.callsArgWith(0) + this.rClient.exec.yields(null, [1, 1]) }) it('should set a key with the date and give it a ttl', function (done) { @@ -240,7 +251,7 @@ describe('ConnectedUsersManager', function () { ) }) - return it('should set the cursor position when provided', function (done) { + it('should set the cursor position when provided', function (done) { return this.ConnectedUsersManager.updateUserPosition( this.project_id, this.client_id, @@ -259,11 +270,72 @@ describe('ConnectedUsersManager', function () { } ) }) + + describe('editing_session_mode', function () { + const cases = { + 'should bump the metric when connecting to empty room': { + nConnectedClients: 1, + cursorData: null, + labels: { + method: 'connect', + status: 'single', + }, + }, + 'should bump the metric when connecting to non-empty room': { + nConnectedClients: 2, + cursorData: null, + labels: { + method: 'connect', + status: 'multi', + }, + }, + 'should bump the metric when updating in empty room': { + nConnectedClients: 1, + cursorData: { row: 42 }, + labels: { + method: 'update', + status: 'single', + }, + }, + 'should bump the metric when updating in non-empty room': { + nConnectedClients: 2, + cursorData: { row: 42 }, + labels: { + method: 'update', + status: 'multi', + }, + }, + } + + for (const [ + name, + { nConnectedClients, cursorData, labels }, + ] of Object.entries(cases)) { + it(name, function (done) { + this.rClient.exec.yields(null, [1, nConnectedClients]) + this.ConnectedUsersManager.updateUserPosition( + this.project_id, + this.client_id, + this.user, + cursorData, + err => { + if (err) return done(err) + expect(this.Metrics.inc).to.have.been.calledWith( + 'editing_session_mode', + 1, + labels + ) + done() + } + ) + }) + } + }) }) describe('markUserAsDisconnected', function () { beforeEach(function () { - return this.rClient.exec.callsArgWith(0) + this.rClient.exec.yields(null, [1, 0]) }) it('should remove the user from the set', function (done) { @@ -294,7 +366,7 @@ describe('ConnectedUsersManager', function () { ) }) - return it('should add a ttl to the connected user set so it stays clean', function (done) { + it('should add a ttl to 
the connected user set so it stays clean', function (done) { return this.ConnectedUsersManager.markUserAsDisconnected( this.project_id, this.client_id, @@ -310,6 +382,161 @@ describe('ConnectedUsersManager', function () { } ) }) + + describe('editing_session_mode', function () { + const cases = { + 'should bump the metric when disconnecting from now empty room': { + nConnectedClients: 0, + labels: { + method: 'disconnect', + status: 'empty', + }, + }, + 'should bump the metric when disconnecting from now single room': { + nConnectedClients: 1, + labels: { + method: 'disconnect', + status: 'single', + }, + }, + 'should bump the metric when disconnecting from now multi room': { + nConnectedClients: 2, + labels: { + method: 'disconnect', + status: 'multi', + }, + }, + } + + for (const [name, { nConnectedClients, labels }] of Object.entries( + cases + )) { + it(name, function (done) { + this.rClient.exec.yields(null, [1, nConnectedClients]) + this.ConnectedUsersManager.markUserAsDisconnected( + this.project_id, + this.client_id, + err => { + if (err) return done(err) + expect(this.Metrics.inc).to.have.been.calledWith( + 'editing_session_mode', + 1, + labels + ) + done() + } + ) + }) + } + }) + + describe('projectNotEmptySince', function () { + it('should clear the projectNotEmptySince key when empty and skip metric if not set', function (done) { + this.rClient.exec.yields(null, [1, 0]) + this.rClient.getdel.yields(null, '') + this.ConnectedUsersManager.markUserAsDisconnected( + this.project_id, + this.client_id, + err => { + if (err) return done(err) + expect(this.rClient.getdel).to.have.been.calledWith( + `projectNotEmptySince:{${this.project_id}}` + ) + expect(this.Metrics.histogram).to.not.have.been.called + done() + } + ) + }) + it('should clear the projectNotEmptySince key when empty and record metric if set', function (done) { + this.rClient.exec.onFirstCall().yields(null, [1, 0]) + tk.freeze(1_234_000) + this.rClient.getdel.yields(null, '1230') + this.ConnectedUsersManager.markUserAsDisconnected( + this.project_id, + this.client_id, + err => { + if (err) return done(err) + expect(this.rClient.getdel).to.have.been.calledWith( + `projectNotEmptySince:{${this.project_id}}` + ) + expect(this.Metrics.histogram).to.have.been.calledWith( + 'project_not_empty_since', + 4, + sinon.match.any, + { status: 'empty' } + ) + done() + } + ) + }) + it('should set projectNotEmptySince key when single and skip metric if not set before', function (done) { + this.rClient.exec.onFirstCall().yields(null, [1, 1]) + tk.freeze(1_233_001) // should ceil up + this.rClient.exec.onSecondCall().yields(null, ['']) + this.ConnectedUsersManager.markUserAsDisconnected( + this.project_id, + this.client_id, + err => { + if (err) return done(err) + expect(this.rClient.set).to.have.been.calledWith( + `projectNotEmptySince:{${this.project_id}}`, + '1234', + 'NX', + 'EX', + 31 * 24 * 60 * 60 + ) + expect(this.Metrics.histogram).to.not.have.been.called + done() + } + ) + }) + const cases = { + 'should set projectNotEmptySince key when single and record metric if set before': + { + nConnectedClients: 1, + labels: { + status: 'single', + }, + }, + 'should set projectNotEmptySince key when multi and record metric if set before': + { + nConnectedClients: 2, + labels: { + status: 'multi', + }, + }, + } + for (const [name, { nConnectedClients, labels }] of Object.entries( + cases + )) { + it(name, function (done) { + this.rClient.exec.onFirstCall().yields(null, [1, nConnectedClients]) + tk.freeze(1_235_000) + 
this.rClient.exec.onSecondCall().yields(null, ['1230']) + this.ConnectedUsersManager.markUserAsDisconnected( + this.project_id, + this.client_id, + err => { + if (err) return done(err) + expect(this.rClient.set).to.have.been.calledWith( + `projectNotEmptySince:{${this.project_id}}`, + '1235', + 'NX', + 'EX', + 31 * 24 * 60 * 60 + ) + expect(this.Metrics.histogram).to.have.been.calledWith( + 'project_not_empty_since', + 5, + sinon.match.any, + labels + ) + done() + } + ) + }) + } + }) }) describe('_getConnectedUser', function () { diff --git a/services/web/.eslintrc.js b/services/web/.eslintrc.js index 47d15bca87..3c672de7e7 100644 --- a/services/web/.eslintrc.js +++ b/services/web/.eslintrc.js @@ -39,6 +39,12 @@ module.exports = { 'error', { functions: false, classes: false, variables: false }, ], + 'react-hooks/exhaustive-deps': [ + 'warn', + { + additionalHooks: '(useCommandProvider)', + }, + ], }, overrides: [ // NOTE: changing paths may require updating them in the Makefile too. diff --git a/services/web/app/src/Features/Captcha/CaptchaMiddleware.js b/services/web/app/src/Features/Captcha/CaptchaMiddleware.js index f796749f85..9c93b74cfa 100644 --- a/services/web/app/src/Features/Captcha/CaptchaMiddleware.js +++ b/services/web/app/src/Features/Captcha/CaptchaMiddleware.js @@ -6,6 +6,7 @@ const OError = require('@overleaf/o-error') const DeviceHistory = require('./DeviceHistory') const AuthenticationController = require('../Authentication/AuthenticationController') const { expressify } = require('@overleaf/promise-utils') +const EmailsHelper = require('../Helpers/EmailHelper') function respondInvalidCaptcha(req, res) { res.status(400).json({ @@ -41,9 +42,11 @@ async function canSkipCaptcha(req, res) { function validateCaptcha(action) { return expressify(async function (req, res, next) { + const email = EmailsHelper.parseEmail(req.body?.email) const trustedUser = - req.body?.email && - Settings.recaptcha.trustedUsers.includes(req.body.email) + email && + (Settings.recaptcha.trustedUsers.includes(email) || + Settings.recaptcha.trustedUsersRegex?.test(email)) if (!Settings.recaptcha?.siteKey || Settings.recaptcha.disabled[action]) { if (action === 'login') { AuthenticationController.setAuditInfo(req, { captcha: 'disabled' }) @@ -51,15 +54,17 @@ function validateCaptcha(action) { Metrics.inc('captcha', 1, { path: action, status: 'disabled' }) return next() } - if (trustedUser && action === 'login') { - AuthenticationController.setAuditInfo(req, { captcha: 'trusted' }) + if (trustedUser) { + if (action === 'login') { + AuthenticationController.setAuditInfo(req, { captcha: 'trusted' }) + } Metrics.inc('captcha', 1, { path: action, status: 'trusted' }) return next() } const reCaptchaResponse = req.body['g-recaptcha-response'] if (action === 'login') { await initializeDeviceHistory(req) - const fromKnownDevice = req.deviceHistory.has(req.body?.email) + const fromKnownDevice = req.deviceHistory.has(email) AuthenticationController.setAuditInfo(req, { fromKnownDevice }) if (!reCaptchaResponse && fromKnownDevice) { // The user has previously logged in from this device, which required diff --git a/services/web/app/src/Features/Collaborators/CollaboratorsController.mjs b/services/web/app/src/Features/Collaborators/CollaboratorsController.mjs index 102e9cca28..d2cecbcfad 100644 --- a/services/web/app/src/Features/Collaborators/CollaboratorsController.mjs +++ b/services/web/app/src/Features/Collaborators/CollaboratorsController.mjs @@ -14,7 +14,6 @@ import { hasAdminAccess } from 
'../Helpers/AdminAuthorizationHelper.js' import TokenAccessHandler from '../TokenAccess/TokenAccessHandler.js' import ProjectAuditLogHandler from '../Project/ProjectAuditLogHandler.js' import LimitationsManager from '../Subscription/LimitationsManager.js' -import PrivilegeLevels from '../Authorization/PrivilegeLevels.js' const ObjectId = mongodb.ObjectId @@ -80,17 +79,20 @@ async function setCollaboratorInfo(req, res, next) { const userId = req.params.user_id const { privilegeLevel } = req.body - if (privilegeLevel !== PrivilegeLevels.READ_ONLY) { - const allowed = - await LimitationsManager.promises.canAddXEditCollaborators(projectId, 1) - if (!allowed) { - return HttpErrorHandler.forbidden( - req, - res, - 'edit collaborator limit reached' - ) - } + const allowed = + await LimitationsManager.promises.canChangeCollaboratorPrivilegeLevel( + projectId, + userId, + privilegeLevel + ) + if (!allowed) { + return HttpErrorHandler.forbidden( + req, + res, + 'edit collaborator limit reached' + ) } + await CollaboratorsHandler.promises.setCollaboratorPrivilegeLevel( projectId, userId, @@ -122,6 +124,7 @@ async function transferOwnership(req, res, next) { { allowTransferToNonCollaborators: hasAdminAccess(sessionUser), sessionUserId: new ObjectId(sessionUser._id), + ipAddress: req.ip, } ) res.sendStatus(204) diff --git a/services/web/app/src/Features/Collaborators/CollaboratorsGetter.js b/services/web/app/src/Features/Collaborators/CollaboratorsGetter.js index 563a6ea6b7..77fb7ab2d3 100644 --- a/services/web/app/src/Features/Collaborators/CollaboratorsGetter.js +++ b/services/web/app/src/Features/Collaborators/CollaboratorsGetter.js @@ -60,6 +60,7 @@ async function getMemberIdsWithPrivilegeLevels(projectId) { publicAccesLevel: 1, pendingEditor_refs: 1, reviewer_refs: 1, + pendingReviewer_refs: 1, }) if (!project) { throw new Errors.NotFoundError(`no project found with id ${projectId}`) @@ -72,7 +73,8 @@ async function getMemberIdsWithPrivilegeLevels(projectId) { project.tokenAccessReadOnly_refs, project.publicAccesLevel, project.pendingEditor_refs, - project.reviewer_refs + project.reviewer_refs, + project.pendingReviewer_refs ) return memberIds } @@ -107,7 +109,8 @@ async function getInvitedMembersWithPrivilegeLevelsFromFields( [], null, [], - reviewerIds + reviewerIds, + [] ) return _loadMembers(members) } @@ -128,23 +131,25 @@ async function getMemberIdPrivilegeLevel(userId, projectId) { } async function getInvitedEditCollaboratorCount(projectId) { - // Only counts invited members with readAndWrite privilege + // Counts invited members with editor or reviewer roles const members = await getMemberIdsWithPrivilegeLevels(projectId) return members.filter( m => m.source === Sources.INVITE && - m.privilegeLevel === PrivilegeLevels.READ_AND_WRITE + (m.privilegeLevel === PrivilegeLevels.READ_AND_WRITE || + m.privilegeLevel === PrivilegeLevels.REVIEW) ).length } async function getInvitedPendingEditorCount(projectId) { - // Only counts invited members that are readonly pending editors + // Only counts invited members that are readonly pending editors or pending + // reviewers const members = await getMemberIdsWithPrivilegeLevels(projectId) return members.filter( m => m.source === Sources.INVITE && m.privilegeLevel === PrivilegeLevels.READ_ONLY && - m.pendingEditor === true + (m.pendingEditor || m.pendingReviewer) ).length } @@ -319,7 +324,8 @@ function _getMemberIdsWithPrivilegeLevelsFromFields( tokenAccessReadOnlyIds, publicAccessLevel, pendingEditorIds, - reviewerIds + reviewerIds, + pendingReviewerIds ) 
{ const members = [] members.push({ @@ -327,6 +333,7 @@ function _getMemberIdsWithPrivilegeLevelsFromFields( privilegeLevel: PrivilegeLevels.OWNER, source: Sources.OWNER, }) + for (const memberId of collaboratorIds || []) { members.push({ id: memberId.toString(), @@ -334,16 +341,22 @@ function _getMemberIdsWithPrivilegeLevelsFromFields( source: Sources.INVITE, }) } + for (const memberId of readOnlyIds || []) { - members.push({ + const record = { id: memberId.toString(), privilegeLevel: PrivilegeLevels.READ_ONLY, source: Sources.INVITE, - ...(pendingEditorIds?.some(pe => memberId.equals(pe)) && { - pendingEditor: true, - }), - }) + } + + if (pendingEditorIds?.some(pe => memberId.equals(pe))) { + record.pendingEditor = true + } else if (pendingReviewerIds?.some(pr => memberId.equals(pr))) { + record.pendingReviewer = true + } + members.push(record) } + if (publicAccessLevel === PublicAccessLevels.TOKEN_BASED) { for (const memberId of tokenAccessIds || []) { members.push({ @@ -360,6 +373,7 @@ function _getMemberIdsWithPrivilegeLevelsFromFields( }) } } + for (const memberId of reviewerIds || []) { members.push({ id: memberId.toString(), @@ -384,11 +398,16 @@ async function _loadMembers(members) { signUpDate: 1, }) if (user != null) { - return { + const record = { user, privilegeLevel: member.privilegeLevel, - ...(member.pendingEditor && { pendingEditor: true }), } + if (member.pendingEditor) { + record.pendingEditor = true + } else if (member.pendingReviewer) { + record.pendingReviewer = true + } + return record } else { return null } diff --git a/services/web/app/src/Features/Collaborators/CollaboratorsHandler.js b/services/web/app/src/Features/Collaborators/CollaboratorsHandler.js index 00ec34022c..05137a97f8 100644 --- a/services/web/app/src/Features/Collaborators/CollaboratorsHandler.js +++ b/services/web/app/src/Features/Collaborators/CollaboratorsHandler.js @@ -28,11 +28,35 @@ module.exports = { convertTrackChangesToExplicitFormat, }, } +// Forces null pendingReviewer_refs, readOnly_refs, and reviewer_refs to +// be empty arrays to avoid errors during $pull ops +// See https://github.com/overleaf/internal/issues/24610 +async function fixNullCollaboratorRefs(projectId) { + // Temporary cleanup for the case where pendingReviewer_refs is null + await Project.updateOne( + { _id: projectId, pendingReviewer_refs: { $type: 'null' } }, + { $set: { pendingReviewer_refs: [] } } + ).exec() + + // Temporary cleanup for the case where readOnly_refs is null + await Project.updateOne( + { _id: projectId, readOnly_refs: { $type: 'null' } }, + { $set: { readOnly_refs: [] } } + ).exec() + + // Temporary cleanup for the case where reviewer_refs is null + await Project.updateOne( + { _id: projectId, reviewer_refs: { $type: 'null' } }, + { $set: { reviewer_refs: [] } } + ).exec() +} async function removeUserFromProject(projectId, userId) { try { const project = await Project.findOne({ _id: projectId }).exec() + await fixNullCollaboratorRefs(projectId) + // Deal with the old type of boolean value for archived // In order to clear it if (typeof project.archived === 'boolean') { @@ -53,6 +77,7 @@ async function removeUserFromProject(projectId, userId) { reviewer_refs: userId, readOnly_refs: userId, pendingEditor_refs: userId, + pendingReviewer_refs: userId, tokenAccessReadOnly_refs: userId, tokenAccessReadAndWrite_refs: userId, trashed: userId, @@ -68,6 +93,7 @@ async function removeUserFromProject(projectId, userId) { readOnly_refs: userId, reviewer_refs: userId, pendingEditor_refs: userId, + 
pendingReviewer_refs: userId, tokenAccessReadOnly_refs: userId, tokenAccessReadAndWrite_refs: userId, archived: userId, @@ -106,7 +132,7 @@ async function addUserIdToProject( addingUserId, userId, privilegeLevel, - { pendingEditor } = {} + { pendingEditor, pendingReviewer } = {} ) { const project = await ProjectGetter.promises.getProject(projectId, { owner_ref: 1, @@ -133,9 +159,17 @@ async function addUserIdToProject( level = { readOnly_refs: userId } if (pendingEditor) { level.pendingEditor_refs = userId + } else if (pendingReviewer) { + level.pendingReviewer_refs = userId } logger.debug( - { privileges: 'readOnly', userId, projectId, pendingEditor }, + { + privileges: 'readOnly', + userId, + projectId, + pendingEditor, + pendingReviewer, + }, 'adding user' ) } else if (privilegeLevel === PrivilegeLevels.REVIEW) { @@ -246,6 +280,19 @@ async function transferProjects(fromUserId, toUserId) { } ).exec() + await Project.updateMany( + { pendingReviewer_refs: fromUserId }, + { + $addToSet: { pendingReviewer_refs: toUserId }, + } + ).exec() + await Project.updateMany( + { pendingReviewer_refs: fromUserId }, + { + $pull: { pendingReviewer_refs: fromUserId }, + } + ).exec() + // Flush in background, no need to block on this _flushProjects(projectIds).catch(err => { logger.err( @@ -259,7 +306,7 @@ async function setCollaboratorPrivilegeLevel( projectId, userId, privilegeLevel, - { pendingEditor } = {} + { pendingEditor, pendingReviewer } = {} ) { // Make sure we're only updating the project if the user is already a // collaborator @@ -272,6 +319,9 @@ async function setCollaboratorPrivilegeLevel( ], } let update + + await fixNullCollaboratorRefs(projectId) + switch (privilegeLevel) { case PrivilegeLevels.READ_AND_WRITE: { update = { @@ -279,6 +329,7 @@ async function setCollaboratorPrivilegeLevel( readOnly_refs: userId, pendingEditor_refs: userId, reviewer_refs: userId, + pendingReviewer_refs: userId, }, $addToSet: { collaberator_refs: userId }, } @@ -290,6 +341,7 @@ async function setCollaboratorPrivilegeLevel( readOnly_refs: userId, pendingEditor_refs: userId, collaberator_refs: userId, + pendingReviewer_refs: userId, }, $addToSet: { reviewer_refs: userId }, } @@ -316,11 +368,19 @@ async function setCollaboratorPrivilegeLevel( $pull: { collaberator_refs: userId, reviewer_refs: userId }, $addToSet: { readOnly_refs: userId }, } + if (pendingEditor) { update.$addToSet.pendingEditor_refs = userId } else { update.$pull.pendingEditor_refs = userId } + + if (pendingReviewer) { + update.$addToSet.pendingReviewer_refs = userId + } else { + update.$pull.pendingReviewer_refs = userId + } + break } default: { diff --git a/services/web/app/src/Features/Collaborators/CollaboratorsInviteController.mjs b/services/web/app/src/Features/Collaborators/CollaboratorsInviteController.mjs index 722b1bdd4e..c6ffba1ea5 100644 --- a/services/web/app/src/Features/Collaborators/CollaboratorsInviteController.mjs +++ b/services/web/app/src/Features/Collaborators/CollaboratorsInviteController.mjs @@ -359,11 +359,22 @@ async function acceptInvite(req, res) { 'project:membership:changed', { invites: true, members: true } ) + + let editMode = 'edit' + if (invite.privileges === PrivilegeLevels.REVIEW) { + editMode = 'review' + } else if (invite.privileges === PrivilegeLevels.READ_ONLY) { + editMode = 'view' + } AnalyticsManager.recordEventForUserInBackground( currentUser._id, - 'project-invite-accept', + 'project-joined', { projectId, + ownerId: invite.sendingUserId, // only owner can invite others + mode: editMode, + role: 
invite.privileges, + source: 'email-invite', } ) diff --git a/services/web/app/src/Features/Collaborators/CollaboratorsInviteHandler.mjs b/services/web/app/src/Features/Collaborators/CollaboratorsInviteHandler.mjs index 1409b6e43b..02db4dee99 100644 --- a/services/web/app/src/Features/Collaborators/CollaboratorsInviteHandler.mjs +++ b/services/web/app/src/Features/Collaborators/CollaboratorsInviteHandler.mjs @@ -152,33 +152,52 @@ const CollaboratorsInviteHandler = { const project = await ProjectGetter.promises.getProject(projectId, { owner_ref: 1, }) - const pendingEditor = - invite.privileges === PrivilegeLevels.READ_AND_WRITE && - !(await LimitationsManager.promises.canAcceptEditCollaboratorInvite( - project._id - )) - if (pendingEditor) { - logger.debug( - { projectId, userId: user._id }, - 'no collaborator slots available, user added as read only (pending editor)' + + let privilegeLevel = invite.privileges + const opts = {} + if ( + [PrivilegeLevels.READ_AND_WRITE, PrivilegeLevels.REVIEW].includes( + invite.privileges ) - await ProjectAuditLogHandler.promises.addEntry( - projectId, - 'editor-moved-to-pending', // controller already logged accept-invite - null, - null, - { - userId: user._id.toString(), + ) { + const allowed = + await LimitationsManager.promises.canAcceptEditCollaboratorInvite( + project._id + ) + if (!allowed) { + privilegeLevel = PrivilegeLevels.READ_ONLY + if (invite.privileges === PrivilegeLevels.READ_AND_WRITE) { + opts.pendingEditor = true + } else if (invite.privileges === PrivilegeLevels.REVIEW) { + opts.pendingReviewer = true } - ) + + logger.debug( + { projectId, userId: user._id, privileges: invite.privileges }, + 'no collaborator slots available, user added as read only (pending editor)' + ) + await ProjectAuditLogHandler.promises.addEntry( + projectId, + 'editor-moved-to-pending', // controller already logged accept-invite + null, + null, + { + userId: user._id.toString(), + role: + invite.privileges === PrivilegeLevels.REVIEW + ? 'reviewer' + : 'editor', + } + ) + } } await CollaboratorsHandler.promises.addUserIdToProject( projectId, invite.sendingUserId, user._id, - pendingEditor ? 
PrivilegeLevels.READ_ONLY : invite.privileges, - { pendingEditor } + privilegeLevel, + opts ) // Remove invite diff --git a/services/web/app/src/Features/Collaborators/OwnershipTransferHandler.js b/services/web/app/src/Features/Collaborators/OwnershipTransferHandler.js index b656bc6ce6..82c9c37ac7 100644 --- a/services/web/app/src/Features/Collaborators/OwnershipTransferHandler.js +++ b/services/web/app/src/Features/Collaborators/OwnershipTransferHandler.js @@ -15,7 +15,12 @@ module.exports = { } async function transferOwnership(projectId, newOwnerId, options = {}) { - const { allowTransferToNonCollaborators, sessionUserId, skipEmails } = options + const { + allowTransferToNonCollaborators, + sessionUserId, + skipEmails, + ipAddress, + } = options // Fetch project and user const [project, newOwner] = await Promise.all([ @@ -49,7 +54,7 @@ async function transferOwnership(projectId, newOwnerId, options = {}) { projectId, 'transfer-ownership', sessionUserId, - '', // IP address + ipAddress, { previousOwnerId, newOwnerId } ) diff --git a/services/web/app/src/Features/Compile/ClsiManager.js b/services/web/app/src/Features/Compile/ClsiManager.js index a211f82408..68c8a9c0de 100644 --- a/services/web/app/src/Features/Compile/ClsiManager.js +++ b/services/web/app/src/Features/Compile/ClsiManager.js @@ -765,6 +765,7 @@ function _finaliseRequest(projectId, options, project, docs, files) { return { compile: { options: { + buildId: options.buildId, compiler: project.compiler, timeout: options.timeout, imageName: project.imageName, diff --git a/services/web/app/src/Features/Compile/CompileManager.js b/services/web/app/src/Features/Compile/CompileManager.js index b4d34f1507..9b5404865a 100644 --- a/services/web/app/src/Features/Compile/CompileManager.js +++ b/services/web/app/src/Features/Compile/CompileManager.js @@ -1,4 +1,5 @@ let CompileManager +const Crypto = require('crypto') const Settings = require('@overleaf/settings') const RedisWrapper = require('../../infrastructure/RedisWrapper') const rclient = RedisWrapper.client('clsi_recently_compiled') @@ -25,6 +26,10 @@ function instrumentWithTimer(fn, key) { } } +function generateBuildId() { + return `${Date.now().toString(16)}-${Crypto.randomBytes(8).toString('hex')}` +} + async function compile(projectId, userId, options = {}) { const recentlyCompiled = await CompileManager._checkIfRecentlyCompiled( projectId, @@ -67,6 +72,9 @@ async function compile(projectId, userId, options = {}) { return { message: 'autocompile-backoff', outputFiles: [] } } + // Generate the buildId ahead of fetching the project content from redis/mongo so that the buildId's timestamp is before any lastUpdated date. + options.buildId = generateBuildId() + // only pass userId down to clsi if this is a per-user compile const compileAsUser = Settings.disablePerUserCompiles ? 
undefined : userId const { diff --git a/services/web/app/src/Features/DocumentUpdater/DocumentUpdaterHandler.js b/services/web/app/src/Features/DocumentUpdater/DocumentUpdaterHandler.js index 0aedff8853..493b812dab 100644 --- a/services/web/app/src/Features/DocumentUpdater/DocumentUpdaterHandler.js +++ b/services/web/app/src/Features/DocumentUpdater/DocumentUpdaterHandler.js @@ -11,6 +11,22 @@ const ProjectGetter = require('../Project/ProjectGetter') const FileStoreHandler = require('../FileStore/FileStoreHandler') const Features = require('../../infrastructure/Features') +function getProjectLastUpdatedAt(projectId, callback) { + _makeRequest( + { + path: `/project/${projectId}/last_updated_at`, + method: 'GET', + json: true, + }, + projectId, + 'project.redis.last_updated_at', + (err, body) => { + if (err || !body?.lastUpdatedAt) return callback(err, null) + callback(null, new Date(body.lastUpdatedAt)) + } + ) +} + /** * @param {string} projectId */ @@ -597,6 +613,7 @@ module.exports = { deleteDoc, getComment, getDocument, + getProjectLastUpdatedAt, setDocument, appendToDocument, getProjectDocsIfMatch, @@ -624,6 +641,7 @@ module.exports = { ]), setDocument: promisify(setDocument), getProjectDocsIfMatch: promisify(getProjectDocsIfMatch), + getProjectLastUpdatedAt: promisify(getProjectLastUpdatedAt), clearProjectState: promisify(clearProjectState), acceptChanges: promisify(acceptChanges), resolveThread: promisify(resolveThread), diff --git a/services/web/app/src/Features/FileStore/FileStoreHandler.js b/services/web/app/src/Features/FileStore/FileStoreHandler.js index f787dc216d..66ba94b37d 100644 --- a/services/web/app/src/Features/FileStore/FileStoreHandler.js +++ b/services/web/app/src/Features/FileStore/FileStoreHandler.js @@ -204,6 +204,12 @@ const FileStoreHandler = { }, getFileStream(projectId, fileId, query, callback) { + if (!Features.hasFeature('filestore')) { + return callback( + new Errors.NotFoundError('filestore is disabled, file not found') + ) + } + let queryString = '?from=getFileStream' if (query != null && query.format != null) { queryString += `&format=${query.format}` @@ -274,6 +280,9 @@ const FileStoreHandler = { }, deleteProject(projectId, callback) { + if (!Features.hasFeature('filestore')) { + return callback() // if filestore is not in use, we don't need to delete anything + } request( { method: 'delete', diff --git a/services/web/app/src/Features/Helpers/EmailHelper.js b/services/web/app/src/Features/Helpers/EmailHelper.js index c24e68876f..80b96dfea0 100644 --- a/services/web/app/src/Features/Helpers/EmailHelper.js +++ b/services/web/app/src/Features/Helpers/EmailHelper.js @@ -11,7 +11,7 @@ function getDomain(email) { } function parseEmail(email, parseRfcAddress = false) { - if (email == null) { + if (typeof email !== 'string' || !email) { return null } diff --git a/services/web/app/src/Features/InactiveData/InactiveProjectManager.js b/services/web/app/src/Features/InactiveData/InactiveProjectManager.js index cdbcfd888b..818fe70c08 100644 --- a/services/web/app/src/Features/InactiveData/InactiveProjectManager.js +++ b/services/web/app/src/Features/InactiveData/InactiveProjectManager.js @@ -5,7 +5,6 @@ const DocumentUpdaterHandler = require('../DocumentUpdater/DocumentUpdaterHandle const ProjectGetter = require('../Project/ProjectGetter') const ProjectUpdateHandler = require('../Project/ProjectUpdateHandler') const { Project } = require('../../models/Project') -const { ObjectId } = require('mongodb-legacy') const Modules = require('../../infrastructure/Modules') 
const { READ_PREFERENCE_SECONDARY } = require('../../infrastructure/mongodb') const { callbackifyAll } = require('@overleaf/promise-utils') @@ -62,8 +61,6 @@ const InactiveProjectManager = { projects = await Project.find({ lastOpened: { $not: { $gt: oldProjectDate } }, }) - .where('_id') - .lt(ObjectId.createFromTime(oldProjectDate / 1000)) .where('active') .equals(true) .select('_id') diff --git a/services/web/app/src/Features/Metadata/MetaHandler.mjs b/services/web/app/src/Features/Metadata/MetaHandler.mjs index 1f7c1e2f86..ef5d11887e 100644 --- a/services/web/app/src/Features/Metadata/MetaHandler.mjs +++ b/services/web/app/src/Features/Metadata/MetaHandler.mjs @@ -23,14 +23,19 @@ async function extractMetaFromDoc(lines) { } const labelRe = /\\label{(.{0,80}?)}/g + const labelOptionRe = /\blabel={?(.{0,80}?)[\s},\]]/g const packageRe = /^\\usepackage(?:\[.{0,80}?])?{(.{0,80}?)}/g const reqPackageRe = /^\\RequirePackage(?:\[.{0,80}?])?{(.{0,80}?)}/g for (const rawLine of lines) { const line = getNonCommentedContent(rawLine) - for (const pkg of lineMatches(labelRe, line)) { - docMeta.labels.push(pkg) + for (const label of lineMatches(labelRe, line)) { + docMeta.labels.push(label) + } + + for (const label of lineMatches(labelOptionRe, line)) { + docMeta.labels.push(label) } for (const pkg of lineMatches(packageRe, line, ',')) { diff --git a/services/web/app/src/Features/Project/ProjectController.js b/services/web/app/src/Features/Project/ProjectController.js index 163794298a..8fd4668468 100644 --- a/services/web/app/src/Features/Project/ProjectController.js +++ b/services/web/app/src/Features/Project/ProjectController.js @@ -15,6 +15,7 @@ const metrics = require('@overleaf/metrics') const { User } = require('../../models/User') const SubscriptionLocator = require('../Subscription/SubscriptionLocator') const LimitationsManager = require('../Subscription/LimitationsManager') +const FeaturesHelper = require('../Subscription/FeaturesHelper') const Settings = require('@overleaf/settings') const AuthorizationManager = require('../Authorization/AuthorizationManager') const InactiveProjectManager = require('../InactiveData/InactiveProjectManager') @@ -337,14 +338,12 @@ const _ProjectController = { 'external-socket-heartbeat', 'full-project-search', 'null-test-share-modal', - 'paywall-cta', 'pdf-caching-cached-url-lookup', 'pdf-caching-mode', 'pdf-caching-prefetch-large', 'pdf-caching-prefetching', 'revert-file', 'revert-project', - 'review-panel-redesign', !anonymous && 'ro-mirror-on-client', 'track-pdf-download', !anonymous && 'writefull-oauth-promotion', @@ -577,19 +576,19 @@ const _ProjectController = { const pendingEditors = project.pendingEditor_refs?.length || 0 const exceedAtLimit = planLimit > -1 && namedEditors >= planLimit - let editMode = 'edit' + let mode = 'edit' if (privilegeLevel === PrivilegeLevels.READ_ONLY) { - editMode = 'view' + mode = 'view' } else if ( project.track_changes === true || project.track_changes?.[userId] === true ) { - editMode = 'review' + mode = 'review' } const projectOpenedSegmentation = { role: privilegeLevel, - editMode, + mode, ownerId: project.owner_ref, projectId: project._id, namedEditors, @@ -760,6 +759,20 @@ const _ProjectController = { const isOverleafAssistBundleEnabled = splitTestAssignments['overleaf-assist-bundle']?.variant === 'enabled' + let fullFeatureSet = user?.features + if (!anonymous) { + // generate the user's feature set, including features added or overridden via modules + const moduleFeatures = + (await Modules.promises.hooks.fire( 
'getModuleProvidedFeatures', + userId + )) || [] + fullFeatureSet = FeaturesHelper.computeFeatureSet([ + user.features, + ...moduleFeatures, + ]) + } + const isPaywallChangeCompileTimeoutEnabled = splitTestAssignments['paywall-change-compile-timeout']?.variant === 'enabled' @@ -802,7 +815,7 @@ const _ProjectController = { allowedFreeTrial, hasRecurlySubscription: subscription?.recurlySubscription_id != null, featureSwitches: user.featureSwitches, - features: user.features, + features: fullFeatureSet, featureUsage, refProviders: _.mapValues(user.refProviders, Boolean), writefull: { @@ -832,6 +845,7 @@ const _ProjectController = { overallTheme: user.ace.overallTheme, mathPreview: user.ace.mathPreview, referencesSearchMode: user.ace.referencesSearchMode, + enableNewEditor: user.ace.enableNewEditor ?? true, }, privilegeLevel, anonymous, @@ -853,6 +867,8 @@ const _ProjectController = { editorThemes: THEME_LIST, legacyEditorThemes: LEGACY_THEME_LIST, maxDocLength: Settings.max_doc_length, + maxReconnectGracefullyIntervalMs: + Settings.maxReconnectGracefullyIntervalMs, brandVariation, allowedImageNames, gitBridgePublicBaseUrl: Settings.gitBridgePublicBaseUrl, @@ -869,7 +885,6 @@ const _ProjectController = { metadata: { viewport: false }, showUpgradePrompt, fixedSizeDocument: true, - useOpenTelemetry: Settings.useOpenTelemetryClient, hasTrackChangesFeature: Features.hasFeature('track-changes'), projectTags, usedLatex: diff --git a/services/web/app/src/Features/Project/ProjectEditorHandler.js b/services/web/app/src/Features/Project/ProjectEditorHandler.js index 84c0a5831a..a85e8b5764 100644 --- a/services/web/app/src/Features/Project/ProjectEditorHandler.js +++ b/services/web/app/src/Features/Project/ProjectEditorHandler.js @@ -107,6 +107,7 @@ module.exports = ProjectEditorHandler = { privileges: member.privilegeLevel, signUpDate: user.signUpDate, pendingEditor: member.pendingEditor, + pendingReviewer: member.pendingReviewer, } }, diff --git a/services/web/app/src/Features/Project/ProjectListController.mjs b/services/web/app/src/Features/Project/ProjectListController.mjs index 281406645b..6ae7080961 100644 --- a/services/web/app/src/Features/Project/ProjectListController.mjs +++ b/services/web/app/src/Features/Project/ProjectListController.mjs @@ -350,7 +350,7 @@ async function projectListPage(req, res, next) { 'getUSGovBanner', userEmails, hasPaidAffiliation, - inactiveTutorials.includes('us-gov-banner') + inactiveTutorials ) const usGovBanner = (usGovBannerHooksResponse && @@ -409,15 +409,6 @@ async function projectListPage(req, res, next) { logger.error({ err: error }, 'Failed to get individual subscription') } - try { - await SplitTestHandler.promises.getAssignment(req, res, 'paywall-cta') - } catch (error) { - logger.error( - { err: error }, - 'failed to get "paywall-cta" split test assignment' - ) - } - // Get the user's assignment for the DS unified nav split test, which // populates splitTestVariants with a value for the split test name and allows // Pug to send it to the browser diff --git a/services/web/app/src/Features/Project/ProjectLocator.js b/services/web/app/src/Features/Project/ProjectLocator.js index c78dac1dbf..2feaa0cebf 100644 --- a/services/web/app/src/Features/Project/ProjectLocator.js +++ b/services/web/app/src/Features/Project/ProjectLocator.js @@ -7,6 +7,28 @@ const Errors = require('../Errors/Errors') const { promisifyMultiResult } = require('@overleaf/promise-utils') const { iterablePaths } = require('./IterablePath') +/** + * @param project + * @param predicate + * 
@returns {{path: string, value: *}} + */ +function findDeep(project, predicate) { + function find(value, path) { + if (predicate(value)) { + return { value, path: path.join('.') } + } + if (typeof value === 'object' && value !== null) { + for (const [childKey, childVal] of Object.entries(value)) { + const found = find(childVal, [...path, childKey]) + if (found) { + return found + } + } + } + } + return find(project.rootFolder, ['rootFolder']) +} + function findElement(options, _callback) { // The search algorithm below potentially invokes the callback multiple // times. @@ -308,6 +330,7 @@ module.exports = { findElementByPath, findRootDoc, findElementByMongoPath, + findDeep, promises: { findElement: promisifyMultiResult(findElement, [ 'element', diff --git a/services/web/app/src/Features/Project/ProjectRootDocManager.js b/services/web/app/src/Features/Project/ProjectRootDocManager.js index d822e86845..0bac5e35ed 100644 --- a/services/web/app/src/Features/Project/ProjectRootDocManager.js +++ b/services/web/app/src/Features/Project/ProjectRootDocManager.js @@ -113,6 +113,7 @@ module.exports = ProjectRootDocManager = { if (err != null) { return callback(err) } + let firstFileInRootFolder let doc = null return async.until( @@ -130,16 +131,26 @@ module.exports = ProjectRootDocManager = { if (DocumentHelper.contentHasDocumentclass(content)) { doc = { path: file, content } } - return cb(null) + + if (!firstFileInRootFolder && !file.includes('/')) { + firstFileInRootFolder = { path: file, content } + } + cb(null) } ) }, - err => - callback( - err, - doc != null ? doc.path : undefined, - doc != null ? doc.content : undefined - ) + err => { + if (err) { + return callback(err) + } + + // if no doc was found, use the first file in the root folder as the main doc + if (!doc && firstFileInRootFolder) { + doc = firstFileInRootFolder + } + + callback(null, doc?.path, doc?.content) + } ) } ), diff --git a/services/web/app/src/Features/SplitTests/SplitTestHandler.js b/services/web/app/src/Features/SplitTests/SplitTestHandler.js index 491c236a3c..5dcf00967b 100644 --- a/services/web/app/src/Features/SplitTests/SplitTestHandler.js +++ b/services/web/app/src/Features/SplitTests/SplitTestHandler.js @@ -150,6 +150,14 @@ async function hasUserBeenAssignedToVariant( const splitTest = await _getSplitTest(splitTestName) const currentVersion = SplitTestUtils.getCurrentVersion(splitTest) + if ( + !userId || + !SessionManager.isUserLoggedIn(session) || + !currentVersion?.active + ) { + return false + } + // Check the query string for an override, ignoring an invalid value const queryVariant = query[splitTestName] if (queryVariant === variant) { diff --git a/services/web/app/src/Features/Subscription/Errors.js b/services/web/app/src/Features/Subscription/Errors.js index 0e1c82498d..53ecf7ba12 100644 --- a/services/web/app/src/Features/Subscription/Errors.js +++ b/services/web/app/src/Features/Subscription/Errors.js @@ -22,6 +22,8 @@ class PendingChangeError extends OError {} class InactiveError extends OError {} +class SubtotalLimitExceededError extends OError {} + module.exports = { RecurlyTransactionError, DuplicateAddOnError, @@ -30,4 +32,5 @@ module.exports = { ManuallyCollectedError, PendingChangeError, InactiveError, + SubtotalLimitExceededError, } diff --git a/services/web/app/src/Features/Subscription/FeaturesHelper.js b/services/web/app/src/Features/Subscription/FeaturesHelper.js index fe90391c86..b948815477 100644 --- a/services/web/app/src/Features/Subscription/FeaturesHelper.js +++ 
b/services/web/app/src/Features/Subscription/FeaturesHelper.js @@ -1,6 +1,13 @@ const _ = require('lodash') const Settings = require('@overleaf/settings') +/** + * merges an array of feature sets to produce a final feature set + */ +function computeFeatureSet(featureSets) { + return featureSets.reduce(mergeFeatures, {}) +} + /** * Merge feature sets coming from different sources */ @@ -108,6 +115,7 @@ function getMatchedFeatureSet(features) { module.exports = { mergeFeatures, + computeFeatureSet, isFeatureSetBetter, compareFeatures, getMatchedFeatureSet, diff --git a/services/web/app/src/Features/Subscription/LimitationsManager.js b/services/web/app/src/Features/Subscription/LimitationsManager.js index 346bd0420a..d0c3d29b7b 100644 --- a/services/web/app/src/Features/Subscription/LimitationsManager.js +++ b/services/web/app/src/Features/Subscription/LimitationsManager.js @@ -1,3 +1,5 @@ +// @ts-check + const logger = require('@overleaf/logger') const ProjectGetter = require('../Project/ProjectGetter') const UserGetter = require('../User/UserGetter') @@ -5,6 +7,7 @@ const SubscriptionLocator = require('./SubscriptionLocator') const Settings = require('@overleaf/settings') const CollaboratorsGetter = require('../Collaborators/CollaboratorsGetter') const CollaboratorsInvitesGetter = require('../Collaborators/CollaboratorsInviteGetter') +const PrivilegeLevels = require('../Authorization/PrivilegeLevels') const { callbackify, callbackifyMultiResult, @@ -58,6 +61,53 @@ async function canAddXEditCollaborators( ) } +/** + * Check whether a collaborator can be switched to the given privilege level + * + * @param {string} projectId + * @param {string} userId + * @param {'readOnly' | 'review' | 'readAndWrite'} privilegeLevel + * @return {Promise} + */ +async function canChangeCollaboratorPrivilegeLevel( + projectId, + userId, + privilegeLevel +) { + if (privilegeLevel === PrivilegeLevels.READ_ONLY) { + return true + } + + const currentPrivilegeLevel = + await CollaboratorsGetter.promises.getMemberIdPrivilegeLevel( + userId, + projectId + ) + if ( + currentPrivilegeLevel === PrivilegeLevels.READ_AND_WRITE || + currentPrivilegeLevel === PrivilegeLevels.REVIEW + ) { + // Current collaborator already takes a slot, so changing the privilege + // level won't increase the collaborator count + return true + } + + const allowedNumber = await allowedNumberOfCollaboratorsInProject(projectId) + if (allowedNumber < 0) { + // -1 means unlimited + return true + } + + const slotsTaken = + await CollaboratorsGetter.promises.getInvitedEditCollaboratorCount( + projectId + ) + const inviteCount = + await CollaboratorsInvitesGetter.promises.getEditInviteCount(projectId) + + return slotsTaken + inviteCount < allowedNumber +} + async function hasPaidSubscription(user) { const { hasSubscription, subscription } = await userHasSubscription(user) const { isMember } = await userIsMemberOfGroupSubscription(user) @@ -122,6 +172,9 @@ const LimitationsManager = { allowedNumberOfCollaboratorsForUser ), canAddXEditCollaborators: callbackify(canAddXEditCollaborators), + canChangeCollaboratorPrivilegeLevel: callbackify( + canChangeCollaboratorPrivilegeLevel + ), hasPaidSubscription: callbackifyMultiResult(hasPaidSubscription, [ 'hasPaidSubscription', 'subscription', @@ -150,6 +203,7 @@ const LimitationsManager = { allowedNumberOfCollaboratorsForUser, canAcceptEditCollaboratorInvite, canAddXEditCollaborators, + canChangeCollaboratorPrivilegeLevel, hasPaidSubscription, userHasSubscriptionOrIsGroupMember, userHasSubscription, diff --git 
a/services/web/app/src/Features/Subscription/PlansLocator.js b/services/web/app/src/Features/Subscription/PlansLocator.js index 7497c78c91..937d2d3ccb 100644 --- a/services/web/app/src/Features/Subscription/PlansLocator.js +++ b/services/web/app/src/Features/Subscription/PlansLocator.js @@ -1,6 +1,12 @@ +// TODO: This file may be deleted when Stripe is fully implemented to all users, so, consider deleting it const Settings = require('@overleaf/settings') const logger = require('@overleaf/logger') +/** + * @typedef {import('../../../../types/subscription/plan').RecurlyPlanCode} RecurlyPlanCode + * @typedef {import('../../../../types/subscription/plan').StripeLookupKey} StripeLookupKey + */ + function ensurePlansAreSetupCorrectly() { Settings.plans.forEach(plan => { if (typeof plan.price_in_cents !== 'number') { @@ -18,6 +24,51 @@ function ensurePlansAreSetupCorrectly() { }) } +const recurlyPlanCodeToStripeLookupKey = { + 'professional-annual': 'professional_annual', + professional: 'professional_monthly', + professional_free_trial_7_days: 'professional_monthly', + 'collaborator-annual': 'standard_annual', + collaborator: 'standard_monthly', + collaborator_free_trial_7_days: 'standard_monthly', + 'student-annual': 'student_annual', + student: 'student_monthly', + student_free_trial_7_days: 'student_monthly', +} + +/** + * + * @param {RecurlyPlanCode} recurlyPlanCode + * @returns {StripeLookupKey} + */ +function mapRecurlyPlanCodeToStripeLookupKey(recurlyPlanCode) { + return recurlyPlanCodeToStripeLookupKey[recurlyPlanCode] +} + +const recurlyPlanCodeToPlanTypeAndPeriod = { + collaborator: { planType: 'standard', period: 'monthly' }, + collaborator_free_trial_7_days: { planType: 'standard', period: 'monthly' }, + 'collaborator-annual': { planType: 'standard', period: 'annual' }, + professional: { planType: 'professional', period: 'monthly' }, + professional_free_trial_7_days: { + planType: 'professional', + period: 'monthly', + }, + 'professional-annual': { planType: 'professional', period: 'annual' }, + student: { planType: 'student', period: 'monthly' }, + student_free_trial_7_days: { planType: 'student', period: 'monthly' }, + 'student-annual': { planType: 'student', period: 'annual' }, +} + +/** + * + * @param {RecurlyPlanCode} recurlyPlanCode + * @returns {{ planType: 'standard' | 'professional' | 'student', period: 'annual' | 'monthly'}} + */ +function getPlanTypeAndPeriodFromRecurlyPlanCode(recurlyPlanCode) { + return recurlyPlanCodeToPlanTypeAndPeriod[recurlyPlanCode] +} + function findLocalPlanInSettings(planCode) { for (const plan of Settings.plans) { if (plan.planCode === planCode) { @@ -30,4 +81,6 @@ function findLocalPlanInSettings(planCode) { module.exports = { ensurePlansAreSetupCorrectly, findLocalPlanInSettings, + mapRecurlyPlanCodeToStripeLookupKey, + getPlanTypeAndPeriodFromRecurlyPlanCode, } diff --git a/services/web/app/src/Features/Subscription/RecurlyClient.js b/services/web/app/src/Features/Subscription/RecurlyClient.js index 37234f1f70..7bd7d1f3b2 100644 --- a/services/web/app/src/Features/Subscription/RecurlyClient.js +++ b/services/web/app/src/Features/Subscription/RecurlyClient.js @@ -16,7 +16,10 @@ const { RecurlyPlan, RecurlyImmediateCharge, } = require('./RecurlyEntities') -const { MissingBillingInfoError } = require('./Errors') +const { + MissingBillingInfoError, + SubtotalLimitExceededError, +} = require('./Errors') /** * @import { RecurlySubscriptionChangeRequest } from './RecurlyEntities' @@ -116,14 +119,37 @@ async function getSubscriptionForUser(userId) { 
*/ async function applySubscriptionChangeRequest(changeRequest) { const body = subscriptionChangeRequestToApi(changeRequest) - const change = await client.createSubscriptionChange( - `uuid-${changeRequest.subscription.id}`, - body - ) - logger.debug( - { subscriptionId: changeRequest.subscription.id, changeId: change.id }, - 'created subscription change' - ) + + try { + const change = await client.createSubscriptionChange( + `uuid-${changeRequest.subscription.id}`, + body + ) + logger.debug( + { subscriptionId: changeRequest.subscription.id, changeId: change.id }, + 'created subscription change' + ) + } catch (err) { + if (err instanceof recurly.errors.ValidationError) { + /** + * @type {{params?: { param?: string }[] | null}} + */ + const validationError = err + if ( + validationError.params?.some( + p => p.param === 'subtotal_amount_in_cents' + ) + ) { + throw new SubtotalLimitExceededError( + 'Subtotal amount in cents exceeded error', + { + subscriptionId: changeRequest.subscription.id, + } + ) + } + } + throw err + } } /** @@ -134,14 +160,38 @@ async function applySubscriptionChangeRequest(changeRequest) { */ async function previewSubscriptionChange(changeRequest) { const body = subscriptionChangeRequestToApi(changeRequest) - const subscriptionChange = await client.previewSubscriptionChange( - `uuid-${changeRequest.subscription.id}`, - body - ) - return subscriptionChangeFromApi( - changeRequest.subscription, - subscriptionChange - ) + + try { + const subscriptionChange = await client.previewSubscriptionChange( + `uuid-${changeRequest.subscription.id}`, + body + ) + + return subscriptionChangeFromApi( + changeRequest.subscription, + subscriptionChange + ) + } catch (err) { + if (err instanceof recurly.errors.ValidationError) { + /** + * @type {{params?: { param?: string }[] | null}} + */ + const validationError = err + if ( + validationError.params?.some( + p => p.param === 'subtotal_amount_in_cents' + ) + ) { + throw new SubtotalLimitExceededError( + 'Subtotal amount in cents exceeded error', + { + subscriptionId: changeRequest.subscription.id, + } + ) + } + } + throw err + } } async function removeSubscriptionChange(subscriptionId) { diff --git a/services/web/app/src/Features/Subscription/SubscriptionController.js b/services/web/app/src/Features/Subscription/SubscriptionController.js index 62568cffbe..cb8a293fb4 100644 --- a/services/web/app/src/Features/Subscription/SubscriptionController.js +++ b/services/web/app/src/Features/Subscription/SubscriptionController.js @@ -48,12 +48,13 @@ async function userSubscriptionPage(req, res) { await SplitTestHandler.promises.getAssignment(req, res, 'pause-subscription') - const { variant: flexibleLicensingVariant } = - await SplitTestHandler.promises.getAssignment( - req, - res, - 'flexible-group-licensing' - ) + const groupPricingDiscount = await SplitTestHandler.promises.getAssignment( + req, + res, + 'group-discount-10' + ) + + const showGroupDiscount = groupPricingDiscount.variant === 'enabled' const results = await SubscriptionViewModelBuilder.promises.buildUsersSubscriptionViewModel( @@ -139,7 +140,6 @@ async function userSubscriptionPage(req, res) { (managedUsersResults?.[0] === true || groupSSOResults?.[0] === true) && isGroupAdmin && - flexibleLicensingVariant === 'enabled' && plan?.canUseFlexibleLicensing ) } @@ -167,6 +167,7 @@ async function userSubscriptionPage(req, res) { managedGroupSubscriptions, managedInstitutions, managedPublishers, + showGroupDiscount, currentInstitutionsWithLicence, canUseFlexibleLicensing: 
personalSubscription?.plan?.canUseFlexibleLicensing, @@ -449,30 +450,6 @@ async function previewSubscription(req, res, next) { res.render('subscriptions/preview-change', { changePreview }) } -function updateSubscription(req, res, next) { - const origin = req && req.query ? req.query.origin : null - const user = SessionManager.getSessionUser(req.session) - const planCode = req.body.plan_code - if (planCode == null) { - const err = new Error('plan_code is not defined') - logger.warn( - { userId: user._id, err, planCode, origin, body: req.body }, - '[Subscription] error in updateSubscription form' - ) - return next(err) - } - logger.debug({ planCode, userId: user._id }, 'updating subscription') - SubscriptionHandler.updateSubscription(user, planCode, null, function (err) { - if (err) { - OError.tag(err, 'something went wrong updating subscription', { - user_id: user._id, - }) - return next(err) - } - res.redirect('/user/subscription') - }) -} - function cancelPendingSubscriptionChange(req, res, next) { const user = SessionManager.getSessionUser(req.session) logger.debug({ userId: user._id }, 'canceling pending subscription change') @@ -644,7 +621,7 @@ async function getRecommendedCurrency(req, res) { ip = req.query.ip } const currencyLookup = await GeoIpLookup.promises.getCurrencyCode(ip) - const countryCode = currencyLookup.countryCode + let countryCode = currencyLookup.countryCode const recommendedCurrency = currencyLookup.currencyCode let currency = null @@ -655,6 +632,13 @@ async function getRecommendedCurrency(req, res) { currency = recommendedCurrency } + const queryCountryCode = req.query.countryCode?.toUpperCase() + + // only enable countryCode testing flag on staging or dev environments + if (queryCountryCode && process.env.NODE_ENV !== 'production') { + countryCode = queryCountryCode + } + return { currency, recommendedCurrency, @@ -790,7 +774,6 @@ module.exports = { canceledSubscription: expressify(canceledSubscription), cancelV1Subscription, previewSubscription: expressify(previewSubscription), - updateSubscription, cancelPendingSubscriptionChange, updateAccountEmailAddress, reactivateSubscription, diff --git a/services/web/app/src/Features/Subscription/SubscriptionGroupController.mjs b/services/web/app/src/Features/Subscription/SubscriptionGroupController.mjs index 637802a665..14d73f91de 100644 --- a/services/web/app/src/Features/Subscription/SubscriptionGroupController.mjs +++ b/services/web/app/src/Features/Subscription/SubscriptionGroupController.mjs @@ -8,8 +8,6 @@ import SessionManager from '../Authentication/SessionManager.js' import UserAuditLogHandler from '../User/UserAuditLogHandler.js' import { expressify } from '@overleaf/promise-utils' import Modules from '../../infrastructure/Modules.js' -import SplitTestHandler from '../SplitTests/SplitTestHandler.js' -import ErrorController from '../Errors/ErrorController.js' import UserGetter from '../User/UserGetter.js' import { Subscription } from '../../models/Subscription.js' import { isProfessionalGroupPlan } from './PlansHelper.mjs' @@ -18,6 +16,7 @@ import { ManuallyCollectedError, PendingChangeError, InactiveError, + SubtotalLimitExceededError, } from './Errors.js' import RecurlyClient from './RecurlyClient.js' @@ -205,6 +204,13 @@ async function previewAddSeatsSubscriptionChange(req, res) { return res.status(422).end() } + if (error instanceof SubtotalLimitExceededError) { + return res.status(422).json({ + code: 'subtotal_limit_exceeded', + adding: req.body.adding, + }) + } + logger.err( { error }, 'error trying to 
preview "add seats" subscription change' @@ -239,6 +245,13 @@ async function createAddSeatsSubscriptionChange(req, res) { return res.status(422).end() } + if (error instanceof SubtotalLimitExceededError) { + return res.status(422).json({ + code: 'subtotal_limit_exceeded', + adding: req.body.adding, + }) + } + logger.err( { error }, 'error trying to create "add seats" subscription change' @@ -273,20 +286,6 @@ async function submitForm(req, res) { res.sendStatus(204) } -async function flexibleLicensingSplitTest(req, res, next) { - const { variant } = await SplitTestHandler.promises.getAssignment( - req, - res, - 'flexible-group-licensing' - ) - - if (variant !== 'enabled') { - return ErrorController.notFound(req, res) - } - - next() -} - async function subscriptionUpgradePage(req, res) { try { const userId = SessionManager.getLoggedInUserId(req.session) @@ -313,6 +312,10 @@ async function subscriptionUpgradePage(req, res) { ) } + if (error instanceof SubtotalLimitExceededError) { + return res.redirect('/user/subscription/group/subtotal-limit-exceeded') + } + if (error instanceof PendingChangeError || error instanceof InactiveError) { return res.redirect('/user/subscription') } @@ -370,12 +373,26 @@ async function manuallyCollectedSubscription(req, res) { } } +async function subtotalLimitExceeded(req, res) { + try { + const userId = SessionManager.getLoggedInUserId(req.session) + const subscription = + await SubscriptionLocator.promises.getUsersSubscription(userId) + + res.render('subscriptions/subtotal-limit-exceeded', { + groupName: subscription.teamName, + }) + } catch (error) { + logger.err({ error }, 'error trying to render subtotal limit exceeded page') + return res.render('/user/subscription') + } +} + export default { removeUserFromGroup: expressify(removeUserFromGroup), removeSelfFromGroup: expressify(removeSelfFromGroup), addSeatsToGroupSubscription: expressify(addSeatsToGroupSubscription), submitForm: expressify(submitForm), - flexibleLicensingSplitTest: expressify(flexibleLicensingSplitTest), previewAddSeatsSubscriptionChange: expressify( previewAddSeatsSubscriptionChange ), @@ -386,4 +403,5 @@ export default { upgradeSubscription: expressify(upgradeSubscription), missingBillingInformation: expressify(missingBillingInformation), manuallyCollectedSubscription: expressify(manuallyCollectedSubscription), + subtotalLimitExceeded: expressify(subtotalLimitExceeded), } diff --git a/services/web/app/src/Features/Subscription/SubscriptionLocator.js b/services/web/app/src/Features/Subscription/SubscriptionLocator.js index ee64085d0f..a980399c29 100644 --- a/services/web/app/src/Features/Subscription/SubscriptionLocator.js +++ b/services/web/app/src/Features/Subscription/SubscriptionLocator.js @@ -2,6 +2,10 @@ const { callbackifyAll } = require('@overleaf/promise-utils') const { Subscription } = require('../../models/Subscription') const { DeletedSubscription } = require('../../models/DeletedSubscription') const logger = require('@overleaf/logger') +const { + AI_ADD_ON_CODE, + isStandaloneAiAddOnPlanCode, +} = require('./RecurlyEntities') require('./GroupPlansData') // make sure dynamic group plans are loaded const SubscriptionLocator = { @@ -114,6 +118,15 @@ const SubscriptionLocator = { }).exec() }, + async hasAiAssist(userOrId) { + const userId = SubscriptionLocator._getUserId(userOrId) + const subscription = await Subscription.findOne({ admin_id: userId }).exec() + return Boolean( + isStandaloneAiAddOnPlanCode(subscription?.planCode) || + subscription?.addOns?.some(addOn => addOn.code 
=== AI_ADD_ON_CODE) + ) + }, + _getUserId(userOrId) { if (userOrId && userOrId._id) { return userOrId._id diff --git a/services/web/app/src/Features/Subscription/SubscriptionRouter.mjs b/services/web/app/src/Features/Subscription/SubscriptionRouter.mjs index 27032df669..0bb30b578e 100644 --- a/services/web/app/src/Features/Subscription/SubscriptionRouter.mjs +++ b/services/web/app/src/Features/Subscription/SubscriptionRouter.mjs @@ -73,7 +73,6 @@ export default { '/user/subscription/group/add-users', AuthenticationController.requireLogin(), RateLimiterMiddleware.rateLimit(subscriptionRateLimiter), - SubscriptionGroupController.flexibleLicensingSplitTest, SubscriptionGroupController.addSeatsToGroupSubscription ) @@ -108,7 +107,6 @@ export default { '/user/subscription/group/upgrade-subscription', AuthenticationController.requireLogin(), RateLimiterMiddleware.rateLimit(subscriptionRateLimiter), - SubscriptionGroupController.flexibleLicensingSplitTest, SubscriptionGroupController.subscriptionUpgradePage ) @@ -123,7 +121,6 @@ export default { '/user/subscription/group/missing-billing-information', AuthenticationController.requireLogin(), RateLimiterMiddleware.rateLimit(subscriptionRateLimiter), - SubscriptionGroupController.flexibleLicensingSplitTest, SubscriptionGroupController.missingBillingInformation ) @@ -131,10 +128,16 @@ export default { '/user/subscription/group/manually-collected-subscription', AuthenticationController.requireLogin(), RateLimiterMiddleware.rateLimit(subscriptionRateLimiter), - SubscriptionGroupController.flexibleLicensingSplitTest, SubscriptionGroupController.manuallyCollectedSubscription ) + webRouter.get( + '/user/subscription/group/subtotal-limit-exceeded', + AuthenticationController.requireLogin(), + RateLimiterMiddleware.rateLimit(subscriptionRateLimiter), + SubscriptionGroupController.subtotalLimitExceeded + ) + // Team invites webRouter.get( '/subscription/invites/:token/', @@ -160,7 +163,12 @@ export default { // recurly callback publicApiRouter.post( '/user/subscription/callback', - RateLimiterMiddleware.rateLimit(subscriptionRateLimiter), + RateLimiterMiddleware.rateLimit( + new RateLimiter('recurly-callback', { + points: 200, + duration: 60, + }) + ), AuthenticationController.requireBasicAuth({ [Settings.apis.recurly.webhookUser]: Settings.apis.recurly.webhookPass, }), @@ -175,12 +183,6 @@ export default { RateLimiterMiddleware.rateLimit(subscriptionRateLimiter), SubscriptionController.previewSubscription ) - webRouter.post( - '/user/subscription/update', - AuthenticationController.requireLogin(), - RateLimiterMiddleware.rateLimit(subscriptionRateLimiter), - SubscriptionController.updateSubscription - ) webRouter.get( '/user/subscription/addon/:addOnCode/add', AuthenticationController.requireLogin(), diff --git a/services/web/app/src/Features/ThirdPartyDataStore/TpdsController.mjs b/services/web/app/src/Features/ThirdPartyDataStore/TpdsController.mjs index 1db15aa932..2b8667bed4 100644 --- a/services/web/app/src/Features/ThirdPartyDataStore/TpdsController.mjs +++ b/services/web/app/src/Features/ThirdPartyDataStore/TpdsController.mjs @@ -138,36 +138,50 @@ async function updateFolder(req, res) { // .gitignore, etc because people are generally more explicit with the files // they want in git. 
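As a minimal sketch of how the new `hasAiAssist` helper might be consumed; the promise-based export shape is assumed from the module's use of `@overleaf/promise-utils`, and the calling function is hypothetical:

```js
// Illustrative sketch only: checking AI Assist entitlement for a user.
// Assumes SubscriptionLocator exposes a promise API under `promises`,
// following the promise-utils pattern used elsewhere in the service.
const SubscriptionLocator = require('./SubscriptionLocator')

async function logAiAssistStatus(userId) {
  // true when the subscription the user administers is a standalone AI
  // add-on plan, or carries the AI add-on code on a regular plan
  const hasAiAssist = await SubscriptionLocator.promises.hasAiAssist(userId)
  console.log({ userId, hasAiAssist })
  return hasAiAssist
}
```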
-async function updateProjectContents(req, res, next) { +async function updateProjectContents(req, res) { const projectId = req.params.project_id const path = `/${req.params[0]}` // UpdateMerger expects leading slash const source = req.headers['x-update-source'] || 'unknown' try { - await UpdateMerger.promises.mergeUpdate(null, projectId, path, req, source) + const metadata = await UpdateMerger.promises.mergeUpdate( + null, + projectId, + path, + req, + source + ) + res.json({ + entityId: metadata.entityId.toString(), + rev: metadata.rev, + }) } catch (error) { if ( error instanceof Errors.InvalidNameError || error instanceof Errors.DuplicateNameError ) { - return res.sendStatus(422) + res.sendStatus(422) } else { throw error } } - res.sendStatus(200) } -async function deleteProjectContents(req, res, next) { +async function deleteProjectContents(req, res) { const projectId = req.params.project_id const path = `/${req.params[0]}` // UpdateMerger expects leading slash const source = req.headers['x-update-source'] || 'unknown' - await UpdateMerger.promises.deleteUpdate(null, projectId, path, source) - res.sendStatus(200) + const entityId = await UpdateMerger.promises.deleteUpdate( + null, + projectId, + path, + source + ) + res.json({ entityId }) } -async function getQueues(req, res, next) { +async function getQueues(req, res) { const userId = SessionManager.getLoggedInUserId(req.session) res.json(await TpdsQueueManager.promises.getQueues(userId)) } @@ -206,6 +220,11 @@ export default { deleteProjectContents: expressify(deleteProjectContents), getQueues: expressify(getQueues), + promises: { + deleteProjectContents, + updateProjectContents, + }, + // for tests only parseParams, } diff --git a/services/web/app/src/Features/ThirdPartyDataStore/UpdateMerger.js b/services/web/app/src/Features/ThirdPartyDataStore/UpdateMerger.js index e073ccdd54..e67d5fc30d 100644 --- a/services/web/app/src/Features/ThirdPartyDataStore/UpdateMerger.js +++ b/services/web/app/src/Features/ThirdPartyDataStore/UpdateMerger.js @@ -12,15 +12,15 @@ const { pipeline } = require('stream/promises') async function mergeUpdate(userId, projectId, path, updateRequest, source) { const fsPath = await writeUpdateToDisk(projectId, updateRequest) + try { - const metadata = await _mergeUpdate(userId, projectId, path, fsPath, source) - return metadata + // note: important to await here so file reading finishes before cleaning up below + return await _mergeUpdate(userId, projectId, path, fsPath, source) } finally { - try { - await fsPromises.unlink(fsPath) - } catch (err) { + // note: not awaited or thrown + fsPromises.unlink(fsPath).catch(err => { logger.err({ err, projectId, fsPath }, 'error deleting file') - } + }) } } @@ -128,7 +128,7 @@ async function _mergeUpdate(userId, projectId, path, fsPath, source) { async function deleteUpdate(userId, projectId, path, source) { try { - await EditorController.promises.deleteEntityWithPath( + return await EditorController.promises.deleteEntityWithPath( projectId, path, source, diff --git a/services/web/app/src/Features/TokenAccess/TokenAccessController.mjs b/services/web/app/src/Features/TokenAccess/TokenAccessController.mjs index f573d3c726..ff4b93e88c 100644 --- a/services/web/app/src/Features/TokenAccess/TokenAccessController.mjs +++ b/services/web/app/src/Features/TokenAccess/TokenAccessController.mjs @@ -20,7 +20,6 @@ import { getSafeAdminDomainRedirect } from '../Helpers/UrlHelper.js' import UserGetter from '../User/UserGetter.js' import Settings from '@overleaf/settings' import 
LimitationsManager from '../Subscription/LimitationsManager.js' -import SplitTestHandler from '../SplitTests/SplitTestHandler.js' const orderedPrivilegeLevels = [ PrivilegeLevels.NONE, @@ -112,13 +111,6 @@ async function tokenAccessPage(req, res, next) { } } - // Populates splitTestVariants with a value for the split test name and allows - // Pug to read it - await SplitTestHandler.promises.getAssignment( - req, - res, - 'bs5-misc-pages-b2c' - ) res.render('project/token/access-react', { postUrl: makePostUrl(token), }) @@ -347,7 +339,12 @@ async function grantTokenAccessReadAndWrite(req, res, next) { } ) AnalyticsManager.recordEventForUserInBackground(userId, 'project-joined', { - mode: pendingEditor ? 'read-only' : 'read-write', + role: pendingEditor + ? PrivilegeLevels.READ_ONLY + : PrivilegeLevels.READ_AND_WRITE, + ownerId: project.owner_ref.toString(), + source: 'link-sharing', + mode: pendingEditor ? 'view' : 'edit', projectId: project._id.toString(), ...(pendingEditor && { pendingEditor: true }), }) @@ -450,7 +447,8 @@ async function grantTokenAccessReadOnly(req, res, next) { await TokenAccessHandler.promises.addReadOnlyUserToProject( userId, - project._id + project._id, + project.owner_ref ) return res.json({ diff --git a/services/web/app/src/Features/TokenAccess/TokenAccessHandler.js b/services/web/app/src/Features/TokenAccess/TokenAccessHandler.js index 06a5735e86..0d08903ec3 100644 --- a/services/web/app/src/Features/TokenAccess/TokenAccessHandler.js +++ b/services/web/app/src/Features/TokenAccess/TokenAccessHandler.js @@ -151,12 +151,15 @@ const TokenAccessHandler = { throw new Error('invalid token type') }, - async addReadOnlyUserToProject(userId, projectId) { + async addReadOnlyUserToProject(userId, projectId, ownerId) { userId = new ObjectId(userId.toString()) projectId = new ObjectId(projectId.toString()) Analytics.recordEventForUserInBackground(userId, 'project-joined', { - mode: 'read-only', + role: PrivilegeLevels.READ_ONLY, projectId: projectId.toString(), + source: 'link-sharing', + ownerId: ownerId.toString(), + mode: 'view', }) return await Project.updateOne( diff --git a/services/web/app/src/Features/Tutorial/TutorialController.mjs b/services/web/app/src/Features/Tutorial/TutorialController.mjs index 4aadb6b451..3672f8db0d 100644 --- a/services/web/app/src/Features/Tutorial/TutorialController.mjs +++ b/services/web/app/src/Features/Tutorial/TutorialController.mjs @@ -11,6 +11,7 @@ const VALID_KEYS = [ 'code-editor-mode-prompt', 'history-restore-promo', 'us-gov-banner', + 'us-gov-banner-fedramp', ] async function completeTutorial(req, res, next) { diff --git a/services/web/app/src/Features/Uploads/UploadsRouter.mjs b/services/web/app/src/Features/Uploads/UploadsRouter.mjs index 83df92d6f9..17ff5afc97 100644 --- a/services/web/app/src/Features/Uploads/UploadsRouter.mjs +++ b/services/web/app/src/Features/Uploads/UploadsRouter.mjs @@ -11,7 +11,7 @@ const rateLimiters = { duration: 60, }), fileUpload: new RateLimiter('file-upload', { - points: 200, + points: 500, duration: 60 * 15, }), } diff --git a/services/web/app/src/Features/User/UserAuditLogHandler.js b/services/web/app/src/Features/User/UserAuditLogHandler.js index 6984c4f034..b1d404303e 100644 --- a/services/web/app/src/Features/User/UserAuditLogHandler.js +++ b/services/web/app/src/Features/User/UserAuditLogHandler.js @@ -7,6 +7,7 @@ function _canHaveNoIpAddressId(operation, info) { if (operation === 'leave-group-subscription') return true if (operation === 'must-reset-password-set') return true if (operation 
=== 'remove-email' && info.script) return true + if (operation === 'release-managed-user' && info.script) return true return false } @@ -22,6 +23,7 @@ function _canHaveNoInitiatorId(operation, info) { if (operation === 'must-reset-password-set') return true if (operation === 'must-reset-password-unset') return true if (operation === 'account-suspension' && info.script) return true + if (operation === 'release-managed-user' && info.script) return true } /** diff --git a/services/web/app/src/Features/User/UserController.js b/services/web/app/src/Features/User/UserController.js index d4257f7912..e4186d39a8 100644 --- a/services/web/app/src/Features/User/UserController.js +++ b/services/web/app/src/Features/User/UserController.js @@ -392,6 +392,9 @@ async function updateUserSettings(req, res, next) { req.body.referencesSearchMode === 'simple' ? 'simple' : 'advanced' user.ace.referencesSearchMode = mode } + if (req.body.enableNewEditor != null) { + user.ace.enableNewEditor = Boolean(req.body.enableNewEditor) + } await user.save() const newEmail = req.body.email?.trim().toLowerCase() diff --git a/services/web/app/src/Features/User/UserDeleter.js b/services/web/app/src/Features/User/UserDeleter.js index 2ead415a54..4009419ffe 100644 --- a/services/web/app/src/Features/User/UserDeleter.js +++ b/services/web/app/src/Features/User/UserDeleter.js @@ -95,9 +95,7 @@ async function expireDeletedUsersAfterDuration() { 'deleterData.deletedAt': { $lt: new Date(moment().subtract(DURATION, 'days')), }, - user: { - $ne: null, - }, + user: { $type: 'object' }, }).exec() if (deletedUsers.length === 0) { diff --git a/services/web/app/src/Features/User/UserEmailsController.js b/services/web/app/src/Features/User/UserEmailsController.js index 8d4745e891..54ace10cb0 100644 --- a/services/web/app/src/Features/User/UserEmailsController.js +++ b/services/web/app/src/Features/User/UserEmailsController.js @@ -480,10 +480,6 @@ async function confirmSecondaryEmailPage(req, res) { return res.redirect(redirectURL) } - // Populates splitTestVariants with a value for the split test name and allows - // Pug to read it - await SplitTestHandler.promises.getAssignment(req, res, 'bs5-misc-pages-b2c') - AnalyticsManager.recordEventForUserInBackground( userId, 'confirm-secondary-email-page-displayed' diff --git a/services/web/app/src/Features/User/UserPagesController.mjs b/services/web/app/src/Features/User/UserPagesController.mjs index d7bf00aed7..cd456a4377 100644 --- a/services/web/app/src/Features/User/UserPagesController.mjs +++ b/services/web/app/src/Features/User/UserPagesController.mjs @@ -117,10 +117,6 @@ async function settingsPage(req, res) { ) } - // Get the user's assignment for this page's Bootstrap 5 split test, which - // populates splitTestVariants with a value for the split test name and allows - // Pug to read it - await SplitTestHandler.promises.getAssignment(req, res, 'bootstrap-5') // Get the users write-and-cite assignment to switch between translation strings await SplitTestHandler.promises.getAssignment(req, res, 'write-and-cite') // Get the users papers-integration assignment to show the linking widget @@ -242,6 +238,7 @@ const UserPagesController = { } res.render('user/login', { title: Settings.nav?.login_support_title || 'login', + login_support_title: Settings.nav?.login_support_title, login_support_text: Settings.nav?.login_support_text, }) }, @@ -310,14 +307,6 @@ const UserPagesController = { }, async compromisedPasswordPage(req, res) { - // Populates splitTestVariants with a value for the split test 
name and allows - // Pug to read it - await SplitTestHandler.promises.getAssignment( - req, - res, - 'bs5-misc-pages-platform' - ) - res.render('user/compromised_password') }, diff --git a/services/web/app/src/Features/UserMembership/UserMembershipController.mjs b/services/web/app/src/Features/UserMembership/UserMembershipController.mjs index 547b3ba854..aaa8fa5812 100644 --- a/services/web/app/src/Features/UserMembership/UserMembershipController.mjs +++ b/services/web/app/src/Features/UserMembership/UserMembershipController.mjs @@ -11,7 +11,6 @@ import { import { SSOConfig } from '../../models/SSOConfig.js' import { Parser as CSVParser } from 'json2csv' import { expressify } from '@overleaf/promise-utils' -import SplitTestHandler from '../SplitTests/SplitTestHandler.js' import PlansLocator from '../Subscription/PlansLocator.js' import RecurlyClient from '../Subscription/RecurlyClient.js' @@ -31,15 +30,6 @@ async function manageGroupMembers(req, res, next) { entityConfig ) const ssoConfig = await SSOConfig.findById(subscription.ssoConfig).exec() - - await SplitTestHandler.promises.getAssignment( - req, - res, - 'flexible-group-licensing' - ) - - await SplitTestHandler.promises.getAssignment(req, res, 'bootstrap-5-groups') - const plan = PlansLocator.findLocalPlanInSettings(subscription.planCode) const userId = SessionManager.getLoggedInUserId(req.session) const isAdmin = subscription.admin_id.toString() === userId @@ -120,8 +110,6 @@ async function _renderManagersPage(req, res, next, template) { entityConfig ) - await SplitTestHandler.promises.getAssignment(req, res, 'bootstrap-5-groups') - res.render(template, { name: entityName, users, diff --git a/services/web/app/src/infrastructure/ExpressLocals.js b/services/web/app/src/infrastructure/ExpressLocals.js index 002c342eef..eae1b48219 100644 --- a/services/web/app/src/infrastructure/ExpressLocals.js +++ b/services/web/app/src/infrastructure/ExpressLocals.js @@ -429,6 +429,8 @@ module.exports = function (webRouter, privateApiRouter, publicApiRouter) { wikiEnabled: Settings.overleaf != null || Settings.proxyLearn, templatesEnabled: Settings.overleaf != null || Settings.templates?.user_id != null, + cioWriteKey: Settings.analytics?.cio?.writeKey, + cioSiteId: Settings.analytics?.cio?.siteId, } next() }) diff --git a/services/web/app/src/models/Project.js b/services/web/app/src/models/Project.js index 1555b471a4..8da4b888d3 100644 --- a/services/web/app/src/models/Project.js +++ b/services/web/app/src/models/Project.js @@ -41,6 +41,7 @@ const ProjectSchema = new Schema( reviewer_refs: [{ type: ObjectId, ref: 'User' }], readOnly_refs: [{ type: ObjectId, ref: 'User' }], pendingEditor_refs: [{ type: ObjectId, ref: 'User' }], + pendingReviewer_refs: [{ type: ObjectId, ref: 'User' }], rootDoc_id: { type: ObjectId }, rootFolder: [FolderSchema], mainBibliographyDoc_id: { type: ObjectId }, diff --git a/services/web/app/src/models/User.js b/services/web/app/src/models/User.js index 7f6081efe2..73506f161c 100644 --- a/services/web/app/src/models/User.js +++ b/services/web/app/src/models/User.js @@ -98,6 +98,7 @@ const UserSchema = new Schema( lineHeight: { type: String }, mathPreview: { type: Boolean, default: true }, referencesSearchMode: { type: String, default: 'advanced' }, // 'advanced' or 'simple' + enableNewEditor: { type: Boolean }, }, features: { collaborators: { @@ -194,6 +195,7 @@ const UserSchema = new Schema( writefull: { enabled: { type: Boolean, default: null }, autoCreatedAccount: { type: Boolean, default: false }, + isPremium: { type: 
Boolean, default: false }, }, aiErrorAssistant: { enabled: { type: Boolean, default: true }, diff --git a/services/web/app/templates/plans/groups.json b/services/web/app/templates/plans/groups.json index ba7b22aec6..1a4994c268 100644 --- a/services/web/app/templates/plans/groups.json +++ b/services/web/app/templates/plans/groups.json @@ -1029,177 +1029,177 @@ "professional": { "AUD": { "2": { - "price_in_cents": 91800, + "price_in_cents": 82600, "additional_license_legacy_price_in_cents": 32100 }, "3": { - "price_in_cents": 137700, + "price_in_cents": 123900, "additional_license_legacy_price_in_cents": 32100 }, "4": { - "price_in_cents": 183600, + "price_in_cents": 165200, "additional_license_legacy_price_in_cents": 32100 }, "5": { - "price_in_cents": 229500, + "price_in_cents": 206500, "additional_license_legacy_price_in_cents": 32100 }, "10": { - "price_in_cents": 459000, + "price_in_cents": 413000, "additional_license_legacy_price_in_cents": 29800 }, "20": { - "price_in_cents": 918000, + "price_in_cents": 826000, "additional_license_legacy_price_in_cents": 27500 }, "50": { - "price_in_cents": 2295000, + "price_in_cents": 2065000, "additional_license_legacy_price_in_cents": 25200 } }, "BRL": { "2": { - "price_in_cents": 199800, + "price_in_cents": 179800, "additional_license_legacy_price_in_cents": 69900 }, "3": { - "price_in_cents": 299700, + "price_in_cents": 269700, "additional_license_legacy_price_in_cents": 69900 }, "4": { - "price_in_cents": 399600, + "price_in_cents": 359600, "additional_license_legacy_price_in_cents": 69900 }, "5": { - "price_in_cents": 499500, + "price_in_cents": 449500, "additional_license_legacy_price_in_cents": 69900 }, "10": { - "price_in_cents": 999000, + "price_in_cents": 899000, "additional_license_legacy_price_in_cents": 64900 }, "20": { - "price_in_cents": 1998000, + "price_in_cents": 1798000, "additional_license_legacy_price_in_cents": 59900 }, "50": { - "price_in_cents": 4995000, + "price_in_cents": 4495000, "additional_license_legacy_price_in_cents": 54900 } }, "CAD": { "2": { - "price_in_cents": 89800, + "price_in_cents": 80800, "additional_license_legacy_price_in_cents": 31400 }, "3": { - "price_in_cents": 134700, + "price_in_cents": 121200, "additional_license_legacy_price_in_cents": 31400 }, "4": { - "price_in_cents": 179600, + "price_in_cents": 161600, "additional_license_legacy_price_in_cents": 31400 }, "5": { - "price_in_cents": 224500, + "price_in_cents": 202000, "additional_license_legacy_price_in_cents": 31400 }, "10": { - "price_in_cents": 449000, + "price_in_cents": 404000, "additional_license_legacy_price_in_cents": 29100 }, "20": { - "price_in_cents": 898000, + "price_in_cents": 808000, "additional_license_legacy_price_in_cents": 26900 }, "50": { - "price_in_cents": 2245000, + "price_in_cents": 2020000, "additional_license_legacy_price_in_cents": 24600 } }, "CHF": { "2": { - "price_in_cents": 79800, + "price_in_cents": 71800, "additional_license_legacy_price_in_cents": 49900 }, "3": { - "price_in_cents": 119700, + "price_in_cents": 107700, "additional_license_legacy_price_in_cents": 49900 }, "4": { - "price_in_cents": 159600, + "price_in_cents": 143600, "additional_license_legacy_price_in_cents": 49900 }, "5": { - "price_in_cents": 199500, + "price_in_cents": 179500, "additional_license_legacy_price_in_cents": 49900 }, "10": { - "price_in_cents": 399000, + "price_in_cents": 359000, "additional_license_legacy_price_in_cents": 25900 }, "20": { - "price_in_cents": 798000, + "price_in_cents": 718000, 
"additional_license_legacy_price_in_cents": 23900 }, "50": { - "price_in_cents": 1995000, + "price_in_cents": 1795000, "additional_license_legacy_price_in_cents": 21900 } }, "CLP": { "2": { - "price_in_cents": 48198000, + "price_in_cents": 43378200, "additional_license_legacy_price_in_cents": 16869300 }, "3": { - "price_in_cents": 72297000, + "price_in_cents": 65067300, "additional_license_legacy_price_in_cents": 16869300 }, "4": { - "price_in_cents": 96396000, + "price_in_cents": 86756400, "additional_license_legacy_price_in_cents": 16869300 }, "5": { - "price_in_cents": 120495000, + "price_in_cents": 108445500, "additional_license_legacy_price_in_cents": 16869300 }, "10": { - "price_in_cents": 240990000, + "price_in_cents": 216891000, "additional_license_legacy_price_in_cents": 15664300 }, "20": { - "price_in_cents": 481980000, + "price_in_cents": 433782000, "additional_license_legacy_price_in_cents": 14459400 }, "50": { - "price_in_cents": 1204950000, + "price_in_cents": 1084455000, "additional_license_legacy_price_in_cents": 13254400 } }, "COP": { "2": { - "price_in_cents": 157980000, + "price_in_cents": 142182000, "additional_license_legacy_price_in_cents": 55293000 }, "3": { - "price_in_cents": 236970000, + "price_in_cents": 213273000, "additional_license_legacy_price_in_cents": 55293000 }, "4": { - "price_in_cents": 315960000, + "price_in_cents": 284364000, "additional_license_legacy_price_in_cents": 55293000 }, "5": { - "price_in_cents": 394950000, + "price_in_cents": 355455000, "additional_license_legacy_price_in_cents": 55293000 }, "10": { - "price_in_cents": 789900000, + "price_in_cents": 710910000, "additional_license_legacy_price_in_cents": 51343500 }, "20": { - "price_in_cents": 1579800000, + "price_in_cents": 1421820000, "additional_license_legacy_price_in_cents": 47394000 }, "50": { @@ -1209,331 +1209,331 @@ }, "DKK": { "2": { - "price_in_cents": 475800, + "price_in_cents": 428200, "additional_license_legacy_price_in_cents": 166500 }, "3": { - "price_in_cents": 713700, + "price_in_cents": 642300, "additional_license_legacy_price_in_cents": 166500 }, "4": { - "price_in_cents": 951600, + "price_in_cents": 856400, "additional_license_legacy_price_in_cents": 166500 }, "5": { - "price_in_cents": 1189500, + "price_in_cents": 1070500, "additional_license_legacy_price_in_cents": 166500 }, "10": { - "price_in_cents": 2379000, + "price_in_cents": 2141000, "additional_license_legacy_price_in_cents": 154600 }, "20": { - "price_in_cents": 4758000, + "price_in_cents": 4282000, "additional_license_legacy_price_in_cents": 142700 }, "50": { - "price_in_cents": 11895000, + "price_in_cents": 10705000, "additional_license_legacy_price_in_cents": 130800 } }, "EUR": { "2": { - "price_in_cents": 73800, + "price_in_cents": 66400, "additional_license_legacy_price_in_cents": 25800 }, "3": { - "price_in_cents": 110700, + "price_in_cents": 99600, "additional_license_legacy_price_in_cents": 25800 }, "4": { - "price_in_cents": 147600, + "price_in_cents": 132800, "additional_license_legacy_price_in_cents": 25800 }, "5": { - "price_in_cents": 184500, + "price_in_cents": 166000, "additional_license_legacy_price_in_cents": 25800 }, "10": { - "price_in_cents": 369000, + "price_in_cents": 332000, "additional_license_legacy_price_in_cents": 23900 }, "20": { - "price_in_cents": 738000, + "price_in_cents": 664000, "additional_license_legacy_price_in_cents": 22100 }, "50": { - "price_in_cents": 1845000, + "price_in_cents": 1660000, "additional_license_legacy_price_in_cents": 20200 } }, "GBP": { "2": { - 
"price_in_cents": 63800, + "price_in_cents": 57400, "additional_license_legacy_price_in_cents": 22300 }, "3": { - "price_in_cents": 95700, + "price_in_cents": 86100, "additional_license_legacy_price_in_cents": 22300 }, "4": { - "price_in_cents": 127600, + "price_in_cents": 114800, "additional_license_legacy_price_in_cents": 22300 }, "5": { - "price_in_cents": 159500, + "price_in_cents": 143500, "additional_license_legacy_price_in_cents": 22300 }, "10": { - "price_in_cents": 319000, + "price_in_cents": 287000, "additional_license_legacy_price_in_cents": 20700 }, "20": { - "price_in_cents": 638000, + "price_in_cents": 574000, "additional_license_legacy_price_in_cents": 19100 }, "50": { - "price_in_cents": 1595000, + "price_in_cents": 1435000, "additional_license_legacy_price_in_cents": 17500 } }, "INR": { "2": { - "price_in_cents": 1919800, + "price_in_cents": 1727800, "additional_license_legacy_price_in_cents": 671900 }, "3": { - "price_in_cents": 2879700, + "price_in_cents": 2591700, "additional_license_legacy_price_in_cents": 671900 }, "4": { - "price_in_cents": 3839600, + "price_in_cents": 3455600, "additional_license_legacy_price_in_cents": 671900 }, "5": { - "price_in_cents": 4799500, + "price_in_cents": 4319500, "additional_license_legacy_price_in_cents": 671900 }, "10": { - "price_in_cents": 9599000, + "price_in_cents": 8639000, "additional_license_legacy_price_in_cents": 623900 }, "20": { - "price_in_cents": 19198000, + "price_in_cents": 17278000, "additional_license_legacy_price_in_cents": 575900 }, "50": { - "price_in_cents": 47995000, + "price_in_cents": 43195000, "additional_license_legacy_price_in_cents": 527900 } }, "MXN": { "2": { - "price_in_cents": 1179800, + "price_in_cents": 1061800, "additional_license_legacy_price_in_cents": 412900 }, "3": { - "price_in_cents": 1769700, + "price_in_cents": 1592700, "additional_license_legacy_price_in_cents": 412900 }, "4": { - "price_in_cents": 2359600, + "price_in_cents": 2123600, "additional_license_legacy_price_in_cents": 412900 }, "5": { - "price_in_cents": 2949500, + "price_in_cents": 2654500, "additional_license_legacy_price_in_cents": 412900 }, "10": { - "price_in_cents": 5899000, + "price_in_cents": 5309000, "additional_license_legacy_price_in_cents": 383400 }, "20": { - "price_in_cents": 11798000, + "price_in_cents": 10618000, "additional_license_legacy_price_in_cents": 353900 }, "50": { - "price_in_cents": 29495000, + "price_in_cents": 26545000, "additional_license_legacy_price_in_cents": 324400 } }, "NOK": { "2": { - "price_in_cents": 573800, + "price_in_cents": 516400, "additional_license_legacy_price_in_cents": 200800 }, "3": { - "price_in_cents": 860700, + "price_in_cents": 774600, "additional_license_legacy_price_in_cents": 200800 }, "4": { - "price_in_cents": 1147600, + "price_in_cents": 1032800, "additional_license_legacy_price_in_cents": 200800 }, "5": { - "price_in_cents": 1434500, + "price_in_cents": 1291000, "additional_license_legacy_price_in_cents": 200800 }, "10": { - "price_in_cents": 2869000, + "price_in_cents": 2582000, "additional_license_legacy_price_in_cents": 186400 }, "20": { - "price_in_cents": 5738000, + "price_in_cents": 5164000, "additional_license_legacy_price_in_cents": 172100 }, "50": { - "price_in_cents": 14345000, + "price_in_cents": 12910000, "additional_license_legacy_price_in_cents": 157700 } }, "NZD": { "2": { - "price_in_cents": 91800, + "price_in_cents": 82600, "additional_license_legacy_price_in_cents": 32100 }, "3": { - "price_in_cents": 137700, + "price_in_cents": 123900, 
"additional_license_legacy_price_in_cents": 32100 }, "4": { - "price_in_cents": 183600, + "price_in_cents": 165200, "additional_license_legacy_price_in_cents": 32100 }, "5": { - "price_in_cents": 229500, + "price_in_cents": 206500, "additional_license_legacy_price_in_cents": 32100 }, "10": { - "price_in_cents": 459000, + "price_in_cents": 413000, "additional_license_legacy_price_in_cents": 29800 }, "20": { - "price_in_cents": 918000, + "price_in_cents": 826000, "additional_license_legacy_price_in_cents": 27500 }, "50": { - "price_in_cents": 2295000, + "price_in_cents": 2065000, "additional_license_legacy_price_in_cents": 25200 } }, "PEN": { "2": { - "price_in_cents": 191800, + "price_in_cents": 172600, "additional_license_legacy_price_in_cents": 67100 }, "3": { - "price_in_cents": 287700, + "price_in_cents": 258900, "additional_license_legacy_price_in_cents": 67100 }, "4": { - "price_in_cents": 383600, + "price_in_cents": 345200, "additional_license_legacy_price_in_cents": 67100 }, "5": { - "price_in_cents": 479500, + "price_in_cents": 431500, "additional_license_legacy_price_in_cents": 67100 }, "10": { - "price_in_cents": 959000, + "price_in_cents": 863000, "additional_license_legacy_price_in_cents": 62300 }, "20": { - "price_in_cents": 1918000, + "price_in_cents": 1726000, "additional_license_legacy_price_in_cents": 57500 }, "50": { - "price_in_cents": 4795000, + "price_in_cents": 4315000, "additional_license_legacy_price_in_cents": 52700 } }, "SEK": { "2": { - "price_in_cents": 573800, + "price_in_cents": 516400, "additional_license_legacy_price_in_cents": 200800 }, "3": { - "price_in_cents": 860700, + "price_in_cents": 774600, "additional_license_legacy_price_in_cents": 200800 }, "4": { - "price_in_cents": 1147600, + "price_in_cents": 1032800, "additional_license_legacy_price_in_cents": 200800 }, "5": { - "price_in_cents": 1434500, + "price_in_cents": 1291000, "additional_license_legacy_price_in_cents": 200800 }, "10": { - "price_in_cents": 2869000, + "price_in_cents": 2582000, "additional_license_legacy_price_in_cents": 186400 }, "20": { - "price_in_cents": 5738000, + "price_in_cents": 5164000, "additional_license_legacy_price_in_cents": 172100 }, "50": { - "price_in_cents": 14345000, + "price_in_cents": 12910000, "additional_license_legacy_price_in_cents": 157700 } }, "SGD": { "2": { - "price_in_cents": 103800, + "price_in_cents": 93400, "additional_license_legacy_price_in_cents": 36300 }, "3": { - "price_in_cents": 155700, + "price_in_cents": 140100, "additional_license_legacy_price_in_cents": 36300 }, "4": { - "price_in_cents": 207600, + "price_in_cents": 186800, "additional_license_legacy_price_in_cents": 36300 }, "5": { - "price_in_cents": 259500, + "price_in_cents": 233500, "additional_license_legacy_price_in_cents": 36300 }, "10": { - "price_in_cents": 519000, + "price_in_cents": 467000, "additional_license_legacy_price_in_cents": 33700 }, "20": { - "price_in_cents": 1038000, + "price_in_cents": 934000, "additional_license_legacy_price_in_cents": 31100 }, "50": { - "price_in_cents": 2595000, + "price_in_cents": 2335000, "additional_license_legacy_price_in_cents": 28500 } }, "USD": { "2": { - "price_in_cents": 79800, + "price_in_cents": 71800, "additional_license_legacy_price_in_cents": 27900 }, "3": { - "price_in_cents": 119700, + "price_in_cents": 107700, "additional_license_legacy_price_in_cents": 27900 }, "4": { - "price_in_cents": 159600, + "price_in_cents": 143600, "additional_license_legacy_price_in_cents": 27900 }, "5": { - "price_in_cents": 199500, + "price_in_cents": 
179500, "additional_license_legacy_price_in_cents": 27900 }, "10": { - "price_in_cents": 399000, + "price_in_cents": 359000, "additional_license_legacy_price_in_cents": 25900 }, "20": { - "price_in_cents": 798000, + "price_in_cents": 718000, "additional_license_legacy_price_in_cents": 23900 }, "50": { - "price_in_cents": 1995000, + "price_in_cents": 1795000, "additional_license_legacy_price_in_cents": 21900 } } @@ -1541,511 +1541,511 @@ "collaborator": { "AUD": { "2": { - "price_in_cents": 47800, + "price_in_cents": 43000, "additional_license_legacy_price_in_cents": 16700 }, "3": { - "price_in_cents": 71700, + "price_in_cents": 64500, "additional_license_legacy_price_in_cents": 16700 }, "4": { - "price_in_cents": 95600, + "price_in_cents": 86000, "additional_license_legacy_price_in_cents": 16700 }, "5": { - "price_in_cents": 119500, + "price_in_cents": 107500, "additional_license_legacy_price_in_cents": 16700 }, "10": { - "price_in_cents": 239000, + "price_in_cents": 215000, "additional_license_legacy_price_in_cents": 15500 }, "20": { - "price_in_cents": 478000, + "price_in_cents": 430000, "additional_license_legacy_price_in_cents": 14300 }, "50": { - "price_in_cents": 1195000, + "price_in_cents": 1075000, "additional_license_legacy_price_in_cents": 13100 } }, "BRL": { "2": { - "price_in_cents": 99800, + "price_in_cents": 89800, "additional_license_legacy_price_in_cents": 34900 }, "3": { - "price_in_cents": 149700, + "price_in_cents": 134700, "additional_license_legacy_price_in_cents": 34900 }, "4": { - "price_in_cents": 199600, + "price_in_cents": 179600, "additional_license_legacy_price_in_cents": 34900 }, "5": { - "price_in_cents": 249500, + "price_in_cents": 224500, "additional_license_legacy_price_in_cents": 34900 }, "10": { - "price_in_cents": 499000, + "price_in_cents": 449000, "additional_license_legacy_price_in_cents": 32400 }, "20": { - "price_in_cents": 998000, + "price_in_cents": 898000, "additional_license_legacy_price_in_cents": 29900 }, "50": { - "price_in_cents": 2495000, + "price_in_cents": 2245000, "additional_license_legacy_price_in_cents": 27400 } }, "CAD": { "2": { - "price_in_cents": 45800, + "price_in_cents": 41200, "additional_license_legacy_price_in_cents": 16000 }, "3": { - "price_in_cents": 68700, + "price_in_cents": 61800, "additional_license_legacy_price_in_cents": 16000 }, "4": { - "price_in_cents": 91600, + "price_in_cents": 82400, "additional_license_legacy_price_in_cents": 16000 }, "5": { - "price_in_cents": 114500, + "price_in_cents": 103000, "additional_license_legacy_price_in_cents": 16000 }, "10": { - "price_in_cents": 229000, + "price_in_cents": 206000, "additional_license_legacy_price_in_cents": 14800 }, "20": { - "price_in_cents": 458000, + "price_in_cents": 412000, "additional_license_legacy_price_in_cents": 13700 }, "50": { - "price_in_cents": 1145000, + "price_in_cents": 1030000, "additional_license_legacy_price_in_cents": 12500 } }, "CHF": { "2": { - "price_in_cents": 39800, + "price_in_cents": 35800, "additional_license_legacy_price_in_cents": 13900 }, "3": { - "price_in_cents": 59700, + "price_in_cents": 53700, "additional_license_legacy_price_in_cents": 13900 }, "4": { - "price_in_cents": 79600, + "price_in_cents": 71600, "additional_license_legacy_price_in_cents": 13900 }, "5": { - "price_in_cents": 99500, + "price_in_cents": 89500, "additional_license_legacy_price_in_cents": 13900 }, "10": { - "price_in_cents": 199000, + "price_in_cents": 179000, "additional_license_legacy_price_in_cents": 12900 }, "20": { - "price_in_cents": 398000, + 
"price_in_cents": 358000, "additional_license_legacy_price_in_cents": 11900 }, "50": { - "price_in_cents": 995000, + "price_in_cents": 895000, "additional_license_legacy_price_in_cents": 10900 } }, "CLP": { "2": { - "price_in_cents": 22198000, + "price_in_cents": 19978200, "additional_license_legacy_price_in_cents": 7769300 }, "3": { - "price_in_cents": 33297000, + "price_in_cents": 29967300, "additional_license_legacy_price_in_cents": 7769300 }, "4": { - "price_in_cents": 44396000, + "price_in_cents": 39956400, "additional_license_legacy_price_in_cents": 7769300 }, "5": { - "price_in_cents": 55495000, + "price_in_cents": 49945500, "additional_license_legacy_price_in_cents": 7769300 }, "10": { - "price_in_cents": 110990000, + "price_in_cents": 99891000, "additional_license_legacy_price_in_cents": 7214300 }, "20": { - "price_in_cents": 221980000, + "price_in_cents": 199782000, "additional_license_legacy_price_in_cents": 6659400 }, "50": { - "price_in_cents": 554950000, + "price_in_cents": 499455000, "additional_license_legacy_price_in_cents": 6104400 } }, "COP": { "2": { - "price_in_cents": 77980000, + "price_in_cents": 70182000, "additional_license_legacy_price_in_cents": 27293000 }, "3": { - "price_in_cents": 116970000, + "price_in_cents": 105273000, "additional_license_legacy_price_in_cents": 27293000 }, "4": { - "price_in_cents": 155960000, + "price_in_cents": 140364000, "additional_license_legacy_price_in_cents": 27293000 }, "5": { - "price_in_cents": 194950000, + "price_in_cents": 175455000, "additional_license_legacy_price_in_cents": 27293000 }, "10": { - "price_in_cents": 389900000, + "price_in_cents": 350910000, "additional_license_legacy_price_in_cents": 25343500 }, "20": { - "price_in_cents": 779800000, + "price_in_cents": 701820000, "additional_license_legacy_price_in_cents": 23394000 }, "50": { - "price_in_cents": 1949500000, + "price_in_cents": 1754550000, "additional_license_legacy_price_in_cents": 21444500 } }, "DKK": { "2": { - "price_in_cents": 239800, + "price_in_cents": 215800, "additional_license_legacy_price_in_cents": 83900 }, "3": { - "price_in_cents": 359700, + "price_in_cents": 323700, "additional_license_legacy_price_in_cents": 83900 }, "4": { - "price_in_cents": 479600, + "price_in_cents": 431600, "additional_license_legacy_price_in_cents": 83900 }, "5": { - "price_in_cents": 599500, + "price_in_cents": 539500, "additional_license_legacy_price_in_cents": 83900 }, "10": { - "price_in_cents": 1199000, + "price_in_cents": 1079000, "additional_license_legacy_price_in_cents": 77900 }, "20": { - "price_in_cents": 2398000, + "price_in_cents": 2158000, "additional_license_legacy_price_in_cents": 71900 }, "50": { - "price_in_cents": 5995000, + "price_in_cents": 5395000, "additional_license_legacy_price_in_cents": 65900 } }, "EUR": { "2": { - "price_in_cents": 35800, + "price_in_cents": 32200, "additional_license_legacy_price_in_cents": 12500 }, "3": { - "price_in_cents": 53700, + "price_in_cents": 48300, "additional_license_legacy_price_in_cents": 12500 }, "4": { - "price_in_cents": 71600, + "price_in_cents": 64400, "additional_license_legacy_price_in_cents": 12500 }, "5": { - "price_in_cents": 89500, + "price_in_cents": 80500, "additional_license_legacy_price_in_cents": 12500 }, "10": { - "price_in_cents": 179000, + "price_in_cents": 161000, "additional_license_legacy_price_in_cents": 11600 }, "20": { - "price_in_cents": 358000, + "price_in_cents": 322000, "additional_license_legacy_price_in_cents": 10700 }, "50": { - "price_in_cents": 895000, + "price_in_cents": 805000, 
"additional_license_legacy_price_in_cents": 9800 } }, "GBP": { "2": { - "price_in_cents": 31800, + "price_in_cents": 28600, "additional_license_legacy_price_in_cents": 11100 }, "3": { - "price_in_cents": 47700, + "price_in_cents": 42900, "additional_license_legacy_price_in_cents": 11100 }, "4": { - "price_in_cents": 63600, + "price_in_cents": 57200, "additional_license_legacy_price_in_cents": 11100 }, "5": { - "price_in_cents": 79500, + "price_in_cents": 71500, "additional_license_legacy_price_in_cents": 11100 }, "10": { - "price_in_cents": 159000, + "price_in_cents": 143000, "additional_license_legacy_price_in_cents": 10300 }, "20": { - "price_in_cents": 318000, + "price_in_cents": 286000, "additional_license_legacy_price_in_cents": 9500 }, "50": { - "price_in_cents": 795000, + "price_in_cents": 715000, "additional_license_legacy_price_in_cents": 8700 } }, "INR": { "2": { - "price_in_cents": 919800, + "price_in_cents": 827800, "additional_license_legacy_price_in_cents": 321900 }, "3": { - "price_in_cents": 1379700, + "price_in_cents": 1241700, "additional_license_legacy_price_in_cents": 321900 }, "4": { - "price_in_cents": 1839600, + "price_in_cents": 1655600, "additional_license_legacy_price_in_cents": 321900 }, "5": { - "price_in_cents": 2299500, + "price_in_cents": 2069500, "additional_license_legacy_price_in_cents": 321900 }, "10": { - "price_in_cents": 4599000, + "price_in_cents": 4139000, "additional_license_legacy_price_in_cents": 298900 }, "20": { - "price_in_cents": 9198000, + "price_in_cents": 8278000, "additional_license_legacy_price_in_cents": 275900 }, "50": { - "price_in_cents": 22995000, + "price_in_cents": 20695000, "additional_license_legacy_price_in_cents": 252900 } }, "MXN": { "2": { - "price_in_cents": 579800, + "price_in_cents": 521800, "additional_license_legacy_price_in_cents": 202900 }, "3": { - "price_in_cents": 869700, + "price_in_cents": 782700, "additional_license_legacy_price_in_cents": 202900 }, "4": { - "price_in_cents": 1159600, + "price_in_cents": 1043600, "additional_license_legacy_price_in_cents": 202900 }, "5": { - "price_in_cents": 1449500, + "price_in_cents": 1304500, "additional_license_legacy_price_in_cents": 202900 }, "10": { - "price_in_cents": 2899000, + "price_in_cents": 2609000, "additional_license_legacy_price_in_cents": 188400 }, "20": { - "price_in_cents": 5798000, + "price_in_cents": 5218000, "additional_license_legacy_price_in_cents": 173900 }, "50": { - "price_in_cents": 14495000, + "price_in_cents": 13045000, "additional_license_legacy_price_in_cents": 159400 } }, "NOK": { "2": { - "price_in_cents": 289800, + "price_in_cents": 260800, "additional_license_legacy_price_in_cents": 101400 }, "3": { - "price_in_cents": 434700, + "price_in_cents": 391200, "additional_license_legacy_price_in_cents": 101400 }, "4": { - "price_in_cents": 579600, + "price_in_cents": 521600, "additional_license_legacy_price_in_cents": 101400 }, "5": { - "price_in_cents": 724500, + "price_in_cents": 652000, "additional_license_legacy_price_in_cents": 101400 }, "10": { - "price_in_cents": 1449000, + "price_in_cents": 1304000, "additional_license_legacy_price_in_cents": 94100 }, "20": { - "price_in_cents": 2898000, + "price_in_cents": 2608000, "additional_license_legacy_price_in_cents": 86900 }, "50": { - "price_in_cents": 7245000, + "price_in_cents": 6520000, "additional_license_legacy_price_in_cents": 79600 } }, "NZD": { "2": { - "price_in_cents": 47800, + "price_in_cents": 43000, "additional_license_legacy_price_in_cents": 16700 }, "3": { - "price_in_cents": 71700, 
+ "price_in_cents": 64500, "additional_license_legacy_price_in_cents": 16700 }, "4": { - "price_in_cents": 95600, + "price_in_cents": 86000, "additional_license_legacy_price_in_cents": 16700 }, "5": { - "price_in_cents": 119500, + "price_in_cents": 107500, "additional_license_legacy_price_in_cents": 16700 }, "10": { - "price_in_cents": 239000, + "price_in_cents": 215000, "additional_license_legacy_price_in_cents": 15500 }, "20": { - "price_in_cents": 478000, + "price_in_cents": 430000, "additional_license_legacy_price_in_cents": 14300 }, "50": { - "price_in_cents": 1195000, + "price_in_cents": 1075000, "additional_license_legacy_price_in_cents": 13100 } }, "PEN": { "2": { - "price_in_cents": 91800, + "price_in_cents": 82600, "additional_license_legacy_price_in_cents": 32100 }, "3": { - "price_in_cents": 137700, + "price_in_cents": 123900, "additional_license_legacy_price_in_cents": 32100 }, "4": { - "price_in_cents": 183600, + "price_in_cents": 165200, "additional_license_legacy_price_in_cents": 32100 }, "5": { - "price_in_cents": 229500, + "price_in_cents": 206500, "additional_license_legacy_price_in_cents": 32100 }, "10": { - "price_in_cents": 459000, + "price_in_cents": 413000, "additional_license_legacy_price_in_cents": 29800 }, "20": { - "price_in_cents": 918000, + "price_in_cents": 826000, "additional_license_legacy_price_in_cents": 27500 }, "50": { - "price_in_cents": 2295000, + "price_in_cents": 2065000, "additional_license_legacy_price_in_cents": 25200 } }, "SEK": { "2": { - "price_in_cents": 289800, + "price_in_cents": 260800, "additional_license_legacy_price_in_cents": 101400 }, "3": { - "price_in_cents": 434700, + "price_in_cents": 391200, "additional_license_legacy_price_in_cents": 101400 }, "4": { - "price_in_cents": 579600, + "price_in_cents": 521600, "additional_license_legacy_price_in_cents": 101400 }, "5": { - "price_in_cents": 724500, + "price_in_cents": 652000, "additional_license_legacy_price_in_cents": 101400 }, "10": { - "price_in_cents": 1449000, + "price_in_cents": 1304000, "additional_license_legacy_price_in_cents": 94100 }, "20": { - "price_in_cents": 2898000, + "price_in_cents": 2608000, "additional_license_legacy_price_in_cents": 86900 }, "50": { - "price_in_cents": 7245000, + "price_in_cents": 6520000, "additional_license_legacy_price_in_cents": 79600 } }, "SGD": { "2": { - "price_in_cents": 51800, + "price_in_cents": 46600, "additional_license_legacy_price_in_cents": 18100 }, "3": { - "price_in_cents": 77700, + "price_in_cents": 69900, "additional_license_legacy_price_in_cents": 18100 }, "4": { - "price_in_cents": 103600, + "price_in_cents": 93200, "additional_license_legacy_price_in_cents": 18100 }, "5": { - "price_in_cents": 129500, + "price_in_cents": 116500, "additional_license_legacy_price_in_cents": 18100 }, "10": { - "price_in_cents": 259000, + "price_in_cents": 233000, "additional_license_legacy_price_in_cents": 16800 }, "20": { - "price_in_cents": 518000, + "price_in_cents": 466000, "additional_license_legacy_price_in_cents": 15500 }, "50": { - "price_in_cents": 1295000, + "price_in_cents": 1165000, "additional_license_legacy_price_in_cents": 14200 } }, "USD": { "2": { - "price_in_cents": 39800, + "price_in_cents": 35800, "additional_license_legacy_price_in_cents": 13900 }, "3": { - "price_in_cents": 59700, + "price_in_cents": 53700, "additional_license_legacy_price_in_cents": 13900 }, "4": { - "price_in_cents": 79600, + "price_in_cents": 71600, "additional_license_legacy_price_in_cents": 13900 }, "5": { - "price_in_cents": 99500, + "price_in_cents": 
89500, "additional_license_legacy_price_in_cents": 13900 }, "10": { - "price_in_cents": 199000, + "price_in_cents": 179000, "additional_license_legacy_price_in_cents": 12900 }, "20": { - "price_in_cents": 398000, + "price_in_cents": 358000, "additional_license_legacy_price_in_cents": 11900 }, "50": { - "price_in_cents": 995000, + "price_in_cents": 895000, "additional_license_legacy_price_in_cents": 10900 } } diff --git a/services/web/app/views/_customer_io.pug b/services/web/app/views/_customer_io.pug new file mode 100644 index 0000000000..81d75f7d7f --- /dev/null +++ b/services/web/app/views/_customer_io.pug @@ -0,0 +1,26 @@ +if(customerIoEnabled && ExposedSettings.cioWriteKey && ExposedSettings.cioSiteId) + script(type="text/javascript", id="cio-loader", nonce=scriptNonce, data-cio-write-key=ExposedSettings.cioWriteKey, data-cio-site-id=ExposedSettings.cioSiteId, data-session-analytics-id=getSessionAnalyticsId(), data-user-id=getLoggedInUserId()). + var cioSettings = document.querySelector('#cio-loader').dataset; + var analyticsId = cioSettings.sessionAnalyticsId; + var siteId = cioSettings.cioSiteId; + var writeKey = cioSettings.cioWriteKey; + var userId = cioSettings.userId; + + !function(){var i="cioanalytics", analytics=(window[i]=window[i]||[]);if(!analytics.initialize)if(analytics.invoked)window.console&&console.error&&console.error("Snippet included twice.");else{analytics.invoked=!0;analytics.methods=["trackSubmit","trackClick","trackLink","trackForm","pageview","identify","reset","group","track","ready","alias","debug","page","once","off","on","addSourceMiddleware","addIntegrationMiddleware","setAnonymousId","addDestinationMiddleware"];analytics.factory=function(e){return function(){var t=Array.prototype.slice.call(arguments);t.unshift(e);analytics.push(t);return analytics}};for(var e=0;e
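The template above only injects the Customer.io loader when `customerIoEnabled` is set in the render context and both values are exposed via `ExposedSettings`. A minimal sketch of the corresponding settings shape read by the new `cioWriteKey`/`cioSiteId` fields in `ExpressLocals.js`; the environment variable names are placeholders, not part of this change:

```js
// Illustrative sketch only: settings shape consumed by the new
// cioWriteKey/cioSiteId fields in ExposedSettings.
module.exports = {
  // ...existing web settings...
  analytics: {
    cio: {
      writeKey: process.env.CIO_WRITE_KEY, // placeholder variable name
      siteId: process.env.CIO_SITE_ID, // placeholder variable name
    },
  },
}
```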